entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "('#disqus-on').remove()\n )\n\n # console.log \"MICK\"\n # $('#first-page-background').css(\"position\"",
"end": 2287,
"score": 0.9562557935714722,
"start": 2283,
"tag": "NAME",
"value": "MICK"
}
] | assets/_coffeescript/deadrooster.js.coffee | dirtyhenry/blog-deadrooster-org | 0 | # This is all the JavaScript required for the DeadRooster blog
jQuery ->
# Replace Spotify div by new content
$('.dr-playlist').each(() ->
# Create the button
button = $(document.createElement('button')).addClass('btn').addClass('btn-success').addClass('btn-lg')
button.attr('dr-spotify-id', $(this).attr('dr-spotify-id'))
button.attr('dr-spotify-user', $(this).attr('dr-spotify-user'))
button.attr('dr-id-target', $(this).attr('id'))
button.html "Charger le player Spotify"
# Create the link
link = $(document.createElement('a'))
link.attr("href", "http://open.spotify.com/user/" + $(this).attr('dr-spotify-user') + "/playlist/" + $(this).attr('dr-spotify-id'))
link.html "écouter sur Spotify"
# Create the span
span = $(document.createElement('span'))
span.append " ou "
span.append link
$(this).empty().append(button).append(span)
)
$('a').each(() ->
title = $(this).attr("title")
prefix = "Tooltip "
if title && (title.indexOf(prefix) == 0)
newTitle = title.substring(prefix.length)
$(this).attr("data-toggle", "tooltip").attr("data-placement", "top").attr("title", newTitle)
$(this).tooltip()
)
# On clicks on buttons with an attribute "dr-spotify-id", replace the content of the element identified
# by "dr-id-target" with a Spotify iframe
$(document).on('click', 'button[dr-spotify-id]', () ->
spotifyId = $(this).attr("dr-spotify-id")
spotifyUser = $(this).attr("dr-spotify-user")
ga('send', 'event', 'spotify-button', 'click-load-iframe', spotifyId);
spotifyIframe = $(document.createElement('iframe'))
spotifyIframe.attr('src', 'https://embed.spotify.com/?uri=spotify:user:' + spotifyUser + ':playlist:' + spotifyId)
spotifyIframe.attr('width', 300).attr('height', 380).attr('frameborder', 0).attr('allowtransparency', "true")
target = $("#" + $(this).attr("dr-id-target"))
target.empty().append(spotifyIframe)
)
$(document).on('click', '#disqus-on', () ->
$('#disqus-comments').css('display', 'block')
$('#disqus-on').remove()
)
# console.log "MICK"
# $('#first-page-background').css("position", "absolute")
# $('#first-page-background').css("top", 54)
# $('#first-page-background').css("z-index", -1)
# $('#first-page-background').css("left", 0)
# $('#first-page-background').css("width", $(window).width())
# $('#first-page-background').css("height", $(window).height() - 54)
# $('#first-page-background').css("background-color", "blue")
# $('#first-page-background').css("background-image", "url(/assets/images/neutral-milk-hotel-introduction.png)")
# $('#first-page-background').css("background-position", "50% 50%")
# $('#first-page-background').css("background-size", "cover")
# $('#first-page-background').css("background-repeat", "no-repeat")
| 937 | # This is all the JavaScript required for the DeadRooster blog
jQuery ->
# Replace Spotify div by new content
$('.dr-playlist').each(() ->
# Create the button
button = $(document.createElement('button')).addClass('btn').addClass('btn-success').addClass('btn-lg')
button.attr('dr-spotify-id', $(this).attr('dr-spotify-id'))
button.attr('dr-spotify-user', $(this).attr('dr-spotify-user'))
button.attr('dr-id-target', $(this).attr('id'))
button.html "Charger le player Spotify"
# Create the link
link = $(document.createElement('a'))
link.attr("href", "http://open.spotify.com/user/" + $(this).attr('dr-spotify-user') + "/playlist/" + $(this).attr('dr-spotify-id'))
link.html "écouter sur Spotify"
# Create the span
span = $(document.createElement('span'))
span.append " ou "
span.append link
$(this).empty().append(button).append(span)
)
$('a').each(() ->
title = $(this).attr("title")
prefix = "Tooltip "
if title && (title.indexOf(prefix) == 0)
newTitle = title.substring(prefix.length)
$(this).attr("data-toggle", "tooltip").attr("data-placement", "top").attr("title", newTitle)
$(this).tooltip()
)
# On clicks on buttons with an attribute "dr-spotify-id", replace the content of the element identified
# by "dr-id-target" with a Spotify iframe
$(document).on('click', 'button[dr-spotify-id]', () ->
spotifyId = $(this).attr("dr-spotify-id")
spotifyUser = $(this).attr("dr-spotify-user")
ga('send', 'event', 'spotify-button', 'click-load-iframe', spotifyId);
spotifyIframe = $(document.createElement('iframe'))
spotifyIframe.attr('src', 'https://embed.spotify.com/?uri=spotify:user:' + spotifyUser + ':playlist:' + spotifyId)
spotifyIframe.attr('width', 300).attr('height', 380).attr('frameborder', 0).attr('allowtransparency', "true")
target = $("#" + $(this).attr("dr-id-target"))
target.empty().append(spotifyIframe)
)
$(document).on('click', '#disqus-on', () ->
$('#disqus-comments').css('display', 'block')
$('#disqus-on').remove()
)
# console.log "<NAME>"
# $('#first-page-background').css("position", "absolute")
# $('#first-page-background').css("top", 54)
# $('#first-page-background').css("z-index", -1)
# $('#first-page-background').css("left", 0)
# $('#first-page-background').css("width", $(window).width())
# $('#first-page-background').css("height", $(window).height() - 54)
# $('#first-page-background').css("background-color", "blue")
# $('#first-page-background').css("background-image", "url(/assets/images/neutral-milk-hotel-introduction.png)")
# $('#first-page-background').css("background-position", "50% 50%")
# $('#first-page-background').css("background-size", "cover")
# $('#first-page-background').css("background-repeat", "no-repeat")
| true | # This is all the JavaScript required for the DeadRooster blog
jQuery ->
# Replace Spotify div by new content
$('.dr-playlist').each(() ->
# Create the button
button = $(document.createElement('button')).addClass('btn').addClass('btn-success').addClass('btn-lg')
button.attr('dr-spotify-id', $(this).attr('dr-spotify-id'))
button.attr('dr-spotify-user', $(this).attr('dr-spotify-user'))
button.attr('dr-id-target', $(this).attr('id'))
button.html "Charger le player Spotify"
# Create the link
link = $(document.createElement('a'))
link.attr("href", "http://open.spotify.com/user/" + $(this).attr('dr-spotify-user') + "/playlist/" + $(this).attr('dr-spotify-id'))
link.html "écouter sur Spotify"
# Create the span
span = $(document.createElement('span'))
span.append " ou "
span.append link
$(this).empty().append(button).append(span)
)
$('a').each(() ->
title = $(this).attr("title")
prefix = "Tooltip "
if title && (title.indexOf(prefix) == 0)
newTitle = title.substring(prefix.length)
$(this).attr("data-toggle", "tooltip").attr("data-placement", "top").attr("title", newTitle)
$(this).tooltip()
)
# On clicks on buttons with an attribute "dr-spotify-id", replace the content of the element identified
# by "dr-id-target" with a Spotify iframe
$(document).on('click', 'button[dr-spotify-id]', () ->
spotifyId = $(this).attr("dr-spotify-id")
spotifyUser = $(this).attr("dr-spotify-user")
ga('send', 'event', 'spotify-button', 'click-load-iframe', spotifyId);
spotifyIframe = $(document.createElement('iframe'))
spotifyIframe.attr('src', 'https://embed.spotify.com/?uri=spotify:user:' + spotifyUser + ':playlist:' + spotifyId)
spotifyIframe.attr('width', 300).attr('height', 380).attr('frameborder', 0).attr('allowtransparency', "true")
target = $("#" + $(this).attr("dr-id-target"))
target.empty().append(spotifyIframe)
)
$(document).on('click', '#disqus-on', () ->
$('#disqus-comments').css('display', 'block')
$('#disqus-on').remove()
)
# console.log "PI:NAME:<NAME>END_PI"
# $('#first-page-background').css("position", "absolute")
# $('#first-page-background').css("top", 54)
# $('#first-page-background').css("z-index", -1)
# $('#first-page-background').css("left", 0)
# $('#first-page-background').css("width", $(window).width())
# $('#first-page-background').css("height", $(window).height() - 54)
# $('#first-page-background').css("background-color", "blue")
# $('#first-page-background').css("background-image", "url(/assets/images/neutral-milk-hotel-introduction.png)")
# $('#first-page-background').css("background-position", "50% 50%")
# $('#first-page-background').css("background-size", "cover")
# $('#first-page-background').css("background-repeat", "no-repeat")
|
[
{
"context": "= false\n\n $scope.player_list = [\n {name: \"Anish George\", parish: \"Defence Colony\"},\n {name: \"Jibin ",
"end": 279,
"score": 0.9998750686645508,
"start": 267,
"tag": "NAME",
"value": "Anish George"
},
{
"context": "George\", parish: \"Defence Colony\... | js/directives/match.coffee | bassoGeorge/shuttle-up-2015 | 0 | angular.module 'shuttleApp.directives', []
.directive 'saMatch', () ->
restrict: 'E'
templateUrl: 'templates/directives/dir_match.html'
controller: ($scope, $log) ->
$scope.editting = true
$scope.played = false
$scope.player_list = [
{name: "Anish George", parish: "Defence Colony"},
{name: "Jibin George", parish: "Defence Colony"},
{name: "Vineet Topo", parish: "Defence Colony"},
{name: "Bhaskaran Vijayan", parish: "Sunlight Colony"},
{name: "Arushi someone", parish: "Sunlight Colony"},
{name: "Crazy other chick", parish: "Sunlight Colony"},
{name: "Abhilash Verghese", parish: "Okhla"},
{name: "Stuti crazy", parish: "Okhla"},
]
storeP1 = $scope.player_list[0]
if $scope.player_list.length > 1
storeP2 = $scope.player_list[1]
else storeP2 = null
$scope.reset = () ->
$scope.p1 = storeP1
$scope.p2 = storeP2
$scope.reset()
$scope.edit = (b) -> $scope.editting = Boolean(b)
$scope.submit = () ->
if $scope.matchForm.$valid
$log.info "Valid form, submitting" # Todo, submit function
storeP1 = $scope.p1
storeP2 = $scope.p2
$scope.editting = false
$scope.filtered = (player) ->
# $scope.player_list
# _.reject($scope.player_list, (obj) -> obj.name == name)
_.without($scope.player_list, player)
| 171799 | angular.module 'shuttleApp.directives', []
.directive 'saMatch', () ->
restrict: 'E'
templateUrl: 'templates/directives/dir_match.html'
controller: ($scope, $log) ->
$scope.editting = true
$scope.played = false
$scope.player_list = [
{name: "<NAME>", parish: "Defence Colony"},
{name: "<NAME>", parish: "Defence Colony"},
{name: "<NAME>", parish: "Defence Colony"},
{name: "<NAME>", parish: "Sunlight Colony"},
{name: "<NAME>", parish: "Sunlight Colony"},
{name: "<NAME> other chick", parish: "Sunlight Colony"},
{name: "<NAME>", parish: "Okhla"},
{name: "<NAME>", parish: "Okhla"},
]
storeP1 = $scope.player_list[0]
if $scope.player_list.length > 1
storeP2 = $scope.player_list[1]
else storeP2 = null
$scope.reset = () ->
$scope.p1 = storeP1
$scope.p2 = storeP2
$scope.reset()
$scope.edit = (b) -> $scope.editting = Boolean(b)
$scope.submit = () ->
if $scope.matchForm.$valid
$log.info "Valid form, submitting" # Todo, submit function
storeP1 = $scope.p1
storeP2 = $scope.p2
$scope.editting = false
$scope.filtered = (player) ->
# $scope.player_list
# _.reject($scope.player_list, (obj) -> obj.name == name)
_.without($scope.player_list, player)
| true | angular.module 'shuttleApp.directives', []
.directive 'saMatch', () ->
restrict: 'E'
templateUrl: 'templates/directives/dir_match.html'
controller: ($scope, $log) ->
$scope.editting = true
$scope.played = false
$scope.player_list = [
{name: "PI:NAME:<NAME>END_PI", parish: "Defence Colony"},
{name: "PI:NAME:<NAME>END_PI", parish: "Defence Colony"},
{name: "PI:NAME:<NAME>END_PI", parish: "Defence Colony"},
{name: "PI:NAME:<NAME>END_PI", parish: "Sunlight Colony"},
{name: "PI:NAME:<NAME>END_PI", parish: "Sunlight Colony"},
{name: "PI:NAME:<NAME>END_PI other chick", parish: "Sunlight Colony"},
{name: "PI:NAME:<NAME>END_PI", parish: "Okhla"},
{name: "PI:NAME:<NAME>END_PI", parish: "Okhla"},
]
storeP1 = $scope.player_list[0]
if $scope.player_list.length > 1
storeP2 = $scope.player_list[1]
else storeP2 = null
$scope.reset = () ->
$scope.p1 = storeP1
$scope.p2 = storeP2
$scope.reset()
$scope.edit = (b) -> $scope.editting = Boolean(b)
$scope.submit = () ->
if $scope.matchForm.$valid
$log.info "Valid form, submitting" # Todo, submit function
storeP1 = $scope.p1
storeP2 = $scope.p2
$scope.editting = false
$scope.filtered = (player) ->
# $scope.player_list
# _.reject($scope.player_list, (obj) -> obj.name == name)
_.without($scope.player_list, player)
|
[
{
"context": "s = [\n { \"id\": 1, \"text\": \"Tim Berners-Lee invented the Internet.\"},\n ",
"end": 550,
"score": 0.9996874928474426,
"start": 535,
"tag": "NAME",
"value": "Tim Berners-Lee"
}
] | mini-project-1/src/public/coffee/services/QuestionsService.coffee | codyseibert/random | 3 | # Used for retrieving the questions
angular.module('MINIAPP')
.factory('QuestionsService', [
'HttpStub',
(HttpStub) ->
theQuestions = null
ret =
# Questions are loaded from the backend
isQuestionsLoaded: () ->
return theQuestions != null
# Loads the questions from the backend (or should in the future)
loadQuestions: () ->
theQuestions = [
{ "id": 1, "text": "Tim Berners-Lee invented the Internet."},
{ "id": 2, "text": "Dogs are better than cats."},
{ "id": 3, "text": "Winter is coming."},
{ "id": 4, "text": "Internet Explorer is the most advanced browser on Earth."}
]
return HttpStub.createHttpStub(theQuestions)
# Grabs a question from the loaded questions
getQuestion: (pQuestionIndex) ->
return theQuestions[pQuestionIndex]
# Check if are on the last question
isLastQuestion: (pQuestionIndex) ->
return (pQuestionIndex >= theQuestions.length - 1)
return ret
]);
| 140759 | # Used for retrieving the questions
angular.module('MINIAPP')
.factory('QuestionsService', [
'HttpStub',
(HttpStub) ->
theQuestions = null
ret =
# Questions are loaded from the backend
isQuestionsLoaded: () ->
return theQuestions != null
# Loads the questions from the backend (or should in the future)
loadQuestions: () ->
theQuestions = [
{ "id": 1, "text": "<NAME> invented the Internet."},
{ "id": 2, "text": "Dogs are better than cats."},
{ "id": 3, "text": "Winter is coming."},
{ "id": 4, "text": "Internet Explorer is the most advanced browser on Earth."}
]
return HttpStub.createHttpStub(theQuestions)
# Grabs a question from the loaded questions
getQuestion: (pQuestionIndex) ->
return theQuestions[pQuestionIndex]
# Check if are on the last question
isLastQuestion: (pQuestionIndex) ->
return (pQuestionIndex >= theQuestions.length - 1)
return ret
]);
| true | # Used for retrieving the questions
angular.module('MINIAPP')
.factory('QuestionsService', [
'HttpStub',
(HttpStub) ->
theQuestions = null
ret =
# Questions are loaded from the backend
isQuestionsLoaded: () ->
return theQuestions != null
# Loads the questions from the backend (or should in the future)
loadQuestions: () ->
theQuestions = [
{ "id": 1, "text": "PI:NAME:<NAME>END_PI invented the Internet."},
{ "id": 2, "text": "Dogs are better than cats."},
{ "id": 3, "text": "Winter is coming."},
{ "id": 4, "text": "Internet Explorer is the most advanced browser on Earth."}
]
return HttpStub.createHttpStub(theQuestions)
# Grabs a question from the loaded questions
getQuestion: (pQuestionIndex) ->
return theQuestions[pQuestionIndex]
# Check if are on the last question
isLastQuestion: (pQuestionIndex) ->
return (pQuestionIndex >= theQuestions.length - 1)
return ret
]);
|
[
{
"context": "#################################\n\nstorage_key = 'ip_qq_1'\n\nservice = null\ncallbacks = []\n\nbinary_string_to",
"end": 3358,
"score": 0.9852441549301147,
"start": 3351,
"tag": "KEY",
"value": "ip_qq_1"
}
] | webterm/ip.coffee | iambus/WebTerm | 4 |
##################################################
# imports
##################################################
if module?.exports?
encoding = require '../encoding'
else
encoding = this.encoding
##################################################
# QQWry implementation
##################################################
class QQWry
# http://lumaqq.linuxsir.org/article/qqwry_format_detail.html
constructor: (@array) ->
@view = new DataView @array.buffer
@offset = 0
@start = @view.getUint32 0, true
@end = @view.getUint32 4, true
console.log "IP file size: #{@array.length}, index range: #{@start}, #{@end}"
validate: ->
0 < @start < @end < 0x2000000 and (@end - @start) % 7 == 0 and @end + 7 == @array.length
seek: (@offset) ->
parse_ip_string: (ip) ->
p4 = ((if n == '*' then 0 else parseInt(n)) for n in ip.split '.')
if p4.length != 4
throw new Error("Invalid IP address: #{ip}")
return (p4[0] * Math.pow(2, 24)) + (p4[1] * Math.pow(2, 16)) + (p4[2] * Math.pow(2, 8)) + p4[3] # don't use bitwise, which will overflow
unparse_ip_number: (n) ->
"#{0xff & n >> 24}.#{0xff & n >> 16}.#{0xff & n >> 8}.#{0xff & n}"
read_offset: ->
a = @array[@offset++]
b = @array[@offset++]
c = @array[@offset++]
return a | b << 8 | c << 16
read_string: ->
buffer = []
while c = @array[@offset++]
buffer.push c
return encoding.gbk_to_string(buffer)
read_ip_number: ->
@view.getUint32 @offset, true
read_ip_number_at: (@offset) ->
@read_ip_number()
read_ip_offset_at: (offset) ->
@seek offset + 4
@read_offset()
binary_search: (ip_number, start, end) ->
if (end - start) % 7 != 0
throw new Error("Invalid file? offset: [#{start}, #{end}]")
a = @read_ip_number_at start
if a == ip_number
return @read_ip_offset_at start
b = @read_ip_number_at end
if b == ip_number
return @read_ip_offset_at end
if end - start <= 7
return @read_ip_offset_at start
middle = start + Math.floor((end - start) / 14) * 7
c = @read_ip_number_at middle
if ip_number < c
return @binary_search ip_number, start, middle
else
return @binary_search ip_number, middle, end
read_address_at: (offset) ->
@seek offset + 4
n = @array[@offset]
if n == 1
@offset++
country_offset = @read_offset()
@seek country_offset
n = @array[@offset]
if n == 2
@offset++
@seek @read_offset()
country = @read_string()
@seek country_offset + 4
else
country = @read_string()
area = @read_area()
return [country, area]
else if n == 2
@offset++
country_offset = @read_offset()
@seek country_offset
country = @read_string()
@seek country_offset + 8
area = @read_area()
return [country, area]
else
@read_string()
read_area: ->
n = @array[@offset]
if n == 1
throw new Error("Not Implemented")
else if n == 2
@offset++
offset = @read_offset()
if offset == 0
throw new Error("Not Implemented")
else
@seek offset
@read_string()
else
@read_string()
lookup: (ip_string) ->
if ip_string.match /^(\d+\.){3}(\d+|\*)$/
@read_address_at @binary_search @parse_ip_string(ip_string), @start, @end
else
throw new Error("Invalid IP address: #{ip_string}")
##################################################
# storage, API, etc
##################################################
storage_key = 'ip_qq_1'
service = null
callbacks = []
binary_string_to_array = (s) ->
array = new Uint8Array s.length
for i in [0...s.length]
array[i] = s.charCodeAt i
array
delete_from_local_storage = ->
webterm.storage.remove storage_key
service = null
save_to_local_storage = (s) ->
webterm.storage.set storage_key, btoa s
load_from_local_storage = (callback) ->
webterm.storage.get storage_key, (s) ->
if s?
service = new QQWry binary_string_to_array atob s
callback? true
else
callback? false
load_from_file_system = (callback) ->
webterm.dialogs.file_open accepts: [extensions: ['dat']], format: 'binarystring', (s) ->
console.log 'IP file loaded'
console.log "binary string length: #{s.length}"
new_service = new QQWry binary_string_to_array s
if new_service.validate()
service = new_service
save_to_local_storage s
console.log 'IP service ready'
callback? true
else
console.error "May be an invalid IP file?"
callback? false
lookup_ip = (ip, callback) ->
if service?
if service
if callback?
callback service.lookup ip
else
service.lookup ip
else
throw new Error("IP service not ready")
else
throw new Error("IP service not installed")
##################################################
# imports
##################################################
if module?.exports?
module.exports =
QQWry: QQWry
else
webterm.ip =
load_from_file_system: load_from_file_system
load_from_local_storage: load_from_local_storage
is_service_installed: -> !! service
lookup: lookup_ip
load: load_from_local_storage
install: (callback) ->
load_from_file_system callback ? (successful) ->
if successful
webterm.status_bar.info "IP库加载成功"
else
webterm.status_bar.error html: "IP文件加载失败,请确认格式是否正确。点击<a eval='webterm.ip.install()'>这里</a>重新上传。"
uninstall: delete_from_local_storage
| 85262 |
##################################################
# imports
##################################################
if module?.exports?
encoding = require '../encoding'
else
encoding = this.encoding
##################################################
# QQWry implementation
##################################################
class QQWry
# http://lumaqq.linuxsir.org/article/qqwry_format_detail.html
constructor: (@array) ->
@view = new DataView @array.buffer
@offset = 0
@start = @view.getUint32 0, true
@end = @view.getUint32 4, true
console.log "IP file size: #{@array.length}, index range: #{@start}, #{@end}"
validate: ->
0 < @start < @end < 0x2000000 and (@end - @start) % 7 == 0 and @end + 7 == @array.length
seek: (@offset) ->
parse_ip_string: (ip) ->
p4 = ((if n == '*' then 0 else parseInt(n)) for n in ip.split '.')
if p4.length != 4
throw new Error("Invalid IP address: #{ip}")
return (p4[0] * Math.pow(2, 24)) + (p4[1] * Math.pow(2, 16)) + (p4[2] * Math.pow(2, 8)) + p4[3] # don't use bitwise, which will overflow
unparse_ip_number: (n) ->
"#{0xff & n >> 24}.#{0xff & n >> 16}.#{0xff & n >> 8}.#{0xff & n}"
read_offset: ->
a = @array[@offset++]
b = @array[@offset++]
c = @array[@offset++]
return a | b << 8 | c << 16
read_string: ->
buffer = []
while c = @array[@offset++]
buffer.push c
return encoding.gbk_to_string(buffer)
read_ip_number: ->
@view.getUint32 @offset, true
read_ip_number_at: (@offset) ->
@read_ip_number()
read_ip_offset_at: (offset) ->
@seek offset + 4
@read_offset()
binary_search: (ip_number, start, end) ->
if (end - start) % 7 != 0
throw new Error("Invalid file? offset: [#{start}, #{end}]")
a = @read_ip_number_at start
if a == ip_number
return @read_ip_offset_at start
b = @read_ip_number_at end
if b == ip_number
return @read_ip_offset_at end
if end - start <= 7
return @read_ip_offset_at start
middle = start + Math.floor((end - start) / 14) * 7
c = @read_ip_number_at middle
if ip_number < c
return @binary_search ip_number, start, middle
else
return @binary_search ip_number, middle, end
read_address_at: (offset) ->
@seek offset + 4
n = @array[@offset]
if n == 1
@offset++
country_offset = @read_offset()
@seek country_offset
n = @array[@offset]
if n == 2
@offset++
@seek @read_offset()
country = @read_string()
@seek country_offset + 4
else
country = @read_string()
area = @read_area()
return [country, area]
else if n == 2
@offset++
country_offset = @read_offset()
@seek country_offset
country = @read_string()
@seek country_offset + 8
area = @read_area()
return [country, area]
else
@read_string()
read_area: ->
n = @array[@offset]
if n == 1
throw new Error("Not Implemented")
else if n == 2
@offset++
offset = @read_offset()
if offset == 0
throw new Error("Not Implemented")
else
@seek offset
@read_string()
else
@read_string()
lookup: (ip_string) ->
if ip_string.match /^(\d+\.){3}(\d+|\*)$/
@read_address_at @binary_search @parse_ip_string(ip_string), @start, @end
else
throw new Error("Invalid IP address: #{ip_string}")
##################################################
# storage, API, etc
##################################################
storage_key = '<KEY>'
service = null
callbacks = []
binary_string_to_array = (s) ->
array = new Uint8Array s.length
for i in [0...s.length]
array[i] = s.charCodeAt i
array
delete_from_local_storage = ->
webterm.storage.remove storage_key
service = null
save_to_local_storage = (s) ->
webterm.storage.set storage_key, btoa s
load_from_local_storage = (callback) ->
webterm.storage.get storage_key, (s) ->
if s?
service = new QQWry binary_string_to_array atob s
callback? true
else
callback? false
load_from_file_system = (callback) ->
webterm.dialogs.file_open accepts: [extensions: ['dat']], format: 'binarystring', (s) ->
console.log 'IP file loaded'
console.log "binary string length: #{s.length}"
new_service = new QQWry binary_string_to_array s
if new_service.validate()
service = new_service
save_to_local_storage s
console.log 'IP service ready'
callback? true
else
console.error "May be an invalid IP file?"
callback? false
lookup_ip = (ip, callback) ->
if service?
if service
if callback?
callback service.lookup ip
else
service.lookup ip
else
throw new Error("IP service not ready")
else
throw new Error("IP service not installed")
##################################################
# imports
##################################################
if module?.exports?
module.exports =
QQWry: QQWry
else
webterm.ip =
load_from_file_system: load_from_file_system
load_from_local_storage: load_from_local_storage
is_service_installed: -> !! service
lookup: lookup_ip
load: load_from_local_storage
install: (callback) ->
load_from_file_system callback ? (successful) ->
if successful
webterm.status_bar.info "IP库加载成功"
else
webterm.status_bar.error html: "IP文件加载失败,请确认格式是否正确。点击<a eval='webterm.ip.install()'>这里</a>重新上传。"
uninstall: delete_from_local_storage
| true |
##################################################
# imports
##################################################
if module?.exports?
encoding = require '../encoding'
else
encoding = this.encoding
##################################################
# QQWry implementation
##################################################
class QQWry
# http://lumaqq.linuxsir.org/article/qqwry_format_detail.html
constructor: (@array) ->
@view = new DataView @array.buffer
@offset = 0
@start = @view.getUint32 0, true
@end = @view.getUint32 4, true
console.log "IP file size: #{@array.length}, index range: #{@start}, #{@end}"
validate: ->
0 < @start < @end < 0x2000000 and (@end - @start) % 7 == 0 and @end + 7 == @array.length
seek: (@offset) ->
parse_ip_string: (ip) ->
p4 = ((if n == '*' then 0 else parseInt(n)) for n in ip.split '.')
if p4.length != 4
throw new Error("Invalid IP address: #{ip}")
return (p4[0] * Math.pow(2, 24)) + (p4[1] * Math.pow(2, 16)) + (p4[2] * Math.pow(2, 8)) + p4[3] # don't use bitwise, which will overflow
unparse_ip_number: (n) ->
"#{0xff & n >> 24}.#{0xff & n >> 16}.#{0xff & n >> 8}.#{0xff & n}"
read_offset: ->
a = @array[@offset++]
b = @array[@offset++]
c = @array[@offset++]
return a | b << 8 | c << 16
read_string: ->
buffer = []
while c = @array[@offset++]
buffer.push c
return encoding.gbk_to_string(buffer)
read_ip_number: ->
@view.getUint32 @offset, true
read_ip_number_at: (@offset) ->
@read_ip_number()
read_ip_offset_at: (offset) ->
@seek offset + 4
@read_offset()
binary_search: (ip_number, start, end) ->
if (end - start) % 7 != 0
throw new Error("Invalid file? offset: [#{start}, #{end}]")
a = @read_ip_number_at start
if a == ip_number
return @read_ip_offset_at start
b = @read_ip_number_at end
if b == ip_number
return @read_ip_offset_at end
if end - start <= 7
return @read_ip_offset_at start
middle = start + Math.floor((end - start) / 14) * 7
c = @read_ip_number_at middle
if ip_number < c
return @binary_search ip_number, start, middle
else
return @binary_search ip_number, middle, end
read_address_at: (offset) ->
@seek offset + 4
n = @array[@offset]
if n == 1
@offset++
country_offset = @read_offset()
@seek country_offset
n = @array[@offset]
if n == 2
@offset++
@seek @read_offset()
country = @read_string()
@seek country_offset + 4
else
country = @read_string()
area = @read_area()
return [country, area]
else if n == 2
@offset++
country_offset = @read_offset()
@seek country_offset
country = @read_string()
@seek country_offset + 8
area = @read_area()
return [country, area]
else
@read_string()
read_area: ->
n = @array[@offset]
if n == 1
throw new Error("Not Implemented")
else if n == 2
@offset++
offset = @read_offset()
if offset == 0
throw new Error("Not Implemented")
else
@seek offset
@read_string()
else
@read_string()
lookup: (ip_string) ->
if ip_string.match /^(\d+\.){3}(\d+|\*)$/
@read_address_at @binary_search @parse_ip_string(ip_string), @start, @end
else
throw new Error("Invalid IP address: #{ip_string}")
##################################################
# storage, API, etc
##################################################
storage_key = 'PI:KEY:<KEY>END_PI'
service = null
callbacks = []
binary_string_to_array = (s) ->
array = new Uint8Array s.length
for i in [0...s.length]
array[i] = s.charCodeAt i
array
delete_from_local_storage = ->
webterm.storage.remove storage_key
service = null
save_to_local_storage = (s) ->
webterm.storage.set storage_key, btoa s
load_from_local_storage = (callback) ->
webterm.storage.get storage_key, (s) ->
if s?
service = new QQWry binary_string_to_array atob s
callback? true
else
callback? false
load_from_file_system = (callback) ->
webterm.dialogs.file_open accepts: [extensions: ['dat']], format: 'binarystring', (s) ->
console.log 'IP file loaded'
console.log "binary string length: #{s.length}"
new_service = new QQWry binary_string_to_array s
if new_service.validate()
service = new_service
save_to_local_storage s
console.log 'IP service ready'
callback? true
else
console.error "May be an invalid IP file?"
callback? false
lookup_ip = (ip, callback) ->
if service?
if service
if callback?
callback service.lookup ip
else
service.lookup ip
else
throw new Error("IP service not ready")
else
throw new Error("IP service not installed")
##################################################
# imports
##################################################
if module?.exports?
module.exports =
QQWry: QQWry
else
webterm.ip =
load_from_file_system: load_from_file_system
load_from_local_storage: load_from_local_storage
is_service_installed: -> !! service
lookup: lookup_ip
load: load_from_local_storage
install: (callback) ->
load_from_file_system callback ? (successful) ->
if successful
webterm.status_bar.info "IP库加载成功"
else
webterm.status_bar.error html: "IP文件加载失败,请确认格式是否正确。点击<a eval='webterm.ip.install()'>这里</a>重新上传。"
uninstall: delete_from_local_storage
|
[
{
"context": " pkg.version %>\n * https://github.com/dfournier/plasticine\n *\n * Copyrigh",
"end": 3020,
"score": 0.8997528553009033,
"start": 3011,
"tag": "USERNAME",
"value": "dfournier"
},
{
"context": "icine\n *\n * Cop... | Gruntfile.coffee | davidfou/plasticine | 1 | "use strict"
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
# Metadata.
pkg: grunt.file.readJSON("package.json")
dir:
source : 'source/'
bower : 'components/'
tmp : '.tmp/'
# Task configuration.
clean:
tmp : ["<%= dir.tmp %>"]
dist : ["dist"]
copy:
source:
files: [
expand: true
cwd: "<%= dir.source %>"
src: ['**', '!**/*.coffee']
dest: "<%= dir.tmp %>"
]
components:
files: [
expand: true
cwd: "<%= dir.bower %>"
src: ['**']
dest: "<%= dir.tmp %><%= dir.bower %>"
]
coffee:
compile:
options:
bare : true
sourceMap : false
expand : true
cwd : "<%= dir.source %>"
src : ['**/*.coffee']
dest : "<%= dir.tmp %>"
ext : '.js'
amdwrap:
compile:
expand : true
cwd : "<%= dir.tmp %>"
src : ['app/**/*.js', 'test/spec/**/*.js']
dest : "<%= dir.tmp %>"
wrap:
dist:
expand: true
cwd: "dist/"
src: ["**"]
dest: "dist/"
options:
wrapper: [
"""
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['plasticine'], factory);
} else {
// Browser globals
root.Plasticine = factory(root);
}
}(this, function (global) {
"""
"""
return require('plasticine');
}));
"""]
preprocess:
javascript:
options:
inline: true
src: ["<%= dir.tmp %>app/**/*.js"]
watch:
options:
livereload: true
spawn: false
cwd : "<%= dir.source %>"
coffeeFileModified:
files: "**/*.coffee"
tasks: ["coffee", "amdwrap:compile", "mocha"]
options:
event: ['changed']
coffeeFileAdded:
files: "**/*.coffee"
tasks: ["coffee", "amdwrap:compile", "mocha"]
options:
event: ['added']
coffeeFileDeleted:
files: "**/*.coffee"
tasks: ["clean:tmp", "coffee", "mocha"]
options:
event: ['deleted']
requirejs:
compile:
options:
mainConfigFile: "<%= dir.tmp %>app/main.js"
out: "dist/plasticine.js"
optimize: 'none'
cjsTranslate: true
baseUrl: '<%= dir.tmp %>app'
paths:
requireLib: '../components/almond/almond'
include: ['requireLib']
usebanner:
dist:
options:
position: 'top'
linebreak: true
banner:
"""
/*!
* plasticine JavaScript Library <%= pkg.version %>
* https://github.com/dfournier/plasticine
*
* Copyright 2014 David Fournier <fr.david.fournier@gmail.com>
* Released under the MIT license
* https://github.com/dfournier/plasticine/blob/master/LICENSE-MIT
*
* Date: <%= grunt.template.today() %>
*/
"""
files:
src: "dist/plasticine.js"
grunt.task.loadTasks 'grunt_tasks'
grunt.loadNpmTasks "grunt-contrib-clean"
grunt.loadNpmTasks "grunt-mocha"
grunt.loadNpmTasks "grunt-contrib-watch"
grunt.loadNpmTasks "grunt-contrib-connect"
grunt.loadNpmTasks "grunt-contrib-copy"
grunt.loadNpmTasks "grunt-contrib-coffee"
grunt.loadNpmTasks "grunt-amd-wrap"
grunt.loadNpmTasks "grunt-renaming-wrap"
grunt.loadNpmTasks 'grunt-preprocess'
grunt.loadNpmTasks "grunt-contrib-requirejs"
grunt.loadNpmTasks "grunt-banner"
grunt.event.on 'watch', (action, filepath, target) ->
coffee_files = []
compile_config = ->
coffee_files.push 'test/config.coffee'
coffee_task = 'coffee.compile'
root_path = grunt.config.get("#{coffee_task}.cwd")
relative_path = filepath.replace(new RegExp("^#{root_path}"), '')
ext = grunt.config.get("#{coffee_task}.ext")
relative_compiled_path = relative_path.replace(/.coffee$/, ext)
compiled_file = grunt.config.get('dir.tmp') + relative_compiled_path
if target in ['coffeeFileModified', 'coffeeFileAdded']
coffee_files.push relative_path
grunt.config("amdwrap.compile.src", relative_compiled_path)
# recompile test/config.coffee if a file is added or deleted in test/spec folder
if action in ['deleted', 'added'] and (/^test\/spec\//).test relative_path
compile_config()
if target is 'coffeeFileDeleted'
grunt.config('clean.tmp', compiled_file)
compile_config() if (/^test\/spec\//).test relative_path
grunt.config("#{coffee_task}.src", coffee_files)
grunt.registerTask "compileTest", ["amdwrap:compile"]
grunt.registerTask "default", ["test"]
grunt.registerTask "compile", ["clean:tmp", "coffee", "copy", "preprocess"]
grunt.registerTask "build", ["clean:dist", "compile", "requirejs", "wrap:dist", "usebanner"]
grunt.registerTask "start", ["compile", "compileTest", "connect:development", "watch"]
grunt.registerTask "test", ["compile", "compileTest", "mocha"]
| 168307 | "use strict"
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
# Metadata.
pkg: grunt.file.readJSON("package.json")
dir:
source : 'source/'
bower : 'components/'
tmp : '.tmp/'
# Task configuration.
clean:
tmp : ["<%= dir.tmp %>"]
dist : ["dist"]
copy:
source:
files: [
expand: true
cwd: "<%= dir.source %>"
src: ['**', '!**/*.coffee']
dest: "<%= dir.tmp %>"
]
components:
files: [
expand: true
cwd: "<%= dir.bower %>"
src: ['**']
dest: "<%= dir.tmp %><%= dir.bower %>"
]
coffee:
compile:
options:
bare : true
sourceMap : false
expand : true
cwd : "<%= dir.source %>"
src : ['**/*.coffee']
dest : "<%= dir.tmp %>"
ext : '.js'
amdwrap:
compile:
expand : true
cwd : "<%= dir.tmp %>"
src : ['app/**/*.js', 'test/spec/**/*.js']
dest : "<%= dir.tmp %>"
wrap:
dist:
expand: true
cwd: "dist/"
src: ["**"]
dest: "dist/"
options:
wrapper: [
"""
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['plasticine'], factory);
} else {
// Browser globals
root.Plasticine = factory(root);
}
}(this, function (global) {
"""
"""
return require('plasticine');
}));
"""]
preprocess:
javascript:
options:
inline: true
src: ["<%= dir.tmp %>app/**/*.js"]
watch:
options:
livereload: true
spawn: false
cwd : "<%= dir.source %>"
coffeeFileModified:
files: "**/*.coffee"
tasks: ["coffee", "amdwrap:compile", "mocha"]
options:
event: ['changed']
coffeeFileAdded:
files: "**/*.coffee"
tasks: ["coffee", "amdwrap:compile", "mocha"]
options:
event: ['added']
coffeeFileDeleted:
files: "**/*.coffee"
tasks: ["clean:tmp", "coffee", "mocha"]
options:
event: ['deleted']
requirejs:
compile:
options:
mainConfigFile: "<%= dir.tmp %>app/main.js"
out: "dist/plasticine.js"
optimize: 'none'
cjsTranslate: true
baseUrl: '<%= dir.tmp %>app'
paths:
requireLib: '../components/almond/almond'
include: ['requireLib']
usebanner:
dist:
options:
position: 'top'
linebreak: true
banner:
"""
/*!
* plasticine JavaScript Library <%= pkg.version %>
* https://github.com/dfournier/plasticine
*
* Copyright 2014 <NAME> <<EMAIL>>
* Released under the MIT license
* https://github.com/dfournier/plasticine/blob/master/LICENSE-MIT
*
* Date: <%= grunt.template.today() %>
*/
"""
files:
src: "dist/plasticine.js"
grunt.task.loadTasks 'grunt_tasks'
grunt.loadNpmTasks "grunt-contrib-clean"
grunt.loadNpmTasks "grunt-mocha"
grunt.loadNpmTasks "grunt-contrib-watch"
grunt.loadNpmTasks "grunt-contrib-connect"
grunt.loadNpmTasks "grunt-contrib-copy"
grunt.loadNpmTasks "grunt-contrib-coffee"
grunt.loadNpmTasks "grunt-amd-wrap"
grunt.loadNpmTasks "grunt-renaming-wrap"
grunt.loadNpmTasks 'grunt-preprocess'
grunt.loadNpmTasks "grunt-contrib-requirejs"
grunt.loadNpmTasks "grunt-banner"
grunt.event.on 'watch', (action, filepath, target) ->
coffee_files = []
compile_config = ->
coffee_files.push 'test/config.coffee'
coffee_task = 'coffee.compile'
root_path = grunt.config.get("#{coffee_task}.cwd")
relative_path = filepath.replace(new RegExp("^#{root_path}"), '')
ext = grunt.config.get("#{coffee_task}.ext")
relative_compiled_path = relative_path.replace(/.coffee$/, ext)
compiled_file = grunt.config.get('dir.tmp') + relative_compiled_path
if target in ['coffeeFileModified', 'coffeeFileAdded']
coffee_files.push relative_path
grunt.config("amdwrap.compile.src", relative_compiled_path)
# recompile test/config.coffee if a file is added or deleted in test/spec folder
if action in ['deleted', 'added'] and (/^test\/spec\//).test relative_path
compile_config()
if target is 'coffeeFileDeleted'
grunt.config('clean.tmp', compiled_file)
compile_config() if (/^test\/spec\//).test relative_path
grunt.config("#{coffee_task}.src", coffee_files)
grunt.registerTask "compileTest", ["amdwrap:compile"]
grunt.registerTask "default", ["test"]
grunt.registerTask "compile", ["clean:tmp", "coffee", "copy", "preprocess"]
grunt.registerTask "build", ["clean:dist", "compile", "requirejs", "wrap:dist", "usebanner"]
grunt.registerTask "start", ["compile", "compileTest", "connect:development", "watch"]
grunt.registerTask "test", ["compile", "compileTest", "mocha"]
| true | "use strict"
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
# Metadata.
pkg: grunt.file.readJSON("package.json")
dir:
source : 'source/'
bower : 'components/'
tmp : '.tmp/'
# Task configuration.
clean:
tmp : ["<%= dir.tmp %>"]
dist : ["dist"]
copy:
source:
files: [
expand: true
cwd: "<%= dir.source %>"
src: ['**', '!**/*.coffee']
dest: "<%= dir.tmp %>"
]
components:
files: [
expand: true
cwd: "<%= dir.bower %>"
src: ['**']
dest: "<%= dir.tmp %><%= dir.bower %>"
]
coffee:
compile:
options:
bare : true
sourceMap : false
expand : true
cwd : "<%= dir.source %>"
src : ['**/*.coffee']
dest : "<%= dir.tmp %>"
ext : '.js'
amdwrap:
compile:
expand : true
cwd : "<%= dir.tmp %>"
src : ['app/**/*.js', 'test/spec/**/*.js']
dest : "<%= dir.tmp %>"
wrap:
dist:
expand: true
cwd: "dist/"
src: ["**"]
dest: "dist/"
options:
wrapper: [
"""
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['plasticine'], factory);
} else {
// Browser globals
root.Plasticine = factory(root);
}
}(this, function (global) {
"""
"""
return require('plasticine');
}));
"""]
preprocess:
javascript:
options:
inline: true
src: ["<%= dir.tmp %>app/**/*.js"]
watch:
options:
livereload: true
spawn: false
cwd : "<%= dir.source %>"
coffeeFileModified:
files: "**/*.coffee"
tasks: ["coffee", "amdwrap:compile", "mocha"]
options:
event: ['changed']
coffeeFileAdded:
files: "**/*.coffee"
tasks: ["coffee", "amdwrap:compile", "mocha"]
options:
event: ['added']
coffeeFileDeleted:
files: "**/*.coffee"
tasks: ["clean:tmp", "coffee", "mocha"]
options:
event: ['deleted']
requirejs:
compile:
options:
mainConfigFile: "<%= dir.tmp %>app/main.js"
out: "dist/plasticine.js"
optimize: 'none'
cjsTranslate: true
baseUrl: '<%= dir.tmp %>app'
paths:
requireLib: '../components/almond/almond'
include: ['requireLib']
usebanner:
dist:
options:
position: 'top'
linebreak: true
banner:
"""
/*!
* plasticine JavaScript Library <%= pkg.version %>
* https://github.com/dfournier/plasticine
*
* Copyright 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* Released under the MIT license
* https://github.com/dfournier/plasticine/blob/master/LICENSE-MIT
*
* Date: <%= grunt.template.today() %>
*/
"""
files:
src: "dist/plasticine.js"
grunt.task.loadTasks 'grunt_tasks'
grunt.loadNpmTasks "grunt-contrib-clean"
grunt.loadNpmTasks "grunt-mocha"
grunt.loadNpmTasks "grunt-contrib-watch"
grunt.loadNpmTasks "grunt-contrib-connect"
grunt.loadNpmTasks "grunt-contrib-copy"
grunt.loadNpmTasks "grunt-contrib-coffee"
grunt.loadNpmTasks "grunt-amd-wrap"
grunt.loadNpmTasks "grunt-renaming-wrap"
grunt.loadNpmTasks 'grunt-preprocess'
grunt.loadNpmTasks "grunt-contrib-requirejs"
grunt.loadNpmTasks "grunt-banner"
grunt.event.on 'watch', (action, filepath, target) ->
coffee_files = []
compile_config = ->
coffee_files.push 'test/config.coffee'
coffee_task = 'coffee.compile'
root_path = grunt.config.get("#{coffee_task}.cwd")
relative_path = filepath.replace(new RegExp("^#{root_path}"), '')
ext = grunt.config.get("#{coffee_task}.ext")
relative_compiled_path = relative_path.replace(/.coffee$/, ext)
compiled_file = grunt.config.get('dir.tmp') + relative_compiled_path
if target in ['coffeeFileModified', 'coffeeFileAdded']
coffee_files.push relative_path
grunt.config("amdwrap.compile.src", relative_compiled_path)
# recompile test/config.coffee if a file is added or deleted in test/spec folder
if action in ['deleted', 'added'] and (/^test\/spec\//).test relative_path
compile_config()
if target is 'coffeeFileDeleted'
grunt.config('clean.tmp', compiled_file)
compile_config() if (/^test\/spec\//).test relative_path
grunt.config("#{coffee_task}.src", coffee_files)
grunt.registerTask "compileTest", ["amdwrap:compile"]
grunt.registerTask "default", ["test"]
grunt.registerTask "compile", ["clean:tmp", "coffee", "copy", "preprocess"]
grunt.registerTask "build", ["clean:dist", "compile", "requirejs", "wrap:dist", "usebanner"]
grunt.registerTask "start", ["compile", "compileTest", "connect:development", "watch"]
grunt.registerTask "test", ["compile", "compileTest", "mocha"]
|
[
{
"context": "#\n# Code from [`groc/lib/utils`][1]\n#\n# @copyright Ian MacLeod and groc contributors\n#\n# [1]: https://github.com",
"end": 87,
"score": 0.9998384714126587,
"start": 76,
"tag": "NAME",
"value": "Ian MacLeod"
},
{
"context": "and groc contributors\n#\n# [1]: https://g... | lib/utils/processDocTags.coffee | vitkarpov/grock | 1 | ###
# # Process Doc Tags
#
# Code from [`groc/lib/utils`][1]
#
# @copyright Ian MacLeod and groc contributors
#
# [1]: https://github.com/nevir/groc/blob/b626e45ebf/lib/utils.coffee
###
Q = require 'q'
DOC_TAGS = require '../docTags'
humanize = require './humanize'
TAG_REGEX = /(?:^|\s)@(\w+)(?:\s+(.*))?/
TAG_VALUE_REGEX = /^(?:"(.*)"|'(.*)'|\{(.*)\}|(.*))$/
###
# ## Parse Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Object} segment
# @param {[String]} segment.comments
# @return {Object} The processed segment
###
parseDocTagsForSegment = (segment) ->
tags = []
currTag = {
name: 'description'
value: ''
}
tags.push currTag
tagSections = {}
for line in segment.comments when line?
if (match = line.match TAG_REGEX)?
currTag = {
name: match[1]
value: match[2] || ''
}
tags.push currTag
else
currTag.value += "\n#{line}"
for tag in tags
tag.value = tag.value.replace /^\n|\n$/g, ''
tagDefinition = DOC_TAGS[tag.name]
unless tagDefinition?
if tag.value.length is 0
tagDefinition = 'defaultNoValue'
else
tagDefinition = 'defaultHasValue'
if typeof tagDefinition is 'string'
tagDefinition = DOC_TAGS[tagDefinition]
tag.definition = tagDefinition
tag.section = tagDefinition.section
if tagDefinition.valuePrefix?
tag.value = tag.value.replace ///#{tagDefinition.valuePrefix?}\s+///, ''
if tagDefinition.parseValue?
try
tag.value = tagDefinition.parseValue tag.value
catch e
else if not /\n/.test tag.value
tag.value = tag.value.match(TAG_VALUE_REGEX)[1..].join('')
tagSections[tag.section] = [] unless tagSections[tag.section]?
tagSections[tag.section].push tag
segment.tags = tags
segment.tagSections = tagSections
return segment
###
# ## Markdown Doc Tags
# @description Transform each doc tag entry to markdown
# @param {Object} segment
# @param {Array} segment.tags List of doc tags
# @return {Object} The processed segment
###
markdownDocTagsForSegment = (segment) ->
for tag in segment.tags
if tag.definition.markdown?
if 'string' == typeof tag.definition.markdown
tag.markdown = tag.definition.markdown.replace /\{value\}/g, tag.value
else
try
tag.markdown = tag.definition.markdown(tag.value)
catch e
tag.markdown = tag.value
else
if tag.value.length > 0
tag.markdown = "#{tag.name} #{tag.value}"
else
tag.markdown = tag.name
return segment
###
# ## Render Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Object} segment
# @param {Object} segment.tagSections
# @return {Object} The processed segment
###
renderDocTagsForSegment = (segment) ->
sections = segment.tagSections
output = []
metaOutput = ''
accessClasses = 'doc-section'
accessClasses += " doc-section-#{tag.name}" for tag in sections.access if sections.access?
segment.accessClasses = accessClasses
firstPart = []
firstPart.push tag.markdown for tag in sections.access if sections.access?
firstPart.push tag.markdown for tag in sections.special if sections.special?
firstPart.push tag.markdown for tag in sections.type if sections.type?
metaOutput += "#{humanize.capitalize firstPart.join(' ')}"
if sections.flags? or sections.metadata?
secondPart = []
secondPart.push tag.markdown for tag in sections.flags if sections.flags?
secondPart.push tag.markdown for tag in sections.metadata if sections.metadata?
metaOutput += " #{humanize.joinSentence secondPart}"
output.push "<span class='doc-section-header'>\n#{metaOutput}\n</span>", "" if metaOutput isnt ''
output.push "#{tag.markdown}", "" for tag in sections.description if sections.description?
output.push "#{tag.markdown}", "" for tag in sections.todo if sections.todo?
if sections.params?
output.push 'Parameters:', ""
for tag in sections.params
output.push "#{tag.markdown}"
output.push ""
if sections.returns?
returns = for tag in sections.returns
humanize.capitalize(tag.markdown)
output.push returns.join('<br/>**and** ')
if sections.howto?
for tag in sections.howto
output.push "", "How-To:", "", humanize.gutterify(tag.markdown, 0)
if sections.example?
for tag in sections.example
output.push "", "Example:", "", humanize.gutterify(tag.markdown, 4)
segment.comments = output
return segment
###
# ## Export Methods for Segment Arrays
###
module.exports =
###
# @method Parse Doc Tags
# @description Parses comments of segment for doc tags. Adds `tags` and
# `tagSections` to each segment.
# @param {Array} segments `[{code, comments}]`
# @return {Promise} Resolves when segment comments have been processed
###
parseDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter (segment) ->
TAG_REGEX.test segment.comments.join('\n')
.forEach(parseDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
###
# @method Markdown Doc Tags
# @description Transform each doc tag entry to markdown
# @param {Array} segments `[{code, comments, tags}]`
# @return {Promise} Resolves when all tags have been processed
###
markdownDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter((segment) -> segment.tags?)
.forEach(markdownDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
###
# @method Render Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Array} segments `[{code, comments, tags, tagSections}]`
# @return {Promise} Resolves when all segments have been processed
###
renderDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter((segment) -> segment.tagSections?)
.forEach(renderDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
| 195138 | ###
# # Process Doc Tags
#
# Code from [`groc/lib/utils`][1]
#
# @copyright <NAME> and groc contributors
#
# [1]: https://github.com/nevir/groc/blob/b626e45ebf/lib/utils.coffee
###
Q = require 'q'
DOC_TAGS = require '../docTags'
humanize = require './humanize'
TAG_REGEX = /(?:^|\s)@(\w+)(?:\s+(.*))?/
TAG_VALUE_REGEX = /^(?:"(.*)"|'(.*)'|\{(.*)\}|(.*))$/
###
# ## Parse Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Object} segment
# @param {[String]} segment.comments
# @return {Object} The processed segment
###
parseDocTagsForSegment = (segment) ->
tags = []
currTag = {
name: 'description'
value: ''
}
tags.push currTag
tagSections = {}
for line in segment.comments when line?
if (match = line.match TAG_REGEX)?
currTag = {
name: match[1]
value: match[2] || ''
}
tags.push currTag
else
currTag.value += "\n#{line}"
for tag in tags
tag.value = tag.value.replace /^\n|\n$/g, ''
tagDefinition = DOC_TAGS[tag.name]
unless tagDefinition?
if tag.value.length is 0
tagDefinition = 'defaultNoValue'
else
tagDefinition = 'defaultHasValue'
if typeof tagDefinition is 'string'
tagDefinition = DOC_TAGS[tagDefinition]
tag.definition = tagDefinition
tag.section = tagDefinition.section
if tagDefinition.valuePrefix?
tag.value = tag.value.replace ///#{tagDefinition.valuePrefix?}\s+///, ''
if tagDefinition.parseValue?
try
tag.value = tagDefinition.parseValue tag.value
catch e
else if not /\n/.test tag.value
tag.value = tag.value.match(TAG_VALUE_REGEX)[1..].join('')
tagSections[tag.section] = [] unless tagSections[tag.section]?
tagSections[tag.section].push tag
segment.tags = tags
segment.tagSections = tagSections
return segment
###
# ## Markdown Doc Tags
# @description Transform each doc tag entry to markdown
# @param {Object} segment
# @param {Array} segment.tags List of doc tags
# @return {Object} The processed segment
###
markdownDocTagsForSegment = (segment) ->
for tag in segment.tags
if tag.definition.markdown?
if 'string' == typeof tag.definition.markdown
tag.markdown = tag.definition.markdown.replace /\{value\}/g, tag.value
else
try
tag.markdown = tag.definition.markdown(tag.value)
catch e
tag.markdown = tag.value
else
if tag.value.length > 0
tag.markdown = "#{tag.name} #{tag.value}"
else
tag.markdown = tag.name
return segment
###
# ## Render Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Object} segment
# @param {Object} segment.tagSections
# @return {Object} The processed segment
###
renderDocTagsForSegment = (segment) ->
sections = segment.tagSections
output = []
metaOutput = ''
accessClasses = 'doc-section'
accessClasses += " doc-section-#{tag.name}" for tag in sections.access if sections.access?
segment.accessClasses = accessClasses
firstPart = []
firstPart.push tag.markdown for tag in sections.access if sections.access?
firstPart.push tag.markdown for tag in sections.special if sections.special?
firstPart.push tag.markdown for tag in sections.type if sections.type?
metaOutput += "#{humanize.capitalize firstPart.join(' ')}"
if sections.flags? or sections.metadata?
secondPart = []
secondPart.push tag.markdown for tag in sections.flags if sections.flags?
secondPart.push tag.markdown for tag in sections.metadata if sections.metadata?
metaOutput += " #{humanize.joinSentence secondPart}"
output.push "<span class='doc-section-header'>\n#{metaOutput}\n</span>", "" if metaOutput isnt ''
output.push "#{tag.markdown}", "" for tag in sections.description if sections.description?
output.push "#{tag.markdown}", "" for tag in sections.todo if sections.todo?
if sections.params?
output.push 'Parameters:', ""
for tag in sections.params
output.push "#{tag.markdown}"
output.push ""
if sections.returns?
returns = for tag in sections.returns
humanize.capitalize(tag.markdown)
output.push returns.join('<br/>**and** ')
if sections.howto?
for tag in sections.howto
output.push "", "How-To:", "", humanize.gutterify(tag.markdown, 0)
if sections.example?
for tag in sections.example
output.push "", "Example:", "", humanize.gutterify(tag.markdown, 4)
segment.comments = output
return segment
###
# ## Export Methods for Segment Arrays
###
module.exports =
###
# @method Parse Doc Tags
# @description Parses comments of segment for doc tags. Adds `tags` and
# `tagSections` to each segment.
# @param {Array} segments `[{code, comments}]`
# @return {Promise} Resolves when segment comments have been processed
###
parseDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter (segment) ->
TAG_REGEX.test segment.comments.join('\n')
.forEach(parseDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
###
# @method Markdown Doc Tags
# @description Transform each doc tag entry to markdown
# @param {Array} segments `[{code, comments, tags}]`
# @return {Promise} Resolves when all tags have been processed
###
markdownDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter((segment) -> segment.tags?)
.forEach(markdownDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
###
# @method Render Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Array} segments `[{code, comments, tags, tagSections}]`
# @return {Promise} Resolves when all segments have been processed
###
renderDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter((segment) -> segment.tagSections?)
.forEach(renderDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
| true | ###
# # Process Doc Tags
#
# Code from [`groc/lib/utils`][1]
#
# @copyright PI:NAME:<NAME>END_PI and groc contributors
#
# [1]: https://github.com/nevir/groc/blob/b626e45ebf/lib/utils.coffee
###
Q = require 'q'
DOC_TAGS = require '../docTags'
humanize = require './humanize'
TAG_REGEX = /(?:^|\s)@(\w+)(?:\s+(.*))?/
TAG_VALUE_REGEX = /^(?:"(.*)"|'(.*)'|\{(.*)\}|(.*))$/
###
# ## Parse Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Object} segment
# @param {[String]} segment.comments
# @return {Object} The processed segment
###
parseDocTagsForSegment = (segment) ->
tags = []
currTag = {
name: 'description'
value: ''
}
tags.push currTag
tagSections = {}
for line in segment.comments when line?
if (match = line.match TAG_REGEX)?
currTag = {
name: match[1]
value: match[2] || ''
}
tags.push currTag
else
currTag.value += "\n#{line}"
for tag in tags
tag.value = tag.value.replace /^\n|\n$/g, ''
tagDefinition = DOC_TAGS[tag.name]
unless tagDefinition?
if tag.value.length is 0
tagDefinition = 'defaultNoValue'
else
tagDefinition = 'defaultHasValue'
if typeof tagDefinition is 'string'
tagDefinition = DOC_TAGS[tagDefinition]
tag.definition = tagDefinition
tag.section = tagDefinition.section
if tagDefinition.valuePrefix?
tag.value = tag.value.replace ///#{tagDefinition.valuePrefix?}\s+///, ''
if tagDefinition.parseValue?
try
tag.value = tagDefinition.parseValue tag.value
catch e
else if not /\n/.test tag.value
tag.value = tag.value.match(TAG_VALUE_REGEX)[1..].join('')
tagSections[tag.section] = [] unless tagSections[tag.section]?
tagSections[tag.section].push tag
segment.tags = tags
segment.tagSections = tagSections
return segment
###
# ## Markdown Doc Tags
# @description Transform each doc tag entry to markdown
# @param {Object} segment
# @param {Array} segment.tags List of doc tags
# @return {Object} The processed segment
###
markdownDocTagsForSegment = (segment) ->
for tag in segment.tags
if tag.definition.markdown?
if 'string' == typeof tag.definition.markdown
tag.markdown = tag.definition.markdown.replace /\{value\}/g, tag.value
else
try
tag.markdown = tag.definition.markdown(tag.value)
catch e
tag.markdown = tag.value
else
if tag.value.length > 0
tag.markdown = "#{tag.name} #{tag.value}"
else
tag.markdown = tag.name
return segment
###
# ## Render Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Object} segment
# @param {Object} segment.tagSections
# @return {Object} The processed segment
###
renderDocTagsForSegment = (segment) ->
sections = segment.tagSections
output = []
metaOutput = ''
accessClasses = 'doc-section'
accessClasses += " doc-section-#{tag.name}" for tag in sections.access if sections.access?
segment.accessClasses = accessClasses
firstPart = []
firstPart.push tag.markdown for tag in sections.access if sections.access?
firstPart.push tag.markdown for tag in sections.special if sections.special?
firstPart.push tag.markdown for tag in sections.type if sections.type?
metaOutput += "#{humanize.capitalize firstPart.join(' ')}"
if sections.flags? or sections.metadata?
secondPart = []
secondPart.push tag.markdown for tag in sections.flags if sections.flags?
secondPart.push tag.markdown for tag in sections.metadata if sections.metadata?
metaOutput += " #{humanize.joinSentence secondPart}"
output.push "<span class='doc-section-header'>\n#{metaOutput}\n</span>", "" if metaOutput isnt ''
output.push "#{tag.markdown}", "" for tag in sections.description if sections.description?
output.push "#{tag.markdown}", "" for tag in sections.todo if sections.todo?
if sections.params?
output.push 'Parameters:', ""
for tag in sections.params
output.push "#{tag.markdown}"
output.push ""
if sections.returns?
returns = for tag in sections.returns
humanize.capitalize(tag.markdown)
output.push returns.join('<br/>**and** ')
if sections.howto?
for tag in sections.howto
output.push "", "How-To:", "", humanize.gutterify(tag.markdown, 0)
if sections.example?
for tag in sections.example
output.push "", "Example:", "", humanize.gutterify(tag.markdown, 4)
segment.comments = output
return segment
###
# ## Export Methods for Segment Arrays
###
module.exports =
###
# @method Parse Doc Tags
# @description Parses comments of segment for doc tags. Adds `tags` and
# `tagSections` to each segment.
# @param {Array} segments `[{code, comments}]`
# @return {Promise} Resolves when segment comments have been processed
###
parseDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter (segment) ->
TAG_REGEX.test segment.comments.join('\n')
.forEach(parseDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
###
# @method Markdown Doc Tags
# @description Transform each doc tag entry to markdown
# @param {Array} segments `[{code, comments, tags}]`
# @return {Promise} Resolves when all tags have been processed
###
markdownDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter((segment) -> segment.tags?)
.forEach(markdownDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
###
# @method Render Doc Tags
# @description Combine Array of doc tags to HTML string
# @param {Array} segments `[{code, comments, tags, tagSections}]`
# @return {Promise} Resolves when all segments have been processed
###
renderDocTags: (segments) ->
deferred = Q.defer()
try
segments
.filter((segment) -> segment.tagSections?)
.forEach(renderDocTagsForSegment)
deferred.resolve(segments)
catch error
deferred.reject(error)
return deferred.promise
|
[
{
"context": "->\n assert.equal utils.capitalize('tiger'), 'Tiger'\n\n it 'undefined handling', ->\n assert.equal ",
"end": 271,
"score": 0.6417661905288696,
"start": 267,
"tag": "NAME",
"value": "iger"
},
{
"context": "defined\n\ndescribe 'splitName', ->\n expected = [ 'Don... | test/test.coffee | mullerzs/scapula-utils | 0 | _ = require 'underscore'
utils = require '../lib/scapula-utils'
assert = require 'assert'
# String ----------------------------------------------------------------------
describe 'capitalize', ->
it 'basic func', ->
assert.equal utils.capitalize('tiger'), 'Tiger'
it 'undefined handling', ->
assert.equal utils.capitalize(), undefined
describe 'splitName', ->
expected = [ 'Donnie', 'Yen' ]
it 'splits normal name', ->
assert.deepEqual utils.splitName('Donnie Yen'), expected
it 'splits sort name', ->
assert.deepEqual utils.splitName('Yen, Donnie'), expected
describe 'joinName', ->
it 'joins normal name parts', ->
assert.equal utils.joinName('Li Mu', 'Bai'), 'Li Mu Bai'
it 'joins sort name parts', ->
assert.equal utils.joinName('Scott', 'Adkins', sort: true),
'Adkins, Scott'
describe 'wrap', ->
it 'wraps string', ->
assert.equal utils.wrap('samba', [ '>', '<' ]), '>samba<'
describe 'extractKeywords', ->
it 'extracts keywords', ->
assert.deepEqual utils.extractKeywords(' pear " sour cherry"'),
[ 'pear', 'sour cherry' ]
it 'extracts keywords with marks', ->
expected =
'': [ 'french fries', 'computer' ]
car: [ 'honda', 'mazda' ]
fruit: [ 'pear', 'sour cherry' ]
assert.deepEqual expected, utils.extractKeywords \
'{pear} "french fries" [honda] {sour cherry} [mazda] computer',
'{}': 'fruit', '[]': 'car', '"': ''
describe 'html', ->
beforeEach ->
@testEncoded = '&<tag><br> &123><<br> '
@testDecoded = "&<tag>\n &123><\n"
describe 'encodeHtml', ->
it 'encodes html', ->
assert.equal utils.encodeHtml(@testDecoded), @testEncoded
it 'handles invalid input', ->
assert.equal utils.encodeHtml(), undefined
describe 'decodeHtml', ->
it 'decodes html', ->
assert.equal utils.decodeHtml(@testEncoded), @testDecoded.trim()
it 'handles invalid input', ->
assert.equal utils.decodeHtml(), undefined
describe 'quoteMeta', ->
it 'quotes meta chars', ->
assert.equal utils.quoteMeta('abc123.\\+*?[^]$()-{}|'),
'abc123\\.\\\\\\+\\*\\?\\[\\^\\]\\$\\(\\)\\-\\{\\}\\|'
it 'handles invalid input', ->
assert.equal utils.quoteMeta(), undefined
describe 'startMatch', ->
it 'matches starting chars', ->
samples = [
[ 'Honda', 'ho' ]
[ 'sour cherry', 'so' ]
[ ' pear', 'pea' ]
[ 'baNanA', 'banana' ]
[ 'x', '' ]
]
for sample in samples
assert utils.startMatch sample[0], sample[1]
inv_samples = [
[ 'Honda', 'do' ]
[ 'sour cherry', 'ch' ]
[ 'pear', 'ear' ]
]
for inv_sample in inv_samples
assert !utils.startMatch inv_sample[0], inv_sample[1]
it 'handles invalid input', ->
assert !utils.startMatch()
assert !utils.startMatch 'honda'
assert !utils.startMatch null, 'ho'
# Checkers --------------------------------------------------------------------
describe 'chkEmail', ->
it 'passes valid emails', ->
for str in [ 'jetli123@hero.org',
'HiroyukiSanada@Ninja86.jp',
'van-damme.dolph_lundgren@some.uni-soldier.com' ]
assert utils.chkEmail str
it 'blocks invalid emails', ->
for str in [ 'hello', 'hello@', 'hello@baby', '@baby' ]
assert !utils.chkEmail str
describe 'chkIP', ->
it 'passes valid IPs', ->
for str in [ '192.168.10.124', '10.2.4.1', '250.0.255.100' ]
assert utils.chkIP str
it 'blocks invalid IPs', ->
for str in [ '192', '192.168.10', 'asdf', '123.256.1.1' ]
assert !utils.chkIP str
describe 'chkHost', ->
it 'passes valid host names', ->
for str in [ 'test'
'google.com'
'rambo-online.2rockets.org'
[11 .. 31].join('-')
[7 .. 71].join('.a') ]
assert utils.chkHost str
it 'blocks invalid host names', ->
for str in [ 'jetli123@hero.org'
'@ninja.edu'
'!hello'
'hello-'
[11 .. 32].join('-')
[8 .. 72].join('.a') ]
assert !utils.chkHost str
# Object / "class" ------------------------------------------------------------
describe 'extendMethod', ->
it 'extends method', ->
obj1 = addProp: (par = {}) ->
par.y = 2
par
obj2 = addProp: (par = {}) ->
par.z = 3
par
utils.extendMethod obj2, obj1, 'addProp'
assert.deepEqual obj2.addProp(x: 1), x: 1, y: 2, z: 3
# mixin props, Class: same-named methods are chained rather than overridden —
# both slice implementations run (40 / 4 / 2 = 5), then peel drops size to 4;
# mixin-only methods (peel) are simply added
describe 'mixin', ->
  it 'mixins class', ->
    fruitMixin =
      slice: -> @size /= 2
      peel: -> @size--
    class Fruit
      constructor: (@size = 10) ->
      slice: -> @size /= 4
    SliceFruit = utils.mixin fruitMixin, Fruit
    pear = new SliceFruit 40
    pear.slice()
    pear.peel()
    assert.equal pear.size, 4
# obj2Array: nested objects become an array of rows with each key merged in as
# 'type'; flat objects become { <keyname>, value } pairs when opts.keyname is
# given
describe 'obj2Array', ->
  it 'converts deep obj to array', ->
    obj =
      pear:
        size: 10
        color: 'yellow'
      apple:
        size: 8
        color: 'red'
    expected = [
      _.extend type: 'pear', obj.pear
      _.extend type: 'apple', obj.apple
    ]
    assert.deepEqual utils.obj2Array(obj), expected
  it 'converts simple obj to array with keyname', ->
    obj =
      pear: 20
      apple: 18
    expected = [
      name: 'pear', value: 20
    ,
      name: 'apple', value: 18
    ]
    assert.deepEqual utils.obj2Array(obj, keyname: 'name'), expected
# getProp obj, name, opts: plain property read by default; with attr: true the
# value is read through obj.get when present (strictEqual pins the string
# result '42' rather than the number 42)
describe 'getProp', ->
  it 'gets prop by default', ->
    assert.equal utils.getProp({ pear: 10 }, 'pear'), 10
  it 'gets prop as attr when get is present', ->
    obj =
      pear: 42
      get: -> @pear.toString()
    assert.strictEqual utils.getProp(obj, 'pear', attr: true), '42'
# adoptProps dst, src, names...: merges only the listed props of src into dst
# ('ananas' is not listed, so it is left out of the result)
describe 'adoptProps', ->
  it 'adopts props', ->
    obj1 =
      pear: 10
    obj2 =
      banana: 12
      ananas: 13
      mango: 14
    assert.deepEqual utils.adoptProps(obj1, obj2, 'banana', 'mango'),
      pear: 10, banana: 12, mango: 14
# Calc / conversion -----------------------------------------------------------
# parseNum str, opts: numeric parsing — float by default, integer with
# int: true (4.5 -> 4), and opts.def supplies a fallback for unparseable input
describe 'parseNum', ->
  it 'parses float', ->
    assert.equal utils.parseNum('4.5'), 4.5
  it 'parses int', ->
    assert.equal utils.parseNum('4.5', int: true), 4
  it 'parses with default', ->
    assert.equal utils.parseNum(), undefined
    assert.equal utils.parseNum('pear', def: 0), 0
# limitNum: clamps num between min and max, which may be given positionally,
# as an [min, max] array or as a { min, max } object
describe 'limitNum', ->
  it 'limits number with all param passing methods', ->
    for p in [ { min: 200, max: 300 }, { min: 200 }, { max: 300 }, {} ]
      for num in [ 100, 250, 350 ]
        # a comparison against an undefined bound is false, so the number
        # passes through unclamped on that side
        expected = if num < p.min
          p.min
        else if num > p.max
          p.max
        else
          num
        assert.equal utils.limitNum(num, p.min, p.max), expected
        assert.equal utils.limitNum(num, [ p.min, p.max ]), expected
        assert.equal utils.limitNum(num, p), expected
  it 'handles invalid input', ->
    assert isNaN utils.limitNum()
    assert isNaN utils.limitNum 'x'
# roundTo num, prec: rounds to prec decimal places; a negative precision
# rounds to tens / hundreds / thousands
describe 'roundTo', ->
  it 'rounds with positive precision', ->
    # each row: input, then the expected results for precision 0, 1, 2, 3
    nums = [
      [ 1.23, 1, 1.2, 1.23, 1.23 ]
      [ 1.567, 2, 1.6, 1.57, 1.567 ]
    ]
    for n in nums
      num = n.shift()
      for rounded, i in n
        assert.equal utils.roundTo(num, i), rounded
  it 'rounds with negative precision', ->
    # each row: input, then the expected results for precision 0, -1, -2, -3
    nums = [
      [ 16.4, 16, 20, 0, 0 ]
      [ 724.1, 724, 720, 700, 1000 ]
    ]
    for n in nums
      num = n.shift()
      for rounded, i in n
        assert.equal utils.roundTo(num, i * -1), rounded
  it 'handles non-number input', ->
    # numeric strings are coerced; missing input yields undefined
    assert.equal utils.roundTo('12.6'), 13
    assert.equal utils.roundTo(), undefined
# getFrac: extracts the fractional part of a number-ish value as a string,
# optionally truncated to a digit count
describe 'getFrac', ->
  it 'gets fractional part of num', ->
    fracCases = [
      [ [ 1.234 ], '.234' ]
      [ [ '3.14' ], '.14' ]
      [ [ '3.1416', 2 ], '.14' ]
    ]
    fracCases.forEach (c) ->
      [ args, expected ] = c
      assert.equal utils.getFrac(args...), expected
  it 'handles non-fractional input', ->
    assert.equal utils.getFrac(1), undefined
    assert.equal utils.getFrac(), undefined
# calcRank prev, next: rank between two neighbours — midpoint when both are
# given, prev + 1 when only prev, and a value below next when only next
describe 'calcRank', ->
  it 'calcs default rank', ->
    assert.equal utils.calcRank(), 1
  it 'calcs basic rank', ->
    assert.equal utils.calcRank(2, 4), 3
  it 'calcs forward rank', ->
    assert.equal utils.calcRank(5), 6
  it 'calcs backward rank', ->
    assert.equal utils.calcRank(null, 4), 2
    # signed: true lets the backward rank go negative (0.2 - 1 = -0.8)
    assert.equal utils.calcRank(null, 0.2, signed: true), -0.8
# prec2Step: decimal precision -> input step size (1, 0.1, 0.01, ...);
# negative precision scales up and null behaves like 0
describe 'prec2Step', ->
  it 'translates prec to step', ->
    vals = [
      [ null, 1 ]
      [ 0, 1 ]
      [ 1, 0.1 ]
      [ 2, 0.01 ]
      [ -1, 10 ]
      [ -2, 100 ]
    ]
    # postfix comprehension: one assertion per [prec, step] pair
    assert.equal utils.prec2Step(v[0]), v[1] for v in vals
# num2Letters: 1-based spreadsheet-style column letters (2 -> 'B', 28 -> 'AB')
describe 'num2Letters', ->
  it 'converts num to one char', ->
    assert.equal utils.num2Letters(2), 'B'
  it 'converts num to two chars', ->
    assert.equal utils.num2Letters(28), 'AB'
  it 'returns undefined on invalid input', ->
    assert.strictEqual utils.num2Letters('pear'), undefined
  it 'returns empty string on 0 or less', ->
    # postfix comprehension: one assertion per non-positive input
    assert.equal utils.num2Letters(num), '' for num in [ 0, -2 ]
# maxVersion: returns the highest of the given version arguments (mixed
# strings and numbers are accepted)
describe 'maxVersion', ->
  it 'picks the max version', ->
    winner = utils.maxVersion '1.2', '1.1.0', '1.3'
    assert.equal winner, '1.3'
    winner = utils.maxVersion '1.2', '1.8.9', 3, '0.1'
    assert.equal winner, 3
# isNewerVersion a, b: true iff version a is strictly newer than version b
describe 'isNewerVersion', ->
  it 'determines the newer version', ->
    newerPairs = [ [ '1.4', '1.2' ], [ '1.2.6', '1.2.3' ] ]
    notNewerPairs = [ [ '1.2', '1.2' ], [ '1.2.2', '1.2.3' ] ]
    newerPairs.forEach (pair) ->
      assert utils.isNewerVersion pair[0], pair[1]
    notNewerPairs.forEach (pair) ->
      assert !utils.isNewerVersion pair[0], pair[1]
# formatFileSize bytes, opts: human-readable size; opts pick the unit ('k',
# 'M', 'G'), the decimal precision and the placeholder for missing input
describe 'formatFileSize', ->
  it 'formats size with units & decimal digits', ->
    # keys are the expected strings, values the argument lists applied below
    sizes =
      '16k' : [ 16384, unit: 'k' ]
      '0.02M' : [ 16384 ]
      '117.7M' : [ 123456789, prec: 1 ]
      '0.115G' : [ 123456789, unit: 'G', prec: 3 ]
    for expected, args of sizes
      assert.equal utils.formatFileSize.apply(utils, args), expected
  it 'handles invalid input', ->
    assert.equal utils.formatFileSize(), 'NA'
    assert.equal utils.formatFileSize('X'), 'NA'
    # the 'na' option overrides the default 'NA' placeholder
    assert.equal utils.formatFileSize(null, na: '-'), '-'
# Misc ------------------------------------------------------------------------
# sort: generic comparator for Array::sort — numbers, strings, nulls and
# collections ordered by a list of key specs
describe 'sort', ->
  beforeEach ->
    @nums = [ 5, 3, 2, 9 ]
    @numsNull = [ 5, 3, null, 2, null, 9 ]
    @strs = [ 'mango', 'carrot', 'pear', 'banana' ]
    @props = [
      name : 'carrot'
      type : 'vegetable'
      value : 0
    ,
      name : 'mango 2'
      type : 'fruit'
      value : 2
    ,
      name : 'banana'
      type : 'fruit'
    ,
      name : 'mango 10'
      type : 'fruit'
      value : 4
    ,
      name : 'carrot'
      type : 'vegetable'
      value : 3
    ]
    # tag each record with its original index so expectations can refer to it
    prop.id = i for prop, i in @props
  it 'sorts nums', ->
    assert.deepEqual @nums.sort(utils.sort), [ 2, 3, 5, 9 ]
  it 'sorts nums desc', ->
    assert.deepEqual \
      @nums.sort( (a, b) -> utils.sort a, b, desc: true),
      [ 9, 5, 3, 2 ]
  it 'sorts nulls', ->
    # nulls sort first by default, last with nullToEnd: true
    assert.deepEqual @numsNull.sort(utils.sort), [ null, null, 2, 3, 5, 9 ]
    assert.deepEqual @numsNull.sort( (a, b) ->
      utils.sort a, b, nullToEnd: true
    ), [ 2, 3, 5, 9, null, null ]
  it 'sorts strings', ->
    assert.deepEqual @strs.sort(utils.sort),
      [ 'banana', 'carrot', 'mango', 'pear' ]
  it 'sorts collections', ->
    # order: type asc, then name with natural number ordering ('mango 2'
    # before 'mango 10'), then value descending as the tie-breaker
    assert.deepEqual \
      _.pluck(@props.sort( (a, b) ->
        utils.sort a, b, [
          'type'
        ,
          name : 'name'
          natural : true
        ,
          name : 'value'
          desc : true
        ]), 'id'),
      [ 2, 1, 3, 4, 0 ]
# Link / URL / Client ---------------------------------------------------------
# addUrlParams url, params, opts: appends query parameters to a url, starting
# with '?' or continuing with '&'; encode: true uri-encodes the values
describe 'addUrlParams', ->
  beforeEach ->
    # base url shared by every case below (typo 'locahost' fixed; the tests
    # compare against @url itself, so the value only needs to be consistent)
    @url = 'https://localhost'
  it 'adds params with encoding', ->
    assert.equal \
      utils.addUrlParams(@url,
        { fruits: 'pear cherry', banana: 2 }, encode: true),
      @url + "?fruits=#{encodeURIComponent('pear cherry')}&banana=2"
  it 'adds params without encoding', ->
    assert.equal \
      utils.addUrlParams(@url, fruits: 'pear cherry', banana: 2),
      @url + '?fruits=pear cherry&banana=2'
  it 'adds params after an existing one', ->
    assert.equal \
      utils.addUrlParams(@url + '?car=1', fruits: 'pear cherry', banana: 2),
      @url + '?car=1&fruits=pear cherry&banana=2'
  it 'handles invalid input', ->
    assert.equal utils.addUrlParams(), undefined
# getUrlParams: parses the query string of a url into a plain object
describe 'getUrlParams', ->
  it 'gets params from url', ->
    parsed = utils.getUrlParams 'https://localhost?pear=sweet&cherry=sour'
    assert.deepEqual parsed, pear: 'sweet', cherry: 'sour'
# shareUrlSocial url, code: builds a share link on the network matching the
# code ('FB' -> facebook, 'G' -> google) that embeds the shared url
describe 'shareUrlSocial', ->
  it 'constructs share url', ->
    url = 'https://localhost'
    for k, v of { facebook: 'FB', google: 'G' }
      assert utils.shareUrlSocial(url, v).match new RegExp "#{k}.+localhost"
  it 'handles invalid input', ->
    assert.equal utils.shareUrlSocial(), undefined
# videoUrl id, opts: watch-page url (youtube by default, type: 'vimeo'
# optional); iframe: true yields the embeddable player hosts instead
describe 'videoUrl', ->
  it 'constructs video url', ->
    assert utils.videoUrl('abc').match /youtube\.com.+abc$/
    assert utils.videoUrl('xyz', type: 'vimeo').match /vimeo\.com.+xyz$/
  it 'constructs iframe video url', ->
    # embeds use the privacy-enhanced youtube-nocookie / player.vimeo hosts
    assert utils.videoUrl('abc', iframe: true).match /youtube\-nocookie/
    assert \
      utils.videoUrl('xyz', iframe: true, type: 'vimeo').match /player\.vimeo/
  it 'handles invalid input', ->
    assert.equal utils.videoUrl(), undefined
# parseVideoUrl: extracts { id, type } from full and short-form video urls;
# unrecognized input yields undefined
describe 'parseVideoUrl', ->
  it 'parses video url', ->
    # long and short youtube forms resolve to the same id
    for url in [
      'https://www.youtube.com/watch?v=Pmmh69G-pt0'
      'youtu.be/Pmmh69G-pt0'
    ]
      assert.deepEqual utils.parseVideoUrl(url),
        id: 'Pmmh69G-pt0', type: 'youtube'
    assert.deepEqual \
      utils.parseVideoUrl('https://vimeo.com/73604196'),
      id: '73604196', type: 'vimeo'
  it 'handles invalid input', ->
    assert.equal utils.parseVideoUrl(), undefined
    assert.equal utils.parseVideoUrl('abc'), undefined
# videoIframe id, opts: renders an <iframe> embed tag; the expected attribute
# strings are quoteMeta-escaped so they can be matched as regexes
describe 'videoIframe', ->
  it 'creates iframe tag', ->
    for type, vid of { youtube: 'Pmmh69G-pt0', vimeo: '73604196' }
      iframe = utils.videoIframe vid, type: type
      # expects 320x240, frameborder 0 and the iframe-flavoured src url
      res = [ '<iframe.+></iframe>' ].concat _.map [
        'width="320"'
        'height="240"'
        'frameborder="0"'
        "src=\"#{utils.videoUrl vid, type: type, iframe: true}\""
      ], (s) -> utils.quoteMeta s
      for re in res
        assert iframe.match new RegExp re
  it 'handles invalid input', ->
    assert.equal utils.videoIframe(), undefined
# link url, opts: builds an <a> tag; opts.text overrides the display text
# (which otherwise falls back to the bare link) and opts.target adds a
# target attribute
describe 'link', ->
  it 'linkifies', ->
    samples = [
      prot: 'http://'
      link: 'localhost/test'
    ,
      prot: 'https://'
      link: 'honda.jp/accord'
    ,
      prot: '//'
      link: 'fenimore.eugene.be/triboulet'
      target: '_blank'
      text: 'Albert Vandenbosh'
    ]
    samples.forEach (s) ->
      href = s.prot + s.link
      text = s.text ? s.link
      target = if s.target then " target=\"#{s.target}\"" else ''
      # pass only the recognized options (previously this pick was computed
      # but unused, and href / _.pick were recomputed inline in the call)
      opts = _.pick s, 'target', 'text'
      assert.equal \
        utils.link(href, opts),
        "<a href=\"#{href}\"#{target}>#{text}</a>"
  it 'handles invalid input', ->
    assert.equal utils.link(), undefined
# mailtoLink addr, params: mailto: uri, with params appended as a query string
describe 'mailtoLink', ->
  it 'mailto linkifies', ->
    assert.equal utils.mailtoLink('honda@accord.jp'),
      'mailto:honda@accord.jp'
    assert.equal utils.mailtoLink('fenimore', ninja: 'japo', gibert: 1),
      'mailto:fenimore?ninja=japo&gibert=1'
  it 'handles invalid input', ->
    assert.equal utils.mailtoLink(), undefined
# browser name, version, ua: user-agent sniffing; version may be an exact
# major, 'N+' (at least) or 'N-' (at most). Fixtures are [ name, major, ua ];
# the UA string literals continue across source lines (CoffeeScript joins
# them with spaces).
describe 'browser', ->
  browsers = [
    [ 'chrome', 53,
      'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like
      Gecko) Chrome/53.0.2785.143 Safari/537.36' ]
    [ 'safari', 9,
      'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8
      (KHTML, like Gecko) Version/9.1.3 Safari/601.7.8' ]
    [ 'ie', 11,
      'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E;
      .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; rv:11.0) like
      Gecko' ]
    [ 'edge', 14,
      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like
      Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393' ]
    [ 'firefox', 49,
      'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:49.0) Gecko/20100101
      Firefox/49.0' ]
  ]
  browsers.forEach (b) ->
    [ browser, version, ua ] = b
    it "#{browser} -> true", ->
      assert utils.browser browser, version, ua
      assert utils.browser browser, "#{version}-", ua
      assert utils.browser browser, "#{version}+", ua
      assert utils.browser browser, undefined, ua
    it "#{browser} -> false versions", ->
      assert !utils.browser browser, version - 1, ua
      assert !utils.browser browser, version + 1, ua
      assert !utils.browser browser, "#{version + 1}+", ua
      assert !utils.browser browser, "#{version - 1}-", ua
    it "#{browser} -> false browsers", ->
      # the Edge UA also carries Chrome and Safari tokens, so those names are
      # expected to match it and are excluded from the negative set
      others = _.reject browsers, (_b) ->
        if browser is 'edge'
          _b[0] in [ 'chrome', 'safari', browser ]
        else
          _b[0] is browser
      assert !utils.browser other[0], undefined, ua for other in others
# platform name, navp: matches short platform names against
# navigator.platform values ('win' -> 'Win32', 'ios' matches 'iPad', ...)
describe 'platform', ->
  platforms = [
    [ 'linux', 'Linux i686' ]
    [ 'win', 'Win32' ]
    [ 'mac', 'MacIntel' ]
    [ 'ios', 'iPad' ]
  ]
  platforms.forEach (p) ->
    [ platform, navp ] = p
    it "#{platform} -> true", ->
      assert utils.platform platform, navp
    it "#{platform} -> false platforms", ->
      others = _.reject platforms, (_p) ->
        _p[0] is platform
      assert !utils.platform other[0], navp for other in others
# isMobile ua: detects mobile user agents (Android phone / iPad fixtures)
# versus desktop ones; the UA string literals continue across source lines
describe 'isMobile', ->
  mobiles = [
    'Mozilla/5.0 (Linux; Android 6.0.1; SM-A310F Build/MMB29K)
    AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.91 Mobile
    Safari/537.36'
    'Mozilla/5.0 (iPad; CPU OS 10_2_1 like Mac OS X) AppleWebKit/602.4.6
    (KHTML, like Gecko) Version/10.0 Mobile/14D23 Safari/602.1'
  ]
  nonmobiles = [
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like
    Gecko) Chrome/53.0.2785.143 Safari/537.36'
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8
    (KHTML, like Gecko) Version/9.1.3 Safari/601.7.8'
  ]
  it 'detects mobile', ->
    assert utils.isMobile mobile for mobile in mobiles
  it 'detects non-mobile', ->
    assert !utils.isMobile nonmobile for nonmobile in nonmobiles
# selectOptions spec: renders <option> html from { options } or grouped
# <optgroup> html from { optgroups }; an option's value doubles as its label
# when no descr is given
describe 'selectOptions', ->
  # each sample: [ input spec, expected html ]
  samples = [
    [
      options: [ { descr: 'o1', value: 'v1' }, { descr: 'o2', value: 'v2' } ]
      '<option value="v1">o1</option><option value="v2">o2</option>'
    ],
    [
      optgroups: [
        label: 'group1'
        options: [ { value: 'g1v1' }, { value: 'g1v2' } ]
      ,
        label: 'group2'
        options: [ { value: 'g2v1' }, { value: 'g2v2' } ]
      ]
      '<optgroup label="group1">' +
        '<option value="g1v1">g1v1</option>' +
        '<option value="g1v2">g1v2</option>' +
      '</optgroup>' +
      '<optgroup label="group2">' +
        '<option value="g2v1">g2v1</option>' +
        '<option value="g2v2">g2v2</option>' +
      '</optgroup>'
    ]
  ]
  samples.forEach (s) ->
    [ input, output ] = s
    it "generates #{_.keys input} html", ->
      assert.equal utils.selectOptions(input), output
| 86827 | _ = require 'underscore'
utils = require '../lib/scapula-utils'
assert = require 'assert'
# String ----------------------------------------------------------------------
describe 'capitalize', ->
it 'basic func', ->
assert.equal utils.capitalize('tiger'), 'T<NAME>'
it 'undefined handling', ->
assert.equal utils.capitalize(), undefined
describe 'splitName', ->
expected = [ '<NAME>', '<NAME>' ]
it 'splits normal name', ->
assert.deepEqual utils.splitName('<NAME>'), expected
it 'splits sort name', ->
assert.deepEqual utils.splitName('<NAME>, <NAME>'), expected
describe 'joinName', ->
it 'joins normal name parts', ->
assert.equal utils.joinName('<NAME>', '<NAME>'), '<NAME>'
it 'joins sort name parts', ->
assert.equal utils.joinName('<NAME>', '<NAME>', sort: true),
'<NAME>'
describe 'wrap', ->
it 'wraps string', ->
assert.equal utils.wrap('samba', [ '>', '<' ]), '>samba<'
describe 'extractKeywords', ->
it 'extracts keywords', ->
assert.deepEqual utils.extractKeywords(' pear " sour cherry"'),
[ 'pear', 'sour cherry' ]
it 'extracts keywords with marks', ->
expected =
'': [ 'french fries', 'computer' ]
car: [ 'honda', 'mazda' ]
fruit: [ 'pear', 'sour cherry' ]
assert.deepEqual expected, utils.extractKeywords \
'{pear} "french fries" [honda] {sour cherry} [mazda] computer',
'{}': 'fruit', '[]': 'car', '"': ''
describe 'html', ->
beforeEach ->
@testEncoded = '&<tag><br> &123><<br> '
@testDecoded = "&<tag>\n &123><\n"
describe 'encodeHtml', ->
it 'encodes html', ->
assert.equal utils.encodeHtml(@testDecoded), @testEncoded
it 'handles invalid input', ->
assert.equal utils.encodeHtml(), undefined
describe 'decodeHtml', ->
it 'decodes html', ->
assert.equal utils.decodeHtml(@testEncoded), @testDecoded.trim()
it 'handles invalid input', ->
assert.equal utils.decodeHtml(), undefined
describe 'quoteMeta', ->
it 'quotes meta chars', ->
assert.equal utils.quoteMeta('abc123.\\+*?[^]$()-{}|'),
'abc123\\.\\\\\\+\\*\\?\\[\\^\\]\\$\\(\\)\\-\\{\\}\\|'
it 'handles invalid input', ->
assert.equal utils.quoteMeta(), undefined
describe 'startMatch', ->
it 'matches starting chars', ->
samples = [
[ 'Honda', 'ho' ]
[ 'sour cherry', 'so' ]
[ ' pear', 'pea' ]
[ 'baNanA', 'banana' ]
[ 'x', '' ]
]
for sample in samples
assert utils.startMatch sample[0], sample[1]
inv_samples = [
[ 'Honda', 'do' ]
[ 'sour cherry', 'ch' ]
[ 'pear', 'ear' ]
]
for inv_sample in inv_samples
assert !utils.startMatch inv_sample[0], inv_sample[1]
it 'handles invalid input', ->
assert !utils.startMatch()
assert !utils.startMatch 'honda'
assert !utils.startMatch null, 'ho'
# Checkers --------------------------------------------------------------------
describe 'chkEmail', ->
it 'passes valid emails', ->
for str in [ '<EMAIL>',
'<EMAIL>',
'<EMAIL>' ]
assert utils.chkEmail str
it 'blocks invalid emails', ->
for str in [ 'hello', 'hello@', 'hello@baby', '@baby' ]
assert !utils.chkEmail str
describe 'chkIP', ->
it 'passes valid IPs', ->
for str in [ '192.168.10.124', '10.2.4.1', '250.0.255.100' ]
assert utils.chkIP str
it 'blocks invalid IPs', ->
for str in [ '192', '192.168.10', 'asdf', '123.256.1.1' ]
assert !utils.chkIP str
describe 'chkHost', ->
it 'passes valid host names', ->
for str in [ 'test'
'google.com'
'rambo-online.2rockets.org'
[11 .. 31].join('-')
[7 .. 71].join('.a') ]
assert utils.chkHost str
it 'blocks invalid host names', ->
for str in [ '<EMAIL>'
'@ninja.edu'
'!hello'
'hello-'
[11 .. 32].join('-')
[8 .. 72].join('.a') ]
assert !utils.chkHost str
# Object / "class" ------------------------------------------------------------
describe 'extendMethod', ->
it 'extends method', ->
obj1 = addProp: (par = {}) ->
par.y = 2
par
obj2 = addProp: (par = {}) ->
par.z = 3
par
utils.extendMethod obj2, obj1, 'addProp'
assert.deepEqual obj2.addProp(x: 1), x: 1, y: 2, z: 3
describe 'mixin', ->
it 'mixins class', ->
fruitMixin =
slice: -> @size /= 2
peel: -> @size--
class Fruit
constructor: (@size = 10) ->
slice: -> @size /= 4
SliceFruit = utils.mixin fruitMixin, Fruit
pear = new SliceFruit 40
pear.slice()
pear.peel()
assert.equal pear.size, 4
describe 'obj2Array', ->
it 'converts deep obj to array', ->
obj =
pear:
size: 10
color: 'yellow'
apple:
size: 8
color: 'red'
expected = [
_.extend type: 'pear', obj.pear
_.extend type: 'apple', obj.apple
]
assert.deepEqual utils.obj2Array(obj), expected
it 'converts simple obj to array with keyname', ->
obj =
pear: 20
apple: 18
expected = [
name: 'pear', value: 20
,
name: 'apple', value: 18
]
assert.deepEqual utils.obj2Array(obj, keyname: 'name'), expected
describe 'getProp', ->
it 'gets prop by default', ->
assert.equal utils.getProp({ pear: 10 }, 'pear'), 10
it 'gets prop as attr when get is present', ->
obj =
pear: 42
get: -> @pear.toString()
assert.strictEqual utils.getProp(obj, 'pear', attr: true), '42'
describe 'adoptProps', ->
it 'adopts props', ->
obj1 =
pear: 10
obj2 =
banana: 12
ananas: 13
mango: 14
assert.deepEqual utils.adoptProps(obj1, obj2, 'banana', 'mango'),
pear: 10, banana: 12, mango: 14
# Calc / conversion -----------------------------------------------------------
describe 'parseNum', ->
it 'parses float', ->
assert.equal utils.parseNum('4.5'), 4.5
it 'parses int', ->
assert.equal utils.parseNum('4.5', int: true), 4
it 'parses with default', ->
assert.equal utils.parseNum(), undefined
assert.equal utils.parseNum('pear', def: 0), 0
describe 'limitNum', ->
it 'limits number with all param passing methods', ->
for p in [ { min: 200, max: 300 }, { min: 200 }, { max: 300 }, {} ]
for num in [ 100, 250, 350 ]
expected = if num < p.min
p.min
else if num > p.max
p.max
else
num
assert.equal utils.limitNum(num, p.min, p.max), expected
assert.equal utils.limitNum(num, [ p.min, p.max ]), expected
assert.equal utils.limitNum(num, p), expected
it 'handles invalid input', ->
assert isNaN utils.limitNum()
assert isNaN utils.limitNum 'x'
describe 'roundTo', ->
it 'rounds with positive precision', ->
nums = [
[ 1.23, 1, 1.2, 1.23, 1.23 ]
[ 1.567, 2, 1.6, 1.57, 1.567 ]
]
for n in nums
num = n.shift()
for rounded, i in n
assert.equal utils.roundTo(num, i), rounded
it 'rounds with negative precision', ->
nums = [
[ 16.4, 16, 20, 0, 0 ]
[ 724.1, 724, 720, 700, 1000 ]
]
for n in nums
num = n.shift()
for rounded, i in n
assert.equal utils.roundTo(num, i * -1), rounded
it 'handles non-number input', ->
assert.equal utils.roundTo('12.6'), 13
assert.equal utils.roundTo(), undefined
describe 'getFrac', ->
it 'gets fractional part of num', ->
assert.equal utils.getFrac(1.234), '.234'
assert.equal utils.getFrac('3.14'), '.14'
assert.equal utils.getFrac('3.1416', 2), '.14'
it 'handles non-fractional input', ->
assert.equal utils.getFrac(1), undefined
assert.equal utils.getFrac(), undefined
describe 'calcRank', ->
it 'calcs default rank', ->
assert.equal utils.calcRank(), 1
it 'calcs basic rank', ->
assert.equal utils.calcRank(2, 4), 3
it 'calcs forward rank', ->
assert.equal utils.calcRank(5), 6
it 'calcs backward rank', ->
assert.equal utils.calcRank(null, 4), 2
assert.equal utils.calcRank(null, 0.2, signed: true), -0.8
describe 'prec2Step', ->
it 'translates prec to step', ->
vals = [
[ null, 1 ]
[ 0, 1 ]
[ 1, 0.1 ]
[ 2, 0.01 ]
[ -1, 10 ]
[ -2, 100 ]
]
assert.equal utils.prec2Step(v[0]), v[1] for v in vals
describe 'num2Letters', ->
it 'converts num to one char', ->
assert.equal utils.num2Letters(2), 'B'
it 'converts num to two chars', ->
assert.equal utils.num2Letters(28), 'AB'
it 'returns undefined on invalid input', ->
assert.strictEqual utils.num2Letters('pear'), undefined
it 'returns empty string on 0 or less', ->
assert.equal utils.num2Letters(num), '' for num in [ 0, -2 ]
describe 'maxVersion', ->
it 'picks the max version', ->
assert.equal utils.maxVersion('1.2', '1.1.0', '1.3'), '1.3'
assert.equal utils.maxVersion('1.2', '1.8.9', 3, '0.1'), 3
describe 'isNewerVersion', ->
it 'determines the newer version', ->
assert utils.isNewerVersion '1.4', '1.2'
assert utils.isNewerVersion '1.2.6', '1.2.3'
assert !utils.isNewerVersion '1.2', '1.2'
assert !utils.isNewerVersion '1.2.2', '1.2.3'
describe 'formatFileSize', ->
it 'formats size with units & decimal digits', ->
sizes =
'16k' : [ 16384, unit: 'k' ]
'0.02M' : [ 16384 ]
'117.7M' : [ 123456789, prec: 1 ]
'0.115G' : [ 123456789, unit: 'G', prec: 3 ]
for expected, args of sizes
assert.equal utils.formatFileSize.apply(utils, args), expected
it 'handles invalid input', ->
assert.equal utils.formatFileSize(), 'NA'
assert.equal utils.formatFileSize('X'), 'NA'
assert.equal utils.formatFileSize(null, na: '-'), '-'
# Misc ------------------------------------------------------------------------
describe 'sort', ->
beforeEach ->
@nums = [ 5, 3, 2, 9 ]
@numsNull = [ 5, 3, null, 2, null, 9 ]
@strs = [ 'mango', 'carrot', 'pear', 'banana' ]
@props = [
name : '<NAME>'
type : 'vegetable'
value : 0
,
name : '<NAME> 2'
type : 'fruit'
value : 2
,
name : '<NAME>ana'
type : 'fruit'
,
name : '<NAME> 10'
type : 'fruit'
value : 4
,
name : '<NAME>'
type : 'vegetable'
value : 3
]
prop.id = i for prop, i in @props
it 'sorts nums', ->
assert.deepEqual @nums.sort(utils.sort), [ 2, 3, 5, 9 ]
it 'sorts nums desc', ->
assert.deepEqual \
@nums.sort( (a, b) -> utils.sort a, b, desc: true),
[ 9, 5, 3, 2 ]
it 'sorts nulls', ->
assert.deepEqual @numsNull.sort(utils.sort), [ null, null, 2, 3, 5, 9 ]
assert.deepEqual @numsNull.sort( (a, b) ->
utils.sort a, b, nullToEnd: true
), [ 2, 3, 5, 9, null, null ]
it 'sorts strings', ->
assert.deepEqual @strs.sort(utils.sort),
[ 'banana', 'carrot', 'mango', 'pear' ]
it 'sorts collections', ->
assert.deepEqual \
_.pluck(@props.sort( (a, b) ->
utils.sort a, b, [
'type'
,
name : 'name'
natural : true
,
name : 'value'
desc : true
]), 'id'),
[ 2, 1, 3, 4, 0 ]
# Link / URL / Client ---------------------------------------------------------
describe 'addUrlParams', ->
beforeEach ->
@url = 'https://locahost'
it 'adds params with encoding', ->
assert.equal \
utils.addUrlParams(@url,
{ fruits: 'pear cherry', banana: 2 }, encode: true),
@url + "?fruits=#{encodeURIComponent('pear cherry')}&banana=2"
it 'adds params without encoding', ->
assert.equal \
utils.addUrlParams(@url, fruits: 'pear cherry', banana: 2),
@url + '?fruits=pear cherry&banana=2'
it 'adds params after an existing one', ->
assert.equal \
utils.addUrlParams(@url + '?car=1', fruits: 'pear cherry', banana: 2),
@url + '?car=1&fruits=pear cherry&banana=2'
it 'handles invalid input', ->
assert.equal utils.addUrlParams(), undefined
describe 'getUrlParams', ->
it 'gets params from url', ->
assert.deepEqual \
utils.getUrlParams('https://localhost?pear=sweet&cherry=sour'),
{ pear: 'sweet', cherry: 'sour' }
describe 'shareUrlSocial', ->
it 'constructs share url', ->
url = 'https://localhost'
for k, v of { facebook: 'FB', google: 'G' }
assert utils.shareUrlSocial(url, v).match new RegExp "#{k}.+localhost"
it 'handles invalid input', ->
assert.equal utils.shareUrlSocial(), undefined
describe 'videoUrl', ->
it 'constructs video url', ->
assert utils.videoUrl('abc').match /youtube\.com.+abc$/
assert utils.videoUrl('xyz', type: 'vimeo').match /vimeo\.com.+xyz$/
it 'constructs iframe video url', ->
assert utils.videoUrl('abc', iframe: true).match /youtube\-nocookie/
assert \
utils.videoUrl('xyz', iframe: true, type: 'vimeo').match /player\.vimeo/
it 'handles invalid input', ->
assert.equal utils.videoUrl(), undefined
describe 'parseVideoUrl', ->
it 'parses video url', ->
for url in [
'https://www.youtube.com/watch?v=Pmmh69G-pt0'
'youtu.be/Pmmh69G-pt0'
]
assert.deepEqual utils.parseVideoUrl(url),
id: 'Pmmh69G-pt0', type: 'youtube'
assert.deepEqual \
utils.parseVideoUrl('https://vimeo.com/73604196'),
id: '73604196', type: 'vimeo'
it 'handles invalid input', ->
assert.equal utils.parseVideoUrl(), undefined
assert.equal utils.parseVideoUrl('abc'), undefined
describe 'videoIframe', ->
it 'creates iframe tag', ->
for type, vid of { youtube: 'Pmmh69G-pt0', vimeo: '73604196' }
iframe = utils.videoIframe vid, type: type
res = [ '<iframe.+></iframe>' ].concat _.map [
'width="320"'
'height="240"'
'frameborder="0"'
"src=\"#{utils.videoUrl vid, type: type, iframe: true}\""
], (s) -> utils.quoteMeta s
for re in res
assert iframe.match new RegExp re
it 'handles invalid input', ->
assert.equal utils.videoIframe(), undefined
describe 'link', ->
it 'linkifies', ->
samples = [
prot: 'http://'
link: 'localhost/test'
,
prot: 'https://'
link: 'honda.jp/accord'
,
prot: '//'
link: 'fenimore.eugene.be/triboulet'
target: '_blank'
text: '<NAME>'
]
samples.forEach (s) ->
href = s.prot + s.link
text = s.text ? s.link
target = if s.target then " target=\"#{s.target}\"" else ''
opts = _.pick s, 'target', 'text'
assert.equal \
utils.link(s.prot + s.link, _.pick s, 'target', 'text'),
"<a href=\"#{href}\"#{target}>#{text}</a>"
it 'handles invalid input', ->
assert.equal utils.link(), undefined
describe 'mailtoLink', ->
it 'mailto linkifies', ->
assert.equal utils.mailtoLink('<EMAIL>'),
'mailto:<EMAIL>'
assert.equal utils.mailtoLink('fenimore', ninja: 'japo', gibert: 1),
'mailto:fenimore?ninja=japo&gibert=1'
it 'handles invalid input', ->
assert.equal utils.mailtoLink(), undefined
describe 'browser', ->
browsers = [
[ 'chrome', 53,
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like
Gecko) Chrome/53.0.2785.143 Safari/537.36' ]
[ 'safari', 9,
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8
(KHTML, like Gecko) Version/9.1.3 Safari/601.7.8' ]
[ 'ie', 11,
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E;
.NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; rv:11.0) like
Gecko' ]
[ 'edge', 14,
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like
Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393' ]
[ 'firefox', 49,
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:49.0) Gecko/20100101
Firefox/49.0' ]
]
browsers.forEach (b) ->
[ browser, version, ua ] = b
it "#{browser} -> true", ->
assert utils.browser browser, version, ua
assert utils.browser browser, "#{version}-", ua
assert utils.browser browser, "#{version}+", ua
assert utils.browser browser, undefined, ua
it "#{browser} -> false versions", ->
assert !utils.browser browser, version - 1, ua
assert !utils.browser browser, version + 1, ua
assert !utils.browser browser, "#{version + 1}+", ua
assert !utils.browser browser, "#{version - 1}-", ua
it "#{browser} -> false browsers", ->
others = _.reject browsers, (_b) ->
if browser is 'edge'
_b[0] in [ 'chrome', 'safari', browser ]
else
_b[0] is browser
assert !utils.browser other[0], undefined, ua for other in others
describe 'platform', ->
platforms = [
[ 'linux', 'Linux i686' ]
[ 'win', 'Win32' ]
[ 'mac', 'MacIntel' ]
[ 'ios', 'iPad' ]
]
platforms.forEach (p) ->
[ platform, navp ] = p
it "#{platform} -> true", ->
assert utils.platform platform, navp
it "#{platform} -> false platforms", ->
others = _.reject platforms, (_p) ->
_p[0] is platform
assert !utils.platform other[0], navp for other in others
describe 'isMobile', ->
mobiles = [
'Mozilla/5.0 (Linux; Android 6.0.1; SM-A310F Build/MMB29K)
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.91 Mobile
Safari/537.36'
'Mozilla/5.0 (iPad; CPU OS 10_2_1 like Mac OS X) AppleWebKit/602.4.6
(KHTML, like Gecko) Version/10.0 Mobile/14D23 Safari/602.1'
]
nonmobiles = [
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like
Gecko) Chrome/53.0.2785.143 Safari/537.36'
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8
(KHTML, like Gecko) Version/9.1.3 Safari/601.7.8'
]
it 'detects mobile', ->
assert utils.isMobile mobile for mobile in mobiles
it 'detects non-mobile', ->
assert !utils.isMobile nonmobile for nonmobile in nonmobiles
describe 'selectOptions', ->
samples = [
[
options: [ { descr: 'o1', value: 'v1' }, { descr: 'o2', value: 'v2' } ]
'<option value="v1">o1</option><option value="v2">o2</option>'
],
[
optgroups: [
label: 'group1'
options: [ { value: 'g1v1' }, { value: 'g1v2' } ]
,
label: 'group2'
options: [ { value: 'g2v1' }, { value: 'g2v2' } ]
]
'<optgroup label="group1">' +
'<option value="g1v1">g1v1</option>' +
'<option value="g1v2">g1v2</option>' +
'</optgroup>' +
'<optgroup label="group2">' +
'<option value="g2v1">g2v1</option>' +
'<option value="g2v2">g2v2</option>' +
'</optgroup>'
]
]
samples.forEach (s) ->
[ input, output ] = s
it "generates #{_.keys input} html", ->
assert.equal utils.selectOptions(input), output
| true | _ = require 'underscore'
utils = require '../lib/scapula-utils'
assert = require 'assert'
# String ----------------------------------------------------------------------
describe 'capitalize', ->
it 'basic func', ->
assert.equal utils.capitalize('tiger'), 'TPI:NAME:<NAME>END_PI'
it 'undefined handling', ->
assert.equal utils.capitalize(), undefined
describe 'splitName', ->
expected = [ 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI' ]
it 'splits normal name', ->
assert.deepEqual utils.splitName('PI:NAME:<NAME>END_PI'), expected
it 'splits sort name', ->
assert.deepEqual utils.splitName('PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI'), expected
describe 'joinName', ->
it 'joins normal name parts', ->
assert.equal utils.joinName('PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'), 'PI:NAME:<NAME>END_PI'
it 'joins sort name parts', ->
assert.equal utils.joinName('PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', sort: true),
'PI:NAME:<NAME>END_PI'
describe 'wrap', ->
it 'wraps string', ->
assert.equal utils.wrap('samba', [ '>', '<' ]), '>samba<'
describe 'extractKeywords', ->
it 'extracts keywords', ->
assert.deepEqual utils.extractKeywords(' pear " sour cherry"'),
[ 'pear', 'sour cherry' ]
it 'extracts keywords with marks', ->
expected =
'': [ 'french fries', 'computer' ]
car: [ 'honda', 'mazda' ]
fruit: [ 'pear', 'sour cherry' ]
assert.deepEqual expected, utils.extractKeywords \
'{pear} "french fries" [honda] {sour cherry} [mazda] computer',
'{}': 'fruit', '[]': 'car', '"': ''
describe 'html', ->
beforeEach ->
@testEncoded = '&<tag><br> &123><<br> '
@testDecoded = "&<tag>\n &123><\n"
describe 'encodeHtml', ->
it 'encodes html', ->
assert.equal utils.encodeHtml(@testDecoded), @testEncoded
it 'handles invalid input', ->
assert.equal utils.encodeHtml(), undefined
describe 'decodeHtml', ->
it 'decodes html', ->
assert.equal utils.decodeHtml(@testEncoded), @testDecoded.trim()
it 'handles invalid input', ->
assert.equal utils.decodeHtml(), undefined
describe 'quoteMeta', ->
it 'quotes meta chars', ->
assert.equal utils.quoteMeta('abc123.\\+*?[^]$()-{}|'),
'abc123\\.\\\\\\+\\*\\?\\[\\^\\]\\$\\(\\)\\-\\{\\}\\|'
it 'handles invalid input', ->
assert.equal utils.quoteMeta(), undefined
describe 'startMatch', ->
it 'matches starting chars', ->
samples = [
[ 'Honda', 'ho' ]
[ 'sour cherry', 'so' ]
[ ' pear', 'pea' ]
[ 'baNanA', 'banana' ]
[ 'x', '' ]
]
for sample in samples
assert utils.startMatch sample[0], sample[1]
inv_samples = [
[ 'Honda', 'do' ]
[ 'sour cherry', 'ch' ]
[ 'pear', 'ear' ]
]
for inv_sample in inv_samples
assert !utils.startMatch inv_sample[0], inv_sample[1]
it 'handles invalid input', ->
assert !utils.startMatch()
assert !utils.startMatch 'honda'
assert !utils.startMatch null, 'ho'
# Checkers --------------------------------------------------------------------
describe 'chkEmail', ->
it 'passes valid emails', ->
for str in [ 'PI:EMAIL:<EMAIL>END_PI',
'PI:EMAIL:<EMAIL>END_PI',
'PI:EMAIL:<EMAIL>END_PI' ]
assert utils.chkEmail str
it 'blocks invalid emails', ->
for str in [ 'hello', 'hello@', 'hello@baby', '@baby' ]
assert !utils.chkEmail str
describe 'chkIP', ->
it 'passes valid IPs', ->
for str in [ '192.168.10.124', '10.2.4.1', '250.0.255.100' ]
assert utils.chkIP str
it 'blocks invalid IPs', ->
for str in [ '192', '192.168.10', 'asdf', '123.256.1.1' ]
assert !utils.chkIP str
describe 'chkHost', ->
it 'passes valid host names', ->
for str in [ 'test'
'google.com'
'rambo-online.2rockets.org'
[11 .. 31].join('-')
[7 .. 71].join('.a') ]
assert utils.chkHost str
it 'blocks invalid host names', ->
for str in [ 'PI:EMAIL:<EMAIL>END_PI'
'@ninja.edu'
'!hello'
'hello-'
[11 .. 32].join('-')
[8 .. 72].join('.a') ]
assert !utils.chkHost str
# Object / "class" ------------------------------------------------------------
describe 'extendMethod', ->
it 'extends method', ->
obj1 = addProp: (par = {}) ->
par.y = 2
par
obj2 = addProp: (par = {}) ->
par.z = 3
par
utils.extendMethod obj2, obj1, 'addProp'
assert.deepEqual obj2.addProp(x: 1), x: 1, y: 2, z: 3
describe 'mixin', ->
it 'mixins class', ->
fruitMixin =
slice: -> @size /= 2
peel: -> @size--
class Fruit
constructor: (@size = 10) ->
slice: -> @size /= 4
SliceFruit = utils.mixin fruitMixin, Fruit
pear = new SliceFruit 40
pear.slice()
pear.peel()
assert.equal pear.size, 4
describe 'obj2Array', ->
it 'converts deep obj to array', ->
obj =
pear:
size: 10
color: 'yellow'
apple:
size: 8
color: 'red'
expected = [
_.extend type: 'pear', obj.pear
_.extend type: 'apple', obj.apple
]
assert.deepEqual utils.obj2Array(obj), expected
it 'converts simple obj to array with keyname', ->
obj =
pear: 20
apple: 18
expected = [
name: 'pear', value: 20
,
name: 'apple', value: 18
]
assert.deepEqual utils.obj2Array(obj, keyname: 'name'), expected
describe 'getProp', ->
it 'gets prop by default', ->
assert.equal utils.getProp({ pear: 10 }, 'pear'), 10
it 'gets prop as attr when get is present', ->
obj =
pear: 42
get: -> @pear.toString()
assert.strictEqual utils.getProp(obj, 'pear', attr: true), '42'
describe 'adoptProps', ->
it 'adopts props', ->
obj1 =
pear: 10
obj2 =
banana: 12
ananas: 13
mango: 14
assert.deepEqual utils.adoptProps(obj1, obj2, 'banana', 'mango'),
pear: 10, banana: 12, mango: 14
# Calc / conversion -----------------------------------------------------------
describe 'parseNum', ->
it 'parses float', ->
assert.equal utils.parseNum('4.5'), 4.5
it 'parses int', ->
assert.equal utils.parseNum('4.5', int: true), 4
it 'parses with default', ->
assert.equal utils.parseNum(), undefined
assert.equal utils.parseNum('pear', def: 0), 0
describe 'limitNum', ->
it 'limits number with all param passing methods', ->
for p in [ { min: 200, max: 300 }, { min: 200 }, { max: 300 }, {} ]
for num in [ 100, 250, 350 ]
expected = if num < p.min
p.min
else if num > p.max
p.max
else
num
assert.equal utils.limitNum(num, p.min, p.max), expected
assert.equal utils.limitNum(num, [ p.min, p.max ]), expected
assert.equal utils.limitNum(num, p), expected
it 'handles invalid input', ->
assert isNaN utils.limitNum()
assert isNaN utils.limitNum 'x'
describe 'roundTo', ->
it 'rounds with positive precision', ->
nums = [
[ 1.23, 1, 1.2, 1.23, 1.23 ]
[ 1.567, 2, 1.6, 1.57, 1.567 ]
]
for n in nums
num = n.shift()
for rounded, i in n
assert.equal utils.roundTo(num, i), rounded
it 'rounds with negative precision', ->
nums = [
[ 16.4, 16, 20, 0, 0 ]
[ 724.1, 724, 720, 700, 1000 ]
]
for n in nums
num = n.shift()
for rounded, i in n
assert.equal utils.roundTo(num, i * -1), rounded
it 'handles non-number input', ->
assert.equal utils.roundTo('12.6'), 13
assert.equal utils.roundTo(), undefined
describe 'getFrac', ->
it 'gets fractional part of num', ->
assert.equal utils.getFrac(1.234), '.234'
assert.equal utils.getFrac('3.14'), '.14'
assert.equal utils.getFrac('3.1416', 2), '.14'
it 'handles non-fractional input', ->
assert.equal utils.getFrac(1), undefined
assert.equal utils.getFrac(), undefined
describe 'calcRank', ->
it 'calcs default rank', ->
assert.equal utils.calcRank(), 1
it 'calcs basic rank', ->
assert.equal utils.calcRank(2, 4), 3
it 'calcs forward rank', ->
assert.equal utils.calcRank(5), 6
it 'calcs backward rank', ->
assert.equal utils.calcRank(null, 4), 2
assert.equal utils.calcRank(null, 0.2, signed: true), -0.8
describe 'prec2Step', ->
it 'translates prec to step', ->
vals = [
[ null, 1 ]
[ 0, 1 ]
[ 1, 0.1 ]
[ 2, 0.01 ]
[ -1, 10 ]
[ -2, 100 ]
]
assert.equal utils.prec2Step(v[0]), v[1] for v in vals
describe 'num2Letters', ->
it 'converts num to one char', ->
assert.equal utils.num2Letters(2), 'B'
it 'converts num to two chars', ->
assert.equal utils.num2Letters(28), 'AB'
it 'returns undefined on invalid input', ->
assert.strictEqual utils.num2Letters('pear'), undefined
it 'returns empty string on 0 or less', ->
assert.equal utils.num2Letters(num), '' for num in [ 0, -2 ]
describe 'maxVersion', ->
it 'picks the max version', ->
assert.equal utils.maxVersion('1.2', '1.1.0', '1.3'), '1.3'
assert.equal utils.maxVersion('1.2', '1.8.9', 3, '0.1'), 3
describe 'isNewerVersion', ->
it 'determines the newer version', ->
assert utils.isNewerVersion '1.4', '1.2'
assert utils.isNewerVersion '1.2.6', '1.2.3'
assert !utils.isNewerVersion '1.2', '1.2'
assert !utils.isNewerVersion '1.2.2', '1.2.3'
describe 'formatFileSize', ->
it 'formats size with units & decimal digits', ->
sizes =
'16k' : [ 16384, unit: 'k' ]
'0.02M' : [ 16384 ]
'117.7M' : [ 123456789, prec: 1 ]
'0.115G' : [ 123456789, unit: 'G', prec: 3 ]
for expected, args of sizes
assert.equal utils.formatFileSize.apply(utils, args), expected
it 'handles invalid input', ->
assert.equal utils.formatFileSize(), 'NA'
assert.equal utils.formatFileSize('X'), 'NA'
assert.equal utils.formatFileSize(null, na: '-'), '-'
# Misc ------------------------------------------------------------------------
describe 'sort', ->
beforeEach ->
@nums = [ 5, 3, 2, 9 ]
@numsNull = [ 5, 3, null, 2, null, 9 ]
@strs = [ 'mango', 'carrot', 'pear', 'banana' ]
@props = [
name : 'PI:NAME:<NAME>END_PI'
type : 'vegetable'
value : 0
,
name : 'PI:NAME:<NAME>END_PI 2'
type : 'fruit'
value : 2
,
name : 'PI:NAME:<NAME>END_PIana'
type : 'fruit'
,
name : 'PI:NAME:<NAME>END_PI 10'
type : 'fruit'
value : 4
,
name : 'PI:NAME:<NAME>END_PI'
type : 'vegetable'
value : 3
]
prop.id = i for prop, i in @props
it 'sorts nums', ->
assert.deepEqual @nums.sort(utils.sort), [ 2, 3, 5, 9 ]
it 'sorts nums desc', ->
assert.deepEqual \
@nums.sort( (a, b) -> utils.sort a, b, desc: true),
[ 9, 5, 3, 2 ]
it 'sorts nulls', ->
assert.deepEqual @numsNull.sort(utils.sort), [ null, null, 2, 3, 5, 9 ]
assert.deepEqual @numsNull.sort( (a, b) ->
utils.sort a, b, nullToEnd: true
), [ 2, 3, 5, 9, null, null ]
it 'sorts strings', ->
assert.deepEqual @strs.sort(utils.sort),
[ 'banana', 'carrot', 'mango', 'pear' ]
it 'sorts collections', ->
assert.deepEqual \
_.pluck(@props.sort( (a, b) ->
utils.sort a, b, [
'type'
,
name : 'name'
natural : true
,
name : 'value'
desc : true
]), 'id'),
[ 2, 1, 3, 4, 0 ]
# Link / URL / Client ---------------------------------------------------------
describe 'addUrlParams', ->
beforeEach ->
@url = 'https://locahost'
it 'adds params with encoding', ->
assert.equal \
utils.addUrlParams(@url,
{ fruits: 'pear cherry', banana: 2 }, encode: true),
@url + "?fruits=#{encodeURIComponent('pear cherry')}&banana=2"
it 'adds params without encoding', ->
assert.equal \
utils.addUrlParams(@url, fruits: 'pear cherry', banana: 2),
@url + '?fruits=pear cherry&banana=2'
it 'adds params after an existing one', ->
assert.equal \
utils.addUrlParams(@url + '?car=1', fruits: 'pear cherry', banana: 2),
@url + '?car=1&fruits=pear cherry&banana=2'
it 'handles invalid input', ->
assert.equal utils.addUrlParams(), undefined
describe 'getUrlParams', ->
it 'gets params from url', ->
assert.deepEqual \
utils.getUrlParams('https://localhost?pear=sweet&cherry=sour'),
{ pear: 'sweet', cherry: 'sour' }
describe 'shareUrlSocial', ->
it 'constructs share url', ->
url = 'https://localhost'
for k, v of { facebook: 'FB', google: 'G' }
assert utils.shareUrlSocial(url, v).match new RegExp "#{k}.+localhost"
it 'handles invalid input', ->
assert.equal utils.shareUrlSocial(), undefined
describe 'videoUrl', ->
it 'constructs video url', ->
assert utils.videoUrl('abc').match /youtube\.com.+abc$/
assert utils.videoUrl('xyz', type: 'vimeo').match /vimeo\.com.+xyz$/
it 'constructs iframe video url', ->
assert utils.videoUrl('abc', iframe: true).match /youtube\-nocookie/
assert \
utils.videoUrl('xyz', iframe: true, type: 'vimeo').match /player\.vimeo/
it 'handles invalid input', ->
assert.equal utils.videoUrl(), undefined
describe 'parseVideoUrl', ->
it 'parses video url', ->
for url in [
'https://www.youtube.com/watch?v=Pmmh69G-pt0'
'youtu.be/Pmmh69G-pt0'
]
assert.deepEqual utils.parseVideoUrl(url),
id: 'Pmmh69G-pt0', type: 'youtube'
assert.deepEqual \
utils.parseVideoUrl('https://vimeo.com/73604196'),
id: '73604196', type: 'vimeo'
it 'handles invalid input', ->
assert.equal utils.parseVideoUrl(), undefined
assert.equal utils.parseVideoUrl('abc'), undefined
describe 'videoIframe', ->
it 'creates iframe tag', ->
for type, vid of { youtube: 'Pmmh69G-pt0', vimeo: '73604196' }
iframe = utils.videoIframe vid, type: type
res = [ '<iframe.+></iframe>' ].concat _.map [
'width="320"'
'height="240"'
'frameborder="0"'
"src=\"#{utils.videoUrl vid, type: type, iframe: true}\""
], (s) -> utils.quoteMeta s
for re in res
assert iframe.match new RegExp re
it 'handles invalid input', ->
assert.equal utils.videoIframe(), undefined
describe 'link', ->
it 'linkifies', ->
samples = [
prot: 'http://'
link: 'localhost/test'
,
prot: 'https://'
link: 'honda.jp/accord'
,
prot: '//'
link: 'fenimore.eugene.be/triboulet'
target: '_blank'
text: 'PI:NAME:<NAME>END_PI'
]
samples.forEach (s) ->
href = s.prot + s.link
text = s.text ? s.link
target = if s.target then " target=\"#{s.target}\"" else ''
opts = _.pick s, 'target', 'text'
assert.equal \
utils.link(s.prot + s.link, _.pick s, 'target', 'text'),
"<a href=\"#{href}\"#{target}>#{text}</a>"
it 'handles invalid input', ->
assert.equal utils.link(), undefined
describe 'mailtoLink', ->
it 'mailto linkifies', ->
assert.equal utils.mailtoLink('PI:EMAIL:<EMAIL>END_PI'),
'mailto:PI:EMAIL:<EMAIL>END_PI'
assert.equal utils.mailtoLink('fenimore', ninja: 'japo', gibert: 1),
'mailto:fenimore?ninja=japo&gibert=1'
it 'handles invalid input', ->
assert.equal utils.mailtoLink(), undefined
describe 'browser', ->
browsers = [
[ 'chrome', 53,
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like
Gecko) Chrome/53.0.2785.143 Safari/537.36' ]
[ 'safari', 9,
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8
(KHTML, like Gecko) Version/9.1.3 Safari/601.7.8' ]
[ 'ie', 11,
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E;
.NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; rv:11.0) like
Gecko' ]
[ 'edge', 14,
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like
Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393' ]
[ 'firefox', 49,
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:49.0) Gecko/20100101
Firefox/49.0' ]
]
browsers.forEach (b) ->
[ browser, version, ua ] = b
it "#{browser} -> true", ->
assert utils.browser browser, version, ua
assert utils.browser browser, "#{version}-", ua
assert utils.browser browser, "#{version}+", ua
assert utils.browser browser, undefined, ua
it "#{browser} -> false versions", ->
assert !utils.browser browser, version - 1, ua
assert !utils.browser browser, version + 1, ua
assert !utils.browser browser, "#{version + 1}+", ua
assert !utils.browser browser, "#{version - 1}-", ua
it "#{browser} -> false browsers", ->
others = _.reject browsers, (_b) ->
if browser is 'edge'
_b[0] in [ 'chrome', 'safari', browser ]
else
_b[0] is browser
assert !utils.browser other[0], undefined, ua for other in others
describe 'platform', ->
platforms = [
[ 'linux', 'Linux i686' ]
[ 'win', 'Win32' ]
[ 'mac', 'MacIntel' ]
[ 'ios', 'iPad' ]
]
platforms.forEach (p) ->
[ platform, navp ] = p
it "#{platform} -> true", ->
assert utils.platform platform, navp
it "#{platform} -> false platforms", ->
others = _.reject platforms, (_p) ->
_p[0] is platform
assert !utils.platform other[0], navp for other in others
describe 'isMobile', ->
mobiles = [
'Mozilla/5.0 (Linux; Android 6.0.1; SM-A310F Build/MMB29K)
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.91 Mobile
Safari/537.36'
'Mozilla/5.0 (iPad; CPU OS 10_2_1 like Mac OS X) AppleWebKit/602.4.6
(KHTML, like Gecko) Version/10.0 Mobile/14D23 Safari/602.1'
]
nonmobiles = [
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like
Gecko) Chrome/53.0.2785.143 Safari/537.36'
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8
(KHTML, like Gecko) Version/9.1.3 Safari/601.7.8'
]
it 'detects mobile', ->
assert utils.isMobile mobile for mobile in mobiles
it 'detects non-mobile', ->
assert !utils.isMobile nonmobile for nonmobile in nonmobiles
describe 'selectOptions', ->
samples = [
[
options: [ { descr: 'o1', value: 'v1' }, { descr: 'o2', value: 'v2' } ]
'<option value="v1">o1</option><option value="v2">o2</option>'
],
[
optgroups: [
label: 'group1'
options: [ { value: 'g1v1' }, { value: 'g1v2' } ]
,
label: 'group2'
options: [ { value: 'g2v1' }, { value: 'g2v2' } ]
]
'<optgroup label="group1">' +
'<option value="g1v1">g1v1</option>' +
'<option value="g1v2">g1v2</option>' +
'</optgroup>' +
'<optgroup label="group2">' +
'<option value="g2v1">g2v1</option>' +
'<option value="g2v2">g2v2</option>' +
'</optgroup>'
]
]
samples.forEach (s) ->
[ input, output ] = s
it "generates #{_.keys input} html", ->
assert.equal utils.selectOptions(input), output
|
[
{
"context": "to: \"author\"\n post = new Post(author: { name: \"Jonas\", email: \"jonas@elabs.se\" })\n expect(post.name",
"end": 274,
"score": 0.9996686577796936,
"start": 269,
"tag": "NAME",
"value": "Jonas"
},
{
"context": " post = new Post(author: { name: \"Jonas\", email:... | test/model/delegate.spec.coffee | jnicklas/serenade.js | 1 | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe "Serenade.Model.delegate", ->
it "sets up delegated attributes", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: { name: "Jonas", email: "jonas@elabs.se" })
expect(post.name).to.eql("Jonas")
expect(post.email).to.eql("jonas@elabs.se")
it "assigns value to delegated object when given", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: {})
post.name = "Jonas"
expect(post.author.name).to.eql("Jonas")
it "does nothign when delegated object does not exist", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post()
post.name = "Jonas"
expect(post.author).to.be.undefined
it "returns undefined when the attribute being delegated to is undefined", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: undefined)
expect(post.name).to.eql(undefined)
expect(post.email).to.eql(undefined)
it "notifies of changes when delegated attributes are changed", ->
author = Serenade(name: "Jonas", email: "jonas@elabs.se")
class Post extends Serenade.Model
@attribute "author"
@delegate "name", "email", to: "author"
post = new Post(author: author)
post["@name"].trigger()
post["@email"].trigger()
expect(-> author.name = "peter").to.emit(post["@name"], with: "peter")
expect(-> author.email = "peter@elabs.se").to.emit(post["@email"], with: "peter@elabs.se")
it "allows dependencies to be overwritten", ->
author = Serenade(name: "Jonas", email: "jonas@elabs.se")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", dependsOn: []
post = new Post(author: author)
expect(-> author.name = "peter").not.to.emit(post["@name"])
it "can set prefix", ->
author = Serenade(name: "Jonas", email: "jonas@elabs.se")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", prefix: true
post = new Post(author: { name: "Jonas", email: "jonas@elabs.se" })
expect(post.authorName).to.eql("Jonas")
expect(post.authorEmail).to.eql("jonas@elabs.se")
it "can set suffix", ->
author = Serenade(name: "Jonas", email: "jonas@elabs.se")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", suffix: true
post = new Post(author: { name: "Jonas", email: "jonas@elabs.se" })
expect(post.nameAuthor).to.eql("Jonas")
expect(post.emailAuthor).to.eql("jonas@elabs.se")
it "can set prefix as string", ->
author = Serenade(name: "Jonas", email: "jonas@elabs.se")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", prefix: "quox"
post = new Post(author: { name: "Jonas", email: "jonas@elabs.se" })
expect(post.quoxName).to.eql("Jonas")
expect(post.quoxEmail).to.eql("jonas@elabs.se")
it "can set suffix as string", ->
author = Serenade(name: "Jonas", email: "jonas@elabs.se")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", suffix: "Quox"
post = new Post(author: { name: "Jonas", email: "jonas@elabs.se" })
expect(post.nameQuox).to.eql("Jonas")
expect(post.emailQuox).to.eql("jonas@elabs.se")
| 29980 | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe "Serenade.Model.delegate", ->
it "sets up delegated attributes", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: { name: "<NAME>", email: "<EMAIL>" })
expect(post.name).to.eql("<NAME>")
expect(post.email).to.eql("<EMAIL>")
it "assigns value to delegated object when given", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: {})
post.name = "<NAME>"
expect(post.author.name).to.eql("<NAME>")
it "does nothign when delegated object does not exist", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post()
post.name = "<NAME>"
expect(post.author).to.be.undefined
it "returns undefined when the attribute being delegated to is undefined", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: undefined)
expect(post.name).to.eql(undefined)
expect(post.email).to.eql(undefined)
it "notifies of changes when delegated attributes are changed", ->
author = Serenade(name: "<NAME>", email: "<EMAIL>")
class Post extends Serenade.Model
@attribute "author"
@delegate "name", "email", to: "author"
post = new Post(author: author)
post["@name"].trigger()
post["@email"].trigger()
expect(-> author.name = "peter").to.emit(post["@name"], with: "peter")
expect(-> author.email = "<EMAIL>").to.emit(post["@email"], with: "<EMAIL>")
it "allows dependencies to be overwritten", ->
author = Serenade(name: "<NAME>", email: "<EMAIL>")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", dependsOn: []
post = new Post(author: author)
expect(-> author.name = "peter").not.to.emit(post["@name"])
it "can set prefix", ->
author = Serenade(name: "<NAME>", email: "<EMAIL>")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", prefix: true
post = new Post(author: { name: "<NAME>", email: "<EMAIL>" })
expect(post.authorName).to.eql("<NAME>")
expect(post.authorEmail).to.eql("<EMAIL>")
it "can set suffix", ->
author = Serenade(name: "<NAME>", email: "<EMAIL>")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", suffix: true
post = new Post(author: { name: "<NAME>", email: "<EMAIL>" })
expect(post.nameAuthor).to.eql("<NAME>")
expect(post.emailAuthor).to.eql("<EMAIL>")
it "can set prefix as string", ->
author = Serenade(name: "<NAME>", email: "<EMAIL>")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", prefix: "quox"
post = new Post(author: { name: "<NAME>", email: "<EMAIL>" })
expect(post.quoxName).to.eql("<NAME>")
expect(post.quoxEmail).to.eql("<EMAIL>")
it "can set suffix as string", ->
author = Serenade(name: "<NAME>", email: "<EMAIL>")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", suffix: "Quox"
post = new Post(author: { name: "<NAME>", email: "<EMAIL>" })
expect(post.nameQuox).to.eql("<NAME>")
expect(post.emailQuox).to.eql("<EMAIL>")
| true | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe "Serenade.Model.delegate", ->
it "sets up delegated attributes", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: { name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI" })
expect(post.name).to.eql("PI:NAME:<NAME>END_PI")
expect(post.email).to.eql("PI:EMAIL:<EMAIL>END_PI")
it "assigns value to delegated object when given", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: {})
post.name = "PI:NAME:<NAME>END_PI"
expect(post.author.name).to.eql("PI:NAME:<NAME>END_PI")
it "does nothign when delegated object does not exist", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post()
post.name = "PI:NAME:<NAME>END_PI"
expect(post.author).to.be.undefined
it "returns undefined when the attribute being delegated to is undefined", ->
class Post extends Serenade.Model
@delegate "name", "email", to: "author"
post = new Post(author: undefined)
expect(post.name).to.eql(undefined)
expect(post.email).to.eql(undefined)
it "notifies of changes when delegated attributes are changed", ->
author = Serenade(name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI")
class Post extends Serenade.Model
@attribute "author"
@delegate "name", "email", to: "author"
post = new Post(author: author)
post["@name"].trigger()
post["@email"].trigger()
expect(-> author.name = "peter").to.emit(post["@name"], with: "peter")
expect(-> author.email = "PI:EMAIL:<EMAIL>END_PI").to.emit(post["@email"], with: "PI:EMAIL:<EMAIL>END_PI")
it "allows dependencies to be overwritten", ->
author = Serenade(name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", dependsOn: []
post = new Post(author: author)
expect(-> author.name = "peter").not.to.emit(post["@name"])
it "can set prefix", ->
author = Serenade(name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", prefix: true
post = new Post(author: { name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI" })
expect(post.authorName).to.eql("PI:NAME:<NAME>END_PI")
expect(post.authorEmail).to.eql("PI:EMAIL:<EMAIL>END_PI")
it "can set suffix", ->
author = Serenade(name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", suffix: true
post = new Post(author: { name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI" })
expect(post.nameAuthor).to.eql("PI:NAME:<NAME>END_PI")
expect(post.emailAuthor).to.eql("PI:EMAIL:<EMAIL>END_PI")
it "can set prefix as string", ->
author = Serenade(name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", prefix: "quox"
post = new Post(author: { name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI" })
expect(post.quoxName).to.eql("PI:NAME:<NAME>END_PI")
expect(post.quoxEmail).to.eql("PI:EMAIL:<EMAIL>END_PI")
it "can set suffix as string", ->
author = Serenade(name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI")
class Post extends Serenade.Model
@delegate "name", "email", to: "author", suffix: "Quox"
post = new Post(author: { name: "PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI" })
expect(post.nameQuox).to.eql("PI:NAME:<NAME>END_PI")
expect(post.emailQuox).to.eql("PI:EMAIL:<EMAIL>END_PI")
|
[
{
"context": " at: \"@\",\n insertTpl: '<a href=\"/users/${name}\" target=\"_blank\">@${name}</a>',\n data: @par",
"end": 5677,
"score": 0.5140715837478638,
"start": 5673,
"tag": "NAME",
"value": "name"
},
{
"context": "nsertTpl: '<a href=\"/users/${name}\" target=\"_bl... | app/assets/javascripts/spaces/tasks.coffee | pamelladnanexus/pfda-release-repo | 57 | class SpacesTasksView extends Precision.SpacesTasks.PageModel
getTaskById: (task_id) ->
return $.get("/spaces/#{@space_id}/tasks/#{task_id}/task")
updateSelected: (e) ->
tasksActions = @selectedTasksActions()
@selectedItems.remove e.target.value
delete(tasksActions[e.target.value])
if e.target.checked
@selectedItems.push e.target.value
tasksActions[e.target.value] = $(e.target).attr('data-actions')
@selectedTasksActions(tasksActions)
@toggleActions()
toggleActions: () ->
actions = []
selectedActions = @selectedTasksActions()
for taskId of selectedActions
actions = actions.concat selectedActions[taskId].split(' ')
actions = _.uniq(actions)
$('[data-task-action]').addClass('hidden')
for action in actions
$("[data-task-action=#{'"' + action + '"'}]").removeClass('hidden')
return actions
acceptTasks: () ->
@postAction("/spaces/#{@space_id}/tasks/accept", {
task_ids: @selectedItems()
}, 'Task(s) has been marked as accepted!')
completeTasks: () ->
@postAction("/spaces/#{@space_id}/tasks/complete", {
task_ids: @selectedItems()
}, 'Task(s) has been marked as completed!')
createTask: () ->
@newTaskModal.createTask()
declineTasks: () ->
@declineTaskModal.declineTask(@selectedItems())
reopenTasks: () ->
@reopenTaskModal.reopenTasks(@selectedItems())
makeActiveTasks: () ->
@makeActiveTaskModal.makeActiveTasks(@selectedItems())
cloneTask: () ->
@cloneTaskModal.cloneTask()
editTask: () ->
@editTaskModal.editTask(@selectedItems()[0])
commentTask: () ->
@newCommentModal.commentTask(@selectedItems()[0])
reassignTask: () ->
@reassignTaskModal.reassignTask(@selectedItems()[0])
deleteTask: () ->
@deleteTaskModal.deleteTask(@selectedItems()[0])
constructor: (params) ->
super()
@selectedItems = ko.observableArray([])
@selectedTasksActions = ko.observable({})
@singleActions = ko.computed(() => !@actionsDisabled() and @selectedItems().length == 1)
@multiActions = ko.computed(() => !@actionsDisabled() and @selectedItems().length > 0)
@space_id = params.space_id
@newTaskModal = new Precision.SpacesTasks.NewTaskModal(params)
@newCommentModal = new Precision.SpacesTasks.NewCommentModal(params, 'comment_task_modal')
@declineTaskModal = new Precision.SpacesTasks.DeclineTaskModal(params)
@declineTaskModal.modal.on 'show.bs.modal', =>
@declineTaskModal.tasksCount(@selectedItems().length)
@reopenTaskModal = new Precision.SpacesTasks.ReopenTaskModal(params)
@reopenTaskModal.modal.on 'show.bs.modal', =>
@reopenTaskModal.tasksCount(@selectedItems().length)
@makeActiveTaskModal = new Precision.SpacesTasks.MakeActiveTaskModal(params)
@makeActiveTaskModal.modal.on 'show.bs.modal', =>
@makeActiveTaskModal.tasksCount(@selectedItems().length)
@cloneTaskModal = new Precision.SpacesTasks.CloneTaskModal(params)
@cloneTaskModal.modal.on 'show.bs.modal', =>
@cloneTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@cloneTaskModal.task.name(res.name)
@cloneTaskModal.task.assignee_id(res.assignee_id)
@cloneTaskModal.task.response_deadline(res.response_deadline)
@cloneTaskModal.task.completion_deadline(res.completion_deadline)
@cloneTaskModal.task.description(res.description)
@cloneTaskModal.isLoading(false)
@editTaskModal = new Precision.SpacesTasks.EditTaskModal(params)
@editTaskModal.modal.on 'show.bs.modal', =>
@editTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@editTaskModal.task.name(res.name)
@editTaskModal.task.assignee_id(res.assignee_id)
@editTaskModal.task.response_deadline(res.response_deadline)
@editTaskModal.task.completion_deadline(res.completion_deadline)
@editTaskModal.task.description(res.description)
@editTaskModal.isLoading(false)
@reassignTaskModal = new Precision.SpacesTasks.ReassignTaskModal(params)
@reassignTaskModal.modal.on 'show.bs.modal', =>
@reassignTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@reassignTaskModal.task.name(res.name)
@reassignTaskModal.task.assignee_id(res.assignee_id)
@reassignTaskModal.task.response_deadline(res.response_deadline_f)
@reassignTaskModal.task.completion_deadline(res.completion_deadline_f)
@reassignTaskModal.task.description(res.description)
@reassignTaskModal.isLoading(false)
@deleteTaskModal = new Precision.SpacesTasks.DeleteTaskModal(params)
@deleteTaskModal.modal.on 'show.bs.modal', =>
@deleteTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@deleteTaskModal.task.name(res.name)
@deleteTaskModal.task.assignee_id(res.assignee_id)
@deleteTaskModal.task.response_deadline(res.response_deadline)
@deleteTaskModal.task.completion_deadline(res.completion_deadline)
@deleteTaskModal.task.description(res.description)
@deleteTaskModal.isLoading(false)
#########################################################
#
#
# PALOMA CONTROLLER
#
#
#########################################################
SpacesController = Paloma.controller('Spaces', {
tasks: ->
$container = $("#ko_spaces_tasks_container")
viewModel = new SpacesTasksView(@params)
ko.applyBindings(viewModel, $container[0])
editable = $(viewModel.newCommentModal.modal).find('.add-atwho')
editable.atwho({
at: "@",
insertTpl: '<a href="/users/${name}" target="_blank">@${name}</a>',
data: @params.users.map (user) -> user.label
})
editable.on 'input', viewModel.newCommentModal.changeCommentText
editable.on 'inserted.atwho', viewModel.newCommentModal.changeCommentText
$('.select-all-tasks').on 'change', (e) ->
checked = e.target.checked
$('.select-task').each((index, item) ->
item.checked = checked
$(item).trigger('change')
return true
)
$('.select-task').on 'change', (e) ->
viewModel.updateSelected(e)
$('#spaces_tasks_accept').on 'click', (e) ->
viewModel.acceptTasks()
$('#spaces_tasks_complete').on 'click', (e) ->
viewModel.completeTasks()
$('#create_task_modal_submit').on 'click', (e) ->
viewModel.createTask()
$('#decline_task_modal_submit').on 'click', (e) ->
viewModel.declineTasks()
$('#reopen_task_modal_submit').on 'click', (e) ->
viewModel.reopenTasks()
$('#make_active_task_modal_submit').on 'click', (e) ->
viewModel.makeActiveTasks()
$('#clone_task_modal_submit').on 'click', (e) ->
viewModel.cloneTask()
$('#edit_task_modal_submit').on 'click', (e) ->
viewModel.editTask()
$('#reassign_task_modal_submit').on 'click', (e) ->
viewModel.reassignTask()
$('#delete_task_modal_submit').on 'click', (e) ->
viewModel.deleteTask()
$('#comment_task_modal_submit').on 'click', (e) ->
viewModel.commentTask()
$('.modal').on 'hide.bs.modal', () ->
viewModel.newTaskModal.clear()
viewModel.newCommentModal.clear()
viewModel.reopenTaskModal.clear()
viewModel.makeActiveTaskModal.clear()
viewModel.declineTaskModal.clear()
viewModel.cloneTaskModal.clear()
viewModel.editTaskModal.clear()
viewModel.reassignTaskModal.clear()
viewModel.deleteTaskModal.clear()
})
| 72987 | class SpacesTasksView extends Precision.SpacesTasks.PageModel
getTaskById: (task_id) ->
return $.get("/spaces/#{@space_id}/tasks/#{task_id}/task")
updateSelected: (e) ->
tasksActions = @selectedTasksActions()
@selectedItems.remove e.target.value
delete(tasksActions[e.target.value])
if e.target.checked
@selectedItems.push e.target.value
tasksActions[e.target.value] = $(e.target).attr('data-actions')
@selectedTasksActions(tasksActions)
@toggleActions()
toggleActions: () ->
actions = []
selectedActions = @selectedTasksActions()
for taskId of selectedActions
actions = actions.concat selectedActions[taskId].split(' ')
actions = _.uniq(actions)
$('[data-task-action]').addClass('hidden')
for action in actions
$("[data-task-action=#{'"' + action + '"'}]").removeClass('hidden')
return actions
acceptTasks: () ->
@postAction("/spaces/#{@space_id}/tasks/accept", {
task_ids: @selectedItems()
}, 'Task(s) has been marked as accepted!')
completeTasks: () ->
@postAction("/spaces/#{@space_id}/tasks/complete", {
task_ids: @selectedItems()
}, 'Task(s) has been marked as completed!')
createTask: () ->
@newTaskModal.createTask()
declineTasks: () ->
@declineTaskModal.declineTask(@selectedItems())
reopenTasks: () ->
@reopenTaskModal.reopenTasks(@selectedItems())
makeActiveTasks: () ->
@makeActiveTaskModal.makeActiveTasks(@selectedItems())
cloneTask: () ->
@cloneTaskModal.cloneTask()
editTask: () ->
@editTaskModal.editTask(@selectedItems()[0])
commentTask: () ->
@newCommentModal.commentTask(@selectedItems()[0])
reassignTask: () ->
@reassignTaskModal.reassignTask(@selectedItems()[0])
deleteTask: () ->
@deleteTaskModal.deleteTask(@selectedItems()[0])
constructor: (params) ->
super()
@selectedItems = ko.observableArray([])
@selectedTasksActions = ko.observable({})
@singleActions = ko.computed(() => !@actionsDisabled() and @selectedItems().length == 1)
@multiActions = ko.computed(() => !@actionsDisabled() and @selectedItems().length > 0)
@space_id = params.space_id
@newTaskModal = new Precision.SpacesTasks.NewTaskModal(params)
@newCommentModal = new Precision.SpacesTasks.NewCommentModal(params, 'comment_task_modal')
@declineTaskModal = new Precision.SpacesTasks.DeclineTaskModal(params)
@declineTaskModal.modal.on 'show.bs.modal', =>
@declineTaskModal.tasksCount(@selectedItems().length)
@reopenTaskModal = new Precision.SpacesTasks.ReopenTaskModal(params)
@reopenTaskModal.modal.on 'show.bs.modal', =>
@reopenTaskModal.tasksCount(@selectedItems().length)
@makeActiveTaskModal = new Precision.SpacesTasks.MakeActiveTaskModal(params)
@makeActiveTaskModal.modal.on 'show.bs.modal', =>
@makeActiveTaskModal.tasksCount(@selectedItems().length)
@cloneTaskModal = new Precision.SpacesTasks.CloneTaskModal(params)
@cloneTaskModal.modal.on 'show.bs.modal', =>
@cloneTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@cloneTaskModal.task.name(res.name)
@cloneTaskModal.task.assignee_id(res.assignee_id)
@cloneTaskModal.task.response_deadline(res.response_deadline)
@cloneTaskModal.task.completion_deadline(res.completion_deadline)
@cloneTaskModal.task.description(res.description)
@cloneTaskModal.isLoading(false)
@editTaskModal = new Precision.SpacesTasks.EditTaskModal(params)
@editTaskModal.modal.on 'show.bs.modal', =>
@editTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@editTaskModal.task.name(res.name)
@editTaskModal.task.assignee_id(res.assignee_id)
@editTaskModal.task.response_deadline(res.response_deadline)
@editTaskModal.task.completion_deadline(res.completion_deadline)
@editTaskModal.task.description(res.description)
@editTaskModal.isLoading(false)
@reassignTaskModal = new Precision.SpacesTasks.ReassignTaskModal(params)
@reassignTaskModal.modal.on 'show.bs.modal', =>
@reassignTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@reassignTaskModal.task.name(res.name)
@reassignTaskModal.task.assignee_id(res.assignee_id)
@reassignTaskModal.task.response_deadline(res.response_deadline_f)
@reassignTaskModal.task.completion_deadline(res.completion_deadline_f)
@reassignTaskModal.task.description(res.description)
@reassignTaskModal.isLoading(false)
@deleteTaskModal = new Precision.SpacesTasks.DeleteTaskModal(params)
@deleteTaskModal.modal.on 'show.bs.modal', =>
@deleteTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@deleteTaskModal.task.name(res.name)
@deleteTaskModal.task.assignee_id(res.assignee_id)
@deleteTaskModal.task.response_deadline(res.response_deadline)
@deleteTaskModal.task.completion_deadline(res.completion_deadline)
@deleteTaskModal.task.description(res.description)
@deleteTaskModal.isLoading(false)
#########################################################
#
#
# PALOMA CONTROLLER
#
#
#########################################################
SpacesController = Paloma.controller('Spaces', {
tasks: ->
$container = $("#ko_spaces_tasks_container")
viewModel = new SpacesTasksView(@params)
ko.applyBindings(viewModel, $container[0])
editable = $(viewModel.newCommentModal.modal).find('.add-atwho')
editable.atwho({
at: "@",
insertTpl: '<a href="/users/${<NAME>}" target="_blank">@${name}</a>',
data: @params.users.map (user) -> user.label
})
editable.on 'input', viewModel.newCommentModal.changeCommentText
editable.on 'inserted.atwho', viewModel.newCommentModal.changeCommentText
$('.select-all-tasks').on 'change', (e) ->
checked = e.target.checked
$('.select-task').each((index, item) ->
item.checked = checked
$(item).trigger('change')
return true
)
$('.select-task').on 'change', (e) ->
viewModel.updateSelected(e)
$('#spaces_tasks_accept').on 'click', (e) ->
viewModel.acceptTasks()
$('#spaces_tasks_complete').on 'click', (e) ->
viewModel.completeTasks()
$('#create_task_modal_submit').on 'click', (e) ->
viewModel.createTask()
$('#decline_task_modal_submit').on 'click', (e) ->
viewModel.declineTasks()
$('#reopen_task_modal_submit').on 'click', (e) ->
viewModel.reopenTasks()
$('#make_active_task_modal_submit').on 'click', (e) ->
viewModel.makeActiveTasks()
$('#clone_task_modal_submit').on 'click', (e) ->
viewModel.cloneTask()
$('#edit_task_modal_submit').on 'click', (e) ->
viewModel.editTask()
$('#reassign_task_modal_submit').on 'click', (e) ->
viewModel.reassignTask()
$('#delete_task_modal_submit').on 'click', (e) ->
viewModel.deleteTask()
$('#comment_task_modal_submit').on 'click', (e) ->
viewModel.commentTask()
$('.modal').on 'hide.bs.modal', () ->
viewModel.newTaskModal.clear()
viewModel.newCommentModal.clear()
viewModel.reopenTaskModal.clear()
viewModel.makeActiveTaskModal.clear()
viewModel.declineTaskModal.clear()
viewModel.cloneTaskModal.clear()
viewModel.editTaskModal.clear()
viewModel.reassignTaskModal.clear()
viewModel.deleteTaskModal.clear()
})
| true | class SpacesTasksView extends Precision.SpacesTasks.PageModel
getTaskById: (task_id) ->
return $.get("/spaces/#{@space_id}/tasks/#{task_id}/task")
updateSelected: (e) ->
tasksActions = @selectedTasksActions()
@selectedItems.remove e.target.value
delete(tasksActions[e.target.value])
if e.target.checked
@selectedItems.push e.target.value
tasksActions[e.target.value] = $(e.target).attr('data-actions')
@selectedTasksActions(tasksActions)
@toggleActions()
toggleActions: () ->
actions = []
selectedActions = @selectedTasksActions()
for taskId of selectedActions
actions = actions.concat selectedActions[taskId].split(' ')
actions = _.uniq(actions)
$('[data-task-action]').addClass('hidden')
for action in actions
$("[data-task-action=#{'"' + action + '"'}]").removeClass('hidden')
return actions
acceptTasks: () ->
@postAction("/spaces/#{@space_id}/tasks/accept", {
task_ids: @selectedItems()
}, 'Task(s) has been marked as accepted!')
completeTasks: () ->
@postAction("/spaces/#{@space_id}/tasks/complete", {
task_ids: @selectedItems()
}, 'Task(s) has been marked as completed!')
createTask: () ->
@newTaskModal.createTask()
declineTasks: () ->
@declineTaskModal.declineTask(@selectedItems())
reopenTasks: () ->
@reopenTaskModal.reopenTasks(@selectedItems())
makeActiveTasks: () ->
@makeActiveTaskModal.makeActiveTasks(@selectedItems())
cloneTask: () ->
@cloneTaskModal.cloneTask()
editTask: () ->
@editTaskModal.editTask(@selectedItems()[0])
commentTask: () ->
@newCommentModal.commentTask(@selectedItems()[0])
reassignTask: () ->
@reassignTaskModal.reassignTask(@selectedItems()[0])
deleteTask: () ->
@deleteTaskModal.deleteTask(@selectedItems()[0])
constructor: (params) ->
super()
@selectedItems = ko.observableArray([])
@selectedTasksActions = ko.observable({})
@singleActions = ko.computed(() => !@actionsDisabled() and @selectedItems().length == 1)
@multiActions = ko.computed(() => !@actionsDisabled() and @selectedItems().length > 0)
@space_id = params.space_id
@newTaskModal = new Precision.SpacesTasks.NewTaskModal(params)
@newCommentModal = new Precision.SpacesTasks.NewCommentModal(params, 'comment_task_modal')
@declineTaskModal = new Precision.SpacesTasks.DeclineTaskModal(params)
@declineTaskModal.modal.on 'show.bs.modal', =>
@declineTaskModal.tasksCount(@selectedItems().length)
@reopenTaskModal = new Precision.SpacesTasks.ReopenTaskModal(params)
@reopenTaskModal.modal.on 'show.bs.modal', =>
@reopenTaskModal.tasksCount(@selectedItems().length)
@makeActiveTaskModal = new Precision.SpacesTasks.MakeActiveTaskModal(params)
@makeActiveTaskModal.modal.on 'show.bs.modal', =>
@makeActiveTaskModal.tasksCount(@selectedItems().length)
@cloneTaskModal = new Precision.SpacesTasks.CloneTaskModal(params)
@cloneTaskModal.modal.on 'show.bs.modal', =>
@cloneTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@cloneTaskModal.task.name(res.name)
@cloneTaskModal.task.assignee_id(res.assignee_id)
@cloneTaskModal.task.response_deadline(res.response_deadline)
@cloneTaskModal.task.completion_deadline(res.completion_deadline)
@cloneTaskModal.task.description(res.description)
@cloneTaskModal.isLoading(false)
@editTaskModal = new Precision.SpacesTasks.EditTaskModal(params)
@editTaskModal.modal.on 'show.bs.modal', =>
@editTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@editTaskModal.task.name(res.name)
@editTaskModal.task.assignee_id(res.assignee_id)
@editTaskModal.task.response_deadline(res.response_deadline)
@editTaskModal.task.completion_deadline(res.completion_deadline)
@editTaskModal.task.description(res.description)
@editTaskModal.isLoading(false)
@reassignTaskModal = new Precision.SpacesTasks.ReassignTaskModal(params)
@reassignTaskModal.modal.on 'show.bs.modal', =>
@reassignTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@reassignTaskModal.task.name(res.name)
@reassignTaskModal.task.assignee_id(res.assignee_id)
@reassignTaskModal.task.response_deadline(res.response_deadline_f)
@reassignTaskModal.task.completion_deadline(res.completion_deadline_f)
@reassignTaskModal.task.description(res.description)
@reassignTaskModal.isLoading(false)
@deleteTaskModal = new Precision.SpacesTasks.DeleteTaskModal(params)
@deleteTaskModal.modal.on 'show.bs.modal', =>
@deleteTaskModal.isLoading(true)
@getTaskById(@selectedItems()[0]).then (res) =>
@deleteTaskModal.task.name(res.name)
@deleteTaskModal.task.assignee_id(res.assignee_id)
@deleteTaskModal.task.response_deadline(res.response_deadline)
@deleteTaskModal.task.completion_deadline(res.completion_deadline)
@deleteTaskModal.task.description(res.description)
@deleteTaskModal.isLoading(false)
#########################################################
#
#
# PALOMA CONTROLLER
#
#
#########################################################
SpacesController = Paloma.controller('Spaces', {
tasks: ->
$container = $("#ko_spaces_tasks_container")
viewModel = new SpacesTasksView(@params)
ko.applyBindings(viewModel, $container[0])
editable = $(viewModel.newCommentModal.modal).find('.add-atwho')
editable.atwho({
at: "@",
insertTpl: '<a href="/users/${PI:NAME:<NAME>END_PI}" target="_blank">@${name}</a>',
data: @params.users.map (user) -> user.label
})
editable.on 'input', viewModel.newCommentModal.changeCommentText
editable.on 'inserted.atwho', viewModel.newCommentModal.changeCommentText
$('.select-all-tasks').on 'change', (e) ->
checked = e.target.checked
$('.select-task').each((index, item) ->
item.checked = checked
$(item).trigger('change')
return true
)
$('.select-task').on 'change', (e) ->
viewModel.updateSelected(e)
$('#spaces_tasks_accept').on 'click', (e) ->
viewModel.acceptTasks()
$('#spaces_tasks_complete').on 'click', (e) ->
viewModel.completeTasks()
$('#create_task_modal_submit').on 'click', (e) ->
viewModel.createTask()
$('#decline_task_modal_submit').on 'click', (e) ->
viewModel.declineTasks()
$('#reopen_task_modal_submit').on 'click', (e) ->
viewModel.reopenTasks()
$('#make_active_task_modal_submit').on 'click', (e) ->
viewModel.makeActiveTasks()
$('#clone_task_modal_submit').on 'click', (e) ->
viewModel.cloneTask()
$('#edit_task_modal_submit').on 'click', (e) ->
viewModel.editTask()
$('#reassign_task_modal_submit').on 'click', (e) ->
viewModel.reassignTask()
$('#delete_task_modal_submit').on 'click', (e) ->
viewModel.deleteTask()
$('#comment_task_modal_submit').on 'click', (e) ->
viewModel.commentTask()
$('.modal').on 'hide.bs.modal', () ->
viewModel.newTaskModal.clear()
viewModel.newCommentModal.clear()
viewModel.reopenTaskModal.clear()
viewModel.makeActiveTaskModal.clear()
viewModel.declineTaskModal.clear()
viewModel.cloneTaskModal.clear()
viewModel.editTaskModal.clear()
viewModel.reassignTaskModal.clear()
viewModel.deleteTaskModal.clear()
})
|
[
{
"context": "\nevents = new EventEmitter()\n\nserverUrl = 'http://82.240.88.61:3030'\n\n# if !fs.existsSync __dirname+'/bd.json'\n#",
"end": 340,
"score": 0.9985998272895813,
"start": 328,
"tag": "IP_ADDRESS",
"value": "82.240.88.61"
}
] | src/app/app.coffee | CrepiereAuto/Soft | 0 | 'use strict'
jQuery = $ = require 'jquery'
EventEmitter = require 'events'
io = require 'socket.io-client'
jsonfile = require 'jsonfile'
Handlebars = require 'handlebars'
fs = require 'fs'
request = require 'request'
gpio = require 'gpio'
IOT = require('socket.io-iot').default
events = new EventEmitter()
serverUrl = 'http://82.240.88.61:3030'
# if !fs.existsSync __dirname+'/bd.json'
# fs.writeFileSync __dirname+'/bd.json', "{}"
# bd = jsonfile.readFileSync __dirname+'/bd.json'
iot = new IOT serverUrl, 'server'
room = null
iot.on 'connect', (msg) ->
console.log 'connected'
room = msg.room
iot.on 'id', (id) ->
# jsonfile.writeFileSync __dirname+'/bd.json', {id: id}
iot.on 'get', ->
iot.send 'command', {todo: command.todo, done: command.done, progress: command.progress(), room: room}
iot.on 'command', (d) ->
command.set d.todo
# commands = [{todo: 2, done:1}, {todo: 5, done: 0}]
#
# setCommand = (n) ->
# if commands[0]
# commands[0].todo = n
# else
# commands.push {todo: n, done: 0}
# renderCommand()
#
# renderCommand = ->
# view.contents.work = {
# timer: '15:12',
# percent: Math.round(commands[0].done*100/commands[0].todo),
# done: commands[0].done,
# todo: commands[0].todo
# }
# if view.view == 'work'
# view.update()
#
# addDone = ->
# commands[0].done = commands[0].done + 1
# renderCommand()
# if commands[0].done == commands[0].todo
# commands.splice(0,1)
# if commands[0]
# command.set(commands[0].todo)
# renderCommand()
# else
# command.set(0)
# console.log commands
viewStart = {
started: 0,
contents: {},
add: (name, params) ->
if @started
view.contents[name] = params
else
@contents[name] = params
start: ->
@started = 1
view.contents = @contents
view.set 'menu'
}
fs.existsSync = (filePath) ->
try
fs.statSync filePath
catch error
if error.code == 'ENOENT'
return false
return true
update = (changes) ->
for i of changes
change = changes[i]
console.log i+' = '+change
ctr_cmd[i] = change
ctr_cmd.selector -3
events.emit 'update'
| 53910 | 'use strict'
jQuery = $ = require 'jquery'
EventEmitter = require 'events'
io = require 'socket.io-client'
jsonfile = require 'jsonfile'
Handlebars = require 'handlebars'
fs = require 'fs'
request = require 'request'
gpio = require 'gpio'
IOT = require('socket.io-iot').default
events = new EventEmitter()
serverUrl = 'http://192.168.3.11:3030'
# if !fs.existsSync __dirname+'/bd.json'
# fs.writeFileSync __dirname+'/bd.json', "{}"
# bd = jsonfile.readFileSync __dirname+'/bd.json'
iot = new IOT serverUrl, 'server'
room = null
iot.on 'connect', (msg) ->
console.log 'connected'
room = msg.room
iot.on 'id', (id) ->
# jsonfile.writeFileSync __dirname+'/bd.json', {id: id}
iot.on 'get', ->
iot.send 'command', {todo: command.todo, done: command.done, progress: command.progress(), room: room}
iot.on 'command', (d) ->
command.set d.todo
# commands = [{todo: 2, done:1}, {todo: 5, done: 0}]
#
# setCommand = (n) ->
# if commands[0]
# commands[0].todo = n
# else
# commands.push {todo: n, done: 0}
# renderCommand()
#
# renderCommand = ->
# view.contents.work = {
# timer: '15:12',
# percent: Math.round(commands[0].done*100/commands[0].todo),
# done: commands[0].done,
# todo: commands[0].todo
# }
# if view.view == 'work'
# view.update()
#
# addDone = ->
# commands[0].done = commands[0].done + 1
# renderCommand()
# if commands[0].done == commands[0].todo
# commands.splice(0,1)
# if commands[0]
# command.set(commands[0].todo)
# renderCommand()
# else
# command.set(0)
# console.log commands
viewStart = {
started: 0,
contents: {},
add: (name, params) ->
if @started
view.contents[name] = params
else
@contents[name] = params
start: ->
@started = 1
view.contents = @contents
view.set 'menu'
}
fs.existsSync = (filePath) ->
try
fs.statSync filePath
catch error
if error.code == 'ENOENT'
return false
return true
update = (changes) ->
for i of changes
change = changes[i]
console.log i+' = '+change
ctr_cmd[i] = change
ctr_cmd.selector -3
events.emit 'update'
| true | 'use strict'
jQuery = $ = require 'jquery'
EventEmitter = require 'events'
io = require 'socket.io-client'
jsonfile = require 'jsonfile'
Handlebars = require 'handlebars'
fs = require 'fs'
request = require 'request'
gpio = require 'gpio'
IOT = require('socket.io-iot').default
events = new EventEmitter()
serverUrl = 'http://PI:IP_ADDRESS:192.168.3.11END_PI:3030'
# if !fs.existsSync __dirname+'/bd.json'
# fs.writeFileSync __dirname+'/bd.json', "{}"
# bd = jsonfile.readFileSync __dirname+'/bd.json'
iot = new IOT serverUrl, 'server'
room = null
iot.on 'connect', (msg) ->
console.log 'connected'
room = msg.room
iot.on 'id', (id) ->
# jsonfile.writeFileSync __dirname+'/bd.json', {id: id}
iot.on 'get', ->
iot.send 'command', {todo: command.todo, done: command.done, progress: command.progress(), room: room}
iot.on 'command', (d) ->
command.set d.todo
# commands = [{todo: 2, done:1}, {todo: 5, done: 0}]
#
# setCommand = (n) ->
# if commands[0]
# commands[0].todo = n
# else
# commands.push {todo: n, done: 0}
# renderCommand()
#
# renderCommand = ->
# view.contents.work = {
# timer: '15:12',
# percent: Math.round(commands[0].done*100/commands[0].todo),
# done: commands[0].done,
# todo: commands[0].todo
# }
# if view.view == 'work'
# view.update()
#
# addDone = ->
# commands[0].done = commands[0].done + 1
# renderCommand()
# if commands[0].done == commands[0].todo
# commands.splice(0,1)
# if commands[0]
# command.set(commands[0].todo)
# renderCommand()
# else
# command.set(0)
# console.log commands
viewStart = {
started: 0,
contents: {},
add: (name, params) ->
if @started
view.contents[name] = params
else
@contents[name] = params
start: ->
@started = 1
view.contents = @contents
view.set 'menu'
}
fs.existsSync = (filePath) ->
try
fs.statSync filePath
catch error
if error.code == 'ENOENT'
return false
return true
update = (changes) ->
for i of changes
change = changes[i]
console.log i+' = '+change
ctr_cmd[i] = change
ctr_cmd.selector -3
events.emit 'update'
|
[
{
"context": ": 1, hostname: 'host1', ip: '1.2.3.4', username: 'Glowacki', password: '12345' },\n { id: 2, hostname: '",
"end": 357,
"score": 0.9996243715286255,
"start": 349,
"tag": "USERNAME",
"value": "Glowacki"
},
{
"context": ", ip: '1.2.3.4', username: 'Glowacki', password... | app/assets/javascript/models/Host.coffee | ehudkaldor/GateKeeper | 0 | define ["App", "ember", "ember-data"], (App, Ember, DS) ->
App.Host = DS.Model.extend
hostname: DS.attr('string'),
ip: DS.attr('string'),
username: DS.attr('string'),
password: DS.attr('string')
console.log("Host model")
App.Host.reopenClass
FIXTURES: [
{ id: 1, hostname: 'host1', ip: '1.2.3.4', username: 'Glowacki', password: '12345' },
{ id: 2, hostname: 'host2', ip: '1.2.3.5' , username: 'Dale', password: '12345'}
]
| 109307 | define ["App", "ember", "ember-data"], (App, Ember, DS) ->
App.Host = DS.Model.extend
hostname: DS.attr('string'),
ip: DS.attr('string'),
username: DS.attr('string'),
password: DS.attr('string')
console.log("Host model")
App.Host.reopenClass
FIXTURES: [
{ id: 1, hostname: 'host1', ip: '1.2.3.4', username: 'Glowacki', password: '<PASSWORD>' },
{ id: 2, hostname: 'host2', ip: '1.2.3.5' , username: 'Dale', password: '<PASSWORD>'}
]
| true | define ["App", "ember", "ember-data"], (App, Ember, DS) ->
App.Host = DS.Model.extend
hostname: DS.attr('string'),
ip: DS.attr('string'),
username: DS.attr('string'),
password: DS.attr('string')
console.log("Host model")
App.Host.reopenClass
FIXTURES: [
{ id: 1, hostname: 'host1', ip: '1.2.3.4', username: 'Glowacki', password: 'PI:PASSWORD:<PASSWORD>END_PI' },
{ id: 2, hostname: 'host2', ip: '1.2.3.5' , username: 'Dale', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
]
|
[
{
"context": "map).toBeDefined()\n map2 = new Map({'20': 'ok', 'alice': 'wonderland'})\n it \"should add passed object's",
"end": 188,
"score": 0.9414763450622559,
"start": 183,
"tag": "NAME",
"value": "alice"
},
{
"context": "efined()\n map2 = new Map({'20': 'ok', 'alice': 'wonde... | tests/Map.spec.coffee | chenglou/data-structures | 106 | Map = require('../source').Map
describe "Create hash map", ->
map = new Map()
it "should give an empty hash map", ->
expect(map).toBeDefined()
map2 = new Map({'20': 'ok', 'alice': 'wonderland'})
it "should add passed object's (key, value) pairs to the map", ->
# console.log map2
callback = jasmine.createSpy()
map2.forEach callback
expect(callback).toHaveBeenCalledWith '20', 'ok'
expect(callback).toHaveBeenCalledWith 'alice', 'wonderland'
expect(callback).not.toHaveBeenCalledWith undefined, undefined
# Usually, the hash function should be hidden. But we allow the possibility of
# user-defined hash function.
describe "Hash function", ->
map = new Map()
it "should generate a unique hash for simple hashable types", ->
expect(map.hash 5, yes).toBe 'Number_5'
expect(map.hash "5", yes).toBe 'String_5'
expect(map.hash undefined, yes).toBe 'Undefined_undefined'
expect(map.hash null, yes).toBe 'Null_null'
expect(map.hash true, yes).toBe 'Boolean_true'
expect(map.hash /asd/, yes).toBe 'RegExp_/asd/'
expect(map.hash (-> "hello"), yes).toMatch /Function_.+/
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
date = new Date()
it "should generate a unique hash for objects, arrays, and dates", ->
expect(map.hash arr, yes).toMatch /_mapId_\d+_\d+/
expect(map.hash obj, yes).toMatch /_mapId_\d+_\d+/
expect(map.hash date, yes).toMatch /_mapId_\d+_\d+/
it "should have used obscure hacks by putting an id in arr and obj", ->
expect(arr._mapId_2).toEqual any Number
expect(obj._mapId_2).toEqual any Number
expect(date._mapId_2).toEqual any Number
describe "Set and get/has", ->
map = new Map()
it "should have an empty size initially", ->
expect(map.size).toBe 0
it "should have nothing when map's empty", ->
expect(map.get 5).toBeUndefined()
expect(map.get undefined).toBeUndefined()
expect(map.get null).toBeUndefined()
expect(map.get 0).toBeUndefined()
expect(map.get []).toBeUndefined()
expect(map.get -> "hello").toBeUndefined()
expect(map.get "5").toBeUndefined()
expect(map.has 5).toBeFalsy()
expect(map.has undefined).toBeFalsy()
expect(map.has null).toBeFalsy()
expect(map.has 0).toBeFalsy()
expect(map.has []).toBeFalsy()
expect(map.has -> "hello").toBeFalsy()
expect(map.has "5").toBeFalsy()
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
it "should put the key and its value into the map", ->
expect(map.set 5, "number 5").toBe "number 5"
expect(map.set "5", "string 5").toBe "string 5"
expect(map.set undefined, [1, 2, 3]).toEqual [1, 2, 3]
expect(map.set null, {a: 10}).toEqual {a: 10}
expect(map.set true, "ok").toBe "ok"
expect(map.set /asd/, false).toBe false
expect(map.set (-> "hello"), 99).toBe 99
expect(map.set arr, "array").toBe "array"
expect(map.set obj, "obj").toBe "obj"
expect(map.get 5).toBe "number 5"
expect(map.get "5").toBe "string 5"
expect(map.get undefined).toEqual [1, 2, 3]
expect(map.get null).toEqual {a: 10}
expect(map.get true).toBe "ok"
expect(map.get /asd/).toBe false
expect(map.get -> "hello").toBe 99
expect(map.get arr, "time").toBe "array"
expect(map.get obj, "time").toBe "obj"
expect(map.has 5).toBeTruthy()
expect(map.has "5").toBeTruthy()
expect(map.has undefined).toBeTruthy()
expect(map.has null).toBeTruthy()
expect(map.has true).toBeTruthy()
expect(map.has /asd/).toBeTruthy()
expect(map.has -> "hello").toBeTruthy()
expect(map.has arr, "time").toBeTruthy()
expect(map.has obj, "time").toBeTruthy()
it "should keep track of map size", ->
expect(map.size).toBe 9
it "should override previous value", ->
map.set 5, "number 6"
map.set "5", "string 6"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
expect(map.get 5).toBe "number 6"
expect(map.get "5").toBe "string 6"
expect(map.get undefined).toEqual [3, 2]
expect(map.get null).toEqual {b: 12}
expect(map.get true).toBe "okay"
expect(map.get /asd/).toBe true
expect(map.get -> "hello").toBe 10
it "shouldn't have changed the map's size", ->
expect(map.size).toBe 9
it "should return undefined if the key's not found", ->
expect(map.get 6).toBeUndefined()
expect(map.get -> "bye").toBeUndefined()
# TODO: might not be the desired behavior. Maybe it should be found.
expect(map.get [1, 2, {a: "hello"}, [3, 4, 5]]).toBeUndefined()
expect(map.get {a: "hi", b: {}, c: [1, 2], d: arr}).toBeUndefined()
expect(map.has [1, 2, {a: "hello"}, [3, 4, 5]]).toBeFalsy()
expect(map.has {a: "hi", b: {}, c: [1, 2], d: arr}).toBeFalsy()
date1 = new Date()
date2 = new Date()
it "should store two Date objects distinctively", ->
map.set date1, "date 1"
map.set date2, "date 2"
expect(map.get date1).toBe "date 1"
expect(map.get date2).toBe "date 2"
expect(map.get new Date()).toBeUndefined()
expect(map.has new Date()).toBeFalsy()
map2 = new Map()
it "should store a id on the key that's unique to each map", ->
expect(map2.get date1).toBeUndefined()
map2.set(date1, "date 1 on map 2")
expect(map.get date1).toBe "date 1"
expect(map2.get date1).toBe "date 1 on map 2"
describe "Delete", ->
map = new Map()
it "should return false for deleting nonexistent keys", ->
expect(map.delete 5).toBeFalsy()
expect(map.delete undefined).toBeFalsy()
expect(map.delete null).toBeFalsy()
expect(map.delete 0).toBeFalsy()
expect(map.delete []).toBeFalsy()
expect(map.delete -> "hello").toBeFalsy()
expect(map.delete "5").toBeFalsy()
it "shouldn't decrease the size count after fail deletes", ->
expect(map.size).toBe 0
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
date1 = new Date()
date2 = new Date()
it "should return true after deleting a valid key", ->
map.set 5, "number 6"
map.set "5", "string 6"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
expect(map.delete 5).toBeTruthy()
expect(map.delete "5").toBeTruthy()
expect(map.delete undefined).toBeTruthy()
expect(map.delete null).toBeTruthy()
expect(map.delete true).toBeTruthy()
expect(map.delete /asd/).toBeTruthy()
expect(map.delete -> "hello").toBeTruthy()
it "should have updated the size", ->
expect(map.size).toBe 0
it "shouldn't find anything after emptying a map", ->
expect(map.delete 5).toBeFalsy()
expect(map.delete "5").toBeFalsy()
expect(map.delete undefined).toBeFalsy()
expect(map.delete null).toBeFalsy()
expect(map.delete true).toBeFalsy()
expect(map.delete /asd/).toBeFalsy()
expect(map.delete -> "hello").toBeFalsy()
it "should delete the hacky property from special data types", ->
map.set arr, "array"
map.set obj, "object"
map.set date1, "date 1"
map.set date2, "date 2"
map.delete arr
map.delete obj
map.delete date1
map.delete date2
expect(arr._mapId_3).toBeUndefined()
expect(obj._mapId_3).toBeUndefined()
expect(date1._mapId_3).toBeUndefined()
expect(date2._mapId_3).toBeUndefined()
it "should keep the count correct after removing special keys", ->
expect(map.size).toBe 0
describe "Iterate through items", ->
map = new Map()
it "shouldn't call the callback when there's nothing to iterate through", ->
callback = jasmine.createSpy()
map.forEach callback
expect(callback).not.toHaveBeenCalled()
it "should have called the callback correctly", ->
map.set 5, "number 5"
map.set "5", "string 5"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
callback = jasmine.createSpy()
map.forEach callback
expect(callback).toHaveBeenCalledWith 5, 'number 5'
expect(callback).toHaveBeenCalledWith '5', 'string 5'
expect(callback).toHaveBeenCalledWith undefined, [ 3, 2 ]
expect(callback).toHaveBeenCalledWith null, { b : 12 }
expect(callback).toHaveBeenCalledWith true, 'okay'
expect(callback).toHaveBeenCalledWith /asd/, true
expect(callback).toHaveBeenCalledWith any(Function), 10
| 147197 | Map = require('../source').Map
describe "Create hash map", ->
map = new Map()
it "should give an empty hash map", ->
expect(map).toBeDefined()
map2 = new Map({'20': 'ok', '<NAME>': 'w<NAME>'})
it "should add passed object's (key, value) pairs to the map", ->
# console.log map2
callback = jasmine.createSpy()
map2.forEach callback
expect(callback).toHaveBeenCalledWith '20', 'ok'
expect(callback).toHaveBeenCalledWith '<NAME>', 'wonderland'
expect(callback).not.toHaveBeenCalledWith undefined, undefined
# Usually, the hash function should be hidden. But we allow the possibility of
# user-defined hash function.
describe "Hash function", ->
map = new Map()
it "should generate a unique hash for simple hashable types", ->
expect(map.hash 5, yes).toBe 'Number_5'
expect(map.hash "5", yes).toBe 'String_5'
expect(map.hash undefined, yes).toBe 'Undefined_undefined'
expect(map.hash null, yes).toBe 'Null_null'
expect(map.hash true, yes).toBe 'Boolean_true'
expect(map.hash /asd/, yes).toBe 'RegExp_/asd/'
expect(map.hash (-> "hello"), yes).toMatch /Function_.+/
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
date = new Date()
it "should generate a unique hash for objects, arrays, and dates", ->
expect(map.hash arr, yes).toMatch /_mapId_\d+_\d+/
expect(map.hash obj, yes).toMatch /_mapId_\d+_\d+/
expect(map.hash date, yes).toMatch /_mapId_\d+_\d+/
it "should have used obscure hacks by putting an id in arr and obj", ->
expect(arr._mapId_2).toEqual any Number
expect(obj._mapId_2).toEqual any Number
expect(date._mapId_2).toEqual any Number
describe "Set and get/has", ->
map = new Map()
it "should have an empty size initially", ->
expect(map.size).toBe 0
it "should have nothing when map's empty", ->
expect(map.get 5).toBeUndefined()
expect(map.get undefined).toBeUndefined()
expect(map.get null).toBeUndefined()
expect(map.get 0).toBeUndefined()
expect(map.get []).toBeUndefined()
expect(map.get -> "hello").toBeUndefined()
expect(map.get "5").toBeUndefined()
expect(map.has 5).toBeFalsy()
expect(map.has undefined).toBeFalsy()
expect(map.has null).toBeFalsy()
expect(map.has 0).toBeFalsy()
expect(map.has []).toBeFalsy()
expect(map.has -> "hello").toBeFalsy()
expect(map.has "5").toBeFalsy()
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
it "should put the key and its value into the map", ->
expect(map.set 5, "number 5").toBe "number 5"
expect(map.set "5", "string 5").toBe "string 5"
expect(map.set undefined, [1, 2, 3]).toEqual [1, 2, 3]
expect(map.set null, {a: 10}).toEqual {a: 10}
expect(map.set true, "ok").toBe "ok"
expect(map.set /asd/, false).toBe false
expect(map.set (-> "hello"), 99).toBe 99
expect(map.set arr, "array").toBe "array"
expect(map.set obj, "obj").toBe "obj"
expect(map.get 5).toBe "number 5"
expect(map.get "5").toBe "string 5"
expect(map.get undefined).toEqual [1, 2, 3]
expect(map.get null).toEqual {a: 10}
expect(map.get true).toBe "ok"
expect(map.get /asd/).toBe false
expect(map.get -> "hello").toBe 99
expect(map.get arr, "time").toBe "array"
expect(map.get obj, "time").toBe "obj"
expect(map.has 5).toBeTruthy()
expect(map.has "5").toBeTruthy()
expect(map.has undefined).toBeTruthy()
expect(map.has null).toBeTruthy()
expect(map.has true).toBeTruthy()
expect(map.has /asd/).toBeTruthy()
expect(map.has -> "hello").toBeTruthy()
expect(map.has arr, "time").toBeTruthy()
expect(map.has obj, "time").toBeTruthy()
it "should keep track of map size", ->
expect(map.size).toBe 9
it "should override previous value", ->
map.set 5, "number 6"
map.set "5", "string 6"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
expect(map.get 5).toBe "number 6"
expect(map.get "5").toBe "string 6"
expect(map.get undefined).toEqual [3, 2]
expect(map.get null).toEqual {b: 12}
expect(map.get true).toBe "okay"
expect(map.get /asd/).toBe true
expect(map.get -> "hello").toBe 10
it "shouldn't have changed the map's size", ->
expect(map.size).toBe 9
it "should return undefined if the key's not found", ->
expect(map.get 6).toBeUndefined()
expect(map.get -> "bye").toBeUndefined()
# TODO: might not be the desired behavior. Maybe it should be found.
expect(map.get [1, 2, {a: "hello"}, [3, 4, 5]]).toBeUndefined()
expect(map.get {a: "hi", b: {}, c: [1, 2], d: arr}).toBeUndefined()
expect(map.has [1, 2, {a: "hello"}, [3, 4, 5]]).toBeFalsy()
expect(map.has {a: "hi", b: {}, c: [1, 2], d: arr}).toBeFalsy()
date1 = new Date()
date2 = new Date()
it "should store two Date objects distinctively", ->
map.set date1, "date 1"
map.set date2, "date 2"
expect(map.get date1).toBe "date 1"
expect(map.get date2).toBe "date 2"
expect(map.get new Date()).toBeUndefined()
expect(map.has new Date()).toBeFalsy()
map2 = new Map()
it "should store a id on the key that's unique to each map", ->
expect(map2.get date1).toBeUndefined()
map2.set(date1, "date 1 on map 2")
expect(map.get date1).toBe "date 1"
expect(map2.get date1).toBe "date 1 on map 2"
describe "Delete", ->
map = new Map()
it "should return false for deleting nonexistent keys", ->
expect(map.delete 5).toBeFalsy()
expect(map.delete undefined).toBeFalsy()
expect(map.delete null).toBeFalsy()
expect(map.delete 0).toBeFalsy()
expect(map.delete []).toBeFalsy()
expect(map.delete -> "hello").toBeFalsy()
expect(map.delete "5").toBeFalsy()
it "shouldn't decrease the size count after fail deletes", ->
expect(map.size).toBe 0
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
date1 = new Date()
date2 = new Date()
it "should return true after deleting a valid key", ->
map.set 5, "number 6"
map.set "5", "string 6"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
expect(map.delete 5).toBeTruthy()
expect(map.delete "5").toBeTruthy()
expect(map.delete undefined).toBeTruthy()
expect(map.delete null).toBeTruthy()
expect(map.delete true).toBeTruthy()
expect(map.delete /asd/).toBeTruthy()
expect(map.delete -> "hello").toBeTruthy()
it "should have updated the size", ->
expect(map.size).toBe 0
it "shouldn't find anything after emptying a map", ->
expect(map.delete 5).toBeFalsy()
expect(map.delete "5").toBeFalsy()
expect(map.delete undefined).toBeFalsy()
expect(map.delete null).toBeFalsy()
expect(map.delete true).toBeFalsy()
expect(map.delete /asd/).toBeFalsy()
expect(map.delete -> "hello").toBeFalsy()
it "should delete the hacky property from special data types", ->
map.set arr, "array"
map.set obj, "object"
map.set date1, "date 1"
map.set date2, "date 2"
map.delete arr
map.delete obj
map.delete date1
map.delete date2
expect(arr._mapId_3).toBeUndefined()
expect(obj._mapId_3).toBeUndefined()
expect(date1._mapId_3).toBeUndefined()
expect(date2._mapId_3).toBeUndefined()
it "should keep the count correct after removing special keys", ->
expect(map.size).toBe 0
describe "Iterate through items", ->
map = new Map()
it "shouldn't call the callback when there's nothing to iterate through", ->
callback = jasmine.createSpy()
map.forEach callback
expect(callback).not.toHaveBeenCalled()
it "should have called the callback correctly", ->
map.set 5, "number 5"
map.set "5", "string 5"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
callback = jasmine.createSpy()
map.forEach callback
expect(callback).toHaveBeenCalledWith 5, 'number 5'
expect(callback).toHaveBeenCalledWith '5', 'string 5'
expect(callback).toHaveBeenCalledWith undefined, [ 3, 2 ]
expect(callback).toHaveBeenCalledWith null, { b : 12 }
expect(callback).toHaveBeenCalledWith true, 'okay'
expect(callback).toHaveBeenCalledWith /asd/, true
expect(callback).toHaveBeenCalledWith any(Function), 10
| true | Map = require('../source').Map
describe "Create hash map", ->
map = new Map()
it "should give an empty hash map", ->
expect(map).toBeDefined()
map2 = new Map({'20': 'ok', 'PI:NAME:<NAME>END_PI': 'wPI:NAME:<NAME>END_PI'})
it "should add passed object's (key, value) pairs to the map", ->
# console.log map2
callback = jasmine.createSpy()
map2.forEach callback
expect(callback).toHaveBeenCalledWith '20', 'ok'
expect(callback).toHaveBeenCalledWith 'PI:NAME:<NAME>END_PI', 'wonderland'
expect(callback).not.toHaveBeenCalledWith undefined, undefined
# Usually, the hash function should be hidden. But we allow the possibility of
# user-defined hash function.
describe "Hash function", ->
map = new Map()
it "should generate a unique hash for simple hashable types", ->
expect(map.hash 5, yes).toBe 'Number_5'
expect(map.hash "5", yes).toBe 'String_5'
expect(map.hash undefined, yes).toBe 'Undefined_undefined'
expect(map.hash null, yes).toBe 'Null_null'
expect(map.hash true, yes).toBe 'Boolean_true'
expect(map.hash /asd/, yes).toBe 'RegExp_/asd/'
expect(map.hash (-> "hello"), yes).toMatch /Function_.+/
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
date = new Date()
it "should generate a unique hash for objects, arrays, and dates", ->
expect(map.hash arr, yes).toMatch /_mapId_\d+_\d+/
expect(map.hash obj, yes).toMatch /_mapId_\d+_\d+/
expect(map.hash date, yes).toMatch /_mapId_\d+_\d+/
it "should have used obscure hacks by putting an id in arr and obj", ->
expect(arr._mapId_2).toEqual any Number
expect(obj._mapId_2).toEqual any Number
expect(date._mapId_2).toEqual any Number
describe "Set and get/has", ->
map = new Map()
it "should have an empty size initially", ->
expect(map.size).toBe 0
it "should have nothing when map's empty", ->
expect(map.get 5).toBeUndefined()
expect(map.get undefined).toBeUndefined()
expect(map.get null).toBeUndefined()
expect(map.get 0).toBeUndefined()
expect(map.get []).toBeUndefined()
expect(map.get -> "hello").toBeUndefined()
expect(map.get "5").toBeUndefined()
expect(map.has 5).toBeFalsy()
expect(map.has undefined).toBeFalsy()
expect(map.has null).toBeFalsy()
expect(map.has 0).toBeFalsy()
expect(map.has []).toBeFalsy()
expect(map.has -> "hello").toBeFalsy()
expect(map.has "5").toBeFalsy()
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
it "should put the key and its value into the map", ->
expect(map.set 5, "number 5").toBe "number 5"
expect(map.set "5", "string 5").toBe "string 5"
expect(map.set undefined, [1, 2, 3]).toEqual [1, 2, 3]
expect(map.set null, {a: 10}).toEqual {a: 10}
expect(map.set true, "ok").toBe "ok"
expect(map.set /asd/, false).toBe false
expect(map.set (-> "hello"), 99).toBe 99
expect(map.set arr, "array").toBe "array"
expect(map.set obj, "obj").toBe "obj"
expect(map.get 5).toBe "number 5"
expect(map.get "5").toBe "string 5"
expect(map.get undefined).toEqual [1, 2, 3]
expect(map.get null).toEqual {a: 10}
expect(map.get true).toBe "ok"
expect(map.get /asd/).toBe false
expect(map.get -> "hello").toBe 99
expect(map.get arr, "time").toBe "array"
expect(map.get obj, "time").toBe "obj"
expect(map.has 5).toBeTruthy()
expect(map.has "5").toBeTruthy()
expect(map.has undefined).toBeTruthy()
expect(map.has null).toBeTruthy()
expect(map.has true).toBeTruthy()
expect(map.has /asd/).toBeTruthy()
expect(map.has -> "hello").toBeTruthy()
expect(map.has arr, "time").toBeTruthy()
expect(map.has obj, "time").toBeTruthy()
it "should keep track of map size", ->
expect(map.size).toBe 9
it "should override previous value", ->
map.set 5, "number 6"
map.set "5", "string 6"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
expect(map.get 5).toBe "number 6"
expect(map.get "5").toBe "string 6"
expect(map.get undefined).toEqual [3, 2]
expect(map.get null).toEqual {b: 12}
expect(map.get true).toBe "okay"
expect(map.get /asd/).toBe true
expect(map.get -> "hello").toBe 10
it "shouldn't have changed the map's size", ->
expect(map.size).toBe 9
it "should return undefined if the key's not found", ->
expect(map.get 6).toBeUndefined()
expect(map.get -> "bye").toBeUndefined()
# TODO: might not be the desired behavior. Maybe it should be found.
expect(map.get [1, 2, {a: "hello"}, [3, 4, 5]]).toBeUndefined()
expect(map.get {a: "hi", b: {}, c: [1, 2], d: arr}).toBeUndefined()
expect(map.has [1, 2, {a: "hello"}, [3, 4, 5]]).toBeFalsy()
expect(map.has {a: "hi", b: {}, c: [1, 2], d: arr}).toBeFalsy()
date1 = new Date()
date2 = new Date()
it "should store two Date objects distinctively", ->
map.set date1, "date 1"
map.set date2, "date 2"
expect(map.get date1).toBe "date 1"
expect(map.get date2).toBe "date 2"
expect(map.get new Date()).toBeUndefined()
expect(map.has new Date()).toBeFalsy()
map2 = new Map()
it "should store a id on the key that's unique to each map", ->
expect(map2.get date1).toBeUndefined()
map2.set(date1, "date 1 on map 2")
expect(map.get date1).toBe "date 1"
expect(map2.get date1).toBe "date 1 on map 2"
describe "Delete", ->
map = new Map()
it "should return false for deleting nonexistent keys", ->
expect(map.delete 5).toBeFalsy()
expect(map.delete undefined).toBeFalsy()
expect(map.delete null).toBeFalsy()
expect(map.delete 0).toBeFalsy()
expect(map.delete []).toBeFalsy()
expect(map.delete -> "hello").toBeFalsy()
expect(map.delete "5").toBeFalsy()
it "shouldn't decrease the size count after fail deletes", ->
expect(map.size).toBe 0
arr = [1, 2, {a: "hello"}, [3, 4, 5]]
obj = {a: "hi", b: {}, c: [1, 2], d: arr}
date1 = new Date()
date2 = new Date()
it "should return true after deleting a valid key", ->
map.set 5, "number 6"
map.set "5", "string 6"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
expect(map.delete 5).toBeTruthy()
expect(map.delete "5").toBeTruthy()
expect(map.delete undefined).toBeTruthy()
expect(map.delete null).toBeTruthy()
expect(map.delete true).toBeTruthy()
expect(map.delete /asd/).toBeTruthy()
expect(map.delete -> "hello").toBeTruthy()
it "should have updated the size", ->
expect(map.size).toBe 0
it "shouldn't find anything after emptying a map", ->
expect(map.delete 5).toBeFalsy()
expect(map.delete "5").toBeFalsy()
expect(map.delete undefined).toBeFalsy()
expect(map.delete null).toBeFalsy()
expect(map.delete true).toBeFalsy()
expect(map.delete /asd/).toBeFalsy()
expect(map.delete -> "hello").toBeFalsy()
it "should delete the hacky property from special data types", ->
map.set arr, "array"
map.set obj, "object"
map.set date1, "date 1"
map.set date2, "date 2"
map.delete arr
map.delete obj
map.delete date1
map.delete date2
expect(arr._mapId_3).toBeUndefined()
expect(obj._mapId_3).toBeUndefined()
expect(date1._mapId_3).toBeUndefined()
expect(date2._mapId_3).toBeUndefined()
it "should keep the count correct after removing special keys", ->
expect(map.size).toBe 0
describe "Iterate through items", ->
map = new Map()
it "shouldn't call the callback when there's nothing to iterate through", ->
callback = jasmine.createSpy()
map.forEach callback
expect(callback).not.toHaveBeenCalled()
it "should have called the callback correctly", ->
map.set 5, "number 5"
map.set "5", "string 5"
map.set undefined, [3, 2]
map.set null, {b: 12}
map.set true, "okay"
map.set /asd/, true
map.set (-> "hello"), 10
callback = jasmine.createSpy()
map.forEach callback
expect(callback).toHaveBeenCalledWith 5, 'number 5'
expect(callback).toHaveBeenCalledWith '5', 'string 5'
expect(callback).toHaveBeenCalledWith undefined, [ 3, 2 ]
expect(callback).toHaveBeenCalledWith null, { b : 12 }
expect(callback).toHaveBeenCalledWith true, 'okay'
expect(callback).toHaveBeenCalledWith /asd/, true
expect(callback).toHaveBeenCalledWith any(Function), 10
|
[
{
"context": "-exunit': exunit\n\n provideProvider: ->\n key: 'elixir-mix'\n mod: require './mix'\n",
"end": 230,
"score": 0.9967201352119446,
"start": 220,
"tag": "KEY",
"value": "elixir-mix"
}
] | lib/build-tools-elixir.coffee | deprint/build-tools-elixir | 0 | compiler = require './compiler-errors'
exunit = require './exunit'
module.exports = BuildToolsElixir =
provideProfiles: ->
'elixir-compiler': compiler
'elixir-exunit': exunit
provideProvider: ->
key: 'elixir-mix'
mod: require './mix'
| 216212 | compiler = require './compiler-errors'
exunit = require './exunit'
module.exports = BuildToolsElixir =
provideProfiles: ->
'elixir-compiler': compiler
'elixir-exunit': exunit
provideProvider: ->
key: '<KEY>'
mod: require './mix'
| true | compiler = require './compiler-errors'
exunit = require './exunit'
module.exports = BuildToolsElixir =
provideProfiles: ->
'elixir-compiler': compiler
'elixir-exunit': exunit
provideProvider: ->
key: 'PI:KEY:<KEY>END_PI'
mod: require './mix'
|
[
{
"context": "## Project: Ember Facebook\n## Copyright: ©2012 Luan Santos\n## License: Licensed under MIT license (see LI",
"end": 140,
"score": 0.9993424415588379,
"start": 129,
"tag": "NAME",
"value": "Luan Santos"
}
] | src/ember-facebook.coffee | ryac/ember-facebook | 1 | ## ==========================================================================
## Project: Ember Facebook
## Copyright: ©2012 Luan Santos
## License: Licensed under MIT license (see LICENSE)
## ==========================================================================
## ------------------------------------------------------------
## Facebook Mixin..
## ------------------------------------------------------------
Ember.Facebook = Ember.Mixin.create
FBUser: undefined
appId: undefined
facebookParams: Ember.Object.create()
fetchPicture: true
init: ->
@_super()
window.FBApp = this
facebookConfigChanged: (->
# Em.Logger.info '--> in facebookConfigChanged..'
@removeObserver('appId')
window.fbAsyncInit = => @fbAsyncInit()
$ ()->
js = document.createElement 'script'
$(js).attr {
id: 'facebook-jssdk'
# async: true
src: '//connect.facebook.net/en_US/sdk.js'
}
$('body').prepend js
$('body').prepend $('<div>').attr('id', 'fb-root')
).observes('appId')
fbAsyncInit: ->
# Em.Logger.info '--> in fbAsyncInit..'
facebookParams = @get('facebookParams')
facebookParams = facebookParams.setProperties
appId: @get 'appId' || facebookParams.get('appId') || undefined
status: facebookParams.get('status') || true
cookie: facebookParams.get('cookie') || true
xfbml: facebookParams.get('xfbml') || true
channelUrl: facebookParams.get('channelUrl') || undefined
version: 'v2.1'
# Em.Logger.info 'facebookParams', facebookParams
FB.init facebookParams
# @set 'FBloading', true
# FB.Event.subscribe 'auth.authResponseChange', (response) => @updateFBUser(response)
FB.getLoginStatus (response) => @updateFBUser(response)
updateFBUser: (response) ->
FB.Event.subscribe 'auth.authResponseChange', (response) => @updateFBUser(response)
# Em.Logger.info '--> in updateFBUser..', response
if response.status is 'connected'
# to check for permissions..
# FB.api '/me/permissions', (response)=>
# if response and !response.error
# Em.Logger.info 'permissions >>', response
FB.api '/me', (user) =>
FBUser = Ember.Object.create user
FBUser.set 'accessToken', response.authResponse.accessToken
FBUser.set 'expiresIn', response.authResponse.expiresIn
if @get 'fetchPicture'
FB.api '/me/picture?redirect=0&width=50&height=50&type=normal', (resp) =>
FBUser.picture = resp.data.url
@set('FBUser', FBUser)
@checkEmail(FBUser)
else
@set('FBUser', FBUser)
@checkEmail(FBUser)
else
# @set 'User', false
@set 'FBUser', false
@set 'FBloading', false
checkEmail: (FBUser)->
if Ember.empty FBUser.email
Em.Logger.info 'email does not exist!!'
@set 'FBloading', false
else
@wpLogin(FBUser)
wpLogin: (FBUser)->
Em.Logger.info 'in FB wpCreateUser.....', FBUser
data =
action: 'users',
fbId: FBUser.id
firstName: FBUser.first_name
lastName: FBUser.last_name
email: FBUser.email
profilePic: FBUser.picture
Em.$.post(App.ajaxUrl, data).then (response)=>
@set 'FBloading', false
# User = Ember.Object.create()
# App.User.set 'id', response.user.id
# App.User.set 'fbId', response.user.fbId
# App.User.set 'firstName', response.user.firstName
# App.User.set 'lastName', response.user.lastName
# App.User.set 'email', response.user.email
# App.User.set 'profilePic', @FBUser.picture
# @set 'User', User
# App.ctrl.send 'setUser', response.user, response.WP_API_Settings
# App.ctrl.send 'setUser', response.user
# window.location.reload()
## ------------------------------------------------------------
## FacebookView
## ------------------------------------------------------------
Ember.FacebookView = Ember.View.extend
classNameBindings: ['className']
attributeBindings: []
init: ->
@_super()
@setClassName()
@attributeBindings.pushObjects(attr for attr of this when attr.match(/^data-/)?)
setClassName: ->
@set 'className', "fb-#{@type}"
parse: ->
FB.XFBML.parse @$().parent()[0].context if FB?
didInsertElement: ->
@parse() | 196647 | ## ==========================================================================
## Project: Ember Facebook
## Copyright: ©2012 <NAME>
## License: Licensed under MIT license (see LICENSE)
## ==========================================================================
## ------------------------------------------------------------
## Facebook Mixin..
## ------------------------------------------------------------
Ember.Facebook = Ember.Mixin.create
FBUser: undefined
appId: undefined
facebookParams: Ember.Object.create()
fetchPicture: true
init: ->
@_super()
window.FBApp = this
facebookConfigChanged: (->
# Em.Logger.info '--> in facebookConfigChanged..'
@removeObserver('appId')
window.fbAsyncInit = => @fbAsyncInit()
$ ()->
js = document.createElement 'script'
$(js).attr {
id: 'facebook-jssdk'
# async: true
src: '//connect.facebook.net/en_US/sdk.js'
}
$('body').prepend js
$('body').prepend $('<div>').attr('id', 'fb-root')
).observes('appId')
fbAsyncInit: ->
# Em.Logger.info '--> in fbAsyncInit..'
facebookParams = @get('facebookParams')
facebookParams = facebookParams.setProperties
appId: @get 'appId' || facebookParams.get('appId') || undefined
status: facebookParams.get('status') || true
cookie: facebookParams.get('cookie') || true
xfbml: facebookParams.get('xfbml') || true
channelUrl: facebookParams.get('channelUrl') || undefined
version: 'v2.1'
# Em.Logger.info 'facebookParams', facebookParams
FB.init facebookParams
# @set 'FBloading', true
# FB.Event.subscribe 'auth.authResponseChange', (response) => @updateFBUser(response)
FB.getLoginStatus (response) => @updateFBUser(response)
updateFBUser: (response) ->
FB.Event.subscribe 'auth.authResponseChange', (response) => @updateFBUser(response)
# Em.Logger.info '--> in updateFBUser..', response
if response.status is 'connected'
# to check for permissions..
# FB.api '/me/permissions', (response)=>
# if response and !response.error
# Em.Logger.info 'permissions >>', response
FB.api '/me', (user) =>
FBUser = Ember.Object.create user
FBUser.set 'accessToken', response.authResponse.accessToken
FBUser.set 'expiresIn', response.authResponse.expiresIn
if @get 'fetchPicture'
FB.api '/me/picture?redirect=0&width=50&height=50&type=normal', (resp) =>
FBUser.picture = resp.data.url
@set('FBUser', FBUser)
@checkEmail(FBUser)
else
@set('FBUser', FBUser)
@checkEmail(FBUser)
else
# @set 'User', false
@set 'FBUser', false
@set 'FBloading', false
checkEmail: (FBUser)->
if Ember.empty FBUser.email
Em.Logger.info 'email does not exist!!'
@set 'FBloading', false
else
@wpLogin(FBUser)
wpLogin: (FBUser)->
Em.Logger.info 'in FB wpCreateUser.....', FBUser
data =
action: 'users',
fbId: FBUser.id
firstName: FBUser.first_name
lastName: FBUser.last_name
email: FBUser.email
profilePic: FBUser.picture
Em.$.post(App.ajaxUrl, data).then (response)=>
@set 'FBloading', false
# User = Ember.Object.create()
# App.User.set 'id', response.user.id
# App.User.set 'fbId', response.user.fbId
# App.User.set 'firstName', response.user.firstName
# App.User.set 'lastName', response.user.lastName
# App.User.set 'email', response.user.email
# App.User.set 'profilePic', @FBUser.picture
# @set 'User', User
# App.ctrl.send 'setUser', response.user, response.WP_API_Settings
# App.ctrl.send 'setUser', response.user
# window.location.reload()
## ------------------------------------------------------------
## FacebookView
## ------------------------------------------------------------
Ember.FacebookView = Ember.View.extend
classNameBindings: ['className']
attributeBindings: []
init: ->
@_super()
@setClassName()
@attributeBindings.pushObjects(attr for attr of this when attr.match(/^data-/)?)
setClassName: ->
@set 'className', "fb-#{@type}"
parse: ->
FB.XFBML.parse @$().parent()[0].context if FB?
didInsertElement: ->
@parse() | true | ## ==========================================================================
## Project: Ember Facebook
## Copyright: ©2012 PI:NAME:<NAME>END_PI
## License: Licensed under MIT license (see LICENSE)
## ==========================================================================
## ------------------------------------------------------------
## Facebook Mixin..
## ------------------------------------------------------------
Ember.Facebook = Ember.Mixin.create
FBUser: undefined
appId: undefined
facebookParams: Ember.Object.create()
fetchPicture: true
init: ->
@_super()
window.FBApp = this
facebookConfigChanged: (->
# Em.Logger.info '--> in facebookConfigChanged..'
@removeObserver('appId')
window.fbAsyncInit = => @fbAsyncInit()
$ ()->
js = document.createElement 'script'
$(js).attr {
id: 'facebook-jssdk'
# async: true
src: '//connect.facebook.net/en_US/sdk.js'
}
$('body').prepend js
$('body').prepend $('<div>').attr('id', 'fb-root')
).observes('appId')
fbAsyncInit: ->
# Em.Logger.info '--> in fbAsyncInit..'
facebookParams = @get('facebookParams')
facebookParams = facebookParams.setProperties
appId: @get 'appId' || facebookParams.get('appId') || undefined
status: facebookParams.get('status') || true
cookie: facebookParams.get('cookie') || true
xfbml: facebookParams.get('xfbml') || true
channelUrl: facebookParams.get('channelUrl') || undefined
version: 'v2.1'
# Em.Logger.info 'facebookParams', facebookParams
FB.init facebookParams
# @set 'FBloading', true
# FB.Event.subscribe 'auth.authResponseChange', (response) => @updateFBUser(response)
FB.getLoginStatus (response) => @updateFBUser(response)
updateFBUser: (response) ->
FB.Event.subscribe 'auth.authResponseChange', (response) => @updateFBUser(response)
# Em.Logger.info '--> in updateFBUser..', response
if response.status is 'connected'
# to check for permissions..
# FB.api '/me/permissions', (response)=>
# if response and !response.error
# Em.Logger.info 'permissions >>', response
FB.api '/me', (user) =>
FBUser = Ember.Object.create user
FBUser.set 'accessToken', response.authResponse.accessToken
FBUser.set 'expiresIn', response.authResponse.expiresIn
if @get 'fetchPicture'
FB.api '/me/picture?redirect=0&width=50&height=50&type=normal', (resp) =>
FBUser.picture = resp.data.url
@set('FBUser', FBUser)
@checkEmail(FBUser)
else
@set('FBUser', FBUser)
@checkEmail(FBUser)
else
# @set 'User', false
@set 'FBUser', false
@set 'FBloading', false
checkEmail: (FBUser)->
if Ember.empty FBUser.email
Em.Logger.info 'email does not exist!!'
@set 'FBloading', false
else
@wpLogin(FBUser)
wpLogin: (FBUser)->
Em.Logger.info 'in FB wpCreateUser.....', FBUser
data =
action: 'users',
fbId: FBUser.id
firstName: FBUser.first_name
lastName: FBUser.last_name
email: FBUser.email
profilePic: FBUser.picture
Em.$.post(App.ajaxUrl, data).then (response)=>
@set 'FBloading', false
# User = Ember.Object.create()
# App.User.set 'id', response.user.id
# App.User.set 'fbId', response.user.fbId
# App.User.set 'firstName', response.user.firstName
# App.User.set 'lastName', response.user.lastName
# App.User.set 'email', response.user.email
# App.User.set 'profilePic', @FBUser.picture
# @set 'User', User
# App.ctrl.send 'setUser', response.user, response.WP_API_Settings
# App.ctrl.send 'setUser', response.user
# window.location.reload()
## ------------------------------------------------------------
## FacebookView
## ------------------------------------------------------------
Ember.FacebookView = Ember.View.extend
classNameBindings: ['className']
attributeBindings: []
init: ->
@_super()
@setClassName()
@attributeBindings.pushObjects(attr for attr of this when attr.match(/^data-/)?)
setClassName: ->
@set 'className', "fb-#{@type}"
parse: ->
FB.XFBML.parse @$().parent()[0].context if FB?
didInsertElement: ->
@parse() |
[
{
"context": " event.preventDefault()\n\n password = $('#auth-setup-password').val()\n confirmPassword = $('#auth-setup-conf",
"end": 438,
"score": 0.7918656468391418,
"start": 424,
"tag": "PASSWORD",
"value": "setup-password"
},
{
"context": " event.preventDefault()\n ... | src/js/extension/ux/auth.coffee | obi1kenobi/jester | 2 | logger = require('../../lib/util/logging').logger(['ext', 'ux', 'auth'])
sender = require('../messaging/ui/sender')
authCleanup = () ->
logger("Cleaning up auth...")
$('#jester-auth').addClass('hidden')
getSetupSubmitHandler = (authFinishedCb) ->
return (event) ->
# we never want to leave the page,
# or the extension popup will close
event.preventDefault()
password = $('#auth-setup-password').val()
confirmPassword = $('#auth-setup-confpassword').val()
if password != confirmPassword
logger('Entered passwords do not match!')
$('#auth-setup-password').addClass('form-control-wrong-input')
$('#auth-setup-confpassword').addClass('form-control-wrong-input')
return false
logger('Passwords match, setting up...')
$('#auth-setup-password').removeClass('form-control-wrong-input')
$('#auth-setup-confpassword').removeClass('form-control-wrong-input')
sender.sendSetConfigMessage password, {}, (err) ->
if err?
logger('Received error on sendSetConfigMessage', err)
return
else
authCleanup()
authFinishedCb(null, password)
return
getAuthSubmitHandler = (authFinishedCb) ->
return (event) ->
# remove the wrong-input class if present
# to avoid confusing the user
$('#auth-password').removeClass('form-control-wrong-input')
# we never want to leave the page,
# or the extension popup will close
event.preventDefault()
password = $('#auth-password').val()
sender.sendGetConfigMessage password, (err, config) ->
if err?
logger("Received error on sendGetConfigMessage, " + \
"likely wrong password", err)
$('#auth-password').addClass('form-control-wrong-input')
return false
else
authCleanup()
authFinishedCb(null, password)
return
Auth =
setup: (authFinishedCb) ->
sender.sendConfigExistsMessage (err, exists) ->
if err?
logger('Received error on sendConfigExistsMessage', err)
return
if exists
$('#jester-enter').removeClass('hidden')
$('#auth-creds').submit(getAuthSubmitHandler(authFinishedCb))
else
$('#jester-setup').removeClass('hidden')
$('#auth-setup-creds').submit(getSetupSubmitHandler(authFinishedCb))
module.exports = Auth
| 136786 | logger = require('../../lib/util/logging').logger(['ext', 'ux', 'auth'])
sender = require('../messaging/ui/sender')
authCleanup = () ->
logger("Cleaning up auth...")
$('#jester-auth').addClass('hidden')
getSetupSubmitHandler = (authFinishedCb) ->
return (event) ->
# we never want to leave the page,
# or the extension popup will close
event.preventDefault()
password = $('#auth-<PASSWORD>').val()
confirmPassword = $('#auth-setup-confpassword').val()
if password != confirmPassword
logger('Entered passwords do not match!')
$('#auth-setup-password').addClass('form-control-wrong-input')
$('#auth-setup-confpassword').addClass('form-control-wrong-input')
return false
logger('Passwords match, setting up...')
$('#auth-setup-password').removeClass('form-control-wrong-input')
$('#auth-setup-confpassword').removeClass('form-control-wrong-input')
sender.sendSetConfigMessage password, {}, (err) ->
if err?
logger('Received error on sendSetConfigMessage', err)
return
else
authCleanup()
authFinishedCb(null, password)
return
getAuthSubmitHandler = (authFinishedCb) ->
return (event) ->
# remove the wrong-input class if present
# to avoid confusing the user
$('#auth-password').removeClass('form-control-wrong-input')
# we never want to leave the page,
# or the extension popup will close
event.preventDefault()
password = $('#auth-<PASSWORD>').val()
sender.sendGetConfigMessage password, (err, config) ->
if err?
logger("Received error on sendGetConfigMessage, " + \
"likely wrong password", err)
$('#auth-password').addClass('form-control-wrong-input')
return false
else
authCleanup()
authFinishedCb(null, password)
return
Auth =
setup: (authFinishedCb) ->
sender.sendConfigExistsMessage (err, exists) ->
if err?
logger('Received error on sendConfigExistsMessage', err)
return
if exists
$('#jester-enter').removeClass('hidden')
$('#auth-creds').submit(getAuthSubmitHandler(authFinishedCb))
else
$('#jester-setup').removeClass('hidden')
$('#auth-setup-creds').submit(getSetupSubmitHandler(authFinishedCb))
module.exports = Auth
| true | logger = require('../../lib/util/logging').logger(['ext', 'ux', 'auth'])
sender = require('../messaging/ui/sender')
authCleanup = () ->
logger("Cleaning up auth...")
$('#jester-auth').addClass('hidden')
getSetupSubmitHandler = (authFinishedCb) ->
return (event) ->
# we never want to leave the page,
# or the extension popup will close
event.preventDefault()
password = $('#auth-PI:PASSWORD:<PASSWORD>END_PI').val()
confirmPassword = $('#auth-setup-confpassword').val()
if password != confirmPassword
logger('Entered passwords do not match!')
$('#auth-setup-password').addClass('form-control-wrong-input')
$('#auth-setup-confpassword').addClass('form-control-wrong-input')
return false
logger('Passwords match, setting up...')
$('#auth-setup-password').removeClass('form-control-wrong-input')
$('#auth-setup-confpassword').removeClass('form-control-wrong-input')
sender.sendSetConfigMessage password, {}, (err) ->
if err?
logger('Received error on sendSetConfigMessage', err)
return
else
authCleanup()
authFinishedCb(null, password)
return
getAuthSubmitHandler = (authFinishedCb) ->
return (event) ->
# remove the wrong-input class if present
# to avoid confusing the user
$('#auth-password').removeClass('form-control-wrong-input')
# we never want to leave the page,
# or the extension popup will close
event.preventDefault()
password = $('#auth-PI:PASSWORD:<PASSWORD>END_PI').val()
sender.sendGetConfigMessage password, (err, config) ->
if err?
logger("Received error on sendGetConfigMessage, " + \
"likely wrong password", err)
$('#auth-password').addClass('form-control-wrong-input')
return false
else
authCleanup()
authFinishedCb(null, password)
return
Auth =
setup: (authFinishedCb) ->
sender.sendConfigExistsMessage (err, exists) ->
if err?
logger('Received error on sendConfigExistsMessage', err)
return
if exists
$('#jester-enter').removeClass('hidden')
$('#auth-creds').submit(getAuthSubmitHandler(authFinishedCb))
else
$('#jester-setup').removeClass('hidden')
$('#auth-setup-creds').submit(getSetupSubmitHandler(authFinishedCb))
module.exports = Auth
|
[
{
"context": "ntials =\n email: email\n password: password\n\n Account.login {rememberMe: rememberMe}, _cr",
"end": 763,
"score": 0.9980359673500061,
"start": 755,
"tag": "PASSWORD",
"value": "password"
}
] | frontend/common/services/authorization/authorization.coffee | Contactis/translation-manager | 0 | angular.module 'translation.services.authorization', [
'ui.router'
'toastr'
'ngCookies'
'lbServices'
'translation.modules.languages'
'translation.services.account'
'translation.providers.userPermissionsSettings'
]
.service 'AuthorizationService', ($q, $state, $timeout, $log, $filter, toastr, AccountService,
Account, LanguagesService, UserPermissionsSettings) ->
userRoles = UserPermissionsSettings.userRoles
_authorizePageAccess = (accessLevel, role) ->
if typeof role is 'undefined'
role = AccountService.getData('role')
result = accessLevel.bitMask & role.bitMask
return result
_login = (rememberMe, email, password) ->
_deferred = $q.defer()
_credentials =
email: email
password: password
Account.login {rememberMe: rememberMe}, _credentials
, (response) ->
response = response.toJSON()
response.user.role = userRoles[response.user.role]
AccountService.setAccount response.user
userLang = LanguagesService.getStartupLanguage(response.user.interfaceLanguage)
LanguagesService.setLanguage(userLang)
_deferred.resolve response.user
, (error) ->
console.log "error", error
_deferred.reject error
return _deferred.promise
_register = (attributes) ->
_deferred = $q.defer()
Account.create(attributes).$promise.then (response) ->
_deferred.resolve response
, (err) ->
_deferred.reject err
return _deferred.promise
_kickUnauthorised = (queue, event) ->
event.preventDefault()
if Account.isAuthenticated()
$state.go 'app.manager.dashboard'
else
$state.go 'login'
return queue.resolve()
api =
authorizePageAccess: _authorizePageAccess
login: _login
register: _register
logout: ->
Account.logout().$promise.then ->
AccountService.resetAccount()
$state.go 'login'
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.YOU_HAD_BEEN_LOGGED_OUT')
toastr.info msg
return
return
accessCheck: (event, toState) ->
_accessDeffered = $q.defer()
if angular.isUndefined(toState) or !('data' of toState) or !('access' of toState.data)
if angular.isDefined(event)
event.preventDefault()
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.ACCESS_UNDEFINED_FOR_THIS_STATE')
toastr.warning msg
_kickUnauthorised _accessDeffered, event
else
if _authorizePageAccess(toState.data.access)
_accessDeffered.resolve()
else
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.SEEMS_LIKE_YOU_DONT_HAVE_PERMISSIONS')
toastr.error msg
_kickUnauthorised _accessDeffered, event
return _accessDeffered.promise
return api
| 37776 | angular.module 'translation.services.authorization', [
'ui.router'
'toastr'
'ngCookies'
'lbServices'
'translation.modules.languages'
'translation.services.account'
'translation.providers.userPermissionsSettings'
]
.service 'AuthorizationService', ($q, $state, $timeout, $log, $filter, toastr, AccountService,
Account, LanguagesService, UserPermissionsSettings) ->
userRoles = UserPermissionsSettings.userRoles
_authorizePageAccess = (accessLevel, role) ->
if typeof role is 'undefined'
role = AccountService.getData('role')
result = accessLevel.bitMask & role.bitMask
return result
_login = (rememberMe, email, password) ->
_deferred = $q.defer()
_credentials =
email: email
password: <PASSWORD>
Account.login {rememberMe: rememberMe}, _credentials
, (response) ->
response = response.toJSON()
response.user.role = userRoles[response.user.role]
AccountService.setAccount response.user
userLang = LanguagesService.getStartupLanguage(response.user.interfaceLanguage)
LanguagesService.setLanguage(userLang)
_deferred.resolve response.user
, (error) ->
console.log "error", error
_deferred.reject error
return _deferred.promise
_register = (attributes) ->
_deferred = $q.defer()
Account.create(attributes).$promise.then (response) ->
_deferred.resolve response
, (err) ->
_deferred.reject err
return _deferred.promise
_kickUnauthorised = (queue, event) ->
event.preventDefault()
if Account.isAuthenticated()
$state.go 'app.manager.dashboard'
else
$state.go 'login'
return queue.resolve()
api =
authorizePageAccess: _authorizePageAccess
login: _login
register: _register
logout: ->
Account.logout().$promise.then ->
AccountService.resetAccount()
$state.go 'login'
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.YOU_HAD_BEEN_LOGGED_OUT')
toastr.info msg
return
return
accessCheck: (event, toState) ->
_accessDeffered = $q.defer()
if angular.isUndefined(toState) or !('data' of toState) or !('access' of toState.data)
if angular.isDefined(event)
event.preventDefault()
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.ACCESS_UNDEFINED_FOR_THIS_STATE')
toastr.warning msg
_kickUnauthorised _accessDeffered, event
else
if _authorizePageAccess(toState.data.access)
_accessDeffered.resolve()
else
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.SEEMS_LIKE_YOU_DONT_HAVE_PERMISSIONS')
toastr.error msg
_kickUnauthorised _accessDeffered, event
return _accessDeffered.promise
return api
| true | angular.module 'translation.services.authorization', [
'ui.router'
'toastr'
'ngCookies'
'lbServices'
'translation.modules.languages'
'translation.services.account'
'translation.providers.userPermissionsSettings'
]
.service 'AuthorizationService', ($q, $state, $timeout, $log, $filter, toastr, AccountService,
Account, LanguagesService, UserPermissionsSettings) ->
userRoles = UserPermissionsSettings.userRoles
_authorizePageAccess = (accessLevel, role) ->
if typeof role is 'undefined'
role = AccountService.getData('role')
result = accessLevel.bitMask & role.bitMask
return result
_login = (rememberMe, email, password) ->
_deferred = $q.defer()
_credentials =
email: email
password: PI:PASSWORD:<PASSWORD>END_PI
Account.login {rememberMe: rememberMe}, _credentials
, (response) ->
response = response.toJSON()
response.user.role = userRoles[response.user.role]
AccountService.setAccount response.user
userLang = LanguagesService.getStartupLanguage(response.user.interfaceLanguage)
LanguagesService.setLanguage(userLang)
_deferred.resolve response.user
, (error) ->
console.log "error", error
_deferred.reject error
return _deferred.promise
_register = (attributes) ->
_deferred = $q.defer()
Account.create(attributes).$promise.then (response) ->
_deferred.resolve response
, (err) ->
_deferred.reject err
return _deferred.promise
_kickUnauthorised = (queue, event) ->
event.preventDefault()
if Account.isAuthenticated()
$state.go 'app.manager.dashboard'
else
$state.go 'login'
return queue.resolve()
api =
authorizePageAccess: _authorizePageAccess
login: _login
register: _register
logout: ->
Account.logout().$promise.then ->
AccountService.resetAccount()
$state.go 'login'
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.YOU_HAD_BEEN_LOGGED_OUT')
toastr.info msg
return
return
accessCheck: (event, toState) ->
_accessDeffered = $q.defer()
if angular.isUndefined(toState) or !('data' of toState) or !('access' of toState.data)
if angular.isDefined(event)
event.preventDefault()
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.ACCESS_UNDEFINED_FOR_THIS_STATE')
toastr.warning msg
_kickUnauthorised _accessDeffered, event
else
if _authorizePageAccess(toState.data.access)
_accessDeffered.resolve()
else
msg = $filter('translate')('APP.FRONTEND_MESSAGES.AUTHORIZATION.SEEMS_LIKE_YOU_DONT_HAVE_PERMISSIONS')
toastr.error msg
_kickUnauthorised _accessDeffered, event
return _accessDeffered.promise
return api
|
[
{
"context": "t 1\n\n parsed = [\n {chalk:\"overview\",name:\"hello\"}\n {chalk:\"function\",type:[\"function\"],name:",
"end": 198,
"score": 0.6569427251815796,
"start": 193,
"tag": "NAME",
"value": "hello"
},
{
"context": "}\n {chalk:\"function\",type:[\"function\... | test/format_test.coffee | adrianlee44/chalkboard | 1 | require "coffee-script/register"
chalkboard = require "../src/chalkboard"
exports.formatTest =
"header only test": (test) ->
test.expect 1
parsed = [
{chalk:"overview",name:"hello"}
{chalk:"function",type:["function"],name:"wah"}
]
formatted = chalkboard.format parsed, header: true
expected = '\nhello\n===\n\nwah\n---\n\nType: `function` \n\n'
test.equal formatted, expected, "Should only format the first section"
test.done()
"rendering private functions/variables": (test) ->
test.expect 2
parsed = [
{chalk: "function", name: "Test", access: "private"}
]
formatted = chalkboard.format parsed, private: false
test.equal formatted, "", "Should not render private function"
formatted2 = chalkboard.format parsed, private: true
expected = "\nTest\n===\n### Access\nprivate\n"
test.equal formatted2, expected, "Should render private function"
test.done()
"depcreated": (test) ->
test.expect 1
parsed = [
{chalk:"overview", name:"hello"}
{chalk:"function", name: "Test", deprecated: true}
]
formatted = chalkboard.format parsed
expected = "\nhello\n===\n\nTest (Deprecated)\n---\n\n"
test.equal formatted, expected, "Should indicate function as deprecated"
test.done()
"header with url": (test) ->
test.expect 2
parsed = [
{chalk:"overview", name:"Hello", url: "http://www.example.com"}
]
parsed2 = [
{chalk:"overview", name:"Hello"}
]
formatted = chalkboard.format parsed
expected = "\n[Hello](http://www.example.com)\n===\n"
test.equal formatted, expected, "Should included url with the header name"
formatted2 = chalkboard.format parsed2
expected2 = "\nHello\n===\n"
test.equal formatted2, expected2, "Should render header name without url"
test.done()
"description": (test) ->
test.expect 1
parsed = [
{chalk:"overview", name:"Test", description:"Just another description"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nJust another description \n"
test.equal formatted, expected, "Should render description without title"
test.done()
"type": (test) ->
test.expect 2
parsed = [
{chalk: "overview", name:"Test", type:["one"]}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nType: `one` \n\n"
test.equal formatted, expected, "Should render one type"
parsed2 = [
{chalk: "overview", name:"Test", type:["one", "two"]}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\nType: `one, two` \n\n"
test.equal formatted2, expected2, "Should render both types"
test.done()
"version": (test) ->
test.expect 1
parsed = [
{chalk: "overview", name:"Test", version: "1.0.0"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nVersion: `1.0.0` \n\n"
test.equal formatted, expected, "Should render version"
test.done()
"author and email": (test) ->
test.expect 2
parsed = [
{chalk: "overview", name:"Test", author:"Adrian", email: "test@example.com"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\n## Author\nAdrian (test@example.com)\n"
test.equal formatted, expected, "Should render author and email"
parsed2 = [
{chalk: "overview", name:"Test", author:"Adrian"}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\n## Author\nAdrian\n"
test.equal formatted2, expected2, "Should render author"
test.done()
"copyright and license": (test) ->
test.expect 3
parsed = [
{chalk: "overview", name:"Test", copyright:"Somone 2013"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\n## Copyright\nSomone 2013\n"
test.equal formatted, expected, "Should render copyright"
parsed2 = [
{chalk: "overview", name:"Test", license:"MIT"}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\n## License\nMIT\n"
test.equal formatted2, expected2, "Should render license"
parsed3 = [
{chalk: "overview", name:"Test", license:"MIT", copyright:"Someone 2013"}
]
formatted3 = chalkboard.format parsed3
expected3 = "\nTest\n===\n## Copyright and license\nSomeone 2013\n\nMIT\n"
test.equal formatted3, expected3, "Should render license and copyright"
test.done()
"other tags": (test) ->
tags = [
{tag: "default", value: "123", upper: "Default"}
{tag: "since", value: "yesterday", upper: "Since"}
{tag: "param", value: "123", upper: "Parameters"}
{tag: "TODO", value: "something", upper: "TODO"}
{tag: "example", value: "chalkboard hello", upper: "Example"}
]
test.expect tags.length
nameOut = "\nTest\n===\n"
for tag in tags
parsed = [{chalk:"overview", name:"Test"}]
parsed[0][tag.tag] = tag.value
formatted = chalkboard.format parsed
expected = "#{nameOut}### #{tag.upper}\n#{tag.value}\n"
test.equal formatted, expected, "Render @#{tag.tag} correctly"
test.done()
| 54172 | require "coffee-script/register"
chalkboard = require "../src/chalkboard"
exports.formatTest =
"header only test": (test) ->
test.expect 1
parsed = [
{chalk:"overview",name:"<NAME>"}
{chalk:"function",type:["function"],name:"<NAME>ah"}
]
formatted = chalkboard.format parsed, header: true
expected = '\nhello\n===\n\nwah\n---\n\nType: `function` \n\n'
test.equal formatted, expected, "Should only format the first section"
test.done()
"rendering private functions/variables": (test) ->
test.expect 2
parsed = [
{chalk: "function", name: "<NAME>", access: "private"}
]
formatted = chalkboard.format parsed, private: false
test.equal formatted, "", "Should not render private function"
formatted2 = chalkboard.format parsed, private: true
expected = "\nTest\n===\n### Access\nprivate\n"
test.equal formatted2, expected, "Should render private function"
test.done()
"depcreated": (test) ->
test.expect 1
parsed = [
{chalk:"overview", name:"<NAME>"}
{chalk:"function", name: "<NAME>", deprecated: true}
]
formatted = chalkboard.format parsed
expected = "\nhello\n===\n\nTest (Deprecated)\n---\n\n"
test.equal formatted, expected, "Should indicate function as deprecated"
test.done()
"header with url": (test) ->
test.expect 2
parsed = [
{chalk:"overview", name:"<NAME>", url: "http://www.example.com"}
]
parsed2 = [
{chalk:"overview", name:"<NAME>"}
]
formatted = chalkboard.format parsed
expected = "\n[Hello](http://www.example.com)\n===\n"
test.equal formatted, expected, "Should included url with the header name"
formatted2 = chalkboard.format parsed2
expected2 = "\nHello\n===\n"
test.equal formatted2, expected2, "Should render header name without url"
test.done()
"description": (test) ->
test.expect 1
parsed = [
{chalk:"overview", name:"Test", description:"Just another description"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nJust another description \n"
test.equal formatted, expected, "Should render description without title"
test.done()
"type": (test) ->
test.expect 2
parsed = [
{chalk: "overview", name:"Test", type:["one"]}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nType: `one` \n\n"
test.equal formatted, expected, "Should render one type"
parsed2 = [
{chalk: "overview", name:"Test", type:["one", "two"]}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\nType: `one, two` \n\n"
test.equal formatted2, expected2, "Should render both types"
test.done()
"version": (test) ->
test.expect 1
parsed = [
{chalk: "overview", name:"Test", version: "1.0.0"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nVersion: `1.0.0` \n\n"
test.equal formatted, expected, "Should render version"
test.done()
"author and email": (test) ->
test.expect 2
parsed = [
{chalk: "overview", name:"Test", author:"<NAME>", email: "<EMAIL>"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\n## Author\n<NAME> (<EMAIL>)\n"
test.equal formatted, expected, "Should render author and email"
parsed2 = [
{chalk: "overview", name:"Test", author:"<NAME>"}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\n## Author\n<NAME>\n"
test.equal formatted2, expected2, "Should render author"
test.done()
"copyright and license": (test) ->
test.expect 3
parsed = [
{chalk: "overview", name:"<NAME>", copyright:"Somone 2013"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\n## Copyright\nSomone 2013\n"
test.equal formatted, expected, "Should render copyright"
parsed2 = [
{chalk: "overview", name:"<NAME>", license:"MIT"}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\n## License\nMIT\n"
test.equal formatted2, expected2, "Should render license"
parsed3 = [
{chalk: "overview", name:"<NAME>", license:"MIT", copyright:"Someone 2013"}
]
formatted3 = chalkboard.format parsed3
expected3 = "\nTest\n===\n## Copyright and license\nSomeone 2013\n\nMIT\n"
test.equal formatted3, expected3, "Should render license and copyright"
test.done()
"other tags": (test) ->
tags = [
{tag: "default", value: "123", upper: "Default"}
{tag: "since", value: "yesterday", upper: "Since"}
{tag: "param", value: "123", upper: "Parameters"}
{tag: "TODO", value: "something", upper: "TODO"}
{tag: "example", value: "chalkboard hello", upper: "Example"}
]
test.expect tags.length
nameOut = "\nTest\n===\n"
for tag in tags
parsed = [{chalk:"overview", name:"Test"}]
parsed[0][tag.tag] = tag.value
formatted = chalkboard.format parsed
expected = "#{nameOut}### #{tag.upper}\n#{tag.value}\n"
test.equal formatted, expected, "Render @#{tag.tag} correctly"
test.done()
| true | require "coffee-script/register"
chalkboard = require "../src/chalkboard"
exports.formatTest =
"header only test": (test) ->
test.expect 1
parsed = [
{chalk:"overview",name:"PI:NAME:<NAME>END_PI"}
{chalk:"function",type:["function"],name:"PI:NAME:<NAME>END_PIah"}
]
formatted = chalkboard.format parsed, header: true
expected = '\nhello\n===\n\nwah\n---\n\nType: `function` \n\n'
test.equal formatted, expected, "Should only format the first section"
test.done()
"rendering private functions/variables": (test) ->
test.expect 2
parsed = [
{chalk: "function", name: "PI:NAME:<NAME>END_PI", access: "private"}
]
formatted = chalkboard.format parsed, private: false
test.equal formatted, "", "Should not render private function"
formatted2 = chalkboard.format parsed, private: true
expected = "\nTest\n===\n### Access\nprivate\n"
test.equal formatted2, expected, "Should render private function"
test.done()
"depcreated": (test) ->
test.expect 1
parsed = [
{chalk:"overview", name:"PI:NAME:<NAME>END_PI"}
{chalk:"function", name: "PI:NAME:<NAME>END_PI", deprecated: true}
]
formatted = chalkboard.format parsed
expected = "\nhello\n===\n\nTest (Deprecated)\n---\n\n"
test.equal formatted, expected, "Should indicate function as deprecated"
test.done()
"header with url": (test) ->
test.expect 2
parsed = [
{chalk:"overview", name:"PI:NAME:<NAME>END_PI", url: "http://www.example.com"}
]
parsed2 = [
{chalk:"overview", name:"PI:NAME:<NAME>END_PI"}
]
formatted = chalkboard.format parsed
expected = "\n[Hello](http://www.example.com)\n===\n"
test.equal formatted, expected, "Should included url with the header name"
formatted2 = chalkboard.format parsed2
expected2 = "\nHello\n===\n"
test.equal formatted2, expected2, "Should render header name without url"
test.done()
"description": (test) ->
test.expect 1
parsed = [
{chalk:"overview", name:"Test", description:"Just another description"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nJust another description \n"
test.equal formatted, expected, "Should render description without title"
test.done()
"type": (test) ->
test.expect 2
parsed = [
{chalk: "overview", name:"Test", type:["one"]}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nType: `one` \n\n"
test.equal formatted, expected, "Should render one type"
parsed2 = [
{chalk: "overview", name:"Test", type:["one", "two"]}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\nType: `one, two` \n\n"
test.equal formatted2, expected2, "Should render both types"
test.done()
"version": (test) ->
test.expect 1
parsed = [
{chalk: "overview", name:"Test", version: "1.0.0"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\nVersion: `1.0.0` \n\n"
test.equal formatted, expected, "Should render version"
test.done()
"author and email": (test) ->
test.expect 2
parsed = [
{chalk: "overview", name:"Test", author:"PI:NAME:<NAME>END_PI", email: "PI:EMAIL:<EMAIL>END_PI"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\n## Author\nPI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)\n"
test.equal formatted, expected, "Should render author and email"
parsed2 = [
{chalk: "overview", name:"Test", author:"PI:NAME:<NAME>END_PI"}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\n## Author\nPI:NAME:<NAME>END_PI\n"
test.equal formatted2, expected2, "Should render author"
test.done()
"copyright and license": (test) ->
test.expect 3
parsed = [
{chalk: "overview", name:"PI:NAME:<NAME>END_PI", copyright:"Somone 2013"}
]
formatted = chalkboard.format parsed
expected = "\nTest\n===\n## Copyright\nSomone 2013\n"
test.equal formatted, expected, "Should render copyright"
parsed2 = [
{chalk: "overview", name:"PI:NAME:<NAME>END_PI", license:"MIT"}
]
formatted2 = chalkboard.format parsed2
expected2 = "\nTest\n===\n## License\nMIT\n"
test.equal formatted2, expected2, "Should render license"
parsed3 = [
{chalk: "overview", name:"PI:NAME:<NAME>END_PI", license:"MIT", copyright:"Someone 2013"}
]
formatted3 = chalkboard.format parsed3
expected3 = "\nTest\n===\n## Copyright and license\nSomeone 2013\n\nMIT\n"
test.equal formatted3, expected3, "Should render license and copyright"
test.done()
"other tags": (test) ->
tags = [
{tag: "default", value: "123", upper: "Default"}
{tag: "since", value: "yesterday", upper: "Since"}
{tag: "param", value: "123", upper: "Parameters"}
{tag: "TODO", value: "something", upper: "TODO"}
{tag: "example", value: "chalkboard hello", upper: "Example"}
]
test.expect tags.length
nameOut = "\nTest\n===\n"
for tag in tags
parsed = [{chalk:"overview", name:"Test"}]
parsed[0][tag.tag] = tag.value
formatted = chalkboard.format parsed
expected = "#{nameOut}### #{tag.upper}\n#{tag.value}\n"
test.equal formatted, expected, "Render @#{tag.tag} correctly"
test.done()
|
[
{
"context": " change at any give point in time.\n#\n# Author:\n# aglover\nmodule.exports = (robot) ->\n robot.respond /dyno",
"end": 513,
"score": 0.999657392501831,
"start": 506,
"tag": "USERNAME",
"value": "aglover"
},
{
"context": "robot.respond /dynos (.*)/i, (msg) ->\n \tto... | src/heroku-dynos.coffee | rlugojr/hubot-heroku-dynos | 3 | # Description
# This script will display the current number and types of dynos in use for a Heroku app.
#
# Dependencies:
# none
#
# Configuration:
# HUBOT_HEROKU_API_TOKEN
#
# Commands:
# hubot dynos <app name> - responds with "There are x web dynos and y worker dynos"
#
# Notes:
# This script assumes at least one web dyno and is intended to be used with apps that leverage auto-scaling
# features like HireFire where the number of dynos can change at any give point in time.
#
# Author:
# aglover
module.exports = (robot) ->
robot.respond /dynos (.*)/i, (msg) ->
token = 'Basic ' + new Buffer(':' + process.env.HUBOT_HEROKU_API_TOKEN).toString('base64')
msg.http("https://api.heroku.com/apps/#{escape(msg.match[1])}/dynos")
.headers(Authorization: token, Accept: 'application/vnd.heroku+json; version=3')
.get() (err, res, body) ->
response = JSON.parse(body)
[web, worker] = [0,0]
for dynoDoc in response
if dynoDoc.type == 'web' then web++ else worker++
webMessage = if web > 1 then "There are #{web} web dynos" else "There is 1 web dyno"
if worker > 1
workerMessage = "#{worker} worker dynos"
else if worker == 1
workerMessage = "1 worker dyno"
else
workerMessage = "no worker dynos"
msg.send "#{webMessage} and #{workerMessage}"
| 56827 | # Description
# This script will display the current number and types of dynos in use for a Heroku app.
#
# Dependencies:
# none
#
# Configuration:
# HUBOT_HEROKU_API_TOKEN
#
# Commands:
# hubot dynos <app name> - responds with "There are x web dynos and y worker dynos"
#
# Notes:
# This script assumes at least one web dyno and is intended to be used with apps that leverage auto-scaling
# features like HireFire where the number of dynos can change at any give point in time.
#
# Author:
# aglover
module.exports = (robot) ->
robot.respond /dynos (.*)/i, (msg) ->
token = '<KEY> + <KEY>(':' + process.env.HUBOT_HEROKU_API_TOKEN).<KEY>('base64')
msg.http("https://api.heroku.com/apps/#{escape(msg.match[1])}/dynos")
.headers(Authorization: token, Accept: 'application/vnd.heroku+json; version=3')
.get() (err, res, body) ->
response = JSON.parse(body)
[web, worker] = [0,0]
for dynoDoc in response
if dynoDoc.type == 'web' then web++ else worker++
webMessage = if web > 1 then "There are #{web} web dynos" else "There is 1 web dyno"
if worker > 1
workerMessage = "#{worker} worker dynos"
else if worker == 1
workerMessage = "1 worker dyno"
else
workerMessage = "no worker dynos"
msg.send "#{webMessage} and #{workerMessage}"
| true | # Description
# This script will display the current number and types of dynos in use for a Heroku app.
#
# Dependencies:
# none
#
# Configuration:
# HUBOT_HEROKU_API_TOKEN
#
# Commands:
# hubot dynos <app name> - responds with "There are x web dynos and y worker dynos"
#
# Notes:
# This script assumes at least one web dyno and is intended to be used with apps that leverage auto-scaling
# features like HireFire where the number of dynos can change at any give point in time.
#
# Author:
# aglover
module.exports = (robot) ->
robot.respond /dynos (.*)/i, (msg) ->
token = 'PI:KEY:<KEY>END_PI + PI:KEY:<KEY>END_PI(':' + process.env.HUBOT_HEROKU_API_TOKEN).PI:KEY:<KEY>END_PI('base64')
msg.http("https://api.heroku.com/apps/#{escape(msg.match[1])}/dynos")
.headers(Authorization: token, Accept: 'application/vnd.heroku+json; version=3')
.get() (err, res, body) ->
response = JSON.parse(body)
[web, worker] = [0,0]
for dynoDoc in response
if dynoDoc.type == 'web' then web++ else worker++
webMessage = if web > 1 then "There are #{web} web dynos" else "There is 1 web dyno"
if worker > 1
workerMessage = "#{worker} worker dynos"
else if worker == 1
workerMessage = "1 worker dyno"
else
workerMessage = "no worker dynos"
msg.send "#{webMessage} and #{workerMessage}"
|
[
{
"context": "Home Page - Hapi Framework'\n\t\tnames: [\n\t\t\t{name: 'Scott'}\n\t\t\t{name: 'Angela'}\n\t\t\t{name: 'Matthew'}\n\t\t\t{na",
"end": 117,
"score": 0.9994130730628967,
"start": 112,
"tag": "NAME",
"value": "Scott"
},
{
"context": "amework'\n\t\tnames: [\n\t\t\t{name: 'S... | chapters/chapter4/modules/base/home.coffee | scotthathaway/hapifw | 4 | module.exports = (request, reply) ->
context =
pageTitle: 'Home Page - Hapi Framework'
names: [
{name: 'Scott'}
{name: 'Angela'}
{name: 'Matthew'}
{name: 'Kaylie'}
{name: 'Joshua'}
{name: 'Karis'}
]
reply.view('home', context)
| 85335 | module.exports = (request, reply) ->
context =
pageTitle: 'Home Page - Hapi Framework'
names: [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
reply.view('home', context)
| true | module.exports = (request, reply) ->
context =
pageTitle: 'Home Page - Hapi Framework'
names: [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
reply.view('home', context)
|
[
{
"context": "-success\" type=\"email\" placeholder=\"Email\" value=\"alex@smith.com\">\n <span class=\"icon is-small is-lef",
"end": 2645,
"score": 0.9998610615730286,
"start": 2631,
"tag": "EMAIL",
"value": "alex@smith.com"
}
] | snippets/bulma_form.cson | kikoseijo/atom-bootstrap-snippets | 0 | '.text.html.php.blade, .text.html, .text.html.php, .text.html.hack':
'Bulma Form - field':
prefix: 'bulma:field'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field'
body: """
<div class="field">
<label class="label" for="${1:name}">${2:Label}</label>
<div class="control">
<input class="input" id="${1:name}" name="${1:name}" type="text" value="" placeholder="${2:Label}">
</div>
<p class="help">This_is_a_help_text</p>
</div>
"""
'Bulma Form - field + Icon':
prefix: 'bulma:field with icon'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field + Icon'
body: """
<div class="field">
<p class="control has-icons-left has-icons-right">
<input class="input" type="email" placeholder="${1:Placeholder}">
<span class="icon is-small is-left">
<i class="fa fa-envelope"></i>
</span>
<span class="icon is-small is-right">
<i class="fa fa-check"></i>
</span>
</p>
</div>
"""
'Bulma Form - field + Addon':
prefix: 'bulma:field with addon'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field + Addon'
body: """
<div class="field has-addons">
<div class="control is-expanded">
<input class="input" type="text" placeholder="Find a repository">
</div>
<div class="control">
<a class="button is-info">
Search
</a>
</div>
</div>
"""
'Bulma Form - (full)':
prefix: 'bulma:form-full'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form - (full)'
body: """
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">From</label>
</div>
<div class="field-body">
<div class="field">
<p class="control is-expanded has-icons-left">
<input class="input" type="text" placeholder="Name">
<span class="icon is-small is-left">
<i class="fa fa-user"></i>
</span>
</p>
</div>
<div class="field">
<p class="control is-expanded has-icons-left has-icons-right">
<input class="input is-success" type="email" placeholder="Email" value="alex@smith.com">
<span class="icon is-small is-left">
<i class="fa fa-envelope"></i>
</span>
<span class="icon is-small is-right">
<i class="fa fa-check"></i>
</span>
</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label"></div>
<div class="field-body">
<div class="field is-expanded">
<div class="field has-addons">
<p class="control">
<a class="button is-static">
+44
</a>
</p>
<p class="control is-expanded">
<input class="input" type="tel" placeholder="Your phone number">
</p>
</div>
<p class="help">Do not enter the first zero</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Department</label>
</div>
<div class="field-body">
<div class="field is-narrow">
<div class="control">
<div class="select is-fullwidth">
<select>
<option>Business development</option>
<option>Marketing</option>
<option>Sales</option>
</select>
</div>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label">
<label class="label">Already a member?</label>
</div>
<div class="field-body">
<div class="field is-narrow">
<div class="control">
<label class="radio">
<input type="radio" name="member">
Yes
</label>
<label class="radio">
<input type="radio" name="member">
No
</label>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Subject</label>
</div>
<div class="field-body">
<div class="field">
<div class="control">
<input class="input is-danger" type="text" placeholder="e.g. Partnership opportunity">
</div>
<p class="help is-danger">
This field is required
</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Question</label>
</div>
<div class="field-body">
<div class="field">
<div class="control">
<textarea class="textarea" placeholder="Explain how we can help you"></textarea>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label">
<!-- Left empty for spacing -->
</div>
<div class="field-body">
<div class="field">
<div class="control">
<button class="button is-primary">
Send message
</button>
</div>
</div>
</div>
</div>
"""
| 105804 | '.text.html.php.blade, .text.html, .text.html.php, .text.html.hack':
'Bulma Form - field':
prefix: 'bulma:field'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field'
body: """
<div class="field">
<label class="label" for="${1:name}">${2:Label}</label>
<div class="control">
<input class="input" id="${1:name}" name="${1:name}" type="text" value="" placeholder="${2:Label}">
</div>
<p class="help">This_is_a_help_text</p>
</div>
"""
'Bulma Form - field + Icon':
prefix: 'bulma:field with icon'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field + Icon'
body: """
<div class="field">
<p class="control has-icons-left has-icons-right">
<input class="input" type="email" placeholder="${1:Placeholder}">
<span class="icon is-small is-left">
<i class="fa fa-envelope"></i>
</span>
<span class="icon is-small is-right">
<i class="fa fa-check"></i>
</span>
</p>
</div>
"""
'Bulma Form - field + Addon':
prefix: 'bulma:field with addon'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field + Addon'
body: """
<div class="field has-addons">
<div class="control is-expanded">
<input class="input" type="text" placeholder="Find a repository">
</div>
<div class="control">
<a class="button is-info">
Search
</a>
</div>
</div>
"""
'Bulma Form - (full)':
prefix: 'bulma:form-full'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form - (full)'
body: """
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">From</label>
</div>
<div class="field-body">
<div class="field">
<p class="control is-expanded has-icons-left">
<input class="input" type="text" placeholder="Name">
<span class="icon is-small is-left">
<i class="fa fa-user"></i>
</span>
</p>
</div>
<div class="field">
<p class="control is-expanded has-icons-left has-icons-right">
<input class="input is-success" type="email" placeholder="Email" value="<EMAIL>">
<span class="icon is-small is-left">
<i class="fa fa-envelope"></i>
</span>
<span class="icon is-small is-right">
<i class="fa fa-check"></i>
</span>
</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label"></div>
<div class="field-body">
<div class="field is-expanded">
<div class="field has-addons">
<p class="control">
<a class="button is-static">
+44
</a>
</p>
<p class="control is-expanded">
<input class="input" type="tel" placeholder="Your phone number">
</p>
</div>
<p class="help">Do not enter the first zero</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Department</label>
</div>
<div class="field-body">
<div class="field is-narrow">
<div class="control">
<div class="select is-fullwidth">
<select>
<option>Business development</option>
<option>Marketing</option>
<option>Sales</option>
</select>
</div>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label">
<label class="label">Already a member?</label>
</div>
<div class="field-body">
<div class="field is-narrow">
<div class="control">
<label class="radio">
<input type="radio" name="member">
Yes
</label>
<label class="radio">
<input type="radio" name="member">
No
</label>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Subject</label>
</div>
<div class="field-body">
<div class="field">
<div class="control">
<input class="input is-danger" type="text" placeholder="e.g. Partnership opportunity">
</div>
<p class="help is-danger">
This field is required
</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Question</label>
</div>
<div class="field-body">
<div class="field">
<div class="control">
<textarea class="textarea" placeholder="Explain how we can help you"></textarea>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label">
<!-- Left empty for spacing -->
</div>
<div class="field-body">
<div class="field">
<div class="control">
<button class="button is-primary">
Send message
</button>
</div>
</div>
</div>
</div>
"""
| true | '.text.html.php.blade, .text.html, .text.html.php, .text.html.hack':
'Bulma Form - field':
prefix: 'bulma:field'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field'
body: """
<div class="field">
<label class="label" for="${1:name}">${2:Label}</label>
<div class="control">
<input class="input" id="${1:name}" name="${1:name}" type="text" value="" placeholder="${2:Label}">
</div>
<p class="help">This_is_a_help_text</p>
</div>
"""
'Bulma Form - field + Icon':
prefix: 'bulma:field with icon'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field + Icon'
body: """
<div class="field">
<p class="control has-icons-left has-icons-right">
<input class="input" type="email" placeholder="${1:Placeholder}">
<span class="icon is-small is-left">
<i class="fa fa-envelope"></i>
</span>
<span class="icon is-small is-right">
<i class="fa fa-check"></i>
</span>
</p>
</div>
"""
'Bulma Form - field + Addon':
prefix: 'bulma:field with addon'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form Field + Addon'
body: """
<div class="field has-addons">
<div class="control is-expanded">
<input class="input" type="text" placeholder="Find a repository">
</div>
<div class="control">
<a class="button is-info">
Search
</a>
</div>
</div>
"""
'Bulma Form - (full)':
prefix: 'bulma:form-full'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#00d1b2">Bulma.io</span> Form - (full)'
body: """
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">From</label>
</div>
<div class="field-body">
<div class="field">
<p class="control is-expanded has-icons-left">
<input class="input" type="text" placeholder="Name">
<span class="icon is-small is-left">
<i class="fa fa-user"></i>
</span>
</p>
</div>
<div class="field">
<p class="control is-expanded has-icons-left has-icons-right">
<input class="input is-success" type="email" placeholder="Email" value="PI:EMAIL:<EMAIL>END_PI">
<span class="icon is-small is-left">
<i class="fa fa-envelope"></i>
</span>
<span class="icon is-small is-right">
<i class="fa fa-check"></i>
</span>
</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label"></div>
<div class="field-body">
<div class="field is-expanded">
<div class="field has-addons">
<p class="control">
<a class="button is-static">
+44
</a>
</p>
<p class="control is-expanded">
<input class="input" type="tel" placeholder="Your phone number">
</p>
</div>
<p class="help">Do not enter the first zero</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Department</label>
</div>
<div class="field-body">
<div class="field is-narrow">
<div class="control">
<div class="select is-fullwidth">
<select>
<option>Business development</option>
<option>Marketing</option>
<option>Sales</option>
</select>
</div>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label">
<label class="label">Already a member?</label>
</div>
<div class="field-body">
<div class="field is-narrow">
<div class="control">
<label class="radio">
<input type="radio" name="member">
Yes
</label>
<label class="radio">
<input type="radio" name="member">
No
</label>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Subject</label>
</div>
<div class="field-body">
<div class="field">
<div class="control">
<input class="input is-danger" type="text" placeholder="e.g. Partnership opportunity">
</div>
<p class="help is-danger">
This field is required
</p>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label">Question</label>
</div>
<div class="field-body">
<div class="field">
<div class="control">
<textarea class="textarea" placeholder="Explain how we can help you"></textarea>
</div>
</div>
</div>
</div>
<div class="field is-horizontal">
<div class="field-label">
<!-- Left empty for spacing -->
</div>
<div class="field-body">
<div class="field">
<div class="control">
<button class="button is-primary">
Send message
</button>
</div>
</div>
</div>
</div>
"""
|
[
{
"context": "An example Backbone application contributed by\n# [Jérôme Gravel-Niquet](http://jgn.me/). This demo uses a simple\n# [Loca",
"end": 72,
"score": 0.999896228313446,
"start": 52,
"tag": "NAME",
"value": "Jérôme Gravel-Niquet"
},
{
"context": "m/coffee-script/) variation h... | todos.coffee | JasonGiedymin/backbone-todojs-coffeescript | 14 | # An example Backbone application contributed by
# [Jérôme Gravel-Niquet](http://jgn.me/). This demo uses a simple
# [LocalStorage adapter](backbone-localstorage.html)
# to persist Backbone models within your browser.
#
# This [CoffeeScript](http://jashkenas.github.com/coffee-script/) variation has been provided by [Jason Giedymin](http://jasongiedymin.com/).
#
# Note: two things you will notice with my CoffeeScript are that I prefer to
# use four space indents and prefer to use `()` for all functions.
# Load the application once the DOM is ready, using a `jQuery.ready` shortcut.
$ ->
### Todo Model ###
# Our basic **Todo** model has `content`, `order`, and `done` attributes.
class Todo extends Backbone.Model
# Default attributes for the todo.
defaults:
content: "empty todo..."
done: false
# Ensure that each todo created has `content`.
initialize: ->
if !@get("content")
@set({ "content": @defaults.content })
# Toggle the `done` state of this todo item.
toggle: ->
@save({ done: !@get("done") })
# Remove this Todo from *localStorage* and delete its view.
clear: ->
@destroy()
@view.remove()
### Todo Collection ###
# The collection of todos is backed by *localStorage* instead of a remote
# server.
class TodoList extends Backbone.Collection
# Reference to this collection's model.
model: Todo
# Save all of the todo items under the `"todos"` namespace.
localStorage: new Store("todos")
# Attribute getter/setter
getDone = (todo) ->
return todo.get("done")
# Filter down the list of all todo items that are finished.
done: ->
return @filter( getDone )
# Filter down the list to only todo items that are still not finished.
remaining: ->
return @without.apply( this, @done() )
# We keep the Todos in sequential order, despite being saved by unordered
# GUID in the database. This generates the next order number for new items.
nextOrder: ->
return 1 if !@length
return @last().get('order') + 1
# Todos are sorted by their original insertion order.
comparator: (todo) ->
return todo.get("order")
### Todo Item View ###
# The DOM element for a todo item...
class TodoView extends Backbone.View
#... is a list tag.
tagName: "li"
# Cache the template function for a single item.
template: _.template( $("#item-template").html() )
# The DOM events specific to an item.
events:
"click .check" : "toggleDone",
"dblclick div.todo-content" : "edit",
"click span.todo-destroy" : "clear",
"keypress .todo-input" : "updateOnEnter"
# The TodoView listens for changes to its model, re-rendering. Since there's
# a one-to-one correspondence between a **Todo** and a **TodoView** in this
# app, we set a direct reference on the model for convenience.
initialize: ->
@model.bind('change', this.render);
@model.view = this;
# Re-render the contents of the todo item.
render: =>
this.$(@el).html( @template(@model.toJSON()) )
@setContent()
return this
# To avoid XSS (not that it would be harmful in this particular app),
# we use `jQuery.text` to set the contents of the todo item.
setContent: ->
content = @model.get("content")
this.$(".todo-content").text(content)
@input = this.$(".todo-input");
@input.bind("blur", @close);
@input.val(content);
# Toggle the `"done"` state of the model.
toggleDone: ->
@model.toggle()
# Switch this view into `"editing"` mode, displaying the input field.
edit: =>
this.$(@el).addClass("editing")
@input.focus()
# Close the `"editing"` mode, saving changes to the todo.
close: =>
@model.save({ content: @input.val() })
$(@el).removeClass("editing")
# If you hit `enter`, we're through editing the item.
updateOnEnter: (e) =>
@close() if e.keyCode is 13
# Remove this view from the DOM.
remove: ->
$(@el).remove()
# Remove the item, destroy the model.
clear: () ->
@model.clear()
### The Application ###
# Our overall **AppView** is the top-level piece of UI.
class AppView extends Backbone.View
# Instead of generating a new element, bind to the existing skeleton of
# the App already present in the HTML.
el_tag = "#todoapp"
el: $(el_tag)
# Our template for the line of statistics at the bottom of the app.
statsTemplate: _.template( $("#stats-template").html() )
# Delegated events for creating new items, and clearing completed ones.
events:
"keypress #new-todo" : "createOnEnter",
"keyup #new-todo" : "showTooltip",
"click .todo-clear a" : "clearCompleted"
# At initialization we bind to the relevant events on the `Todos`
# collection, when items are added or changed. Kick things off by
# loading any preexisting todos that might be saved in *localStorage*.
initialize: =>
@input = this.$("#new-todo")
Todos.bind("add", @addOne)
Todos.bind("reset", @addAll)
Todos.bind("all", @render)
Todos.fetch()
# Re-rendering the App just means refreshing the statistics -- the rest
# of the app doesn't change.
render: =>
this.$('#todo-stats').html( @statsTemplate({
total: Todos.length,
done: Todos.done().length,
remaining: Todos.remaining().length
}))
# Add a single todo item to the list by creating a view for it, and
# appending its element to the `<ul>`.
addOne: (todo) =>
view = new TodoView( {model: todo} )
this.$("#todo-list").append( view.render().el )
# Add all items in the **Todos** collection at once.
addAll: =>
Todos.each(@addOne);
# Generate the attributes for a new Todo item.
newAttributes: ->
return {
content: @input.val(),
order: Todos.nextOrder(),
done: false
}
# If you hit return in the main input field, create new **Todo** model,
# persisting it to *localStorage*.
createOnEnter: (e) ->
return if (e.keyCode != 13)
Todos.create( @newAttributes() )
@input.val('')
# Clear all done todo items, destroying their models.
clearCompleted: ->
_.each(Todos.done(), (todo) ->
todo.clear()
)
return false
# Lazily show the tooltip that tells you to press `enter` to save
# a new todo item, after one second.
showTooltip: (e) ->
tooltip = this.$(".ui-tooltip-top")
val = @input.val()
tooltip.fadeOut()
clearTimeout(@tooltipTimeout) if (@tooltipTimeout)
return if (val is '' || val is @input.attr("placeholder"))
show = () ->
tooltip.show().fadeIn()
@tooltipTimeout = _.delay(show, 1000)
# Create our global collection of **Todos**.
# Note: I've actually chosen not to export globally to `window`.
# Original documentation has been left intact.
Todos = new TodoList
App = new AppView()
| 90155 | # An example Backbone application contributed by
# [<NAME>](http://jgn.me/). This demo uses a simple
# [LocalStorage adapter](backbone-localstorage.html)
# to persist Backbone models within your browser.
#
# This [CoffeeScript](http://jashkenas.github.com/coffee-script/) variation has been provided by [<NAME>](http://jasongiedymin.com/).
#
# Note: two things you will notice with my CoffeeScript are that I prefer to
# use four space indents and prefer to use `()` for all functions.
# Load the application once the DOM is ready, using a `jQuery.ready` shortcut.
$ ->
### Todo Model ###
# Our basic **Todo** model has `content`, `order`, and `done` attributes.
class Todo extends Backbone.Model
# Default attributes for the todo.
defaults:
content: "empty todo..."
done: false
# Ensure that each todo created has `content`.
initialize: ->
if !@get("content")
@set({ "content": @defaults.content })
# Toggle the `done` state of this todo item.
toggle: ->
@save({ done: !@get("done") })
# Remove this Todo from *localStorage* and delete its view.
clear: ->
@destroy()
@view.remove()
### Todo Collection ###
# The collection of todos is backed by *localStorage* instead of a remote
# server.
class TodoList extends Backbone.Collection
# Reference to this collection's model.
model: Todo
# Save all of the todo items under the `"todos"` namespace.
localStorage: new Store("todos")
# Attribute getter/setter
getDone = (todo) ->
return todo.get("done")
# Filter down the list of all todo items that are finished.
done: ->
return @filter( getDone )
# Filter down the list to only todo items that are still not finished.
remaining: ->
return @without.apply( this, @done() )
# We keep the Todos in sequential order, despite being saved by unordered
# GUID in the database. This generates the next order number for new items.
nextOrder: ->
return 1 if !@length
return @last().get('order') + 1
# Todos are sorted by their original insertion order.
comparator: (todo) ->
return todo.get("order")
### Todo Item View ###
# The DOM element for a todo item...
class TodoView extends Backbone.View
#... is a list tag.
tagName: "li"
# Cache the template function for a single item.
template: _.template( $("#item-template").html() )
# The DOM events specific to an item.
events:
"click .check" : "toggleDone",
"dblclick div.todo-content" : "edit",
"click span.todo-destroy" : "clear",
"keypress .todo-input" : "updateOnEnter"
# The TodoView listens for changes to its model, re-rendering. Since there's
# a one-to-one correspondence between a **Todo** and a **TodoView** in this
# app, we set a direct reference on the model for convenience.
initialize: ->
@model.bind('change', this.render);
@model.view = this;
# Re-render the contents of the todo item.
render: =>
this.$(@el).html( @template(@model.toJSON()) )
@setContent()
return this
# To avoid XSS (not that it would be harmful in this particular app),
# we use `jQuery.text` to set the contents of the todo item.
setContent: ->
content = @model.get("content")
this.$(".todo-content").text(content)
@input = this.$(".todo-input");
@input.bind("blur", @close);
@input.val(content);
# Toggle the `"done"` state of the model.
toggleDone: ->
@model.toggle()
# Switch this view into `"editing"` mode, displaying the input field.
edit: =>
this.$(@el).addClass("editing")
@input.focus()
# Close the `"editing"` mode, saving changes to the todo.
close: =>
@model.save({ content: @input.val() })
$(@el).removeClass("editing")
# If you hit `enter`, we're through editing the item.
updateOnEnter: (e) =>
@close() if e.keyCode is 13
# Remove this view from the DOM.
remove: ->
$(@el).remove()
# Remove the item, destroy the model.
clear: () ->
@model.clear()
### The Application ###
# Our overall **AppView** is the top-level piece of UI.
class AppView extends Backbone.View
# Instead of generating a new element, bind to the existing skeleton of
# the App already present in the HTML.
el_tag = "#todoapp"
el: $(el_tag)
# Our template for the line of statistics at the bottom of the app.
statsTemplate: _.template( $("#stats-template").html() )
# Delegated events for creating new items, and clearing completed ones.
events:
"keypress #new-todo" : "createOnEnter",
"keyup #new-todo" : "showTooltip",
"click .todo-clear a" : "clearCompleted"
# At initialization we bind to the relevant events on the `Todos`
# collection, when items are added or changed. Kick things off by
# loading any preexisting todos that might be saved in *localStorage*.
initialize: =>
@input = this.$("#new-todo")
Todos.bind("add", @addOne)
Todos.bind("reset", @addAll)
Todos.bind("all", @render)
Todos.fetch()
# Re-rendering the App just means refreshing the statistics -- the rest
# of the app doesn't change.
render: =>
this.$('#todo-stats').html( @statsTemplate({
total: Todos.length,
done: Todos.done().length,
remaining: Todos.remaining().length
}))
# Add a single todo item to the list by creating a view for it, and
# appending its element to the `<ul>`.
addOne: (todo) =>
view = new TodoView( {model: todo} )
this.$("#todo-list").append( view.render().el )
# Add all items in the **Todos** collection at once.
addAll: =>
Todos.each(@addOne);
# Generate the attributes for a new Todo item.
newAttributes: ->
return {
content: @input.val(),
order: Todos.nextOrder(),
done: false
}
# If you hit return in the main input field, create new **Todo** model,
# persisting it to *localStorage*.
createOnEnter: (e) ->
return if (e.keyCode != 13)
Todos.create( @newAttributes() )
@input.val('')
# Clear all done todo items, destroying their models.
clearCompleted: ->
_.each(Todos.done(), (todo) ->
todo.clear()
)
return false
# Lazily show the tooltip that tells you to press `enter` to save
# a new todo item, after one second.
showTooltip: (e) ->
tooltip = this.$(".ui-tooltip-top")
val = @input.val()
tooltip.fadeOut()
clearTimeout(@tooltipTimeout) if (@tooltipTimeout)
return if (val is '' || val is @input.attr("placeholder"))
show = () ->
tooltip.show().fadeIn()
@tooltipTimeout = _.delay(show, 1000)
# Create our global collection of **Todos**.
# Note: I've actually chosen not to export globally to `window`.
# Original documentation has been left intact.
Todos = new TodoList
App = new AppView()
| true | # An example Backbone application contributed by
# [PI:NAME:<NAME>END_PI](http://jgn.me/). This demo uses a simple
# [LocalStorage adapter](backbone-localstorage.html)
# to persist Backbone models within your browser.
#
# This [CoffeeScript](http://jashkenas.github.com/coffee-script/) variation has been provided by [PI:NAME:<NAME>END_PI](http://jasongiedymin.com/).
#
# Note: two things you will notice with my CoffeeScript are that I prefer to
# use four space indents and prefer to use `()` for all functions.
# Load the application once the DOM is ready, using a `jQuery.ready` shortcut.
$ ->
### Todo Model ###
# Our basic **Todo** model has `content`, `order`, and `done` attributes.
class Todo extends Backbone.Model
# Default attributes for the todo.
defaults:
content: "empty todo..."
done: false
# Ensure that each todo created has `content`.
initialize: ->
if !@get("content")
@set({ "content": @defaults.content })
# Toggle the `done` state of this todo item.
toggle: ->
@save({ done: !@get("done") })
# Remove this Todo from *localStorage* and delete its view.
clear: ->
@destroy()
@view.remove()
### Todo Collection ###
# The collection of todos is backed by *localStorage* instead of a remote
# server.
class TodoList extends Backbone.Collection
# Reference to this collection's model.
model: Todo
# Save all of the todo items under the `"todos"` namespace.
localStorage: new Store("todos")
# Attribute getter/setter
getDone = (todo) ->
return todo.get("done")
# Filter down the list of all todo items that are finished.
done: ->
return @filter( getDone )
# Filter down the list to only todo items that are still not finished.
remaining: ->
return @without.apply( this, @done() )
# We keep the Todos in sequential order, despite being saved by unordered
# GUID in the database. This generates the next order number for new items.
nextOrder: ->
return 1 if !@length
return @last().get('order') + 1
# Todos are sorted by their original insertion order.
comparator: (todo) ->
return todo.get("order")
### Todo Item View ###
# The DOM element for a todo item...
class TodoView extends Backbone.View
#... is a list tag.
tagName: "li"
# Cache the template function for a single item.
template: _.template( $("#item-template").html() )
# The DOM events specific to an item.
events:
"click .check" : "toggleDone",
"dblclick div.todo-content" : "edit",
"click span.todo-destroy" : "clear",
"keypress .todo-input" : "updateOnEnter"
# The TodoView listens for changes to its model, re-rendering. Since there's
# a one-to-one correspondence between a **Todo** and a **TodoView** in this
# app, we set a direct reference on the model for convenience.
initialize: ->
@model.bind('change', this.render);
@model.view = this;
# Re-render the contents of the todo item.
render: =>
this.$(@el).html( @template(@model.toJSON()) )
@setContent()
return this
# To avoid XSS (not that it would be harmful in this particular app),
# we use `jQuery.text` to set the contents of the todo item.
setContent: ->
content = @model.get("content")
this.$(".todo-content").text(content)
@input = this.$(".todo-input");
@input.bind("blur", @close);
@input.val(content);
# Toggle the `"done"` state of the model.
toggleDone: ->
@model.toggle()
# Switch this view into `"editing"` mode, displaying the input field.
edit: =>
this.$(@el).addClass("editing")
@input.focus()
# Close the `"editing"` mode, saving changes to the todo.
close: =>
@model.save({ content: @input.val() })
$(@el).removeClass("editing")
# If you hit `enter`, we're through editing the item.
updateOnEnter: (e) =>
@close() if e.keyCode is 13
# Remove this view from the DOM.
remove: ->
$(@el).remove()
# Remove the item, destroy the model.
clear: () ->
@model.clear()
### The Application ###
# Our overall **AppView** is the top-level piece of UI.
class AppView extends Backbone.View
# Instead of generating a new element, bind to the existing skeleton of
# the App already present in the HTML.
el_tag = "#todoapp"
el: $(el_tag)
# Our template for the line of statistics at the bottom of the app.
statsTemplate: _.template( $("#stats-template").html() )
# Delegated events for creating new items, and clearing completed ones.
events:
"keypress #new-todo" : "createOnEnter",
"keyup #new-todo" : "showTooltip",
"click .todo-clear a" : "clearCompleted"
# At initialization we bind to the relevant events on the `Todos`
# collection, when items are added or changed. Kick things off by
# loading any preexisting todos that might be saved in *localStorage*.
initialize: =>
@input = this.$("#new-todo")
Todos.bind("add", @addOne)
Todos.bind("reset", @addAll)
Todos.bind("all", @render)
Todos.fetch()
# Re-rendering the App just means refreshing the statistics -- the rest
# of the app doesn't change.
render: =>
this.$('#todo-stats').html( @statsTemplate({
total: Todos.length,
done: Todos.done().length,
remaining: Todos.remaining().length
}))
# Add a single todo item to the list by creating a view for it, and
# appending its element to the `<ul>`.
addOne: (todo) =>
view = new TodoView( {model: todo} )
this.$("#todo-list").append( view.render().el )
# Add all items in the **Todos** collection at once.
addAll: =>
Todos.each(@addOne);
# Generate the attributes for a new Todo item.
newAttributes: ->
return {
content: @input.val(),
order: Todos.nextOrder(),
done: false
}
# If you hit return in the main input field, create new **Todo** model,
# persisting it to *localStorage*.
createOnEnter: (e) ->
return if (e.keyCode != 13)
Todos.create( @newAttributes() )
@input.val('')
# Clear all done todo items, destroying their models.
clearCompleted: ->
_.each(Todos.done(), (todo) ->
todo.clear()
)
return false
# Lazily show the tooltip that tells you to press `enter` to save
# a new todo item, after one second.
showTooltip: (e) ->
tooltip = this.$(".ui-tooltip-top")
val = @input.val()
tooltip.fadeOut()
clearTimeout(@tooltipTimeout) if (@tooltipTimeout)
return if (val is '' || val is @input.attr("placeholder"))
show = () ->
tooltip.show().fadeIn()
@tooltipTimeout = _.delay(show, 1000)
# Create our global collection of **Todos**.
# Note: I've actually chosen not to export globally to `window`.
# Original documentation has been left intact.
Todos = new TodoList
App = new AppView()
|
[
{
"context": " ->\n @before ->\n @concert =\n artist: \"Boredoms\",\n venueName: \"The Forum\",\n cityName: ",
"end": 97,
"score": 0.9941705465316772,
"start": 89,
"tag": "NAME",
"value": "Boredoms"
},
{
"context": " \"renders the artist name\", ->\n @assert... | browser/coffee_uglify/spec/concert_template_spec.coffee | bcowgill/javascript-testing-recipes | 7 | JS.Test.describe "templates.concert()", ->
@before ->
@concert =
artist: "Boredoms",
venueName: "The Forum",
cityName: "Kentish Town",
country: "UK"
@html = $(Handlebars.templates.concert(@concert))
@it "renders the artist name", ->
@assertEqual "Boredoms", @html.find(".artist").text()
@it "renders the venue details", ->
@assertEqual "The Forum, Kentish Town, UK", @html.find(".venue").text()
| 26343 | JS.Test.describe "templates.concert()", ->
@before ->
@concert =
artist: "<NAME>",
venueName: "The Forum",
cityName: "Kentish Town",
country: "UK"
@html = $(Handlebars.templates.concert(@concert))
@it "renders the artist name", ->
@assertEqual "B<NAME>", @html.find(".artist").text()
@it "renders the venue details", ->
@assertEqual "The Forum, Kentish Town, UK", @html.find(".venue").text()
| true | JS.Test.describe "templates.concert()", ->
@before ->
@concert =
artist: "PI:NAME:<NAME>END_PI",
venueName: "The Forum",
cityName: "Kentish Town",
country: "UK"
@html = $(Handlebars.templates.concert(@concert))
@it "renders the artist name", ->
@assertEqual "BPI:NAME:<NAME>END_PI", @html.find(".artist").text()
@it "renders the venue details", ->
@assertEqual "The Forum, Kentish Town, UK", @html.find(".venue").text()
|
[
{
"context": "# Copyright (C) 2017 Alexandre Pielucha\n#\n# Permission to use, copy, modify, and/or distr",
"end": 39,
"score": 0.9998682141304016,
"start": 21,
"tag": "NAME",
"value": "Alexandre Pielucha"
}
] | src/app/http/router.coffee | Riemannn/ast_lab | 0 | # Copyright (C) 2017 Alexandre Pielucha
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
server = require './server'
url = require 'url'
controller =
error: require './controllers/error'
home: require './controllers/home'
public: require './controllers/public'
module.exports =
logic: (req, res) ->
url = url.parse req.url
[ _, directory, type ] = url.pathname.split '/', 3
filename = url.pathname.substring(1 + (directory||'').length + 1 + (type||'').length + 1) || ''
directory = '/' if (!directory? || directory=='')
switch directory
when '/'
controller.home.index res
when 'contact'
controller.home.contact res
when 'public'
controller.public.static type, filename, res
else
controller.error.http_404 res
| 102397 | # Copyright (C) 2017 <NAME>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
server = require './server'
url = require 'url'
controller =
error: require './controllers/error'
home: require './controllers/home'
public: require './controllers/public'
module.exports =
logic: (req, res) ->
url = url.parse req.url
[ _, directory, type ] = url.pathname.split '/', 3
filename = url.pathname.substring(1 + (directory||'').length + 1 + (type||'').length + 1) || ''
directory = '/' if (!directory? || directory=='')
switch directory
when '/'
controller.home.index res
when 'contact'
controller.home.contact res
when 'public'
controller.public.static type, filename, res
else
controller.error.http_404 res
| true | # Copyright (C) 2017 PI:NAME:<NAME>END_PI
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
server = require './server'
url = require 'url'
controller =
error: require './controllers/error'
home: require './controllers/home'
public: require './controllers/public'
module.exports =
logic: (req, res) ->
url = url.parse req.url
[ _, directory, type ] = url.pathname.split '/', 3
filename = url.pathname.substring(1 + (directory||'').length + 1 + (type||'').length + 1) || ''
directory = '/' if (!directory? || directory=='')
switch directory
when '/'
controller.home.index res
when 'contact'
controller.home.contact res
when 'public'
controller.public.static type, filename, res
else
controller.error.http_404 res
|
[
{
"context": "'hero' }\n { eid: 'e1', type: 'character', name: 'Link' }\n { eid: 'e1', type: 'bbox', shape: [1,2,3,4] ",
"end": 391,
"score": 0.9564181566238403,
"start": 387,
"tag": "NAME",
"value": "Link"
},
{
"context": "enemy' }\n { eid: 'e2', type: 'character', name: 'Tektike... | spec/search/immutable_object_finder_spec.coffee | dcrosby42/metroid-clone | 5 | Finder = require '../../src/javascript/search/immutable_object_finder'
Immutable = require 'immutable'
ExpectHelpers = require '../helpers/expect_helpers'
expectIs = ExpectHelpers.expectIs
chai = require('chai')
expect = chai.expect
assert = chai.assert
imm = Immutable.fromJS
zeldaObjects = imm [
{ eid: 'e1', type: 'tag', value: 'hero' }
{ eid: 'e1', type: 'character', name: 'Link' }
{ eid: 'e1', type: 'bbox', shape: [1,2,3,4] }
{ eid: 'e1', type: 'inventory', stuff: 'items' }
{ eid: 'e1', type: 'tag', value: 'enemy' }
{ eid: 'e2', type: 'character', name: 'Tektike' }
{ eid: 'e2', type: 'bbox', shape: [3,4,5,6] }
{ eid: 'e2', type: 'digger', status: 'burrowing' }
{ eid: 'e1', type: 'hat', color: 'green' }
{ eid: 'e99', extraneous: 'hat', type: 'other-thing', sha: 'zam' }
]
searchZelda = (filters) -> Finder.search zeldaObjects, imm(filters)
typeFilter = (t) -> imm { match: { type: t } }
describe 'ImmutableObjectFinder.search', ->
it 'can match on a single criteria', ->
charFilter = imm
match: { type: 'character' }
as: 'char'
expectIs searchZelda([charFilter]), imm([
{ char: zeldaObjects.get(1) }
{ char: zeldaObjects.get(5) }
])
it 'can match on multiple criteria', ->
linkFilter = imm
match: { type: 'character', name: 'Link' }
as: 'linkChar'
expectIs searchZelda([linkFilter]), imm([
{ linkChar: zeldaObjects.get(1) }
])
describe 'when filters omit "as"', ->
it 'labels results based on first matcher value', ->
filter = imm
match:
name: 'Tektike'
expectIs searchZelda([filter]), imm([
{ "Tektike": zeldaObjects.get(5) }
])
filter2 = imm
match:
type: 'digger'
expectIs searchZelda([filter2]), imm [
{ digger: zeldaObjects.get(7) }
]
describe 'with multiple filters', ->
it 'permutes the combinations of objects', ->
cf = typeFilter 'character'
bf = typeFilter 'bbox'
expectIs searchZelda([cf,bf]), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(6) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(6) }
]
it 'does not mistakenly include other objects based on values alone', ->
cf = typeFilter 'character'
bf = typeFilter 'hat'
expectIs searchZelda([cf,bf]), imm [
{ character: zeldaObjects.get(1), hat: zeldaObjects.get(8) }
{ character: zeldaObjects.get(5), hat: zeldaObjects.get(8) }
]
describe 'with joins', ->
charFilter = imm
match: { type: 'character' }
boxFilter = imm
match: { type: 'bbox' }
join: 'character.eid'
heroTagFilter = imm
match:
type: 'tag'
value: 'hero'
join: 'character.eid'
it 'constrains results by matching joined attributes', ->
expectIs searchZelda([charFilter, boxFilter]), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(6) }
]
it 'joins and filters on multiple components', ->
filters = [
charFilter
boxFilter
heroTagFilter
]
expectIs searchZelda(filters), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2), tag: zeldaObjects.get(0) }
]
it 'does nothing with superfluous joins', ->
f =
match: { type: 'character' }
join: 'super.fluous'
expectIs searchZelda([f]), imm([
{ character: zeldaObjects.get(1) }
{ character: zeldaObjects.get(5) }
])
describe 'with pairs of joins', ->
c1 = imm { e: 1, id: 'c1', t: 'bullet' }
c2 = imm { e: 1, id: 'c2', t: 'box' }
c3 = imm { e: 1, id: 'c3', t: 'position' }
c4 = imm { e: 2, id: 'c4', t: 'enemy' }
c5 = imm { e: 2, id: 'c5', t: 'box' }
c6 = imm { e: 3, id: 'c6', t: 'enemy' }
c7 = imm { e: 3, id: 'c7', t: 'box' }
c8 = imm { e: 4, id: 'c8', t: 'bullet' }
c9 = imm { e: 4, id: 'c9', t: 'box' }
c10= imm { e: 4, id: 'c10', t: 'position' }
objects = imm [ c1,c2,c3,c4,c5,c6,c7,c8,c9,c10 ]
it 'confines joins to their respective sub-groups of objects', ->
# bullet+box+position * enemy+x
f0_a = imm
match: { t: 'bullet' }
as: 'bullet0'
f0_b = imm
match: { t: 'box' }
join: 'bullet0.e'
as: 'box0'
f0_c = imm
match: { t: 'position' }
join: 'box0.e'
as: 'position0'
f1_a = imm
match: { t: 'enemy' }
as: 'enemy0'
f1_b = imm
match: { t: 'box' }
join: 'enemy0.e'
as: 'box1'
results = Finder.search objects, [f0_a,f0_b,f0_c, f1_a,f1_b]
expectIs results, imm [
{ bullet0: c1, box0: c2, position0: c3, enemy0: c4, box1: c5 }
{ bullet0: c1, box0: c2, position0: c3, enemy0: c6, box1: c7 }
{ bullet0: c8, box0: c9, position0: c10, enemy0: c4, box1: c5 }
{ bullet0: c8, box0: c9, position0: c10, enemy0: c6, box1: c7 }
]
| 10223 | Finder = require '../../src/javascript/search/immutable_object_finder'
Immutable = require 'immutable'
ExpectHelpers = require '../helpers/expect_helpers'
expectIs = ExpectHelpers.expectIs
chai = require('chai')
expect = chai.expect
assert = chai.assert
imm = Immutable.fromJS
zeldaObjects = imm [
{ eid: 'e1', type: 'tag', value: 'hero' }
{ eid: 'e1', type: 'character', name: '<NAME>' }
{ eid: 'e1', type: 'bbox', shape: [1,2,3,4] }
{ eid: 'e1', type: 'inventory', stuff: 'items' }
{ eid: 'e1', type: 'tag', value: 'enemy' }
{ eid: 'e2', type: 'character', name: '<NAME>' }
{ eid: 'e2', type: 'bbox', shape: [3,4,5,6] }
{ eid: 'e2', type: 'digger', status: 'burrowing' }
{ eid: 'e1', type: 'hat', color: 'green' }
{ eid: 'e99', extraneous: 'hat', type: 'other-thing', sha: 'zam' }
]
searchZelda = (filters) -> Finder.search zeldaObjects, imm(filters)
typeFilter = (t) -> imm { match: { type: t } }
describe 'ImmutableObjectFinder.search', ->
it 'can match on a single criteria', ->
charFilter = imm
match: { type: 'character' }
as: 'char'
expectIs searchZelda([charFilter]), imm([
{ char: zeldaObjects.get(1) }
{ char: zeldaObjects.get(5) }
])
it 'can match on multiple criteria', ->
linkFilter = imm
match: { type: 'character', name: '<NAME>' }
as: 'linkChar'
expectIs searchZelda([linkFilter]), imm([
{ linkChar: zeldaObjects.get(1) }
])
describe 'when filters omit "as"', ->
it 'labels results based on first matcher value', ->
filter = imm
match:
name: '<NAME>'
expectIs searchZelda([filter]), imm([
{ "<NAME>": zeldaObjects.get(5) }
])
filter2 = imm
match:
type: 'digger'
expectIs searchZelda([filter2]), imm [
{ digger: zeldaObjects.get(7) }
]
describe 'with multiple filters', ->
it 'permutes the combinations of objects', ->
cf = typeFilter 'character'
bf = typeFilter 'bbox'
expectIs searchZelda([cf,bf]), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(6) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(6) }
]
it 'does not mistakenly include other objects based on values alone', ->
cf = typeFilter 'character'
bf = typeFilter 'hat'
expectIs searchZelda([cf,bf]), imm [
{ character: zeldaObjects.get(1), hat: zeldaObjects.get(8) }
{ character: zeldaObjects.get(5), hat: zeldaObjects.get(8) }
]
describe 'with joins', ->
charFilter = imm
match: { type: 'character' }
boxFilter = imm
match: { type: 'bbox' }
join: 'character.eid'
heroTagFilter = imm
match:
type: 'tag'
value: 'hero'
join: 'character.eid'
it 'constrains results by matching joined attributes', ->
expectIs searchZelda([charFilter, boxFilter]), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(6) }
]
it 'joins and filters on multiple components', ->
filters = [
charFilter
boxFilter
heroTagFilter
]
expectIs searchZelda(filters), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2), tag: zeldaObjects.get(0) }
]
it 'does nothing with superfluous joins', ->
f =
match: { type: 'character' }
join: 'super.fluous'
expectIs searchZelda([f]), imm([
{ character: zeldaObjects.get(1) }
{ character: zeldaObjects.get(5) }
])
describe 'with pairs of joins', ->
c1 = imm { e: 1, id: 'c1', t: 'bullet' }
c2 = imm { e: 1, id: 'c2', t: 'box' }
c3 = imm { e: 1, id: 'c3', t: 'position' }
c4 = imm { e: 2, id: 'c4', t: 'enemy' }
c5 = imm { e: 2, id: 'c5', t: 'box' }
c6 = imm { e: 3, id: 'c6', t: 'enemy' }
c7 = imm { e: 3, id: 'c7', t: 'box' }
c8 = imm { e: 4, id: 'c8', t: 'bullet' }
c9 = imm { e: 4, id: 'c9', t: 'box' }
c10= imm { e: 4, id: 'c10', t: 'position' }
objects = imm [ c1,c2,c3,c4,c5,c6,c7,c8,c9,c10 ]
it 'confines joins to their respective sub-groups of objects', ->
# bullet+box+position * enemy+x
f0_a = imm
match: { t: 'bullet' }
as: 'bullet0'
f0_b = imm
match: { t: 'box' }
join: 'bullet0.e'
as: 'box0'
f0_c = imm
match: { t: 'position' }
join: 'box0.e'
as: 'position0'
f1_a = imm
match: { t: 'enemy' }
as: 'enemy0'
f1_b = imm
match: { t: 'box' }
join: 'enemy0.e'
as: 'box1'
results = Finder.search objects, [f0_a,f0_b,f0_c, f1_a,f1_b]
expectIs results, imm [
{ bullet0: c1, box0: c2, position0: c3, enemy0: c4, box1: c5 }
{ bullet0: c1, box0: c2, position0: c3, enemy0: c6, box1: c7 }
{ bullet0: c8, box0: c9, position0: c10, enemy0: c4, box1: c5 }
{ bullet0: c8, box0: c9, position0: c10, enemy0: c6, box1: c7 }
]
| true | Finder = require '../../src/javascript/search/immutable_object_finder'
Immutable = require 'immutable'
ExpectHelpers = require '../helpers/expect_helpers'
expectIs = ExpectHelpers.expectIs
chai = require('chai')
expect = chai.expect
assert = chai.assert
imm = Immutable.fromJS
zeldaObjects = imm [
{ eid: 'e1', type: 'tag', value: 'hero' }
{ eid: 'e1', type: 'character', name: 'PI:NAME:<NAME>END_PI' }
{ eid: 'e1', type: 'bbox', shape: [1,2,3,4] }
{ eid: 'e1', type: 'inventory', stuff: 'items' }
{ eid: 'e1', type: 'tag', value: 'enemy' }
{ eid: 'e2', type: 'character', name: 'PI:NAME:<NAME>END_PI' }
{ eid: 'e2', type: 'bbox', shape: [3,4,5,6] }
{ eid: 'e2', type: 'digger', status: 'burrowing' }
{ eid: 'e1', type: 'hat', color: 'green' }
{ eid: 'e99', extraneous: 'hat', type: 'other-thing', sha: 'zam' }
]
searchZelda = (filters) -> Finder.search zeldaObjects, imm(filters)
typeFilter = (t) -> imm { match: { type: t } }
describe 'ImmutableObjectFinder.search', ->
it 'can match on a single criteria', ->
charFilter = imm
match: { type: 'character' }
as: 'char'
expectIs searchZelda([charFilter]), imm([
{ char: zeldaObjects.get(1) }
{ char: zeldaObjects.get(5) }
])
it 'can match on multiple criteria', ->
linkFilter = imm
match: { type: 'character', name: 'PI:NAME:<NAME>END_PI' }
as: 'linkChar'
expectIs searchZelda([linkFilter]), imm([
{ linkChar: zeldaObjects.get(1) }
])
describe 'when filters omit "as"', ->
it 'labels results based on first matcher value', ->
filter = imm
match:
name: 'PI:NAME:<NAME>END_PI'
expectIs searchZelda([filter]), imm([
{ "PI:NAME:<NAME>END_PI": zeldaObjects.get(5) }
])
filter2 = imm
match:
type: 'digger'
expectIs searchZelda([filter2]), imm [
{ digger: zeldaObjects.get(7) }
]
describe 'with multiple filters', ->
it 'permutes the combinations of objects', ->
cf = typeFilter 'character'
bf = typeFilter 'bbox'
expectIs searchZelda([cf,bf]), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(6) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(6) }
]
it 'does not mistakenly include other objects based on values alone', ->
cf = typeFilter 'character'
bf = typeFilter 'hat'
expectIs searchZelda([cf,bf]), imm [
{ character: zeldaObjects.get(1), hat: zeldaObjects.get(8) }
{ character: zeldaObjects.get(5), hat: zeldaObjects.get(8) }
]
describe 'with joins', ->
charFilter = imm
match: { type: 'character' }
boxFilter = imm
match: { type: 'bbox' }
join: 'character.eid'
heroTagFilter = imm
match:
type: 'tag'
value: 'hero'
join: 'character.eid'
it 'constrains results by matching joined attributes', ->
expectIs searchZelda([charFilter, boxFilter]), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2) }
{ character: zeldaObjects.get(5), bbox: zeldaObjects.get(6) }
]
it 'joins and filters on multiple components', ->
filters = [
charFilter
boxFilter
heroTagFilter
]
expectIs searchZelda(filters), imm [
{ character: zeldaObjects.get(1), bbox: zeldaObjects.get(2), tag: zeldaObjects.get(0) }
]
it 'does nothing with superfluous joins', ->
f =
match: { type: 'character' }
join: 'super.fluous'
expectIs searchZelda([f]), imm([
{ character: zeldaObjects.get(1) }
{ character: zeldaObjects.get(5) }
])
describe 'with pairs of joins', ->
c1 = imm { e: 1, id: 'c1', t: 'bullet' }
c2 = imm { e: 1, id: 'c2', t: 'box' }
c3 = imm { e: 1, id: 'c3', t: 'position' }
c4 = imm { e: 2, id: 'c4', t: 'enemy' }
c5 = imm { e: 2, id: 'c5', t: 'box' }
c6 = imm { e: 3, id: 'c6', t: 'enemy' }
c7 = imm { e: 3, id: 'c7', t: 'box' }
c8 = imm { e: 4, id: 'c8', t: 'bullet' }
c9 = imm { e: 4, id: 'c9', t: 'box' }
c10= imm { e: 4, id: 'c10', t: 'position' }
objects = imm [ c1,c2,c3,c4,c5,c6,c7,c8,c9,c10 ]
it 'confines joins to their respective sub-groups of objects', ->
# bullet+box+position * enemy+x
f0_a = imm
match: { t: 'bullet' }
as: 'bullet0'
f0_b = imm
match: { t: 'box' }
join: 'bullet0.e'
as: 'box0'
f0_c = imm
match: { t: 'position' }
join: 'box0.e'
as: 'position0'
f1_a = imm
match: { t: 'enemy' }
as: 'enemy0'
f1_b = imm
match: { t: 'box' }
join: 'enemy0.e'
as: 'box1'
results = Finder.search objects, [f0_a,f0_b,f0_c, f1_a,f1_b]
expectIs results, imm [
{ bullet0: c1, box0: c2, position0: c3, enemy0: c4, box1: c5 }
{ bullet0: c1, box0: c2, position0: c3, enemy0: c6, box1: c7 }
{ bullet0: c8, box0: c9, position0: c10, enemy0: c4, box1: c5 }
{ bullet0: c8, box0: c9, position0: c10, enemy0: c6, box1: c7 }
]
|
[
{
"context": "lass\n FIXTURES: [\n {\n id: 1\n name: \"Agastache rugosa 'After Eight'\"\n priceInCents: 240\n posi",
"end": 1241,
"score": 0.9482429027557373,
"start": 1225,
"tag": "NAME",
"value": "Agastache rugosa"
},
{
"context": "opagated: no\n }\n {... | app/models/plant.coffee | koenig/moosi | 0 | `import DS from 'ember-data'`
`import divideWithHundret from '../utils/divide-with-hundret'`
[attr, hasMany, belongsTo] = [DS.attr, DS.hasMany, DS.belongsTo]
Plant = DS.Model.extend
onInit: (-> @get('searchName')).observes('name').on('init')
name: attr 'string', defaultValue: ''
propagated: attr 'boolean', defaultValue: false
labelled: attr 'boolean', defaultValue: false
priceInCents: attr 'number', defaultValue: 0
price: divideWithHundret 'priceInCents'
searchName: Em.computed 'name', -> @get('name').toLowerCase()
# search
hideOnPlantsList: no
positions: hasMany 'position'
orderItems: hasMany 'orderItem'
youngPlantPositions: Em.computed.filterBy 'positions', 'isYoung'
youngPlantsquantityValues: Em.computed.mapBy 'youngPlantPositions', 'quantity'
youngPlantQuantity: Em.computed.sum 'youngPlantsquantityValues'
plantPositions: Em.computed.filterBy 'positions', 'isPlant'
quantityValues: Em.computed.mapBy 'plantPositions', 'quantity'
quantity: Em.computed.sum 'quantityValues'
createNewPosition: (quarter) ->
position = @store.createRecord 'position',
quarter: quarter
plant: @
position
Plant.reopenClass
FIXTURES: [
{
id: 1
name: "Agastache rugosa 'After Eight'"
priceInCents: 240
positions: [1, 2]
orderItems: [1, 3]
labelled: yes
propagated: no
}
{
id: 2
name: "Echinacea purpurea 'Baby Swan White'"
priceInCents: 320
positions: [3, 4]
orderItems: [2]
labelled: no
propagated: yes
}
]
`export default Plant`
| 149933 | `import DS from 'ember-data'`
`import divideWithHundret from '../utils/divide-with-hundret'`
[attr, hasMany, belongsTo] = [DS.attr, DS.hasMany, DS.belongsTo]
Plant = DS.Model.extend
onInit: (-> @get('searchName')).observes('name').on('init')
name: attr 'string', defaultValue: ''
propagated: attr 'boolean', defaultValue: false
labelled: attr 'boolean', defaultValue: false
priceInCents: attr 'number', defaultValue: 0
price: divideWithHundret 'priceInCents'
searchName: Em.computed 'name', -> @get('name').toLowerCase()
# search
hideOnPlantsList: no
positions: hasMany 'position'
orderItems: hasMany 'orderItem'
youngPlantPositions: Em.computed.filterBy 'positions', 'isYoung'
youngPlantsquantityValues: Em.computed.mapBy 'youngPlantPositions', 'quantity'
youngPlantQuantity: Em.computed.sum 'youngPlantsquantityValues'
plantPositions: Em.computed.filterBy 'positions', 'isPlant'
quantityValues: Em.computed.mapBy 'plantPositions', 'quantity'
quantity: Em.computed.sum 'quantityValues'
createNewPosition: (quarter) ->
position = @store.createRecord 'position',
quarter: quarter
plant: @
position
Plant.reopenClass
FIXTURES: [
{
id: 1
name: "<NAME> 'After Eight'"
priceInCents: 240
positions: [1, 2]
orderItems: [1, 3]
labelled: yes
propagated: no
}
{
id: 2
name: "<NAME>a 'Baby Swan White'"
priceInCents: 320
positions: [3, 4]
orderItems: [2]
labelled: no
propagated: yes
}
]
`export default Plant`
| true | `import DS from 'ember-data'`
`import divideWithHundret from '../utils/divide-with-hundret'`
[attr, hasMany, belongsTo] = [DS.attr, DS.hasMany, DS.belongsTo]
Plant = DS.Model.extend
onInit: (-> @get('searchName')).observes('name').on('init')
name: attr 'string', defaultValue: ''
propagated: attr 'boolean', defaultValue: false
labelled: attr 'boolean', defaultValue: false
priceInCents: attr 'number', defaultValue: 0
price: divideWithHundret 'priceInCents'
searchName: Em.computed 'name', -> @get('name').toLowerCase()
# search
hideOnPlantsList: no
positions: hasMany 'position'
orderItems: hasMany 'orderItem'
youngPlantPositions: Em.computed.filterBy 'positions', 'isYoung'
youngPlantsquantityValues: Em.computed.mapBy 'youngPlantPositions', 'quantity'
youngPlantQuantity: Em.computed.sum 'youngPlantsquantityValues'
plantPositions: Em.computed.filterBy 'positions', 'isPlant'
quantityValues: Em.computed.mapBy 'plantPositions', 'quantity'
quantity: Em.computed.sum 'quantityValues'
createNewPosition: (quarter) ->
position = @store.createRecord 'position',
quarter: quarter
plant: @
position
Plant.reopenClass
FIXTURES: [
{
id: 1
name: "PI:NAME:<NAME>END_PI 'After Eight'"
priceInCents: 240
positions: [1, 2]
orderItems: [1, 3]
labelled: yes
propagated: no
}
{
id: 2
name: "PI:NAME:<NAME>END_PIa 'Baby Swan White'"
priceInCents: 320
positions: [3, 4]
orderItems: [2]
labelled: no
propagated: yes
}
]
`export default Plant`
|
[
{
"context": "sync'\n\n\n\n\n\n# messages in a group\n# key prefix is `m:{group}`\nmodule.exports = (orm) ->\n\treturn {\n\n\t\th",
"end": 71,
"score": 0.6413065791130066,
"start": 70,
"tag": "KEY",
"value": "m"
}
] | src/messages.coffee | derhuerst/shout-orm | 0 | async = require 'async'
# messages in a group
# key prefix is `m:{group}`
module.exports = (orm) ->
return {
has: (group, id) ->
return orm._exists "m:#{group}:#{id}"
.then (exists) -> !!exists
get: (group, id) ->
return orm._get "m:#{group}:#{id}"
.then (data) ->
if not data then throw boom.notFound "Message `#{id}` doesn't exist in group `#{group}`."
data = JSON.parse data
return {
date: data.d
body: data.b
}
add: (groupId, id, body, date = Date.now()) ->
self = this
return orm.groups.get groupId
.then (group) ->
if group.locked then throw new Error "The group `#{group}` is locked."
orm._set "m:#{groupId}:#{id}", JSON.stringify
d: 0 + date
b: body
.then () -> orm.redis.publish 'm', "#{groupId}:#{id}"
all: (group) ->
self = this
# todo: find a way to stream keys for performance
return new Promise (resolve, reject) ->
results = []
orm._keys "m:#{group}:*"
.then (ids) ->
async.eachLimit ids, 50, ((id, cb) ->
# todo: use [redis transactions](http://redis.io/topics/transactions) or at least [redis pipelining](http://redis.io/topics/pipelining)
self.get group, id.split(':')[2]
.then (message) ->
results.push message
cb()
), () ->
resolve results
rm: (group, id) -> orm._del "m:#{group}:#{id}"
}
| 225694 | async = require 'async'
# messages in a group
# key prefix is `<KEY>:{group}`
module.exports = (orm) ->
return {
has: (group, id) ->
return orm._exists "m:#{group}:#{id}"
.then (exists) -> !!exists
get: (group, id) ->
return orm._get "m:#{group}:#{id}"
.then (data) ->
if not data then throw boom.notFound "Message `#{id}` doesn't exist in group `#{group}`."
data = JSON.parse data
return {
date: data.d
body: data.b
}
add: (groupId, id, body, date = Date.now()) ->
self = this
return orm.groups.get groupId
.then (group) ->
if group.locked then throw new Error "The group `#{group}` is locked."
orm._set "m:#{groupId}:#{id}", JSON.stringify
d: 0 + date
b: body
.then () -> orm.redis.publish 'm', "#{groupId}:#{id}"
all: (group) ->
self = this
# todo: find a way to stream keys for performance
return new Promise (resolve, reject) ->
results = []
orm._keys "m:#{group}:*"
.then (ids) ->
async.eachLimit ids, 50, ((id, cb) ->
# todo: use [redis transactions](http://redis.io/topics/transactions) or at least [redis pipelining](http://redis.io/topics/pipelining)
self.get group, id.split(':')[2]
.then (message) ->
results.push message
cb()
), () ->
resolve results
rm: (group, id) -> orm._del "m:#{group}:#{id}"
}
| true | async = require 'async'
# messages in a group
# key prefix is `PI:KEY:<KEY>END_PI:{group}`
module.exports = (orm) ->
return {
has: (group, id) ->
return orm._exists "m:#{group}:#{id}"
.then (exists) -> !!exists
get: (group, id) ->
return orm._get "m:#{group}:#{id}"
.then (data) ->
if not data then throw boom.notFound "Message `#{id}` doesn't exist in group `#{group}`."
data = JSON.parse data
return {
date: data.d
body: data.b
}
add: (groupId, id, body, date = Date.now()) ->
self = this
return orm.groups.get groupId
.then (group) ->
if group.locked then throw new Error "The group `#{group}` is locked."
orm._set "m:#{groupId}:#{id}", JSON.stringify
d: 0 + date
b: body
.then () -> orm.redis.publish 'm', "#{groupId}:#{id}"
all: (group) ->
self = this
# todo: find a way to stream keys for performance
return new Promise (resolve, reject) ->
results = []
orm._keys "m:#{group}:*"
.then (ids) ->
async.eachLimit ids, 50, ((id, cb) ->
# todo: use [redis transactions](http://redis.io/topics/transactions) or at least [redis pipelining](http://redis.io/topics/pipelining)
self.get group, id.split(':')[2]
.then (message) ->
results.push message
cb()
), () ->
resolve results
rm: (group, id) -> orm._del "m:#{group}:#{id}"
}
|
[
{
"context": " email: user[\"email\"]\n password: user[\"password\"]\n\n User.create data, (err, user) ->\n if ",
"end": 586,
"score": 0.9914951324462891,
"start": 578,
"tag": "PASSWORD",
"value": "password"
}
] | routes/user.coffee | mattstone/StarterApp-Server | 0 | path = require 'path'
fs = require 'fs'
module.exports = (app) ->
exp = app.exp
User = app.models.User
isEmpty = app.helpers.isEmpty
isNotEmpty = app.helpers.isNotEmpty
exp.post '/signup', (req, res) ->
# validate form
user = req.body.user
if !user? then return res.json { error: 'invalid request' }
if !user["email"]? then return res.json { error: 'email is required' }
if !user["password"]? then return res.json { error: 'password is required' }
data =
email: user["email"]
password: user["password"]
User.create data, (err, user) ->
if err
res.json { error: err, user: null }
else
confirmationCode = user.confirmationCode()
app.r.setConfirmationCode user, confirmationCode, app.config.email.confirmationCodeTTL, (err, data) ->
app.models.QEmail.sendConfirmationEmail user, app.config.email.marketing, confirmationCode
res.json { error: err, user: user.toJSON() }
exp.get '/confirm/:confirmationCode', (req, res) ->
if !req.params or !req.params.confirmationCode
return res.json { error: 'Invalid confirmation code', user: null }
else
app.r.getConfirmationCode req.params.confirmationCode, (err, data) ->
if err
return res.json { error: 'There was a problem. Please try again later', user: null }
else if !data || !data.user
return res.json { error: 'This confirmation code is invalid. Please request another', user: null }
else
query =
_id: data.user
app.models.User.findOne query, (err, user) ->
if err
return res.json { error: 'There was a problem. Please try again later', user: null }
else if !user
return res.json { error: 'This confirmation code is invalid. Please request another', user: null }
else
user.confirmUser()
app.models.QEmail.sendWelcomeEmail(user, app.config.email.marketing);
return res.json { error: 'null', user: user.toJSON() }
exp.get '/resend-confirmation-code/:email', (req, res) ->
if !req.params or !req.params.email
return res.json { error: 'Invalid resend confirmation code request: 1', user: null }
else
# test email exists in redis - much faster and does not hit DB for false requests
app.r.getUserByEmail req.params.email, (err, data) ->
if err
return res.json { error: err, success: null }
else if !data
return res.json { error: 'Invalid resent confirmation code request: 2', success: null }
else
query = { _id: data.id }
app.models.User.findOne query, (err, user) ->
confirmationCode = user.confirmationCode()
app.r.setConfirmationCode user, confirmationCode, app.config.email.confirmationCodeTTL, (err, data) ->
app.models.QEmail.sendConfirmationEmail user, app.config.email.marketing, confirmationCode
res.json { error: err, success: 'OK' }
exp.get '/reset-password/:email', (req, res) ->
if !req.params or !req.params.email
return res.json { error: 'Invalid reset password code request: 1', user: null }
else
# test email exists in redis - much faster and does not hit DB for false requests
app.r.getUserByEmail req.params.email, (err, data) ->
if err
return res.json { error: err, success: null }
else if !data
return res.json { error: 'Invalid reset password code request: 2', success: null }
else
query = { _id: data.id }
app.models.User.findOne query, (err, user) ->
if err or !user
return res.json { error: 'Invalid resend confirmation code request: 3', success: null }
else
app.models.QEmail.sendResetPasswordEmail user, app.config.email.marketing
res.json { error: err, success: 'OK' }
| 82919 | path = require 'path'
fs = require 'fs'
module.exports = (app) ->
exp = app.exp
User = app.models.User
isEmpty = app.helpers.isEmpty
isNotEmpty = app.helpers.isNotEmpty
exp.post '/signup', (req, res) ->
# validate form
user = req.body.user
if !user? then return res.json { error: 'invalid request' }
if !user["email"]? then return res.json { error: 'email is required' }
if !user["password"]? then return res.json { error: 'password is required' }
data =
email: user["email"]
password: user["<PASSWORD>"]
User.create data, (err, user) ->
if err
res.json { error: err, user: null }
else
confirmationCode = user.confirmationCode()
app.r.setConfirmationCode user, confirmationCode, app.config.email.confirmationCodeTTL, (err, data) ->
app.models.QEmail.sendConfirmationEmail user, app.config.email.marketing, confirmationCode
res.json { error: err, user: user.toJSON() }
exp.get '/confirm/:confirmationCode', (req, res) ->
if !req.params or !req.params.confirmationCode
return res.json { error: 'Invalid confirmation code', user: null }
else
app.r.getConfirmationCode req.params.confirmationCode, (err, data) ->
if err
return res.json { error: 'There was a problem. Please try again later', user: null }
else if !data || !data.user
return res.json { error: 'This confirmation code is invalid. Please request another', user: null }
else
query =
_id: data.user
app.models.User.findOne query, (err, user) ->
if err
return res.json { error: 'There was a problem. Please try again later', user: null }
else if !user
return res.json { error: 'This confirmation code is invalid. Please request another', user: null }
else
user.confirmUser()
app.models.QEmail.sendWelcomeEmail(user, app.config.email.marketing);
return res.json { error: 'null', user: user.toJSON() }
exp.get '/resend-confirmation-code/:email', (req, res) ->
if !req.params or !req.params.email
return res.json { error: 'Invalid resend confirmation code request: 1', user: null }
else
# test email exists in redis - much faster and does not hit DB for false requests
app.r.getUserByEmail req.params.email, (err, data) ->
if err
return res.json { error: err, success: null }
else if !data
return res.json { error: 'Invalid resent confirmation code request: 2', success: null }
else
query = { _id: data.id }
app.models.User.findOne query, (err, user) ->
confirmationCode = user.confirmationCode()
app.r.setConfirmationCode user, confirmationCode, app.config.email.confirmationCodeTTL, (err, data) ->
app.models.QEmail.sendConfirmationEmail user, app.config.email.marketing, confirmationCode
res.json { error: err, success: 'OK' }
exp.get '/reset-password/:email', (req, res) ->
if !req.params or !req.params.email
return res.json { error: 'Invalid reset password code request: 1', user: null }
else
# test email exists in redis - much faster and does not hit DB for false requests
app.r.getUserByEmail req.params.email, (err, data) ->
if err
return res.json { error: err, success: null }
else if !data
return res.json { error: 'Invalid reset password code request: 2', success: null }
else
query = { _id: data.id }
app.models.User.findOne query, (err, user) ->
if err or !user
return res.json { error: 'Invalid resend confirmation code request: 3', success: null }
else
app.models.QEmail.sendResetPasswordEmail user, app.config.email.marketing
res.json { error: err, success: 'OK' }
| true | path = require 'path'
fs = require 'fs'
module.exports = (app) ->
exp = app.exp
User = app.models.User
isEmpty = app.helpers.isEmpty
isNotEmpty = app.helpers.isNotEmpty
exp.post '/signup', (req, res) ->
# validate form
user = req.body.user
if !user? then return res.json { error: 'invalid request' }
if !user["email"]? then return res.json { error: 'email is required' }
if !user["password"]? then return res.json { error: 'password is required' }
data =
email: user["email"]
password: user["PI:PASSWORD:<PASSWORD>END_PI"]
User.create data, (err, user) ->
if err
res.json { error: err, user: null }
else
confirmationCode = user.confirmationCode()
app.r.setConfirmationCode user, confirmationCode, app.config.email.confirmationCodeTTL, (err, data) ->
app.models.QEmail.sendConfirmationEmail user, app.config.email.marketing, confirmationCode
res.json { error: err, user: user.toJSON() }
exp.get '/confirm/:confirmationCode', (req, res) ->
if !req.params or !req.params.confirmationCode
return res.json { error: 'Invalid confirmation code', user: null }
else
app.r.getConfirmationCode req.params.confirmationCode, (err, data) ->
if err
return res.json { error: 'There was a problem. Please try again later', user: null }
else if !data || !data.user
return res.json { error: 'This confirmation code is invalid. Please request another', user: null }
else
query =
_id: data.user
app.models.User.findOne query, (err, user) ->
if err
return res.json { error: 'There was a problem. Please try again later', user: null }
else if !user
return res.json { error: 'This confirmation code is invalid. Please request another', user: null }
else
user.confirmUser()
app.models.QEmail.sendWelcomeEmail(user, app.config.email.marketing);
return res.json { error: 'null', user: user.toJSON() }
exp.get '/resend-confirmation-code/:email', (req, res) ->
if !req.params or !req.params.email
return res.json { error: 'Invalid resend confirmation code request: 1', user: null }
else
# test email exists in redis - much faster and does not hit DB for false requests
app.r.getUserByEmail req.params.email, (err, data) ->
if err
return res.json { error: err, success: null }
else if !data
return res.json { error: 'Invalid resent confirmation code request: 2', success: null }
else
query = { _id: data.id }
app.models.User.findOne query, (err, user) ->
confirmationCode = user.confirmationCode()
app.r.setConfirmationCode user, confirmationCode, app.config.email.confirmationCodeTTL, (err, data) ->
app.models.QEmail.sendConfirmationEmail user, app.config.email.marketing, confirmationCode
res.json { error: err, success: 'OK' }
exp.get '/reset-password/:email', (req, res) ->
if !req.params or !req.params.email
return res.json { error: 'Invalid reset password code request: 1', user: null }
else
# test email exists in redis - much faster and does not hit DB for false requests
app.r.getUserByEmail req.params.email, (err, data) ->
if err
return res.json { error: err, success: null }
else if !data
return res.json { error: 'Invalid reset password code request: 2', success: null }
else
query = { _id: data.id }
app.models.User.findOne query, (err, user) ->
if err or !user
return res.json { error: 'Invalid resend confirmation code request: 3', success: null }
else
app.models.QEmail.sendResetPasswordEmail user, app.config.email.marketing
res.json { error: err, success: 'OK' }
|
[
{
"context": "s.titleUIText = {\n \"font\": { \n \"name\": \"Verdana\", \n \"size\": 26, \n \"smallCaps\": ",
"end": 622,
"score": 0.8543887734413147,
"start": 619,
"tag": "NAME",
"value": "Ver"
},
{
"context": ".audioVolText = {\n \"font\": { \n \"n... | Base UI Modifications/VS_MessageBox/LEDA_VS_Style.coffee | Archeia/Archeia-Engine-LEDA | 4 | # ===================================================================
#
# Global Styles
# Create a unique font setting and style for most texts found in
# the game.
#
# ===================================================================
# ===================================================================
# Menu Styles
# ===================================================================
#
#-----------------------------------------------------------
# Title UI Display
#-----------------------------------------------------------
ui.UIManager.styles.titleUIText = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Audio Text Display
#-----------------------------------------------------------
ui.UIManager.styles.audioVolText = {
"font": {
"name": "Verdana",
"size": 48,
"smallCaps": false,
"bold": false,
"italic": true,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
# ===================================================================
# Message Styles
# ===================================================================
#
#-----------------------------------------------------------
# Show Choices Message Settings
#-----------------------------------------------------------
ui.UIManager.styles.choiceUIText = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
ui.UIManager.styles["choiceUIText:enabled"] = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Backlog, etc. Message Settings
#-----------------------------------------------------------
ui.UIManager.styles.messageText = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# ADV Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.advMessageText = {
"font": {
"name": "Verdana",
"size": 21,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# NVL Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.nvlMessageText = {
"font": {
"name": "Verdana",
"size": 22,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Custom Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.customMessageText = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Ruby Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.rubyText = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Show Message -> Character Name Display
#-----------------------------------------------------------
ui.UIManager.styles.messageBoxNameText = {
"font": {
"name": "Verdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
} | 151446 | # ===================================================================
#
# Global Styles
# Create a unique font setting and style for most texts found in
# the game.
#
# ===================================================================
# ===================================================================
# Menu Styles
# ===================================================================
#
#-----------------------------------------------------------
# Title UI Display
#-----------------------------------------------------------
ui.UIManager.styles.titleUIText = {
"font": {
"name": "<NAME>dana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Audio Text Display
#-----------------------------------------------------------
ui.UIManager.styles.audioVolText = {
"font": {
"name": "<NAME>dana",
"size": 48,
"smallCaps": false,
"bold": false,
"italic": true,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
# ===================================================================
# Message Styles
# ===================================================================
#
#-----------------------------------------------------------
# Show Choices Message Settings
#-----------------------------------------------------------
ui.UIManager.styles.choiceUIText = {
"font": {
"name": "<NAME>dana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
ui.UIManager.styles["choiceUIText:enabled"] = {
"font": {
"name": "<NAME>",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Backlog, etc. Message Settings
#-----------------------------------------------------------
ui.UIManager.styles.messageText = {
"font": {
"name": "<NAME>",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# ADV Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.advMessageText = {
"font": {
"name": "<NAME>",
"size": 21,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# NVL Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.nvlMessageText = {
"font": {
"name": "<NAME>dana",
"size": 22,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Custom Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.customMessageText = {
"font": {
"name": "<NAME>dana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Ruby Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.rubyText = {
"font": {
"name": "<NAME>",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Show Message -> Character Name Display
#-----------------------------------------------------------
ui.UIManager.styles.messageBoxNameText = {
"font": {
"name": "<NAME>",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
} | true | # ===================================================================
#
# Global Styles
# Create a unique font setting and style for most texts found in
# the game.
#
# ===================================================================
# ===================================================================
# Menu Styles
# ===================================================================
#
#-----------------------------------------------------------
# Title UI Display
#-----------------------------------------------------------
ui.UIManager.styles.titleUIText = {
"font": {
"name": "PI:NAME:<NAME>END_PIdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Audio Text Display
#-----------------------------------------------------------
ui.UIManager.styles.audioVolText = {
"font": {
"name": "PI:NAME:<NAME>END_PIdana",
"size": 48,
"smallCaps": false,
"bold": false,
"italic": true,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
# ===================================================================
# Message Styles
# ===================================================================
#
#-----------------------------------------------------------
# Show Choices Message Settings
#-----------------------------------------------------------
ui.UIManager.styles.choiceUIText = {
"font": {
"name": "PI:NAME:<NAME>END_PIdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
ui.UIManager.styles["choiceUIText:enabled"] = {
"font": {
"name": "PI:NAME:<NAME>END_PI",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Backlog, etc. Message Settings
#-----------------------------------------------------------
ui.UIManager.styles.messageText = {
"font": {
"name": "PI:NAME:<NAME>END_PI",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# ADV Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.advMessageText = {
"font": {
"name": "PI:NAME:<NAME>END_PI",
"size": 21,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# NVL Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.nvlMessageText = {
"font": {
"name": "PI:NAME:<NAME>END_PIdana",
"size": 22,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Custom Show Message Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.customMessageText = {
"font": {
"name": "PI:NAME:<NAME>END_PIdana",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Ruby Text Settings
#-----------------------------------------------------------
ui.UIManager.styles.rubyText = {
"font": {
"name": "PI:NAME:<NAME>END_PI",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
}
#-----------------------------------------------------------
# Show Message -> Character Name Display
#-----------------------------------------------------------
ui.UIManager.styles.messageBoxNameText = {
"font": {
"name": "PI:NAME:<NAME>END_PI",
"size": 26,
"smallCaps": false,
"bold": false,
"italic": false,
"color": [255, 255, 255, 255],
"border": false,
"shadow": true,
"shadowColor": [24, 55, 100, 255],
"shadowOffsetX": 2,
"shadowOffsetY": 2
},
} |
[
{
"context": "bull;•••••'\n name: 'Full Name'\n bankName: 'Bank Name'\n order: '______",
"end": 2017,
"score": 0.9980733394622803,
"start": 2008,
"tag": "NAME",
"value": "Full Name"
}
] | src/coffee/check.coffee | gavruk/check | 1 | $ = jQuery
$.check = {}
$.check.fn = {}
$.fn.check = (opts) ->
$.check.fn.construct.apply(this, opts)
class Check
checkTemplate: """
<div class="check-container">
<div class="check">
<div class="front">
<div class="higher">
<div class="name display">{{name}}</div>
<div class="order display">
<div class="ordertext">PAY TO THE ORDER OF</div>
<div class="orderval">{{order}}</div>
</div>
<div class="bankname display">{{bankName}}</div>
</div>
<div class="lower">
<div class="routing-box">
<div class="label">Routing Number</div>
<div class="routingnumber display">{{routingNumber}}</div>
</div>
<div class="account-box">
<div class="label">Account Number</div>
<div class="accountnumber display">{{accountNumber}}</div>
</div>
<div class="numbers">1234</div>
</div>
</div>
</div>
</div>
"""
template: (tpl, data) ->
tpl.replace /\{\{(.*?)\}\}/g, (match, key, str) ->
data[key]
defaults:
formSelectors:
accountNumberInput: 'input[name="account-number"]'
routingNumberInput: 'input[name="routing-number"]'
nameInput: 'input[name="name"]'
bankNameInput: 'input[name="bank-name"]'
orderInput: 'input[name="order"]'
checkSelectors:
checkContainer: '.check-container'
check: '.check'
accountNumberDisplay: '.accountnumber'
routingNumberDisplay: '.routingnumber'
nameDisplay: '.name'
bankNameDisplay: '.bankname'
orderDisplay: '.orderval'
values:
accountNumber: '••••••••••'
routingNumber: '•••••••••'
name: 'Full Name'
bankName: 'Bank Name'
order: '_________________________'
constructor: (el, opts) ->
@options = $.extend(true, {}, @defaults, opts)
$.extend @options.values, $.check.values
@$el = $(el)
unless @options.container
console.log "Please provide a container"
return
@$container = $(@options.container)
@render()
@attachHandlers()
@handleInitialValues()
render: ->
@$container.append(@template(
@checkTemplate,
$.extend({}, @options.values)
))
$.each @options.checkSelectors, (name, selector) =>
this["$#{name}"] = @$container.find(selector)
$.each @options.formSelectors, (name, selector) =>
if @options[name]
obj = $(@options[name])
else
obj = @$el.find(selector)
this["$#{name}"] = obj
if @options.width
baseWidth = parseInt @$checkContainer.css('width')
@$checkContainer.css "transform", "scale(#{@options.width / baseWidth})"
attachHandlers: ->
@$accountNumberInput
.bindVal @$accountNumberDisplay,
fill: false,
filters: @accountNumberFilter
.on 'keydown', @handle('captureAccountNumber')
@$routingNumberInput
.bindVal @$routingNumberDisplay,
fill: false,
.on 'keydown', @handle('captureRoutingNumber')
@$bankNameInput
.bindVal @$bankNameDisplay,
fill: false,
.on 'keydown', @handle('captureName')
@$orderInput
.bindVal @$orderDisplay,
fill: false,
@$nameInput
.bindVal @$nameDisplay,
fill: false
join: ' '
.on 'keydown', @handle('captureName')
accountNumberFilter: (val, $el) ->
if val.length <= 10
return val
beg = val.substring(0, 3)
end = val.substring(val.length - 4)
return beg + '...' + end
handleInitialValues: ->
$.each @options.formSelectors, (name, selector) =>
el = this["$#{name}"]
if el.val()
# if the input has a value, we want to trigger a refresh
el.trigger 'paste'
setTimeout -> el.trigger 'keyup'
handle: (fn) ->
(e) =>
$el = $(e.currentTarget)
args = Array.prototype.slice.call arguments
args.unshift $el
@handlers[fn].apply this, args
handlers:
captureName: ($el, e) ->
keyCode = e.which or e.keyCode
banKeyCodes = [48,49,50,51,52,53,54,55,56,57,106,107,109,110,111,186,187,188,189,190,191,192,219,220,221,222]
# Allow special symbols:
# - hyphen
# - dot
# - apostrophe
allowedSymbols = [
189, 109 # hyphen (when not using shiftKey)
190, 110 # dot (when not using shiftKey)
222 # apostrophe (when not using shiftKey)
]
if banKeyCodes.indexOf(keyCode) != -1 and not (!e.shiftKey and keyCode in allowedSymbols)
e.preventDefault()
captureAccountNumber: ($el, e) ->
val = $el.val()
maxLength = 10
if !@isKeyAllowedForNumber(e)
e.preventDefault()
return
#if val.length == maxLength and !@isSpecialKey(e)
#e.preventDefault()
captureRoutingNumber: ($el, e) ->
val = $el.val()
maxLength = 9
if !@isKeyAllowedForNumber(e)
e.preventDefault()
return
if val.length == maxLength and !@isSpecialKey(e)
e.preventDefault()
isKeyAllowedForNumber: (e) ->
keyCode = e.which or e.keyCode
if keyCode >= 48 and keyCode <= 57
return true
if keyCode >= 96 and keyCode <= 105
return true
if e.ctrlKey or e.metaKey
return true
allowedKeyCodes = [8,9,17,35,36,37,39,46,91,92,144,145]
if allowedKeyCodes.indexOf(keyCode) != -1
return true
return false
isSpecialKey: (e) ->
keyCode = e.which or e.keyCode
allowedKeyCodes = [8,9,17,35,36,37,39,46,91,92,144,145]
if allowedKeyCodes.indexOf(keyCode) != -1
return true
return false
$.fn.bindVal = (out, opts={}) ->
opts.fill = opts.fill || false
opts.filters = opts.filters || []
opts.filters = [opts.filters] unless opts.filters instanceof Array
opts.join = opts.join || ""
if !(typeof(opts.join) == "function")
joiner = opts.join
opts.join = () -> joiner
$el = $(this)
outDefaults = (out.eq(i).text() for o, i in out)
$el.on 'focus', ->
out.addClass 'focused'
$el.on 'blur', ->
out.removeClass 'focused'
$el.on 'keyup change paste', (e) ->
val = $el.map(-> $(this).val()).get()
join = opts.join(val)
val = val.join(join)
val = "" if val == join
for filter in opts.filters
val = filter(val, $el, out)
for o, i in out
if opts.fill
outVal = val + outDefaults[i].substring(val.length)
else
outVal = val or outDefaults[i]
out.eq(i).text(outVal)
$el
$.fn.extend check: (option, args...) ->
@each ->
$this = $(this)
data = $this.data('check')
if !data
$this.data 'check', (data = new Check(this, option))
if typeof option == 'string'
data[option].apply(data, args)
| 140722 | $ = jQuery
$.check = {}
$.check.fn = {}
$.fn.check = (opts) ->
$.check.fn.construct.apply(this, opts)
class Check
checkTemplate: """
<div class="check-container">
<div class="check">
<div class="front">
<div class="higher">
<div class="name display">{{name}}</div>
<div class="order display">
<div class="ordertext">PAY TO THE ORDER OF</div>
<div class="orderval">{{order}}</div>
</div>
<div class="bankname display">{{bankName}}</div>
</div>
<div class="lower">
<div class="routing-box">
<div class="label">Routing Number</div>
<div class="routingnumber display">{{routingNumber}}</div>
</div>
<div class="account-box">
<div class="label">Account Number</div>
<div class="accountnumber display">{{accountNumber}}</div>
</div>
<div class="numbers">1234</div>
</div>
</div>
</div>
</div>
"""
template: (tpl, data) ->
tpl.replace /\{\{(.*?)\}\}/g, (match, key, str) ->
data[key]
defaults:
formSelectors:
accountNumberInput: 'input[name="account-number"]'
routingNumberInput: 'input[name="routing-number"]'
nameInput: 'input[name="name"]'
bankNameInput: 'input[name="bank-name"]'
orderInput: 'input[name="order"]'
checkSelectors:
checkContainer: '.check-container'
check: '.check'
accountNumberDisplay: '.accountnumber'
routingNumberDisplay: '.routingnumber'
nameDisplay: '.name'
bankNameDisplay: '.bankname'
orderDisplay: '.orderval'
values:
accountNumber: '••••••••••'
routingNumber: '•••••••••'
name: '<NAME>'
bankName: 'Bank Name'
order: '_________________________'
constructor: (el, opts) ->
@options = $.extend(true, {}, @defaults, opts)
$.extend @options.values, $.check.values
@$el = $(el)
unless @options.container
console.log "Please provide a container"
return
@$container = $(@options.container)
@render()
@attachHandlers()
@handleInitialValues()
render: ->
@$container.append(@template(
@checkTemplate,
$.extend({}, @options.values)
))
$.each @options.checkSelectors, (name, selector) =>
this["$#{name}"] = @$container.find(selector)
$.each @options.formSelectors, (name, selector) =>
if @options[name]
obj = $(@options[name])
else
obj = @$el.find(selector)
this["$#{name}"] = obj
if @options.width
baseWidth = parseInt @$checkContainer.css('width')
@$checkContainer.css "transform", "scale(#{@options.width / baseWidth})"
attachHandlers: ->
@$accountNumberInput
.bindVal @$accountNumberDisplay,
fill: false,
filters: @accountNumberFilter
.on 'keydown', @handle('captureAccountNumber')
@$routingNumberInput
.bindVal @$routingNumberDisplay,
fill: false,
.on 'keydown', @handle('captureRoutingNumber')
@$bankNameInput
.bindVal @$bankNameDisplay,
fill: false,
.on 'keydown', @handle('captureName')
@$orderInput
.bindVal @$orderDisplay,
fill: false,
@$nameInput
.bindVal @$nameDisplay,
fill: false
join: ' '
.on 'keydown', @handle('captureName')
accountNumberFilter: (val, $el) ->
if val.length <= 10
return val
beg = val.substring(0, 3)
end = val.substring(val.length - 4)
return beg + '...' + end
handleInitialValues: ->
$.each @options.formSelectors, (name, selector) =>
el = this["$#{name}"]
if el.val()
# if the input has a value, we want to trigger a refresh
el.trigger 'paste'
setTimeout -> el.trigger 'keyup'
handle: (fn) ->
(e) =>
$el = $(e.currentTarget)
args = Array.prototype.slice.call arguments
args.unshift $el
@handlers[fn].apply this, args
handlers:
captureName: ($el, e) ->
keyCode = e.which or e.keyCode
banKeyCodes = [48,49,50,51,52,53,54,55,56,57,106,107,109,110,111,186,187,188,189,190,191,192,219,220,221,222]
# Allow special symbols:
# - hyphen
# - dot
# - apostrophe
allowedSymbols = [
189, 109 # hyphen (when not using shiftKey)
190, 110 # dot (when not using shiftKey)
222 # apostrophe (when not using shiftKey)
]
if banKeyCodes.indexOf(keyCode) != -1 and not (!e.shiftKey and keyCode in allowedSymbols)
e.preventDefault()
captureAccountNumber: ($el, e) ->
val = $el.val()
maxLength = 10
if !@isKeyAllowedForNumber(e)
e.preventDefault()
return
#if val.length == maxLength and !@isSpecialKey(e)
#e.preventDefault()
captureRoutingNumber: ($el, e) ->
val = $el.val()
maxLength = 9
if !@isKeyAllowedForNumber(e)
e.preventDefault()
return
if val.length == maxLength and !@isSpecialKey(e)
e.preventDefault()
isKeyAllowedForNumber: (e) ->
keyCode = e.which or e.keyCode
if keyCode >= 48 and keyCode <= 57
return true
if keyCode >= 96 and keyCode <= 105
return true
if e.ctrlKey or e.metaKey
return true
allowedKeyCodes = [8,9,17,35,36,37,39,46,91,92,144,145]
if allowedKeyCodes.indexOf(keyCode) != -1
return true
return false
isSpecialKey: (e) ->
keyCode = e.which or e.keyCode
allowedKeyCodes = [8,9,17,35,36,37,39,46,91,92,144,145]
if allowedKeyCodes.indexOf(keyCode) != -1
return true
return false
$.fn.bindVal = (out, opts={}) ->
opts.fill = opts.fill || false
opts.filters = opts.filters || []
opts.filters = [opts.filters] unless opts.filters instanceof Array
opts.join = opts.join || ""
if !(typeof(opts.join) == "function")
joiner = opts.join
opts.join = () -> joiner
$el = $(this)
outDefaults = (out.eq(i).text() for o, i in out)
$el.on 'focus', ->
out.addClass 'focused'
$el.on 'blur', ->
out.removeClass 'focused'
$el.on 'keyup change paste', (e) ->
val = $el.map(-> $(this).val()).get()
join = opts.join(val)
val = val.join(join)
val = "" if val == join
for filter in opts.filters
val = filter(val, $el, out)
for o, i in out
if opts.fill
outVal = val + outDefaults[i].substring(val.length)
else
outVal = val or outDefaults[i]
out.eq(i).text(outVal)
$el
$.fn.extend check: (option, args...) ->
@each ->
$this = $(this)
data = $this.data('check')
if !data
$this.data 'check', (data = new Check(this, option))
if typeof option == 'string'
data[option].apply(data, args)
| true | $ = jQuery
$.check = {}
$.check.fn = {}
$.fn.check = (opts) ->
$.check.fn.construct.apply(this, opts)
class Check
checkTemplate: """
<div class="check-container">
<div class="check">
<div class="front">
<div class="higher">
<div class="name display">{{name}}</div>
<div class="order display">
<div class="ordertext">PAY TO THE ORDER OF</div>
<div class="orderval">{{order}}</div>
</div>
<div class="bankname display">{{bankName}}</div>
</div>
<div class="lower">
<div class="routing-box">
<div class="label">Routing Number</div>
<div class="routingnumber display">{{routingNumber}}</div>
</div>
<div class="account-box">
<div class="label">Account Number</div>
<div class="accountnumber display">{{accountNumber}}</div>
</div>
<div class="numbers">1234</div>
</div>
</div>
</div>
</div>
"""
template: (tpl, data) ->
tpl.replace /\{\{(.*?)\}\}/g, (match, key, str) ->
data[key]
defaults:
formSelectors:
accountNumberInput: 'input[name="account-number"]'
routingNumberInput: 'input[name="routing-number"]'
nameInput: 'input[name="name"]'
bankNameInput: 'input[name="bank-name"]'
orderInput: 'input[name="order"]'
checkSelectors:
checkContainer: '.check-container'
check: '.check'
accountNumberDisplay: '.accountnumber'
routingNumberDisplay: '.routingnumber'
nameDisplay: '.name'
bankNameDisplay: '.bankname'
orderDisplay: '.orderval'
values:
accountNumber: '••••••••••'
routingNumber: '•••••••••'
name: 'PI:NAME:<NAME>END_PI'
bankName: 'Bank Name'
order: '_________________________'
constructor: (el, opts) ->
@options = $.extend(true, {}, @defaults, opts)
$.extend @options.values, $.check.values
@$el = $(el)
unless @options.container
console.log "Please provide a container"
return
@$container = $(@options.container)
@render()
@attachHandlers()
@handleInitialValues()
render: ->
@$container.append(@template(
@checkTemplate,
$.extend({}, @options.values)
))
$.each @options.checkSelectors, (name, selector) =>
this["$#{name}"] = @$container.find(selector)
$.each @options.formSelectors, (name, selector) =>
if @options[name]
obj = $(@options[name])
else
obj = @$el.find(selector)
this["$#{name}"] = obj
if @options.width
baseWidth = parseInt @$checkContainer.css('width')
@$checkContainer.css "transform", "scale(#{@options.width / baseWidth})"
attachHandlers: ->
@$accountNumberInput
.bindVal @$accountNumberDisplay,
fill: false,
filters: @accountNumberFilter
.on 'keydown', @handle('captureAccountNumber')
@$routingNumberInput
.bindVal @$routingNumberDisplay,
fill: false,
.on 'keydown', @handle('captureRoutingNumber')
@$bankNameInput
.bindVal @$bankNameDisplay,
fill: false,
.on 'keydown', @handle('captureName')
@$orderInput
.bindVal @$orderDisplay,
fill: false,
@$nameInput
.bindVal @$nameDisplay,
fill: false
join: ' '
.on 'keydown', @handle('captureName')
accountNumberFilter: (val, $el) ->
if val.length <= 10
return val
beg = val.substring(0, 3)
end = val.substring(val.length - 4)
return beg + '...' + end
handleInitialValues: ->
$.each @options.formSelectors, (name, selector) =>
el = this["$#{name}"]
if el.val()
# if the input has a value, we want to trigger a refresh
el.trigger 'paste'
setTimeout -> el.trigger 'keyup'
handle: (fn) ->
(e) =>
$el = $(e.currentTarget)
args = Array.prototype.slice.call arguments
args.unshift $el
@handlers[fn].apply this, args
handlers:
captureName: ($el, e) ->
keyCode = e.which or e.keyCode
banKeyCodes = [48,49,50,51,52,53,54,55,56,57,106,107,109,110,111,186,187,188,189,190,191,192,219,220,221,222]
# Allow special symbols:
# - hyphen
# - dot
# - apostrophe
allowedSymbols = [
189, 109 # hyphen (when not using shiftKey)
190, 110 # dot (when not using shiftKey)
222 # apostrophe (when not using shiftKey)
]
if banKeyCodes.indexOf(keyCode) != -1 and not (!e.shiftKey and keyCode in allowedSymbols)
e.preventDefault()
captureAccountNumber: ($el, e) ->
val = $el.val()
maxLength = 10
if !@isKeyAllowedForNumber(e)
e.preventDefault()
return
#if val.length == maxLength and !@isSpecialKey(e)
#e.preventDefault()
captureRoutingNumber: ($el, e) ->
val = $el.val()
maxLength = 9
if !@isKeyAllowedForNumber(e)
e.preventDefault()
return
if val.length == maxLength and !@isSpecialKey(e)
e.preventDefault()
isKeyAllowedForNumber: (e) ->
keyCode = e.which or e.keyCode
if keyCode >= 48 and keyCode <= 57
return true
if keyCode >= 96 and keyCode <= 105
return true
if e.ctrlKey or e.metaKey
return true
allowedKeyCodes = [8,9,17,35,36,37,39,46,91,92,144,145]
if allowedKeyCodes.indexOf(keyCode) != -1
return true
return false
isSpecialKey: (e) ->
keyCode = e.which or e.keyCode
allowedKeyCodes = [8,9,17,35,36,37,39,46,91,92,144,145]
if allowedKeyCodes.indexOf(keyCode) != -1
return true
return false
$.fn.bindVal = (out, opts={}) ->
opts.fill = opts.fill || false
opts.filters = opts.filters || []
opts.filters = [opts.filters] unless opts.filters instanceof Array
opts.join = opts.join || ""
if !(typeof(opts.join) == "function")
joiner = opts.join
opts.join = () -> joiner
$el = $(this)
outDefaults = (out.eq(i).text() for o, i in out)
$el.on 'focus', ->
out.addClass 'focused'
$el.on 'blur', ->
out.removeClass 'focused'
$el.on 'keyup change paste', (e) ->
val = $el.map(-> $(this).val()).get()
join = opts.join(val)
val = val.join(join)
val = "" if val == join
for filter in opts.filters
val = filter(val, $el, out)
for o, i in out
if opts.fill
outVal = val + outDefaults[i].substring(val.length)
else
outVal = val or outDefaults[i]
out.eq(i).text(outVal)
$el
$.fn.extend check: (option, args...) ->
@each ->
$this = $(this)
data = $this.data('check')
if !data
$this.data 'check', (data = new Check(this, option))
if typeof option == 'string'
data[option].apply(data, args)
|
[
{
"context": "\n url : \"/home\"\n views:\n 'main@app.sidebarTheme':\n templateUrl: 'app/features/home/mai",
"end": 311,
"score": 0.9895609021186829,
"start": 290,
"tag": "EMAIL",
"value": "main@app.sidebarTheme"
},
{
"context": "stomer&department&u... | src/app/features/home/main/scripts/routes.coffee | TimeoutZero/basebuild-boilerplate | 0 |
# =============================================
# Main Module
# =============================================
angular.module 'BBB'
.config ($stateProvider, $urlRouterProvider) ->
$stateProvider
.state "app.sidebarTheme.home",
url : "/home"
views:
'main@app.sidebarTheme':
templateUrl: 'app/features/home/main/templates/main.html'
.state "app.sidebarTheme.overview.dashboard",
url : "/dashboard?date&customer&department&user"
views:
'dashboard@app.sidebarTheme.home':
templateUrl : 'app/features/home/dashboard/templates/main.html'
controller : 'OverviewDashboardController as dashboard'
'dashForm@app.sidebarTheme.home':
templateUrl : 'app/features/home/dashboard/subviews/dashForm/template.html'
controller : 'OverviewDashFormController as dashForm'
| 124683 |
# =============================================
# Main Module
# =============================================
angular.module 'BBB'
.config ($stateProvider, $urlRouterProvider) ->
$stateProvider
.state "app.sidebarTheme.home",
url : "/home"
views:
'<EMAIL>':
templateUrl: 'app/features/home/main/templates/main.html'
.state "app.sidebarTheme.overview.dashboard",
url : "/dashboard?date&customer&department&user"
views:
'<EMAIL>':
templateUrl : 'app/features/home/dashboard/templates/main.html'
controller : 'OverviewDashboardController as dashboard'
'<EMAIL>':
templateUrl : 'app/features/home/dashboard/subviews/dashForm/template.html'
controller : 'OverviewDashFormController as dashForm'
| true |
# =============================================
# Main Module
# =============================================
angular.module 'BBB'
.config ($stateProvider, $urlRouterProvider) ->
$stateProvider
.state "app.sidebarTheme.home",
url : "/home"
views:
'PI:EMAIL:<EMAIL>END_PI':
templateUrl: 'app/features/home/main/templates/main.html'
.state "app.sidebarTheme.overview.dashboard",
url : "/dashboard?date&customer&department&user"
views:
'PI:EMAIL:<EMAIL>END_PI':
templateUrl : 'app/features/home/dashboard/templates/main.html'
controller : 'OverviewDashboardController as dashboard'
'PI:EMAIL:<EMAIL>END_PI':
templateUrl : 'app/features/home/dashboard/subviews/dashForm/template.html'
controller : 'OverviewDashFormController as dashForm'
|
[
{
"context": "###\n *\n * jQuery ResponsiveText by Gary Hepting\n * https://github.com/ghepting/jquery-responsive",
"end": 48,
"score": 0.9998887181282043,
"start": 36,
"tag": "NAME",
"value": "Gary Hepting"
},
{
"context": "onsiveText by Gary Hepting\n * https://github.com/ghepti... | src/coffee/plugins/jquery-responsiveText.coffee | katophelix/PristinePooch | 357 | ###
*
* jQuery ResponsiveText by Gary Hepting
* https://github.com/ghepting/jquery-responsive-text
*
* Open source under the MIT License.
*
* Copyright © 2013 Gary Hepting. All rights reserved.
*
###
delayedAdjustText = []
responsiveTextIndex = 0
class ResponsiveText
constructor: (el) ->
@index = responsiveTextIndex++
@el = el
@compression = $(@el).data('compression') || 10
@minFontSize = $(@el).data('min') || Number.NEGATIVE_INFINITY
@maxFontSize = $(@el).data('max') || Number.POSITIVE_INFINITY
@scrollable = $(@el).data('scrollable') || false
@scrollSpeed = $(@el).data('scrollspeed') || 650
@scrollReset = $(@el).data('scrollreset') || 200
@init()
init: ->
$(@el).wrapInner('<span class="responsiveText-wrapper" />')
@adjustOnLoad()
@adjustOnResize()
@scrollOnHover() if @scrollable
resizeText: ->
calculatedFontSize = $(@el).width() / @compression
fontSize = Math.max(Math.min(calculatedFontSize,@maxFontSize),@minFontSize)
$(@el).css
"font-size": Math.floor(fontSize)
adjustOnLoad: ->
$(window).on 'load', =>
@resizeText()
adjustOnResize: ->
$(window).on 'resize', =>
clearTimeout(delayedAdjustText[@index])
delayedAdjustText[@index] = setTimeout(=>
@resizeText()
, 20)
scrollOnHover: ->
$(@el).css
'overflow': 'hidden'
'text-overflow': 'ellipsis'
'white-space': 'nowrap'
$(@el).hover =>
@difference = @el.scrollWidth - $(@el).width()
@scrollSpeed = @difference if @difference > @scrollSpeed
if @difference > 0
$(@el).css('cursor', 'e-resize')
$(@el).stop().animate
"text-indent": -@difference
, @scrollSpeed
, =>
$(@el).css('cursor', 'text')
, =>
$(@el).stop().animate
"text-indent": 0
, @scrollReset
(($) ->
responsiveTextElements = []
$.fn.responsiveText = (options) ->
@each ->
responsiveTextElements.push( new ResponsiveText(@) )
) jQuery
$(document).ready ->
$(".responsive").not('table').responsiveText()
| 68933 | ###
*
* jQuery ResponsiveText by <NAME>
* https://github.com/ghepting/jquery-responsive-text
*
* Open source under the MIT License.
*
* Copyright © 2013 <NAME>. All rights reserved.
*
###
delayedAdjustText = []
responsiveTextIndex = 0
class ResponsiveText
constructor: (el) ->
@index = responsiveTextIndex++
@el = el
@compression = $(@el).data('compression') || 10
@minFontSize = $(@el).data('min') || Number.NEGATIVE_INFINITY
@maxFontSize = $(@el).data('max') || Number.POSITIVE_INFINITY
@scrollable = $(@el).data('scrollable') || false
@scrollSpeed = $(@el).data('scrollspeed') || 650
@scrollReset = $(@el).data('scrollreset') || 200
@init()
init: ->
$(@el).wrapInner('<span class="responsiveText-wrapper" />')
@adjustOnLoad()
@adjustOnResize()
@scrollOnHover() if @scrollable
resizeText: ->
calculatedFontSize = $(@el).width() / @compression
fontSize = Math.max(Math.min(calculatedFontSize,@maxFontSize),@minFontSize)
$(@el).css
"font-size": Math.floor(fontSize)
adjustOnLoad: ->
$(window).on 'load', =>
@resizeText()
adjustOnResize: ->
$(window).on 'resize', =>
clearTimeout(delayedAdjustText[@index])
delayedAdjustText[@index] = setTimeout(=>
@resizeText()
, 20)
scrollOnHover: ->
$(@el).css
'overflow': 'hidden'
'text-overflow': 'ellipsis'
'white-space': 'nowrap'
$(@el).hover =>
@difference = @el.scrollWidth - $(@el).width()
@scrollSpeed = @difference if @difference > @scrollSpeed
if @difference > 0
$(@el).css('cursor', 'e-resize')
$(@el).stop().animate
"text-indent": -@difference
, @scrollSpeed
, =>
$(@el).css('cursor', 'text')
, =>
$(@el).stop().animate
"text-indent": 0
, @scrollReset
(($) ->
responsiveTextElements = []
$.fn.responsiveText = (options) ->
@each ->
responsiveTextElements.push( new ResponsiveText(@) )
) jQuery
$(document).ready ->
$(".responsive").not('table').responsiveText()
| true | ###
*
* jQuery ResponsiveText by PI:NAME:<NAME>END_PI
* https://github.com/ghepting/jquery-responsive-text
*
* Open source under the MIT License.
*
* Copyright © 2013 PI:NAME:<NAME>END_PI. All rights reserved.
*
###
delayedAdjustText = []
responsiveTextIndex = 0
class ResponsiveText
constructor: (el) ->
@index = responsiveTextIndex++
@el = el
@compression = $(@el).data('compression') || 10
@minFontSize = $(@el).data('min') || Number.NEGATIVE_INFINITY
@maxFontSize = $(@el).data('max') || Number.POSITIVE_INFINITY
@scrollable = $(@el).data('scrollable') || false
@scrollSpeed = $(@el).data('scrollspeed') || 650
@scrollReset = $(@el).data('scrollreset') || 200
@init()
init: ->
$(@el).wrapInner('<span class="responsiveText-wrapper" />')
@adjustOnLoad()
@adjustOnResize()
@scrollOnHover() if @scrollable
resizeText: ->
calculatedFontSize = $(@el).width() / @compression
fontSize = Math.max(Math.min(calculatedFontSize,@maxFontSize),@minFontSize)
$(@el).css
"font-size": Math.floor(fontSize)
adjustOnLoad: ->
$(window).on 'load', =>
@resizeText()
adjustOnResize: ->
$(window).on 'resize', =>
clearTimeout(delayedAdjustText[@index])
delayedAdjustText[@index] = setTimeout(=>
@resizeText()
, 20)
scrollOnHover: ->
$(@el).css
'overflow': 'hidden'
'text-overflow': 'ellipsis'
'white-space': 'nowrap'
$(@el).hover =>
@difference = @el.scrollWidth - $(@el).width()
@scrollSpeed = @difference if @difference > @scrollSpeed
if @difference > 0
$(@el).css('cursor', 'e-resize')
$(@el).stop().animate
"text-indent": -@difference
, @scrollSpeed
, =>
$(@el).css('cursor', 'text')
, =>
$(@el).stop().animate
"text-indent": 0
, @scrollReset
(($) ->
responsiveTextElements = []
$.fn.responsiveText = (options) ->
@each ->
responsiveTextElements.push( new ResponsiveText(@) )
) jQuery
$(document).ready ->
$(".responsive").not('table').responsiveText()
|
[
{
"context": " }\n\n options2 = \n {\n username: 711500\n password: \"hackathon2\"\n token: \"ab",
"end": 373,
"score": 0.9992011189460754,
"start": 367,
"tag": "USERNAME",
"value": "711500"
},
{
"context": " {\n username: 711500\n pa... | server/methods/methods.coffee | niinyarko/retail-pay | 0 | Meteor.methods
createTransaction: (doc) ->
res = Transactions.insert doc
payee_phone = Payees.findOne(doc.payee)?.phone_number
payer_phone = Meteor.users.findOne(_id: @userId)?.profile?.phone_number
options1 =
{
vendor: 711500
amount: doc.amount
phone: payer_phone
}
options2 =
{
username: 711500
password: "hackathon2"
token: "abc1234"
amount: doc.amount
}
@unblock()
Meteor.defer ->
try
result = HTTP.call('POST', 'http://testpay.vodafonecash.com.gh/SendSMS.php', params: options1)
console.log result
catch e
console.log e
return
| 129763 | Meteor.methods
createTransaction: (doc) ->
res = Transactions.insert doc
payee_phone = Payees.findOne(doc.payee)?.phone_number
payer_phone = Meteor.users.findOne(_id: @userId)?.profile?.phone_number
options1 =
{
vendor: 711500
amount: doc.amount
phone: payer_phone
}
options2 =
{
username: 711500
password: "<PASSWORD>"
token: "<KEY>"
amount: doc.amount
}
@unblock()
Meteor.defer ->
try
result = HTTP.call('POST', 'http://testpay.vodafonecash.com.gh/SendSMS.php', params: options1)
console.log result
catch e
console.log e
return
| true | Meteor.methods
createTransaction: (doc) ->
res = Transactions.insert doc
payee_phone = Payees.findOne(doc.payee)?.phone_number
payer_phone = Meteor.users.findOne(_id: @userId)?.profile?.phone_number
options1 =
{
vendor: 711500
amount: doc.amount
phone: payer_phone
}
options2 =
{
username: 711500
password: "PI:PASSWORD:<PASSWORD>END_PI"
token: "PI:KEY:<KEY>END_PI"
amount: doc.amount
}
@unblock()
Meteor.defer ->
try
result = HTTP.call('POST', 'http://testpay.vodafonecash.com.gh/SendSMS.php', params: options1)
console.log result
catch e
console.log e
return
|
[
{
"context": "e('gapi')\n .value 'GoogleApp', \n apiKey: '1234'\n clientId: 'abcd'\n\n beforeEach module 'gap",
"end": 221,
"score": 0.9986239671707153,
"start": 217,
"tag": "KEY",
"value": "1234"
},
{
"context": "pp = {\n oauthToken: {\n access_token: '1234... | public/libs/ngGAPI/test/spec/YoutubeSpec.coffee | Evezown/evezown_production | 94 | describe 'GAPI', ->
{
GAPI,Youtube,
$httpBackend,baseUrl,
getHeaders,postHeaders,putHeaders,deleteHeaders,
authorization
} = {}
angular.module('gapi')
.value 'GoogleApp',
apiKey: '1234'
clientId: 'abcd'
beforeEach module 'gapi'
beforeEach inject ($injector) ->
GAPI = $injector.get 'GAPI'
$httpBackend = $injector.get '$httpBackend'
GAPI.app = {
oauthToken: {
access_token: '1234abcd'
}
}
getHeaders = deleteHeaders =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
postHeaders = putHeaders =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
"Content-Type":"application/json;charset=utf-8"
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
describe 'Youtube', ->
beforeEach inject ($injector) ->
Youtube = $injector.get('Youtube')
# SERVICE PROPERTIES
it 'should refer to the youtube api', ->
expect(Youtube.api).toBe 'youtube'
it 'should refer to version 3', ->
expect(Youtube.version).toBe 'v3'
it 'should refer to the correct url', ->
expect(Youtube.url).toBe 'https://www.googleapis.com/youtube/v3/'
# ACTIVITIES
it 'should list activities', ->
url = "#{Youtube.url}activities?home=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listActivities({part:'snippet', home:true})
$httpBackend.flush()
it 'should insert activities', ->
url = "#{Youtube.url}activities?part=snippet"
data = snippet: { description: 'description' }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertActivities(data, { part: 'snippet' })
$httpBackend.flush()
# This one uploads a file. How do we do uploads?
it 'should insert channel banners'
# CHANNELS
it 'should list channels', ->
url = "#{Youtube.url}channels?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listChannels({part:'snippet', mine:true})
$httpBackend.flush()
it 'should update channels', ->
url = "#{Youtube.url}channels?part=id"
data = { id: 'qwerty' }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updateChannels(data, { part: 'id' })
$httpBackend.flush()
# GUIDE CATEGORIES
it 'should list guide categories', ->
url = "#{Youtube.url}guideCategories?part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listGuideCategories({part:'snippet'})
$httpBackend.flush()
# LIVE BROADCASTS
it 'should bind live broadcasts'
it 'should control live broadcasts'
it 'should transition live broadcasts'
it 'should list live broadcasts'
it 'should insert live broadcasts'
it 'should update live broadcasts'
it 'should delete live broadcasts'
# LIVE STREAMS
it 'should list live streams'
it 'should insert live streams'
it 'should update live streams'
it 'should delete live streams'
# PLAYLIST ITEMS
it 'should list playlist items', ->
url = "#{Youtube.url}playlistItems?maxResults=50&part=snippet&playlistId=1234"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listPlaylistItems
part: 'snippet'
maxResults: 50
playlistId: '1234'
$httpBackend.flush()
it 'should insert playlist items', ->
url = "#{Youtube.url}playlistItems?part=snippet"
data = { snippet: { playlistId: 'x', resourceId: 'y'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertPlaylistItems(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update playlist items', ->
url = "#{Youtube.url}playlistItems?part=snippet"
data = { id: '123', snippet: { playlistId: 'x', resourceId: 'y'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updatePlaylistItems(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete playlist items', ->
url = "#{Youtube.url}playlistItems?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deletePlaylistItems id: 'xyz'
$httpBackend.flush()
# PLAYLISTS
it 'should list playlists', ->
url = "#{Youtube.url}playlists?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listPlaylists({part:'snippet', mine:true})
$httpBackend.flush()
it 'should insert playlists', ->
url = "#{Youtube.url}playlists?part=snippet"
data = { snippet: { title: 'abc'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertPlaylists(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update playlists', ->
url = "#{Youtube.url}playlists?part=snippet"
data = { id: '123', snippet: { title: 'updated'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updatePlaylists(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete playlists', ->
url = "#{Youtube.url}playlists?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deletePlaylists id: 'xyz'
$httpBackend.flush()
# SUBSCRIPTIONS
it 'should list subscriptions', ->
url = "#{Youtube.url}subscriptions?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listSubscriptions({part:'snippet', mine:true})
$httpBackend.flush()
it 'should insert subscriptions', ->
url = "#{Youtube.url}subscriptions?part=snippet"
data = { snippet: { resourceId: 'abc'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertSubscriptions(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete subscriptions', ->
url = "#{Youtube.url}subscriptions?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deleteSubscriptions id: 'xyz'
$httpBackend.flush()
# THUMBNAILS
it 'should set thumbnails', ->
url = "#{Youtube.url}thumbnails/set?videoId=123"
headers =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.setThumbnails({ videoId: '123' })
$httpBackend.flush()
# VIDEO CATEGORIES
it 'should list video categories', ->
url = "#{Youtube.url}videoCategories?part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listVideoCategories({ part:'snippet' })
$httpBackend.flush()
# VIDEOS
it 'should list videos', ->
url = "#{Youtube.url}videos?myRating=like&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listVideos({part:'snippet', myRating:'like'})
$httpBackend.flush()
it 'should insert videos', ->
url = "#{Youtube.url}videos?part=snippet"
data = { snippet: { title: 'foo'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertVideos(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update videos', ->
url = "#{Youtube.url}videos?part=snippet"
data = { id: '123', snippet: { title: 'updated'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updateVideos(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete videos', ->
url = "#{Youtube.url}videos?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deleteVideos id: 'xyz'
$httpBackend.flush()
it 'should rate videos', ->
url = "#{Youtube.url}videos/rate?id=xyz&rating=like"
headers =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.rateVideos({ id: 'xyz', rating: 'like' })
$httpBackend.flush()
it 'should get rating', ->
url = "#{Youtube.url}videos/getRating?id=xyz"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.getVideoRating({ id: 'xyz' })
$httpBackend.flush()
# WATERMARKS
it 'should set watermarks', ->
url = "#{Youtube.url}watermarks/set?channelId=123"
headers =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.setWatermarks({ channelId: '123' })
$httpBackend.flush()
it 'should unset watermark', ->
url = "#{Youtube.url}watermarks/unset?channelId=123"
headers =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.unsetWatermarks({ channelId: '123' })
$httpBackend.flush()
# SEARCH
it 'should search', ->
url = "#{Youtube.url}search?part=snippet&q=terms"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.search({ part: 'snippet', q: 'terms' })
$httpBackend.flush()
| 43184 | describe 'GAPI', ->
{
GAPI,Youtube,
$httpBackend,baseUrl,
getHeaders,postHeaders,putHeaders,deleteHeaders,
authorization
} = {}
angular.module('gapi')
.value 'GoogleApp',
apiKey: '<KEY>'
clientId: 'abcd'
beforeEach module 'gapi'
beforeEach inject ($injector) ->
GAPI = $injector.get 'GAPI'
$httpBackend = $injector.get '$httpBackend'
GAPI.app = {
oauthToken: {
access_token: '<KEY> <PASSWORD>'
}
}
getHeaders = deleteHeaders =
"Authorization":"Bearer <KEY>"
"Accept":"application/json, text/plain, */*"
postHeaders = putHeaders =
"Authorization":"Bearer <KEY>"
"Accept":"application/json, text/plain, */*"
"Content-Type":"application/json;charset=utf-8"
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
describe 'Youtube', ->
beforeEach inject ($injector) ->
Youtube = $injector.get('Youtube')
# SERVICE PROPERTIES
it 'should refer to the youtube api', ->
expect(Youtube.api).toBe 'youtube'
it 'should refer to version 3', ->
expect(Youtube.version).toBe 'v3'
it 'should refer to the correct url', ->
expect(Youtube.url).toBe 'https://www.googleapis.com/youtube/v3/'
# ACTIVITIES
it 'should list activities', ->
url = "#{Youtube.url}activities?home=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listActivities({part:'snippet', home:true})
$httpBackend.flush()
it 'should insert activities', ->
url = "#{Youtube.url}activities?part=snippet"
data = snippet: { description: 'description' }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertActivities(data, { part: 'snippet' })
$httpBackend.flush()
# This one uploads a file. How do we do uploads?
it 'should insert channel banners'
# CHANNELS
it 'should list channels', ->
url = "#{Youtube.url}channels?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listChannels({part:'snippet', mine:true})
$httpBackend.flush()
it 'should update channels', ->
url = "#{Youtube.url}channels?part=id"
data = { id: 'qwerty' }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updateChannels(data, { part: 'id' })
$httpBackend.flush()
# GUIDE CATEGORIES
it 'should list guide categories', ->
url = "#{Youtube.url}guideCategories?part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listGuideCategories({part:'snippet'})
$httpBackend.flush()
# LIVE BROADCASTS
it 'should bind live broadcasts'
it 'should control live broadcasts'
it 'should transition live broadcasts'
it 'should list live broadcasts'
it 'should insert live broadcasts'
it 'should update live broadcasts'
it 'should delete live broadcasts'
# LIVE STREAMS
it 'should list live streams'
it 'should insert live streams'
it 'should update live streams'
it 'should delete live streams'
# PLAYLIST ITEMS
it 'should list playlist items', ->
url = "#{Youtube.url}playlistItems?maxResults=50&part=snippet&playlistId=1234"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listPlaylistItems
part: 'snippet'
maxResults: 50
playlistId: '1234'
$httpBackend.flush()
it 'should insert playlist items', ->
url = "#{Youtube.url}playlistItems?part=snippet"
data = { snippet: { playlistId: 'x', resourceId: 'y'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertPlaylistItems(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update playlist items', ->
url = "#{Youtube.url}playlistItems?part=snippet"
data = { id: '123', snippet: { playlistId: 'x', resourceId: 'y'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updatePlaylistItems(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete playlist items', ->
url = "#{Youtube.url}playlistItems?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deletePlaylistItems id: 'xyz'
$httpBackend.flush()
# PLAYLISTS
it 'should list playlists', ->
url = "#{Youtube.url}playlists?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listPlaylists({part:'snippet', mine:true})
$httpBackend.flush()
it 'should insert playlists', ->
url = "#{Youtube.url}playlists?part=snippet"
data = { snippet: { title: 'abc'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertPlaylists(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update playlists', ->
url = "#{Youtube.url}playlists?part=snippet"
data = { id: '123', snippet: { title: 'updated'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updatePlaylists(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete playlists', ->
url = "#{Youtube.url}playlists?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deletePlaylists id: 'xyz'
$httpBackend.flush()
# SUBSCRIPTIONS
it 'should list subscriptions', ->
url = "#{Youtube.url}subscriptions?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listSubscriptions({part:'snippet', mine:true})
$httpBackend.flush()
it 'should insert subscriptions', ->
url = "#{Youtube.url}subscriptions?part=snippet"
data = { snippet: { resourceId: 'abc'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertSubscriptions(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete subscriptions', ->
url = "#{Youtube.url}subscriptions?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deleteSubscriptions id: 'xyz'
$httpBackend.flush()
# THUMBNAILS
it 'should set thumbnails', ->
url = "#{Youtube.url}thumbnails/set?videoId=123"
headers =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.setThumbnails({ videoId: '123' })
$httpBackend.flush()
# VIDEO CATEGORIES
it 'should list video categories', ->
url = "#{Youtube.url}videoCategories?part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listVideoCategories({ part:'snippet' })
$httpBackend.flush()
# VIDEOS
it 'should list videos', ->
url = "#{Youtube.url}videos?myRating=like&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listVideos({part:'snippet', myRating:'like'})
$httpBackend.flush()
it 'should insert videos', ->
url = "#{Youtube.url}videos?part=snippet"
data = { snippet: { title: 'foo'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertVideos(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update videos', ->
url = "#{Youtube.url}videos?part=snippet"
data = { id: '123', snippet: { title: 'updated'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updateVideos(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete videos', ->
url = "#{Youtube.url}videos?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deleteVideos id: 'xyz'
$httpBackend.flush()
it 'should rate videos', ->
url = "#{Youtube.url}videos/rate?id=xyz&rating=like"
headers =
"Authorization":"Bearer 1<KEY>abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.rateVideos({ id: 'xyz', rating: 'like' })
$httpBackend.flush()
it 'should get rating', ->
url = "#{Youtube.url}videos/getRating?id=xyz"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.getVideoRating({ id: 'xyz' })
$httpBackend.flush()
# WATERMARKS
it 'should set watermarks', ->
url = "#{Youtube.url}watermarks/set?channelId=123"
headers =
"Authorization":"Bearer 1<KEY>34abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.setWatermarks({ channelId: '123' })
$httpBackend.flush()
it 'should unset watermark', ->
url = "#{Youtube.url}watermarks/unset?channelId=123"
headers =
"Authorization":"Bearer 1<KEY>34abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.unsetWatermarks({ channelId: '123' })
$httpBackend.flush()
# SEARCH
it 'should search', ->
url = "#{Youtube.url}search?part=snippet&q=terms"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.search({ part: 'snippet', q: 'terms' })
$httpBackend.flush()
| true | describe 'GAPI', ->
{
GAPI,Youtube,
$httpBackend,baseUrl,
getHeaders,postHeaders,putHeaders,deleteHeaders,
authorization
} = {}
angular.module('gapi')
.value 'GoogleApp',
apiKey: 'PI:KEY:<KEY>END_PI'
clientId: 'abcd'
beforeEach module 'gapi'
beforeEach inject ($injector) ->
GAPI = $injector.get 'GAPI'
$httpBackend = $injector.get '$httpBackend'
GAPI.app = {
oauthToken: {
access_token: 'PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI'
}
}
getHeaders = deleteHeaders =
"Authorization":"Bearer PI:KEY:<KEY>END_PI"
"Accept":"application/json, text/plain, */*"
postHeaders = putHeaders =
"Authorization":"Bearer PI:KEY:<KEY>END_PI"
"Accept":"application/json, text/plain, */*"
"Content-Type":"application/json;charset=utf-8"
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
describe 'Youtube', ->
beforeEach inject ($injector) ->
Youtube = $injector.get('Youtube')
# SERVICE PROPERTIES
it 'should refer to the youtube api', ->
expect(Youtube.api).toBe 'youtube'
it 'should refer to version 3', ->
expect(Youtube.version).toBe 'v3'
it 'should refer to the correct url', ->
expect(Youtube.url).toBe 'https://www.googleapis.com/youtube/v3/'
# ACTIVITIES
it 'should list activities', ->
url = "#{Youtube.url}activities?home=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listActivities({part:'snippet', home:true})
$httpBackend.flush()
it 'should insert activities', ->
url = "#{Youtube.url}activities?part=snippet"
data = snippet: { description: 'description' }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertActivities(data, { part: 'snippet' })
$httpBackend.flush()
# This one uploads a file. How do we do uploads?
it 'should insert channel banners'
# CHANNELS
it 'should list channels', ->
url = "#{Youtube.url}channels?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listChannels({part:'snippet', mine:true})
$httpBackend.flush()
it 'should update channels', ->
url = "#{Youtube.url}channels?part=id"
data = { id: 'qwerty' }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updateChannels(data, { part: 'id' })
$httpBackend.flush()
# GUIDE CATEGORIES
it 'should list guide categories', ->
url = "#{Youtube.url}guideCategories?part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listGuideCategories({part:'snippet'})
$httpBackend.flush()
# LIVE BROADCASTS
it 'should bind live broadcasts'
it 'should control live broadcasts'
it 'should transition live broadcasts'
it 'should list live broadcasts'
it 'should insert live broadcasts'
it 'should update live broadcasts'
it 'should delete live broadcasts'
# LIVE STREAMS
it 'should list live streams'
it 'should insert live streams'
it 'should update live streams'
it 'should delete live streams'
# PLAYLIST ITEMS
it 'should list playlist items', ->
url = "#{Youtube.url}playlistItems?maxResults=50&part=snippet&playlistId=1234"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listPlaylistItems
part: 'snippet'
maxResults: 50
playlistId: '1234'
$httpBackend.flush()
it 'should insert playlist items', ->
url = "#{Youtube.url}playlistItems?part=snippet"
data = { snippet: { playlistId: 'x', resourceId: 'y'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertPlaylistItems(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update playlist items', ->
url = "#{Youtube.url}playlistItems?part=snippet"
data = { id: '123', snippet: { playlistId: 'x', resourceId: 'y'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updatePlaylistItems(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete playlist items', ->
url = "#{Youtube.url}playlistItems?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deletePlaylistItems id: 'xyz'
$httpBackend.flush()
# PLAYLISTS
it 'should list playlists', ->
url = "#{Youtube.url}playlists?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listPlaylists({part:'snippet', mine:true})
$httpBackend.flush()
it 'should insert playlists', ->
url = "#{Youtube.url}playlists?part=snippet"
data = { snippet: { title: 'abc'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertPlaylists(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update playlists', ->
url = "#{Youtube.url}playlists?part=snippet"
data = { id: '123', snippet: { title: 'updated'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updatePlaylists(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete playlists', ->
url = "#{Youtube.url}playlists?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deletePlaylists id: 'xyz'
$httpBackend.flush()
# SUBSCRIPTIONS
it 'should list subscriptions', ->
url = "#{Youtube.url}subscriptions?mine=true&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listSubscriptions({part:'snippet', mine:true})
$httpBackend.flush()
it 'should insert subscriptions', ->
url = "#{Youtube.url}subscriptions?part=snippet"
data = { snippet: { resourceId: 'abc'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertSubscriptions(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete subscriptions', ->
url = "#{Youtube.url}subscriptions?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deleteSubscriptions id: 'xyz'
$httpBackend.flush()
# THUMBNAILS
it 'should set thumbnails', ->
url = "#{Youtube.url}thumbnails/set?videoId=123"
headers =
"Authorization":"Bearer 1234abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.setThumbnails({ videoId: '123' })
$httpBackend.flush()
# VIDEO CATEGORIES
it 'should list video categories', ->
url = "#{Youtube.url}videoCategories?part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listVideoCategories({ part:'snippet' })
$httpBackend.flush()
# VIDEOS
it 'should list videos', ->
url = "#{Youtube.url}videos?myRating=like&part=snippet"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.listVideos({part:'snippet', myRating:'like'})
$httpBackend.flush()
it 'should insert videos', ->
url = "#{Youtube.url}videos?part=snippet"
data = { snippet: { title: 'foo'} }
$httpBackend.expectPOST(url, data, postHeaders).respond null
Youtube.insertVideos(data, { part: 'snippet' })
$httpBackend.flush()
it 'should update videos', ->
url = "#{Youtube.url}videos?part=snippet"
data = { id: '123', snippet: { title: 'updated'} }
$httpBackend.expectPUT(url, data, postHeaders).respond null
Youtube.updateVideos(data, { part: 'snippet' })
$httpBackend.flush()
it 'should delete videos', ->
url = "#{Youtube.url}videos?id=xyz"
$httpBackend.expectDELETE(url, deleteHeaders).respond null
Youtube.deleteVideos id: 'xyz'
$httpBackend.flush()
it 'should rate videos', ->
url = "#{Youtube.url}videos/rate?id=xyz&rating=like"
headers =
"Authorization":"Bearer 1PI:KEY:<KEY>END_PIabcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.rateVideos({ id: 'xyz', rating: 'like' })
$httpBackend.flush()
it 'should get rating', ->
url = "#{Youtube.url}videos/getRating?id=xyz"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.getVideoRating({ id: 'xyz' })
$httpBackend.flush()
# WATERMARKS
it 'should set watermarks', ->
url = "#{Youtube.url}watermarks/set?channelId=123"
headers =
"Authorization":"Bearer 1PI:KEY:<KEY>END_PI34abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.setWatermarks({ channelId: '123' })
$httpBackend.flush()
it 'should unset watermark', ->
url = "#{Youtube.url}watermarks/unset?channelId=123"
headers =
"Authorization":"Bearer 1PI:KEY:<KEY>END_PI34abcd"
"Accept":"application/json, text/plain, */*"
$httpBackend.expectPOST(url, undefined, headers).respond null
Youtube.unsetWatermarks({ channelId: '123' })
$httpBackend.flush()
# SEARCH
it 'should search', ->
url = "#{Youtube.url}search?part=snippet&q=terms"
$httpBackend.expectGET(url, getHeaders).respond null
Youtube.search({ part: 'snippet', q: 'terms' })
$httpBackend.flush()
|
[
{
"context": "Key, i) ->\n <Line\n {...@props}\n key={i}\n color={@props.dataColors[i]}\n coords=",
"end": 1397,
"score": 0.7546555399894714,
"start": 1396,
"tag": "KEY",
"value": "i"
}
] | app/dashboard/line-graph/line-graph.cjsx | Crentist/Panoptes-frontend-spanish | 1 | React = require 'react'
HoverBox = require './hover-box'
Line = require './line'
XAxis = require './x-axis'
XLabels = require './x-labels'
YRules = require './y-rules'
YLabels = require './y-labels'
module?.exports = React.createClass
displayName: 'LineGraph'
axisColor: 'lightgrey'
propTypes:
data: React.PropTypes.array # Array of objects to plot
xKey: React.PropTypes.string # Key for x-values in each object
yKeys: React.PropTypes.array # Keys for y-values to plot in each object
dataColors: React.PropTypes.array # Colors for lines in yKeys
pointRadius: React.PropTypes.number # Radius of circle on graph
height: React.PropTypes.number # Height of Graph
yLines: React.PropTypes.number # Number of y-axis horizontal rules
yLabel: React.PropTypes.string # Label for hover-box value
getInitialState: ->
circleHover: {x: 0, y: 0}
coords: (yKey) ->
dataMax = @dataMax()
radiusPercent = @radiusPercent()
xWidth = @percentWidth()
@props.data.map (obj, i) =>
{x: @xPercent(i, xWidth), y: @yPercent(obj[yKey], dataMax, radiusPercent), value: obj[yKey]}
coordPairs: (yKey) ->
coords = @coords(yKey)
coords
.map (coord, i) =>
{coord1: coord, coord2: coords[i + 1]}
.filter(@coordPairNotUndefined)
line: (yKey, i) ->
<Line
{...@props}
key={i}
color={@props.dataColors[i]}
coords={@coords(yKey)}
coordPairs={@coordPairs(yKey)}
onCircleMouseOver={@onCircleMouseOver}
onCircleMouseOut={@onCircleMouseOut} />
onCircleMouseOver: (e) ->
position = e.target.getBoundingClientRect()
dataValue = e.target.getAttribute('value')
@setState circleHover: {x: +position.left, y: +position.top, content: "#{dataValue} #{@props.yLabel}"}
onCircleMouseOut: (e) ->
@setState circleHover: {}
render: ->
lines = @props.yKeys.map(@line)
<div className='line-graph'>
<svg width="100%" height={@props.height}>
<YLabels {...@props} max={@dataMax()} />
<g transform="scale(1,-1), translate(0, -#{@props.height})">
<XAxis {...@props} color={@axisColor} ticks={@xAxisPercentages()}/>
<YRules yLines={@props.yLines} height=@props.height />
{lines}
</g>
</svg>
<XLabels {...@props} />
<HoverBox top={@state.circleHover.y} left={@state.circleHover.x}>
{@state.circleHover.content}
</HoverBox>
</div>
flatten: (arrayOfArrays) ->
arrayOfArrays.reduce (flatArr, nestedArr) => flatArr.concat nestedArr
allDataValues: ->
# returns 1 dimensional array of all raw data values
@flatten @props.yKeys.map (key) => @props.data.map (obj) =>
obj[key] ? 0
dataMax: ->
Math.max @allDataValues()...
xAxisPercentages: ->
xWidth = @percentWidth()
[0...@props.data.length].map (i) => @xPercent(i, xWidth)
percentWidth: ->
(1 / @props.data.length) * 100
xPercent: (i, xWidth) ->
# percentage width of x values
xWidth * i + (xWidth / 2)
radiusPercent: ->
((@props.pointRadius / @props.height) * 100) ? 0
yPercent: (dataVal, dataMax, radiusPercent) ->
# ensures circles are above x-axis line and inside the top of the graph
# TODO: how to best handle extreme outliers?
percentVal = (dataVal / dataMax) * 100
(percentVal - ((percentVal / dataMax) * radiusPercent)) + radiusPercent
coordPairNotUndefined: (p) ->
p.coord1? and p.coord2?
| 159619 | React = require 'react'
HoverBox = require './hover-box'
Line = require './line'
XAxis = require './x-axis'
XLabels = require './x-labels'
YRules = require './y-rules'
YLabels = require './y-labels'
module?.exports = React.createClass
displayName: 'LineGraph'
axisColor: 'lightgrey'
propTypes:
data: React.PropTypes.array # Array of objects to plot
xKey: React.PropTypes.string # Key for x-values in each object
yKeys: React.PropTypes.array # Keys for y-values to plot in each object
dataColors: React.PropTypes.array # Colors for lines in yKeys
pointRadius: React.PropTypes.number # Radius of circle on graph
height: React.PropTypes.number # Height of Graph
yLines: React.PropTypes.number # Number of y-axis horizontal rules
yLabel: React.PropTypes.string # Label for hover-box value
getInitialState: ->
circleHover: {x: 0, y: 0}
coords: (yKey) ->
dataMax = @dataMax()
radiusPercent = @radiusPercent()
xWidth = @percentWidth()
@props.data.map (obj, i) =>
{x: @xPercent(i, xWidth), y: @yPercent(obj[yKey], dataMax, radiusPercent), value: obj[yKey]}
coordPairs: (yKey) ->
coords = @coords(yKey)
coords
.map (coord, i) =>
{coord1: coord, coord2: coords[i + 1]}
.filter(@coordPairNotUndefined)
line: (yKey, i) ->
<Line
{...@props}
key={<KEY>}
color={@props.dataColors[i]}
coords={@coords(yKey)}
coordPairs={@coordPairs(yKey)}
onCircleMouseOver={@onCircleMouseOver}
onCircleMouseOut={@onCircleMouseOut} />
onCircleMouseOver: (e) ->
position = e.target.getBoundingClientRect()
dataValue = e.target.getAttribute('value')
@setState circleHover: {x: +position.left, y: +position.top, content: "#{dataValue} #{@props.yLabel}"}
onCircleMouseOut: (e) ->
@setState circleHover: {}
render: ->
lines = @props.yKeys.map(@line)
<div className='line-graph'>
<svg width="100%" height={@props.height}>
<YLabels {...@props} max={@dataMax()} />
<g transform="scale(1,-1), translate(0, -#{@props.height})">
<XAxis {...@props} color={@axisColor} ticks={@xAxisPercentages()}/>
<YRules yLines={@props.yLines} height=@props.height />
{lines}
</g>
</svg>
<XLabels {...@props} />
<HoverBox top={@state.circleHover.y} left={@state.circleHover.x}>
{@state.circleHover.content}
</HoverBox>
</div>
flatten: (arrayOfArrays) ->
arrayOfArrays.reduce (flatArr, nestedArr) => flatArr.concat nestedArr
allDataValues: ->
# returns 1 dimensional array of all raw data values
@flatten @props.yKeys.map (key) => @props.data.map (obj) =>
obj[key] ? 0
dataMax: ->
Math.max @allDataValues()...
xAxisPercentages: ->
xWidth = @percentWidth()
[0...@props.data.length].map (i) => @xPercent(i, xWidth)
percentWidth: ->
(1 / @props.data.length) * 100
xPercent: (i, xWidth) ->
# percentage width of x values
xWidth * i + (xWidth / 2)
radiusPercent: ->
((@props.pointRadius / @props.height) * 100) ? 0
yPercent: (dataVal, dataMax, radiusPercent) ->
# ensures circles are above x-axis line and inside the top of the graph
# TODO: how to best handle extreme outliers?
percentVal = (dataVal / dataMax) * 100
(percentVal - ((percentVal / dataMax) * radiusPercent)) + radiusPercent
coordPairNotUndefined: (p) ->
p.coord1? and p.coord2?
| true | React = require 'react'
HoverBox = require './hover-box'
Line = require './line'
XAxis = require './x-axis'
XLabels = require './x-labels'
YRules = require './y-rules'
YLabels = require './y-labels'
module?.exports = React.createClass
displayName: 'LineGraph'
axisColor: 'lightgrey'
propTypes:
data: React.PropTypes.array # Array of objects to plot
xKey: React.PropTypes.string # Key for x-values in each object
yKeys: React.PropTypes.array # Keys for y-values to plot in each object
dataColors: React.PropTypes.array # Colors for lines in yKeys
pointRadius: React.PropTypes.number # Radius of circle on graph
height: React.PropTypes.number # Height of Graph
yLines: React.PropTypes.number # Number of y-axis horizontal rules
yLabel: React.PropTypes.string # Label for hover-box value
getInitialState: ->
circleHover: {x: 0, y: 0}
coords: (yKey) ->
dataMax = @dataMax()
radiusPercent = @radiusPercent()
xWidth = @percentWidth()
@props.data.map (obj, i) =>
{x: @xPercent(i, xWidth), y: @yPercent(obj[yKey], dataMax, radiusPercent), value: obj[yKey]}
coordPairs: (yKey) ->
coords = @coords(yKey)
coords
.map (coord, i) =>
{coord1: coord, coord2: coords[i + 1]}
.filter(@coordPairNotUndefined)
line: (yKey, i) ->
<Line
{...@props}
key={PI:KEY:<KEY>END_PI}
color={@props.dataColors[i]}
coords={@coords(yKey)}
coordPairs={@coordPairs(yKey)}
onCircleMouseOver={@onCircleMouseOver}
onCircleMouseOut={@onCircleMouseOut} />
onCircleMouseOver: (e) ->
position = e.target.getBoundingClientRect()
dataValue = e.target.getAttribute('value')
@setState circleHover: {x: +position.left, y: +position.top, content: "#{dataValue} #{@props.yLabel}"}
onCircleMouseOut: (e) ->
@setState circleHover: {}
render: ->
lines = @props.yKeys.map(@line)
<div className='line-graph'>
<svg width="100%" height={@props.height}>
<YLabels {...@props} max={@dataMax()} />
<g transform="scale(1,-1), translate(0, -#{@props.height})">
<XAxis {...@props} color={@axisColor} ticks={@xAxisPercentages()}/>
<YRules yLines={@props.yLines} height=@props.height />
{lines}
</g>
</svg>
<XLabels {...@props} />
<HoverBox top={@state.circleHover.y} left={@state.circleHover.x}>
{@state.circleHover.content}
</HoverBox>
</div>
flatten: (arrayOfArrays) ->
arrayOfArrays.reduce (flatArr, nestedArr) => flatArr.concat nestedArr
allDataValues: ->
# returns 1 dimensional array of all raw data values
@flatten @props.yKeys.map (key) => @props.data.map (obj) =>
obj[key] ? 0
dataMax: ->
Math.max @allDataValues()...
xAxisPercentages: ->
xWidth = @percentWidth()
[0...@props.data.length].map (i) => @xPercent(i, xWidth)
percentWidth: ->
(1 / @props.data.length) * 100
xPercent: (i, xWidth) ->
# percentage width of x values
xWidth * i + (xWidth / 2)
radiusPercent: ->
((@props.pointRadius / @props.height) * 100) ? 0
yPercent: (dataVal, dataMax, radiusPercent) ->
# ensures circles are above x-axis line and inside the top of the graph
# TODO: how to best handle extreme outliers?
percentVal = (dataVal / dataMax) * 100
(percentVal - ((percentVal / dataMax) * radiusPercent)) + radiusPercent
coordPairNotUndefined: (p) ->
p.coord1? and p.coord2?
|
[
{
"context": " session:\n secret: \"I'mma GREYLIEN!\"\n doorman(config)\n # config.doorma",
"end": 681,
"score": 0.8390476703643799,
"start": 667,
"tag": "NAME",
"value": "I'mma GREYLIEN"
},
{
"context": " # config.doorman.session.secret... | src/server/auth/config_test.coffee | RupertJS/rupert-doorman | 0 | doorman = require('./config')
Config = require('rupert').Config
describe 'Doorman', ->
it 'exports a function to configure', ->
doorman.should.be.an.instanceof Function
it 'configures the Rupert config with its route', ->
config = new Config {}
doorman(config)
config.routing.length.should.equal 1
config.routing[0].should.match /src.server.auth.route\.js/
it 'sets good session defaults', ->
config = new Config {}
doorman(config)
config.doorman.session.secret.length.should.equal 24
config = new Config
doorman:
session:
secret: "I'mma GREYLIEN!"
doorman(config)
# config.doorman.session.secret.should.equal "I'mma GREYLIEN!"
config.doorman.session.cookie.secure.should.equal false
| 132791 | doorman = require('./config')
Config = require('rupert').Config
describe 'Doorman', ->
it 'exports a function to configure', ->
doorman.should.be.an.instanceof Function
it 'configures the Rupert config with its route', ->
config = new Config {}
doorman(config)
config.routing.length.should.equal 1
config.routing[0].should.match /src.server.auth.route\.js/
it 'sets good session defaults', ->
config = new Config {}
doorman(config)
config.doorman.session.secret.length.should.equal 24
config = new Config
doorman:
session:
secret: "<NAME>!"
doorman(config)
# config.doorman.session.secret.should.equal "<NAME>!"
config.doorman.session.cookie.secure.should.equal false
| true | doorman = require('./config')
Config = require('rupert').Config
describe 'Doorman', ->
it 'exports a function to configure', ->
doorman.should.be.an.instanceof Function
it 'configures the Rupert config with its route', ->
config = new Config {}
doorman(config)
config.routing.length.should.equal 1
config.routing[0].should.match /src.server.auth.route\.js/
it 'sets good session defaults', ->
config = new Config {}
doorman(config)
config.doorman.session.secret.length.should.equal 24
config = new Config
doorman:
session:
secret: "PI:NAME:<NAME>END_PI!"
doorman(config)
# config.doorman.session.secret.should.equal "PI:NAME:<NAME>END_PI!"
config.doorman.session.cookie.secure.should.equal false
|
[
{
"context": "true\n off: false\n long: 'baz'\n 'name=': 'Andrew'\n list: ['rope', 'jelly']\n X: true\n Given ",
"end": 130,
"score": 0.9994250535964966,
"start": 124,
"tag": "NAME",
"value": "Andrew"
},
{
"context": " long: 'baz'\n 'name=': 'Andrew'\n list: ... | node_modules/opted/test/opted.coffee | Arnaelcio/nodeRed | 0 | describe 'opted', ->
Given -> @options =
a: true
b: 'b'
on: true
off: false
long: 'baz'
'name=': 'Andrew'
list: ['rope', 'jelly']
X: true
Given -> @opted = require '../lib/opted'
context 'singleDash false', ->
When -> @args = @opted(@options)
Then -> @args.should.eql [
'-a',
'-b',
'b',
'--on',
'--no-off',
'--long',
'baz',
'--name=Andrew',
'--list',
'rope',
'--list',
'jelly',
'-X'
]
context 'singleDash true', ->
When -> @args = @opted(@options, true)
Then -> @args.should.eql [
'-a',
'-b',
'b',
'-on',
'-no-off',
'-long',
'baz',
'-name=Andrew',
'-list',
'rope',
'-list',
'jelly',
'-X'
]
| 39381 | describe 'opted', ->
Given -> @options =
a: true
b: 'b'
on: true
off: false
long: 'baz'
'name=': '<NAME>'
list: ['<NAME>', '<NAME>']
X: true
Given -> @opted = require '../lib/opted'
context 'singleDash false', ->
When -> @args = @opted(@options)
Then -> @args.should.eql [
'-a',
'-b',
'b',
'--on',
'--no-off',
'--long',
'baz',
'--name=<NAME>',
'--list',
'rope',
'--list',
'<NAME>',
'-X'
]
context 'singleDash true', ->
When -> @args = @opted(@options, true)
Then -> @args.should.eql [
'-a',
'-b',
'b',
'-on',
'-no-off',
'-long',
'baz',
'-name=<NAME>',
'-list',
'rope',
'-list',
'<NAME>ly',
'-X'
]
| true | describe 'opted', ->
Given -> @options =
a: true
b: 'b'
on: true
off: false
long: 'baz'
'name=': 'PI:NAME:<NAME>END_PI'
list: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
X: true
Given -> @opted = require '../lib/opted'
context 'singleDash false', ->
When -> @args = @opted(@options)
Then -> @args.should.eql [
'-a',
'-b',
'b',
'--on',
'--no-off',
'--long',
'baz',
'--name=PI:NAME:<NAME>END_PI',
'--list',
'rope',
'--list',
'PI:NAME:<NAME>END_PI',
'-X'
]
context 'singleDash true', ->
When -> @args = @opted(@options, true)
Then -> @args.should.eql [
'-a',
'-b',
'b',
'-on',
'-no-off',
'-long',
'baz',
'-name=PI:NAME:<NAME>END_PI',
'-list',
'rope',
'-list',
'PI:NAME:<NAME>END_PIly',
'-X'
]
|
[
{
"context": ", res) ->\n user =\n username: req.body.username\n password: req.body.password\n\n ctx.mo",
"end": 109,
"score": 0.8888164162635803,
"start": 101,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username: req.body.username\n p... | src/modules/controllers/user.controller.coffee | Soundscape/sublime-oauth2 | 0 | module.exports = (ctx) ->
return {
post: (req, res) ->
user =
username: req.body.username
password: req.body.password
ctx.models.user
.create user
.exec (err, user) ->
if err then res.send err
else res.json user
get: (req, res) ->
ctx.models.user
.find()
.exec (err, users) ->
if err then res.send err
else res.json users
}
| 132144 | module.exports = (ctx) ->
return {
post: (req, res) ->
user =
username: req.body.username
password: <PASSWORD>
ctx.models.user
.create user
.exec (err, user) ->
if err then res.send err
else res.json user
get: (req, res) ->
ctx.models.user
.find()
.exec (err, users) ->
if err then res.send err
else res.json users
}
| true | module.exports = (ctx) ->
return {
post: (req, res) ->
user =
username: req.body.username
password: PI:PASSWORD:<PASSWORD>END_PI
ctx.models.user
.create user
.exec (err, user) ->
if err then res.send err
else res.json user
get: (req, res) ->
ctx.models.user
.find()
.exec (err, users) ->
if err then res.send err
else res.json users
}
|
[
{
"context": "/memecaptain.com/\n# API Docs at:\n# github.com/mmb/meme_captain_web/blob/master/doc/api/create_meme_",
"end": 94,
"score": 0.9993071556091309,
"start": 91,
"tag": "USERNAME",
"value": "mmb"
},
{
"context": "<text> / <text> - Meme: Bad joke eel\n# Author:\n# boban... | node_modules/hubot-meme/src/meme.coffee | edwardfernando/jualobot | 0 | # Description:
# Get a meme from http://memecaptain.com/
# API Docs at:
# github.com/mmb/meme_captain_web/blob/master/doc/api/create_meme_image.md
#
# Dependencies:
# None
#
# Commands:
# hubot Y U NO <text> - Meme: Y U NO GUY w/ bottom caption
# hubot I don't always <something> but when i do <text> - Meme: The Most Interesting man in the World
# hubot <text> (SUCCESS|NAILED IT) - Meme: Success kid w/ top caption
# hubot <text> ALL the <things> - Meme: ALL THE THINGS
# hubot <text> TOO DAMN <high> - Meme: THE RENT IS TOO DAMN HIGH guy
# hubot Yo dawg <text> so <text> - Meme: Yo Dawg
# hubot All your <text> are belong to <text> - Meme: All your <text> are belong to <text>
# hubot If <text>, <question> <text>? - Meme: Philosoraptor
# hubot <text>, BITCH PLEASE <text> - Meme: Yao Ming
# hubot <text>, COURAGE <text> - Meme: Courage Wolf
# hubot ONE DOES NOT SIMPLY <text> - Meme: Boromir
# hubot IF YOU <text> GONNA HAVE A BAD TIME - Meme: Ski Instructor
# hubot IF YOU <text> TROLLFACE <text> - Meme: Troll Face
# hubot Aliens guy <text> - Meme: Aliens guy
# hubot Brace yourself <text> - Meme: Ned Stark braces for <text>
# hubot Iron Price <text> - Meme: To get <text>? Pay the iron price!
# hubot Not sure if <something> or <something else> - Meme: Futurama Fry
# hubot <text>, AND IT'S GONE - Meme: Bank Teller
# hubot WHAT IF I TOLD YOU <text> - Meme: Morpheus "What if I told you"
# hubot WTF <text> - Meme: Picard WTF
# hubot IF <text> THAT'D BE GREAT - Meme: Generates Lumberg
# hubot MUCH <text> (SO|VERY) <text> - Meme: Generates Doge
# hubot <text>, <text> EVERYWHERE - Meme: Generates Buzz Lightyear
# hubot khanify <text> - Meme: Has Shatner yell your phrase
# hubot pun | bad joke eel <text>? <text> - Meme: Bad joke eel
# hubot pun | bad joke eel <text> / <text> - Meme: Bad joke eel
# Author:
# bobanj
# cycomachead, Michael Ball <cycomachead@gmail.com>
# peelman, Nick Peelman <nick@peelman.us>
# ericjsilva, Eric Silva
# lukewaite, Luke Waite
# Should hubot 'respond' or 'listen'?
listen_func = if process.env.HUBOT_EXTRA_MEMES then 'hear' else 'respond'
memeGenerator = require "./lib/memecaptain.coffee"
module.exports = (robot) ->
robot[listen_func] /Y U NO (.+)/i, id: 'meme.y-u-no', (msg) ->
memeGenerator msg, 'NryNmg', 'Y U NO', msg.match[1]
robot[listen_func] /aliens guy (.+)/i, id: 'meme.aliens', (msg) ->
memeGenerator msg, 'sO-Hng', '', msg.match[1]
robot[listen_func] /iron price (.+)/i, id: 'meme.iron-price', (msg) ->
memeGenerator msg, 'q06KuA', msg.match[1], 'Pay the iron price'
robot[listen_func] /brace yourself (.+)/i, id: 'meme.brace-yourself', (msg) ->
memeGenerator msg, '_I74XA', 'Brace Yourself', msg.match[1]
robot[listen_func] /(.+) (ALL the .+)/i, id: 'meme.all-the-things', (msg) ->
memeGenerator msg, 'Dv99KQ', msg.match[1], msg.match[2]
robot[listen_func] /(I DON'?T ALWAYS .*) (BUT WHEN I DO,? .*)/i, id: 'meme.interesting-man', (msg) ->
memeGenerator msg, 'V8QnRQ', msg.match[1], msg.match[2]
robot[listen_func] /(.*)(SUCCESS|NAILED IT.*)/i, id: 'meme.success-kid', (msg) ->
memeGenerator msg, 'AbNPRQ', msg.match[1], msg.match[2]
robot[listen_func] /(.*) (\w+\sTOO DAMN .*)/i, id: 'meme.too-damn-high', (msg) ->
memeGenerator msg, 'RCkv6Q', msg.match[1], msg.match[2]
robot[listen_func] /(NOT SURE IF .*) (OR .*)/i, id: 'meme.not-sure-fry', (msg) ->
memeGenerator msg, 'CsNF8w', msg.match[1], msg.match[2]
robot[listen_func] /(YO DAWG .*) (SO .*)/i, id: 'meme.yo-dawg', (msg) ->
memeGenerator msg, 'Yqk_kg', msg.match[1], msg.match[2]
robot[listen_func] /(All your .*) (are belong to .*)/i, id: 'meme.base-are-belong', (msg) ->
memeGenerator msg, '76CAvA', msg.match[1], msg.match[2]
robot[listen_func] /(.*)\s*BITCH PLEASE\s*(.*)/i, id: 'meme.bitch-please', (msg) ->
memeGenerator msg, 'jo9J0Q', msg.match[1], msg.match[2]
robot[listen_func] /(.*)\s*COURAGE\s*(.*)/i, id: 'meme.courage', (msg) ->
memeGenerator msg, 'IMQ72w', msg.match[1], msg.match[2]
robot[listen_func] /ONE DOES NOT SIMPLY (.*)/i, id: 'meme.not-simply', (msg) ->
memeGenerator msg, 'da2i4A', 'ONE DOES NOT SIMPLY', msg.match[1]
robot[listen_func] /(IF YOU .*\s)(.* GONNA HAVE A BAD TIME)/i, id: 'meme.bad-time', (msg) ->
memeGenerator msg, 'lfSVJw', msg.match[1], msg.match[2]
robot[listen_func] /(.*)TROLLFACE(.*)/i, id: 'meme.trollface', (msg) ->
memeGenerator msg, 'mEK-TA', msg.match[1], msg.match[2]
robot[listen_func] /(IF .*), ((ARE|CAN|DO|DOES|HOW|IS|MAY|MIGHT|SHOULD|THEN|WHAT|WHEN|WHERE|WHICH|WHO|WHY|WILL|WON\'T|WOULD)[ \'N].*)/i, id: 'meme.philosoraptor', (msg) ->
memeGenerator msg, '-kFVmQ', msg.match[1], msg.match[2] + (if msg.match[2].search(/\?$/)==(-1) then '?' else '')
robot[listen_func] /(.*)(A+ND IT\'S GONE.*)/i, id: 'meme.its-gone', (msg) ->
memeGenerator msg, 'uIZe3Q', msg.match[1], msg.match[2]
robot[listen_func] /WHAT IF I TOLD YOU (.*)/i, id: 'meme.told-you', (msg) ->
memeGenerator msg, 'fWle1w', 'WHAT IF I TOLD YOU', msg.match[1]
robot[listen_func] /(WHY THE (FUCK|FRIEND)) (.*)/i, id: 'meme.why-the-friend', (msg) ->
memeGenerator msg, 'z8IPtw', msg.match[1], msg.match[3]
robot[listen_func] /WTF (.*)/i, id: 'meme.wtf', (msg) ->
memeGenerator msg, 'z8IPtw', 'WTF', msg.match[1]
robot[listen_func] /(IF .*)(THAT'D BE GREAT)/i, id: 'meme.be-great', (msg) ->
memeGenerator msg, 'q1cQXg', msg.match[1], msg.match[2]
robot[listen_func] /((?:WOW )?(?:SUCH|MUCH) .*) ((SUCH|MUCH|SO|VERY|MANY) .*)/i, id: 'meme.doge', (msg) ->
memeGenerator msg, 'AfO6hw', msg.match[1], msg.match[2]
robot[listen_func] /(.+, .+)(EVERYWHERE.*)/i, id: 'meme.everywhere', (msg) ->
memeGenerator msg, 'yDcY5w', msg.match[1], msg.match[2]
robot[listen_func] /KHANIFY (.+)$/i, id: 'meme.khan', (msg) ->
# Characters we can duplicate to make it KHAAAAAANy
extendyChars = ['a', 'e', 'o', 'u']
khan = ''
# Only duplicate the first vowel (except i) we find
extended = false
for c in msg.match[1]
if c in extendyChars and not extended
khan += c for _ in [1..6]
extended = true
else
khan += c
# If there were no vowels, we need more 'oomph!'
khan += if extended then '!' else '!!!!!'
memeGenerator msg, 'DoLEMA', '', khan
robot[listen_func] /(?:bad joke eel|pun)(.+\?) (.+)/i, id: 'meme.bad-joke-eel', (msg) ->
memeGenerator msg, 'R35VNw', msg.match[1], msg.match[2]
| 25854 | # Description:
# Get a meme from http://memecaptain.com/
# API Docs at:
# github.com/mmb/meme_captain_web/blob/master/doc/api/create_meme_image.md
#
# Dependencies:
# None
#
# Commands:
# hubot Y U NO <text> - Meme: Y U NO GUY w/ bottom caption
# hubot I don't always <something> but when i do <text> - Meme: The Most Interesting man in the World
# hubot <text> (SUCCESS|NAILED IT) - Meme: Success kid w/ top caption
# hubot <text> ALL the <things> - Meme: ALL THE THINGS
# hubot <text> TOO DAMN <high> - Meme: THE RENT IS TOO DAMN HIGH guy
# hubot Yo dawg <text> so <text> - Meme: Yo Dawg
# hubot All your <text> are belong to <text> - Meme: All your <text> are belong to <text>
# hubot If <text>, <question> <text>? - Meme: Philosoraptor
# hubot <text>, BITCH PLEASE <text> - Meme: Yao Ming
# hubot <text>, COURAGE <text> - Meme: Courage Wolf
# hubot ONE DOES NOT SIMPLY <text> - Meme: Boromir
# hubot IF YOU <text> GONNA HAVE A BAD TIME - Meme: Ski Instructor
# hubot IF YOU <text> TROLLFACE <text> - Meme: Troll Face
# hubot Aliens guy <text> - Meme: Aliens guy
# hubot Brace yourself <text> - Meme: Ned Stark braces for <text>
# hubot Iron Price <text> - Meme: To get <text>? Pay the iron price!
# hubot Not sure if <something> or <something else> - Meme: Futurama Fry
# hubot <text>, AND IT'S GONE - Meme: Bank Teller
# hubot WHAT IF I TOLD YOU <text> - Meme: Morpheus "What if I told you"
# hubot WTF <text> - Meme: Picard WTF
# hubot IF <text> THAT'D BE GREAT - Meme: Generates Lumberg
# hubot MUCH <text> (SO|VERY) <text> - Meme: Generates Doge
# hubot <text>, <text> EVERYWHERE - Meme: Generates Buzz Lightyear
# hubot khanify <text> - Meme: Has Shatner yell your phrase
# hubot pun | bad joke eel <text>? <text> - Meme: Bad joke eel
# hubot pun | bad joke eel <text> / <text> - Meme: Bad joke eel
# Author:
# bobanj
# cycomachead, <NAME> <<EMAIL>>
# peelman, <NAME> <<EMAIL>>
# ericjsilva, <NAME>
# lukewaite, <NAME>
# Should hubot 'respond' or 'listen'?
listen_func = if process.env.HUBOT_EXTRA_MEMES then 'hear' else 'respond'
memeGenerator = require "./lib/memecaptain.coffee"
module.exports = (robot) ->
robot[listen_func] /Y U NO (.+)/i, id: 'meme.y-u-no', (msg) ->
memeGenerator msg, 'NryNmg', 'Y U NO', msg.match[1]
robot[listen_func] /aliens guy (.+)/i, id: 'meme.aliens', (msg) ->
memeGenerator msg, 'sO-Hng', '', msg.match[1]
robot[listen_func] /iron price (.+)/i, id: 'meme.iron-price', (msg) ->
memeGenerator msg, 'q06KuA', msg.match[1], 'Pay the iron price'
robot[listen_func] /brace yourself (.+)/i, id: 'meme.brace-yourself', (msg) ->
memeGenerator msg, '_I74XA', 'Brace Yourself', msg.match[1]
robot[listen_func] /(.+) (ALL the .+)/i, id: 'meme.all-the-things', (msg) ->
memeGenerator msg, 'Dv99KQ', msg.match[1], msg.match[2]
robot[listen_func] /(I DON'?T ALWAYS .*) (BUT WHEN I DO,? .*)/i, id: 'meme.interesting-man', (msg) ->
memeGenerator msg, 'V8QnRQ', msg.match[1], msg.match[2]
robot[listen_func] /(.*)(SUCCESS|NAILED IT.*)/i, id: 'meme.success-kid', (msg) ->
memeGenerator msg, 'AbNPRQ', msg.match[1], msg.match[2]
robot[listen_func] /(.*) (\w+\sTOO DAMN .*)/i, id: 'meme.too-damn-high', (msg) ->
memeGenerator msg, 'RCkv6Q', msg.match[1], msg.match[2]
robot[listen_func] /(NOT SURE IF .*) (OR .*)/i, id: 'meme.not-sure-fry', (msg) ->
memeGenerator msg, 'CsNF8w', msg.match[1], msg.match[2]
robot[listen_func] /(YO DAWG .*) (SO .*)/i, id: 'meme.yo-dawg', (msg) ->
memeGenerator msg, 'Yqk_kg', msg.match[1], msg.match[2]
robot[listen_func] /(All your .*) (are belong to .*)/i, id: 'meme.base-are-belong', (msg) ->
memeGenerator msg, '76CAvA', msg.match[1], msg.match[2]
robot[listen_func] /(.*)\s*BITCH PLEASE\s*(.*)/i, id: 'meme.bitch-please', (msg) ->
memeGenerator msg, 'jo9J0Q', msg.match[1], msg.match[2]
robot[listen_func] /(.*)\s*COURAGE\s*(.*)/i, id: 'meme.courage', (msg) ->
memeGenerator msg, 'IMQ72w', msg.match[1], msg.match[2]
robot[listen_func] /ONE DOES NOT SIMPLY (.*)/i, id: 'meme.not-simply', (msg) ->
memeGenerator msg, 'da2i4A', 'ONE DOES NOT SIMPLY', msg.match[1]
robot[listen_func] /(IF YOU .*\s)(.* GONNA HAVE A BAD TIME)/i, id: 'meme.bad-time', (msg) ->
memeGenerator msg, 'lfSVJw', msg.match[1], msg.match[2]
robot[listen_func] /(.*)TROLLFACE(.*)/i, id: 'meme.trollface', (msg) ->
memeGenerator msg, 'mEK-TA', msg.match[1], msg.match[2]
robot[listen_func] /(IF .*), ((ARE|CAN|DO|DOES|HOW|IS|MAY|MIGHT|SHOULD|THEN|WHAT|WHEN|WHERE|WHICH|WHO|WHY|WILL|WON\'T|WOULD)[ \'N].*)/i, id: 'meme.philosoraptor', (msg) ->
memeGenerator msg, '-kFVmQ', msg.match[1], msg.match[2] + (if msg.match[2].search(/\?$/)==(-1) then '?' else '')
robot[listen_func] /(.*)(A+ND IT\'S GONE.*)/i, id: 'meme.its-gone', (msg) ->
memeGenerator msg, 'uIZe3Q', msg.match[1], msg.match[2]
robot[listen_func] /WHAT IF I TOLD YOU (.*)/i, id: 'meme.told-you', (msg) ->
memeGenerator msg, 'fWle1w', 'WHAT IF I TOLD YOU', msg.match[1]
robot[listen_func] /(WHY THE (FUCK|FRIEND)) (.*)/i, id: 'meme.why-the-friend', (msg) ->
memeGenerator msg, 'z8IPtw', msg.match[1], msg.match[3]
robot[listen_func] /WTF (.*)/i, id: 'meme.wtf', (msg) ->
memeGenerator msg, 'z8IPtw', 'WTF', msg.match[1]
robot[listen_func] /(IF .*)(THAT'D BE GREAT)/i, id: 'meme.be-great', (msg) ->
memeGenerator msg, 'q1cQXg', msg.match[1], msg.match[2]
robot[listen_func] /((?:WOW )?(?:SUCH|MUCH) .*) ((SUCH|MUCH|SO|VERY|MANY) .*)/i, id: 'meme.doge', (msg) ->
memeGenerator msg, 'AfO6hw', msg.match[1], msg.match[2]
robot[listen_func] /(.+, .+)(EVERYWHERE.*)/i, id: 'meme.everywhere', (msg) ->
memeGenerator msg, 'yDcY5w', msg.match[1], msg.match[2]
robot[listen_func] /KHANIFY (.+)$/i, id: 'meme.khan', (msg) ->
# Characters we can duplicate to make it KHAAAAAANy
extendyChars = ['a', 'e', 'o', 'u']
khan = ''
# Only duplicate the first vowel (except i) we find
extended = false
for c in msg.match[1]
if c in extendyChars and not extended
khan += c for _ in [1..6]
extended = true
else
khan += c
# If there were no vowels, we need more 'oomph!'
khan += if extended then '!' else '!!!!!'
memeGenerator msg, 'DoLEMA', '', khan
robot[listen_func] /(?:bad joke eel|pun)(.+\?) (.+)/i, id: 'meme.bad-joke-eel', (msg) ->
memeGenerator msg, 'R35VNw', msg.match[1], msg.match[2]
| true | # Description:
# Get a meme from http://memecaptain.com/
# API Docs at:
# github.com/mmb/meme_captain_web/blob/master/doc/api/create_meme_image.md
#
# Dependencies:
# None
#
# Commands:
# hubot Y U NO <text> - Meme: Y U NO GUY w/ bottom caption
# hubot I don't always <something> but when i do <text> - Meme: The Most Interesting man in the World
# hubot <text> (SUCCESS|NAILED IT) - Meme: Success kid w/ top caption
# hubot <text> ALL the <things> - Meme: ALL THE THINGS
# hubot <text> TOO DAMN <high> - Meme: THE RENT IS TOO DAMN HIGH guy
# hubot Yo dawg <text> so <text> - Meme: Yo Dawg
# hubot All your <text> are belong to <text> - Meme: All your <text> are belong to <text>
# hubot If <text>, <question> <text>? - Meme: Philosoraptor
# hubot <text>, BITCH PLEASE <text> - Meme: Yao Ming
# hubot <text>, COURAGE <text> - Meme: Courage Wolf
# hubot ONE DOES NOT SIMPLY <text> - Meme: Boromir
# hubot IF YOU <text> GONNA HAVE A BAD TIME - Meme: Ski Instructor
# hubot IF YOU <text> TROLLFACE <text> - Meme: Troll Face
# hubot Aliens guy <text> - Meme: Aliens guy
# hubot Brace yourself <text> - Meme: Ned Stark braces for <text>
# hubot Iron Price <text> - Meme: To get <text>? Pay the iron price!
# hubot Not sure if <something> or <something else> - Meme: Futurama Fry
# hubot <text>, AND IT'S GONE - Meme: Bank Teller
# hubot WHAT IF I TOLD YOU <text> - Meme: Morpheus "What if I told you"
# hubot WTF <text> - Meme: Picard WTF
# hubot IF <text> THAT'D BE GREAT - Meme: Generates Lumberg
# hubot MUCH <text> (SO|VERY) <text> - Meme: Generates Doge
# hubot <text>, <text> EVERYWHERE - Meme: Generates Buzz Lightyear
# hubot khanify <text> - Meme: Has Shatner yell your phrase
# hubot pun | bad joke eel <text>? <text> - Meme: Bad joke eel
# hubot pun | bad joke eel <text> / <text> - Meme: Bad joke eel
# Author:
# bobanj
# cycomachead, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# peelman, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# ericjsilva, PI:NAME:<NAME>END_PI
# lukewaite, PI:NAME:<NAME>END_PI
# Should hubot 'respond' or 'listen'?
listen_func = if process.env.HUBOT_EXTRA_MEMES then 'hear' else 'respond'
memeGenerator = require "./lib/memecaptain.coffee"
module.exports = (robot) ->
robot[listen_func] /Y U NO (.+)/i, id: 'meme.y-u-no', (msg) ->
memeGenerator msg, 'NryNmg', 'Y U NO', msg.match[1]
robot[listen_func] /aliens guy (.+)/i, id: 'meme.aliens', (msg) ->
memeGenerator msg, 'sO-Hng', '', msg.match[1]
robot[listen_func] /iron price (.+)/i, id: 'meme.iron-price', (msg) ->
memeGenerator msg, 'q06KuA', msg.match[1], 'Pay the iron price'
robot[listen_func] /brace yourself (.+)/i, id: 'meme.brace-yourself', (msg) ->
memeGenerator msg, '_I74XA', 'Brace Yourself', msg.match[1]
robot[listen_func] /(.+) (ALL the .+)/i, id: 'meme.all-the-things', (msg) ->
memeGenerator msg, 'Dv99KQ', msg.match[1], msg.match[2]
robot[listen_func] /(I DON'?T ALWAYS .*) (BUT WHEN I DO,? .*)/i, id: 'meme.interesting-man', (msg) ->
memeGenerator msg, 'V8QnRQ', msg.match[1], msg.match[2]
robot[listen_func] /(.*)(SUCCESS|NAILED IT.*)/i, id: 'meme.success-kid', (msg) ->
memeGenerator msg, 'AbNPRQ', msg.match[1], msg.match[2]
robot[listen_func] /(.*) (\w+\sTOO DAMN .*)/i, id: 'meme.too-damn-high', (msg) ->
memeGenerator msg, 'RCkv6Q', msg.match[1], msg.match[2]
robot[listen_func] /(NOT SURE IF .*) (OR .*)/i, id: 'meme.not-sure-fry', (msg) ->
memeGenerator msg, 'CsNF8w', msg.match[1], msg.match[2]
robot[listen_func] /(YO DAWG .*) (SO .*)/i, id: 'meme.yo-dawg', (msg) ->
memeGenerator msg, 'Yqk_kg', msg.match[1], msg.match[2]
robot[listen_func] /(All your .*) (are belong to .*)/i, id: 'meme.base-are-belong', (msg) ->
memeGenerator msg, '76CAvA', msg.match[1], msg.match[2]
robot[listen_func] /(.*)\s*BITCH PLEASE\s*(.*)/i, id: 'meme.bitch-please', (msg) ->
memeGenerator msg, 'jo9J0Q', msg.match[1], msg.match[2]
robot[listen_func] /(.*)\s*COURAGE\s*(.*)/i, id: 'meme.courage', (msg) ->
memeGenerator msg, 'IMQ72w', msg.match[1], msg.match[2]
robot[listen_func] /ONE DOES NOT SIMPLY (.*)/i, id: 'meme.not-simply', (msg) ->
memeGenerator msg, 'da2i4A', 'ONE DOES NOT SIMPLY', msg.match[1]
robot[listen_func] /(IF YOU .*\s)(.* GONNA HAVE A BAD TIME)/i, id: 'meme.bad-time', (msg) ->
memeGenerator msg, 'lfSVJw', msg.match[1], msg.match[2]
robot[listen_func] /(.*)TROLLFACE(.*)/i, id: 'meme.trollface', (msg) ->
memeGenerator msg, 'mEK-TA', msg.match[1], msg.match[2]
robot[listen_func] /(IF .*), ((ARE|CAN|DO|DOES|HOW|IS|MAY|MIGHT|SHOULD|THEN|WHAT|WHEN|WHERE|WHICH|WHO|WHY|WILL|WON\'T|WOULD)[ \'N].*)/i, id: 'meme.philosoraptor', (msg) ->
memeGenerator msg, '-kFVmQ', msg.match[1], msg.match[2] + (if msg.match[2].search(/\?$/)==(-1) then '?' else '')
robot[listen_func] /(.*)(A+ND IT\'S GONE.*)/i, id: 'meme.its-gone', (msg) ->
memeGenerator msg, 'uIZe3Q', msg.match[1], msg.match[2]
robot[listen_func] /WHAT IF I TOLD YOU (.*)/i, id: 'meme.told-you', (msg) ->
memeGenerator msg, 'fWle1w', 'WHAT IF I TOLD YOU', msg.match[1]
robot[listen_func] /(WHY THE (FUCK|FRIEND)) (.*)/i, id: 'meme.why-the-friend', (msg) ->
memeGenerator msg, 'z8IPtw', msg.match[1], msg.match[3]
robot[listen_func] /WTF (.*)/i, id: 'meme.wtf', (msg) ->
memeGenerator msg, 'z8IPtw', 'WTF', msg.match[1]
robot[listen_func] /(IF .*)(THAT'D BE GREAT)/i, id: 'meme.be-great', (msg) ->
memeGenerator msg, 'q1cQXg', msg.match[1], msg.match[2]
robot[listen_func] /((?:WOW )?(?:SUCH|MUCH) .*) ((SUCH|MUCH|SO|VERY|MANY) .*)/i, id: 'meme.doge', (msg) ->
memeGenerator msg, 'AfO6hw', msg.match[1], msg.match[2]
robot[listen_func] /(.+, .+)(EVERYWHERE.*)/i, id: 'meme.everywhere', (msg) ->
memeGenerator msg, 'yDcY5w', msg.match[1], msg.match[2]
robot[listen_func] /KHANIFY (.+)$/i, id: 'meme.khan', (msg) ->
# Characters we can duplicate to make it KHAAAAAANy
extendyChars = ['a', 'e', 'o', 'u']
khan = ''
# Only duplicate the first vowel (except i) we find
extended = false
for c in msg.match[1]
if c in extendyChars and not extended
khan += c for _ in [1..6]
extended = true
else
khan += c
# If there were no vowels, we need more 'oomph!'
khan += if extended then '!' else '!!!!!'
memeGenerator msg, 'DoLEMA', '', khan
robot[listen_func] /(?:bad joke eel|pun)(.+\?) (.+)/i, id: 'meme.bad-joke-eel', (msg) ->
memeGenerator msg, 'R35VNw', msg.match[1], msg.match[2]
|
[
{
"context": "e: \"string\"\n password:\n description: \"Password of the backup server\"\n type: \"string\"\n ",
"end": 527,
"score": 0.8523281216621399,
"start": 519,
"tag": "PASSWORD",
"value": "Password"
},
{
"context": "e: \"string\"\n password:\n ... | device-config-schema.coffee | bertreb/pimatic-save | 1 | module.exports = {
title: "pimatic-save device config schemas"
SaveFtpDevice: {
title: "SaveFtp config options"
type: "object"
extensions: ["xLink", "xAttributeOptions"]
properties:
host:
description: "Url to the backup server"
type: "string"
port:
description: "Port of the backup server"
type: "number"
default: 21
username:
description: "Username of the backup server"
type: "string"
password:
description: "Password of the backup server"
type: "string"
path:
description: "Remote path of the backup server where the config should be saved"
type: "string"
}
SaveMailDevice: {
title: "SaveMail config options"
type: "object"
extensions: ["xLink", "xAttributeOptions"]
properties:
address:
description: "Your email address"
type: "string"
password:
description: "Your email password"
type: "string"
server:
description: "smtp server address"
type: "string"
port:
description: "smtp port used, default 587"
type: "number"
default: 587
to:
description: "The email address the mail is sent to. If empty the from email is used"
type: "string"
subject:
description: "The optional subject for the save mail"
type: "string"
text:
description: "The optional text for the save mail"
type: "string"
}
}
| 19178 | module.exports = {
title: "pimatic-save device config schemas"
SaveFtpDevice: {
title: "SaveFtp config options"
type: "object"
extensions: ["xLink", "xAttributeOptions"]
properties:
host:
description: "Url to the backup server"
type: "string"
port:
description: "Port of the backup server"
type: "number"
default: 21
username:
description: "Username of the backup server"
type: "string"
password:
description: "<PASSWORD> of the backup server"
type: "string"
path:
description: "Remote path of the backup server where the config should be saved"
type: "string"
}
SaveMailDevice: {
title: "SaveMail config options"
type: "object"
extensions: ["xLink", "xAttributeOptions"]
properties:
address:
description: "Your email address"
type: "string"
password:
description: "<PASSWORD>"
type: "string"
server:
description: "smtp server address"
type: "string"
port:
description: "smtp port used, default 587"
type: "number"
default: 587
to:
description: "The email address the mail is sent to. If empty the from email is used"
type: "string"
subject:
description: "The optional subject for the save mail"
type: "string"
text:
description: "The optional text for the save mail"
type: "string"
}
}
| true | module.exports = {
title: "pimatic-save device config schemas"
SaveFtpDevice: {
title: "SaveFtp config options"
type: "object"
extensions: ["xLink", "xAttributeOptions"]
properties:
host:
description: "Url to the backup server"
type: "string"
port:
description: "Port of the backup server"
type: "number"
default: 21
username:
description: "Username of the backup server"
type: "string"
password:
description: "PI:PASSWORD:<PASSWORD>END_PI of the backup server"
type: "string"
path:
description: "Remote path of the backup server where the config should be saved"
type: "string"
}
SaveMailDevice: {
title: "SaveMail config options"
type: "object"
extensions: ["xLink", "xAttributeOptions"]
properties:
address:
description: "Your email address"
type: "string"
password:
description: "PI:PASSWORD:<PASSWORD>END_PI"
type: "string"
server:
description: "smtp server address"
type: "string"
port:
description: "smtp port used, default 587"
type: "number"
default: 587
to:
description: "The email address the mail is sent to. If empty the from email is used"
type: "string"
subject:
description: "The optional subject for the save mail"
type: "string"
text:
description: "The optional text for the save mail"
type: "string"
}
}
|
[
{
"context": "uter.get '/', ->\n @res.html app.bind -> h1 'Hello Coffeecup'\napp.start 3000, -> console.log 'Listening.. on p",
"end": 291,
"score": 0.5912797451019287,
"start": 282,
"tag": "NAME",
"value": "Coffeecup"
}
] | node_modules/ethercalc/node_modules/zappajs/node_modules/coffeecup/examples/flatiron/single/app.coffee | kurakuradave/Etherboard | 21 | flatiron = require 'flatiron'
creamer = require 'creamer'
app = flatiron.app
app.use flatiron.plugins.http
app.use creamer, layout: ->
doctype 5
html ->
head ->
title 'Single Page'
body ->
content()
app.router.get '/', ->
@res.html app.bind -> h1 'Hello Coffeecup'
app.start 3000, -> console.log 'Listening.. on port 3000'
| 33613 | flatiron = require 'flatiron'
creamer = require 'creamer'
app = flatiron.app
app.use flatiron.plugins.http
app.use creamer, layout: ->
doctype 5
html ->
head ->
title 'Single Page'
body ->
content()
app.router.get '/', ->
@res.html app.bind -> h1 'Hello <NAME>'
app.start 3000, -> console.log 'Listening.. on port 3000'
| true | flatiron = require 'flatiron'
creamer = require 'creamer'
app = flatiron.app
app.use flatiron.plugins.http
app.use creamer, layout: ->
doctype 5
html ->
head ->
title 'Single Page'
body ->
content()
app.router.get '/', ->
@res.html app.bind -> h1 'Hello PI:NAME:<NAME>END_PI'
app.start 3000, -> console.log 'Listening.. on port 3000'
|
[
{
"context": "en_idx > -1\n # I Don't Wanna Care - feat. Jim => I Don't Wanna Care\n str = str.substri",
"end": 385,
"score": 0.9788892269134521,
"start": 382,
"tag": "NAME",
"value": "Jim"
},
{
"context": "en_sq_bracket_idx > -1\n # Pharaohs [feat. Roses G... | app/scripts/services/track_cleanup.coffee | moneypenny/seasonal-playlister | 0 | 'use strict'
###*
# @ngdoc service
# @name seasonSoundApp.TrackCleanupSvc
# @description
# # TrackCleanupSvc
# Service in the seasonSoundApp.
###
angular.module('seasonSoundApp')
.service 'TrackCleanupSvc', ->
class TrackCleanup
strip_after_hyphen: (str) ->
hyphen_idx = str.indexOf(' - ')
if hyphen_idx > -1
# I Don't Wanna Care - feat. Jim => I Don't Wanna Care
str = str.substring(0, hyphen_idx)
str
strip_square_brackets: (str) ->
open_sq_bracket_idx = str.indexOf('[')
if open_sq_bracket_idx > -1
# Pharaohs [feat. Roses Gabor] whee => Pharoahs whee
close_sq_bracket_idx = str.indexOf(']', open_sq_bracket_idx)
if close_sq_bracket_idx > -1
str = str.substring(0, open_sq_bracket_idx) +
str.substring(close_sq_bracket_idx + 1)
else
str = str.substring(0, open_sq_bracket_idx)
str
strip_parentheses: (str) ->
open_paren_idx = str.indexOf('(')
if open_paren_idx > -1
# Pharaohs (feat. Roses Gabor) whee => Pharoahs whee
close_paren_idx = str.indexOf(')', open_paren_idx)
if close_paren_idx > -1
str = str.substring(0, open_paren_idx) +
str.substring(close_paren_idx + 1)
else
str = str.substring(0, open_paren_idx)
str
clean_track_name: (track_name) =>
clean_name = @strip_after_hyphen(track_name)
clean_name = @strip_square_brackets(clean_name)
clean_name = @strip_parentheses(clean_name)
clean_name.trim()
new TrackCleanup()
| 75369 | 'use strict'
###*
# @ngdoc service
# @name seasonSoundApp.TrackCleanupSvc
# @description
# # TrackCleanupSvc
# Service in the seasonSoundApp.
###
angular.module('seasonSoundApp')
.service 'TrackCleanupSvc', ->
class TrackCleanup
strip_after_hyphen: (str) ->
hyphen_idx = str.indexOf(' - ')
if hyphen_idx > -1
# I Don't Wanna Care - feat. <NAME> => I Don't Wanna Care
str = str.substring(0, hyphen_idx)
str
strip_square_brackets: (str) ->
open_sq_bracket_idx = str.indexOf('[')
if open_sq_bracket_idx > -1
# Pharaohs [feat. <NAME>] whee => Pharoahs whee
close_sq_bracket_idx = str.indexOf(']', open_sq_bracket_idx)
if close_sq_bracket_idx > -1
str = str.substring(0, open_sq_bracket_idx) +
str.substring(close_sq_bracket_idx + 1)
else
str = str.substring(0, open_sq_bracket_idx)
str
strip_parentheses: (str) ->
open_paren_idx = str.indexOf('(')
if open_paren_idx > -1
# Pharaohs (feat. <NAME>) whee => <NAME>aroah<NAME> whee
close_paren_idx = str.indexOf(')', open_paren_idx)
if close_paren_idx > -1
str = str.substring(0, open_paren_idx) +
str.substring(close_paren_idx + 1)
else
str = str.substring(0, open_paren_idx)
str
clean_track_name: (track_name) =>
clean_name = @strip_after_hyphen(track_name)
clean_name = @strip_square_brackets(clean_name)
clean_name = @strip_parentheses(clean_name)
clean_name.trim()
new TrackCleanup()
| true | 'use strict'
###*
# @ngdoc service
# @name seasonSoundApp.TrackCleanupSvc
# @description
# # TrackCleanupSvc
# Service in the seasonSoundApp.
###
angular.module('seasonSoundApp')
.service 'TrackCleanupSvc', ->
class TrackCleanup
strip_after_hyphen: (str) ->
hyphen_idx = str.indexOf(' - ')
if hyphen_idx > -1
# I Don't Wanna Care - feat. PI:NAME:<NAME>END_PI => I Don't Wanna Care
str = str.substring(0, hyphen_idx)
str
strip_square_brackets: (str) ->
open_sq_bracket_idx = str.indexOf('[')
if open_sq_bracket_idx > -1
# Pharaohs [feat. PI:NAME:<NAME>END_PI] whee => Pharoahs whee
close_sq_bracket_idx = str.indexOf(']', open_sq_bracket_idx)
if close_sq_bracket_idx > -1
str = str.substring(0, open_sq_bracket_idx) +
str.substring(close_sq_bracket_idx + 1)
else
str = str.substring(0, open_sq_bracket_idx)
str
strip_parentheses: (str) ->
open_paren_idx = str.indexOf('(')
if open_paren_idx > -1
# Pharaohs (feat. PI:NAME:<NAME>END_PI) whee => PI:NAME:<NAME>END_PIaroahPI:NAME:<NAME>END_PI whee
close_paren_idx = str.indexOf(')', open_paren_idx)
if close_paren_idx > -1
str = str.substring(0, open_paren_idx) +
str.substring(close_paren_idx + 1)
else
str = str.substring(0, open_paren_idx)
str
clean_track_name: (track_name) =>
clean_name = @strip_after_hyphen(track_name)
clean_name = @strip_square_brackets(clean_name)
clean_name = @strip_parentheses(clean_name)
clean_name.trim()
new TrackCleanup()
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9990048408508301,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-http-full-response.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# This test requires the program 'ab'
runAb = (opts, callback) ->
command = "ab " + opts + " http://127.0.0.1:" + common.PORT + "/"
exec command, (err, stdout, stderr) ->
if err
if /ab|apr/i.test(stderr)
console.log "problem spawning ab - skipping test.\n" + stderr
process.reallyExit 0
process.exit()
return
m = /Document Length:\s*(\d+) bytes/i.exec(stdout)
documentLength = parseInt(m[1])
m = /Complete requests:\s*(\d+)/i.exec(stdout)
completeRequests = parseInt(m[1])
m = /HTML transferred:\s*(\d+) bytes/i.exec(stdout)
htmlTransfered = parseInt(m[1])
assert.equal bodyLength, documentLength
assert.equal completeRequests * documentLength, htmlTransfered
runs++
callback() if callback
return
return
common = require("../common")
assert = require("assert")
http = require("http")
exec = require("child_process").exec
bodyLength = 12345
body = ""
i = 0
while i < bodyLength
body += "c"
i++
server = http.createServer((req, res) ->
res.writeHead 200,
"Content-Length": bodyLength
"Content-Type": "text/plain"
res.end body
return
)
runs = 0
server.listen common.PORT, ->
runAb "-c 1 -n 10", ->
console.log "-c 1 -n 10 okay"
runAb "-c 1 -n 100", ->
console.log "-c 1 -n 100 okay"
runAb "-c 1 -n 1000", ->
console.log "-c 1 -n 1000 okay"
server.close()
return
return
return
return
process.on "exit", ->
assert.equal 3, runs
return
| 64119 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# This test requires the program 'ab'
runAb = (opts, callback) ->
command = "ab " + opts + " http://127.0.0.1:" + common.PORT + "/"
exec command, (err, stdout, stderr) ->
if err
if /ab|apr/i.test(stderr)
console.log "problem spawning ab - skipping test.\n" + stderr
process.reallyExit 0
process.exit()
return
m = /Document Length:\s*(\d+) bytes/i.exec(stdout)
documentLength = parseInt(m[1])
m = /Complete requests:\s*(\d+)/i.exec(stdout)
completeRequests = parseInt(m[1])
m = /HTML transferred:\s*(\d+) bytes/i.exec(stdout)
htmlTransfered = parseInt(m[1])
assert.equal bodyLength, documentLength
assert.equal completeRequests * documentLength, htmlTransfered
runs++
callback() if callback
return
return
common = require("../common")
assert = require("assert")
http = require("http")
exec = require("child_process").exec
bodyLength = 12345
body = ""
i = 0
while i < bodyLength
body += "c"
i++
server = http.createServer((req, res) ->
res.writeHead 200,
"Content-Length": bodyLength
"Content-Type": "text/plain"
res.end body
return
)
runs = 0
server.listen common.PORT, ->
runAb "-c 1 -n 10", ->
console.log "-c 1 -n 10 okay"
runAb "-c 1 -n 100", ->
console.log "-c 1 -n 100 okay"
runAb "-c 1 -n 1000", ->
console.log "-c 1 -n 1000 okay"
server.close()
return
return
return
return
process.on "exit", ->
assert.equal 3, runs
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# This test requires the program 'ab'
runAb = (opts, callback) ->
command = "ab " + opts + " http://127.0.0.1:" + common.PORT + "/"
exec command, (err, stdout, stderr) ->
if err
if /ab|apr/i.test(stderr)
console.log "problem spawning ab - skipping test.\n" + stderr
process.reallyExit 0
process.exit()
return
m = /Document Length:\s*(\d+) bytes/i.exec(stdout)
documentLength = parseInt(m[1])
m = /Complete requests:\s*(\d+)/i.exec(stdout)
completeRequests = parseInt(m[1])
m = /HTML transferred:\s*(\d+) bytes/i.exec(stdout)
htmlTransfered = parseInt(m[1])
assert.equal bodyLength, documentLength
assert.equal completeRequests * documentLength, htmlTransfered
runs++
callback() if callback
return
return
common = require("../common")
assert = require("assert")
http = require("http")
exec = require("child_process").exec
bodyLength = 12345
body = ""
i = 0
while i < bodyLength
body += "c"
i++
server = http.createServer((req, res) ->
res.writeHead 200,
"Content-Length": bodyLength
"Content-Type": "text/plain"
res.end body
return
)
runs = 0
server.listen common.PORT, ->
runAb "-c 1 -n 10", ->
console.log "-c 1 -n 10 okay"
runAb "-c 1 -n 100", ->
console.log "-c 1 -n 100 okay"
runAb "-c 1 -n 1000", ->
console.log "-c 1 -n 1000 okay"
server.close()
return
return
return
return
process.on "exit", ->
assert.equal 3, runs
return
|
[
{
"context": "# Copyright (c) 2015 naymspace software (Dennis Nissen)\n#\n# Licensed under the Apache License, Version 2",
"end": 54,
"score": 0.9998693466186523,
"start": 41,
"tag": "NAME",
"value": "Dennis Nissen"
}
] | src/app/components/alert/alert.controller.coffee | ogumi/client | 0 | # Copyright (c) 2015 naymspace software (Dennis Nissen)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
angular.module "webClient"
.controller "AlertCtrl",($scope, $rootScope, $window) ->
$scope.alerts = []
$scope.addAlert = (obj) ->
$scope.alerts.push obj
$scope.closeAlert = (index) ->
$scope.alerts.splice index, 1
$scope.empty = () ->
$scope.alerts = []
$rootScope.$on 'showAlert', (ev, obj) ->
ev.preventDefault()
ev.stopPropagation()
$scope.addAlert obj
$window.scrollTo 0, 0
return false
return
| 45240 | # Copyright (c) 2015 naymspace software (<NAME>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
angular.module "webClient"
.controller "AlertCtrl",($scope, $rootScope, $window) ->
$scope.alerts = []
$scope.addAlert = (obj) ->
$scope.alerts.push obj
$scope.closeAlert = (index) ->
$scope.alerts.splice index, 1
$scope.empty = () ->
$scope.alerts = []
$rootScope.$on 'showAlert', (ev, obj) ->
ev.preventDefault()
ev.stopPropagation()
$scope.addAlert obj
$window.scrollTo 0, 0
return false
return
| true | # Copyright (c) 2015 naymspace software (PI:NAME:<NAME>END_PI)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
angular.module "webClient"
.controller "AlertCtrl",($scope, $rootScope, $window) ->
$scope.alerts = []
$scope.addAlert = (obj) ->
$scope.alerts.push obj
$scope.closeAlert = (index) ->
$scope.alerts.splice index, 1
$scope.empty = () ->
$scope.alerts = []
$rootScope.$on 'showAlert', (ev, obj) ->
ev.preventDefault()
ev.stopPropagation()
$scope.addAlert obj
$window.scrollTo 0, 0
return false
return
|
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998899698257446,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright... | public/taiga-front/app/coffee/modules/admin/roles.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/admin/memberships.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
bindMethods = @.taiga.bindMethods
module = angular.module("taigaAdmin")
#############################################################################
## Project Roles Controller
#############################################################################
class RolesController extends mixOf(taiga.Controller, taiga.PageMixin, taiga.FiltersMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$appTitle"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @appTitle) ->
bindMethods(@)
@scope.sectionName = "Permissions" #i18n
@scope.project = {}
@scope.anyComputableRole = true
promise = @.loadInitialData()
promise.then () =>
@appTitle.set("Roles - " + @scope.project.name)
promise.then null, @.onInitialDataError.bind(@)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
@scope.anyComputableRole = _.some(_.map(project.roles, (point) -> point.computable))
return project
loadRoles: ->
return @rs.roles.list(@scope.projectId).then (data) =>
@scope.roles = data
@scope.role = @scope.roles[0]
return data
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then(=> @.loadProject())
.then(=> @.loadUsersAndRoles())
.then(=> @.loadRoles())
setRole: (role) ->
@scope.role = role
@scope.$broadcast("role:changed", @scope.role)
delete: ->
# TODO: i18n
title = "Delete Role" # TODO: i18n
subtitle = @scope.role.name
replacement = "All the users with this role will be moved to" # TODO: i18n
warning = "<strong>Be careful, all role estimations will be removed</strong>" # TODO: i18n
choices = {}
for role in @scope.roles
if role.id != @scope.role.id
choices[role.id] = role.name
if _.keys(choices).length == 0
return @confirm.error("You can't delete all values.") # TODO: i18n
return @confirm.askChoice(title, subtitle, choices, replacement, warning).then (response) =>
promise = @repo.remove(@scope.role, {moveTo: response.selected})
promise.then =>
@.loadProject()
@.loadRoles().finally ->
response.finish()
promise.then null, =>
@confirm.notify('error')
setComputable: debounce 2000, ->
onSuccess = =>
@confirm.notify("success")
@.loadProject()
onError = =>
@confirm.notify("error")
@scope.role.revert()
@repo.save(@scope.role).then onSuccess, onError
module.controller("RolesController", RolesController)
EditRoleDirective = ($repo, $confirm) ->
link = ($scope, $el, $attrs) ->
toggleView = ->
$el.find('.total').toggle()
$el.find('.edit-role').toggle()
submit = () ->
$scope.role.name = $el.find("input").val()
promise = $repo.save($scope.role)
promise.then ->
$confirm.notify("success")
promise.then null, (data) ->
$confirm.notify("error")
toggleView()
$el.on "click", "a.icon-edit", ->
toggleView()
$el.find("input").focus()
$el.on "click", "a.save", submit
$el.on "keyup", "input", (event) ->
if event.keyCode == 13 # Enter key
submit()
else if event.keyCode == 27 # ESC key
toggleView()
$scope.$on "role:changed", ->
if $el.find('.edit-role').is(":visible")
toggleView()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgEditRole", ["$tgRepo", "$tgConfirm", EditRoleDirective])
RolesDirective = ->
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgRoles", RolesDirective)
NewRoleDirective = ($tgrepo, $confirm) ->
DEFAULT_PERMISSIONS = ["view_project", "view_milestones", "view_us", "view_tasks", "view_issues"]
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
$scope.$on "$destroy", ->
$el.off()
$el.on "click", "a.add-button", (event) ->
event.preventDefault()
$el.find(".new").removeClass("hidden")
$el.find(".new").focus()
$el.find(".add-button").hide()
$el.on "keyup", ".new", (event) ->
event.preventDefault()
if event.keyCode == 13 # Enter key
target = angular.element(event.currentTarget)
newRole = {
project: $scope.projectId
name: target.val()
permissions: DEFAULT_PERMISSIONS
order: _.max($scope.roles, (r) -> r.order).order + 1
computable: false
}
$el.find(".new").addClass("hidden")
$el.find(".new").val('')
onSuccess = (role) ->
$scope.roles.push(role)
$ctrl.setRole(role)
$el.find(".add-button").show()
$ctrl.loadProject()
onError = ->
$confirm.notify("error")
$tgrepo.create("roles", newRole).then(onSuccess, onError)
else if event.keyCode == 27 # ESC key
target = angular.element(event.currentTarget)
$el.find(".new").addClass("hidden")
$el.find(".new").val('')
$el.find(".add-button").show()
return {link:link}
module.directive("tgNewRole", ["$tgRepo", "$tgConfirm", NewRoleDirective])
# Use category-config.scss styles
RolePermissionsDirective = ($rootscope, $repo, $confirm) ->
resumeTemplate = _.template("""
<div class="resume-title"><%- category.name %></div>
<div class="summary-role">
<div class="count"><%- category.activePermissions %>/<%- category.permissions.length %></div>
<% _.each(category.permissions, function(permission) { %>
<div class="role-summary-single <% if(permission.active) { %>active<% } %>"
title="<%- permission.description %>"></div>
<% }) %>
</div>
<div class="icon icon-arrow-bottom"></div>
""")
categoryTemplate = _.template("""
<div class="category-config" data-id="<%- index %>">
<div class="resume">
</div>
<div class="category-items">
<div class="items-container">
<% _.each(category.permissions, function(permission) { %>
<div class="category-item" data-id="<%- permission.key %>">
<span><%- permission.description %></span>
<div class="check">
<input type="checkbox" <% if(permission.active) { %>checked="checked"<% } %>/>
<div></div>
<span class="check-text check-yes">Yes</span>
<span class="check-text check-no">No</span>
</div>
</div>
<% }) %>
</div>
</div>
</div>
""")
baseTemplate = _.template("""
<div class="category-config-list"></div>
""")
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
generateCategoriesFromRole = (role) ->
setActivePermissions = (permissions) ->
return _.map(permissions, (x) -> _.extend({}, x, {active: x["key"] in role.permissions}))
setActivePermissionsPerCategory = (category) ->
return _.map(category, (x) ->
_.extend({}, x, {
activePermissions: _.filter(x["permissions"], "active").length
})
)
categories = []
milestonePermissions = [
{ key: "view_milestones", description: "View sprints" }
{ key: "add_milestone", description: "Add sprint" }
{ key: "modify_milestone", description: "Modify sprint" }
{ key: "delete_milestone", description: "Delete sprint" }
]
categories.push({ name: "Sprints", permissions: setActivePermissions(milestonePermissions) })
userStoryPermissions = [
{ key: "view_us", description: "View user story" }
{ key: "add_us", description: "Add user story" }
{ key: "modify_us", description: "Modify user story" }
{ key: "delete_us", description: "Delete user story" }
]
categories.push({ name: "User Stories", permissions: setActivePermissions(userStoryPermissions) })
taskPermissions = [
{ key: "view_tasks", description: "View tasks" }
{ key: "add_task", description: "Add task" }
{ key: "modify_task", description: "Modify task" }
{ key: "delete_task", description: "Delete task" }
]
categories.push({ name: "Tasks", permissions: setActivePermissions(taskPermissions) })
issuePermissions = [
{ key: "view_issues", description: "View issues" }
{ key: "add_issue", description: "Add issue" }
{ key: "modify_issue", description: "Modify issue" }
{ key: "delete_issue", description: "Delete issue" }
]
categories.push({ name: "Issues", permissions: setActivePermissions(issuePermissions) })
wikiPermissions = [
{ key: "view_wiki_pages", description: "View wiki pages" }
{ key: "add_wiki_page", description: "Add wiki page" }
{ key: "modify_wiki_page", description: "Modify wiki page" }
{ key: "delete_wiki_page", description: "Delete wiki page" }
{ key: "view_wiki_links", description: "View wiki links" }
{ key: "add_wiki_link", description: "Add wiki link" }
{ key: "delete_wiki_link", description: "Delete wiki link" }
]
categories.push({ name: "Wiki", permissions: setActivePermissions(wikiPermissions) })
return setActivePermissionsPerCategory(categories)
renderResume = (element, category) ->
element.find(".resume").html(resumeTemplate({category: category}))
renderCategory = (category, index) ->
html = categoryTemplate({category: category, index: index})
html = angular.element(html)
renderResume(html, category)
return html
renderPermissions = () ->
$el.off()
html = baseTemplate()
_.each generateCategoriesFromRole($scope.role), (category, index) ->
html = angular.element(html).append(renderCategory(category, index))
$el.html(html)
$el.on "click", ".resume", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
target.next().toggleClass("open")
$el.on "change", ".category-item input", (event) ->
getActivePermissions = ->
activePermissions = _.filter($el.find(".category-item input"), (t) ->
angular.element(t).is(":checked")
)
activePermissions = _.sortBy(_.map(activePermissions, (t) ->
permission = angular.element(t).parents(".category-item").data("id")
))
activePermissions.push("view_project")
return activePermissions
target = angular.element(event.currentTarget)
$scope.role.permissions = getActivePermissions()
onSuccess = (role) ->
categories = generateCategoriesFromRole(role)
categoryId = target.parents(".category-config").data("id")
renderResume(target.parents(".category-config"), categories[categoryId])
$rootscope.$broadcast("projects:reload")
$confirm.notify("success")
$ctrl.loadProject()
onError = ->
$confirm.notify("error")
target.prop "checked", !target.prop("checked")
$scope.role.permissions = getActivePermissions()
$repo.save($scope.role).then onSuccess, onError
$scope.$on "$destroy", ->
$el.off()
$scope.$on "role:changed", ->
renderPermissions()
bindOnce($scope, $attrs.ngModel, renderPermissions)
return {link:link}
module.directive("tgRolePermissions", ["$rootScope", "$tgRepo", "$tgConfirm", RolePermissionsDirective])
| 131976 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/admin/memberships.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
bindMethods = @.taiga.bindMethods
module = angular.module("taigaAdmin")
#############################################################################
## Project Roles Controller
#############################################################################
class RolesController extends mixOf(taiga.Controller, taiga.PageMixin, taiga.FiltersMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$appTitle"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @appTitle) ->
bindMethods(@)
@scope.sectionName = "Permissions" #i18n
@scope.project = {}
@scope.anyComputableRole = true
promise = @.loadInitialData()
promise.then () =>
@appTitle.set("Roles - " + @scope.project.name)
promise.then null, @.onInitialDataError.bind(@)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
@scope.anyComputableRole = _.some(_.map(project.roles, (point) -> point.computable))
return project
loadRoles: ->
return @rs.roles.list(@scope.projectId).then (data) =>
@scope.roles = data
@scope.role = @scope.roles[0]
return data
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then(=> @.loadProject())
.then(=> @.loadUsersAndRoles())
.then(=> @.loadRoles())
setRole: (role) ->
@scope.role = role
@scope.$broadcast("role:changed", @scope.role)
delete: ->
# TODO: i18n
title = "Delete Role" # TODO: i18n
subtitle = @scope.role.name
replacement = "All the users with this role will be moved to" # TODO: i18n
warning = "<strong>Be careful, all role estimations will be removed</strong>" # TODO: i18n
choices = {}
for role in @scope.roles
if role.id != @scope.role.id
choices[role.id] = role.name
if _.keys(choices).length == 0
return @confirm.error("You can't delete all values.") # TODO: i18n
return @confirm.askChoice(title, subtitle, choices, replacement, warning).then (response) =>
promise = @repo.remove(@scope.role, {moveTo: response.selected})
promise.then =>
@.loadProject()
@.loadRoles().finally ->
response.finish()
promise.then null, =>
@confirm.notify('error')
setComputable: debounce 2000, ->
onSuccess = =>
@confirm.notify("success")
@.loadProject()
onError = =>
@confirm.notify("error")
@scope.role.revert()
@repo.save(@scope.role).then onSuccess, onError
module.controller("RolesController", RolesController)
EditRoleDirective = ($repo, $confirm) ->
link = ($scope, $el, $attrs) ->
toggleView = ->
$el.find('.total').toggle()
$el.find('.edit-role').toggle()
submit = () ->
$scope.role.name = $el.find("input").val()
promise = $repo.save($scope.role)
promise.then ->
$confirm.notify("success")
promise.then null, (data) ->
$confirm.notify("error")
toggleView()
$el.on "click", "a.icon-edit", ->
toggleView()
$el.find("input").focus()
$el.on "click", "a.save", submit
$el.on "keyup", "input", (event) ->
if event.keyCode == 13 # Enter key
submit()
else if event.keyCode == 27 # ESC key
toggleView()
$scope.$on "role:changed", ->
if $el.find('.edit-role').is(":visible")
toggleView()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgEditRole", ["$tgRepo", "$tgConfirm", EditRoleDirective])
RolesDirective = ->
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgRoles", RolesDirective)
NewRoleDirective = ($tgrepo, $confirm) ->
DEFAULT_PERMISSIONS = ["view_project", "view_milestones", "view_us", "view_tasks", "view_issues"]
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
$scope.$on "$destroy", ->
$el.off()
$el.on "click", "a.add-button", (event) ->
event.preventDefault()
$el.find(".new").removeClass("hidden")
$el.find(".new").focus()
$el.find(".add-button").hide()
$el.on "keyup", ".new", (event) ->
event.preventDefault()
if event.keyCode == 13 # Enter key
target = angular.element(event.currentTarget)
newRole = {
project: $scope.projectId
name: target.val()
permissions: DEFAULT_PERMISSIONS
order: _.max($scope.roles, (r) -> r.order).order + 1
computable: false
}
$el.find(".new").addClass("hidden")
$el.find(".new").val('')
onSuccess = (role) ->
$scope.roles.push(role)
$ctrl.setRole(role)
$el.find(".add-button").show()
$ctrl.loadProject()
onError = ->
$confirm.notify("error")
$tgrepo.create("roles", newRole).then(onSuccess, onError)
else if event.keyCode == 27 # ESC key
target = angular.element(event.currentTarget)
$el.find(".new").addClass("hidden")
$el.find(".new").val('')
$el.find(".add-button").show()
return {link:link}
module.directive("tgNewRole", ["$tgRepo", "$tgConfirm", NewRoleDirective])
# Use category-config.scss styles
RolePermissionsDirective = ($rootscope, $repo, $confirm) ->
resumeTemplate = _.template("""
<div class="resume-title"><%- category.name %></div>
<div class="summary-role">
<div class="count"><%- category.activePermissions %>/<%- category.permissions.length %></div>
<% _.each(category.permissions, function(permission) { %>
<div class="role-summary-single <% if(permission.active) { %>active<% } %>"
title="<%- permission.description %>"></div>
<% }) %>
</div>
<div class="icon icon-arrow-bottom"></div>
""")
categoryTemplate = _.template("""
<div class="category-config" data-id="<%- index %>">
<div class="resume">
</div>
<div class="category-items">
<div class="items-container">
<% _.each(category.permissions, function(permission) { %>
<div class="category-item" data-id="<%- permission.key %>">
<span><%- permission.description %></span>
<div class="check">
<input type="checkbox" <% if(permission.active) { %>checked="checked"<% } %>/>
<div></div>
<span class="check-text check-yes">Yes</span>
<span class="check-text check-no">No</span>
</div>
</div>
<% }) %>
</div>
</div>
</div>
""")
baseTemplate = _.template("""
<div class="category-config-list"></div>
""")
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
generateCategoriesFromRole = (role) ->
setActivePermissions = (permissions) ->
return _.map(permissions, (x) -> _.extend({}, x, {active: x["key"] in role.permissions}))
setActivePermissionsPerCategory = (category) ->
return _.map(category, (x) ->
_.extend({}, x, {
activePermissions: _.filter(x["permissions"], "active").length
})
)
categories = []
milestonePermissions = [
{ key: "view_milestones", description: "View sprints" }
{ key: "add_milestone", description: "Add sprint" }
{ key: "modify_milestone", description: "Modify sprint" }
{ key: "delete_milestone", description: "Delete sprint" }
]
categories.push({ name: "Sprints", permissions: setActivePermissions(milestonePermissions) })
userStoryPermissions = [
{ key: "view_us", description: "View user story" }
{ key: "add_us", description: "Add user story" }
{ key: "modify_us", description: "Modify user story" }
{ key: "delete_us", description: "Delete user story" }
]
categories.push({ name: "User Stories", permissions: setActivePermissions(userStoryPermissions) })
taskPermissions = [
{ key: "view_tasks", description: "View tasks" }
{ key: "add_task", description: "Add task" }
{ key: "modify_task", description: "Modify task" }
{ key: "delete_task", description: "Delete task" }
]
categories.push({ name: "Tasks", permissions: setActivePermissions(taskPermissions) })
issuePermissions = [
{ key: "view_issues", description: "View issues" }
{ key: "add_issue", description: "Add issue" }
{ key: "modify_issue", description: "Modify issue" }
{ key: "delete_issue", description: "Delete issue" }
]
categories.push({ name: "Issues", permissions: setActivePermissions(issuePermissions) })
wikiPermissions = [
{ key: "view_wiki_pages", description: "View wiki pages" }
{ key: "add_wiki_page", description: "Add wiki page" }
{ key: "modify_wiki_page", description: "Modify wiki page" }
{ key: "delete_wiki_page", description: "Delete wiki page" }
{ key: "view_wiki_links", description: "View wiki links" }
{ key: "add_wiki_link", description: "Add wiki link" }
{ key: "delete_wiki_link", description: "Delete wiki link" }
]
categories.push({ name: "Wiki", permissions: setActivePermissions(wikiPermissions) })
return setActivePermissionsPerCategory(categories)
renderResume = (element, category) ->
element.find(".resume").html(resumeTemplate({category: category}))
renderCategory = (category, index) ->
html = categoryTemplate({category: category, index: index})
html = angular.element(html)
renderResume(html, category)
return html
renderPermissions = () ->
$el.off()
html = baseTemplate()
_.each generateCategoriesFromRole($scope.role), (category, index) ->
html = angular.element(html).append(renderCategory(category, index))
$el.html(html)
$el.on "click", ".resume", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
target.next().toggleClass("open")
$el.on "change", ".category-item input", (event) ->
getActivePermissions = ->
activePermissions = _.filter($el.find(".category-item input"), (t) ->
angular.element(t).is(":checked")
)
activePermissions = _.sortBy(_.map(activePermissions, (t) ->
permission = angular.element(t).parents(".category-item").data("id")
))
activePermissions.push("view_project")
return activePermissions
target = angular.element(event.currentTarget)
$scope.role.permissions = getActivePermissions()
onSuccess = (role) ->
categories = generateCategoriesFromRole(role)
categoryId = target.parents(".category-config").data("id")
renderResume(target.parents(".category-config"), categories[categoryId])
$rootscope.$broadcast("projects:reload")
$confirm.notify("success")
$ctrl.loadProject()
onError = ->
$confirm.notify("error")
target.prop "checked", !target.prop("checked")
$scope.role.permissions = getActivePermissions()
$repo.save($scope.role).then onSuccess, onError
$scope.$on "$destroy", ->
$el.off()
$scope.$on "role:changed", ->
renderPermissions()
bindOnce($scope, $attrs.ngModel, renderPermissions)
return {link:link}
module.directive("tgRolePermissions", ["$rootScope", "$tgRepo", "$tgConfirm", RolePermissionsDirective])
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/admin/memberships.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
bindMethods = @.taiga.bindMethods
module = angular.module("taigaAdmin")
#############################################################################
## Project Roles Controller
#############################################################################
class RolesController extends mixOf(taiga.Controller, taiga.PageMixin, taiga.FiltersMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$appTitle"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @appTitle) ->
bindMethods(@)
@scope.sectionName = "Permissions" #i18n
@scope.project = {}
@scope.anyComputableRole = true
promise = @.loadInitialData()
promise.then () =>
@appTitle.set("Roles - " + @scope.project.name)
promise.then null, @.onInitialDataError.bind(@)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
@scope.anyComputableRole = _.some(_.map(project.roles, (point) -> point.computable))
return project
loadRoles: ->
return @rs.roles.list(@scope.projectId).then (data) =>
@scope.roles = data
@scope.role = @scope.roles[0]
return data
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then(=> @.loadProject())
.then(=> @.loadUsersAndRoles())
.then(=> @.loadRoles())
setRole: (role) ->
@scope.role = role
@scope.$broadcast("role:changed", @scope.role)
delete: ->
# TODO: i18n
title = "Delete Role" # TODO: i18n
subtitle = @scope.role.name
replacement = "All the users with this role will be moved to" # TODO: i18n
warning = "<strong>Be careful, all role estimations will be removed</strong>" # TODO: i18n
choices = {}
for role in @scope.roles
if role.id != @scope.role.id
choices[role.id] = role.name
if _.keys(choices).length == 0
return @confirm.error("You can't delete all values.") # TODO: i18n
return @confirm.askChoice(title, subtitle, choices, replacement, warning).then (response) =>
promise = @repo.remove(@scope.role, {moveTo: response.selected})
promise.then =>
@.loadProject()
@.loadRoles().finally ->
response.finish()
promise.then null, =>
@confirm.notify('error')
setComputable: debounce 2000, ->
onSuccess = =>
@confirm.notify("success")
@.loadProject()
onError = =>
@confirm.notify("error")
@scope.role.revert()
@repo.save(@scope.role).then onSuccess, onError
module.controller("RolesController", RolesController)
EditRoleDirective = ($repo, $confirm) ->
link = ($scope, $el, $attrs) ->
toggleView = ->
$el.find('.total').toggle()
$el.find('.edit-role').toggle()
submit = () ->
$scope.role.name = $el.find("input").val()
promise = $repo.save($scope.role)
promise.then ->
$confirm.notify("success")
promise.then null, (data) ->
$confirm.notify("error")
toggleView()
$el.on "click", "a.icon-edit", ->
toggleView()
$el.find("input").focus()
$el.on "click", "a.save", submit
$el.on "keyup", "input", (event) ->
if event.keyCode == 13 # Enter key
submit()
else if event.keyCode == 27 # ESC key
toggleView()
$scope.$on "role:changed", ->
if $el.find('.edit-role').is(":visible")
toggleView()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgEditRole", ["$tgRepo", "$tgConfirm", EditRoleDirective])
RolesDirective = ->
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgRoles", RolesDirective)
NewRoleDirective = ($tgrepo, $confirm) ->
DEFAULT_PERMISSIONS = ["view_project", "view_milestones", "view_us", "view_tasks", "view_issues"]
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
$scope.$on "$destroy", ->
$el.off()
$el.on "click", "a.add-button", (event) ->
event.preventDefault()
$el.find(".new").removeClass("hidden")
$el.find(".new").focus()
$el.find(".add-button").hide()
$el.on "keyup", ".new", (event) ->
event.preventDefault()
if event.keyCode == 13 # Enter key
target = angular.element(event.currentTarget)
newRole = {
project: $scope.projectId
name: target.val()
permissions: DEFAULT_PERMISSIONS
order: _.max($scope.roles, (r) -> r.order).order + 1
computable: false
}
$el.find(".new").addClass("hidden")
$el.find(".new").val('')
onSuccess = (role) ->
$scope.roles.push(role)
$ctrl.setRole(role)
$el.find(".add-button").show()
$ctrl.loadProject()
onError = ->
$confirm.notify("error")
$tgrepo.create("roles", newRole).then(onSuccess, onError)
else if event.keyCode == 27 # ESC key
target = angular.element(event.currentTarget)
$el.find(".new").addClass("hidden")
$el.find(".new").val('')
$el.find(".add-button").show()
return {link:link}
module.directive("tgNewRole", ["$tgRepo", "$tgConfirm", NewRoleDirective])
# Use category-config.scss styles
RolePermissionsDirective = ($rootscope, $repo, $confirm) ->
resumeTemplate = _.template("""
<div class="resume-title"><%- category.name %></div>
<div class="summary-role">
<div class="count"><%- category.activePermissions %>/<%- category.permissions.length %></div>
<% _.each(category.permissions, function(permission) { %>
<div class="role-summary-single <% if(permission.active) { %>active<% } %>"
title="<%- permission.description %>"></div>
<% }) %>
</div>
<div class="icon icon-arrow-bottom"></div>
""")
categoryTemplate = _.template("""
<div class="category-config" data-id="<%- index %>">
<div class="resume">
</div>
<div class="category-items">
<div class="items-container">
<% _.each(category.permissions, function(permission) { %>
<div class="category-item" data-id="<%- permission.key %>">
<span><%- permission.description %></span>
<div class="check">
<input type="checkbox" <% if(permission.active) { %>checked="checked"<% } %>/>
<div></div>
<span class="check-text check-yes">Yes</span>
<span class="check-text check-no">No</span>
</div>
</div>
<% }) %>
</div>
</div>
</div>
""")
baseTemplate = _.template("""
<div class="category-config-list"></div>
""")
link = ($scope, $el, $attrs) ->
$ctrl = $el.controller()
generateCategoriesFromRole = (role) ->
setActivePermissions = (permissions) ->
return _.map(permissions, (x) -> _.extend({}, x, {active: x["key"] in role.permissions}))
setActivePermissionsPerCategory = (category) ->
return _.map(category, (x) ->
_.extend({}, x, {
activePermissions: _.filter(x["permissions"], "active").length
})
)
categories = []
milestonePermissions = [
{ key: "view_milestones", description: "View sprints" }
{ key: "add_milestone", description: "Add sprint" }
{ key: "modify_milestone", description: "Modify sprint" }
{ key: "delete_milestone", description: "Delete sprint" }
]
categories.push({ name: "Sprints", permissions: setActivePermissions(milestonePermissions) })
userStoryPermissions = [
{ key: "view_us", description: "View user story" }
{ key: "add_us", description: "Add user story" }
{ key: "modify_us", description: "Modify user story" }
{ key: "delete_us", description: "Delete user story" }
]
categories.push({ name: "User Stories", permissions: setActivePermissions(userStoryPermissions) })
taskPermissions = [
{ key: "view_tasks", description: "View tasks" }
{ key: "add_task", description: "Add task" }
{ key: "modify_task", description: "Modify task" }
{ key: "delete_task", description: "Delete task" }
]
categories.push({ name: "Tasks", permissions: setActivePermissions(taskPermissions) })
issuePermissions = [
{ key: "view_issues", description: "View issues" }
{ key: "add_issue", description: "Add issue" }
{ key: "modify_issue", description: "Modify issue" }
{ key: "delete_issue", description: "Delete issue" }
]
categories.push({ name: "Issues", permissions: setActivePermissions(issuePermissions) })
wikiPermissions = [
{ key: "view_wiki_pages", description: "View wiki pages" }
{ key: "add_wiki_page", description: "Add wiki page" }
{ key: "modify_wiki_page", description: "Modify wiki page" }
{ key: "delete_wiki_page", description: "Delete wiki page" }
{ key: "view_wiki_links", description: "View wiki links" }
{ key: "add_wiki_link", description: "Add wiki link" }
{ key: "delete_wiki_link", description: "Delete wiki link" }
]
categories.push({ name: "Wiki", permissions: setActivePermissions(wikiPermissions) })
return setActivePermissionsPerCategory(categories)
renderResume = (element, category) ->
element.find(".resume").html(resumeTemplate({category: category}))
renderCategory = (category, index) ->
html = categoryTemplate({category: category, index: index})
html = angular.element(html)
renderResume(html, category)
return html
renderPermissions = () ->
$el.off()
html = baseTemplate()
_.each generateCategoriesFromRole($scope.role), (category, index) ->
html = angular.element(html).append(renderCategory(category, index))
$el.html(html)
$el.on "click", ".resume", (event) ->
event.preventDefault()
target = angular.element(event.currentTarget)
target.next().toggleClass("open")
$el.on "change", ".category-item input", (event) ->
getActivePermissions = ->
activePermissions = _.filter($el.find(".category-item input"), (t) ->
angular.element(t).is(":checked")
)
activePermissions = _.sortBy(_.map(activePermissions, (t) ->
permission = angular.element(t).parents(".category-item").data("id")
))
activePermissions.push("view_project")
return activePermissions
target = angular.element(event.currentTarget)
$scope.role.permissions = getActivePermissions()
onSuccess = (role) ->
categories = generateCategoriesFromRole(role)
categoryId = target.parents(".category-config").data("id")
renderResume(target.parents(".category-config"), categories[categoryId])
$rootscope.$broadcast("projects:reload")
$confirm.notify("success")
$ctrl.loadProject()
onError = ->
$confirm.notify("error")
target.prop "checked", !target.prop("checked")
$scope.role.permissions = getActivePermissions()
$repo.save($scope.role).then onSuccess, onError
$scope.$on "$destroy", ->
$el.off()
$scope.$on "role:changed", ->
renderPermissions()
bindOnce($scope, $attrs.ngModel, renderPermissions)
return {link:link}
module.directive("tgRolePermissions", ["$rootScope", "$tgRepo", "$tgConfirm", RolePermissionsDirective])
|
[
{
"context": "Modules.postify(a_post, {\n csrfToken: \"foobar\"\n })\n\n afterEach ->\n Array.from(",
"end": 411,
"score": 0.8702115416526794,
"start": 405,
"tag": "PASSWORD",
"value": "foobar"
}
] | game/static/spirit/scripts/test/suites/postify-spec.coffee | Yoann-Vie/esgi-hearthstone | 3 | describe "postify plugin tests", ->
a_post = null
plugin_postify = null
isHidden = stModules.utils.isHidden
beforeEach ->
fixtures = jasmine.getFixtures()
fixtures.fixturesPath = 'base/test/fixtures/'
loadFixtures('postify.html')
a_post = document.querySelectorAll('.js-post')
plugin_postify = stModules.postify(a_post, {
csrfToken: "foobar"
})
afterEach ->
Array.from(document.querySelectorAll('.js-postify-form')).forEach((elm) ->
elm.parentNode.removeChild(elm)
)
it "prevents the default click behaviour on click", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
a_post[0].dispatchEvent(evt)
expect(submit.calls.count()).toEqual(1)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "submits the form", ->
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
a_post[0].click()
form = document.querySelector('.js-postify-form')
expect(submit.calls.count()).toEqual(1)
expect(form.getAttribute('action')).toEqual("/link1/")
expect(isHidden([form])).toEqual(true)
expect(document.querySelector('input[name=csrfmiddlewaretoken]').value).toEqual("foobar")
| 85468 | describe "postify plugin tests", ->
a_post = null
plugin_postify = null
isHidden = stModules.utils.isHidden
beforeEach ->
fixtures = jasmine.getFixtures()
fixtures.fixturesPath = 'base/test/fixtures/'
loadFixtures('postify.html')
a_post = document.querySelectorAll('.js-post')
plugin_postify = stModules.postify(a_post, {
csrfToken: "<PASSWORD>"
})
afterEach ->
Array.from(document.querySelectorAll('.js-postify-form')).forEach((elm) ->
elm.parentNode.removeChild(elm)
)
it "prevents the default click behaviour on click", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
a_post[0].dispatchEvent(evt)
expect(submit.calls.count()).toEqual(1)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "submits the form", ->
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
a_post[0].click()
form = document.querySelector('.js-postify-form')
expect(submit.calls.count()).toEqual(1)
expect(form.getAttribute('action')).toEqual("/link1/")
expect(isHidden([form])).toEqual(true)
expect(document.querySelector('input[name=csrfmiddlewaretoken]').value).toEqual("foobar")
| true | describe "postify plugin tests", ->
a_post = null
plugin_postify = null
isHidden = stModules.utils.isHidden
beforeEach ->
fixtures = jasmine.getFixtures()
fixtures.fixturesPath = 'base/test/fixtures/'
loadFixtures('postify.html')
a_post = document.querySelectorAll('.js-post')
plugin_postify = stModules.postify(a_post, {
csrfToken: "PI:PASSWORD:<PASSWORD>END_PI"
})
afterEach ->
Array.from(document.querySelectorAll('.js-postify-form')).forEach((elm) ->
elm.parentNode.removeChild(elm)
)
it "prevents the default click behaviour on click", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
a_post[0].dispatchEvent(evt)
expect(submit.calls.count()).toEqual(1)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "submits the form", ->
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
a_post[0].click()
form = document.querySelector('.js-postify-form')
expect(submit.calls.count()).toEqual(1)
expect(form.getAttribute('action')).toEqual("/link1/")
expect(isHidden([form])).toEqual(true)
expect(document.querySelector('input[name=csrfmiddlewaretoken]').value).toEqual("foobar")
|
[
{
"context": "\n role_id: required: true\n password: required: true\n password_confirmation:\n required",
"end": 439,
"score": 0.96543949842453,
"start": 425,
"tag": "PASSWORD",
"value": "required: true"
},
{
"context": ": 'Role is required'\n pas... | vteams-open-source-billing-7978d62/app/javascript/packs/validations.js.coffee | Ayan-devops/web-app | 0 | class window.Validation
@UserSettingForm = ->
$('#user_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
user_name: required: true
email: required: true
role_id: required: true
password: required: true
password_confirmation:
required: true
equalTo: '#side_form_password'
messages:
user_name: required: 'Full Name is required'
email: required: 'Email is required'
role_id: required: 'Role is required'
password: required: 'Password is required'
password_confirmation: required: 'Password confirmation is required', equalTo: 'must be equal to above password'
@CompanySettingForm = ->
$('#company_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'company[company_name]': required: true
'company[contact_name]': required: true
'company[email]': required: true
messages:
'company[company_name]': required: 'Company Name is required'
'company[contact_name]': required: 'Contact Name is required'
'company[email]': required: 'Email is required'
@RoleSettingForm = ->
$('#role_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'role[name]': required: true
messages:
'role[name]': required: 'Name is required'
@InvoiceForm = ->
$('.invoice-client-select').on 'focusout', (e) ->
$('#invoice_client_id').valid()
$('#s2id_invoice_invoice_line_items_attributes_0_item_id').on 'focusout', (e) ->
$('#invoice_invoice_line_items_attributes_0_item_id').valid()
jQuery.validator.addMethod 'lessThan', ((value, element) ->
return value <= $('#invoice_due_date_picker').val()
), 'Must be less or equal to invoice due date.'
jQuery.validator.addMethod 'greaterThan', ((value, element) ->
return value >= $('#invoice_date_picker').val()
), 'Must be greater or equal to invoice date.'
$('.invoice-form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
'invoice[client_id]': required: true
'invoice[invoice_date]': lessThan: true
'invoice[due_date]': greaterThan: true
'invoice[invoice_line_items_attributes][0][item_id]': required: true
messages:
'invoice[client_id]': required: 'Client is required'
'invoice[invoice_line_items_attributes][0][item_id]': required: 'Atleast one line item is required'
@EstimateForm = ->
$('.estimate-select-client').on 'focusout', (e) ->
$('#estimate_client_id').valid()
$('#s2id_estimate_estimate_line_items_attributes_0_item_id').on 'focusout', (e) ->
$('#estimate_estimate_line_items_attributes_0_item_id').valid()
$('.estimate-form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
'estimate[client_id]': required: true
'estimate[estimate_line_items_attributes][0][item_id]': required: true
messages:
'estimate[client_id]': required: 'Client is required'
'estimate[estimate_line_items_attributes][0][item_id]': required: 'Atleast one line item is required'
@ItemForm = ->
$('.item_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'item[item_name]': required: true
'item[item_description]': required: true
'item[unit_cost]': required: true, number: true
'item[quantity]': required: true, number: true
messages:
'item[item_name]': required: 'Name is required'
'item[item_description]': required: 'Description is required'
'item[unit_cost]': required: 'Unit Cost is required', number: 'Unit cost should be in numbers'
'item[quantity]': required: 'Quantity is required', number: 'Quantity should be in numbers'
@TaxForm = ->
$('.tax_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'tax[name]': required: true
'tax[percentage]': required: true, number: true
messages:
'tax[name]': required: 'Name is required'
'tax[percentage]': required: 'Percentage is required', number: 'Percentage should be in numbers'
@ClientForm = ->
$('#newClient').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'client[organization_name]': required: true
'client[first_name]': required: true
'client[last_name]': required: true
'client[email]': required: true
messages:
'client[organization_name]': required: 'Organization Name is required'
'client[first_name]': required: 'First Name is required'
'client[last_name]': required: 'Last Name is required'
'client[email]': required: 'Email is required'
@PaymentForm = ->
$('#payments_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'payments[][payment_amount]': required: true, number: true
messages:
'payments[][payment_amount]': required: 'Amount is required', number: 'Please enter a valid amount'
| 202305 | class window.Validation
@UserSettingForm = ->
$('#user_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
user_name: required: true
email: required: true
role_id: required: true
password: <PASSWORD>
password_confirmation:
required: true
equalTo: '#side_form_password'
messages:
user_name: required: 'Full Name is required'
email: required: 'Email is required'
role_id: required: 'Role is required'
password: required: '<PASSWORD>'
password_confirmation: required: 'Password <PASSWORD>', equalTo: 'must be equal to above password'
@CompanySettingForm = ->
$('#company_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'company[company_name]': required: true
'company[contact_name]': required: true
'company[email]': required: true
messages:
'company[company_name]': required: 'Company Name is required'
'company[contact_name]': required: 'Contact Name is required'
'company[email]': required: 'Email is required'
@RoleSettingForm = ->
$('#role_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'role[name]': required: true
messages:
'role[name]': required: 'Name is required'
@InvoiceForm = ->
$('.invoice-client-select').on 'focusout', (e) ->
$('#invoice_client_id').valid()
$('#s2id_invoice_invoice_line_items_attributes_0_item_id').on 'focusout', (e) ->
$('#invoice_invoice_line_items_attributes_0_item_id').valid()
jQuery.validator.addMethod 'lessThan', ((value, element) ->
return value <= $('#invoice_due_date_picker').val()
), 'Must be less or equal to invoice due date.'
jQuery.validator.addMethod 'greaterThan', ((value, element) ->
return value >= $('#invoice_date_picker').val()
), 'Must be greater or equal to invoice date.'
$('.invoice-form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
'invoice[client_id]': required: true
'invoice[invoice_date]': lessThan: true
'invoice[due_date]': greaterThan: true
'invoice[invoice_line_items_attributes][0][item_id]': required: true
messages:
'invoice[client_id]': required: 'Client is required'
'invoice[invoice_line_items_attributes][0][item_id]': required: 'Atleast one line item is required'
@EstimateForm = ->
$('.estimate-select-client').on 'focusout', (e) ->
$('#estimate_client_id').valid()
$('#s2id_estimate_estimate_line_items_attributes_0_item_id').on 'focusout', (e) ->
$('#estimate_estimate_line_items_attributes_0_item_id').valid()
$('.estimate-form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
'estimate[client_id]': required: true
'estimate[estimate_line_items_attributes][0][item_id]': required: true
messages:
'estimate[client_id]': required: 'Client is required'
'estimate[estimate_line_items_attributes][0][item_id]': required: 'Atleast one line item is required'
@ItemForm = ->
$('.item_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'item[item_name]': required: true
'item[item_description]': required: true
'item[unit_cost]': required: true, number: true
'item[quantity]': required: true, number: true
messages:
'item[item_name]': required: 'Name is required'
'item[item_description]': required: 'Description is required'
'item[unit_cost]': required: 'Unit Cost is required', number: 'Unit cost should be in numbers'
'item[quantity]': required: 'Quantity is required', number: 'Quantity should be in numbers'
@TaxForm = ->
$('.tax_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'tax[name]': required: true
'tax[percentage]': required: true, number: true
messages:
'tax[name]': required: 'Name is required'
'tax[percentage]': required: 'Percentage is required', number: 'Percentage should be in numbers'
@ClientForm = ->
$('#newClient').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'client[organization_name]': required: true
'client[first_name]': required: true
'client[last_name]': required: true
'client[email]': required: true
messages:
'client[organization_name]': required: 'Organization Name is required'
'client[first_name]': required: 'First Name is required'
'client[last_name]': required: 'Last Name is required'
'client[email]': required: 'Email is required'
@PaymentForm = ->
$('#payments_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'payments[][payment_amount]': required: true, number: true
messages:
'payments[][payment_amount]': required: 'Amount is required', number: 'Please enter a valid amount'
| true | class window.Validation
@UserSettingForm = ->
$('#user_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
user_name: required: true
email: required: true
role_id: required: true
password: PI:PASSWORD:<PASSWORD>END_PI
password_confirmation:
required: true
equalTo: '#side_form_password'
messages:
user_name: required: 'Full Name is required'
email: required: 'Email is required'
role_id: required: 'Role is required'
password: required: 'PI:PASSWORD:<PASSWORD>END_PI'
password_confirmation: required: 'Password PI:PASSWORD:<PASSWORD>END_PI', equalTo: 'must be equal to above password'
@CompanySettingForm = ->
$('#company_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'company[company_name]': required: true
'company[contact_name]': required: true
'company[email]': required: true
messages:
'company[company_name]': required: 'Company Name is required'
'company[contact_name]': required: 'Contact Name is required'
'company[email]': required: 'Email is required'
@RoleSettingForm = ->
$('#role_side_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'role[name]': required: true
messages:
'role[name]': required: 'Name is required'
@InvoiceForm = ->
$('.invoice-client-select').on 'focusout', (e) ->
$('#invoice_client_id').valid()
$('#s2id_invoice_invoice_line_items_attributes_0_item_id').on 'focusout', (e) ->
$('#invoice_invoice_line_items_attributes_0_item_id').valid()
jQuery.validator.addMethod 'lessThan', ((value, element) ->
return value <= $('#invoice_due_date_picker').val()
), 'Must be less or equal to invoice due date.'
jQuery.validator.addMethod 'greaterThan', ((value, element) ->
return value >= $('#invoice_date_picker').val()
), 'Must be greater or equal to invoice date.'
$('.invoice-form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
'invoice[client_id]': required: true
'invoice[invoice_date]': lessThan: true
'invoice[due_date]': greaterThan: true
'invoice[invoice_line_items_attributes][0][item_id]': required: true
messages:
'invoice[client_id]': required: 'Client is required'
'invoice[invoice_line_items_attributes][0][item_id]': required: 'Atleast one line item is required'
@EstimateForm = ->
$('.estimate-select-client').on 'focusout', (e) ->
$('#estimate_client_id').valid()
$('#s2id_estimate_estimate_line_items_attributes_0_item_id').on 'focusout', (e) ->
$('#estimate_estimate_line_items_attributes_0_item_id').valid()
$('.estimate-form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
ignore: 'input[type=hidden]'
rules:
'estimate[client_id]': required: true
'estimate[estimate_line_items_attributes][0][item_id]': required: true
messages:
'estimate[client_id]': required: 'Client is required'
'estimate[estimate_line_items_attributes][0][item_id]': required: 'Atleast one line item is required'
@ItemForm = ->
$('.item_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'item[item_name]': required: true
'item[item_description]': required: true
'item[unit_cost]': required: true, number: true
'item[quantity]': required: true, number: true
messages:
'item[item_name]': required: 'Name is required'
'item[item_description]': required: 'Description is required'
'item[unit_cost]': required: 'Unit Cost is required', number: 'Unit cost should be in numbers'
'item[quantity]': required: 'Quantity is required', number: 'Quantity should be in numbers'
@TaxForm = ->
$('.tax_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'tax[name]': required: true
'tax[percentage]': required: true, number: true
messages:
'tax[name]': required: 'Name is required'
'tax[percentage]': required: 'Percentage is required', number: 'Percentage should be in numbers'
@ClientForm = ->
$('#newClient').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'client[organization_name]': required: true
'client[first_name]': required: true
'client[last_name]': required: true
'client[email]': required: true
messages:
'client[organization_name]': required: 'Organization Name is required'
'client[first_name]': required: 'First Name is required'
'client[last_name]': required: 'Last Name is required'
'client[email]': required: 'Email is required'
@PaymentForm = ->
$('#payments_form').validate
onfocusout: (element) ->
$(element).valid()
onkeyup: (element) ->
$(element).valid()
errorClass: 'error invalid-error'
errorElement: 'span'
rules:
'payments[][payment_amount]': required: true, number: true
messages:
'payments[][payment_amount]': required: 'Amount is required', number: 'Please enter a valid amount'
|
[
{
"context": "###\n * Author: Steven Meyer <svm9@aber.ac.uk>\n * File: bootstrap.coffe",
"end": 32,
"score": 0.9998878240585327,
"start": 20,
"tag": "NAME",
"value": "Steven Meyer"
},
{
"context": "###\n * Author: Steven Meyer <svm9@aber.ac.uk>\n * File: boots... | src/bootstraplib-2.coffee | StevenMeyer/BootstrapLib | 0 | ###
* Author: Steven Meyer <svm9@aber.ac.uk>
* File: bootstrap.coffee
* Description: Scriptable Twitter Bootstrap widgets and component creation.
*
* DO NOT EDIT THE JAVASCRIPT .js FILE DIRECTLY.
* THE JAVASCRIPT IS GENERATED FROM COFFEESCRIPT.
###
# [namespace.coffee](http://github.com/CodeCatalyst/namespace.coffee) v1.0.1
# Copyright (c) 2011-2012 [CodeCatalyst, LLC](http://www.codecatalyst.com/).
# Open source under the [MIT License](http://en.wikipedia.org/wiki/MIT_License).
# A lean namespace implementation for JavaScript written in [CoffeeScript](http://coffeescript.com/).
# *Export the specified value(s) to the specified package name.*
window.namespace or= ( name, values ) ->
# Export to `exports` for node.js or `window` for the browser.
target = exports ? window
# Nested packages may be specified using dot-notation, and are automatically created as needed.
if name.length > 0
target = target[ subpackage ] ||= {} for subpackage in name.split( '.' )
# Export each value in the specified values Object to the specified package name by the value's key.
target[ key ] = value for key, value of values
# Return a reference to the specified namespace.
return target
# *Export the namespace function to global scope, using itself.*
namespace( '', namespace: namespace )
isDOMNode = (node) ->
if typeof Node is "object"
node instanceof Node
else
node? and typeof node is "object" and typeof node.nodeType is "number" and typeof node.nodeName is "string"
getRenderedCSS = (element, key) ->
if element not instanceof jQuery
element = jQuery element
if element.length isnt 0
value = element.css key
if value is ""
# element hasn't been added to DOM and has no style set
$body = $ "body"
$temp = element.clone()
$body.append $temp
value = $temp.css key
$temp.remove()
value
$ = jQuery
baseNamespace = "uk.co.stevenmeyer.bootstrap"
cssNamespace = "#{baseNamespace}.css"
baseClass = class (namespace baseNamespace).Bootstrap extends $
constructor: () ->
$jQuery = $.apply this, arguments
$.extend this, $jQuery
this
toString: () ->
$("<p />").append(@clone()).html()
#no dependencies
class (namespace cssNamespace).Button extends baseClass
BLOCK: "btn-block"
DISABLED: "disabled"
inputButtonTypes:
BUTTON: "button"
RESET: "reset"
SUBMIT: "submit"
toArray: () ->
Button::inputButtonTypes[type] for type of Button::inputButtonTypes when type isnt "toArray"
options:
DANGER: "btn-danger"
DEFAULT: ""
INFO: "btn-info"
LINK: "btn-link"
PRIMARY: "btn-primary"
SUCCESS: "btn-success"
WARNING: "btn-warning"
toArray: () ->
Button::options[style] for style of Button::options when style isnt "toArray"
sizes:
DEFAULT: ""
EXTRASMALL: "btn-xs"
LARGE: "btn-lg"
SMALL: "btn-sm"
toArray: () ->
Button::sizes[size] for size of Button::sizes when size isnt "toArray"
constructor: () ->
args = Array::slice.call arguments
if not args[0]?
args[0] = "<button />"
args[1] =
type: "button"
Button.__super__.constructor.apply this, args
@size = () => Button::size.apply this, arguments
@text = () => Button::text.apply this, arguments
@addClass "btn"
block: (block = true) ->
if block is false
@removeClass Button::BLOCK
else
@addClass Button::BLOCK
danger: () ->
@option Button::options.DANGER
defaultSize: () ->
@size Button::sizes.DEFAULT
defaultStyle: () ->
@option()
disable: () ->
@each (index, DOMElement) ->
$element = $ DOMElement
$element.attr "disabled", "disabled" if not $element.is "a"
$element.addClass Button::DISABLED if not $element.is "button,input"
exclusiveClass = (style = "", classes) ->
@removeClass classes.join " "
if style in classes then @addClass style else this
extraSmall: () ->
@size Button::sizes.EXTRASMALL
getText = () ->
if @is "input"
types = Button::inputButtonTypes.toArray()
@map (index, DOMElement) ->
$element = $ DOMElement
if ($element.is "input") and ($element.attr "type") in types
$element.val()
else
$element.text()
.get().join()
else
$.fn.text.apply this, []
info: () ->
@option Button::options.INFO
large: () ->
@size Button::sizes.LARGE
link: () ->
@option Button::options.LINK
option: (emphasis = "") ->
exclusiveClass.call this, emphasis, Button::options.toArray()
primary: () ->
@option Button::options.PRIMARY
setText = (text) ->
if @is "input"
types = Button::inputButtonTypes.toArray()
@each (index, DOMElement) ->
$element = $ DOMElement
# cannot just have $element.val(text), here:
# if text were a function, then this @each loop would cause
# such a function to have an index value of 0 on every execution
# because $element is just one item (instead of all of the items
# in 'this').
value = if typeof text is "function" then text.call DOMElement, index, $element.text() else text
if ($element.is "input") and ($element.attr "type") in types
$element.val value
else
$element.text value
else
$.fn.text.call this, text
size: () ->
if arguments[0]?
exclusiveClass.call this, arguments[0], Button::sizes.toArray()
else
@length # original, deprecated jQuery size() function
small: () ->
@size Button::sizes.SMALL
success: () ->
@option Button::options.SUCCESS
text: () ->
if arguments[0]?
setText.apply this, arguments
else
getText.apply this, arguments
warning: () ->
@option Button::options.WARNING
#no dependencies
class (namespace cssNamespace).Code extends baseClass
constructor: () ->
args = Array::slice.call arguments
if not args[0]?
args[0] = $ "<code />"
Code.__super__.constructor.apply this, args
@append = () => Code::append.apply this, arguments
@prepend = () => Code::prepend.apply this, arguments
this
addItem = (op, items) ->
lastTry = (item) =>
$item = $ item
if $item
$.fn[op].call this, document.createTextNode $("<p />").append($item.clone()).html()
else
$.fn[op].call this, document.createTextNode "#{$item}"
for arg of items
if typeof items[arg] is "string"
$.fn[op].call this, document.createTextNode items[arg]
else if items[arg] instanceof $
$.fn[op].call this, document.createTextNode $("<p>").append(items[arg].clone()).html()
else if items[arg] instanceof Array
Code::[op].apply this, items[arg]
else if typeof items[arg] is "function"
Code::[op].call this, items[arg].call this, 0, @html()
else if isDOMNode items[arg]
switch items[arg].nodeType
when 1, 9, 11
$.fn[op].call this, document.createTextNode $("<p>").append(items[arg]).html()
when 3 then $.fn[op].call this, items[arg]
else lastTry items[arg]
else
lastTry items[arg]
this
append: () ->
args = Array::slice.call arguments
addItem.call this, "append", args
appendHTML: () ->
$.fn.append.apply this, arguments
isBlock: () ->
display = getRenderedCSS this, "display"
switch display
when "block", "inline-block", "list-item", "table", "table-caption", "table-row"
true
else false
isInline: () ->
display = getRenderedCSS this, "display"
switch display
when "inline", "inline-table", "table-cell", "table-column"
true
else false
prepend: () ->
args = Array::slice.call arguments
addItem.call this, "prepend", args
prependHTML: () ->
$.fn.append.apply this, arguments
scrollable: (scroll) ->
className = "pre-scrollable"
if scroll is false
@removeClass className
else if @isBlock()
@addClass className
this
class (namespace cssNamespace).Code.BlockCode extends (namespace cssNamespace).Code
constructor: () ->
args = Array::slice.call arguments
$element = $ "<pre />"
if args[0]?
if isDOMNode(args[0]) or typeof args[0] is "string" or args[0] instanceof $
args[0] = $element
else
args = args.unshift $element
else
args[0] = $element
BlockCode.__super__.constructor.apply this, args
this
class (namespace cssNamespace).Code.InlineCode extends (namespace cssNamespace).Code
constructor: () ->
args = Array::slice.call arguments
$element = $ "<code />"
if args[0]?
if isDOMNode(args[0]) or typeof args[0] is "string" or args[0] instanceof $
args[0] = $element
else
args = args.unshift $element
else
args[0] = $element
InlineCode.__super__.constructor.apply this, args
this | 217616 | ###
* Author: <NAME> <<EMAIL>>
* File: bootstrap.coffee
* Description: Scriptable Twitter Bootstrap widgets and component creation.
*
* DO NOT EDIT THE JAVASCRIPT .js FILE DIRECTLY.
* THE JAVASCRIPT IS GENERATED FROM COFFEESCRIPT.
###
# [namespace.coffee](http://github.com/CodeCatalyst/namespace.coffee) v1.0.1
# Copyright (c) 2011-2012 [CodeCatalyst, LLC](http://www.codecatalyst.com/).
# Open source under the [MIT License](http://en.wikipedia.org/wiki/MIT_License).
# A lean namespace implementation for JavaScript written in [CoffeeScript](http://coffeescript.com/).
# *Export the specified value(s) to the specified package name.*
window.namespace or= ( name, values ) ->
# Export to `exports` for node.js or `window` for the browser.
target = exports ? window
# Nested packages may be specified using dot-notation, and are automatically created as needed.
if name.length > 0
target = target[ subpackage ] ||= {} for subpackage in name.split( '.' )
# Export each value in the specified values Object to the specified package name by the value's key.
target[ key ] = value for key, value of values
# Return a reference to the specified namespace.
return target
# *Export the namespace function to global scope, using itself.*
namespace( '', namespace: namespace )
isDOMNode = (node) ->
if typeof Node is "object"
node instanceof Node
else
node? and typeof node is "object" and typeof node.nodeType is "number" and typeof node.nodeName is "string"
getRenderedCSS = (element, key) ->
if element not instanceof jQuery
element = jQuery element
if element.length isnt 0
value = element.css key
if value is ""
# element hasn't been added to DOM and has no style set
$body = $ "body"
$temp = element.clone()
$body.append $temp
value = $temp.css key
$temp.remove()
value
$ = jQuery
baseNamespace = "uk.co.stevenmeyer.bootstrap"
cssNamespace = "#{baseNamespace}.css"
baseClass = class (namespace baseNamespace).Bootstrap extends $
constructor: () ->
$jQuery = $.apply this, arguments
$.extend this, $jQuery
this
toString: () ->
$("<p />").append(@clone()).html()
#no dependencies
class (namespace cssNamespace).Button extends baseClass
BLOCK: "btn-block"
DISABLED: "disabled"
inputButtonTypes:
BUTTON: "button"
RESET: "reset"
SUBMIT: "submit"
toArray: () ->
Button::inputButtonTypes[type] for type of Button::inputButtonTypes when type isnt "toArray"
options:
DANGER: "btn-danger"
DEFAULT: ""
INFO: "btn-info"
LINK: "btn-link"
PRIMARY: "btn-primary"
SUCCESS: "btn-success"
WARNING: "btn-warning"
toArray: () ->
Button::options[style] for style of Button::options when style isnt "toArray"
sizes:
DEFAULT: ""
EXTRASMALL: "btn-xs"
LARGE: "btn-lg"
SMALL: "btn-sm"
toArray: () ->
Button::sizes[size] for size of Button::sizes when size isnt "toArray"
constructor: () ->
args = Array::slice.call arguments
if not args[0]?
args[0] = "<button />"
args[1] =
type: "button"
Button.__super__.constructor.apply this, args
@size = () => Button::size.apply this, arguments
@text = () => Button::text.apply this, arguments
@addClass "btn"
block: (block = true) ->
if block is false
@removeClass Button::BLOCK
else
@addClass Button::BLOCK
danger: () ->
@option Button::options.DANGER
defaultSize: () ->
@size Button::sizes.DEFAULT
defaultStyle: () ->
@option()
disable: () ->
@each (index, DOMElement) ->
$element = $ DOMElement
$element.attr "disabled", "disabled" if not $element.is "a"
$element.addClass Button::DISABLED if not $element.is "button,input"
exclusiveClass = (style = "", classes) ->
@removeClass classes.join " "
if style in classes then @addClass style else this
extraSmall: () ->
@size Button::sizes.EXTRASMALL
getText = () ->
if @is "input"
types = Button::inputButtonTypes.toArray()
@map (index, DOMElement) ->
$element = $ DOMElement
if ($element.is "input") and ($element.attr "type") in types
$element.val()
else
$element.text()
.get().join()
else
$.fn.text.apply this, []
info: () ->
@option Button::options.INFO
large: () ->
@size Button::sizes.LARGE
link: () ->
@option Button::options.LINK
option: (emphasis = "") ->
exclusiveClass.call this, emphasis, Button::options.toArray()
primary: () ->
@option Button::options.PRIMARY
setText = (text) ->
if @is "input"
types = Button::inputButtonTypes.toArray()
@each (index, DOMElement) ->
$element = $ DOMElement
# cannot just have $element.val(text), here:
# if text were a function, then this @each loop would cause
# such a function to have an index value of 0 on every execution
# because $element is just one item (instead of all of the items
# in 'this').
value = if typeof text is "function" then text.call DOMElement, index, $element.text() else text
if ($element.is "input") and ($element.attr "type") in types
$element.val value
else
$element.text value
else
$.fn.text.call this, text
size: () ->
if arguments[0]?
exclusiveClass.call this, arguments[0], Button::sizes.toArray()
else
@length # original, deprecated jQuery size() function
small: () ->
@size Button::sizes.SMALL
success: () ->
@option Button::options.SUCCESS
text: () ->
if arguments[0]?
setText.apply this, arguments
else
getText.apply this, arguments
warning: () ->
@option Button::options.WARNING
#no dependencies
class (namespace cssNamespace).Code extends baseClass
constructor: () ->
args = Array::slice.call arguments
if not args[0]?
args[0] = $ "<code />"
Code.__super__.constructor.apply this, args
@append = () => Code::append.apply this, arguments
@prepend = () => Code::prepend.apply this, arguments
this
addItem = (op, items) ->
lastTry = (item) =>
$item = $ item
if $item
$.fn[op].call this, document.createTextNode $("<p />").append($item.clone()).html()
else
$.fn[op].call this, document.createTextNode "#{$item}"
for arg of items
if typeof items[arg] is "string"
$.fn[op].call this, document.createTextNode items[arg]
else if items[arg] instanceof $
$.fn[op].call this, document.createTextNode $("<p>").append(items[arg].clone()).html()
else if items[arg] instanceof Array
Code::[op].apply this, items[arg]
else if typeof items[arg] is "function"
Code::[op].call this, items[arg].call this, 0, @html()
else if isDOMNode items[arg]
switch items[arg].nodeType
when 1, 9, 11
$.fn[op].call this, document.createTextNode $("<p>").append(items[arg]).html()
when 3 then $.fn[op].call this, items[arg]
else lastTry items[arg]
else
lastTry items[arg]
this
append: () ->
args = Array::slice.call arguments
addItem.call this, "append", args
appendHTML: () ->
$.fn.append.apply this, arguments
isBlock: () ->
display = getRenderedCSS this, "display"
switch display
when "block", "inline-block", "list-item", "table", "table-caption", "table-row"
true
else false
isInline: () ->
display = getRenderedCSS this, "display"
switch display
when "inline", "inline-table", "table-cell", "table-column"
true
else false
prepend: () ->
args = Array::slice.call arguments
addItem.call this, "prepend", args
prependHTML: () ->
$.fn.append.apply this, arguments
scrollable: (scroll) ->
className = "pre-scrollable"
if scroll is false
@removeClass className
else if @isBlock()
@addClass className
this
class (namespace cssNamespace).Code.BlockCode extends (namespace cssNamespace).Code
constructor: () ->
args = Array::slice.call arguments
$element = $ "<pre />"
if args[0]?
if isDOMNode(args[0]) or typeof args[0] is "string" or args[0] instanceof $
args[0] = $element
else
args = args.unshift $element
else
args[0] = $element
BlockCode.__super__.constructor.apply this, args
this
class (namespace cssNamespace).Code.InlineCode extends (namespace cssNamespace).Code
constructor: () ->
args = Array::slice.call arguments
$element = $ "<code />"
if args[0]?
if isDOMNode(args[0]) or typeof args[0] is "string" or args[0] instanceof $
args[0] = $element
else
args = args.unshift $element
else
args[0] = $element
InlineCode.__super__.constructor.apply this, args
this | true | ###
* Author: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* File: bootstrap.coffee
* Description: Scriptable Twitter Bootstrap widgets and component creation.
*
* DO NOT EDIT THE JAVASCRIPT .js FILE DIRECTLY.
* THE JAVASCRIPT IS GENERATED FROM COFFEESCRIPT.
###
# [namespace.coffee](http://github.com/CodeCatalyst/namespace.coffee) v1.0.1
# Copyright (c) 2011-2012 [CodeCatalyst, LLC](http://www.codecatalyst.com/).
# Open source under the [MIT License](http://en.wikipedia.org/wiki/MIT_License).
# A lean namespace implementation for JavaScript written in [CoffeeScript](http://coffeescript.com/).
# *Export the specified value(s) to the specified package name.*
window.namespace or= ( name, values ) ->
# Export to `exports` for node.js or `window` for the browser.
target = exports ? window
# Nested packages may be specified using dot-notation, and are automatically created as needed.
if name.length > 0
target = target[ subpackage ] ||= {} for subpackage in name.split( '.' )
# Export each value in the specified values Object to the specified package name by the value's key.
target[ key ] = value for key, value of values
# Return a reference to the specified namespace.
return target
# *Export the namespace function to global scope, using itself.*
namespace( '', namespace: namespace )
isDOMNode = (node) ->
if typeof Node is "object"
node instanceof Node
else
node? and typeof node is "object" and typeof node.nodeType is "number" and typeof node.nodeName is "string"
getRenderedCSS = (element, key) ->
if element not instanceof jQuery
element = jQuery element
if element.length isnt 0
value = element.css key
if value is ""
# element hasn't been added to DOM and has no style set
$body = $ "body"
$temp = element.clone()
$body.append $temp
value = $temp.css key
$temp.remove()
value
$ = jQuery
baseNamespace = "uk.co.stevenmeyer.bootstrap"
cssNamespace = "#{baseNamespace}.css"
baseClass = class (namespace baseNamespace).Bootstrap extends $
constructor: () ->
$jQuery = $.apply this, arguments
$.extend this, $jQuery
this
toString: () ->
$("<p />").append(@clone()).html()
#no dependencies
class (namespace cssNamespace).Button extends baseClass
BLOCK: "btn-block"
DISABLED: "disabled"
inputButtonTypes:
BUTTON: "button"
RESET: "reset"
SUBMIT: "submit"
toArray: () ->
Button::inputButtonTypes[type] for type of Button::inputButtonTypes when type isnt "toArray"
options:
DANGER: "btn-danger"
DEFAULT: ""
INFO: "btn-info"
LINK: "btn-link"
PRIMARY: "btn-primary"
SUCCESS: "btn-success"
WARNING: "btn-warning"
toArray: () ->
Button::options[style] for style of Button::options when style isnt "toArray"
sizes:
DEFAULT: ""
EXTRASMALL: "btn-xs"
LARGE: "btn-lg"
SMALL: "btn-sm"
toArray: () ->
Button::sizes[size] for size of Button::sizes when size isnt "toArray"
constructor: () ->
args = Array::slice.call arguments
if not args[0]?
args[0] = "<button />"
args[1] =
type: "button"
Button.__super__.constructor.apply this, args
@size = () => Button::size.apply this, arguments
@text = () => Button::text.apply this, arguments
@addClass "btn"
block: (block = true) ->
if block is false
@removeClass Button::BLOCK
else
@addClass Button::BLOCK
danger: () ->
@option Button::options.DANGER
defaultSize: () ->
@size Button::sizes.DEFAULT
defaultStyle: () ->
@option()
disable: () ->
@each (index, DOMElement) ->
$element = $ DOMElement
$element.attr "disabled", "disabled" if not $element.is "a"
$element.addClass Button::DISABLED if not $element.is "button,input"
exclusiveClass = (style = "", classes) ->
@removeClass classes.join " "
if style in classes then @addClass style else this
extraSmall: () ->
@size Button::sizes.EXTRASMALL
getText = () ->
if @is "input"
types = Button::inputButtonTypes.toArray()
@map (index, DOMElement) ->
$element = $ DOMElement
if ($element.is "input") and ($element.attr "type") in types
$element.val()
else
$element.text()
.get().join()
else
$.fn.text.apply this, []
info: () ->
@option Button::options.INFO
large: () ->
@size Button::sizes.LARGE
link: () ->
@option Button::options.LINK
option: (emphasis = "") ->
exclusiveClass.call this, emphasis, Button::options.toArray()
primary: () ->
@option Button::options.PRIMARY
setText = (text) ->
if @is "input"
types = Button::inputButtonTypes.toArray()
@each (index, DOMElement) ->
$element = $ DOMElement
# cannot just have $element.val(text), here:
# if text were a function, then this @each loop would cause
# such a function to have an index value of 0 on every execution
# because $element is just one item (instead of all of the items
# in 'this').
value = if typeof text is "function" then text.call DOMElement, index, $element.text() else text
if ($element.is "input") and ($element.attr "type") in types
$element.val value
else
$element.text value
else
$.fn.text.call this, text
size: () ->
if arguments[0]?
exclusiveClass.call this, arguments[0], Button::sizes.toArray()
else
@length # original, deprecated jQuery size() function
small: () ->
@size Button::sizes.SMALL
success: () ->
@option Button::options.SUCCESS
text: () ->
if arguments[0]?
setText.apply this, arguments
else
getText.apply this, arguments
warning: () ->
@option Button::options.WARNING
#no dependencies
class (namespace cssNamespace).Code extends baseClass
constructor: () ->
args = Array::slice.call arguments
if not args[0]?
args[0] = $ "<code />"
Code.__super__.constructor.apply this, args
@append = () => Code::append.apply this, arguments
@prepend = () => Code::prepend.apply this, arguments
this
addItem = (op, items) ->
lastTry = (item) =>
$item = $ item
if $item
$.fn[op].call this, document.createTextNode $("<p />").append($item.clone()).html()
else
$.fn[op].call this, document.createTextNode "#{$item}"
for arg of items
if typeof items[arg] is "string"
$.fn[op].call this, document.createTextNode items[arg]
else if items[arg] instanceof $
$.fn[op].call this, document.createTextNode $("<p>").append(items[arg].clone()).html()
else if items[arg] instanceof Array
Code::[op].apply this, items[arg]
else if typeof items[arg] is "function"
Code::[op].call this, items[arg].call this, 0, @html()
else if isDOMNode items[arg]
switch items[arg].nodeType
when 1, 9, 11
$.fn[op].call this, document.createTextNode $("<p>").append(items[arg]).html()
when 3 then $.fn[op].call this, items[arg]
else lastTry items[arg]
else
lastTry items[arg]
this
append: () ->
args = Array::slice.call arguments
addItem.call this, "append", args
appendHTML: () ->
$.fn.append.apply this, arguments
isBlock: () ->
display = getRenderedCSS this, "display"
switch display
when "block", "inline-block", "list-item", "table", "table-caption", "table-row"
true
else false
isInline: () ->
display = getRenderedCSS this, "display"
switch display
when "inline", "inline-table", "table-cell", "table-column"
true
else false
prepend: () ->
args = Array::slice.call arguments
addItem.call this, "prepend", args
prependHTML: () ->
$.fn.append.apply this, arguments
scrollable: (scroll) ->
className = "pre-scrollable"
if scroll is false
@removeClass className
else if @isBlock()
@addClass className
this
class (namespace cssNamespace).Code.BlockCode extends (namespace cssNamespace).Code
constructor: () ->
args = Array::slice.call arguments
$element = $ "<pre />"
if args[0]?
if isDOMNode(args[0]) or typeof args[0] is "string" or args[0] instanceof $
args[0] = $element
else
args = args.unshift $element
else
args[0] = $element
BlockCode.__super__.constructor.apply this, args
this
class (namespace cssNamespace).Code.InlineCode extends (namespace cssNamespace).Code
constructor: () ->
args = Array::slice.call arguments
$element = $ "<code />"
if args[0]?
if isDOMNode(args[0]) or typeof args[0] is "string" or args[0] instanceof $
args[0] = $element
else
args = args.unshift $element
else
args[0] = $element
InlineCode.__super__.constructor.apply this, args
this |
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998880624771118,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright... | public/taiga-front/app/coffee/modules/resources/user-settings.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/resources/memberships.coffee
###
taiga = @.taiga
sizeFormat = @.taiga.sizeFormat
resourceProvider = ($config, $repo, $http, $urls, $q) ->
service = {}
service.changeAvatar = (file) ->
maxFileSize = $config.get("maxUploadFileSize", null)
if maxFileSize and file.size > maxFileSize
response = {
status: 413,
data: _error_message: "'#{file.name}' (#{sizeFormat(file.size)}) is too heavy for our oompa
loompas, try it with a smaller than (#{sizeFormat(maxFileSize)})"
}
defered = $q.defer()
defered.reject(response)
return defered.promise
data = new FormData()
data.append('avatar', file)
options = {
transformRequest: angular.identity,
headers: {'Content-Type': undefined}
}
url = "#{$urls.resolve("users")}/change_avatar"
return $http.post(url, data, {}, options)
service.removeAvatar = () ->
url = "#{$urls.resolve("users")}/remove_avatar"
return $http.post(url)
service.changePassword = (currentPassword, newPassword) ->
url = "#{$urls.resolve("users")}/change_password"
data = {
current_password: currentPassword
password: newPassword
}
return $http.post(url, data)
return (instance) ->
instance.userSettings = service
module = angular.module("taigaResources")
module.factory("$tgUserSettingsResourcesProvider", ["$tgConfig", "$tgRepo", "$tgHttp", "$tgUrls", "$q",
resourceProvider])
| 216601 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/resources/memberships.coffee
###
taiga = @.taiga
sizeFormat = @.taiga.sizeFormat
resourceProvider = ($config, $repo, $http, $urls, $q) ->
service = {}
service.changeAvatar = (file) ->
maxFileSize = $config.get("maxUploadFileSize", null)
if maxFileSize and file.size > maxFileSize
response = {
status: 413,
data: _error_message: "'#{file.name}' (#{sizeFormat(file.size)}) is too heavy for our oompa
loompas, try it with a smaller than (#{sizeFormat(maxFileSize)})"
}
defered = $q.defer()
defered.reject(response)
return defered.promise
data = new FormData()
data.append('avatar', file)
options = {
transformRequest: angular.identity,
headers: {'Content-Type': undefined}
}
url = "#{$urls.resolve("users")}/change_avatar"
return $http.post(url, data, {}, options)
service.removeAvatar = () ->
url = "#{$urls.resolve("users")}/remove_avatar"
return $http.post(url)
service.changePassword = (currentPassword, newPassword) ->
url = "#{$urls.resolve("users")}/change_password"
data = {
current_password: <PASSWORD>
password: <PASSWORD>
}
return $http.post(url, data)
return (instance) ->
instance.userSettings = service
module = angular.module("taigaResources")
module.factory("$tgUserSettingsResourcesProvider", ["$tgConfig", "$tgRepo", "$tgHttp", "$tgUrls", "$q",
resourceProvider])
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/resources/memberships.coffee
###
taiga = @.taiga
sizeFormat = @.taiga.sizeFormat
resourceProvider = ($config, $repo, $http, $urls, $q) ->
service = {}
service.changeAvatar = (file) ->
maxFileSize = $config.get("maxUploadFileSize", null)
if maxFileSize and file.size > maxFileSize
response = {
status: 413,
data: _error_message: "'#{file.name}' (#{sizeFormat(file.size)}) is too heavy for our oompa
loompas, try it with a smaller than (#{sizeFormat(maxFileSize)})"
}
defered = $q.defer()
defered.reject(response)
return defered.promise
data = new FormData()
data.append('avatar', file)
options = {
transformRequest: angular.identity,
headers: {'Content-Type': undefined}
}
url = "#{$urls.resolve("users")}/change_avatar"
return $http.post(url, data, {}, options)
service.removeAvatar = () ->
url = "#{$urls.resolve("users")}/remove_avatar"
return $http.post(url)
service.changePassword = (currentPassword, newPassword) ->
url = "#{$urls.resolve("users")}/change_password"
data = {
current_password: PI:PASSWORD:<PASSWORD>END_PI
password: PI:PASSWORD:<PASSWORD>END_PI
}
return $http.post(url, data)
return (instance) ->
instance.userSettings = service
module = angular.module("taigaResources")
module.factory("$tgUserSettingsResourcesProvider", ["$tgConfig", "$tgRepo", "$tgHttp", "$tgUrls", "$q",
resourceProvider])
|
[
{
"context": "ask for a random Chuck Norris joke\n#\n# Author:\n# Rodrigo De Frutos <darkrodry@gmail.com>\n\nmodule.exports = (robot) -",
"end": 155,
"score": 0.9998805522918701,
"start": 138,
"tag": "NAME",
"value": "Rodrigo De Frutos"
},
{
"context": "ck Norris joke\n#\n# Author:... | src/chuck-norris-icndb.coffee | darkrodry/hubot-chuck-norris-icndb | 1 | # Description
# Response with a joke from ICNDb
#
# Commands:
# hubot chuck joke - ask for a random Chuck Norris joke
#
# Author:
# Rodrigo De Frutos <darkrodry@gmail.com>
module.exports = (robot) ->
robot.respond /chuck joke/, (msg) ->
msg.http("http://api.icndb.com/jokes/random")
.get() (err, res, body) ->
msg.send JSON.parse(body).value.joke
| 106909 | # Description
# Response with a joke from ICNDb
#
# Commands:
# hubot chuck joke - ask for a random Chuck Norris joke
#
# Author:
# <NAME> <<EMAIL>>
module.exports = (robot) ->
robot.respond /chuck joke/, (msg) ->
msg.http("http://api.icndb.com/jokes/random")
.get() (err, res, body) ->
msg.send JSON.parse(body).value.joke
| true | # Description
# Response with a joke from ICNDb
#
# Commands:
# hubot chuck joke - ask for a random Chuck Norris joke
#
# Author:
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
module.exports = (robot) ->
robot.respond /chuck joke/, (msg) ->
msg.http("http://api.icndb.com/jokes/random")
.get() (err, res, body) ->
msg.send JSON.parse(body).value.joke
|
[
{
"context": "terableColumns = ->\n return [\n {\n name: 'test'\n },{\n name: 'test2'\n }\n ]\n",
"end": 1413,
"score": 0.9168381690979004,
"start": 1409,
"tag": "NAME",
"value": "test"
},
{
"context": "n [\n {\n name: 'test'\n },{\n name: 'test2'... | lib/plugins/columnFilter/columnFilter.plugin.coffee | LumaPictures/meteor-jquery-datatables | 38 | # ##### Column Filters
ColumnDrillDownFilters =
initializeColumnDrilldownFilters: ->
@prepareColumnDrilldownFilterContainer()
return @getColumnDrilldownFilterContainer()
prepareColumnDrilldownFilterContainer: ->
container = UI.renderWithData Template.dataTableColumnDrilldownFilterContainer, @getData()
@setColumnDrilldownFilterContainer container
setColumnDrilldownFilterContainer: ( markup ) ->
Match.test markup, String
@getTemplateInstance().$ColumnDrilldownFilterContainer = $( markup )
getColumnDrilldownFilterContainer: ->
if @getTemplateInstance().$ColumnDrilldownFilterContainer
return @getTemplateInstance().$ColumnDrilldownFilterContainer[ 0 ].dom.members[ 1 ] or false
Template.dataTable = _.extend Template.dataTable, ColumnDrillDownFilters
Template.dataTable.events
'click .drilldown.column-filter-widget': ( event, template ) ->
console.log template
console.log event
# * Register the Columng Filter Widget feature with DataTables
$.fn.dataTableExt.aoFeatures.push
fnInit: ( oSettings ) ->
component = oSettings.oInstance.fnGetComponent()
return component.initializeColumnDrilldownFilters()
cFeature: "W"
sFeature: "ColumnDrilldownFilters"
Template.dataTableColumnDrilldownFilterContainer.created = ->
console.log @
Template.dataTableColumnDrilldownFilterContainer.filterableColumns = ->
return [
{
name: 'test'
},{
name: 'test2'
}
]
| 75430 | # ##### Column Filters
ColumnDrillDownFilters =
initializeColumnDrilldownFilters: ->
@prepareColumnDrilldownFilterContainer()
return @getColumnDrilldownFilterContainer()
prepareColumnDrilldownFilterContainer: ->
container = UI.renderWithData Template.dataTableColumnDrilldownFilterContainer, @getData()
@setColumnDrilldownFilterContainer container
setColumnDrilldownFilterContainer: ( markup ) ->
Match.test markup, String
@getTemplateInstance().$ColumnDrilldownFilterContainer = $( markup )
getColumnDrilldownFilterContainer: ->
if @getTemplateInstance().$ColumnDrilldownFilterContainer
return @getTemplateInstance().$ColumnDrilldownFilterContainer[ 0 ].dom.members[ 1 ] or false
Template.dataTable = _.extend Template.dataTable, ColumnDrillDownFilters
Template.dataTable.events
'click .drilldown.column-filter-widget': ( event, template ) ->
console.log template
console.log event
# * Register the Columng Filter Widget feature with DataTables
$.fn.dataTableExt.aoFeatures.push
fnInit: ( oSettings ) ->
component = oSettings.oInstance.fnGetComponent()
return component.initializeColumnDrilldownFilters()
cFeature: "W"
sFeature: "ColumnDrilldownFilters"
Template.dataTableColumnDrilldownFilterContainer.created = ->
console.log @
Template.dataTableColumnDrilldownFilterContainer.filterableColumns = ->
return [
{
name: '<NAME>'
},{
name: '<NAME>'
}
]
| true | # ##### Column Filters
ColumnDrillDownFilters =
initializeColumnDrilldownFilters: ->
@prepareColumnDrilldownFilterContainer()
return @getColumnDrilldownFilterContainer()
prepareColumnDrilldownFilterContainer: ->
container = UI.renderWithData Template.dataTableColumnDrilldownFilterContainer, @getData()
@setColumnDrilldownFilterContainer container
setColumnDrilldownFilterContainer: ( markup ) ->
Match.test markup, String
@getTemplateInstance().$ColumnDrilldownFilterContainer = $( markup )
getColumnDrilldownFilterContainer: ->
if @getTemplateInstance().$ColumnDrilldownFilterContainer
return @getTemplateInstance().$ColumnDrilldownFilterContainer[ 0 ].dom.members[ 1 ] or false
Template.dataTable = _.extend Template.dataTable, ColumnDrillDownFilters
Template.dataTable.events
'click .drilldown.column-filter-widget': ( event, template ) ->
console.log template
console.log event
# * Register the Columng Filter Widget feature with DataTables
$.fn.dataTableExt.aoFeatures.push
fnInit: ( oSettings ) ->
component = oSettings.oInstance.fnGetComponent()
return component.initializeColumnDrilldownFilters()
cFeature: "W"
sFeature: "ColumnDrilldownFilters"
Template.dataTableColumnDrilldownFilterContainer.created = ->
console.log @
Template.dataTableColumnDrilldownFilterContainer.filterableColumns = ->
return [
{
name: 'PI:NAME:<NAME>END_PI'
},{
name: 'PI:NAME:<NAME>END_PI'
}
]
|
[
{
"context": " : 'us-east-1'\n access_key : generateRandomString()\n secret_key : generateRandomStri",
"end": 530,
"score": 0.9582037329673767,
"start": 510,
"tag": "KEY",
"value": "generateRandomString"
},
{
"context": "enerateRandomString()\n ... | workers/social/testhelper/models/computeproviders/credentialhelper.coffee | ezgikaysi/koding | 1 | { async
expect
withConvertedUser
generateRandomEmail
generateRandomString } = require '../../index'
JCredential = require '../../../lib/social/models/computeproviders/credential'
generateMetaData = (provider) ->
meta = switch provider
when 'google'
projectId : generateRandomString()
privateKeyContent : generateRandomString()
clientSecretsContent : generateRandomString()
when 'aws'
region : 'us-east-1'
access_key : generateRandomString()
secret_key : generateRandomString()
storage_size : 2
instance_type : 't2.nano'
when 'koding'
type : 'aws'
region : region ? SUPPORTED_REGIONS[0]
source_ami : ''
instance_type : 't2.nano'
storage_size : storage
alwaysOn : no
when 'custom', 'userInput' then {}
else 'unimplemented provider'
return meta
CREDENTIALS = {}
createCredential = (client, options, callback) ->
options.provider ?= 'aws'
options.meta ?= generateMetaData options.provider
options.title ?= 'koding'
JCredential.create client, options, (err, credential) ->
addToRemoveList client, credential.identifier if credential
console.log '>>> ERROR ON CREATE CREDENTIAL', err if err
callback err, { credential }
withConvertedUserAndCredential = (options, callback) ->
[options, callback] = [callback, options] unless callback
options ?= {}
withConvertedUser options, (data) ->
{ client } = data
createCredential client, options, (err, { credential }) ->
expect(err).to.not.exist
data.credential = credential
callback data
removeGeneratedCredentials = (callback) ->
CredentialStore = require '../../../lib/social/models/computeproviders/credentialstore'
queue = [ ]
(Object.keys CREDENTIALS).forEach (identifier) -> queue.push (next) ->
CredentialStore.remove CREDENTIALS[identifier], identifier, (err) ->
expect(err).to.not.exist
next()
async.series queue, callback
addToRemoveList = (client, identifier) ->
CREDENTIALS[identifier] = client
module.exports = {
addToRemoveList
createCredential
generateMetaData
removeGeneratedCredentials
withConvertedUserAndCredential
}
| 182064 | { async
expect
withConvertedUser
generateRandomEmail
generateRandomString } = require '../../index'
JCredential = require '../../../lib/social/models/computeproviders/credential'
generateMetaData = (provider) ->
meta = switch provider
when 'google'
projectId : generateRandomString()
privateKeyContent : generateRandomString()
clientSecretsContent : generateRandomString()
when 'aws'
region : 'us-east-1'
access_key : <KEY>()
secret_key : <KEY>()
storage_size : 2
instance_type : 't2.nano'
when 'koding'
type : 'aws'
region : region ? SUPPORTED_REGIONS[0]
source_ami : ''
instance_type : 't2.nano'
storage_size : storage
alwaysOn : no
when 'custom', 'userInput' then {}
else 'unimplemented provider'
return meta
CREDENTIALS = {}
createCredential = (client, options, callback) ->
options.provider ?= 'aws'
options.meta ?= generateMetaData options.provider
options.title ?= 'koding'
JCredential.create client, options, (err, credential) ->
addToRemoveList client, credential.identifier if credential
console.log '>>> ERROR ON CREATE CREDENTIAL', err if err
callback err, { credential }
withConvertedUserAndCredential = (options, callback) ->
[options, callback] = [callback, options] unless callback
options ?= {}
withConvertedUser options, (data) ->
{ client } = data
createCredential client, options, (err, { credential }) ->
expect(err).to.not.exist
data.credential = credential
callback data
removeGeneratedCredentials = (callback) ->
CredentialStore = require '../../../lib/social/models/computeproviders/credentialstore'
queue = [ ]
(Object.keys CREDENTIALS).forEach (identifier) -> queue.push (next) ->
CredentialStore.remove CREDENTIALS[identifier], identifier, (err) ->
expect(err).to.not.exist
next()
async.series queue, callback
addToRemoveList = (client, identifier) ->
CREDENTIALS[identifier] = client
module.exports = {
addToRemoveList
createCredential
generateMetaData
removeGeneratedCredentials
withConvertedUserAndCredential
}
| true | { async
expect
withConvertedUser
generateRandomEmail
generateRandomString } = require '../../index'
JCredential = require '../../../lib/social/models/computeproviders/credential'
# Builds fake credential metadata for the given provider, for use as a
# JCredential test fixture.
#
# @param {String} provider  one of 'google', 'aws', 'koding', 'custom',
#   'userInput'; any other value yields the marker string
#   'unimplemented provider'.
# @return {Object|String} metadata object (or marker string) for the provider
generateMetaData = (provider) ->

  meta = switch provider

    when 'google'
      projectId            : generateRandomString()
      privateKeyContent    : generateRandomString()
      clientSecretsContent : generateRandomString()

    when 'aws'
      region        : 'us-east-1'
      # Random placeholders: these are test fixtures, never real AWS keys.
      # (The original literal values were redacted from this source; random
      # strings match the fixture style of the 'google' branch above.)
      access_key    : generateRandomString()
      secret_key    : generateRandomString()
      storage_size  : 2
      instance_type : 't2.nano'

    when 'koding'
      type          : 'aws'
      # NOTE(review): `region`, `SUPPORTED_REGIONS` and `storage` are not
      # defined anywhere in this file — presumably expected in an outer
      # scope. Confirm before exercising this branch.
      region        : region ? SUPPORTED_REGIONS[0]
      source_ami    : ''
      instance_type : 't2.nano'
      storage_size  : storage
      alwaysOn      : no

    when 'custom', 'userInput' then {}

    else 'unimplemented provider'

  return meta
# Registry of credentials created during a test run, awaiting cleanup:
# maps credential identifier -> the client that created it.
CREDENTIALS = {}
# Creates a JCredential with sensible test defaults and registers the new
# credential for later cleanup via addToRemoveList.
#
# Defaults applied when missing on `options`:
#   provider -> 'aws', meta -> generateMetaData(provider), title -> 'koding'
#
# callback receives (err, { credential }).
createCredential = (client, options, callback) ->

  options.provider ?= 'aws'
  options.meta     ?= generateMetaData options.provider
  options.title    ?= 'koding'

  handleResult = (err, credential) ->
    addToRemoveList client, credential.identifier  if credential
    console.log '>>> ERROR ON CREATE CREDENTIAL', err  if err
    callback err, { credential }

  JCredential.create client, options, handleResult
# Converts a user via withConvertedUser, then creates a credential for that
# user's client and attaches it to the yielded data as `data.credential`.
# `options` may be omitted entirely, in which case the single argument is
# treated as the callback.
withConvertedUserAndCredential = (options, callback) ->

  unless callback
    [options, callback] = [callback, options]
  options ?= {}

  withConvertedUser options, (data) ->
    createCredential data.client, options, (err, { credential }) ->
      expect(err).to.not.exist
      data.credential = credential
      callback data
# Removes every credential recorded in CREDENTIALS, one at a time, then
# invokes callback. A failing removal fails the test via expect.
removeGeneratedCredentials = (callback) ->

  CredentialStore = require '../../../lib/social/models/computeproviders/credentialstore'

  tasks = for own identifier, client of CREDENTIALS
    do (identifier, client) ->
      (next) ->
        CredentialStore.remove client, identifier, (err) ->
          expect(err).to.not.exist
          next()

  async.series tasks, callback
# Records a credential identifier (keyed to its owning client) so that
# removeGeneratedCredentials can delete it during cleanup.
addToRemoveList = (client, identifier) ->
  CREDENTIALS[identifier] = client
# Public API of this credential test-helper module.
module.exports = {
addToRemoveList
createCredential
generateMetaData
removeGeneratedCredentials
withConvertedUserAndCredential
}
|
[
{
"context": "ileoverview Tests for no-dupe-keys rule.\n# @author Ian Christian Myers\n###\n\n'use strict'\n\n#-----------------------------",
"end": 79,
"score": 0.9997736811637878,
"start": 60,
"tag": "NAME",
"value": "Ian Christian Myers"
}
] | src/tests/rules/no-dupe-keys.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-dupe-keys rule.
# @author Ian Christian Myers
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-dupe-keys'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
# Build a RuleTester that parses fixtures with the local CoffeeScript
# parser (the plugin root, three directories up from this spec).
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
# Exercise ESLint's core no-dupe-keys rule on CoffeeScript sources:
# `valid` snippets must report nothing; each `invalid` snippet must report
# the duplicated key via the rule's 'unexpected' messageId. Cases that are
# commented out cover JS-only syntax (getters/setters) with no CoffeeScript
# equivalent.
ruleTester.run 'no-dupe-keys', rule,
valid: [
'foo = { __proto__: 1, two: 2}'
'x = foo: 1, bar: 2'
# '+{ get a() { }, set a(b) { } };'
'x = { a: b, [a]: b }'
'x = { a: b, ...c }'
# ,
# code: 'var x = { get a() {}, set a (value) {} };'
# parserOptions: ecmaVersion: 6
'x = a: 1, b: { a: 2 }'
'{a, a} = obj'
]
invalid: [
code: "x = { a: b, ['a']: b }"
errors: [
messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
]
,
code: 'x = { y: 1, y: 2 }'
errors: [
messageId: 'unexpected', data: {name: 'y'}, type: 'ObjectExpression'
]
,
code: 'foo = { 0x1: 1, 1: 2};'
errors: [
messageId: 'unexpected', data: {name: '1'}, type: 'ObjectExpression'
]
,
code: 'x = { "z": 1, z: 2 }'
errors: [
messageId: 'unexpected', data: {name: 'z'}, type: 'ObjectExpression'
]
,
code: '''
foo = {
bar: 1
bar: 1
}
'''
errors: [messageId: 'unexpected', data: {name: 'bar'}, line: 3, column: 3]
# ,
# code: 'var x = { a: 1, get a() {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
# ]
# ,
# code: 'var x = { a: 1, set a(value) {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
# ]
# ,
# code: 'var x = { a: 1, b: { a: 2 }, get b() {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'b'}, type: 'ObjectExpression'
# ]
]
| 213353 | ###*
# @fileoverview Tests for no-dupe-keys rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-dupe-keys'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
# Build a RuleTester that parses fixtures with the local CoffeeScript
# parser (the plugin root, three directories up from this spec).
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
# Exercise ESLint's core no-dupe-keys rule on CoffeeScript sources:
# `valid` snippets must report nothing; each `invalid` snippet must report
# the duplicated key via the rule's 'unexpected' messageId. Cases that are
# commented out cover JS-only syntax (getters/setters) with no CoffeeScript
# equivalent.
ruleTester.run 'no-dupe-keys', rule,
valid: [
'foo = { __proto__: 1, two: 2}'
'x = foo: 1, bar: 2'
# '+{ get a() { }, set a(b) { } };'
'x = { a: b, [a]: b }'
'x = { a: b, ...c }'
# ,
# code: 'var x = { get a() {}, set a (value) {} };'
# parserOptions: ecmaVersion: 6
'x = a: 1, b: { a: 2 }'
'{a, a} = obj'
]
invalid: [
code: "x = { a: b, ['a']: b }"
errors: [
messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
]
,
code: 'x = { y: 1, y: 2 }'
errors: [
messageId: 'unexpected', data: {name: 'y'}, type: 'ObjectExpression'
]
,
code: 'foo = { 0x1: 1, 1: 2};'
errors: [
messageId: 'unexpected', data: {name: '1'}, type: 'ObjectExpression'
]
,
code: 'x = { "z": 1, z: 2 }'
errors: [
messageId: 'unexpected', data: {name: 'z'}, type: 'ObjectExpression'
]
,
code: '''
foo = {
bar: 1
bar: 1
}
'''
errors: [messageId: 'unexpected', data: {name: 'bar'}, line: 3, column: 3]
# ,
# code: 'var x = { a: 1, get a() {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
# ]
# ,
# code: 'var x = { a: 1, set a(value) {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
# ]
# ,
# code: 'var x = { a: 1, b: { a: 2 }, get b() {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'b'}, type: 'ObjectExpression'
# ]
]
| true | ###*
# @fileoverview Tests for no-dupe-keys rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-dupe-keys'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
# Build a RuleTester that parses fixtures with the local CoffeeScript
# parser (the plugin root, three directories up from this spec).
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
# Exercise ESLint's core no-dupe-keys rule on CoffeeScript sources:
# `valid` snippets must report nothing; each `invalid` snippet must report
# the duplicated key via the rule's 'unexpected' messageId. Cases that are
# commented out cover JS-only syntax (getters/setters) with no CoffeeScript
# equivalent.
ruleTester.run 'no-dupe-keys', rule,
valid: [
'foo = { __proto__: 1, two: 2}'
'x = foo: 1, bar: 2'
# '+{ get a() { }, set a(b) { } };'
'x = { a: b, [a]: b }'
'x = { a: b, ...c }'
# ,
# code: 'var x = { get a() {}, set a (value) {} };'
# parserOptions: ecmaVersion: 6
'x = a: 1, b: { a: 2 }'
'{a, a} = obj'
]
invalid: [
code: "x = { a: b, ['a']: b }"
errors: [
messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
]
,
code: 'x = { y: 1, y: 2 }'
errors: [
messageId: 'unexpected', data: {name: 'y'}, type: 'ObjectExpression'
]
,
code: 'foo = { 0x1: 1, 1: 2};'
errors: [
messageId: 'unexpected', data: {name: '1'}, type: 'ObjectExpression'
]
,
code: 'x = { "z": 1, z: 2 }'
errors: [
messageId: 'unexpected', data: {name: 'z'}, type: 'ObjectExpression'
]
,
code: '''
foo = {
bar: 1
bar: 1
}
'''
errors: [messageId: 'unexpected', data: {name: 'bar'}, line: 3, column: 3]
# ,
# code: 'var x = { a: 1, get a() {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
# ]
# ,
# code: 'var x = { a: 1, set a(value) {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'a'}, type: 'ObjectExpression'
# ]
# ,
# code: 'var x = { a: 1, b: { a: 2 }, get b() {} };'
# parserOptions: ecmaVersion: 6
# errors: [
# messageId: 'unexpected', data: {name: 'b'}, type: 'ObjectExpression'
# ]
]
|
[
{
"context": "ageContent[0].content}</p>\n <p>\n A Kristensen,\n A Neil,\n A. Nikstat,\n ",
"end": 342,
"score": 0.9998148083686829,
"start": 330,
"tag": "NAME",
"value": "A Kristensen"
},
{
"context": "\n <p>\n A Kristense... | app/about/authors.cjsx | zooniverse/chimpandsee | 2 | React = require 'react/addons'
cx = React.addons.classSet
Authors = React.createClass
displayName: 'Authors'
render: ->
<div className="authors-page">
<section className="about-section content">
<h2>{@props.pageContent[0].header}</h2>
<p>{@props.pageContent[0].content}</p>
<p>
A Kristensen,
A Neil,
A. Nikstat,
A. Stamatiou,
A. V. Asha,
A. Ziegler,
A.C. Preger,
A.zorba,
A13FAB,
AEagon,
AHOtuulikki,
AJ Nickas,
AJ1984,
AJTOTHEMAXX13,
AKGal,
ALCAVTAY,
ALMasching,
ALP1,
ALR20,
AMGoodwin,
APav,
ARD1999,
AUDRAIN G.,
AVWilliams,
A_MiLe,
Aaniya Williams,
Aaron Bürger,
Aaron Fisher,
Aaron SObel,
Aaron Smith,
Aaronaguila1,
Abbeyplankinton,
Abby Swanson,
Abhinav Dave,
Abigail Roberts,
AbigailKullberg,
Abrooksher,
Abrougha,
Abuelanow,
Acipi9,
Adagietto,
Adaku,
Adam McMaster,
Adam Riddle,
Adam Trimm,
AdamHolton,
Adammada1,
Adamvanc,
Addison Gruber,
Adedoyin Adegunwa,
Adeelia S. Goffe,
Adelya,
Adelyncj,
AdleyOliveira,
Adrian Cawdery,
Adrian Serpas,
AdrianaM2001,
Adrien Boël,
Adrien Jouret,
Aeffchen,
Aegypius,
Aela,
AeshnaGrandis,
AfricasFinest,
Agent_Bookworm,
AgoraChat,
Ahearyn,
Aidan OFM,
Aigh90,
Aikane,
Ailurus,
Ailz79,
Aimee Haran,
Aimee L Stephens,
AimeeJo,
Ainmhi,
Aislin McClarnon,
Aiygeo,
Akaito,
Akat13nmkb,
Akoyan,
Akyu,
AlBatin,
AlJC,
Alagorn,
Alan Bell,
Alana McMillan,
AlanaKM,
Alanah Stephens-Savant,
Alanatkinson123,
Alba,
Alealonna,
Alec Garfinkel,
Alejandrina Cristia,
Alejandro Fernandez,
Alejandro Fernandez III,
Alejandro Hernandez,
Alejandro Linconao,
Alejandro Mateos Fernández,
Alejandro Tobon,
Aleksandra Khatova,
Alex Bowyer,
Alex Q. Xu,
Alex Schmitt,
AlexHorn103,
AlexLiamJensen,
Alexa Lander,
Alexa O'Flah,
Alexander Beilby,
Alexander Gilliland,
Alexandra,
Alexandra Howard,
Alexandra Kroliczek,
Alexandra Linder,
Alexandra1,
AlexandraD16000,
Alexandre Larue,
Alexandre Pires,
AlexandreLamothe,
Alexis,
Alexis Castro,
Alexis_V,
Alfred Nettleingham,
Ali_Oldenburg,
Alice Chalmers,
Alice.Rose182,
Aliceee11,
Alico,
Aligeeach,
Alison,
Alison Davidson,
Alison Lewis,
Alison Shaw,
Alison Ward,
AlisonField,
Alister Elliot Puddifer,
Alix2017,
Allan Michael,
AllegraS,
Allerik,
AlligatorCreator,
Allison,
Allison Chiles,
Allison Epling,
Allison M Priebe,
Allison Wacho ,
Alpha27,
Alvgeir,
AlxV,
Alyfc210,
Alyssa ,
Alyssa King,
Alyssa Watts,
Alyssa.G,
Amanda Cogan Barber,
Amanda F,
Amanda Hartman,
Amanda Kruvand,
Amanda Mount,
Amanda S. Webb,
Amanda Sullivan,
Amanda1961,
Amanda1975,
AmandaG,
Amber Adams,
Amber Beard,
Amber Van Harlinger,
AmeliaW,
Amelie T.,
Ameliee,
Americanthai,
Ami Shirley,
Amily.Nash,
Amiryfey,
Amita,
Ammathor,
Amorina,
Ampatent,
Amr Tarek,
Amruta Bhat,
Amulus,
Amy Hangen ,
Amy Kalbfleisch,
Amy Manina,
Amy S,
Amy T,
Amy1964,
Ana Page,
AnalytischeATI,
Anastasia Haack,
Anastasia44mk,
Anastasija Grigorjeva,
Anastassia,
Anaïs ,
Anca Popa,
Andersonarmy,
AndieLollo,
Andr0meda,
Andrea Anserson,
Andrea Burciaga ,
Andrea Wiggins,
Andreas Blaeser,
Andreas38871,
Andreea Bobariu,
Andreia Mesquita,
Andrej Galinac,
Andres Sierra,
Andrew,
Andrew Ball,
Andrew Buskey,
Andrew DeLuca,
Andrew Dickens,
Andrew Domman,
Andrew G. Mumma,
Andrew Goldsmith,
Andrew Guzik,
Andrew Kramb,
Andrew Pacious,
Andrew Whiten,
Andrew de Haast,
AndrewCave,
AndrewJTalon,
Andrina Alvarado,
Andy Becker,
Andy Stevens,
Ang-valerie,
Angel60SX,
Angela Calhoun,
Angela Phillips-Smith,
Angelia,
Angelica Gerlach,
AngelicaKaufmann,
AngelynR7480,
Angie Lebo,
AngieEads,
Angus.pearson,
Anik Boileau,
Aniko Nagyne Vig ,
AnilkumarA,
Anisha Prinzessin Saszowski von Saszow,
Anita,
Anita Poon,
Anita Springer,
Anja Elise Buljo Hansen,
Anja Hoitz,
Anja Landsmann,
Anja van Oostrum,
Anja66,
Ann D. Wilson,
Ann Kronick, PhD ,
Ann-Kathrin Daum,
Anna,
Anna Allen,
Anna Gruchała,
Anna Kobak,
Anna Nowakowski,
Anna Ramsden,
Anna Scheidel,
Anna Taylor,
Anna Wermert,
Anna and Audrey,
Anna-Banana,
AnnaTee,
Annabz,
Annau2,
Anne John,
Anne Kelly,
Anne Rozekrans ,
Anne Solaret,
AnneMusson,
AnnePackrat,
Annepet,
Annie Christou,
Annie Dillon,
Annie Fourny,
AnnieOrlando,
Annie_Wiener,
Annika Lessing,
AnnikaC,
Ansel Lewis,
Antek7,
Antemmasia,
Anthony John Chapman,
Anthony Pulvino,
Anthonyames,
Anthonyrice89,
Anthro-ginger,
Antihelios,
Antoinetje1959,
Anton Polukhin,
Antonette Lestelle,
Antonio Gouveia,
Antwan2004,
AnzelGreyling,
Aoibh.Costello,
AoifeN,
Aparna Krishnan,
April Baker,
Aqua_Fairy,
Aralest,
Archiesgirl,
Ariane Miyasaki,
Ariane Ramaekers,
ArianeCooremans,
Ariani,
Arianne Serrano,
Arielle Hughes,
Arius, Susan, and Demetrius,
Arlene Beech,
Arlette ,
Arman,
Aron2007,
AronC,
Arrakatanga33,
Arsinowey,
Artemision,
ArturoG,
AruAxe,
ArwingPilot64,
Asaan Shakir,
Asadora,
Asampson,
Ascencia Fike Komala,
Asd3r,
Ashaheedq,
Ashley ,
Ashley Bejarano,
Ashley Burhoe,
Ashley Morren,
Ashley Ward,
AshleySawyer,
Ashling Donnelly,
Ashlyn,
Ashton Colbert,
Asillage,
Astra Navén,
Astro_Max,
Astromarm,
AstronautCapybara,
Astyanaxx,
Ataticek,
AtomAnt,
Atti,
Atti_der_Kleine,
Audburrito,
Audrey Carstensen,
Audrey Philippon,
Augdog,
Augustine Cordonnier,
Aukej,
AuntieA,
Aurelide,
AuroraCV,
Austin Duarte,
Austin M,
Austin Simmon,
Ava B,
Ava Strybosch,
AvanW,
Avatarnavi,
Avery Allen,
Avery11,
Aw80,
AwesomeAva,
Ayanori,
Azra Say,
Azurite,
B ,
B. D. Simmons,
B. Eggen,
B.J.Parker,
BCC,
BDNeil,
BKNBLK13,
BKaller,
BRutherford,
BS1234,
BWPLS29,
BZW77,
B_Grenette,
B_McC65,
BabettevanRijn,
Badgerx,
Bahroze Rakeen,
BakerStreet,
Balduin Landolt,
Banana,
BananaChimp,
Bananenbert,
BarabasE,
Barbara,
Barbara Barnett,
Barbara Baum,
Barbara Ottnad,
Barbara Quein,
Barbaras,
Barbera,
Barina Haner ,
Barrie Roberts,
Bart Nijssen,
Bart Van Audenhove,
Basil Christopher,
Bastian_Peripheral_Area,
BattyElly,
Baudratte Djiogo,
BbnV,
BeHotSiii,
Beanysteve,
Bearaffe,
BearmanK,
BeaterGirl,
Beatrice Geissinger Cutchinsf,
BeauRoch,
Beckster,
Becky Rother,
Becky.k,
BeckyK8,
Beecher,
Beetleboy,
Bekki,
Belbelleb,
BelleB,
Ben,
Ben Dover,
Ben Havlicek,
Ben Jahnke,
Ben Reeve,
Ben W,
Ben Walters,
Ben Wiels,
Ben and Emma Kiermaier,
BenAdams34,
BenEmma,
BenStraw,
Benafsha Wahab,
Benjamin Campbell,
Benjamin Schwartz,
Benjamin Wilson,
BenjaminW,
Benjamin_1,
Benshlomo,
Benwickersstg,
Berlherm,
Bernd Kiesling,
Berner-88,
Bernice Bond,
Berry101,
Bertnorb,
Besotted,
Bess Holt,
Bestboy,
Bestlandlord,
Beth,
Beth Sweet,
Beth8,
Bethany Smith,
Bethany6659,
Bethels54,
Betmac,
Beto100,
BettyBuh,
Beverly Gundrum,
Bfurn4s,
Bgrundy,
Bianca Tyrell,
Bibliophile,
BicTigrou,
Big-Dipper,
BiggerJ,
Bill Stephens,
Bill Wagner,
Billy-n-Mandy,
BillyBrad,
Binerexis,
BiologyDrew,
Biologyzone,
Birgit Bossbach,
Birgit Sun,
Biti,
BjornPB,
Björn Riesbeck,
BlackLotos,
Blackdraumdancer,
Blactyde,
Blake,
Blauvogel,
BlightEdge,
BlindOracle,
BlossLearningSpot,
BlueGull,
BlueWhovian,
Blumi,
Bmnewcom2005,
Bob Dupuy,
BobConner,
Bobby Harrell,
Bonnie King,
Bonnie123,
BonnieC,
Bonus,
Bonut,
Boqboq,
BorisBee,
Boumans_,
Bozinator,
BradToTheBone,
BradWilliam91,
Bradner C. Jones,
Brady G,
Brady Rae,
BrainstormCreativity,
Branden Collins,
Brandi7293,
BrandiAlba,
Brandon,
Brandon Harris ,
Brandon Hayes,
Brandon Michael Edelbach,
Brandon Trawick,
Brandon Walker,
Brandy Gott,
Brandy Mae Morris,
Brangela,
Branson White,
Breanna Bushnell,
Brechtje Siobhan Veenstra,
Bredit,
Bregtje,
Brent Sigman,
BrewterBohn,
BriPriUK,
Briana Harder,
Brianna Carter,
Brianna Linette,
Brianne A. McClaflin,
Bridgitte,
BrielleJMBaker,
Brittany Barlow,
Brittany E. Jones,
Brittany Shelton,
Brittenie,
Britti,
Brnadie Aucoin,
Brodstier,
Bronte,
Bronty Hannah Layton,
Brontë ,
Bronze,
Brooke,
Brooke Beecroft,
Bruce Buddin,
Brunitski,
Bruno Lefèvre,
Bryn Middleton ,
Brytt,
BubbleMan,
Bubo,
BuffS,
Burbear1,
Buwa,
Byrdie,
C A Byrne,
C. Hope Bias,
CATALIN NAFTANAILA,
CCCC-Unit,
CHSAPBio,
CHaRLieDonTsURf,
CJF64,
CJLSMITH,
CK Chung,
CLilley,
COMPUTIAC,
COwenSmith,
CRoC,
CThomas,
CTidwell3,
C_McC,
CacaCrow,
Cade Richardson,
Caecilia42,
Caitlin,
Caitlin F,
Caitlin Hier,
Caitlyn Fisher,
Caitlyn Riehle,
CalJames,
Caleb,
CaliforniaBear,
Callum Brown,
Camda5585,
Camdenmo,
Cameron,
Cameron Schwab,
Cameron Vinyard,
Cameron12,
Campbell Allen,
Camrynn,
Canckaitis,
Candace Trent,
Candice Sooknarine,
Candlemas,
Candy Johnson-Brown,
CaoimheK,
Cape2cape,
Capitano78,
Captain_Ruby,
Captain_skug,
Cara Michas,
Cara_1981,
Carissa Kowalski Dougherty,
Carl Gylling,
Carla D. McKinley,
Carlos German,
Carly Kuva,
Caro,
Carol Hargreaves,
Carolanne Grogan,
Caroline Chittenden,
Caroline Esteves,
Caroline L Shearer,
Caroline Lavin,
Caroline Stern,
Caroline Sumners,
Carolo52,
Carolyn Bolus ,
Carolyn Schlesinger,
Carrie Bamper,
Carrie Bowie,
Carrie Ekeroth,
Carrie Ryman,
CarrieLue,
CarrieVS,
Carrienails,
CarsonClan92,
Carsten Østerlund,
Cartycam,
Caryl62,
Caryn Bunshaft,
Casen Mackin,
Cassidy Janelle Ward,
Cassie Crowe,
CastawayAngel,
Caswell,
Catarina,
Cath Cockeram,
Catherine,
Catherine Calderon-Lajoie,
Catherine Ellis,
Catherine Hayday,
Catherine Marsh,
Catherine Ward,
Catherine Yakubik,
Cathy Conroy,
Cathy Emanuel,
Cathérine Delnooz,
CatsEyes,
CatyB,
Cayt Landis,
Cazzoo,
Cbramkumar,
Cecilia Nicholas,
Cecilia Tanami Beck,
CeciliaTidwell,
Cedar Student,
CefalofoRosso,
Celeste Roche,
Celestial_Caticorn,
Celia Vann,
Celiaros,
Cellendylle,
Celurca,
Cenkdem,
Ceri W,
Chairun Siregar,
Chanamon Chaiyakul,
Chanel,
Channille VanStory,
Char2002,
Chara Lang,
Charity Talbot,
Charlene Wood,
Charles Caldwell,
Charles Durrin,
CharlesDickinson,
CharlieGazer,
CharlieNewton,
Charlotte Ang,
Charlotte Burton-Bell,
Charlottie96,
Charmayne Anderson ,
Chas,
Chase Allen,
Cheavus,
Checkheck,
Cheesemouse,
Chelsea Williams ,
ChelseaL,
Cherita Marquez,
Cherlyn Bryans,
Chewyswimmer,
Cheyenne baird,
Chezza6,
Chg507,
ChiliP2000,
Chimper,
Chimpfinder,
Chimpinator,
Chippietom,
ChizCT,
Chloe,
ChloeHaigh,
Chouchou_Bidou,
Chris Layton-Watts,
Chris Lintott,
Chris N. Michels,
Chris Nicholls,
Chris Otahal,
Chris Palm,
Chris Pringuer,
Chris Racicot,
Chris Sfanos,
Chris Swanepoel,
Chris Wild,
Chris Williamson,
ChrisMM76,
ChrisRogue,
Chrissena Weaver,
Christa Oehler,
Christian,
Christian Köttschau ,
Christian Werner,
Christina Erhardt,
Christina Lasam,
Christina Perillo,
Christina Unzicker,
Christina williams,
Christine Kraft,
Christine Murcott,
Christopher David Hancock,
Christopher Herden,
Christopher J.H. Neufeld,
Christopher Keil,
Christopher Miles,
Christopher S. Bird,
Chube,
Cindy Nguyen,
Cinnia Rosa Matthews,
Cissy,
Claire,
Claire Egan,
Claire Morgan,
ClaireAB,
ClaireG,
ClaireMorley,
Clara Sonne Korsholm,
Clare ,
Clare Gibson,
Clare McCormack,
Clareyclarey,
Classof2015,
Claudia,
Claudia Breu & Marco Franz,
Claudia Christina Buron,
Claudia Wisniewski,
Claudius Rompeltien,
Claudybell,
Claus114-DK,
Clay Garner,
Clefairy131,
Clemdalfit,
Cletus2014,
Clifford Michael Christ,
Clode13,
Clue4fun4,
Cmick,
Cmk923,
CoconutPete90,
Cody Murphy,
Cody Stevens,
Codyking24,
Cole_Durden,
Coleman Krawczyk,
Colin Davies,
Colin Izatt,
Coline Vaysse,
Colombier,
ColourFromSpace,
Colton,
CommanderMander,
Coneth,
Connie Caballero,
Connie Tayler,
Connor Hann,
Connor James,
ConnyBenny,
Constance Hilliard ,
Constantino Dragicevic,
CooperSchulze,
Copper54,
Coral Nolan,
Corbin,
Corcaroli,
Cordae McKelvy ,
Corianne Wilson,
CoriellK18,
Corinna Reinicke,
Cornelia Huerzeler,
Corrie Herrick ,
Corrinne Florance,
CoryLehan,
CosimoMontagu,
CosmicLatte,
CosmicZephyr,
Cosmonautic_Ape,
Costanza111,
Costimos,
Cougarrakira,
Courtney,
Courtney Adkins,
Courtney Drew ,
Courtney Thomas,
Craen,
Crawfork,
CrazyChicken,
Crazycatz935,
Crentist,
Creslin73,
CressBookworm,
CricketG,
Crinoline,
Cristian Gruppi,
Cristóbal,
Crlucas322,
Cromby3,
Croquet,
Cruuux,
CryptoLight1,
Csili,
Culbin,
Culdrum,
Curculionidae,
CvanNoort,
Czandigar,
D. W. Szomm,
D1verDad,
DB,
DBell87,
DEP,
DIDDLE,
DJ Riley,
DJAI1979,
DK86,
DM Ravnsborg,
DUDONE2,
Daikoro,
Daisy-May,
Daisy_of_Doom,
Daja_GAlaxie,
Dale W. Russell, PhD,
Dalitasdrain,
Dalta,
Damir.Masic,
Damon22,
Dan Davis ,
Dan iel Hinton,
Dan987,
Dana23,
Danel890,
DanelD,
Danell711,
Dangerfield1982,
DangerousDD,
Danica Hershey,
Daniel,
Daniel ,
DanielCrumpet,
Daniela Diina,
Danieljc,
Danielle Evans,
Danielle Garner,
Danielle Gelpi,
Danielle Hartey,
Danielle Kenney,
Danielle Lawton,
Danielle Marston,
Danielle Rahmoeller,
DanielleFox-Collins,
DaniloStadelli,
Dannette Greenslade,
Dara Patrascu,
DarbyHeart1,
Daria Caria,
Daria Kolotov,
Daria Smolkina,
Darren McRoy,
Darren Morris,
Darth.Vader,
DarthBec,
DatAlien,
DataDroid,
Dave Henderson,
DavePallant,
Dave_Reynolds,
Davejaw,
David BROWNE,
David Bygott,
David Ford,
David Glasgow, Jr.,
David Graybeal,
David James Facteau,
David Kummetz,
David Loving,
David M. Ghelfi,
David Maetschke,
David Marshall,
David Powell,
David Wolber,
DavidBHamilton,
Dawn Hartsuff,
Dawna Wallis,
Dawne Coyston,
DazzlingDaisy,
Dduncan13,
DeMaddin,
DeafieGamer,
Deanna,
Deanna J. Boothby,
Deanna Walkom,
DearClaudio,
DearestPuppy,
DebJ,
DebNicoll,
Debbie McWilliam,
Debbie Veitch,
Debbinsky,
Deborah Legoff,
Deborah1955,
DeborahV,
Debra Rene Clark ,
Debra Schutz,
Debra Tardif,
Dee-Moe,
Defender,
Deichpflanze,
Deidre R. Warner,
Deirdre Weary,
Della,
Della Riley,
Denehbe,
Denese McArthur, PhD,
Denise Kelsey,
Denise Sanabria,
DenmanGuy,
Dennis Ward,
Dent94,
Deputy VanHalen,
Der_Gregor,
Derek Middleton,
Derek Silver,
Desi Gensch,
Desiree Jaeger-Fine ,
Desiree Willemse,
Devilstower,
Devon Pollard,
Dewdove,
DiJoJo,
Diana P,
Diana Seymour,
DianamAtkinson,
Diandra.mr,
Diane Farquharson,
Diego,
Dielle Monis,
Diemorfeldinies,
Dinahsaw,
DinoD123,
DinoGuy25,
Dinopenguin,
DiscoNixon,
Divas3458,
Dixybird,
Djhorsegirl,
DknightNZ,
Dlf143,
Dmitri Siegel,
Dod,
Doktor Semmele,
Dominik Głębocki,
Don Pichu,
DonGiovanni,
Don_S,
Donald Parker,
DonaldWM,
Dongguilla,
Donna,
Donna Brander,
Dorienne Jaffé,
Doschps,
DotMatrix,
Dotti,
Doubleknot888,
Dr Georgette Kluiters,
Dr Laura Anne Rollison,
Dr Rebecca Whitehead,
Dr. Georgette Plemper van Balen,
Dr. Petra Bolte-Picker,
Dr. Stefan Reisner,
Dr. Steiner,
Dr.PepperBrony,
DrDaveJ,
DrHox,
DrPongo,
DrRogg,
Dracus,
Dragonffrind,
Dragows,
DrasticDreamer,
Dre39,
Drew richardson,
Drew0rZ,
Drichelle,
Drosoph,
Drouard Gabin,
Droyster,
Dschamilija,
Dublinstein,
Duchna,
Duckhem,
Duffking,
Dugg Haynes,
Duke Pitteroff,
Duncaroo,
Dyciek,
Dylan Kavanagh,
Dylan Lase,
Dylan Winchell,
E Scott,
E.N.G.,
ECPetra,
EDonahue,
EFlesch,
EValleyY6,
EagleLoft,
Eaglesoul,
Eamon A. Capps,
Eamon Hogan,
East74,
Echo Brooks,
EclipseCeLL,
Ed Miller,
EdMorgan,
Eda Yilmaz,
Edgardo Lugaresi,
Edisislost,
Edna Porter,
Edsploration,
Eduardo Lovera,
Edward Swiniarski,
Edynard13,
Egidijus Zemleckas,
Eileen,
Ejdarrow,
Ekiaer,
Ekima,
Elagorn,
Elaina_science,
Elaine Fung,
Elaine Susan Marchant,
Elayan,
ElberethG.,
Eleanor L. Rayle,
Eleanor Morrison,
Elena Saylor,
Elena Zvereva,
Eleonora Paglialunga,
Elettra,
Eli Braathen,
Elias,
Elias Kurani,
Elifel,
Elijah Goldsmith,
Elijah Lamont,
Elijah Southwick,
Elisa Di Dio,
Elisa Kreibich,
Elisabeth Baeten,
Elisabeth Barbetsea,
Elise de Reijer,
Elise2222,
Eliza Jackson,
Elizabeth,
Elizabeth "Lolly" Lockwood,
Elizabeth Burleigh,
Elizabeth Chen,
Elizabeth Goddard,
Elizabeth Kaschmitter,
Elizabeth Lowther,
Elizabeth Mahoney,
Elizabeth Morris,
Elizabeth Shutters,
Elizabeth Trapnell,
Elizaveta2,
Elke Briese,
Ella Robb,
Ella Woolrich,
Ellen Langford,
Ellen904,
ElliF,
Ellie Alfson,
Elliot Witt,
Ellis Claridge,
Ellis Tsu,
EllisBell,
Els Boudewijnse,
ElsieUnderscore,
Elspeth Kane,
Elun,
Elvira Lakovic,
Elwaight,
Em.Ross,
Emanuele676,
Emera9,
Emilia Duque,
Emilia Manosalva ,
Emily ,
Emily Aisa Leary,
Emily Anne Marie Whitehead,
Emily Baker ,
Emily Cohn,
Emily Cook,
Emily Duboy,
Emily Fox,
Emily Gambrell,
Emily Jenkins,
Emily Matassa,
Emily Priddy,
Emily Sullivan ,
Emily Summer,
Emily Walters,
Emily Webb,
Emily.H.zooniverse,
EmilyBacke,
Emma Allen,
Emma Andersson,
Emma Finch,
Emma Leach,
Emma Lear,
Emma Naujokat,
Emma Rice,
Emma Rounds,
Emma Wrake,
Emma.O,
EmmaJacobs,
EmmaVR,
Emmawhinnie2,
Emmelmann,
EmpireMousetrap,
Emptyrainbowbbls,
Emrosie,
Engler-Schneider,
EnjoySasa,
Ennis Tulloch,
Enora J,
EnoraNedelec,
EnragedPlatypus,
Enya Kaonga,
Eowulia,
EpicBlackDragon,
Ercydive,
Erdhummel,
Eric Kavander,
EricDamonWalters,
Erica Burton,
Erica Zeschke,
EricaYeah,
Ericakras,
ErichG,
Erick Romero,
Erik Rosenberg,
ErikJBenitez,
Erika Perez,
Erin Clements,
Erin L Willis,
Erin_Thelander,
Eroush,
Erowin,
Erufailon42,
Esai,
Esbn,
Esme Wallace,
Ester Matieschek,
EstyMay,
Eswimmer0206,
Eszter Belteki,
Ethan Brewer,
Ethan G. Reichard,
Ethan Martin,
EthereaL,
Etienne Herrick,
Eugenio Fernández Suárez,
EvEnuS17,
Eva Erle,
Eva Humphrey,
Evan Reichard,
Evan Roberts,
Eve Simner,
Eve00675,
Evelyn Graham,
EvenSerieus,
Everett Fisher,
EvieJ,
Evija,
Ewina,
Ezhekiel,
Ezreal,
F. Allan Pope,
FB-XL5,
FCardou,
FINDALLTEHALIENZ,
FaZe_Leder,
Fabi1923,
Fahina Chowdhury ,
Faith Taylor,
Family St Quintin,
Farah_2005,
FarrarFel,
FayeSomething,
Fearnaught,
FedTheMo,
Feliade,
Felicia Phelps,
Felicia Styer,
Felicia Yllenius,
Felicity Walton,
Felipe Pinheiro,
Felix.S,
Fergie,
Fernando González,
Fernández-Rodríguez, Pedro,
Ferrimjones,
FightingDodo,
Filibuster,
FilipeFrango,
Fingula,
Fiona Brooks ,
Fiona Leiper,
Fiona Marshall,
Fiona mills,
FizzPlease,
Flamingo,
Flexo221281,
FlightoftheValkerries,
Flint1545,
FlipperBozz,
FloMe,
Florian Fieber,
Flying_J,
Fodor Cristian,
Foggyworld,
FourWinds,
Foxstar82,
Fr3d3r1k_M,
FranB25,
FranBow,
Franca Knab,
Francesca Cecinati,
Francisca Ferreira,
Francisco Torrado Fernandez,
Frank DELON,
Fratella,
Freckkles,
FredVanCor,
Frederick D. Kleist,
Freya Debler,
Freya Tischkowitz ,
FreyaJC,
Frozyth,
Fuchskind,
Funkyfalcon,
Funnyfavorer101,
Funnyguts,
Furiat,
Furniture307,
FuzzyJones,
G Moore,
G. Shearwater,
G.Haldursson,
G.evelyn1214,
G.gorilla324,
G2EK,
GBauses,
GKaramanski,
GLambourne,
GMadeiros,
GSmith7018,
Gabeth123,
Gabriel Abalos,
Gabriel Jones,
Gabriel Soto,
Gabriela Wei,
GabrielaMacsim,
Gabriella De Felice,
Gabriella Melo,
Gabriella Reyes,
Gabriellajm,
Gabrielle,
Gabrielle Conidi,
Gabrielle Swartz,
Gage Wells,
Gail E Pinter,
Gail E. Wagner,
GalaxyTrain420,
GalenaBear,
Gallium,
GameBrown_Family,
GammaGreenthumb,
Gandalv,
GanneC,
Garlandiana,
Garrison Weaver,
Gary White,
Gauravvk,
Gavin,
Gavitron_2000,
Gbaez,
GdVI,
Ge0de,
GeWe,
Gedelgo,
Geeds,
Geek2,
Geeklette,
GeirM,
Geire Kami,
Gekco,
Gemma Kinsman,
GenerDavid Quitevis,
Genevieve Finerty,
Genevieve Yew Siew Bee,
Genosse Boss,
Genotype2,
Geode_,
Geometer,
Georga Beak,
George Torres,
GeorgiaGootee,
Georgina Dunlop,
Gerasimos Loutos,
Gethere2,
Ggatton,
Ghazi,
Gianna Segnatelli,
Gideon Fleming,
GigantoPeach,
Gill_666,
Gillian Fee,
Gillian_Borland,
Gillis57,
Gina Hatfield,
Gina Sleeper,
Ginger Gritzo,
Gingertwin,
Ginnyfan1,
Gintas,
Giomv,
Giovanna Maretti,
Giselle Marquez,
Giu_Gennaio,
Giuseppe,
GizmoMischief,
Gloria Jódar Valderrama,
GloriousMundane,
Glosoli,
GnatMan,
Godchild,
Gonodactylus,
Gordon_Forrest,
Gordonalistair,
Gorgar,
Gorgonos,
Gotenks,
Goupus,
Grace Bowers,
Grace Carter,
Grace Ilbery,
Grace Kayla Allende,
Grace Koenigstein,
Grace Rigas,
Grace_Banks,
Grace_Little,
Graceling1,
Graeme Fraser,
Graeme Hewson,
Graham Rintoul,
Graham Simpson,
Granan,
Grant Miller,
Grant Tildsley,
Grant White,
Grayson Heath,
Greenfields,
Greengirl65,
Greenlightrj,
Greg Lang ,
Greg Langermeier,
Greg Partyka,
Greg Stillwell,
Gregoireblond,
Gregory Dreyfuss-Kaufman,
Gregory James,
GrepPip,
Greta Kitts,
Greta van Zyl,
Grethe,
GreyPhoenix,
Gribby,
Grimnir,
Grintalsm,
Gumby,
Gumok,
Gunnar Matthaei,
Gunther Willinger,
GuteMine,
Gwillewyn,
Gyla Fowler,
H8stinks,
HKE,
HMPenguins,
Haedyn,
Hailey,
Hailey ,
Hailey Grimes,
Hal Emmerich,
Hal Heinrich,
Halana Hayner,
Haley,
Haleywigal,
Hamdi,
Hana Wills,
Handa,
Hanias,
Hanibal94,
Hannah Brass,
Hannah Man,
Hannah McDowell,
Hannah Stines,
Hans,
Hans Heilman,
HansIJpma,
Hanschibal,
HaplessHero,
HappyEve,
Harambe Lives On ,
Hardrockhopper,
Harper Jocque,
Harry Edwards,
Harry Nelson,
Harshit Agrawal,
Harvardman,
Hasni,
Hassan Bin Haseeb,
Hassan Saber Ewida,
Havarie,
Haya,
Hayley,
HeathRussiaGc,
Heather,
Heather Rider,
Heather Torpy,
Heather Wilson,
Hedda,
Heidi Barney,
Heidi Fischer,
Heidi Pfund,
Helen Petzl,
Helen Zadey,
Helen.A.Brown,
HelenAC,
HelenRolph,
Helenamena,
Helenmaxwell,
Henni Fell,
Henrik87,
Hermenau,
HerrvonSpeck,
Hetttty,
Hilaryjb,
Hilke von Kienle,
Hillyhenhouse,
Himmaysion,
Hippie1427,
Historysid,
Hitodama,
Hnigg,
Hoggo,
Holden Woofter,
Holly Adkins,
HomerJ,
Hookage,
HoosierGen,
Hope Eden Flores,
Horia007,
Howard_Richards69FL,
Howardsn68,
Hrusten,
Hubert Wolf,
Hucki,
Huddo,
Huffer907,
Hugo Andrés Durantini Luca,
HugoBallee,
HumbertoRickenbacker,
Hunter McDonald,
Hunter Smith,
Huntress,
HybridX,
HydroxyChloride,
Hye Mi Jeon,
Hypatia1,
I'm not telling you my real name. Give up.,
I. Keurntjes,
IEM,
IL-K,
IMadeThisForYou,
Ian D.L. Hyde,
Ian McKerrow,
Ian Squire,
IanGalaxyZoo,
Iasmim,
Icecrasher,
Icephoenix96,
Iduun,
Idw,
Igor Malušević,
IkariaDaviau,
IlseB,
Iltis,
Ilya Dvorchuk,
Imes,
Inaari,
India Yip,
IngerInger,
Ingrid DOMKPO,
Ingridanita,
Ingridbio,
Inki81,
Ionie,
Ircen,
Irene Bacchi,
Irene Bassi,
Irene Clifton,
Irene Holiday, Britney Andes, Valerie Chew,
Iris-oxf,
IrisB,
IrishAstro,
IrishRottie,
Irissofie,
IronWhale,
Is0cre,
Isaac Lamery,
Isaac Young,
Isabel ,
Isabela,
Isabella Ferrer Fernandez,
Isabella Suzanne Valentine,
Isaman10,
Isannah,
Iseeit,
Isissea,
Isitme,
Issiah,
Itallcounts,
Itsasmallworld27,
Itsug,
Ivan ,
Ivanovna,
Ivar Lauritzen,
IvdO,
Ivorostojic,
Ivy Leavitt-Carlson,
Ivy Munnerlyn,
Iwona Mertin,
Iyahalyssah2,
J. Christian Larsen,
J. J. Dziak,
J. T. M. Daniels,
J. Zhou,
J.A.J,
J3300918,
JDLA,
JDonnici,
JF392813E,
JG1996,
JGF,
JJ199,
JJ53,
JK Lynch,
JMuller,
JOB JOSEPH,
JOsment,
JPJP_333,
JPenn2,
JPenn550,
JPlegge,
JPrice01,
JSN2001,
JVerspeek,
JWearth,
Ja'Lyn Lee,
Jabejafe,
Jablon,
Jack Holland,
Jack Tapson,
Jacki Boyle,
Jackie Lagasse,
Jackielharris,
Jaclyn Rickards,
Jacob Leavey,
Jacob Ruiz,
Jacob Warren,
Jacque Blundell,
Jadawashington,
Jade Duhsmann,
Jade Lauren Cawthray,
Jaeti13,
Jaglavak,
Jaimee Adams,
Jake Bulkowski,
Jake Josepher,
JakeHill,
JakeStudebaker,
JakobPallesem,
Jakobiter,
James Bradley,
James Brammer,
James Coultas,
James E. Drewry,
James Fitzgerald,
James Hooven,
James J. Marshall,
James Loope,
James Neilson,
James Ridley,
James S. Preston,
James Tuvell,
James White,
James.Ryan,
JamesPaulson,
Jamesbram,
Jamie,
Jamie Clapperton,
Jamie Ling,
Jamie Woodcock,
Jamoni,
Jan Agatz,
Jan Eltner,
Jan Smith,
Jan11965,
JanMcGuire,
Jana Hamade,
Jane Downer,
Jane Liddle,
Jane M Smith,
Jane Williamson,
Janelle Miller,
Janerik,
Janet Jenkins,
Janet Patton,
Janet Vogt,
JanetCCS,
JanetCSB,
Januarye,
Jaouen,
Jared,
Jared Cashman,
Jared O'Quinn,
Jari Haapatalo,
Jari-Pekka Pääkkönen,
Jarildy Javier,
Jasmin Carne,
Jasmin.Nicholas111,
Jasminejo24,
JasonJason,
Jasper H-N,
Javiermv,
Jawofech,
Jaxiie,
Jay B,
Jaye Van Kirk,
Jaynemet,
Jaynet88,
Jazlynnedawson,
Jazmyne Zampell,
Jdj1,
Je11y,
Jean Walker,
Jean noren,
Jeanah Kim ,
Jeanette Garnett,
Jeanne Coleman,
Jeannie Colley,
JeeaaR,
Jeff Cransulin,
Jeff Hodder,
Jeff Walker,
Jeff0253,
Jeffery Snyder,
Jeffrey Rhodes,
Jeffrey Slott,
Jehuggett,
JellyBeanPawz,
Jeltz,
JenB24,
JenCs,
Jenna,
Jenna Himsel,
Jenna Morris,
Jenna Richardson ,
Jenna Sullivan,
Jenni Blake ,
Jenni Goodchild,
Jennie Couling,
Jennifer,
Jennifer Chau,
Jennifer Chiz,
Jennifer Jones,
Jennifer Komlyk,
Jennifer Lawless,
Jennifer LeBlanc,
Jennifer Leong Cardwell ,
Jennifer Pawlak,
Jennifer Schultz,
Jennifer Thornton,
Jennifer Tomes,
Jennifer Wyborny,
Jenny Ross,
JennyLiz,
JennyTake,
Jenx595,
Jenzoo44,
JeraldnotHerald,
Jeremy A Hansen,
Jeroen Goud,
JeroenMJ,
Jeskosmerick,
Jess,
JessDonn,
Jesse Helser,
Jesse Jordan,
Jesselowes,
Jessi Jones,
Jessica,
Jessica Abbott,
Jessica Adkins,
Jessica Banks,
Jessica Black,
Jessica Bordelon,
Jessica Cooper,
Jessica Cromwell,
Jessica Dunne,
Jessica May,
Jessica Neumann,
Jessie Paul Garza,
Jesus Sanchez,
Jewelanne,
JeweloftheNile,
Jgronmark,
Jhf,
JillLamede,
Jim O'Donnell,
Jim Pea,
Jim Wilson,
JimmieLaBrown,
Jimmy2010,
Jimusan,
Jinx,
Jinxy,
Jjuju,
Jmallory1996,
Jmart3029,
Jmyers0517,
Jo Dorning,
Jo.mansson,
JoHB,
JoKD,
JoanL,
JoanaPereira,
Joanna Paniagua,
Joanne Moore,
Jobobaars,
JockYukon,
Joe Enright,
Joe Standring,
Joel,
Joel Marx,
Joel Tack,
JoelDR,
Johann J. Grimm,
Johanna Valencia,
JohannaThePenguinCounter,
John Anglin,
John Bardinelli ,
John Burrows,
John Chestnut,
John D. Krull,
John Guido ,
John Hedger,
John Metcalfe,
John Seed,
John Vandenberg,
John Winkworth,
John Wirzburger,
John170,
John93,
Jolinah,
Jonah Huhges,
JonasStefan,
Jonathan Bala,
Jonathan Copelin,
Jonathan Kraus,
Jonathan Steele Hall,
JonathanBohlen,
Joost Dijkerman,
Jordan Friedemann,
Jordan San Miguel,
Jordanboys4,
Jorge2898,
Josephine Brigham,
Josh,
Josh Carter,
Josh Russell,
Josh.davies87,
Joshua A. Taylor,
Josie,
Josie Schott,
Jowuijts,
Joy Gulick,
JoyWorld,
Jr9355,
Juan Carlos Garza Lopez,
Juan Manuel Martinez Rodriguez,
Juan Manuel Rosso,
Juanan_89,
Jude Ajalat,
Judith Engel,
Judy,
Judy Prouty,
Judy Reichmuth,
Jujubee18,
Julebambus,
Jules Borel-Saladin,
Julesdomalain,
Julia Casey,
Julia Groman,
Julia Hickin,
Julia Hoskins,
Julia Kendall Gill,
Julia Poor,
Julia Rosser,
Julia Son,
Julia Wilkinson,
Juliasch,
Julie Ann Feldt,
Julie Marlay,
Julie McMurray,
Julie Pearl,
Julie Peterson,
Julie Sansum,
JulieHix,
Julien Giffard,
JulietteHotel,
Juln,
Juls2783,
JungleHyena,
Junior_Guide,
Juraj,
JustKatie,
JusticeStone,
Justin Forrester,
Justin Goodrich,
Justin Offner,
Justin Sanchez,
Justin Smith,
Justin YIu,
Juty,
Jyard2,
Jörg Christiansen,
Jörg Meili,
K Miner,
K Navarro,
K P Stephenson,
K20Kat,
KGuk,
KHowlett,
KK_27,
KKramke1014,
KLUTSCH Alexis,
KS_RM_CB,
KTMorton,
KTcakie,
KYL,
Kaate86,
Kaelee,
Kagantravis Travis,
Kaida,
Kaitlinawilliams,
Kaitlyn Carr,
KaiyolahCraiyola,
Kajo,
Kakan,
Kaleigh E. Duncan,
Kalipso,
Kangaroo36,
Kantos,
Kaotix,
Karen,
Karen Bridgeman,
Karen Dagsen,
Karen Hughes,
Karen L Masters,
Karen O'Sullivan,
Karen Wallace,
Karen Wan,
KarenWatermelon,
Karen_c.,
Kari Matuszak,
Kariah Humston,
Karijn,
Karin Johansson,
Karin Schedel,
Karina Folkman,
Karina28,
Karl Mayerhofer,
KarmanVanderStel,
Karol Safianik,
KarolKotowicz,
Karon Gates,
Kasey,
Kasia Croken,
KasiaMiko,
Kassandra Lynch,
Kat Brimhall,
KatS82,
Katalina Engel,
Katavitje,
Kate,
Kate Allen,
Kate Fergusson,
Kate Rogers,
Kateconina,
Katelyn Carnes,
Katelynh,
KateyND,
Katfish,
Katharine Stratford,
Katherin Rodriguez ,
Katherine Donofrio ,
Katherine Lond,
Katherine Sellick,
Katherine Stott,
Katherine williams,
Kathleen Augusta,
KathleenCO,
KathleenZ,
Kathryn MIranda,
Kathryn Nicholson,
Kathryn Sancilio,
Kathrynp,
Kathy Jambor,
Kathy Short,
Katie,
Katie Dickson,
Katie Keyser,
Katie Proctor,
KatieOxley3,
KatieSmith17,
Katja Novitskova,
Katrina Sutherland,
Katt_25,
Katy Godinez,
Katy J. Garrity ,
Katybb,
Kaushal Yadav,
Kautton,
Kay L. Litz,
Kay Taylor Hume,
Kaycee N.,
Kayla Strong,
Kayleigh Downing,
Kazesim,
Kazuya Jaimes,
Kbzephyr,
Kcilf90,
Keats14,
Keep Bees,
Keerthana11,
Kees Roovers,
Keircurb,
Keith A. Harmon,
Keith La Rue,
KeithMason,
Keli C. Bolin,
KelliK2,
Kellie920,
Kelly A. Sutphin-Borden,
Kelly B,
Kelly Binnington,
Kelly Gilliam,
KellyBe,
KellySearcy,
Kelly_Barth,
Kellyke,
Kelsey,
Kelsey Neff,
Kelsey Reich,
Kelsie,
Kelton Johnson,
Kelvinaardvark,
KennSpaulding,
Kenneth Campbell,
KenzieV.2005,
Keren,
Keri Tucker,
Kerima_Hill,
Kerry Bell,
Kerry Diehl,
Kerry Kelly,
Kessilari,
Kevichella,
Kevin Donoghue,
Kevin Frausto,
Kevin Hartmam ,
Kevin Ng,
Kevin O'Neill,
KevinP2,
Kevinique Mckinley,
Kgo1526,
Khalinka,
Khas,
Kiana,
Kim Jakobs,
Kimberly Henry-Karr,
Kimberly Williams,
Kimberz86,
Kimbo_2112,
Kimvbelle,
Kirsi87,
Kirsten A. Rohrbach,
KirstenDawes,
Kiru,
Kishachimp,
Kist,
Kit Ruseau,
Kitsuneko,
Kitty Foxglove,
Kjo1626,
KlaraMaria,
Klein Harmonie,
Kleio Kartali,
Kmljohnson,
Knowles1315,
Knut Hansen,
Kobold27,
KoldPhire,
Kortney Morris,
Koukouwaya,
Kownie,
Kralan,
Krglass10,
Kris K. Bancroft,
Kris Reig,
Krista Pojman,
Krista Tock,
Kristeena Sigler,
Kristensop,
Kristin Downey,
Kristin Havercamp,
Kristin Sauerland,
Kristin Wright,
Kristina Klug,
Kristina Lapp,
Kristine Batenhorst,
Kristy B,
KrkSmth,
Kruss375,
Krystal,
Kshinta,
Ksiusia,
Ksloots,
Ku,
Kukalakana,
Kurt Winsler,
Kyle Colvin,
Kyle Willett,
Kylie McDermott,
Kymee Bair,
KyoVang,
L-AForbes,
L-Glendinning,
L. Williams,
LEGO_NINJA,
LH_Kelley,
LINAC,
LJE,
LKavanagh,
LN8x,
LOLeannie,
LPO,
LRC Academy,
LSpeedie,
LaWayne Reeve,
Lachlan S.T.,
Lacy Hartis,
Ladanmusic,
Lady12,
Lady_Godiva22,
Ladyann,
Laila Karam,
Laila Tejeda,
LailaA,
Laile Meraz,
Lais M,
Lamorna,
Lamutamu,
Lan Duong,
Lan Thai,
Landvermesser,
Laquaine2737,
LaraM,
Larissa Krentz,
LarkinC,
Larmbelastigung,
Larry Casey,
Latitude23,
Laura Deacon,
Laura Dunn,
Laura Engle ,
Laura K. Lynn,
Laura Martinez-Inigo,
Laura Merchan ,
Laura Tongue,
Laura Trouille,
Laura Wilson,
LauraCun,
LauraTheExplorer,
Lauralamble,
Laurapinguin,
LauravdMark,
Laure Granarolo,
Laure Joanny,
LaurelG,
Lauren B,
Lauren Culbertson,
Lauren Douglas,
Lauren Gardner,
Lauren Lopes,
Lauren Riley,
Lauren Thompson,
Laurin_003,
Layla Johnston,
LayneNola,
LazyL,
LeRyck,
Lea Hawkins,
LeaGallagher,
Leah Bauer,
Leah Rush,
Leah Salyers,
Leah Taylor,
Leanne Condon,
Leanne Deuchar,
Lecter7,
Lee M,
LeeBurn,
LeeKick,
Lee_Harris0n,
Lee_Reed,
Leibniz,
Leichman Career & Transition Center,
Leine.03,
Lemaire Nolwenn,
Lemoncupcakes37,
Lemonlorraine,
LenaLeo,
Lena_elh,
Lene Rosenberg,
Lennoxville,
Leo,
LeoMFR,
Leona Barocas,
Leonardo Altadonna,
Lersday,
Lesley Lupo,
LesleyLee,
Leslie Summerlin,
Lester Jean,
Lesterj1972,
Lesvon,
Leticia Baldo,
Leticia Isabel Avila Garcia,
Leusane da Rocha Lordêlo,
Levicorpus,
Levon Tumanyan,
Lewis Seed,
LewisOLeary,
Lewyke,
Lia A. Cariglia,
Liam Doolin,
Liambebb,
Lianus,
Librarian22,
Lichtecho,
LifeBounces,
LifeScienceMojo,
Lifeforce,
Ligia Kamann,
Liie74,
Liisu.R.,
Lilianna Barchers,
Lillafjanten,
Lillian Bodenheimer,
Lillias Forsyth,
Lillie Doyle,
LillieC,
Lilmissmechanic,
Lilunebrium,
Lily,
Lily Schultz,
LilySchultz2002,
Lilygwen,
Linda Hunsicker,
Linda L Cooper,
Linda Peat,
LindaChen,
LindaHagbergSweden,
Linguin,
LinkandNavi,
LinkyLongleaf,
Linnie14,
Lisa Beckrich,
Lisa Kristjanson ,
Lisa Mendez,
Lisa Minarovich,
LisainVA,
Lisanne Dijk,
Lissa623,
LiverLover,
Liz Dils ,
Liz Minkert Johnson,
LizCoops,
LizDownes,
Liziuri,
Lizzietish81,
Lizzifer711,
Lizzy06,
Lizzyf23,
Lmcminn,
Lmcurls,
Lofty_h,
LoganSammy,
Lolipop666,
Lololiz,
Lomky,
Lone Kunkel,
Lonewolf66,
Look4Star,
Loony_Lovegood,
Lordofthepies,
Lori Nicholas,
LoriM,
Loschmidt,
Lotte D'Hulster,
Lotte Dikuzoleleko,
Louann S. Manning,
Louccby,
Louis Shamel,
Louise Court,
Louise Ratcliffe,
Lovarq,
Lparkhurst,
Ltlsun,
Lu72,
Luan Roberts,
Luana Kosciuczyk,
Luca Haubensak,
Luca Menarini,
Lucas B,
Lucas McNall,
Lucie Laporte,
Lucifer Addams,
Lucy Arditi,
Lucy Conover,
LucyitSwD,
Ludika,
Ludovic,
Luispeikou,
Lukas Hackl,
Luke .S,
Luke Middleton,
Luke Tyhurst,
Luna54,
LunaMona,
Luqras,
Luthar22,
Lvova Anastasiya,
Lyckerish,
Lydia Fraser,
Lydia Longwell,
Lyla Jones,
Lyn cusdin,
Lyndsey Simpson,
Lynee Tieu,
Lyneth,
Lynn,
Lynn Allen,
Lynnmrose,
Lynzw,
Lyrajane,
Lyss2303,
Léopoldine Litch,
M Passage,
M Wong,
M&P Noonan,
M.Mehala,
M00N,
MALASSIGNE Florian,
MBear,
MBrett,
MDA,
MDrass,
MKDS,
MLSBetreuung,
MM-Noorsalehi,
MMT90,
MNeilson,
MOBI,
MOSI's Youth Advisory Board,
MPIchimp01,
MPIchimp02,
MSMurphy,
MaSeKind,
MabelH,
Mabi,
MacKenzie Nikol Greenwood,
Machielovic,
Mackenzie Sweet,
Macrell,
MadAlice,
Maddie Dart,
Maddie L,
Maddie Schiller,
Maddy1221,
MaddyMarier,
Madeline Brown,
Madeline Eckert ,
Madeline Maier,
Madison Hess,
Madison Jarvis ,
Madison Voelker,
Maeve Doolin,
Mafalda2015,
Magdalena,
Magdalena Goworek,
Magellan_Lin,
Magessa,
Maggie Canvin,
Maggie Flynn-Fleet,
Maggie McLaughlin,
Maggie Read ,
Maggielikeszoology,
Maggiewill,
MagicMitch,
Magzie2000,
Mahmoth,
Mahmoud El Hakim,
Mai ,
Maide Calix,
Maika Fano,
Maike42,
Mainge,
Majda Oblak,
Mako001,
MaleneT,
Malin Rast,
Mamameerkat,
Mamphie7ty,
Manar Shawkey Hussien,
Mancunian_Nick,
Mandi Armstrong,
Mandi Dalziel,
Mandieroehl,
MandyLHeddle,
MandyTow,
Manfred Mumpitz,
Mang19,
Mangojean,
Mangosong,
Mannenberger,
ManuelP,
Manuela11,
Manyac,
MarJanina,
Mar_cus,
Mara Chandler,
Marc-Antoine Contant,
Marcelle Boyer McGovern,
Marcia Souza,
Marco ,
Marco Degener,
Marco Grecomoro,
Marco Tison,
Marcus Arcanjo,
Marcus LaBrie,
MarcusFarstad,
Marcy9,
Marecky,
Marena84,
Margaret Carthew,
Margaret Chirdo,
Margaret Kosmala,
Margaret West,
MargaretW,
Maria Ann Freel,
Maria Corona,
Maria Garcia,
Maria Janosko ,
Maria Lopez,
Maria Mora,
Maria Paz Rodriguez Puerto,
Maria Pohl,
Maria Tereshchenko,
Maria del Rosario Canelas Berrocal,
MariaCristobal,
Mariah Gutiierez,
Mariah Munoz ,
Mariana Dutra Fogaça,
Marianagazga,
Marianne Wohlgemuth,
Marie Andrews,
Marie Padberg,
Marie-France martin,
MariePG,
Marie_eve_78,
Marilisa Moio,
MariluC,
Marinka1704,
Mario Marani,
MarioMe,
Marion Addo,
Marion Joy Brown,
Maripanna,
Mariposa123,
Marisachimpa,
Marissa Thonius,
Marjan Moallem ,
Marjolaine,
Marjorie Williams,
Mark,
Mark Draper,
Mark Edwin Hurd,
Mark Pearse,
Mark Thurlow,
Marlijn,
Marsha K,
Marshall6,
Mart2x,
Marta Kotarba,
Martabon,
Martha Cordova,
Martha DiPalma,
Martha Moore,
Martianpolestar,
Martin C Eschholz,
Martin L Mooney,
Martin Vejražka ,
MartinJende,
Marton Tompos,
Marvin Wumbo,
Mary,
Mary Compton,
Mary Dion,
Mary Hatton,
Mary Hawkins,
Mary Hayslett,
Mary Karapogosian,
Mary Kay Van Sistine,
Mary196,
MaryMicket,
MaryPat,
MaryZaher,
Mas2201,
MasonPeace,
Masonscience,
MateaH,
Matess,
Mateusz Kudelski,
Mathew McKane,
Mathide,
Mathilde Merle,
Matilda Fraser,
Matt Baughman,
Matt Martinez ,
Matt Skalla,
MattB559,
Matteo,
Matthew Robbins,
Matthew Rose,
Matthew Stavig,
Matthew Vetkoetter,
Matthias Breimann,
Maura Bozeman,
Maureen Hangen ,
Maureen77,
Maurice A. Hippleheuser,
Maurice Tijm,
Max Carlile,
Max Compton Trevitt,
Max Underhill,
MaxKa,
Maxbax,
Maxwell Pinkerton,
May Hornbuckle,
Maya,
Maya Rouy-Tard,
Mazzy,
Małgorzata Polaczek,
Mburke777,
McKenzie Wragge,
McardleCarrington,
Meanjean4321,
Mear48,
Meduza B,
Meg Schwamb,
Megan,
Megan Lingrell,
Megan McNaughton,
Megan Pantiskas,
Megan Weber,
Meghan Eggert,
Meglet,
Mel-e,
MelPagano,
Melakmen,
Melamela,
Melanie Fisher,
Melanie Regalado,
Melanie_Hoffman,
Melaron,
Meliks,
Melinda Hargis,
Melissa,
Melissa Brewer,
Melissa Brown,
Melissa Haney,
Melissa J Mills,
Melissa Koch,
Melissa Sullivan,
Melissa Van Rijnen,
Melissa Zago,
MelissaBee,
Melle Heijenga,
Melodie Misseri,
Meloetta,
Melvis,
Menna Limbani,
Mercer Butts,
Mercury32,
Mercury82,
Mergie,
Merianna Desilets,
MermaidBird,
Merowig,
Messi0088,
Metamikt,
Mht,
Mia ,
MiaTheFeline,
Micaela Jimenez,
Micah,
MicahB,
Mich.bell,
MichLM,
Michael A. Lawrence,
Michael Becker,
Michael Burns,
Michael Gallagher,
Michael Good,
Michael Kenntenich,
Michael McGory,
Michael Paci,
Michael R. Bryson,
Michael REISS,
Michael Rudolph,
Michael Stein,
Michael_B,
Michaela810,
Michal Sirton,
Michal Wlodkowski,
Michał Biesiada,
Michelle Gollubske,
Michelle Kohler,
Michelle L Hemperly,
Michelle Lynn Salgado,
Michelle R. Dempsky, Esq.,
Michiko N.,
Michkov,
Mick Ruby,
Micmac1,
MidgeUniverse,
MidnightRook,
Miepie88,
Migitri,
Miguel Eduardo Leon Toscano,
Miguel Martinez,
Miguel Ángel Muñoz Cruz,
Mikaylab,
Mike A Klotz,
Mike Panter,
Mike Wiles,
MikeHoit,
Mikerttt,
Mikeyser,
Miki Garau,
Mikko Oksanen,
Mikroklima,
Mikytron,
Mila Kolundžić,
Milan Matthes-Kale,
Milanp,
Miles S.,
Miles_John,
Milos Di Gregorio,
Mima2,
Mimi Arandjelovic,
Mimiko,
Mimster2,
Mina Gabra,
Minda H.,
Mindslaver,
Minev,
MingMing,
Minna Viljamaa,
Miranda Spanjaard,
Mirco Schlatter,
Miriam Osland,
Mirime,
Mirjam,
Mirkomaniac,
Mirrakor,
MirrorscapeDC,
Mirsandia,
MissS,
Mission73,
Missoedipa,
Missybee35,
MisterMorris,
Misterhamm,
Mistrinanka,
Misty Arterburn,
Mitsiy14,
Mizzou,
Mjay,
Mkdtasha,
Mkele,
Mmjm,
Mna225284,
Mochyn,
Mohot,
Mokka,
Mollie Kemp,
MolniyaD,
Momkey,
Mona09506,
Monet Garrett,
Monica Alves ,
Monica Maraglio,
MonicaMacD,
MonkeySis,
Monolakes,
Montexes,
Moonbeam,
Moone,
Morgan,
Morgan Baddeley,
Morgan Eisenhauer,
Morgan Griffin,
Morgan May,
Morgan Roach,
Morganavila,
Morra,
Mortal_Speaker,
Motherjoanne,
MotionsensorElke,
Mowat,
Mr-Slater,
Mr.To,
MrBill,
MrBray,
MrCup,
MrES,
Mrs Elaine Millard,
Mrs. Paranczak,
Mrslisaharrison,
Mrsthayer13,
MsTurcotte,
Msatchfield,
Msusan,
Muggy7,
Mullimaus,
Multistrada,
Munkácsi József,
Munni,
Muriel Moon,
Murray Cumming,
Muskratte,
Mvpiggelen,
Mwaldie,
Mydrynn,
MyersScienceLab,
Myoste,
MysteriousAlina,
N,
N. C.,
N165617,
N419045,
NA_18,
NCT,
NFust,
NUKE1989,
Nachtvogel,
Nada Abdel-Salam,
Nada Farag,
Nada Shorbagy,
Nadia Brady,
Nadia Corp,
Nadine Allen,
Nadine B.,
Nadya Bruce,
Naica Cherilus,
Nakaleen,
Nakiki,
Nalyla,
Namibier,
Nan,
NanaHub,
Nancy,
Nancy Godber ,
Nancy Neville Cordell, Ph.D.,
Nancy Nietupski,
NancyWWest,
Naomi Hoglund,
Naomi2809,
Nargess.g,
Narnian,
Nasreen Rollins,
Natalie Baker,
Natalie K. Wells,
Natalie McClintock,
Natasha Nemyre,
Natassjia Ragbeer,
Nate1,
NateF,
Nathalie Paquin,
Nathan Eisenlohr,
Nathaniel Hümpfner,
Natsang,
Nauka,
Navi Johal,
Navonil Mukherjee,
Nbhedgehog,
Necker15,
Neeve OFM,
Nenni,
Nepenthes1991,
NephelenArne,
NerissaBurns,
Nerrida Holland,
Nerruache,
Netballstar333,
Neurophile,
Nev Rowley,
Newellhauser,
NewtonH,
NextRoman,
NiSte,
Niccolo,
Nicholas Dalby,
Nicholas Patrinos,
Nick Anderson ,
Nickamilodeon,
Nickolus Patraszewski,
Nico Palomo,
Nicola,
NicolaMalavasi,
Nicolas Quintairos,
Nicolas et Simon Bouchet,
Nicole Cawthorne,
Nicole Hammond,
Nicole Hisatomi,
Nicole Olmschenk ,
Nicole Ursprung,
Nicolle Garmon,
Nik123Joh,
NikGoesRawr,
Niki Aktipis,
Nikki Chappell,
Nikki Hoofwijk,
Nikki76,
NiklasB,
Niknak2707,
Nikolaus Heveker,
Nilsholland,
Nina Angelo,
Nina Hayes,
Nina Rossetti,
Nina Scheich,
Nina-pilcher,
NinaYu,
Nina_Andries,
Ninady,
Ninjoid,
Ninoue,
NoTigersInAfrica,
Noctiferix,
Node801,
Noel Torres,
Noemiimeon,
NoisyMicrobe,
Nola Beck,
Nolan Hey,
NoraAlJarbou,
Norah R. Cunningham,
North Central High School in Kershaw, SC,
NotoriousVHD,
Nova329,
Novawatch,
Nrodrigo777,
NukeArts,
NunesJF,
Nuria Gonzalo,
Nuria Maldonado,
Nusswiesel,
Nyamba,
Nycticorax,
Nye,
NynkS2,
OCaravella0829,
OGNaes,
OKutoNoSushi,
ORIELTON,
ObservatoryPark,
Oevans82,
Ofelia,
Ohayfield,
Ojetuno,
OkapiLove,
Okbrice14,
Okieh,
OldBluntman,
OldSwampy,
OleksiiD,
Olena-ua,
Olga Turnova,
Olimar,
Oliver Kemmer,
Oliver Wolf,
Olivia ,
Olivia B.,
Olivia Crawford,
Olivia Friedemann,
Olivia Wilson,
Omar Roberts,
Omar Sultan,
Omi,
OnePunchMan,
Oneloveson,
OnlyIsland,
Onthorfast,
OoooMatron,
OppaiDesu23,
Oranje99,
OriXan,
Ornit,
Orohena,
Oscar_Chavez,
Osnat Atias,
Osprey49,
Osquid,
Owen ,
Owen Littlejohn,
Owen Whitby,
Owl12345,
OwlAli,
Ozro,
P.A.M.,
P.B. Longmei,
PALewis,
PAlex,
PClarke,
PFonseca,
PHald,
PMSPR,
PRANAV S JOSHI,
PWD,
PWDDCh,
PWDdr,
PWDkm,
PWDmb,
PWDsz,
Pablo Marcos López,
Packo,
Paharov,
Paige29,
PaigeGabriellex3,
Pais-ily,
Paizli Palmrose,
Pam Burns,
Pamela Bejarano,
Pamela M. Urban,
Pamelahs,
PandaLion98,
PandaNation,
PandaOne,
Pandasoccer,
Pandora,
Paola Marques Novo,
Paolo Barocci,
Paolo Calabretto,
Papa John,
Papayette,
Paracebo,
Paramitha,
Parker Stewart,
Parminu,
Parsa,
Pascal Petit,
Pascha Al-Qassab ,
Patricia Walker,
Patricia_McWhirter,
Patrick Fitzsimons,
Patrick Mangan,
Patrick Schröder,
Patrycja Dzianok,
Patsy49,
PauDG,
Paul E. Pope III,
Paul Grigsby,
Paul Pugh,
Paul Tomlinson,
Paul Wilhelm,
Paul martinez,
PaulMakin,
Paula A. Zitzler,
Paula26,
PaulaTomlinson,
Paulietta,
Paulina Andrews,
Paulina Popielarz,
Paulina Schad,
Paulino,
Pavel,
Pearshape,
PedroC,
PedroHafermann,
Pelo,
Pengueena,
Penguinkid,
Pepsin,
Perada90,
Perhill,
Perihan,
Perlmutt,
Perlo121,
Persephonerose,
Petanimal22,
Peter Edwards,
Peter Evans,
Peter Fortenberry,
Peter H W Newton,
Peter K,
Peter Lauris,
Peter Lee,
Peter Marantz,
Peter Wijnsouw,
Peterbrit,
Petitepete,
Petr Dvorak,
Petr Gintar,
Petr Skacha,
Petr Vinklárek,
Petra E. Thomson,
Petri Sirkkala,
Peyton Miller,
Phasewave,
PherosNike,
Phil Marshall,
Phil405,
Philip Hickman,
Philip J. Eversman DDS,
Philip Kingsley,
Philipirvine,
Philippe,
Philisiphie,
Philosophe,
Phiral,
PhlashPhlash,
Phosie,
Photog,
Phox,
Phryxe,
Pi_14159,
Pia Elgaard,
Pici,
Pier,
Pierre Vandeginste,
Piia Puranen,
Pilaruja,
PineapplesRCool,
Pinely,
Pinguana,
Pinguin411,
PiperRocks,
PiperS,
Pirandella,
Pito13,
Pitsi,
Pixie777,
Plinko,
Pocket_watch,
PollywithPockets,
Ponderwomum,
Pontus Enström,
PonyFriend23,
Popcycler,
PopiTomi,
Popmusicluv123,
Poppet13th,
Portalic,
Porter Science,
Possbert,
Poupinette,
PrairieGirl,
Pranjal Bhatnagar,
Prateek Tiwana,
Precious Smith,
Predrag Djordjevic,
PrimericaRep,
ProfessorKid,
Pru_of,
Prupalmer,
Przemysław Wróbel,
Psaltriparus_minimus,
PublicY,
Pudding2,
Pueppie68,
Purplecavingcat,
Pywacket1952,
Qadsia Javed,
Quatsino,
QueenDianna,
QueenOfGasoline,
Quenby Wilson,
QuirkyPlatypus,
R.H.,
R1chard,
RENERIBI,
RRbend,
RS,
RSchobermayr,
RSing,
RUS0032,
Rach1787,
Rachael Haensly,
Rachel A. Ford,
Rachel Alsheikh,
Rachel Beech,
Rachel Dean,
Rachel Lee,
Rachel Sales,
Rachel Walter,
RachelLevine,
Rachel_R.,
Rachel_Rutland9,
Racox,
Raef Wolf,
Raewyn Sills,
Rafael Young,
Rafata,
Rafał ,
Rahni,
Rajagopal Srinivasan,
Ralf Stephan,
Ram Hattikudur,
Randall Blake,
Randall Stralen,
Randy_Vreeke,
Raoul,
Rappie,
Rashas Weber,
Rasputin13,
RattySmith,
Raul019,
Ray Perry,
Rayan B. Lachguel,
Razvan Andrei,
Rebecca Beach,
Rebecca Bell,
Rebecca Catt ,
Rebecca Curtis,
Rebecca Dobson,
Rebecca J F Lee,
Rebecca Robinson,
Rebecca Sweet,
Rebecca Vu ,
Rebeki,
RedTrev,
Redd2205,
Reece Melendez,
Regina Dircio,
Regina342,
Reichukey,
Reilly Naff,
Relentless363332,
Ren Bin,
Renee Stanford,
Resi,
Reventos,
Reyline,
Reynaldo yanez,
Rhalli,
Rhian Bennell,
Rhian Lowe,
Rhiannon47,
Rhona Alcorn,
Rhonan,
Rialuam,
Ricardo130cc,
Riccardo Ginevri,
Richard Burkhill,
Richard Cashion,
Richard Danby,
Richard Ostrowski,
Richard Rohlf,
Richard Wareham,
RickM1989,
Ricky Wilton,
Rigel,
Riley Dimech :),
Riley20,
Rille54,
Ringwood School Eco Committee,
Rita ,
Rita Smaniotto,
RitaAusBerlin,
RitterGrube,
River Jackson,
Riverdog,
Riverfusch,
Rkcomyn,
Rmackey,
Roanne de Haast,
Rob Conger,
RobbieID,
Robert Beilby,
Robert Gillespie,
Robert Hansen,
Robert Schoenthal,
Roberta Musso,
RobertoD,
Robin L. Cooley, MSc,
Robin L. Hollander,
Robin L. Shepard,
Robin Tunger,
RobinK,
Robinjoy73,
Robinkay2,
Robyn ,
Rock Hyrax,
Rockette62,
RodMacca,
Rodan Matthews,
Roger,
Rohan Reddy,
Rohit Mujumdar,
Roman Winter,
Romy Thomas,
Ronald Hattwig,
Ronan Le Bras,
Rondine,
Roneila,
Roobee,
Roown,
RoryMurph,
Rosa,
RosalieS,
Rosalind Mackey,
Rosamond Stannard,
Rosana Romero,
Rose Brandon ,
Rose Summerlin,
Rose Tierney,
Roshy Chhillar,
Rotoiti,
RoverD,
Rowan Chaboyer,
Rowanstar77,
RowantheRed,
RowdyWeeps,
Rsh3222,
Ruben Kremer,
Rubru,
Rumen Martin,
RuthE,
Ruubjaah,
Rvriske,
Ry5Ghost,
Ryan Barron,
Ryan Chang-Wu,
Ryan Conard,
Ryan Ellis,
Ryan Martino,
Ryan Rodney Massey,
Ryan Vogan,
Ryan Worthey,
RylanKleber,
S Bies,
S E Brown,
S Hartman,
S-a-be,
S. Dalton,
S. Leaf,
S00Z,
S1404377,
S1421881,
S745245,
SAGE-1,
SANEAlex,
SBolt89,
SCOUSER1964,
SCWilliams42,
SHowson,
SMWheeler,
SMike11,
SORINORAH,
SR9,
SRH23,
STARIDER,
SWTN,
SWude,
SZubic,
Saar Dewint,
Sabine Kuhne-Velte,
Sadaf Pruitt,
SafariLiz,
Saibot82,
Saichira,
Sakel,
Salininaysa,
Sallideighton,
SallyL,
Sam Baker,
Sam BeDunnah,
Sam Hollister,
SamBridgers,
Samalexmummy,
Samantha Alonso,
Samantha Arnold,
Samantha Bolibruch,
Samantha Nichols,
Samantha Schmidt,
Samantha Shackleford,
Samantha Sparks,
Samara Affonso,
Samoht,
Samppants,
Samuel Feaster,
Samuel Nibouche,
Samuel Sutter,
Sana Dhingra,
SandSmith,
Sander Snoeren,
Sanderus,
Sandra Bergmeir,
Sandra Godman Brown,
Sandra St.,
Sandra Thompson,
Sandra Tinoco,
Sandra W.,
Sandra987,
SanjayCob,
SannaBergstr0m,
Santana Chisum,
SantanaA,
Sapphirepegasus,
SappireTitan,
Sara Anthony ,
Sara Dyck,
Sara M. Atkins,
Sara Monroe,
Sara Resendiz,
Sara_Houseman,
Sarah Bartoletti,
Sarah Basiliere,
Sarah Beaupre,
Sarah Colllins,
Sarah Cunningham ,
Sarah D.,
Sarah Donley,
Sarah Fitzpatrick,
Sarah Groves,
Sarah Gutting Sheehan,
Sarah Hall,
Sarah Hanley,
Sarah Hinton,
Sarah Hoogerwerf,
Sarah Jackson,
Sarah Jane Fischer,
Sarah Jane Walker,
Sarah Jenny,
Sarah Johanna Willingham,
Sarah Johnson,
Sarah Lewis,
Sarah Martin,
Sarah Mizrahi,
Sarah Parnell,
Sarah Pomfrett,
Sarah Powers,
Sarah Smith,
Sarah Stowell,
Sarah jordan,
Sarah mandella,
SarahPapworth,
Sarah_Mannion,
SarahvanGils,
Saralisa,
Sariemarais,
Sarkadier,
Sarkis,
Sasank Ganta,
Sascha1989,
Sathish.pr,
Satincat,
SatsumaConsumer,
Savanah Jio,
Savannah ,
Savannah Payne,
Savannah Roy,
Scaramorey8,
Scarcat,
ScarlettBlacks,
Scarymum,
Schalk Pienaar,
Schniertshauer,
Schoolio,
Schorsch87,
Schule,
Schultzi,
Schwarzenbach Andreas,
Scissorbird,
Scotssunflower,
Scott2742,
Scrappe3,
Seabury4,
Seafish,
SeaforCinnamon,
Seamsmistress,
Sean,
Sean Berry,
Sean Game-Brown,
Sean and Harlow,
Sebastian Franke,
Sebastian Förster,
Sebastián Andrés Rivera Flores,
SecondgradeNR,
Seegarneluru,
Seekind,
Sekl,
SelenaW,
Sengisisu,
Sereba Diakite,
Serena.hernandez,
SerengetiBob,
Sergey,
Seth Goodwin,
Setthecontrols,
Sevkan Uzel,
Seyoung Jung ,
Shacharkara,
ShadowTigrex,
Shadowspinner,
Shae Emerson Lee Miller,
Shairlyn,
Shakes113,
Shalista,
Shanecia Davis,
Shanneij,
Shannon Osborne,
Shannon Richards,
Shannon Riese ,
Shannon23,
Shantall,
Shanti Coaston,
Sharan.b84,
Sharlabean,
Sharon Britton,
Sharon Darling,
Sharon Primerano,
Shateal McMillan,
Shayna Bedel,
Sheepcake,
Sheila Smith,
Shelbie Guckenberg,
Shellypm,
Sheng888,
Sheppy Shepherd,
Sher1ff,
Sheri Castanon,
Sherib2,
Sheridan Snyder,
Sheryar Suhail,
Sheryl Spencer,
Shill2008,
Shiphrah,
Shir-El,
ShmuelJ,
ShootingStars,
Shoreditch,
Shosha,
Shreya Avirneni,
Shreya Kappal,
Shruti K. ,
Shulamite C. Fajardo,
Shyanne Sanford,
SiDEBURNAZ,
Sian Hromek,
Sidney Rucker,
Sienna Cahill,
Sienna Cartier,
Sierra,
Sierra Stull,
Sightling,
Signy Reilly,
Sillydad,
Silthrina,
Silvasalta,
Silvergirl,
Silvi Dardon ,
SilviaBome,
SimOne_,
Simon Colmer,
Simon Kirk,
Simon Passmore,
SimonV2,
Simondbellamy,
SimpleCarrots,
Singerhoff,
Singing_Ginger,
SirBonobo,
SirHanni,
SirPrisinglyStupid,
SireGrumpy,
Sjaak-Jan,
Skavvie,
Skelm,
Ski83,
Skinnied,
SkinnypigXD,
SkyFall56,
Skydiver33,
Skye,
Skye Nagtegaal ,
Skye Shrader,
Skye-lyn,
Skylines01,
Skynet,
Slade Flood,
Slade_J_Sci08,
Slart,
Sleepinbeauty,
SlickWilly787,
Slugger,
Smeeta,
Smile7,
Smittybird,
Snacko,
Snafu89,
Snapshots123,
Sneeuwkoningin,
Snoopy72,
Snowdrop27,
Snowlarbaby,
Snugli,
SocratesFolly,
SofieMoyson,
SolarEclipse,
SoloBro,
Solomon Carr ,
Sombito,
Sonja Berg,
Sonja Josanovic,
Sonja Schmidt,
Sonja ven den Ende,
Sonjagumm,
Soosi,
Sophia D,
Sophia F,
Sophia Parlade,
Sophia Siegle,
Sophia lalalalalala ,
Sophie Alexandra Barton,
Sophie Secor,
Sophieeeee,
SorAzolam,
Sora_Shiro,
Sorayah Romero,
Sothoth,
SouthernBean,
SoylentGreenIsMadeOfPeople,
SpacePanda86,
SpamRichter,
Sparassidae,
Sparkielspa,
Sparks11212,
Sphurti Dixit,
Spikenstein,
SquiddyStarr,
Squishmoose,
Stacey A. Showlund,
Stacy67,
Starawareness,
Stargazer79,
Starman120457,
StarwatcherHB,
StealthAmoeba,
Stefan,
Stefan Copca,
Stefan Distler,
Stefan Haeusler,
Stefanie Flax,
Stefanie Hollenbach,
Steffan,
SteffiN,
Stegeman76,
Stella,
Stella07,
StellarBug,
StephBlack7,
Stephanie A Douglas,
Stephanie Cote,
Stephanie Frizzell,
Stephanie Haygarth,
Stephanie Sze-To,
Stephen M. Jacoby,
Stephen Plevier,
Stephyoung35,
Sternschnupper,
Sterrennevel,
Steve Muzos,
Steven West Becker,
Steven_D_Mullis,
Stevieb62,
Stewart Fletcher,
Stewart Garnett,
Stewart Pope,
Stinu,
Storeman20,
Stormyy,
Strubear,
Sue Carlson Dunn,
Sue Globensky,
Sue Little,
Sue Pellerito,
Sue Trevatt,
Sue Williams,
Sue-2,
Sue0022,
Suedetess,
Sugarbuzz,
Sultan Ghaznawi,
SundogWE,
SunlessDawn,
SunnaS,
Sunny35,
SunnyD,
SuperManu74,
Super_Shay_Shay,
SuperbiaIra,
Supersciencegirl100,
Superzilla1,
Surpluscat,
Surya George,
Susa,
Susan Alvaro,
Susan Ballinger,
Susan Brankley,
Susan Colfer,
Susan Dymock,
Susan Forster,
Susan Hawke,
Susan McDonnell,
Susan Ruth,
Susan Walker,
SusanAtHome,
SusanLelie88,
Susana Brower,
Susanne Desic,
Susi Osti,
Suyeon,
Suzanne Dropkin,
SuzannePLavelle,
Suzy F,
Suzysharpe,
Svante Lindman,
Svenmeyvis,
Svennie83,
SweetBee,
Sylvain Comte,
Sylverone,
SylvieT,
Symetriuser,
Syriass,
Szaka,
T Bell,
T-M,
TAElder,
TEMPLECC,
THE_JJB,
THGU,
TL,
TLW,
TMac150,
TMeni,
TRothrock125,
TUNG84,
TWebb,
TYGR,
Tabea Steimer,
TadasBlinda,
Taissa Csaky,
Talia Lee,
Talon Bard,
Tamare Ferguson,
Tamarind,
Tamaska,
Tamie Frierson,
TangySpiderLegs,
Taniesha_Sturdavent_PS122,
Tanner Nelson,
TanpopoKun,
TantasticOne,
Tanya L Wagner,
Tanya Oleksuik,
Taproot,
Tara Anderton,
Tara QUARTERO,
Tate Toelle,
Taubenus,
Taurelaiqua,
Tavi Greiner,
Tawnee Hicks,
Tawnytunes,
TayaRenee,
Tayeisha Nelson,
Taylor Dukes ,
Taylor Graham,
Taylor Gunther,
Taylor Nichols,
Taylor Norman,
Taylor Northrop,
Taylor Sparks,
Taz620,
TazeTee,
Tazumaki,
Tcerice,
TeachEarthSpace,
Teashan,
TechnoBeeKeeper,
Tecla Warren,
Ted Cheeseman,
TeeNoodles,
TeejZA,
TehShush,
Tehi,
Ten_cats,
Teresa,
Teresa Buck,
Tereza Belikova,
Terhi,
TerraSpatiale,
Terri Blackstone-West,
Terry Saunders,
Teryl Nolan Hesse,
Teshia Barr,
Tessa Holman,
TessaM,
Tg18,
Thajazzlady,
That1WeirdKid,
The Brennan,
The Chimp & See moderators,
TheBolter,
TheCrowe,
TheDarkVoid,
TheDemonButler,
TheEnds,
TheEpicPrimius123,
TheJewelOfJool,
TheMeeganFace,
TheRealG,
TheSciBoy,
TheSmurf,
TheTurtleKing,
TheWanderer8,
TheWishingStar,
Thebeegirl,
Thel,
Theo2016,
Theodore Bringa Kraus,
Theodore Pritchard,
Thespian,
Thimblewit,
Thomas,
Thomas Becker,
Thomas E Genower,
Thomas Miller,
Thomas Orlando James Martin,
Thomas Rice,
Thomas Upton,
Thomas Webb ,
Thordis,
Thornton.sarah,
Thorso,
Tia Whitehead,
TiagoC,
Tiarra Green,
Tiberius Atturney ,
TiborAcs,
Tiffany ,
TigerDrummer,
TiggyTiger,
TildalWave,
TillyW,
Tim Hreha,
Tim Lorbacher,
Tim and Sandy Lever,
Timmithy allan ,
Timmyg13,
Timothy Pigeon,
Timothy66360,
TimundMatti,
Tina Marie saxon,
Tina Vander Molen,
Tingleton,
Tinneke,
Tipo,
Tippy Bainbridge,
TisDone,
Tlittle358,
Tobias Deschner,
Toblerone659,
Toby Skailes ,
Tockolock,
Toffifee,
TokiWartooth,
Tokki,
Tom Gadberry,
Tom Gibbs,
Tom Kache,
Tom Poleski,
Tom Simokovic,
TomMunnery,
TomPipp,
TomSun,
Tomahawk145,
Tomas Van Verrewegen,
Tommyq,
Tone Iversen,
Toni S. Hernandez,
TonyIP,
Toon-Keesee,
TopTop23,
Tophica,
Totoro_sootball,
Toufic Eid,
Towhee12,
ToxicWheelz,
Toyosi,
Tp,
Trace Bybee,
Traceydix,
Tracy Merlin,
Tracymyles,
Tracyv,
Traker,
Transition Year,
Travis J. Croken,
Travis Moore,
Tredegar,
Trevor,
Tricia Le Pine,
Trier_,
Triniturtle,
Trinity Doan,
Trippthecat,
Triticites,
Trotto,
Troy Bingham,
TrudiC,
Truffle26,
Tschango,
Tslish,
TsukinoYami,
TubbyPanda,
TuesTao,
Tuketi Dawnstrider,
Tummy,
TusenApor,
Tychwarel,
Tykelass,
Tyler,
Tyler Osbourne ,
TypicalTacos,
Tyra,
Tzatziki,
USAequine001,
UandA,
Ubizi,
Ulakj,
Uli Scheuss,
Umpire,
Umut Kaan Karakaya,
UnionJCatForever,
UnscrewedPear9,
Upsilone,
Ursulala,
UsernameJ,
Usurer,
Ute,
Ute Schmaltz,
Ute Schneider,
Uttancs,
Uvibee,
UweRumberg,
V Beutler,
VD,
VJOM,
Vague_Nomenclature,
Vajrapani,
Val Latus,
Valea,
Valerie Fabinski,
Valerie Sousa,
Valina7mele,
VanderStel,
Vandon,
Vandusensummercamps,
Vanessa G,
Vanessa Kovarovic,
Vanessa Watson,
VanillaChief,
Varun ,
Vasyl Vaskivskyi,
Vault_0_boy,
Vedad,
Veegee,
Veevo,
Vera Mottino,
VermontGal,
VernulaPublicus,
Veronica Joy Harris,
Veronica Morris,
VeronicaG,
Verónica Gómez,
ViDrosera,
Vicki Morrow,
Vicking,
Victor Novacki,
Victoria ,
Victoria Baldwin,
Victoria Garcia,
Victoria Gasienica,
Victoria Goodwin,
Victoria Paitaridis,
Victoria Van Hyning,
VictoriaRankin,
Vignesh Poovasagam,
Viktoria Wahl,
VinBla,
Vincent DECKER,
Vincent Hobeïka,
VioAfrica,
Violaceae,
Vipul Patel,
Virginia Beikmann,
Viro,
VirtualDiana,
Visjevisje,
Vital_Signs_,
Vivian N. Ve'er,
Vivian Nguyen,
Vivian Thonger,
Vix_jane,
Vlada927,
Vmadmax,
Volitioner,
VonBach,
Vy Nguyen,
Vérena Renate van der Aar,
Víctor Linares Págan,
WWabbit,
WalgettCz,
Walter Gómez,
WanShan,
Wandgryffin,
Weaglebec,
Weisshandgibbon,
Weltraumkoyote,
Wendy Hawkins,
Wendy Martinez,
Wer,
Werhana,
Werner_L,
Wes Buzza,
Wes Warner,
Wesley RTeid,
Whitney Cochran ,
Whyevernot55,
WiggyPiggy,
Wildbarley67,
WildlifeKatie,
Wildliferspotter,
Wilfred Vlad Tomescu ,
Will Granger,
Willem Forrest,
William P Collins,
WillowSkye,
Willowstring,
Willyr17,
Wimoweh,
Wisery,
Withaar,
WitsEnd,
Wojciech Orzechowski,
Wolfen17,
WoodenHedgehog,
WordSpiller,
WorldofZoology,
Worminator90,
Woutso,
Wpitt,
X3Lord,
XOFrank,
Xander Vleet,
Xarabis,
Xeka,
XexyzChronos,
XijhingsBrother,
Xoannon,
Xombie337,
Xyrla Fernandez Rodriguez,
Yan Yang Tan,
Yana Ing,
Yaroslav Mokryk,
Yasmine Alexander,
Yefah,
Yente James,
YevgeniaC,
YiddleSeti,
Yifei An ,
Yoda555,
Yossi Haimson,
YouTa,
Yousef Moallem,
Ysabell,
Z.Fahnestock,
Z0e,
ZAC8121999,
Zaboomafoo,
Zac Thomas,
Zach B,
Zach Savage,
Zachary M. Wardle,
Zack Goodwin,
Zackery Walls,
Zambies,
Zane Hansen,
Zane Jacobs,
Zanna640,
ZannaDragon,
Zarsus,
Zathrus,
ZebraQ,
Zeck,
ZedCapricorn,
Zen2go,
Zenorian,
ZenzicBit,
Zephammo,
Zerraina,
Zhongar,
Zilli,
Zinhle Andile Mbatha,
Zinic,
Zoe Isaac,
Zoe Roberts,
ZoesAnimals,
Zoey,
ZooBot,
ZooDad1,
Zoobot_TKD_4life,
Zoochall,
Zoomngirl,
Zooniversalcitizen,
Zooniverse99,
Zperry,
Zuppy,
ZuvaSun,
Zuzana Macháčková,
Zygfryda,
Zykerria Ferguson,
Zynobia,
_Marta_,
_mibi,
_riddhii_,
a.murphy,
a.nefedova,
a.pagowska,
a.v.humboldt,
a351,
aanimal,
aaron.king,
ab3,
abastick,
abbeylin1017,
abby,
abdulrafi,
abrilnovoa,
acheng96,
achevalier,
acmesrv,
acmrshll,
acs15e,
actionhippy,
adam.kemish,
adam596,
adam_163,
adamajd,
adamas84,
adamow78,
adamrayevich,
adams_dad,
adamsimpson,
adeletaylor,
adespoton,
adishavit,
adonim,
adriennelily,
aeliane,
aferrell,
affenspiel,
afollmer,
afra123,
afrizzell,
agatkasta,
ageipel,
agnesaaubertgmail.com,
agoodman,
agoogan15,
agrint,
aheinrich,
aheulland,
ahjorth,
ahnlak,
ahsante7,
ahutto,
aidanos,
aideen.byrne,
aidualc,
aileesh,
aimfor,
aimsHS,
airacarvalho,
airplanesrme,
airvin6,
aitkene,
ajaybapat,
ajberg,
ajhickford,
ajissocool,
akalan,
akcustomlova,
akeomuon,
akintner,
akkarma,
akkobelja,
akku99,
akl680,
aknat,
akshaymahawar,
akt2,
akuzniacka,
aladar,
alan.martin,
alanapoole,
alanmebane,
albel1977,
aldanao,
alecorney,
alekna,
aleone84,
alessmoellmann,
alex12345,
alexan47,
alexandra silva,
alexandriaainsworth,
alexav,
alexg341,
alexisn32,
alexno18,
alexrusso1,
alexxavier,
alexzootest1,
alfonsojdl,
alhen123,
alibenali,
aliburchard,
aliceforest,
aliceruth,
alicia jimenez,
alihaggerty,
alina01px2020,
alinorth_893,
alisacrisp,
alison.rhodes,
allanstjames,
allcompute,
allen prabu,
alleyk813,
alli3,
allie_heather,
allsears,
ally83m,
allydunkin,
allyktu01,
allyphant,
alma lessing,
almpvnj,
alohabreeze,
alrp319,
alsipsclar,
altalt,
alycewells,
alymohab,
alymorsi,
alyndal,
alynstill,
alyshaSP88,
alysonslade,
alyssa.vazquez,
alzabel,
amadi.maatafale,
amandae,
amandamo,
amandamontemayor88,
amazon.parallax,
amberluda,
ambermarie,
amedina2,
amellers7,
amf8384,
amgaynor,
amily,
amlsilverstein,
amneris3,
amosser,
amp2003,
amullarky,
amy,
amy.soyka,
amybid,
amyjarman,
amymhoward1,
amyrwenzel,
anamorisab,
anat1969,
anathemaybe,
anbalo1963,
andata,
andersenfamily,
andjamwar,
andre4321,
andrea_1975,
andreap21,
andreapski,
andreicnx,
andrew.harwell,
androidlittle,
anergy,
anevab,
ang96,
angelazhang,
angeldefez,
angelleb,
angelman,
animalfriend1999,
animatus,
anita_leung329,
anjalimanohar,
anjamunder,
anna.feeney,
annalisah92,
annalisehobson04,
annavandergriend,
anneborrego,
annebyford,
anneke64,
annekefreaky,
anniepg,
anniev123,
annimaxine,
annoymenow,
anocan,
antbetty,
anteater21,
antiHUMANDesigns,
antirrhinum,
antmurphy,
anzacspirit,
apatura,
apavlecic,
april12712,
aprilh,
aquitanian,
arafan,
arbayer02,
arbitercay,
arczarkowski,
areBerry,
areichle,
areinders,
areposator,
arfblacker,
argasm,
arh3399,
ariana23,
arielvera,
arlo head,
armandea,
armando752,
armonihance123,
aronnthe1,
arquita,
artbymarion,
artistjillian,
arwen252,
asafum,
asalomark,
aschlemme,
ashura,
asmibert,
aspenolivia,
asplamagnifique,
astaasta,
astabile,
astadtler,
asteer,
astrocanin,
astronomas,
atacama77,
ataly,
atambros,
atatjes,
athina.giannakara,
aubreythez,
aurelie1991,
avanderbrug,
avc13,
ave_eva,
aviator246,
awallen,
aweenink,
aweiksnar,
awething,
awhitwell,
awilson,
awsomeguy1001,
axolotl42,
aydnleeds,
ayefany,
azagh,
azjarose,
azuravel,
baa31892,
baabz,
babsie,
babucurt,
baccorsi,
badpossum,
badskittler,
baffy,
balpsa,
ban4314,
bananie,
banfathi99,
banfield.makayla,
banihal,
barbarann,
barbarapreyer,
barbaravarek,
barbiegoth,
barbora.prikrylova,
barone.katelyn,
baronvoncrow,
barraoconnell,
baruchova,
bastiaan1984,
bastide,
batdog,
baterra,
batmue,
batuinal,
bbushey,
bcat785,
bclvines,
bcurtin,
bd2015,
bean2bone,
beastofwar,
beaumod,
becks688,
beer.frankie,
beeschnell,
beesnees4,
beingbob,
bekahmyers,
belago,
belan,
bell5a_helder,
bella118,
bellaf,
bellagisme,
belucho,
bencgauld,
bendrz,
benjamintx,
bennerino,
bennisster,
benrico,
bergfee1209,
berling,
bermanmat,
bernardo aguerre,
bertix,
beste,
besueandamy,
bethdingman,
bethke.madison,
bethsen,
betty,
bezidey,
bgarrett1,
bgoliber,
bhagyakw,
bhau,
bigcatlover,
biggiesmith,
bigmanbt,
bigworld,
bilalsaeedkhan,
binaz,
biryani,
bisector,
bjmendius,
bjowi,
blackbird1312,
blackninja556,
blahcoon,
blair,
blair438,
blairprescott,
blake.white098,
blanco.rioja,
bliedtke,
blmonroe,
bloop12,
bluebl249,
bluebudedog,
bluefloozy,
bluefootedb,
bluestar82,
blumer17,
bmboswell,
bmontie,
bmorson,
bmtate,
bnisonger,
bob ,
bobinky,
bobthemountaingorilla,
bodoostermann,
bogabasa,
bondailady,
bonobo69,
booja,
boomerb,
boppo,
borad,
bordavis,
borodinpm,
botting,
bowened083,
bowerpizer,
boxbot,
boxingtwig,
boygordon,
boykolos,
bpempire,
bracerup,
bradley87,
brainimpact,
brainod,
brandnewkirk,
brandon lowe,
brandon preciado,
bratverst,
brenda2796,
brendaleejurewicz,
bret707,
bribrantley,
bricheese,
brincess,
britearthangel,
britico,
brittaniefay920,
brittanybeaudoin,
brivogel,
brokaf,
brokenbox,
brooketheschnook,
brown80204,
brownfox,
brunobanani100,
bryan2013,
bsweigart,
bubbakaz,
buchinli,
buchwa,
buckaroo1,
buckh34,
bucksharbor,
buehli,
buerkir,
bulgaria_mitko,
bullet57,
bumblebee2,
bumishness,
bunnigirl,
bunnypenguin,
buntubi,
burgundergerd,
burningquest,
burnspatrick.206,
butterpro,
butterscotch,
bwentz,
bwitti,
bws2002,
bzaleski,
bzpam,
bztaconic,
c.thompson,
c_warrell,
cailina95,
calledthemoon,
callisto73,
calyma,
camada,
cambridge.christian,
camdent,
camera_bore,
camonaco,
camrey,
canadianbacon791,
canekoch,
captainazul,
carabearcanada,
carados,
cararuffo,
carina stjernqvist,
carleylyn,
carlinfel,
carmelle,
carol_mac,
carol_stabler,
caroljh,
carostani,
carylkpr,
carylsue,
caryngreen,
carzet,
casey.paquola,
caseykelly4,
cash717,
casmi,
casmith096,
cass3,
castorandpollux1978,
cat-shepherd,
cat1951,
catalana,
cathcollins,
catherine.frederick,
cathg123,
cathyschlegel,
catlovers,
catmiracle16,
cb.auder,
cbanson,
cblair900,
cboxtoby,
cbrown004,
cbrunquist,
ccain003,
cch001,
cchristopher,
ccoc,
cculbert,
cdafopo,
cdavies311,
cdecesare,
cdickson,
cdodger,
cecilieslc,
cedarsstudent2,
cedarsstudent3,
cedmom,
celticaire,
center100,
certifiedNinja,
ces2691,
ceversul,
cezy,
cfolk77,
cgerique,
cghudson,
cgremmich,
chalyse,
chanels,
channier,
chaosbastler,
charisrooda,
charizardace,
charles p ,
charlotte,
charly03,
chasasum,
chaselynnwarr77,
chaseo0626,
chateaumojo,
chaywood2,
cheetah90,
chekeichan,
chekhov,
chelsea.alex,
cherrycoke,
chevyman142000,
chewitt93,
cheyenne,
chezgoodell,
chh2035,
chia89,
chiarapuspa,
chiararudel,
chichart,
chidingbark911,
chiheb,
chilieh,
chimacummedia,
chimpandseeeee,
chimpler,
chimpsoccer,
chipdale,
chloe arneson,
chloejreid,
chocloteer,
choije,
chollow,
chrbod,
chrichri,
chris2108126,
chrisandann,
chrisas7,
chrisbennell,
chrischi3,
christatraub,
christiancurneal,
christianvanderven,
christine016,
christinevestrick,
christopher.harasta,
chrisujlo,
chrstlvn,
chubbell28,
chuckstar,
chuckster20,
cicadalady,
cigarjok,
cindy543,
cirinci,
cjackso3,
cjanand1954,
cjohnson.2019,
ckamprath,
ckieff,
ckvisser,
claire75,
claireei,
claireofthecosmos,
clairet_84,
claracharlier,
clarejonas,
clarkjw,
claudereka,
clcripps,
clee67,
cleghorn,
clemon,
cliverayner,
clllem,
cllllllloydh,
clochette,
clofgren,
clparker,
clt21duke,
cmadd009,
cmadeira95,
cmarshall,
cmckenney,
cmel40,
cmortara,
cmsquared,
cmurdoch,
cmwgeneva,
cnafrada,
cnorvalk,
cnramey,
co1010,
coachgregnola,
cobrasplinter,
cocolocoblondie,
coconino,
cocorug,
codyduzan,
cogs,
coldcounter,
cole.erin.eedumail.vic.gov.au,
collettesea,
colon,
colowick,
colt,
comelia,
comiqueso,
connnollly,
control,
cookeva,
coolartcaleb,
cooney6,
cooperjohn,
coppard,
corbettjn,
corduroyfutures,
corgi-mom,
corto,
corvi42,
coryphella,
coslisa,
cosmic.chameleon,
cosmicos,
cosmos77,
courseiam,
covervig,
cpilkentontaylor,
cplking,
cplummerabb,
cps1500,
cr0m,
craftyscientist,
crambacher,
crawlfast,
crawlingChaos,
crd3000,
creationists,
criscat,
croenan,
crottyfamily,
crowlord,
crystalhutchins,
cryvtvl,
cschraft,
cschueppert,
csmithgeddes,
csutter,
cubear,
cushman.kc,
cuynchips,
cworm,
cyanocitta,
cybersue,
cynlynten,
cynt80,
cynthgray,
cynthiag,
cyzaki,
czapien,
czechkate1,
d.eileen.d,
d1e1b1,
d8sconz,
d_idaho,
dab vine,
daemonbarber,
daestwen,
daffodil3,
daguta,
dahmin,
daiant,
dakeeps,
dalai,
daleet,
daleh,
daliahalfon,
dallya809,
dandan595,
danielpepkezoo,
danielsd,
daniiielaaa_hurtadooo,
danisha,
danjurious,
dankpoet,
dannybussy,
danrol,
dansmith87,
daphne_7,
darcybennett,
darcygrose,
darici,
darkquetzalcoatl,
darwin829co,
darya_lagrange,
darys21,
dav2000,
dave1056,
daveb9000,
davemaze,
daveross1971,
davet679,
daveytay,
david.gilbert,
david_faure,
david_gb,
davidkoch,
davidtroy418,
dawneagle,
dawnydawny,
day.breanna,
dayiel,
dbadger,
dberkenbilt,
dbot,
dbreniser,
dbuske,
dca1,
dcampbell21,
dderek,
ddiak,
deafscribe,
deandsusan,
dearl1103,
deathscowboy07,
debJESSski,
debbditt,
debmwill,
debnad,
debora713,
deborah16,
deborahsigler,
debperry,
debwil70,
deepwatch-1,
deetaurus,
dejaboo,
delbson,
delenaj,
delpiano,
delta_lady,
deltagamma,
delve202,
delxdune,
dembiecc,
denizenn,
derangedberger,
derschmiddi,
desh57,
desjarlais.austin,
devn clark,
devon.vt,
devonette,
devot,
dgmarc,
dgranrath,
dholoviak,
diamondback,
diamondone1999,
diana_monkey,
dianag825,
dianaserengeti,
dianebradley,
dieSeife,
diemade,
dileka,
dinjones,
dios,
discoverer99,
divakiana24,
djg6868a,
djlillil,
djmccon,
djsato,
dkb16d,
dksteele75,
dlr,
dlruthven,
dmaboyer,
dmagoo,
dmrtennis,
dmundil,
do_eme,
doctormoss,
dodgsonfamily,
doge panda,
doggers,
dohr,
dolphinate,
dolphincrazy,
dolphiny21,
domibowd,
dominik richardson,
donalthefirst,
donsa,
dont worry,
dor7539,
dorhel,
doriazul,
dorothybaez,
douglas.gregg,
down1nit,
dpcoop2,
dpellerin,
dpopovitch,
dragabalaur,
dragnfly,
dragosioan,
drbreznjev,
drehgeigerin,
drek,
drewbe,
drewmurphy,
drewthemunky,
drhibbert,
drizzle.virus,
drizzly,
drjenncash,
drmmha,
drobinso,
drtryan,
drummerboy5031,
dryden Shillingburg,
dsekkes,
dshowell,
dsmyth6,
dstarzfn72,
dtimmermans,
duban,
duckysempai,
dulsky,
dumdiddlysquat,
dunealex,
dupagirl,
durifon,
dvbrandon,
dvc214,
dwaynemedic,
dwhite1,
dwlameris,
dwmjmm,
dww257,
dydel420,
dysonkl,
dzezi,
e-motiv,
e2d2,
e8cm5n,
eCid,
eagleclaw,
ebaldwin,
ecodiva88,
econaut,
ecorrigan,
ecotrace,
ecsjcwru,
ecvegghead,
edaro,
eddaw,
eddiecharles,
edemars,
edmo2013,
ednapissenlit,
edoxseywhitfield,
edwardlau,
eecamp22,
eellwood,
efb,
egeland,
egor_12,
eileen96,
eilemach,
eisenhuth101,
ej77,
ejc123,
ekaterin,
ekt1228,
el chapo,
elaineoutdoors,
elcat24,
elcharlot,
elcwt,
elegantemu,
elenalin,
elephant20,
eleys,
eliedeker,
elisame,
elisugar,
elizabeth,
elizabethyeomans,
elizzak,
ellaelizabeth,
ellenkronberg,
ellenmiller1,
ellie_28,
ellipsis6,
elliscarly,
elloriac,
ellwoodmm,
elmod,
elmuchacho,
elsareed,
elseag,
elshrky,
elskabette,
eltreno,
elvraie,
elza,
ema98,
emalberstadt,
emalie22,
emberke,
embowen,
emerald_ware,
emily,
emily.juckes,
emilymynett,
emma,
emma2015,
emmabrooks,
emmacashmore,
emmacnapper,
emmalaura727,
emmar.luvs.cats,
emmatigerlily,
emmeline525,
emmmmmfox,
encephsagan,
encherend,
enderb,
endreh,
enr987,
enthusiastic_cell,
eoinrouine,
eosimias,
eotunun,
epaltzat,
eparroway,
episk22,
epple.grant,
eptitsyn,
epurvis,
eq15,
ergalty,
ergo1,
eric.rosenberg,
erica galvan,
ericahart,
erichowe,
erik_van_bijsteren,
erika.endo,
erikahowell,
erikamedwards,
eriki419,
erin strozier,
erin_bell,
erinmontague,
erkslook,
ernderfer7125,
erool,
ersmither,
esazure,
escholzia,
estefanoescarate,
estlineero,
ethurau,
eupharina,
evagr,
evel_chihuahua,
eversuhoshin,
evilpokefairy,
eviltigerlily,
eyarbrough,
f4phantom,
fabfran98,
faithkatts,
faithv,
falconview,
fantasticmrsfox,
fardal,
farooque,
fathizahdeh,
fayeherold,
fbarulli,
fbn79,
fbw0304,
feather813,
fedooora,
feigdafugl,
felagund_angarato,
felicitywallis,
felipenic,
felix330,
felixo42,
felmy,
fem302,
fengist,
fenoloftaleina,
fenti,
ferisaw,
fermor332002,
fernfire,
feywoods,
fezzik,
ffayiga,
fffnerigmail.com,
fi-t,
fiashhh,
fifibantam,
filippac,
fillyba,
filska,
fina-1,
findtheriver,
finleyg,
firecatstef,
firepig,
firgym,
fisera,
fisticuffpuffs,
fix83,
fizila,
fjoellnir,
fjuveneton,
flaipert,
flamania,
flamey_amy,
flavio.p,
flavios,
flee67,
fleisnat,
flemming.westphal,
fliederle,
flipit4u,
floatingadrift,
floortap,
flossiecelia,
flyingchina,
flyingfox0212,
fnenu,
focus54321,
foghorn90,
forcher,
fortheocean,
foxfluff5,
foxtrot-lima,
fran,
francesca,
franck53,
franklindhaven,
fraukelich,
frauleinfisch,
fred572,
freddyh,
freeday,
freelulu,
friendlysceptic,
friesiancrazy,
frimkron,
fringenious,
frizzymom,
frl.u,
frogking95a,
frogln,
frolicksome,
frozenchosen,
frozenlandscape,
frumpywebkin,
fruusje,
fruusjemonty,
fryalls,
fsc2ou,
fscelzo,
fstep,
fsu1216,
fsukristen,
fukkthemainstream,
fukomys,
fukthemainstream,
fullet003,
fullsteamahead,
funk07chick,
fuocofatuo,
furryspaghetti,
fuzzyemma,
gaa17,
gabbieb,
gabriel90,
gabriellewragge,
gaelike,
gafullenwider,
gahall44,
galaxer,
galaxie062,
galaxytrekker,
galaxyzoo_guy2,
galefernow,
gamer.jamer,
gamerdad,
gamincat,
gangerolv,
ganstead,
ganymeed,
ganzegal,
garamnonfok,
gardenfairy,
gardenmaeve,
garnerdillon,
gavied509,
gbemis,
gbpereira,
gbrewer,
gcloud94,
gdeyoung,
gebue,
gelenika,
geminidragon,
gemira,
genbug,
genogenie,
gentlesoul,
georgeblack,
georgepickles,
geoski,
gerda24,
germain1,
gertyrose,
ggdchip,
gghlyon,
giarcsllim,
gieskem,
gill14,
gillskill,
gilsm0m,
giova53,
gl367,
glashelder,
glberg,
glenda701,
glicaj,
gloris69,
glsk,
gmheck,
gmzabos,
gnagy5,
godlesswarriortm,
goelzerrf,
goffrey,
goggins,
gommersellen,
gonzo818,
googee3,
gorgonsit,
gort_industries,
gotauber,
govinpandian,
grace.,
grace.santone,
graigrai,
grasilda,
grazynastasinska,
greenscrubs,
greye,
gribblet,
grieve,
grimkin,
grisu33,
grok00,
grom,
gromozeka,
grondinm,
grumrill,
gsolano217,
gspier,
gst,
gtmh,
guercait,
guevfamily,
guitarist53188,
gunnip.olivia,
gunnroni1,
guruguru,
gwennie71,
gwhw,
gwynmor,
gythaogg2,
gzconf1,
gzconf2,
h.r.,
h3px,
hairygoats,
haleyarnette,
haleynicm,
halkruth,
halogne,
hammond family,
hamptongray23,
hanbag,
hand0532,
handows,
handreae,
hannah314,
hannahjwo,
hannahmg,
hannahrq,
hannahtheamazing,
hannamarielei,
hannanowak,
happy-accidents,
har6inger,
hardicnut,
harlequindoe,
harrycooke,
harrygrounds,
hartel,
hartwellc,
hasi_s.,
haskielr,
hasny,
hatfights,
havard.skj,
haversine,
hawkins.hannah,
hawpaw,
hayitsdavi,
hazard2802,
hazef,
hazelhorse425,
hbb,
hbbuddy,
hbun,
hckiger,
hcps-philliptm,
healymk2,
heath75t,
heatherleach,
heatherotis,
heathv,
heikepe59,
helabi07,
helen_butland,
helen_fewlass,
helena_ErrorDupUsername,
helene couprie,
helere,
helicity,
helloxeno,
hells_bells27,
heluna,
hendrens,
herdis,
hermannschwaerzler,
herondale123,
hester ,
hestie3,
hetts07,
heyguysitsliv,
heylei,
heyytheredude,
hfultonbennett,
hhendriks,
hial3,
hikarciu,
hilari4572,
hilit1983,
hillarygramlich,
hindenburg41,
hlkwd1,
hloliver,
hlp470,
hm99,
hnorab,
hoffi23,
holgerh,
hollygalluppo,
holmanart,
holzwege,
homermeyn,
hooplas,
hoothoot,
hopelessheron,
horkusone,
housegnome,
howdy2u,
hparker,
hsinger,
hudacko,
huffe32,
hugh12,
human_jenome,
humulus1,
hunterc273,
hussar,
hutchiebaby,
huxsterkate,
huy.duong,
hvh,
hwalkden,
hydrogene,
hyhopes,
hymnsf,
hypatia2012,
hypermonkey,
iDee,
iGoab,
iSophie,
iTinnitus,
i_bojtor,
iamfern,
iamgeorge15,
iamscotty,
iancl,
ianfinnesey,
ianhuk,
ibclc2,
icecoldjjs,
icm,
icortez,
icy76549,
igonzalez,
ih18,
iiatridis,
ike.gabrielyan,
ikesplace,
ilajoie3,
ilinz,
illlookforit,
illvibetip,
ilovecats,
ilovesquidsbecause,
iluvdolfinz,
imagine57,
imhotep.ojha,
imketys,
impossibleiseasy,
imrexus,
imtushay,
inab.ecker,
incibkaya,
inercy,
infinityLTFS,
inge janson,
ingridgalejs,
inkaplit,
invader,
inyene,
ioannes,
irenera2002,
irinashemonaeva,
irisheye,
irma12,
ironinleeds,
irridescentsong,
isabelledechabannes,
isadora_paradijsvogel,
isamaz,
isault,
isjke,
isol,
itak365,
itanio,
ithicks,
itsJim,
itsanurkithing,
itsfullofstars,
itsmestephanie,
itsoteric,
itzHydrq,
ivanad92,
ivanovp,
ivantama,
izabelakrause,
j50yab,
j_bewick,
jabati13,
jack2490,
jackej,
jackie.hawkins,
jackieolanterns,
jackstogo,
jacky0007,
jacob pospeshil,
jacquie lovell,
jadiss,
jaegerinha,
jaelyn,
jafuli,
jagdish85,
jahpepo,
jakadi,
jakoac,
jam2269,
jambojulie,
james,
jamesson dunbar,
jamesthejet,
jamie.jones,
jane.todd,
janelleb,
janety24,
jangugliotti,
janice_mcc,
jannc,
jannydunn,
januskians,
janwyn,
japhile,
jara.monica,
jasenwise,
jaslegit,
jasmine baptiste ,
jasonisorange,
javieryohn,
jaweghor,
jayceerae,
jaykee,
jaylow,
jayne.collier-smith,
jaynelynch,
jayras,
jazzy234,
jbacken,
jbailleu,
jbakes89,
jbbeau,
jbon93,
jboo,
jbrace13,
jbundy,
jbvm,
jcdenton,
jclowry14,
jcobbholmes,
jcolter,
jcsuperfly,
jd10,
jdcharteris,
jdemers,
jdhouston,
jdure24,
jeffabre,
jeffheif1,
jefftheengineer,
jeimer,
jemaus,
jemaverick,
jen_gupta,
jenbaby0122,
jenbflower,
jengee,
jenich,
jenmcd,
jenn bojorquez,
jennandrews,
jennfurr,
jennifer_greenfield,
jennysellmark,
jeremiah.mckinney,
jerome48,
jeronobo,
jeskarp,
jess.spinosa,
jessbou4860,
jessica9291,
jesus gonzalez avila,
jetcahill,
jftex22,
jgeschke,
jgjknight,
jgraber,
jgrablin,
jhawes,
jhook,
jhuey,
jill lawrence,
jim_pea,
jimjacknjohnnie,
jimtxmiller,
jinxo,
jinxxx07,
jjbluejay,
jjlvscj,
jjohntaylor,
jjonmyown,
jkneb,
jkolb01,
jland,
jmalcomb,
jmayhew,
jmccluskey,
jmkwon,
jnick753,
jo0oley,
jo_colsal,
joanne82,
jochair,
jodic,
johnmoore84,
johnny_duke,
johnstar25,
johnstonaa,
johny50,
jojohase,
jojow,
jokergirl,
jokuunal,
jolkeur,
jollyrogered,
jolon,
jomortimer,
jonasmmiguel,
jonathanhl,
jongray,
jonnm4,
jooliargh,
joolslee,
joopvanbirgelen,
jopo1987,
jordan schmid,
jordberry,
jordyman456,
jorgegil,
jorie.hh,
joshua,
josieandelliewong,
josiepegg,
joy_t,
joyjoy,
jozsa,
jpcatanzaro,
jpsokkernut10,
jrddias,
jrfs,
jrinear,
jrlyttle23,
jrmw,
jrosese,
jrozyczka,
jrutter,
jschell42,
jscottlenz,
jsjames1,
jsout1234,
jsprake,
jstaake,
jteselle,
jtj616,
jtreisner,
judi0491,
judybee,
judyross,
jujutherock,
jules9687,
juliabrazas,
juliflip,
juliotijuana,
julton,
jumpin,
jumpingranch,
junemb,
junograham,
jurasan,
justdave,
justmehere,
jvigo,
jvilaseca917,
jwhit,
jwidness,
jwmaritime,
jwmast,
jwmccomb84,
jwpepper152,
jxczer00,
jynto,
jypce,
k-s,
k.till1484,
k0zm1k,
kahbel,
kaity7007,
kaiwatha,
kakabeak,
kalbuzzy,
kaleem,
kamadden,
kamandizi,
kamilg,
kamv,
kandratii,
kanek,
kanliker,
karainio,
karanrajpal,
karenkantor,
karilyn1976,
karl_affeldt,
karlo4s,
karnarvon,
karu58,
kasiasa,
katacs,
kate_r,
kateboyd,
katelynn,
katemurray,
katesmccloud,
kathb,
kathfossil,
katie8107,
katiefanch,
katiekatt89,
katiekeyser_PS122,
katiekoplien,
katieofoz,
katieverett,
katkz,
katlittell,
katneils,
kats1285,
kavic,
kazza26,
kb96,
kbaptist,
kboehner,
kbonatti,
kbreckan,
kbreckangmail.com,
kcanfield,
kcoflondon,
kd14g,
kdroan,
keanna_165,
keel,
keeoeek,
kefitz,
kellyaggie,
kellyaidanp,
kelmcgrath,
kelsoli,
ken.i.brown,
kendevine,
kenk205,
kennij1,
kenogo,
kenzie ,
kerenor,
kerobero,
kerri12,
kerry walker,
kerynnisa,
keshetergon,
kevinable,
keytofly,
kfox,
kglass4462,
kgriffin3611,
khauglund,
khbuhr,
kianthras,
kibebe238,
kikametong,
kiki2008,
killerlou,
kimaire,
kimberly535,
kimboakimbo,
kimuenga,
kingaytata,
kingofspain1234,
kinseyatoz,
kinstelli,
kioruke,
kip2511,
kiraklindworth,
kirazz,
kirbecker,
kirino,
kirstysianevans,
kitsmelf,
kiya9132,
kjanson,
kjericks510,
kjetikada,
kjyg,
kk58,
kkenmots02,
kknight.4,
klaartje,
klb1015,
kledoux1,
kleistf,
klepsch,
klrainey,
klroot,
klwaffle,
klynne28,
kmacdonald,
kmcafee1812,
kmiyoshi,
kmkulasza,
kmlm,
kmmills,
kmmunchie,
kmt65msu,
kmunday,
kmzim2015,
kneff39,
knyghtowl,
kokrui,
korinna999,
koshlap,
kotagabi,
kponce03,
kragh,
krakenzmama,
kreiman514,
krhill322,
kris sharpe,
kris79,
krisis,
krockit,
krolov,
kruegnad,
kruseau,
krutki,
krwalkup,
kryfie,
ks14k,
kschlotfelt,
ksipe,
kswitalski,
kt171,
ktangell9,
ktarkin,
ktgeorgeous,
ktouchstone,
kucheryash,
kujata1,
kulinki,
kungpowell,
kuuipo83,
kuuurista,
kwirk,
kyamini,
kyaniab,
kyburg,
kyilmaz,
kyogreman,
kzajde1,
l4abergmann,
labocania,
lacey coleman ,
lachiester,
ladymink,
laenavarro,
lafiff Poulin,
lailabee,
lainie52,
lankiel,
larmcd188,
lascolarry,
lau3rie,
lauginn,
laura.berrier,
laura_germany_,
laurabjustesen,
laurabolt,
lauramaywigby,
laurasamson,
laurawhyte,
laurelbcr,
lauren johnson ,
laurenbreining,
laurenriv,
laurins,
layka13,
layman,
lbaracchi,
lbriard,
lchad,
lci17410,
lcourtneysmith,
lcw21,
leahbrilman,
leahlou99,
leannk,
lechaussette,
leeboy25,
leetcat,
leio,
leire,
lemurs366,
lena2289,
leo bijl,
leohelm,
leonidas907,
leonov1,
leopardi,
leopardspots12344,
lesbentley,
lesfromages,
lesley gough,
lesley.mazeyhotmail.co.uk,
letheward,
leupster,
levint,
lewnich,
lexij11,
lfv,
lg63laddhotmail.com,
lgiannini201,
lguidera,
liam2390,
lianderson,
lianne_m,
libervurto,
lieselottl,
lifesart,
ligaron,
lightness,
lilacwood,
lilapot,
lilico,
lilmonster4evs,
lilybloom,
limelikelemon,
linda.kutackova,
lindapnoe,
lindawithblueeyes,
lindek,
lindsay.patterson,
lindseyb420,
lindylooo1,
lineds,
lioncub12,
liondave,
lisacorewyn,
lissasmom,
litlewolf2,
littlebeard,
littlebug47,
livlorton,
lizardo,
lizbuffy,
lizmaple,
lizplanet,
lizzyshoe,
lizzyvet1,
ljalthoff,
lkirshaw,
llara,
llavoie,
lmbloom8903,
lmcco,
lmcmillin,
lmd6508,
lmf76,
lmgumby,
lmhornig,
lmusytschuk,
loader96,
lobotupgrade,
lobstersocks,
lobusparietalis,
lodonzo,
loerie,
loggins,
loisendaan,
loketimes,
lolobaba1,
lolonaze,
lolortie,
lolpus,
lolymie,
lonelyviolist,
lori2u,
lotsofloudlaughing,
lou7428,
loubelle66,
louisdresner,
louiseee,
loupdethies,
lovelysweetpea,
lpage,
lpatrie,
lpdavis,
lprussell,
lpspieler,
lrigby,
lsherman,
lsteigma,
lstephanski,
lszatmary,
ltakiguchi,
luca-chimp,
lucashh,
lucy55,
lucycawte,
lucyyyr,
ludyboots,
lueasley,
luisandresgonzalez,
lukejballard,
lukeonzoonivere,
lula0502,
lula14230,
lupham,
lusihuei0504,
luxray978,
luxtina,
lwerden2uwo.ca,
lyleje9,
lyndsey1987,
lyneille,
lyraloo,
m,
m1saac,
m3ganbarker,
mIl0van,
m_tennison,
maaikell,
maarten_k,
macaodha,
macgarvin,
macgbrown,
mackenzie.mr,
maddimendoza,
madeinspace,
madeleinen14,
madelinethespy,
madison young,
madsterr,
maewe,
mafgross,
mafintel,
magaa,
magdalen_n,
maggiea2598,
magic5379,
magnoliahigh,
maheer425,
mahynoor,
majac,
majportugal,
makaylamay,
makenziedespres,
malachi,
malcolm mcewen,
malforma,
mallory9163,
malloryladny,
malpiszony,
mamamuh,
mammyflo,
mandellamom,
mandiwaite,
mandyaldridge,
mandymayhemxo,
mantide,
manxkats,
maoa86,
mapat,
mapper,
mapreader4,
mar10g,
maramara,
marbrady,
marc085,
march3258,
marci61456,
marciasou,
margef,
margie,
mari laughlin,
maria elena velez,
maria villarreal ,
mariafanning,
mariannas,
mariarmoreda,
mariechloe,
marije2410,
marijke.desmet,
marina83,
marinadoukaki,
marjanneo,
mark athanasoff,
mark2004,
markhu,
marlfox580,
marrinertg4592,
mars118,
mars26,
marssociety,
martaczc,
martinbrooks,
martinsandvik,
martybeans,
marvelacad,
mary.e.boswell,
maryann,
marydeki,
maryjane,
marysiarunner,
marywashburn,
matej.zabojnik,
mathieu_farina,
matiaslilsis,
matildak,
mattb1792,
matteo1297,
matthewsarro,
mattiab,
mattlarrea96,
matty406,
maureenmccarthy,
mauricio ureno,
max,
maxdav01,
maxhy,
maxioi,
maxozaur,
maya011,
mayah24,
mayakiona,
maylu,
mbadger,
mbailey9573,
mbaselga,
mbatla,
mbosshard,
mccauleysa,
mccreery64,
mcgeea95,
mcgovernnj,
mcheek21,
mcintold,
mckinlayross,
mcoates,
mcoc,
mcolston,
mcru1601,
mdlw,
mdomaille,
meegja,
meerclar,
meganc30,
meganesmith539,
megankieran,
meganswanson,
mejones,
mekukat,
mel.s,
mel907,
melach,
melendil,
melinda kahl,
melissasmith08,
melvinj,
meme172,
memorymays13,
mercedes123,
merra1979,
metridious,
mflannigan,
mfolk77,
mfrisbie8,
mgarcia.eva,
mgarman,
mhammergren,
mheitm,
mhxyqwe123,
mi77ian,
mialumme,
miatafan,
micfil,
micha2718l,
michael roberts,
michael971,
michaelraeber,
michaelzucker,
michal108,
michlocke,
micmac274,
midnightiscute,
midnightisfluffy,
migrill,
miguel,
mikaselm,
mike.moothart,
mikej45,
mikelinz,
mikem2,
mikes269,
mikestill,
mikkajones,
mikyuzumaki,
milano,
milchtier,
milechampion,
milena.marin,
milenski,
milesnoir,
milesspencer,
milksneaker,
milktime,
mill9,
millbrook3,
mimatronic13,
minervadreaming,
miniMinN,
minty737,
mireillebocksberger,
mirl,
missadventuregm,
missbland,
missdriller,
missk2u,
missmomma86,
misterwoodward,
misteryoung,
mistletoe,
mitata,
mitchell631,
mitsyamarsupial,
mixu74,
mizliz,
mizracy,
mjac,
mjelse,
mjohn15,
mjourney,
mkenz1099,
mkmanos,
mkmcguir,
mkub,
mlambert27,
mleinspenner,
mlhanna,
mlsacg,
mlx0000,
mm2223,
mmadden542,
mmangos,
mmartarano,
mmatessa,
mnowy,
moacir augusto coutinho,
moefinley,
moex0125,
mogget_storm,
molllyequinnn,
mollmsd26,
mollus,
molly davis,
molly.stockmeyer,
momendes,
monaxue,
monbut,
money_princess,
monica moscatelli,
monicahaponski,
monkettales,
monkeyboy23,
monoculars,
monsol,
moolyballerina,
moomoohk,
moon001,
moonpie68,
morage,
moreno.meghan,
mosaic_world,
mosey_9,
mourningdove,
moxiemouse,
mpazrp2006,
mpeoples,
mpobocik,
mpriestman,
mqq,
mr.lee,
mrObbens,
mr_seeker,
mreynolds81,
mrkjad,
mrmanatee99,
mroliverridley,
mrsalphageek,
mrscoyle,
mrsmarriage,
mruffing13,
mrvaidya,
mrybczyn,
ms.physics,
ms4225,
msaligned,
mschwirck,
mseyler,
msnafziger,
mspooner,
msterreb,
mswmom,
mtjan2014,
mtornese,
mtran97,
mturman,
mudkip201,
mueslifix,
muis24,
mujumdarsr,
mulehollandaise,
murtaghc,
music788,
mutiertespinne,
mx46myra,
myfriendemma,
mynightmare1,
myrddin,
mysteryparry,
myuniverse,
myximagination,
n13j,
n165741,
na215975,
naaninn,
nadavbensh,
nadine gorman ,
nadiribro,
nadjahersacher,
nads,
naffi,
nafnaf,
nairobinights,
najuris,
nakilah,
naminag,
namtansign,
nanou,
nanselmus,
naomizf,
napdaw,
naralara,
natasha-gray,
natban1,
natbosici,
natcarish,
natchaphon.r,
nate,
nate.liz.white,
nathandench83,
nathanuy1,
natkaa,
natkomba,
nature,
natwins,
naumenko.pavlik65,
navegante,
navill0948,
nayerakhairat,
naynayfalife,
naysin27,
nbcdoe,
nbetzca04,
ncmastroddi,
ndarbysh,
ndw753,
neanderthaler6,
necoras,
neebobs,
neece_haynes,
neidzwiedz,
neitsa,
nelli,
neverendingo,
neverett,
newnewschool,
newyorker570,
ng15,
nh,
nicchic8402,
nicciden,
niceday1422,
nicestjerk,
nichollsm,
nicklebear,
nickyherbert1,
nico61,
nicobañez ,
nicolec8,
nicoleflynn18,
nicostone,
nieja devaughn,
nigeq,
night,
night_dust,
niirilli,
nikania,
nikia11,
nikolatt,
nikolosp,
nina ,
ninjabel,
nipnlap,
nirving123,
nitramko,
niv1,
niyer,
njcusack,
njwwright,
nkarn,
nlbookworm16,
nmaad083,
nmc19892004,
nnova,
noblehomeschool,
nogoodkris,
noidedpanda,
noitallgirl,
nola1034,
nolanedwin,
nomanslogin,
nonnel,
nonword,
noonizooni,
nopedose,
norauk3,
noromo,
northcrunk,
northernflower,
northernlimitptv,
nosnibor3,
notgucci,
noxvix,
nozee,
np33,
nrajkarnikar3,
ns22121,
nsm,
nsmel,
nspacefire,
nstoker,
nthrockmorton,
number1suzuki,
nutznix,
nwstone,
nyrmta,
nzRichardS,
nzanga,
obaratch,
oberon1066,
obnebion42,
ocbrown,
ocean171,
odele,
ohecbamboozledagain,
ohmanen,
oiramm,
ojacobs21,
oldapartment,
olekkurszewski,
oleksaberik,
olems,
oli123456789,
oliverhodson,
oliverlevitt,
olivers,
olivia.blake,
oliviarrrrrr15,
olivier,
ollyc,
ollydavey,
olsonl,
omicronCeti,
omnicron13,
oneill4,
oneillg2g,
oneoneone,
onetheycalldru,
onlylobster,
onnikur,
oo.adeyemo,
ooburai,
orangebird,
oraulins,
orchinati,
oriza,
oroanto,
osanna,
oscarmental,
osprey_neaves,
ossoz,
otac0n,
ottecktom,
owenfay,
p.titchin,
paLLadium2,
pablos87,
padfoothp,
pagh1,
paigeautumn,
paigefigone,
paigefroebe,
paka6789,
paleogen,
palmin,
pamba,
paminaz,
pampalini,
panPsax,
panek_attack,
paolojs,
paot,
pappasadrian,
pashley108,
patchwork,
patjohnstone,
patmcg,
patoberli,
patomac,
patrick32,
patrickeli,
patrickmobyrne,
pattinv,
pattodiablo,
paul miller,
paula33,
paulaclemente,
paulgregorylang,
pavovegetariano,
pawel_pleskaczynski,
pazithigallifreya,
pckkelly,
peachblossem,
pearcec,
pearlthetiger,
pearson.andrea,
pearsov,
pedaz,
pedi59,
peekay42,
pegk56,
peiftronica,
peinguinsandpolecats,
pekkish,
pendulin,
penguin1221,
penrica,
pepperell,
pepys1,
perfecti,
perkele91,
perpilami,
perryroper,
persiacr,
pessa3,
pete_48cck,
peter.warburton,
peter1234567,
petervanh,
petrack,
pflodin,
pg338,
pgiroux,
phantome,
phil.gringer,
philip,
philjpwade,
philomena,
phkorslund,
phoenix1284,
phraps,
phsc1001project,
pi2fly,
pianissimo,
pianokungfu,
piccolino,
pickle777,
picklesplays,
pigron,
pilpolly,
pimentelius,
pinguingud,
pinkpuppy,
pinkus,
pinkynz,
piotrek1010,
piotrexmeow,
pioup,
piper73,
pippo0815,
piratebrit,
pixelmesh,
pjjhurk,
pjp1959,
plambert78,
planet059,
planetari7,
playmobil,
plev13,
pmborodin,
pmgreen,
pogostickies,
pokedragonboy,
pokodinero,
polanski,
polly1,
polo24,
ponyry,
popocatepetl,
poppy42,
poppykentucky,
poundmaya,
prcromer,
priancohen,
primatelady,
princessclockie,
princesspeet,
prl,
protagoras42,
psivewright,
pssbanks,
pubgypsy,
purplegedge,
puti,
pyrosomida,
qingdou,
quarkcharm,
questathon50,
quetzalc,
quiet,
quinnr,
quinnrice127268,
quizmike,
r2rook,
r4s1,
racegirlj,
rachable,
rachaelfacey9,
rachelha23geo,
rachelklose,
rachie0204,
raddas,
radek_p,
radfordr,
radulfr,
rafael,
rafucho47,
ragingreshiram723,
rainbowhamster,
rainer,
rairai600,
raito,
ralab,
ralph.livockgmail.com,
ram2009,
ramberts,
ramoratz,
ramosc07,
ramsaut,
randolmm,
random_awesomeness,
randthor,
raphaelmaier,
raphi615,
rartho,
rascheper,
rashaveraka,
rastamon888,
rathole,
ravendrop,
ravenik45,
razvy97,
rbarba,
rbundy,
rcmason8790,
rcookman,
rcr14,
rderival,
reaganakgrown,
rebbi,
rebecca,
rebecca1104,
rebekahal,
recepturka,
recrea33,
redchaz57,
redragonPL,
redzep66,
reemab4,
regatavalon,
regularj,
reikirob,
reillyknight,
reimalkav,
reishi,
reki,
rema,
renato24,
renee corlett,
rennbird,
res96,
retiredbeep,
reydi,
reynaldo yanez,
reynold,
reynoldsjake1997,
rflaherty,
rfoltram,
rheineman,
rhonan,
rhys123,
rhythminc,
ribbit21,
richarddwelsh,
richardellos,
richbentley,
richiefelid84,
richieheal,
ricmwood,
riedel,
rieswesdorp,
rieta,
riley,
risk,
riverdrifter61,
rixey,
rjane314,
rjbrown1,
rjjm,
rjmarmaro,
rkresack,
rks7257,
rlcantwell,
rllrllrrlrrl,
rlorez,
rmana,
rmcavoy,
rmklaus12,
rmolinand10,
rmorden,
rnavajas,
rob054,
rob_moore75,
robbecorne,
robbiederoo72,
robertbirnie,
robertsteffeck,
robinlparkinson,
robright,
robuzon,
rocketdriver,
rogerwepf,
roguetamlin,
roknrn,
roland10,
rollingronnie,
roman madala,
romijean,
ronald8e,
roodle,
roosiezon,
roseandmoss,
rosie467,
rosie_lol,
rosrac,
rotband,
rotelippen,
rowejhs,
roxiesal73,
rphubbard,
rprblk,
rpricedavies,
rqj123,
rrpbgeek,
rrramtin,
rschini,
rsissons,
rsnberry,
rsuroland,
rubyred,
rubysolitaire,
rudim2014,
ruimacaco,
rush1125,
rusthen,
rutho13,
ruthparker,
ruthr,
rverghese20,
rvreeke,
rwbcanada,
ryan-rsa,
ryan.ippolto,
ryantmadden,
ryber1,
s. ledgerwood,
s.giersch,
s16947,
s17010,
sab2163010,
sabine-hh,
sabse,
sabsi1608,
saganesque,
sahil160,
saian,
saintrain,
sakatz,
salaa,
sallison13,
sallyjack,
sallykd,
salmarutha,
sambrea,
sambuca,
samg05,
samikes,
sammykinz9,
samvalentine,
samvdb,
sana hattar,
sanatem,
sanchez.sergio,
sandra_pilkington,
sandsmithglass,
sandyb777,
sangroff,
sanguinefiend,
sara_haveron,
sarah azuelos,
sarah callison,
sarah.g89,
sarahanneingram,
sarahcornacchio,
sarahelizabeth,
sarahh1193,
sarahkdavis21,
sarahmjl,
sarahruddell,
sarapotato,
sararae,
sarareed,
sarasefton777,
sarinozi,
sarje,
saroka,
sascha1es,
sashaxxx,
sashifoo,
saskia.k,
saule81,
saurabh_sb,
savagehenryceltic,
savagelovesinger,
saykojack,
sazzlecat,
sbardo,
sbeltrami2019,
sburda81,
scarletfeather,
schaeferhomeschool,
schaep,
schatzy,
schnaffeltier,
schneva,
schoolpsych77,
schreibvet,
schrodinger_kat,
sciencequiche,
scisarah,
scmorris,
scooterscooby,
scottm,
scoutcain,
scscottnz,
scurf,
scytheavalon,
sdalessandro,
sdev7,
sdkprodigy,
sdmorris,
seacliff5049,
seangoldstein,
seba.ka,
sebasish,
secrone,
sehindle,
selmarsh,
semayilmaz,
semiface,
senicalvin,
sensor4,
senv,
sequoiaforest,
sfmedusa,
sganon,
sgibson,
sgray8144,
sgutierrez1993,
shadow2056,
shahd Elashmawy,
shahino6,
shainacopeland,
shamaree copeland,
shananigan6326,
shanella,
shanher,
shannacherie,
shantimorrell,
shaoisaac,
sharathsarangmath,
sharoni.p,
sharrysharry,
shauna_ss,
shawnroderick11,
shayma...,
shazzyP,
shdybr8,
sheenabeena,
shekarbr,
shellie wallace,
shepardk20,
shero777,
sherritom,
sherwinptgmb,
shinebrightlea,
shmarla,
shmo,
shmooshy,
shocko61,
shoeman,
shortypantz,
shutterbugdi,
siberia2sumatra,
sigmapi10,
sijo,
silentq,
silo,
simmonsrzoo,
simona,
simona_12,
simond0,
simone53,
simonk1987,
simonliddle,
simonpopp,
simonvandenberg,
siouxiesue,
sirbertgarcia,
siren5298,
siwanicki,
sixpacktou,
sixxsgirl,
sjacobson1112,
sjc74,
sjcguk,
sjekkerton,
sjhwriter2,
sjwk,
skandlikar,
skavanagh,
skbarks,
skcool,
skepticHominid,
skilaru,
skintchef,
skiola,
skirtedrunner,
skity5,
sklo,
skorbolamid,
skrh,
skrzypkima,
skweek,
skybabe,
skyelight,
skyfundjr0520,
skykai,
skywatcher100,
slamdinista,
slidess,
slieberthal,
sloanes12,
slone02,
slowkow,
smccluskey1982,
smdu,
smfoote,
smhirt,
smithpa01,
smitjam,
smriemann,
smvilla,
sn322929,
sn328797,
sn349849,
sn363633,
snakeshit,
snappa,
snickermonkey,
snowdragon,
snowflake1,
so211,
soesoe,
sofieET,
soflynn,
sokrates49,
solenevermont,
solomonfolk,
sommakia,
sonictruth,
sontheold,
sopcat,
sophie00,
sophie8156,
sophydj,
sosmond,
soulselller,
soundgrl,
southofnonorth,
soyopopo,
sp515507,
spacegazer,
spakd07,
spamheadsmum,
sparow01,
spbf81,
speakofthewolf,
spearleyez,
spekky4eyes,
speters17,
spiderbui,
spinachravioli,
spinblade117,
spiralhunter,
spitysh,
spook34653,
sporter72,
spotharriet,
spu00crm,
squirrella,
squishycakes,
srallen,
sramsdale,
sroosa,
srs106srs,
srswann,
ssPETERss,
ssaPlacerias,
sschmeiser,
sschmidt,
ssmikey,
ssmith6519,
sstafford4,
stantastic,
starchitect,
stargirlmoon,
starhunter11,
starmag,
starman007,
starpower1,
starrypawz,
starwig,
steblak,
steeleyc,
stefan.k,
stefanie_caspari,
stefanmago,
steffbarros,
steffifee,
stefkova,
steiche,
stengleinjl1,
step.poulain,
stephen,
stephen.aites,
stephencarnes,
stepherbot,
stephkolenda,
steve67,
stevemiller99,
stevenjohnston2017,
stevensteven,
stevewales,
stfrue,
stinapham,
stitz,
storki,
stpage1,
strategy,
strawmeadow,
stripey,
strubeldoc,
stsearcher,
stsf1234,
stuartaylor57,
stuey273,
stuk755,
stweedale,
suburbanastronomer,
sue_welfare,
sueking,
sugo,
suji3000,
sumamaru,
suncoup,
sunshinegirl,
suzannebradbury,
suzer14,
suzi78uk,
svart,
svengooliehigh,
svg5595,
swamprunner,
swapeman,
swertkin,
swsahara,
syberknight99,
sydself,
syl.schmid,
sylphia,
sylvain dennielou,
synapse890,
syndarella,
syracon,
syzooniverse,
t.butra,
taboret,
tactoe,
tagori,
tahall3,
tahoedox,
tai_shan,
talm,
tamma,
tammylee67,
tan sin yi,
tanja92,
tapirsun,
tardigrade_girl_9,
tarelena3004,
tasnot,
tatoon37,
taylorv212,
taylour,
tbjerkga,
tbm,
tbrinton89,
tc29121981,
tcoriell,
tecannon,
technogourmet,
teekiteasy,
teemie,
teilhaft,
telophase,
temporaryblank,
tengnoth,
terryantrim,
tessa-virginia,
tfmorris,
tfrdawson,
tgoodway-sims,
tgrkpr2000,
th123,
thackler,
thawar23,
theWisp2864,
the_irishman,
theanimalover,
theappleman,
thebookscout,
thecuriousiguana,
thedweeb,
theheyman,
thehp,
thelinm,
themitchnz,
themrtroe,
thenoobest,
theofilos,
theotherme,
thepew,
theresxcfg,
thesquiddles,
thetruemilhouse,
thezohar,
thibaultmol,
thijszilla,
thimbleberry,
thisisjaid,
thoe,
thomas lovato,
thomdemm,
thoreslm,
thormster,
threadbare,
thula,
thurston_spaceman,
tien.duong,
tiffany9913,
tiffanyfields,
tiffanylian,
tigerpieatt.net,
tikijay,
tillmannhanna,
tillytots1232,
tilo,
tim_nutter,
timbok28,
timmerma7,
timothybouting2,
tina_pixie_,
tinamarash,
tines,
tinkapuppy,
tinkerlala,
tinope66,
tinykahoona,
tjbonzo,
tjeb,
tjilptjilp,
tjnelson5,
tkarian,
tkennedy371,
tkoc,
tlmaggiola,
tlusardi,
tmarchant,
tnl25,
tnladybug,
toasterparty,
toffee59,
tofly76,
tojad,
tolls,
tombombadil117,
tomburgerpie,
tomfinn1984,
tomtom47,
tomymx,
tonnerrebird,
tonnigonzalezPHYANTRO,
tony avrithis,
tonyhallam001,
tonymor,
toosirrius,
tori12,
torman,
tottobauer,
tournavy,
tracev,
tracy.angel,
tracyshenton,
traianusrex,
tralliott,
tranceline,
traumeule,
travicehockey,
tre-86,
treebeard236,
tribb99,
trieloff,
tristia,
trixx,
trmayo,
trock5445,
trout13169,
trstn1,
trudauglow,
trusek,
ts.hbgr,
tsepui,
tsering,
tsheiffer,
tsoderquist,
tswittelsbach,
ttekieli,
ttessler,
ttfnrob,
ttilley,
tubasumpf,
tubbyoatmeal,
tuberdj,
tuittu,
tuleu-emma,
tweeg5039,
tweger,
twilightmoon,
twinklelittlestar,
twwittig,
txmeg,
tyhannahsaurus,
tyquana,
ufo1989,
ujansub,
ulikoehler,
ulrike krumscheid,
umalinda,
undergroundgirl,
underthearch,
undyne,
unefeepasse,
unidentified-scientific-object,
uninetverse,
upashka,
urbansonnet,
urgo42,
us7461,
uschaefer,
user_name,
uxia,
v1oletv,
vago82,
vaguely_adrian,
val08,
vale0293,
valeriechristina,
valette,
valfer,
vallil,
valski,
vanessa16480,
vanhelsing71,
vchantorn,
vdejesus,
vectorization,
vedun-z,
veenboer,
veeveequeen,
vega14k,
veggiepenguin,
vehtoh,
veleska,
velmanatov,
velthove,
vena,
venusstop,
veronicasnape17,
vertigopolka,
vestigial,
vfinnerty,
vgoguen,
vicki224,
victorloyola,
vijilaksd,
villalongam,
vincentgsn,
violotti,
viralstrain,
virginia,
vita.kutacek,
vittoriaestienne,
viviankrause,
vivibreizh,
vjalex,
vjbakker,
vkmkevin,
vlad015,
vmerker,
vmont,
vmp32k,
vonderSchanze,
voshellmaddy,
vouchtieng ,
vpatfong,
vrohe,
vvvulpea,
vx100,
vybzbild,
wackylama,
waddles310,
walczyszyn,
wallafives,
wammy,
wamsleys,
wanda,
wannabe12,
wanni012,
warmworm,
warriorcatsophie,
watt_tyler,
waver,
wayne54,
wdecock,
weaverzinc,
webb1980,
webpixi,
werdnarb,
weromac,
wesley pawlowski,
wewa,
whaps,
whisperit,
white_squirrel,
whitelightnig,
whitetiger678086,
whooshdemon,
wibbi4,
wicked82,
wiebked,
wiedi,
wightdragon,
wildebeesty,
wildlifegisgirl,
wildlifephotographer,
wilktony,
willbarnes4,
willjohnboy,
willowstar321,
willywonki,
windinhair13dd,
wingcli2014,
winnethpawtrow,
wisenheimer,
wiztess,
wizzydaz,
wojciech_k,
wolfgang1,
wosgood,
wtkoala,
wvvampire,
wwscoobydo,
wyjeong2,
x303,
x500,
xXOrchidChildXx,
x_ANT,
xanthi373,
xantoras,
xapril7x,
xbenr,
xfgiro,
xflyer,
xgraceygirlx,
xn11983,
xpeh,
yacoub,
yagerp,
yampol,
yankeegunner,
yasdollasign,
yatomi,
yedidiamesfin,
yellowbird,
yep2yel,
yesenialv,
yibet,
yinyangscurse,
yipekiay,
yk2064,
yoah,
yoda1,
yodahound,
yoey0419,
yona.s,
younge,
youthvolunteersdallaszoo.com,
yoyoman67,
yugoh,
yusuf,
yvonne brockwell,
yvonne mews,
yvonnefish,
zach_fortuna13,
zara,
zawarudo,
zazabard,
zbgirl6,
zdjuf0,
zedone_geezer,
zeezel,
zeng19930220,
zeynaz,
zhol123,
zholl25,
ziegl087,
ziglaser,
zits,
zlake,
zocker,
zoeellis,
zoharkapalan,
zoonie4893,
zoorik,
zootastic,
zooz,
zorglax,
zperez,
ztdavies,
zwanenburg,
Łukasz Bielecki,
Špela Puš,
Šárka,
Илья,
Самохвалов Антон (Samokhvalov Anton),
李泽林,
赵颖,
임민서,
-Beach-,
.Peter Stockton-Lewis,
00012853,
00054798,
02csmith,
08682293,
0Sanne0,
100dee,
1091010,
1157243,
11kb34,
11kralle,
120,
123726,
12coombesj,
136007,
13zulus,
141Dial34,
14SergioA,
14sonbri,
1510568040,
17lauk2,
1827,
1828Duke,
18acavinee,
18mclum,
18scooper,
1Ver,
1anita1,
1yioi87,
2004pamf,
20080112,
20161105,
20161169,
20161186,
20161230,
20161255,
20161287,
20161305,
20161761,
20162130,
2024-habdi,
20scuc,
212marin,
2140lacy,
214455,
214629,
214860,
215977,
216043,
216097,
21nikelove,
21perryb,
223327,
224043,
224265,
225578,
225593,
22lhaws,
2409,
25or6to4,
2609005,
2fay,
307flyfisher,
321Hanni,
3rdgradeelwood,
43corunna,
4dlabs,
56227isd,
5paws23,
5yF0Rc3,
62050isd,
62492isd,
62566isd,
64053isd,
64HaRtZa,
71099isd,
75473isd,
77636isd,
77keeg77,
7bluebird7,
7bthirdrock,
7cscience,
85martinis,
99bellam
</p>
</section>
</div>
module.exports = Authors
| 36028 | React = require 'react/addons'
cx = React.addons.classSet
Authors = React.createClass
displayName: 'Authors'
render: ->
<div className="authors-page">
<section className="about-section content">
<h2>{@props.pageContent[0].header}</h2>
<p>{@props.pageContent[0].content}</p>
<p>
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
A13FAB,
AEagon,
AHO<NAME>ki,
<NAME>,
AJ1984,
AJTOTHEMAXX13,
AKGal,
ALCAVTAY,
AL<NAME>,
ALP1,
ALR20,
<NAME>,
APav,
ARD1999,
<NAME>.,
<NAME>,
A_<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Aaronaguila1,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>sher,
<NAME>,
<NAME>,
Acipi9,
<NAME>,
Adaku,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Adammada1,
Adamvanc,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Adely<NAME>j,
<NAME>,
<NAME>,
<NAME>,
<NAME>rianaM2001,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
AfricasFinest,
Agent_Bookworm,
AgoraChat,
Ahearyn,
Aidan <NAME>M,
Aigh90,
Aikane,
Ailurus,
Ailz79,
<NAME>,
<NAME>,
AimeeJo,
Ainmhi,
<NAME>,
Aiygeo,
Akaito,
Akat13nmkb,
Akoyan,
Akyu,
AlBatin,
AlJC,
Alagorn,
<NAME>,
<NAME>,
AlanaKM,
<NAME>-<NAME>,
Alanatkinson123,
Alba,
Alealonna,
<NAME>,
<NAME>,
<NAME>,
<NAME> III,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>H<NAME>103,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Alexandra1,
AlexandraD16000,
<NAME>,
<NAME>,
AlexandreLamothe,
<NAME>is,
<NAME>,
Alexis_V,
<NAME>,
Ali_Oldenburg,
<NAME>,
Alice.Rose182,
Aliceee11,
Alico,
Aligeeach,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
AlisonField,
<NAME>,
Alix2017,
<NAME>,
AllegraS,
<NAME>,
AlligatorCreator,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
Alpha27,
Alvgeir,
AlxV,
Alyfc210,
<NAME> ,
<NAME> King,
<NAME>,
Alyssa.G,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Amanda1961,
Amanda1975,
AmandaG,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>.,
<NAME>me<NAME>,
Americanthai,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>mmathor,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Amulus,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Amy1964,
An<NAME>,
AnalytischeATI,
<NAME>,
Anastasia44mk,
<NAME>,
Anastassia,
Anaïs ,
<NAME>,
<NAME>,
<NAME>,
Andr0meda,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
Andreas38871,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>JTalon,
<NAME>,
<NAME>,
<NAME>,
Ang-valerie,
Angel60SX,
<NAME>,
<NAME>-<NAME>,
<NAME>,
<NAME>,
AngelicaKaufmann,
AngelynR7480,
<NAME>,
AngieEads,
<NAME>,
<NAME>,
<NAME> ,
AnilkumarA,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Anja66,
<NAME>,
<NAME>, PhD ,
<NAME>-<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> <NAME>,
Anna-<NAME>,
AnnaTee,
Annabz,
Annau2,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
Anne<NAME>,
Annepet,
<NAME>,
<NAME>,
<NAME>,
AnnieOrlando,
Annie_Wiener,
<NAME>,
AnnikaC,
<NAME>,
Antek7,
<NAME>em<NAME>ia,
<NAME>,
<NAME>,
Anthonyames,
Anthonyrice89,
Anthro-ginger,
<NAME>ihe<NAME>,
Antoinetje1959,
<NAME>,
<NAME>,
<NAME>,
Antwan2004,
<NAME>,
<NAME>.<NAME>,
AoifeN,
<NAME>,
<NAME>,
Aqua_Fairy,
Aralest,
Archiesgirl,
<NAME>,
<NAME>,
A<NAME>ane<NAME>ooremans,
A<NAME>ani,
<NAME>,
<NAME>,
<NAME>us, S<NAME>, and <NAME>,
<NAME>,
Arlette ,
Ar<NAME>,
Aron2007,
AronC,
Arrakatanga33,
<NAME>owey,
Artemision,
ArturoG,
AruAxe,
ArwingPilot64,
<NAME>,
<NAME>adora,
<NAME>,
<NAME>,
Asd3r,
Ashaheedq,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Ashley<NAME>awyer,
<NAME>,
Ashlyn,
<NAME>,
Asillage,
<NAME>,
Astro_Max,
Astromarm,
AstronautCapybara,
Astyanaxx,
Ataticek,
AtomAnt,
Atti,
Atti_der_Kleine,
Audburrito,
<NAME>,
<NAME>,
Augdog,
<NAME>,
Aukej,
AuntieA,
Aurelide,
AuroraCV,
<NAME>,
<NAME>,
<NAME>,
<NAME> <NAME>,
<NAME>,
AvanW,
Avatar<NAME>i,
<NAME>,
Avery11,
Aw80,
AwesomeAva,
Ayanori,
<NAME>,
Azurite,
B ,
<NAME>,
<NAME>,
<NAME>,
BCC,
BDNeil,
BKNBLK13,
BKaller,
BRutherford,
BS1234,
BWPLS29,
BZW77,
B_Grenette,
B_McC65,
BabettevanRijn,
Badgerx,
<NAME>,
BakerStreet,
<NAME>,
Banana,
BananaChimp,
Bananenbert,
BarabasE,
Bar<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Barbaras,
Barbera,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Bastian_Peripheral_Area,
BattyElly,
<NAME>,
BbnV,
BeHotSiii,
Beanysteve,
Bear<NAME>,
BearmanK,
BeaterGirl,
<NAME>,
BeauRoch,
Beckster,
Be<NAME>,
Becky.k,
BeckyK8,
Beecher,
Beetleboy,
Bekki,
Belbelleb,
BelleB,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> and <NAME>,
<NAME>Adams34,
<NAME>Emma,
<NAME>Str<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>jaminW,
Benjamin_1,
<NAME>shlomo,
<NAME>stg,
<NAME>,
<NAME>,
Berner-88,
<NAME>,
<NAME>erry101,
<NAME>norb,
Besotted,
<NAME>,
Bestboy,
Bestlandlord,
<NAME>,
<NAME>,
Beth8,
<NAME>,
Bethany6659,
Bethels54,
Betmac,
Beto100,
BettyBuh,
<NAME>,
Bfurn4s,
Bgrundy,
<NAME>,
Bibliophile,
BicTigrou,
Big-Dipper,
BiggerJ,
<NAME>,
<NAME>,
Billy-n-<NAME>,
BillyBrad,
Binerexis,
BiologyDrew,
Biologyzone,
<NAME>,
<NAME>,
<NAME>i,
BjornPB,
<NAME>,
BlackLotos,
Blackdraumdancer,
Blactyde,
Blake,
Blauvogel,
BlightEdge,
BlindOracle,
BlossLearningSpot,
BlueGull,
BlueWhovian,
Blumi,
Bmnewcom2005,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Bonnie123,
BonnieC,
Bonus,
Bonut,
Boqboq,
BorisBee,
Boumans_,
Bozinator,
BradToTheBone,
BradWilliam91,
<NAME>,
<NAME>,
<NAME>,
BrainstormCreativity,
<NAME>,
Brandi7293,
<NAME>iAlba,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>redit,
<NAME>regtje,
<NAME>,
<NAME>hn,
BriPriUK,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Bridgitte,
BrielleJMBaker,
<NAME>,
<NAME>,
<NAME>,
Brittenie,
Britti,
<NAME>,
Brodstier,
Bronte,
<NAME>,
Brontë ,
Bronze,
Brooke,
Broo<NAME>,
<NAME>,
Brunitski,
<NAME>,
<NAME> ,
Brytt,
BubbleMan,
Bubo,
BuffS,
Burbear1,
Buwa,
By<NAME>,
<NAME>,
<NAME>,
CATALIN NAFTANAILA,
CCCC-Unit,
CHSAPBio,
CHaRLieDonTsURf,
CJF64,
CJLSMITH,
<NAME>,
CLilley,
COMPUTIAC,
COwenSmith,
CRoC,
CTh<NAME>,
CTidwell3,
C_McC,
CacaCrow,
<NAME>,
Caecilia42,
Caitlin,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Cal<NAME>,
Caleb,
CaliforniaBear,
<NAME>,
Camda5585,
Camdenmo,
Cameron,
<NAME>,
<NAME>,
Cameron12,
<NAME>,
Camrynn,
Canckaitis,
<NAME>,
<NAME>,
Candlemas,
<NAME>-<NAME>,
CaoimheK,
Cape2cape,
Capitano78,
Captain_Ruby,
Captain_skug,
<NAME>,
Cara_1981,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Carolo52,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
CarrieVS,
<NAME>,
<NAME>92,
<NAME>,
<NAME>,
<NAME>6<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
CatsEyes,
CatyB,
<NAME>,
<NAME>azzoo,
<NAME>,
<NAME>,
<NAME>,
Cecilia<NAME>well,
Cedar Student,
CefalofoRosso,
Cele<NAME>,
Celestial_Caticorn,
<NAME>,
Celiaros,
Cellendylle,
Celurca,
Cenkdem,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Ch<NAME> Van<NAME>,
Char2002,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>-<NAME>,
Charlottie96,
<NAME> ,
Chas,
<NAME>,
Cheavus,
Checkheck,
Cheesemouse,
<NAME> ,
ChelseaL,
<NAME>,
<NAME>,
Chewyswimmer,
Cheyenne baird,
Chezza6,
Chg507,
ChiliP2000,
Chimper,
Chimpfinder,
Chimpinator,
Chippietom,
ChizCT,
Ch<NAME>,
<NAME>,
Chouchou_Bidou,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>MM76,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Chube,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>areyclarey,
Classof2015,
<NAME>,
<NAME> & <NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>udybell,
Claus114-DK,
<NAME>,
Clefairy131,
Clemdalfit,
Cletus2014,
<NAME>,
Clode13,
Clue4fun4,
C<NAME>ick,
Cmk923,
CoconutPete90,
<NAME>,
<NAME>,
Codyking24,
Cole_Durden,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Colombier,
ColourFromSpace,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
Cop<NAME>54,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
CoriellK18,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>ory<NAME>han,
CosimoMontagu,
CosmicLatte,
CosmicZephyr,
Cosmonautic_Ape,
Costanza111,
Costimos,
Cougarrakira,
Courtney,
<NAME>,
<NAME> ,
<NAME>,
<NAME>raen,
Crawfork,
CrazyChicken,
Crazycatz935,
Crentist,
Creslin73,
CressBookworm,
CricketG,
Crinoline,
<NAME>,
Cristóbal,
Crlucas322,
Cromby3,
Croquet,
Cruuux,
CryptoLight1,
<NAME>,
<NAME>,
Culdrum,
Curculionidae,
CvanNoort,
Czandigar,
<NAME>,
D1verDad,
DB,
DBell87,
DEP,
DIDDLE,
<NAME>,
DJAI1979,
DK86,
DM Ravnsborg,
DUDONE2,
Daikoro,
Daisy-May,
Daisy_of_Doom,
Daja_GAlaxie,
<NAME>, PhD,
Dalitasdrain,
<NAME>ta,
<NAME>,
Damon22,
<NAME> ,
<NAME>,
Dan987,
Dana23,
Danel890,
DanelD,
Danell711,
Dangerfield1982,
DangerousDD,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Danielle<NAME>-<NAME>,
Danilo<NAME>li,
<NAME>,
<NAME>,
DarbyHeart1,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>.<NAME>,
DarthBec,
DatAlien,
DataDroid,
<NAME>,
<NAME>,
<NAME>_<NAME>s,
D<NAME>jaw,
<NAME>,
<NAME>,
<NAME>,
<NAME>, Jr.,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> <NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
D<NAME>,
Dduncan13,
De<NAME>,
De<NAME>,
<NAME>,
<NAME>,
<NAME>,
Dear<NAME>,
DearestPuppy,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>sky,
<NAME>,
Deborah1955,
DeborahV,
<NAME> ,
<NAME>,
<NAME>,
<NAME>-<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>, PhD,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>9<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>-<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
DiJo<NAME>,
<NAME>,
<NAME>,
<NAME>,
Diandra.mr,
<NAME>,
Die<NAME>,
<NAME>,
Diemorfeld<NAME>,
Dinahsaw,
DinoD123,
DinoGuy25,
Dinopenguin,
DiscoNixon,
Divas3458,
Dixybird,
Djhorsegirl,
DknightNZ,
Dlf143,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Dong<NAME>,
<NAME>,
<NAME>,
<NAME>,
Doschps,
DotMatrix,
Dotti,
Doubleknot888,
Dr <NAME>,
Dr <NAME>,
Dr <NAME>,
Dr. <NAME>,
Dr. <NAME>,
Dr. <NAME>,
Dr. <NAME>,
Dr.<NAME>,
DrD<NAME>J,
DrHox,
DrPongo,
Dr<NAME>,
Dr<NAME>,
<NAME>,
<NAME>,
Drastic<NAME>,
D<NAME>,
<NAME>,
D<NAME>0rZ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
E.N.G.,
ECPetra,
EDonahue,
EFlesch,
EValleyY6,
EagleLoft,
Eaglesoul,
<NAME>,
<NAME>,
East74,
Echo Brooks,
EclipseCeLL,
<NAME>,
Ed<NAME>,
<NAME>,
<NAME>,
Edisislost,
<NAME>,
Edsploration,
<NAME>,
<NAME>,
<NAME>13,
<NAME>,
Eileen,
Ejdarrow,
Ekiaer,
Ekima,
Elagorn,
Elaina_science,
<NAME>,
Elaine <NAME>,
<NAME>,
<NAME>.,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Elise2222,
<NAME>,
<NAME>,
<NAME> "<NAME>" <NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Elizaveta2,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Ellen904,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Ellis<NAME>ell,
<NAME>,
ElsieUnderscore,
<NAME>,
Elun,
<NAME>,
<NAME>,
<NAME>,
Emanuele676,
Emera9,
<NAME>,
<NAME> ,
<NAME> ,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
Emily.H.zooniverse,
<NAME>ily<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
EmmaJacobs,
EmmaVR,
Emmawhinnie2,
Emmelmann,
EmpireMousetrap,
Emptyrainbowbbls,
Emrosie,
Engler-Schneider,
EnjoySasa,
<NAME>,
<NAME>,
<NAME>ora<NAME>de<NAME>,
EnragedPlatypus,
<NAME>,
<NAME>,
EpicBlackDragon,
<NAME>y<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Er<NAME>_<NAME>,
<NAME>,
<NAME>,
Erufailon42,
<NAME>sai,
Esbn,
<NAME>,
<NAME>,
<NAME>y<NAME>,
Eswimmer0206,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
EthereaL,
<NAME>,
<NAME>,
EvEnuS17,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Eve00675,
<NAME>,
Even<NAME>us,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
FB-XL5,
FCardou,
FINDALLTEHALIENZ,
FaZe_Leder,
Fabi1923,
<NAME> ,
<NAME>,
Family St Quintin,
Farah_2005,
<NAME>,
FayeSomething,
Fearnaught,
FedTheMo,
Feliade,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>, <NAME>,
<NAME>,
FightingDodo,
Filibuster,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
Fizz<NAME>,
Flamingo,
Flexo221281,
FlightoftheValkerries,
Flint1545,
FlipperBozz,
FloMe,
<NAME>,
Flying_J,
<NAME>,
Foggyworld,
FourWinds,
Foxstar82,
Fr3d3r1k_M,
FranB25,
FranBow,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>kles,
<NAME>Van<NAME>,
<NAME>,
<NAME>,
<NAME> ,
FreyaJC,
Frozyth,
Fuchskind,
Funkyfalcon,
Funnyfavorer101,
Funnyguts,
Furiat,
Furniture307,
FuzzyJones,
<NAME>,
<NAME>,
<NAME>,
G.e<NAME>yn1214,
G.gorilla324,
G2EK,
GBauses,
GKaramanski,
GLambourne,
GMadeiros,
GSmith7018,
Gabeth123,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Gabriela<NAME>,
<NAME>,
<NAME>,
<NAME>,
Gabriellajm,
Gab<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
GalaxyTrain420,
GalenaBear,
Gallium,
GameBrown_Family,
GammaGreenthumb,
Gandalv,
GanneC,
Garlandiana,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Gavitron_2000,
Gbaez,
GdVI,
Ge0de,
GeWe,
Gedelgo,
Geeds,
Geek2,
Geeklette,
GeirM,
<NAME>,
Gekco,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Genosse Boss,
Genotype2,
Geode_,
Geometer,
<NAME>,
<NAME>,
GeorgiaGootee,
<NAME>,
<NAME>,
Gethere2,
Ggatton,
Ghazi,
<NAME>,
<NAME>,
Giganto<NAME>,
Gill_666,
<NAME>,
Gillian_Borland,
Gillis57,
<NAME>,
<NAME>,
<NAME>,
<NAME>ingertwin,
G<NAME>1,
<NAME>,
Giomv,
<NAME>,
<NAME>,
Giu_Gennaio,
Giuseppe,
GizmoMischief,
<NAME>,
GloriousMundane,
Glosoli,
GnatMan,
Godchild,
Gonodactylus,
Gordon_Forrest,
Gordonalistair,
Gorgar,
Gorgonos,
Gotenks,
Goupus,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Grace_Banks,
Grace_Little,
Graceling1,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Greenfields,
Greengirl65,
Greenlightrj,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
G<NAME>oireblond,
<NAME>,
<NAME>,
GrepPip,
<NAME>,
<NAME>,
Grethe,
GreyPhoenix,
Gribby,
Grimnir,
Grintalsm,
Gumby,
Gumok,
<NAME>,
<NAME>,
GuteMine,
Gwillewyn,
<NAME>,
H8stinks,
HKE,
HMPenguins,
Haedyn,
<NAME>,
Ha<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>aley,
<NAME>aleywigal,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Hanibal94,
<NAME>ann<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Hans<NAME>p<NAME>,
Hanschibal,
HaplessHero,
HappyEve,
Harambe <NAME> On ,
Hardrockhopper,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>avar<NAME>,
Haya,
Hayley,
HeathRussiaGc,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>hen<NAME>,
<NAME>,
Hippie1427,
Historysid,
Hitodama,
Hnigg,
Hoggo,
<NAME>,
<NAME>,
<NAME>J,
Hookage,
HoosierGen,
<NAME>,
Horia007,
Howard_Richards69FL,
Howardsn68,
<NAME>,
<NAME>,
<NAME>i,
Huddo,
Huffer907,
<NAME>,
<NAME>ugoBallee,
HumbertoRickenbacker,
<NAME>,
<NAME>,
Huntress,
HybridX,
HydroxyChloride,
<NAME>ye <NAME>,
Hypatia1,
I'm not telling you my real name. Give up.,
<NAME>,
IEM,
IL-K,
IMadeThisForYou,
<NAME>,
<NAME>,
<NAME>,
IanGalaxyZoo,
Iasmim,
Icecrasher,
Icephoenix96,
Iduun,
Idw,
<NAME>,
IkariaDaviau,
IlseB,
Iltis,
<NAME>,
Imes,
Inaari,
India Yip,
IngerInger,
Ingrid DOMKPO,
Ingridanita,
Ingridbio,
Inki81,
<NAME>,
I<NAME>en,
<NAME>,
<NAME>,
<NAME>,
<NAME>, <NAME>, <NAME>,
I<NAME>-oxf,
I<NAME>,
IrishAstro,
IrishRottie,
Iris<NAME>,
IronWhale,
Is0cre,
<NAME>,
<NAME>,
<NAME> ,
Is<NAME>a,
<NAME>,
<NAME>,
Isaman10,
Isannah,
Iseeit,
Isissea,
Isitme,
Issiah,
Itallcounts,
Itsasmallworld27,
Itsug,
I<NAME> ,
Ivanovna,
<NAME>,
IvdO,
Ivorostojic,
<NAME>,
<NAME>,
<NAME>,
Iyahalyssah2,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
J.A.J,
J3300918,
JDLA,
JDonnici,
JF392813E,
JG1996,
JGF,
JJ199,
JJ53,
<NAME>,
<NAME>,
JOB <NAME>,
JOs<NAME>,
JPJP_333,
JPenn2,
JPenn550,
JPlegge,
JPrice01,
JSN2001,
JVers<NAME>,
JWearth,
<NAME>,
<NAME>,
J<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Jaeti13,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Jamoni,
<NAME>,
<NAME>,
<NAME>,
Jan11965,
Jan<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
JanetCCS,
JanetCSB,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>ari <NAME>,
<NAME>-<NAME>,
<NAME>,
<NAME>,
<NAME>.<NAME>1<NAME>,
Jasminejo24,
<NAME>,
<NAME>,
Javiermv,
Jawofech,
<NAME>,
<NAME>,
<NAME>,
Jaynemet,
Jaynet88,
<NAME>,
<NAME>,
Jdj1,
Je11y,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
JeeaaR,
<NAME>,
<NAME>,
<NAME>,
Jeff0253,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>24,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
J<NAME>,
Jenx595,
Jenzoo44,
<NAME>dnotH<NAME>,
<NAME>,
<NAME>,
JeroenMJ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Jewelanne,
JeweloftheNile,
Jgronmark,
Jhf,
Jill<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Jimmy2010,
<NAME>,
<NAME>,
<NAME>in<NAME>,
Jjuju,
Jmallory1996,
Jmart3029,
Jmyers0517,
<NAME>,
<NAME>,
JoHB,
JoKD,
<NAME>anL,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
JoelDR,
<NAME>,
<NAME>,
JohannaThePenguinCounter,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
John170,
John93,
J<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Jordanboys4,
Jorge2898,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Josh.davies87,
<NAME>,
Josie,
<NAME>,
Jowuijts,
<NAME>,
JoyWorld,
Jr9355,
<NAME>,
<NAME>,
<NAME>,
Juanan_89,
<NAME>,
<NAME>,
J<NAME>,
<NAME>,
<NAME>,
Jujubee18,
Julebambus,
<NAME>,
Julesdomalain,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Jul<NAME>,
<NAME>,
JulietteHotel,
Juln,
Juls2783,
JungleHyena,
Junior_Guide,
Juraj,
Just<NAME>,
JusticeStone,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Juty,
Jyard2,
<NAME>,
<NAME>,
K <NAME>,
K <NAME>,
K P <NAME>,
K20Kat,
KGuk,
KHowlett,
KK_27,
KKramke1014,
KLUTSCH <NAME>,
KS_RM_CB,
KTMorton,
KTcakie,
KYL,
Kaate86,
Kaelee,
<NAME>ag<NAME>,
Kaida,
Kaitlinaw<NAME>,
<NAME>,
Kaiyol<NAME>,
K<NAME>,
Kakan,
<NAME>,
<NAME>,
Kangaroo36,
Kantos,
<NAME>aotix,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Karen_c.,
<NAME>,
<NAME>,
Karijn,
<NAME>,
<NAME>,
<NAME>,
Karina28,
<NAME>,
<NAME>,
<NAME>,
<NAME>z,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>atS82,
<NAME>,
<NAME>vitje,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>atelynh,
KateyND,
Katfish,
<NAME>,
<NAME> ,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
KathleenCO,
KathleenZ,
<NAME>,
<NAME>,
<NAME>,
K<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>atie<NAME>ley3,
<NAME>17,
<NAME>,
<NAME>,
Katt_25,
<NAME>,
<NAME> ,
Katybb,
<NAME>,
Kautton,
<NAME>,
<NAME>,
<NAME>.,
<NAME>,
<NAME>,
Kazesim,
<NAME>,
Kbzephyr,
Kcilf90,
Keats14,
<NAME>es,
Keerthana11,
<NAME>,
Keircurb,
<NAME>,
<NAME> La Rue,
KeithMason,
<NAME>,
KelliK2,
Kellie920,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
KenzieV.2005,
Keren,
<NAME>,
Kerima_Hill,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
KevinP2,
<NAME>,
Kgo1526,
Khalinka,
Khas,
Kiana,
<NAME>,
<NAME>-<NAME>,
<NAME>,
Kimberz86,
Kimbo_2112,
Kimvbelle,
Kirsi87,
<NAME>,
Kirsten<NAME>,
Kiru,
Kishachimp,
Kist,
<NAME>,
Kitsuneko,
<NAME>,
Kjo1626,
<NAME>,
<NAME>,
<NAME>,
K<NAME>,
Knowles1315,
<NAME>,
Kobold27,
Kold<NAME>,
<NAME>,
Koukouwaya,
K<NAME>,
<NAME>,
Krglass10,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Kristensop,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
KrkSmth,
Kruss375,
<NAME>,
<NAME>,
<NAME>,
K<NAME>,
Ku,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
KyoVang,
L-AForbes,
L-Glendinning,
<NAME>,
LEGO_NINJA,
LH_Kelley,
LINAC,
LJE,
LKavanagh,
LN8x,
LOLeannie,
LPO,
LRC Academy,
LSpeedie,
La<NAME>,
<NAME>.,
<NAME>,
Ladanmusic,
Lady12,
Lady_Godiva22,
Ladyann,
<NAME>,
<NAME>,
LailaA,
<NAME>,
<NAME>,
L<NAME>orna,
Lamutamu,
<NAME>ong,
<NAME>an Thai,
Landvermesser,
Laquaine2737,
LaraM,
<NAME>,
LarkinC,
Lar<NAME>el<NAME>igung,
<NAME>,
Latitude23,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>-<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
LauraCun,
LauraTheExplorer,
Lauralamble,
Laurapinguin,
LauravdMark,
<NAME>,
<NAME>,
LaurelG,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Laurin_003,
<NAME>,
<NAME>,
LazyL,
LeRyck,
<NAME>,
<NAME>her,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Lecter7,
<NAME>,
LeeBurn,
LeeKick,
Lee_Harris<NAME>n,
Lee_Re<NAME>,
Leibniz,
Leichman Career & Transition Center,
Leine.03,
<NAME>,
Lemoncupcakes37,
Lemonlorraine,
Lena<NAME>,
Lena_elh,
<NAME>,
Lennoxville,
Leo,
LeoMFR,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
LesleyLee,
<NAME>,
<NAME>,
Lesterj1972,
Les<NAME>on,
<NAME>,
<NAME>,
<NAME>,
Levicorpus,
<NAME>,
<NAME>,
LewisO<NAME>ary,
Lewyke,
<NAME>,
<NAME>,
Liambebb,
Lianus,
Librarian22,
Lichtecho,
LifeBounces,
LifeScienceMojo,
Lifeforce,
<NAME>,
Liie74,
Liisu.R.,
<NAME>,
Lillafjanten,
<NAME>,
<NAME>,
<NAME>,
LillieC,
Lilmissmechanic,
Lilunebrium,
Lily,
<NAME>,
LilySchultz2002,
Lilygwen,
<NAME>,
<NAME>,
<NAME>,
L<NAME>en,
LindaHagbergSweden,
Linguin,
LinkandNavi,
LinkyLongleaf,
Linnie14,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
LisainVA,
<NAME>,
Lissa623,
LiverLover,
<NAME> ,
<NAME>,
LizCoops,
LizDownes,
Liziuri,
Lizzietish81,
Lizzifer711,
Lizzy06,
Lizzyf23,
Lmcminn,
Lmcurls,
Lofty_h,
LoganSammy,
Lolipop666,
Lololiz,
Lomky,
<NAME>,
Lonewolf66,
Look4Star,
Loony_Lovegood,
Lordofthepies,
<NAME>,
LoriM,
Loschmidt,
<NAME>,
<NAME>,
<NAME>,
Louccby,
<NAME>,
<NAME>,
<NAME>,
Lovarq,
Lparkhurst,
Ltlsun,
Lu72,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
LucyitSwD,
Ludika,
Ludovic,
Luispeikou,
<NAME>,
Luke .S,
<NAME>,
<NAME>,
Luna54,
LunaMona,
Luqras,
Luthar22,
<NAME>,
Lyckerish,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Lyneth,
<NAME>,
<NAME>,
Lynnmrose,
Lynzw,
Lyrajane,
Lyss2303,
<NAME>,
<NAME>,
<NAME>,
M&P Noonan,
<NAME>,
M00N,
MALASSIGNE <NAME>,
MBear,
MBrett,
MDA,
MDrass,
MKDS,
MLSBetreuung,
MM-Noorsalehi,
MMT90,
MNe<NAME>son,
MOBI,
MOSI's Youth Advisory Board,
MPIchimp01,
MPIchimp02,
MSMurphy,
MaSeKind,
MabelH,
<NAME>abi,
<NAME>,
Machielovic,
<NAME>,
Macrell,
Mad<NAME>,
<NAME>,
<NAME> L,
<NAME>,
Maddy1221,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>afalda2015,
<NAME>,
<NAME>,
Magellan_Lin,
Magessa,
<NAME>,
<NAME>-<NAME>,
<NAME>,
<NAME> ,
Maggielikeszoology,
Maggiewill,
MagicMitch,
Magzie2000,
Mahmoth,
<NAME>,
Mai ,
<NAME>,
<NAME>,
Maike42,
Mainge,
<NAME>,
Mako001,
Mal<NAME>T,
<NAME>,
<NAME>ame<NAME>,
Mamphie7ty,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mang19,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>uela11,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>-<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mar<NAME>9,
<NAME>,
Marena84,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
MargaretW,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
Marianagazga,
<NAME>,
<NAME>,
<NAME>,
<NAME>ie-Fr<NAME>,
MariePG,
Marie_eve_78,
<NAME>,
MariluC,
Marinka1704,
<NAME>,
MarioMe,
<NAME>,
<NAME>,
<NAME>,
Mariposa123,
Marisachimpa,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mart<NAME>x,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mary196,
<NAME>,
<NAME>,
M<NAME>,
Mas2201,
<NAME>,
<NAME>science,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
MattB559,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
Maureen77,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
MaxKa,
Maxbax,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mazzy,
<NAME>,
Mburke777,
<NAME>,
<NAME>,
Meanjean4321,
Mear48,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Me<NAME>,
Mel-<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>issa<NAME>ee,
<NAME>,
<NAME>,
Me<NAME>,
Melvis,
<NAME>,
<NAME>,
Mer<NAME>32,
Mercury82,
M<NAME>ie,
<NAME>,
MermaidBird,
Merowig,
Messi0088,
Metamikt,
Mht,
<NAME>ia ,
MiaTheFeline,
<NAME>,
<NAME>,
MicahB,
Mich.bell,
MichLM,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Michael_B,
Michaela810,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>, Esq.,
<NAME>.,
Michkov,
<NAME>,
Micmac1,
MidgeUniverse,
MidnightRook,
Miepie88,
M<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mikaylab,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mikerttt,
Mikeyser,
<NAME>,
<NAME>,
Mikrokli<NAME>,
Mikytron,
<NAME>,
<NAME>,
Milanp,
<NAME>.,
Miles_John,
<NAME>,
Mima2,
<NAME>,
Mimiko,
Mimster2,
<NAME>,
<NAME>.,
Mindslaver,
Minev,
MingMing,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Mirime,
Mirjam,
Mirkomaniac,
Mirrakor,
MirrorscapeDC,
Mirsandia,
MissS,
Mission73,
Missoedipa,
Missybee35,
MisterMor<NAME>,
Misterhamm,
Mistrinanka,
<NAME>,
Mitsiy14,
Mizzou,
Mjay,
Mkdtasha,
Mkele,
Mmjm,
Mna225284,
Mochyn,
Mohot,
Mokka,
<NAME>,
MolniyaD,
Momkey,
Mona09506,
<NAME>,
<NAME> ,
<NAME>,
MonicaMacD,
MonkeySis,
Monolakes,
Montexes,
Moonbeam,
Moone,
M<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Morganavila,
<NAME>,
Mortal_Speaker,
Motherjoanne,
MotionsensorElke,
Mowat,
Mr-Sl<NAME>,
Mr.To,
Mr<NAME>,
MrBr<NAME>,
MrCup,
MrES,
Mrs <NAME>,
Mrs. <NAME>,
Mrsl<NAME>,
Mrsthayer13,
MsTur<NAME>,
Ms<NAME>,
<NAME>,
Muggy7,
<NAME>,
<NAME>istrada,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Muskratte,
Mvpiggelen,
Mwal<NAME>,
Mydrynn,
MyersScienceLab,
Myoste,
MysteriousAlina,
N,
<NAME>. <NAME>.,
N165617,
N419045,
NA_18,
NCT,
NFust,
NUKE1989,
Nachtvogel,
Nada Abdel-Salam,
Nada <NAME>,
Nada <NAME>,
<NAME>,
Nadia Corp,
<NAME>,
<NAME>ad<NAME> B.,
<NAME>,
<NAME>,
Nakaleen,
Nakiki,
Nalyla,
Namibier,
Nan,
NanaHub,
<NAME>,
<NAME> ,
<NAME>, Ph.D.,
<NAME>,
<NAME>,
<NAME>,
Naomi2809,
Nargess.g,
Narnian,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
N<NAME>1,
N<NAME>F,
<NAME>,
<NAME>,
<NAME>,
Natsang,
Nauka,
<NAME>,
<NAME>,
Nbhedgehog,
Necker15,
<NAME>,
<NAME>,
Nepenthes1991,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Netballstar333,
Neurophile,
<NAME>,
<NAME>user,
<NAME>,
NextRoman,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> et <NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
Nik123Joh,
Nik<NAME>Raw<NAME>,
<NAME>,
<NAME>,
<NAME>,
Nikki76,
<NAME>,
Niknak2707,
<NAME>,
<NAME>sh<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>-<NAME>,
<NAME>Yu,
Nina_<NAME>,
Ninady,
Ninjoid,
Ninoue,
NoTigersInAfrica,
Noctiferix,
Node801,
<NAME>,
Noemiimeon,
NoisyMicrobe,
<NAME>,
<NAME>,
NoraAlJarbou,
<NAME>,
North Central High School in Kershaw, SC,
NotoriousVHD,
Nova329,
Novawatch,
Nrodrigo777,
NukeArts,
NunesJF,
<NAME>,
<NAME>,
Nussw<NAME>sel,
Nyamba,
Nycticorax,
Nye,
NynkS2,
OCaravella0829,
OGNaes,
OKutoNoSushi,
ORIELTON,
ObservatoryPark,
Oevans82,
Ofelia,
Ohayfield,
Ojetuno,
OkapiLove,
Okbrice14,
Okieh,
OldBluntman,
OldSwampy,
OleksiiD,
Olena-ua,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>.,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Omi,
OnePunchMan,
Oneloveson,
OnlyIsland,
Onthorfast,
OoooMatron,
OppaiDesu23,
Oranje99,
OriXan,
Ornit,
Orohena,
Oscar_Chavez,
Osnat At<NAME>,
Osprey49,
Osquid,
<NAME>wen ,
<NAME>,
<NAME>,
Owl12345,
OwlAli,
Ozro,
<NAME>.,
<NAME>,
PALewis,
<NAME>,
P<NAME>,
PF<NAME>,
PHald,
PMSPR,
<NAME>,
PWD,
PWDDCh,
PWDdr,
PWDkm,
PWDmb,
PWDsz,
<NAME>,
Packo,
Paharov,
Paige29,
PaigeGabriellex3,
Pais-ily,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Pamelahs,
PandaLion98,
PandaNation,
PandaOne,
Pandasoccer,
Pandora,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>ayette,
Paracebo,
<NAME>a,
<NAME>,
<NAME>u,
Parsa,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Patsy49,
PauDG,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Paula26,
Pa<NAME>,
Paulietta,
<NAME>,
<NAME>,
<NAME>,
<NAME>ino,
P<NAME>,
P<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Perada90,
<NAME>,
<NAME>,
<NAME>,
Perlo121,
Persephonerose,
Petanimal22,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Phasewave,
PherosNike,
<NAME>,
Phil405,
<NAME>,
<NAME> DDS,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Phi<NAME>sop<NAME>,
<NAME>,
Phl<NAME>,
<NAME>,
Ph<NAME>,
Ph<NAME>,
Phryxe,
Pi_14159,
<NAME>,
<NAME>ici,
<NAME>ier,
<NAME>,
<NAME>,
<NAME>ja,
PineapplesRCool,
Pin<NAME>,
Pinguana,
Pinguin411,
PiperRocks,
PiperS,
Pirandella,
Pito13,
Pitsi,
Pixie777,
Plinko,
Pocket_watch,
PollywithPockets,
Ponderwomum,
Pontus <NAME>,
PonyFriend23,
Popcycler,
PopiTomi,
Popmusicluv123,
Poppet13th,
Portalic,
Porter Science,
Possbert,
Poupinette,
PrairieGirl,
<NAME>,
<NAME>ek <NAME>,
<NAME>ious Smith,
<NAME>,
PrimericaRep,
ProfessorKid,
Pru_of,
Pr<NAME>,
<NAME>,
Psaltriparus_minimus,
PublicY,
Pudding2,
Pueppie68,
Purplecavingcat,
Pywacket1952,
<NAME>,
Quatsino,
Queen<NAME>,
QueenOfGasoline,
<NAME>,
QuirkyPlatypus,
R.H.,
R1chard,
RENERIBI,
RRbend,
RS,
RSchobermayr,
RSing,
RUS0032,
Rach1787,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
RachelLevine,
Rachel_R.,
Rachel_Rutland9,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>ni,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>as<NAME>13,
<NAME>,
Raul019,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
Rebeki,
RedTrev,
Redd2205,
<NAME>,
<NAME>,
Regina342,
<NAME>ichukey,
<NAME>,
Relentless363332,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
R<NAME>annon47,
<NAME>,
Rhonan,
<NAME>,
Ricardo130cc,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
RickM1989,
<NAME>,
Ri<NAME>,
<NAME> :),
Riley20,
Rille54,
Ringwood School Eco Committee,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Riverdog,
R<NAME>fusch,
Rkcomyn,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>, MSc,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>injoy73,
<NAME>inkay2,
<NAME> ,
<NAME>,
Rockette62,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>ondine,
Roneila,
Roobee,
Roown,
R<NAME>,
Rosa,
Ros<NAME>ieS,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
Rotoiti,
RoverD,
<NAME>,
Rowanstar77,
RowantheRed,
RowdyWeeps,
Rsh3222,
<NAME>,
<NAME>,
<NAME>,
<NAME>E,
Ruubjaah,
<NAME>,
Ry5Ghost,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
R<NAME>,
<NAME>,
<NAME>,
<NAME>,
S-a-be,
<NAME>,
<NAME>,
S00Z,
S1404377,
S1421881,
S745245,
SAGE-1,
SANEAlex,
SBolt89,
SCOUSER1964,
SCWilliams42,
SH<NAME>son,
SMWheeler,
SMike11,
SORINORAH,
SR9,
SRH23,
STARIDER,
SWTN,
SWude,
SZubic,
<NAME>,
<NAME>,
<NAME>,
Safari<NAME>iz,
Saibot82,
Saichira,
S<NAME>,
Sal<NAME>,
Sal<NAME>,
SallyL,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Samalexmummy,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Samoht,
Samppants,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Sand<NAME>,
<NAME>,
S<NAME>us,
<NAME>,
<NAME>,
<NAME>.,
<NAME>,
<NAME>,
<NAME>.,
Sandra987,
<NAME>,
SannaBergstr0m,
Santana <NAME>,
S<NAME>ana<NAME>,
Sapphirepegasus,
SappireTitan,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Sara_Houseman,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>.,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Sariemarais,
<NAME>,
Sarkis,
<NAME>,
Sascha1989,
Sathish.pr,
Satincat,
SatsumaConsumer,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
Scaramorey8,
Scarcat,
ScarlettBlacks,
Scarymum,
<NAME>,
<NAME>,
Schoolio,
Schorsch87,
Schule,
Schultzi,
Schwarzenbach <NAME>,
Scissorbird,
Scotssunflower,
Scott2742,
Scrappe3,
Seabury4,
Seafish,
SeaforCinnamon,
Seamsmistress,
Sean,
<NAME>,
Se<NAME>-B<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
SecondgradeNR,
Seegarneluru,
Seekind,
Sekl,
SelenaW,
Sengisisu,
<NAME>,
<NAME>,
SerengetiBob,
Ser<NAME>y,
<NAME>,
Setthecontrols,
<NAME>,
<NAME> ,
Shacharkara,
ShadowTigrex,
Shadowspinner,
<NAME>,
Sh<NAME>lyn,
Shakes113,
Shalista,
<NAME>,
Sh<NAME>ij,
<NAME>,
<NAME>,
<NAME> ,
<NAME>23,
<NAME>,
<NAME>,
Sharan.b84,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Sheepcake,
<NAME>,
<NAME>,
Shellypm,
Sheng888,
She<NAME> She<NAME>,
Sher1ff,
<NAME>,
Sherib2,
<NAME>,
<NAME>,
<NAME>,
Shill2008,
Shiphrah,
Shir-El,
ShmuelJ,
ShootingStars,
Shoreditch,
Shosha,
<NAME>,
<NAME>,
<NAME>. ,
<NAME>,
<NAME>,
SiDEBURNAZ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Sightling,
<NAME>,
<NAME>lydad,
<NAME>thrina,
<NAME>ta,
Silvergirl,
<NAME> ,
<NAME>,
SimOne_,
<NAME>,
<NAME>,
<NAME>,
SimonV2,
Simondbellamy,
SimpleCarrots,
Singerhoff,
Singing_Ginger,
SirBonobo,
SirHanni,
SirPrisinglyStupid,
SireGrumpy,
Sjaak-Jan,
Skavvie,
Skelm,
Ski83,
Skinnied,
SkinnypigXD,
SkyFall56,
Skydiver33,
Skye,
Skye Nagtegaal ,
Skye Shrader,
Skye-lyn,
Skylines01,
Skynet,
Slade Flood,
Slade_J_Sci08,
Slart,
Sleepinbeauty,
SlickWilly787,
Slugger,
Smeeta,
Smile7,
Smittybird,
Snacko,
Snafu89,
Snapshots123,
Sneeuwkoningin,
Snoopy72,
Snowdrop27,
Snowlarbaby,
Snugli,
SocratesFolly,
SofieMoyson,
SolarEclipse,
SoloBro,
Solomon Carr ,
Sombito,
<NAME>,
<NAME>,
<NAME>,
Sonja ven den Ende,
Sonjagumm,
Soosi,
<NAME>,
<NAME>,
S<NAME>,
Sophia Sie<NAME>,
Sophia lalalalalala ,
<NAME>,
S<NAME>,
Sophieeeee,
SorAzolam,
Sora_Shiro,
<NAME>,
Sothoth,
SouthernBean,
SoylentGreenIsMadeOfPeople,
SpacePanda86,
SpamRichter,
Sparassidae,
Sparkielspa,
Sparks11212,
<NAME>,
Spikenstein,
SquiddyStarr,
Squishmoose,
<NAME>,
Stacy67,
Starawareness,
Stargazer79,
Starman120457,
StarwatcherHB,
StealthAmoeba,
Stefan,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Steffan,
SteffiN,
Stegeman76,
Stella,
Stella07,
StellarBug,
StephBlack7,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Stephyoung35,
Sternschnupper,
Sterrennevel,
<NAME>,
<NAME>,
Steven_D_Mullis,
Stevieb62,
<NAME>,
<NAME>,
<NAME>,
Stinu,
Storeman20,
Stormyy,
Strubear,
Sue <NAME>,
<NAME>,
Sue <NAME>,
Sue P<NAME>ito,
S<NAME>,
<NAME>,
Sue-2,
Sue0022,
Suedetess,
Sugarbuzz,
S<NAME>,
SundogWE,
SunlessDawn,
SunnaS,
Sunny35,
SunnyD,
SuperManu74,
Super_Shay_Shay,
SuperbiaIra,
Supersciencegirl100,
Superzilla1,
Surpluscat,
Surya George,
Susa,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
S<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
SusanAtHome,
SusanLelie88,
<NAME>,
<NAME>,
<NAME>,
Suyeon,
<NAME>,
Suzanne<NAME>avelle,
<NAME>,
Suzysharpe,
<NAME>,
Svenmeyvis,
Svennie83,
SweetBee,
<NAME>,
Sylverone,
SylvieT,
Sy<NAME>,
<NAME>,
S<NAME>aka,
<NAME>,
T-M,
TAElder,
TEMPLECC,
THE_JJB,
THGU,
TL,
TLW,
TMac150,
TMeni,
TRothrock125,
TUNG84,
TWebb,
TYGR,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>amar<NAME>,
Tamaska,
<NAME>,
TangySpiderLegs,
Taniesha_Sturdavent_PS122,
<NAME>,
TanpopoKun,
TantasticOne,
<NAME>,
<NAME>,
Taproot,
<NAME>,
<NAME>,
<NAME>,
Taubenus,
Taurelaiqua,
<NAME>,
<NAME>,
Tawnytunes,
TayaRenee,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Taz620,
TazeTee,
Tazumaki,
T<NAME>,
TeachEarthSpace,
Teashan,
TechnoBeeKeeper,
<NAME>,
<NAME>,
TeeNoodles,
TeejZA,
TehShush,
Tehi,
Ten_cats,
<NAME>,
<NAME>,
<NAME>,
Terhi,
TerraSpatiale,
<NAME> Blackstone-West,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
TessaM,
Tg18,
Thajazzlady,
That1WeirdKid,
The Brennan,
The Chimp & See moderators,
TheBolter,
TheCrowe,
TheDarkVoid,
TheDemonButler,
TheEnds,
TheEpicPrimius123,
TheJewelOfJool,
TheMeeganFace,
TheRealG,
TheSciBoy,
TheSmurf,
TheTurtleKing,
TheWanderer8,
TheWishingStar,
Thebeegirl,
Thel,
Theo2016,
<NAME>od<NAME>,
Theodore Pritchard,
Thespian,
Thimblewit,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
Thornton.sarah,
<NAME>so,
<NAME>,
TiagoC,
<NAME>,
<NAME> ,
TiborAcs,
Tiffany ,
TigerDrummer,
TiggyTiger,
TildalWave,
<NAME>,
<NAME>,
<NAME>,
<NAME> and <NAME>,
<NAME> ,
<NAME>13,
<NAME>,
Timothy66360,
<NAME>,
<NAME>,
<NAME>,
Tingleton,
Tinneke,
Tipo,
<NAME>,
TisDone,
Tlittle358,
<NAME>,
Toblerone659,
<NAME> ,
Tockolock,
Toffifee,
Toki<NAME>,
Tokki,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
TomMunnery,
Tom<NAME>,
TomSun,
Tomahawk145,
<NAME>,
Tommyq,
<NAME>,
<NAME>,
TonyIP,
Toon-Keesee,
TopTop23,
Tophica,
Totoro_sootball,
<NAME>,
Towhee12,
Toxic<NAME>,
Toyosi,
Tp,
Trace Bybee,
Traceydix,
<NAME>,
Tracymyles,
Tracyv,
Traker,
Transition Year,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Trier_,
Triniturtle,
Tr<NAME>,
Trippthecat,
Triticites,
Trotto,
<NAME>,
TrudiC,
Truffle26,
Tschango,
Tslish,
TsukinoYami,
TubbyPanda,
TuesTao,
Tuketi D<NAME>,
Tummy,
TusenApor,
Tychwarel,
Tykelass,
Tyler,
<NAME> ,
TypicalTacos,
Tyra,
Tzatziki,
USAequine001,
UandA,
Ubizi,
Ulakj,
Uli S<NAME>,
Umpire,
Umut Kaan Karakaya,
UnionJCatForever,
UnscrewedPear9,
Upsilone,
Ursulala,
UsernameJ,
Us<NAME>,
Ute,
Ute <NAME>,
Ute <NAME>ider,
Uttancs,
Uvibee,
UweRumberg,
V Beutler,
VD,
VJOM,
Vague_Nomenclature,
Vajrapani,
<NAME>,
Valea,
<NAME>,
<NAME>,
Valina7me<NAME>,
VanderStel,
Vandon,
Vandusensummercamps,
<NAME>,
<NAME>,
<NAME>,
VanillaChief,
Varun ,
<NAME>,
Vault_0_boy,
Vedad,
Veegee,
Veevo,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
ViDrosera,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
VinBla,
<NAME>,
<NAME>,
VioAfrica,
Violaceae,
<NAME>,
<NAME>,
Viro,
VirtualDiana,
Visjevisje,
Vital_Signs_,
<NAME>,
<NAME>,
<NAME>,
Vix_jane,
Vlada927,
Vmadmax,
Volitioner,
VonB<NAME>,
<NAME>,
<NAME>,
<NAME>,
WWabbit,
WalgettCz,
<NAME>,
<NAME>an<NAME>,
Wandgryffin,
Weaglebec,
Weisshandgibbon,
Weltraumkoyote,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
W<NAME>_L,
<NAME>,
<NAME>,
<NAME>,
<NAME> ,
Whyevernot55,
WiggyPiggy,
Wildbarley67,
WildlifeKatie,
Wildliferspotter,
<NAME> ,
<NAME>,
<NAME>,
<NAME>,
<NAME>ye,
<NAME>,
Willyr17,
Wimoweh,
<NAME>,
Withaar,
WitsEnd,
<NAME>,
Wolfen17,
WoodenHedgehog,
WordSpiller,
WorldofZoology,
Worminator90,
Woutso,
W<NAME>itt,
X3Lord,
XOFrank,
<NAME>,
X<NAME>,
Xeka,
XexyzChronos,
XijhingsBrother,
Xoannon,
Xombie337,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
Yefah,
Y<NAME>,
YevgeniaC,
YiddleSeti,
Y<NAME> ,
Yoda555,
<NAME>,
YouTa,
<NAME>,
Ysabell,
<NAME>,
Z0e,
ZAC8121999,
Zaboomafoo,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>amb<NAME>,
<NAME>,
<NAME>,
Zanna640,
ZannaDragon,
Zarsus,
Zathrus,
ZebraQ,
Zeck,
ZedCapricorn,
Zen2go,
Zenorian,
ZenzicBit,
Zephammo,
Zerraina,
Zhongar,
Zilli,
Zinhle <NAME>,
Zinic,
Zoe I<NAME>ac,
Zoe <NAME>,
ZoesAnimals,
Zoey,
ZooBot,
ZooDad1,
Zoobot_TKD_4life,
Zoochall,
Zoomngirl,
Zooniversalcitizen,
Zooniverse99,
Zperry,
Zuppy,
ZuvaSun,
<NAME>,
Zygf<NAME>da,
<NAME>,
Zynobia,
_<NAME>_,
_<NAME>i,
_riddhii_,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
a351,
<NAME>,
<NAME>,
ab3,
<NAME>,
abbeylin1017,
abby,
abdul<NAME>i,
<NAME>a,
acheng96,
achevalier,
acmesrv,
acmrshll,
acs15e,
actionhippy,
ad<NAME>.<NAME>,
adam596,
adam_163,
<NAME>,
adamas84,
adamow78,
<NAME>,
<NAME>_<NAME>,
<NAME>son,
<NAME>deletaylor,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
aeliane,
<NAME>,
<NAME>,
<NAME>,
<NAME>ra123,
<NAME>zz<NAME>,
<NAME>asta,
ageipel,
<EMAIL>,
<NAME>,
agoogan15,
<NAME>grint,
ahein<NAME>,
<NAME>,
<NAME>jorth,
ahnlak,
ahsante7,
ahutto,
aid<NAME>os,
aideen.byrne,
aidualc,
aileesh,
aimfor,
aimsHS,
airacarvalho,
airplanesrme,
airvin6,
aitkene,
aj<NAME>,
aj<NAME>,
ajh<NAME>,
ajissocool,
akalan,
akcustomlova,
akeomuon,
akintner,
akkarma,
akkobelja,
akku99,
akl680,
aknat,
akshaymahawar,
akt2,
akuzniacka,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
albel1977,
<NAME>,
<NAME>,
<NAME>,
<NAME>one84,
<NAME>ell<NAME>,
alex12345,
<NAME>47,
<NAME>,
<NAME>,
<NAME>,
<NAME>41,
<NAME>2,
<NAME>18,
<NAME>,
<NAME>,
<NAME>z<NAME>1,
<NAME>fonsojdl,
alhen123,
<NAME>,
<NAME>,
aliceforest,
aliceruth,
<NAME>,
alihaggerty,
alina01px2020,
alinorth_893,
alisacrisp,
<NAME>.<NAME>,
<NAME>anst<NAME>,
allcompute,
<NAME>,
alleyk813,
alli3,
<NAME>_heather,
<NAME>sears,
ally83m,
allyd<NAME>in,
allyktu01,
allyphant,
alma lessing,
almpvnj,
alohabreeze,
alrp319,
alsipsclar,
altalt,
aly<NAME>,
alymohab,
<NAME>,
<NAME>dal,
alynstill,
alyshaSP88,
alysonslade,
alyssa.vazquez,
alzabel,
<NAME>.<NAME>,
<NAME>,
a<NAME>amo,
amandamontemayor88,
amazon.parallax,
amberluda,
<NAME>,
amedina2,
amellers7,
amf8384,
amgaynor,
<NAME>,
amlsilverstein,
amneris3,
amosser,
amp2003,
<NAME>ull<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
anamorisab,
anat1969,
anathemaybe,
anbalo1963,
<NAME>,
<NAME>ersenfamily,
<NAME>,
andre4321,
<NAME>_1975,
<NAME>reap21,
<NAME>,
<NAME>,
<NAME>,
androidlittle,
anergy,
anevab,
ang96,
angel<NAME>,
angel<NAME>,
angelleb,
<NAME>,
animalfriend1999,
animatus,
anita_leung329,
anjalimanohar,
anjamunder,
<NAME>.<NAME>,
annalisah92,
annalisehobson04,
annavand<NAME>,
anneborrego,
annebyford,
anneke64,
annekefreaky,
anniepg,
anniev123,
annimaxine,
annoymenow,
anocan,
antbetty,
anteater21,
antiHUMANDesigns,
antirrhinum,
antmurphy,
anzacspirit,
apatura,
apavlecic,
april12712,
aprilh,
aquitanian,
arafan,
arbayer02,
arbitercay,
arczarkowski,
areBerry,
areichle,
areinders,
areposator,
arfblacker,
argasm,
arh3399,
ariana23,
arielvera,
arlo head,
armandea,
armando752,
armonihance123,
aronnthe1,
arquita,
artbymarion,
artistjillian,
arwen252,
asafum,
asalomark,
aschlemme,
ashura,
asmibert,
aspenolivia,
asplamagnifique,
astaasta,
astabile,
astadtler,
asteer,
astrocanin,
astronomas,
atacama77,
ataly,
atambros,
atatjes,
<NAME>.<NAME>,
aubreythez,
aurelie1991,
avanderbrug,
avc13,
ave_eva,
aviator246,
awallen,
aweenink,
aweiksnar,
awething,
awhitwell,
awilson,
awsomeguy1001,
axolotl42,
aydnleeds,
ayefany,
azagh,
azjarose,
azuravel,
baa31892,
baabz,
babsie,
babucurt,
baccorsi,
badpossum,
badskittler,
baffy,
balpsa,
ban4314,
bananie,
banfathi99,
banfield.makayla,
banihal,
barbarann,
barbarapreyer,
barbaravarek,
barbiegoth,
barbora.<NAME>krylo<NAME>,
<NAME>.<NAME>,
baronv<NAME>,
barraoconnell,
barucho<NAME>,
bastiaan1984,
bastide,
batdog,
baterra,
batmue,
batuinal,
bbushey,
bcat785,
bclvines,
bcurtin,
bd2015,
bean2bone,
beastofwar,
beaumod,
becks688,
beer.frankie,
beeschnell,
beesnees4,
beingbob,
bekahmyers,
belago,
belan,
bell5a_helder,
bella118,
bellaf,
bellagisme,
belucho,
bencgauld,
bendrz,
benjamintx,
ben<NAME>,
<NAME>,
<NAME>,
bergfee1209,
<NAME>ling,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
besueandamy,
<NAME>,
<NAME>th<NAME>.<NAME>,
be<NAME>sen,
<NAME>,
<NAME>,
<NAME>1,
bgoliber,
bhagyakw,
bhau,
bigcatlover,
biggiesmith,
bigmanbt,
bigworld,
bilalsaeedkhan,
binaz,
biryani,
bisector,
bjmendius,
bjowi,
blackbird1312,
blackninja556,
blahcoon,
blair,
blair438,
blairprescott,
blake.white098,
blanco.rioja,
bliedtke,
blmonroe,
bloop12,
bluebl249,
bluebudedog,
bluefloozy,
bluefootedb,
bluestar82,
blumer17,
bmboswell,
bmontie,
bmorson,
bmtate,
bnisonger,
bob ,
bobinky,
bobthemountaingorilla,
bodoostermann,
bogabasa,
bondailady,
bonobo69,
booja,
boomerb,
boppo,
borad,
bordavis,
borodinpm,
botting,
bowened083,
bowerpizer,
boxbot,
boxingtwig,
boygordon,
boykolos,
bpempire,
bracerup,
bradley87,
brainimpact,
brainod,
brandnewkirk,
brandon lowe,
brandon preciado,
bratverst,
brenda2796,
brendaleejurewicz,
bret707,
bribrantley,
bricheese,
brincess,
britearthangel,
britico,
brittaniefay920,
brittanybeaudoin,
brivogel,
brokaf,
brokenbox,
brooketheschnook,
brown80204,
brownfox,
brunobanani100,
bryan2013,
bsweigart,
bubbakaz,
buchinli,
buchwa,
buckaroo1,
buckh34,
bucksharbor,
buehli,
buerkir,
bulgaria_mitko,
bullet57,
bumblebee2,
bumishness,
bunnigirl,
bunnypenguin,
buntubi,
burgundergerd,
burningquest,
burnspatrick.206,
butterpro,
butterscotch,
bwentz,
bwitti,
bws2002,
bzaleski,
bzpam,
bztaconic,
c.<NAME>,
c_warrell,
cailina95,
calledthemoon,
callisto73,
calyma,
camada,
cambridge.christian,
camdent,
camera_bore,
camonaco,
camrey,
canadianbacon791,
canekoch,
captainazul,
carabearcanada,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
car<NAME>_st<NAME>,
caroljh,
carostani,
carylkpr,
carylsue,
caryngreen,
carzet,
<NAME>.<NAME>,
caseykelly4,
cash717,
casmi,
casmith096,
cass3,
castorandpollux1978,
cat-shepherd,
cat1951,
catalana,
cathcollins,
<NAME>,
cathg123,
cathyschlegel,
catlovers,
catmiracle16,
<NAME>,
<NAME>,
cblair900,
cboxtoby,
cbrown004,
cbr<NAME>quist,
ccain003,
cch001,
<NAME>,
ccoc,
c<NAME>,
cdafopo,
cdavies311,
cde<NAME>,
<NAME>,
cdod<NAME>,
cecilieslc,
cedarsstudent2,
cedarsstudent3,
cedmom,
celticaire,
center100,
certifiedNinja,
ces2691,
ceversul,
cezy,
cfolk77,
cgerique,
cghudson,
cgremmich,
chalyse,
chanels,
channier,
chaosbastler,
charisrooda,
charizardace,
charles p ,
charlotte,
charly03,
chasasum,
chaselynnwarr77,
chaseo0626,
chateaumojo,
chaywood2,
cheetah90,
chekeichan,
chekhov,
chelsea.alex,
cherrycoke,
chevyman142000,
chewitt93,
cheyenne,
<NAME>,
chh2035,
chia89,
chiarapuspa,
ch<NAME>arudel,
<NAME>,
chidingbark911,
chiheb,
chilieh,
chimacummedia,
chimpandseeeee,
chimpler,
chimpsoccer,
chipd<NAME>,
chloe <NAME>,
chloejreid,
chocloteer,
choije,
chollow,
chrbod,
<NAME>,
chris2108126,
<NAME>risandann,
<NAME>risas7,
<NAME>,
<NAME>3,
<NAME>,
<NAME>urne<NAME>,
<NAME>,
<NAME>016,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>8,
<NAME>,
<NAME>ster20,
<NAME>,
<NAME>,
<NAME>43,
<NAME>,
<NAME>,
c<NAME>1954,
c<NAME>son.2019,
<NAME>,
<NAME>,
<NAME>,
<NAME>75,
<NAME>ei,
claireofthecosmos,
clairet_84,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
clee67,
cleg<NAME>,
<NAME>,
cliver<NAME>,
clllem,
cllllllloydh,
clochette,
<NAME>,
<NAME>ker,
clt21duke,
cmadd009,
cmadeira95,
cmarshall,
cmckenney,
cmel40,
cmortara,
cmsquared,
cmurdoch,
cmwgeneva,
cnafrada,
cnorvalk,
cnramey,
co1010,
coachgregnola,
cobrasplinter,
cocolocoblondie,
coconino,
cocorug,
codyduzan,
cogs,
coldcounter,
cole.erin.eedumail.vic.gov.au,
collettesea,
colon,
colowick,
colt,
comelia,
comiqueso,
connnollly,
control,
cookeva,
coolartcaleb,
cooney6,
cooperjohn,
coppard,
corbettjn,
corduroyfutures,
corgi-mom,
corto,
corvi42,
coryphella,
coslisa,
cosmic.chameleon,
cosmicos,
cosmos77,
courseiam,
covervig,
cpilkentontaylor,
cplking,
cplummerabb,
cps1500,
cr0m,
craftyscientist,
crambacher,
crawlfast,
crawlingChaos,
crd3000,
creationists,
criscat,
croenan,
crottyfamily,
crowlord,
crystalhutchins,
cryvtvl,
cschraft,
cschueppert,
csmithgeddes,
csutter,
cubear,
cushman.kc,
cuynchips,
cworm,
cyanocitta,
cybersue,
cynlynten,
cynt80,
cynthgray,
cynthiag,
cyzaki,
czapien,
czechkate1,
d.eileen.d,
d1e1b1,
d8sconz,
d_idaho,
dab vine,
daemonbarber,
daestwen,
daffodil3,
daguta,
dahmin,
daiant,
dakeeps,
dalai,
daleet,
daleh,
daliahalfon,
dallya809,
dandan595,
danielpepkezoo,
danielsd,
daniiielaaa_hurtadooo,
danisha,
danjurious,
dankpoet,
dannybussy,
danrol,
dansmith87,
daphne_7,
darcybennett,
darcygrose,
darici,
darkquetzalcoatl,
darwin829co,
darya_lagrange,
darys21,
dav2000,
dave1056,
daveb9000,
davemaze,
daveross1971,
davet679,
daveytay,
<NAME>,
<NAME>,
david_gb,
<NAME>ch,
<NAME>troy418,
<NAME>agle,
d<NAME>,
<NAME>,
day<NAME>,
<NAME>,
d<NAME>,
dbot,
<NAME>,
dbuske,
dca1,
dcampbell21,
dder<NAME>,
ddiak,
deafscribe,
deandsusan,
dearl1103,
deathscowboy07,
debJ<NAME>ski,
debbditt,
debmwill,
debnad,
debora713,
deborah16,
deborahsigler,
debperry,
debwil70,
deepwatch-1,
deetaurus,
dejaboo,
delbson,
delenaj,
delpiano,
delta_lady,
deltagamma,
delve202,
delxdune,
dembiecc,
denizenn,
derangedberger,
derschmiddi,
desh57,
desjar<NAME>.<NAME>,
devn clark,
devon.vt,
devonette,
devot,
dgmarc,
dgranrath,
dholoviak,
diamondback,
diamondone1999,
diana_monkey,
dianag825,
dianaserengeti,
dianebradley,
dieSeife,
diemade,
dileka,
dinjones,
dios,
discoverer99,
divakiana24,
djg6868a,
djlillil,
djmccon,
djsato,
dkb16d,
dksteele75,
dlr,
dlruthven,
dmaboyer,
dmagoo,
dmrtennis,
dmundil,
do_eme,
doctormoss,
dodgsonfamily,
doge panda,
doggers,
dohr,
dolphinate,
dolphincrazy,
dolphiny21,
domibowd,
<NAME>ik <NAME>,
donalthefirst,
donsa,
dont worry,
dor7539,
d<NAME>,
d<NAME>,
doroth<NAME>,
dou<NAME>.<NAME>,
down1nit,
dpcoop2,
dpellerin,
dpopovitch,
dragabalaur,
drag<NAME>ly,
drag<NAME>,
drbrezn<NAME>,
dreh<NAME>in,
drek,
d<NAME>,
d<NAME>,
drewthemunky,
drhib<NAME>,
drizzle.virus,
drizzly,
drjenncash,
drmmha,
dro<NAME>so,
drtryan,
drummerboy5031,
dryden Shillingburg,
dsekkes,
dshowell,
dsmyth6,
dstarzfn72,
dtimmermans,
duban,
duckysempai,
dulsky,
dumdiddlysquat,
dunealex,
dupagirl,
durifon,
dvbrandon,
dvc214,
dwaynemedic,
dwhite1,
dwlameris,
dwmjmm,
dww257,
dydel420,
dysonkl,
dzezi,
e-motiv,
e2d2,
e8cm5n,
eCid,
eagleclaw,
ebaldwin,
ecodiva88,
econaut,
ecorrigan,
ecotrace,
ecsjcwru,
ecvegghead,
edaro,
eddaw,
eddiecharles,
edemars,
edmo2013,
ednapissenlit,
edoxseywhitfield,
edwardlau,
eecamp22,
eellwood,
efb,
egeland,
egor_12,
eileen96,
eilemach,
eisenhuth101,
ej77,
ejc123,
ekaterin,
ekt1228,
el chapo,
elaineoutdoors,
elcat24,
elcharlot,
elcwt,
elegantemu,
elenalin,
elephant20,
eleys,
eliedeker,
elisame,
elisugar,
elizabeth,
elizabethyeomans,
elizzak,
ellaelizabeth,
ellenkronberg,
ellenmiller1,
ellie_28,
ellipsis6,
ellis<NAME>,
elloriac,
ellwoodmm,
elmod,
elmuchacho,
elsareed,
elseag,
elshrky,
elskabette,
eltreno,
elvraie,
elza,
ema98,
emalberstadt,
emalie22,
emberke,
embowen,
emerald_ware,
emily,
emily.juckes,
emilymynett,
emma,
emma2015,
emmabrooks,
emmacashmore,
emmacnapper,
emmalaura727,
emmar.luvs.cats,
emmatigerlily,
emmeline525,
emmmmmfox,
encephsagan,
encherend,
enderb,
endreh,
enr987,
enthusiastic_cell,
eoinrouine,
eos<NAME>,
eotunun,
epaltzat,
eparroway,
episk22,
epple.grant,
eptitsyn,
epurvis,
eq15,
ergalty,
ergo1,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
er<NAME>_<NAME>_<NAME>,
er<NAME>.<NAME>,
erikahowell,
erikamedwards,
eriki419,
<NAME>,
er<NAME>_<NAME>,
erinmontague,
erkslook,
ernderfer7125,
<NAME>,
<NAME>,
esazure,
escholzia,
estefanoescarate,
estlineero,
ethurau,
eupharina,
evagr,
evel_chihuahua,
eversuhoshin,
evilpokefairy,
eviltigerlily,
eyarbrough,
f4phantom,
fabfran98,
faithkatts,
faithv,
falconview,
fantasticmrsfox,
fardal,
farooque,
fathizahdeh,
fayeherold,
fbarulli,
fbn79,
fbw0304,
feather813,
fedooora,
feigdafugl,
felagund_<NAME>arato,
felicitywallis,
felipenic,
felix330,
felixo42,
felmy,
fem302,
fengist,
fenoloftaleina,
fenti,
ferisaw,
fermor332002,
fernfire,
feywoods,
fezzik,
ffayiga,
fffnerigmail.com,
fi-t,
fiashhh,
fifibantam,
filippac,
fillyba,
filska,
fina-1,
findtheriver,
finleyg,
firecatstef,
firepig,
firgym,
fisera,
fisticuffpuffs,
fix83,
fizila,
fjoellnir,
fjuveneton,
flaipert,
flamania,
flamey_amy,
flavio.p,
flavios,
flee67,
fleisnat,
flemming.westphal,
fliederle,
flipit4u,
floatingadrift,
floortap,
flossiecelia,
flyingchina,
flyingfox0212,
fnenu,
focus54321,
foghorn90,
forcher,
fortheocean,
foxfluff5,
foxtrot-lima,
fran,
francesca,
franck53,
franklindhaven,
fraukelich,
frauleinfisch,
fred572,
freddyh,
freeday,
freelulu,
friendlysceptic,
friesiancrazy,
frimkron,
fringenious,
frizzymom,
frl.u,
frogking95a,
frogln,
frolicksome,
frozenchosen,
frozenlandscape,
frumpywebkin,
fruusje,
fruusjemonty,
fryalls,
fsc2ou,
fscelzo,
fstep,
fsu1216,
fsukristen,
fukkthemainstream,
fukomys,
fukthemainstream,
fullet003,
fullsteamahead,
funk07chick,
fuocofatuo,
furryspaghetti,
fuzzyemma,
gaa17,
gabbieb,
gabriel90,
gabriellewragge,
gaelike,
gafullenwider,
gahall44,
galaxer,
galaxie062,
galaxytrekker,
galaxyzoo_guy2,
galefernow,
gamer.jamer,
gamerdad,
gamincat,
gangerolv,
ganstead,
ganymeed,
ganzegal,
garamnonfok,
gardenfairy,
gardenmaeve,
garnerdillon,
gavied509,
gbemis,
gbpereira,
gbrewer,
gcloud94,
gdeyoung,
gebue,
gelenika,
geminidragon,
gemira,
genbug,
genogenie,
gentlesoul,
georgeblack,
georgepickles,
geoski,
gerda24,
germain1,
gertyrose,
ggdchip,
gghlyon,
giarcsllim,
gieskem,
gill14,
gillskill,
gilsm0m,
giova53,
gl367,
glashelder,
glberg,
glenda701,
glicaj,
gloris69,
glsk,
gmheck,
gmzabos,
gnagy5,
godlesswarriortm,
goelzerrf,
goffrey,
goggins,
gommersellen,
gonzo818,
googee3,
gorgonsit,
gort_industries,
gotauber,
govinpandian,
grace.,
grace.santone,
graigrai,
grasilda,
grazynastasinska,
greenscrubs,
greye,
gribblet,
grieve,
grimkin,
grisu33,
grok00,
grom,
gromozeka,
grondinm,
grumrill,
gsolano217,
gspier,
gst,
gtmh,
guercait,
guevfamily,
guitarist53188,
gunnip.olivia,
gunnroni1,
guruguru,
gwennie71,
gwhw,
gwynmor,
gythaogg2,
gzconf1,
gzconf2,
h.r.,
h3px,
hairygoats,
haleyarnette,
haleynicm,
halkruth,
halogne,
hammond family,
hamptongray23,
hanbag,
hand0532,
handows,
handreae,
hannah314,
hannahjwo,
hannahmg,
hannahrq,
hannahtheamazing,
hannamarielei,
hannanowak,
happy-accidents,
har6inger,
hardicnut,
harlequindoe,
harrycooke,
harrygrounds,
hartel,
hartwellc,
hasi_s.,
haskielr,
has<NAME>,
hatfights,
h<NAME>.skj,
haversine,
<NAME>,
h<NAME>paw,
hayitsdavi,
hazard2802,
hazef,
hazelhorse425,
hbb,
hbbuddy,
hbun,
hckiger,
hcps-philliptm,
healymk2,
heath75t,
heatherleach,
heatherotis,
heathv,
heikepe59,
helabi07,
helen_butland,
helen_fewlass,
helena_ErrorDupUsername,
helene coup<NAME>,
helere,
helicity,
helloxeno,
hells_bells27,
heluna,
hendrens,
herdis,
hermannschwaerzler,
herondale123,
hester ,
hestie3,
hetts07,
heyguysitsliv,
heylei,
heyytheredude,
hfultonbennett,
hhendriks,
hial3,
hikarciu,
hilari4572,
hilit1983,
hillarygramlich,
hindenburg41,
hlkwd1,
hloliver,
hlp470,
hm99,
hnorab,
hoffi23,
holgerh,
hollygalluppo,
holmanart,
holzwege,
homermeyn,
hooplas,
hoothoot,
hopelessheron,
horkusone,
housegnome,
howdy2u,
hparker,
hsinger,
hudacko,
huffe32,
hugh12,
human_jenome,
humulus1,
hunterc273,
hussar,
hutchiebaby,
huxsterkate,
huy.duong,
hvh,
hwalkden,
hydrogene,
hyhopes,
hymnsf,
hypatia2012,
hypermonkey,
iDee,
iGoab,
iS<NAME>,
iTinnitus,
i_bojtor,
iamfern,
iamgeorge15,
iamscotty,
iancl,
ianfinnesey,
ianhuk,
ibclc2,
icecoldjjs,
icm,
icortez,
icy76549,
igonzale<NAME>,
ih18,
iiatridis,
ike.gabrielyan,
ikesplace,
ilajoie3,
ilinz,
illlookforit,
illvibetip,
ilovecats,
ilovesquidsbecause,
iluvdolfinz,
imagine57,
imhotep.ojha,
imketys,
impossibleiseasy,
imrexus,
imtushay,
inab.ecker,
incibkaya,
inercy,
infinityLTFS,
inge janson,
ingridgalejs,
inkaplit,
invader,
inyene,
ioannes,
irenera2002,
irinashemonaeva,
irisheye,
irma12,
ironinleeds,
irridescentsong,
is<NAME>,
is<NAME>_<NAME>,
isamaz,
isault,
isjke,
isol,
itak365,
itanio,
ithicks,
itsJim,
itsanurkithing,
itsfullofstars,
itsmestephanie,
itsoteric,
itzHydrq,
ivanad92,
ivanovp,
ivantama,
izabelakrause,
j50yab,
j_bewick,
jabati13,
jack2490,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
j<NAME>y0007,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
jam2269,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
j<NAME>ety24,
<NAME>,
j<NAME>_<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
jasmine b<NAME>iste ,
<NAME>,
<NAME>,
j<NAME>,
j<NAME>,
jayke<NAME>,
j<NAME>,
<NAME>.<NAME>-<NAME>,
j<NAME>,
<NAME>,
jazzy234,
<NAME>,
<NAME>,
jbakes89,
jbbeau,
jbon93,
jboo,
jbrace13,
jbundy,
jbvm,
jcd<NAME>,
jclowry14,
jcobbholmes,
<NAME>col<NAME>,
jcsuperfly,
jd10,
jdcharteris,
jdemers,
jdhouston,
jdure24,
<NAME>effabre,
jeffheif1,
jefftheengineer,
<NAME>imer,
<NAME>emaus,
<NAME>emaverick,
<NAME>en_gupta,
jenbaby0122,
jenbflower,
jengee,
jenich,
jenmcd,
<NAME> bo<NAME>,
jennandrews,
jennfurr,
jennifer_greenfield,
jennysellmark,
<NAME>eremiah.m<NAME>,
jerome48,
<NAME>eronobo,
<NAME>eskarp,
<NAME>.<NAME>,
jessbou4860,
jessica9291,
<NAME>,
<NAME>cahill,
jftex22,
jgeschke,
jgjknight,
jgraber,
jgrablin,
<NAME>,
jhook,
jhuey,
<NAME>,
<NAME>im_pea,
<NAME>imjacknjohnnie,
jimtxmiller,
<NAME>inxo,
jinxxx07,
jjbluejay,
jjlvscj,
jjohntaylor,
jjonmyown,
jkneb,
jkolb01,
jland,
jmalcomb,
jmayhew,
jmccluskey,
jmkwon,
jnick753,
jo0oley,
jo_colsal,
joanne82,
jochair,
jodic,
johnmoore84,
johnny_duke,
johnstar25,
johnstonaa,
johny50,
jojohase,
jojow,
jokergirl,
jokuunal,
jolkeur,
jollyrogered,
j<NAME>on,
jomortimer,
<NAME>onasmmiguel,
jonathanhl,
jongray,
jonnm4,
<NAME>,
joolslee,
joopvanbirgelen,
jopo1987,
<NAME>,
<NAME>ordberry,
jordyman456,
jorgegil,
jorie.hh,
<NAME>ua,
josieandelliewong,
josiepegg,
<NAME>y_t,
joyjoy,
jozsa,
jpcatanzaro,
jpsokkernut10,
jrddias,
jrfs,
jrinear,
jrlyttle23,
jrmw,
jrosese,
jrozyczka,
jrutter,
jschell42,
jscottlenz,
jsjames1,
jsout1234,
j<NAME>,
jsta<NAME>,
jteselle,
jtj616,
jtreis<NAME>,
judi0491,
judybee,
<NAME>udyross,
juj<NAME>,
jules9687,
<NAME>ab<NAME>,
<NAME>,
j<NAME>uana,
j<NAME>,
<NAME>,
jumpingranch,
junemb,
junograham,
<NAME>,
<NAME>,
justmehere,
jvigo,
jvilaseca917,
jwhit,
jwid<NAME>,
jwmaritime,
jwmast,
jwmccomb84,
jwpepper152,
jxczer00,
jynto,
jypce,
k-s,
k.till1484,
k0zm1k,
kahbel,
kaity7007,
kaiwatha,
kakabeak,
kalbuzzy,
kaleem,
kamadden,
kamandizi,
kamilg,
kamv,
kandratii,
kanek,
kanliker,
karainio,
karanrajpal,
karenkantor,
karilyn1976,
<NAME>,
<NAME>4s,
<NAME>,
karu58,
kasiasa,
katacs,
kate_r,
kateboyd,
katelynn,
katemurray,
katesmccloud,
kathb,
kathfossil,
katie8107,
katiefanch,
katiekatt89,
katiekeyser_PS122,
katiekoplien,
katieofoz,
katieverett,
katkz,
katlittell,
katneils,
kats1285,
<NAME>avic,
kazza26,
kb96,
kbaptist,
kbo<NAME>,
k<NAME>i,
k<NAME>,
kbreckangmail.com,
kcanfield,
kcoflondon,
kd14g,
kdroan,
keanna_165,
keel,
keeoeek,
kefitz,
<NAME>,
k<NAME>,
kel<NAME>ath,
kel<NAME>,
ken.<NAME>,
ken<NAME>,
kenk205,
kennij1,
kenogo,
<NAME>enz<NAME> ,
kerenor,
kerobero,
kerri12,
<NAME>,
<NAME>,
<NAME>,
kevin<NAME>,
keytofly,
kfox,
kglass4462,
kgriffin3611,
khauglund,
khbuhr,
kianth<NAME>,
kibebe238,
kikametong,
kiki2008,
killerlou,
kimaire,
kimberly535,
kimboakimbo,
kimuenga,
kingaytata,
kingofspain1234,
kinseyatoz,
<NAME>instelli,
kioruke,
kip2511,
<NAME>ir<NAME>,
<NAME>ir<NAME>,
kirbecker,
kirino,
kirst<NAME>,
kitsmelf,
kiya9132,
kjanson,
kjericks510,
kjetikada,
kjyg,
kk58,
kkenmots02,
kknight.4,
klaartje,
klb1015,
kledoux1,
kleistf,
klepsch,
klrainey,
klroot,
klwaffle,
klynne28,
kmacdonald,
kmcafee1812,
kmiyoshi,
kmkulasza,
kmlm,
kmmills,
kmmunchie,
kmt65msu,
kmunday,
kmzim2015,
kneff39,
knyghtowl,
kokrui,
korinna999,
koshlap,
kotagabi,
kponce03,
kragh,
krakenzmama,
kreiman514,
krhill322,
<NAME>,
kris79,
kris<NAME>,
krockit,
krolov,
kruegnad,
kruseau,
krutki,
krwalkup,
kryfie,
ks14k,
kschlotfelt,
ksipe,
kswitalski,
kt171,
ktangell9,
ktarkin,
ktgeorgeous,
ktouchstone,
kucheryash,
kujata1,
kulinki,
kungpowell,
kuuipo83,
kuuurista,
kwirk,
kyamini,
kyaniab,
kyburg,
kyilmaz,
kyogreman,
kzajde1,
l4abergmann,
labocania,
lacey coleman ,
lachiester,
ladymink,
laenavarro,
lafiff P<NAME>,
lailabee,
lainie52,
lankiel,
larmcd188,
<NAME>,
lau3rie,
<NAME>uginn,
<NAME>.<NAME>,
laura_germany_,
laurabjustesen,
laurabolt,
lauramaywigby,
laurasamson,
laurawhyte,
laurelbcr,
<NAME> ,
laurenbreining,
laurenriv,
laurins,
layka13,
<NAME>man,
lbaracchi,
lbriard,
lchad,
lci17410,
lcourtneysmith,
lcw21,
leah<NAME>,
leahlou99,
leannk,
lechaussette,
leeboy25,
leetcat,
leio,
leire,
lemurs366,
lena2289,
leo <NAME>,
leohelm,
leonidas907,
leonov1,
leopardi,
leopardspots12344,
lesb<NAME>,
lesfromages,
<NAME>,
lesley.mazeyhotmail.co.uk,
<NAME>,
leupster,
levint,
lewnich,
lexij11,
lfv,
lg63laddhotmail.com,
lgiannini201,
lguidera,
liam2390,
lianderson,
lianne_m,
libervurto,
lieselottl,
lifesart,
ligaron,
lightness,
lilacwood,
lilapot,
lilico,
lilmonster4evs,
lilybloom,
limelikelemon,
linda.k<NAME>,
lindapnoe,
lindawithblueeyes,
lindek,
lindsay.<NAME>,
lindseyb420,
lindylooo1,
lineds,
lioncub12,
liondave,
lisacorewyn,
lissasmom,
litlewolf2,
littlebeard,
littlebug47,
livlorton,
lizardo,
lizbuffy,
lizmaple,
lizplanet,
lizzyshoe,
lizzyvet1,
ljalthoff,
lkirshaw,
llara,
llavoie,
lmbloom8903,
lmcco,
lmcmillin,
lmd6508,
lmf76,
lmgumby,
lmhornig,
lmusytschuk,
loader96,
lobotupgrade,
lobstersocks,
lobusparietalis,
lodonzo,
loerie,
loggins,
loisendaan,
loketimes,
lolobaba1,
lolonaze,
lolortie,
lolpus,
lolymie,
lonelyviolist,
lori2u,
lotsofloudlaughing,
lou7428,
loubelle66,
louisd<NAME>,
louiseee,
loupdethies,
lovelysweetpea,
lpage,
l<NAME>,
lpd<NAME>,
l<NAME>,
lpspiel<NAME>,
lrig<NAME>,
l<NAME>,
lste<NAME>,
lste<NAME>ki,
lszatmary,
ltakiguchi,
luca-chimp,
lucashh,
lucy55,
lucycawte,
lucyyyr,
ludyboots,
lueasley,
luisandres<NAME>z,
lukejball<NAME>,
lukeonzoonivere,
lula0502,
lula14230,
lupham,
lusihuei0504,
luxray978,
luxtina,
lwerden2uwo.ca,
lyleje9,
lyndsey1987,
lyneille,
lyraloo,
m,
m1saac,
m3ganbark<NAME>,
mIl0van,
m_tennison,
maaikell,
<NAME>arten_k,
<NAME>aodha,
<NAME>,
<NAME>rown,
mackenzie.mr,
maddimendoza,
madeinspace,
madeleinen14,
madelinethespy,
madison young,
madsterr,
maewe,
mafgross,
mafintel,
magaa,
magdalen_n,
maggiea2598,
magic5379,
magnoliahigh,
maheer425,
mahynoor,
majac,
majportugal,
makaylamay,
makenziedespres,
malachi,
<NAME>colm <NAME>,
malforma,
mallory9163,
<NAME>,
<NAME>,
mamamuh,
<NAME>ammyflo,
<NAME>dellamom,
mandiwaite,
<NAME>andyaldridge,
mandymayhemxo,
<NAME>antide,
manxkats,
maoa86,
mapat,
mapper,
mapreader4,
mar10g,
<NAME>ara,
<NAME>,
marc085,
march3258,
marci61456,
marciasou,
<NAME>argef,
<NAME>argie,
<NAME>,
<NAME>,
<NAME> ,
<NAME>iafanning,
<NAME>ariannas,
<NAME>ariarmoreda,
mariechloe,
marije2410,
marijke.desmet,
marina83,
marinadoukaki,
marjanneo,
<NAME> <NAME>,
mark2004,
markhu,
marlfox580,
marrinertg4592,
mars118,
mars26,
marssociety,
martaczc,
martinbrooks,
martinsandvik,
martybeans,
marvelacad,
<NAME>,
<NAME>aryann,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>_<NAME>,
<NAME>iaslil<NAME>,
<NAME>ildak,
mattb1792,
matteo1297,
matthewsarro,
mattiab,
mattlarrea96,
matty406,
maureenmccarthy,
mauricio ure<NAME>,
max,
maxdav01,
maxhy,
maxioi,
maxozaur,
maya011,
mayah24,
mayakiona,
maylu,
mbadger,
mbailey9573,
mbaselga,
mbatla,
mbosshard,
mccauleysa,
mccreery64,
mcgeea95,
mcgovernnj,
mcheek21,
mcintold,
mckinlayross,
mcoates,
mcoc,
mcolston,
mcru1601,
mdlw,
mdomaille,
meegja,
meerclar,
meganc30,
meganesmith539,
megankieran,
meganswanson,
mejones,
mekukat,
mel.s,
mel907,
melach,
melendil,
<NAME>,
melissasmith08,
melvinj,
meme172,
memorymays13,
mercedes123,
merra1979,
metridious,
mflannigan,
mfolk77,
mfrisbie8,
mgarcia.eva,
mgarman,
mhammergren,
mheitm,
mhxyqwe123,
mi77ian,
mialumme,
miatafan,
micfil,
micha2718l,
<NAME>,
michael971,
<NAME>,
michaelzucker,
michal108,
<NAME>locke,
micmac274,
midnightiscute,
midnightisfluffy,
migrill,
<NAME>uel,
mikaselm,
<NAME>ke.<NAME>,
mikej45,
<NAME>ikelinz,
mikem2,
mikes269,
mikestill,
mikkajones,
mikyuzumaki,
milano,
milchtier,
milechampion,
milena.<NAME>,
milenski,
milesnoir,
milesspencer,
milksneaker,
milktime,
mill9,
millbrook3,
mimatronic13,
minervadreaming,
miniMinN,
minty737,
mireillebocksberger,
mirl,
missadventuregm,
missbland,
missdriller,
missk2u,
missmomma86,
misterwoodward,
misteryoung,
mistletoe,
mitata,
mitchell631,
mitsyamarsupial,
mixu74,
mizliz,
mizr<NAME>,
mjac,
mjelse,
mjohn15,
mjourney,
mkenz1099,
mkmanos,
mkmcguir,
mkub,
mlambert27,
mleinspenner,
mlhanna,
mlsacg,
mlx0000,
mm2223,
mmadden542,
mmangos,
mmartarano,
mmatessa,
mnowy,
moacir augusto coutinho,
moefinley,
moex0125,
mogget_storm,
molllyequinnn,
mollmsd26,
mollus,
<NAME>,
<NAME>.<NAME>,
momendes,
monaxue,
monbut,
money_princess,
monica moscatelli,
monicahaponski,
monkettales,
monkeyboy23,
monoculars,
monsol,
moolyballerina,
moomoohk,
moon001,
moonpie68,
morage,
moreno.meghan,
mosaic_world,
mosey_9,
mourningdove,
moxiemouse,
mpazrp2006,
mpeoples,
mpobocik,
mpriestman,
mqq,
mr.lee,
mrObbens,
mr_seeker,
mreynolds81,
mrkjad,
mrmanatee99,
mroliverridley,
mrsalphageek,
mrscoyle,
mrsmarriage,
mruffing13,
mrvaidya,
mrybczyn,
ms.physics,
ms4225,
msaligned,
mschwirck,
mseyler,
msnafziger,
mspooner,
msterreb,
mswmom,
mtjan2014,
mtornese,
mtran97,
mturman,
mudkip201,
mueslifix,
muis24,
mujumdarsr,
mulehollandaise,
murtaghc,
music788,
mutiertespinne,
mx46myra,
myfriendemma,
mynightmare1,
myrddin,
mysteryparry,
myuniverse,
myximagination,
n13j,
n165741,
na215975,
naaninn,
nadavbensh,
nadine gorman ,
nadiribro,
nadjahersacher,
nads,
naffi,
nafnaf,
nairobinights,
najuris,
nakilah,
naminag,
namtansign,
nanou,
nanselmus,
naomizf,
napdaw,
naralara,
natasha-gray,
natban1,
natbos<NAME>,
<NAME>,
<NAME>,
nate,
nate.liz.white,
nathandench83,
nathanuy1,
natkaa,
natkomba,
nature,
natwins,
naumenko.pavlik65,
navegante,
navill0948,
nayerakhairat,
naynayfalife,
naysin27,
nbcdoe,
nbetzca04,
ncmastroddi,
ndarbysh,
ndw753,
neanderthaler6,
necoras,
neebobs,
neece_haynes,
neidzwiedz,
neitsa,
nelli,
neverendingo,
neverett,
newnewschool,
newyorker570,
ng15,
nh,
nicchic8402,
<NAME>,
niceday1422,
nicestjerk,
<NAME>ichollsm,
<NAME>,
<NAME>her<NAME>1,
<NAME>ico61,
<NAME> ,
nicolec8,
nicoleflynn18,
nicostone,
nieja <NAME>,
nigeq,
night,
night_dust,
niirilli,
nikania,
nikia11,
nikolatt,
nikolosp,
nina ,
ninjabel,
nipnlap,
nirving123,
nitramko,
niv1,
niyer,
njcusack,
njwwright,
nkarn,
nlbookworm16,
nmaad083,
nmc19892004,
nnova,
noblehomeschool,
nogoodkris,
noidedpanda,
noitallgirl,
nola1034,
nolanedwin,
nomanslogin,
nonnel,
nonword,
noonizooni,
nopedose,
norauk3,
noromo,
northcrunk,
northernflower,
northernlimitptv,
nosnibor3,
notgucci,
noxvix,
nozee,
np33,
nrajkarnikar3,
ns22121,
nsm,
nsmel,
nspacefire,
nstoker,
nthrockmorton,
number1suzuki,
nutznix,
nwstone,
nyrmta,
nzRichardS,
nzanga,
obaratch,
oberon1066,
obnebion42,
ocbrown,
ocean171,
odele,
ohecbamboozledagain,
ohmanen,
oiramm,
ojacobs21,
oldapartment,
olekkurszewski,
oleksaberik,
olems,
oli123456789,
oliverhodson,
oliverlevitt,
olivers,
olivia.<NAME>,
oliviarrrrrr15,
olivier,
ollyc,
ollydavey,
olsonl,
omicronCeti,
omnicron13,
oneill4,
oneillg2g,
oneoneone,
onetheycalldru,
onlylobster,
onnikur,
oo.adeyemo,
ooburai,
orangebird,
oraulins,
orchinati,
oriza,
oroanto,
osanna,
oscarmental,
osprey_neaves,
ossoz,
otac0n,
ottecktom,
owenfay,
p.titchin,
paLLadium2,
pablos87,
padfoothp,
pagh1,
paigeautumn,
paigefigone,
paigefroebe,
paka6789,
paleogen,
palmin,
pamba,
paminaz,
pampalini,
panPsax,
panek_attack,
paolojs,
paot,
pappasadrian,
pashley108,
patchwork,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
p<NAME>inv,
p<NAME>odi<NAME>,
<NAME>,
paula33,
<NAME>ul<NAME>,
<NAME>ulgregorylang,
<NAME>,
p<NAME>el_<NAME>k<NAME>ski,
pazithigallifreya,
pckkelly,
peachblossem,
pearcec,
pearlthetiger,
<NAME>son.<NAME>rea,
<NAME>earsov,
<NAME>,
pedi59,
peekay42,
pegk56,
peiftronica,
peinguinsandpolecats,
pekkish,
pend<NAME>,
penguin1221,
<NAME>,
<NAME>,
pepys1,
<NAME>,
perkele91,
<NAME>,
<NAME>,
<NAME>,
<NAME>3,
p<NAME>_48cck,
<NAME>,
peter1234567,
<NAME>,
petrack,
<NAME>,
pg338,
pgiroux,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>a,
phk<NAME>und,
phoenix1284,
phraps,
phsc1001project,
pi2fly,
pianissimo,
pianokungfu,
piccolino,
pickle777,
picklesplays,
pigron,
pilpolly,
pimentelius,
pinguingud,
pinkpuppy,
pinkus,
pinkynz,
piotrek1010,
piotrexmeow,
pioup,
piper73,
pippo0815,
piratebrit,
pixelmesh,
pjjhurk,
pjp1959,
plambert78,
planet059,
planetari7,
playmobil,
plev13,
pmborodin,
pmgreen,
pogostickies,
pokedragonboy,
pokodinero,
polanski,
polly1,
polo24,
ponyry,
popocatepetl,
poppy42,
poppykentucky,
poundmaya,
prcromer,
priancohen,
primatelady,
princessclockie,
princesspeet,
prl,
protagoras42,
psivewright,
pssbanks,
pubgypsy,
purplegedge,
puti,
pyrosomida,
qingdou,
quarkcharm,
questathon50,
quetzalc,
quiet,
quinnr,
quinnrice127268,
quizmike,
r2rook,
r4s1,
racegirlj,
rachable,
rachaelfacey9,
rachelha23geo,
rachelklose,
rachie0204,
raddas,
radek_p,
radfordr,
radulfr,
rafael,
rafucho47,
ragingreshiram723,
rainbowhamster,
rainer,
rairai600,
raito,
ralab,
ralph.livockgmail.com,
ram2009,
ramberts,
ramoratz,
ramosc07,
ramsaut,
randolmm,
random_awesomeness,
randthor,
raphaelmaier,
raphi615,
rartho,
rascheper,
rashaveraka,
rastamon888,
rathole,
ravendrop,
ravenik45,
razvy97,
rbarba,
rbundy,
rcmason8790,
rcookman,
rcr14,
rderival,
reaganakg<NAME>,
<NAME>bbi,
<NAME>,
rebecca1104,
rebekahal,
recepturka,
recrea33,
redchaz57,
redragonPL,
redzep66,
reemab4,
regatavalon,
regularj,
reikirob,
reillyknight,
reimalkav,
reishi,
reki,
rema,
renato24,
renee corlett,
rennbird,
res96,
retiredbeep,
reydi,
reynaldo yane<NAME>,
<NAME>old,
reynoldsjake1997,
r<NAME>,
rfol<NAME>,
<NAME>,
<NAME>,
rhys123,
r<NAME>,
ribbit21,
<NAME>,
<NAME>,
<NAME>,
<NAME>id84,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
riverdrifter61,
<NAME>,
rjane314,
rjbrown1,
rjjm,
rj<NAME>,
rk<NAME>,
rks7257,
rlcantwell,
rllrllrrlrrl,
rlo<NAME>,
rmana,
rmcavoy,
rmklaus12,
rmolinand10,
<NAME>orden,
rnavajas,
rob054,
<NAME>_moore75,
<NAME>or<NAME>,
robbiederoo72,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
rocketdriver,
<NAME>gerwepf,
<NAME>gue<NAME>,
<NAME>knrn,
roland10,
<NAME>ron<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>ss,
rosie467,
ros<NAME>_lol,
rosrac,
rotband,
rotelippen,
rowejhs,
roxiesal73,
rphubbard,
rprblk,
rpricedavies,
rqj123,
rrpbgeek,
rrramtin,
rschini,
rsissons,
rsnberry,
rsuroland,
ruby<NAME>,
rubysolitaire,
rudim2014,
ruimacaco,
rush1125,
rusthen,
rutho13,
ruthparker,
ruthr,
rverghese20,
rvreeke,
rwbcanada,
ryan-rsa,
r<NAME>.<NAME>,
ry<NAME>,
ryber1,
<NAME>,
<NAME>,
s16947,
s17010,
sab2163010,
sabine-hh,
sabse,
sabsi1608,
saganesque,
sahil160,
<NAME>,
s<NAME>,
<NAME>,
sal<NAME>,
s<NAME>13,
<NAME>,
<NAME>,
<NAME>,
s<NAME>,
sambuca,
samg05,
samikes,
<NAME>9,
sam<NAME>,
samvdb,
<NAME>,
<NAME>,
san<NAME>.<NAME>,
<NAME>,
sand<NAME>,
sandyb777,
sangroff,
sanguinefiend,
sara_ha<NAME>,
sarah <NAME>,
sarah <NAME>,
sarah.g89,
sarahanneing<NAME>,
<NAME>,
sarahelizabeth,
sarahh1193,
sarahkdavis21,
sarahmjl,
sarahruddell,
sarapotato,
sararae,
sarare<NAME>,
sarasefton777,
sarinozi,
sarje,
saroka,
sascha1es,
sashaxxx,
sashifoo,
saskia.k,
saule81,
saurabh_sb,
savagehenryceltic,
savagelovesinger,
saykojack,
sazzlecat,
sbardo,
sbeltrami2019,
sburda81,
scarletfeather,
schaeferhomeschool,
schaep,
schatzy,
schnaffeltier,
schneva,
schoolpsych77,
schreibvet,
schrodinger_kat,
sciencequiche,
scisarah,
scmorris,
scooterscooby,
scottm,
scoutcain,
scscottnz,
scurf,
scytheavalon,
sdalessandro,
sdev7,
sdkprodigy,
sdmorris,
seacliff5049,
seangoldstein,
seba.ka,
sebasish,
secrone,
sehindle,
selmarsh,
semayilmaz,
semiface,
senicalvin,
sensor4,
senv,
sequoiaforest,
sfmedusa,
sganon,
sgibson,
sgray8144,
sgutierrez1993,
shadow2056,
shahd El<NAME>,
shahino6,
shainacopeland,
shamaree copeland,
shananigan6326,
shanella,
shanher,
shannacherie,
shantimorrell,
shaoisaac,
sharathsarangmath,
sharoni.p,
sharrysharry,
shauna_ss,
shawnroderick11,
shayma...,
shazzyP,
shdybr8,
sheenabeena,
shekarbr,
shellie wallace,
shepardk20,
shero777,
sherritom,
sherwinptgmb,
shinebrightlea,
shmarla,
shmo,
shmooshy,
shocko61,
shoeman,
shortypantz,
shutterbugdi,
siberia2sumatra,
sigmapi10,
sijo,
silentq,
silo,
simmonsrzoo,
simona,
simona_12,
simond0,
simone53,
simonk1987,
simonliddle,
simonpopp,
simonvandenberg,
siouxiesue,
sirbertgarcia,
siren5298,
siwanicki,
sixpacktou,
sixxsgirl,
sjacobson1112,
sjc74,
sjcguk,
sjekkerton,
sjhwriter2,
sjwk,
skandlikar,
skavanagh,
skbarks,
skcool,
skepticHominid,
skilaru,
skintchef,
skiola,
skirtedrunner,
skity5,
sklo,
skorbolamid,
skrh,
skrzypkima,
skweek,
skybabe,
skyelight,
skyfundjr0520,
skykai,
skywatcher100,
slamdinista,
slidess,
slieberthal,
sloanes12,
slone02,
slowkow,
smccluskey1982,
smdu,
smfoote,
smhirt,
smithpa01,
smitjam,
smriemann,
smvilla,
sn322929,
sn328797,
sn349849,
sn363633,
snakeshit,
snappa,
snickermonkey,
snowdragon,
snowflake1,
so211,
soesoe,
sofieET,
soflynn,
sokrates49,
solenevermont,
solomonfolk,
sommakia,
sonictruth,
sontheold,
sopcat,
sophie00,
sophie8156,
sophydj,
sosmond,
soulselller,
soundgrl,
southofnonorth,
soyopopo,
sp515507,
spacegazer,
spakd07,
spamheadsmum,
sparow01,
spbf81,
speakofthewolf,
spearleyez,
spekky4eyes,
speters17,
spiderbui,
spinachravioli,
spinblade117,
spiralhunter,
spitysh,
spook34653,
sporter72,
spotharriet,
spu00crm,
squirrella,
squishycakes,
srallen,
sramsdale,
sroosa,
srs106srs,
srswann,
ssPETERss,
ssaPlacerias,
sschmeiser,
sschmidt,
ssmikey,
ssmith6519,
sstafford4,
stantastic,
starchitect,
stargirlmoon,
starhunter11,
starmag,
starman007,
starpower1,
starrypawz,
starwig,
steblak,
steeleyc,
stefan.k,
stefanie_caspari,
stefanmago,
steffbar<NAME>,
steffifee,
stefkova,
steiche,
stengleinjl1,
step.poulain,
stephen,
stephen.<NAME>,
steph<NAME>,
stepherbot,
stephkolenda,
steve67,
stevemiller99,
stevenjohnston2017,
stevensteven,
stevewales,
stfrue,
stinapham,
stitz,
storki,
stpage1,
strategy,
strawmeadow,
stripey,
strubeldoc,
stsearcher,
stsf1234,
stuartaylor57,
stuey273,
stuk755,
stweedale,
suburbanastronomer,
sue_welfare,
sueking,
sugo,
suji3000,
sumamaru,
suncoup,
sunshinegirl,
suzannebradbury,
suzer14,
suzi78uk,
svart,
svengooliehigh,
svg5595,
swamprunner,
swapeman,
swertkin,
swsahara,
syberknight99,
sydself,
syl.schmid,
sylphia,
sylvain den<NAME>,
synapse890,
syndarella,
syracon,
syzooniverse,
t.butra,
taboret,
tactoe,
tagori,
tahall3,
tahoedox,
tai_shan,
talm,
tamma,
tammylee67,
tan sin yi,
tanja92,
tapirsun,
tardigrade_girl_9,
tarelena3004,
tasnot,
tatoon37,
taylorv212,
taylour,
tbjerkga,
tbm,
tbrinton89,
tc29121981,
tcoriell,
tecannon,
technogourmet,
teekiteasy,
teemie,
teilhaft,
telophase,
temporaryblank,
tengnoth,
terryantrim,
tessa-virginia,
tfmorris,
tfrdawson,
tgoodway-sims,
tgrkpr2000,
th123,
thackler,
thawar23,
theWisp2864,
the_irishman,
theanimalover,
theappleman,
thebookscout,
thecuriousiguana,
thedweeb,
theheyman,
thehp,
thelinm,
themitchnz,
themrtroe,
thenoobest,
theofilos,
theotherme,
thepew,
theresxcfg,
thesquiddles,
thetruemilhouse,
thezohar,
thibaultmol,
thijszilla,
thimbleberry,
thisisjaid,
thoe,
thomas lovato,
thomdemm,
thoreslm,
thormster,
threadbare,
thula,
thurston_spaceman,
tien.duong,
tiffany9913,
tiffanyfields,
tiffanylian,
tigerpieatt.net,
tikijay,
tillmannhanna,
tillytots1232,
tilo,
tim_nutter,
timbok28,
timmerma7,
timothybouting2,
tina_pixie_,
tinamarash,
tines,
tinkapuppy,
tinkerlala,
tinope66,
tinykahoona,
tjbonzo,
tjeb,
tjilptjilp,
tjnelson5,
tkarian,
tkennedy371,
tkoc,
tlmaggiola,
tlusardi,
tmarchant,
tnl25,
tnladybug,
toasterparty,
toffee59,
tofly76,
tojad,
tolls,
tombombadil117,
tomburgerpie,
tomfinn1984,
tomtom47,
tomymx,
tonnerrebird,
tonnigonzalezPHYANTRO,
<NAME>ony <NAME>,
tonyhallam001,
tonymor,
toosirrius,
tori12,
torman,
tottobauer,
tournavy,
tracev,
tracy.angel,
tracyshenton,
traianusrex,
tralliott,
tranceline,
traumeule,
travicehockey,
tre-86,
treebeard236,
tribb99,
trieloff,
tristia,
trixx,
trmayo,
trock5445,
trout13169,
trstn1,
trudauglow,
trusek,
ts.hbgr,
tsepui,
tsering,
tsheiffer,
tsoderquist,
tswittelsbach,
ttekieli,
ttessler,
ttfnrob,
ttilley,
tubasumpf,
tubbyoatmeal,
tuberdj,
tuittu,
tuleu-emma,
tweeg5039,
tweger,
twilightmoon,
twinklelittlestar,
twwittig,
txmeg,
tyhannahsaurus,
tyquana,
ufo1989,
ujansub,
ulikoehler,
ulrike krumscheid,
umalinda,
undergroundgirl,
underthearch,
undyne,
unefeepasse,
unidentified-scientific-object,
uninetverse,
upashka,
urbansonnet,
urgo42,
us7461,
uschaefer,
user_name,
uxia,
v1oletv,
vago82,
vague<NAME>_<NAME>,
val08,
vale0293,
<NAME>,
<NAME>,
valfer,
vallil,
<NAME>ski,
vanessa16480,
vanhelsing71,
vchantorn,
vdejesus,
vectorization,
vedun-z,
veenboer,
veeveequeen,
vega14k,
veggiepenguin,
vehtoh,
veleska,
velmanatov,
velthove,
vena,
venusstop,
veronicasnape17,
vertigopolka,
vestigial,
vfinnerty,
<NAME>,
<NAME>24,
<NAME>,
<NAME>aksd,
<NAME>,
<NAME>gsn,
<NAME>,
<NAME>,
<NAME>,
<NAME>.<NAME>,
<NAME>,
<NAME>ank<NAME>,
<NAME>izh,
<NAME>,
vjb<NAME>,
<NAME>mkevin,
vlad015,
vmer<NAME>,
<NAME>,
vmp32k,
<NAME>,
<NAME>hellm<NAME>,
vouchtieng ,
<NAME>ong,
vrohe,
vvv<NAME>,
vx100,
vybzbild,
wackylama,
waddles310,
walczyszyn,
wallafives,
wam<NAME>,
wamsleys,
wanda,
wannabe12,
wanni012,
warmworm,
warriorcatsophie,
watt_tyler,
waver,
wayne54,
wdecock,
weaverzinc,
webb1980,
webpixi,
werdnarb,
weromac,
wesley pawlowski,
wewa,
whaps,
whisperit,
white_squirrel,
whitelightnig,
whitetiger678086,
whooshdemon,
wibbi4,
wicked82,
wiebked,
wiedi,
wightdragon,
wildebeesty,
wildlifegisgirl,
wildlifephotographer,
wilktony,
willbarnes4,
<NAME>johnboy,
willowstar321,
willywonki,
windinhair13dd,
wingcli2014,
winnethpawtrow,
wisenheimer,
wiztess,
wizzydaz,
wojciech_k,
wolfgang1,
wosgood,
wtkoala,
wvvampire,
wwscoobydo,
wyjeong2,
x303,
x500,
xXOrchidChildXx,
x_ANT,
xanthi373,
xantoras,
xapril7x,
xbenr,
xfgiro,
xflyer,
xgraceygirlx,
xn11983,
xpeh,
yacoub,
yagerp,
yampol,
yankeegunner,
yasdollasign,
yatomi,
yedidiamesfin,
yellowbird,
yep2yel,
yesenialv,
yibet,
yinyangscurse,
yipekiay,
yk2064,
yoah,
yoda1,
yodahound,
yoey0419,
yona.s,
younge,
youthvolunteersdallaszoo.com,
yoyoman67,
yugoh,
yusuf,
yvonne brockwell,
yvonne mews,
yvonnefish,
zach_fortuna13,
zara,
zawarudo,
zazabard,
zbgirl6,
zdjuf0,
zedone_geezer,
zeezel,
zeng19930220,
zeynaz,
zhol123,
zholl25,
ziegl087,
ziglaser,
zits,
zlake,
zocker,
zoeellis,
zoharkapalan,
zoonie4893,
zoorik,
zootastic,
zooz,
zorglax,
zperez,
ztdavies,
zwanenburg,
<NAME>,
<NAME>,
<NAME>ka,
<NAME>,
<NAME> (<NAME>),
<NAME> <NAME>,
<NAME>,
임민서,
-Beach-,
.<NAME>-<NAME>,
00012853,
00054798,
02csmith,
08682293,
0Sanne0,
100dee,
1091010,
1157243,
11kb34,
11kralle,
120,
123726,
12coombesj,
136007,
13zulus,
141Dial34,
14SergioA,
14sonbri,
1510568040,
17lauk2,
1827,
1828Duke,
18acavinee,
18mclum,
18scooper,
1Ver,
1anita1,
1yioi87,
2004pamf,
20080112,
20161105,
20161169,
20161186,
20161230,
20161255,
20161287,
20161305,
20161761,
20162130,
2024-habdi,
20scuc,
212marin,
2140lacy,
214455,
214629,
214860,
215977,
216043,
216097,
21nikelove,
21perryb,
223327,
224043,
224265,
225578,
225593,
22lhaws,
2409,
25or6to4,
2609005,
2fay,
307flyfisher,
321Hanni,
3rdgradeelwood,
43corunna,
4dlabs,
56227isd,
5paws23,
5yF0Rc3,
62050isd,
62492isd,
62566isd,
64053isd,
64HaRtZa,
71099isd,
75473isd,
77636isd,
77keeg77,
7bluebird7,
7bthirdrock,
7cscience,
85martinis,
99bellam
</p>
</section>
</div>
module.exports = Authors
| true | React = require 'react/addons'
cx = React.addons.classSet
Authors = React.createClass
displayName: 'Authors'
render: ->
<div className="authors-page">
<section className="about-section content">
<h2>{@props.pageContent[0].header}</h2>
<p>{@props.pageContent[0].content}</p>
<p>
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
A13FAB,
AEagon,
AHOPI:NAME:<NAME>END_PIki,
PI:NAME:<NAME>END_PI,
AJ1984,
AJTOTHEMAXX13,
AKGal,
ALCAVTAY,
ALPI:NAME:<NAME>END_PI,
ALP1,
ALR20,
PI:NAME:<NAME>END_PI,
APav,
ARD1999,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
A_PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Aaronaguila1,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIsher,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Acipi9,
PI:NAME:<NAME>END_PI,
Adaku,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Adammada1,
Adamvanc,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AdelyPI:NAME:<NAME>END_PIj,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIrianaM2001,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AfricasFinest,
Agent_Bookworm,
AgoraChat,
Ahearyn,
Aidan PI:NAME:<NAME>END_PIM,
Aigh90,
Aikane,
Ailurus,
Ailz79,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AimeeJo,
Ainmhi,
PI:NAME:<NAME>END_PI,
Aiygeo,
Akaito,
Akat13nmkb,
Akoyan,
Akyu,
AlBatin,
AlJC,
Alagorn,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AlanaKM,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
Alanatkinson123,
Alba,
Alealonna,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI III,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIHPI:NAME:<NAME>END_PI103,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Alexandra1,
AlexandraD16000,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AlexandreLamothe,
PI:NAME:<NAME>END_PIis,
PI:NAME:<NAME>END_PI,
Alexis_V,
PI:NAME:<NAME>END_PI,
Ali_Oldenburg,
PI:NAME:<NAME>END_PI,
Alice.Rose182,
Aliceee11,
Alico,
Aligeeach,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AlisonField,
PI:NAME:<NAME>END_PI,
Alix2017,
PI:NAME:<NAME>END_PI,
AllegraS,
PI:NAME:<NAME>END_PI,
AlligatorCreator,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Alpha27,
Alvgeir,
AlxV,
Alyfc210,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI King,
PI:NAME:<NAME>END_PI,
Alyssa.G,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Amanda1961,
Amanda1975,
AmandaG,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PImePI:NAME:<NAME>END_PI,
Americanthai,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PImmathor,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Amulus,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Amy1964,
AnPI:NAME:<NAME>END_PI,
AnalytischeATI,
PI:NAME:<NAME>END_PI,
Anastasia44mk,
PI:NAME:<NAME>END_PI,
Anastassia,
Anaïs ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Andr0meda,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Andreas38871,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIJTalon,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Ang-valerie,
Angel60SX,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AngelicaKaufmann,
AngelynR7480,
PI:NAME:<NAME>END_PI,
AngieEads,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
AnilkumarA,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Anja66,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, PhD ,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI,
Anna-PI:NAME:<NAME>END_PI,
AnnaTee,
Annabz,
Annau2,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AnnePI:NAME:<NAME>END_PI,
Annepet,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AnnieOrlando,
Annie_Wiener,
PI:NAME:<NAME>END_PI,
AnnikaC,
PI:NAME:<NAME>END_PI,
Antek7,
PI:NAME:<NAME>END_PIemPI:NAME:<NAME>END_PIia,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Anthonyames,
Anthonyrice89,
Anthro-ginger,
PI:NAME:<NAME>END_PIihePI:NAME:<NAME>END_PI,
Antoinetje1959,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Antwan2004,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
AoifeN,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Aqua_Fairy,
Aralest,
Archiesgirl,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
API:NAME:<NAME>END_PIanePI:NAME:<NAME>END_PIooremans,
API:NAME:<NAME>END_PIani,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIus, SPI:NAME:<NAME>END_PI, and PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Arlette ,
ArPI:NAME:<NAME>END_PI,
Aron2007,
AronC,
Arrakatanga33,
PI:NAME:<NAME>END_PIowey,
Artemision,
ArturoG,
AruAxe,
ArwingPilot64,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIadora,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Asd3r,
Ashaheedq,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AshleyPI:NAME:<NAME>END_PIawyer,
PI:NAME:<NAME>END_PI,
Ashlyn,
PI:NAME:<NAME>END_PI,
Asillage,
PI:NAME:<NAME>END_PI,
Astro_Max,
Astromarm,
AstronautCapybara,
Astyanaxx,
Ataticek,
AtomAnt,
Atti,
Atti_der_Kleine,
Audburrito,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Augdog,
PI:NAME:<NAME>END_PI,
Aukej,
AuntieA,
Aurelide,
AuroraCV,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
AvanW,
AvatarPI:NAME:<NAME>END_PIi,
PI:NAME:<NAME>END_PI,
Avery11,
Aw80,
AwesomeAva,
Ayanori,
PI:NAME:<NAME>END_PI,
Azurite,
B ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
BCC,
BDNeil,
BKNBLK13,
BKaller,
BRutherford,
BS1234,
BWPLS29,
BZW77,
B_Grenette,
B_McC65,
BabettevanRijn,
Badgerx,
PI:NAME:<NAME>END_PI,
BakerStreet,
PI:NAME:<NAME>END_PI,
Banana,
BananaChimp,
Bananenbert,
BarabasE,
BarPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Barbaras,
Barbera,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Bastian_Peripheral_Area,
BattyElly,
PI:NAME:<NAME>END_PI,
BbnV,
BeHotSiii,
Beanysteve,
BearPI:NAME:<NAME>END_PI,
BearmanK,
BeaterGirl,
PI:NAME:<NAME>END_PI,
BeauRoch,
Beckster,
BePI:NAME:<NAME>END_PI,
Becky.k,
BeckyK8,
Beecher,
Beetleboy,
Bekki,
Belbelleb,
BelleB,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIAdams34,
PI:NAME:<NAME>END_PIEmma,
PI:NAME:<NAME>END_PIStrPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIjaminW,
Benjamin_1,
PI:NAME:<NAME>END_PIshlomo,
PI:NAME:<NAME>END_PIstg,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Berner-88,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIerry101,
PI:NAME:<NAME>END_PInorb,
Besotted,
PI:NAME:<NAME>END_PI,
Bestboy,
Bestlandlord,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Beth8,
PI:NAME:<NAME>END_PI,
Bethany6659,
Bethels54,
Betmac,
Beto100,
BettyBuh,
PI:NAME:<NAME>END_PI,
Bfurn4s,
Bgrundy,
PI:NAME:<NAME>END_PI,
Bibliophile,
BicTigrou,
Big-Dipper,
BiggerJ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Billy-n-PI:NAME:<NAME>END_PI,
BillyBrad,
Binerexis,
BiologyDrew,
Biologyzone,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIi,
BjornPB,
PI:NAME:<NAME>END_PI,
BlackLotos,
Blackdraumdancer,
Blactyde,
Blake,
Blauvogel,
BlightEdge,
BlindOracle,
BlossLearningSpot,
BlueGull,
BlueWhovian,
Blumi,
Bmnewcom2005,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Bonnie123,
BonnieC,
Bonus,
Bonut,
Boqboq,
BorisBee,
Boumans_,
Bozinator,
BradToTheBone,
BradWilliam91,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
BrainstormCreativity,
PI:NAME:<NAME>END_PI,
Brandi7293,
PI:NAME:<NAME>END_PIiAlba,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIredit,
PI:NAME:<NAME>END_PIregtje,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIhn,
BriPriUK,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Bridgitte,
BrielleJMBaker,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Brittenie,
Britti,
PI:NAME:<NAME>END_PI,
Brodstier,
Bronte,
PI:NAME:<NAME>END_PI,
Brontë ,
Bronze,
Brooke,
BrooPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Brunitski,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Brytt,
BubbleMan,
Bubo,
BuffS,
Burbear1,
Buwa,
ByPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
CATALIN NAFTANAILA,
CCCC-Unit,
CHSAPBio,
CHaRLieDonTsURf,
CJF64,
CJLSMITH,
PI:NAME:<NAME>END_PI,
CLilley,
COMPUTIAC,
COwenSmith,
CRoC,
CThPI:NAME:<NAME>END_PI,
CTidwell3,
C_McC,
CacaCrow,
PI:NAME:<NAME>END_PI,
Caecilia42,
Caitlin,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
CalPI:NAME:<NAME>END_PI,
Caleb,
CaliforniaBear,
PI:NAME:<NAME>END_PI,
Camda5585,
Camdenmo,
Cameron,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Cameron12,
PI:NAME:<NAME>END_PI,
Camrynn,
Canckaitis,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Candlemas,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
CaoimheK,
Cape2cape,
Capitano78,
Captain_Ruby,
Captain_skug,
PI:NAME:<NAME>END_PI,
Cara_1981,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Carolo52,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
CarrieVS,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI92,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI6PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
CatsEyes,
CatyB,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIazzoo,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
CeciliaPI:NAME:<NAME>END_PIwell,
Cedar Student,
CefalofoRosso,
CelePI:NAME:<NAME>END_PI,
Celestial_Caticorn,
PI:NAME:<NAME>END_PI,
Celiaros,
Cellendylle,
Celurca,
Cenkdem,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
ChPI:NAME:<NAME>END_PI VanPI:NAME:<NAME>END_PI,
Char2002,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
Charlottie96,
PI:NAME:<NAME>END_PI ,
Chas,
PI:NAME:<NAME>END_PI,
Cheavus,
Checkheck,
Cheesemouse,
PI:NAME:<NAME>END_PI ,
ChelseaL,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Chewyswimmer,
Cheyenne baird,
Chezza6,
Chg507,
ChiliP2000,
Chimper,
Chimpfinder,
Chimpinator,
Chippietom,
ChizCT,
ChPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Chouchou_Bidou,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIMM76,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Chube,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIareyclarey,
Classof2015,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIudybell,
Claus114-DK,
PI:NAME:<NAME>END_PI,
Clefairy131,
Clemdalfit,
Cletus2014,
PI:NAME:<NAME>END_PI,
Clode13,
Clue4fun4,
CPI:NAME:<NAME>END_PIick,
Cmk923,
CoconutPete90,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Codyking24,
Cole_Durden,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Colombier,
ColourFromSpace,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
CopPI:NAME:<NAME>END_PI54,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
CoriellK18,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIoryPI:NAME:<NAME>END_PIhan,
CosimoMontagu,
CosmicLatte,
CosmicZephyr,
Cosmonautic_Ape,
Costanza111,
Costimos,
Cougarrakira,
Courtney,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIraen,
Crawfork,
CrazyChicken,
Crazycatz935,
Crentist,
Creslin73,
CressBookworm,
CricketG,
Crinoline,
PI:NAME:<NAME>END_PI,
Cristóbal,
Crlucas322,
Cromby3,
Croquet,
Cruuux,
CryptoLight1,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Culdrum,
Curculionidae,
CvanNoort,
Czandigar,
PI:NAME:<NAME>END_PI,
D1verDad,
DB,
DBell87,
DEP,
DIDDLE,
PI:NAME:<NAME>END_PI,
DJAI1979,
DK86,
DM Ravnsborg,
DUDONE2,
Daikoro,
Daisy-May,
Daisy_of_Doom,
Daja_GAlaxie,
PI:NAME:<NAME>END_PI, PhD,
Dalitasdrain,
PI:NAME:<NAME>END_PIta,
PI:NAME:<NAME>END_PI,
Damon22,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
Dan987,
Dana23,
Danel890,
DanelD,
Danell711,
Dangerfield1982,
DangerousDD,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DaniellePI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
DaniloPI:NAME:<NAME>END_PIli,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DarbyHeart1,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
DarthBec,
DatAlien,
DataDroid,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PIs,
DPI:NAME:<NAME>END_PIjaw,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, Jr.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DPI:NAME:<NAME>END_PI,
Dduncan13,
DePI:NAME:<NAME>END_PI,
DePI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DearPI:NAME:<NAME>END_PI,
DearestPuppy,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIsky,
PI:NAME:<NAME>END_PI,
Deborah1955,
DeborahV,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, PhD,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI9PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DiJoPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Diandra.mr,
PI:NAME:<NAME>END_PI,
DiePI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DiemorfeldPI:NAME:<NAME>END_PI,
Dinahsaw,
DinoD123,
DinoGuy25,
Dinopenguin,
DiscoNixon,
Divas3458,
Dixybird,
Djhorsegirl,
DknightNZ,
Dlf143,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DongPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Doschps,
DotMatrix,
Dotti,
Doubleknot888,
Dr PI:NAME:<NAME>END_PI,
Dr PI:NAME:<NAME>END_PI,
Dr PI:NAME:<NAME>END_PI,
Dr. PI:NAME:<NAME>END_PI,
Dr. PI:NAME:<NAME>END_PI,
Dr. PI:NAME:<NAME>END_PI,
Dr. PI:NAME:<NAME>END_PI,
Dr.PI:NAME:<NAME>END_PI,
DrDPI:NAME:<NAME>END_PIJ,
DrHox,
DrPongo,
DrPI:NAME:<NAME>END_PI,
DrPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DrasticPI:NAME:<NAME>END_PI,
DPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
DPI:NAME:<NAME>END_PI0rZ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
E.N.G.,
ECPetra,
EDonahue,
EFlesch,
EValleyY6,
EagleLoft,
Eaglesoul,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
East74,
Echo Brooks,
EclipseCeLL,
PI:NAME:<NAME>END_PI,
EdPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Edisislost,
PI:NAME:<NAME>END_PI,
Edsploration,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI13,
PI:NAME:<NAME>END_PI,
Eileen,
Ejdarrow,
Ekiaer,
Ekima,
Elagorn,
Elaina_science,
PI:NAME:<NAME>END_PI,
Elaine PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Elise2222,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI "PI:NAME:<NAME>END_PI" PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Elizaveta2,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Ellen904,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
EllisPI:NAME:<NAME>END_PIell,
PI:NAME:<NAME>END_PI,
ElsieUnderscore,
PI:NAME:<NAME>END_PI,
Elun,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Emanuele676,
Emera9,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Emily.H.zooniverse,
PI:NAME:<NAME>END_PIilyPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
EmmaJacobs,
EmmaVR,
Emmawhinnie2,
Emmelmann,
EmpireMousetrap,
Emptyrainbowbbls,
Emrosie,
Engler-Schneider,
EnjoySasa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIoraPI:NAME:<NAME>END_PIdePI:NAME:<NAME>END_PI,
EnragedPlatypus,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
EpicBlackDragon,
PI:NAME:<NAME>END_PIyPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
ErPI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Erufailon42,
PI:NAME:<NAME>END_PIsai,
Esbn,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIyPI:NAME:<NAME>END_PI,
Eswimmer0206,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
EthereaL,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
EvEnuS17,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Eve00675,
PI:NAME:<NAME>END_PI,
EvenPI:NAME:<NAME>END_PIus,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
FB-XL5,
FCardou,
FINDALLTEHALIENZ,
FaZe_Leder,
Fabi1923,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
Family St Quintin,
Farah_2005,
PI:NAME:<NAME>END_PI,
FayeSomething,
Fearnaught,
FedTheMo,
Feliade,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
FightingDodo,
Filibuster,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
FizzPI:NAME:<NAME>END_PI,
Flamingo,
Flexo221281,
FlightoftheValkerries,
Flint1545,
FlipperBozz,
FloMe,
PI:NAME:<NAME>END_PI,
Flying_J,
PI:NAME:<NAME>END_PI,
Foggyworld,
FourWinds,
Foxstar82,
Fr3d3r1k_M,
FranB25,
FranBow,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIkles,
PI:NAME:<NAME>END_PIVanPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
FreyaJC,
Frozyth,
Fuchskind,
Funkyfalcon,
Funnyfavorer101,
Funnyguts,
Furiat,
Furniture307,
FuzzyJones,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
G.ePI:NAME:<NAME>END_PIyn1214,
G.gorilla324,
G2EK,
GBauses,
GKaramanski,
GLambourne,
GMadeiros,
GSmith7018,
Gabeth123,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GabrielaPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Gabriellajm,
GabPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GalaxyTrain420,
GalenaBear,
Gallium,
GameBrown_Family,
GammaGreenthumb,
Gandalv,
GanneC,
Garlandiana,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Gavitron_2000,
Gbaez,
GdVI,
Ge0de,
GeWe,
Gedelgo,
Geeds,
Geek2,
Geeklette,
GeirM,
PI:NAME:<NAME>END_PI,
Gekco,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Genosse Boss,
Genotype2,
Geode_,
Geometer,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GeorgiaGootee,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Gethere2,
Ggatton,
Ghazi,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GigantoPI:NAME:<NAME>END_PI,
Gill_666,
PI:NAME:<NAME>END_PI,
Gillian_Borland,
Gillis57,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIingertwin,
GPI:NAME:<NAME>END_PI1,
PI:NAME:<NAME>END_PI,
Giomv,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Giu_Gennaio,
Giuseppe,
GizmoMischief,
PI:NAME:<NAME>END_PI,
GloriousMundane,
Glosoli,
GnatMan,
Godchild,
Gonodactylus,
Gordon_Forrest,
Gordonalistair,
Gorgar,
Gorgonos,
Gotenks,
Goupus,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Grace_Banks,
Grace_Little,
Graceling1,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Greenfields,
Greengirl65,
Greenlightrj,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GPI:NAME:<NAME>END_PIoireblond,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GrepPip,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Grethe,
GreyPhoenix,
Gribby,
Grimnir,
Grintalsm,
Gumby,
Gumok,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
GuteMine,
Gwillewyn,
PI:NAME:<NAME>END_PI,
H8stinks,
HKE,
HMPenguins,
Haedyn,
PI:NAME:<NAME>END_PI,
HaPI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIaley,
PI:NAME:<NAME>END_PIaleywigal,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Hanibal94,
PI:NAME:<NAME>END_PIannPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
HansPI:NAME:<NAME>END_PIpPI:NAME:<NAME>END_PI,
Hanschibal,
HaplessHero,
HappyEve,
Harambe PI:NAME:<NAME>END_PI On ,
Hardrockhopper,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIavarPI:NAME:<NAME>END_PI,
Haya,
Hayley,
HeathRussiaGc,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIhenPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Hippie1427,
Historysid,
Hitodama,
Hnigg,
Hoggo,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIJ,
Hookage,
HoosierGen,
PI:NAME:<NAME>END_PI,
Horia007,
Howard_Richards69FL,
Howardsn68,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIi,
Huddo,
Huffer907,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIugoBallee,
HumbertoRickenbacker,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Huntress,
HybridX,
HydroxyChloride,
PI:NAME:<NAME>END_PIye PI:NAME:<NAME>END_PI,
Hypatia1,
I'm not telling you my real name. Give up.,
PI:NAME:<NAME>END_PI,
IEM,
IL-K,
IMadeThisForYou,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
IanGalaxyZoo,
Iasmim,
Icecrasher,
Icephoenix96,
Iduun,
Idw,
PI:NAME:<NAME>END_PI,
IkariaDaviau,
IlseB,
Iltis,
PI:NAME:<NAME>END_PI,
Imes,
Inaari,
India Yip,
IngerInger,
Ingrid DOMKPO,
Ingridanita,
Ingridbio,
Inki81,
PI:NAME:<NAME>END_PI,
IPI:NAME:<NAME>END_PIen,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI,
IPI:NAME:<NAME>END_PI-oxf,
IPI:NAME:<NAME>END_PI,
IrishAstro,
IrishRottie,
IrisPI:NAME:<NAME>END_PI,
IronWhale,
Is0cre,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
IsPI:NAME:<NAME>END_PIa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Isaman10,
Isannah,
Iseeit,
Isissea,
Isitme,
Issiah,
Itallcounts,
Itsasmallworld27,
Itsug,
IPI:NAME:<NAME>END_PI ,
Ivanovna,
PI:NAME:<NAME>END_PI,
IvdO,
Ivorostojic,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Iyahalyssah2,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
J.A.J,
J3300918,
JDLA,
JDonnici,
JF392813E,
JG1996,
JGF,
JJ199,
JJ53,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JOB PI:NAME:<NAME>END_PI,
JOsPI:NAME:<NAME>END_PI,
JPJP_333,
JPenn2,
JPenn550,
JPlegge,
JPrice01,
JSN2001,
JVersPI:NAME:<NAME>END_PI,
JWearth,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jaeti13,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jamoni,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jan11965,
JanPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JanetCCS,
JanetCSB,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIari PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI1PI:NAME:<NAME>END_PI,
Jasminejo24,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Javiermv,
Jawofech,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jaynemet,
Jaynet88,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jdj1,
Je11y,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JeeaaR,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jeff0253,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI24,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JPI:NAME:<NAME>END_PI,
Jenx595,
Jenzoo44,
PI:NAME:<NAME>END_PIdnotHPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JeroenMJ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jewelanne,
JeweloftheNile,
Jgronmark,
Jhf,
JillPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jimmy2010,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIinPI:NAME:<NAME>END_PI,
Jjuju,
Jmallory1996,
Jmart3029,
Jmyers0517,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JoHB,
JoKD,
PI:NAME:<NAME>END_PIanL,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JoelDR,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JohannaThePenguinCounter,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
John170,
John93,
JPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jordanboys4,
Jorge2898,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Josh.davies87,
PI:NAME:<NAME>END_PI,
Josie,
PI:NAME:<NAME>END_PI,
Jowuijts,
PI:NAME:<NAME>END_PI,
JoyWorld,
Jr9355,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Juanan_89,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Jujubee18,
Julebambus,
PI:NAME:<NAME>END_PI,
Julesdomalain,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JulPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
JulietteHotel,
Juln,
Juls2783,
JungleHyena,
Junior_Guide,
Juraj,
JustPI:NAME:<NAME>END_PI,
JusticeStone,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Juty,
Jyard2,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
K PI:NAME:<NAME>END_PI,
K PI:NAME:<NAME>END_PI,
K P PI:NAME:<NAME>END_PI,
K20Kat,
KGuk,
KHowlett,
KK_27,
KKramke1014,
KLUTSCH PI:NAME:<NAME>END_PI,
KS_RM_CB,
KTMorton,
KTcakie,
KYL,
Kaate86,
Kaelee,
PI:NAME:<NAME>END_PIagPI:NAME:<NAME>END_PI,
Kaida,
KaitlinawPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KaiyolPI:NAME:<NAME>END_PI,
KPI:NAME:<NAME>END_PI,
Kakan,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Kangaroo36,
Kantos,
PI:NAME:<NAME>END_PIaotix,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Karen_c.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Karijn,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Karina28,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIz,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIatS82,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIvitje,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIatelynh,
KateyND,
Katfish,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KathleenCO,
KathleenZ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIatiePI:NAME:<NAME>END_PIley3,
PI:NAME:<NAME>END_PI17,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Katt_25,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Katybb,
PI:NAME:<NAME>END_PI,
Kautton,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Kazesim,
PI:NAME:<NAME>END_PI,
Kbzephyr,
Kcilf90,
Keats14,
PI:NAME:<NAME>END_PIes,
Keerthana11,
PI:NAME:<NAME>END_PI,
Keircurb,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI La Rue,
KeithMason,
PI:NAME:<NAME>END_PI,
KelliK2,
Kellie920,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KenzieV.2005,
Keren,
PI:NAME:<NAME>END_PI,
Kerima_Hill,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KevinP2,
PI:NAME:<NAME>END_PI,
Kgo1526,
Khalinka,
Khas,
Kiana,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Kimberz86,
Kimbo_2112,
Kimvbelle,
Kirsi87,
PI:NAME:<NAME>END_PI,
KirstenPI:NAME:<NAME>END_PI,
Kiru,
Kishachimp,
Kist,
PI:NAME:<NAME>END_PI,
Kitsuneko,
PI:NAME:<NAME>END_PI,
Kjo1626,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KPI:NAME:<NAME>END_PI,
Knowles1315,
PI:NAME:<NAME>END_PI,
Kobold27,
KoldPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Koukouwaya,
KPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Krglass10,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Kristensop,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KrkSmth,
Kruss375,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KPI:NAME:<NAME>END_PI,
Ku,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
KyoVang,
L-AForbes,
L-Glendinning,
PI:NAME:<NAME>END_PI,
LEGO_NINJA,
LH_Kelley,
LINAC,
LJE,
LKavanagh,
LN8x,
LOLeannie,
LPO,
LRC Academy,
LSpeedie,
LaPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
Ladanmusic,
Lady12,
Lady_Godiva22,
Ladyann,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LailaA,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LPI:NAME:<NAME>END_PIorna,
Lamutamu,
PI:NAME:<NAME>END_PIong,
PI:NAME:<NAME>END_PIan Thai,
Landvermesser,
Laquaine2737,
LaraM,
PI:NAME:<NAME>END_PI,
LarkinC,
LarPI:NAME:<NAME>END_PIelPI:NAME:<NAME>END_PIigung,
PI:NAME:<NAME>END_PI,
Latitude23,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LauraCun,
LauraTheExplorer,
Lauralamble,
Laurapinguin,
LauravdMark,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LaurelG,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Laurin_003,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LazyL,
LeRyck,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIher,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Lecter7,
PI:NAME:<NAME>END_PI,
LeeBurn,
LeeKick,
Lee_HarrisPI:NAME:<NAME>END_PIn,
Lee_RePI:NAME:<NAME>END_PI,
Leibniz,
Leichman Career & Transition Center,
Leine.03,
PI:NAME:<NAME>END_PI,
Lemoncupcakes37,
Lemonlorraine,
LenaPI:NAME:<NAME>END_PI,
Lena_elh,
PI:NAME:<NAME>END_PI,
Lennoxville,
Leo,
LeoMFR,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LesleyLee,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Lesterj1972,
LesPI:NAME:<NAME>END_PIon,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Levicorpus,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LewisOPI:NAME:<NAME>END_PIary,
Lewyke,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Liambebb,
Lianus,
Librarian22,
Lichtecho,
LifeBounces,
LifeScienceMojo,
Lifeforce,
PI:NAME:<NAME>END_PI,
Liie74,
Liisu.R.,
PI:NAME:<NAME>END_PI,
Lillafjanten,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LillieC,
Lilmissmechanic,
Lilunebrium,
Lily,
PI:NAME:<NAME>END_PI,
LilySchultz2002,
Lilygwen,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LPI:NAME:<NAME>END_PIen,
LindaHagbergSweden,
Linguin,
LinkandNavi,
LinkyLongleaf,
Linnie14,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LisainVA,
PI:NAME:<NAME>END_PI,
Lissa623,
LiverLover,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
LizCoops,
LizDownes,
Liziuri,
Lizzietish81,
Lizzifer711,
Lizzy06,
Lizzyf23,
Lmcminn,
Lmcurls,
Lofty_h,
LoganSammy,
Lolipop666,
Lololiz,
Lomky,
PI:NAME:<NAME>END_PI,
Lonewolf66,
Look4Star,
Loony_Lovegood,
Lordofthepies,
PI:NAME:<NAME>END_PI,
LoriM,
Loschmidt,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Louccby,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Lovarq,
Lparkhurst,
Ltlsun,
Lu72,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
LucyitSwD,
Ludika,
Ludovic,
Luispeikou,
PI:NAME:<NAME>END_PI,
Luke .S,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Luna54,
LunaMona,
Luqras,
Luthar22,
PI:NAME:<NAME>END_PI,
Lyckerish,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Lyneth,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Lynnmrose,
Lynzw,
Lyrajane,
Lyss2303,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
M&P Noonan,
PI:NAME:<NAME>END_PI,
M00N,
MALASSIGNE PI:NAME:<NAME>END_PI,
MBear,
MBrett,
MDA,
MDrass,
MKDS,
MLSBetreuung,
MM-Noorsalehi,
MMT90,
MNePI:NAME:<NAME>END_PIson,
MOBI,
MOSI's Youth Advisory Board,
MPIchimp01,
MPIchimp02,
MSMurphy,
MaSeKind,
MabelH,
PI:NAME:<NAME>END_PIabi,
PI:NAME:<NAME>END_PI,
Machielovic,
PI:NAME:<NAME>END_PI,
Macrell,
MadPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI L,
PI:NAME:<NAME>END_PI,
Maddy1221,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIafalda2015,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Magellan_Lin,
Magessa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Maggielikeszoology,
Maggiewill,
MagicMitch,
Magzie2000,
Mahmoth,
PI:NAME:<NAME>END_PI,
Mai ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Maike42,
Mainge,
PI:NAME:<NAME>END_PI,
Mako001,
MalPI:NAME:<NAME>END_PIT,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIamePI:NAME:<NAME>END_PI,
Mamphie7ty,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mang19,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIuela11,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MarPI:NAME:<NAME>END_PI9,
PI:NAME:<NAME>END_PI,
Marena84,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MargaretW,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
Marianagazga,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIie-FrPI:NAME:<NAME>END_PI,
MariePG,
Marie_eve_78,
PI:NAME:<NAME>END_PI,
MariluC,
Marinka1704,
PI:NAME:<NAME>END_PI,
MarioMe,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mariposa123,
Marisachimpa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MartPI:NAME:<NAME>END_PIx,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mary196,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MPI:NAME:<NAME>END_PI,
Mas2201,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIscience,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
MattB559,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Maureen77,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MaxKa,
Maxbax,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mazzy,
PI:NAME:<NAME>END_PI,
Mburke777,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Meanjean4321,
Mear48,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MePI:NAME:<NAME>END_PI,
Mel-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIissaPI:NAME:<NAME>END_PIee,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MePI:NAME:<NAME>END_PI,
Melvis,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MerPI:NAME:<NAME>END_PI32,
Mercury82,
MPI:NAME:<NAME>END_PIie,
PI:NAME:<NAME>END_PI,
MermaidBird,
Merowig,
Messi0088,
Metamikt,
Mht,
PI:NAME:<NAME>END_PIia ,
MiaTheFeline,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MicahB,
Mich.bell,
MichLM,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Michael_B,
Michaela810,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, Esq.,
PI:NAME:<NAME>END_PI.,
Michkov,
PI:NAME:<NAME>END_PI,
Micmac1,
MidgeUniverse,
MidnightRook,
Miepie88,
MPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mikaylab,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mikerttt,
Mikeyser,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
MikrokliPI:NAME:<NAME>END_PI,
Mikytron,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Milanp,
PI:NAME:<NAME>END_PI.,
Miles_John,
PI:NAME:<NAME>END_PI,
Mima2,
PI:NAME:<NAME>END_PI,
Mimiko,
Mimster2,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
Mindslaver,
Minev,
MingMing,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Mirime,
Mirjam,
Mirkomaniac,
Mirrakor,
MirrorscapeDC,
Mirsandia,
MissS,
Mission73,
Missoedipa,
Missybee35,
MisterMorPI:NAME:<NAME>END_PI,
Misterhamm,
Mistrinanka,
PI:NAME:<NAME>END_PI,
Mitsiy14,
Mizzou,
Mjay,
Mkdtasha,
Mkele,
Mmjm,
Mna225284,
Mochyn,
Mohot,
Mokka,
PI:NAME:<NAME>END_PI,
MolniyaD,
Momkey,
Mona09506,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
MonicaMacD,
MonkeySis,
Monolakes,
Montexes,
Moonbeam,
Moone,
MPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Morganavila,
PI:NAME:<NAME>END_PI,
Mortal_Speaker,
Motherjoanne,
MotionsensorElke,
Mowat,
Mr-SlPI:NAME:<NAME>END_PI,
Mr.To,
MrPI:NAME:<NAME>END_PI,
MrBrPI:NAME:<NAME>END_PI,
MrCup,
MrES,
Mrs PI:NAME:<NAME>END_PI,
Mrs. PI:NAME:<NAME>END_PI,
MrslPI:NAME:<NAME>END_PI,
Mrsthayer13,
MsTurPI:NAME:<NAME>END_PI,
MsPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Muggy7,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIistrada,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Muskratte,
Mvpiggelen,
MwalPI:NAME:<NAME>END_PI,
Mydrynn,
MyersScienceLab,
Myoste,
MysteriousAlina,
N,
PI:NAME:<NAME>END_PI. PI:NAME:<NAME>END_PI.,
N165617,
N419045,
NA_18,
NCT,
NFust,
NUKE1989,
Nachtvogel,
Nada Abdel-Salam,
Nada PI:NAME:<NAME>END_PI,
Nada PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Nadia Corp,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIadPI:NAME:<NAME>END_PI B.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Nakaleen,
Nakiki,
Nalyla,
Namibier,
Nan,
NanaHub,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI, Ph.D.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Naomi2809,
Nargess.g,
Narnian,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
NPI:NAME:<NAME>END_PI1,
NPI:NAME:<NAME>END_PIF,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Natsang,
Nauka,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Nbhedgehog,
Necker15,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Nepenthes1991,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Netballstar333,
Neurophile,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIuser,
PI:NAME:<NAME>END_PI,
NextRoman,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI et PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Nik123Joh,
NikPI:NAME:<NAME>END_PIRawPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Nikki76,
PI:NAME:<NAME>END_PI,
Niknak2707,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIshPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIYu,
Nina_PI:NAME:<NAME>END_PI,
Ninady,
Ninjoid,
Ninoue,
NoTigersInAfrica,
Noctiferix,
Node801,
PI:NAME:<NAME>END_PI,
Noemiimeon,
NoisyMicrobe,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
NoraAlJarbou,
PI:NAME:<NAME>END_PI,
North Central High School in Kershaw, SC,
NotoriousVHD,
Nova329,
Novawatch,
Nrodrigo777,
NukeArts,
NunesJF,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
NusswPI:NAME:<NAME>END_PIsel,
Nyamba,
Nycticorax,
Nye,
NynkS2,
OCaravella0829,
OGNaes,
OKutoNoSushi,
ORIELTON,
ObservatoryPark,
Oevans82,
Ofelia,
Ohayfield,
Ojetuno,
OkapiLove,
Okbrice14,
Okieh,
OldBluntman,
OldSwampy,
OleksiiD,
Olena-ua,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Omi,
OnePunchMan,
Oneloveson,
OnlyIsland,
Onthorfast,
OoooMatron,
OppaiDesu23,
Oranje99,
OriXan,
Ornit,
Orohena,
Oscar_Chavez,
Osnat AtPI:NAME:<NAME>END_PI,
Osprey49,
Osquid,
PI:NAME:<NAME>END_PIwen ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Owl12345,
OwlAli,
Ozro,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
PALewis,
PI:NAME:<NAME>END_PI,
PPI:NAME:<NAME>END_PI,
PFPI:NAME:<NAME>END_PI,
PHald,
PMSPR,
PI:NAME:<NAME>END_PI,
PWD,
PWDDCh,
PWDdr,
PWDkm,
PWDmb,
PWDsz,
PI:NAME:<NAME>END_PI,
Packo,
Paharov,
Paige29,
PaigeGabriellex3,
Pais-ily,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Pamelahs,
PandaLion98,
PandaNation,
PandaOne,
Pandasoccer,
Pandora,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIayette,
Paracebo,
PI:NAME:<NAME>END_PIa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIu,
Parsa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Patsy49,
PauDG,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Paula26,
PaPI:NAME:<NAME>END_PI,
Paulietta,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIino,
PPI:NAME:<NAME>END_PI,
PPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Perada90,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Perlo121,
Persephonerose,
Petanimal22,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Phasewave,
PherosNike,
PI:NAME:<NAME>END_PI,
Phil405,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI DDS,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PhiPI:NAME:<NAME>END_PIsopPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PhlPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PhPI:NAME:<NAME>END_PI,
PhPI:NAME:<NAME>END_PI,
Phryxe,
Pi_14159,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIici,
PI:NAME:<NAME>END_PIier,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIja,
PineapplesRCool,
PinPI:NAME:<NAME>END_PI,
Pinguana,
Pinguin411,
PiperRocks,
PiperS,
Pirandella,
Pito13,
Pitsi,
Pixie777,
Plinko,
Pocket_watch,
PollywithPockets,
Ponderwomum,
Pontus PI:NAME:<NAME>END_PI,
PonyFriend23,
Popcycler,
PopiTomi,
Popmusicluv123,
Poppet13th,
Portalic,
Porter Science,
Possbert,
Poupinette,
PrairieGirl,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIek PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIious Smith,
PI:NAME:<NAME>END_PI,
PrimericaRep,
ProfessorKid,
Pru_of,
PrPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Psaltriparus_minimus,
PublicY,
Pudding2,
Pueppie68,
Purplecavingcat,
Pywacket1952,
PI:NAME:<NAME>END_PI,
Quatsino,
QueenPI:NAME:<NAME>END_PI,
QueenOfGasoline,
PI:NAME:<NAME>END_PI,
QuirkyPlatypus,
R.H.,
R1chard,
RENERIBI,
RRbend,
RS,
RSchobermayr,
RSing,
RUS0032,
Rach1787,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
RachelLevine,
Rachel_R.,
Rachel_Rutland9,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PIni,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIasPI:NAME:<NAME>END_PI13,
PI:NAME:<NAME>END_PI,
Raul019,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Rebeki,
RedTrev,
Redd2205,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Regina342,
PI:NAME:<NAME>END_PIichukey,
PI:NAME:<NAME>END_PI,
Relentless363332,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
RPI:NAME:<NAME>END_PIannon47,
PI:NAME:<NAME>END_PI,
Rhonan,
PI:NAME:<NAME>END_PI,
Ricardo130cc,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
RickM1989,
PI:NAME:<NAME>END_PI,
RiPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI :),
Riley20,
Rille54,
Ringwood School Eco Committee,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Riverdog,
RPI:NAME:<NAME>END_PIfusch,
Rkcomyn,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI, MSc,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIinjoy73,
PI:NAME:<NAME>END_PIinkay2,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
Rockette62,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIondine,
Roneila,
Roobee,
Roown,
RPI:NAME:<NAME>END_PI,
Rosa,
RosPI:NAME:<NAME>END_PIieS,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Rotoiti,
RoverD,
PI:NAME:<NAME>END_PI,
Rowanstar77,
RowantheRed,
RowdyWeeps,
Rsh3222,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIE,
Ruubjaah,
PI:NAME:<NAME>END_PI,
Ry5Ghost,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
RPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
S-a-be,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
S00Z,
S1404377,
S1421881,
S745245,
SAGE-1,
SANEAlex,
SBolt89,
SCOUSER1964,
SCWilliams42,
SHPI:NAME:<NAME>END_PIson,
SMWheeler,
SMike11,
SORINORAH,
SR9,
SRH23,
STARIDER,
SWTN,
SWude,
SZubic,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SafariPI:NAME:<NAME>END_PIiz,
Saibot82,
Saichira,
SPI:NAME:<NAME>END_PI,
SalPI:NAME:<NAME>END_PI,
SalPI:NAME:<NAME>END_PI,
SallyL,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Samalexmummy,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Samoht,
Samppants,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SandPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SPI:NAME:<NAME>END_PIus,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.,
Sandra987,
PI:NAME:<NAME>END_PI,
SannaBergstr0m,
Santana PI:NAME:<NAME>END_PI,
SPI:NAME:<NAME>END_PIanaPI:NAME:<NAME>END_PI,
Sapphirepegasus,
SappireTitan,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sara_Houseman,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI.,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sariemarais,
PI:NAME:<NAME>END_PI,
Sarkis,
PI:NAME:<NAME>END_PI,
Sascha1989,
Sathish.pr,
Satincat,
SatsumaConsumer,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Scaramorey8,
Scarcat,
ScarlettBlacks,
Scarymum,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Schoolio,
Schorsch87,
Schule,
Schultzi,
Schwarzenbach PI:NAME:<NAME>END_PI,
Scissorbird,
Scotssunflower,
Scott2742,
Scrappe3,
Seabury4,
Seafish,
SeaforCinnamon,
Seamsmistress,
Sean,
PI:NAME:<NAME>END_PI,
SePI:NAME:<NAME>END_PI-BPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SecondgradeNR,
Seegarneluru,
Seekind,
Sekl,
SelenaW,
Sengisisu,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SerengetiBob,
SerPI:NAME:<NAME>END_PIy,
PI:NAME:<NAME>END_PI,
Setthecontrols,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Shacharkara,
ShadowTigrex,
Shadowspinner,
PI:NAME:<NAME>END_PI,
ShPI:NAME:<NAME>END_PIlyn,
Shakes113,
Shalista,
PI:NAME:<NAME>END_PI,
ShPI:NAME:<NAME>END_PIij,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI23,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sharan.b84,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sheepcake,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Shellypm,
Sheng888,
ShePI:NAME:<NAME>END_PI ShePI:NAME:<NAME>END_PI,
Sher1ff,
PI:NAME:<NAME>END_PI,
Sherib2,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Shill2008,
Shiphrah,
Shir-El,
ShmuelJ,
ShootingStars,
Shoreditch,
Shosha,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI. ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SiDEBURNAZ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sightling,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIlydad,
PI:NAME:<NAME>END_PIthrina,
PI:NAME:<NAME>END_PIta,
Silvergirl,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
SimOne_,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SimonV2,
Simondbellamy,
SimpleCarrots,
Singerhoff,
Singing_Ginger,
SirBonobo,
SirHanni,
SirPrisinglyStupid,
SireGrumpy,
Sjaak-Jan,
Skavvie,
Skelm,
Ski83,
Skinnied,
SkinnypigXD,
SkyFall56,
Skydiver33,
Skye,
Skye Nagtegaal ,
Skye Shrader,
Skye-lyn,
Skylines01,
Skynet,
Slade Flood,
Slade_J_Sci08,
Slart,
Sleepinbeauty,
SlickWilly787,
Slugger,
Smeeta,
Smile7,
Smittybird,
Snacko,
Snafu89,
Snapshots123,
Sneeuwkoningin,
Snoopy72,
Snowdrop27,
Snowlarbaby,
Snugli,
SocratesFolly,
SofieMoyson,
SolarEclipse,
SoloBro,
Solomon Carr ,
Sombito,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sonja ven den Ende,
Sonjagumm,
Soosi,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SPI:NAME:<NAME>END_PI,
Sophia SiePI:NAME:<NAME>END_PI,
Sophia lalalalalala ,
PI:NAME:<NAME>END_PI,
SPI:NAME:<NAME>END_PI,
Sophieeeee,
SorAzolam,
Sora_Shiro,
PI:NAME:<NAME>END_PI,
Sothoth,
SouthernBean,
SoylentGreenIsMadeOfPeople,
SpacePanda86,
SpamRichter,
Sparassidae,
Sparkielspa,
Sparks11212,
PI:NAME:<NAME>END_PI,
Spikenstein,
SquiddyStarr,
Squishmoose,
PI:NAME:<NAME>END_PI,
Stacy67,
Starawareness,
Stargazer79,
Starman120457,
StarwatcherHB,
StealthAmoeba,
Stefan,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Steffan,
SteffiN,
Stegeman76,
Stella,
Stella07,
StellarBug,
StephBlack7,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Stephyoung35,
Sternschnupper,
Sterrennevel,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Steven_D_Mullis,
Stevieb62,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Stinu,
Storeman20,
Stormyy,
Strubear,
Sue PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sue PI:NAME:<NAME>END_PI,
Sue PPI:NAME:<NAME>END_PIito,
SPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Sue-2,
Sue0022,
Suedetess,
Sugarbuzz,
SPI:NAME:<NAME>END_PI,
SundogWE,
SunlessDawn,
SunnaS,
Sunny35,
SunnyD,
SuperManu74,
Super_Shay_Shay,
SuperbiaIra,
Supersciencegirl100,
Superzilla1,
Surpluscat,
Surya George,
Susa,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SusanAtHome,
SusanLelie88,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Suyeon,
PI:NAME:<NAME>END_PI,
SuzannePI:NAME:<NAME>END_PIavelle,
PI:NAME:<NAME>END_PI,
Suzysharpe,
PI:NAME:<NAME>END_PI,
Svenmeyvis,
Svennie83,
SweetBee,
PI:NAME:<NAME>END_PI,
Sylverone,
SylvieT,
SyPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
SPI:NAME:<NAME>END_PIaka,
PI:NAME:<NAME>END_PI,
T-M,
TAElder,
TEMPLECC,
THE_JJB,
THGU,
TL,
TLW,
TMac150,
TMeni,
TRothrock125,
TUNG84,
TWebb,
TYGR,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIamarPI:NAME:<NAME>END_PI,
Tamaska,
PI:NAME:<NAME>END_PI,
TangySpiderLegs,
Taniesha_Sturdavent_PS122,
PI:NAME:<NAME>END_PI,
TanpopoKun,
TantasticOne,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Taproot,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Taubenus,
Taurelaiqua,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Tawnytunes,
TayaRenee,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Taz620,
TazeTee,
Tazumaki,
TPI:NAME:<NAME>END_PI,
TeachEarthSpace,
Teashan,
TechnoBeeKeeper,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
TeeNoodles,
TeejZA,
TehShush,
Tehi,
Ten_cats,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Terhi,
TerraSpatiale,
PI:NAME:<NAME>END_PI Blackstone-West,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
TessaM,
Tg18,
Thajazzlady,
That1WeirdKid,
The Brennan,
The Chimp & See moderators,
TheBolter,
TheCrowe,
TheDarkVoid,
TheDemonButler,
TheEnds,
TheEpicPrimius123,
TheJewelOfJool,
TheMeeganFace,
TheRealG,
TheSciBoy,
TheSmurf,
TheTurtleKing,
TheWanderer8,
TheWishingStar,
Thebeegirl,
Thel,
Theo2016,
PI:NAME:<NAME>END_PIodPI:NAME:<NAME>END_PI,
Theodore Pritchard,
Thespian,
Thimblewit,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
Thornton.sarah,
PI:NAME:<NAME>END_PIso,
PI:NAME:<NAME>END_PI,
TiagoC,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
TiborAcs,
Tiffany ,
TigerDrummer,
TiggyTiger,
TildalWave,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI13,
PI:NAME:<NAME>END_PI,
Timothy66360,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Tingleton,
Tinneke,
Tipo,
PI:NAME:<NAME>END_PI,
TisDone,
Tlittle358,
PI:NAME:<NAME>END_PI,
Toblerone659,
PI:NAME:<NAME>END_PI ,
Tockolock,
Toffifee,
TokiPI:NAME:<NAME>END_PI,
Tokki,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
TomMunnery,
TomPI:NAME:<NAME>END_PI,
TomSun,
Tomahawk145,
PI:NAME:<NAME>END_PI,
Tommyq,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
TonyIP,
Toon-Keesee,
TopTop23,
Tophica,
Totoro_sootball,
PI:NAME:<NAME>END_PI,
Towhee12,
ToxicPI:NAME:<NAME>END_PI,
Toyosi,
Tp,
Trace Bybee,
Traceydix,
PI:NAME:<NAME>END_PI,
Tracymyles,
Tracyv,
Traker,
Transition Year,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Trier_,
Triniturtle,
TrPI:NAME:<NAME>END_PI,
Trippthecat,
Triticites,
Trotto,
PI:NAME:<NAME>END_PI,
TrudiC,
Truffle26,
Tschango,
Tslish,
TsukinoYami,
TubbyPanda,
TuesTao,
Tuketi DPI:NAME:<NAME>END_PI,
Tummy,
TusenApor,
Tychwarel,
Tykelass,
Tyler,
PI:NAME:<NAME>END_PI ,
TypicalTacos,
Tyra,
Tzatziki,
USAequine001,
UandA,
Ubizi,
Ulakj,
Uli SPI:NAME:<NAME>END_PI,
Umpire,
Umut Kaan Karakaya,
UnionJCatForever,
UnscrewedPear9,
Upsilone,
Ursulala,
UsernameJ,
UsPI:NAME:<NAME>END_PI,
Ute,
Ute PI:NAME:<NAME>END_PI,
Ute PI:NAME:<NAME>END_PIider,
Uttancs,
Uvibee,
UweRumberg,
V Beutler,
VD,
VJOM,
Vague_Nomenclature,
Vajrapani,
PI:NAME:<NAME>END_PI,
Valea,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Valina7mePI:NAME:<NAME>END_PI,
VanderStel,
Vandon,
Vandusensummercamps,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
VanillaChief,
Varun ,
PI:NAME:<NAME>END_PI,
Vault_0_boy,
Vedad,
Veegee,
Veevo,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
ViDrosera,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
VinBla,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
VioAfrica,
Violaceae,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Viro,
VirtualDiana,
Visjevisje,
Vital_Signs_,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Vix_jane,
Vlada927,
Vmadmax,
Volitioner,
VonBPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
WWabbit,
WalgettCz,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIanPI:NAME:<NAME>END_PI,
Wandgryffin,
Weaglebec,
Weisshandgibbon,
Weltraumkoyote,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
WPI:NAME:<NAME>END_PI_L,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
Whyevernot55,
WiggyPiggy,
Wildbarley67,
WildlifeKatie,
Wildliferspotter,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIye,
PI:NAME:<NAME>END_PI,
Willyr17,
Wimoweh,
PI:NAME:<NAME>END_PI,
Withaar,
WitsEnd,
PI:NAME:<NAME>END_PI,
Wolfen17,
WoodenHedgehog,
WordSpiller,
WorldofZoology,
Worminator90,
Woutso,
WPI:NAME:<NAME>END_PIitt,
X3Lord,
XOFrank,
PI:NAME:<NAME>END_PI,
XPI:NAME:<NAME>END_PI,
Xeka,
XexyzChronos,
XijhingsBrother,
Xoannon,
Xombie337,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Yefah,
YPI:NAME:<NAME>END_PI,
YevgeniaC,
YiddleSeti,
YPI:NAME:<NAME>END_PI ,
Yoda555,
PI:NAME:<NAME>END_PI,
YouTa,
PI:NAME:<NAME>END_PI,
Ysabell,
PI:NAME:<NAME>END_PI,
Z0e,
ZAC8121999,
Zaboomafoo,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIambPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
Zanna640,
ZannaDragon,
Zarsus,
Zathrus,
ZebraQ,
Zeck,
ZedCapricorn,
Zen2go,
Zenorian,
ZenzicBit,
Zephammo,
Zerraina,
Zhongar,
Zilli,
Zinhle PI:NAME:<NAME>END_PI,
Zinic,
Zoe IPI:NAME:<NAME>END_PIac,
Zoe PI:NAME:<NAME>END_PI,
ZoesAnimals,
Zoey,
ZooBot,
ZooDad1,
Zoobot_TKD_4life,
Zoochall,
Zoomngirl,
Zooniversalcitizen,
Zooniverse99,
Zperry,
Zuppy,
ZuvaSun,
PI:NAME:<NAME>END_PI,
ZygfPI:NAME:<NAME>END_PIda,
PI:NAME:<NAME>END_PI,
Zynobia,
_PI:NAME:<NAME>END_PI_,
_PI:NAME:<NAME>END_PIi,
_riddhii_,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
a351,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
ab3,
PI:NAME:<NAME>END_PI,
abbeylin1017,
abby,
abdulPI:NAME:<NAME>END_PIi,
PI:NAME:<NAME>END_PIa,
acheng96,
achevalier,
acmesrv,
acmrshll,
acs15e,
actionhippy,
adPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
adam596,
adam_163,
PI:NAME:<NAME>END_PI,
adamas84,
adamow78,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIson,
PI:NAME:<NAME>END_PIdeletaylor,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
aeliane,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIra123,
PI:NAME:<NAME>END_PIzzPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIasta,
ageipel,
PI:EMAIL:<EMAIL>END_PI,
PI:NAME:<NAME>END_PI,
agoogan15,
PI:NAME:<NAME>END_PIgrint,
aheinPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIjorth,
ahnlak,
ahsante7,
ahutto,
aidPI:NAME:<NAME>END_PIos,
aideen.byrne,
aidualc,
aileesh,
aimfor,
aimsHS,
airacarvalho,
airplanesrme,
airvin6,
aitkene,
ajPI:NAME:<NAME>END_PI,
ajPI:NAME:<NAME>END_PI,
ajhPI:NAME:<NAME>END_PI,
ajissocool,
akalan,
akcustomlova,
akeomuon,
akintner,
akkarma,
akkobelja,
akku99,
akl680,
aknat,
akshaymahawar,
akt2,
akuzniacka,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
albel1977,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIone84,
PI:NAME:<NAME>END_PIellPI:NAME:<NAME>END_PI,
alex12345,
PI:NAME:<NAME>END_PI47,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI41,
PI:NAME:<NAME>END_PI2,
PI:NAME:<NAME>END_PI18,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIzPI:NAME:<NAME>END_PI1,
PI:NAME:<NAME>END_PIfonsojdl,
alhen123,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
aliceforest,
aliceruth,
PI:NAME:<NAME>END_PI,
alihaggerty,
alina01px2020,
alinorth_893,
alisacrisp,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIanstPI:NAME:<NAME>END_PI,
allcompute,
PI:NAME:<NAME>END_PI,
alleyk813,
alli3,
PI:NAME:<NAME>END_PI_heather,
PI:NAME:<NAME>END_PIsears,
ally83m,
allydPI:NAME:<NAME>END_PIin,
allyktu01,
allyphant,
alma lessing,
almpvnj,
alohabreeze,
alrp319,
alsipsclar,
altalt,
alyPI:NAME:<NAME>END_PI,
alymohab,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIdal,
alynstill,
alyshaSP88,
alysonslade,
alyssa.vazquez,
alzabel,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
aPI:NAME:<NAME>END_PIamo,
amandamontemayor88,
amazon.parallax,
amberluda,
PI:NAME:<NAME>END_PI,
amedina2,
amellers7,
amf8384,
amgaynor,
PI:NAME:<NAME>END_PI,
amlsilverstein,
amneris3,
amosser,
amp2003,
PI:NAME:<NAME>END_PIullPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
anamorisab,
anat1969,
anathemaybe,
anbalo1963,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIersenfamily,
PI:NAME:<NAME>END_PI,
andre4321,
PI:NAME:<NAME>END_PI_1975,
PI:NAME:<NAME>END_PIreap21,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
androidlittle,
anergy,
anevab,
ang96,
angelPI:NAME:<NAME>END_PI,
angelPI:NAME:<NAME>END_PI,
angelleb,
PI:NAME:<NAME>END_PI,
animalfriend1999,
animatus,
anita_leung329,
anjalimanohar,
anjamunder,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
annalisah92,
annalisehobson04,
annavandPI:NAME:<NAME>END_PI,
anneborrego,
annebyford,
anneke64,
annekefreaky,
anniepg,
anniev123,
annimaxine,
annoymenow,
anocan,
antbetty,
anteater21,
antiHUMANDesigns,
antirrhinum,
antmurphy,
anzacspirit,
apatura,
apavlecic,
april12712,
aprilh,
aquitanian,
arafan,
arbayer02,
arbitercay,
arczarkowski,
areBerry,
areichle,
areinders,
areposator,
arfblacker,
argasm,
arh3399,
ariana23,
arielvera,
arlo head,
armandea,
armando752,
armonihance123,
aronnthe1,
arquita,
artbymarion,
artistjillian,
arwen252,
asafum,
asalomark,
aschlemme,
ashura,
asmibert,
aspenolivia,
asplamagnifique,
astaasta,
astabile,
astadtler,
asteer,
astrocanin,
astronomas,
atacama77,
ataly,
atambros,
atatjes,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
aubreythez,
aurelie1991,
avanderbrug,
avc13,
ave_eva,
aviator246,
awallen,
aweenink,
aweiksnar,
awething,
awhitwell,
awilson,
awsomeguy1001,
axolotl42,
aydnleeds,
ayefany,
azagh,
azjarose,
azuravel,
baa31892,
baabz,
babsie,
babucurt,
baccorsi,
badpossum,
badskittler,
baffy,
balpsa,
ban4314,
bananie,
banfathi99,
banfield.makayla,
banihal,
barbarann,
barbarapreyer,
barbaravarek,
barbiegoth,
barbora.PI:NAME:<NAME>END_PIkryloPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
baronvPI:NAME:<NAME>END_PI,
barraoconnell,
baruchoPI:NAME:<NAME>END_PI,
bastiaan1984,
bastide,
batdog,
baterra,
batmue,
batuinal,
bbushey,
bcat785,
bclvines,
bcurtin,
bd2015,
bean2bone,
beastofwar,
beaumod,
becks688,
beer.frankie,
beeschnell,
beesnees4,
beingbob,
bekahmyers,
belago,
belan,
bell5a_helder,
bella118,
bellaf,
bellagisme,
belucho,
bencgauld,
bendrz,
benjamintx,
benPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
bergfee1209,
PI:NAME:<NAME>END_PIling,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
besueandamy,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIthPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
bePI:NAME:<NAME>END_PIsen,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI1,
bgoliber,
bhagyakw,
bhau,
bigcatlover,
biggiesmith,
bigmanbt,
bigworld,
bilalsaeedkhan,
binaz,
biryani,
bisector,
bjmendius,
bjowi,
blackbird1312,
blackninja556,
blahcoon,
blair,
blair438,
blairprescott,
blake.white098,
blanco.rioja,
bliedtke,
blmonroe,
bloop12,
bluebl249,
bluebudedog,
bluefloozy,
bluefootedb,
bluestar82,
blumer17,
bmboswell,
bmontie,
bmorson,
bmtate,
bnisonger,
bob ,
bobinky,
bobthemountaingorilla,
bodoostermann,
bogabasa,
bondailady,
bonobo69,
booja,
boomerb,
boppo,
borad,
bordavis,
borodinpm,
botting,
bowened083,
bowerpizer,
boxbot,
boxingtwig,
boygordon,
boykolos,
bpempire,
bracerup,
bradley87,
brainimpact,
brainod,
brandnewkirk,
brandon lowe,
brandon preciado,
bratverst,
brenda2796,
brendaleejurewicz,
bret707,
bribrantley,
bricheese,
brincess,
britearthangel,
britico,
brittaniefay920,
brittanybeaudoin,
brivogel,
brokaf,
brokenbox,
brooketheschnook,
brown80204,
brownfox,
brunobanani100,
bryan2013,
bsweigart,
bubbakaz,
buchinli,
buchwa,
buckaroo1,
buckh34,
bucksharbor,
buehli,
buerkir,
bulgaria_mitko,
bullet57,
bumblebee2,
bumishness,
bunnigirl,
bunnypenguin,
buntubi,
burgundergerd,
burningquest,
burnspatrick.206,
butterpro,
butterscotch,
bwentz,
bwitti,
bws2002,
bzaleski,
bzpam,
bztaconic,
c.PI:NAME:<NAME>END_PI,
c_warrell,
cailina95,
calledthemoon,
callisto73,
calyma,
camada,
cambridge.christian,
camdent,
camera_bore,
camonaco,
camrey,
canadianbacon791,
canekoch,
captainazul,
carabearcanada,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
carPI:NAME:<NAME>END_PI_stPI:NAME:<NAME>END_PI,
caroljh,
carostani,
carylkpr,
carylsue,
caryngreen,
carzet,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
caseykelly4,
cash717,
casmi,
casmith096,
cass3,
castorandpollux1978,
cat-shepherd,
cat1951,
catalana,
cathcollins,
PI:NAME:<NAME>END_PI,
cathg123,
cathyschlegel,
catlovers,
catmiracle16,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
cblair900,
cboxtoby,
cbrown004,
cbrPI:NAME:<NAME>END_PIquist,
ccain003,
cch001,
PI:NAME:<NAME>END_PI,
ccoc,
cPI:NAME:<NAME>END_PI,
cdafopo,
cdavies311,
cdePI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
cdodPI:NAME:<NAME>END_PI,
cecilieslc,
cedarsstudent2,
cedarsstudent3,
cedmom,
celticaire,
center100,
certifiedNinja,
ces2691,
ceversul,
cezy,
cfolk77,
cgerique,
cghudson,
cgremmich,
chalyse,
chanels,
channier,
chaosbastler,
charisrooda,
charizardace,
charles p ,
charlotte,
charly03,
chasasum,
chaselynnwarr77,
chaseo0626,
chateaumojo,
chaywood2,
cheetah90,
chekeichan,
chekhov,
chelsea.alex,
cherrycoke,
chevyman142000,
chewitt93,
cheyenne,
PI:NAME:<NAME>END_PI,
chh2035,
chia89,
chiarapuspa,
chPI:NAME:<NAME>END_PIarudel,
PI:NAME:<NAME>END_PI,
chidingbark911,
chiheb,
chilieh,
chimacummedia,
chimpandseeeee,
chimpler,
chimpsoccer,
chipdPI:NAME:<NAME>END_PI,
chloe PI:NAME:<NAME>END_PI,
chloejreid,
chocloteer,
choije,
chollow,
chrbod,
PI:NAME:<NAME>END_PI,
chris2108126,
PI:NAME:<NAME>END_PIrisandann,
PI:NAME:<NAME>END_PIrisas7,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI3,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIurnePI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI016,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI8,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIster20,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI43,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
cPI:NAME:<NAME>END_PI1954,
cPI:NAME:<NAME>END_PIson.2019,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI75,
PI:NAME:<NAME>END_PIei,
claireofthecosmos,
clairet_84,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
clee67,
clegPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
cliverPI:NAME:<NAME>END_PI,
clllem,
cllllllloydh,
clochette,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIker,
clt21duke,
cmadd009,
cmadeira95,
cmarshall,
cmckenney,
cmel40,
cmortara,
cmsquared,
cmurdoch,
cmwgeneva,
cnafrada,
cnorvalk,
cnramey,
co1010,
coachgregnola,
cobrasplinter,
cocolocoblondie,
coconino,
cocorug,
codyduzan,
cogs,
coldcounter,
cole.erin.eedumail.vic.gov.au,
collettesea,
colon,
colowick,
colt,
comelia,
comiqueso,
connnollly,
control,
cookeva,
coolartcaleb,
cooney6,
cooperjohn,
coppard,
corbettjn,
corduroyfutures,
corgi-mom,
corto,
corvi42,
coryphella,
coslisa,
cosmic.chameleon,
cosmicos,
cosmos77,
courseiam,
covervig,
cpilkentontaylor,
cplking,
cplummerabb,
cps1500,
cr0m,
craftyscientist,
crambacher,
crawlfast,
crawlingChaos,
crd3000,
creationists,
criscat,
croenan,
crottyfamily,
crowlord,
crystalhutchins,
cryvtvl,
cschraft,
cschueppert,
csmithgeddes,
csutter,
cubear,
cushman.kc,
cuynchips,
cworm,
cyanocitta,
cybersue,
cynlynten,
cynt80,
cynthgray,
cynthiag,
cyzaki,
czapien,
czechkate1,
d.eileen.d,
d1e1b1,
d8sconz,
d_idaho,
dab vine,
daemonbarber,
daestwen,
daffodil3,
daguta,
dahmin,
daiant,
dakeeps,
dalai,
daleet,
daleh,
daliahalfon,
dallya809,
dandan595,
danielpepkezoo,
danielsd,
daniiielaaa_hurtadooo,
danisha,
danjurious,
dankpoet,
dannybussy,
danrol,
dansmith87,
daphne_7,
darcybennett,
darcygrose,
darici,
darkquetzalcoatl,
darwin829co,
darya_lagrange,
darys21,
dav2000,
dave1056,
daveb9000,
davemaze,
daveross1971,
davet679,
daveytay,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
david_gb,
PI:NAME:<NAME>END_PIch,
PI:NAME:<NAME>END_PItroy418,
PI:NAME:<NAME>END_PIagle,
dPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
dayPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
dPI:NAME:<NAME>END_PI,
dbot,
PI:NAME:<NAME>END_PI,
dbuske,
dca1,
dcampbell21,
dderPI:NAME:<NAME>END_PI,
ddiak,
deafscribe,
deandsusan,
dearl1103,
deathscowboy07,
debJPI:NAME:<NAME>END_PIski,
debbditt,
debmwill,
debnad,
debora713,
deborah16,
deborahsigler,
debperry,
debwil70,
deepwatch-1,
deetaurus,
dejaboo,
delbson,
delenaj,
delpiano,
delta_lady,
deltagamma,
delve202,
delxdune,
dembiecc,
denizenn,
derangedberger,
derschmiddi,
desh57,
desjarPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
devn clark,
devon.vt,
devonette,
devot,
dgmarc,
dgranrath,
dholoviak,
diamondback,
diamondone1999,
diana_monkey,
dianag825,
dianaserengeti,
dianebradley,
dieSeife,
diemade,
dileka,
dinjones,
dios,
discoverer99,
divakiana24,
djg6868a,
djlillil,
djmccon,
djsato,
dkb16d,
dksteele75,
dlr,
dlruthven,
dmaboyer,
dmagoo,
dmrtennis,
dmundil,
do_eme,
doctormoss,
dodgsonfamily,
doge panda,
doggers,
dohr,
dolphinate,
dolphincrazy,
dolphiny21,
domibowd,
PI:NAME:<NAME>END_PIik PI:NAME:<NAME>END_PI,
donalthefirst,
donsa,
dont worry,
dor7539,
dPI:NAME:<NAME>END_PI,
dPI:NAME:<NAME>END_PI,
dorothPI:NAME:<NAME>END_PI,
douPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
down1nit,
dpcoop2,
dpellerin,
dpopovitch,
dragabalaur,
dragPI:NAME:<NAME>END_PIly,
dragPI:NAME:<NAME>END_PI,
drbreznPI:NAME:<NAME>END_PI,
drehPI:NAME:<NAME>END_PIin,
drek,
dPI:NAME:<NAME>END_PI,
dPI:NAME:<NAME>END_PI,
drewthemunky,
drhibPI:NAME:<NAME>END_PI,
drizzle.virus,
drizzly,
drjenncash,
drmmha,
droPI:NAME:<NAME>END_PIso,
drtryan,
drummerboy5031,
dryden Shillingburg,
dsekkes,
dshowell,
dsmyth6,
dstarzfn72,
dtimmermans,
duban,
duckysempai,
dulsky,
dumdiddlysquat,
dunealex,
dupagirl,
durifon,
dvbrandon,
dvc214,
dwaynemedic,
dwhite1,
dwlameris,
dwmjmm,
dww257,
dydel420,
dysonkl,
dzezi,
e-motiv,
e2d2,
e8cm5n,
eCid,
eagleclaw,
ebaldwin,
ecodiva88,
econaut,
ecorrigan,
ecotrace,
ecsjcwru,
ecvegghead,
edaro,
eddaw,
eddiecharles,
edemars,
edmo2013,
ednapissenlit,
edoxseywhitfield,
edwardlau,
eecamp22,
eellwood,
efb,
egeland,
egor_12,
eileen96,
eilemach,
eisenhuth101,
ej77,
ejc123,
ekaterin,
ekt1228,
el chapo,
elaineoutdoors,
elcat24,
elcharlot,
elcwt,
elegantemu,
elenalin,
elephant20,
eleys,
eliedeker,
elisame,
elisugar,
elizabeth,
elizabethyeomans,
elizzak,
ellaelizabeth,
ellenkronberg,
ellenmiller1,
ellie_28,
ellipsis6,
ellisPI:NAME:<NAME>END_PI,
elloriac,
ellwoodmm,
elmod,
elmuchacho,
elsareed,
elseag,
elshrky,
elskabette,
eltreno,
elvraie,
elza,
ema98,
emalberstadt,
emalie22,
emberke,
embowen,
emerald_ware,
emily,
emily.juckes,
emilymynett,
emma,
emma2015,
emmabrooks,
emmacashmore,
emmacnapper,
emmalaura727,
emmar.luvs.cats,
emmatigerlily,
emmeline525,
emmmmmfox,
encephsagan,
encherend,
enderb,
endreh,
enr987,
enthusiastic_cell,
eoinrouine,
eosPI:NAME:<NAME>END_PI,
eotunun,
epaltzat,
eparroway,
episk22,
epple.grant,
eptitsyn,
epurvis,
eq15,
ergalty,
ergo1,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
erPI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
erPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
erikahowell,
erikamedwards,
eriki419,
PI:NAME:<NAME>END_PI,
erPI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
erinmontague,
erkslook,
ernderfer7125,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
esazure,
escholzia,
estefanoescarate,
estlineero,
ethurau,
eupharina,
evagr,
evel_chihuahua,
eversuhoshin,
evilpokefairy,
eviltigerlily,
eyarbrough,
f4phantom,
fabfran98,
faithkatts,
faithv,
falconview,
fantasticmrsfox,
fardal,
farooque,
fathizahdeh,
fayeherold,
fbarulli,
fbn79,
fbw0304,
feather813,
fedooora,
feigdafugl,
felagund_PI:NAME:<NAME>END_PIarato,
felicitywallis,
felipenic,
felix330,
felixo42,
felmy,
fem302,
fengist,
fenoloftaleina,
fenti,
ferisaw,
fermor332002,
fernfire,
feywoods,
fezzik,
ffayiga,
fffnerigmail.com,
fi-t,
fiashhh,
fifibantam,
filippac,
fillyba,
filska,
fina-1,
findtheriver,
finleyg,
firecatstef,
firepig,
firgym,
fisera,
fisticuffpuffs,
fix83,
fizila,
fjoellnir,
fjuveneton,
flaipert,
flamania,
flamey_amy,
flavio.p,
flavios,
flee67,
fleisnat,
flemming.westphal,
fliederle,
flipit4u,
floatingadrift,
floortap,
flossiecelia,
flyingchina,
flyingfox0212,
fnenu,
focus54321,
foghorn90,
forcher,
fortheocean,
foxfluff5,
foxtrot-lima,
fran,
francesca,
franck53,
franklindhaven,
fraukelich,
frauleinfisch,
fred572,
freddyh,
freeday,
freelulu,
friendlysceptic,
friesiancrazy,
frimkron,
fringenious,
frizzymom,
frl.u,
frogking95a,
frogln,
frolicksome,
frozenchosen,
frozenlandscape,
frumpywebkin,
fruusje,
fruusjemonty,
fryalls,
fsc2ou,
fscelzo,
fstep,
fsu1216,
fsukristen,
fukkthemainstream,
fukomys,
fukthemainstream,
fullet003,
fullsteamahead,
funk07chick,
fuocofatuo,
furryspaghetti,
fuzzyemma,
gaa17,
gabbieb,
gabriel90,
gabriellewragge,
gaelike,
gafullenwider,
gahall44,
galaxer,
galaxie062,
galaxytrekker,
galaxyzoo_guy2,
galefernow,
gamer.jamer,
gamerdad,
gamincat,
gangerolv,
ganstead,
ganymeed,
ganzegal,
garamnonfok,
gardenfairy,
gardenmaeve,
garnerdillon,
gavied509,
gbemis,
gbpereira,
gbrewer,
gcloud94,
gdeyoung,
gebue,
gelenika,
geminidragon,
gemira,
genbug,
genogenie,
gentlesoul,
georgeblack,
georgepickles,
geoski,
gerda24,
germain1,
gertyrose,
ggdchip,
gghlyon,
giarcsllim,
gieskem,
gill14,
gillskill,
gilsm0m,
giova53,
gl367,
glashelder,
glberg,
glenda701,
glicaj,
gloris69,
glsk,
gmheck,
gmzabos,
gnagy5,
godlesswarriortm,
goelzerrf,
goffrey,
goggins,
gommersellen,
gonzo818,
googee3,
gorgonsit,
gort_industries,
gotauber,
govinpandian,
grace.,
grace.santone,
graigrai,
grasilda,
grazynastasinska,
greenscrubs,
greye,
gribblet,
grieve,
grimkin,
grisu33,
grok00,
grom,
gromozeka,
grondinm,
grumrill,
gsolano217,
gspier,
gst,
gtmh,
guercait,
guevfamily,
guitarist53188,
gunnip.olivia,
gunnroni1,
guruguru,
gwennie71,
gwhw,
gwynmor,
gythaogg2,
gzconf1,
gzconf2,
h.r.,
h3px,
hairygoats,
haleyarnette,
haleynicm,
halkruth,
halogne,
hammond family,
hamptongray23,
hanbag,
hand0532,
handows,
handreae,
hannah314,
hannahjwo,
hannahmg,
hannahrq,
hannahtheamazing,
hannamarielei,
hannanowak,
happy-accidents,
har6inger,
hardicnut,
harlequindoe,
harrycooke,
harrygrounds,
hartel,
hartwellc,
hasi_s.,
haskielr,
hasPI:NAME:<NAME>END_PI,
hatfights,
hPI:NAME:<NAME>END_PI.skj,
haversine,
PI:NAME:<NAME>END_PI,
hPI:NAME:<NAME>END_PIpaw,
hayitsdavi,
hazard2802,
hazef,
hazelhorse425,
hbb,
hbbuddy,
hbun,
hckiger,
hcps-philliptm,
healymk2,
heath75t,
heatherleach,
heatherotis,
heathv,
heikepe59,
helabi07,
helen_butland,
helen_fewlass,
helena_ErrorDupUsername,
helene coupPI:NAME:<NAME>END_PI,
helere,
helicity,
helloxeno,
hells_bells27,
heluna,
hendrens,
herdis,
hermannschwaerzler,
herondale123,
hester ,
hestie3,
hetts07,
heyguysitsliv,
heylei,
heyytheredude,
hfultonbennett,
hhendriks,
hial3,
hikarciu,
hilari4572,
hilit1983,
hillarygramlich,
hindenburg41,
hlkwd1,
hloliver,
hlp470,
hm99,
hnorab,
hoffi23,
holgerh,
hollygalluppo,
holmanart,
holzwege,
homermeyn,
hooplas,
hoothoot,
hopelessheron,
horkusone,
housegnome,
howdy2u,
hparker,
hsinger,
hudacko,
huffe32,
hugh12,
human_jenome,
humulus1,
hunterc273,
hussar,
hutchiebaby,
huxsterkate,
huy.duong,
hvh,
hwalkden,
hydrogene,
hyhopes,
hymnsf,
hypatia2012,
hypermonkey,
iDee,
iGoab,
iSPI:NAME:<NAME>END_PI,
iTinnitus,
i_bojtor,
iamfern,
iamgeorge15,
iamscotty,
iancl,
ianfinnesey,
ianhuk,
ibclc2,
icecoldjjs,
icm,
icortez,
icy76549,
igonzalePI:NAME:<NAME>END_PI,
ih18,
iiatridis,
ike.gabrielyan,
ikesplace,
ilajoie3,
ilinz,
illlookforit,
illvibetip,
ilovecats,
ilovesquidsbecause,
iluvdolfinz,
imagine57,
imhotep.ojha,
imketys,
impossibleiseasy,
imrexus,
imtushay,
inab.ecker,
incibkaya,
inercy,
infinityLTFS,
inge janson,
ingridgalejs,
inkaplit,
invader,
inyene,
ioannes,
irenera2002,
irinashemonaeva,
irisheye,
irma12,
ironinleeds,
irridescentsong,
isPI:NAME:<NAME>END_PI,
isPI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
isamaz,
isault,
isjke,
isol,
itak365,
itanio,
ithicks,
itsJim,
itsanurkithing,
itsfullofstars,
itsmestephanie,
itsoteric,
itzHydrq,
ivanad92,
ivanovp,
ivantama,
izabelakrause,
j50yab,
j_bewick,
jabati13,
jack2490,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PIy0007,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jam2269,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PIety24,
PI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jasmine bPI:NAME:<NAME>END_PIiste ,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PI,
jaykePI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jazzy234,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jbakes89,
jbbeau,
jbon93,
jboo,
jbrace13,
jbundy,
jbvm,
jcdPI:NAME:<NAME>END_PI,
jclowry14,
jcobbholmes,
PI:NAME:<NAME>END_PIcolPI:NAME:<NAME>END_PI,
jcsuperfly,
jd10,
jdcharteris,
jdemers,
jdhouston,
jdure24,
PI:NAME:<NAME>END_PIeffabre,
jeffheif1,
jefftheengineer,
PI:NAME:<NAME>END_PIimer,
PI:NAME:<NAME>END_PIemaus,
PI:NAME:<NAME>END_PIemaverick,
PI:NAME:<NAME>END_PIen_gupta,
jenbaby0122,
jenbflower,
jengee,
jenich,
jenmcd,
PI:NAME:<NAME>END_PI boPI:NAME:<NAME>END_PI,
jennandrews,
jennfurr,
jennifer_greenfield,
jennysellmark,
PI:NAME:<NAME>END_PIeremiah.mPI:NAME:<NAME>END_PI,
jerome48,
PI:NAME:<NAME>END_PIeronobo,
PI:NAME:<NAME>END_PIeskarp,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
jessbou4860,
jessica9291,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIcahill,
jftex22,
jgeschke,
jgjknight,
jgraber,
jgrablin,
PI:NAME:<NAME>END_PI,
jhook,
jhuey,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIim_pea,
PI:NAME:<NAME>END_PIimjacknjohnnie,
jimtxmiller,
PI:NAME:<NAME>END_PIinxo,
jinxxx07,
jjbluejay,
jjlvscj,
jjohntaylor,
jjonmyown,
jkneb,
jkolb01,
jland,
jmalcomb,
jmayhew,
jmccluskey,
jmkwon,
jnick753,
jo0oley,
jo_colsal,
joanne82,
jochair,
jodic,
johnmoore84,
johnny_duke,
johnstar25,
johnstonaa,
johny50,
jojohase,
jojow,
jokergirl,
jokuunal,
jolkeur,
jollyrogered,
jPI:NAME:<NAME>END_PIon,
jomortimer,
PI:NAME:<NAME>END_PIonasmmiguel,
jonathanhl,
jongray,
jonnm4,
PI:NAME:<NAME>END_PI,
joolslee,
joopvanbirgelen,
jopo1987,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIordberry,
jordyman456,
jorgegil,
jorie.hh,
PI:NAME:<NAME>END_PIua,
josieandelliewong,
josiepegg,
PI:NAME:<NAME>END_PIy_t,
joyjoy,
jozsa,
jpcatanzaro,
jpsokkernut10,
jrddias,
jrfs,
jrinear,
jrlyttle23,
jrmw,
jrosese,
jrozyczka,
jrutter,
jschell42,
jscottlenz,
jsjames1,
jsout1234,
jPI:NAME:<NAME>END_PI,
jstaPI:NAME:<NAME>END_PI,
jteselle,
jtj616,
jtreisPI:NAME:<NAME>END_PI,
judi0491,
judybee,
PI:NAME:<NAME>END_PIudyross,
jujPI:NAME:<NAME>END_PI,
jules9687,
PI:NAME:<NAME>END_PIabPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jPI:NAME:<NAME>END_PIuana,
jPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
jumpingranch,
junemb,
junograham,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
justmehere,
jvigo,
jvilaseca917,
jwhit,
jwidPI:NAME:<NAME>END_PI,
jwmaritime,
jwmast,
jwmccomb84,
jwpepper152,
jxczer00,
jynto,
jypce,
k-s,
k.till1484,
k0zm1k,
kahbel,
kaity7007,
kaiwatha,
kakabeak,
kalbuzzy,
kaleem,
kamadden,
kamandizi,
kamilg,
kamv,
kandratii,
kanek,
kanliker,
karainio,
karanrajpal,
karenkantor,
karilyn1976,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI4s,
PI:NAME:<NAME>END_PI,
karu58,
kasiasa,
katacs,
kate_r,
kateboyd,
katelynn,
katemurray,
katesmccloud,
kathb,
kathfossil,
katie8107,
katiefanch,
katiekatt89,
katiekeyser_PS122,
katiekoplien,
katieofoz,
katieverett,
katkz,
katlittell,
katneils,
kats1285,
PI:NAME:<NAME>END_PIavic,
kazza26,
kb96,
kbaptist,
kboPI:NAME:<NAME>END_PI,
kPI:NAME:<NAME>END_PIi,
kPI:NAME:<NAME>END_PI,
kbreckangmail.com,
kcanfield,
kcoflondon,
kd14g,
kdroan,
keanna_165,
keel,
keeoeek,
kefitz,
PI:NAME:<NAME>END_PI,
kPI:NAME:<NAME>END_PI,
kelPI:NAME:<NAME>END_PIath,
kelPI:NAME:<NAME>END_PI,
ken.PI:NAME:<NAME>END_PI,
kenPI:NAME:<NAME>END_PI,
kenk205,
kennij1,
kenogo,
PI:NAME:<NAME>END_PIenzPI:NAME:<NAME>END_PI ,
kerenor,
kerobero,
kerri12,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
kevinPI:NAME:<NAME>END_PI,
keytofly,
kfox,
kglass4462,
kgriffin3611,
khauglund,
khbuhr,
kianthPI:NAME:<NAME>END_PI,
kibebe238,
kikametong,
kiki2008,
killerlou,
kimaire,
kimberly535,
kimboakimbo,
kimuenga,
kingaytata,
kingofspain1234,
kinseyatoz,
PI:NAME:<NAME>END_PIinstelli,
kioruke,
kip2511,
PI:NAME:<NAME>END_PIirPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIirPI:NAME:<NAME>END_PI,
kirbecker,
kirino,
kirstPI:NAME:<NAME>END_PI,
kitsmelf,
kiya9132,
kjanson,
kjericks510,
kjetikada,
kjyg,
kk58,
kkenmots02,
kknight.4,
klaartje,
klb1015,
kledoux1,
kleistf,
klepsch,
klrainey,
klroot,
klwaffle,
klynne28,
kmacdonald,
kmcafee1812,
kmiyoshi,
kmkulasza,
kmlm,
kmmills,
kmmunchie,
kmt65msu,
kmunday,
kmzim2015,
kneff39,
knyghtowl,
kokrui,
korinna999,
koshlap,
kotagabi,
kponce03,
kragh,
krakenzmama,
kreiman514,
krhill322,
PI:NAME:<NAME>END_PI,
kris79,
krisPI:NAME:<NAME>END_PI,
krockit,
krolov,
kruegnad,
kruseau,
krutki,
krwalkup,
kryfie,
ks14k,
kschlotfelt,
ksipe,
kswitalski,
kt171,
ktangell9,
ktarkin,
ktgeorgeous,
ktouchstone,
kucheryash,
kujata1,
kulinki,
kungpowell,
kuuipo83,
kuuurista,
kwirk,
kyamini,
kyaniab,
kyburg,
kyilmaz,
kyogreman,
kzajde1,
l4abergmann,
labocania,
lacey coleman ,
lachiester,
ladymink,
laenavarro,
lafiff PPI:NAME:<NAME>END_PI,
lailabee,
lainie52,
lankiel,
larmcd188,
PI:NAME:<NAME>END_PI,
lau3rie,
PI:NAME:<NAME>END_PIuginn,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
laura_germany_,
laurabjustesen,
laurabolt,
lauramaywigby,
laurasamson,
laurawhyte,
laurelbcr,
PI:NAME:<NAME>END_PI ,
laurenbreining,
laurenriv,
laurins,
layka13,
PI:NAME:<NAME>END_PIman,
lbaracchi,
lbriard,
lchad,
lci17410,
lcourtneysmith,
lcw21,
leahPI:NAME:<NAME>END_PI,
leahlou99,
leannk,
lechaussette,
leeboy25,
leetcat,
leio,
leire,
lemurs366,
lena2289,
leo PI:NAME:<NAME>END_PI,
leohelm,
leonidas907,
leonov1,
leopardi,
leopardspots12344,
lesbPI:NAME:<NAME>END_PI,
lesfromages,
PI:NAME:<NAME>END_PI,
lesley.mazeyhotmail.co.uk,
PI:NAME:<NAME>END_PI,
leupster,
levint,
lewnich,
lexij11,
lfv,
lg63laddhotmail.com,
lgiannini201,
lguidera,
liam2390,
lianderson,
lianne_m,
libervurto,
lieselottl,
lifesart,
ligaron,
lightness,
lilacwood,
lilapot,
lilico,
lilmonster4evs,
lilybloom,
limelikelemon,
linda.kPI:NAME:<NAME>END_PI,
lindapnoe,
lindawithblueeyes,
lindek,
lindsay.PI:NAME:<NAME>END_PI,
lindseyb420,
lindylooo1,
lineds,
lioncub12,
liondave,
lisacorewyn,
lissasmom,
litlewolf2,
littlebeard,
littlebug47,
livlorton,
lizardo,
lizbuffy,
lizmaple,
lizplanet,
lizzyshoe,
lizzyvet1,
ljalthoff,
lkirshaw,
llara,
llavoie,
lmbloom8903,
lmcco,
lmcmillin,
lmd6508,
lmf76,
lmgumby,
lmhornig,
lmusytschuk,
loader96,
lobotupgrade,
lobstersocks,
lobusparietalis,
lodonzo,
loerie,
loggins,
loisendaan,
loketimes,
lolobaba1,
lolonaze,
lolortie,
lolpus,
lolymie,
lonelyviolist,
lori2u,
lotsofloudlaughing,
lou7428,
loubelle66,
louisdPI:NAME:<NAME>END_PI,
louiseee,
loupdethies,
lovelysweetpea,
lpage,
lPI:NAME:<NAME>END_PI,
lpdPI:NAME:<NAME>END_PI,
lPI:NAME:<NAME>END_PI,
lpspielPI:NAME:<NAME>END_PI,
lrigPI:NAME:<NAME>END_PI,
lPI:NAME:<NAME>END_PI,
lstePI:NAME:<NAME>END_PI,
lstePI:NAME:<NAME>END_PIki,
lszatmary,
ltakiguchi,
luca-chimp,
lucashh,
lucy55,
lucycawte,
lucyyyr,
ludyboots,
lueasley,
luisandresPI:NAME:<NAME>END_PIz,
lukejballPI:NAME:<NAME>END_PI,
lukeonzoonivere,
lula0502,
lula14230,
lupham,
lusihuei0504,
luxray978,
luxtina,
lwerden2uwo.ca,
lyleje9,
lyndsey1987,
lyneille,
lyraloo,
m,
m1saac,
m3ganbarkPI:NAME:<NAME>END_PI,
mIl0van,
m_tennison,
maaikell,
PI:NAME:<NAME>END_PIarten_k,
PI:NAME:<NAME>END_PIaodha,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIrown,
mackenzie.mr,
maddimendoza,
madeinspace,
madeleinen14,
madelinethespy,
madison young,
madsterr,
maewe,
mafgross,
mafintel,
magaa,
magdalen_n,
maggiea2598,
magic5379,
magnoliahigh,
maheer425,
mahynoor,
majac,
majportugal,
makaylamay,
makenziedespres,
malachi,
PI:NAME:<NAME>END_PIcolm PI:NAME:<NAME>END_PI,
malforma,
mallory9163,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
mamamuh,
PI:NAME:<NAME>END_PIammyflo,
PI:NAME:<NAME>END_PIdellamom,
mandiwaite,
PI:NAME:<NAME>END_PIandyaldridge,
mandymayhemxo,
PI:NAME:<NAME>END_PIantide,
manxkats,
maoa86,
mapat,
mapper,
mapreader4,
mar10g,
PI:NAME:<NAME>END_PIara,
PI:NAME:<NAME>END_PI,
marc085,
march3258,
marci61456,
marciasou,
PI:NAME:<NAME>END_PIargef,
PI:NAME:<NAME>END_PIargie,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI ,
PI:NAME:<NAME>END_PIiafanning,
PI:NAME:<NAME>END_PIariannas,
PI:NAME:<NAME>END_PIariarmoreda,
mariechloe,
marije2410,
marijke.desmet,
marina83,
marinadoukaki,
marjanneo,
PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI,
mark2004,
markhu,
marlfox580,
marrinertg4592,
mars118,
mars26,
marssociety,
martaczc,
martinbrooks,
martinsandvik,
martybeans,
marvelacad,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIaryann,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIiaslilPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIildak,
mattb1792,
matteo1297,
matthewsarro,
mattiab,
mattlarrea96,
matty406,
maureenmccarthy,
mauricio urePI:NAME:<NAME>END_PI,
max,
maxdav01,
maxhy,
maxioi,
maxozaur,
maya011,
mayah24,
mayakiona,
maylu,
mbadger,
mbailey9573,
mbaselga,
mbatla,
mbosshard,
mccauleysa,
mccreery64,
mcgeea95,
mcgovernnj,
mcheek21,
mcintold,
mckinlayross,
mcoates,
mcoc,
mcolston,
mcru1601,
mdlw,
mdomaille,
meegja,
meerclar,
meganc30,
meganesmith539,
megankieran,
meganswanson,
mejones,
mekukat,
mel.s,
mel907,
melach,
melendil,
PI:NAME:<NAME>END_PI,
melissasmith08,
melvinj,
meme172,
memorymays13,
mercedes123,
merra1979,
metridious,
mflannigan,
mfolk77,
mfrisbie8,
mgarcia.eva,
mgarman,
mhammergren,
mheitm,
mhxyqwe123,
mi77ian,
mialumme,
miatafan,
micfil,
micha2718l,
PI:NAME:<NAME>END_PI,
michael971,
PI:NAME:<NAME>END_PI,
michaelzucker,
michal108,
PI:NAME:<NAME>END_PIlocke,
micmac274,
midnightiscute,
midnightisfluffy,
migrill,
PI:NAME:<NAME>END_PIuel,
mikaselm,
PI:NAME:<NAME>END_PIke.PI:NAME:<NAME>END_PI,
mikej45,
PI:NAME:<NAME>END_PIikelinz,
mikem2,
mikes269,
mikestill,
mikkajones,
mikyuzumaki,
milano,
milchtier,
milechampion,
milena.PI:NAME:<NAME>END_PI,
milenski,
milesnoir,
milesspencer,
milksneaker,
milktime,
mill9,
millbrook3,
mimatronic13,
minervadreaming,
miniMinN,
minty737,
mireillebocksberger,
mirl,
missadventuregm,
missbland,
missdriller,
missk2u,
missmomma86,
misterwoodward,
misteryoung,
mistletoe,
mitata,
mitchell631,
mitsyamarsupial,
mixu74,
mizliz,
mizrPI:NAME:<NAME>END_PI,
mjac,
mjelse,
mjohn15,
mjourney,
mkenz1099,
mkmanos,
mkmcguir,
mkub,
mlambert27,
mleinspenner,
mlhanna,
mlsacg,
mlx0000,
mm2223,
mmadden542,
mmangos,
mmartarano,
mmatessa,
mnowy,
moacir augusto coutinho,
moefinley,
moex0125,
mogget_storm,
molllyequinnn,
mollmsd26,
mollus,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
momendes,
monaxue,
monbut,
money_princess,
monica moscatelli,
monicahaponski,
monkettales,
monkeyboy23,
monoculars,
monsol,
moolyballerina,
moomoohk,
moon001,
moonpie68,
morage,
moreno.meghan,
mosaic_world,
mosey_9,
mourningdove,
moxiemouse,
mpazrp2006,
mpeoples,
mpobocik,
mpriestman,
mqq,
mr.lee,
mrObbens,
mr_seeker,
mreynolds81,
mrkjad,
mrmanatee99,
mroliverridley,
mrsalphageek,
mrscoyle,
mrsmarriage,
mruffing13,
mrvaidya,
mrybczyn,
ms.physics,
ms4225,
msaligned,
mschwirck,
mseyler,
msnafziger,
mspooner,
msterreb,
mswmom,
mtjan2014,
mtornese,
mtran97,
mturman,
mudkip201,
mueslifix,
muis24,
mujumdarsr,
mulehollandaise,
murtaghc,
music788,
mutiertespinne,
mx46myra,
myfriendemma,
mynightmare1,
myrddin,
mysteryparry,
myuniverse,
myximagination,
n13j,
n165741,
na215975,
naaninn,
nadavbensh,
nadine gorman ,
nadiribro,
nadjahersacher,
nads,
naffi,
nafnaf,
nairobinights,
najuris,
nakilah,
naminag,
namtansign,
nanou,
nanselmus,
naomizf,
napdaw,
naralara,
natasha-gray,
natban1,
natbosPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
nate,
nate.liz.white,
nathandench83,
nathanuy1,
natkaa,
natkomba,
nature,
natwins,
naumenko.pavlik65,
navegante,
navill0948,
nayerakhairat,
naynayfalife,
naysin27,
nbcdoe,
nbetzca04,
ncmastroddi,
ndarbysh,
ndw753,
neanderthaler6,
necoras,
neebobs,
neece_haynes,
neidzwiedz,
neitsa,
nelli,
neverendingo,
neverett,
newnewschool,
newyorker570,
ng15,
nh,
nicchic8402,
PI:NAME:<NAME>END_PI,
niceday1422,
nicestjerk,
PI:NAME:<NAME>END_PIichollsm,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIherPI:NAME:<NAME>END_PI1,
PI:NAME:<NAME>END_PIico61,
PI:NAME:<NAME>END_PI ,
nicolec8,
nicoleflynn18,
nicostone,
nieja PI:NAME:<NAME>END_PI,
nigeq,
night,
night_dust,
niirilli,
nikania,
nikia11,
nikolatt,
nikolosp,
nina ,
ninjabel,
nipnlap,
nirving123,
nitramko,
niv1,
niyer,
njcusack,
njwwright,
nkarn,
nlbookworm16,
nmaad083,
nmc19892004,
nnova,
noblehomeschool,
nogoodkris,
noidedpanda,
noitallgirl,
nola1034,
nolanedwin,
nomanslogin,
nonnel,
nonword,
noonizooni,
nopedose,
norauk3,
noromo,
northcrunk,
northernflower,
northernlimitptv,
nosnibor3,
notgucci,
noxvix,
nozee,
np33,
nrajkarnikar3,
ns22121,
nsm,
nsmel,
nspacefire,
nstoker,
nthrockmorton,
number1suzuki,
nutznix,
nwstone,
nyrmta,
nzRichardS,
nzanga,
obaratch,
oberon1066,
obnebion42,
ocbrown,
ocean171,
odele,
ohecbamboozledagain,
ohmanen,
oiramm,
ojacobs21,
oldapartment,
olekkurszewski,
oleksaberik,
olems,
oli123456789,
oliverhodson,
oliverlevitt,
olivers,
olivia.PI:NAME:<NAME>END_PI,
oliviarrrrrr15,
olivier,
ollyc,
ollydavey,
olsonl,
omicronCeti,
omnicron13,
oneill4,
oneillg2g,
oneoneone,
onetheycalldru,
onlylobster,
onnikur,
oo.adeyemo,
ooburai,
orangebird,
oraulins,
orchinati,
oriza,
oroanto,
osanna,
oscarmental,
osprey_neaves,
ossoz,
otac0n,
ottecktom,
owenfay,
p.titchin,
paLLadium2,
pablos87,
padfoothp,
pagh1,
paigeautumn,
paigefigone,
paigefroebe,
paka6789,
paleogen,
palmin,
pamba,
paminaz,
pampalini,
panPsax,
panek_attack,
paolojs,
paot,
pappasadrian,
pashley108,
patchwork,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
pPI:NAME:<NAME>END_PIinv,
pPI:NAME:<NAME>END_PIodiPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
paula33,
PI:NAME:<NAME>END_PIulPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIulgregorylang,
PI:NAME:<NAME>END_PI,
pPI:NAME:<NAME>END_PIel_PI:NAME:<NAME>END_PIkPI:NAME:<NAME>END_PIski,
pazithigallifreya,
pckkelly,
peachblossem,
pearcec,
pearlthetiger,
PI:NAME:<NAME>END_PIson.PI:NAME:<NAME>END_PIrea,
PI:NAME:<NAME>END_PIearsov,
PI:NAME:<NAME>END_PI,
pedi59,
peekay42,
pegk56,
peiftronica,
peinguinsandpolecats,
pekkish,
pendPI:NAME:<NAME>END_PI,
penguin1221,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
pepys1,
PI:NAME:<NAME>END_PI,
perkele91,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI3,
pPI:NAME:<NAME>END_PI_48cck,
PI:NAME:<NAME>END_PI,
peter1234567,
PI:NAME:<NAME>END_PI,
petrack,
PI:NAME:<NAME>END_PI,
pg338,
pgiroux,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIa,
phkPI:NAME:<NAME>END_PIund,
phoenix1284,
phraps,
phsc1001project,
pi2fly,
pianissimo,
pianokungfu,
piccolino,
pickle777,
picklesplays,
pigron,
pilpolly,
pimentelius,
pinguingud,
pinkpuppy,
pinkus,
pinkynz,
piotrek1010,
piotrexmeow,
pioup,
piper73,
pippo0815,
piratebrit,
pixelmesh,
pjjhurk,
pjp1959,
plambert78,
planet059,
planetari7,
playmobil,
plev13,
pmborodin,
pmgreen,
pogostickies,
pokedragonboy,
pokodinero,
polanski,
polly1,
polo24,
ponyry,
popocatepetl,
poppy42,
poppykentucky,
poundmaya,
prcromer,
priancohen,
primatelady,
princessclockie,
princesspeet,
prl,
protagoras42,
psivewright,
pssbanks,
pubgypsy,
purplegedge,
puti,
pyrosomida,
qingdou,
quarkcharm,
questathon50,
quetzalc,
quiet,
quinnr,
quinnrice127268,
quizmike,
r2rook,
r4s1,
racegirlj,
rachable,
rachaelfacey9,
rachelha23geo,
rachelklose,
rachie0204,
raddas,
radek_p,
radfordr,
radulfr,
rafael,
rafucho47,
ragingreshiram723,
rainbowhamster,
rainer,
rairai600,
raito,
ralab,
ralph.livockgmail.com,
ram2009,
ramberts,
ramoratz,
ramosc07,
ramsaut,
randolmm,
random_awesomeness,
randthor,
raphaelmaier,
raphi615,
rartho,
rascheper,
rashaveraka,
rastamon888,
rathole,
ravendrop,
ravenik45,
razvy97,
rbarba,
rbundy,
rcmason8790,
rcookman,
rcr14,
rderival,
reaganakgPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIbbi,
PI:NAME:<NAME>END_PI,
rebecca1104,
rebekahal,
recepturka,
recrea33,
redchaz57,
redragonPL,
redzep66,
reemab4,
regatavalon,
regularj,
reikirob,
reillyknight,
reimalkav,
reishi,
reki,
rema,
renato24,
renee corlett,
rennbird,
res96,
retiredbeep,
reydi,
reynaldo yanePI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIold,
reynoldsjake1997,
rPI:NAME:<NAME>END_PI,
rfolPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
rhys123,
rPI:NAME:<NAME>END_PI,
ribbit21,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIid84,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
riverdrifter61,
PI:NAME:<NAME>END_PI,
rjane314,
rjbrown1,
rjjm,
rjPI:NAME:<NAME>END_PI,
rkPI:NAME:<NAME>END_PI,
rks7257,
rlcantwell,
rllrllrrlrrl,
rloPI:NAME:<NAME>END_PI,
rmana,
rmcavoy,
rmklaus12,
rmolinand10,
PI:NAME:<NAME>END_PIorden,
rnavajas,
rob054,
PI:NAME:<NAME>END_PI_moore75,
PI:NAME:<NAME>END_PIorPI:NAME:<NAME>END_PI,
robbiederoo72,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
rocketdriver,
PI:NAME:<NAME>END_PIgerwepf,
PI:NAME:<NAME>END_PIguePI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIknrn,
roland10,
PI:NAME:<NAME>END_PIronPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIss,
rosie467,
rosPI:NAME:<NAME>END_PI_lol,
rosrac,
rotband,
rotelippen,
rowejhs,
roxiesal73,
rphubbard,
rprblk,
rpricedavies,
rqj123,
rrpbgeek,
rrramtin,
rschini,
rsissons,
rsnberry,
rsuroland,
rubyPI:NAME:<NAME>END_PI,
rubysolitaire,
rudim2014,
ruimacaco,
rush1125,
rusthen,
rutho13,
ruthparker,
ruthr,
rverghese20,
rvreeke,
rwbcanada,
ryan-rsa,
rPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
ryPI:NAME:<NAME>END_PI,
ryber1,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
s16947,
s17010,
sab2163010,
sabine-hh,
sabse,
sabsi1608,
saganesque,
sahil160,
PI:NAME:<NAME>END_PI,
sPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
salPI:NAME:<NAME>END_PI,
sPI:NAME:<NAME>END_PI13,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
sPI:NAME:<NAME>END_PI,
sambuca,
samg05,
samikes,
PI:NAME:<NAME>END_PI9,
samPI:NAME:<NAME>END_PI,
samvdb,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
sanPI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
sandPI:NAME:<NAME>END_PI,
sandyb777,
sangroff,
sanguinefiend,
sara_haPI:NAME:<NAME>END_PI,
sarah PI:NAME:<NAME>END_PI,
sarah PI:NAME:<NAME>END_PI,
sarah.g89,
sarahanneingPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
sarahelizabeth,
sarahh1193,
sarahkdavis21,
sarahmjl,
sarahruddell,
sarapotato,
sararae,
sararePI:NAME:<NAME>END_PI,
sarasefton777,
sarinozi,
sarje,
saroka,
sascha1es,
sashaxxx,
sashifoo,
saskia.k,
saule81,
saurabh_sb,
savagehenryceltic,
savagelovesinger,
saykojack,
sazzlecat,
sbardo,
sbeltrami2019,
sburda81,
scarletfeather,
schaeferhomeschool,
schaep,
schatzy,
schnaffeltier,
schneva,
schoolpsych77,
schreibvet,
schrodinger_kat,
sciencequiche,
scisarah,
scmorris,
scooterscooby,
scottm,
scoutcain,
scscottnz,
scurf,
scytheavalon,
sdalessandro,
sdev7,
sdkprodigy,
sdmorris,
seacliff5049,
seangoldstein,
seba.ka,
sebasish,
secrone,
sehindle,
selmarsh,
semayilmaz,
semiface,
senicalvin,
sensor4,
senv,
sequoiaforest,
sfmedusa,
sganon,
sgibson,
sgray8144,
sgutierrez1993,
shadow2056,
shahd ElPI:NAME:<NAME>END_PI,
shahino6,
shainacopeland,
shamaree copeland,
shananigan6326,
shanella,
shanher,
shannacherie,
shantimorrell,
shaoisaac,
sharathsarangmath,
sharoni.p,
sharrysharry,
shauna_ss,
shawnroderick11,
shayma...,
shazzyP,
shdybr8,
sheenabeena,
shekarbr,
shellie wallace,
shepardk20,
shero777,
sherritom,
sherwinptgmb,
shinebrightlea,
shmarla,
shmo,
shmooshy,
shocko61,
shoeman,
shortypantz,
shutterbugdi,
siberia2sumatra,
sigmapi10,
sijo,
silentq,
silo,
simmonsrzoo,
simona,
simona_12,
simond0,
simone53,
simonk1987,
simonliddle,
simonpopp,
simonvandenberg,
siouxiesue,
sirbertgarcia,
siren5298,
siwanicki,
sixpacktou,
sixxsgirl,
sjacobson1112,
sjc74,
sjcguk,
sjekkerton,
sjhwriter2,
sjwk,
skandlikar,
skavanagh,
skbarks,
skcool,
skepticHominid,
skilaru,
skintchef,
skiola,
skirtedrunner,
skity5,
sklo,
skorbolamid,
skrh,
skrzypkima,
skweek,
skybabe,
skyelight,
skyfundjr0520,
skykai,
skywatcher100,
slamdinista,
slidess,
slieberthal,
sloanes12,
slone02,
slowkow,
smccluskey1982,
smdu,
smfoote,
smhirt,
smithpa01,
smitjam,
smriemann,
smvilla,
sn322929,
sn328797,
sn349849,
sn363633,
snakeshit,
snappa,
snickermonkey,
snowdragon,
snowflake1,
so211,
soesoe,
sofieET,
soflynn,
sokrates49,
solenevermont,
solomonfolk,
sommakia,
sonictruth,
sontheold,
sopcat,
sophie00,
sophie8156,
sophydj,
sosmond,
soulselller,
soundgrl,
southofnonorth,
soyopopo,
sp515507,
spacegazer,
spakd07,
spamheadsmum,
sparow01,
spbf81,
speakofthewolf,
spearleyez,
spekky4eyes,
speters17,
spiderbui,
spinachravioli,
spinblade117,
spiralhunter,
spitysh,
spook34653,
sporter72,
spotharriet,
spu00crm,
squirrella,
squishycakes,
srallen,
sramsdale,
sroosa,
srs106srs,
srswann,
ssPETERss,
ssaPlacerias,
sschmeiser,
sschmidt,
ssmikey,
ssmith6519,
sstafford4,
stantastic,
starchitect,
stargirlmoon,
starhunter11,
starmag,
starman007,
starpower1,
starrypawz,
starwig,
steblak,
steeleyc,
stefan.k,
stefanie_caspari,
stefanmago,
steffbarPI:NAME:<NAME>END_PI,
steffifee,
stefkova,
steiche,
stengleinjl1,
step.poulain,
stephen,
stephen.PI:NAME:<NAME>END_PI,
stephPI:NAME:<NAME>END_PI,
stepherbot,
stephkolenda,
steve67,
stevemiller99,
stevenjohnston2017,
stevensteven,
stevewales,
stfrue,
stinapham,
stitz,
storki,
stpage1,
strategy,
strawmeadow,
stripey,
strubeldoc,
stsearcher,
stsf1234,
stuartaylor57,
stuey273,
stuk755,
stweedale,
suburbanastronomer,
sue_welfare,
sueking,
sugo,
suji3000,
sumamaru,
suncoup,
sunshinegirl,
suzannebradbury,
suzer14,
suzi78uk,
svart,
svengooliehigh,
svg5595,
swamprunner,
swapeman,
swertkin,
swsahara,
syberknight99,
sydself,
syl.schmid,
sylphia,
sylvain denPI:NAME:<NAME>END_PI,
synapse890,
syndarella,
syracon,
syzooniverse,
t.butra,
taboret,
tactoe,
tagori,
tahall3,
tahoedox,
tai_shan,
talm,
tamma,
tammylee67,
tan sin yi,
tanja92,
tapirsun,
tardigrade_girl_9,
tarelena3004,
tasnot,
tatoon37,
taylorv212,
taylour,
tbjerkga,
tbm,
tbrinton89,
tc29121981,
tcoriell,
tecannon,
technogourmet,
teekiteasy,
teemie,
teilhaft,
telophase,
temporaryblank,
tengnoth,
terryantrim,
tessa-virginia,
tfmorris,
tfrdawson,
tgoodway-sims,
tgrkpr2000,
th123,
thackler,
thawar23,
theWisp2864,
the_irishman,
theanimalover,
theappleman,
thebookscout,
thecuriousiguana,
thedweeb,
theheyman,
thehp,
thelinm,
themitchnz,
themrtroe,
thenoobest,
theofilos,
theotherme,
thepew,
theresxcfg,
thesquiddles,
thetruemilhouse,
thezohar,
thibaultmol,
thijszilla,
thimbleberry,
thisisjaid,
thoe,
thomas lovato,
thomdemm,
thoreslm,
thormster,
threadbare,
thula,
thurston_spaceman,
tien.duong,
tiffany9913,
tiffanyfields,
tiffanylian,
tigerpieatt.net,
tikijay,
tillmannhanna,
tillytots1232,
tilo,
tim_nutter,
timbok28,
timmerma7,
timothybouting2,
tina_pixie_,
tinamarash,
tines,
tinkapuppy,
tinkerlala,
tinope66,
tinykahoona,
tjbonzo,
tjeb,
tjilptjilp,
tjnelson5,
tkarian,
tkennedy371,
tkoc,
tlmaggiola,
tlusardi,
tmarchant,
tnl25,
tnladybug,
toasterparty,
toffee59,
tofly76,
tojad,
tolls,
tombombadil117,
tomburgerpie,
tomfinn1984,
tomtom47,
tomymx,
tonnerrebird,
tonnigonzalezPHYANTRO,
PI:NAME:<NAME>END_PIony PI:NAME:<NAME>END_PI,
tonyhallam001,
tonymor,
toosirrius,
tori12,
torman,
tottobauer,
tournavy,
tracev,
tracy.angel,
tracyshenton,
traianusrex,
tralliott,
tranceline,
traumeule,
travicehockey,
tre-86,
treebeard236,
tribb99,
trieloff,
tristia,
trixx,
trmayo,
trock5445,
trout13169,
trstn1,
trudauglow,
trusek,
ts.hbgr,
tsepui,
tsering,
tsheiffer,
tsoderquist,
tswittelsbach,
ttekieli,
ttessler,
ttfnrob,
ttilley,
tubasumpf,
tubbyoatmeal,
tuberdj,
tuittu,
tuleu-emma,
tweeg5039,
tweger,
twilightmoon,
twinklelittlestar,
twwittig,
txmeg,
tyhannahsaurus,
tyquana,
ufo1989,
ujansub,
ulikoehler,
ulrike krumscheid,
umalinda,
undergroundgirl,
underthearch,
undyne,
unefeepasse,
unidentified-scientific-object,
uninetverse,
upashka,
urbansonnet,
urgo42,
us7461,
uschaefer,
user_name,
uxia,
v1oletv,
vago82,
vaguePI:NAME:<NAME>END_PI_PI:NAME:<NAME>END_PI,
val08,
vale0293,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
valfer,
vallil,
PI:NAME:<NAME>END_PIski,
vanessa16480,
vanhelsing71,
vchantorn,
vdejesus,
vectorization,
vedun-z,
veenboer,
veeveequeen,
vega14k,
veggiepenguin,
vehtoh,
veleska,
velmanatov,
velthove,
vena,
venusstop,
veronicasnape17,
vertigopolka,
vestigial,
vfinnerty,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI24,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIaksd,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIgsn,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI.PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIankPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIizh,
PI:NAME:<NAME>END_PI,
vjbPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PImkevin,
vlad015,
vmerPI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
vmp32k,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIhellmPI:NAME:<NAME>END_PI,
vouchtieng ,
PI:NAME:<NAME>END_PIong,
vrohe,
vvvPI:NAME:<NAME>END_PI,
vx100,
vybzbild,
wackylama,
waddles310,
walczyszyn,
wallafives,
wamPI:NAME:<NAME>END_PI,
wamsleys,
wanda,
wannabe12,
wanni012,
warmworm,
warriorcatsophie,
watt_tyler,
waver,
wayne54,
wdecock,
weaverzinc,
webb1980,
webpixi,
werdnarb,
weromac,
wesley pawlowski,
wewa,
whaps,
whisperit,
white_squirrel,
whitelightnig,
whitetiger678086,
whooshdemon,
wibbi4,
wicked82,
wiebked,
wiedi,
wightdragon,
wildebeesty,
wildlifegisgirl,
wildlifephotographer,
wilktony,
willbarnes4,
PI:NAME:<NAME>END_PIjohnboy,
willowstar321,
willywonki,
windinhair13dd,
wingcli2014,
winnethpawtrow,
wisenheimer,
wiztess,
wizzydaz,
wojciech_k,
wolfgang1,
wosgood,
wtkoala,
wvvampire,
wwscoobydo,
wyjeong2,
x303,
x500,
xXOrchidChildXx,
x_ANT,
xanthi373,
xantoras,
xapril7x,
xbenr,
xfgiro,
xflyer,
xgraceygirlx,
xn11983,
xpeh,
yacoub,
yagerp,
yampol,
yankeegunner,
yasdollasign,
yatomi,
yedidiamesfin,
yellowbird,
yep2yel,
yesenialv,
yibet,
yinyangscurse,
yipekiay,
yk2064,
yoah,
yoda1,
yodahound,
yoey0419,
yona.s,
younge,
youthvolunteersdallaszoo.com,
yoyoman67,
yugoh,
yusuf,
yvonne brockwell,
yvonne mews,
yvonnefish,
zach_fortuna13,
zara,
zawarudo,
zazabard,
zbgirl6,
zdjuf0,
zedone_geezer,
zeezel,
zeng19930220,
zeynaz,
zhol123,
zholl25,
ziegl087,
ziglaser,
zits,
zlake,
zocker,
zoeellis,
zoharkapalan,
zoonie4893,
zoorik,
zootastic,
zooz,
zorglax,
zperez,
ztdavies,
zwanenburg,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PIka,
PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI (PI:NAME:<NAME>END_PI),
PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI,
PI:NAME:<NAME>END_PI,
임민서,
-Beach-,
.PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI,
00012853,
00054798,
02csmith,
08682293,
0Sanne0,
100dee,
1091010,
1157243,
11kb34,
11kralle,
120,
123726,
12coombesj,
136007,
13zulus,
141Dial34,
14SergioA,
14sonbri,
1510568040,
17lauk2,
1827,
1828Duke,
18acavinee,
18mclum,
18scooper,
1Ver,
1anita1,
1yioi87,
2004pamf,
20080112,
20161105,
20161169,
20161186,
20161230,
20161255,
20161287,
20161305,
20161761,
20162130,
2024-habdi,
20scuc,
212marin,
2140lacy,
214455,
214629,
214860,
215977,
216043,
216097,
21nikelove,
21perryb,
223327,
224043,
224265,
225578,
225593,
22lhaws,
2409,
25or6to4,
2609005,
2fay,
307flyfisher,
321Hanni,
3rdgradeelwood,
43corunna,
4dlabs,
56227isd,
5paws23,
5yF0Rc3,
62050isd,
62492isd,
62566isd,
64053isd,
64HaRtZa,
71099isd,
75473isd,
77636isd,
77keeg77,
7bluebird7,
7bthirdrock,
7cscience,
85martinis,
99bellam
</p>
</section>
</div>
module.exports = Authors
|
[
{
"context": "> and </span>\n <a href=\"https://github.com/kmalakoff/knockback-reference-app/\">Knockback.js Reference ",
"end": 523,
"score": 0.9580044150352478,
"start": 514,
"tag": "USERNAME",
"value": "kmalakoff"
},
{
"context": "ou by </span>\n <a href=\"https://gi... | app_management/views/credits.coffee | kmalakoff/knockback-reference-app | 1 | template_engine.templates.credits = """
<div data-bind="visible: credits_is_opened">
<div class='modal-backdrop'></div>
<div class="modal" data-bind="fadeIn: credits_is_opened"><div class="modal-body">
<div class='nav pull-right'>
<a data-bind="click: toggleCredits"><i class="icon-remove"></i></a>
</div>
<div class='pagination-centered'>
<a href="http://kmalakoff.github.com/knockback/">Knockback.js</a>
<span> and </span>
<a href="https://github.com/kmalakoff/knockback-reference-app/">Knockback.js Reference App</a>
<br/>
<span> are brought to you by </span>
<a href="https://github.com/kmalakoff">Kevin Malakoff</a>
</div>
<p></p>
<div class='pagination-centered'>
<span> With much appreciated dependencies on the </span>
<a href="http://twitter.github.com/bootstrap/">Twitter Bootstrap</a>
<span>, </span>
<a href="http://knockoutjs.com/">Knockout.js</a>
<span>, </span>
<a href="http://backbonejs.org/">Backbone.js</a>
<span> and </span>
<a href="http://underscorejs.org/">Underscore.js</a>
<span> libraries.</span>
</div>
</div></div>
</div>
""" | 17479 | template_engine.templates.credits = """
<div data-bind="visible: credits_is_opened">
<div class='modal-backdrop'></div>
<div class="modal" data-bind="fadeIn: credits_is_opened"><div class="modal-body">
<div class='nav pull-right'>
<a data-bind="click: toggleCredits"><i class="icon-remove"></i></a>
</div>
<div class='pagination-centered'>
<a href="http://kmalakoff.github.com/knockback/">Knockback.js</a>
<span> and </span>
<a href="https://github.com/kmalakoff/knockback-reference-app/">Knockback.js Reference App</a>
<br/>
<span> are brought to you by </span>
<a href="https://github.com/kmalakoff"><NAME></a>
</div>
<p></p>
<div class='pagination-centered'>
<span> With much appreciated dependencies on the </span>
<a href="http://twitter.github.com/bootstrap/">Twitter Bootstrap</a>
<span>, </span>
<a href="http://knockoutjs.com/">Knockout.js</a>
<span>, </span>
<a href="http://backbonejs.org/">Backbone.js</a>
<span> and </span>
<a href="http://underscorejs.org/">Underscore.js</a>
<span> libraries.</span>
</div>
</div></div>
</div>
""" | true | template_engine.templates.credits = """
<div data-bind="visible: credits_is_opened">
<div class='modal-backdrop'></div>
<div class="modal" data-bind="fadeIn: credits_is_opened"><div class="modal-body">
<div class='nav pull-right'>
<a data-bind="click: toggleCredits"><i class="icon-remove"></i></a>
</div>
<div class='pagination-centered'>
<a href="http://kmalakoff.github.com/knockback/">Knockback.js</a>
<span> and </span>
<a href="https://github.com/kmalakoff/knockback-reference-app/">Knockback.js Reference App</a>
<br/>
<span> are brought to you by </span>
<a href="https://github.com/kmalakoff">PI:NAME:<NAME>END_PI</a>
</div>
<p></p>
<div class='pagination-centered'>
<span> With much appreciated dependencies on the </span>
<a href="http://twitter.github.com/bootstrap/">Twitter Bootstrap</a>
<span>, </span>
<a href="http://knockoutjs.com/">Knockout.js</a>
<span>, </span>
<a href="http://backbonejs.org/">Backbone.js</a>
<span> and </span>
<a href="http://underscorejs.org/">Underscore.js</a>
<span> libraries.</span>
</div>
</div></div>
</div>
""" |
[
{
"context": "in ['dev', 'prod']\n\t### name ###\n\tname:\n\t\tvalue: 'GridFW'\n\t\tcheck: (value)->\n\t\t\tthrow new Error 'Name expe",
"end": 517,
"score": 0.9991908669471741,
"start": 511,
"tag": "USERNAME",
"value": "GridFW"
},
{
"context": "e is 'string'\n\t###* Author ###\n\tau... | config/settings.coffee | coredigix/gridfw | 0 | # App consts
exports.app = app =
# modes
DEV: 0
PROD: 1
# params
PATH_PARAM : 0
QUERY_PARAM: 1
# default encoding
DEFAULT_ENCODING: 'utf8'
### this file contains app default settings ###
exports.settings=
####<========================== App Id =============================>####
mode:
value: 'dev'
default: (value)-> ['dev', 'prod'].indexOf value
check: (value)->
throw new Error "Illegal mode #{mode}. Expected 'dev' or 'prod'" unless value in ['dev', 'prod']
### name ###
name:
value: 'GridFW'
check: (value)->
throw new Error 'Name expected string' unless typeof value is 'string'
###* Author ###
author:
value: 'GridFW@coredigix'
check: (value)->
throw new Error 'Author expected string' unless typeof value is 'string'
###* Admin Email ###
email:
value: 'contact@coredigix.com'
check: (value)->
throw new Error 'Email expected string' unless typeof value is 'string'
####<========================== LOG =============================>####
###*
* log level
* @default prod: 'info', dev: 'debug'
###
logLevel:
value: 'debug'
default: (app, mode)->
if mode is 0 then 'debug' else 'info'
check: (level)->
accepted = ['debug', 'log', 'info', 'warn', 'error', 'fatalError']
throw new Error "level expected in #{accepted.join ','}" unless level in accepted
####<========================== Router =============================>####
###*
* Route cache
###
routeCacheMax:
value: 50
check: (max)->
throw new Error 'max expected positive number greater then 10' unless Number.isSafeInteger(max) and max >= 10
###*
* Ignore trailing slashes
* off : ignore
* 0 : ignore, make redirect when someone asks for this URL
* on : 'keep it'
###
trailingSlash:
value: 0
check: (value)->
throw new Error 'trailingSlash expected in [0, false, true]' unless value in [0, off, on]
###*
* when 1, ignore path case
* when on, ignore route static part case only (do not lowercase param values)
* when off, case sensitive
* @type {boolean}
###
routeIgnoreCase:
value: on
check: (value)->
throw new Error 'routeIgnoreCase expected in [false, true, 1]' unless value in [1, on, off]
####<========================== Request =============================>####
###*
* trust proxy
###
trustProxyFunction:
#TODO
value: (req, proxyLevel)-> on
check: (fx)->
throw new Error 'trustProxyFunction expected function' unless typeof fx is 'function'
####<========================== Render and output =============================>####
###*
* Render pretty JSON, XML and HTML
* @default false when prod mode
###
pretty:
value: on # true if dev mode
default: (app, mode)-> mode is 0
check: (value)->
throw new Error 'pretty expected boolean' unless typeof fx is 'boolean'
###*
* Etag function generator
* generate ETag for responses
###
etagFunction:
#TODO
value: (data)-> ''
check: (fx)->
throw new Error 'etagFunction expected function' unless typeof fx is 'function'
###*
* render templates
* we do use function, so the require inside will be executed
* inside the app and not the compiler
###
engines:
value: (app, mode)->
engines = Object.create null
engines['.pug'] = require 'pug'
return engines
check: ->
# TODO
###*
* view Cache
* @when off: disable cache
* @when on: enable cache for ever
* @type {boolean}
###
viewCache:
value: on
default: (app, mode) ->
mode isnt 0 # false if dev mode
check: (value)->
throw new Error 'viewCache expected boolean' unless typeof value is 'boolean'
viewCacheMax:
value: 50 # view cache max entries
check: (value)->
unless number.isSafeInteger(value) and value >= 10
throw new Error 'viewCacheMax expected positive integer greater then 10'
views:
value:[
'views' # default folder
]
check: (value)->
unless Array.isArray(value) and value.every (e)-> typeof e is 'string'
throw new Error 'views should be a list of directory paths'
####<========================== Errors =============================>####
# Error templates
errorTemplates:
value: null
default: (app, mode)->
# dev mode
if mode is 0
'404': path.join __dirname, '../../build/views/errors/d404'
'500': path.join __dirname, '../../build/views/errors/d500'
# prod mode
else
'404': path.join __dirname, '../../build/views/errors/404'
'500': path.join __dirname, '../../build/views/errors/500'
check: (value)->
unless typeof value is 'object' and value
throw new Error 'ErrorTemplates a map of "Error-code" to "template path"'
for k,v in value
unless /^d[0-9]{3}/.test k
throw new Error "Error templates: Illegal error code: #{k}"
unless typeof v is 'string'
throw new Error "Error templates: errorTemplates.#{k} mast be file path"
return
# plugins
plugins:
value: {}
# default: (app, mode)->
# # dev or prod
# isDev = mode is 0
# # default logger
# 'gridfw-logger':
# require: '../gridfw-logger'
# level: if isDev then 'debug' : 'info'
# target: 'console'
check: (value)->
throw new Error 'plugins option expected map of plugins' unless typeof value is 'object' and value
| 37968 | # App consts
exports.app = app =
# modes
DEV: 0
PROD: 1
# params
PATH_PARAM : 0
QUERY_PARAM: 1
# default encoding
DEFAULT_ENCODING: 'utf8'
### this file contains app default settings ###
exports.settings=
####<========================== App Id =============================>####
mode:
value: 'dev'
default: (value)-> ['dev', 'prod'].indexOf value
check: (value)->
throw new Error "Illegal mode #{mode}. Expected 'dev' or 'prod'" unless value in ['dev', 'prod']
### name ###
name:
value: 'GridFW'
check: (value)->
throw new Error 'Name expected string' unless typeof value is 'string'
###* Author ###
author:
value: '<EMAIL>'
check: (value)->
throw new Error 'Author expected string' unless typeof value is 'string'
###* Admin Email ###
email:
value: '<EMAIL>'
check: (value)->
throw new Error 'Email expected string' unless typeof value is 'string'
####<========================== LOG =============================>####
###*
* log level
* @default prod: 'info', dev: 'debug'
###
logLevel:
value: 'debug'
default: (app, mode)->
if mode is 0 then 'debug' else 'info'
check: (level)->
accepted = ['debug', 'log', 'info', 'warn', 'error', 'fatalError']
throw new Error "level expected in #{accepted.join ','}" unless level in accepted
####<========================== Router =============================>####
###*
* Route cache
###
routeCacheMax:
value: 50
check: (max)->
throw new Error 'max expected positive number greater then 10' unless Number.isSafeInteger(max) and max >= 10
###*
* Ignore trailing slashes
* off : ignore
* 0 : ignore, make redirect when someone asks for this URL
* on : 'keep it'
###
trailingSlash:
value: 0
check: (value)->
throw new Error 'trailingSlash expected in [0, false, true]' unless value in [0, off, on]
###*
* when 1, ignore path case
* when on, ignore route static part case only (do not lowercase param values)
* when off, case sensitive
* @type {boolean}
###
routeIgnoreCase:
value: on
check: (value)->
throw new Error 'routeIgnoreCase expected in [false, true, 1]' unless value in [1, on, off]
####<========================== Request =============================>####
###*
* trust proxy
###
trustProxyFunction:
#TODO
value: (req, proxyLevel)-> on
check: (fx)->
throw new Error 'trustProxyFunction expected function' unless typeof fx is 'function'
####<========================== Render and output =============================>####
###*
* Render pretty JSON, XML and HTML
* @default false when prod mode
###
pretty:
value: on # true if dev mode
default: (app, mode)-> mode is 0
check: (value)->
throw new Error 'pretty expected boolean' unless typeof fx is 'boolean'
###*
* Etag function generator
* generate ETag for responses
###
etagFunction:
#TODO
value: (data)-> ''
check: (fx)->
throw new Error 'etagFunction expected function' unless typeof fx is 'function'
###*
* render templates
* we do use function, so the require inside will be executed
* inside the app and not the compiler
###
engines:
value: (app, mode)->
engines = Object.create null
engines['.pug'] = require 'pug'
return engines
check: ->
# TODO
###*
* view Cache
* @when off: disable cache
* @when on: enable cache for ever
* @type {boolean}
###
viewCache:
value: on
default: (app, mode) ->
mode isnt 0 # false if dev mode
check: (value)->
throw new Error 'viewCache expected boolean' unless typeof value is 'boolean'
viewCacheMax:
value: 50 # view cache max entries
check: (value)->
unless number.isSafeInteger(value) and value >= 10
throw new Error 'viewCacheMax expected positive integer greater then 10'
views:
value:[
'views' # default folder
]
check: (value)->
unless Array.isArray(value) and value.every (e)-> typeof e is 'string'
throw new Error 'views should be a list of directory paths'
####<========================== Errors =============================>####
# Error templates
errorTemplates:
value: null
default: (app, mode)->
# dev mode
if mode is 0
'404': path.join __dirname, '../../build/views/errors/d404'
'500': path.join __dirname, '../../build/views/errors/d500'
# prod mode
else
'404': path.join __dirname, '../../build/views/errors/404'
'500': path.join __dirname, '../../build/views/errors/500'
check: (value)->
unless typeof value is 'object' and value
throw new Error 'ErrorTemplates a map of "Error-code" to "template path"'
for k,v in value
unless /^d[0-9]{3}/.test k
throw new Error "Error templates: Illegal error code: #{k}"
unless typeof v is 'string'
throw new Error "Error templates: errorTemplates.#{k} mast be file path"
return
# plugins
plugins:
value: {}
# default: (app, mode)->
# # dev or prod
# isDev = mode is 0
# # default logger
# 'gridfw-logger':
# require: '../gridfw-logger'
# level: if isDev then 'debug' : 'info'
# target: 'console'
check: (value)->
throw new Error 'plugins option expected map of plugins' unless typeof value is 'object' and value
| true | # App consts
exports.app = app =
# modes
DEV: 0
PROD: 1
# params
PATH_PARAM : 0
QUERY_PARAM: 1
# default encoding
DEFAULT_ENCODING: 'utf8'
### this file contains app default settings ###
exports.settings=
####<========================== App Id =============================>####
mode:
value: 'dev'
default: (value)-> ['dev', 'prod'].indexOf value
check: (value)->
throw new Error "Illegal mode #{mode}. Expected 'dev' or 'prod'" unless value in ['dev', 'prod']
### name ###
name:
value: 'GridFW'
check: (value)->
throw new Error 'Name expected string' unless typeof value is 'string'
###* Author ###
author:
value: 'PI:EMAIL:<EMAIL>END_PI'
check: (value)->
throw new Error 'Author expected string' unless typeof value is 'string'
###* Admin Email ###
email:
value: 'PI:EMAIL:<EMAIL>END_PI'
check: (value)->
throw new Error 'Email expected string' unless typeof value is 'string'
####<========================== LOG =============================>####
###*
* log level
* @default prod: 'info', dev: 'debug'
###
logLevel:
value: 'debug'
default: (app, mode)->
if mode is 0 then 'debug' else 'info'
check: (level)->
accepted = ['debug', 'log', 'info', 'warn', 'error', 'fatalError']
throw new Error "level expected in #{accepted.join ','}" unless level in accepted
####<========================== Router =============================>####
###*
* Route cache
###
routeCacheMax:
value: 50
check: (max)->
throw new Error 'max expected positive number greater then 10' unless Number.isSafeInteger(max) and max >= 10
###*
* Ignore trailing slashes
* off : ignore
* 0 : ignore, make redirect when someone asks for this URL
* on : 'keep it'
###
trailingSlash:
value: 0
check: (value)->
throw new Error 'trailingSlash expected in [0, false, true]' unless value in [0, off, on]
###*
* when 1, ignore path case
* when on, ignore route static part case only (do not lowercase param values)
* when off, case sensitive
* @type {boolean}
###
routeIgnoreCase:
value: on
check: (value)->
throw new Error 'routeIgnoreCase expected in [false, true, 1]' unless value in [1, on, off]
####<========================== Request =============================>####
###*
* trust proxy
###
trustProxyFunction:
#TODO
value: (req, proxyLevel)-> on
check: (fx)->
throw new Error 'trustProxyFunction expected function' unless typeof fx is 'function'
####<========================== Render and output =============================>####
###*
* Render pretty JSON, XML and HTML
* @default false when prod mode
###
pretty:
value: on # true if dev mode
default: (app, mode)-> mode is 0
check: (value)->
throw new Error 'pretty expected boolean' unless typeof fx is 'boolean'
###*
* Etag function generator
* generate ETag for responses
###
etagFunction:
#TODO
value: (data)-> ''
check: (fx)->
throw new Error 'etagFunction expected function' unless typeof fx is 'function'
###*
* render templates
* we do use function, so the require inside will be executed
* inside the app and not the compiler
###
engines:
value: (app, mode)->
engines = Object.create null
engines['.pug'] = require 'pug'
return engines
check: ->
# TODO
###*
* view Cache
* @when off: disable cache
* @when on: enable cache for ever
* @type {boolean}
###
viewCache:
value: on
default: (app, mode) ->
mode isnt 0 # false if dev mode
check: (value)->
throw new Error 'viewCache expected boolean' unless typeof value is 'boolean'
viewCacheMax:
value: 50 # view cache max entries
check: (value)->
unless number.isSafeInteger(value) and value >= 10
throw new Error 'viewCacheMax expected positive integer greater then 10'
views:
value:[
'views' # default folder
]
check: (value)->
unless Array.isArray(value) and value.every (e)-> typeof e is 'string'
throw new Error 'views should be a list of directory paths'
####<========================== Errors =============================>####
# Error templates
errorTemplates:
value: null
default: (app, mode)->
# dev mode
if mode is 0
'404': path.join __dirname, '../../build/views/errors/d404'
'500': path.join __dirname, '../../build/views/errors/d500'
# prod mode
else
'404': path.join __dirname, '../../build/views/errors/404'
'500': path.join __dirname, '../../build/views/errors/500'
check: (value)->
unless typeof value is 'object' and value
throw new Error 'ErrorTemplates a map of "Error-code" to "template path"'
for k,v in value
unless /^d[0-9]{3}/.test k
throw new Error "Error templates: Illegal error code: #{k}"
unless typeof v is 'string'
throw new Error "Error templates: errorTemplates.#{k} mast be file path"
return
# plugins
plugins:
value: {}
# default: (app, mode)->
# # dev or prod
# isDev = mode is 0
# # default logger
# 'gridfw-logger':
# require: '../gridfw-logger'
# level: if isDev then 'debug' : 'info'
# target: 'console'
check: (value)->
throw new Error 'plugins option expected map of plugins' unless typeof value is 'object' and value
|
[
{
"context": "andlebars.render('Hello there {{ name }}', name: 'homie')\n .done((res) => should.match_expected(@han",
"end": 574,
"score": 0.9844018220901489,
"start": 569,
"tag": "NAME",
"value": "homie"
},
{
"context": "pected(\n @handlebars\n res(friend: 'r kell... | test/handlebars.coffee | slang800/accord | 0 | should = require 'should'
path = require 'path'
W = require 'when'
accord = require '../'
describe 'handlebars', ->
before ->
@handlebars = accord.load('handlebars')
@path = path.join(__dirname, 'fixtures', 'handlebars')
it 'should expose name, extensions, output, and engine', ->
@handlebars.extensions.should.be.an.instanceOf(Array)
@handlebars.output.should.be.type('string')
@handlebars.engine.should.be.ok
@handlebars.name.should.be.ok
it 'should render a string', (done) ->
@handlebars.render('Hello there {{ name }}', name: 'homie')
.done((res) => should.match_expected(@handlebars, res, path.join(@path, 'rstring.hbs'), done))
it 'should render a file', (done) ->
lpath = path.join(@path, 'basic.hbs')
@handlebars.renderFile(lpath, compiler: 'handlebars')
.done((res) => should.match_expected(@handlebars, res, lpath, done))
it 'should compile a string', (done) ->
@handlebars.compile('Hello there {{ name }}').done((res) =>
should.match_expected(
@handlebars
res(name: 'my friend').trim() + '\n'
path.join(@path, 'pstring.hbs')
done
)
)
it 'should compile a file', (done) ->
lpath = path.join(@path, 'precompile.hbs')
@handlebars.compileFile(lpath).done((res) =>
should.match_expected(
@handlebars
res(friend: 'r kelly').trim() + '\n'
lpath
done
)
)
it 'should client-compile a string', (done) ->
@handlebars.compileClient("Here comes the {{ thing }}").done((res) =>
should.match_expected(@handlebars, res, path.join(@path, 'cstring.hbs'), done))
it 'should client-compile a file', (done) ->
lpath = path.join(@path, 'client.hbs')
@handlebars.compileFileClient(lpath).done((res) =>
should.match_expected(@handlebars, res, lpath, done))
it 'should handle external file requests', (done) ->
lpath = path.join(@path, 'partial.hbs')
@handlebars.renderFile(lpath, { partials: { foo: "<p>hello from a partial!</p>" }})
.done((res) => should.match_expected(@handlebars, res, lpath, done))
it 'should render with client side helpers', (done) ->
lpath = path.join(@path, 'client-complex.hbs')
@handlebars.compileFileClient(lpath).done (res) =>
@handlebars.clientHelpers().done (clientHelpers) =>
tpl_string = "#{clientHelpers}; var tpl = #{res}; tpl({ wow: 'local' })"
tpl = eval.call(global, tpl_string)
should.match_expected(@handlebars, tpl.trim() + '\n', lpath, done)
it 'should correctly handle errors', (done) ->
@handlebars.render("{{# !@{!# }}")
.done(should.not.exist, (-> done()))
| 218464 | should = require 'should'
path = require 'path'
W = require 'when'
accord = require '../'
describe 'handlebars', ->
before ->
@handlebars = accord.load('handlebars')
@path = path.join(__dirname, 'fixtures', 'handlebars')
it 'should expose name, extensions, output, and engine', ->
@handlebars.extensions.should.be.an.instanceOf(Array)
@handlebars.output.should.be.type('string')
@handlebars.engine.should.be.ok
@handlebars.name.should.be.ok
it 'should render a string', (done) ->
@handlebars.render('Hello there {{ name }}', name: '<NAME>')
.done((res) => should.match_expected(@handlebars, res, path.join(@path, 'rstring.hbs'), done))
it 'should render a file', (done) ->
lpath = path.join(@path, 'basic.hbs')
@handlebars.renderFile(lpath, compiler: 'handlebars')
.done((res) => should.match_expected(@handlebars, res, lpath, done))
it 'should compile a string', (done) ->
@handlebars.compile('Hello there {{ name }}').done((res) =>
should.match_expected(
@handlebars
res(name: 'my friend').trim() + '\n'
path.join(@path, 'pstring.hbs')
done
)
)
it 'should compile a file', (done) ->
lpath = path.join(@path, 'precompile.hbs')
@handlebars.compileFile(lpath).done((res) =>
should.match_expected(
@handlebars
res(friend: 'r <NAME>').trim() + '\n'
lpath
done
)
)
it 'should client-compile a string', (done) ->
@handlebars.compileClient("Here comes the {{ thing }}").done((res) =>
should.match_expected(@handlebars, res, path.join(@path, 'cstring.hbs'), done))
it 'should client-compile a file', (done) ->
lpath = path.join(@path, 'client.hbs')
@handlebars.compileFileClient(lpath).done((res) =>
should.match_expected(@handlebars, res, lpath, done))
it 'should handle external file requests', (done) ->
lpath = path.join(@path, 'partial.hbs')
@handlebars.renderFile(lpath, { partials: { foo: "<p>hello from a partial!</p>" }})
.done((res) => should.match_expected(@handlebars, res, lpath, done))
it 'should render with client side helpers', (done) ->
lpath = path.join(@path, 'client-complex.hbs')
@handlebars.compileFileClient(lpath).done (res) =>
@handlebars.clientHelpers().done (clientHelpers) =>
tpl_string = "#{clientHelpers}; var tpl = #{res}; tpl({ wow: 'local' })"
tpl = eval.call(global, tpl_string)
should.match_expected(@handlebars, tpl.trim() + '\n', lpath, done)
it 'should correctly handle errors', (done) ->
@handlebars.render("{{# !@{!# }}")
.done(should.not.exist, (-> done()))
| true | should = require 'should'
path = require 'path'
W = require 'when'
accord = require '../'
describe 'handlebars', ->
before ->
@handlebars = accord.load('handlebars')
@path = path.join(__dirname, 'fixtures', 'handlebars')
it 'should expose name, extensions, output, and engine', ->
@handlebars.extensions.should.be.an.instanceOf(Array)
@handlebars.output.should.be.type('string')
@handlebars.engine.should.be.ok
@handlebars.name.should.be.ok
it 'should render a string', (done) ->
@handlebars.render('Hello there {{ name }}', name: 'PI:NAME:<NAME>END_PI')
.done((res) => should.match_expected(@handlebars, res, path.join(@path, 'rstring.hbs'), done))
it 'should render a file', (done) ->
lpath = path.join(@path, 'basic.hbs')
@handlebars.renderFile(lpath, compiler: 'handlebars')
.done((res) => should.match_expected(@handlebars, res, lpath, done))
it 'should compile a string', (done) ->
@handlebars.compile('Hello there {{ name }}').done((res) =>
should.match_expected(
@handlebars
res(name: 'my friend').trim() + '\n'
path.join(@path, 'pstring.hbs')
done
)
)
it 'should compile a file', (done) ->
lpath = path.join(@path, 'precompile.hbs')
@handlebars.compileFile(lpath).done((res) =>
should.match_expected(
@handlebars
res(friend: 'r PI:NAME:<NAME>END_PI').trim() + '\n'
lpath
done
)
)
it 'should client-compile a string', (done) ->
@handlebars.compileClient("Here comes the {{ thing }}").done((res) =>
should.match_expected(@handlebars, res, path.join(@path, 'cstring.hbs'), done))
it 'should client-compile a file', (done) ->
lpath = path.join(@path, 'client.hbs')
@handlebars.compileFileClient(lpath).done((res) =>
should.match_expected(@handlebars, res, lpath, done))
it 'should handle external file requests', (done) ->
lpath = path.join(@path, 'partial.hbs')
@handlebars.renderFile(lpath, { partials: { foo: "<p>hello from a partial!</p>" }})
.done((res) => should.match_expected(@handlebars, res, lpath, done))
it 'should render with client side helpers', (done) ->
lpath = path.join(@path, 'client-complex.hbs')
@handlebars.compileFileClient(lpath).done (res) =>
@handlebars.clientHelpers().done (clientHelpers) =>
tpl_string = "#{clientHelpers}; var tpl = #{res}; tpl({ wow: 'local' })"
tpl = eval.call(global, tpl_string)
should.match_expected(@handlebars, tpl.trim() + '\n', lpath, done)
it 'should correctly handle errors', (done) ->
@handlebars.render("{{# !@{!# }}")
.done(should.not.exist, (-> done()))
|
[
{
"context": "# GainText\n#\n# Martin Waitz <tali@admingilde.org>\n\nblock = require '../src/bl",
"end": 27,
"score": 0.9997977614402771,
"start": 15,
"tag": "NAME",
"value": "Martin Waitz"
},
{
"context": "# GainText\n#\n# Martin Waitz <tali@admingilde.org>\n\nblock = require '../sr... | test/block.coffee | gaintext/gaintext.js | 0 | # GainText
#
# Martin Waitz <tali@admingilde.org>
block = require '../src/block'
mona = require 'mona-parser'
{expect} = require 'chai'
describe 'block', ->
describe 'Paragraph', ->
paragraph = new block.Paragraph().parser()
it 'parses a single line', ->
expect(mona.parse paragraph, "Hello World.\n")
.to.eql([['Hello World.']])
it 'parses multiple lines', ->
expect(mona.parse paragraph, "Hello\nWorld.\n")
.to.eql([['Hello'], ['World.']])
it 'skips leading blank lines', ->
expect(mona.parse paragraph, "\nHello World.\n")
.to.eql([['Hello World.']])
it 'stops at blank line', ->
expect(mona.parse paragraph,
"Hello World.\n\n", allowTrailing: true)
.to.eql([['Hello World.']])
it 'stops at white-space only line', ->
expect(mona.parse paragraph,
"Hello World.\n \n", allowTrailing: true)
.to.eql([['Hello World.']])
describe 'indentation', ->
it 'rejects normal text', ->
expect(-> mona.parse block.indentation,
"Hello", allowTrailing: true)
.to.throw /expected indentation/
it 'accepts text indented with a space', ->
expect(mona.parse block.indentation,
" Hello", allowTrailing: true)
.to.eql ' '
it 'accepts text indented with a tab', ->
expect(mona.parse block.indentation,
"\tHello", allowTrailing: true)
.to.eql '\t'
it 'accepts text indented with tab/space combination', ->
expect(mona.parse block.indentation,
" \t Hello", allowTrailing: true)
.to.eql ' \t '
describe 'sameIndent', ->
it 'accepts the empty string', ->
expect(mona.parse block.sameIndent, '')
.to.eql ''
describe 'indentedBlock', ->
text = mona.text mona.noneOf('\n'), min: 1
line = mona.followedBy mona.and(block.sameIndent, text),
mona.string '\n'
para = mona.collect line, min: 1
it 'rejects not indented text', ->
expect(-> mona.parse block.indentedBlock(line),
"Hello\n")
.to.throw /expected indentation/
it 'accepts indented text', ->
expect(mona.parse block.indentedBlock(line),
" Hello\n")
.to.eql 'Hello'
it 'accepts indented paragraph', ->
expect(mona.parse block.indentedBlock(para),
" Hello\n World\n")
.to.eql ['Hello', 'World']
it 'accepts only indented lines', ->
expect(mona.parse block.indentedBlock(para),
" Hello\nWorld\n", allowTrailing: true)
.to.eql ['Hello']
describe 'NamedBlockElement', ->
hello = new block.NamedBlockElement 'hello'
it 'rejects normal text', ->
expect(-> mona.parse hello.parser(), "hello world\n")
.to.throw /expected/
it 'rejects other names', ->
expect(-> mona.parse hello.parser(), "goodbye:\n")
.to.throw /expected/
it 'parses an empty element', ->
expect(mona.parse hello.parser(), "hello:\n")
.to.eql name: 'hello', title: '', content: []
it 'parses a simple element', ->
expect(mona.parse hello.parser(), "hello: world\n")
.to.eql name: 'hello', title: 'world', content: []
it 'parses an element with simple content', ->
expect(mona.parse hello.parser(), "hello: world\n Here I am!\n")
.to.eql name: 'hello', title: 'world', content: [
[['Here I am!']]
]
it 'parses an element with multiple paragraphs', ->
expect(mona.parse hello.parser(),
"hello: world\n Here I am!\n\n Goodbye.\n")
.to.eql name: 'hello', title: 'world', content: [
[['Here I am!']], [['Goodbye.']]
]
describe 'NamedSpanElement', ->
span = new block.NamedSpanElement 'span'
it 'rejects normal text', ->
expect(-> mona.parse span.parser(), "text")
.to.throw /expected/
it 'rejects other name', ->
expect(-> mona.parse span.parser(), "[text]")
.to.throw /expected/
it 'parses a simple element', ->
expect(mona.parse span.parser(), "[span]").to.eql
name: 'span', title: '', content: []
it 'parses an element with title', ->
expect(mona.parse span.parser(), "[span title]").to.eql
name: 'span', title: 'title', content: []
it 'parses an element with content', ->
expect(mona.parse span.parser(), "[span: content]").to.eql
name: 'span', title: '', content: ['content']
| 109418 | # GainText
#
# <NAME> <<EMAIL>>
block = require '../src/block'
mona = require 'mona-parser'
{expect} = require 'chai'
describe 'block', ->
describe 'Paragraph', ->
paragraph = new block.Paragraph().parser()
it 'parses a single line', ->
expect(mona.parse paragraph, "Hello World.\n")
.to.eql([['Hello World.']])
it 'parses multiple lines', ->
expect(mona.parse paragraph, "Hello\nWorld.\n")
.to.eql([['Hello'], ['World.']])
it 'skips leading blank lines', ->
expect(mona.parse paragraph, "\nHello World.\n")
.to.eql([['Hello World.']])
it 'stops at blank line', ->
expect(mona.parse paragraph,
"Hello World.\n\n", allowTrailing: true)
.to.eql([['Hello World.']])
it 'stops at white-space only line', ->
expect(mona.parse paragraph,
"Hello World.\n \n", allowTrailing: true)
.to.eql([['Hello World.']])
describe 'indentation', ->
it 'rejects normal text', ->
expect(-> mona.parse block.indentation,
"Hello", allowTrailing: true)
.to.throw /expected indentation/
it 'accepts text indented with a space', ->
expect(mona.parse block.indentation,
" Hello", allowTrailing: true)
.to.eql ' '
it 'accepts text indented with a tab', ->
expect(mona.parse block.indentation,
"\tHello", allowTrailing: true)
.to.eql '\t'
it 'accepts text indented with tab/space combination', ->
expect(mona.parse block.indentation,
" \t Hello", allowTrailing: true)
.to.eql ' \t '
describe 'sameIndent', ->
it 'accepts the empty string', ->
expect(mona.parse block.sameIndent, '')
.to.eql ''
describe 'indentedBlock', ->
text = mona.text mona.noneOf('\n'), min: 1
line = mona.followedBy mona.and(block.sameIndent, text),
mona.string '\n'
para = mona.collect line, min: 1
it 'rejects not indented text', ->
expect(-> mona.parse block.indentedBlock(line),
"Hello\n")
.to.throw /expected indentation/
it 'accepts indented text', ->
expect(mona.parse block.indentedBlock(line),
" Hello\n")
.to.eql 'Hello'
it 'accepts indented paragraph', ->
expect(mona.parse block.indentedBlock(para),
" Hello\n World\n")
.to.eql ['Hello', 'World']
it 'accepts only indented lines', ->
expect(mona.parse block.indentedBlock(para),
" Hello\nWorld\n", allowTrailing: true)
.to.eql ['Hello']
describe 'NamedBlockElement', ->
hello = new block.NamedBlockElement 'hello'
it 'rejects normal text', ->
expect(-> mona.parse hello.parser(), "hello world\n")
.to.throw /expected/
it 'rejects other names', ->
expect(-> mona.parse hello.parser(), "goodbye:\n")
.to.throw /expected/
it 'parses an empty element', ->
expect(mona.parse hello.parser(), "hello:\n")
.to.eql name: 'hello', title: '', content: []
it 'parses a simple element', ->
expect(mona.parse hello.parser(), "hello: world\n")
.to.eql name: '<NAME>', title: 'world', content: []
it 'parses an element with simple content', ->
expect(mona.parse hello.parser(), "hello: world\n Here I am!\n")
.to.eql name: 'hello', title: 'world', content: [
[['Here I am!']]
]
it 'parses an element with multiple paragraphs', ->
expect(mona.parse hello.parser(),
"hello: world\n Here I am!\n\n Goodbye.\n")
.to.eql name: '<NAME>', title: 'world', content: [
[['Here I am!']], [['Goodbye.']]
]
describe 'NamedSpanElement', ->
span = new block.NamedSpanElement 'span'
it 'rejects normal text', ->
expect(-> mona.parse span.parser(), "text")
.to.throw /expected/
it 'rejects other name', ->
expect(-> mona.parse span.parser(), "[text]")
.to.throw /expected/
it 'parses a simple element', ->
expect(mona.parse span.parser(), "[span]").to.eql
name: 'span', title: '', content: []
it 'parses an element with title', ->
expect(mona.parse span.parser(), "[span title]").to.eql
name: 'span', title: 'title', content: []
it 'parses an element with content', ->
expect(mona.parse span.parser(), "[span: content]").to.eql
name: 'span', title: '', content: ['content']
| true | # GainText
#
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
block = require '../src/block'
mona = require 'mona-parser'
{expect} = require 'chai'
describe 'block', ->
describe 'Paragraph', ->
paragraph = new block.Paragraph().parser()
it 'parses a single line', ->
expect(mona.parse paragraph, "Hello World.\n")
.to.eql([['Hello World.']])
it 'parses multiple lines', ->
expect(mona.parse paragraph, "Hello\nWorld.\n")
.to.eql([['Hello'], ['World.']])
it 'skips leading blank lines', ->
expect(mona.parse paragraph, "\nHello World.\n")
.to.eql([['Hello World.']])
it 'stops at blank line', ->
expect(mona.parse paragraph,
"Hello World.\n\n", allowTrailing: true)
.to.eql([['Hello World.']])
it 'stops at white-space only line', ->
expect(mona.parse paragraph,
"Hello World.\n \n", allowTrailing: true)
.to.eql([['Hello World.']])
describe 'indentation', ->
it 'rejects normal text', ->
expect(-> mona.parse block.indentation,
"Hello", allowTrailing: true)
.to.throw /expected indentation/
it 'accepts text indented with a space', ->
expect(mona.parse block.indentation,
" Hello", allowTrailing: true)
.to.eql ' '
it 'accepts text indented with a tab', ->
expect(mona.parse block.indentation,
"\tHello", allowTrailing: true)
.to.eql '\t'
it 'accepts text indented with tab/space combination', ->
expect(mona.parse block.indentation,
" \t Hello", allowTrailing: true)
.to.eql ' \t '
describe 'sameIndent', ->
it 'accepts the empty string', ->
expect(mona.parse block.sameIndent, '')
.to.eql ''
describe 'indentedBlock', ->
text = mona.text mona.noneOf('\n'), min: 1
line = mona.followedBy mona.and(block.sameIndent, text),
mona.string '\n'
para = mona.collect line, min: 1
it 'rejects not indented text', ->
expect(-> mona.parse block.indentedBlock(line),
"Hello\n")
.to.throw /expected indentation/
it 'accepts indented text', ->
expect(mona.parse block.indentedBlock(line),
" Hello\n")
.to.eql 'Hello'
it 'accepts indented paragraph', ->
expect(mona.parse block.indentedBlock(para),
" Hello\n World\n")
.to.eql ['Hello', 'World']
it 'accepts only indented lines', ->
expect(mona.parse block.indentedBlock(para),
" Hello\nWorld\n", allowTrailing: true)
.to.eql ['Hello']
describe 'NamedBlockElement', ->
hello = new block.NamedBlockElement 'hello'
it 'rejects normal text', ->
expect(-> mona.parse hello.parser(), "hello world\n")
.to.throw /expected/
it 'rejects other names', ->
expect(-> mona.parse hello.parser(), "goodbye:\n")
.to.throw /expected/
it 'parses an empty element', ->
expect(mona.parse hello.parser(), "hello:\n")
.to.eql name: 'hello', title: '', content: []
it 'parses a simple element', ->
expect(mona.parse hello.parser(), "hello: world\n")
.to.eql name: 'PI:NAME:<NAME>END_PI', title: 'world', content: []
it 'parses an element with simple content', ->
expect(mona.parse hello.parser(), "hello: world\n Here I am!\n")
.to.eql name: 'hello', title: 'world', content: [
[['Here I am!']]
]
it 'parses an element with multiple paragraphs', ->
expect(mona.parse hello.parser(),
"hello: world\n Here I am!\n\n Goodbye.\n")
.to.eql name: 'PI:NAME:<NAME>END_PI', title: 'world', content: [
[['Here I am!']], [['Goodbye.']]
]
describe 'NamedSpanElement', ->
span = new block.NamedSpanElement 'span'
it 'rejects normal text', ->
expect(-> mona.parse span.parser(), "text")
.to.throw /expected/
it 'rejects other name', ->
expect(-> mona.parse span.parser(), "[text]")
.to.throw /expected/
it 'parses a simple element', ->
expect(mona.parse span.parser(), "[span]").to.eql
name: 'span', title: '', content: []
it 'parses an element with title', ->
expect(mona.parse span.parser(), "[span title]").to.eql
name: 'span', title: 'title', content: []
it 'parses an element with content', ->
expect(mona.parse span.parser(), "[span: content]").to.eql
name: 'span', title: '', content: ['content']
|
[
{
"context": "##\n# oss-easy\n# https://github.com/yi/node-oss-easy\n#\n# Copyright (c) 2013 yi\n# License",
"end": 37,
"score": 0.9990803003311157,
"start": 35,
"tag": "USERNAME",
"value": "yi"
},
{
"context": "github.com/yi/node-oss-easy\n#\n# Copyright (c) 2013 yi\n# Licensed unde... | src/oss-easy.coffee | yxdh4620/node-oss-easy | 0 | ##
# oss-easy
# https://github.com/yi/node-oss-easy
#
# Copyright (c) 2013 yi
# Licensed under the MIT license.
##
_ = require "underscore"
ossAPI = require 'oss-client'
fs = require "fs"
async = require "async"
path = require "path"
debuglog = require("debug")("oss-easy")
assert = require "assert"
generateRandomId = ->
return "#{(Math.random() * 36 >> 0).toString(36)}#{(Math.random() * 36 >> 0).toString(36)}#{Date.now().toString(36)}"
NONSENCE_CALLBACK = ()->
class OssEasy
# constructor function
# @param {Object} ossOptions
# accessKeyId :
# accessKeySecret :
# host : default: 'oss.aliyuncs.com';
# port : default: '8080';
# timeout : default: 300000 ms;
# uploaderHeaders : http headers for all uploading actions
# bucket : target bucket
# @param {String} targetBucket bucket name
constructor: (ossOptions, targetBucket) ->
assert ossOptions, "missing options"
assert ossOptions.accessKeyId, "missing oss key id"
assert ossOptions.accessKeySecret, "missing access secret"
@targetBucket = targetBucket || ossOptions.bucket
assert @targetBucket, "missing bucket name"
ossOptions['timeout'] = ossOptions['timeout'] || 5 * 60 * 1000
if ossOptions.uploaderHeaders?
@uploaderHeaders = ossOptions.uploaderHeaders
@contentType = @uploaderHeaders.contentType if @uploaderHeaders.contentType
delete ossOptions['uploaderHeaders']
debuglog "[constructor] bucket: %j, ossOptions:%j", @targetBucket, ossOptions
@oss = new ossAPI.OssClient(ossOptions)
# read file from oss
# @param {String} bucketName
# @param {String} filename
# @param {Object} [options] , refer to [options] of fs.readFile
# @param {Function} callback
readFile : (remoteFilePath, options, callback) ->
debuglog "[readFile] #{remoteFilePath}"
pathToTempFile = path.join "/tmp/", generateRandomId()
@downloadFile remoteFilePath, pathToTempFile, (err) ->
if err?
callback(err) if _.isFunction callback
else
fs.readFile pathToTempFile, options, callback
return
# write data to oss
# @param {String} bucketName
# @param {String} filename
# @param {String | Buffer} data
# @param {Function} callback
writeFile : (remoteFilePath, data, headers, callback) ->
debuglog "[writeFile] #{remoteFilePath}"
if Buffer.isBuffer(data)
contentType = "application/octet-stream"
else
contentType = "text/plain"
data = new Buffer(data)
if _.isFunction(headers) and not callback?
callback = headers
headers = null
args =
bucket: @targetBucket
object: remoteFilePath
srcFile: data
contentType : contentType
headers = _.extend({}, headers, @uploaderHeaders) if headers? or @uploaderHeaders?
args["userMetas"] = headers if headers?
@oss.putObject args, callback
return
# upload a local file to oss bucket
# @param {String} remoteFilePath
# @param {String} localFilePath
# @param {Function} callback
uploadFile : (localFilePath, remoteFilePath, headers, callback) ->
debuglog "[uploadFile] local:#{localFilePath} -> #{@targetBucket}:#{remoteFilePath}"
timeKey = "[oss-easy::uploadFile] -> #{remoteFilePath}"
console.time timeKey
if _.isFunction(headers) and not callback?
callback = headers
headers = null
args =
bucket: @targetBucket
object: remoteFilePath
srcFile: localFilePath
contentType:@contentType
headers = _.extend({}, headers, @uploaderHeaders) if headers? or @uploaderHeaders?
args["userMetas"] = headers if headers?
@oss.putObject args, (err)->
console.timeEnd timeKey
callback err
return
return
# upload multiple files in a batch
# @param {Object KV} tasks
# keys: localFilePaths
# values: remoteFilePaths
# @param {Function} callback
uploadFiles : (tasks, headers, callback) ->
debuglog "[uploadFiles] tasks:%j", tasks
if _.isFunction(headers) and not callback?
callback = headers
headers = null
callback or= NONSENCE_CALLBACK
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "ERROR [oss-easy::uploadFiles] #{err}"
return callback(err)
localFilePaths = _.keys(tasks)
async.eachSeries localFilePaths, (localFilePath, eachCallback)=>
@uploadFile localFilePath, tasks[localFilePath], headers, eachCallback
, callback
return
# upload a local file to oss bucket
# @param {String} remoteFilePath
# @param {String} localFilePath
# @param {Function} callback
downloadFile : (remoteFilePath, localFilePath, callback=NONSENCE_CALLBACK) ->
debuglog "[downloadFile] #{@targetBucket}:#{remoteFilePath} -> local:#{localFilePath}"
args =
bucket: @targetBucket
object: remoteFilePath
dstFile: localFilePath
@oss.getObject args, callback
return
# upload a local file to oss bucket
# @param {Object KV} tasks
# keys: remoteFilePaths
# values: localFilePaths
# @param {Function} callback
downloadFiles: (tasks, callback=NONSENCE_CALLBACK) ->
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "[oss-easy::downloadFileBatch] #{err}"
callback(err) if _.isFunction(callback)
return
remoteFilePaths = _.keys(tasks)
async.eachSeries remoteFilePaths, (remoteFilePath, eachCallback)=>
@downloadFile remoteFilePath, tasks[remoteFilePath], eachCallback
, callback
return
# delete a single file from oss bucket
# @param {String} remoteFilePath
deleteFile : (remoteFilePath, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFile] #{remoteFilePath}"
unless _.isString(remoteFilePath) and remoteFilePath
err = "bad argument, remoteFilePath:#{remoteFilePath}"
callback(err) if _.isFunction callback
return
args =
bucket: @targetBucket
object: remoteFilePath
@oss.deleteObject args, callback
return
# delete a list of single files from oss bucket
# @param {String[]} remoteFilePaths[]
deleteFiles: (remoteFilePaths, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFiles] #{remoteFilePaths}"
unless Array.isArray(remoteFilePaths) and remoteFilePaths.length
err = "bad argument, remoteFilePaths:#{remoteFilePaths}"
debuglog "[deleteFileBatch] #{err}"
callback(err) if _.isFunction callback
return
async.eachSeries remoteFilePaths, (remoteFilePath, eachCallback)=>
@deleteFile remoteFilePath, eachCallback
, callback
return
# delete all files under the given remote folder
# @param {String} remoteFolderPath
deleteFolder : (remoteFolderPath, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFolder] #{remoteFolderPath}"
unless _.isString(remoteFolderPath) and remoteFolderPath
err = "bad argument, remoteFolderPath:#{remoteFolderPath}"
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
# list folder
args =
bucket: @targetBucket
prefix : remoteFolderPath
delimiter : "/"
@oss.listObject args, (err, result)=>
if err?
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
#console.dir result.ListBucketResult.Contents
filelist = []
try
for item in result.ListBucketResult.Contents
key = item.Key
filelist.push(if Array.isArray(key) then key[0] else key)
catch err
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
@deleteFiles filelist, callback
return
return
copyFile : (sourceFilePath, destinationFilePath, callback) ->
debuglog "[copyFile] #{@targetBucket}:#{sourceFilePath} -> destinationFilePath:#{destinationFilePath}"
#console.log "[copyFile] #{@targetBucket}:#{sourceFilePath} -> destinationFilePath:#{destinationFilePath}"
args =
bucket: @targetBucket
object: destinationFilePath
srcObject: sourceFilePath
@oss.copyObject args, (err) ->
callback err
return
return
copyFiles : (tasks, callback) ->
debuglog "[copyFile] tasks:%j", tasks
assert _.isFunction(callback),"missing callback"
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "[oss-easy::copyFiles] #{err}"
callback(err) if _.isFunction(callback)
return
sourceFilePaths = _.keys(tasks)
async.eachSeries sourceFilePaths, (sourceFilePath, eachCallback) =>
@copyFile sourceFilePath, tasks[sourceFilePath], eachCallback
, callback
return
#复制一个目录下的文件到另一个目录
copyFolder: (sourceFolderPath, destinationFolderPath, callback) ->
debuglog "[copyFolder] source:#{sourceFolderPath} destination:#{destinationFolderPath}"
unless _.isString(sourceFolderPath) and sourceFolderPath and destinationFolderPath and _.isString(destinationFolderPath)
err = "bad argument, source:#{sourceFolderPath} destination:#{destinationFolderPath}"
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
# list folder
args =
bucket: @targetBucket
prefix : sourceFolderPath
delimiter : "/"
@oss.listObject args, (err, result)=>
if err?
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
#console.dir result.ListBucketResult.Contents
tasks = {}
try
for item in result.ListBucketResult.Contents
key = item.Key
des = path.join "#{destinationFolderPath}", path.basename(key)
tasks[key] = des
catch err
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
#console.dir tasks
@copyFiles tasks, callback
return
return
setUploaderHeaders: (uploaderHeaders) -> @uploaderHeaders = uploaderHeaders
setContentType: (contentType) -> @contentType = contentType
module.exports=OssEasy
| 59446 | ##
# oss-easy
# https://github.com/yi/node-oss-easy
#
# Copyright (c) 2013 <NAME>i
# Licensed under the MIT license.
##
_ = require "underscore"
ossAPI = require 'oss-client'
fs = require "fs"
async = require "async"
path = require "path"
debuglog = require("debug")("oss-easy")
assert = require "assert"
generateRandomId = ->
return "#{(Math.random() * 36 >> 0).toString(36)}#{(Math.random() * 36 >> 0).toString(36)}#{Date.now().toString(36)}"
NONSENCE_CALLBACK = ()->
class OssEasy
# constructor function
# @param {Object} ossOptions
# accessKeyId :
# accessKeySecret :
# host : default: 'oss.aliyuncs.com';
# port : default: '8080';
# timeout : default: 300000 ms;
# uploaderHeaders : http headers for all uploading actions
# bucket : target bucket
# @param {String} targetBucket bucket name
constructor: (ossOptions, targetBucket) ->
assert ossOptions, "missing options"
assert ossOptions.accessKeyId, "missing oss key id"
assert ossOptions.accessKeySecret, "missing access secret"
@targetBucket = targetBucket || ossOptions.bucket
assert @targetBucket, "missing bucket name"
ossOptions['timeout'] = ossOptions['timeout'] || 5 * 60 * 1000
if ossOptions.uploaderHeaders?
@uploaderHeaders = ossOptions.uploaderHeaders
@contentType = @uploaderHeaders.contentType if @uploaderHeaders.contentType
delete ossOptions['uploaderHeaders']
debuglog "[constructor] bucket: %j, ossOptions:%j", @targetBucket, ossOptions
@oss = new ossAPI.OssClient(ossOptions)
# read file from oss
# @param {String} bucketName
# @param {String} filename
# @param {Object} [options] , refer to [options] of fs.readFile
# @param {Function} callback
readFile : (remoteFilePath, options, callback) ->
debuglog "[readFile] #{remoteFilePath}"
pathToTempFile = path.join "/tmp/", generateRandomId()
@downloadFile remoteFilePath, pathToTempFile, (err) ->
if err?
callback(err) if _.isFunction callback
else
fs.readFile pathToTempFile, options, callback
return
# write data to oss
# @param {String} bucketName
# @param {String} filename
# @param {String | Buffer} data
# @param {Function} callback
writeFile : (remoteFilePath, data, headers, callback) ->
debuglog "[writeFile] #{remoteFilePath}"
if Buffer.isBuffer(data)
contentType = "application/octet-stream"
else
contentType = "text/plain"
data = new Buffer(data)
if _.isFunction(headers) and not callback?
callback = headers
headers = null
args =
bucket: @targetBucket
object: remoteFilePath
srcFile: data
contentType : contentType
headers = _.extend({}, headers, @uploaderHeaders) if headers? or @uploaderHeaders?
args["userMetas"] = headers if headers?
@oss.putObject args, callback
return
# upload a local file to oss bucket
# @param {String} remoteFilePath
# @param {String} localFilePath
# @param {Function} callback
uploadFile : (localFilePath, remoteFilePath, headers, callback) ->
debuglog "[uploadFile] local:#{localFilePath} -> #{@targetBucket}:#{remoteFilePath}"
timeKey = "[oss-easy::uploadFile] -> #{remoteFilePath}"
console.time timeKey
if _.isFunction(headers) and not callback?
callback = headers
headers = null
args =
bucket: @targetBucket
object: remoteFilePath
srcFile: localFilePath
contentType:@contentType
headers = _.extend({}, headers, @uploaderHeaders) if headers? or @uploaderHeaders?
args["userMetas"] = headers if headers?
@oss.putObject args, (err)->
console.timeEnd timeKey
callback err
return
return
# upload multiple files in a batch
# @param {Object KV} tasks
# keys: localFilePaths
# values: remoteFilePaths
# @param {Function} callback
uploadFiles : (tasks, headers, callback) ->
debuglog "[uploadFiles] tasks:%j", tasks
if _.isFunction(headers) and not callback?
callback = headers
headers = null
callback or= NONSENCE_CALLBACK
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "ERROR [oss-easy::uploadFiles] #{err}"
return callback(err)
localFilePaths = _.keys(tasks)
async.eachSeries localFilePaths, (localFilePath, eachCallback)=>
@uploadFile localFilePath, tasks[localFilePath], headers, eachCallback
, callback
return
# upload a local file to oss bucket
# @param {String} remoteFilePath
# @param {String} localFilePath
# @param {Function} callback
downloadFile : (remoteFilePath, localFilePath, callback=NONSENCE_CALLBACK) ->
debuglog "[downloadFile] #{@targetBucket}:#{remoteFilePath} -> local:#{localFilePath}"
args =
bucket: @targetBucket
object: remoteFilePath
dstFile: localFilePath
@oss.getObject args, callback
return
# upload a local file to oss bucket
# @param {Object KV} tasks
# keys: remoteFilePaths
# values: localFilePaths
# @param {Function} callback
downloadFiles: (tasks, callback=NONSENCE_CALLBACK) ->
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "[oss-easy::downloadFileBatch] #{err}"
callback(err) if _.isFunction(callback)
return
remoteFilePaths = _.keys(tasks)
async.eachSeries remoteFilePaths, (remoteFilePath, eachCallback)=>
@downloadFile remoteFilePath, tasks[remoteFilePath], eachCallback
, callback
return
# delete a single file from oss bucket
# @param {String} remoteFilePath
deleteFile : (remoteFilePath, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFile] #{remoteFilePath}"
unless _.isString(remoteFilePath) and remoteFilePath
err = "bad argument, remoteFilePath:#{remoteFilePath}"
callback(err) if _.isFunction callback
return
args =
bucket: @targetBucket
object: remoteFilePath
@oss.deleteObject args, callback
return
# delete a list of single files from oss bucket
# @param {String[]} remoteFilePaths[]
deleteFiles: (remoteFilePaths, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFiles] #{remoteFilePaths}"
unless Array.isArray(remoteFilePaths) and remoteFilePaths.length
err = "bad argument, remoteFilePaths:#{remoteFilePaths}"
debuglog "[deleteFileBatch] #{err}"
callback(err) if _.isFunction callback
return
async.eachSeries remoteFilePaths, (remoteFilePath, eachCallback)=>
@deleteFile remoteFilePath, eachCallback
, callback
return
# delete all files under the given remote folder
# @param {String} remoteFolderPath
deleteFolder : (remoteFolderPath, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFolder] #{remoteFolderPath}"
unless _.isString(remoteFolderPath) and remoteFolderPath
err = "bad argument, remoteFolderPath:#{remoteFolderPath}"
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
# list folder
args =
bucket: @targetBucket
prefix : remoteFolderPath
delimiter : "/"
@oss.listObject args, (err, result)=>
if err?
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
#console.dir result.ListBucketResult.Contents
filelist = []
try
for item in result.ListBucketResult.Contents
key = item.Key
filelist.push(if Array.isArray(key) then key[0] else key)
catch err
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
@deleteFiles filelist, callback
return
return
copyFile : (sourceFilePath, destinationFilePath, callback) ->
debuglog "[copyFile] #{@targetBucket}:#{sourceFilePath} -> destinationFilePath:#{destinationFilePath}"
#console.log "[copyFile] #{@targetBucket}:#{sourceFilePath} -> destinationFilePath:#{destinationFilePath}"
args =
bucket: @targetBucket
object: destinationFilePath
srcObject: sourceFilePath
@oss.copyObject args, (err) ->
callback err
return
return
copyFiles : (tasks, callback) ->
debuglog "[copyFile] tasks:%j", tasks
assert _.isFunction(callback),"missing callback"
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "[oss-easy::copyFiles] #{err}"
callback(err) if _.isFunction(callback)
return
sourceFilePaths = _.keys(tasks)
async.eachSeries sourceFilePaths, (sourceFilePath, eachCallback) =>
@copyFile sourceFilePath, tasks[sourceFilePath], eachCallback
, callback
return
#复制一个目录下的文件到另一个目录
copyFolder: (sourceFolderPath, destinationFolderPath, callback) ->
debuglog "[copyFolder] source:#{sourceFolderPath} destination:#{destinationFolderPath}"
unless _.isString(sourceFolderPath) and sourceFolderPath and destinationFolderPath and _.isString(destinationFolderPath)
err = "bad argument, source:#{sourceFolderPath} destination:#{destinationFolderPath}"
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
# list folder
args =
bucket: @targetBucket
prefix : sourceFolderPath
delimiter : "/"
@oss.listObject args, (err, result)=>
if err?
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
#console.dir result.ListBucketResult.Contents
tasks = {}
try
for item in result.ListBucketResult.Contents
key = item.Key
des = path.join "#{destinationFolderPath}", path.basename(key)
tasks[key] = des
catch err
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
#console.dir tasks
@copyFiles tasks, callback
return
return
setUploaderHeaders: (uploaderHeaders) -> @uploaderHeaders = uploaderHeaders
setContentType: (contentType) -> @contentType = contentType
module.exports=OssEasy
| true | ##
# oss-easy
# https://github.com/yi/node-oss-easy
#
# Copyright (c) 2013 PI:NAME:<NAME>END_PIi
# Licensed under the MIT license.
##
_ = require "underscore"
ossAPI = require 'oss-client'
fs = require "fs"
async = require "async"
path = require "path"
debuglog = require("debug")("oss-easy")
assert = require "assert"
generateRandomId = ->
return "#{(Math.random() * 36 >> 0).toString(36)}#{(Math.random() * 36 >> 0).toString(36)}#{Date.now().toString(36)}"
NONSENCE_CALLBACK = ()->
class OssEasy
# constructor function
# @param {Object} ossOptions
# accessKeyId :
# accessKeySecret :
# host : default: 'oss.aliyuncs.com';
# port : default: '8080';
# timeout : default: 300000 ms;
# uploaderHeaders : http headers for all uploading actions
# bucket : target bucket
# @param {String} targetBucket bucket name
constructor: (ossOptions, targetBucket) ->
assert ossOptions, "missing options"
assert ossOptions.accessKeyId, "missing oss key id"
assert ossOptions.accessKeySecret, "missing access secret"
@targetBucket = targetBucket || ossOptions.bucket
assert @targetBucket, "missing bucket name"
ossOptions['timeout'] = ossOptions['timeout'] || 5 * 60 * 1000
if ossOptions.uploaderHeaders?
@uploaderHeaders = ossOptions.uploaderHeaders
@contentType = @uploaderHeaders.contentType if @uploaderHeaders.contentType
delete ossOptions['uploaderHeaders']
debuglog "[constructor] bucket: %j, ossOptions:%j", @targetBucket, ossOptions
@oss = new ossAPI.OssClient(ossOptions)
# read file from oss
# @param {String} bucketName
# @param {String} filename
# @param {Object} [options] , refer to [options] of fs.readFile
# @param {Function} callback
readFile : (remoteFilePath, options, callback) ->
debuglog "[readFile] #{remoteFilePath}"
pathToTempFile = path.join "/tmp/", generateRandomId()
@downloadFile remoteFilePath, pathToTempFile, (err) ->
if err?
callback(err) if _.isFunction callback
else
fs.readFile pathToTempFile, options, callback
return
# write data to oss
# @param {String} bucketName
# @param {String} filename
# @param {String | Buffer} data
# @param {Function} callback
writeFile : (remoteFilePath, data, headers, callback) ->
debuglog "[writeFile] #{remoteFilePath}"
if Buffer.isBuffer(data)
contentType = "application/octet-stream"
else
contentType = "text/plain"
data = new Buffer(data)
if _.isFunction(headers) and not callback?
callback = headers
headers = null
args =
bucket: @targetBucket
object: remoteFilePath
srcFile: data
contentType : contentType
headers = _.extend({}, headers, @uploaderHeaders) if headers? or @uploaderHeaders?
args["userMetas"] = headers if headers?
@oss.putObject args, callback
return
# upload a local file to oss bucket
# @param {String} remoteFilePath
# @param {String} localFilePath
# @param {Function} callback
uploadFile : (localFilePath, remoteFilePath, headers, callback) ->
debuglog "[uploadFile] local:#{localFilePath} -> #{@targetBucket}:#{remoteFilePath}"
timeKey = "[oss-easy::uploadFile] -> #{remoteFilePath}"
console.time timeKey
if _.isFunction(headers) and not callback?
callback = headers
headers = null
args =
bucket: @targetBucket
object: remoteFilePath
srcFile: localFilePath
contentType:@contentType
headers = _.extend({}, headers, @uploaderHeaders) if headers? or @uploaderHeaders?
args["userMetas"] = headers if headers?
@oss.putObject args, (err)->
console.timeEnd timeKey
callback err
return
return
# upload multiple files in a batch
# @param {Object KV} tasks
# keys: localFilePaths
# values: remoteFilePaths
# @param {Function} callback
uploadFiles : (tasks, headers, callback) ->
debuglog "[uploadFiles] tasks:%j", tasks
if _.isFunction(headers) and not callback?
callback = headers
headers = null
callback or= NONSENCE_CALLBACK
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "ERROR [oss-easy::uploadFiles] #{err}"
return callback(err)
localFilePaths = _.keys(tasks)
async.eachSeries localFilePaths, (localFilePath, eachCallback)=>
@uploadFile localFilePath, tasks[localFilePath], headers, eachCallback
, callback
return
# upload a local file to oss bucket
# @param {String} remoteFilePath
# @param {String} localFilePath
# @param {Function} callback
downloadFile : (remoteFilePath, localFilePath, callback=NONSENCE_CALLBACK) ->
debuglog "[downloadFile] #{@targetBucket}:#{remoteFilePath} -> local:#{localFilePath}"
args =
bucket: @targetBucket
object: remoteFilePath
dstFile: localFilePath
@oss.getObject args, callback
return
# upload a local file to oss bucket
# @param {Object KV} tasks
# keys: remoteFilePaths
# values: localFilePaths
# @param {Function} callback
downloadFiles: (tasks, callback=NONSENCE_CALLBACK) ->
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "[oss-easy::downloadFileBatch] #{err}"
callback(err) if _.isFunction(callback)
return
remoteFilePaths = _.keys(tasks)
async.eachSeries remoteFilePaths, (remoteFilePath, eachCallback)=>
@downloadFile remoteFilePath, tasks[remoteFilePath], eachCallback
, callback
return
# delete a single file from oss bucket
# @param {String} remoteFilePath
deleteFile : (remoteFilePath, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFile] #{remoteFilePath}"
unless _.isString(remoteFilePath) and remoteFilePath
err = "bad argument, remoteFilePath:#{remoteFilePath}"
callback(err) if _.isFunction callback
return
args =
bucket: @targetBucket
object: remoteFilePath
@oss.deleteObject args, callback
return
# delete a list of single files from oss bucket
# @param {String[]} remoteFilePaths[]
deleteFiles: (remoteFilePaths, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFiles] #{remoteFilePaths}"
unless Array.isArray(remoteFilePaths) and remoteFilePaths.length
err = "bad argument, remoteFilePaths:#{remoteFilePaths}"
debuglog "[deleteFileBatch] #{err}"
callback(err) if _.isFunction callback
return
async.eachSeries remoteFilePaths, (remoteFilePath, eachCallback)=>
@deleteFile remoteFilePath, eachCallback
, callback
return
# delete all files under the given remote folder
# @param {String} remoteFolderPath
deleteFolder : (remoteFolderPath, callback=NONSENCE_CALLBACK) ->
debuglog "[deleteFolder] #{remoteFolderPath}"
unless _.isString(remoteFolderPath) and remoteFolderPath
err = "bad argument, remoteFolderPath:#{remoteFolderPath}"
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
# list folder
args =
bucket: @targetBucket
prefix : remoteFolderPath
delimiter : "/"
@oss.listObject args, (err, result)=>
if err?
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
#console.dir result.ListBucketResult.Contents
filelist = []
try
for item in result.ListBucketResult.Contents
key = item.Key
filelist.push(if Array.isArray(key) then key[0] else key)
catch err
debuglog "ERROR [deleteFolder] error:#{err}"
callback(err)
return
@deleteFiles filelist, callback
return
return
copyFile : (sourceFilePath, destinationFilePath, callback) ->
debuglog "[copyFile] #{@targetBucket}:#{sourceFilePath} -> destinationFilePath:#{destinationFilePath}"
#console.log "[copyFile] #{@targetBucket}:#{sourceFilePath} -> destinationFilePath:#{destinationFilePath}"
args =
bucket: @targetBucket
object: destinationFilePath
srcObject: sourceFilePath
@oss.copyObject args, (err) ->
callback err
return
return
copyFiles : (tasks, callback) ->
debuglog "[copyFile] tasks:%j", tasks
assert _.isFunction(callback),"missing callback"
unless tasks?
err = "bad argument, tasks:#{tasks}"
console.error "[oss-easy::copyFiles] #{err}"
callback(err) if _.isFunction(callback)
return
sourceFilePaths = _.keys(tasks)
async.eachSeries sourceFilePaths, (sourceFilePath, eachCallback) =>
@copyFile sourceFilePath, tasks[sourceFilePath], eachCallback
, callback
return
#复制一个目录下的文件到另一个目录
copyFolder: (sourceFolderPath, destinationFolderPath, callback) ->
debuglog "[copyFolder] source:#{sourceFolderPath} destination:#{destinationFolderPath}"
unless _.isString(sourceFolderPath) and sourceFolderPath and destinationFolderPath and _.isString(destinationFolderPath)
err = "bad argument, source:#{sourceFolderPath} destination:#{destinationFolderPath}"
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
# list folder
args =
bucket: @targetBucket
prefix : sourceFolderPath
delimiter : "/"
@oss.listObject args, (err, result)=>
if err?
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
#console.dir result.ListBucketResult.Contents
tasks = {}
try
for item in result.ListBucketResult.Contents
key = item.Key
des = path.join "#{destinationFolderPath}", path.basename(key)
tasks[key] = des
catch err
debuglog "ERROR [copyFolder] error:#{err}"
callback(err)
return
#console.dir tasks
@copyFiles tasks, callback
return
return
setUploaderHeaders: (uploaderHeaders) -> @uploaderHeaders = uploaderHeaders
setContentType: (contentType) -> @contentType = contentType
module.exports=OssEasy
|
[
{
"context": " class InnerGlow\n @shouldParse: (key) -> key is 'iglw'\n\n BLEND_MODES = {\n norm: 'normal',\n dark: 'da",
"end": 72,
"score": 0.9545557498931885,
"start": 68,
"tag": "KEY",
"value": "iglw"
}
] | lib/psd/layer_info/effect_info/inner_glow.coffee | fzx-design/Trims-psd-resolving | 0 |
module.exports = class InnerGlow
@shouldParse: (key) -> key is 'iglw'
BLEND_MODES = {
norm: 'normal',
dark: 'darken',
lite: 'lighten',
hue: 'hue',
sat: 'saturation',
colr: 'color',
lum: 'luminosity',
mul: 'multiply',
scrn: 'screen',
diss: 'dissolve',
over: 'overlay',
hLit: 'hard_light',
sLit: 'soft_light',
diff: 'difference',
smud: 'exclusion',
div: 'color_dodge',
idiv: 'color_burn',
lbrn: 'linear_burn',
lddg: 'linear_dodge',
vLit: 'vivid_light',
lLit: 'linear_light',
pLit: 'pin_light',
hMix: 'hard_mix',
pass: 'passthru',
dkCl: 'darker_color',
lgCl: 'lighter_color',
fsub: 'subtract',
fdiv: 'divide'
}
constructor: (file, size) ->
@file = file
@version = null #4
@blur = null #4
@intensity = null #4
@color = null #10
@blendmode = null #4+4
@embled = null #1
@opacity = null #1
@invert = null #1
@native_color = null #10
@end = @file.tell() + size
parse: ->
@version = @file.readInt()
@blur = @file.readInt()/65536
@intensity = @file.readInt()/65536
@color = @file.readSpaceColor()
@file.seek 4, true
@blendmode = @file.readString(4)
@enbled = @file.readBoolean()
@opacity = Math.round(@file.readByte()/2.55)
@invert = @file.readByte()
@nativeColor = @file.readSpaceColor()
@file.seek @end
export: ->
version: @version
blur: @blur
intensity: @intensity
color: @color
blendmode: @blendmode
enbled: @enbled
opacity: @opacity
invert: @invert
nativeColor: @nativeColor
| 141522 |
module.exports = class InnerGlow
@shouldParse: (key) -> key is '<KEY>'
BLEND_MODES = {
norm: 'normal',
dark: 'darken',
lite: 'lighten',
hue: 'hue',
sat: 'saturation',
colr: 'color',
lum: 'luminosity',
mul: 'multiply',
scrn: 'screen',
diss: 'dissolve',
over: 'overlay',
hLit: 'hard_light',
sLit: 'soft_light',
diff: 'difference',
smud: 'exclusion',
div: 'color_dodge',
idiv: 'color_burn',
lbrn: 'linear_burn',
lddg: 'linear_dodge',
vLit: 'vivid_light',
lLit: 'linear_light',
pLit: 'pin_light',
hMix: 'hard_mix',
pass: 'passthru',
dkCl: 'darker_color',
lgCl: 'lighter_color',
fsub: 'subtract',
fdiv: 'divide'
}
constructor: (file, size) ->
@file = file
@version = null #4
@blur = null #4
@intensity = null #4
@color = null #10
@blendmode = null #4+4
@embled = null #1
@opacity = null #1
@invert = null #1
@native_color = null #10
@end = @file.tell() + size
parse: ->
@version = @file.readInt()
@blur = @file.readInt()/65536
@intensity = @file.readInt()/65536
@color = @file.readSpaceColor()
@file.seek 4, true
@blendmode = @file.readString(4)
@enbled = @file.readBoolean()
@opacity = Math.round(@file.readByte()/2.55)
@invert = @file.readByte()
@nativeColor = @file.readSpaceColor()
@file.seek @end
export: ->
version: @version
blur: @blur
intensity: @intensity
color: @color
blendmode: @blendmode
enbled: @enbled
opacity: @opacity
invert: @invert
nativeColor: @nativeColor
| true |
module.exports = class InnerGlow
@shouldParse: (key) -> key is 'PI:KEY:<KEY>END_PI'
BLEND_MODES = {
norm: 'normal',
dark: 'darken',
lite: 'lighten',
hue: 'hue',
sat: 'saturation',
colr: 'color',
lum: 'luminosity',
mul: 'multiply',
scrn: 'screen',
diss: 'dissolve',
over: 'overlay',
hLit: 'hard_light',
sLit: 'soft_light',
diff: 'difference',
smud: 'exclusion',
div: 'color_dodge',
idiv: 'color_burn',
lbrn: 'linear_burn',
lddg: 'linear_dodge',
vLit: 'vivid_light',
lLit: 'linear_light',
pLit: 'pin_light',
hMix: 'hard_mix',
pass: 'passthru',
dkCl: 'darker_color',
lgCl: 'lighter_color',
fsub: 'subtract',
fdiv: 'divide'
}
constructor: (file, size) ->
@file = file
@version = null #4
@blur = null #4
@intensity = null #4
@color = null #10
@blendmode = null #4+4
@embled = null #1
@opacity = null #1
@invert = null #1
@native_color = null #10
@end = @file.tell() + size
parse: ->
@version = @file.readInt()
@blur = @file.readInt()/65536
@intensity = @file.readInt()/65536
@color = @file.readSpaceColor()
@file.seek 4, true
@blendmode = @file.readString(4)
@enbled = @file.readBoolean()
@opacity = Math.round(@file.readByte()/2.55)
@invert = @file.readByte()
@nativeColor = @file.readSpaceColor()
@file.seek @end
export: ->
version: @version
blur: @blur
intensity: @intensity
color: @color
blendmode: @blendmode
enbled: @enbled
opacity: @opacity
invert: @invert
nativeColor: @nativeColor
|
[
{
"context": "ain]\n Is _, -> 'no match'\n )\n ret = m('bob@alice.com')\n assert ret[0] is 'bob'\n assert ret[1] is",
"end": 742,
"score": 0.9999160766601562,
"start": 729,
"tag": "EMAIL",
"value": "bob@alice.com"
},
{
"context": "\n assert ret[0] is 'bob'\n as... | node_modules/pat-mat/test/spec-test.coffee | prasanthbendra/ng-2 | 0 | {
Match
Is
As
On
NoMatchError
extract
guard
parameter
paramSeq
wildcard
wildcardSeq
} = require('../dest/api')
assert = require('assert')
$ = parameter
$$ = paramSeq
_ = wildcard
__ = wildcardSeq
describe 'Match ', ->
it 'should match Is branch', ->
m = Match(
Is Number, (p) -> p * 2
Is String, (s) -> s + s
Is [0, $$], (t) -> t.length
Is Array, (a) -> (2*i for i in a)
)
assert m(1) is 2
assert m('s') is 'ss'
assert m([1])[0] is 2
assert m([0, 1, 2]) is 2
it 'Is should inject RegExp param', ->
MAIL_REG = /(.*?)@(.*?)\..*/
m = Match(
Is MAIL_REG, (_, name, domain) -> [name, domain]
Is _, -> 'no match'
)
ret = m('bob@alice.com')
assert ret[0] is 'bob'
assert ret[1] is 'alice'
assert m('sss') is 'no match'
it 'test custom extract', ->
Circle = extract {
annotation: ['r']
transform: (other) ->
x = other.x
y = other.y
{r: Math.sqrt(x*x + y*y)}
}
Point = extract class Point
constructor: (@x, @y) ->
m = Match(
Is Point(3, 4), -> 'point'
Is Circle(5), -> 'circle'
Is _, -> 'nothing'
)
assert m(new Point(3, 4)) is 'point'
assert m(new Point(4, 3)) is 'circle'
assert m(x: 4, y: 3) is 'circle'
assert m(r: 5) is 'nothing'
it 'As should ignore RegExp', ->
MAIL_REG = /(.*?)@(.*?)\..*/
m = Match(
As MAIL_REG, -> arguments.length
As _, -> 'no match'
)
assert m('bob@alice.com') is 0
it 'As test', ->
pm = As String, -> arguments.length
assert pm.hasMatch('sss')
assert pm.inject('sss') is 0
pm = As undefined, -> arguments.length
assert pm.hasMatch('sss') is false
it 'As should match', ->
argCount = -> arguments.length
m = Match(
As null , -> 'null'
As undefined , -> 'undefined'
As 42 , -> 'meaning of life'
As String , argCount
As {x: $()} , argCount
As [$(), __ , Number] , argCount
)
assert m(null) is 'null'
assert m(undefined) is 'undefined'
assert m(42) is 'meaning of life'
assert m('love live') is 0
assert m(x: 2, y: 3) is 1
assert m([1,2,3]) is 1
it 'On should inject parameter as object k-v pair', ->
m = Match(
On $('n', Number), (m) -> m.n * 2
On {x: $('x'), y: $('y')}, (m) -> m.x + m.y
On $(), -> @unnamed[0]
)
assert m(2) is 4
assert m({x: 5, y: 5}) is 10
assert m(true)
it 'should throw error if no match', ->
m = Match(
On $('n', Number), (m) -> m.n * 2
On {x: $('x'), y: $('y')}, (m) -> m.x + m.y
)
assert.throws( ->
m('no match')
NoMatchError)
it 'should ask guard', ->
m = Match(
Is Number, guard(-> @m%2 == 0), -> 'even'
Is Number, guard(-> @m%2 == 1), -> 'odd'
Is wildcard, -> 'not integer'
)
assert m(2) is 'even'
assert m(3) is 'odd'
assert m('dd') is 'not integer'
| 200442 | {
Match
Is
As
On
NoMatchError
extract
guard
parameter
paramSeq
wildcard
wildcardSeq
} = require('../dest/api')
assert = require('assert')
$ = parameter
$$ = paramSeq
_ = wildcard
__ = wildcardSeq
describe 'Match ', ->
it 'should match Is branch', ->
m = Match(
Is Number, (p) -> p * 2
Is String, (s) -> s + s
Is [0, $$], (t) -> t.length
Is Array, (a) -> (2*i for i in a)
)
assert m(1) is 2
assert m('s') is 'ss'
assert m([1])[0] is 2
assert m([0, 1, 2]) is 2
it 'Is should inject RegExp param', ->
MAIL_REG = /(.*?)@(.*?)\..*/
m = Match(
Is MAIL_REG, (_, name, domain) -> [name, domain]
Is _, -> 'no match'
)
ret = m('<EMAIL>')
assert ret[0] is 'bob'
assert ret[1] is '<NAME>'
assert m('sss') is 'no match'
it 'test custom extract', ->
Circle = extract {
annotation: ['r']
transform: (other) ->
x = other.x
y = other.y
{r: Math.sqrt(x*x + y*y)}
}
Point = extract class Point
constructor: (@x, @y) ->
m = Match(
Is Point(3, 4), -> 'point'
Is Circle(5), -> 'circle'
Is _, -> 'nothing'
)
assert m(new Point(3, 4)) is 'point'
assert m(new Point(4, 3)) is 'circle'
assert m(x: 4, y: 3) is 'circle'
assert m(r: 5) is 'nothing'
it 'As should ignore RegExp', ->
MAIL_REG = /(.*?)@(.*?)\..*/
m = Match(
As MAIL_REG, -> arguments.length
As _, -> 'no match'
)
assert m('<EMAIL>') is 0
it 'As test', ->
pm = As String, -> arguments.length
assert pm.hasMatch('sss')
assert pm.inject('sss') is 0
pm = As undefined, -> arguments.length
assert pm.hasMatch('sss') is false
it 'As should match', ->
argCount = -> arguments.length
m = Match(
As null , -> 'null'
As undefined , -> 'undefined'
As 42 , -> 'meaning of life'
As String , argCount
As {x: $()} , argCount
As [$(), __ , Number] , argCount
)
assert m(null) is 'null'
assert m(undefined) is 'undefined'
assert m(42) is 'meaning of life'
assert m('love live') is 0
assert m(x: 2, y: 3) is 1
assert m([1,2,3]) is 1
it 'On should inject parameter as object k-v pair', ->
m = Match(
On $('n', Number), (m) -> m.n * 2
On {x: $('x'), y: $('y')}, (m) -> m.x + m.y
On $(), -> @unnamed[0]
)
assert m(2) is 4
assert m({x: 5, y: 5}) is 10
assert m(true)
it 'should throw error if no match', ->
m = Match(
On $('n', Number), (m) -> m.n * 2
On {x: $('x'), y: $('y')}, (m) -> m.x + m.y
)
assert.throws( ->
m('no match')
NoMatchError)
it 'should ask guard', ->
m = Match(
Is Number, guard(-> @m%2 == 0), -> 'even'
Is Number, guard(-> @m%2 == 1), -> 'odd'
Is wildcard, -> 'not integer'
)
assert m(2) is 'even'
assert m(3) is 'odd'
assert m('dd') is 'not integer'
| true | {
Match
Is
As
On
NoMatchError
extract
guard
parameter
paramSeq
wildcard
wildcardSeq
} = require('../dest/api')
assert = require('assert')
$ = parameter
$$ = paramSeq
_ = wildcard
__ = wildcardSeq
describe 'Match ', ->
it 'should match Is branch', ->
m = Match(
Is Number, (p) -> p * 2
Is String, (s) -> s + s
Is [0, $$], (t) -> t.length
Is Array, (a) -> (2*i for i in a)
)
assert m(1) is 2
assert m('s') is 'ss'
assert m([1])[0] is 2
assert m([0, 1, 2]) is 2
it 'Is should inject RegExp param', ->
MAIL_REG = /(.*?)@(.*?)\..*/
m = Match(
Is MAIL_REG, (_, name, domain) -> [name, domain]
Is _, -> 'no match'
)
ret = m('PI:EMAIL:<EMAIL>END_PI')
assert ret[0] is 'bob'
assert ret[1] is 'PI:NAME:<NAME>END_PI'
assert m('sss') is 'no match'
it 'test custom extract', ->
Circle = extract {
annotation: ['r']
transform: (other) ->
x = other.x
y = other.y
{r: Math.sqrt(x*x + y*y)}
}
Point = extract class Point
constructor: (@x, @y) ->
m = Match(
Is Point(3, 4), -> 'point'
Is Circle(5), -> 'circle'
Is _, -> 'nothing'
)
assert m(new Point(3, 4)) is 'point'
assert m(new Point(4, 3)) is 'circle'
assert m(x: 4, y: 3) is 'circle'
assert m(r: 5) is 'nothing'
it 'As should ignore RegExp', ->
MAIL_REG = /(.*?)@(.*?)\..*/
m = Match(
As MAIL_REG, -> arguments.length
As _, -> 'no match'
)
assert m('PI:EMAIL:<EMAIL>END_PI') is 0
it 'As test', ->
pm = As String, -> arguments.length
assert pm.hasMatch('sss')
assert pm.inject('sss') is 0
pm = As undefined, -> arguments.length
assert pm.hasMatch('sss') is false
it 'As should match', ->
argCount = -> arguments.length
m = Match(
As null , -> 'null'
As undefined , -> 'undefined'
As 42 , -> 'meaning of life'
As String , argCount
As {x: $()} , argCount
As [$(), __ , Number] , argCount
)
assert m(null) is 'null'
assert m(undefined) is 'undefined'
assert m(42) is 'meaning of life'
assert m('love live') is 0
assert m(x: 2, y: 3) is 1
assert m([1,2,3]) is 1
it 'On should inject parameter as object k-v pair', ->
m = Match(
On $('n', Number), (m) -> m.n * 2
On {x: $('x'), y: $('y')}, (m) -> m.x + m.y
On $(), -> @unnamed[0]
)
assert m(2) is 4
assert m({x: 5, y: 5}) is 10
assert m(true)
it 'should throw error if no match', ->
m = Match(
On $('n', Number), (m) -> m.n * 2
On {x: $('x'), y: $('y')}, (m) -> m.x + m.y
)
assert.throws( ->
m('no match')
NoMatchError)
it 'should ask guard', ->
m = Match(
Is Number, guard(-> @m%2 == 0), -> 'even'
Is Number, guard(-> @m%2 == 1), -> 'odd'
Is wildcard, -> 'not integer'
)
assert m(2) is 'even'
assert m(3) is 'odd'
assert m('dd') is 'not integer'
|
[
{
"context": ", ->\n assert validate\n id: 1\n name: 'John Doe'\n email: 'foo@bar.com'\n taxRate: 0.1\n ",
"end": 190,
"score": 0.9998695850372314,
"start": 182,
"tag": "NAME",
"value": "John Doe"
},
{
"context": "\n id: 1\n name: 'John Doe'\n e... | 07-validation/test/index.coffee | SaritaChakote/node-puzzle | 0 | assert = require 'assert'
{validate} = require '../lib'
describe '07-validation', ->
it 'should return `true` for valid data', ->
assert validate
id: 1
name: 'John Doe'
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: '#ccccff'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: name', ->
assert !validate
id: 1
name: 2 # <--- problem
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: email', ->
assert !validate
id: 1
name: 'John Doe'
email: 'foo@bar@baz.com' # <--- problem
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: id', ->
assert !validate
id: -5 # <--- problem
name: 'John Doe'
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: favouriteColour', ->
assert !validate
id: 1
name: 'John Doe'
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: '#ccccffx' # <--- problem
interests: ["cycling", "programming"]
# !!!!!
# Add more tests for different data that users might try to provide!
# !!!!!
it 'should return `false` for invalid data: name', ->
assert !validate
id: 1
name: 'abcaaaaaaaaaaaaaaaaaaaaaa' # <--- problem
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: email', ->
assert !validate
id: 1
name: 'John Doe'
email: 'foo @baz.com' # <--- problem
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: id', ->
assert !validate
id: 0.5 # <--- problem
name: 'John Doe'
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: favouriteColour', ->
assert !validate
id: 1
name: 'John Doe'
email: 'foo@bar.com'
taxRate: 0.1
favouriteColour: 'Orange' # <--- problem
interests: ["cycling", "programming"] | 128794 | assert = require 'assert'
{validate} = require '../lib'
describe '07-validation', ->
it 'should return `true` for valid data', ->
assert validate
id: 1
name: '<NAME>'
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: '#ccccff'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: name', ->
assert !validate
id: 1
name: 2 # <--- problem
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: email', ->
assert !validate
id: 1
name: '<NAME>'
email: '<EMAIL>' # <--- problem
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: id', ->
assert !validate
id: -5 # <--- problem
name: '<NAME>'
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: favouriteColour', ->
assert !validate
id: 1
name: '<NAME>'
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: '#ccccffx' # <--- problem
interests: ["cycling", "programming"]
# !!!!!
# Add more tests for different data that users might try to provide!
# !!!!!
it 'should return `false` for invalid data: name', ->
assert !validate
id: 1
name: '<NAME>caaaaaaaaaaaaaaaaaaaaaa' # <--- problem
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: email', ->
assert !validate
id: 1
name: '<NAME>'
email: '<EMAIL>' # <--- problem
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: id', ->
assert !validate
id: 0.5 # <--- problem
name: '<NAME>'
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: favouriteColour', ->
assert !validate
id: 1
name: '<NAME>'
email: '<EMAIL>'
taxRate: 0.1
favouriteColour: 'Orange' # <--- problem
interests: ["cycling", "programming"] | true | assert = require 'assert'
{validate} = require '../lib'
describe '07-validation', ->
it 'should return `true` for valid data', ->
assert validate
id: 1
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: '#ccccff'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: name', ->
assert !validate
id: 1
name: 2 # <--- problem
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: email', ->
assert !validate
id: 1
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI' # <--- problem
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: id', ->
assert !validate
id: -5 # <--- problem
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: favouriteColour', ->
assert !validate
id: 1
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: '#ccccffx' # <--- problem
interests: ["cycling", "programming"]
# !!!!!
# Add more tests for different data that users might try to provide!
# !!!!!
it 'should return `false` for invalid data: name', ->
assert !validate
id: 1
name: 'PI:NAME:<NAME>END_PIcaaaaaaaaaaaaaaaaaaaaaa' # <--- problem
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: email', ->
assert !validate
id: 1
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI' # <--- problem
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: id', ->
assert !validate
id: 0.5 # <--- problem
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: '#ff6'
interests: ["cycling", "programming"]
it 'should return `false` for invalid data: favouriteColour', ->
assert !validate
id: 1
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
taxRate: 0.1
favouriteColour: 'Orange' # <--- problem
interests: ["cycling", "programming"] |
[
{
"context": "###*\n * Cookie parser for GridFW\n * @copyright khalid RAFIK 2018\n###\n'use strict'\n\nplugName = require('../pac",
"end": 59,
"score": 0.9991590976715088,
"start": 47,
"tag": "NAME",
"value": "khalid RAFIK"
}
] | assets/index.coffee | gridfw/i18n-gulp | 0 | ###*
* Cookie parser for GridFW
* @copyright khalid RAFIK 2018
###
'use strict'
plugName = require('../package.json').name
gutil = require 'gulp-util'
Path = require 'path'
through = require 'through2'
Pug = require 'pug'
Terser = require 'terser'
I18N_SYMBOL = Symbol 'i18n module'
#=include _noramlize.coffee
#=include _compile-methods.coffee
_isEmpty = (obj)->
for k of obj
return false
true
# gulp compile files
gulpCompiler = (options)->
bufferedI18n = Object.create null
# options
options ?= Object.create null
toJson = options.json is true
cwd = null
# compile each file
bufferContents = (file, end, cb)->
# ignore incorrect files
return cb() if file.isNull()
return cb new Error "i18n-compiler>> Streaming isn't supported" if file.isStream()
# process
err = null
try
# compile file and buffer data
Object.assign bufferedI18n, eval file.contents.toString 'utf8'
# base dir
cwd= file._cwd
catch e
err = new gutil.PluginError plugName, e
cb err
# concat all files
concatAll = (cb)->
err= null
try
# check file not empty
unless _isEmpty bufferedI18n
# normalize 18n: convert into separated locals
data = _normalize bufferedI18n
# separate into multiple locals
for k,v of data
# reserved attributes
v.local = k
# compile to JSON
if toJson
fle = new gutil.File
cwd: cwd
path: k + '.json'
contents: new Buffer JSON.stringify v
# compile js instead
else
content = []
for a,b of v
content.push "#{JSON.stringify a}:#{(i18n.compile b).toString()}"
# create table for fast access
content = """
var msgs= exports.messages= {#{content.join ','}};
var arr= exports.arr= [];
var map= exports.map= Object.create(null);
var i=0, k;
for(k in msgs){ arr.push(msgs[k]); map[k] = i++; }
"""
# create file
fle = new gutil.File
cwd: cwd
path: k + '.js'
contents: new Buffer content
@push fle
catch e
err = new gutil.PluginError plugName, e
cb err
# return
through.obj bufferContents, concatAll
module.exports = gulpCompiler
| 226176 | ###*
* Cookie parser for GridFW
* @copyright <NAME> 2018
###
'use strict'
plugName = require('../package.json').name
gutil = require 'gulp-util'
Path = require 'path'
through = require 'through2'
Pug = require 'pug'
Terser = require 'terser'
I18N_SYMBOL = Symbol 'i18n module'
#=include _noramlize.coffee
#=include _compile-methods.coffee
_isEmpty = (obj)->
for k of obj
return false
true
# gulp compile files
gulpCompiler = (options)->
bufferedI18n = Object.create null
# options
options ?= Object.create null
toJson = options.json is true
cwd = null
# compile each file
bufferContents = (file, end, cb)->
# ignore incorrect files
return cb() if file.isNull()
return cb new Error "i18n-compiler>> Streaming isn't supported" if file.isStream()
# process
err = null
try
# compile file and buffer data
Object.assign bufferedI18n, eval file.contents.toString 'utf8'
# base dir
cwd= file._cwd
catch e
err = new gutil.PluginError plugName, e
cb err
# concat all files
concatAll = (cb)->
err= null
try
# check file not empty
unless _isEmpty bufferedI18n
# normalize 18n: convert into separated locals
data = _normalize bufferedI18n
# separate into multiple locals
for k,v of data
# reserved attributes
v.local = k
# compile to JSON
if toJson
fle = new gutil.File
cwd: cwd
path: k + '.json'
contents: new Buffer JSON.stringify v
# compile js instead
else
content = []
for a,b of v
content.push "#{JSON.stringify a}:#{(i18n.compile b).toString()}"
# create table for fast access
content = """
var msgs= exports.messages= {#{content.join ','}};
var arr= exports.arr= [];
var map= exports.map= Object.create(null);
var i=0, k;
for(k in msgs){ arr.push(msgs[k]); map[k] = i++; }
"""
# create file
fle = new gutil.File
cwd: cwd
path: k + '.js'
contents: new Buffer content
@push fle
catch e
err = new gutil.PluginError plugName, e
cb err
# return
through.obj bufferContents, concatAll
module.exports = gulpCompiler
| true | ###*
* Cookie parser for GridFW
* @copyright PI:NAME:<NAME>END_PI 2018
###
'use strict'
plugName = require('../package.json').name
gutil = require 'gulp-util'
Path = require 'path'
through = require 'through2'
Pug = require 'pug'
Terser = require 'terser'
I18N_SYMBOL = Symbol 'i18n module'
#=include _noramlize.coffee
#=include _compile-methods.coffee
_isEmpty = (obj)->
for k of obj
return false
true
# gulp compile files
gulpCompiler = (options)->
bufferedI18n = Object.create null
# options
options ?= Object.create null
toJson = options.json is true
cwd = null
# compile each file
bufferContents = (file, end, cb)->
# ignore incorrect files
return cb() if file.isNull()
return cb new Error "i18n-compiler>> Streaming isn't supported" if file.isStream()
# process
err = null
try
# compile file and buffer data
Object.assign bufferedI18n, eval file.contents.toString 'utf8'
# base dir
cwd= file._cwd
catch e
err = new gutil.PluginError plugName, e
cb err
# concat all files
concatAll = (cb)->
err= null
try
# check file not empty
unless _isEmpty bufferedI18n
# normalize 18n: convert into separated locals
data = _normalize bufferedI18n
# separate into multiple locals
for k,v of data
# reserved attributes
v.local = k
# compile to JSON
if toJson
fle = new gutil.File
cwd: cwd
path: k + '.json'
contents: new Buffer JSON.stringify v
# compile js instead
else
content = []
for a,b of v
content.push "#{JSON.stringify a}:#{(i18n.compile b).toString()}"
# create table for fast access
content = """
var msgs= exports.messages= {#{content.join ','}};
var arr= exports.arr= [];
var map= exports.map= Object.create(null);
var i=0, k;
for(k in msgs){ arr.push(msgs[k]); map[k] = i++; }
"""
# create file
fle = new gutil.File
cwd: cwd
path: k + '.js'
contents: new Buffer content
@push fle
catch e
err = new gutil.PluginError plugName, e
cb err
# return
through.obj bufferContents, concatAll
module.exports = gulpCompiler
|
[
{
"context": "#\t> File Name: log-test.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@gmail.com\n#\t> Created Time: F",
"end": 45,
"score": 0.9976768493652344,
"start": 43,
"tag": "USERNAME",
"value": "LY"
},
{
"context": "ile Name: log-test.coffee\n#\t> Author: LY\n#\t> Mail: ly.fran... | server/test/log-test.coffee | wiiliamking/miac-website | 0 | # > File Name: log-test.coffee
# > Author: LY
# > Mail: ly.franky@gmail.com
# > Created Time: Friday, November 21, 2014 AM09:29:20 CST
chai = require 'chai'
chai.should()
app = require '../app.coffee'
request = (require 'supertest')(app)
UserModel = require '../db/models/user.coffee'
describe 'log test', ->
describe 'user login', ->
before (done)->
UserModel.drop ->
request
.post('/register/regist')
.send({username: 'laiy', password: 'miac-website', email: 'ly.franky@gmail.com'})
.end done
it 'Login fail cuz user doesnt exist', (done)->
request
.post('/log/session')
.send({username: 'ly', password: 'blablabla'})
.expect(404)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'User is not found.'
done()
it 'Login fail cuz password is not correct', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'blablabla'})
.expect(400)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Password is not correct.'
done()
it 'Login fail cuz Info not complete', (done)->
request
.post('/log/session')
.send({username: '', password: ''})
.expect(200)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Info not complete.'
done()
it 'Login successfully', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'miac-website'})
.expect(200)
.expect("set-cookie", /connect\.sid/)
.end (err, res)->
res.body.result.should.equal 'success'
done()
it 'Login fail cuz user has already login', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'miac-website'})
.expect('set-cookie', /connect\.sid/)
.end (err, res)->
request
.post('/log/session')
.set('Cookie', res.headers['set-cookie'])
.send({username: 'laiy', password: 'miac-website'})
.expect(409)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'User conflict.'
done()
describe 'user logout', ->
before (done)->
UserModel.drop ->
request
.post('/register/regist')
.send({username: 'laiy', password: 'miac-website', email: 'ly.franky@gmail.com'})
.end done
it 'Logout successfully', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'miac-website'})
.expect(200)
.expect('set-cookie', /connect\.sid/)
.end (err, res)->
res.body.result.should.equal 'success'
request
.delete('/log/session')
.set('Cookie', res.headers['set-cookie'])
.end (err, res)->
res.body.result.should.equal 'success'
done()
it 'Logout fail cuz user has not login.', (done)->
request
.delete('/log/session')
.expect(401)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Please log in first.'
done()
| 37091 | # > File Name: log-test.coffee
# > Author: LY
# > Mail: <EMAIL>
# > Created Time: Friday, November 21, 2014 AM09:29:20 CST
chai = require 'chai'
chai.should()
app = require '../app.coffee'
request = (require 'supertest')(app)
UserModel = require '../db/models/user.coffee'
describe 'log test', ->
describe 'user login', ->
before (done)->
UserModel.drop ->
request
.post('/register/regist')
.send({username: 'laiy', password: '<PASSWORD>', email: '<EMAIL>'})
.end done
it 'Login fail cuz user doesnt exist', (done)->
request
.post('/log/session')
.send({username: 'ly', password: '<PASSWORD>'})
.expect(404)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'User is not found.'
done()
it 'Login fail cuz password is not correct', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: '<PASSWORD>'})
.expect(400)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Password is not correct.'
done()
it 'Login fail cuz Info not complete', (done)->
request
.post('/log/session')
.send({username: '', password: ''})
.expect(200)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Info not complete.'
done()
it 'Login successfully', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: '<PASSWORD>'})
.expect(200)
.expect("set-cookie", /connect\.sid/)
.end (err, res)->
res.body.result.should.equal 'success'
done()
it 'Login fail cuz user has already login', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: '<PASSWORD>'})
.expect('set-cookie', /connect\.sid/)
.end (err, res)->
request
.post('/log/session')
.set('Cookie', res.headers['set-cookie'])
.send({username: 'laiy', password: '<PASSWORD>'})
.expect(409)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'User conflict.'
done()
describe 'user logout', ->
before (done)->
UserModel.drop ->
request
.post('/register/regist')
.send({username: 'laiy', password: '<PASSWORD>', email: '<EMAIL>'})
.end done
it 'Logout successfully', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: '<PASSWORD>'})
.expect(200)
.expect('set-cookie', /connect\.sid/)
.end (err, res)->
res.body.result.should.equal 'success'
request
.delete('/log/session')
.set('Cookie', res.headers['set-cookie'])
.end (err, res)->
res.body.result.should.equal 'success'
done()
it 'Logout fail cuz user has not login.', (done)->
request
.delete('/log/session')
.expect(401)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Please log in first.'
done()
| true | # > File Name: log-test.coffee
# > Author: LY
# > Mail: PI:EMAIL:<EMAIL>END_PI
# > Created Time: Friday, November 21, 2014 AM09:29:20 CST
chai = require 'chai'
chai.should()
app = require '../app.coffee'
request = (require 'supertest')(app)
UserModel = require '../db/models/user.coffee'
describe 'log test', ->
describe 'user login', ->
before (done)->
UserModel.drop ->
request
.post('/register/regist')
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI'})
.end done
it 'Login fail cuz user doesnt exist', (done)->
request
.post('/log/session')
.send({username: 'ly', password: 'PI:PASSWORD:<PASSWORD>END_PI'})
.expect(404)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'User is not found.'
done()
it 'Login fail cuz password is not correct', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI'})
.expect(400)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Password is not correct.'
done()
it 'Login fail cuz Info not complete', (done)->
request
.post('/log/session')
.send({username: '', password: ''})
.expect(200)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Info not complete.'
done()
it 'Login successfully', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI'})
.expect(200)
.expect("set-cookie", /connect\.sid/)
.end (err, res)->
res.body.result.should.equal 'success'
done()
it 'Login fail cuz user has already login', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI'})
.expect('set-cookie', /connect\.sid/)
.end (err, res)->
request
.post('/log/session')
.set('Cookie', res.headers['set-cookie'])
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI'})
.expect(409)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'User conflict.'
done()
describe 'user logout', ->
before (done)->
UserModel.drop ->
request
.post('/register/regist')
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI'})
.end done
it 'Logout successfully', (done)->
request
.post('/log/session')
.send({username: 'laiy', password: 'PI:PASSWORD:<PASSWORD>END_PI'})
.expect(200)
.expect('set-cookie', /connect\.sid/)
.end (err, res)->
res.body.result.should.equal 'success'
request
.delete('/log/session')
.set('Cookie', res.headers['set-cookie'])
.end (err, res)->
res.body.result.should.equal 'success'
done()
it 'Logout fail cuz user has not login.', (done)->
request
.delete('/log/session')
.expect(401)
.end (err, res)->
res.body.result.should.equal 'fail'
res.body.msg.should.equal 'Please log in first.'
done()
|
[
{
"context": "n an argument error', (cb) ->\n invalid = ['hallo','frank']\n\n helper.store.users.getByIds in",
"end": 3156,
"score": 0.9968947172164917,
"start": 3151,
"tag": "NAME",
"value": "hallo"
},
{
"context": "ument error', (cb) ->\n invalid = ['hallo','fr... | test/method-users-get-by-ids-tests.coffee | codedoctor/mongoose-user-store-multi-tenant | 4 | should = require 'should'
helper = require './support/helper'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
sampleUsers = null
describe 'WHEN working with store.users.getByIds', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.users
describe 'WHEN running against an empty database', ->
describe 'WHEN invoking getByIds', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getByIds [], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
describe 'WHEN running against a sample database', ->
it 'SETTING UP SAMPLE', (cb) ->
sampleUsers = helper.addSampleUsers cb
###
it "DUMP", (cb) ->
helper.dumpCollection('users') ->
cb()
###
describe 'WHEN invoking getByIds', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getByIds [], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
it 'WITH non existing object ids IT should return an empty list', (cb) ->
helper.store.users.getByIds sampleUsers.nonExistingUserIds(3), (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
it 'WITH partially non existing object ids IT should return an only the matches', (cb) ->
nonExisting = sampleUsers.nonExistingUserIds(3)
existing = sampleUsers.existingUserIds(3)
#helper.log _.union(nonExisting,existing)
helper.store.users.getByIds _.union(nonExisting,existing), (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
it 'WITH valid duplicates IT should only return one', (cb) ->
existing = sampleUsers.existingUserIds(3)
existing.push existing[0]
helper.store.users.getByIds existing, (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
it 'WITH valid object ids (not strings) IT should return those', (cb) ->
existing = sampleUsers.existingUserIds(3)
existing = _.map existing, (x) => new ObjectId(x)
helper.store.users.getByIds existing, (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
###
NOTE: WE NEED TO ADD THIS, but no time today.
it 'WITH invalid object ids it should return an argument error', (cb) ->
invalid = ['hallo','frank']
helper.store.users.getByIds invalid, (err,result) ->
should.exist err
# TODO: Ensure that this is the right kind of error
cb()
### | 110532 | should = require 'should'
helper = require './support/helper'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
sampleUsers = null
describe 'WHEN working with store.users.getByIds', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.users
describe 'WHEN running against an empty database', ->
describe 'WHEN invoking getByIds', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getByIds [], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
describe 'WHEN running against a sample database', ->
it 'SETTING UP SAMPLE', (cb) ->
sampleUsers = helper.addSampleUsers cb
###
it "DUMP", (cb) ->
helper.dumpCollection('users') ->
cb()
###
describe 'WHEN invoking getByIds', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getByIds [], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
it 'WITH non existing object ids IT should return an empty list', (cb) ->
helper.store.users.getByIds sampleUsers.nonExistingUserIds(3), (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
it 'WITH partially non existing object ids IT should return an only the matches', (cb) ->
nonExisting = sampleUsers.nonExistingUserIds(3)
existing = sampleUsers.existingUserIds(3)
#helper.log _.union(nonExisting,existing)
helper.store.users.getByIds _.union(nonExisting,existing), (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
it 'WITH valid duplicates IT should only return one', (cb) ->
existing = sampleUsers.existingUserIds(3)
existing.push existing[0]
helper.store.users.getByIds existing, (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
it 'WITH valid object ids (not strings) IT should return those', (cb) ->
existing = sampleUsers.existingUserIds(3)
existing = _.map existing, (x) => new ObjectId(x)
helper.store.users.getByIds existing, (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
###
NOTE: WE NEED TO ADD THIS, but no time today.
it 'WITH invalid object ids it should return an argument error', (cb) ->
invalid = ['<NAME>','<NAME>']
helper.store.users.getByIds invalid, (err,result) ->
should.exist err
# TODO: Ensure that this is the right kind of error
cb()
### | true | should = require 'should'
helper = require './support/helper'
_ = require 'underscore'
mongoose = require 'mongoose'
ObjectId = mongoose.Types.ObjectId
sampleUsers = null
describe 'WHEN working with store.users.getByIds', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.users
describe 'WHEN running against an empty database', ->
describe 'WHEN invoking getByIds', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getByIds [], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
describe 'WHEN running against a sample database', ->
it 'SETTING UP SAMPLE', (cb) ->
sampleUsers = helper.addSampleUsers cb
###
it "DUMP", (cb) ->
helper.dumpCollection('users') ->
cb()
###
describe 'WHEN invoking getByIds', ->
it 'WITH empty parameters IT should return an empty list', (cb) ->
helper.store.users.getByIds [], (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
it 'WITH non existing object ids IT should return an empty list', (cb) ->
helper.store.users.getByIds sampleUsers.nonExistingUserIds(3), (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 0
cb()
it 'WITH partially non existing object ids IT should return an only the matches', (cb) ->
nonExisting = sampleUsers.nonExistingUserIds(3)
existing = sampleUsers.existingUserIds(3)
#helper.log _.union(nonExisting,existing)
helper.store.users.getByIds _.union(nonExisting,existing), (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
it 'WITH valid duplicates IT should only return one', (cb) ->
existing = sampleUsers.existingUserIds(3)
existing.push existing[0]
helper.store.users.getByIds existing, (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
it 'WITH valid object ids (not strings) IT should return those', (cb) ->
existing = sampleUsers.existingUserIds(3)
existing = _.map existing, (x) => new ObjectId(x)
helper.store.users.getByIds existing, (err,result) ->
return cb err if err
should.exist.result
result.should.have.property "items"
result.items.should.have.lengthOf 3
cb()
###
NOTE: WE NEED TO ADD THIS, but no time today.
it 'WITH invalid object ids it should return an argument error', (cb) ->
invalid = ['PI:NAME:<NAME>END_PI','PI:NAME:<NAME>END_PI']
helper.store.users.getByIds invalid, (err,result) ->
should.exist err
# TODO: Ensure that this is the right kind of error
cb()
### |
[
{
"context": " 'entry'\n @sheet_id = sheet._id\n @key ?= rfc4122.v4()\n\n render: ->\n {\n project: @project",
"end": 278,
"score": 0.9810497164726257,
"start": 266,
"tag": "KEY",
"value": "rfc4122.v4()"
}
] | coffeescript/libs/services/entry-service.coffee | mechanoid/timerevel | 0 | angular.module('EntryService', ['DbService', 'uuid'])
.factory 'entries', ($q, db, rfc4122) ->
class Entry
constructor: (@date, sheet, @project = '', @begin, @end, @intermission, @notice, @tm) ->
@type = 'entry'
@sheet_id = sheet._id
@key ?= rfc4122.v4()
render: ->
{
project: @project
date: @date
type: @type
sheet_id: @sheet_id
key: @key
notice: @notice
begin: @begin
end: @end
tm: @tm
intermission: @intermission
}
# @TODO: add a entry list view
@setupViews: ->
# // document that tells PouchDB/CouchDB
# // to build up an index on doc.name
# var myIndex = {
# _id: '_design/my_index',
# views: {
# 'my_index': {
# map: function (doc) { emit(doc.name); }.toString()
# }
# }
# };
# // save it
# pouch.put(myIndex).then(function () {
# // success!
# }).catch(function (err) {
# // some error (maybe a 409, because it already exists?)
# });
@all: (sheet, cb) ->
deferred = $q.defer()
map = (doc) ->
if doc?.type is "entry"
# order by startdate
emit(doc)
sheetFilter = (err, response) ->
if err?
console.log err
deferred.reject([])
matches = _.filter response.rows, (row) ->
if row?.doc?.sheet_id is sheet._id
row.doc.date = new Date(row.doc.date) if row.doc.begin
row.doc.begin = new Date(row.doc.begin) if row.doc.begin
row.doc.end = new Date(row.doc.end) if row.doc.end
row
cb?(matches)
deferred.resolve(matches)
db.query({map: map}, {include_docs: true}, sheetFilter)
deferred.promise
@new: (sheet, date, project, begin, end, intermission, notice, tm) ->
e = new Entry(date, sheet, project, begin, end, intermission, notice, tm)
e.render()
@delete: (item) ->
db.get(item.key)
.then (entry) ->
db.remove(entry)
.then () ->
console.log "DELETED #{entry.key}"
.catch (error) ->
console.log error
.catch (error) ->
console.log error
@update: (item) ->
db.get(item.key)
.then (entry) ->
delete item._rev
delete item._id
for attr, _ of item
entry[attr] = item[attr]
db.put(entry, entry.key)
.then ->
console.log "updated"
.catch (error) ->
console.log error
.catch (error) ->
console.log "ERROR: ", error
if error.status is 404
db.put(item, item.key)
.then ->
console.log "created"
.catch (error) ->
console.log error
Entry
| 163414 | angular.module('EntryService', ['DbService', 'uuid'])
.factory 'entries', ($q, db, rfc4122) ->
class Entry
constructor: (@date, sheet, @project = '', @begin, @end, @intermission, @notice, @tm) ->
@type = 'entry'
@sheet_id = sheet._id
@key ?= <KEY>
render: ->
{
project: @project
date: @date
type: @type
sheet_id: @sheet_id
key: @key
notice: @notice
begin: @begin
end: @end
tm: @tm
intermission: @intermission
}
# @TODO: add a entry list view
@setupViews: ->
# // document that tells PouchDB/CouchDB
# // to build up an index on doc.name
# var myIndex = {
# _id: '_design/my_index',
# views: {
# 'my_index': {
# map: function (doc) { emit(doc.name); }.toString()
# }
# }
# };
# // save it
# pouch.put(myIndex).then(function () {
# // success!
# }).catch(function (err) {
# // some error (maybe a 409, because it already exists?)
# });
@all: (sheet, cb) ->
deferred = $q.defer()
map = (doc) ->
if doc?.type is "entry"
# order by startdate
emit(doc)
sheetFilter = (err, response) ->
if err?
console.log err
deferred.reject([])
matches = _.filter response.rows, (row) ->
if row?.doc?.sheet_id is sheet._id
row.doc.date = new Date(row.doc.date) if row.doc.begin
row.doc.begin = new Date(row.doc.begin) if row.doc.begin
row.doc.end = new Date(row.doc.end) if row.doc.end
row
cb?(matches)
deferred.resolve(matches)
db.query({map: map}, {include_docs: true}, sheetFilter)
deferred.promise
@new: (sheet, date, project, begin, end, intermission, notice, tm) ->
e = new Entry(date, sheet, project, begin, end, intermission, notice, tm)
e.render()
@delete: (item) ->
db.get(item.key)
.then (entry) ->
db.remove(entry)
.then () ->
console.log "DELETED #{entry.key}"
.catch (error) ->
console.log error
.catch (error) ->
console.log error
@update: (item) ->
db.get(item.key)
.then (entry) ->
delete item._rev
delete item._id
for attr, _ of item
entry[attr] = item[attr]
db.put(entry, entry.key)
.then ->
console.log "updated"
.catch (error) ->
console.log error
.catch (error) ->
console.log "ERROR: ", error
if error.status is 404
db.put(item, item.key)
.then ->
console.log "created"
.catch (error) ->
console.log error
Entry
| true | angular.module('EntryService', ['DbService', 'uuid'])
.factory 'entries', ($q, db, rfc4122) ->
class Entry
constructor: (@date, sheet, @project = '', @begin, @end, @intermission, @notice, @tm) ->
@type = 'entry'
@sheet_id = sheet._id
@key ?= PI:KEY:<KEY>END_PI
render: ->
{
project: @project
date: @date
type: @type
sheet_id: @sheet_id
key: @key
notice: @notice
begin: @begin
end: @end
tm: @tm
intermission: @intermission
}
# @TODO: add a entry list view
@setupViews: ->
# // document that tells PouchDB/CouchDB
# // to build up an index on doc.name
# var myIndex = {
# _id: '_design/my_index',
# views: {
# 'my_index': {
# map: function (doc) { emit(doc.name); }.toString()
# }
# }
# };
# // save it
# pouch.put(myIndex).then(function () {
# // success!
# }).catch(function (err) {
# // some error (maybe a 409, because it already exists?)
# });
@all: (sheet, cb) ->
deferred = $q.defer()
map = (doc) ->
if doc?.type is "entry"
# order by startdate
emit(doc)
sheetFilter = (err, response) ->
if err?
console.log err
deferred.reject([])
matches = _.filter response.rows, (row) ->
if row?.doc?.sheet_id is sheet._id
row.doc.date = new Date(row.doc.date) if row.doc.begin
row.doc.begin = new Date(row.doc.begin) if row.doc.begin
row.doc.end = new Date(row.doc.end) if row.doc.end
row
cb?(matches)
deferred.resolve(matches)
db.query({map: map}, {include_docs: true}, sheetFilter)
deferred.promise
@new: (sheet, date, project, begin, end, intermission, notice, tm) ->
e = new Entry(date, sheet, project, begin, end, intermission, notice, tm)
e.render()
@delete: (item) ->
db.get(item.key)
.then (entry) ->
db.remove(entry)
.then () ->
console.log "DELETED #{entry.key}"
.catch (error) ->
console.log error
.catch (error) ->
console.log error
@update: (item) ->
db.get(item.key)
.then (entry) ->
delete item._rev
delete item._id
for attr, _ of item
entry[attr] = item[attr]
db.put(entry, entry.key)
.then ->
console.log "updated"
.catch (error) ->
console.log error
.catch (error) ->
console.log "ERROR: ", error
if error.status is 404
db.put(item, item.key)
.then ->
console.log "created"
.catch (error) ->
console.log error
Entry
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9970431327819824,
"start": 12,
"tag": "NAME",
"value": "Joyent"
},
{
"context": "T_CASES = [\n {\n algo: \"aes-128-gcm\"\n key: \"6970787039613669314d623455536234\"\n ... | test/simple/test-crypto-authenticated.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
try
crypto = require("crypto")
catch e
console.log "Not compiled with OPENSSL support."
process.exit()
crypto.DEFAULT_ENCODING = "buffer"
#
# Test authenticated encryption modes.
#
# !NEVER USE STATIC IVs IN REAL LIFE!
#
TEST_CASES = [
{
algo: "aes-128-gcm"
key: "6970787039613669314d623455536234"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13896F64DFA2C2D0F2C76"
tag: "272B422F62EB545EAA15B5FF84092447"
tampered: false
}
{
algo: "aes-128-gcm"
key: "6970787039613669314d623455536234"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13896F64DFA2C2D0F2C76"
aad: "000000FF"
tag: "BA2479F66275665A88CB7B15F43EB005"
tampered: false
}
{
algo: "aes-128-gcm"
key: "6970787039613669314d623455536234"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13596F64DFA2C2D0FAC76"
tag: "272B422F62EB545EAA15B5FF84092447"
tampered: true
}
{
algo: "aes-256-gcm"
key: "337a54767a7233703637564336316a6d56353472495975313534357834546c59"
iv: "36306950306836764a6f4561"
plain: "Hello node.js world!"
ct: "58E62CFE7B1D274111A82267EBB93866E72B6C2A"
tag: "9BB44F663BADABACAE9720881FB1EC7A"
tampered: false
}
{
algo: "aes-256-gcm"
key: "337a54767a7233703637564336316a6d56353472495975313534357834546c59"
iv: "36306950306836764a6f4561"
plain: "Hello node.js world!"
ct: "58E62CFF7B1D274011A82267EBB93866E72B6C2B"
tag: "9BB44F663BADABACAE9720881FB1EC7A"
tampered: true
}
{
algo: "aes-192-gcm"
key: "1ed2233fa2223ef5d7df08546049406c7305220bca40d4c9"
iv: "0e1791e9db3bd21a9122c416"
plain: "Hello node.js world!"
password: "very bad password"
aad: "63616c76696e"
ct: "DDA53A4059AA17B88756984995F7BBA3C636CC44"
tag: "D2A35E5C611E5E3D2258360241C5B045"
tampered: false
}
]
ciphers = crypto.getCiphers()
for i of TEST_CASES
test = TEST_CASES[i]
if ciphers.indexOf(test.algo) is -1
console.log "skipping unsupported " + test.algo + " test"
continue
(->
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
encrypt.setAAD new Buffer(test.aad, "hex") if test.aad
hex = encrypt.update(test.plain, "ascii", "hex")
hex += encrypt.final("hex")
auth_tag = encrypt.getAuthTag()
# only test basic encryption run if output is marked as tampered.
unless test.tampered
assert.equal hex.toUpperCase(), test.ct
assert.equal auth_tag.toString("hex").toUpperCase(), test.tag
return
)()
(->
decrypt = crypto.createDecipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
decrypt.setAuthTag new Buffer(test.tag, "hex")
decrypt.setAAD new Buffer(test.aad, "hex") if test.aad
msg = decrypt.update(test.ct, "hex", "ascii")
unless test.tampered
msg += decrypt.final("ascii")
assert.equal msg, test.plain
else
# assert that final throws if input data could not be verified!
assert.throws (->
decrypt.final "ascii"
return
), RegExp(" auth")
return
)()
(->
return unless test.password
encrypt = crypto.createCipher(test.algo, test.password)
encrypt.setAAD new Buffer(test.aad, "hex") if test.aad
hex = encrypt.update(test.plain, "ascii", "hex")
hex += encrypt.final("hex")
auth_tag = encrypt.getAuthTag()
# only test basic encryption run if output is marked as tampered.
unless test.tampered
assert.equal hex.toUpperCase(), test.ct
assert.equal auth_tag.toString("hex").toUpperCase(), test.tag
return
)()
(->
return unless test.password
decrypt = crypto.createDecipher(test.algo, test.password)
decrypt.setAuthTag new Buffer(test.tag, "hex")
decrypt.setAAD new Buffer(test.aad, "hex") if test.aad
msg = decrypt.update(test.ct, "hex", "ascii")
unless test.tampered
msg += decrypt.final("ascii")
assert.equal msg, test.plain
else
# assert that final throws if input data could not be verified!
assert.throws (->
decrypt.final "ascii"
return
), RegExp(" auth")
return
)()
# after normal operation, test some incorrect ways of calling the API:
# it's most certainly enough to run these tests with one algorithm only.
continue if i > 0
(->
# non-authenticating mode:
encrypt = crypto.createCipheriv("aes-128-cbc", "ipxp9a6i1Mb4USb4", "6fKjEjR3Vl30EUYC")
encrypt.update "blah", "ascii"
encrypt.final()
assert.throws (->
encrypt.getAuthTag()
return
), RegExp(" state")
assert.throws (->
encrypt.setAAD new Buffer("123", "ascii")
return
), RegExp(" state")
return
)()
(->
# trying to get tag before inputting all data:
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
encrypt.update "blah", "ascii"
assert.throws (->
encrypt.getAuthTag()
return
), RegExp(" state")
return
)()
(->
# trying to set tag on encryption object:
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
assert.throws (->
encrypt.setAuthTag new Buffer(test.tag, "hex")
return
), RegExp(" state")
return
)()
(->
# trying to read tag from decryption object:
decrypt = crypto.createDecipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
assert.throws (->
decrypt.getAuthTag()
return
), RegExp(" state")
return
)()
| 125333 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
try
crypto = require("crypto")
catch e
console.log "Not compiled with OPENSSL support."
process.exit()
crypto.DEFAULT_ENCODING = "buffer"
#
# Test authenticated encryption modes.
#
# !NEVER USE STATIC IVs IN REAL LIFE!
#
TEST_CASES = [
{
algo: "aes-128-gcm"
key: "<KEY>"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4<KEY>13896F64DFA2C2D0<KEY>2C76"
tag: "272B422F62EB545EAA15B5FF84092447"
tampered: false
}
{
algo: "aes-128-gcm"
key: "<KEY>87039613669314d623455536234"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13896F64DFA2C2D0F2C76"
aad: "000000FF"
tag: "BA2479F66275665A88CB7B15F43EB005"
tampered: false
}
{
algo: "aes-128-gcm"
key: "<KEY>"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13596F64DFA2C2D0FAC76"
tag: "272B422F62EB545EAA15B5FF84092447"
tampered: true
}
{
algo: "aes-256-gcm"
key: "<KEY>"
iv: "36306950306836764a6f4561"
plain: "Hello node.js world!"
ct: "58E62CFE7B1D274111A82267EBB93866E72B6C2A"
tag: "9BB44F663BADABACAE9720881FB1EC7A"
tampered: false
}
{
algo: "aes-256-gcm"
key: "<KEY>"
iv: "36306950306836764a6f4561"
plain: "Hello node.js world!"
ct: "58E62CFF7B1D274011A82267EBB93866E72B6C2B"
tag: "9BB44F663BADABACAE9720881FB1EC7A"
tampered: true
}
{
algo: "aes-192-gcm"
key: "<KEY>"
iv: "0e1791e9db3bd21a9122c416"
plain: "Hello node.js world!"
password: "<PASSWORD>"
aad: "63616c76696e"
ct: "DDA53A4059AA17B88756984995F7BBA3C636CC44"
tag: "D2A35E5C611E5E3D2258360241C5B045"
tampered: false
}
]
ciphers = crypto.getCiphers()
for i of TEST_CASES
test = TEST_CASES[i]
if ciphers.indexOf(test.algo) is -1
console.log "skipping unsupported " + test.algo + " test"
continue
(->
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
encrypt.setAAD new Buffer(test.aad, "hex") if test.aad
hex = encrypt.update(test.plain, "ascii", "hex")
hex += encrypt.final("hex")
auth_tag = encrypt.getAuthTag()
# only test basic encryption run if output is marked as tampered.
unless test.tampered
assert.equal hex.toUpperCase(), test.ct
assert.equal auth_tag.toString("hex").toUpperCase(), test.tag
return
)()
(->
decrypt = crypto.createDecipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
decrypt.setAuthTag new Buffer(test.tag, "hex")
decrypt.setAAD new Buffer(test.aad, "hex") if test.aad
msg = decrypt.update(test.ct, "hex", "ascii")
unless test.tampered
msg += decrypt.final("ascii")
assert.equal msg, test.plain
else
# assert that final throws if input data could not be verified!
assert.throws (->
decrypt.final "ascii"
return
), RegExp(" auth")
return
)()
(->
return unless test.password
encrypt = crypto.createCipher(test.algo, test.password)
encrypt.setAAD new Buffer(test.aad, "hex") if test.aad
hex = encrypt.update(test.plain, "ascii", "hex")
hex += encrypt.final("hex")
auth_tag = encrypt.getAuthTag()
# only test basic encryption run if output is marked as tampered.
unless test.tampered
assert.equal hex.toUpperCase(), test.ct
assert.equal auth_tag.toString("hex").toUpperCase(), test.tag
return
)()
(->
return unless test.password
decrypt = crypto.createDecipher(test.algo, test.password)
decrypt.setAuthTag new Buffer(test.tag, "hex")
decrypt.setAAD new Buffer(test.aad, "hex") if test.aad
msg = decrypt.update(test.ct, "hex", "ascii")
unless test.tampered
msg += decrypt.final("ascii")
assert.equal msg, test.plain
else
# assert that final throws if input data could not be verified!
assert.throws (->
decrypt.final "ascii"
return
), RegExp(" auth")
return
)()
# after normal operation, test some incorrect ways of calling the API:
# it's most certainly enough to run these tests with one algorithm only.
continue if i > 0
(->
# non-authenticating mode:
encrypt = crypto.createCipheriv("aes-128-cbc", "ipxp9a6i1Mb4USb4", "6fKjEjR3Vl30EUYC")
encrypt.update "blah", "ascii"
encrypt.final()
assert.throws (->
encrypt.getAuthTag()
return
), RegExp(" state")
assert.throws (->
encrypt.setAAD new Buffer("123", "ascii")
return
), RegExp(" state")
return
)()
(->
# trying to get tag before inputting all data:
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
encrypt.update "blah", "ascii"
assert.throws (->
encrypt.getAuthTag()
return
), RegExp(" state")
return
)()
(->
# trying to set tag on encryption object:
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
assert.throws (->
encrypt.setAuthTag new Buffer(test.tag, "hex")
return
), RegExp(" state")
return
)()
(->
# trying to read tag from decryption object:
decrypt = crypto.createDecipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
assert.throws (->
decrypt.getAuthTag()
return
), RegExp(" state")
return
)()
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
try
crypto = require("crypto")
catch e
console.log "Not compiled with OPENSSL support."
process.exit()
crypto.DEFAULT_ENCODING = "buffer"
#
# Test authenticated encryption modes.
#
# !NEVER USE STATIC IVs IN REAL LIFE!
#
TEST_CASES = [
{
algo: "aes-128-gcm"
key: "PI:KEY:<KEY>END_PI"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4PI:KEY:<KEY>END_PI13896F64DFA2C2D0PI:KEY:<KEY>END_PI2C76"
tag: "272B422F62EB545EAA15B5FF84092447"
tampered: false
}
{
algo: "aes-128-gcm"
key: "PI:KEY:<KEY>END_PI87039613669314d623455536234"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13896F64DFA2C2D0F2C76"
aad: "000000FF"
tag: "BA2479F66275665A88CB7B15F43EB005"
tampered: false
}
{
algo: "aes-128-gcm"
key: "PI:KEY:<KEY>END_PI"
iv: "583673497131313748307652"
plain: "Hello World!"
ct: "4BE13596F64DFA2C2D0FAC76"
tag: "272B422F62EB545EAA15B5FF84092447"
tampered: true
}
{
algo: "aes-256-gcm"
key: "PI:KEY:<KEY>END_PI"
iv: "36306950306836764a6f4561"
plain: "Hello node.js world!"
ct: "58E62CFE7B1D274111A82267EBB93866E72B6C2A"
tag: "9BB44F663BADABACAE9720881FB1EC7A"
tampered: false
}
{
algo: "aes-256-gcm"
key: "PI:KEY:<KEY>END_PI"
iv: "36306950306836764a6f4561"
plain: "Hello node.js world!"
ct: "58E62CFF7B1D274011A82267EBB93866E72B6C2B"
tag: "9BB44F663BADABACAE9720881FB1EC7A"
tampered: true
}
{
algo: "aes-192-gcm"
key: "PI:KEY:<KEY>END_PI"
iv: "0e1791e9db3bd21a9122c416"
plain: "Hello node.js world!"
password: "PI:PASSWORD:<PASSWORD>END_PI"
aad: "63616c76696e"
ct: "DDA53A4059AA17B88756984995F7BBA3C636CC44"
tag: "D2A35E5C611E5E3D2258360241C5B045"
tampered: false
}
]
ciphers = crypto.getCiphers()
for i of TEST_CASES
test = TEST_CASES[i]
if ciphers.indexOf(test.algo) is -1
console.log "skipping unsupported " + test.algo + " test"
continue
(->
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
encrypt.setAAD new Buffer(test.aad, "hex") if test.aad
hex = encrypt.update(test.plain, "ascii", "hex")
hex += encrypt.final("hex")
auth_tag = encrypt.getAuthTag()
# only test basic encryption run if output is marked as tampered.
unless test.tampered
assert.equal hex.toUpperCase(), test.ct
assert.equal auth_tag.toString("hex").toUpperCase(), test.tag
return
)()
(->
decrypt = crypto.createDecipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
decrypt.setAuthTag new Buffer(test.tag, "hex")
decrypt.setAAD new Buffer(test.aad, "hex") if test.aad
msg = decrypt.update(test.ct, "hex", "ascii")
unless test.tampered
msg += decrypt.final("ascii")
assert.equal msg, test.plain
else
# assert that final throws if input data could not be verified!
assert.throws (->
decrypt.final "ascii"
return
), RegExp(" auth")
return
)()
(->
return unless test.password
encrypt = crypto.createCipher(test.algo, test.password)
encrypt.setAAD new Buffer(test.aad, "hex") if test.aad
hex = encrypt.update(test.plain, "ascii", "hex")
hex += encrypt.final("hex")
auth_tag = encrypt.getAuthTag()
# only test basic encryption run if output is marked as tampered.
unless test.tampered
assert.equal hex.toUpperCase(), test.ct
assert.equal auth_tag.toString("hex").toUpperCase(), test.tag
return
)()
(->
return unless test.password
decrypt = crypto.createDecipher(test.algo, test.password)
decrypt.setAuthTag new Buffer(test.tag, "hex")
decrypt.setAAD new Buffer(test.aad, "hex") if test.aad
msg = decrypt.update(test.ct, "hex", "ascii")
unless test.tampered
msg += decrypt.final("ascii")
assert.equal msg, test.plain
else
# assert that final throws if input data could not be verified!
assert.throws (->
decrypt.final "ascii"
return
), RegExp(" auth")
return
)()
# after normal operation, test some incorrect ways of calling the API:
# it's most certainly enough to run these tests with one algorithm only.
continue if i > 0
(->
# non-authenticating mode:
encrypt = crypto.createCipheriv("aes-128-cbc", "ipxp9a6i1Mb4USb4", "6fKjEjR3Vl30EUYC")
encrypt.update "blah", "ascii"
encrypt.final()
assert.throws (->
encrypt.getAuthTag()
return
), RegExp(" state")
assert.throws (->
encrypt.setAAD new Buffer("123", "ascii")
return
), RegExp(" state")
return
)()
(->
# trying to get tag before inputting all data:
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
encrypt.update "blah", "ascii"
assert.throws (->
encrypt.getAuthTag()
return
), RegExp(" state")
return
)()
(->
# trying to set tag on encryption object:
encrypt = crypto.createCipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
assert.throws (->
encrypt.setAuthTag new Buffer(test.tag, "hex")
return
), RegExp(" state")
return
)()
(->
# trying to read tag from decryption object:
decrypt = crypto.createDecipheriv(test.algo, new Buffer(test.key, "hex"), new Buffer(test.iv, "hex"))
assert.throws (->
decrypt.getAuthTag()
return
), RegExp(" state")
return
)()
|
[
{
"context": " r2._rChunkLength emitDuration:1, streamKey:\"testing\"\n\n rewind.hls_segmenter.snapshot (err,",
"end": 15658,
"score": 0.712083101272583,
"start": 15651,
"tag": "KEY",
"value": "testing"
}
] | test/hls_segmenter.coffee | firebrandv2/FirebrandNetwork.ga | 342 | RewindBuffer = $src "rewind_buffer"
HLSSegmenter = $src "rewind/hls_segmenter"
Logger = $src "logger"
ChunkGenerator = $src "util/chunk_generator"
debug = require("debug")("sm:tests:hls_segmenter")
_ = require "underscore"
#----------
class FakeStreamGroup extends require("events").EventEmitter
constructor: (rewinds) ->
@updates = []
@hls_min_id = null
for r in rewinds
r.hls_segmenter.syncToGroup @
hlsUpdateMinSegment: (id) ->
if !@hls_min_id || id > @hls_min_id
@updates.push id
prev = @hls_min_id
@hls_min_id = id
@emit "hls_update_min_segment", id
#----------
describe "HTTP Live Streaming Segmenter", ->
rewind = null
chunk_duration = 1000
segment_duration = 10000
start_ts = new Date()
# to make life easy to reason about, we'll put start_ts on a segment start.
start_ts = new Date( Math.round(start_ts / segment_duration) * segment_duration )
console.log "Start ts is ", start_ts
#----------
# The chunk injector takes a stream of audio chunks going forward and/or
# backward from a common starting point, emitting segments each time a
# segment length boundary is crossed. These semi-baked segments will have
# start and end timestamps and a `buffers` array that contains the matching
# chunks. It will reject pushes that aren't at the edges of the segment list.
describe "Chunk Injector", ->
injector = null
generator = null
beforeEach (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
done()
afterEach (done) ->
generator.unpipe()
injector.removeAllListeners()
done()
it "accepts forward chunks and produces segments", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments.length).to.be.eql 3
expect( Number(segments[1].ts) - Number(segments[0].ts) ).to.eql segment_duration
done()
# producing 31 seconds of audio should give us 3 pushed segments
generator.forward 31, ->
generator.end()
it "doesn't emit a segment if not given enough data", (done) ->
injector.once "segment", ->
throw new Error "No segment was supposed to be created."
injector.once "finish", ->
done()
# since we emit when _segments.length > 2, 19 seconds can't produce
# an emitted segment
generator.forward 19, -> generator.end()
it "emits segments when given backward data", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments.length).to.be.eql 3
expect( Number(segments[0].ts) - Number(segments[1].ts) ).to.eql segment_duration
done()
generator.backward 31, ->
generator.end()
it "emits mixed segments when given mixed data", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments).to.have.length.within 6,7
# sort by ts
segments = _(segments).sortBy (s) -> Number(s.ts)
# five segments should cover 40 seconds going by timestamp
expect( Number(segments[4].ts) - Number(segments[0].ts) ).to.eql 4 * segment_duration
done()
af = _.after 2, -> generator.end()
generator.forward 30, -> af()
generator.backward 30, -> af()
#----------
# The Finalizer sits after the Injector. It takes the half-baked segments
# that the Injector emits and gives them sequence IDs. These can be from a
# loaded sequence map (to reload data on startup, for instance), or they
# can be generated sequence numbers going forward. The Finalizer is also
# in charge of spotting gaps in the segment array, inserting discontinuity
# objects where appropriate and keeping track of the discontinuity sequence.
describe "Segment Finalizer", ->
generator = null
injector = null
beforeEach (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
done()
afterEach (done) ->
generator.unpipe()
injector.unpipe()
done()
describe "assigning sequenced IDs to new segments", ->
finalizer = null
it "generates segments", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
done()
generator.forward 31, -> generator.end()
it "assigned the right ID sequence", (done) ->
# even though three segments worth of data gets pushed into
# the injector, it currently doesn't know how to trigger the
# emit of its first segment into the finalizer.
expect( finalizer.segmentSeq ).to.be.eql 2
expect( finalizer.discontinuitySeq ).to.eql 0
expect( finalizer.segments ).to.have.length 2
expect( finalizer.segments[0].id ).to.eql 0
expect( finalizer.segments[1].id ).to.eql 1
done()
it "assigned valid PTS values", (done) ->
seg_pts_units = segment_duration * 90
expect( finalizer.segmentPTS ).to.be.closeTo seg_pts_units*2, 100
expect( finalizer.segments[0].pts ).to.be.eql 0
expect( finalizer.segments[1].pts ).to.be.closeTo seg_pts_units*1, 100
done()
it "creates a discontinuity when given a gap", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
expect( finalizer.segmentSeq ).to.be.eql 3
expect( finalizer.discontinuitySeq ).to.eql 1
expect( finalizer.segments[ 0 ].discontinuitySeq ).to.eql 0
expect( finalizer.segments[ finalizer.segments.length - 1 ].discontinuitySeq ).to.eql 1
done()
generator.forward 30, -> generator.skip_forward 15, ->
generator.forward 11, -> generator.end()
it "will use a segment map to assign sequence numbers", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [0..2]
seg_map[ Number(f_seg.ts) + i*segment_duration ] = seq + i
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length.within 4,5
if finalizer.segments.length == 4
expect(finalizer.segments[ finalizer.segments.length - 4 ].id).to.eql 5
expect(finalizer.segments[ finalizer.segments.length - 3 ].id).to.eql 6
expect(finalizer.segments[ finalizer.segments.length - 2 ].id).to.eql 7
expect(finalizer.segments[ finalizer.segments.length - 1 ].id).to.eql 8
done()
generator.forward 40, -> generator.end()
it "will use a segment map to assign sequence numbers to back data", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [3..1]
seg_map[ Number(f_seg.ts) - i*segment_duration ] = seq
seq += 1
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 4
for seg in finalizer.segments
if seg.id < 8
map_id = seg_map[ Number( seg.ts ) ]
expect(map_id).to.not.be.undefined
expect(seg.id).to.eql map_id
else
expect(seg.id).to.eql 8
done()
af = _.after 2, ->
generator.end()
generator.forward 10, af
generator.backward 30, af
it "will not publish a segment that is not in the segment map", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [3..1]
seg_map[ Number(f_seg.ts) - i*segment_duration ] = seq
seq += 1
for m in [[1,7],[3,6]]
seg_map[ Number(f_seg.ts) - m[0]*segment_duration ] = m[1]
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 3
for seg in finalizer.segments
map_id = seg_map[ Number( seg.ts ) ]
expect(map_id).to.not.be.undefined
expect(seg.id).to.eql map_id
done()
generator.backward 40, -> generator.end()
it "will correctly number discontinuities in back data", (done) ->
f_seg = injector._createSegment start_ts
seg_map = {}
for m in [[1,7],[2,6],[4,5]]
seg_map[ Number(f_seg.ts) - m[0]*segment_duration ] = m[1]
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 4
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 3
expect(finalizer.segments[0].discontinuitySeq).to.eql 3
done()
generator.backward 20, -> generator.skip_backward 10,
-> generator.backward 10, -> generator.end()
it "can dump map info", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
finalizer.dumpMap (err,map) ->
expect( map ).to.have.property "segmentMap"
expect( map ).to.have.property "segmentSeq"
expect( map ).to.have.property "discontinuitySeq"
expect( map ).to.have.property "nextSegment"
done()
generator.forward 41, -> generator.end()
it "can dump a snapshot", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
finalizer.snapshot (err,snapshot) ->
expect( snapshot ).to.be.instanceof Array
expect( snapshot ).to.have.length 3
for s,i in snapshot
expect(s.discontinuitySeq).to.eql 0
expect(s.id).to.eql i
done()
generator.forward 41, -> generator.end()
it "can expire segments using the expire function", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.once "finish", ->
expect(finalizer.segments.length).to.eql 5
done()
generator.forward 120, ->
exp_ts = new Date( Number(start_ts) + 65*1000 )
process.nextTick ->
expect(Number(finalizer.segments[0].ts)).to.be.lt Number(exp_ts)
finalizer.expire exp_ts, (err,min_id) ->
# we expect our new minimum segment to have a start_ts
# greater than the time we expired
expect(Number(finalizer.segments[0].ts)).to.be.gt Number(exp_ts)
generator.end()
#----------
# now put it all together. This time, create an HLSSegmenter and a
# RewindBuffer and feed audio through its normal course.
describe "RewindBuffer -> Segmenter", ->
rewind = null
generator = null
r2 = null
before (done) ->
rewind = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger stdout:false)
rewind._rChunkLength emitDuration:1, streamKey:"testing"
generator = new ChunkGenerator start_ts, 1000
generator.on "readable", ->
while c = generator.read()
rewind._insertBuffer c
done()
it "creates the HLS Segmenter", (done) ->
expect(rewind.hls_segmenter).to.be.an.instanceof HLSSegmenter
done()
it "segments source data", (done) ->
injector_pushes = 0
rewind.hls_segmenter.injector.on "push", ->
injector_pushes += 1
rewind.hls_segmenter.once "_finalizer", ->
setTimeout ->
expect(injector_pushes).to.be.within 4,5
expect(rewind.hls_segmenter.finalizer.segments).to.have.length.within 4,5
done()
, 200
generator.forward 60
it "expires segments when the RewindBuffer fills", (done) ->
rewind.hls_segmenter.once "snapshot", (snap) ->
expect(rewind.bufferedSecs()).to.eql 120
# we're sending in 121 seconds, so there will be one second in
# a not-yet-created segment, and our first segment will have lost
# its first segment and been expired. That leaves 11 active.
expect(snap.segments).to.have.length 11
done()
generator.forward 121
it "loads segment data from a RewindBuffer dump", (done) ->
pt = new require("stream").PassThrough()
r2 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {stdout:false})
r2._rChunkLength emitDuration:1, streamKey:"testing"
rewind.hls_segmenter.snapshot (err,snap1) ->
throw err if err
debug "snap1 has #{snap1.segments.length} segments."
debug "snap1 first segment is #{snap1.segments[0].id}.", snap1.segments[0]
debug "snap1 last segment is #{snap1.segments[snap1.segments.length-1].id}.", snap1.segments[snap1.segments.length-1]
r2.loadBuffer pt, (err,stats) ->
throw err if err
# -- r2 loaded -- #
r2.hls_segmenter.snapshot (err,snap2) ->
throw err if err
debug "snap2 has #{snap2.segments.length} segments."
debug "snap2 first segment is #{snap2.segments[0].id}.", snap2.segments[0]
debug "snap2 last segment is #{snap2.segments[snap2.segments.length-1].id}.", snap2.segments[snap2.segments.length-1]
expect(snap2.segments).to.have.length snap1.segments.length
done()
rewind.dumpBuffer (err,writer) ->
throw err if err
writer.pipe(pt)
it "receives new data cleanly after loading from a RewindBuffer dump", (done) ->
throw new Error "Requires r2 to be populated." if !r2
g2 = new ChunkGenerator generator.ts().forward, 1000
g2.on "readable", ->
while c = g2.read()
r2._insertBuffer c
r2.hls_segmenter.once "snapshot", (snap)->
debug "g2 snap is ", snap
# we're checking to make sure that our snapshot numbering is intact,
# as is our timestamp sequence
last = null
for s in snap.segments
if last
expect(s.id).to.eql last.id + 1
expect(s.ts).to.eql last.end_ts
last = s
done()
g2.forward 10
#----------
describe "Stream Group Coordination", ->
r1 = null
r2 = null
sg = null
g1 = null
g2 = null
before (done) ->
r1 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {})
r1._rChunkLength emitDuration:0.5, streamKey:"testing"
r1.loadBuffer null, (err,stats) ->
r2 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {})
r2._rChunkLength emitDuration:0.5, streamKey:"testing"
r2.loadBuffer null, (err,stats) ->
sg = new FakeStreamGroup [r1,r2]
d = new Date()
g1 = new ChunkGenerator d, 1000
g2 = new ChunkGenerator d, 1000
g1.on "readable", ->
r1._insertBuffer c while c = g1.read()
g2.on "readable", ->
r2._insertBuffer c while c = g2.read()
done()
it "should trigger updates to stream group min segment TS", (done) ->
this.timeout 4000
# stream all f_chunks into r1, but skip some for r2
g1.forward 120
g2.skip_forward 30, -> g2.forward 90
af = _.after 2, ->
expect(sg.updates.length).to.eql 2
done()
r1.hls_segmenter.once "snapshot", af
r2.hls_segmenter.once "snapshot", af
it "both RewindBuffers should have the correct first segment", (done) ->
expect(Number(r1._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
expect(Number(r2._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
done()
it "should stay correct when data is expired unevenly", (done) ->
this.timeout 5000
r1.setRewind(30,30)
af = _.after 2, ->
expect(Number(r1._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
expect(Number(r2._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
done()
r1.hls_segmenter.once "snapshot", af
r2.hls_segmenter.once "snapshot", af
#----------
describe "Segment PTS values", ->
generator = null
injector = null
finalizer = null
before (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
done()
after (done) ->
generator.unpipe()
injector.unpipe()
done()
it "generates correct PTS at 24 hours", (done) ->
generator.forward (86400 * 1000 / chunk_duration), ->
# PTS will have started at 0 since we didn't send in a map, so a
# segment's PTS should be seg.id * segment_duration * 90
last_seg = finalizer.segments[finalizer.segments.length-1]
expect(last_seg.pts).to.eql last_seg.id * segment_duration * 90
done()
it "generates correct PTS at 48 hours", (done) ->
generator.forward (86400 * 1000 / chunk_duration), ->
# PTS is a 33-bit integer, so it will roll over after 26.5 hours
# when it hits Math.pow(2,33) - 1
last_seg = finalizer.segments[finalizer.segments.length-1]
# we can't test actaul equality here because javascript doesn't believe in ints
expect(last_seg.pts).to.be.closeTo ((last_seg.id * segment_duration * 90) - Math.pow(2,33) - 1), 10
done()
| 155212 | RewindBuffer = $src "rewind_buffer"
HLSSegmenter = $src "rewind/hls_segmenter"
Logger = $src "logger"
ChunkGenerator = $src "util/chunk_generator"
debug = require("debug")("sm:tests:hls_segmenter")
_ = require "underscore"
#----------
class FakeStreamGroup extends require("events").EventEmitter
constructor: (rewinds) ->
@updates = []
@hls_min_id = null
for r in rewinds
r.hls_segmenter.syncToGroup @
hlsUpdateMinSegment: (id) ->
if !@hls_min_id || id > @hls_min_id
@updates.push id
prev = @hls_min_id
@hls_min_id = id
@emit "hls_update_min_segment", id
#----------
describe "HTTP Live Streaming Segmenter", ->
rewind = null
chunk_duration = 1000
segment_duration = 10000
start_ts = new Date()
# to make life easy to reason about, we'll put start_ts on a segment start.
start_ts = new Date( Math.round(start_ts / segment_duration) * segment_duration )
console.log "Start ts is ", start_ts
#----------
# The chunk injector takes a stream of audio chunks going forward and/or
# backward from a common starting point, emitting segments each time a
# segment length boundary is crossed. These semi-baked segments will have
# start and end timestamps and a `buffers` array that contains the matching
# chunks. It will reject pushes that aren't at the edges of the segment list.
describe "Chunk Injector", ->
injector = null
generator = null
beforeEach (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
done()
afterEach (done) ->
generator.unpipe()
injector.removeAllListeners()
done()
it "accepts forward chunks and produces segments", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments.length).to.be.eql 3
expect( Number(segments[1].ts) - Number(segments[0].ts) ).to.eql segment_duration
done()
# producing 31 seconds of audio should give us 3 pushed segments
generator.forward 31, ->
generator.end()
it "doesn't emit a segment if not given enough data", (done) ->
injector.once "segment", ->
throw new Error "No segment was supposed to be created."
injector.once "finish", ->
done()
# since we emit when _segments.length > 2, 19 seconds can't produce
# an emitted segment
generator.forward 19, -> generator.end()
it "emits segments when given backward data", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments.length).to.be.eql 3
expect( Number(segments[0].ts) - Number(segments[1].ts) ).to.eql segment_duration
done()
generator.backward 31, ->
generator.end()
it "emits mixed segments when given mixed data", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments).to.have.length.within 6,7
# sort by ts
segments = _(segments).sortBy (s) -> Number(s.ts)
# five segments should cover 40 seconds going by timestamp
expect( Number(segments[4].ts) - Number(segments[0].ts) ).to.eql 4 * segment_duration
done()
af = _.after 2, -> generator.end()
generator.forward 30, -> af()
generator.backward 30, -> af()
#----------
# The Finalizer sits after the Injector. It takes the half-baked segments
# that the Injector emits and gives them sequence IDs. These can be from a
# loaded sequence map (to reload data on startup, for instance), or they
# can be generated sequence numbers going forward. The Finalizer is also
# in charge of spotting gaps in the segment array, inserting discontinuity
# objects where appropriate and keeping track of the discontinuity sequence.
describe "Segment Finalizer", ->
generator = null
injector = null
beforeEach (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
done()
afterEach (done) ->
generator.unpipe()
injector.unpipe()
done()
describe "assigning sequenced IDs to new segments", ->
finalizer = null
it "generates segments", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
done()
generator.forward 31, -> generator.end()
it "assigned the right ID sequence", (done) ->
# even though three segments worth of data gets pushed into
# the injector, it currently doesn't know how to trigger the
# emit of its first segment into the finalizer.
expect( finalizer.segmentSeq ).to.be.eql 2
expect( finalizer.discontinuitySeq ).to.eql 0
expect( finalizer.segments ).to.have.length 2
expect( finalizer.segments[0].id ).to.eql 0
expect( finalizer.segments[1].id ).to.eql 1
done()
it "assigned valid PTS values", (done) ->
seg_pts_units = segment_duration * 90
expect( finalizer.segmentPTS ).to.be.closeTo seg_pts_units*2, 100
expect( finalizer.segments[0].pts ).to.be.eql 0
expect( finalizer.segments[1].pts ).to.be.closeTo seg_pts_units*1, 100
done()
it "creates a discontinuity when given a gap", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
expect( finalizer.segmentSeq ).to.be.eql 3
expect( finalizer.discontinuitySeq ).to.eql 1
expect( finalizer.segments[ 0 ].discontinuitySeq ).to.eql 0
expect( finalizer.segments[ finalizer.segments.length - 1 ].discontinuitySeq ).to.eql 1
done()
generator.forward 30, -> generator.skip_forward 15, ->
generator.forward 11, -> generator.end()
it "will use a segment map to assign sequence numbers", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [0..2]
seg_map[ Number(f_seg.ts) + i*segment_duration ] = seq + i
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length.within 4,5
if finalizer.segments.length == 4
expect(finalizer.segments[ finalizer.segments.length - 4 ].id).to.eql 5
expect(finalizer.segments[ finalizer.segments.length - 3 ].id).to.eql 6
expect(finalizer.segments[ finalizer.segments.length - 2 ].id).to.eql 7
expect(finalizer.segments[ finalizer.segments.length - 1 ].id).to.eql 8
done()
generator.forward 40, -> generator.end()
it "will use a segment map to assign sequence numbers to back data", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [3..1]
seg_map[ Number(f_seg.ts) - i*segment_duration ] = seq
seq += 1
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 4
for seg in finalizer.segments
if seg.id < 8
map_id = seg_map[ Number( seg.ts ) ]
expect(map_id).to.not.be.undefined
expect(seg.id).to.eql map_id
else
expect(seg.id).to.eql 8
done()
af = _.after 2, ->
generator.end()
generator.forward 10, af
generator.backward 30, af
it "will not publish a segment that is not in the segment map", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [3..1]
seg_map[ Number(f_seg.ts) - i*segment_duration ] = seq
seq += 1
for m in [[1,7],[3,6]]
seg_map[ Number(f_seg.ts) - m[0]*segment_duration ] = m[1]
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 3
for seg in finalizer.segments
map_id = seg_map[ Number( seg.ts ) ]
expect(map_id).to.not.be.undefined
expect(seg.id).to.eql map_id
done()
generator.backward 40, -> generator.end()
it "will correctly number discontinuities in back data", (done) ->
f_seg = injector._createSegment start_ts
seg_map = {}
for m in [[1,7],[2,6],[4,5]]
seg_map[ Number(f_seg.ts) - m[0]*segment_duration ] = m[1]
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 4
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 3
expect(finalizer.segments[0].discontinuitySeq).to.eql 3
done()
generator.backward 20, -> generator.skip_backward 10,
-> generator.backward 10, -> generator.end()
it "can dump map info", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
finalizer.dumpMap (err,map) ->
expect( map ).to.have.property "segmentMap"
expect( map ).to.have.property "segmentSeq"
expect( map ).to.have.property "discontinuitySeq"
expect( map ).to.have.property "nextSegment"
done()
generator.forward 41, -> generator.end()
it "can dump a snapshot", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
finalizer.snapshot (err,snapshot) ->
expect( snapshot ).to.be.instanceof Array
expect( snapshot ).to.have.length 3
for s,i in snapshot
expect(s.discontinuitySeq).to.eql 0
expect(s.id).to.eql i
done()
generator.forward 41, -> generator.end()
it "can expire segments using the expire function", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.once "finish", ->
expect(finalizer.segments.length).to.eql 5
done()
generator.forward 120, ->
exp_ts = new Date( Number(start_ts) + 65*1000 )
process.nextTick ->
expect(Number(finalizer.segments[0].ts)).to.be.lt Number(exp_ts)
finalizer.expire exp_ts, (err,min_id) ->
# we expect our new minimum segment to have a start_ts
# greater than the time we expired
expect(Number(finalizer.segments[0].ts)).to.be.gt Number(exp_ts)
generator.end()
#----------
# now put it all together. This time, create an HLSSegmenter and a
# RewindBuffer and feed audio through its normal course.
describe "RewindBuffer -> Segmenter", ->
rewind = null
generator = null
r2 = null
before (done) ->
rewind = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger stdout:false)
rewind._rChunkLength emitDuration:1, streamKey:"testing"
generator = new ChunkGenerator start_ts, 1000
generator.on "readable", ->
while c = generator.read()
rewind._insertBuffer c
done()
it "creates the HLS Segmenter", (done) ->
expect(rewind.hls_segmenter).to.be.an.instanceof HLSSegmenter
done()
it "segments source data", (done) ->
injector_pushes = 0
rewind.hls_segmenter.injector.on "push", ->
injector_pushes += 1
rewind.hls_segmenter.once "_finalizer", ->
setTimeout ->
expect(injector_pushes).to.be.within 4,5
expect(rewind.hls_segmenter.finalizer.segments).to.have.length.within 4,5
done()
, 200
generator.forward 60
it "expires segments when the RewindBuffer fills", (done) ->
rewind.hls_segmenter.once "snapshot", (snap) ->
expect(rewind.bufferedSecs()).to.eql 120
# we're sending in 121 seconds, so there will be one second in
# a not-yet-created segment, and our first segment will have lost
# its first segment and been expired. That leaves 11 active.
expect(snap.segments).to.have.length 11
done()
generator.forward 121
it "loads segment data from a RewindBuffer dump", (done) ->
pt = new require("stream").PassThrough()
r2 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {stdout:false})
r2._rChunkLength emitDuration:1, streamKey:"<KEY>"
rewind.hls_segmenter.snapshot (err,snap1) ->
throw err if err
debug "snap1 has #{snap1.segments.length} segments."
debug "snap1 first segment is #{snap1.segments[0].id}.", snap1.segments[0]
debug "snap1 last segment is #{snap1.segments[snap1.segments.length-1].id}.", snap1.segments[snap1.segments.length-1]
r2.loadBuffer pt, (err,stats) ->
throw err if err
# -- r2 loaded -- #
r2.hls_segmenter.snapshot (err,snap2) ->
throw err if err
debug "snap2 has #{snap2.segments.length} segments."
debug "snap2 first segment is #{snap2.segments[0].id}.", snap2.segments[0]
debug "snap2 last segment is #{snap2.segments[snap2.segments.length-1].id}.", snap2.segments[snap2.segments.length-1]
expect(snap2.segments).to.have.length snap1.segments.length
done()
rewind.dumpBuffer (err,writer) ->
throw err if err
writer.pipe(pt)
it "receives new data cleanly after loading from a RewindBuffer dump", (done) ->
throw new Error "Requires r2 to be populated." if !r2
g2 = new ChunkGenerator generator.ts().forward, 1000
g2.on "readable", ->
while c = g2.read()
r2._insertBuffer c
r2.hls_segmenter.once "snapshot", (snap)->
debug "g2 snap is ", snap
# we're checking to make sure that our snapshot numbering is intact,
# as is our timestamp sequence
last = null
for s in snap.segments
if last
expect(s.id).to.eql last.id + 1
expect(s.ts).to.eql last.end_ts
last = s
done()
g2.forward 10
#----------
describe "Stream Group Coordination", ->
r1 = null
r2 = null
sg = null
g1 = null
g2 = null
before (done) ->
r1 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {})
r1._rChunkLength emitDuration:0.5, streamKey:"testing"
r1.loadBuffer null, (err,stats) ->
r2 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {})
r2._rChunkLength emitDuration:0.5, streamKey:"testing"
r2.loadBuffer null, (err,stats) ->
sg = new FakeStreamGroup [r1,r2]
d = new Date()
g1 = new ChunkGenerator d, 1000
g2 = new ChunkGenerator d, 1000
g1.on "readable", ->
r1._insertBuffer c while c = g1.read()
g2.on "readable", ->
r2._insertBuffer c while c = g2.read()
done()
it "should trigger updates to stream group min segment TS", (done) ->
this.timeout 4000
# stream all f_chunks into r1, but skip some for r2
g1.forward 120
g2.skip_forward 30, -> g2.forward 90
af = _.after 2, ->
expect(sg.updates.length).to.eql 2
done()
r1.hls_segmenter.once "snapshot", af
r2.hls_segmenter.once "snapshot", af
it "both RewindBuffers should have the correct first segment", (done) ->
expect(Number(r1._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
expect(Number(r2._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
done()
it "should stay correct when data is expired unevenly", (done) ->
this.timeout 5000
r1.setRewind(30,30)
af = _.after 2, ->
expect(Number(r1._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
expect(Number(r2._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
done()
r1.hls_segmenter.once "snapshot", af
r2.hls_segmenter.once "snapshot", af
#----------
describe "Segment PTS values", ->
generator = null
injector = null
finalizer = null
before (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
done()
after (done) ->
generator.unpipe()
injector.unpipe()
done()
it "generates correct PTS at 24 hours", (done) ->
generator.forward (86400 * 1000 / chunk_duration), ->
# PTS will have started at 0 since we didn't send in a map, so a
# segment's PTS should be seg.id * segment_duration * 90
last_seg = finalizer.segments[finalizer.segments.length-1]
expect(last_seg.pts).to.eql last_seg.id * segment_duration * 90
done()
it "generates correct PTS at 48 hours", (done) ->
generator.forward (86400 * 1000 / chunk_duration), ->
# PTS is a 33-bit integer, so it will roll over after 26.5 hours
# when it hits Math.pow(2,33) - 1
last_seg = finalizer.segments[finalizer.segments.length-1]
# we can't test actaul equality here because javascript doesn't believe in ints
expect(last_seg.pts).to.be.closeTo ((last_seg.id * segment_duration * 90) - Math.pow(2,33) - 1), 10
done()
| true | RewindBuffer = $src "rewind_buffer"
HLSSegmenter = $src "rewind/hls_segmenter"
Logger = $src "logger"
ChunkGenerator = $src "util/chunk_generator"
debug = require("debug")("sm:tests:hls_segmenter")
_ = require "underscore"
#----------
class FakeStreamGroup extends require("events").EventEmitter
constructor: (rewinds) ->
@updates = []
@hls_min_id = null
for r in rewinds
r.hls_segmenter.syncToGroup @
hlsUpdateMinSegment: (id) ->
if !@hls_min_id || id > @hls_min_id
@updates.push id
prev = @hls_min_id
@hls_min_id = id
@emit "hls_update_min_segment", id
#----------
describe "HTTP Live Streaming Segmenter", ->
rewind = null
chunk_duration = 1000
segment_duration = 10000
start_ts = new Date()
# to make life easy to reason about, we'll put start_ts on a segment start.
start_ts = new Date( Math.round(start_ts / segment_duration) * segment_duration )
console.log "Start ts is ", start_ts
#----------
# The chunk injector takes a stream of audio chunks going forward and/or
# backward from a common starting point, emitting segments each time a
# segment length boundary is crossed. These semi-baked segments will have
# start and end timestamps and a `buffers` array that contains the matching
# chunks. It will reject pushes that aren't at the edges of the segment list.
describe "Chunk Injector", ->
injector = null
generator = null
beforeEach (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
done()
afterEach (done) ->
generator.unpipe()
injector.removeAllListeners()
done()
it "accepts forward chunks and produces segments", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments.length).to.be.eql 3
expect( Number(segments[1].ts) - Number(segments[0].ts) ).to.eql segment_duration
done()
# producing 31 seconds of audio should give us 3 pushed segments
generator.forward 31, ->
generator.end()
it "doesn't emit a segment if not given enough data", (done) ->
injector.once "segment", ->
throw new Error "No segment was supposed to be created."
injector.once "finish", ->
done()
# since we emit when _segments.length > 2, 19 seconds can't produce
# an emitted segment
generator.forward 19, -> generator.end()
it "emits segments when given backward data", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments.length).to.be.eql 3
expect( Number(segments[0].ts) - Number(segments[1].ts) ).to.eql segment_duration
done()
generator.backward 31, ->
generator.end()
it "emits mixed segments when given mixed data", (done) ->
segments = []
injector.on "readable", ->
while s = injector.read()
segments.push s
injector.once "finish", ->
expect(segments).to.have.length.within 6,7
# sort by ts
segments = _(segments).sortBy (s) -> Number(s.ts)
# five segments should cover 40 seconds going by timestamp
expect( Number(segments[4].ts) - Number(segments[0].ts) ).to.eql 4 * segment_duration
done()
af = _.after 2, -> generator.end()
generator.forward 30, -> af()
generator.backward 30, -> af()
#----------
# The Finalizer sits after the Injector. It takes the half-baked segments
# that the Injector emits and gives them sequence IDs. These can be from a
# loaded sequence map (to reload data on startup, for instance), or they
# can be generated sequence numbers going forward. The Finalizer is also
# in charge of spotting gaps in the segment array, inserting discontinuity
# objects where appropriate and keeping track of the discontinuity sequence.
describe "Segment Finalizer", ->
generator = null
injector = null
beforeEach (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
done()
afterEach (done) ->
generator.unpipe()
injector.unpipe()
done()
describe "assigning sequenced IDs to new segments", ->
finalizer = null
it "generates segments", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
done()
generator.forward 31, -> generator.end()
it "assigned the right ID sequence", (done) ->
# even though three segments worth of data gets pushed into
# the injector, it currently doesn't know how to trigger the
# emit of its first segment into the finalizer.
expect( finalizer.segmentSeq ).to.be.eql 2
expect( finalizer.discontinuitySeq ).to.eql 0
expect( finalizer.segments ).to.have.length 2
expect( finalizer.segments[0].id ).to.eql 0
expect( finalizer.segments[1].id ).to.eql 1
done()
it "assigned valid PTS values", (done) ->
seg_pts_units = segment_duration * 90
expect( finalizer.segmentPTS ).to.be.closeTo seg_pts_units*2, 100
expect( finalizer.segments[0].pts ).to.be.eql 0
expect( finalizer.segments[1].pts ).to.be.closeTo seg_pts_units*1, 100
done()
it "creates a discontinuity when given a gap", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
expect( finalizer.segmentSeq ).to.be.eql 3
expect( finalizer.discontinuitySeq ).to.eql 1
expect( finalizer.segments[ 0 ].discontinuitySeq ).to.eql 0
expect( finalizer.segments[ finalizer.segments.length - 1 ].discontinuitySeq ).to.eql 1
done()
generator.forward 30, -> generator.skip_forward 15, ->
generator.forward 11, -> generator.end()
it "will use a segment map to assign sequence numbers", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [0..2]
seg_map[ Number(f_seg.ts) + i*segment_duration ] = seq + i
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length.within 4,5
if finalizer.segments.length == 4
expect(finalizer.segments[ finalizer.segments.length - 4 ].id).to.eql 5
expect(finalizer.segments[ finalizer.segments.length - 3 ].id).to.eql 6
expect(finalizer.segments[ finalizer.segments.length - 2 ].id).to.eql 7
expect(finalizer.segments[ finalizer.segments.length - 1 ].id).to.eql 8
done()
generator.forward 40, -> generator.end()
it "will use a segment map to assign sequence numbers to back data", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [3..1]
seg_map[ Number(f_seg.ts) - i*segment_duration ] = seq
seq += 1
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 4
for seg in finalizer.segments
if seg.id < 8
map_id = seg_map[ Number( seg.ts ) ]
expect(map_id).to.not.be.undefined
expect(seg.id).to.eql map_id
else
expect(seg.id).to.eql 8
done()
af = _.after 2, ->
generator.end()
generator.forward 10, af
generator.backward 30, af
it "will not publish a segment that is not in the segment map", (done) ->
f_seg = injector._createSegment start_ts
seq = 5
seg_map = {}
for i in [3..1]
seg_map[ Number(f_seg.ts) - i*segment_duration ] = seq
seq += 1
for m in [[1,7],[3,6]]
seg_map[ Number(f_seg.ts) - m[0]*segment_duration ] = m[1]
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 0
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 3
for seg in finalizer.segments
map_id = seg_map[ Number( seg.ts ) ]
expect(map_id).to.not.be.undefined
expect(seg.id).to.eql map_id
done()
generator.backward 40, -> generator.end()
it "will correctly number discontinuities in back data", (done) ->
f_seg = injector._createSegment start_ts
seg_map = {}
for m in [[1,7],[2,6],[4,5]]
seg_map[ Number(f_seg.ts) - m[0]*segment_duration ] = m[1]
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration,
segmentSeq: 8
nextSegment: f_seg.ts
discontinuitySeq: 4
segmentMap: seg_map
injector.pipe(finalizer)
finalizer.on "finish", ->
expect(finalizer.segments).to.have.length 3
expect(finalizer.segments[0].discontinuitySeq).to.eql 3
done()
generator.backward 20, -> generator.skip_backward 10,
-> generator.backward 10, -> generator.end()
it "can dump map info", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
finalizer.dumpMap (err,map) ->
expect( map ).to.have.property "segmentMap"
expect( map ).to.have.property "segmentSeq"
expect( map ).to.have.property "discontinuitySeq"
expect( map ).to.have.property "nextSegment"
done()
generator.forward 41, -> generator.end()
it "can dump a snapshot", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.on "finish", ->
finalizer.snapshot (err,snapshot) ->
expect( snapshot ).to.be.instanceof Array
expect( snapshot ).to.have.length 3
for s,i in snapshot
expect(s.discontinuitySeq).to.eql 0
expect(s.id).to.eql i
done()
generator.forward 41, -> generator.end()
it "can expire segments using the expire function", (done) ->
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
finalizer.once "finish", ->
expect(finalizer.segments.length).to.eql 5
done()
generator.forward 120, ->
exp_ts = new Date( Number(start_ts) + 65*1000 )
process.nextTick ->
expect(Number(finalizer.segments[0].ts)).to.be.lt Number(exp_ts)
finalizer.expire exp_ts, (err,min_id) ->
# we expect our new minimum segment to have a start_ts
# greater than the time we expired
expect(Number(finalizer.segments[0].ts)).to.be.gt Number(exp_ts)
generator.end()
#----------
# now put it all together. This time, create an HLSSegmenter and a
# RewindBuffer and feed audio through its normal course.
describe "RewindBuffer -> Segmenter", ->
rewind = null
generator = null
r2 = null
before (done) ->
rewind = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger stdout:false)
rewind._rChunkLength emitDuration:1, streamKey:"testing"
generator = new ChunkGenerator start_ts, 1000
generator.on "readable", ->
while c = generator.read()
rewind._insertBuffer c
done()
it "creates the HLS Segmenter", (done) ->
expect(rewind.hls_segmenter).to.be.an.instanceof HLSSegmenter
done()
it "segments source data", (done) ->
injector_pushes = 0
rewind.hls_segmenter.injector.on "push", ->
injector_pushes += 1
rewind.hls_segmenter.once "_finalizer", ->
setTimeout ->
expect(injector_pushes).to.be.within 4,5
expect(rewind.hls_segmenter.finalizer.segments).to.have.length.within 4,5
done()
, 200
generator.forward 60
it "expires segments when the RewindBuffer fills", (done) ->
rewind.hls_segmenter.once "snapshot", (snap) ->
expect(rewind.bufferedSecs()).to.eql 120
# we're sending in 121 seconds, so there will be one second in
# a not-yet-created segment, and our first segment will have lost
# its first segment and been expired. That leaves 11 active.
expect(snap.segments).to.have.length 11
done()
generator.forward 121
it "loads segment data from a RewindBuffer dump", (done) ->
pt = new require("stream").PassThrough()
r2 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {stdout:false})
r2._rChunkLength emitDuration:1, streamKey:"PI:KEY:<KEY>END_PI"
rewind.hls_segmenter.snapshot (err,snap1) ->
throw err if err
debug "snap1 has #{snap1.segments.length} segments."
debug "snap1 first segment is #{snap1.segments[0].id}.", snap1.segments[0]
debug "snap1 last segment is #{snap1.segments[snap1.segments.length-1].id}.", snap1.segments[snap1.segments.length-1]
r2.loadBuffer pt, (err,stats) ->
throw err if err
# -- r2 loaded -- #
r2.hls_segmenter.snapshot (err,snap2) ->
throw err if err
debug "snap2 has #{snap2.segments.length} segments."
debug "snap2 first segment is #{snap2.segments[0].id}.", snap2.segments[0]
debug "snap2 last segment is #{snap2.segments[snap2.segments.length-1].id}.", snap2.segments[snap2.segments.length-1]
expect(snap2.segments).to.have.length snap1.segments.length
done()
rewind.dumpBuffer (err,writer) ->
throw err if err
writer.pipe(pt)
it "receives new data cleanly after loading from a RewindBuffer dump", (done) ->
throw new Error "Requires r2 to be populated." if !r2
g2 = new ChunkGenerator generator.ts().forward, 1000
g2.on "readable", ->
while c = g2.read()
r2._insertBuffer c
r2.hls_segmenter.once "snapshot", (snap)->
debug "g2 snap is ", snap
# we're checking to make sure that our snapshot numbering is intact,
# as is our timestamp sequence
last = null
for s in snap.segments
if last
expect(s.id).to.eql last.id + 1
expect(s.ts).to.eql last.end_ts
last = s
done()
g2.forward 10
#----------
describe "Stream Group Coordination", ->
r1 = null
r2 = null
sg = null
g1 = null
g2 = null
before (done) ->
r1 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {})
r1._rChunkLength emitDuration:0.5, streamKey:"testing"
r1.loadBuffer null, (err,stats) ->
r2 = new RewindBuffer hls:10, seconds:120, burst:30, log:(new Logger {})
r2._rChunkLength emitDuration:0.5, streamKey:"testing"
r2.loadBuffer null, (err,stats) ->
sg = new FakeStreamGroup [r1,r2]
d = new Date()
g1 = new ChunkGenerator d, 1000
g2 = new ChunkGenerator d, 1000
g1.on "readable", ->
r1._insertBuffer c while c = g1.read()
g2.on "readable", ->
r2._insertBuffer c while c = g2.read()
done()
it "should trigger updates to stream group min segment TS", (done) ->
this.timeout 4000
# stream all f_chunks into r1, but skip some for r2
g1.forward 120
g2.skip_forward 30, -> g2.forward 90
af = _.after 2, ->
expect(sg.updates.length).to.eql 2
done()
r1.hls_segmenter.once "snapshot", af
r2.hls_segmenter.once "snapshot", af
it "both RewindBuffers should have the correct first segment", (done) ->
expect(Number(r1._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
expect(Number(r2._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
done()
it "should stay correct when data is expired unevenly", (done) ->
this.timeout 5000
r1.setRewind(30,30)
af = _.after 2, ->
expect(Number(r1._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
expect(Number(r2._rStatus().hls_first_seg_ts)).to.eql sg.hls_min_id
done()
r1.hls_segmenter.once "snapshot", af
r2.hls_segmenter.once "snapshot", af
#----------
describe "Segment PTS values", ->
generator = null
injector = null
finalizer = null
before (done) ->
injector = new HLSSegmenter.Injector segment_duration, (new Logger {})
generator = new ChunkGenerator start_ts, chunk_duration
generator.pipe(injector)
finalizer = new HLSSegmenter.Finalizer (new Logger {}), segment_duration
injector.pipe(finalizer)
done()
after (done) ->
generator.unpipe()
injector.unpipe()
done()
it "generates correct PTS at 24 hours", (done) ->
generator.forward (86400 * 1000 / chunk_duration), ->
# PTS will have started at 0 since we didn't send in a map, so a
# segment's PTS should be seg.id * segment_duration * 90
last_seg = finalizer.segments[finalizer.segments.length-1]
expect(last_seg.pts).to.eql last_seg.id * segment_duration * 90
done()
it "generates correct PTS at 48 hours", (done) ->
generator.forward (86400 * 1000 / chunk_duration), ->
# PTS is a 33-bit integer, so it will roll over after 26.5 hours
# when it hits Math.pow(2,33) - 1
last_seg = finalizer.segments[finalizer.segments.length-1]
# we can't test actaul equality here because javascript doesn't believe in ints
expect(last_seg.pts).to.be.closeTo ((last_seg.id * segment_duration * 90) - Math.pow(2,33) - 1), 10
done()
|
[
{
"context": "@host = \"mit.edu\".split('').reverse().join('')\n\t\t@stubbedUser1 =\n\t\t\t_id: \"3131231\"\n\t\t\tname:\"bob\"\n\t\t\temail:\"hell",
"end": 444,
"score": 0.7454670071601868,
"start": 433,
"tag": "USERNAME",
"value": "stubbedUser"
},
{
"context": "('')\n\t\t@stubbedUs... | test/unit/coffee/Institutions/InstitutionsControllerTests.coffee | shyoshyo/web-sharelatex | 1 | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/Institutions/InstitutionsController"
expect = require("chai").expect
describe "InstitutionsController", ->
beforeEach ->
@logger = err: sinon.stub(), log: ->
@host = "mit.edu".split('').reverse().join('')
@stubbedUser1 =
_id: "3131231"
name:"bob"
email:"hello@world.com"
emails: [
{"email":"stubb1@mit.edu","reversedHostname":@host},
{"email":"test@test.com","reversedHostname":"test.com"},
{"email":"another@mit.edu","reversedHostname":@host}
]
@stubbedUser2 =
_id: "3131232"
name:"test"
email:"hello2@world.com"
emails: [
{"email":"subb2@mit.edu","reversedHostname":@host}
]
@getUsersByHostname = sinon.stub().callsArgWith(2, null, [ @stubbedUser1, @stubbedUser2 ])
@addAffiliation = sinon.stub().callsArgWith(3, null)
@refreshFeatures = sinon.stub().callsArgWith(2, null)
@InstitutionsController = SandboxedModule.require modulePath, requires:
'logger-sharelatex': @logger
'../User/UserGetter':
getUsersByHostname: @getUsersByHostname
'../Institutions/InstitutionsAPI':
addAffiliation: @addAffiliation
'../Subscription/FeaturesUpdater':
refreshFeatures: @refreshFeatures
@req =
body: hostname: 'mit.edu'
@res =
send: sinon.stub()
json: sinon.stub()
@next = sinon.stub()
describe 'affiliateUsers', ->
it 'should add affiliations for matching users', (done)->
@res.sendStatus = (code) =>
code.should.equal 200
@getUsersByHostname.calledOnce.should.equal true
@addAffiliation.calledThrice.should.equal true
@addAffiliation.calledWith(@stubbedUser1._id, @stubbedUser1.emails[0].email).should.equal true
@addAffiliation.calledWith(@stubbedUser1._id, @stubbedUser1.emails[2].email).should.equal true
@addAffiliation.calledWith(@stubbedUser2._id, @stubbedUser2.emails[0].email).should.equal true
@refreshFeatures.calledWith(@stubbedUser1._id, true).should.equal true
@refreshFeatures.calledWith(@stubbedUser2._id, true).should.equal true
done()
@InstitutionsController.confirmDomain @req, @res, @next
it 'should return errors if last affiliation cannot be added', (done)->
@addAffiliation.onCall(2).callsArgWith(3, new Error("error"))
@next = (error) =>
expect(error).to.exist
@getUsersByHostname.calledOnce.should.equal true
done()
@InstitutionsController.confirmDomain @req, @res, @next
| 2144 | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/Institutions/InstitutionsController"
expect = require("chai").expect
describe "InstitutionsController", ->
beforeEach ->
@logger = err: sinon.stub(), log: ->
@host = "mit.edu".split('').reverse().join('')
@stubbedUser1 =
_id: "3131231"
name:"<NAME>"
email:"<EMAIL>"
emails: [
{"email":"<EMAIL>","reversedHostname":@host},
{"email":"<EMAIL>","reversedHostname":"test.com"},
{"email":"<EMAIL>","reversedHostname":@host}
]
@stubbedUser2 =
_id: "3131232"
name:"<NAME>"
email:"<EMAIL>"
emails: [
{"email":"<EMAIL>","reversedHostname":@host}
]
@getUsersByHostname = sinon.stub().callsArgWith(2, null, [ @stubbedUser1, @stubbedUser2 ])
@addAffiliation = sinon.stub().callsArgWith(3, null)
@refreshFeatures = sinon.stub().callsArgWith(2, null)
@InstitutionsController = SandboxedModule.require modulePath, requires:
'logger-sharelatex': @logger
'../User/UserGetter':
getUsersByHostname: @getUsersByHostname
'../Institutions/InstitutionsAPI':
addAffiliation: @addAffiliation
'../Subscription/FeaturesUpdater':
refreshFeatures: @refreshFeatures
@req =
body: hostname: 'mit.edu'
@res =
send: sinon.stub()
json: sinon.stub()
@next = sinon.stub()
describe 'affiliateUsers', ->
it 'should add affiliations for matching users', (done)->
@res.sendStatus = (code) =>
code.should.equal 200
@getUsersByHostname.calledOnce.should.equal true
@addAffiliation.calledThrice.should.equal true
@addAffiliation.calledWith(@stubbedUser1._id, @stubbedUser1.emails[0].email).should.equal true
@addAffiliation.calledWith(@stubbedUser1._id, @stubbedUser1.emails[2].email).should.equal true
@addAffiliation.calledWith(@stubbedUser2._id, @stubbedUser2.emails[0].email).should.equal true
@refreshFeatures.calledWith(@stubbedUser1._id, true).should.equal true
@refreshFeatures.calledWith(@stubbedUser2._id, true).should.equal true
done()
@InstitutionsController.confirmDomain @req, @res, @next
it 'should return errors if last affiliation cannot be added', (done)->
@addAffiliation.onCall(2).callsArgWith(3, new Error("error"))
@next = (error) =>
expect(error).to.exist
@getUsersByHostname.calledOnce.should.equal true
done()
@InstitutionsController.confirmDomain @req, @res, @next
| true | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/Institutions/InstitutionsController"
expect = require("chai").expect
describe "InstitutionsController", ->
beforeEach ->
@logger = err: sinon.stub(), log: ->
@host = "mit.edu".split('').reverse().join('')
@stubbedUser1 =
_id: "3131231"
name:"PI:NAME:<NAME>END_PI"
email:"PI:EMAIL:<EMAIL>END_PI"
emails: [
{"email":"PI:EMAIL:<EMAIL>END_PI","reversedHostname":@host},
{"email":"PI:EMAIL:<EMAIL>END_PI","reversedHostname":"test.com"},
{"email":"PI:EMAIL:<EMAIL>END_PI","reversedHostname":@host}
]
@stubbedUser2 =
_id: "3131232"
name:"PI:NAME:<NAME>END_PI"
email:"PI:EMAIL:<EMAIL>END_PI"
emails: [
{"email":"PI:EMAIL:<EMAIL>END_PI","reversedHostname":@host}
]
@getUsersByHostname = sinon.stub().callsArgWith(2, null, [ @stubbedUser1, @stubbedUser2 ])
@addAffiliation = sinon.stub().callsArgWith(3, null)
@refreshFeatures = sinon.stub().callsArgWith(2, null)
@InstitutionsController = SandboxedModule.require modulePath, requires:
'logger-sharelatex': @logger
'../User/UserGetter':
getUsersByHostname: @getUsersByHostname
'../Institutions/InstitutionsAPI':
addAffiliation: @addAffiliation
'../Subscription/FeaturesUpdater':
refreshFeatures: @refreshFeatures
@req =
body: hostname: 'mit.edu'
@res =
send: sinon.stub()
json: sinon.stub()
@next = sinon.stub()
describe 'affiliateUsers', ->
it 'should add affiliations for matching users', (done)->
@res.sendStatus = (code) =>
code.should.equal 200
@getUsersByHostname.calledOnce.should.equal true
@addAffiliation.calledThrice.should.equal true
@addAffiliation.calledWith(@stubbedUser1._id, @stubbedUser1.emails[0].email).should.equal true
@addAffiliation.calledWith(@stubbedUser1._id, @stubbedUser1.emails[2].email).should.equal true
@addAffiliation.calledWith(@stubbedUser2._id, @stubbedUser2.emails[0].email).should.equal true
@refreshFeatures.calledWith(@stubbedUser1._id, true).should.equal true
@refreshFeatures.calledWith(@stubbedUser2._id, true).should.equal true
done()
@InstitutionsController.confirmDomain @req, @res, @next
it 'should return errors if last affiliation cannot be added', (done)->
@addAffiliation.onCall(2).callsArgWith(3, new Error("error"))
@next = (error) =>
expect(error).to.exist
@getUsersByHostname.calledOnce.should.equal true
done()
@InstitutionsController.confirmDomain @req, @res, @next
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9976980090141296,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-zlib-dictionary-fail.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common.js")
assert = require("assert")
zlib = require("zlib")
# Should raise an error, not trigger an assertion in src/node_zlib.cc
(->
stream = zlib.createInflate()
stream.on "error", common.mustCall((err) ->
assert /Missing dictionary/.test(err.message)
return
)
# String "test" encoded with dictionary "dict".
stream.write Buffer([
0x78
0xbb
0x04
0x09
0x01
0xa5
])
return
)()
# Should raise an error, not trigger an assertion in src/node_zlib.cc
(->
stream = zlib.createInflate(dictionary: Buffer("fail"))
stream.on "error", common.mustCall((err) ->
assert /Bad dictionary/.test(err.message)
return
)
# String "test" encoded with dictionary "dict".
stream.write Buffer([
0x78
0xbb
0x04
0x09
0x01
0xa5
])
return
)()
| 13319 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common.js")
assert = require("assert")
zlib = require("zlib")
# Should raise an error, not trigger an assertion in src/node_zlib.cc
(->
stream = zlib.createInflate()
stream.on "error", common.mustCall((err) ->
assert /Missing dictionary/.test(err.message)
return
)
# String "test" encoded with dictionary "dict".
stream.write Buffer([
0x78
0xbb
0x04
0x09
0x01
0xa5
])
return
)()
# Should raise an error, not trigger an assertion in src/node_zlib.cc
(->
stream = zlib.createInflate(dictionary: Buffer("fail"))
stream.on "error", common.mustCall((err) ->
assert /Bad dictionary/.test(err.message)
return
)
# String "test" encoded with dictionary "dict".
stream.write Buffer([
0x78
0xbb
0x04
0x09
0x01
0xa5
])
return
)()
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common.js")
assert = require("assert")
zlib = require("zlib")
# Should raise an error, not trigger an assertion in src/node_zlib.cc
(->
stream = zlib.createInflate()
stream.on "error", common.mustCall((err) ->
assert /Missing dictionary/.test(err.message)
return
)
# String "test" encoded with dictionary "dict".
stream.write Buffer([
0x78
0xbb
0x04
0x09
0x01
0xa5
])
return
)()
# Should raise an error, not trigger an assertion in src/node_zlib.cc
(->
stream = zlib.createInflate(dictionary: Buffer("fail"))
stream.on "error", common.mustCall((err) ->
assert /Bad dictionary/.test(err.message)
return
)
# String "test" encoded with dictionary "dict".
stream.write Buffer([
0x78
0xbb
0x04
0x09
0x01
0xa5
])
return
)()
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.999189555644989,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-event-emitter-remove-all-listeners.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
expect = (expected) ->
listener = (name) ->
actual.push name
return
actual = []
process.on "exit", ->
assert.deepEqual actual.sort(), expected.sort()
return
common.mustCall listener, expected.length
listener = ->
common = require("../common")
assert = require("assert")
events = require("events")
e1 = new events.EventEmitter()
e1.on "foo", listener
e1.on "bar", listener
e1.on "baz", listener
e1.on "baz", listener
fooListeners = e1.listeners("foo")
barListeners = e1.listeners("bar")
bazListeners = e1.listeners("baz")
e1.on "removeListener", expect([
"bar"
"baz"
"baz"
])
e1.removeAllListeners "bar"
e1.removeAllListeners "baz"
assert.deepEqual e1.listeners("foo"), [listener]
assert.deepEqual e1.listeners("bar"), []
assert.deepEqual e1.listeners("baz"), []
# after calling removeAllListeners,
# the old listeners array should stay unchanged
assert.deepEqual fooListeners, [listener]
assert.deepEqual barListeners, [listener]
assert.deepEqual bazListeners, [
listener
listener
]
# after calling removeAllListeners,
# new listeners arrays are different from the old
assert.notEqual e1.listeners("bar"), barListeners
assert.notEqual e1.listeners("baz"), bazListeners
e2 = new events.EventEmitter()
e2.on "foo", listener
e2.on "bar", listener
# expect LIFO order
e2.on "removeListener", expect([
"foo"
"bar"
"removeListener"
])
e2.on "removeListener", expect([
"foo"
"bar"
])
e2.removeAllListeners()
console.error e2
assert.deepEqual [], e2.listeners("foo")
assert.deepEqual [], e2.listeners("bar")
e3 = new events.EventEmitter()
e3.on "removeListener", listener
# check for regression where removeAllListeners throws when
# there exists a removeListener listener, but there exists
# no listeners for the provided event type
assert.doesNotThrow e3.removeAllListeners.bind(e3, "foo")
| 112584 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
expect = (expected) ->
listener = (name) ->
actual.push name
return
actual = []
process.on "exit", ->
assert.deepEqual actual.sort(), expected.sort()
return
common.mustCall listener, expected.length
listener = ->
common = require("../common")
assert = require("assert")
events = require("events")
e1 = new events.EventEmitter()
e1.on "foo", listener
e1.on "bar", listener
e1.on "baz", listener
e1.on "baz", listener
fooListeners = e1.listeners("foo")
barListeners = e1.listeners("bar")
bazListeners = e1.listeners("baz")
e1.on "removeListener", expect([
"bar"
"baz"
"baz"
])
e1.removeAllListeners "bar"
e1.removeAllListeners "baz"
assert.deepEqual e1.listeners("foo"), [listener]
assert.deepEqual e1.listeners("bar"), []
assert.deepEqual e1.listeners("baz"), []
# after calling removeAllListeners,
# the old listeners array should stay unchanged
assert.deepEqual fooListeners, [listener]
assert.deepEqual barListeners, [listener]
assert.deepEqual bazListeners, [
listener
listener
]
# after calling removeAllListeners,
# new listeners arrays are different from the old
assert.notEqual e1.listeners("bar"), barListeners
assert.notEqual e1.listeners("baz"), bazListeners
e2 = new events.EventEmitter()
e2.on "foo", listener
e2.on "bar", listener
# expect LIFO order
e2.on "removeListener", expect([
"foo"
"bar"
"removeListener"
])
e2.on "removeListener", expect([
"foo"
"bar"
])
e2.removeAllListeners()
console.error e2
assert.deepEqual [], e2.listeners("foo")
assert.deepEqual [], e2.listeners("bar")
e3 = new events.EventEmitter()
e3.on "removeListener", listener
# check for regression where removeAllListeners throws when
# there exists a removeListener listener, but there exists
# no listeners for the provided event type
assert.doesNotThrow e3.removeAllListeners.bind(e3, "foo")
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
expect = (expected) ->
listener = (name) ->
actual.push name
return
actual = []
process.on "exit", ->
assert.deepEqual actual.sort(), expected.sort()
return
common.mustCall listener, expected.length
listener = ->
common = require("../common")
assert = require("assert")
events = require("events")
e1 = new events.EventEmitter()
e1.on "foo", listener
e1.on "bar", listener
e1.on "baz", listener
e1.on "baz", listener
fooListeners = e1.listeners("foo")
barListeners = e1.listeners("bar")
bazListeners = e1.listeners("baz")
e1.on "removeListener", expect([
"bar"
"baz"
"baz"
])
e1.removeAllListeners "bar"
e1.removeAllListeners "baz"
assert.deepEqual e1.listeners("foo"), [listener]
assert.deepEqual e1.listeners("bar"), []
assert.deepEqual e1.listeners("baz"), []
# after calling removeAllListeners,
# the old listeners array should stay unchanged
assert.deepEqual fooListeners, [listener]
assert.deepEqual barListeners, [listener]
assert.deepEqual bazListeners, [
listener
listener
]
# after calling removeAllListeners,
# new listeners arrays are different from the old
assert.notEqual e1.listeners("bar"), barListeners
assert.notEqual e1.listeners("baz"), bazListeners
e2 = new events.EventEmitter()
e2.on "foo", listener
e2.on "bar", listener
# expect LIFO order
e2.on "removeListener", expect([
"foo"
"bar"
"removeListener"
])
e2.on "removeListener", expect([
"foo"
"bar"
])
e2.removeAllListeners()
console.error e2
assert.deepEqual [], e2.listeners("foo")
assert.deepEqual [], e2.listeners("bar")
e3 = new events.EventEmitter()
e3.on "removeListener", listener
# check for regression where removeAllListeners throws when
# there exists a removeListener listener, but there exists
# no listeners for the provided event type
assert.doesNotThrow e3.removeAllListeners.bind(e3, "foo")
|
[
{
"context": "###\n\t(c) 2016 Julian Gonggrijp\n###\n\ndefine [\n\t'backbone'\n\t'view/home'\n], (bb, Ho",
"end": 30,
"score": 0.9998828172683716,
"start": 14,
"tag": "NAME",
"value": "Julian Gonggrijp"
}
] | client/script/router/main.coffee | NBOCampbellToets/CampbellSoup | 0 | ###
(c) 2016 Julian Gonggrijp
###
define [
'backbone'
'view/home'
], (bb, HomeView) ->
'use strict'
homeView = new HomeView
class MainRouter extends bb.Router
routes:
'': 'home'
home: -> homeView.render()
| 151034 | ###
(c) 2016 <NAME>
###
define [
'backbone'
'view/home'
], (bb, HomeView) ->
'use strict'
homeView = new HomeView
class MainRouter extends bb.Router
routes:
'': 'home'
home: -> homeView.render()
| true | ###
(c) 2016 PI:NAME:<NAME>END_PI
###
define [
'backbone'
'view/home'
], (bb, HomeView) ->
'use strict'
homeView = new HomeView
class MainRouter extends bb.Router
routes:
'': 'home'
home: -> homeView.render()
|
[
{
"context": "r disallow usage of \"English\" operators.\n# @author Julian Rosse\n###\n'use strict'\n\n#------------------------------",
"end": 109,
"score": 0.9998564720153809,
"start": 97,
"tag": "NAME",
"value": "Julian Rosse"
}
] | src/rules/english-operators.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview This rule shoud require or disallow usage of "English" operators.
# @author Julian Rosse
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
ENGLISH_OPERATORS =
not: '!'
and: '&&'
or: '||'
is: '=='
isnt: '!='
NON_ENGLISH_OPERATORS =
'!': 'not'
'&&': 'and'
'||': 'or'
'==': 'is'
'!=': 'isnt'
isBang = ({operator}) ->
operator is '!'
isDoubleBang = (node) ->
return no unless isBang node
return yes if isBang node.parent
return yes if isBang node.argument
no
getMessage = ({useEnglish, operator}) ->
"Prefer the usage of '#{
(if useEnglish then NON_ENGLISH_OPERATORS else ENGLISH_OPERATORS)[operator]
}' over '#{operator}'"
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent usage of English operators'
category: 'Stylistic Issues'
recommended: no
# url: 'https://eslint.org/docs/rules/space-unary-ops'
schema: [enum: ['always', 'never']]
create: (context) ->
useEnglish = context.options?[0] isnt 'never'
checkOp = (node) ->
return unless (
node.operator of (
if useEnglish
NON_ENGLISH_OPERATORS
else
ENGLISH_OPERATORS
)
)
return if isDoubleBang node
context.report {
node
message: getMessage {useEnglish, operator: node.operator}
}
#--------------------------------------------------------------------------
# Public
#--------------------------------------------------------------------------
UnaryExpression: checkOp
BinaryExpression: checkOp
LogicalExpression: checkOp
| 213465 | ###*
# @fileoverview This rule shoud require or disallow usage of "English" operators.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
ENGLISH_OPERATORS =
not: '!'
and: '&&'
or: '||'
is: '=='
isnt: '!='
NON_ENGLISH_OPERATORS =
'!': 'not'
'&&': 'and'
'||': 'or'
'==': 'is'
'!=': 'isnt'
isBang = ({operator}) ->
operator is '!'
isDoubleBang = (node) ->
return no unless isBang node
return yes if isBang node.parent
return yes if isBang node.argument
no
getMessage = ({useEnglish, operator}) ->
"Prefer the usage of '#{
(if useEnglish then NON_ENGLISH_OPERATORS else ENGLISH_OPERATORS)[operator]
}' over '#{operator}'"
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent usage of English operators'
category: 'Stylistic Issues'
recommended: no
# url: 'https://eslint.org/docs/rules/space-unary-ops'
schema: [enum: ['always', 'never']]
create: (context) ->
useEnglish = context.options?[0] isnt 'never'
checkOp = (node) ->
return unless (
node.operator of (
if useEnglish
NON_ENGLISH_OPERATORS
else
ENGLISH_OPERATORS
)
)
return if isDoubleBang node
context.report {
node
message: getMessage {useEnglish, operator: node.operator}
}
#--------------------------------------------------------------------------
# Public
#--------------------------------------------------------------------------
UnaryExpression: checkOp
BinaryExpression: checkOp
LogicalExpression: checkOp
| true | ###*
# @fileoverview This rule shoud require or disallow usage of "English" operators.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
ENGLISH_OPERATORS =
not: '!'
and: '&&'
or: '||'
is: '=='
isnt: '!='
NON_ENGLISH_OPERATORS =
'!': 'not'
'&&': 'and'
'||': 'or'
'==': 'is'
'!=': 'isnt'
isBang = ({operator}) ->
operator is '!'
isDoubleBang = (node) ->
return no unless isBang node
return yes if isBang node.parent
return yes if isBang node.argument
no
getMessage = ({useEnglish, operator}) ->
"Prefer the usage of '#{
(if useEnglish then NON_ENGLISH_OPERATORS else ENGLISH_OPERATORS)[operator]
}' over '#{operator}'"
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent usage of English operators'
category: 'Stylistic Issues'
recommended: no
# url: 'https://eslint.org/docs/rules/space-unary-ops'
schema: [enum: ['always', 'never']]
create: (context) ->
useEnglish = context.options?[0] isnt 'never'
checkOp = (node) ->
return unless (
node.operator of (
if useEnglish
NON_ENGLISH_OPERATORS
else
ENGLISH_OPERATORS
)
)
return if isDoubleBang node
context.report {
node
message: getMessage {useEnglish, operator: node.operator}
}
#--------------------------------------------------------------------------
# Public
#--------------------------------------------------------------------------
UnaryExpression: checkOp
BinaryExpression: checkOp
LogicalExpression: checkOp
|
[
{
"context": "r templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ\n\n\ttemplateData:\n\n\t\t# Specify some",
"end": 320,
"score": 0.9961287975311279,
"start": 315,
"tag": "USERNAME",
"value": "bevry"
},
{
"context": "\n\t\t\t# The default title of our website\n\t\... | docpad.coffee | patocallaghan/little-glitch | 0 | # The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
# Specify some site properties
site:
# The production url of our website
url: "http://website.com"
# Here are some old site urls that you would like to redirect from
oldUrls: [
'www.website.com',
'website.herokuapp.com'
]
# The default title of our website
title: "Pat O'Callaghan - UI Engineer"
# The website description (for SEO)
description: """
Blog of Pat O'Callaghan
"""
# The website keywords (for SEO) separated by commas
keywords: """
frontend development, css, html, html5, javascript
"""
# The website's styles
styles: [
'assets/css/style.css'
]
# The website's scripts
scripts: [
# '/vendor/log.js'
# '/vendor/modernizr.js'
# '/scripts/script.js'
]
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@document.title} | #{@site.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.keywords or []).join(', ')
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Server Extend
# Used to add our own custom routes to the server before the docpad routes are added
serverExtend: (opts) ->
# Extract the server from the options
{server} = opts
docpad = @docpad
# As we are now running in an event,
# ensure we are using the latest copy of the docpad configuraiton
# and fetch our urls from it
latestConfig = docpad.getConfig()
oldUrls = latestConfig.templateData.site.oldUrls or []
newUrl = latestConfig.templateData.site.url
# Redirect any requests accessing one of our sites oldUrls to the new site url
server.use (req,res,next) ->
if req.headers.host in oldUrls
res.redirect(newUrl+req.url, 301)
else
next()
# Configure Plugins
# Should contain the plugin short names on the left, and the configuration to pass the plugin on the right
plugins: # example
sass:
requireLibraries: ['breakpoint', 'ceaser-easing']
bundler: true
# Environments
# Allows us to set custom configuration for specific environments
environments: # default
development: # default
# Always refresh from server
maxAge: false # default
# Only do these if we are running standalone via the `docpad` executable
checkVersion: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
welcome: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
prompts: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
# Listen to port 9778 on the development environment
port: 9778 # example
production:
plugins:
livereload:
enabled: false
sass:
outputStyle: 'compressed'
}
# Export our DocPad Configuration
module.exports = docpadConfig
| 224971 | # The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
# Specify some site properties
site:
# The production url of our website
url: "http://website.com"
# Here are some old site urls that you would like to redirect from
oldUrls: [
'www.website.com',
'website.herokuapp.com'
]
# The default title of our website
title: "<NAME> - UI Engineer"
# The website description (for SEO)
description: """
Blog of <NAME>
"""
# The website keywords (for SEO) separated by commas
keywords: """
frontend development, css, html, html5, javascript
"""
# The website's styles
styles: [
'assets/css/style.css'
]
# The website's scripts
scripts: [
# '/vendor/log.js'
# '/vendor/modernizr.js'
# '/scripts/script.js'
]
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@document.title} | #{@site.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.keywords or []).join(', ')
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Server Extend
# Used to add our own custom routes to the server before the docpad routes are added
serverExtend: (opts) ->
# Extract the server from the options
{server} = opts
docpad = @docpad
# As we are now running in an event,
# ensure we are using the latest copy of the docpad configuraiton
# and fetch our urls from it
latestConfig = docpad.getConfig()
oldUrls = latestConfig.templateData.site.oldUrls or []
newUrl = latestConfig.templateData.site.url
# Redirect any requests accessing one of our sites oldUrls to the new site url
server.use (req,res,next) ->
if req.headers.host in oldUrls
res.redirect(newUrl+req.url, 301)
else
next()
# Configure Plugins
# Should contain the plugin short names on the left, and the configuration to pass the plugin on the right
plugins: # example
sass:
requireLibraries: ['breakpoint', 'ceaser-easing']
bundler: true
# Environments
# Allows us to set custom configuration for specific environments
environments: # default
development: # default
# Always refresh from server
maxAge: false # default
# Only do these if we are running standalone via the `docpad` executable
checkVersion: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
welcome: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
prompts: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
# Listen to port 9778 on the development environment
port: 9778 # example
production:
plugins:
livereload:
enabled: false
sass:
outputStyle: 'compressed'
}
# Export our DocPad Configuration
module.exports = docpadConfig
| true | # The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
# Specify some site properties
site:
# The production url of our website
url: "http://website.com"
# Here are some old site urls that you would like to redirect from
oldUrls: [
'www.website.com',
'website.herokuapp.com'
]
# The default title of our website
title: "PI:NAME:<NAME>END_PI - UI Engineer"
# The website description (for SEO)
description: """
Blog of PI:NAME:<NAME>END_PI
"""
# The website keywords (for SEO) separated by commas
keywords: """
frontend development, css, html, html5, javascript
"""
# The website's styles
styles: [
'assets/css/style.css'
]
# The website's scripts
scripts: [
# '/vendor/log.js'
# '/vendor/modernizr.js'
# '/scripts/script.js'
]
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@document.title} | #{@site.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.keywords or []).join(', ')
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Server Extend
# Used to add our own custom routes to the server before the docpad routes are added
serverExtend: (opts) ->
# Extract the server from the options
{server} = opts
docpad = @docpad
# As we are now running in an event,
# ensure we are using the latest copy of the docpad configuraiton
# and fetch our urls from it
latestConfig = docpad.getConfig()
oldUrls = latestConfig.templateData.site.oldUrls or []
newUrl = latestConfig.templateData.site.url
# Redirect any requests accessing one of our sites oldUrls to the new site url
server.use (req,res,next) ->
if req.headers.host in oldUrls
res.redirect(newUrl+req.url, 301)
else
next()
# Configure Plugins
# Should contain the plugin short names on the left, and the configuration to pass the plugin on the right
plugins: # example
sass:
requireLibraries: ['breakpoint', 'ceaser-easing']
bundler: true
# Environments
# Allows us to set custom configuration for specific environments
environments: # default
development: # default
# Always refresh from server
maxAge: false # default
# Only do these if we are running standalone via the `docpad` executable
checkVersion: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
welcome: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
prompts: process.argv.length >= 2 and /docpad$/.test(process.argv[1]) # default
# Listen to port 9778 on the development environment
port: 9778 # example
production:
plugins:
livereload:
enabled: false
sass:
outputStyle: 'compressed'
}
# Export our DocPad Configuration
module.exports = docpadConfig
|
[
{
"context": "qlite.coffee - top level file\n#\n# Copyright © 2011 Pavan Kumar Sunkara. All rights reserved\n#\n\nnosqlite = module.exports",
"end": 77,
"score": 0.9998509287834167,
"start": 58,
"tag": "NAME",
"value": "Pavan Kumar Sunkara"
}
] | node_modules/resources/persistence/vendor/jugglingdb/lib/nosqlite/src/nosqlite.coffee | manecz/storytail | 0 | #
# nosqlite.coffee - top level file
#
# Copyright © 2011 Pavan Kumar Sunkara. All rights reserved
#
nosqlite = module.exports
# Requiring modules
path = require 'path'
fs = require 'fs'
rimraf = require '../vendor/rimraf'
async = require 'async'
# Declaring variables
nosqlite.path = path.join __dirname, '..', 'data'
# Connection class for nosqlite
nosqlite.Connection = (arg) ->
options = {}
@path = nosqlite.path
if typeof(arg) is 'object'
options = arg
@path = options.path
else if typeof(arg) is 'string'
@path = arg
# Database class which we work with
nosqlite.Connection::database = (name, mode) ->
that = this
# Variables
dir: path.resolve that.path, name
name: name || 'test'
mode: mode || '0775'
# Utils
file: (id) ->
path.resolve @dir, id + '.json'
project: (onto, from) ->
Object.keys(from).forEach (k) ->
onto[k] = from[k]
onto
satisfy: (data, cond) ->
Object.keys(cond).every (k) ->
if data[k] is cond[k] then true else false
# Write files tmp and rename
_write: (id, data, cb) ->
fs.writeFile @file('.' + id), data, (err) =>
if err then cb err else fs.rename @file('.' + id), @file(id), cb
_writeSync: (id, data) ->
fs.writeFileSync @file('.' + id), data
fs.renameSync @file('.' + id), @file(id)
# Check if db exists
exists: (cb) ->
path.exists @dir, cb
existsSync: ->
path.existsSync @dir
# Create db
create: (cb) ->
fs.mkdir @dir, @mode, cb
createSync: ->
fs.mkdirSync @dir, @mode
# Destroy db
destroy: (cb) ->
rimraf @dir, cb
destroySync: ->
rimraf.sync @dir
# Get doc by id
get: (id, cb) ->
fs.readFile @file(id), 'utf8', (err, data) ->
cb err, (JSON.parse(data) if data)
getSync: (id) ->
JSON.parse fs.readFileSync @file(id), 'utf8'
# Remove doc by id
delete: (id, cb) ->
fs.unlink @file(id), cb
deleteSync: (id) ->
fs.unlinkSync @file(id)
# Update doc by id
put: (id, obj, cb) ->
@get id, (err, data) =>
data = @project data, obj
@_write id, JSON.stringify(data, null, 2), cb
putSync: (id, obj) ->
data = @project @getSync(id), obj
@_writeSync id, JSON.stringify(data, null, 2)
# Create doc
post: (obj, cb) ->
@_write obj.id or obj._id, JSON.stringify(obj, null, 2), cb
postSync: (obj) ->
@_writeSync obj.id or obj._id, JSON.stringify(obj, null, 2)
# Find a doc
find: (cond, cb) ->
fs.readdir @dir, (err, files) =>
async.map files, (file, callback) =>
@get path.basename(file, '.json'), (err, data) =>
if @satisfy data, cond then callback err, data else callback err, null
, (err, files) ->
cb err, files.filter (file) -> file?
findSync: (cond) ->
files = fs.readdirSync @dir
files = files.map (file) =>
data = @getSync path.basename(file, '.json')
if @satisfy data, cond then data else null
files.filter (file) -> file?
# Get all docs
all: (cb) ->
fs.readdir @dir, (err, files) =>
async.map files, (file, callback) =>
@get path.basename(file, '.json'), callback
, cb
allSync: ->
files = fs.readdirSync @dir
files.map (file) =>
@getSync path.basename file, '.json'
| 217690 | #
# nosqlite.coffee - top level file
#
# Copyright © 2011 <NAME>. All rights reserved
#
nosqlite = module.exports
# Requiring modules
path = require 'path'
fs = require 'fs'
rimraf = require '../vendor/rimraf'
async = require 'async'
# Declaring variables
nosqlite.path = path.join __dirname, '..', 'data'
# Connection class for nosqlite
nosqlite.Connection = (arg) ->
options = {}
@path = nosqlite.path
if typeof(arg) is 'object'
options = arg
@path = options.path
else if typeof(arg) is 'string'
@path = arg
# Database class which we work with
nosqlite.Connection::database = (name, mode) ->
that = this
# Variables
dir: path.resolve that.path, name
name: name || 'test'
mode: mode || '0775'
# Utils
file: (id) ->
path.resolve @dir, id + '.json'
project: (onto, from) ->
Object.keys(from).forEach (k) ->
onto[k] = from[k]
onto
satisfy: (data, cond) ->
Object.keys(cond).every (k) ->
if data[k] is cond[k] then true else false
# Write files tmp and rename
_write: (id, data, cb) ->
fs.writeFile @file('.' + id), data, (err) =>
if err then cb err else fs.rename @file('.' + id), @file(id), cb
_writeSync: (id, data) ->
fs.writeFileSync @file('.' + id), data
fs.renameSync @file('.' + id), @file(id)
# Check if db exists
exists: (cb) ->
path.exists @dir, cb
existsSync: ->
path.existsSync @dir
# Create db
create: (cb) ->
fs.mkdir @dir, @mode, cb
createSync: ->
fs.mkdirSync @dir, @mode
# Destroy db
destroy: (cb) ->
rimraf @dir, cb
destroySync: ->
rimraf.sync @dir
# Get doc by id
get: (id, cb) ->
fs.readFile @file(id), 'utf8', (err, data) ->
cb err, (JSON.parse(data) if data)
getSync: (id) ->
JSON.parse fs.readFileSync @file(id), 'utf8'
# Remove doc by id
delete: (id, cb) ->
fs.unlink @file(id), cb
deleteSync: (id) ->
fs.unlinkSync @file(id)
# Update doc by id
put: (id, obj, cb) ->
@get id, (err, data) =>
data = @project data, obj
@_write id, JSON.stringify(data, null, 2), cb
putSync: (id, obj) ->
data = @project @getSync(id), obj
@_writeSync id, JSON.stringify(data, null, 2)
# Create doc
post: (obj, cb) ->
@_write obj.id or obj._id, JSON.stringify(obj, null, 2), cb
postSync: (obj) ->
@_writeSync obj.id or obj._id, JSON.stringify(obj, null, 2)
# Find a doc
find: (cond, cb) ->
fs.readdir @dir, (err, files) =>
async.map files, (file, callback) =>
@get path.basename(file, '.json'), (err, data) =>
if @satisfy data, cond then callback err, data else callback err, null
, (err, files) ->
cb err, files.filter (file) -> file?
findSync: (cond) ->
files = fs.readdirSync @dir
files = files.map (file) =>
data = @getSync path.basename(file, '.json')
if @satisfy data, cond then data else null
files.filter (file) -> file?
# Get all docs
all: (cb) ->
fs.readdir @dir, (err, files) =>
async.map files, (file, callback) =>
@get path.basename(file, '.json'), callback
, cb
allSync: ->
files = fs.readdirSync @dir
files.map (file) =>
@getSync path.basename file, '.json'
| true | #
# nosqlite.coffee - top level file
#
# Copyright © 2011 PI:NAME:<NAME>END_PI. All rights reserved
#
nosqlite = module.exports
# Requiring modules
path = require 'path'
fs = require 'fs'
rimraf = require '../vendor/rimraf'
async = require 'async'
# Declaring variables
nosqlite.path = path.join __dirname, '..', 'data'
# Connection class for nosqlite
nosqlite.Connection = (arg) ->
options = {}
@path = nosqlite.path
if typeof(arg) is 'object'
options = arg
@path = options.path
else if typeof(arg) is 'string'
@path = arg
# Database class which we work with
nosqlite.Connection::database = (name, mode) ->
that = this
# Variables
dir: path.resolve that.path, name
name: name || 'test'
mode: mode || '0775'
# Utils
file: (id) ->
path.resolve @dir, id + '.json'
project: (onto, from) ->
Object.keys(from).forEach (k) ->
onto[k] = from[k]
onto
satisfy: (data, cond) ->
Object.keys(cond).every (k) ->
if data[k] is cond[k] then true else false
# Write files tmp and rename
_write: (id, data, cb) ->
fs.writeFile @file('.' + id), data, (err) =>
if err then cb err else fs.rename @file('.' + id), @file(id), cb
_writeSync: (id, data) ->
fs.writeFileSync @file('.' + id), data
fs.renameSync @file('.' + id), @file(id)
# Check if db exists
exists: (cb) ->
path.exists @dir, cb
existsSync: ->
path.existsSync @dir
# Create db
create: (cb) ->
fs.mkdir @dir, @mode, cb
createSync: ->
fs.mkdirSync @dir, @mode
# Destroy db
destroy: (cb) ->
rimraf @dir, cb
destroySync: ->
rimraf.sync @dir
# Get doc by id
get: (id, cb) ->
fs.readFile @file(id), 'utf8', (err, data) ->
cb err, (JSON.parse(data) if data)
getSync: (id) ->
JSON.parse fs.readFileSync @file(id), 'utf8'
# Remove doc by id
delete: (id, cb) ->
fs.unlink @file(id), cb
deleteSync: (id) ->
fs.unlinkSync @file(id)
# Update doc by id
put: (id, obj, cb) ->
@get id, (err, data) =>
data = @project data, obj
@_write id, JSON.stringify(data, null, 2), cb
putSync: (id, obj) ->
data = @project @getSync(id), obj
@_writeSync id, JSON.stringify(data, null, 2)
# Create doc
post: (obj, cb) ->
@_write obj.id or obj._id, JSON.stringify(obj, null, 2), cb
postSync: (obj) ->
@_writeSync obj.id or obj._id, JSON.stringify(obj, null, 2)
# Find a doc
find: (cond, cb) ->
fs.readdir @dir, (err, files) =>
async.map files, (file, callback) =>
@get path.basename(file, '.json'), (err, data) =>
if @satisfy data, cond then callback err, data else callback err, null
, (err, files) ->
cb err, files.filter (file) -> file?
findSync: (cond) ->
files = fs.readdirSync @dir
files = files.map (file) =>
data = @getSync path.basename(file, '.json')
if @satisfy data, cond then data else null
files.filter (file) -> file?
# Get all docs
all: (cb) ->
fs.readdir @dir, (err, files) =>
async.map files, (file, callback) =>
@get path.basename(file, '.json'), callback
, cb
allSync: ->
files = fs.readdirSync @dir
files.map (file) =>
@getSync path.basename file, '.json'
|
[
{
"context": ":\n#\n# Configuration:\n#\n# Commands:\n#\n# Author:\n# Thomas Howe - ghostofbasho@gmail.com\n#\n\nTelnet = require('tel",
"end": 125,
"score": 0.9998669624328613,
"start": 114,
"tag": "NAME",
"value": "Thomas Howe"
},
{
"context": "ation:\n#\n# Commands:\n#\n# Author... | src/scripts/telnet.coffee | green-bot/hubot-session | 1 | # Description:
# Connects greenbot to Telnet
#
# Dependencies:
#
# Configuration:
#
# Commands:
#
# Author:
# Thomas Howe - ghostofbasho@gmail.com
#
Telnet = require('telnet')
ShortUUID = require 'shortid'
module.exports = (robot) ->
Telnet.createServer((client) ->
sessionId = ShortUUID.generate()
client.on 'data', (b) ->
msg =
dst: '12183255075'
src: 'telnet'
txt: b.toString()
robot.emit 'telnet:ingress', msg
return
robot.on "telnet:egress:telnet", (txt) ->
client.write new Buffer txt + "\n"
return
).listen 3002
| 1526 | # Description:
# Connects greenbot to Telnet
#
# Dependencies:
#
# Configuration:
#
# Commands:
#
# Author:
# <NAME> - <EMAIL>
#
Telnet = require('telnet')
ShortUUID = require 'shortid'
module.exports = (robot) ->
Telnet.createServer((client) ->
sessionId = ShortUUID.generate()
client.on 'data', (b) ->
msg =
dst: '12183255075'
src: 'telnet'
txt: b.toString()
robot.emit 'telnet:ingress', msg
return
robot.on "telnet:egress:telnet", (txt) ->
client.write new Buffer txt + "\n"
return
).listen 3002
| true | # Description:
# Connects greenbot to Telnet
#
# Dependencies:
#
# Configuration:
#
# Commands:
#
# Author:
# PI:NAME:<NAME>END_PI - PI:EMAIL:<EMAIL>END_PI
#
Telnet = require('telnet')
ShortUUID = require 'shortid'
module.exports = (robot) ->
Telnet.createServer((client) ->
sessionId = ShortUUID.generate()
client.on 'data', (b) ->
msg =
dst: '12183255075'
src: 'telnet'
txt: b.toString()
robot.emit 'telnet:ingress', msg
return
robot.on "telnet:egress:telnet", (txt) ->
client.write new Buffer txt + "\n"
return
).listen 3002
|
[
{
"context": "ame: \"from\", 'data-default-text': \"from\", value: \"jae@ycatalyst.com\"\n br()\n input id: \"to\", type: \"text\", name:",
"end": 200,
"score": 0.999925971031189,
"start": 183,
"tag": "EMAIL",
"value": "jae@ycatalyst.com"
}
] | templates/admin/messages.coffee | jaekwon/YCatalyst | 3 | exports.template = ->
p "Send an email"
form action: "/admin/messages", method: "POST", ->
input id: "from", type: "text", name: "from", 'data-default-text': "from", value: "jae@ycatalyst.com"
br()
input id: "to", type: "text", name: "to", 'data-default-text': "to"
br()
input id: "subject", type: "text", name: "subject", 'data-default-text': "subject"
br()
textarea id: "body", name: "body", cols: 60, rows: 20, 'data-default-text': "body"
br()
input type: "submit", value: "send"
exports.coffeescript = ->
$(document).ready ->
$('[data-default-text]').set_default_text()
$('#body').make_autoresizable()
| 5574 | exports.template = ->
p "Send an email"
form action: "/admin/messages", method: "POST", ->
input id: "from", type: "text", name: "from", 'data-default-text': "from", value: "<EMAIL>"
br()
input id: "to", type: "text", name: "to", 'data-default-text': "to"
br()
input id: "subject", type: "text", name: "subject", 'data-default-text': "subject"
br()
textarea id: "body", name: "body", cols: 60, rows: 20, 'data-default-text': "body"
br()
input type: "submit", value: "send"
exports.coffeescript = ->
$(document).ready ->
$('[data-default-text]').set_default_text()
$('#body').make_autoresizable()
| true | exports.template = ->
p "Send an email"
form action: "/admin/messages", method: "POST", ->
input id: "from", type: "text", name: "from", 'data-default-text': "from", value: "PI:EMAIL:<EMAIL>END_PI"
br()
input id: "to", type: "text", name: "to", 'data-default-text': "to"
br()
input id: "subject", type: "text", name: "subject", 'data-default-text': "subject"
br()
textarea id: "body", name: "body", cols: 60, rows: 20, 'data-default-text': "body"
br()
input type: "submit", value: "send"
exports.coffeescript = ->
$(document).ready ->
$('[data-default-text]').set_default_text()
$('#body').make_autoresizable()
|
[
{
"context": "obile, text, options = {}, callback = ->) ->\n # @osv\n return callback(null, ok: 1)\n {redis} = re",
"end": 242,
"score": 0.8085721135139465,
"start": 238,
"tag": "USERNAME",
"value": "@osv"
},
{
"context": " = moment().endOf('day').valueOf()\n rateKey = \"... | talk-api2x/server/util/sms.coffee | ikingye/talk-os | 3,084 | Err = require 'err1st'
Promise = require 'bluebird'
requestAsync = Promise.promisify(require 'request')
config = require 'config'
moment = require 'moment'
module.exports =
sendSMS: (mobile, text, options = {}, callback = ->) ->
# @osv
return callback(null, ok: 1)
{redis} = require '../components'
endOfToday = moment().endOf('day').valueOf()
rateKey = "sms:#{mobile}:#{endOfToday}"
if options.dailyRate
$overRate = redis.getAsync rateKey
.then (rateNum) ->
return unless rateNum
if Number(rateNum) > options.dailyRate
throw new Err 'MOBILE_RATE_EXCEEDED'
return
else $overRate = Promise.resolve()
$sendSMS = $overRate.then ->
_options =
method: 'POST'
url: config.sms.host + '/send'
json: true
body:
key: config.sms.key
secret: config.sms.secret
phone: mobile
ip: options.ip
_userId: options._userId
refer: options.refer
msg: text
uid: options.uid or options._userId
requestAsync _options
.spread (res) ->
throw new Err('SEND_SMS_ERROR') unless res?.statusCode is 200
res.body
$setRate = $sendSMS.then ->
remainSeconds = Math.floor((endOfToday - Date.now())/1000)
redis
.multi()
.incr rateKey
.expire rateKey, remainSeconds
.execAsync()
Promise.all [$sendSMS, $setRate]
.spread (body) -> body
.nodeify callback
| 5516 | Err = require 'err1st'
Promise = require 'bluebird'
requestAsync = Promise.promisify(require 'request')
config = require 'config'
moment = require 'moment'
module.exports =
sendSMS: (mobile, text, options = {}, callback = ->) ->
# @osv
return callback(null, ok: 1)
{redis} = require '../components'
endOfToday = moment().endOf('day').valueOf()
rateKey = "<KEY>endOf<KEY>
if options.dailyRate
$overRate = redis.getAsync rateKey
.then (rateNum) ->
return unless rateNum
if Number(rateNum) > options.dailyRate
throw new Err 'MOBILE_RATE_EXCEEDED'
return
else $overRate = Promise.resolve()
$sendSMS = $overRate.then ->
_options =
method: 'POST'
url: config.sms.host + '/send'
json: true
body:
key: config.sms.key
secret: config.sms.secret
phone: mobile
ip: options.ip
_userId: options._userId
refer: options.refer
msg: text
uid: options.uid or options._userId
requestAsync _options
.spread (res) ->
throw new Err('SEND_SMS_ERROR') unless res?.statusCode is 200
res.body
$setRate = $sendSMS.then ->
remainSeconds = Math.floor((endOfToday - Date.now())/1000)
redis
.multi()
.incr rateKey
.expire rateKey, remainSeconds
.execAsync()
Promise.all [$sendSMS, $setRate]
.spread (body) -> body
.nodeify callback
| true | Err = require 'err1st'
Promise = require 'bluebird'
requestAsync = Promise.promisify(require 'request')
config = require 'config'
moment = require 'moment'
module.exports =
sendSMS: (mobile, text, options = {}, callback = ->) ->
# @osv
return callback(null, ok: 1)
{redis} = require '../components'
endOfToday = moment().endOf('day').valueOf()
rateKey = "PI:KEY:<KEY>END_PIendOfPI:KEY:<KEY>END_PI
if options.dailyRate
$overRate = redis.getAsync rateKey
.then (rateNum) ->
return unless rateNum
if Number(rateNum) > options.dailyRate
throw new Err 'MOBILE_RATE_EXCEEDED'
return
else $overRate = Promise.resolve()
$sendSMS = $overRate.then ->
_options =
method: 'POST'
url: config.sms.host + '/send'
json: true
body:
key: config.sms.key
secret: config.sms.secret
phone: mobile
ip: options.ip
_userId: options._userId
refer: options.refer
msg: text
uid: options.uid or options._userId
requestAsync _options
.spread (res) ->
throw new Err('SEND_SMS_ERROR') unless res?.statusCode is 200
res.body
$setRate = $sendSMS.then ->
remainSeconds = Math.floor((endOfToday - Date.now())/1000)
redis
.multi()
.incr rateKey
.expire rateKey, remainSeconds
.execAsync()
Promise.all [$sendSMS, $setRate]
.spread (body) -> body
.nodeify callback
|
[
{
"context": "(RepositoryRow {repos: reposPair, key: reposPair[0].id + '-' + reposPair[1]?.id})\n )\n\n (div {className",
"end": 650,
"score": 0.796488881111145,
"start": 640,
"tag": "KEY",
"value": "id + '-' +"
},
{
"context": "eposPair, key: reposPair[0].id + '-' + reposP... | app/scripts/repository-list.coffee | Ocramius/travis-wallboard | 1 |
React = require('react')
RepositoryRow = require './repository-row.coffee'
_ = require('underscore')
{table, tr, td, th, thead, tbody, span, div, a, input} = React.DOM
RepositoryList = React.createClass(
displayName :'RepositoryFilter'
pairs: ->
_.chain(@props.repos)
.groupBy (repo, index) ->
Math.floor(index / 2)
.toArray()
render: ->
if !@props.repos.length
(div {className: 'container-fluid'},
(div {className: 'alert alert-default'}, 'No repositories found.')
)
else
rows = @pairs().map((reposPair) ->
(RepositoryRow {repos: reposPair, key: reposPair[0].id + '-' + reposPair[1]?.id})
)
(div {className: 'container-fluid'}, rows)
)
module.exports = RepositoryList | 143850 |
React = require('react')
RepositoryRow = require './repository-row.coffee'
_ = require('underscore')
{table, tr, td, th, thead, tbody, span, div, a, input} = React.DOM
RepositoryList = React.createClass(
displayName :'RepositoryFilter'
pairs: ->
_.chain(@props.repos)
.groupBy (repo, index) ->
Math.floor(index / 2)
.toArray()
render: ->
if !@props.repos.length
(div {className: 'container-fluid'},
(div {className: 'alert alert-default'}, 'No repositories found.')
)
else
rows = @pairs().map((reposPair) ->
(RepositoryRow {repos: reposPair, key: reposPair[0].<KEY> reposPair[1]?.<KEY>})
)
(div {className: 'container-fluid'}, rows)
)
module.exports = RepositoryList | true |
React = require('react')
RepositoryRow = require './repository-row.coffee'
_ = require('underscore')
{table, tr, td, th, thead, tbody, span, div, a, input} = React.DOM
RepositoryList = React.createClass(
displayName :'RepositoryFilter'
pairs: ->
_.chain(@props.repos)
.groupBy (repo, index) ->
Math.floor(index / 2)
.toArray()
render: ->
if !@props.repos.length
(div {className: 'container-fluid'},
(div {className: 'alert alert-default'}, 'No repositories found.')
)
else
rows = @pairs().map((reposPair) ->
(RepositoryRow {repos: reposPair, key: reposPair[0].PI:KEY:<KEY>END_PI reposPair[1]?.PI:KEY:<KEY>END_PI})
)
(div {className: 'container-fluid'}, rows)
)
module.exports = RepositoryList |
[
{
"context": ": 'post'\n\t\t\turl: '/signup'\n\t\t\tdata: \n\t\t\t\tpassword: data.password\n\t\t\t\trePassword: data.rePassword\n\t\t\t\tloginname: da",
"end": 126,
"score": 0.9918140769004822,
"start": 113,
"tag": "PASSWORD",
"value": "data.password"
},
{
"context": "data: \n\t\t\t... | public/src/pages/sign/signup.coffee | yuanzm/Live-Chat | 56 | signUpDataBus =
signup: (data, callback)->
$.ajax {
type: 'post'
url: '/signup'
data:
password: data.password
rePassword: data.rePassword
loginname: data.loginname
avatar: "/public/images/static/avatar.jpg"
email: data.email
success: (data)->
callback(data)
}
# 缓存DOM节点变量
$loginname = $('#loginname')
$email = $('#email')
$password = $('#password')
$rePassword = $('#re-password')
$signupBtn = $('#signup-btn')
$signupTips = $('.signup-tips')
###
# 页面加载完成执行的操作
###
$ ->
$signupBtn.bind 'click', signUpBtnHandler
###
# 点击登录按钮所执行的DOM操作
# 1. 如果出未填信息,给出红色文字提示
# 2. 如果用户的验证码错误,给出红色文字提示
# 2. 如果信息都填写完成,清空文字提示,返回用户信息
#
###
signUpBtnAction = ->
if not $loginname.val() or not $password.val() or not $rePassword.val() or not $email.val()
$signupTips.removeClass('green-tip').addClass('red-tip').text('请填写完整信息')
return false
else
$signupTips.removeClass('green-tip').removeClass('red-tip').text('')
return {
password: $password.val()
loginname: $loginname.val()
email: $email.val()
rePassword: $rePassword.val()
}
###
# 点击登录按钮所执行的逻辑
# 1. 判断用户是否输入完成,并且输入信息是否正确
###
signUpBtnHandler = ->
signupInfo = signUpBtnAction()
if signupInfo
signUpDataBus.signup signupInfo, (data)->
console.log data
if data.errCode isnt 200
$signupTips.removeClass('green-tip').addClass('red-tip').text(data.message)
if data.errCode is 200
$signupTips.removeClass('red-tip').addClass('green-tip').text('注册成功')
setTimeout ->
location.href = '/chat'
, 1000
| 203545 | signUpDataBus =
signup: (data, callback)->
$.ajax {
type: 'post'
url: '/signup'
data:
password: <PASSWORD>
rePassword: <PASSWORD>
loginname: data.loginname
avatar: "/public/images/static/avatar.jpg"
email: data.email
success: (data)->
callback(data)
}
# 缓存DOM节点变量
$loginname = $('#loginname')
$email = $('#email')
$password = <PASSWORD>')
$rePassword = $('#re-password')
$signupBtn = $('#signup-btn')
$signupTips = $('.signup-tips')
###
# 页面加载完成执行的操作
###
$ ->
$signupBtn.bind 'click', signUpBtnHandler
###
# 点击登录按钮所执行的DOM操作
# 1. 如果出未填信息,给出红色文字提示
# 2. 如果用户的验证码错误,给出红色文字提示
# 2. 如果信息都填写完成,清空文字提示,返回用户信息
#
###
signUpBtnAction = ->
if not $loginname.val() or not $password.val() or not $rePassword.val() or not $email.val()
$signupTips.removeClass('green-tip').addClass('red-tip').text('请填写完整信息')
return false
else
$signupTips.removeClass('green-tip').removeClass('red-tip').text('')
return {
password: $<PASSWORD>.val()
loginname: $loginname.val()
email: $email.val()
rePassword: $rePassword.val()
}
###
# 点击登录按钮所执行的逻辑
# 1. 判断用户是否输入完成,并且输入信息是否正确
###
signUpBtnHandler = ->
signupInfo = signUpBtnAction()
if signupInfo
signUpDataBus.signup signupInfo, (data)->
console.log data
if data.errCode isnt 200
$signupTips.removeClass('green-tip').addClass('red-tip').text(data.message)
if data.errCode is 200
$signupTips.removeClass('red-tip').addClass('green-tip').text('注册成功')
setTimeout ->
location.href = '/chat'
, 1000
| true | signUpDataBus =
signup: (data, callback)->
$.ajax {
type: 'post'
url: '/signup'
data:
password: PI:PASSWORD:<PASSWORD>END_PI
rePassword: PI:PASSWORD:<PASSWORD>END_PI
loginname: data.loginname
avatar: "/public/images/static/avatar.jpg"
email: data.email
success: (data)->
callback(data)
}
# 缓存DOM节点变量
$loginname = $('#loginname')
$email = $('#email')
$password = PI:PASSWORD:<PASSWORD>END_PI')
$rePassword = $('#re-password')
$signupBtn = $('#signup-btn')
$signupTips = $('.signup-tips')
###
# 页面加载完成执行的操作
###
$ ->
$signupBtn.bind 'click', signUpBtnHandler
###
# 点击登录按钮所执行的DOM操作
# 1. 如果出未填信息,给出红色文字提示
# 2. 如果用户的验证码错误,给出红色文字提示
# 2. 如果信息都填写完成,清空文字提示,返回用户信息
#
###
signUpBtnAction = ->
if not $loginname.val() or not $password.val() or not $rePassword.val() or not $email.val()
$signupTips.removeClass('green-tip').addClass('red-tip').text('请填写完整信息')
return false
else
$signupTips.removeClass('green-tip').removeClass('red-tip').text('')
return {
password: $PI:PASSWORD:<PASSWORD>END_PI.val()
loginname: $loginname.val()
email: $email.val()
rePassword: $rePassword.val()
}
###
# 点击登录按钮所执行的逻辑
# 1. 判断用户是否输入完成,并且输入信息是否正确
###
signUpBtnHandler = ->
signupInfo = signUpBtnAction()
if signupInfo
signUpDataBus.signup signupInfo, (data)->
console.log data
if data.errCode isnt 200
$signupTips.removeClass('green-tip').addClass('red-tip').text(data.message)
if data.errCode is 200
$signupTips.removeClass('red-tip').addClass('green-tip').text('注册成功')
setTimeout ->
location.href = '/chat'
, 1000
|
[
{
"context": "# Copyright 2013 Andrey Antukh <niwi@niwi.be>\n#\n# Licensed under the Apache Lice",
"end": 30,
"score": 0.9998871684074402,
"start": 17,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "# Copyright 2013 Andrey Antukh <niwi@niwi.be>\n#\n# Licensed under the Apac... | results/front/app/coffee/services/model.coffee | gnufede/results | 0 | # Copyright 2013 Andrey Antukh <niwi@niwi.be>
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ModelProvider = ($q, $http, $gmUrls, $gmStorage) ->
headers = ->
token = $gmStorage.get('token')
if token
return {"Authorization":"Token #{token}"}
return {}
class Model
constructor: (name, data, dataTypes) ->
@._attrs = data
@._name = name
@._dataTypes = dataTypes
@.setAttrs(data)
@.initialize()
applyCasts: ->
for attrName, castName of @._dataTypes
castMethod = service.casts[castName]
if not castMethod
continue
@._attrs[attrName] = castMethod(@._attrs[attrName])
getIdAttrName: ->
return "id"
getUrl: ->
return "#{$gmUrls.api(@_name)}/#{@.getAttrs()[@.getIdAttrName()]}"
getAttrs: (patch=false) ->
if patch
return _.extend({}, @._modifiedAttrs)
return _.extend({}, @._attrs, @._modifiedAttrs)
setAttrs: (attrs) ->
@._attrs = attrs
@._modifiedAttrs = {}
@.applyCasts()
@._isModified = false
setAttr: (name, value) ->
@._modifiedAttrs[name] = value
@._isModified = true
initialize: () ->
self = @
getter = (name) ->
return ->
if name.substr(0,2) == "__"
return self[name]
if name not in _.keys(self._modifiedAttrs)
return self._attrs[name]
return self._modifiedAttrs[name]
setter = (name) ->
return (value) ->
if name.substr(0,2) == "__"
self[name] = value
return
if self._attrs[name] != value
self._modifiedAttrs[name] = value
self._isModified = true
else
delete self._modifiedAttrs[name]
return
_.each @_attrs, (value, name) ->
options =
get: getter(name)
set: setter(name)
enumerable: true
configurable: true
Object.defineProperty(self, name, options)
serialize: () ->
data =
"data": _.clone(@_attrs)
"name": @_name
return JSON.stringify(data)
isModified: () ->
return this._isModified
markSaved: () ->
@._isModified = false
@._attrs = @.getAttrs()
@._modifiedAttrs = {}
revert: () ->
@_modifiedAttrs = {}
@_isModified = false
remove: () ->
defered = $q.defer()
self = @
params =
method: "DELETE"
url: @getUrl()
headers: headers()
promise = $http(params)
promise.success (data, status) ->
defered.resolve(self)
promise.error (data, status) ->
defered.reject(self)
return defered.promise
save: (patch=false, extraParams) ->
self = @
defered = $q.defer()
if not @isModified() and patch
defered.resolve(self)
return defered.promise
params =
url: @getUrl()
headers: headers(),
if patch
params.method = "PATCH"
else
params.method = "PUT"
params.data = JSON.stringify(@.getAttrs(patch))
params = _.extend({}, params, extraParams)
promise = $http(params)
promise.success (data, status) ->
self._isModified = false
self._attrs = _.extend(self.getAttrs(), data)
self._modifiedAttrs = {}
self.applyCasts()
defered.resolve(self)
promise.error (data, status) ->
defered.reject(data)
return defered.promise
refresh: () ->
defered = $q.defer()
self = @
params =
method: "GET",
url: @getUrl()
headers: headers()
promise = $http(params)
promise.success (data, status) ->
self._modifiedAttrs = {}
self._attrs = data
self._isModified = false
self.applyCasts()
defered.resolve(self)
promise.error (data, status) ->
defered.reject([data, status])
return defered.promise
@desSerialize = (sdata) ->
ddata = JSON.parse(sdata)
model = new Model(ddata.url, ddata.data)
return model
service = {}
service.make_model = (name, data, cls=Model, dataTypes={}) ->
return new cls(name, data, dataTypes)
service.create = (name, data, cls=Model, dataTypes={}) ->
defered = $q.defer()
params =
method: "POST"
url: $gmUrls.api(name)
headers: headers()
data: JSON.stringify(data)
promise = $http(params)
promise.success (_data, _status) ->
defered.resolve(service.make_model(name, _data, cls, dataTypes))
promise.error (data, status) ->
defered.reject(data)
return defered.promise
service.cls = Model
service.casts =
int: (value) ->
return parseInt(value, 10)
float: (value) ->
return parseFloat(value, 10)
return service
module = angular.module('results.services.model', [])
module.factory('$model', ['$q', '$http', '$gmUrls', '$gmStorage', ModelProvider])
| 224312 | # Copyright 2013 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ModelProvider = ($q, $http, $gmUrls, $gmStorage) ->
headers = ->
token = $gmStorage.get('token')
if token
return {"Authorization":"Token #{token}"}
return {}
class Model
constructor: (name, data, dataTypes) ->
@._attrs = data
@._name = name
@._dataTypes = dataTypes
@.setAttrs(data)
@.initialize()
applyCasts: ->
for attrName, castName of @._dataTypes
castMethod = service.casts[castName]
if not castMethod
continue
@._attrs[attrName] = castMethod(@._attrs[attrName])
getIdAttrName: ->
return "id"
getUrl: ->
return "#{$gmUrls.api(@_name)}/#{@.getAttrs()[@.getIdAttrName()]}"
getAttrs: (patch=false) ->
if patch
return _.extend({}, @._modifiedAttrs)
return _.extend({}, @._attrs, @._modifiedAttrs)
setAttrs: (attrs) ->
@._attrs = attrs
@._modifiedAttrs = {}
@.applyCasts()
@._isModified = false
setAttr: (name, value) ->
@._modifiedAttrs[name] = value
@._isModified = true
initialize: () ->
self = @
getter = (name) ->
return ->
if name.substr(0,2) == "__"
return self[name]
if name not in _.keys(self._modifiedAttrs)
return self._attrs[name]
return self._modifiedAttrs[name]
setter = (name) ->
return (value) ->
if name.substr(0,2) == "__"
self[name] = value
return
if self._attrs[name] != value
self._modifiedAttrs[name] = value
self._isModified = true
else
delete self._modifiedAttrs[name]
return
_.each @_attrs, (value, name) ->
options =
get: getter(name)
set: setter(name)
enumerable: true
configurable: true
Object.defineProperty(self, name, options)
serialize: () ->
data =
"data": _.clone(@_attrs)
"name": @_name
return JSON.stringify(data)
isModified: () ->
return this._isModified
markSaved: () ->
@._isModified = false
@._attrs = @.getAttrs()
@._modifiedAttrs = {}
revert: () ->
@_modifiedAttrs = {}
@_isModified = false
remove: () ->
defered = $q.defer()
self = @
params =
method: "DELETE"
url: @getUrl()
headers: headers()
promise = $http(params)
promise.success (data, status) ->
defered.resolve(self)
promise.error (data, status) ->
defered.reject(self)
return defered.promise
save: (patch=false, extraParams) ->
self = @
defered = $q.defer()
if not @isModified() and patch
defered.resolve(self)
return defered.promise
params =
url: @getUrl()
headers: headers(),
if patch
params.method = "PATCH"
else
params.method = "PUT"
params.data = JSON.stringify(@.getAttrs(patch))
params = _.extend({}, params, extraParams)
promise = $http(params)
promise.success (data, status) ->
self._isModified = false
self._attrs = _.extend(self.getAttrs(), data)
self._modifiedAttrs = {}
self.applyCasts()
defered.resolve(self)
promise.error (data, status) ->
defered.reject(data)
return defered.promise
refresh: () ->
defered = $q.defer()
self = @
params =
method: "GET",
url: @getUrl()
headers: headers()
promise = $http(params)
promise.success (data, status) ->
self._modifiedAttrs = {}
self._attrs = data
self._isModified = false
self.applyCasts()
defered.resolve(self)
promise.error (data, status) ->
defered.reject([data, status])
return defered.promise
@desSerialize = (sdata) ->
ddata = JSON.parse(sdata)
model = new Model(ddata.url, ddata.data)
return model
service = {}
service.make_model = (name, data, cls=Model, dataTypes={}) ->
return new cls(name, data, dataTypes)
service.create = (name, data, cls=Model, dataTypes={}) ->
defered = $q.defer()
params =
method: "POST"
url: $gmUrls.api(name)
headers: headers()
data: JSON.stringify(data)
promise = $http(params)
promise.success (_data, _status) ->
defered.resolve(service.make_model(name, _data, cls, dataTypes))
promise.error (data, status) ->
defered.reject(data)
return defered.promise
service.cls = Model
service.casts =
int: (value) ->
return parseInt(value, 10)
float: (value) ->
return parseFloat(value, 10)
return service
module = angular.module('results.services.model', [])
module.factory('$model', ['$q', '$http', '$gmUrls', '$gmStorage', ModelProvider])
| true | # Copyright 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ModelProvider = ($q, $http, $gmUrls, $gmStorage) ->
headers = ->
token = $gmStorage.get('token')
if token
return {"Authorization":"Token #{token}"}
return {}
class Model
constructor: (name, data, dataTypes) ->
@._attrs = data
@._name = name
@._dataTypes = dataTypes
@.setAttrs(data)
@.initialize()
applyCasts: ->
for attrName, castName of @._dataTypes
castMethod = service.casts[castName]
if not castMethod
continue
@._attrs[attrName] = castMethod(@._attrs[attrName])
getIdAttrName: ->
return "id"
getUrl: ->
return "#{$gmUrls.api(@_name)}/#{@.getAttrs()[@.getIdAttrName()]}"
getAttrs: (patch=false) ->
if patch
return _.extend({}, @._modifiedAttrs)
return _.extend({}, @._attrs, @._modifiedAttrs)
setAttrs: (attrs) ->
@._attrs = attrs
@._modifiedAttrs = {}
@.applyCasts()
@._isModified = false
setAttr: (name, value) ->
@._modifiedAttrs[name] = value
@._isModified = true
initialize: () ->
self = @
getter = (name) ->
return ->
if name.substr(0,2) == "__"
return self[name]
if name not in _.keys(self._modifiedAttrs)
return self._attrs[name]
return self._modifiedAttrs[name]
setter = (name) ->
return (value) ->
if name.substr(0,2) == "__"
self[name] = value
return
if self._attrs[name] != value
self._modifiedAttrs[name] = value
self._isModified = true
else
delete self._modifiedAttrs[name]
return
_.each @_attrs, (value, name) ->
options =
get: getter(name)
set: setter(name)
enumerable: true
configurable: true
Object.defineProperty(self, name, options)
serialize: () ->
data =
"data": _.clone(@_attrs)
"name": @_name
return JSON.stringify(data)
isModified: () ->
return this._isModified
markSaved: () ->
@._isModified = false
@._attrs = @.getAttrs()
@._modifiedAttrs = {}
revert: () ->
@_modifiedAttrs = {}
@_isModified = false
remove: () ->
defered = $q.defer()
self = @
params =
method: "DELETE"
url: @getUrl()
headers: headers()
promise = $http(params)
promise.success (data, status) ->
defered.resolve(self)
promise.error (data, status) ->
defered.reject(self)
return defered.promise
save: (patch=false, extraParams) ->
self = @
defered = $q.defer()
if not @isModified() and patch
defered.resolve(self)
return defered.promise
params =
url: @getUrl()
headers: headers(),
if patch
params.method = "PATCH"
else
params.method = "PUT"
params.data = JSON.stringify(@.getAttrs(patch))
params = _.extend({}, params, extraParams)
promise = $http(params)
promise.success (data, status) ->
self._isModified = false
self._attrs = _.extend(self.getAttrs(), data)
self._modifiedAttrs = {}
self.applyCasts()
defered.resolve(self)
promise.error (data, status) ->
defered.reject(data)
return defered.promise
refresh: () ->
defered = $q.defer()
self = @
params =
method: "GET",
url: @getUrl()
headers: headers()
promise = $http(params)
promise.success (data, status) ->
self._modifiedAttrs = {}
self._attrs = data
self._isModified = false
self.applyCasts()
defered.resolve(self)
promise.error (data, status) ->
defered.reject([data, status])
return defered.promise
@desSerialize = (sdata) ->
ddata = JSON.parse(sdata)
model = new Model(ddata.url, ddata.data)
return model
service = {}
service.make_model = (name, data, cls=Model, dataTypes={}) ->
return new cls(name, data, dataTypes)
service.create = (name, data, cls=Model, dataTypes={}) ->
defered = $q.defer()
params =
method: "POST"
url: $gmUrls.api(name)
headers: headers()
data: JSON.stringify(data)
promise = $http(params)
promise.success (_data, _status) ->
defered.resolve(service.make_model(name, _data, cls, dataTypes))
promise.error (data, status) ->
defered.reject(data)
return defered.promise
service.cls = Model
service.casts =
int: (value) ->
return parseInt(value, 10)
float: (value) ->
return parseFloat(value, 10)
return service
module = angular.module('results.services.model', [])
module.factory('$model', ['$q', '$http', '$gmUrls', '$gmStorage', ModelProvider])
|
[
{
"context": "e Function class extensions\n#\n# Copyright (C) 2011 Nikolay Nemshilov\n#\next Function.prototype,\n\n #\n # Binds this fun",
"end": 74,
"score": 0.9998807311058044,
"start": 57,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/lang/src/function.coffee | lovely-io/lovely.io-stl | 2 | #
# The Function class extensions
#
# Copyright (C) 2011 Nikolay Nemshilov
#
ext Function.prototype,

  #
  # Creates a proxy that runs this function in the given context
  # (native Function#bind is used when available).
  #
  # @param {Object} context
  # @param {mixed} optional argument to curry
  # ....
  # @return {Function} the proxy function
  #
  bind: Function.prototype.bind || ->
    curried = A(arguments)
    scope   = curried.shift()
    fn      = @
    -> fn.apply(scope, curried.concat(A(arguments)))

  #
  # Makes a left-curry proxy function
  #
  # @param {mixed} value to curry
  # ...
  # @return {Function} the proxy function
  #
  curry: ->
    @bind.apply(@, [@].concat(A(arguments)))

  #
  # Makes a right-curry proxy function
  #
  # @param {mixed} value to curry
  # ...
  # @return {Function} the proxy function
  #
  rcurry: ->
    tail = A(arguments)
    fn   = @
    -> fn.apply(fn, A(arguments).concat(tail))

  #
  # Makes a delayed call of the function
  #
  # @param {Number} delay in ms
  # @param {mixed} optional argument to curry
  # ...
  # @return {Number} timer marker (with a #cancel() method)
  #
  delay: ->
    params  = A(arguments)
    timeout = params.shift()
    marker  = new Number(setTimeout(@bind.apply(@, [@].concat(params)), timeout))
    ext(marker, cancel: -> clearTimeout(@))

  #
  # Makes the function to be periodically called with given interval
  #
  # @param {Number} calls interval in ms
  # @param {mixed} optional argument to curry
  # ...
  # @return {Number} timer marker (with a #stop() method)
  #
  periodical: (ms) ->
    params = A(arguments)
    ms     = params.shift()
    marker = new Number(setInterval(@bind.apply(@, [@].concat(params)), ms))
    ext(marker, stop: -> clearInterval(@))
| 130448 | #
# The Function class extensions
#
# Copyright (C) 2011 <NAME>
#
ext Function.prototype,
#
# Binds this function to be executed in given context
#
# @param {Object} context
# @param {mixed} optional argument to curry
# ....
# @return {Function} the proxy function
#
bind: Function.prototype.bind || ->
args = A(arguments)
context = args.shift()
method = @
-> method.apply(context, args.concat(A(arguments)))
#
# Makes a left-curry proxy function
#
# @param {mixed} value to curry
# ...
# @return {Function} the proxy function
#
curry: ->
@bind.apply @, [@].concat(A(arguments))
#
# Makes a right-curry proxy function
#
# @param {mixed} value to curry
# ...
# @return {Function} the proxy function
#
rcurry: ->
curry = A(arguments)
method = @
-> method.apply(method, A(arguments).concat(curry))
#
# Makes a delayed call of the function
#
# @param {Number} delay in ms
# @param {mixed} optional argument to curry
# ...
# @return {Number} timer marker
#
delay: ->
args = A(arguments)
ms = args.shift()
ext(
new Number(setTimeout(@bind.apply(this, [this].concat(args)), ms)),
cancel: -> clearTimeout(@))
#
# Makes the function to be periodically called with given interval
#
# @param {Number} calls interval in ms
# @param {mixed} optional argument to curry
# ...
# @return {Number} timer marker
#
#
periodical: (ms) ->
args = A(arguments)
ms = args.shift()
ext(
new Number(setInterval(@bind.apply(this, [this].concat(args)), ms)),
stop: -> clearInterval(@))
| true | #
# The Function class extensions
#
# Copyright (C) 2011 PI:NAME:<NAME>END_PI
#
ext Function.prototype,
#
# Binds this function to be executed in given context
#
# @param {Object} context
# @param {mixed} optional argument to curry
# ....
# @return {Function} the proxy function
#
bind: Function.prototype.bind || ->
args = A(arguments)
context = args.shift()
method = @
-> method.apply(context, args.concat(A(arguments)))
#
# Makes a left-curry proxy function
#
# @param {mixed} value to curry
# ...
# @return {Function} the proxy function
#
curry: ->
@bind.apply @, [@].concat(A(arguments))
#
# Makes a right-curry proxy function
#
# @param {mixed} value to curry
# ...
# @return {Function} the proxy function
#
rcurry: ->
curry = A(arguments)
method = @
-> method.apply(method, A(arguments).concat(curry))
#
# Makes a delayed call of the function
#
# @param {Number} delay in ms
# @param {mixed} optional argument to curry
# ...
# @return {Number} timer marker
#
delay: ->
args = A(arguments)
ms = args.shift()
ext(
new Number(setTimeout(@bind.apply(this, [this].concat(args)), ms)),
cancel: -> clearTimeout(@))
#
# Makes the function to be periodically called with given interval
#
# @param {Number} calls interval in ms
# @param {mixed} optional argument to curry
# ...
# @return {Number} timer marker
#
#
periodical: (ms) ->
args = A(arguments)
ms = args.shift()
ext(
new Number(setInterval(@bind.apply(this, [this].concat(args)), ms)),
stop: -> clearInterval(@))
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9946807622909546,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/revisionHistory.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Lists entries of revisions from provided rev-history, with add/removed diffing
Imm = require 'immutable'
DiffMatchPatch = require 'diff-match-patch'
{diffSentences, diffTrimmedLines, diffWordsWithSpace} = require 'diff'
Term = require './term'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
MetricWidget = require('./metricWidget').load(win)
ColorKeyBubble = require('./colorKeyBubble').load(win)
{FaIcon, renderLineBreaks, showWhen,
stripMetadata, formatTimestamp, capitalize} = require('./utils').load(win)
RevisionHistory = React.createFactory React.createClass
displayName: 'RevisionHistory'
mixins: [React.addons.PureRenderMixin]
getDefaultProps: -> {
terms: {}
metricsById: Imm.List()
disableSnapshot: false
}
propTypes: -> {
dataModelName: React.PropTypes.string().isRequired()
revisions: React.PropTypes.instanceOf(Imm.List).isRequired()
programsById: React.PropTypes.instanceOf(Imm.Map).isRequired()
}
_diffStrings: (oldString = "", newString = "") ->
dmp = new DiffMatchPatch()
# first pass; can lower timeout if too slow
# dmp.Diff_Timeout = 0.5
diffs = dmp.diff_main(oldString, newString)
# measure of similarity
lev = dmp.diff_levenshtein(diffs)
# second pass
if lev > 20
# compare the diff by sentences and diff by lines
# use the output that produces the cleanest output (shortest sum of removals)
diffsSentences = diffSentences(oldString, newString)
# TODO: newlineIsToken would remove all line-breaks for renderLineBreaks, but might be needed
# diffsLines = diffTrimmedLines(oldString, newString, {newlineIsToken:true})
diffsLines = diffTrimmedLines(oldString, newString)
diffsSentencesTotal = 0
diffsLinesTotal = 0
for diff in diffsSentences
if diff.removed?
diffsSentencesTotal += diff.value.length
for diff in diffsLines
if diff.removed?
diffsLinesTotal += diff.value.length
if diffsLinesTotal < diffsSentencesTotal
diffs = diffsLines
else
diffs = diffsSentences
else
diffs = diffWordsWithSpace(oldString, newString)
return R.span({className: 'value'},
# Iterate over diffs and assign a surrounding span, or just the plain string
diffs.map (diff, key) ->
lines = diff.value
.replace(/\r\n/g, '\n') # Windows -> Unix
.replace(/\r/g, '\n') # old Mac -> Unix
.split('\n')
value = []
for line, lineIndex in lines
if lineIndex > 0
value.push R.br({key: lineIndex})
if line.trim()
value.push line
if diff.added?
R.span({className: 'added', key}, value)
else if diff.removed?
R.span({className: 'removed', key}, value)
else
value
)
_generateChangeLogEntries: (revision, index) ->
changeLog = Imm.List()
# Convenience method for adding Imm changeLog entries
# Replace property name with term from @props when valid
pushToChangeLog = (entry) =>
if @props.terms[entry.property]?
entry.property = @props.terms[entry.property]
changeLog = changeLog.push Imm.fromJS(entry)
# Process the changes of an object or Imm.List from its predecessor
processChanges = ({parent, property, value, previousValue}) =>
# Handle regular values
if typeof value in ['string', 'number'] and value isnt previousValue
pushToChangeLog {
parent
property
action: 'revised'
value: @_diffStrings(previousValue, value)
}
# Is it an Imm list? (metrics)
else if property in ['metric', 'metrics']
# Generate 'revised' (metric value) changes
value.forEach (arrayItem) =>
itemId = arrayItem.get('id')
itemValue = arrayItem.get('value')
previousItem = previousValue.find (item) -> item.get('id') is itemId
previousItemValue = previousItem.get('value')
if previousItem? and itemValue isnt previousItemValue
pushToChangeLog {
parent
property: "#{Term 'metric'} value"
action: 'revised'
item: arrayItem
value: @_diffStrings(previousItemValue, itemValue)
}
# Return only creation change if 0-index, check existence to make sure
unless index > 0 and @props.revisions.reverse().get(index - 1)?
pushToChangeLog {
property: @props.dataModelName
action: 'created'
}
return changeLog
# Not a first revision, so let's build the diff objects for each property/value
# compared to the previous revision (ignoring metadata properties)
previousRevision = stripMetadata @props.revisions.reverse().get(index - 1)
currentRevision = stripMetadata revision
currentRevision.entrySeq().forEach ([property, value]) =>
# Ignore statusReason property, it can't be revised
return if property is 'statusReason'
# Account for previousRevision not having this property
previousRevisionValue = previousRevision.get(property) or ""
# Plain string & number comparison
if typeof value in ['string', 'number'] and value isnt previousRevisionValue
# Unique handling for 'status'
if property is 'status' and currentRevision.has('statusReason')
# Special case to convert status 'default' -> 'reactivated'
if value is 'default' then value = 'reactivated'
pushToChangeLog {
property
action: value
value
reason: currentRevision.get('statusReason')
}
else
pushToChangeLog {
property
action: 'revised'
value: @_diffStrings(previousRevisionValue, value)
}
# Imm List comparison (we assume existence of isList validates Imm.List)
else if not Imm.is value, previousRevisionValue
switch @props.type
when 'planTarget'
# Generate 'removed' changes
previousRevisionValue.forEach (item) ->
unless value.contains(item)
pushToChangeLog {
property
action: 'removed'
item
}
# Generate 'added' changes
value.forEach (item) ->
unless previousRevisionValue.contains(item)
pushToChangeLog {
property
action: 'added'
item
}
when 'progNote'
value.forEach (unit, unitIndex) =>
switch unit.get('type')
when 'basic'
unit.entrySeq().forEach ([property, value]) =>
previousValue = previousRevisionValue.getIn [unitIndex, property]
parent = unit.get('name')
processChanges {parent, property, value, previousValue}
when 'plan'
unit.get('sections').forEach (section, sectionIndex) =>
section.get('targets').forEach (target, targetIndex) =>
target.entrySeq().forEach ([property, value]) =>
# Grab the same target value from prev revision
previousValue = previousRevisionValue.getIn [
unitIndex
'sections', sectionIndex
'targets', targetIndex, property
]
parent = target.get('name')
processChanges {parent, property, value, previousValue}
else
throw new Error "Unknown unit type: #{unit.get('type')}"
else
throw new Error "Unknown RevisionHistory 'type': #{@props.type}"
# Fin.
return changeLog
_buildInChangeLog: (revision, index) ->
changeLog = @_generateChangeLogEntries revision, index
return revision.set 'changeLog', changeLog
render: ->
# Process revision history to devise change logs
# They're already in reverse-order, so reverse() to map changes
revisions = @props.revisions
.reverse()
.map(@_buildInChangeLog)
.reverse()
# Either use the revision's name (ex: target name), or the dataModel name
firstRevision = revisions.first()
dataName = if firstRevision? and firstRevision.get('name')
firstRevision.get('name')
else
capitalize(Term @props.dataModelName)
return R.div({className: 'revisionHistory'},
R.div({className: 'heading'},
(unless revisions.isEmpty()
R.section({className: 'dataName'},
revisions.first().get('name') or capitalize(Term @props.dataModelName)
)
)
R.section({className: 'title'}, "Revision History")
)
(if revisions.isEmpty()
R.div({className: 'noRevisions'},
"This #{@props.dataModelName} is new. ",
"It won't have any history until the #{Term 'client file'} is saved."
)
else
R.div({className: 'revisions'},
revisions.map (revision, index) => RevisionChangeLog({
key: revision.get('revisionId')
isFirstRevision: index is (revisions.size - 1)
revision
type: @props.type
metricsById: @props.metricsById
programsById: @props.programsById
dataModelName: @props.dataModelName
disableSnapshot: @props.disableSnapshot
})
)
)
)
RevisionChangeLog = React.createFactory React.createClass
displayName: 'RevisionChangeLog'
mixins: [React.addons.PureRenderMixin]
# TODO: propTypes
getInitialState: -> {
isSnapshotVisible: null
}
_toggleSnapshot: ->
@setState {isSnapshotVisible: not @state.isSnapshotVisible}
render: ->
revision = @props.revision
changeLog = revision.get('changeLog')
userProgramId = revision.get('authorProgramId')
userProgram = @props.programsById.get(userProgramId)
# Special cases made for planTarget types
isPlanTarget = @props.type is 'planTarget'
isRevision = changeLog.first()? and changeLog.first().get('action') is 'revised'
isTargetStatusChange = isPlanTarget and not isRevision
isRenameEntry = changeLog.first()? and changeLog.first().get('property') is 'name'
return R.section({className: 'revision'},
R.div({className: 'header'},
R.div({className: 'author'},
FaIcon('user')
revision.get('authorDisplayName') or revision.get('author')
)
R.div({className: 'timestamp'},
if @props.isFirstRevision and revision.get('backdate')
"#{formatTimestamp revision.get('backdate')} (late entry)"
else
formatTimestamp revision.get('timestamp')
(if userProgram
ColorKeyBubble({
colorKeyHex: userProgram.get('colorKeyHex')
popup: {
title: userProgram.get('name')
content: userProgram.get('description')
placement: 'left'
}
})
)
)
)
R.div({className: 'changeLog'},
(changeLog.map (entry, index) =>
ChangeLogEntry({
key: index
index
entry
revision
isPlanTarget
type: @props.type
dataModelName: @props.dataModelName
metricsById: @props.metricsById
onToggleSnapshot: @_toggleSnapshot
isSnapshotVisible: @state.isSnapshotVisible
disableSnapshot: @props.disableSnapshot
})
)
(if isPlanTarget and not isTargetStatusChange
RevisionSnapshot({
revision
metricsById: @props.metricsById
isRenameEntry
})
)
)
)
ChangeLogEntry = React.createFactory React.createClass
displayName: 'ChangeLogEntry'
mixins: [React.addons.PureRenderMixin]
render: ->
entry = @props.entry
isCreationEntry = entry.get('action') is 'created'
isRenameEntry = entry.get('property') is 'name'
# Account for terminology metricIds -> metrics
if entry.get('property') is 'metricIds'
entry = entry.set('property', 'metric')
R.article({className: 'entry', key: @props.index},
# # TODO: Restore as Diffing selector
# (if not @props.disableSnapshot and @props.index is 0 and entry.get('action') isnt 'created'
# R.button({
# className: 'btn btn-default btn-xs snapshotButton'
# onClick: @props.onToggleSnapshot
# },
# if not @props.isSnapshotVisible then "view" else "hide"
# " full revision"
# )
# )
R.span({className: 'action'},
# Different display cases for indication of change
(if entry.get('action') is 'created'
"#{capitalize entry.get('action')} #{entry.get('property')}
#{if @props.isPlanTarget then ' as:' else ''}"
else if entry.has('reason') # Status change
"#{capitalize entry.get('value')} #{@props.dataModelName}"
else if entry.has('parent') and not entry.get('parent').has? # Parent isn't an Imm Map obj
"#{capitalize entry.get('action')} #{entry.get('property')} for #{entry.get('parent')}"
else
"#{capitalize entry.get('action')} #{entry.get('property')}"
)
": " if not @props.isPlanTarget and not isCreationEntry
)
(if isCreationEntry and not @props.disableSnapshot
# We can show full snapshot for dataModel creation
RevisionSnapshot({
revision: @props.revision
dataModelName: @props.dataModelName
metricsById: @props.metricsById
#this is here to show the target name in the first history entry
isRenameEntry: true
})
# Unique handling for metrics
else if entry.get('property') in [Term('metric'), "#{Term 'metric'} value"]
if typeof entry.get('item') is 'string'
# This changeLog entry is a single ID string, so fetch latest metric
metricId = entry.get('item')
metric = @props.metricsById.get metricId
else
# Assume item is the metric object itself
metric = entry.get('item')
MetricWidget({
value: entry.get('value') # Use diffed value if exists
isEditable: false
name: metric.get('name')
definition: metric.get('definition')
tooltipViewport: 'article'
styleClass: 'clear' unless entry.get('value')
})
else if entry.get('property') is 'value'
metric = entry.get('item')
else if @props.isPlanTarget and entry.get('reason')
": \"#{entry.get('reason')}\""
else if not @props.isPlanTarget
if entry.get('reason') then "\"#{entry.get('reason')}\"" else entry.get('value')
)
)
RevisionSnapshot = ({revision, metricsById, isRenameEntry}) ->
hasMetrics = revision.get('metricIds')?
R.div({className: 'snapshot'},
if isRenameEntry
R.div({className: 'name'},
revision.get('name')
)
R.div({className: 'description'},
renderLineBreaks revision.get('description')
)
(if hasMetrics
R.div({className: 'metrics'},
(revision.get('metricIds').map (metricId) =>
metric = metricsById.get(metricId)
MetricWidget({
isEditable: false
key: metricId
name: metric.get('name')
definition: metric.get('definition')
tooltipViewport: '.snapshot'
})
)
)
)
)
return RevisionHistory
module.exports = {load}
| 6069 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Lists entries of revisions from provided rev-history, with add/removed diffing
Imm = require 'immutable'
DiffMatchPatch = require 'diff-match-patch'
{diffSentences, diffTrimmedLines, diffWordsWithSpace} = require 'diff'
Term = require './term'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
MetricWidget = require('./metricWidget').load(win)
ColorKeyBubble = require('./colorKeyBubble').load(win)
{FaIcon, renderLineBreaks, showWhen,
stripMetadata, formatTimestamp, capitalize} = require('./utils').load(win)
RevisionHistory = React.createFactory React.createClass
displayName: 'RevisionHistory'
mixins: [React.addons.PureRenderMixin]
getDefaultProps: -> {
terms: {}
metricsById: Imm.List()
disableSnapshot: false
}
propTypes: -> {
dataModelName: React.PropTypes.string().isRequired()
revisions: React.PropTypes.instanceOf(Imm.List).isRequired()
programsById: React.PropTypes.instanceOf(Imm.Map).isRequired()
}
_diffStrings: (oldString = "", newString = "") ->
dmp = new DiffMatchPatch()
# first pass; can lower timeout if too slow
# dmp.Diff_Timeout = 0.5
diffs = dmp.diff_main(oldString, newString)
# measure of similarity
lev = dmp.diff_levenshtein(diffs)
# second pass
if lev > 20
# compare the diff by sentences and diff by lines
# use the output that produces the cleanest output (shortest sum of removals)
diffsSentences = diffSentences(oldString, newString)
# TODO: newlineIsToken would remove all line-breaks for renderLineBreaks, but might be needed
# diffsLines = diffTrimmedLines(oldString, newString, {newlineIsToken:true})
diffsLines = diffTrimmedLines(oldString, newString)
diffsSentencesTotal = 0
diffsLinesTotal = 0
for diff in diffsSentences
if diff.removed?
diffsSentencesTotal += diff.value.length
for diff in diffsLines
if diff.removed?
diffsLinesTotal += diff.value.length
if diffsLinesTotal < diffsSentencesTotal
diffs = diffsLines
else
diffs = diffsSentences
else
diffs = diffWordsWithSpace(oldString, newString)
return R.span({className: 'value'},
# Iterate over diffs and assign a surrounding span, or just the plain string
diffs.map (diff, key) ->
lines = diff.value
.replace(/\r\n/g, '\n') # Windows -> Unix
.replace(/\r/g, '\n') # old Mac -> Unix
.split('\n')
value = []
for line, lineIndex in lines
if lineIndex > 0
value.push R.br({key: lineIndex})
if line.trim()
value.push line
if diff.added?
R.span({className: 'added', key}, value)
else if diff.removed?
R.span({className: 'removed', key}, value)
else
value
)
_generateChangeLogEntries: (revision, index) ->
changeLog = Imm.List()
# Convenience method for adding Imm changeLog entries
# Replace property name with term from @props when valid
pushToChangeLog = (entry) =>
if @props.terms[entry.property]?
entry.property = @props.terms[entry.property]
changeLog = changeLog.push Imm.fromJS(entry)
# Process the changes of an object or Imm.List from its predecessor
processChanges = ({parent, property, value, previousValue}) =>
# Handle regular values
if typeof value in ['string', 'number'] and value isnt previousValue
pushToChangeLog {
parent
property
action: 'revised'
value: @_diffStrings(previousValue, value)
}
# Is it an Imm list? (metrics)
else if property in ['metric', 'metrics']
# Generate 'revised' (metric value) changes
value.forEach (arrayItem) =>
itemId = arrayItem.get('id')
itemValue = arrayItem.get('value')
previousItem = previousValue.find (item) -> item.get('id') is itemId
previousItemValue = previousItem.get('value')
if previousItem? and itemValue isnt previousItemValue
pushToChangeLog {
parent
property: "#{Term 'metric'} value"
action: 'revised'
item: arrayItem
value: @_diffStrings(previousItemValue, itemValue)
}
# Return only creation change if 0-index, check existence to make sure
unless index > 0 and @props.revisions.reverse().get(index - 1)?
pushToChangeLog {
property: @props.dataModelName
action: 'created'
}
return changeLog
# Not a first revision, so let's build the diff objects for each property/value
# compared to the previous revision (ignoring metadata properties)
previousRevision = stripMetadata @props.revisions.reverse().get(index - 1)
currentRevision = stripMetadata revision
currentRevision.entrySeq().forEach ([property, value]) =>
# Ignore statusReason property, it can't be revised
return if property is 'statusReason'
# Account for previousRevision not having this property
previousRevisionValue = previousRevision.get(property) or ""
# Plain string & number comparison
if typeof value in ['string', 'number'] and value isnt previousRevisionValue
# Unique handling for 'status'
if property is 'status' and currentRevision.has('statusReason')
# Special case to convert status 'default' -> 'reactivated'
if value is 'default' then value = 'reactivated'
pushToChangeLog {
property
action: value
value
reason: currentRevision.get('statusReason')
}
else
pushToChangeLog {
property
action: 'revised'
value: @_diffStrings(previousRevisionValue, value)
}
# Imm List comparison (we assume existence of isList validates Imm.List)
else if not Imm.is value, previousRevisionValue
switch @props.type
when 'planTarget'
# Generate 'removed' changes
previousRevisionValue.forEach (item) ->
unless value.contains(item)
pushToChangeLog {
property
action: 'removed'
item
}
# Generate 'added' changes
value.forEach (item) ->
unless previousRevisionValue.contains(item)
pushToChangeLog {
property
action: 'added'
item
}
when 'progNote'
value.forEach (unit, unitIndex) =>
switch unit.get('type')
when 'basic'
unit.entrySeq().forEach ([property, value]) =>
previousValue = previousRevisionValue.getIn [unitIndex, property]
parent = unit.get('name')
processChanges {parent, property, value, previousValue}
when 'plan'
unit.get('sections').forEach (section, sectionIndex) =>
section.get('targets').forEach (target, targetIndex) =>
target.entrySeq().forEach ([property, value]) =>
# Grab the same target value from prev revision
previousValue = previousRevisionValue.getIn [
unitIndex
'sections', sectionIndex
'targets', targetIndex, property
]
parent = target.get('name')
processChanges {parent, property, value, previousValue}
else
throw new Error "Unknown unit type: #{unit.get('type')}"
else
throw new Error "Unknown RevisionHistory 'type': #{@props.type}"
# Fin.
return changeLog
_buildInChangeLog: (revision, index) ->
changeLog = @_generateChangeLogEntries revision, index
return revision.set 'changeLog', changeLog
render: ->
# Process revision history to devise change logs
# They're already in reverse-order, so reverse() to map changes
revisions = @props.revisions
.reverse()
.map(@_buildInChangeLog)
.reverse()
# Either use the revision's name (ex: target name), or the dataModel name
firstRevision = revisions.first()
dataName = if firstRevision? and firstRevision.get('name')
firstRevision.get('name')
else
capitalize(Term @props.dataModelName)
return R.div({className: 'revisionHistory'},
R.div({className: 'heading'},
(unless revisions.isEmpty()
R.section({className: 'dataName'},
revisions.first().get('name') or capitalize(Term @props.dataModelName)
)
)
R.section({className: 'title'}, "Revision History")
)
(if revisions.isEmpty()
R.div({className: 'noRevisions'},
"This #{@props.dataModelName} is new. ",
"It won't have any history until the #{Term 'client file'} is saved."
)
else
R.div({className: 'revisions'},
revisions.map (revision, index) => RevisionChangeLog({
key: revision.get('revisionId')
isFirstRevision: index is (revisions.size - 1)
revision
type: @props.type
metricsById: @props.metricsById
programsById: @props.programsById
dataModelName: @props.dataModelName
disableSnapshot: @props.disableSnapshot
})
)
)
)
RevisionChangeLog = React.createFactory React.createClass
displayName: 'RevisionChangeLog'
mixins: [React.addons.PureRenderMixin]
# TODO: propTypes
getInitialState: -> {
isSnapshotVisible: null
}
_toggleSnapshot: ->
@setState {isSnapshotVisible: not @state.isSnapshotVisible}
render: ->
revision = @props.revision
changeLog = revision.get('changeLog')
userProgramId = revision.get('authorProgramId')
userProgram = @props.programsById.get(userProgramId)
# Special cases made for planTarget types
isPlanTarget = @props.type is 'planTarget'
isRevision = changeLog.first()? and changeLog.first().get('action') is 'revised'
isTargetStatusChange = isPlanTarget and not isRevision
isRenameEntry = changeLog.first()? and changeLog.first().get('property') is 'name'
return R.section({className: 'revision'},
R.div({className: 'header'},
R.div({className: 'author'},
FaIcon('user')
revision.get('authorDisplayName') or revision.get('author')
)
R.div({className: 'timestamp'},
if @props.isFirstRevision and revision.get('backdate')
"#{formatTimestamp revision.get('backdate')} (late entry)"
else
formatTimestamp revision.get('timestamp')
(if userProgram
ColorKeyBubble({
colorKeyHex: userProgram.get('colorKeyHex')
popup: {
title: userProgram.get('name')
content: userProgram.get('description')
placement: 'left'
}
})
)
)
)
R.div({className: 'changeLog'},
(changeLog.map (entry, index) =>
ChangeLogEntry({
key: index
index
entry
revision
isPlanTarget
type: @props.type
dataModelName: @props.dataModelName
metricsById: @props.metricsById
onToggleSnapshot: @_toggleSnapshot
isSnapshotVisible: @state.isSnapshotVisible
disableSnapshot: @props.disableSnapshot
})
)
(if isPlanTarget and not isTargetStatusChange
RevisionSnapshot({
revision
metricsById: @props.metricsById
isRenameEntry
})
)
)
)
ChangeLogEntry = React.createFactory React.createClass
displayName: 'ChangeLogEntry'
mixins: [React.addons.PureRenderMixin]
render: ->
entry = @props.entry
isCreationEntry = entry.get('action') is 'created'
isRenameEntry = entry.get('property') is 'name'
# Account for terminology metricIds -> metrics
if entry.get('property') is 'metricIds'
entry = entry.set('property', 'metric')
R.article({className: 'entry', key: @props.index},
# # TODO: Restore as Diffing selector
# (if not @props.disableSnapshot and @props.index is 0 and entry.get('action') isnt 'created'
# R.button({
# className: 'btn btn-default btn-xs snapshotButton'
# onClick: @props.onToggleSnapshot
# },
# if not @props.isSnapshotVisible then "view" else "hide"
# " full revision"
# )
# )
R.span({className: 'action'},
# Different display cases for indication of change
(if entry.get('action') is 'created'
"#{capitalize entry.get('action')} #{entry.get('property')}
#{if @props.isPlanTarget then ' as:' else ''}"
else if entry.has('reason') # Status change
"#{capitalize entry.get('value')} #{@props.dataModelName}"
else if entry.has('parent') and not entry.get('parent').has? # Parent isn't an Imm Map obj
"#{capitalize entry.get('action')} #{entry.get('property')} for #{entry.get('parent')}"
else
"#{capitalize entry.get('action')} #{entry.get('property')}"
)
": " if not @props.isPlanTarget and not isCreationEntry
)
(if isCreationEntry and not @props.disableSnapshot
# We can show full snapshot for dataModel creation
RevisionSnapshot({
revision: @props.revision
dataModelName: @props.dataModelName
metricsById: @props.metricsById
#this is here to show the target name in the first history entry
isRenameEntry: true
})
# Unique handling for metrics
else if entry.get('property') in [Term('metric'), "#{Term 'metric'} value"]
if typeof entry.get('item') is 'string'
# This changeLog entry is a single ID string, so fetch latest metric
metricId = entry.get('item')
metric = @props.metricsById.get metricId
else
# Assume item is the metric object itself
metric = entry.get('item')
MetricWidget({
value: entry.get('value') # Use diffed value if exists
isEditable: false
name: metric.get('name')
definition: metric.get('definition')
tooltipViewport: 'article'
styleClass: 'clear' unless entry.get('value')
})
else if entry.get('property') is 'value'
metric = entry.get('item')
else if @props.isPlanTarget and entry.get('reason')
": \"#{entry.get('reason')}\""
else if not @props.isPlanTarget
if entry.get('reason') then "\"#{entry.get('reason')}\"" else entry.get('value')
)
)
RevisionSnapshot = ({revision, metricsById, isRenameEntry}) ->
hasMetrics = revision.get('metricIds')?
R.div({className: 'snapshot'},
if isRenameEntry
R.div({className: 'name'},
revision.get('name')
)
R.div({className: 'description'},
renderLineBreaks revision.get('description')
)
(if hasMetrics
R.div({className: 'metrics'},
(revision.get('metricIds').map (metricId) =>
metric = metricsById.get(metricId)
MetricWidget({
isEditable: false
key: metricId
name: metric.get('name')
definition: metric.get('definition')
tooltipViewport: '.snapshot'
})
)
)
)
)
return RevisionHistory
module.exports = {load}
| true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Lists entries of revisions from provided rev-history, with add/removed diffing
Imm = require 'immutable'
DiffMatchPatch = require 'diff-match-patch'
{diffSentences, diffTrimmedLines, diffWordsWithSpace} = require 'diff'
Term = require './term'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
MetricWidget = require('./metricWidget').load(win)
ColorKeyBubble = require('./colorKeyBubble').load(win)
{FaIcon, renderLineBreaks, showWhen,
stripMetadata, formatTimestamp, capitalize} = require('./utils').load(win)
RevisionHistory = React.createFactory React.createClass
displayName: 'RevisionHistory'
mixins: [React.addons.PureRenderMixin]
getDefaultProps: -> {
terms: {}
metricsById: Imm.List()
disableSnapshot: false
}
propTypes: -> {
dataModelName: React.PropTypes.string().isRequired()
revisions: React.PropTypes.instanceOf(Imm.List).isRequired()
programsById: React.PropTypes.instanceOf(Imm.Map).isRequired()
}
_diffStrings: (oldString = "", newString = "") ->
dmp = new DiffMatchPatch()
# first pass; can lower timeout if too slow
# dmp.Diff_Timeout = 0.5
diffs = dmp.diff_main(oldString, newString)
# measure of similarity
lev = dmp.diff_levenshtein(diffs)
# second pass
if lev > 20
# compare the diff by sentences and diff by lines
# use the output that produces the cleanest output (shortest sum of removals)
diffsSentences = diffSentences(oldString, newString)
# TODO: newlineIsToken would remove all line-breaks for renderLineBreaks, but might be needed
# diffsLines = diffTrimmedLines(oldString, newString, {newlineIsToken:true})
diffsLines = diffTrimmedLines(oldString, newString)
diffsSentencesTotal = 0
diffsLinesTotal = 0
for diff in diffsSentences
if diff.removed?
diffsSentencesTotal += diff.value.length
for diff in diffsLines
if diff.removed?
diffsLinesTotal += diff.value.length
if diffsLinesTotal < diffsSentencesTotal
diffs = diffsLines
else
diffs = diffsSentences
else
diffs = diffWordsWithSpace(oldString, newString)
return R.span({className: 'value'},
# Iterate over diffs and assign a surrounding span, or just the plain string
diffs.map (diff, key) ->
lines = diff.value
.replace(/\r\n/g, '\n') # Windows -> Unix
.replace(/\r/g, '\n') # old Mac -> Unix
.split('\n')
value = []
for line, lineIndex in lines
if lineIndex > 0
value.push R.br({key: lineIndex})
if line.trim()
value.push line
if diff.added?
R.span({className: 'added', key}, value)
else if diff.removed?
R.span({className: 'removed', key}, value)
else
value
)
_generateChangeLogEntries: (revision, index) ->
changeLog = Imm.List()
# Convenience method for adding Imm changeLog entries
# Replace property name with term from @props when valid
pushToChangeLog = (entry) =>
if @props.terms[entry.property]?
entry.property = @props.terms[entry.property]
changeLog = changeLog.push Imm.fromJS(entry)
# Process the changes of an object or Imm.List from its predecessor
processChanges = ({parent, property, value, previousValue}) =>
# Handle regular values
if typeof value in ['string', 'number'] and value isnt previousValue
pushToChangeLog {
parent
property
action: 'revised'
value: @_diffStrings(previousValue, value)
}
# Is it an Imm list? (metrics)
else if property in ['metric', 'metrics']
# Generate 'revised' (metric value) changes
value.forEach (arrayItem) =>
itemId = arrayItem.get('id')
itemValue = arrayItem.get('value')
previousItem = previousValue.find (item) -> item.get('id') is itemId
previousItemValue = previousItem.get('value')
if previousItem? and itemValue isnt previousItemValue
pushToChangeLog {
parent
property: "#{Term 'metric'} value"
action: 'revised'
item: arrayItem
value: @_diffStrings(previousItemValue, itemValue)
}
# Return only creation change if 0-index, check existence to make sure
unless index > 0 and @props.revisions.reverse().get(index - 1)?
pushToChangeLog {
property: @props.dataModelName
action: 'created'
}
return changeLog
# Not a first revision, so let's build the diff objects for each property/value
# compared to the previous revision (ignoring metadata properties)
previousRevision = stripMetadata @props.revisions.reverse().get(index - 1)
currentRevision = stripMetadata revision
currentRevision.entrySeq().forEach ([property, value]) =>
# Ignore statusReason property, it can't be revised
return if property is 'statusReason'
# Account for previousRevision not having this property
previousRevisionValue = previousRevision.get(property) or ""
# Plain string & number comparison
if typeof value in ['string', 'number'] and value isnt previousRevisionValue
# Unique handling for 'status'
if property is 'status' and currentRevision.has('statusReason')
# Special case to convert status 'default' -> 'reactivated'
if value is 'default' then value = 'reactivated'
pushToChangeLog {
property
action: value
value
reason: currentRevision.get('statusReason')
}
else
pushToChangeLog {
property
action: 'revised'
value: @_diffStrings(previousRevisionValue, value)
}
# Imm List comparison (we assume existence of isList validates Imm.List)
else if not Imm.is value, previousRevisionValue
switch @props.type
when 'planTarget'
# Generate 'removed' changes
previousRevisionValue.forEach (item) ->
unless value.contains(item)
pushToChangeLog {
property
action: 'removed'
item
}
# Generate 'added' changes
value.forEach (item) ->
unless previousRevisionValue.contains(item)
pushToChangeLog {
property
action: 'added'
item
}
when 'progNote'
value.forEach (unit, unitIndex) =>
switch unit.get('type')
when 'basic'
unit.entrySeq().forEach ([property, value]) =>
previousValue = previousRevisionValue.getIn [unitIndex, property]
parent = unit.get('name')
processChanges {parent, property, value, previousValue}
when 'plan'
unit.get('sections').forEach (section, sectionIndex) =>
section.get('targets').forEach (target, targetIndex) =>
target.entrySeq().forEach ([property, value]) =>
# Grab the same target value from prev revision
previousValue = previousRevisionValue.getIn [
unitIndex
'sections', sectionIndex
'targets', targetIndex, property
]
parent = target.get('name')
processChanges {parent, property, value, previousValue}
else
throw new Error "Unknown unit type: #{unit.get('type')}"
else
throw new Error "Unknown RevisionHistory 'type': #{@props.type}"
# Fin.
return changeLog
_buildInChangeLog: (revision, index) ->
changeLog = @_generateChangeLogEntries revision, index
return revision.set 'changeLog', changeLog
render: ->
# Process revision history to devise change logs
# They're already in reverse-order, so reverse() to map changes
revisions = @props.revisions
.reverse()
.map(@_buildInChangeLog)
.reverse()
# Either use the revision's name (ex: target name), or the dataModel name
firstRevision = revisions.first()
dataName = if firstRevision? and firstRevision.get('name')
firstRevision.get('name')
else
capitalize(Term @props.dataModelName)
return R.div({className: 'revisionHistory'},
R.div({className: 'heading'},
(unless revisions.isEmpty()
R.section({className: 'dataName'},
revisions.first().get('name') or capitalize(Term @props.dataModelName)
)
)
R.section({className: 'title'}, "Revision History")
)
(if revisions.isEmpty()
R.div({className: 'noRevisions'},
"This #{@props.dataModelName} is new. ",
"It won't have any history until the #{Term 'client file'} is saved."
)
else
R.div({className: 'revisions'},
revisions.map (revision, index) => RevisionChangeLog({
key: revision.get('revisionId')
isFirstRevision: index is (revisions.size - 1)
revision
type: @props.type
metricsById: @props.metricsById
programsById: @props.programsById
dataModelName: @props.dataModelName
disableSnapshot: @props.disableSnapshot
})
)
)
)
RevisionChangeLog = React.createFactory React.createClass
displayName: 'RevisionChangeLog'
mixins: [React.addons.PureRenderMixin]
# TODO: propTypes
getInitialState: -> {
isSnapshotVisible: null
}
_toggleSnapshot: ->
@setState {isSnapshotVisible: not @state.isSnapshotVisible}
render: ->
revision = @props.revision
changeLog = revision.get('changeLog')
userProgramId = revision.get('authorProgramId')
userProgram = @props.programsById.get(userProgramId)
# Special cases made for planTarget types
isPlanTarget = @props.type is 'planTarget'
isRevision = changeLog.first()? and changeLog.first().get('action') is 'revised'
isTargetStatusChange = isPlanTarget and not isRevision
isRenameEntry = changeLog.first()? and changeLog.first().get('property') is 'name'
return R.section({className: 'revision'},
R.div({className: 'header'},
R.div({className: 'author'},
FaIcon('user')
revision.get('authorDisplayName') or revision.get('author')
)
R.div({className: 'timestamp'},
if @props.isFirstRevision and revision.get('backdate')
"#{formatTimestamp revision.get('backdate')} (late entry)"
else
formatTimestamp revision.get('timestamp')
(if userProgram
ColorKeyBubble({
colorKeyHex: userProgram.get('colorKeyHex')
popup: {
title: userProgram.get('name')
content: userProgram.get('description')
placement: 'left'
}
})
)
)
)
R.div({className: 'changeLog'},
(changeLog.map (entry, index) =>
ChangeLogEntry({
key: index
index
entry
revision
isPlanTarget
type: @props.type
dataModelName: @props.dataModelName
metricsById: @props.metricsById
onToggleSnapshot: @_toggleSnapshot
isSnapshotVisible: @state.isSnapshotVisible
disableSnapshot: @props.disableSnapshot
})
)
(if isPlanTarget and not isTargetStatusChange
RevisionSnapshot({
revision
metricsById: @props.metricsById
isRenameEntry
})
)
)
)
ChangeLogEntry = React.createFactory React.createClass
displayName: 'ChangeLogEntry'
mixins: [React.addons.PureRenderMixin]
render: ->
entry = @props.entry
isCreationEntry = entry.get('action') is 'created'
isRenameEntry = entry.get('property') is 'name'
# Account for terminology metricIds -> metrics
if entry.get('property') is 'metricIds'
entry = entry.set('property', 'metric')
R.article({className: 'entry', key: @props.index},
# # TODO: Restore as Diffing selector
# (if not @props.disableSnapshot and @props.index is 0 and entry.get('action') isnt 'created'
# R.button({
# className: 'btn btn-default btn-xs snapshotButton'
# onClick: @props.onToggleSnapshot
# },
# if not @props.isSnapshotVisible then "view" else "hide"
# " full revision"
# )
# )
R.span({className: 'action'},
# Different display cases for indication of change
(if entry.get('action') is 'created'
"#{capitalize entry.get('action')} #{entry.get('property')}
#{if @props.isPlanTarget then ' as:' else ''}"
else if entry.has('reason') # Status change
"#{capitalize entry.get('value')} #{@props.dataModelName}"
else if entry.has('parent') and not entry.get('parent').has? # Parent isn't an Imm Map obj
"#{capitalize entry.get('action')} #{entry.get('property')} for #{entry.get('parent')}"
else
"#{capitalize entry.get('action')} #{entry.get('property')}"
)
": " if not @props.isPlanTarget and not isCreationEntry
)
(if isCreationEntry and not @props.disableSnapshot
# We can show full snapshot for dataModel creation
RevisionSnapshot({
revision: @props.revision
dataModelName: @props.dataModelName
metricsById: @props.metricsById
#this is here to show the target name in the first history entry
isRenameEntry: true
})
# Unique handling for metrics
else if entry.get('property') in [Term('metric'), "#{Term 'metric'} value"]
if typeof entry.get('item') is 'string'
# This changeLog entry is a single ID string, so fetch latest metric
metricId = entry.get('item')
metric = @props.metricsById.get metricId
else
# Assume item is the metric object itself
metric = entry.get('item')
MetricWidget({
value: entry.get('value') # Use diffed value if exists
isEditable: false
name: metric.get('name')
definition: metric.get('definition')
tooltipViewport: 'article'
styleClass: 'clear' unless entry.get('value')
})
else if entry.get('property') is 'value'
metric = entry.get('item')
else if @props.isPlanTarget and entry.get('reason')
": \"#{entry.get('reason')}\""
else if not @props.isPlanTarget
if entry.get('reason') then "\"#{entry.get('reason')}\"" else entry.get('value')
)
)
RevisionSnapshot = ({revision, metricsById, isRenameEntry}) ->
hasMetrics = revision.get('metricIds')?
R.div({className: 'snapshot'},
if isRenameEntry
R.div({className: 'name'},
revision.get('name')
)
R.div({className: 'description'},
renderLineBreaks revision.get('description')
)
(if hasMetrics
R.div({className: 'metrics'},
(revision.get('metricIds').map (metricId) =>
metric = metricsById.get(metricId)
MetricWidget({
isEditable: false
key: metricId
name: metric.get('name')
definition: metric.get('definition')
tooltipViewport: '.snapshot'
})
)
)
)
)
return RevisionHistory
module.exports = {load}
|
[
{
"context": " , (done) ->\n req.query =\n a: 'jack*or*jill'\n \n options = getOpti",
"end": 4261,
"score": 0.8893854022026062,
"start": 4257,
"tag": "NAME",
"value": "jack"
},
{
"context": "one) ->\n req.query =\n a: 'jack*or*ji... | test/server/controllers/helper/requestOptions.coffee | valueflowquality/gi-util-update | 0 | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
moment = require 'moment'
dir = path.normalize __dirname + '../../../../../server'
proxyquire = require 'proxyquire'
module.exports = () ->
stubs =
'./querySplitter':
processSplits: sinon.stub().returns [ {a: '1'}, {a: '2'} ]
processSplit: sinon.stub().returnsArg 0
requestOptions = proxyquire dir + '/controllers/helper/requestOptions', stubs
describe 'Exports', (done) ->
it 'getOptions: Function', (done) ->
expect(requestOptions).to.have.ownProperty 'getOptions'
expect(requestOptions.getOptions).to.be.a 'function'
done()
describe 'getOptions: Function(req) -> {options}', ->
getOptions = requestOptions.getOptions
req = null
model = null
options = null
beforeEach (done) ->
req =
systemId: '123'
giFilter:
aModel: 'anId'
aParent: 'aParentId'
model =
name: 'aModel'
done()
afterEach (done) ->
stubs['./querySplitter'].processSplits.reset()
done()
it 'sets query.systemId to req.systemId', (done) ->
options = getOptions req, model
expect(options.query.systemId).to.equal req.systemId
done()
it 'if gi filter is specified for the model name ' +
'set options.query._id', (done) ->
options = getOptions req, model
expect(options.query._id).to.equal req.giFilter.aModel
done()
it 'but not if gi filter is specified for a different model', (done) ->
model.name = 'anotherModel'
options = getOptions req, model
expect(options.query._id).to.not.exist
done()
it 'sets options.query[parent.field] for any parents specified' +
' in model.releations().parents', (done) ->
model.relations = () ->
parents: [
{modelName: 'aParent', field: 'parentModelId'}
{modelName: 'anotherParent', field: 'anotherParentModelId'}
]
options = getOptions req, model
expect(options.query.parentModelId).to.equal 'aParentId'
expect(options.query.anotherParentModelId).to.not.exist
done()
it 'does not set options.max if req.query.max does not exist', (done) ->
options = getOptions req, model
expect(options.max).to.not.exist
done()
it 'sets options.max for req.query.max', (done) ->
req.query =
max: 10
options = getOptions req, model
expect(options.max).to.equal req.query.max
done()
it 'does not set options.max if the value is not a number', (done) ->
req.query =
max: 'bob'
options = getOptions req, model
expect(options.max).to.not.exist
done()
it 'sets options.max to 0 if the value is less than 1', (done) ->
req.query =
max: -5
options = getOptions req, model
expect(options.max).to.equal 0
done()
it 'does not set options.sort if req.query.sort not given', (done) ->
options = getOptions req, model
expect(options.sort).to.not.exist
done()
it 'sets options.sort to req.query.sort', (done) ->
req.query =
sort: 'bob'
options = getOptions req, model
expect(options.sort).to.equal req.query.sort
done()
it 'does not set options.page if req.query.page not given', (done) ->
options = getOptions req, model
expect(options.page).to.not.exist
done()
it 'sets options.page to req.query.page', (done) ->
req.query =
page: 'alice'
options = getOptions req, model
expect(options.page).to.equal req.query.page
done()
it 'populates query.$or if query contains *or*', (done) ->
req.query =
a: '1*or*2'
options = getOptions req, model
expect(options.query).to.have.property '$or'
expect(options.query.$or).to.deep.equal [{a: '1'}, {a: '2'}]
done()
it 'calls processSplits with array of splits if query contains *or*'
, (done) ->
req.query =
a: 'jack*or*jill'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.true
expect(stubs['./querySplitter'].processSplits
.calledWith(['jack', 'jill'], 'a')).to.be.true
done()
it 'populates query.$and if query contains *and*', (done) ->
req.query =
a: '1*and*2'
options = getOptions req, model
expect(options.query).to.have.property '$and'
expect(options.query.$and).to.deep.equal [{a: '1'}, {a: '2'}]
done()
it 'calls processSplits with array of splits if query contains *and*'
, (done) ->
req.query =
b: 'alice*and*bob'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.true
expect(stubs['./querySplitter'].processSplits
.calledWith(['alice', 'bob'], 'b')).to.be.true
done()
it 'calls processSplit if query does contains neither *and* nor *or*'
, (done) ->
req.query =
c: 'charlie'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.false
expect(stubs['./querySplitter'].processSplit.called).to.be.true
expect(stubs['./querySplitter'].processSplit
.calledWith('charlie', 'c')).to.be.true
done()
| 54966 | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
moment = require 'moment'
dir = path.normalize __dirname + '../../../../../server'
proxyquire = require 'proxyquire'
module.exports = () ->
stubs =
'./querySplitter':
processSplits: sinon.stub().returns [ {a: '1'}, {a: '2'} ]
processSplit: sinon.stub().returnsArg 0
requestOptions = proxyquire dir + '/controllers/helper/requestOptions', stubs
describe 'Exports', (done) ->
it 'getOptions: Function', (done) ->
expect(requestOptions).to.have.ownProperty 'getOptions'
expect(requestOptions.getOptions).to.be.a 'function'
done()
describe 'getOptions: Function(req) -> {options}', ->
getOptions = requestOptions.getOptions
req = null
model = null
options = null
beforeEach (done) ->
req =
systemId: '123'
giFilter:
aModel: 'anId'
aParent: 'aParentId'
model =
name: 'aModel'
done()
afterEach (done) ->
stubs['./querySplitter'].processSplits.reset()
done()
it 'sets query.systemId to req.systemId', (done) ->
options = getOptions req, model
expect(options.query.systemId).to.equal req.systemId
done()
it 'if gi filter is specified for the model name ' +
'set options.query._id', (done) ->
options = getOptions req, model
expect(options.query._id).to.equal req.giFilter.aModel
done()
it 'but not if gi filter is specified for a different model', (done) ->
model.name = 'anotherModel'
options = getOptions req, model
expect(options.query._id).to.not.exist
done()
it 'sets options.query[parent.field] for any parents specified' +
' in model.releations().parents', (done) ->
model.relations = () ->
parents: [
{modelName: 'aParent', field: 'parentModelId'}
{modelName: 'anotherParent', field: 'anotherParentModelId'}
]
options = getOptions req, model
expect(options.query.parentModelId).to.equal 'aParentId'
expect(options.query.anotherParentModelId).to.not.exist
done()
it 'does not set options.max if req.query.max does not exist', (done) ->
options = getOptions req, model
expect(options.max).to.not.exist
done()
it 'sets options.max for req.query.max', (done) ->
req.query =
max: 10
options = getOptions req, model
expect(options.max).to.equal req.query.max
done()
it 'does not set options.max if the value is not a number', (done) ->
req.query =
max: 'bob'
options = getOptions req, model
expect(options.max).to.not.exist
done()
it 'sets options.max to 0 if the value is less than 1', (done) ->
req.query =
max: -5
options = getOptions req, model
expect(options.max).to.equal 0
done()
it 'does not set options.sort if req.query.sort not given', (done) ->
options = getOptions req, model
expect(options.sort).to.not.exist
done()
it 'sets options.sort to req.query.sort', (done) ->
req.query =
sort: 'bob'
options = getOptions req, model
expect(options.sort).to.equal req.query.sort
done()
it 'does not set options.page if req.query.page not given', (done) ->
options = getOptions req, model
expect(options.page).to.not.exist
done()
it 'sets options.page to req.query.page', (done) ->
req.query =
page: 'alice'
options = getOptions req, model
expect(options.page).to.equal req.query.page
done()
it 'populates query.$or if query contains *or*', (done) ->
req.query =
a: '1*or*2'
options = getOptions req, model
expect(options.query).to.have.property '$or'
expect(options.query.$or).to.deep.equal [{a: '1'}, {a: '2'}]
done()
it 'calls processSplits with array of splits if query contains *or*'
, (done) ->
req.query =
a: '<NAME>*or*<NAME>'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.true
expect(stubs['./querySplitter'].processSplits
.calledWith(['<NAME>', 'j<NAME>'], 'a')).to.be.true
done()
it 'populates query.$and if query contains *and*', (done) ->
req.query =
a: '1*and*2'
options = getOptions req, model
expect(options.query).to.have.property '$and'
expect(options.query.$and).to.deep.equal [{a: '1'}, {a: '2'}]
done()
it 'calls processSplits with array of splits if query contains *and*'
, (done) ->
req.query =
b: '<NAME>*and*bob'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.true
expect(stubs['./querySplitter'].processSplits
.calledWith(['<NAME>', 'bob'], 'b')).to.be.true
done()
it 'calls processSplit if query does contains neither *and* nor *or*'
, (done) ->
req.query =
c: '<NAME>'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.false
expect(stubs['./querySplitter'].processSplit.called).to.be.true
expect(stubs['./querySplitter'].processSplit
.calledWith('charlie', 'c')).to.be.true
done()
| true | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
moment = require 'moment'
dir = path.normalize __dirname + '../../../../../server'
proxyquire = require 'proxyquire'
module.exports = () ->
stubs =
'./querySplitter':
processSplits: sinon.stub().returns [ {a: '1'}, {a: '2'} ]
processSplit: sinon.stub().returnsArg 0
requestOptions = proxyquire dir + '/controllers/helper/requestOptions', stubs
describe 'Exports', (done) ->
it 'getOptions: Function', (done) ->
expect(requestOptions).to.have.ownProperty 'getOptions'
expect(requestOptions.getOptions).to.be.a 'function'
done()
describe 'getOptions: Function(req) -> {options}', ->
getOptions = requestOptions.getOptions
req = null
model = null
options = null
beforeEach (done) ->
req =
systemId: '123'
giFilter:
aModel: 'anId'
aParent: 'aParentId'
model =
name: 'aModel'
done()
afterEach (done) ->
stubs['./querySplitter'].processSplits.reset()
done()
it 'sets query.systemId to req.systemId', (done) ->
options = getOptions req, model
expect(options.query.systemId).to.equal req.systemId
done()
it 'if gi filter is specified for the model name ' +
'set options.query._id', (done) ->
options = getOptions req, model
expect(options.query._id).to.equal req.giFilter.aModel
done()
it 'but not if gi filter is specified for a different model', (done) ->
model.name = 'anotherModel'
options = getOptions req, model
expect(options.query._id).to.not.exist
done()
it 'sets options.query[parent.field] for any parents specified' +
' in model.releations().parents', (done) ->
model.relations = () ->
parents: [
{modelName: 'aParent', field: 'parentModelId'}
{modelName: 'anotherParent', field: 'anotherParentModelId'}
]
options = getOptions req, model
expect(options.query.parentModelId).to.equal 'aParentId'
expect(options.query.anotherParentModelId).to.not.exist
done()
it 'does not set options.max if req.query.max does not exist', (done) ->
options = getOptions req, model
expect(options.max).to.not.exist
done()
it 'sets options.max for req.query.max', (done) ->
req.query =
max: 10
options = getOptions req, model
expect(options.max).to.equal req.query.max
done()
it 'does not set options.max if the value is not a number', (done) ->
req.query =
max: 'bob'
options = getOptions req, model
expect(options.max).to.not.exist
done()
it 'sets options.max to 0 if the value is less than 1', (done) ->
req.query =
max: -5
options = getOptions req, model
expect(options.max).to.equal 0
done()
it 'does not set options.sort if req.query.sort not given', (done) ->
options = getOptions req, model
expect(options.sort).to.not.exist
done()
it 'sets options.sort to req.query.sort', (done) ->
req.query =
sort: 'bob'
options = getOptions req, model
expect(options.sort).to.equal req.query.sort
done()
it 'does not set options.page if req.query.page not given', (done) ->
options = getOptions req, model
expect(options.page).to.not.exist
done()
it 'sets options.page to req.query.page', (done) ->
req.query =
page: 'alice'
options = getOptions req, model
expect(options.page).to.equal req.query.page
done()
it 'populates query.$or if query contains *or*', (done) ->
req.query =
a: '1*or*2'
options = getOptions req, model
expect(options.query).to.have.property '$or'
expect(options.query.$or).to.deep.equal [{a: '1'}, {a: '2'}]
done()
it 'calls processSplits with array of splits if query contains *or*'
, (done) ->
req.query =
a: 'PI:NAME:<NAME>END_PI*or*PI:NAME:<NAME>END_PI'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.true
expect(stubs['./querySplitter'].processSplits
.calledWith(['PI:NAME:<NAME>END_PI', 'jPI:NAME:<NAME>END_PI'], 'a')).to.be.true
done()
it 'populates query.$and if query contains *and*', (done) ->
req.query =
a: '1*and*2'
options = getOptions req, model
expect(options.query).to.have.property '$and'
expect(options.query.$and).to.deep.equal [{a: '1'}, {a: '2'}]
done()
it 'calls processSplits with array of splits if query contains *and*'
, (done) ->
req.query =
b: 'PI:NAME:<NAME>END_PI*and*bob'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.true
expect(stubs['./querySplitter'].processSplits
.calledWith(['PI:NAME:<NAME>END_PI', 'bob'], 'b')).to.be.true
done()
it 'calls processSplit if query does contains neither *and* nor *or*'
, (done) ->
req.query =
c: 'PI:NAME:<NAME>END_PI'
options = getOptions req, model
expect(stubs['./querySplitter'].processSplits.called).to.be.false
expect(stubs['./querySplitter'].processSplit.called).to.be.true
expect(stubs['./querySplitter'].processSplit
.calledWith('charlie', 'c')).to.be.true
done()
|
[
{
"context": "module.exports =\n admin: [\n 'admin@abc.com'\n ]\n ca:\n passphrase: 'password'\n oauth2:\n ",
"end": 46,
"score": 0.9999195337295532,
"start": 33,
"tag": "EMAIL",
"value": "admin@abc.com"
},
{
"context": " [\n 'admin@abc.com'\n ]\n ca:\n passphrase: ... | backend/config/env/production.coffee | twhtanghk/ca | 0 | module.exports =
admin: [
'admin@abc.com'
]
ca:
passphrase: 'password'
oauth2:
url:
verify: 'https://mobtest.ml/auth/oauth2/verify/'
| 103623 | module.exports =
admin: [
'<EMAIL>'
]
ca:
passphrase: '<PASSWORD>'
oauth2:
url:
verify: 'https://mobtest.ml/auth/oauth2/verify/'
| true | module.exports =
admin: [
'PI:EMAIL:<EMAIL>END_PI'
]
ca:
passphrase: 'PI:PASSWORD:<PASSWORD>END_PI'
oauth2:
url:
verify: 'https://mobtest.ml/auth/oauth2/verify/'
|
[
{
"context": "tPOST(\n url(\"/users\"),\n {username: \"johndoe\", password: 'foobar'}\n signedHeaders\n ",
"end": 999,
"score": 0.9989417791366577,
"start": 992,
"tag": "USERNAME",
"value": "johndoe"
},
{
"context": "users\"),\n {username: \"johndoe\",... | test/unit/authSpec.coffee | theodo/angular-parse | 82 | describe 'auth', ->
backend = null
signedHeaders = null
url = null
afterEach inject ($window) ->
$window.localStorage.clear()
beforeEach ->
angular.module('ParseSpec', ['Parse']).config (ParseProvider) ->
ParseProvider.initialize 'appId', 'apiKey'
module 'ParseSpec'
inject (Parse, $injector) ->
# Helper method for matching API URLS
url = (path) ->
"#{Parse.BaseUrl}#{path}"
backend = $injector.get('$httpBackend')
signedHeaders = (headers) ->
headers["X-Parse-Application-Id"] == 'appId' and
headers["X-Parse-REST-API-KEY"] == "apiKey"
afterEach ->
backend.verifyNoOutstandingRequest()
backend.verifyNoOutstandingExpectation()
it 'has an auth property', inject (Parse) ->
expect(Parse.auth).not.toBeUndefined()
describe 'registering', ->
beforeEach inject (Parse, $window) ->
$window.localStorage = {}
backend.expectPOST(
url("/users"),
{username: "johndoe", password: 'foobar'}
signedHeaders
).respond(
{
"createdAt": "2011-11-07T20:58:34.448Z"
"objectId": "g7y9tkhB7O"
"sessionToken": "sessionTok"
}
)
Parse.auth.register("johndoe", "foobar")
backend.flush()
it 'sets the session token', inject (Parse) ->
expect(Parse.auth.sessionToken).toBe('sessionTok')
it 'sets the current user', inject (Parse) ->
user = Parse.auth.currentUser
expect(user.objectId).toBe('g7y9tkhB7O')
it 'clears the password on registation', inject ($window, Parse) ->
user = Parse.auth.currentUser
expect(user.password).toBeUndefined()
it 'stores sessionId to localStorage', inject ($window) ->
expect($window.localStorage.PARSE_SESSION_TOKEN).toBe 'sessionTok'
it 'stores user to localStorage', inject ($window, Parse) ->
info = Parse.auth.currentUser.attributes()
info.objectId = Parse.auth.currentUser.objectId
expect($window.localStorage.PARSE_USER_INFO).toBe JSON.stringify(info)
describe 'logging out', ->
beforeEach inject (Parse, $window) ->
Parse.auth._login(user = new Parse.User(
username: 'foo',
sessionToken: 'sessionTok'
))
expect(Parse.auth.currentUser.username).toBe 'foo'
expect($window.localStorage.PARSE_USER_INFO).toBeTruthy()
expect($window.localStorage.PARSE_SESSION_TOKEN).toBeTruthy()
Parse.auth.logout()
it 'clears localstorage sessionToken', inject ($window) ->
expect($window.localStorage.PARSE_SESSION_TOKEN).toBeUndefined()
it 'clears localstorage userInfo', inject ($window) ->
expect($window.localStorage.PARSE_USER_INFO).toBeUndefined()
it 'clears currentUser', inject (Parse) ->
expect(Parse.auth.currentUser).toBeNull()
it 'clears sessionToken', inject (Parse) ->
expect(Parse.auth.sessionToken).toBeNull()
describe 'resumeSession', ->
describe 'with session data', ->
user = null
beforeEach inject (Parse, $window) ->
user = new Parse.User
username: 'foobar'
sessionToken: 'sessTok'
$window.localStorage.setItem('PARSE_USER_INFO', JSON.stringify(user.attributes()))
$window.localStorage.setItem('PARSE_SESSION_TOKEN', user.sessionToken)
it 'exists', inject (Parse) ->
Parse.auth.resumeSession()
it 'sets the currentUser', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.currentUser.username).toBe 'foobar'
it 'sets the sessionToken', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.sessionToken).toBe 'sessTok'
describe 'without session data', ->
it 'doesnt set the currentUser', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.currentUser).toBeNull()
it 'doesnt set the sessionToken', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.sessionToken).toBeNull()
| 185607 | describe 'auth', ->
backend = null
signedHeaders = null
url = null
afterEach inject ($window) ->
$window.localStorage.clear()
beforeEach ->
angular.module('ParseSpec', ['Parse']).config (ParseProvider) ->
ParseProvider.initialize 'appId', 'apiKey'
module 'ParseSpec'
inject (Parse, $injector) ->
# Helper method for matching API URLS
url = (path) ->
"#{Parse.BaseUrl}#{path}"
backend = $injector.get('$httpBackend')
signedHeaders = (headers) ->
headers["X-Parse-Application-Id"] == 'appId' and
headers["X-Parse-REST-API-KEY"] == "apiKey"
afterEach ->
backend.verifyNoOutstandingRequest()
backend.verifyNoOutstandingExpectation()
it 'has an auth property', inject (Parse) ->
expect(Parse.auth).not.toBeUndefined()
describe 'registering', ->
beforeEach inject (Parse, $window) ->
$window.localStorage = {}
backend.expectPOST(
url("/users"),
{username: "johndoe", password: '<PASSWORD>'}
signedHeaders
).respond(
{
"createdAt": "2011-11-07T20:58:34.448Z"
"objectId": "g7y9tkhB7O"
"sessionToken": "sessionTok"
}
)
Parse.auth.register("johndoe", "foobar")
backend.flush()
it 'sets the session token', inject (Parse) ->
expect(Parse.auth.sessionToken).toBe('sessionTok')
it 'sets the current user', inject (Parse) ->
user = Parse.auth.currentUser
expect(user.objectId).toBe('g7y9tkhB7O')
it 'clears the password on registation', inject ($window, Parse) ->
user = Parse.auth.currentUser
expect(user.password).toBeUndefined()
it 'stores sessionId to localStorage', inject ($window) ->
expect($window.localStorage.PARSE_SESSION_TOKEN).toBe 'sessionTok'
it 'stores user to localStorage', inject ($window, Parse) ->
info = Parse.auth.currentUser.attributes()
info.objectId = Parse.auth.currentUser.objectId
expect($window.localStorage.PARSE_USER_INFO).toBe JSON.stringify(info)
describe 'logging out', ->
beforeEach inject (Parse, $window) ->
Parse.auth._login(user = new Parse.User(
username: 'foo',
sessionToken: '<PASSWORD>'
))
expect(Parse.auth.currentUser.username).toBe 'foo'
expect($window.localStorage.PARSE_USER_INFO).toBeTruthy()
expect($window.localStorage.PARSE_SESSION_TOKEN).toBeTruthy()
Parse.auth.logout()
it 'clears localstorage sessionToken', inject ($window) ->
expect($window.localStorage.PARSE_SESSION_TOKEN).toBeUndefined()
it 'clears localstorage userInfo', inject ($window) ->
expect($window.localStorage.PARSE_USER_INFO).toBeUndefined()
it 'clears currentUser', inject (Parse) ->
expect(Parse.auth.currentUser).toBeNull()
it 'clears sessionToken', inject (Parse) ->
expect(Parse.auth.sessionToken).toBeNull()
describe 'resumeSession', ->
describe 'with session data', ->
user = null
beforeEach inject (Parse, $window) ->
user = new Parse.User
username: 'foobar'
sessionToken: '<PASSWORD>'
$window.localStorage.setItem('PARSE_USER_INFO', JSON.stringify(user.attributes()))
$window.localStorage.setItem('PARSE_SESSION_TOKEN', user.sessionToken)
it 'exists', inject (Parse) ->
Parse.auth.resumeSession()
it 'sets the currentUser', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.currentUser.username).toBe 'foobar'
it 'sets the sessionToken', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.sessionToken).toBe '<KEY> <PASSWORD>'
describe 'without session data', ->
it 'doesnt set the currentUser', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.currentUser).toBeNull()
it 'doesnt set the sessionToken', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.sessionToken).toBeNull()
| true | describe 'auth', ->
backend = null
signedHeaders = null
url = null
afterEach inject ($window) ->
$window.localStorage.clear()
beforeEach ->
angular.module('ParseSpec', ['Parse']).config (ParseProvider) ->
ParseProvider.initialize 'appId', 'apiKey'
module 'ParseSpec'
inject (Parse, $injector) ->
# Helper method for matching API URLS
url = (path) ->
"#{Parse.BaseUrl}#{path}"
backend = $injector.get('$httpBackend')
signedHeaders = (headers) ->
headers["X-Parse-Application-Id"] == 'appId' and
headers["X-Parse-REST-API-KEY"] == "apiKey"
afterEach ->
backend.verifyNoOutstandingRequest()
backend.verifyNoOutstandingExpectation()
it 'has an auth property', inject (Parse) ->
expect(Parse.auth).not.toBeUndefined()
describe 'registering', ->
beforeEach inject (Parse, $window) ->
$window.localStorage = {}
backend.expectPOST(
url("/users"),
{username: "johndoe", password: 'PI:PASSWORD:<PASSWORD>END_PI'}
signedHeaders
).respond(
{
"createdAt": "2011-11-07T20:58:34.448Z"
"objectId": "g7y9tkhB7O"
"sessionToken": "sessionTok"
}
)
Parse.auth.register("johndoe", "foobar")
backend.flush()
it 'sets the session token', inject (Parse) ->
expect(Parse.auth.sessionToken).toBe('sessionTok')
it 'sets the current user', inject (Parse) ->
user = Parse.auth.currentUser
expect(user.objectId).toBe('g7y9tkhB7O')
it 'clears the password on registation', inject ($window, Parse) ->
user = Parse.auth.currentUser
expect(user.password).toBeUndefined()
it 'stores sessionId to localStorage', inject ($window) ->
expect($window.localStorage.PARSE_SESSION_TOKEN).toBe 'sessionTok'
it 'stores user to localStorage', inject ($window, Parse) ->
info = Parse.auth.currentUser.attributes()
info.objectId = Parse.auth.currentUser.objectId
expect($window.localStorage.PARSE_USER_INFO).toBe JSON.stringify(info)
describe 'logging out', ->
beforeEach inject (Parse, $window) ->
Parse.auth._login(user = new Parse.User(
username: 'foo',
sessionToken: 'PI:PASSWORD:<PASSWORD>END_PI'
))
expect(Parse.auth.currentUser.username).toBe 'foo'
expect($window.localStorage.PARSE_USER_INFO).toBeTruthy()
expect($window.localStorage.PARSE_SESSION_TOKEN).toBeTruthy()
Parse.auth.logout()
it 'clears localstorage sessionToken', inject ($window) ->
expect($window.localStorage.PARSE_SESSION_TOKEN).toBeUndefined()
it 'clears localstorage userInfo', inject ($window) ->
expect($window.localStorage.PARSE_USER_INFO).toBeUndefined()
it 'clears currentUser', inject (Parse) ->
expect(Parse.auth.currentUser).toBeNull()
it 'clears sessionToken', inject (Parse) ->
expect(Parse.auth.sessionToken).toBeNull()
describe 'resumeSession', ->
describe 'with session data', ->
user = null
beforeEach inject (Parse, $window) ->
user = new Parse.User
username: 'foobar'
sessionToken: 'PI:PASSWORD:<PASSWORD>END_PI'
$window.localStorage.setItem('PARSE_USER_INFO', JSON.stringify(user.attributes()))
$window.localStorage.setItem('PARSE_SESSION_TOKEN', user.sessionToken)
it 'exists', inject (Parse) ->
Parse.auth.resumeSession()
it 'sets the currentUser', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.currentUser.username).toBe 'foobar'
it 'sets the sessionToken', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.sessionToken).toBe 'PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI'
describe 'without session data', ->
it 'doesnt set the currentUser', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.currentUser).toBeNull()
it 'doesnt set the sessionToken', inject (Parse) ->
Parse.auth.resumeSession()
expect(Parse.auth.sessionToken).toBeNull()
|
[
{
"context": "###\nCopyright (c) 2002-2013 \"Neo Technology,\"\nNetwork Engine for Objects in Lund AB [http://n",
"end": 43,
"score": 0.49162521958351135,
"start": 33,
"tag": "NAME",
"value": "Technology"
}
] | community/server/src/main/coffeescript/neo4j/webadmin/modules/databrowser/models/Property.coffee | rebaze/neo4j | 1 | ###
Copyright (c) 2002-2013 "Neo Technology,"
Network Engine for Objects in Lund AB [http://neotechnology.com]
This file is part of Neo4j.
Neo4j is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
###
define ['ribcage/security/HtmlEscaper','ribcage/Model'], (HtmlEscaper, Model) ->
htmlEscaper = new HtmlEscaper
class Property extends Model
defaults :
key : ""
value : ""
keyError : false
valueError : false
getLocalId : =>
@get "localId"
getKey : () =>
@get "key"
getValue : () =>
@get "value"
getValueError : =>
@get "valueError"
getKeyError : =>
@get "keyError"
getValueAsJSON : () =>
if @hasValueError() then @getValue() else JSON.stringify(@getValue())
getValueAsHtml : () =>
htmlEscaper.escape @getValueAsJSON()
getTruncatedHtmlValue : (maxLength=100) =>
str = @getValueAsJSON()
if str.length > maxLength
str = str.substr(0,maxLength-3) + ".."
htmlEscaper.escape str
getKeyAsHtml : () =>
htmlEscaper.escape @getKey()
setKeyError : (error) =>
@set "keyError" : error
setValueError : (error) =>
@set "valueError" : error
setValue : (value) =>
@set "value" : value
setKey : (key) =>
@set "key" : key
hasKeyError : =>
@getKeyError() != false
hasValueError : =>
@getValueError() != false
| 59730 | ###
Copyright (c) 2002-2013 "Neo <NAME>,"
Network Engine for Objects in Lund AB [http://neotechnology.com]
This file is part of Neo4j.
Neo4j is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
###
define ['ribcage/security/HtmlEscaper','ribcage/Model'], (HtmlEscaper, Model) ->
htmlEscaper = new HtmlEscaper
class Property extends Model
defaults :
key : ""
value : ""
keyError : false
valueError : false
getLocalId : =>
@get "localId"
getKey : () =>
@get "key"
getValue : () =>
@get "value"
getValueError : =>
@get "valueError"
getKeyError : =>
@get "keyError"
getValueAsJSON : () =>
if @hasValueError() then @getValue() else JSON.stringify(@getValue())
getValueAsHtml : () =>
htmlEscaper.escape @getValueAsJSON()
getTruncatedHtmlValue : (maxLength=100) =>
str = @getValueAsJSON()
if str.length > maxLength
str = str.substr(0,maxLength-3) + ".."
htmlEscaper.escape str
getKeyAsHtml : () =>
htmlEscaper.escape @getKey()
setKeyError : (error) =>
@set "keyError" : error
setValueError : (error) =>
@set "valueError" : error
setValue : (value) =>
@set "value" : value
setKey : (key) =>
@set "key" : key
hasKeyError : =>
@getKeyError() != false
hasValueError : =>
@getValueError() != false
| true | ###
Copyright (c) 2002-2013 "Neo PI:NAME:<NAME>END_PI,"
Network Engine for Objects in Lund AB [http://neotechnology.com]
This file is part of Neo4j.
Neo4j is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
###
define ['ribcage/security/HtmlEscaper','ribcage/Model'], (HtmlEscaper, Model) ->
  # Shared escaper instance used by all HTML-rendering accessors below.
  htmlEscaper = new HtmlEscaper

  # A single key/value property of a graph entity. Validation errors for the
  # key and the value are tracked independently; `false` means "no error".
  class Property extends Model
    defaults :
      key : ""
      value : ""
      keyError : false
      valueError : false

    # --- plain accessors -----------------------------------------------
    getLocalId : => @get("localId")
    getKey : => @get("key")
    getValue : => @get("value")
    getKeyError : => @get("keyError")
    getValueError : => @get("valueError")

    # --- rendering helpers ---------------------------------------------
    # While the value fails validation the raw input is returned untouched
    # (so the user can keep editing it); otherwise it is JSON-encoded.
    getValueAsJSON : =>
      if @hasValueError() then @getValue() else JSON.stringify(@getValue())
    getValueAsHtml : => htmlEscaper.escape(@getValueAsJSON())
    getKeyAsHtml : => htmlEscaper.escape(@getKey())
    # HTML-escaped value, truncated when longer than maxLength characters.
    getTruncatedHtmlValue : (maxLength=100) =>
      text = @getValueAsJSON()
      text = text.substr(0, maxLength-3) + ".." if text.length > maxLength
      htmlEscaper.escape(text)

    # --- mutators --------------------------------------------------------
    setKey : (key) => @set(key : key)
    setValue : (value) => @set(value : value)
    setKeyError : (error) => @set(keyError : error)
    setValueError : (error) => @set(valueError : error)

    # --- predicates ------------------------------------------------------
    hasKeyError : => @getKeyError() != false
    hasValueError : => @getValueError() != false
|
[
{
"context": "###\nCopyright (c) 2014 Ramesh Nair (hiddentao.com)\n\nPermission is hereby granted, fr",
"end": 34,
"score": 0.9998838305473328,
"start": 23,
"tag": "NAME",
"value": "Ramesh Nair"
},
{
"context": "###\nCopyright (c) 2014 Ramesh Nair (hiddentao.com)\n\nPermission is her... | test/blocks.test.coffee | SimeonC/squel | 0 | ###
Copyright (c) 2014 Ramesh Nair (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['Blocks'] =
'Block base class':
beforeEach: ->
@inst = new squel.cls.Block()
'instanceof of BaseBuilder': ->
assert.instanceOf @inst, squel.cls.BaseBuilder
'options': ->
expectedOptions = _.extend {}, squel.cls.DefaultQueryBuilderOptions,
usingValuePlaceholders: true
dummy: true
@inst = new squel.cls.Block
usingValuePlaceholders: true
dummy: true
assert.same expectedOptions, @inst.options
'buildStr()': ->
assert.same '', @inst.buildStr()
'exposedMethods()':
'returns methods': ->
@inst['method1'] = -> return false
@inst['method2'] = -> return false
assert.ok ['method1', 'method2'], (name for name of @inst.exposedMethods())
'ignores methods prefixed with _': ->
@inst['_method'] = -> return false
assert.ok undefined is _.find (name for name of @inst.exposedMethods()), (name) ->
return name is '_method'
'ignores buildStr()': ->
assert.ok undefined is _.find (name for name of @inst.exposedMethods()), (name) ->
return name is 'buildStr'
'cloning copies the options over': ->
@inst.options.dummy = true;
newinst = @inst.clone()
@inst.options.dummy = false;
assert.same true, newinst.options.dummy
  # Tests for StringBlock: a block that emits a fixed string verbatim.
  'StringBlock':
    beforeEach: ->
      @cls = squel.cls.StringBlock
      @inst = new @cls
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'buildStr()':
      'returns the string as is': ->
        # 2nd constructor arg is the literal string to emit.
        @inst = new @cls {}, 'TAG'
        assert.same 'TAG', @inst.buildStr()
    'buildParam()':
      'returns the string as is': ->
        @inst = new @cls {}, 'TAG'
        assert.same { text: 'TAG', values: [] }, @inst.buildParam()
  # Tests for AbstractValueBlock: holds one value set via _setValue().
  'AbstractValueBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractValueBlock
      @inst = new @cls
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial member values': ->
      assert.same null, @inst._val
    'buildStr()':
      'when value not set': ->
        assert.same '', @inst.buildStr()
      'when value set': ->
        @inst._setValue 'bla'
        assert.same 'bla', @inst.buildStr()
    'buildParam()':
      'when value not set': ->
        assert.same @inst.buildParam(), { text: '', values: [] }
      'when value set': ->
        @inst._setValue 'bla'
        assert.same @inst.buildParam(), { text: 'bla', values: [] }
  # Tests for AbstractTableBlock: shared table-list handling (_table()),
  # input sanitization, single-table mode, nested queries and rendering.
  'AbstractTableBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractTableBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.tables
    '_table()':
      'saves inputs': ->
        @inst._table('table1')
        @inst._table('table2', 'alias2')
        @inst._table('table3')
        # Aliases get back-tick quoted by the sanitizer; missing alias -> null.
        expectedFroms = [
          {
            table: 'table1',
            alias: null
          },
          {
            table: 'table2',
            alias: '`alias2`'
          },
          {
            table: 'table3',
            alias: null
          }
        ]
        assert.same expectedFroms, @inst.tables
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeTableAlias', -> return '_a'
        @inst._table('table', 'alias')
        assert.ok sanitizeTableSpy.calledWith 'table'
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias'
        assert.same [ { table: '_t', alias: '_a' }], @inst.tables
      'handles single-table mode': ->
        # In single-table mode each _table() call replaces the previous one.
        @inst.options.singleTable = true
        @inst._table('table1')
        @inst._table('table2')
        @inst._table('table3')
        expected = [
          {
            table: 'table3',
            alias: null
          }
        ]
        assert.same expected, @inst.tables
      'if not allowing nested queries': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        innerTable = squel.select()
        @inst.options.allowNested = false
        @inst._table(innerTable)
        # allowNested is forwarded to the sanitizer as its 2nd argument.
        assert.ok sanitizeTableSpy.calledWithExactly innerTable, false
      'if allowing nested queries': ->
        sanitizeTableSpy = test.mocker.spy @cls.prototype, '_sanitizeTable'
        innerTable1 = squel.select()
        innerTable2 = squel.select()
        @inst.options.allowNested = true
        @inst._table(innerTable1)
        @inst._table(innerTable2, 'Inner2')
        assert.ok sanitizeTableSpy.calledWithExactly innerTable1, true
        assert.ok sanitizeTableSpy.calledWithExactly innerTable2, true
        expected = [
          {
            alias: null
            table: innerTable1
          }
          {
            alias: '`Inner2`'
            table: innerTable2
          }
        ]
        assert.same expected, @inst.tables
    'buildStr()':
      'requires at least one table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: _table() needs to be called', err.toString()
      'returns formatted query phrase': ->
        @inst._table('table1')
        @inst._table('table2', 'alias2')
        @inst._table('table3')
        assert.same 'table1, table2 `alias2`, table3', @inst.buildStr()
      'handles nested query': ->
        innerTable1 = squel.select().from('inner1')
        innerTable2 = squel.select().from('inner2')
        @inst.options.allowNested = true
        @inst._table(innerTable1)
        @inst._table(innerTable2, 'inner2')
        assert.same '(SELECT * FROM inner1), (SELECT * FROM inner2) `inner2`', @inst.buildStr()
  # Tests for FromTableBlock: from() delegates to AbstractTableBlock._table()
  # and buildStr() prefixes the base output with "FROM".
  'FromTableBlock':
    beforeEach: ->
      @cls = squel.cls.FromTableBlock
      @inst = new @cls()
    'instanceof of AbstractTableBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractTableBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractTableBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'from()':
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, '_table'
        @inst.from('table1')
        @inst.from('table2', 'alias2')
        assert.same 2, baseMethodSpy.callCount
        assert.ok baseMethodSpy.calledWithExactly('table1', null)
        assert.ok baseMethodSpy.calledWithExactly('table2', 'alias2')
    'buildStr()':
      'requires at least one table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: from() needs to be called', err.toString()
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, 'buildStr', -> 'blah'
        @inst.from('table')
        assert.same 'FROM blah', @inst.buildStr()
  # Tests for UpdateTableBlock: table() delegates to the base _table().
  'UpdateTableBlock':
    beforeEach: ->
      @cls = squel.cls.UpdateTableBlock
      @inst = new @cls()
    'instanceof of AbstractTableBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractTableBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractTableBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'table()':
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, '_table'
        @inst.table('table1')
        @inst.table('table2', 'alias2')
        assert.same 2, baseMethodSpy.callCount
        assert.ok baseMethodSpy.calledWithExactly('table1', null)
        assert.ok baseMethodSpy.calledWithExactly('table2', 'alias2')
  # Tests for IntoTableBlock: a single INSERT target table (last into() wins).
  'IntoTableBlock':
    beforeEach: ->
      @cls = squel.cls.IntoTableBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.table
    'into()':
      'saves inputs': ->
        @inst.into('table1')
        @inst.into('table2')
        @inst.into('table3')
        assert.same 'table3', @inst.table
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        @inst.into('table')
        assert.ok sanitizeTableSpy.calledWithExactly 'table', false
        assert.same '_t', @inst.table
    'buildStr()':
      'requires table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: into() needs to be called', err.toString()
      'returns formatted query phrase': ->
        @inst.into('table1')
        assert.same 'INTO table1', @inst.buildStr()
  # Tests for GetFieldBlock: SELECT field list management via field()/fields(),
  # duplicate handling, sanitization and "*" fallback rendering.
  'GetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.GetFieldBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst._fields
    'fields() - object':
      'saves inputs': ->
        fieldSpy = test.mocker.spy(@inst, 'field')
        # Object form: keys are field names, values are aliases (null = none).
        @inst.fields({
          'field1': null
          'field2': 'alias2'
          'field3': null
        }, { dummy: true})
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.ok fieldSpy.calledThrice
        assert.ok fieldSpy.calledWithExactly('field1', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field2', 'alias2', dummy: true)
        assert.ok fieldSpy.calledWithExactly('field3', null, dummy: true)
        assert.same expected, @inst._fields
    'fields() - array':
      'saves inputs': ->
        fieldSpy = test.mocker.spy(@inst, 'field')
        @inst.fields([ 'field1', 'field2', 'field3' ], { dummy: true})
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: null
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.ok fieldSpy.calledThrice
        assert.ok fieldSpy.calledWithExactly('field1', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field2', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field3', null, dummy: true)
        assert.same expected, @inst._fields
    'field()':
      'saves inputs': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field3')
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.same expected, @inst._fields
    'field() - discard duplicates':
      'saves inputs': ->
        # Same (name, alias) pair is ignored; same name with a new alias is kept.
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field2', 'alias2')
        @inst.field('field1', 'alias1')
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field1',
            alias: '"alias1"'
          }
        ]
        assert.same expected, @inst._fields
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeFieldAlias', -> return '_a'
        @inst.field('field1', 'alias1', { dummy: true})
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', { dummy: true }
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias1'
        assert.same [ { name: '_f', alias: '_a' } ], @inst._fields
    'buildStr()':
      'returns all fields when none provided': ->
        @inst._fields = []
        assert.same '*', @inst.buildStr()
      'returns formatted query phrase': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field3')
        assert.same 'field1, field2 AS "alias2", field3', @inst.buildStr()
  # Tests for AbstractSetFieldBlock: the shared field/value storage used by
  # UPDATE ... SET and INSERT blocks (_set, _setFields, _setFieldsRows).
  'AbstractSetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractSetFieldBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial fields': ->
      assert.same [], @inst.fields
    'initial field options': ->
      assert.same [], @inst.fieldOptions
    'initial values': ->
      assert.same [], @inst.values
    '_set()':
      'saves inputs': ->
        @inst._set('field1', 'value1', dummy: 1)
        @inst._set('field2', 'value2', dummy: 2)
        @inst._set('field3', 'value3', dummy: 3)
        @inst._set('field4')
        # Values/options live in a single row (index 0) for _set().
        expectedFields = [ 'field1', 'field2', 'field3', 'field4' ]
        expectedValues = [ [ 'value1', 'value2', 'value3', undefined ] ]
        expectedFieldOptions = [ [ {dummy: 1}, {dummy: 2}, {dummy: 3}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._set('field1', 'value1', dummy: true)
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', dummy: true
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ] ], @inst.values
    '_setFields()':
      'saves inputs': ->
        @inst._setFields
          'field1': 'value1'
          'field2': 'value2'
          'field3': 'value3'
        expectedFields = [ 'field1', 'field2', 'field3' ]
        expectedValues = [ [ 'value1', 'value2', 'value3'] ]
        expectedFieldOptions = [ [ {}, {}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._setFields({'field1': 'value1'}, {dummy: true})
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', dummy: true
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ] ], @inst.values
    '_setFieldsRows()':
      'saves inputs': ->
        # Multi-row form: one values/options row per input object.
        @inst._setFieldsRows [
          {
            'field1': 'value1'
            'field2': 'value2'
            'field3': 'value3'
          }
          {
            'field1': 'value21'
            'field2': 'value22'
            'field3': 'value23'
          }
        ]
        expectedFields = [ 'field1', 'field2', 'field3' ]
        expectedValues = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        expectedFieldOptions = [ [ {}, {}, {} ], [ {}, {}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._setFieldsRows [
          {
            'field1': 'value1'
          },
          {
            'field1': 'value21'
          }
        ], { dummy: true }
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', { dummy: true }
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.ok sanitizeValueSpy.calledWithExactly 'value21'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ], [ '_v' ] ], @inst.values
    # Rendering is abstract on this class and must throw.
    'buildStr()': ->
      assert.throws ( => @inst.buildStr()), 'Not yet implemented'
    'buildParam()': ->
      assert.throws ( => @inst.buildParam()), 'Not yet implemented'
  # Tests for SetFieldBlock: UPDATE's "SET a = b, ..." clause rendering.
  'SetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.SetFieldBlock
      @inst = new @cls()
    'instanceof of AbstractSetFieldBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractSetFieldBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractSetFieldBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field options': ->
      assert.same [], @inst.fieldOptions
    'initial fields': ->
      assert.same [], @inst.fields
    'initial values': ->
      assert.same [], @inst.values
    'set()':
      'calls to _set()': ->
        spy = test.mocker.stub @inst, '_set'
        @inst.set 'f', 'v', dummy: true
        assert.ok spy.calledWithExactly('f', 'v', dummy: true)
    'setFields()':
      'calls to _setFields()': ->
        spy = test.mocker.stub @inst, '_setFields'
        @inst.setFields 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'buildStr()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: set() needs to be called', err.toString()
      'calls formatValue() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValue', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ] ]
        @inst.fieldOptions = [ [ {dummy: true}, {dummy: false}, {} ] ]
        assert.same 'SET field1 = [value1], field2 = [value2], field3 = [value3]', @inst.buildStr()
        assert.ok formatValueSpy.calledThrice
        assert.ok formatValueSpy.calledWithExactly 'value1', { dummy: true }
        assert.ok formatValueSpy.calledWithExactly 'value2', { dummy: false }
        assert.ok formatValueSpy.calledWithExactly 'value3', {}
    'buildParam()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        try
          @inst.buildParam()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: set() needs to be called', err.toString()
      'calls formatValueAsParam() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ] ]
        assert.same { text: 'SET field1 = ?, field2 = ?, field3 = ?', values: ['[value1]', '[value2]', '[value3]'] }, @inst.buildParam()
        assert.ok formatValueSpy.calledThrice
        assert.ok formatValueSpy.calledWithExactly 'value1'
        assert.ok formatValueSpy.calledWithExactly 'value2'
        assert.ok formatValueSpy.calledWithExactly 'value3'
      # Regression: an undefined value means "use the field expr verbatim"
      # and must not produce a placeholder.
      'Fix for hiddentao/squel#63': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> v
        @inst.fields = [ 'age = age + 1', 'field2', 'field3' ]
        @inst.values = [ [ undefined, 'value2', 'value3' ] ]
        assert.same { text: 'SET age = age + 1, field2 = ?, field3 = ?', values: ['value2', 'value3'] }, @inst.buildParam()
  # Tests for InsertFieldValueBlock: INSERT's "(cols) VALUES (...)" clause.
  'InsertFieldValueBlock':
    beforeEach: ->
      @cls = squel.cls.InsertFieldValueBlock
      @inst = new @cls()
    'instanceof of AbstractSetFieldBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractSetFieldBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractSetFieldBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'set()':
      'calls to _set()': ->
        spy = test.mocker.stub @inst, '_set'
        @inst.set 'f', 'v', dummy: true
        assert.ok spy.calledWithExactly('f', 'v', dummy: true)
    'setFields()':
      'calls to _setFields()': ->
        spy = test.mocker.stub @inst, '_setFields'
        @inst.setFields 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'setFieldsRows()':
      'calls to _setFieldsRows()': ->
        spy = test.mocker.stub @inst, '_setFieldsRows'
        @inst.setFieldsRows 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'buildStr()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        assert.same '', @inst.buildStr()
      'calls formatValue() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValue', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        @inst.fieldOptions = [ [ {}, {}, {} ], [ {}, {}, { dummy: 23 } ] ]
        assert.same '(field1, field2, field3) VALUES ([value1], [value2], [value3]), ([value21], [value22], [value23])', @inst.buildStr()
        assert.same formatValueSpy.callCount, 6
        assert.ok formatValueSpy.calledWithExactly 'value1', {}
        assert.ok formatValueSpy.calledWithExactly 'value2', {}
        assert.ok formatValueSpy.calledWithExactly 'value3', {}
        assert.ok formatValueSpy.calledWithExactly 'value21', {}
        assert.ok formatValueSpy.calledWithExactly 'value22', {}
        assert.ok formatValueSpy.calledWithExactly 'value23', { dummy: 23 }
    'buildParam()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        assert.same {
          text: ''
          values: []
        }, @inst.buildParam()
      'calls formatValueAsParam() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        assert.same {
          text: '(field1, field2, field3) VALUES (?, ?, ?), (?, ?, ?)',
          values: [ '[value1]', '[value2]', '[value3]', '[value21]', '[value22]', '[value23]' ]
        }, @inst.buildParam()
        assert.same formatValueSpy.callCount, 6
        assert.ok formatValueSpy.calledWithExactly 'value1'
        assert.ok formatValueSpy.calledWithExactly 'value2'
        assert.ok formatValueSpy.calledWithExactly 'value3'
        assert.ok formatValueSpy.calledWithExactly 'value21'
        assert.ok formatValueSpy.calledWithExactly 'value22'
        assert.ok formatValueSpy.calledWithExactly 'value23'
  # Tests for InsertFieldsFromQueryBlock: INSERT ... (cols) (SELECT ...),
  # i.e. populating an insert from a nested query via fromQuery().
  'InsertFieldsFromQueryBlock':
    beforeEach: ->
      @cls = squel.cls.InsertFieldsFromQueryBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'fromQuery()':
      'sanitizes field names': ->
        spy = test.mocker.stub @inst, '_sanitizeField', -> 1
        qry = squel.select()
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.ok spy.calledThrice
        assert.ok spy.calledWithExactly 'test'
        assert.ok spy.calledWithExactly 'one'
        assert.ok spy.calledWithExactly 'two'
      'sanitizes query': ->
        spy = test.mocker.stub @inst, '_sanitizeNestableQuery', -> 1
        qry = 123
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.ok spy.calledOnce
        assert.ok spy.calledWithExactly qry
      'overwrites existing values': ->
        @inst._fields = 1
        @inst._query = 2
        qry = squel.select()
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.same qry, @inst._query
        assert.same ['test', 'one', 'two'], @inst._fields
    'buildStr()':
      'needs fromQuery() to have been called': ->
        @inst._fields = []
        assert.same "", @inst.buildStr()
      'default': ->
        qry = squel.select().from('mega')
        @inst.fromQuery ['test', 'one', 'two'], qry
        assert.same "(test, one, two) (#{qry.toString()})", @inst.buildStr()
    'buildParam()':
      'needs fromQuery() to have been called': ->
        @inst._fields = []
        expected = {
          text: '',
          values: []
        }
        assert.same expected, @inst.buildParam()
      'default': ->
        qry = squel.select().from('mega')
        @inst.fromQuery ['test', 'one', 'two'], qry
        # Inner query's params are surfaced as this block's params.
        test.mocker.stub qry, 'toParam', ->
          {
            text: 'blah',
            values: [1,2,3]
          }
        expected = {
          text: '(test, one, two) (blah)',
          values: [1,2,3]
        }
        assert.same expected, @inst.buildParam()
'DistinctBlock':
beforeEach: ->
@cls = squel.cls.DistinctBlock
@inst = new @cls()
'instanceof of Block': ->
assert.instanceOf @inst, squel.cls.Block
'calls base constructor': ->
spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
@inst = new @cls
dummy: true
assert.ok spy.calledWithExactly
dummy:true
'initial field values': ->
assert.same false, @inst.useDistinct
'distinct()':
'sets the flat': ->
@inst.distinct()
assert.same true, @inst.useDistinct
@inst.distinct()
assert.same true, @inst.useDistinct
'buildStr()':
'output nothing if not set': ->
@inst.useDistinct = false
assert.same '', @inst.buildStr()
'output DISTINCT if set': ->
@inst.useDistinct = true
assert.same 'DISTINCT', @inst.buildStr()
  # Tests for GroupByBlock: accumulates sanitized fields for GROUP BY.
  'GroupByBlock':
    beforeEach: ->
      @cls = squel.cls.GroupByBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.groups
    'group()':
      'adds to list': ->
        @inst.group('field1')
        @inst.group('field2')
        assert.same ['field1', 'field2'], @inst.groups
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        @inst.group('field1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1'
        assert.same ['_f'], @inst.groups
    'buildStr()':
      'output nothing if no fields set': ->
        @inst.groups = []
        assert.same '', @inst.buildStr()
      'output GROUP BY': ->
        @inst.group('field1')
        @inst.group('field2')
        assert.same 'GROUP BY field1, field2', @inst.buildStr()
  # Tests for OffsetBlock: a single sanitized OFFSET value (last call wins).
  'OffsetBlock':
    beforeEach: ->
      @cls = squel.cls.OffsetBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.offsets
    'offset()':
      'set value': ->
        @inst.offset(1)
        assert.same 1, @inst.offsets
        @inst.offset(22)
        assert.same 22, @inst.offsets
      'sanitizes inputs': ->
        sanitizeSpy = test.mocker.stub @cls.prototype, '_sanitizeLimitOffset', -> return 234
        @inst.offset(23)
        assert.ok sanitizeSpy.calledWithExactly 23
        assert.same 234, @inst.offsets
    'buildStr()':
      'output nothing if not set': ->
        @inst.offsets = null
        assert.same '', @inst.buildStr()
      'output OFFSET': ->
        @inst.offset(12)
        assert.same 'OFFSET 12', @inst.buildStr()
  # Tests for WhereBlock: condition storage, variadic ? substitution (arrays
  # expand to (?, ?, ...)), nested query/expression rendering, and the
  # string vs. parameterized output paths.
  'WhereBlock':
    beforeEach: ->
      @cls = squel.cls.WhereBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy: true
    'initial field values': ->
      assert.same [], @inst.wheres
    'where()':
      'adds to list': ->
        @inst.where('a = 1')
        @inst.where('b = 2 OR c = 3')
        assert.same [
          {
            text: 'a = 1'
            values: []
          }
          {
            text: 'b = 2 OR c = 3'
            values: []
          }
        ], @inst.wheres
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeCondition', -> return '_c'
        @inst.where('a = 1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'a = 1'
        assert.same [{
          text: '_c'
          values: []
        }], @inst.wheres
      'handles variadic arguments': ->
        sanitizeStub = test.mocker.stub @cls.prototype, '_sanitizeValue', _.identity
        substitutes = ['hello', [1, 2, 3]]
        @inst.where.apply @inst, ['a = ? and b in ?'].concat(substitutes)
        # Array substitutes are flattened and each element sanitized once.
        expectedValues = _.flatten substitutes
        for expectedValue, index in expectedValues
          assert.ok sanitizeStub.getCall(index).calledWithExactly expectedValue
        assert.same [
          {
            text: 'a = ? and b in (?, ?, ?)'
            values: ['hello', 1, 2, 3]
          }
        ], @inst.wheres
    'buildStr()':
      'output QueryBuilder ': ->
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        @inst.where('a in ?', subquery)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = 10))) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'output nothing if no conditions set': ->
        @inst.wheres = []
        assert.same '', @inst.buildStr()
      'output WHERE ': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a = 1) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      # Regression: buildStr() must not mutate state — repeat calls give
      # the same output.
      'Fix for hiddentao/squel#64': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        # second time it should still work
        @inst.buildStr()
        @inst.buildStr()
        assert.same 'WHERE (a = 1) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'formats values ': ->
        formatValueStub = test.mocker.stub @cls.prototype, '_formatValue', (a) -> '[' + a + ']'
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a = [1]) AND (b = [2] OR c = [3]) AND (d in ([4], [5], [6]))', @inst.buildStr()
    'buildParam()':
      'output QueryBuilder ': ->
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        @inst.where('a in ?', subquery)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same { text: 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = ?))) AND (b = ? OR c = ?) AND (d in (?, ?, ?))', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'output QueryBuilder expr': ->
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        expr = squel.expr().and('a in ?',subquery)
          .and_begin().or('b = ?', 2).or('c = ?', 3).end().and_begin()
          .and('d in ?', [4, 5, 6]).end()
        @inst.where(expr)
        #assert.same { text: '', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
        assert.same { text: 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = ?)) AND (b = ? OR c = ?) AND (d in (?, ?, ?)))', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'output nothing if no conditions set': ->
        @inst.wheres = []
        assert.same { text: '', values: [] }, @inst.buildParam()
      'output WHERE ': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same { text: 'WHERE (a = ?) AND (b = ? OR c = ?) AND (d in (?, ?, ?))', values: [1, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'formats value types as params': ->
        # Parameterized mode must use _formatValueAsParam, never _formatValue.
        formatValueSpy = test.mocker.spy @cls.prototype, '_formatValue'
        test.mocker.stub @cls.prototype, '_formatValueAsParam', (a) -> '[' + a + ']'
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same {
          text: 'WHERE (a = ?) AND (b = ? OR c = ?) AND (d in (?, ?, ?))',
          values: ['[1]', '[2]', '[3]', '[4]', '[5]', '[6]']
        }, @inst.buildParam()
        assert.ok formatValueSpy.notCalled
  # OrderByBlock: ORDER BY clause construction, input sanitization and
  # string/parameterized output.
  'OrderByBlock':
    beforeEach: ->
      @cls = squel.cls.OrderByBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): the indented object is assert.ok's *message* argument, so
      # calledWithExactly is never invoked — vacuous assertion; likely intended:
      # assert.ok spy.calledWithExactly(dummy: true)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.orders
      assert.same [], @inst._values
    'order()':
      'adds to list': ->
        # dir defaults to true (ASC); false means DESC
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        expected = [
          {
            field: 'field1',
            dir: true
          },
          {
            field: 'field2',
            dir: false
          },
          {
            field: 'field3',
            dir: true
          }
        ]
        assert.same expected, @inst.orders
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        @inst.order('field1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1'
        assert.same [ { field: '_f', dir: true } ], @inst.orders
      'saves additional values': ->
        # trailing varargs are kept separately for parameterized queries
        @inst.order('field1', false, 1.2, 4)
        assert.same [ { field: 'field1', dir: false } ], @inst.orders
        assert.same [1.2, 4], @inst._values
    'buildStr()':
      'output nothing if nothing set': ->
        @inst.orders = []
        assert.same '', @inst.buildStr()
      'output ORDER BY': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        assert.same 'ORDER BY field1 ASC, field2 DESC, field3 ASC', @inst.buildStr()
    'buildParam()':
      'empty': ->
        @inst.orders = []
        assert.same { text: '', values: [] }, @inst.buildParam()
      'default': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        assert.same { text: 'ORDER BY field1 ASC, field2 DESC, field3 ASC', values: [] }, @inst.buildParam()
      'with values': ->
        @inst.order('field3', true, 1.2, 5)
        assert.same { text: 'ORDER BY field3 ASC', values: [1.2, 5] }, @inst.buildParam()
  # LimitBlock: stores a single LIMIT value; later calls overwrite earlier ones.
  'LimitBlock':
    beforeEach: ->
      @cls = squel.cls.LimitBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.limits
    'limit()':
      'set value': ->
        @inst.limit(1)
        assert.same 1, @inst.limits
        @inst.limit(22)
        assert.same 22, @inst.limits
      'sanitizes inputs': ->
        sanitizeSpy = test.mocker.stub @cls.prototype, '_sanitizeLimitOffset', -> return 234
        @inst.limit(23)
        assert.ok sanitizeSpy.calledWithExactly 23
        assert.same 234, @inst.limits
    'buildStr()':
      'output nothing if not set': ->
        @inst.limits = null
        assert.same '', @inst.buildStr()
      'output nothing if set to 0': ->
        @inst.limit(0)
        assert.same '', @inst.buildStr()
      'output LIMIT': ->
        @inst.limit(12)
        assert.same 'LIMIT 12', @inst.buildStr()
  # JoinBlock: JOIN clause construction — join types, table aliasing,
  # ON conditions, nested sub-queries and expression conditions.
  'JoinBlock':
    beforeEach: ->
      @cls = squel.cls.JoinBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.joins
    'join()':
      'adds to list': ->
        # default type is INNER; aliases get backtick-quoted
        @inst.join('table1')
        @inst.join('table2', null, 'b = 1', 'LEFT')
        @inst.join('table3', 'alias3', 'c = 1', 'RIGHT')
        @inst.join('table4', 'alias4', 'd = 1', 'OUTER')
        @inst.join('table5', 'alias5', null, 'CROSS')
        expected = [
          {
            type: 'INNER',
            table: 'table1',
            alias: null,
            condition: null
          },
          {
            type: 'LEFT',
            table: 'table2',
            alias: null,
            condition: 'b = 1'
          },
          {
            type: 'RIGHT',
            table: 'table3',
            alias: '`alias3`',
            condition: 'c = 1'
          },
          {
            type: 'OUTER',
            table: 'table4',
            alias: '`alias4`',
            condition: 'd = 1'
          },
          {
            type: 'CROSS',
            table: 'table5',
            alias: '`alias5`',
            condition: null
          }
        ]
        assert.same expected, @inst.joins
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeTableAlias', -> return '_a'
        sanitizeConditionSpy = test.mocker.stub @cls.prototype, '_sanitizeCondition', -> return '_c'
        @inst.join('table1', 'alias1', 'a = 1')
        # second arg `true` = nested queries allowed for the table argument
        assert.ok sanitizeTableSpy.calledWithExactly 'table1', true
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias1'
        assert.ok sanitizeConditionSpy.calledWithExactly 'a = 1'
        expected = [
          {
            type: 'INNER',
            table: '_t',
            alias: '_a',
            condition: '_c'
          }
        ]
        assert.same expected, @inst.joins
      'nested queries': ->
        # QueryBuilder instances are stored as-is in place of table names
        inner1 = squel.select()
        inner2 = squel.select()
        inner3 = squel.select()
        inner4 = squel.select()
        inner5 = squel.select()
        inner6 = squel.select()
        @inst.join(inner1)
        @inst.join(inner2, null, 'b = 1', 'LEFT')
        @inst.join(inner3, 'alias3', 'c = 1', 'RIGHT')
        @inst.join(inner4, 'alias4', 'd = 1', 'OUTER')
        @inst.join(inner5, 'alias5', 'e = 1', 'FULL')
        @inst.join(inner6, 'alias6', null, 'CROSS')
        expected = [
          {
            type: 'INNER',
            table: inner1,
            alias: null,
            condition: null
          },
          {
            type: 'LEFT',
            table: inner2,
            alias: null,
            condition: 'b = 1'
          },
          {
            type: 'RIGHT',
            table: inner3,
            alias: '`alias3`',
            condition: 'c = 1'
          },
          {
            type: 'OUTER',
            table: inner4,
            alias: '`alias4`',
            condition: 'd = 1'
          },
          {
            type: 'FULL',
            table: inner5,
            alias: '`alias5`',
            condition: 'e = 1'
          },
          {
            type: 'CROSS',
            table: inner6,
            alias: '`alias6`',
            condition: null
          }
        ]
        assert.same expected, @inst.joins
    'left_join()':
      'calls join()': ->
        joinSpy = test.mocker.stub(@inst, 'join')
        @inst.left_join('t', 'a', 'c')
        assert.ok joinSpy.calledOnce
        assert.ok joinSpy.calledWithExactly('t', 'a', 'c', 'LEFT')
    'buildStr()':
      'output nothing if nothing set': ->
        @inst.joins = []
        assert.same '', @inst.buildStr()
      'output JOINs': ->
        @inst.join('table1')
        @inst.join('table2', null, 'b = 1', 'LEFT')
        @inst.join('table3', 'alias3', 'c = 1', 'RIGHT')
        @inst.join('table4', 'alias4', 'd = 1', 'FULL')
        @inst.join('table5', 'alias5', null, 'CROSS')
        assert.same 'INNER JOIN table1 LEFT JOIN table2 ON (b = 1) RIGHT JOIN table3 `alias3` ON (c = 1) FULL JOIN table4 `alias4` ON (d = 1) CROSS JOIN table5 `alias5`', @inst.buildStr()
      'output JOINs with nested query': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        inner3 = squel.select().from('3')
        inner4 = squel.select().from('4')
        inner5 = squel.select().from('5')
        @inst.join(inner1)
        @inst.join(inner2, null, 'b = 1', 'LEFT')
        @inst.join(inner3, 'alias3', 'c = 1', 'RIGHT')
        @inst.join(inner4, 'alias4', 'e = 1', 'FULL')
        @inst.join(inner5, 'alias5', null, 'CROSS')
        assert.same 'INNER JOIN (SELECT * FROM 1) LEFT JOIN (SELECT * FROM 2) ON (b = 1) RIGHT JOIN (SELECT * FROM 3) `alias3` ON (c = 1) FULL JOIN (SELECT * FROM 4) `alias4` ON (e = 1) CROSS JOIN (SELECT * FROM 5) `alias5`', @inst.buildStr()
      'QueryBuilder in ON condition expr()': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        expr = squel.expr()
          .and('field1 = ?',inner2)
        @inst.join(inner1, null, expr)
        assert.same 'INNER JOIN (SELECT * FROM 1) ON (field1 = (SELECT * FROM 2))', @inst.buildStr()
    'buildParam()':
      'QueryBuilder in ON condition expr()': ->
        # sub-query in the expression is inlined, not parameterized
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        expr = squel.expr()
          .and('field1 = ?',inner2)
        @inst.join(inner1, null, expr)
        assert.same { text: 'INNER JOIN (SELECT * FROM 1) ON (field1 = (SELECT * FROM 2))', values: [] }, @inst.buildParam()
# Export this test suite keyed by the current filename (when run under CommonJS).
module?.exports[require('path').basename(__filename)] = test
###
Copyright (c) 2014 <NAME> (hiddentao.<EMAIL>)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
# Library under test plus shared test helpers (underscore, sinon-backed
# test creator, and assertion utilities).
squel = require "../squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
# Test suites for squel's query-building blocks.
test['Blocks'] =
  # Base Block class: options merging, default buildStr() output, the
  # exposedMethods() reflection helper, and option cloning.
  'Block base class':
    beforeEach: ->
      @inst = new squel.cls.Block()
    'instanceof of BaseBuilder': ->
      assert.instanceOf @inst, squel.cls.BaseBuilder
    'options': ->
      # constructor options are merged on top of the library defaults
      expectedOptions = _.extend {}, squel.cls.DefaultQueryBuilderOptions,
        usingValuePlaceholders: true
        dummy: true
      @inst = new squel.cls.Block
        usingValuePlaceholders: true
        dummy: true
      assert.same expectedOptions, @inst.options
    'buildStr()': ->
      assert.same '', @inst.buildStr()
    'exposedMethods()':
      'returns methods': ->
        @inst['method1'] = -> return false
        @inst['method2'] = -> return false
        # NOTE(review): assert.ok treats the (always-truthy) array as the value
        # under test and the comprehension as the message — vacuous assertion;
        # assert.same was probably intended.
        assert.ok ['method1', 'method2'], (name for name of @inst.exposedMethods())
      'ignores methods prefixed with _': ->
        @inst['_method'] = -> return false
        assert.ok undefined is _.find (name for name of @inst.exposedMethods()), (name) ->
          return name is '_method'
      'ignores buildStr()': ->
        assert.ok undefined is _.find (name for name of @inst.exposedMethods()), (name) ->
          return name is 'buildStr'
    'cloning copies the options over': ->
      # mutating the original after clone() must not affect the copy
      @inst.options.dummy = true;
      newinst = @inst.clone()
      @inst.options.dummy = false;
      assert.same true, newinst.options.dummy
  # StringBlock: emits a fixed string verbatim in both output modes.
  'StringBlock':
    beforeEach: ->
      @cls = squel.cls.StringBlock
      @inst = new @cls
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'buildStr()':
      'returns the string as is': ->
        @inst = new @cls {}, 'TAG'
        assert.same 'TAG', @inst.buildStr()
    'buildParam()':
      'returns the string as is': ->
        @inst = new @cls {}, 'TAG'
        assert.same { text: 'TAG', values: [] }, @inst.buildParam()
  # AbstractValueBlock: holds a single value set via _setValue() and echoes it.
  'AbstractValueBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractValueBlock
      @inst = new @cls
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial member values': ->
      assert.same null, @inst._val
    'buildStr()':
      'when value not set': ->
        assert.same '', @inst.buildStr()
      'when value set': ->
        @inst._setValue 'bla'
        assert.same 'bla', @inst.buildStr()
    'buildParam()':
      'when value not set': ->
        assert.same @inst.buildParam(), { text: '', values: [] }
      'when value set': ->
        @inst._setValue 'bla'
        assert.same @inst.buildParam(), { text: 'bla', values: [] }
  # AbstractTableBlock: shared table-list handling — aliasing, sanitization,
  # single-table mode, nested-query support, and formatted output.
  'AbstractTableBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractTableBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.tables
    '_table()':
      'saves inputs': ->
        @inst._table('table1')
        @inst._table('table2', 'alias2')
        @inst._table('table3')
        expectedFroms = [
          {
            table: 'table1',
            alias: null
          },
          {
            table: 'table2',
            alias: '`alias2`'
          },
          {
            table: 'table3',
            alias: null
          }
        ]
        assert.same expectedFroms, @inst.tables
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeTableAlias', -> return '_a'
        @inst._table('table', 'alias')
        assert.ok sanitizeTableSpy.calledWith 'table'
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias'
        assert.same [ { table: '_t', alias: '_a' }], @inst.tables
      'handles single-table mode': ->
        # in singleTable mode each call replaces the previous table
        @inst.options.singleTable = true
        @inst._table('table1')
        @inst._table('table2')
        @inst._table('table3')
        expected = [
          {
            table: 'table3',
            alias: null
          }
        ]
        assert.same expected, @inst.tables
      'if not allowing nested queries': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        innerTable = squel.select()
        @inst.options.allowNested = false
        @inst._table(innerTable)
        # the allowNested flag is forwarded to the sanitizer
        assert.ok sanitizeTableSpy.calledWithExactly innerTable, false
      'if allowing nested queries': ->
        sanitizeTableSpy = test.mocker.spy @cls.prototype, '_sanitizeTable'
        innerTable1 = squel.select()
        innerTable2 = squel.select()
        @inst.options.allowNested = true
        @inst._table(innerTable1)
        @inst._table(innerTable2, 'Inner2')
        assert.ok sanitizeTableSpy.calledWithExactly innerTable1, true
        assert.ok sanitizeTableSpy.calledWithExactly innerTable2, true
        expected = [
          {
            alias: null
            table: innerTable1
          }
          {
            alias: '`Inner2`'
            table: innerTable2
          }
        ]
        assert.same expected, @inst.tables
    'buildStr()':
      'requires at least one table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: _table() needs to be called', err.toString()
      'returns formatted query phrase': ->
        @inst._table('table1')
        @inst._table('table2', 'alias2')
        @inst._table('table3')
        assert.same 'table1, table2 `alias2`, table3', @inst.buildStr()
      'handles nested query': ->
        innerTable1 = squel.select().from('inner1')
        innerTable2 = squel.select().from('inner2')
        @inst.options.allowNested = true
        @inst._table(innerTable1)
        @inst._table(innerTable2, 'inner2')
        assert.same '(SELECT * FROM inner1), (SELECT * FROM inner2) `inner2`', @inst.buildStr()
  # FromTableBlock: FROM clause — delegates storage to AbstractTableBlock and
  # prefixes the rendered table list with 'FROM'.
  'FromTableBlock':
    beforeEach: ->
      @cls = squel.cls.FromTableBlock
      @inst = new @cls()
    'instanceof of AbstractTableBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractTableBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractTableBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'from()':
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, '_table'
        @inst.from('table1')
        @inst.from('table2', 'alias2')
        assert.same 2, baseMethodSpy.callCount
        assert.ok baseMethodSpy.calledWithExactly('table1', null)
        assert.ok baseMethodSpy.calledWithExactly('table2', 'alias2')
    'buildStr()':
      'requires at least one table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: from() needs to be called', err.toString()
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, 'buildStr', -> 'blah'
        @inst.from('table')
        assert.same 'FROM blah', @inst.buildStr()
  # UpdateTableBlock: table() for UPDATE statements — thin delegate to
  # AbstractTableBlock._table().
  'UpdateTableBlock':
    beforeEach: ->
      @cls = squel.cls.UpdateTableBlock
      @inst = new @cls()
    'instanceof of AbstractTableBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractTableBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractTableBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'table()':
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, '_table'
        @inst.table('table1')
        @inst.table('table2', 'alias2')
        assert.same 2, baseMethodSpy.callCount
        assert.ok baseMethodSpy.calledWithExactly('table1', null)
        assert.ok baseMethodSpy.calledWithExactly('table2', 'alias2')
  # IntoTableBlock: single INTO table for INSERT — later calls overwrite,
  # nested queries are disallowed, and buildStr() requires a table.
  'IntoTableBlock':
    beforeEach: ->
      @cls = squel.cls.IntoTableBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.table
    'into()':
      'saves inputs': ->
        @inst.into('table1')
        @inst.into('table2')
        @inst.into('table3')
        assert.same 'table3', @inst.table
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        @inst.into('table')
        # `false` = nested queries not allowed as the INTO target
        assert.ok sanitizeTableSpy.calledWithExactly 'table', false
        assert.same '_t', @inst.table
    'buildStr()':
      'requires table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: into() needs to be called', err.toString()
      'returns formatted query phrase': ->
        @inst.into('table1')
        assert.same 'INTO table1', @inst.buildStr()
  # GetFieldBlock: SELECT field list — field()/fields() input handling,
  # alias quoting, duplicate discarding, and '*' fallback output.
  'GetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.GetFieldBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst._fields
    'fields() - object':
      'saves inputs': ->
        # object form: keys are field names, values are aliases (or null)
        fieldSpy = test.mocker.spy(@inst, 'field')
        @inst.fields({
          'field1': null
          'field2': 'alias2'
          'field3': null
        }, { dummy: true})
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.ok fieldSpy.calledThrice
        assert.ok fieldSpy.calledWithExactly('field1', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field2', 'alias2', dummy: true)
        assert.ok fieldSpy.calledWithExactly('field3', null, dummy: true)
        assert.same expected, @inst._fields
    'fields() - array':
      'saves inputs': ->
        # array form: plain field names, no aliases
        fieldSpy = test.mocker.spy(@inst, 'field')
        @inst.fields([ 'field1', 'field2', 'field3' ], { dummy: true})
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: null
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.ok fieldSpy.calledThrice
        assert.ok fieldSpy.calledWithExactly('field1', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field2', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field3', null, dummy: true)
        assert.same expected, @inst._fields
    'field()':
      'saves inputs': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field3')
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.same expected, @inst._fields
    'field() - discard duplicates':
      'saves inputs': ->
        # an exact (name, alias) duplicate is dropped; same name with a new
        # alias is kept as a distinct entry
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field2', 'alias2')
        @inst.field('field1', 'alias1')
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field1',
            alias: '"alias1"'
          }
        ]
        assert.same expected, @inst._fields
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeFieldAlias', -> return '_a'
        @inst.field('field1', 'alias1', { dummy: true})
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', { dummy: true }
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias1'
        assert.same [ { name: '_f', alias: '_a' } ], @inst._fields
    'buildStr()':
      'returns all fields when none provided': ->
        @inst._fields = []
        assert.same '*', @inst.buildStr()
      'returns formatted query phrase': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field3')
        assert.same 'field1, field2 AS "alias2", field3', @inst.buildStr()
  # AbstractSetFieldBlock: shared field/value storage for SET and INSERT —
  # _set(), _setFields(), _setFieldsRows() and the abstract build methods.
  'AbstractSetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractSetFieldBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial fields': ->
      assert.same [], @inst.fields
    'initial field options': ->
      assert.same [], @inst.fieldOptions
    'initial values': ->
      assert.same [], @inst.values
    '_set()':
      'saves inputs': ->
        # values/fieldOptions are stored as a single row (array of arrays);
        # a missing value is kept as `undefined`, missing options as {}
        @inst._set('field1', 'value1', dummy: 1)
        @inst._set('field2', 'value2', dummy: 2)
        @inst._set('field3', 'value3', dummy: 3)
        @inst._set('field4')
        expectedFields = [ 'field1', 'field2', 'field3', 'field4' ]
        expectedValues = [ [ 'value1', 'value2', 'value3', undefined ] ]
        expectedFieldOptions = [ [ {dummy: 1}, {dummy: 2}, {dummy: 3}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._set('field1', 'value1', dummy: true)
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', dummy: true
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ] ], @inst.values
    '_setFields()':
      'saves inputs': ->
        @inst._setFields
          'field1': 'value1'
          'field2': 'value2'
          'field3': 'value3'
        expectedFields = [ 'field1', 'field2', 'field3' ]
        expectedValues = [ [ 'value1', 'value2', 'value3'] ]
        expectedFieldOptions = [ [ {}, {}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._setFields({'field1': 'value1'}, {dummy: true})
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', dummy: true
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ] ], @inst.values
    '_setFieldsRows()':
      'saves inputs': ->
        # row form: one values-array (and options-array) per input object
        @inst._setFieldsRows [
          {
            'field1': 'value1'
            'field2': 'value2'
            'field3': 'value3'
          }
          {
            'field1': 'value21'
            'field2': 'value22'
            'field3': 'value23'
          }
        ]
        expectedFields = [ 'field1', 'field2', 'field3' ]
        expectedValues = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        expectedFieldOptions = [ [ {}, {}, {} ], [ {}, {}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._setFieldsRows [
          {
            'field1': 'value1'
          },
          {
            'field1': 'value21'
          }
        ], { dummy: true }
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', { dummy: true }
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.ok sanitizeValueSpy.calledWithExactly 'value21'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ], [ '_v' ] ], @inst.values
    'buildStr()': ->
      # abstract in the base class — subclasses must override
      assert.throws ( => @inst.buildStr()), 'Not yet implemented'
    'buildParam()': ->
      assert.throws ( => @inst.buildParam()), 'Not yet implemented'
  # SetFieldBlock: UPDATE's SET clause — set()/setFields() delegation and
  # string/parameterized rendering, including the squel#63 regression.
  'SetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.SetFieldBlock
      @inst = new @cls()
    'instanceof of AbstractSetFieldBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractSetFieldBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractSetFieldBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field options': ->
      assert.same [], @inst.fieldOptions
    'initial fields': ->
      assert.same [], @inst.fields
    'initial values': ->
      assert.same [], @inst.values
    'set()':
      'calls to _set()': ->
        spy = test.mocker.stub @inst, '_set'
        @inst.set 'f', 'v', dummy: true
        assert.ok spy.calledWithExactly('f', 'v', dummy: true)
    'setFields()':
      'calls to _setFields()': ->
        spy = test.mocker.stub @inst, '_setFields'
        @inst.setFields 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'buildStr()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: set() needs to be called', err.toString()
      'calls formatValue() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValue', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ] ]
        @inst.fieldOptions = [ [ {dummy: true}, {dummy: false}, {} ] ]
        assert.same 'SET field1 = [value1], field2 = [value2], field3 = [value3]', @inst.buildStr()
        assert.ok formatValueSpy.calledThrice
        # per-field options must be forwarded to the formatter
        assert.ok formatValueSpy.calledWithExactly 'value1', { dummy: true }
        assert.ok formatValueSpy.calledWithExactly 'value2', { dummy: false }
        assert.ok formatValueSpy.calledWithExactly 'value3', {}
    'buildParam()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        try
          @inst.buildParam()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: set() needs to be called', err.toString()
      'calls formatValueAsParam() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ] ]
        assert.same { text: 'SET field1 = ?, field2 = ?, field3 = ?', values: ['[value1]', '[value2]', '[value3]'] }, @inst.buildParam()
        assert.ok formatValueSpy.calledThrice
        assert.ok formatValueSpy.calledWithExactly 'value1'
        assert.ok formatValueSpy.calledWithExactly 'value2'
        assert.ok formatValueSpy.calledWithExactly 'value3'
      'Fix for hiddentao/squel#63': ->
        # a field with an undefined value is emitted verbatim, with no
        # placeholder and no entry in the values array
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> v
        @inst.fields = [ 'age = age + 1', 'field2', 'field3' ]
        @inst.values = [ [ undefined, 'value2', 'value3' ] ]
        assert.same { text: 'SET age = age + 1, field2 = ?, field3 = ?', values: ['value2', 'value3'] }, @inst.buildParam()
  # InsertFieldValueBlock: INSERT's (fields) VALUES (...) clause — multi-row
  # support and string/parameterized rendering.
  'InsertFieldValueBlock':
    beforeEach: ->
      @cls = squel.cls.InsertFieldValueBlock
      @inst = new @cls()
    'instanceof of AbstractSetFieldBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractSetFieldBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractSetFieldBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'set()':
      'calls to _set()': ->
        spy = test.mocker.stub @inst, '_set'
        @inst.set 'f', 'v', dummy: true
        assert.ok spy.calledWithExactly('f', 'v', dummy: true)
    'setFields()':
      'calls to _setFields()': ->
        spy = test.mocker.stub @inst, '_setFields'
        @inst.setFields 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'setFieldsRows()':
      'calls to _setFieldsRows()': ->
        spy = test.mocker.stub @inst, '_setFieldsRows'
        @inst.setFieldsRows 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'buildStr()':
      'needs at least one field to have been provided': ->
        # unlike SetFieldBlock, an empty field list renders as empty output
        @inst.fields = []
        assert.same '', @inst.buildStr()
      'calls formatValue() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValue', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        @inst.fieldOptions = [ [ {}, {}, {} ], [ {}, {}, { dummy: 23 } ] ]
        assert.same '(field1, field2, field3) VALUES ([value1], [value2], [value3]), ([value21], [value22], [value23])', @inst.buildStr()
        assert.same formatValueSpy.callCount, 6
        assert.ok formatValueSpy.calledWithExactly 'value1', {}
        assert.ok formatValueSpy.calledWithExactly 'value2', {}
        assert.ok formatValueSpy.calledWithExactly 'value3', {}
        assert.ok formatValueSpy.calledWithExactly 'value21', {}
        assert.ok formatValueSpy.calledWithExactly 'value22', {}
        assert.ok formatValueSpy.calledWithExactly 'value23', { dummy: 23 }
    'buildParam()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        assert.same {
          text: ''
          values: []
        }, @inst.buildParam()
      'calls formatValueAsParam() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        assert.same {
          text: '(field1, field2, field3) VALUES (?, ?, ?), (?, ?, ?)',
          values: [ '[value1]', '[value2]', '[value3]', '[value21]', '[value22]', '[value23]' ]
        }, @inst.buildParam()
        assert.same formatValueSpy.callCount, 6
        assert.ok formatValueSpy.calledWithExactly 'value1'
        assert.ok formatValueSpy.calledWithExactly 'value2'
        assert.ok formatValueSpy.calledWithExactly 'value3'
        assert.ok formatValueSpy.calledWithExactly 'value21'
        assert.ok formatValueSpy.calledWithExactly 'value22'
        assert.ok formatValueSpy.calledWithExactly 'value23'
  # InsertFieldsFromQueryBlock: INSERT ... (fields) (SELECT ...) — fromQuery()
  # input handling and how the nested query's text/params are embedded.
  'InsertFieldsFromQueryBlock':
    beforeEach: ->
      @cls = squel.cls.InsertFieldsFromQueryBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'fromQuery()':
      'sanitizes field names': ->
        spy = test.mocker.stub @inst, '_sanitizeField', -> 1
        qry = squel.select()
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.ok spy.calledThrice
        assert.ok spy.calledWithExactly 'test'
        assert.ok spy.calledWithExactly 'one'
        assert.ok spy.calledWithExactly 'two'
      'sanitizes query': ->
        spy = test.mocker.stub @inst, '_sanitizeNestableQuery', -> 1
        qry = 123
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.ok spy.calledOnce
        assert.ok spy.calledWithExactly qry
      'overwrites existing values': ->
        @inst._fields = 1
        @inst._query = 2
        qry = squel.select()
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.same qry, @inst._query
        assert.same ['test', 'one', 'two'], @inst._fields
    'buildStr()':
      'needs fromQuery() to have been called': ->
        @inst._fields = []
        assert.same "", @inst.buildStr()
      'default': ->
        qry = squel.select().from('mega')
        @inst.fromQuery ['test', 'one', 'two'], qry
        assert.same "(test, one, two) (#{qry.toString()})", @inst.buildStr()
    'buildParam()':
      'needs fromQuery() to have been called': ->
        @inst._fields = []
        expected = {
          text: '',
          values: []
        }
        assert.same expected, @inst.buildParam()
      'default': ->
        # the nested query's placeholder values bubble up into this block's
        qry = squel.select().from('mega')
        @inst.fromQuery ['test', 'one', 'two'], qry
        test.mocker.stub qry, 'toParam', ->
          {
            text: 'blah',
            values: [1,2,3]
          }
        expected = {
          text: '(test, one, two) (blah)',
          values: [1,2,3]
        }
        assert.same expected, @inst.buildParam()
'DistinctBlock':
beforeEach: ->
@cls = squel.cls.DistinctBlock
@inst = new @cls()
'instanceof of Block': ->
assert.instanceOf @inst, squel.cls.Block
'calls base constructor': ->
spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
@inst = new @cls
dummy: true
assert.ok spy.calledWithExactly
dummy:true
'initial field values': ->
assert.same false, @inst.useDistinct
'distinct()':
'sets the flat': ->
@inst.distinct()
assert.same true, @inst.useDistinct
@inst.distinct()
assert.same true, @inst.useDistinct
'buildStr()':
'output nothing if not set': ->
@inst.useDistinct = false
assert.same '', @inst.buildStr()
'output DISTINCT if set': ->
@inst.useDistinct = true
assert.same 'DISTINCT', @inst.buildStr()
'GroupByBlock':
beforeEach: ->
@cls = squel.cls.GroupByBlock
@inst = new @cls()
'instanceof of Block': ->
assert.instanceOf @inst, squel.cls.Block
'calls base constructor': ->
spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
@inst = new @cls
dummy: true
assert.ok spy.calledWithExactly
dummy:true
'initial field values': ->
assert.same [], @inst.groups
'group()':
'adds to list': ->
@inst.group('field1')
@inst.group('field2')
assert.same ['field1', 'field2'], @inst.groups
'sanitizes inputs': ->
sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
@inst.group('field1')
assert.ok sanitizeFieldSpy.calledWithExactly 'field1'
assert.same ['_f'], @inst.groups
'buildStr()':
'output nothing if no fields set': ->
@inst.groups = []
assert.same '', @inst.buildStr()
'output GROUP BY': ->
@inst.group('field1')
@inst.group('field2')
assert.same 'GROUP BY field1, field2', @inst.buildStr()
  # OffsetBlock: stores a single OFFSET value; later calls overwrite earlier.
  'OffsetBlock':
    beforeEach: ->
      @cls = squel.cls.OffsetBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      # NOTE(review): object below is assert.ok's message — calledWithExactly is
      # never invoked (vacuous assertion)
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.offsets
    'offset()':
      'set value': ->
        @inst.offset(1)
        assert.same 1, @inst.offsets
        @inst.offset(22)
        assert.same 22, @inst.offsets
      'sanitizes inputs': ->
        sanitizeSpy = test.mocker.stub @cls.prototype, '_sanitizeLimitOffset', -> return 234
        @inst.offset(23)
        assert.ok sanitizeSpy.calledWithExactly 23
        assert.same 234, @inst.offsets
    'buildStr()':
      'output nothing if not set': ->
        @inst.offsets = null
        assert.same '', @inst.buildStr()
      'output OFFSET': ->
        @inst.offset(12)
        assert.same 'OFFSET 12', @inst.buildStr()
  # Tests for squel.cls.WhereBlock — the WHERE-clause builder, covering
  # placeholder substitution, nested sub-queries, squel expressions, and
  # both string (buildStr) and parameterized (buildParam) output.
  'WhereBlock':
    beforeEach: ->
      @cls = squel.cls.WhereBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy: true
    'initial field values': ->
      assert.same [], @inst.wheres
    'where()':
      'adds to list': ->
        @inst.where('a = 1')
        @inst.where('b = 2 OR c = 3')
        # Each condition is stored as a {text, values} record.
        assert.same [
          {
            text: 'a = 1'
            values: []
          }
          {
            text: 'b = 2 OR c = 3'
            values: []
          }
        ], @inst.wheres
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeCondition', -> return '_c'
        @inst.where('a = 1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'a = 1'
        assert.same [{
          text: '_c'
          values: []
        }], @inst.wheres
      'handles variadic arguments': ->
        sanitizeStub = test.mocker.stub @cls.prototype, '_sanitizeValue', _.identity
        substitutes = ['hello', [1, 2, 3]]
        @inst.where.apply @inst, ['a = ? and b in ?'].concat(substitutes)
        # Each flattened substitution value is sanitized individually, in order.
        expectedValues = _.flatten substitutes
        for expectedValue, index in expectedValues
          assert.ok sanitizeStub.getCall(index).calledWithExactly expectedValue
        # An array substitute expands its "?" into one placeholder per element.
        assert.same [
          {
            text: 'a = ? and b in (?, ?, ?)'
            values: ['hello', 1, 2, 3]
          }
        ], @inst.wheres
    'buildStr()':
      'output QueryBuilder ': ->
        # A sub-query bound to "?" is inlined in parentheses in string output.
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        @inst.where('a in ?', subquery)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = 10))) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'output nothing if no conditions set': ->
        @inst.wheres = []
        assert.same '', @inst.buildStr()
      'output WHERE ': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a = 1) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'Fix for hiddentao/squel#64': ->
        # Regression test: buildStr() must be repeatable (no state consumed).
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        # second time it should still work
        @inst.buildStr()
        @inst.buildStr()
        assert.same 'WHERE (a = 1) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'formats values ': ->
        formatValueStub = test.mocker.stub @cls.prototype, '_formatValue', (a) -> '[' + a + ']'
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a = [1]) AND (b = [2] OR c = [3]) AND (d in ([4], [5], [6]))', @inst.buildStr()
    'buildParam()':
      'output QueryBuilder ': ->
        # In parameterized output the sub-query keeps its "?" and contributes
        # its values to the combined values array.
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        @inst.where('a in ?', subquery)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same { text: 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = ?))) AND (b = ? OR c = ?) AND (d in (?, ?, ?))', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'output QueryBuilder expr': ->
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        expr = squel.expr().and('a in ?',subquery)
          .and_begin().or('b = ?', 2).or('c = ?', 3).end().and_begin()
          .and('d in ?', [4, 5, 6]).end()
        @inst.where(expr)
        #assert.same { text: '', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
        assert.same { text: 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = ?)) AND (b = ? OR c = ?) AND (d in (?, ?, ?)))', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'output nothing if no conditions set': ->
        @inst.wheres = []
        assert.same { text: '', values: [] }, @inst.buildParam()
      'output WHERE ': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same { text: 'WHERE (a = ?) AND (b = ? OR c = ?) AND (d in (?, ?, ?))', values: [1, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'formats value types as params': ->
        # buildParam() must route values through _formatValueAsParam and must
        # NOT call the string-mode formatter _formatValue.
        formatValueSpy = test.mocker.spy @cls.prototype, '_formatValue'
        test.mocker.stub @cls.prototype, '_formatValueAsParam', (a) -> '[' + a + ']'
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same {
          text: 'WHERE (a = ?) AND (b = ? OR c = ?) AND (d in (?, ?, ?))',
          values: ['[1]', '[2]', '[3]', '[4]', '[5]', '[6]']
        }, @inst.buildParam()
        assert.ok formatValueSpy.notCalled
  # Tests for squel.cls.OrderByBlock — the ORDER BY builder. Direction is a
  # boolean: true => ASC, false => DESC (see the buildStr expectations below).
  'OrderByBlock':
    beforeEach: ->
      @cls = squel.cls.OrderByBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.orders
      assert.same [], @inst._values
    'order()':
      'adds to list': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        # Direction defaults to true (ASC) when omitted.
        expected = [
          {
            field: 'field1',
            dir: true
          },
          {
            field: 'field2',
            dir: false
          },
          {
            field: 'field3',
            dir: true
          }
        ]
        assert.same expected, @inst.orders
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        @inst.order('field1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1'
        assert.same [ { field: '_f', dir: true } ], @inst.orders
      'saves additional values': ->
        # Extra trailing arguments are collected into _values (used by
        # buildParam below), not into the orders list.
        @inst.order('field1', false, 1.2, 4)
        assert.same [ { field: 'field1', dir: false } ], @inst.orders
        assert.same [1.2, 4], @inst._values
    'buildStr()':
      'output nothing if nothing set': ->
        @inst.orders = []
        assert.same '', @inst.buildStr()
      'output ORDER BY': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        assert.same 'ORDER BY field1 ASC, field2 DESC, field3 ASC', @inst.buildStr()
    'buildParam()':
      'empty': ->
        @inst.orders = []
        assert.same { text: '', values: [] }, @inst.buildParam()
      'default': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        assert.same { text: 'ORDER BY field1 ASC, field2 DESC, field3 ASC', values: [] }, @inst.buildParam()
      'with values': ->
        @inst.order('field3', true, 1.2, 5)
        assert.same { text: 'ORDER BY field3 ASC', values: [1.2, 5] }, @inst.buildParam()
  # Tests for squel.cls.LimitBlock — the "LIMIT n" clause builder.
  # Mirrors the OffsetBlock suite above.
  'LimitBlock':
    beforeEach: ->
      @cls = squel.cls.LimitBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.limits
    'limit()':
      'set value': ->
        @inst.limit(1)
        assert.same 1, @inst.limits
        @inst.limit(22)
        assert.same 22, @inst.limits
      'sanitizes inputs': ->
        sanitizeSpy = test.mocker.stub @cls.prototype, '_sanitizeLimitOffset', -> return 234
        @inst.limit(23)
        assert.ok sanitizeSpy.calledWithExactly 23
        assert.same 234, @inst.limits
    'buildStr()':
      'output nothing if not set': ->
        @inst.limits = null
        assert.same '', @inst.buildStr()
      'output nothing if set to 0': ->
        # LIMIT 0 is treated as "no limit clause" — unlike OffsetBlock,
        # which has no equivalent zero case in its suite.
        @inst.limit(0)
        assert.same '', @inst.buildStr()
      'output LIMIT': ->
        @inst.limit(12)
        assert.same 'LIMIT 12', @inst.buildStr()
  # Tests for squel.cls.JoinBlock — the JOIN-clause builder. Covers the join
  # type argument (default INNER), alias quoting with backticks, nested
  # query-builder tables, and expression-based ON conditions.
  'JoinBlock':
    beforeEach: ->
      @cls = squel.cls.JoinBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.joins
    'join()':
      'adds to list': ->
        @inst.join('table1')
        @inst.join('table2', null, 'b = 1', 'LEFT')
        @inst.join('table3', 'alias3', 'c = 1', 'RIGHT')
        @inst.join('table4', 'alias4', 'd = 1', 'OUTER')
        @inst.join('table5', 'alias5', null, 'CROSS')
        # Join type defaults to INNER; aliases are stored backtick-quoted.
        expected = [
          {
            type: 'INNER',
            table: 'table1',
            alias: null,
            condition: null
          },
          {
            type: 'LEFT',
            table: 'table2',
            alias: null,
            condition: 'b = 1'
          },
          {
            type: 'RIGHT',
            table: 'table3',
            alias: '`alias3`',
            condition: 'c = 1'
          },
          {
            type: 'OUTER',
            table: 'table4',
            alias: '`alias4`',
            condition: 'd = 1'
          },
          {
            type: 'CROSS',
            table: 'table5',
            alias: '`alias5`',
            condition: null
          }
        ]
        assert.same expected, @inst.joins
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeTableAlias', -> return '_a'
        sanitizeConditionSpy = test.mocker.stub @cls.prototype, '_sanitizeCondition', -> return '_c'
        @inst.join('table1', 'alias1', 'a = 1')
        # Second argument `true` permits nested-query tables.
        assert.ok sanitizeTableSpy.calledWithExactly 'table1', true
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias1'
        assert.ok sanitizeConditionSpy.calledWithExactly 'a = 1'
        expected = [
          {
            type: 'INNER',
            table: '_t',
            alias: '_a',
            condition: '_c'
          }
        ]
        assert.same expected, @inst.joins
      'nested queries': ->
        # A QueryBuilder instance is stored as the table verbatim.
        inner1 = squel.select()
        inner2 = squel.select()
        inner3 = squel.select()
        inner4 = squel.select()
        inner5 = squel.select()
        inner6 = squel.select()
        @inst.join(inner1)
        @inst.join(inner2, null, 'b = 1', 'LEFT')
        @inst.join(inner3, 'alias3', 'c = 1', 'RIGHT')
        @inst.join(inner4, 'alias4', 'd = 1', 'OUTER')
        @inst.join(inner5, 'alias5', 'e = 1', 'FULL')
        @inst.join(inner6, 'alias6', null, 'CROSS')
        expected = [
          {
            type: 'INNER',
            table: inner1,
            alias: null,
            condition: null
          },
          {
            type: 'LEFT',
            table: inner2,
            alias: null,
            condition: 'b = 1'
          },
          {
            type: 'RIGHT',
            table: inner3,
            alias: '`alias3`',
            condition: 'c = 1'
          },
          {
            type: 'OUTER',
            table: inner4,
            alias: '`alias4`',
            condition: 'd = 1'
          },
          {
            type: 'FULL',
            table: inner5,
            alias: '`alias5`',
            condition: 'e = 1'
          },
          {
            type: 'CROSS',
            table: inner6,
            alias: '`alias6`',
            condition: null
          }
        ]
        assert.same expected, @inst.joins
    'left_join()':
      'calls join()': ->
        # left_join() is a thin wrapper that delegates with type 'LEFT'.
        joinSpy = test.mocker.stub(@inst, 'join')
        @inst.left_join('t', 'a', 'c')
        assert.ok joinSpy.calledOnce
        assert.ok joinSpy.calledWithExactly('t', 'a', 'c', 'LEFT')
    'buildStr()':
      'output nothing if nothing set': ->
        @inst.joins = []
        assert.same '', @inst.buildStr()
      'output JOINs': ->
        @inst.join('table1')
        @inst.join('table2', null, 'b = 1', 'LEFT')
        @inst.join('table3', 'alias3', 'c = 1', 'RIGHT')
        @inst.join('table4', 'alias4', 'd = 1', 'FULL')
        @inst.join('table5', 'alias5', null, 'CROSS')
        assert.same 'INNER JOIN table1 LEFT JOIN table2 ON (b = 1) RIGHT JOIN table3 `alias3` ON (c = 1) FULL JOIN table4 `alias4` ON (d = 1) CROSS JOIN table5 `alias5`', @inst.buildStr()
      'output JOINs with nested query': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        inner3 = squel.select().from('3')
        inner4 = squel.select().from('4')
        inner5 = squel.select().from('5')
        @inst.join(inner1)
        @inst.join(inner2, null, 'b = 1', 'LEFT')
        @inst.join(inner3, 'alias3', 'c = 1', 'RIGHT')
        @inst.join(inner4, 'alias4', 'e = 1', 'FULL')
        @inst.join(inner5, 'alias5', null, 'CROSS')
        assert.same 'INNER JOIN (SELECT * FROM 1) LEFT JOIN (SELECT * FROM 2) ON (b = 1) RIGHT JOIN (SELECT * FROM 3) `alias3` ON (c = 1) FULL JOIN (SELECT * FROM 4) `alias4` ON (e = 1) CROSS JOIN (SELECT * FROM 5) `alias5`', @inst.buildStr()
      'QueryBuilder in ON condition expr()': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        expr = squel.expr()
          .and('field1 = ?',inner2)
        @inst.join(inner1, null, expr)
        assert.same 'INNER JOIN (SELECT * FROM 1) ON (field1 = (SELECT * FROM 2))', @inst.buildStr()
    'buildParam()':
      'QueryBuilder in ON condition expr()': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        expr = squel.expr()
          .and('field1 = ?',inner2)
        @inst.join(inner1, null, expr)
        assert.same { text: 'INNER JOIN (SELECT * FROM 1) ON (field1 = (SELECT * FROM 2))', values: [] }, @inst.buildParam()
# Register this suite on module.exports keyed by the file's basename
# (the ?. guard makes the file safe to load where `module` is undefined).
module?.exports[require('path').basename(__filename)] = test
| true | ###
Copyright (c) 2014 PI:NAME:<NAME>END_PI (hiddentao.PI:EMAIL:<EMAIL>END_PI)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
# Load the library under test plus the shared test harness utilities,
# then create the suite container via the harness factory.
squel = require "../squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['Blocks'] =
  # Tests for the squel.cls.Block base class: option merging, the default
  # (empty) buildStr(), exposedMethods() filtering, and clone() semantics.
  'Block base class':
    beforeEach: ->
      @inst = new squel.cls.Block()
    'instanceof of BaseBuilder': ->
      assert.instanceOf @inst, squel.cls.BaseBuilder
    'options': ->
      # Constructor options are merged over the library-wide defaults.
      expectedOptions = _.extend {}, squel.cls.DefaultQueryBuilderOptions,
        usingValuePlaceholders: true
        dummy: true
      @inst = new squel.cls.Block
        usingValuePlaceholders: true
        dummy: true
      assert.same expectedOptions, @inst.options
    'buildStr()': ->
      assert.same '', @inst.buildStr()
    'exposedMethods()':
      'returns methods': ->
        @inst['method1'] = -> return false
        @inst['method2'] = -> return false
        # NOTE(review): assert.ok with two arguments treats the second as a
        # message, so this assertion only checks that the array literal is
        # truthy and can never fail — likely meant to compare the two lists.
        # Confirm intent before changing.
        assert.ok ['method1', 'method2'], (name for name of @inst.exposedMethods())
      'ignores methods prefixed with _': ->
        @inst['_method'] = -> return false
        assert.ok undefined is _.find (name for name of @inst.exposedMethods()), (name) ->
          return name is '_method'
      'ignores buildStr()': ->
        assert.ok undefined is _.find (name for name of @inst.exposedMethods()), (name) ->
          return name is 'buildStr'
    'cloning copies the options over': ->
      # The clone must deep-copy options: mutating the original afterwards
      # must not affect the clone.
      @inst.options.dummy = true;
      newinst = @inst.clone()
      @inst.options.dummy = false;
      assert.same true, newinst.options.dummy
  # Tests for squel.cls.StringBlock — a block that emits a fixed string
  # (passed as the second constructor argument) verbatim.
  'StringBlock':
    beforeEach: ->
      @cls = squel.cls.StringBlock
      @inst = new @cls
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'buildStr()':
      'returns the string as is': ->
        @inst = new @cls {}, 'TAG'
        assert.same 'TAG', @inst.buildStr()
    'buildParam()':
      'returns the string as is': ->
        # Parameterized form carries the same text and no values.
        @inst = new @cls {}, 'TAG'
        assert.same { text: 'TAG', values: [] }, @inst.buildParam()
  # Tests for squel.cls.AbstractValueBlock — base class for blocks that hold
  # one value in _val, set via the protected _setValue() helper.
  'AbstractValueBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractValueBlock
      @inst = new @cls
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial member values': ->
      assert.same null, @inst._val
    'buildStr()':
      'when value not set': ->
        assert.same '', @inst.buildStr()
      'when value set': ->
        @inst._setValue 'bla'
        assert.same 'bla', @inst.buildStr()
    'buildParam()':
      'when value not set': ->
        assert.same @inst.buildParam(), { text: '', values: [] }
      'when value set': ->
        @inst._setValue 'bla'
        assert.same @inst.buildParam(), { text: 'bla', values: [] }
  # Tests for squel.cls.AbstractTableBlock — base class that collects
  # (table, alias) pairs via the protected _table() helper, with options for
  # single-table mode and nested sub-query tables.
  'AbstractTableBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractTableBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.tables
    '_table()':
      'saves inputs': ->
        @inst._table('table1')
        @inst._table('table2', 'alias2')
        @inst._table('table3')
        # Aliases are stored backtick-quoted; omitted alias becomes null.
        expectedFroms = [
          {
            table: 'table1',
            alias: null
          },
          {
            table: 'table2',
            alias: '`alias2`'
          },
          {
            table: 'table3',
            alias: null
          }
        ]
        assert.same expectedFroms, @inst.tables
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeTableAlias', -> return '_a'
        @inst._table('table', 'alias')
        assert.ok sanitizeTableSpy.calledWith 'table'
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias'
        assert.same [ { table: '_t', alias: '_a' }], @inst.tables
      'handles single-table mode': ->
        # With options.singleTable, each _table() call replaces the previous
        # entry — only the last one survives.
        @inst.options.singleTable = true
        @inst._table('table1')
        @inst._table('table2')
        @inst._table('table3')
        expected = [
          {
            table: 'table3',
            alias: null
          }
        ]
        assert.same expected, @inst.tables
      'if not allowing nested queries': ->
        # options.allowNested is forwarded as _sanitizeTable's second arg.
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        innerTable = squel.select()
        @inst.options.allowNested = false
        @inst._table(innerTable)
        assert.ok sanitizeTableSpy.calledWithExactly innerTable, false
      'if allowing nested queries': ->
        sanitizeTableSpy = test.mocker.spy @cls.prototype, '_sanitizeTable'
        innerTable1 = squel.select()
        innerTable2 = squel.select()
        @inst.options.allowNested = true
        @inst._table(innerTable1)
        @inst._table(innerTable2, 'Inner2')
        assert.ok sanitizeTableSpy.calledWithExactly innerTable1, true
        assert.ok sanitizeTableSpy.calledWithExactly innerTable2, true
        expected = [
          {
            alias: null
            table: innerTable1
          }
          {
            alias: '`Inner2`'
            table: innerTable2
          }
        ]
        assert.same expected, @inst.tables
    'buildStr()':
      'requires at least one table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: _table() needs to be called', err.toString()
      'returns formatted query phrase': ->
        @inst._table('table1')
        @inst._table('table2', 'alias2')
        @inst._table('table3')
        assert.same 'table1, table2 `alias2`, table3', @inst.buildStr()
      'handles nested query': ->
        # Nested builders are rendered inline, wrapped in parentheses.
        innerTable1 = squel.select().from('inner1')
        innerTable2 = squel.select().from('inner2')
        @inst.options.allowNested = true
        @inst._table(innerTable1)
        @inst._table(innerTable2, 'inner2')
        assert.same '(SELECT * FROM inner1), (SELECT * FROM inner2) `inner2`', @inst.buildStr()
  # Tests for squel.cls.FromTableBlock — the FROM clause. Verifies that
  # from() delegates to AbstractTableBlock._table() and that buildStr()
  # prefixes the base class's output with "FROM ".
  'FromTableBlock':
    beforeEach: ->
      @cls = squel.cls.FromTableBlock
      @inst = new @cls()
    'instanceof of AbstractTableBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractTableBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractTableBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'from()':
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, '_table'
        @inst.from('table1')
        @inst.from('table2', 'alias2')
        assert.same 2, baseMethodSpy.callCount
        assert.ok baseMethodSpy.calledWithExactly('table1', null)
        assert.ok baseMethodSpy.calledWithExactly('table2', 'alias2')
    'buildStr()':
      'requires at least one table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: from() needs to be called', err.toString()
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, 'buildStr', -> 'blah'
        @inst.from('table')
        assert.same 'FROM blah', @inst.buildStr()
  # Tests for squel.cls.UpdateTableBlock — the UPDATE target table block.
  # table() is a thin delegate to AbstractTableBlock._table().
  'UpdateTableBlock':
    beforeEach: ->
      @cls = squel.cls.UpdateTableBlock
      @inst = new @cls()
    'instanceof of AbstractTableBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractTableBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractTableBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'table()':
      'calls base class handler': ->
        baseMethodSpy = test.mocker.stub squel.cls.AbstractTableBlock.prototype, '_table'
        @inst.table('table1')
        @inst.table('table2', 'alias2')
        assert.same 2, baseMethodSpy.callCount
        assert.ok baseMethodSpy.calledWithExactly('table1', null)
        assert.ok baseMethodSpy.calledWithExactly('table2', 'alias2')
  # Tests for squel.cls.IntoTableBlock — the "INTO table" clause of INSERT.
  # Holds a single table name; repeated into() calls overwrite it.
  'IntoTableBlock':
    beforeEach: ->
      @cls = squel.cls.IntoTableBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.table
    'into()':
      'saves inputs': ->
        @inst.into('table1')
        @inst.into('table2')
        @inst.into('table3')
        # Last call wins — only one target table is kept.
        assert.same 'table3', @inst.table
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        @inst.into('table')
        # `false`: nested queries are not allowed as an INSERT target.
        assert.ok sanitizeTableSpy.calledWithExactly 'table', false
        assert.same '_t', @inst.table
    'buildStr()':
      'requires table to have been provided': ->
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: into() needs to be called', err.toString()
      'returns formatted query phrase': ->
        @inst.into('table1')
        assert.same 'INTO table1', @inst.buildStr()
  # Tests for squel.cls.GetFieldBlock — the SELECT field list. Covers the
  # bulk fields() API (object and array forms), single-field field(),
  # duplicate handling, and the "*" fallback in buildStr().
  'GetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.GetFieldBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst._fields
    'fields() - object':
      'saves inputs': ->
        # Object form: keys are field names, values are aliases (or null).
        # Options are forwarded to each field() call.
        fieldSpy = test.mocker.spy(@inst, 'field')
        @inst.fields({
          'field1': null
          'field2': 'alias2'
          'field3': null
        }, { dummy: true})
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.ok fieldSpy.calledThrice
        assert.ok fieldSpy.calledWithExactly('field1', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field2', 'alias2', dummy: true)
        assert.ok fieldSpy.calledWithExactly('field3', null, dummy: true)
        assert.same expected, @inst._fields
    'fields() - array':
      'saves inputs': ->
        # Array form: plain field names, no aliases.
        fieldSpy = test.mocker.spy(@inst, 'field')
        @inst.fields([ 'field1', 'field2', 'field3' ], { dummy: true})
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: null
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.ok fieldSpy.calledThrice
        assert.ok fieldSpy.calledWithExactly('field1', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field2', null, dummy: true)
        assert.ok fieldSpy.calledWithExactly('field3', null, dummy: true)
        assert.same expected, @inst._fields
    'field()':
      'saves inputs': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field3')
        # Field aliases are stored double-quoted (unlike table aliases,
        # which use backticks elsewhere in these suites).
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field3',
            alias: null
          }
        ]
        assert.same expected, @inst._fields
    'field() - discard duplicates':
      'saves inputs': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field2', 'alias2')
        @inst.field('field1', 'alias1')
        # Only an exact (name, alias) duplicate is dropped; the same name
        # with a different alias is kept as a separate entry.
        expected = [
          {
            name: 'field1',
            alias: null
          },
          {
            name: 'field2',
            alias: '"alias2"'
          },
          {
            name: 'field1',
            alias: '"alias1"'
          }
        ]
        assert.same expected, @inst._fields
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeFieldAlias', -> return '_a'
        @inst.field('field1', 'alias1', { dummy: true})
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', { dummy: true }
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias1'
        assert.same [ { name: '_f', alias: '_a' } ], @inst._fields
    'buildStr()':
      'returns all fields when none provided': ->
        @inst._fields = []
        assert.same '*', @inst.buildStr()
      'returns formatted query phrase': ->
        @inst.field('field1')
        @inst.field('field2', 'alias2')
        @inst.field('field3')
        assert.same 'field1, field2 AS "alias2", field3', @inst.buildStr()
  # Tests for squel.cls.AbstractSetFieldBlock — shared base for SET/VALUES
  # blocks. Maintains parallel arrays: fields, values (one row per array
  # entry) and per-field fieldOptions. buildStr/buildParam are abstract here.
  'AbstractSetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.AbstractSetFieldBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial fields': ->
      assert.same [], @inst.fields
    'initial field options': ->
      assert.same [], @inst.fieldOptions
    'initial values': ->
      assert.same [], @inst.values
    '_set()':
      'saves inputs': ->
        @inst._set('field1', 'value1', dummy: 1)
        @inst._set('field2', 'value2', dummy: 2)
        @inst._set('field3', 'value3', dummy: 3)
        @inst._set('field4')
        # All _set() calls accumulate into a single row; a missing value is
        # stored as undefined and missing options as an empty object.
        expectedFields = [ 'field1', 'field2', 'field3', 'field4' ]
        expectedValues = [ [ 'value1', 'value2', 'value3', undefined ] ]
        expectedFieldOptions = [ [ {dummy: 1}, {dummy: 2}, {dummy: 3}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._set('field1', 'value1', dummy: true)
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', dummy: true
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ] ], @inst.values
    '_setFields()':
      'saves inputs': ->
        @inst._setFields
          'field1': 'value1'
          'field2': 'value2'
          'field3': 'value3'
        expectedFields = [ 'field1', 'field2', 'field3' ]
        expectedValues = [ [ 'value1', 'value2', 'value3'] ]
        expectedFieldOptions = [ [ {}, {}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._setFields({'field1': 'value1'}, {dummy: true})
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', dummy: true
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ] ], @inst.values
    '_setFieldsRows()':
      'saves inputs': ->
        # Multi-row form (INSERT ... VALUES (...), (...)): each object in the
        # array becomes one row in values/fieldOptions.
        @inst._setFieldsRows [
          {
            'field1': 'value1'
            'field2': 'value2'
            'field3': 'value3'
          }
          {
            'field1': 'value21'
            'field2': 'value22'
            'field3': 'value23'
          }
        ]
        expectedFields = [ 'field1', 'field2', 'field3' ]
        expectedValues = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        expectedFieldOptions = [ [ {}, {}, {} ], [ {}, {}, {} ] ]
        assert.same expectedFields, @inst.fields
        assert.same expectedValues, @inst.values
        assert.same expectedFieldOptions, @inst.fieldOptions
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        sanitizeValueSpy = test.mocker.stub @cls.prototype, '_sanitizeValue', -> return '_v'
        @inst._setFieldsRows [
          {
            'field1': 'value1'
          },
          {
            'field1': 'value21'
          }
        ], { dummy: true }
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1', { dummy: true }
        assert.ok sanitizeValueSpy.calledWithExactly 'value1'
        assert.ok sanitizeValueSpy.calledWithExactly 'value21'
        assert.same [ '_f' ], @inst.fields
        assert.same [ [ '_v' ], [ '_v' ] ], @inst.values
    # The abstract base must refuse to build output directly.
    'buildStr()': ->
      assert.throws ( => @inst.buildStr()), 'Not yet implemented'
    'buildParam()': ->
      assert.throws ( => @inst.buildParam()), 'Not yet implemented'
  # Tests for squel.cls.SetFieldBlock — the UPDATE "SET f = v, ..." clause,
  # built on AbstractSetFieldBlock. Covers string and parameterized output.
  'SetFieldBlock':
    beforeEach: ->
      @cls = squel.cls.SetFieldBlock
      @inst = new @cls()
    'instanceof of AbstractSetFieldBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractSetFieldBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractSetFieldBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field options': ->
      assert.same [], @inst.fieldOptions
    'initial fields': ->
      assert.same [], @inst.fields
    'initial values': ->
      assert.same [], @inst.values
    'set()':
      'calls to _set()': ->
        spy = test.mocker.stub @inst, '_set'
        @inst.set 'f', 'v', dummy: true
        assert.ok spy.calledWithExactly('f', 'v', dummy: true)
    'setFields()':
      'calls to _setFields()': ->
        spy = test.mocker.stub @inst, '_setFields'
        @inst.setFields 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'buildStr()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        try
          @inst.buildStr()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: set() needs to be called', err.toString()
      'calls formatValue() for each field value': ->
        # Per-field options are forwarded to the formatter.
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValue', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ] ]
        @inst.fieldOptions = [ [ {dummy: true}, {dummy: false}, {} ] ]
        assert.same 'SET field1 = [value1], field2 = [value2], field3 = [value3]', @inst.buildStr()
        assert.ok formatValueSpy.calledThrice
        assert.ok formatValueSpy.calledWithExactly 'value1', { dummy: true }
        assert.ok formatValueSpy.calledWithExactly 'value2', { dummy: false }
        assert.ok formatValueSpy.calledWithExactly 'value3', {}
    'buildParam()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        try
          @inst.buildParam()
          throw new Error 'should not reach here'
        catch err
          assert.same 'Error: set() needs to be called', err.toString()
      'calls formatValueAsParam() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ] ]
        assert.same { text: 'SET field1 = ?, field2 = ?, field3 = ?', values: ['[value1]', '[value2]', '[value3]'] }, @inst.buildParam()
        assert.ok formatValueSpy.calledThrice
        assert.ok formatValueSpy.calledWithExactly 'value1'
        assert.ok formatValueSpy.calledWithExactly 'value2'
        assert.ok formatValueSpy.calledWithExactly 'value3'
      'Fix for hiddentao/squel#63': ->
        # Regression test: an undefined value means the field expression is
        # emitted verbatim with no placeholder and no entry in values.
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> v
        @inst.fields = [ 'age = age + 1', 'field2', 'field3' ]
        @inst.values = [ [ undefined, 'value2', 'value3' ] ]
        assert.same { text: 'SET age = age + 1, field2 = ?, field3 = ?', values: ['value2', 'value3'] }, @inst.buildParam()
  # Tests for squel.cls.InsertFieldValueBlock — the INSERT
  # "(fields) VALUES (...)" clause, supporting multiple value rows.
  'InsertFieldValueBlock':
    beforeEach: ->
      @cls = squel.cls.InsertFieldValueBlock
      @inst = new @cls()
    'instanceof of AbstractSetFieldBlock': ->
      assert.instanceOf @inst, squel.cls.AbstractSetFieldBlock
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.AbstractSetFieldBlock.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'set()':
      'calls to _set()': ->
        spy = test.mocker.stub @inst, '_set'
        @inst.set 'f', 'v', dummy: true
        assert.ok spy.calledWithExactly('f', 'v', dummy: true)
    'setFields()':
      'calls to _setFields()': ->
        spy = test.mocker.stub @inst, '_setFields'
        @inst.setFields 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'setFieldsRows()':
      'calls to _setFieldsRows()': ->
        spy = test.mocker.stub @inst, '_setFieldsRows'
        @inst.setFieldsRows 'f', dummy: true
        assert.ok spy.calledWithExactly('f', dummy: true)
    'buildStr()':
      'needs at least one field to have been provided': ->
        # Unlike SetFieldBlock, an empty field list yields an empty string
        # here rather than throwing.
        @inst.fields = []
        assert.same '', @inst.buildStr()
      'calls formatValue() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValue', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        @inst.fieldOptions = [ [ {}, {}, {} ], [ {}, {}, { dummy: 23 } ] ]
        assert.same '(field1, field2, field3) VALUES ([value1], [value2], [value3]), ([value21], [value22], [value23])', @inst.buildStr()
        assert.same formatValueSpy.callCount, 6
        assert.ok formatValueSpy.calledWithExactly 'value1', {}
        assert.ok formatValueSpy.calledWithExactly 'value2', {}
        assert.ok formatValueSpy.calledWithExactly 'value3', {}
        assert.ok formatValueSpy.calledWithExactly 'value21', {}
        assert.ok formatValueSpy.calledWithExactly 'value22', {}
        assert.ok formatValueSpy.calledWithExactly 'value23', { dummy: 23 }
    'buildParam()':
      'needs at least one field to have been provided': ->
        @inst.fields = []
        assert.same {
          text: ''
          values: []
        }, @inst.buildParam()
      'calls formatValueAsParam() for each field value': ->
        formatValueSpy = test.mocker.stub @cls.prototype, '_formatValueAsParam', (v) -> return "[#{v}]"
        @inst.fields = [ 'field1', 'field2', 'field3' ]
        @inst.values = [ [ 'value1', 'value2', 'value3' ], [ 'value21', 'value22', 'value23' ] ]
        assert.same {
          text: '(field1, field2, field3) VALUES (?, ?, ?), (?, ?, ?)',
          values: [ '[value1]', '[value2]', '[value3]', '[value21]', '[value22]', '[value23]' ]
        }, @inst.buildParam()
        assert.same formatValueSpy.callCount, 6
        assert.ok formatValueSpy.calledWithExactly 'value1'
        assert.ok formatValueSpy.calledWithExactly 'value2'
        assert.ok formatValueSpy.calledWithExactly 'value3'
        assert.ok formatValueSpy.calledWithExactly 'value21'
        assert.ok formatValueSpy.calledWithExactly 'value22'
        assert.ok formatValueSpy.calledWithExactly 'value23'
  # Tests for squel.cls.InsertFieldsFromQueryBlock — the
  # "INSERT INTO t (fields) (SELECT ...)" form, configured via fromQuery().
  'InsertFieldsFromQueryBlock':
    beforeEach: ->
      @cls = squel.cls.InsertFieldsFromQueryBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'fromQuery()':
      'sanitizes field names': ->
        spy = test.mocker.stub @inst, '_sanitizeField', -> 1
        qry = squel.select()
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.ok spy.calledThrice
        assert.ok spy.calledWithExactly 'test'
        assert.ok spy.calledWithExactly 'one'
        assert.ok spy.calledWithExactly 'two'
      'sanitizes query': ->
        spy = test.mocker.stub @inst, '_sanitizeNestableQuery', -> 1
        qry = 123
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.ok spy.calledOnce
        assert.ok spy.calledWithExactly qry
      'overwrites existing values': ->
        # Repeated calls replace, not append.
        @inst._fields = 1
        @inst._query = 2
        qry = squel.select()
        @inst.fromQuery(['test', 'one', 'two'], qry)
        assert.same qry, @inst._query
        assert.same ['test', 'one', 'two'], @inst._fields
    'buildStr()':
      'needs fromQuery() to have been called': ->
        @inst._fields = []
        assert.same "", @inst.buildStr()
      'default': ->
        qry = squel.select().from('mega')
        @inst.fromQuery ['test', 'one', 'two'], qry
        assert.same "(test, one, two) (#{qry.toString()})", @inst.buildStr()
    'buildParam()':
      'needs fromQuery() to have been called': ->
        @inst._fields = []
        expected = {
          text: '',
          values: []
        }
        assert.same expected, @inst.buildParam()
      'default': ->
        # The inner query's toParam() output is embedded: its text wrapped in
        # parentheses and its values passed through unchanged.
        qry = squel.select().from('mega')
        @inst.fromQuery ['test', 'one', 'two'], qry
        test.mocker.stub qry, 'toParam', ->
          {
            text: 'blah',
            values: [1,2,3]
          }
        expected = {
          text: '(test, one, two) (blah)',
          values: [1,2,3]
        }
        assert.same expected, @inst.buildParam()
  # Tests for DistinctBlock, which renders the DISTINCT keyword of a SELECT.
  'DistinctBlock':
    beforeEach: ->
      @cls = squel.cls.DistinctBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same false, @inst.useDistinct
    'distinct()':
      # NOTE(review): test name "sets the flat" is presumably a typo for
      # "sets the flag"; left as-is since it is a runtime test key.
      'sets the flat': ->
        @inst.distinct()
        assert.same true, @inst.useDistinct
        # a second call is idempotent
        @inst.distinct()
        assert.same true, @inst.useDistinct
    'buildStr()':
      'output nothing if not set': ->
        @inst.useDistinct = false
        assert.same '', @inst.buildStr()
      'output DISTINCT if set': ->
        @inst.useDistinct = true
        assert.same 'DISTINCT', @inst.buildStr()
  # Tests for GroupByBlock, which renders the GROUP BY clause.
  'GroupByBlock':
    beforeEach: ->
      @cls = squel.cls.GroupByBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.groups
    'group()':
      'adds to list': ->
        @inst.group('field1')
        @inst.group('field2')
        assert.same ['field1', 'field2'], @inst.groups
      'sanitizes inputs': ->
        # sanitized value ('_f') is what ends up stored, not the raw input
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        @inst.group('field1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1'
        assert.same ['_f'], @inst.groups
    'buildStr()':
      'output nothing if no fields set': ->
        @inst.groups = []
        assert.same '', @inst.buildStr()
      'output GROUP BY': ->
        @inst.group('field1')
        @inst.group('field2')
        assert.same 'GROUP BY field1, field2', @inst.buildStr()
  # Tests for OffsetBlock, which renders the OFFSET clause.
  'OffsetBlock':
    beforeEach: ->
      @cls = squel.cls.OffsetBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.offsets
    'offset()':
      'set value': ->
        @inst.offset(1)
        assert.same 1, @inst.offsets
        # a later call replaces the previous offset
        @inst.offset(22)
        assert.same 22, @inst.offsets
      'sanitizes inputs': ->
        sanitizeSpy = test.mocker.stub @cls.prototype, '_sanitizeLimitOffset', -> return 234
        @inst.offset(23)
        assert.ok sanitizeSpy.calledWithExactly 23
        assert.same 234, @inst.offsets
    'buildStr()':
      'output nothing if not set': ->
        @inst.offsets = null
        assert.same '', @inst.buildStr()
      'output OFFSET': ->
        @inst.offset(12)
        assert.same 'OFFSET 12', @inst.buildStr()
  # Tests for WhereBlock: WHERE clause construction, '?' placeholder
  # substitution, nested queries/expressions, and parameterized output.
  'WhereBlock':
    beforeEach: ->
      @cls = squel.cls.WhereBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy: true
    'initial field values': ->
      assert.same [], @inst.wheres
    'where()':
      'adds to list': ->
        @inst.where('a = 1')
        @inst.where('b = 2 OR c = 3')
        # each condition is stored as {text, values}
        assert.same [
          {
            text: 'a = 1'
            values: []
          }
          {
            text: 'b = 2 OR c = 3'
            values: []
          }
        ], @inst.wheres
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeCondition', -> return '_c'
        @inst.where('a = 1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'a = 1'
        assert.same [{
          text: '_c'
          values: []
        }], @inst.wheres
      'handles variadic arguments': ->
        sanitizeStub = test.mocker.stub @cls.prototype, '_sanitizeValue', _.identity
        substitutes = ['hello', [1, 2, 3]]
        @inst.where.apply @inst, ['a = ? and b in ?'].concat(substitutes)
        # array substitutes are flattened and sanitized one element at a time
        expectedValues = _.flatten substitutes
        for expectedValue, index in expectedValues
          assert.ok sanitizeStub.getCall(index).calledWithExactly expectedValue
        # a '?' bound to an array expands to one placeholder per element
        assert.same [
          {
            text: 'a = ? and b in (?, ?, ?)'
            values: ['hello', 1, 2, 3]
          }
        ], @inst.wheres
    'buildStr()':
      'output QueryBuilder ': ->
        # a nested QueryBuilder is rendered inline, fully stringified
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        @inst.where('a in ?', subquery)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = 10))) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'output nothing if no conditions set': ->
        @inst.wheres = []
        assert.same '', @inst.buildStr()
      'output WHERE ': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a = 1) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'Fix for hiddentao/squel#64': ->
        # regression: buildStr() must be repeatable without mutating state
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        # second time it should still work
        @inst.buildStr()
        @inst.buildStr()
        assert.same 'WHERE (a = 1) AND (b = 2 OR c = 3) AND (d in (4, 5, 6))', @inst.buildStr()
      'formats values ': ->
        formatValueStub = test.mocker.stub @cls.prototype, '_formatValue', (a) -> '[' + a + ']'
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same 'WHERE (a = [1]) AND (b = [2] OR c = [3]) AND (d in ([4], [5], [6]))', @inst.buildStr()
    'buildParam()':
      'output QueryBuilder ': ->
        # nested query contributes its own placeholders and values
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        @inst.where('a in ?', subquery)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same { text: 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = ?))) AND (b = ? OR c = ?) AND (d in (?, ?, ?))', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'output QueryBuilder expr': ->
        subquery = new squel.select()
        subquery.field('col1').from('table1').where('field1 = ?', 10)
        expr = squel.expr().and('a in ?',subquery)
          .and_begin().or('b = ?', 2).or('c = ?', 3).end().and_begin()
          .and('d in ?', [4, 5, 6]).end()
        @inst.where(expr)
        #assert.same { text: '', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
        assert.same { text: 'WHERE (a in (SELECT col1 FROM table1 WHERE (field1 = ?)) AND (b = ? OR c = ?) AND (d in (?, ?, ?)))', values: [10, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'output nothing if no conditions set': ->
        @inst.wheres = []
        assert.same { text: '', values: [] }, @inst.buildParam()
      'output WHERE ': ->
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same { text: 'WHERE (a = ?) AND (b = ? OR c = ?) AND (d in (?, ?, ?))', values: [1, 2, 3, 4, 5, 6] }, @inst.buildParam()
      'formats value types as params': ->
        # parameterized output must use _formatValueAsParam, never _formatValue
        formatValueSpy = test.mocker.spy @cls.prototype, '_formatValue'
        test.mocker.stub @cls.prototype, '_formatValueAsParam', (a) -> '[' + a + ']'
        @inst.where('a = ?', 1)
        @inst.where('b = ? OR c = ?', 2, 3)
        @inst.where('d in ?', [4, 5, 6])
        assert.same {
          text: 'WHERE (a = ?) AND (b = ? OR c = ?) AND (d in (?, ?, ?))',
          values: ['[1]', '[2]', '[3]', '[4]', '[5]', '[6]']
        }, @inst.buildParam()
        assert.ok formatValueSpy.notCalled
  # Tests for OrderByBlock, which renders the ORDER BY clause.  `dir: true`
  # means ASC, `dir: false` means DESC; extra args become bound values.
  'OrderByBlock':
    beforeEach: ->
      @cls = squel.cls.OrderByBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.orders
      assert.same [], @inst._values
    'order()':
      'adds to list': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        expected = [
          {
            field: 'field1',
            dir: true
          },
          {
            field: 'field2',
            dir: false
          },
          {
            field: 'field3',
            dir: true
          }
        ]
        assert.same expected, @inst.orders
      'sanitizes inputs': ->
        sanitizeFieldSpy = test.mocker.stub @cls.prototype, '_sanitizeField', -> return '_f'
        @inst.order('field1')
        assert.ok sanitizeFieldSpy.calledWithExactly 'field1'
        assert.same [ { field: '_f', dir: true } ], @inst.orders
      'saves additional values': ->
        # trailing arguments are kept aside for parameterized output
        @inst.order('field1', false, 1.2, 4)
        assert.same [ { field: 'field1', dir: false } ], @inst.orders
        assert.same [1.2, 4], @inst._values
    'buildStr()':
      'output nothing if nothing set': ->
        @inst.orders = []
        assert.same '', @inst.buildStr()
      'output ORDER BY': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        assert.same 'ORDER BY field1 ASC, field2 DESC, field3 ASC', @inst.buildStr()
    'buildParam()':
      'empty': ->
        @inst.orders = []
        assert.same { text: '', values: [] }, @inst.buildParam()
      'default': ->
        @inst.order('field1')
        @inst.order('field2', false)
        @inst.order('field3', true)
        assert.same { text: 'ORDER BY field1 ASC, field2 DESC, field3 ASC', values: [] }, @inst.buildParam()
      'with values': ->
        @inst.order('field3', true, 1.2, 5)
        assert.same { text: 'ORDER BY field3 ASC', values: [1.2, 5] }, @inst.buildParam()
  # Tests for LimitBlock, which renders the LIMIT clause.
  'LimitBlock':
    beforeEach: ->
      @cls = squel.cls.LimitBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same null, @inst.limits
    'limit()':
      'set value': ->
        @inst.limit(1)
        assert.same 1, @inst.limits
        # a later call replaces the previous limit
        @inst.limit(22)
        assert.same 22, @inst.limits
      'sanitizes inputs': ->
        sanitizeSpy = test.mocker.stub @cls.prototype, '_sanitizeLimitOffset', -> return 234
        @inst.limit(23)
        assert.ok sanitizeSpy.calledWithExactly 23
        assert.same 234, @inst.limits
    'buildStr()':
      'output nothing if not set': ->
        @inst.limits = null
        assert.same '', @inst.buildStr()
      'output nothing if set to 0': ->
        # LIMIT 0 is treated as "no limit clause", not "zero rows"
        @inst.limit(0)
        assert.same '', @inst.buildStr()
      'output LIMIT': ->
        @inst.limit(12)
        assert.same 'LIMIT 12', @inst.buildStr()
  # Tests for JoinBlock: INNER/LEFT/RIGHT/OUTER/FULL/CROSS joins against plain
  # table names and nested QueryBuilders, with optional alias and ON condition.
  'JoinBlock':
    beforeEach: ->
      @cls = squel.cls.JoinBlock
      @inst = new @cls()
    'instanceof of Block': ->
      assert.instanceOf @inst, squel.cls.Block
    'calls base constructor': ->
      spy = test.mocker.spy(squel.cls.Block.prototype, 'constructor')
      @inst = new @cls
        dummy: true
      assert.ok spy.calledWithExactly
        dummy:true
    'initial field values': ->
      assert.same [], @inst.joins
    'join()':
      'adds to list': ->
        @inst.join('table1')
        @inst.join('table2', null, 'b = 1', 'LEFT')
        @inst.join('table3', 'alias3', 'c = 1', 'RIGHT')
        @inst.join('table4', 'alias4', 'd = 1', 'OUTER')
        @inst.join('table5', 'alias5', null, 'CROSS')
        # join type defaults to INNER; aliases are stored back-quoted
        expected = [
          {
            type: 'INNER',
            table: 'table1',
            alias: null,
            condition: null
          },
          {
            type: 'LEFT',
            table: 'table2',
            alias: null,
            condition: 'b = 1'
          },
          {
            type: 'RIGHT',
            table: 'table3',
            alias: '`alias3`',
            condition: 'c = 1'
          },
          {
            type: 'OUTER',
            table: 'table4',
            alias: '`alias4`',
            condition: 'd = 1'
          },
          {
            type: 'CROSS',
            table: 'table5',
            alias: '`alias5`',
            condition: null
          }
        ]
        assert.same expected, @inst.joins
      'sanitizes inputs': ->
        sanitizeTableSpy = test.mocker.stub @cls.prototype, '_sanitizeTable', -> return '_t'
        sanitizeAliasSpy = test.mocker.stub @cls.prototype, '_sanitizeTableAlias', -> return '_a'
        sanitizeConditionSpy = test.mocker.stub @cls.prototype, '_sanitizeCondition', -> return '_c'
        @inst.join('table1', 'alias1', 'a = 1')
        # second arg `true` allows nestable (sub-query) tables
        assert.ok sanitizeTableSpy.calledWithExactly 'table1', true
        assert.ok sanitizeAliasSpy.calledWithExactly 'alias1'
        assert.ok sanitizeConditionSpy.calledWithExactly 'a = 1'
        expected = [
          {
            type: 'INNER',
            table: '_t',
            alias: '_a',
            condition: '_c'
          }
        ]
        assert.same expected, @inst.joins
      'nested queries': ->
        # QueryBuilder instances are stored as-is in place of table names
        inner1 = squel.select()
        inner2 = squel.select()
        inner3 = squel.select()
        inner4 = squel.select()
        inner5 = squel.select()
        inner6 = squel.select()
        @inst.join(inner1)
        @inst.join(inner2, null, 'b = 1', 'LEFT')
        @inst.join(inner3, 'alias3', 'c = 1', 'RIGHT')
        @inst.join(inner4, 'alias4', 'd = 1', 'OUTER')
        @inst.join(inner5, 'alias5', 'e = 1', 'FULL')
        @inst.join(inner6, 'alias6', null, 'CROSS')
        expected = [
          {
            type: 'INNER',
            table: inner1,
            alias: null,
            condition: null
          },
          {
            type: 'LEFT',
            table: inner2,
            alias: null,
            condition: 'b = 1'
          },
          {
            type: 'RIGHT',
            table: inner3,
            alias: '`alias3`',
            condition: 'c = 1'
          },
          {
            type: 'OUTER',
            table: inner4,
            alias: '`alias4`',
            condition: 'd = 1'
          },
          {
            type: 'FULL',
            table: inner5,
            alias: '`alias5`',
            condition: 'e = 1'
          },
          {
            type: 'CROSS',
            table: inner6,
            alias: '`alias6`',
            condition: null
          }
        ]
        assert.same expected, @inst.joins
    'left_join()':
      'calls join()': ->
        # left_join is sugar for join(..., 'LEFT')
        joinSpy = test.mocker.stub(@inst, 'join')
        @inst.left_join('t', 'a', 'c')
        assert.ok joinSpy.calledOnce
        assert.ok joinSpy.calledWithExactly('t', 'a', 'c', 'LEFT')
    'buildStr()':
      'output nothing if nothing set': ->
        @inst.joins = []
        assert.same '', @inst.buildStr()
      'output JOINs': ->
        @inst.join('table1')
        @inst.join('table2', null, 'b = 1', 'LEFT')
        @inst.join('table3', 'alias3', 'c = 1', 'RIGHT')
        @inst.join('table4', 'alias4', 'd = 1', 'FULL')
        @inst.join('table5', 'alias5', null, 'CROSS')
        assert.same 'INNER JOIN table1 LEFT JOIN table2 ON (b = 1) RIGHT JOIN table3 `alias3` ON (c = 1) FULL JOIN table4 `alias4` ON (d = 1) CROSS JOIN table5 `alias5`', @inst.buildStr()
      'output JOINs with nested query': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        inner3 = squel.select().from('3')
        inner4 = squel.select().from('4')
        inner5 = squel.select().from('5')
        @inst.join(inner1)
        @inst.join(inner2, null, 'b = 1', 'LEFT')
        @inst.join(inner3, 'alias3', 'c = 1', 'RIGHT')
        @inst.join(inner4, 'alias4', 'e = 1', 'FULL')
        @inst.join(inner5, 'alias5', null, 'CROSS')
        assert.same 'INNER JOIN (SELECT * FROM 1) LEFT JOIN (SELECT * FROM 2) ON (b = 1) RIGHT JOIN (SELECT * FROM 3) `alias3` ON (c = 1) FULL JOIN (SELECT * FROM 4) `alias4` ON (e = 1) CROSS JOIN (SELECT * FROM 5) `alias5`', @inst.buildStr()
      'QueryBuilder in ON condition expr()': ->
        # a QueryBuilder bound inside an expression's condition is stringified
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        expr = squel.expr()
          .and('field1 = ?',inner2)
        @inst.join(inner1, null, expr)
        assert.same 'INNER JOIN (SELECT * FROM 1) ON (field1 = (SELECT * FROM 2))', @inst.buildStr()
    'buildParam()':
      'QueryBuilder in ON condition expr()': ->
        inner1 = squel.select().from('1')
        inner2 = squel.select().from('2')
        expr = squel.expr()
          .and('field1 = ?',inner2)
        @inst.join(inner1, null, expr)
        # sub-query in the condition is inlined, so no bound values remain
        assert.same { text: 'INNER JOIN (SELECT * FROM 1) ON (field1 = (SELECT * FROM 2))', values: [] }, @inst.buildParam()
module?.exports[require('path').basename(__filename)] = test
|
[
{
"context": "f type.isObject answers\n # TODO: 2008-11-24 <brian@wesabe.com> -- this is only here until the new style (Array)",
"end": 7421,
"score": 0.9999277591705322,
"start": 7405,
"tag": "EMAIL",
"value": "brian@wesabe.com"
}
] | application/chrome/content/wesabe/download/Player.coffee | wesabe/ssu | 28 | wesabe.provide 'fi-scripts'
extend = require 'lang/extend'
date = require 'lang/date'
array = require 'lang/array'
dateForElement = (require 'dom/date').forElement
{trim} = require 'lang/string'
func = require 'lang/func'
type = require 'lang/type'
{uuid} = require 'lang/UUID'
prefs = require 'util/prefs'
inspect = require 'util/inspect'
Dir = require 'io/Dir'
File = require 'io/File'
{download} = require 'io/Downloader'
privacy = require 'util/privacy'
Page = require 'dom/Page'
Browser = require 'dom/Browser'
UserAgent = require 'xul/UserAgent'
Bridge = require 'dom/Bridge'
{Pathway} = require 'xpath'
{EventEmitter} = require 'events2'
{sharedEventEmitter} = require 'events2'
{tryThrow, tryCatch} = require 'util/try'
# Fallback watchdog durations (in seconds) used by Player#setErrorTimeout when
# no "wesabe.download.player.timeout.<type>" preference overrides them.
DEFAULT_TIMEOUTS =
  action: 60 # 1m
  global: 300 # 5m
  security: 180 # 3m
# Player drives a scripted browsing session against a financial institution's
# website.  Concrete FI scripts are produced via Player.register/Player.create,
# which merge module-supplied dispatchers, filters, elements, and actions into
# a Player subclass.  Many script callbacks are invoked "with magic scope"
# (see callWithMagicScope): names like `browser`, `page`, `e`, `options`,
# `tmp`, `reload` are injected into the callback's scope rather than passed
# as normal arguments.
class Player extends EventEmitter
  # Builds a Player subclass from `params` and publishes it under
  # "fi-scripts.<fid>" so wesabe.require can locate it later.
  @register: (params) ->
    @create params, (klass) ->
      # make sure we put it where wesabe.require expects it
      wesabe.provide "fi-scripts.#{params.fid}", klass
  # Creates and returns a Player subclass described by `params` plus any
  # modules named in params.includes.  `callback`, if given, receives the
  # bare subclass before module data (dispatchers, elements, actions, hooks,
  # filters) is merged in.
  @create: (params, callback) ->
    class klass extends Player
      # the Wesabe Financial Institution ID (e.g. com.Chase)
      @fid: params.fid
      # the name of the Financial Institution (e.g. Chase)
      @org: params.org
      # ofx info in case this is a hybrid
      @ofx: params.ofx
      # the elements we need to recognize
      @elements: {}
      # pass .fid and .org through to the class
      @::__defineGetter__ 'fid', -> @constructor.fid
      @::__defineGetter__ 'org', -> @constructor.org
      canHandleGoal: params.canHandleGoal or (-> true)
    callback?(klass)
    params.__module__ = klass.__module__
    # the method that decides based on the state of the job and page what to do next
    klass::dispatches = []
    # any dispatch filters
    klass::filters = []
    # any download callbacks
    klass::afterDownloadCallbacks = []
    # after last goal callbacks
    klass::afterLastGoalCallbacks = []
    # any alert callbacks
    klass::alertReceivedCallbacks = []
    # any confirm callbacks
    klass::confirmReceivedCallbacks = []
    # any open callbacks
    klass::openReceivedCallbacks = []
    # params itself acts as the first module; includes are loaded after it
    modules = [params]
    if params.includes
      for include in params.includes
        try
          modules.push wesabe.require(include)
        catch ex
          throw new Error "Error while requiring #{include} -- check that the file exists and has the correct 'provide' line"
    # dispatchFrames: off
    if params.dispatchFrames is off
      klass::filters.push
        name: 'frame blocker'
        test: (browser, page) ->
          if page.framed
            logger.info "skipping frame page load: ", page.title
            return false
    if params.filter
      klass::filters.push
        name: 'global'
        test: params.filter
    # userAgent: "Mozilla/4.0 HappyFunBrowser"
    if params.userAgent
      klass::userAgent = params.userAgent
    # userAgentAlias: "Firefox"
    if params.userAgentAlias
      klass::userAgent = UserAgent.getByNamedAlias(params.userAgentAlias)
    # merge every module's contributions into the subclass
    for module in modules
      if module.dispatch
        klass::dispatches.push
          name: module.__module__.name
          callback: module.dispatch
      if module.elements
        extend klass.elements, module.elements, merge: on
      if module.actions
        extend klass::, module.actions
      if module.extensions
        extend klass::, module.extensions
      if module.afterDownload
        klass::afterDownloadCallbacks.push module.afterDownload
      if module.afterLastGoal
        klass::afterLastGoalCallbacks.push module.afterLastGoal
      if module.alertReceived
        klass::alertReceivedCallbacks.push module.alertReceived
      if module.confirmReceived
        klass::confirmReceivedCallbacks.push module.confirmReceived
      if module.openReceived
        klass::openReceivedCallbacks.push module.openReceived
      if module.filter
        klass::filters.push
          name: module.__module__.name
          test: module.filter
    return klass
  # Begins the session: applies the script's user agent, wires page-load and
  # download events, starts the global watchdog, and runs the 'main' action.
  # `answers` holds credential/security-question answers keyed by question.
  start: (answers, browser) ->
    if @userAgent
      UserAgent.set @userAgent
    else
      UserAgent.revertToDefault()
    # set up the callbacks for page load and download done
    browser.addEventListener 'DOMContentLoaded', (evt) =>
      @onDocumentLoaded Browser.wrap(browser), Page.wrap(evt.target)
    , no
    sharedEventEmitter.on 'downloadSuccess', (data, suggestedFilename, contentType) =>
      @job.update 'account.download.success'
      @setErrorTimeout 'global'
      tryThrow 'Player#downloadSuccess', (log) =>
        folder = Dir.profile.child('statements')
        unless folder.exists
          folder.create()
        # persist the downloaded bytes under a fresh uuid filename
        statement = folder.child(uuid()).asFile
        statement.write data
        metadata = @job.nextDownloadMetadata or {}
        delete @job.nextDownloadMetadata
        # restore the browser and page the download was triggered from, if any
        if metadata.browser
          @browser = metadata.browser
          delete metadata.browser
        if metadata.page
          @page = metadata.page
          delete metadata.page
        @job.recordSuccessfulDownload statement, extend({suggestedFilename, contentType}, metadata)
        @onDownloadSuccessful @browser, @page
    sharedEventEmitter.on 'downloadFail', =>
      logger.warn 'Failed to download a statement! This is bad, but a failed job is worse, so we press on'
      @job.update 'account.download.failure'
      @setErrorTimeout 'global'
      # NOTE(review): intentionally runs the *success* continuation so the
      # script keeps going after a failed statement download — confirm.
      @onDownloadSuccessful @browser, @page
    @setErrorTimeout 'global'
    # start the security question timeout when the job is suspended
    @job.on 'suspend', =>
      @clearErrorTimeout 'action'
      @clearErrorTimeout 'global'
      @setErrorTimeout 'security'
    @job.on 'resume', =>
      @clearErrorTimeout 'security'
      @setErrorTimeout 'global'
    @answers = answers
    @runAction 'main', Browser.wrap(browser)
  # Advances the job to its next goal.
  nextGoal: ->
    @job.nextGoal()
  # Runs every registered afterLastGoal callback as an action.
  onLastGoalFinished: ->
    logger.info 'Finished all goals, running callbacks'
    for callback in @afterLastGoalCallbacks
      @runAction callback, @browser, @page
  # Tears down all outstanding watchdog timers at the end of a session.
  finish: ->
    @clearErrorTimeout 'action'
    @clearErrorTimeout 'global'
    @clearErrorTimeout 'security'
  # Runs one named action (or a function) against browser/page, recording it
  # in @history and (re)arming the 'action' watchdog.  Throws if `name` does
  # not resolve to a function on this instance.
  runAction: (name, browser, page, scope) ->
    module = @constructor.fid
    # accept either a function or the name of a method on this instance
    [fn, name] = if type.isFunction name
      [name, name.name or '(anonymous)']
    else
      [@[name], name]
    unless fn
      throw new Error "Cannot find action '#{name}'! Typo? Forgot to include a file?"
    retval = tryThrow "#{module}##{name}", (log) =>
      url = page?.url
      title = page?.title
      @setErrorTimeout 'action'
      @history.push
        name: name
        url: url
        title: title
      logger.info 'History is ', (hi.name for hi in @history).join(' -> ')
      @callWithAppropriateScope fn, browser, page, extend({log}, scope or {})
    return retval
  # `elements` (and its shorthand `e`) expose the merged element xpaths
  # declared by the FI script modules.
  @::__defineGetter__ 'elements', ->
    @constructor.elements
  @::__defineGetter__ 'e', ->
    @elements
  # Merges newly supplied answers (array of {key, value} or a plain object)
  # into @answers, then re-runs document-load handling on the current page.
  resume: (answers) ->
    if type.isArray answers
      for {key, value} in answers
        @answers[key] = value
    else if type.isObject answers
      # TODO: 2008-11-24 <brian@wesabe.com> -- this is only here until the new style (Array) is in PFC and SSU Service
      extend @answers, answers
    @onDocumentLoaded @browser, @page
  # Returns a proxy that forwards arbitrary method calls to runAction with
  # the given browser/page bound in (see ActionProxy).
  getActionProxy: (browser, page) ->
    new ActionProxy this, browser, page
  # Returns the job object exposed to magic-scope callbacks.
  getJobProxy: ->
    @job
  # Records/initiates a statement download.  Three call shapes:
  #   download(url, metadata)  — fetch `url`, then record the result
  #   download(metadata, cb)   — pre-register metadata for the *next*
  #                              download, then invoke cb to trigger it
  #   download(metadata)       — metadata.data holds the bytes; write and
  #                              record them immediately
  download: (url, metadata) ->
    # hang on to the current browser and page so we can reload with the right context
    browser = @browser
    page = @page
    newStatementFile = =>
      folder = Dir.profile.child('statements')
      folder.create() unless folder.exists
      return folder.child(uuid())
    # allow pre-registering information about the next download
    if type.isFunction metadata
      callback = metadata
      metadata = url
      url = null
      metadata.browser = browser
      metadata.page = page
      @job.nextDownloadMetadata = metadata
      callback()
      return
    else if metadata is undefined
      metadata = url
      url = null
      unless metadata.data
        throw new Error "Expected metadata #{metadata} to have data to write"
      statement = newStatementFile()
      statement.write metadata.data
      delete metadata.data
      @job.recordSuccessfulDownload statement, metadata
      @onDownloadSuccessful browser, page
      return
    url = privacy.untaint url
    metadata = extend {url}, (metadata or {})
    tryThrow "Player#download(#{url})", =>
      download url, newStatementFile(),
        success: (path, suggestedFilename, contentType) =>
          @job.recordSuccessfulDownload path, extend({suggestedFilename, contentType}, metadata)
          @onDownloadSuccessful browser, page
        failure: =>
          @job.recordFailedDownload metadata
          # failure still continues the script via the "successful" hook
          @onDownloadSuccessful browser, page
  # Public: Answers whatever security questions are on the page by
  # using the xpaths given in e.security.
  #
  # NOTE: Called with magic scope!
  #
  answerSecurityQuestions: ->
    # `browser`/`page` here come from the injected magic scope, falling back
    # to the instance's current browser/page
    @answerSecurityQuestionsWithoutMagicScope browser ? @browser, page ? @page
  # Internal: Answers whatever security questions are on the page by
  # using the xpaths given in e.security.
  #
  # browser - The context Browser.
  # page    - The context Page.
  #
  # Returns true when the form was filled and submitted, false when the page
  # didn't match or the job had to be suspended for missing answers.
  answerSecurityQuestionsWithoutMagicScope: (browser, page) ->
    # these are here because this function is called with magic scope
    # and therefore won't see the variables we defined above
    questions = page.select @e.security.questions
    qanswers = page.select @e.security.answers
    if questions.length isnt qanswers.length
      logger.error "Found ", questions.length, " security questions, but ",
        qanswers.length, " security question answers to fill"
      logger.error "questions = ", questions
      logger.error "qanswers = ", qanswers
      return false
    if questions.length is 0
      logger.error "Failed to find any security questions"
      return false
    questions = (trim page.text(q) for q in questions)
    logger.info "Found security questions: ", questions
    questions = privacy.untaint questions
    # collect any questions we don't have answers for; they will be sent
    # back to the user via job.suspend
    data = questions: []
    for question, i in questions
      answer = @answers[question]
      element = qanswers[i]
      if answer
        page.fill element, answer
      else
        logger.debug "element = ", element, " -- element.type = ", element.type
        data.questions.push
          key: question
          label: question
          persistent: true
          type: privacy.untaint(element.type) or "text"
    if data.questions.length
      @job.suspend 'suspended.missing-answer.auth.security', data
      return false
    @job.update 'auth.security'
    # choose to bypass the security questions if we can
    page.check @e.security.setCookieCheckbox if @e.security.setCookieCheckbox
    page.fill @e.security.setCookieSelect, @e.security.setCookieOption if @e.security.setCookieSelect
    # submit the form
    page.click @e.security.continueButton
    return true
  #
  # Fills in the date range for a download based on a lower bound.
  #
  # NOTE: This is an action and may be called with action.fillDateRange().
  # Be warned: because this is an action _it is called with magic scope_.
  #
  # ==== Options (options)
  # :since<Number, null>::
  #   Time of the lower bound to use for the date range (in ms since epoch).
  #
  # @public
  #
  fillDateRange: ->
    @fillDateRangeWithoutMagicScope @browser, @page
  # Internal: Fills out the date form fields for a download.
  # NOTE(review): `options` below is neither a parameter nor a local — it
  # presumably resolves via the injected magic scope when invoked as an
  # action; calling this method directly would raise a ReferenceError.
  # Confirm against callers.
  fillDateRangeWithoutMagicScope: (browser, page) ->
    formatString = @elements.download.date.format or 'MM/dd/yyyy'
    opts = @elements.download.date
    fromEl = privacy.untaint page.find(opts.from)
    toEl = privacy.untaint page.find(opts.to)
    # resolve a default that may be a constant or a function of the other end
    getDefault = (defaultValue, existing) =>
      if type.isFunction defaultValue
        defaultValue = defaultValue(existing)
      date.parse(defaultValue) if defaultValue
    if toEl
      to = dateForElement(toEl, formatString)
      # use default or today's date if we can't get a date from the field
      to.date ||= getDefault(opts.defaults && opts.defaults.to) or new Date()
      logger.info "Adjusting date upper bound: ", to.date
    if fromEl
      # if there's a lower bound, choose a week before it to ensure some overlap
      since = options.since and (options.since - 7 * date.DAYS)
      # get a date if there's already one in the field
      from = dateForElement fromEl, formatString
      if from.date and since
        # choose the most recent of the pre-populated date and the lower bound
        from.date = new Date Math.max(since, from.date.getTime())
      else if since
        # choose the lower bound
        from.date = new Date since
      else if to
        # pick the default or an 89 day window
        from.date = getDefault(opts.defaults and opts.defaults.from, to: to.date) or
          date.add(to.date, -89 * date.DAYS)
      logger.info "Adjusting date lower bound: ", from.date
  # Clears the current account and re-dispatches the page.
  # NOTE(review): uses bare `tmp` and `reload` (not `@tmp`/@triggerDispatch),
  # so this only works when called with magic scope — confirm.
  nextAccount: ->
    delete tmp.account
    reload()
  # Logs an optional warning and drops the in-progress account from @tmp.
  skipAccount: (args...) ->
    logger.warn args... if args.length
    delete @tmp.account
  # Arms (or re-arms) the watchdog timer of the given type ('action',
  # 'global', or 'security').  When it fires with the job still running, the
  # page is dumped for diagnosis and the job fails with a 504.
  setErrorTimeout: (timeoutType) ->
    duration = (prefs.get "wesabe.download.player.timeout.#{timeoutType}") or DEFAULT_TIMEOUTS[timeoutType]
    tt = @_timeouts
    tt ||= @_timeouts = {}
    @clearErrorTimeout timeoutType
    logger.debug "Timeout ", timeoutType, " set (", duration, " seconds)"
    tt[timeoutType] = setTimeout =>
      @emit 'timeout', timeoutType
      return if @job.done
      logger.error "Timeout ", timeoutType, " (", duration, " seconds) reached, abandoning job"
      tryCatch "Player#setErrorTimeout(page dump)", =>
        @page?.dumpPrivately()
      @job.fail 504, "timeout.#{timeoutType}"
    , duration * 1000
  # Cancels the watchdog timer of the given type, if armed.
  # NOTE(review): the stored handle is not deleted after clearTimeout, so a
  # second clear re-logs "cleared" and clears a stale id (harmless but
  # misleading) — confirm whether deleting it is safe.
  clearErrorTimeout: (timeoutType) ->
    if @_timeouts?[timeoutType]
      logger.debug "Timeout ", timeoutType, " cleared"
      clearTimeout @_timeouts[timeoutType]
  # DOMContentLoaded handler: bridges window.alert/confirm/open on the page
  # into logged callbacks (confirm auto-answers YES, open is suppressed),
  # then consults filters and triggers the dispatch chain.
  onDocumentLoaded: (browser, page) ->
    return if @job.done or @job.paused
    module = @constructor.fid
    # log when alert and confirm are called
    new Bridge page, (bridge) =>
      bridge.evaluate ->
        # evaluated on the page
        window.alert = (message) ->
          callback 'alert', message
          return true
        window.confirm = (message) ->
          callback 'confirm', message
          return true
        window.open = (url) ->
          callback 'open', url
          return false
      , (data) =>
        # evaluated here
        unless data
          logger.debug "Bridge connected"
          return
        [messageType, message] = data
        formattedMessage = inspect message, undefined, undefined, color: prefs.get('wesabe.logger.color') ? on
        switch messageType
          when 'alert'
            logger.info messageType, ' called with message=', formattedMessage
          when 'confirm'
            logger.info messageType, ' called with message=', formattedMessage, ', automatically answered YES'
          when 'open'
            logger.info messageType, ' called with url=', formattedMessage
        # fan out to the script's {alert,confirm,open}Received callbacks;
        # callbacks declaring >2 args get (browser, page, message), others
        # are run with magic scope
        callbacks = @["#{messageType}ReceivedCallbacks"]
        if callbacks
          for callback in callbacks
            names = func.argNames callback
            if names.length > 2
              callback.call @, browser, page, message
            else
              @callWithMagicScope callback, browser, page, extend({message, logger: (require 'Logger').rootLogger}), message
    unless @shouldDispatch browser, page
      logger.info 'skipping document load'
      return
    @triggerDispatch browser, page
  # Runs each registered dispatch callback (after a 2s settle delay) until
  # one returns false (halting the chain) or the job ends.
  triggerDispatch: (browser, page) ->
    module = @constructor.fid
    browser ||= @browser
    page ||= @page
    logger.info 'url=', page.url
    logger.info 'title=', page.title
    # these should not be used inside the FI scripts
    @browser = browser
    @page = page
    setTimeout =>
      for dispatch in @dispatches
        return if @job.done or @job.paused
        result = tryThrow "#{module}#dispatch(#{dispatch.name})", (log) =>
          @callWithAppropriateScope dispatch.callback, browser, page, {log}
        if result is false
          logger.info "dispatch chain halted"
          return
    , 2000
  # Runs every registered afterDownload callback as an action.
  onDownloadSuccessful: (browser, page) ->
    for callback in @afterDownloadCallbacks
      @runAction callback, browser, page
  # Polls each filter: the first to return a boolean decides whether this
  # page load is dispatched; with no decisive vote, dispatch proceeds.
  shouldDispatch: (browser, page) ->
    for filter in @filters
      result = tryCatch "#{@constructor.fid}#filter(#{filter.name})", (log) =>
        switch r = @callWithAppropriateScope filter.test, browser, page, {log}
          when true
            log.debug "forcing dispatch"
          when false
            log.debug "aborting dispatch"
        return r
      # check for a definite answer
      return result if type.isBoolean result
    logger.debug "no filter voted to force or abort dispatch, so forcing dispatch by default"
    return true
  # Calls `fn` either conventionally (when it declares parameters, it gets
  # browser and page) or with magic scope (when it declares none).
  # NOTE(review): the conventional branch drops `scope` and `args` entirely
  # — presumably intentional, confirm.
  callWithAppropriateScope: (fn, browser, page, scope, args...) ->
    if func.argNames(fn).length > 0
      fn.call @, browser, page
    else
      @callWithMagicScope fn, browser, page, scope or {}
  # Invokes `fn` with the "magic scope": browser, page, e (elements),
  # answers, options, tmp, action/job proxies, skipAccount, reload,
  # download, bind, and loggers are injected as free names.  Entries in
  # `scope` override the defaults.
  callWithMagicScope: (fn, browser, page, scope, args...) ->
    log = scope.logger or scope.log or logger
    func.callWithScope fn, this, extend({
      browser
      page
      e: @constructor.elements
      answers: @answers
      options: @job.options
      tmp: @tmp
      action: @getActionProxy browser, page
      job: @getJobProxy()
      skipAccount: @skipAccount
      reload: => @triggerDispatch browser, page
      download: (args...) => @download args...
      bind: (args...) => Pathway.bind(args...)
      logger: log
      log: log
      isMagicScope: on
    }, scope or {}), args
  # Lazily-created list of {name, url, title} records of every action run.
  @::__defineGetter__ 'history', ->
    @_history ||= []
  # Lazily-created scratch space shared with magic-scope callbacks.
  @::__defineGetter__ 'tmp', ->
    @_tmp ||= {}
  # Loads the FI script registered under `fid` and returns a new instance.
  @build: (fid) ->
    tryThrow "download.Player.build(fid=#{fid})", (log) =>
      klass = tryThrow "loading fi-scripts.#{fid}", =>
        wesabe.require "fi-scripts.#{fid}"
      new klass(fid)
# ActionProxy forwards arbitrary method calls to player.runAction with a
# fixed browser/page, so magic-scope code can write `action.someAction()`.
# NOTE(review): relies on the non-standard, SpiderMonkey-only
# __noSuchMethod__ hook (this code targets Firefox/XULRunner), and the
# caller's `args` are received but never forwarded — confirm both.
class ActionProxy
  constructor: (@player, @browser, @page) ->
  __noSuchMethod__: (method, args) ->
    @player.runAction method, @browser, @page
module.exports = Player
| 71510 | wesabe.provide 'fi-scripts'
extend = require 'lang/extend'
date = require 'lang/date'
array = require 'lang/array'
dateForElement = (require 'dom/date').forElement
{trim} = require 'lang/string'
func = require 'lang/func'
type = require 'lang/type'
{uuid} = require 'lang/UUID'
prefs = require 'util/prefs'
inspect = require 'util/inspect'
Dir = require 'io/Dir'
File = require 'io/File'
{download} = require 'io/Downloader'
privacy = require 'util/privacy'
Page = require 'dom/Page'
Browser = require 'dom/Browser'
UserAgent = require 'xul/UserAgent'
Bridge = require 'dom/Bridge'
{Pathway} = require 'xpath'
{EventEmitter} = require 'events2'
{sharedEventEmitter} = require 'events2'
{tryThrow, tryCatch} = require 'util/try'
DEFAULT_TIMEOUTS =
action: 60 # 1m
global: 300 # 5m
security: 180 # 3m
class Player extends EventEmitter
@register: (params) ->
@create params, (klass) ->
# make sure we put it where wesabe.require expects it
wesabe.provide "fi-scripts.#{params.fid}", klass
@create: (params, callback) ->
class klass extends Player
# the Wesabe Financial Institution ID (e.g. com.Chase)
@fid: params.fid
# the name of the Financial Institution (e.g. Chase)
@org: params.org
# ofx info in case this is a hybrid
@ofx: params.ofx
# the elements we need to recognize
@elements: {}
# pass .fid and .org through to the class
@::__defineGetter__ 'fid', -> @constructor.fid
@::__defineGetter__ 'org', -> @constructor.org
canHandleGoal: params.canHandleGoal or (-> true)
callback?(klass)
params.__module__ = klass.__module__
# the method that decides based on the state of the job and page what to do next
klass::dispatches = []
# any dispatch filters
klass::filters = []
# any download callbacks
klass::afterDownloadCallbacks = []
# after last goal callbacks
klass::afterLastGoalCallbacks = []
# any alert callbacks
klass::alertReceivedCallbacks = []
# any confirm callbacks
klass::confirmReceivedCallbacks = []
# any open callbacks
klass::openReceivedCallbacks = []
modules = [params]
if params.includes
for include in params.includes
try
modules.push wesabe.require(include)
catch ex
throw new Error "Error while requiring #{include} -- check that the file exists and has the correct 'provide' line"
# dispatchFrames: off
if params.dispatchFrames is off
klass::filters.push
name: 'frame blocker'
test: (browser, page) ->
if page.framed
logger.info "skipping frame page load: ", page.title
return false
if params.filter
klass::filters.push
name: 'global'
test: params.filter
# userAgent: "Mozilla/4.0 HappyFunBrowser"
if params.userAgent
klass::userAgent = params.userAgent
# userAgentAlias: "Firefox"
if params.userAgentAlias
klass::userAgent = UserAgent.getByNamedAlias(params.userAgentAlias)
for module in modules
if module.dispatch
klass::dispatches.push
name: module.__module__.name
callback: module.dispatch
if module.elements
extend klass.elements, module.elements, merge: on
if module.actions
extend klass::, module.actions
if module.extensions
extend klass::, module.extensions
if module.afterDownload
klass::afterDownloadCallbacks.push module.afterDownload
if module.afterLastGoal
klass::afterLastGoalCallbacks.push module.afterLastGoal
if module.alertReceived
klass::alertReceivedCallbacks.push module.alertReceived
if module.confirmReceived
klass::confirmReceivedCallbacks.push module.confirmReceived
if module.openReceived
klass::openReceivedCallbacks.push module.openReceived
if module.filter
klass::filters.push
name: module.__module__.name
test: module.filter
return klass
start: (answers, browser) ->
if @userAgent
UserAgent.set @userAgent
else
UserAgent.revertToDefault()
# set up the callbacks for page load and download done
browser.addEventListener 'DOMContentLoaded', (evt) =>
@onDocumentLoaded Browser.wrap(browser), Page.wrap(evt.target)
, no
sharedEventEmitter.on 'downloadSuccess', (data, suggestedFilename, contentType) =>
@job.update 'account.download.success'
@setErrorTimeout 'global'
tryThrow 'Player#downloadSuccess', (log) =>
folder = Dir.profile.child('statements')
unless folder.exists
folder.create()
statement = folder.child(uuid()).asFile
statement.write data
metadata = @job.nextDownloadMetadata or {}
delete @job.nextDownloadMetadata
# restore the browser and page the download was triggered from, if any
if metadata.browser
@browser = metadata.browser
delete metadata.browser
if metadata.page
@page = metadata.page
delete metadata.page
@job.recordSuccessfulDownload statement, extend({suggestedFilename, contentType}, metadata)
@onDownloadSuccessful @browser, @page
sharedEventEmitter.on 'downloadFail', =>
logger.warn 'Failed to download a statement! This is bad, but a failed job is worse, so we press on'
@job.update 'account.download.failure'
@setErrorTimeout 'global'
@onDownloadSuccessful @browser, @page
@setErrorTimeout 'global'
# start the security question timeout when the job is suspended
@job.on 'suspend', =>
@clearErrorTimeout 'action'
@clearErrorTimeout 'global'
@setErrorTimeout 'security'
@job.on 'resume', =>
@clearErrorTimeout 'security'
@setErrorTimeout 'global'
@answers = answers
@runAction 'main', Browser.wrap(browser)
nextGoal: ->
@job.nextGoal()
onLastGoalFinished: ->
logger.info 'Finished all goals, running callbacks'
for callback in @afterLastGoalCallbacks
@runAction callback, @browser, @page
finish: ->
@clearErrorTimeout 'action'
@clearErrorTimeout 'global'
@clearErrorTimeout 'security'
runAction: (name, browser, page, scope) ->
module = @constructor.fid
[fn, name] = if type.isFunction name
[name, name.name or '(anonymous)']
else
[@[name], name]
unless fn
throw new Error "Cannot find action '#{name}'! Typo? Forgot to include a file?"
retval = tryThrow "#{module}##{name}", (log) =>
url = page?.url
title = page?.title
@setErrorTimeout 'action'
@history.push
name: name
url: url
title: title
logger.info 'History is ', (hi.name for hi in @history).join(' -> ')
@callWithAppropriateScope fn, browser, page, extend({log}, scope or {})
return retval
@::__defineGetter__ 'elements', ->
@constructor.elements
@::__defineGetter__ 'e', ->
@elements
resume: (answers) ->
if type.isArray answers
for {key, value} in answers
@answers[key] = value
else if type.isObject answers
# TODO: 2008-11-24 <<EMAIL>> -- this is only here until the new style (Array) is in PFC and SSU Service
extend @answers, answers
@onDocumentLoaded @browser, @page
getActionProxy: (browser, page) ->
new ActionProxy this, browser, page
getJobProxy: ->
@job
download: (url, metadata) ->
# hang on to the current browser and page so we can reload with the right context
browser = @browser
page = @page
newStatementFile = =>
folder = Dir.profile.child('statements')
folder.create() unless folder.exists
return folder.child(uuid())
# allow pre-registering information about the next download
if type.isFunction metadata
callback = metadata
metadata = url
url = null
metadata.browser = browser
metadata.page = page
@job.nextDownloadMetadata = metadata
callback()
return
else if metadata is undefined
metadata = url
url = null
unless metadata.data
throw new Error "Expected metadata #{metadata} to have data to write"
statement = newStatementFile()
statement.write metadata.data
delete metadata.data
@job.recordSuccessfulDownload statement, metadata
@onDownloadSuccessful browser, page
return
url = privacy.untaint url
metadata = extend {url}, (metadata or {})
tryThrow "Player#download(#{url})", =>
download url, newStatementFile(),
success: (path, suggestedFilename, contentType) =>
@job.recordSuccessfulDownload path, extend({suggestedFilename, contentType}, metadata)
@onDownloadSuccessful browser, page
failure: =>
@job.recordFailedDownload metadata
@onDownloadSuccessful browser, page
# Public: Answers whatever security questions are on the page by
# using the xpaths given in e.security.
#
# NOTE: Called with magic scope!
#
answerSecurityQuestions: ->
@answerSecurityQuestionsWithoutMagicScope browser ? @browser, page ? @page
# Internal: Answers whatever security questions are on the page by
# using the xpaths given in e.security.
#
# browser - The context Browser.
# page - The context Page.
answerSecurityQuestionsWithoutMagicScope: (browser, page) ->
# these are here because this function is called with magic scope
# and therefore won't see the variables we defined above
questions = page.select @e.security.questions
qanswers = page.select @e.security.answers
if questions.length isnt qanswers.length
logger.error "Found ", questions.length, " security questions, but ",
qanswers.length, " security question answers to fill"
logger.error "questions = ", questions
logger.error "qanswers = ", qanswers
return false
if questions.length is 0
logger.error "Failed to find any security questions"
return false
questions = (trim page.text(q) for q in questions)
logger.info "Found security questions: ", questions
questions = privacy.untaint questions
data = questions: []
for question, i in questions
answer = @answers[question]
element = qanswers[i]
if answer
page.fill element, answer
else
logger.debug "element = ", element, " -- element.type = ", element.type
data.questions.push
key: question
label: question
persistent: true
type: privacy.untaint(element.type) or "text"
if data.questions.length
@job.suspend 'suspended.missing-answer.auth.security', data
return false
@job.update 'auth.security'
# choose to bypass the security questions if we can
page.check @e.security.setCookieCheckbox if @e.security.setCookieCheckbox
page.fill @e.security.setCookieSelect, @e.security.setCookieOption if @e.security.setCookieSelect
# submit the form
page.click @e.security.continueButton
return true
#
# Fills in the date range for a download based on a lower bound.
#
# NOTE: This is an action and may be called with action.fillDateRange().
# Be warned: because this is an action _it is called with magic scope_.
#
# ==== Options (options)
# :since<Number, null>::
# Time of the lower bound to use for the date range (in ms since epoch).
#
# @public
#
fillDateRange: ->
@fillDateRangeWithoutMagicScope @browser, @page
# Internal: Fills out the date form fields for a download.
fillDateRangeWithoutMagicScope: (browser, page) ->
formatString = @elements.download.date.format or 'MM/dd/yyyy'
opts = @elements.download.date
fromEl = privacy.untaint page.find(opts.from)
toEl = privacy.untaint page.find(opts.to)
getDefault = (defaultValue, existing) =>
if type.isFunction defaultValue
defaultValue = defaultValue(existing)
date.parse(defaultValue) if defaultValue
if toEl
to = dateForElement(toEl, formatString)
# use default or today's date if we can't get a date from the field
to.date ||= getDefault(opts.defaults && opts.defaults.to) or new Date()
logger.info "Adjusting date upper bound: ", to.date
if fromEl
# if there's a lower bound, choose a week before it to ensure some overlap
since = options.since and (options.since - 7 * date.DAYS)
# get a date if there's already one in the field
from = dateForElement fromEl, formatString
if from.date and since
# choose the most recent of the pre-populated date and the lower bound
from.date = new Date Math.max(since, from.date.getTime())
else if since
# choose the lower bound
from.date = new Date since
else if to
# pick the default or an 89 day window
from.date = getDefault(opts.defaults and opts.defaults.from, to: to.date) or
date.add(to.date, -89 * date.DAYS)
logger.info "Adjusting date lower bound: ", from.date
nextAccount: ->
delete tmp.account
reload()
skipAccount: (args...) ->
logger.warn args... if args.length
delete @tmp.account
setErrorTimeout: (timeoutType) ->
duration = (prefs.get "wesabe.download.player.timeout.#{timeoutType}") or DEFAULT_TIMEOUTS[timeoutType]
tt = @_timeouts
tt ||= @_timeouts = {}
@clearErrorTimeout timeoutType
logger.debug "Timeout ", timeoutType, " set (", duration, " seconds)"
tt[timeoutType] = setTimeout =>
@emit 'timeout', timeoutType
return if @job.done
logger.error "Timeout ", timeoutType, " (", duration, " seconds) reached, abandoning job"
tryCatch "Player#setErrorTimeout(page dump)", =>
@page?.dumpPrivately()
@job.fail 504, "timeout.#{timeoutType}"
, duration * 1000
clearErrorTimeout: (timeoutType) ->
if @_timeouts?[timeoutType]
logger.debug "Timeout ", timeoutType, " cleared"
clearTimeout @_timeouts[timeoutType]
onDocumentLoaded: (browser, page) ->
return if @job.done or @job.paused
module = @constructor.fid
# log when alert and confirm are called
new Bridge page, (bridge) =>
bridge.evaluate ->
# evaluated on the page
window.alert = (message) ->
callback 'alert', message
return true
window.confirm = (message) ->
callback 'confirm', message
return true
window.open = (url) ->
callback 'open', url
return false
, (data) =>
# evaluated here
unless data
logger.debug "Bridge connected"
return
[messageType, message] = data
formattedMessage = inspect message, undefined, undefined, color: prefs.get('wesabe.logger.color') ? on
switch messageType
when 'alert'
logger.info messageType, ' called with message=', formattedMessage
when 'confirm'
logger.info messageType, ' called with message=', formattedMessage, ', automatically answered YES'
when 'open'
logger.info messageType, ' called with url=', formattedMessage
callbacks = @["#{messageType}ReceivedCallbacks"]
if callbacks
for callback in callbacks
names = func.argNames callback
if names.length > 2
callback.call @, browser, page, message
else
@callWithMagicScope callback, browser, page, extend({message, logger: (require 'Logger').rootLogger}), message
unless @shouldDispatch browser, page
logger.info 'skipping document load'
return
@triggerDispatch browser, page
triggerDispatch: (browser, page) ->
module = @constructor.fid
browser ||= @browser
page ||= @page
logger.info 'url=', page.url
logger.info 'title=', page.title
# these should not be used inside the FI scripts
@browser = browser
@page = page
setTimeout =>
for dispatch in @dispatches
return if @job.done or @job.paused
result = tryThrow "#{module}#dispatch(#{dispatch.name})", (log) =>
@callWithAppropriateScope dispatch.callback, browser, page, {log}
if result is false
logger.info "dispatch chain halted"
return
, 2000
onDownloadSuccessful: (browser, page) ->
for callback in @afterDownloadCallbacks
@runAction callback, browser, page
shouldDispatch: (browser, page) ->
for filter in @filters
result = tryCatch "#{@constructor.fid}#filter(#{filter.name})", (log) =>
switch r = @callWithAppropriateScope filter.test, browser, page, {log}
when true
log.debug "forcing dispatch"
when false
log.debug "aborting dispatch"
return r
# check for a definite answer
return result if type.isBoolean result
logger.debug "no filter voted to force or abort dispatch, so forcing dispatch by default"
return true
callWithAppropriateScope: (fn, browser, page, scope, args...) ->
if func.argNames(fn).length > 0
fn.call @, browser, page
else
@callWithMagicScope fn, browser, page, scope or {}
callWithMagicScope: (fn, browser, page, scope, args...) ->
log = scope.logger or scope.log or logger
func.callWithScope fn, this, extend({
browser
page
e: @constructor.elements
answers: @answers
options: @job.options
tmp: @tmp
action: @getActionProxy browser, page
job: @getJobProxy()
skipAccount: @skipAccount
reload: => @triggerDispatch browser, page
download: (args...) => @download args...
bind: (args...) => Pathway.bind(args...)
logger: log
log: log
isMagicScope: on
}, scope or {}), args
@::__defineGetter__ 'history', ->
@_history ||= []
@::__defineGetter__ 'tmp', ->
@_tmp ||= {}
@build: (fid) ->
tryThrow "download.Player.build(fid=#{fid})", (log) =>
klass = tryThrow "loading fi-scripts.#{fid}", =>
wesabe.require "fi-scripts.#{fid}"
new klass(fid)
class ActionProxy
constructor: (@player, @browser, @page) ->
__noSuchMethod__: (method, args) ->
@player.runAction method, @browser, @page
module.exports = Player
| true | wesabe.provide 'fi-scripts'
extend = require 'lang/extend'
date = require 'lang/date'
array = require 'lang/array'
dateForElement = (require 'dom/date').forElement
{trim} = require 'lang/string'
func = require 'lang/func'
type = require 'lang/type'
{uuid} = require 'lang/UUID'
prefs = require 'util/prefs'
inspect = require 'util/inspect'
Dir = require 'io/Dir'
File = require 'io/File'
{download} = require 'io/Downloader'
privacy = require 'util/privacy'
Page = require 'dom/Page'
Browser = require 'dom/Browser'
UserAgent = require 'xul/UserAgent'
Bridge = require 'dom/Bridge'
{Pathway} = require 'xpath'
{EventEmitter} = require 'events2'
{sharedEventEmitter} = require 'events2'
{tryThrow, tryCatch} = require 'util/try'
DEFAULT_TIMEOUTS =
action: 60 # 1m
global: 300 # 5m
security: 180 # 3m
class Player extends EventEmitter
@register: (params) ->
@create params, (klass) ->
# make sure we put it where wesabe.require expects it
wesabe.provide "fi-scripts.#{params.fid}", klass
@create: (params, callback) ->
class klass extends Player
# the Wesabe Financial Institution ID (e.g. com.Chase)
@fid: params.fid
# the name of the Financial Institution (e.g. Chase)
@org: params.org
# ofx info in case this is a hybrid
@ofx: params.ofx
# the elements we need to recognize
@elements: {}
# pass .fid and .org through to the class
@::__defineGetter__ 'fid', -> @constructor.fid
@::__defineGetter__ 'org', -> @constructor.org
canHandleGoal: params.canHandleGoal or (-> true)
callback?(klass)
params.__module__ = klass.__module__
# the method that decides based on the state of the job and page what to do next
klass::dispatches = []
# any dispatch filters
klass::filters = []
# any download callbacks
klass::afterDownloadCallbacks = []
# after last goal callbacks
klass::afterLastGoalCallbacks = []
# any alert callbacks
klass::alertReceivedCallbacks = []
# any confirm callbacks
klass::confirmReceivedCallbacks = []
# any open callbacks
klass::openReceivedCallbacks = []
modules = [params]
if params.includes
for include in params.includes
try
modules.push wesabe.require(include)
catch ex
throw new Error "Error while requiring #{include} -- check that the file exists and has the correct 'provide' line"
# dispatchFrames: off
if params.dispatchFrames is off
klass::filters.push
name: 'frame blocker'
test: (browser, page) ->
if page.framed
logger.info "skipping frame page load: ", page.title
return false
if params.filter
klass::filters.push
name: 'global'
test: params.filter
# userAgent: "Mozilla/4.0 HappyFunBrowser"
if params.userAgent
klass::userAgent = params.userAgent
# userAgentAlias: "Firefox"
if params.userAgentAlias
klass::userAgent = UserAgent.getByNamedAlias(params.userAgentAlias)
for module in modules
if module.dispatch
klass::dispatches.push
name: module.__module__.name
callback: module.dispatch
if module.elements
extend klass.elements, module.elements, merge: on
if module.actions
extend klass::, module.actions
if module.extensions
extend klass::, module.extensions
if module.afterDownload
klass::afterDownloadCallbacks.push module.afterDownload
if module.afterLastGoal
klass::afterLastGoalCallbacks.push module.afterLastGoal
if module.alertReceived
klass::alertReceivedCallbacks.push module.alertReceived
if module.confirmReceived
klass::confirmReceivedCallbacks.push module.confirmReceived
if module.openReceived
klass::openReceivedCallbacks.push module.openReceived
if module.filter
klass::filters.push
name: module.__module__.name
test: module.filter
return klass
start: (answers, browser) ->
if @userAgent
UserAgent.set @userAgent
else
UserAgent.revertToDefault()
# set up the callbacks for page load and download done
browser.addEventListener 'DOMContentLoaded', (evt) =>
@onDocumentLoaded Browser.wrap(browser), Page.wrap(evt.target)
, no
sharedEventEmitter.on 'downloadSuccess', (data, suggestedFilename, contentType) =>
@job.update 'account.download.success'
@setErrorTimeout 'global'
tryThrow 'Player#downloadSuccess', (log) =>
folder = Dir.profile.child('statements')
unless folder.exists
folder.create()
statement = folder.child(uuid()).asFile
statement.write data
metadata = @job.nextDownloadMetadata or {}
delete @job.nextDownloadMetadata
# restore the browser and page the download was triggered from, if any
if metadata.browser
@browser = metadata.browser
delete metadata.browser
if metadata.page
@page = metadata.page
delete metadata.page
@job.recordSuccessfulDownload statement, extend({suggestedFilename, contentType}, metadata)
@onDownloadSuccessful @browser, @page
sharedEventEmitter.on 'downloadFail', =>
logger.warn 'Failed to download a statement! This is bad, but a failed job is worse, so we press on'
@job.update 'account.download.failure'
@setErrorTimeout 'global'
@onDownloadSuccessful @browser, @page
@setErrorTimeout 'global'
# start the security question timeout when the job is suspended
@job.on 'suspend', =>
@clearErrorTimeout 'action'
@clearErrorTimeout 'global'
@setErrorTimeout 'security'
@job.on 'resume', =>
@clearErrorTimeout 'security'
@setErrorTimeout 'global'
@answers = answers
@runAction 'main', Browser.wrap(browser)
nextGoal: ->
@job.nextGoal()
onLastGoalFinished: ->
logger.info 'Finished all goals, running callbacks'
for callback in @afterLastGoalCallbacks
@runAction callback, @browser, @page
finish: ->
@clearErrorTimeout 'action'
@clearErrorTimeout 'global'
@clearErrorTimeout 'security'
runAction: (name, browser, page, scope) ->
module = @constructor.fid
[fn, name] = if type.isFunction name
[name, name.name or '(anonymous)']
else
[@[name], name]
unless fn
throw new Error "Cannot find action '#{name}'! Typo? Forgot to include a file?"
retval = tryThrow "#{module}##{name}", (log) =>
url = page?.url
title = page?.title
@setErrorTimeout 'action'
@history.push
name: name
url: url
title: title
logger.info 'History is ', (hi.name for hi in @history).join(' -> ')
@callWithAppropriateScope fn, browser, page, extend({log}, scope or {})
return retval
@::__defineGetter__ 'elements', ->
@constructor.elements
@::__defineGetter__ 'e', ->
@elements
resume: (answers) ->
if type.isArray answers
for {key, value} in answers
@answers[key] = value
else if type.isObject answers
# TODO: 2008-11-24 <PI:EMAIL:<EMAIL>END_PI> -- this is only here until the new style (Array) is in PFC and SSU Service
extend @answers, answers
@onDocumentLoaded @browser, @page
getActionProxy: (browser, page) ->
new ActionProxy this, browser, page
getJobProxy: ->
@job
download: (url, metadata) ->
# hang on to the current browser and page so we can reload with the right context
browser = @browser
page = @page
newStatementFile = =>
folder = Dir.profile.child('statements')
folder.create() unless folder.exists
return folder.child(uuid())
# allow pre-registering information about the next download
if type.isFunction metadata
callback = metadata
metadata = url
url = null
metadata.browser = browser
metadata.page = page
@job.nextDownloadMetadata = metadata
callback()
return
else if metadata is undefined
metadata = url
url = null
unless metadata.data
throw new Error "Expected metadata #{metadata} to have data to write"
statement = newStatementFile()
statement.write metadata.data
delete metadata.data
@job.recordSuccessfulDownload statement, metadata
@onDownloadSuccessful browser, page
return
url = privacy.untaint url
metadata = extend {url}, (metadata or {})
tryThrow "Player#download(#{url})", =>
download url, newStatementFile(),
success: (path, suggestedFilename, contentType) =>
@job.recordSuccessfulDownload path, extend({suggestedFilename, contentType}, metadata)
@onDownloadSuccessful browser, page
failure: =>
@job.recordFailedDownload metadata
@onDownloadSuccessful browser, page
# Public: Answers whatever security questions are on the page by
# using the xpaths given in e.security.
#
# NOTE: Called with magic scope!
#
answerSecurityQuestions: ->
@answerSecurityQuestionsWithoutMagicScope browser ? @browser, page ? @page
# Internal: Answers whatever security questions are on the page by
# using the xpaths given in e.security.
#
# browser - The context Browser.
# page - The context Page.
answerSecurityQuestionsWithoutMagicScope: (browser, page) ->
# these are here because this function is called with magic scope
# and therefore won't see the variables we defined above
questions = page.select @e.security.questions
qanswers = page.select @e.security.answers
if questions.length isnt qanswers.length
logger.error "Found ", questions.length, " security questions, but ",
qanswers.length, " security question answers to fill"
logger.error "questions = ", questions
logger.error "qanswers = ", qanswers
return false
if questions.length is 0
logger.error "Failed to find any security questions"
return false
questions = (trim page.text(q) for q in questions)
logger.info "Found security questions: ", questions
questions = privacy.untaint questions
data = questions: []
for question, i in questions
answer = @answers[question]
element = qanswers[i]
if answer
page.fill element, answer
else
logger.debug "element = ", element, " -- element.type = ", element.type
data.questions.push
key: question
label: question
persistent: true
type: privacy.untaint(element.type) or "text"
if data.questions.length
@job.suspend 'suspended.missing-answer.auth.security', data
return false
@job.update 'auth.security'
# choose to bypass the security questions if we can
page.check @e.security.setCookieCheckbox if @e.security.setCookieCheckbox
page.fill @e.security.setCookieSelect, @e.security.setCookieOption if @e.security.setCookieSelect
# submit the form
page.click @e.security.continueButton
return true
#
# Fills in the date range for a download based on a lower bound.
#
# NOTE: This is an action and may be called with action.fillDateRange().
# Be warned: because this is an action _it is called with magic scope_.
#
# ==== Options (options)
# :since<Number, null>::
# Time of the lower bound to use for the date range (in ms since epoch).
#
# @public
#
fillDateRange: ->
@fillDateRangeWithoutMagicScope @browser, @page
# Internal: Fills out the date form fields for a download.
fillDateRangeWithoutMagicScope: (browser, page) ->
formatString = @elements.download.date.format or 'MM/dd/yyyy'
opts = @elements.download.date
fromEl = privacy.untaint page.find(opts.from)
toEl = privacy.untaint page.find(opts.to)
getDefault = (defaultValue, existing) =>
if type.isFunction defaultValue
defaultValue = defaultValue(existing)
date.parse(defaultValue) if defaultValue
if toEl
to = dateForElement(toEl, formatString)
# use default or today's date if we can't get a date from the field
to.date ||= getDefault(opts.defaults && opts.defaults.to) or new Date()
logger.info "Adjusting date upper bound: ", to.date
if fromEl
# if there's a lower bound, choose a week before it to ensure some overlap
since = options.since and (options.since - 7 * date.DAYS)
# get a date if there's already one in the field
from = dateForElement fromEl, formatString
if from.date and since
# choose the most recent of the pre-populated date and the lower bound
from.date = new Date Math.max(since, from.date.getTime())
else if since
# choose the lower bound
from.date = new Date since
else if to
# pick the default or an 89 day window
from.date = getDefault(opts.defaults and opts.defaults.from, to: to.date) or
date.add(to.date, -89 * date.DAYS)
logger.info "Adjusting date lower bound: ", from.date
nextAccount: ->
delete tmp.account
reload()
skipAccount: (args...) ->
logger.warn args... if args.length
delete @tmp.account
setErrorTimeout: (timeoutType) ->
duration = (prefs.get "wesabe.download.player.timeout.#{timeoutType}") or DEFAULT_TIMEOUTS[timeoutType]
tt = @_timeouts
tt ||= @_timeouts = {}
@clearErrorTimeout timeoutType
logger.debug "Timeout ", timeoutType, " set (", duration, " seconds)"
tt[timeoutType] = setTimeout =>
@emit 'timeout', timeoutType
return if @job.done
logger.error "Timeout ", timeoutType, " (", duration, " seconds) reached, abandoning job"
tryCatch "Player#setErrorTimeout(page dump)", =>
@page?.dumpPrivately()
@job.fail 504, "timeout.#{timeoutType}"
, duration * 1000
clearErrorTimeout: (timeoutType) ->
if @_timeouts?[timeoutType]
logger.debug "Timeout ", timeoutType, " cleared"
clearTimeout @_timeouts[timeoutType]
onDocumentLoaded: (browser, page) ->
return if @job.done or @job.paused
module = @constructor.fid
# log when alert and confirm are called
new Bridge page, (bridge) =>
bridge.evaluate ->
# evaluated on the page
window.alert = (message) ->
callback 'alert', message
return true
window.confirm = (message) ->
callback 'confirm', message
return true
window.open = (url) ->
callback 'open', url
return false
, (data) =>
# evaluated here
unless data
logger.debug "Bridge connected"
return
[messageType, message] = data
formattedMessage = inspect message, undefined, undefined, color: prefs.get('wesabe.logger.color') ? on
switch messageType
when 'alert'
logger.info messageType, ' called with message=', formattedMessage
when 'confirm'
logger.info messageType, ' called with message=', formattedMessage, ', automatically answered YES'
when 'open'
logger.info messageType, ' called with url=', formattedMessage
callbacks = @["#{messageType}ReceivedCallbacks"]
if callbacks
for callback in callbacks
names = func.argNames callback
if names.length > 2
callback.call @, browser, page, message
else
@callWithMagicScope callback, browser, page, extend({message, logger: (require 'Logger').rootLogger}), message
unless @shouldDispatch browser, page
logger.info 'skipping document load'
return
@triggerDispatch browser, page
triggerDispatch: (browser, page) ->
module = @constructor.fid
browser ||= @browser
page ||= @page
logger.info 'url=', page.url
logger.info 'title=', page.title
# these should not be used inside the FI scripts
@browser = browser
@page = page
setTimeout =>
for dispatch in @dispatches
return if @job.done or @job.paused
result = tryThrow "#{module}#dispatch(#{dispatch.name})", (log) =>
@callWithAppropriateScope dispatch.callback, browser, page, {log}
if result is false
logger.info "dispatch chain halted"
return
, 2000
onDownloadSuccessful: (browser, page) ->
for callback in @afterDownloadCallbacks
@runAction callback, browser, page
shouldDispatch: (browser, page) ->
for filter in @filters
result = tryCatch "#{@constructor.fid}#filter(#{filter.name})", (log) =>
switch r = @callWithAppropriateScope filter.test, browser, page, {log}
when true
log.debug "forcing dispatch"
when false
log.debug "aborting dispatch"
return r
# check for a definite answer
return result if type.isBoolean result
logger.debug "no filter voted to force or abort dispatch, so forcing dispatch by default"
return true
callWithAppropriateScope: (fn, browser, page, scope, args...) ->
if func.argNames(fn).length > 0
fn.call @, browser, page
else
@callWithMagicScope fn, browser, page, scope or {}
callWithMagicScope: (fn, browser, page, scope, args...) ->
log = scope.logger or scope.log or logger
func.callWithScope fn, this, extend({
browser
page
e: @constructor.elements
answers: @answers
options: @job.options
tmp: @tmp
action: @getActionProxy browser, page
job: @getJobProxy()
skipAccount: @skipAccount
reload: => @triggerDispatch browser, page
download: (args...) => @download args...
bind: (args...) => Pathway.bind(args...)
logger: log
log: log
isMagicScope: on
}, scope or {}), args
@::__defineGetter__ 'history', ->
@_history ||= []
@::__defineGetter__ 'tmp', ->
@_tmp ||= {}
@build: (fid) ->
tryThrow "download.Player.build(fid=#{fid})", (log) =>
klass = tryThrow "loading fi-scripts.#{fid}", =>
wesabe.require "fi-scripts.#{fid}"
new klass(fid)
class ActionProxy
constructor: (@player, @browser, @page) ->
__noSuchMethod__: (method, args) ->
@player.runAction method, @browser, @page
module.exports = Player
|
[
{
"context": " if session_data\n key = \"app:session:#{crypto.createHash('md5').update(session_data.sessi",
"end": 795,
"score": 0.9550021886825562,
"start": 781,
"tag": "KEY",
"value": "app:session:#{"
},
{
"context": "n_data\n key = \"app:ses... | src/server/lib/session.coffee | mauriciodelrio/carbon | 0 | CONFIG = require('../../../config').CONFIG
User = new (require('./pgconn').User)()
crypto = require 'crypto'
redis = require 'redis'
Url = process.env.REDIS_URL or 'redis://localhost:6379'
rclient = redis.createClient Url, prefix: CONFIG?.DB?.REDIS?.PREFIX
class Session
constructor: () ->
@config =
session_ttl: 30
check: (req, cb) ->
if req?.session?.session_id and req?.session?.user_id
rclient.GET "app:session:#{crypto.createHash('md5').update(req.session.session_id).digest('hex')}", (r_error, r_session) =>
if r_session
cb? req.session.session_id
else
User.connect (client) =>
User.get_session_by_id client, req.session.user_id, (session_data) =>
if session_data
key = "app:session:#{crypto.createHash('md5').update(session_data.session_id).digest('hex')}"
ttl = @config.session_ttl
value = 'true'
rclient.SETEX key, ttl, value
cb? req.session.session_id
else
User.clean_old_sessions client, req.session.user_id, (resp) ->
if resp.status is 'OK' and resp.data?
console.log resp
else
console.error resp
cb?()
else
cb?()
set: (user_id, metadata = {}, req, clear_others = false, cb) =>
User.connect (client) =>
User.create_session client, user_id, (session_data) =>
if session_data?.session_id
console.log "req session", req.session
req.session.session_id = session_data.session_id
req.session.user_id = user_id
req.session.metadata = metadata if metadata
key = "app:session:#{crypto.createHash('md5').update(session_data.session_id).digest('hex')}"
ttl = @config.session_ttl
value = 'true'
rclient.SETEX key, ttl, value
cb? session_data.session_id
else
cb?()
clear: (req, cb) ->
#TODO: clear session in SM
key = "app:session:#{crypto.createHash('md5').update(req?.session?.session_id).digest('hex')}"
rclient.DEL key
if req?.session?
req.session.destroy()
req.session = null
cb?()
module.exports = Session | 223389 | CONFIG = require('../../../config').CONFIG
User = new (require('./pgconn').User)()
crypto = require 'crypto'
redis = require 'redis'
Url = process.env.REDIS_URL or 'redis://localhost:6379'
rclient = redis.createClient Url, prefix: CONFIG?.DB?.REDIS?.PREFIX
class Session
constructor: () ->
@config =
session_ttl: 30
check: (req, cb) ->
if req?.session?.session_id and req?.session?.user_id
rclient.GET "app:session:#{crypto.createHash('md5').update(req.session.session_id).digest('hex')}", (r_error, r_session) =>
if r_session
cb? req.session.session_id
else
User.connect (client) =>
User.get_session_by_id client, req.session.user_id, (session_data) =>
if session_data
key = "<KEY>crypto.<KEY>(session_data.session_<KEY>')<KEY>}"
ttl = @config.session_ttl
value = 'true'
rclient.SETEX key, ttl, value
cb? req.session.session_id
else
User.clean_old_sessions client, req.session.user_id, (resp) ->
if resp.status is 'OK' and resp.data?
console.log resp
else
console.error resp
cb?()
else
cb?()
set: (user_id, metadata = {}, req, clear_others = false, cb) =>
User.connect (client) =>
User.create_session client, user_id, (session_data) =>
if session_data?.session_id
console.log "req session", req.session
req.session.session_id = session_data.session_id
req.session.user_id = user_id
req.session.metadata = metadata if metadata
key = "<KEY> <KEY>(session_data.session_<KEY>('<KEY>
ttl = @config.session_ttl
value = 'true'
rclient.SETEX key, ttl, value
cb? session_data.session_id
else
cb?()
clear: (req, cb) ->
#TODO: clear session in SM
key = "<KEY>{crypto.<KEY>Hash('md5').update(req?.session?.session_id).digest('hex')}"
rclient.DEL key
if req?.session?
req.session.destroy()
req.session = null
cb?()
module.exports = Session | true | CONFIG = require('../../../config').CONFIG
User = new (require('./pgconn').User)()
crypto = require 'crypto'
redis = require 'redis'
Url = process.env.REDIS_URL or 'redis://localhost:6379'
rclient = redis.createClient Url, prefix: CONFIG?.DB?.REDIS?.PREFIX
class Session
constructor: () ->
@config =
session_ttl: 30
check: (req, cb) ->
if req?.session?.session_id and req?.session?.user_id
rclient.GET "app:session:#{crypto.createHash('md5').update(req.session.session_id).digest('hex')}", (r_error, r_session) =>
if r_session
cb? req.session.session_id
else
User.connect (client) =>
User.get_session_by_id client, req.session.user_id, (session_data) =>
if session_data
key = "PI:KEY:<KEY>END_PIcrypto.PI:KEY:<KEY>END_PI(session_data.session_PI:KEY:<KEY>END_PI')PI:KEY:<KEY>END_PI}"
ttl = @config.session_ttl
value = 'true'
rclient.SETEX key, ttl, value
cb? req.session.session_id
else
User.clean_old_sessions client, req.session.user_id, (resp) ->
if resp.status is 'OK' and resp.data?
console.log resp
else
console.error resp
cb?()
else
cb?()
set: (user_id, metadata = {}, req, clear_others = false, cb) =>
User.connect (client) =>
User.create_session client, user_id, (session_data) =>
if session_data?.session_id
console.log "req session", req.session
req.session.session_id = session_data.session_id
req.session.user_id = user_id
req.session.metadata = metadata if metadata
key = "PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI(session_data.session_PI:KEY:<KEY>END_PI('PI:KEY:<KEY>END_PI
ttl = @config.session_ttl
value = 'true'
rclient.SETEX key, ttl, value
cb? session_data.session_id
else
cb?()
clear: (req, cb) ->
#TODO: clear session in SM
key = "PI:KEY:<KEY>END_PI{crypto.PI:KEY:<KEY>END_PIHash('md5').update(req?.session?.session_id).digest('hex')}"
rclient.DEL key
if req?.session?
req.session.destroy()
req.session = null
cb?()
module.exports = Session |
[
{
"context": "me: $5\n instance_type: $6\n key_name: $7\n monitoring: $8\n region: $9\n ",
"end": 249,
"score": 0.7404003143310547,
"start": 247,
"tag": "KEY",
"value": "$7"
}
] | snippets/aws.cson | skord/atom-ansible-snippets | 6 | ".source.yaml":
"newEC2Instance":
"prefix": "ec2"
"body": """
ec2:
assign_public_ip: $1
group: $2
group_id: $3
image: $4
instance_profile_name: $5
instance_type: $6
key_name: $7
monitoring: $8
region: $9
state: present
wait: $10
vpc_subnet_id: $11
instance_tags:
Name: $12
CreatedBy: Ansible
$13
"""
"newEC2Instances":
"prefix": "ec2m"
"body": """
ec2:
assign_public_ip: $1
count: $2
count_tag: $3
exact_count: $4
group: $5
group_id:
image:
instance_profile_name:
instance_type:
key_name:
monitoring:
region:
state:
wait:
vpc_subnet_id:
instance_tags:
Name: $12
CreatedBy: Ansible
$13
"""
"newEC2Group":
"prefix": "ec2g"
"body": """
ec2_group:
name: $1
description: $2
purge_rules: yes
purge_rules_egress: yes
region: $3
vpc_id: $4
state: present
rules:
$5
"""
"tcpRuleGid":
"prefix": "tcpgid"
"body": """
- proto: tcp
from_port: $1
to_port: $2
group_id: $3
"""
"tcpRuleGn":
"prefix": "tcpgn"
"body": """
- proto: tcp
from_port: $1
to_port: $2
group_name: $3
"""
"tcpRuleCidr":
"prefix": "tcpcidr"
"body": """
- proto: tcp
from_port: $1
to_port: $2
cidr_ip: $3
"""
"udpRuleGid":
"prefix": "udpgid"
"body": """
- proto: udp
from_port: $1
to_port: $2
group_id: $3
"""
"udpRuleGn":
"prefix": "udpgn"
"body": """
- proto: udp
from_port: $1
to_port: $2
group_name: $3
"""
"udpRuleCidr":
"prefix": "udpcidr"
"body": """
- proto: udp
from_port: $1
to_port: $2
cidr_ip: $3
"""
| 45082 | ".source.yaml":
"newEC2Instance":
"prefix": "ec2"
"body": """
ec2:
assign_public_ip: $1
group: $2
group_id: $3
image: $4
instance_profile_name: $5
instance_type: $6
key_name: <KEY>
monitoring: $8
region: $9
state: present
wait: $10
vpc_subnet_id: $11
instance_tags:
Name: $12
CreatedBy: Ansible
$13
"""
"newEC2Instances":
"prefix": "ec2m"
"body": """
ec2:
assign_public_ip: $1
count: $2
count_tag: $3
exact_count: $4
group: $5
group_id:
image:
instance_profile_name:
instance_type:
key_name:
monitoring:
region:
state:
wait:
vpc_subnet_id:
instance_tags:
Name: $12
CreatedBy: Ansible
$13
"""
"newEC2Group":
"prefix": "ec2g"
"body": """
ec2_group:
name: $1
description: $2
purge_rules: yes
purge_rules_egress: yes
region: $3
vpc_id: $4
state: present
rules:
$5
"""
"tcpRuleGid":
"prefix": "tcpgid"
"body": """
- proto: tcp
from_port: $1
to_port: $2
group_id: $3
"""
"tcpRuleGn":
"prefix": "tcpgn"
"body": """
- proto: tcp
from_port: $1
to_port: $2
group_name: $3
"""
"tcpRuleCidr":
"prefix": "tcpcidr"
"body": """
- proto: tcp
from_port: $1
to_port: $2
cidr_ip: $3
"""
"udpRuleGid":
"prefix": "udpgid"
"body": """
- proto: udp
from_port: $1
to_port: $2
group_id: $3
"""
"udpRuleGn":
"prefix": "udpgn"
"body": """
- proto: udp
from_port: $1
to_port: $2
group_name: $3
"""
"udpRuleCidr":
"prefix": "udpcidr"
"body": """
- proto: udp
from_port: $1
to_port: $2
cidr_ip: $3
"""
| true | ".source.yaml":
"newEC2Instance":
"prefix": "ec2"
"body": """
ec2:
assign_public_ip: $1
group: $2
group_id: $3
image: $4
instance_profile_name: $5
instance_type: $6
key_name: PI:KEY:<KEY>END_PI
monitoring: $8
region: $9
state: present
wait: $10
vpc_subnet_id: $11
instance_tags:
Name: $12
CreatedBy: Ansible
$13
"""
"newEC2Instances":
"prefix": "ec2m"
"body": """
ec2:
assign_public_ip: $1
count: $2
count_tag: $3
exact_count: $4
group: $5
group_id:
image:
instance_profile_name:
instance_type:
key_name:
monitoring:
region:
state:
wait:
vpc_subnet_id:
instance_tags:
Name: $12
CreatedBy: Ansible
$13
"""
"newEC2Group":
"prefix": "ec2g"
"body": """
ec2_group:
name: $1
description: $2
purge_rules: yes
purge_rules_egress: yes
region: $3
vpc_id: $4
state: present
rules:
$5
"""
"tcpRuleGid":
"prefix": "tcpgid"
"body": """
- proto: tcp
from_port: $1
to_port: $2
group_id: $3
"""
"tcpRuleGn":
"prefix": "tcpgn"
"body": """
- proto: tcp
from_port: $1
to_port: $2
group_name: $3
"""
"tcpRuleCidr":
"prefix": "tcpcidr"
"body": """
- proto: tcp
from_port: $1
to_port: $2
cidr_ip: $3
"""
"udpRuleGid":
"prefix": "udpgid"
"body": """
- proto: udp
from_port: $1
to_port: $2
group_id: $3
"""
"udpRuleGn":
"prefix": "udpgn"
"body": """
- proto: udp
from_port: $1
to_port: $2
group_name: $3
"""
"udpRuleCidr":
"prefix": "udpcidr"
"body": """
- proto: udp
from_port: $1
to_port: $2
cidr_ip: $3
"""
|
[
{
"context": " 'must not be null or undefined'\n password: 'must not be null or undefined'\n }\n\n test.deepEqual validateUser({\n e",
"end": 450,
"score": 0.9964537620544434,
"start": 421,
"tag": "PASSWORD",
"value": "must not be null or undefined"
},
{
"context": "m... | test/waechter.coffee | snd/waechter | 0 | Promise = require 'bluebird'
waechter = require '../src/waechter'
module.exports =
'schemaToValidator': (test) ->
userSchema =
email: waechter.email
password: waechter.stringNotEmpty
validateUser = waechter.schemaToValidator userSchema
test.equal validateUser(), 'must be an object'
test.deepEqual validateUser({
}), {
email: 'must not be null or undefined'
password: 'must not be null or undefined'
}
test.deepEqual validateUser({
email: 'i am definitely not an email address'
password: ''
}), {
email: 'must be an email address'
password: 'must not be empty'
}
test.deepEqual validateUser({
email: 'test@example.com'
}), {
password: 'must not be null or undefined'
}
test.equal validateUser({
email: 'test@example.com'
password: 'topsecret'
}), null
test.done()
'schemasToLazyAsyncValidator': (test) ->
schemaUserShared =
name: waechter.stringNotEmpty
password: waechter.stringMinLength(8)
email: waechter.email
callsToFirstUserWhereName = []
firstUserWhereName = (name) ->
callsToFirstUserWhereName.push name
if name is 'this-name-is-taken'
Promise.delay({}, 10)
else
Promise.delay(null, 10)
callsToFirstUserWhereEmail = []
firstUserWhereEmail = (email) ->
callsToFirstUserWhereEmail.push email
if email is 'this-email-is-taken@example.com'
Promise.delay({}, 10)
else
Promise.delay(null, 10)
schemaUserTakenAsync =
name: (value) ->
firstUserWhereName(value).then (user) ->
if user? then 'taken'
email: (value) ->
firstUserWhereEmail(value).then (user) ->
if user? then 'taken'
validateUser = waechter.schemasToLazyAsyncValidator(
schemaUserShared
schemaUserTakenAsync
)
validateUser()
.then (errors) ->
test.equal errors, 'must be an object'
validateUser
email: 'i am definitely not an email address'
password: ''
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
name: 'must not be null or undefined'
password: 'must not be empty'
validateUser
email: 'i am definitely not an email address'
name: ''
password: 'foo'
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
name: 'must not be empty'
password: 'must be at least 8 characters long'
validateUser
email: 'i am definitely not an email address'
name: 'a'
password: 'foo'
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
password: 'must be at least 8 characters long'
validateUser
email: 'test@example.com'
name: 'a'
password: 'topsecret'
.then (errors) ->
test.equal errors, null
validateUser
email: 'test@example.com'
name: 'this-name-is-taken'
password: 'topsecret'
.then (errors) ->
test.deepEqual errors,
name: 'taken'
validateUser
email: 'this-email-is-taken@example.com'
name: 'this-name-is-taken'
password: 'topsecret'
.then (errors) ->
test.deepEqual errors,
name: 'taken'
email: 'taken'
test.deepEqual callsToFirstUserWhereName, ['a', 'a', 'this-name-is-taken', 'this-name-is-taken']
test.deepEqual callsToFirstUserWhereEmail, ['test@example.com', 'test@example.com', 'this-email-is-taken@example.com']
test.done()
'or': (test) ->
validator = waechter.or(
waechter.email
waechter.true
waechter.numberWithin(6, 10)
)
test.deepEqual validator(), [
'one of the following conditions must be fulfilled:'
'must not be null or undefined'
'must be `true`'
'must be a number'
]
test.equal validator(true), null
test.deepEqual validator(10), [
'one of the following conditions must be fulfilled:'
'must be a string'
'must be `true`'
'must be a number within 6 and 10'
]
test.equal validator(8), null
test.deepEqual validator('aaa'), [
'one of the following conditions must be fulfilled:'
'must be an email address'
'must be `true`'
'must be a number'
]
test.equal validator('test@example.com'), null
test.done()
'and': (test) ->
validator = waechter.and(
waechter.exist
waechter.string
waechter.stringNotEmpty
waechter.email
)
test.equal validator(), 'must not be null or undefined'
test.equal validator(5), 'must be a string'
test.equal validator(''), 'must not be empty'
test.equal validator('aaa'), 'must be an email address'
test.equal validator('test@example.com'), null
test.done()
'undefinedOr': (test) ->
validator = waechter.undefinedOr(
waechter.email
)
test.equal validator(), null
test.deepEqual validator(null), [
'one of the following conditions must be fulfilled:'
'must be undefined'
'must not be null or undefined'
]
test.deepEqual validator('aa'), [
'one of the following conditions must be fulfilled:'
'must be undefined'
'must be an email address'
]
test.equal validator('test@example.com'), null
test.done()
'numberWithin': (test) ->
test.expect 8
try
test.equal waechter.numberWithin('a', 'b')
catch e
test.equal e.message, 'min and max arguments must be numbers'
validator = waechter.numberWithin(2, 6)
error = 'must be a number within 2 and 6'
test.equal validator(1), error
test.equal validator(2), error
test.equal validator(3), null
test.equal validator(4), null
test.equal validator(5), null
test.equal validator(6), error
test.equal validator(7), error
test.done()
'true': (test) ->
error = 'must be `true`'
test.equal waechter.true(true), null
test.equal waechter.true(false), error
test.equal waechter.true(null), error
test.equal waechter.true(), error
test.equal waechter.true('true'), error
test.done()
'false': (test) ->
error = 'must be `false`'
test.equal waechter.false(false), null
test.equal waechter.false(true), error
test.equal waechter.false(null), error
test.equal waechter.false(), error
test.equal waechter.false('false'), error
test.done()
| 128859 | Promise = require 'bluebird'
waechter = require '../src/waechter'
module.exports =
'schemaToValidator': (test) ->
userSchema =
email: waechter.email
password: waechter.stringNotEmpty
validateUser = waechter.schemaToValidator userSchema
test.equal validateUser(), 'must be an object'
test.deepEqual validateUser({
}), {
email: 'must not be null or undefined'
password: '<PASSWORD>'
}
test.deepEqual validateUser({
email: 'i am definitely not an email address'
password: ''
}), {
email: 'must be an email address'
password: '<PASSWORD>'
}
test.deepEqual validateUser({
email: '<EMAIL>'
}), {
password: '<PASSWORD>'
}
test.equal validateUser({
email: '<EMAIL>'
password: '<PASSWORD>'
}), null
test.done()
'schemasToLazyAsyncValidator': (test) ->
schemaUserShared =
name: waechter.stringNotEmpty
password: <PASSWORD>(8)
email: waechter.email
callsToFirstUserWhereName = []
firstUserWhereName = (name) ->
callsToFirstUserWhereName.push name
if name is 'this-name-is-taken'
Promise.delay({}, 10)
else
Promise.delay(null, 10)
callsToFirstUserWhereEmail = []
firstUserWhereEmail = (email) ->
callsToFirstUserWhereEmail.push email
if email is 'this-<EMAIL>'
Promise.delay({}, 10)
else
Promise.delay(null, 10)
schemaUserTakenAsync =
name: (value) ->
firstUserWhereName(value).then (user) ->
if user? then 'taken'
email: (value) ->
firstUserWhereEmail(value).then (user) ->
if user? then 'taken'
validateUser = waechter.schemasToLazyAsyncValidator(
schemaUserShared
schemaUserTakenAsync
)
validateUser()
.then (errors) ->
test.equal errors, 'must be an object'
validateUser
email: 'i am definitely not an email address'
password: ''
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
name: 'must not be null or undefined'
password: '<PASSWORD>'
validateUser
email: 'i am definitely not an email address'
name: ''
password: '<PASSWORD>'
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
name: 'must not be empty'
password: '<PASSWORD>'
validateUser
email: 'i am definitely not an email address'
name: 'a'
password: '<PASSWORD>'
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
password: '<PASSWORD>'
validateUser
email: '<EMAIL>'
name: 'a'
password: '<PASSWORD>'
.then (errors) ->
test.equal errors, null
validateUser
email: '<EMAIL>'
name: 'this-name-is-taken'
password: '<PASSWORD>'
.then (errors) ->
test.deepEqual errors,
name: 'taken'
validateUser
email: '<EMAIL>'
name: 'this-name-is-taken'
password: '<PASSWORD>'
.then (errors) ->
test.deepEqual errors,
name: 'taken'
email: 'taken'
test.deepEqual callsToFirstUserWhereName, ['a', 'a', 'this-name-is-taken', 'this-name-is-taken']
test.deepEqual callsToFirstUserWhereEmail, ['<EMAIL>', '<EMAIL>', '<EMAIL>']
test.done()
'or': (test) ->
validator = waechter.or(
waechter.email
waechter.true
waechter.numberWithin(6, 10)
)
test.deepEqual validator(), [
'one of the following conditions must be fulfilled:'
'must not be null or undefined'
'must be `true`'
'must be a number'
]
test.equal validator(true), null
test.deepEqual validator(10), [
'one of the following conditions must be fulfilled:'
'must be a string'
'must be `true`'
'must be a number within 6 and 10'
]
test.equal validator(8), null
test.deepEqual validator('aaa'), [
'one of the following conditions must be fulfilled:'
'must be an email address'
'must be `true`'
'must be a number'
]
test.equal validator('<EMAIL>'), null
test.done()
'and': (test) ->
validator = waechter.and(
waechter.exist
waechter.string
waechter.stringNotEmpty
waechter.email
)
test.equal validator(), 'must not be null or undefined'
test.equal validator(5), 'must be a string'
test.equal validator(''), 'must not be empty'
test.equal validator('aaa'), 'must be an email address'
test.equal validator('<EMAIL>'), null
test.done()
'undefinedOr': (test) ->
validator = waechter.undefinedOr(
waechter.email
)
test.equal validator(), null
test.deepEqual validator(null), [
'one of the following conditions must be fulfilled:'
'must be undefined'
'must not be null or undefined'
]
test.deepEqual validator('aa'), [
'one of the following conditions must be fulfilled:'
'must be undefined'
'must be an email address'
]
test.equal validator('<EMAIL>'), null
test.done()
'numberWithin': (test) ->
test.expect 8
try
test.equal waechter.numberWithin('a', 'b')
catch e
test.equal e.message, 'min and max arguments must be numbers'
validator = waechter.numberWithin(2, 6)
error = 'must be a number within 2 and 6'
test.equal validator(1), error
test.equal validator(2), error
test.equal validator(3), null
test.equal validator(4), null
test.equal validator(5), null
test.equal validator(6), error
test.equal validator(7), error
test.done()
'true': (test) ->
error = 'must be `true`'
test.equal waechter.true(true), null
test.equal waechter.true(false), error
test.equal waechter.true(null), error
test.equal waechter.true(), error
test.equal waechter.true('true'), error
test.done()
'false': (test) ->
error = 'must be `false`'
test.equal waechter.false(false), null
test.equal waechter.false(true), error
test.equal waechter.false(null), error
test.equal waechter.false(), error
test.equal waechter.false('false'), error
test.done()
| true | Promise = require 'bluebird'
waechter = require '../src/waechter'
module.exports =
'schemaToValidator': (test) ->
userSchema =
email: waechter.email
password: waechter.stringNotEmpty
validateUser = waechter.schemaToValidator userSchema
test.equal validateUser(), 'must be an object'
test.deepEqual validateUser({
}), {
email: 'must not be null or undefined'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
}
test.deepEqual validateUser({
email: 'i am definitely not an email address'
password: ''
}), {
email: 'must be an email address'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
}
test.deepEqual validateUser({
email: 'PI:EMAIL:<EMAIL>END_PI'
}), {
password: 'PI:PASSWORD:<PASSWORD>END_PI'
}
test.equal validateUser({
email: 'PI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
}), null
test.done()
'schemasToLazyAsyncValidator': (test) ->
schemaUserShared =
name: waechter.stringNotEmpty
password: PI:PASSWORD:<PASSWORD>END_PI(8)
email: waechter.email
callsToFirstUserWhereName = []
firstUserWhereName = (name) ->
callsToFirstUserWhereName.push name
if name is 'this-name-is-taken'
Promise.delay({}, 10)
else
Promise.delay(null, 10)
callsToFirstUserWhereEmail = []
firstUserWhereEmail = (email) ->
callsToFirstUserWhereEmail.push email
if email is 'this-PI:EMAIL:<EMAIL>END_PI'
Promise.delay({}, 10)
else
Promise.delay(null, 10)
schemaUserTakenAsync =
name: (value) ->
firstUserWhereName(value).then (user) ->
if user? then 'taken'
email: (value) ->
firstUserWhereEmail(value).then (user) ->
if user? then 'taken'
validateUser = waechter.schemasToLazyAsyncValidator(
schemaUserShared
schemaUserTakenAsync
)
validateUser()
.then (errors) ->
test.equal errors, 'must be an object'
validateUser
email: 'i am definitely not an email address'
password: ''
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
name: 'must not be null or undefined'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
validateUser
email: 'i am definitely not an email address'
name: ''
password: 'PI:PASSWORD:<PASSWORD>END_PI'
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
name: 'must not be empty'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
validateUser
email: 'i am definitely not an email address'
name: 'a'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
.then (errors) ->
test.deepEqual errors,
email: 'must be an email address'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
validateUser
email: 'PI:EMAIL:<EMAIL>END_PI'
name: 'a'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
.then (errors) ->
test.equal errors, null
validateUser
email: 'PI:EMAIL:<EMAIL>END_PI'
name: 'this-name-is-taken'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
.then (errors) ->
test.deepEqual errors,
name: 'taken'
validateUser
email: 'PI:EMAIL:<EMAIL>END_PI'
name: 'this-name-is-taken'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
.then (errors) ->
test.deepEqual errors,
name: 'taken'
email: 'taken'
test.deepEqual callsToFirstUserWhereName, ['a', 'a', 'this-name-is-taken', 'this-name-is-taken']
test.deepEqual callsToFirstUserWhereEmail, ['PI:EMAIL:<EMAIL>END_PI', 'PI:EMAIL:<EMAIL>END_PI', 'PI:EMAIL:<EMAIL>END_PI']
test.done()
'or': (test) ->
validator = waechter.or(
waechter.email
waechter.true
waechter.numberWithin(6, 10)
)
test.deepEqual validator(), [
'one of the following conditions must be fulfilled:'
'must not be null or undefined'
'must be `true`'
'must be a number'
]
test.equal validator(true), null
test.deepEqual validator(10), [
'one of the following conditions must be fulfilled:'
'must be a string'
'must be `true`'
'must be a number within 6 and 10'
]
test.equal validator(8), null
test.deepEqual validator('aaa'), [
'one of the following conditions must be fulfilled:'
'must be an email address'
'must be `true`'
'must be a number'
]
test.equal validator('PI:EMAIL:<EMAIL>END_PI'), null
test.done()
'and': (test) ->
validator = waechter.and(
waechter.exist
waechter.string
waechter.stringNotEmpty
waechter.email
)
test.equal validator(), 'must not be null or undefined'
test.equal validator(5), 'must be a string'
test.equal validator(''), 'must not be empty'
test.equal validator('aaa'), 'must be an email address'
test.equal validator('PI:EMAIL:<EMAIL>END_PI'), null
test.done()
'undefinedOr': (test) ->
validator = waechter.undefinedOr(
waechter.email
)
test.equal validator(), null
test.deepEqual validator(null), [
'one of the following conditions must be fulfilled:'
'must be undefined'
'must not be null or undefined'
]
test.deepEqual validator('aa'), [
'one of the following conditions must be fulfilled:'
'must be undefined'
'must be an email address'
]
test.equal validator('PI:EMAIL:<EMAIL>END_PI'), null
test.done()
'numberWithin': (test) ->
test.expect 8
try
test.equal waechter.numberWithin('a', 'b')
catch e
test.equal e.message, 'min and max arguments must be numbers'
validator = waechter.numberWithin(2, 6)
error = 'must be a number within 2 and 6'
test.equal validator(1), error
test.equal validator(2), error
test.equal validator(3), null
test.equal validator(4), null
test.equal validator(5), null
test.equal validator(6), error
test.equal validator(7), error
test.done()
'true': (test) ->
error = 'must be `true`'
test.equal waechter.true(true), null
test.equal waechter.true(false), error
test.equal waechter.true(null), error
test.equal waechter.true(), error
test.equal waechter.true('true'), error
test.done()
'false': (test) ->
error = 'must be `false`'
test.equal waechter.false(false), null
test.equal waechter.false(true), error
test.equal waechter.false(null), error
test.equal waechter.false(), error
test.equal waechter.false('false'), error
test.done()
|
[
{
"context": "You're looking at the <a href=\"https://github.com/KyleAMathews/coffee-react-quickstart\">Coffeescript React Quick",
"end": 195,
"score": 0.9867673516273499,
"start": 183,
"tag": "USERNAME",
"value": "KyleAMathews"
},
{
"context": "start</a> project by <a href=\"https:/... | src/scripts/hello_world.cjsx | arjunblj/pyplay | 0 | Link = require('react-router').Link
module.exports = React.createClass
render: ->
<div>
<h1>Hello world!</h1>
<p>You're looking at the <a href="https://github.com/KyleAMathews/coffee-react-quickstart">Coffeescript React Quickstart</a> project by <a href="https://twitter.com/kylemathews">Kyle Mathews</a>.</p>
<p>It has a number of nice goodies baked in including:</p>
<ul>
<li>Live reloading while developing for both CSS <em>and</em> Javascript! This really speeds up development. Live reloading is powered by the <a href="http://webpack.github.io/">Webpack module bundler</a> and <a href="https://github.com/gaearon/react-hot-loader">react-hot-loader</a> projects.</li>
<li>Write your JSX in Coffeescript thanks to <a href="https://github.com/jsdf/coffee-react-transform">coffee-react-transform</a>.</li>
<li>Amazing URL-driven-development (UDD) with the <a href="https://github.com/rackt/react-router">react-router project.</a></li>
<li>Uses <a href="http://gulpjs.com">Gulp</a> for building CSS and Javascript. Run <code>gulp watch</code> for rebuilding css/js on the fly while developing and <code>gulp build</code> to create minified versions for deploying to production.</li>
<li>Easily create a custom font icon! Drop svg files in <code>src/styles/icons</code> and use them like <code>{"<span className='icon-home' />"}</code> (which becomes <span className="icon-home" />) and <code>{"<span className='icon-search' />"}</code> (which becomes <span className="icon-search" />)</li>
<li>Includes <Link to="styleguide">sensible element stylings</Link> and several useful Sass plugins:</li>
<ul>
<li><a href="http://susy.oddbird.net/">Susy</a>: best-of-breed grid system.</li>
<li><a href="https://github.com/Team-Sass/modular-scale">modular-scale</a>: easily create pleasing modular type scales.</li>
<li><a href="https://github.com/jhardy/Sassy-Buttons">Sassy Buttons</a>: flexible button styling.</li>
<li><a href="http://breakpoint-sass.com/">Breakpoint</a>: Super simple media queries.</li>
</ul>
</ul>
</div>
| 208059 | Link = require('react-router').Link
module.exports = React.createClass
render: ->
<div>
<h1>Hello world!</h1>
<p>You're looking at the <a href="https://github.com/KyleAMathews/coffee-react-quickstart">Coffeescript React Quickstart</a> project by <a href="https://twitter.com/kylemathews"><NAME></a>.</p>
<p>It has a number of nice goodies baked in including:</p>
<ul>
<li>Live reloading while developing for both CSS <em>and</em> Javascript! This really speeds up development. Live reloading is powered by the <a href="http://webpack.github.io/">Webpack module bundler</a> and <a href="https://github.com/gaearon/react-hot-loader">react-hot-loader</a> projects.</li>
<li>Write your JSX in Coffeescript thanks to <a href="https://github.com/jsdf/coffee-react-transform">coffee-react-transform</a>.</li>
<li>Amazing URL-driven-development (UDD) with the <a href="https://github.com/rackt/react-router">react-router project.</a></li>
<li>Uses <a href="http://gulpjs.com">Gulp</a> for building CSS and Javascript. Run <code>gulp watch</code> for rebuilding css/js on the fly while developing and <code>gulp build</code> to create minified versions for deploying to production.</li>
<li>Easily create a custom font icon! Drop svg files in <code>src/styles/icons</code> and use them like <code>{"<span className='icon-home' />"}</code> (which becomes <span className="icon-home" />) and <code>{"<span className='icon-search' />"}</code> (which becomes <span className="icon-search" />)</li>
<li>Includes <Link to="styleguide">sensible element stylings</Link> and several useful Sass plugins:</li>
<ul>
<li><a href="http://susy.oddbird.net/">Susy</a>: best-of-breed grid system.</li>
<li><a href="https://github.com/Team-Sass/modular-scale">modular-scale</a>: easily create pleasing modular type scales.</li>
<li><a href="https://github.com/jhardy/Sassy-Buttons">Sassy Buttons</a>: flexible button styling.</li>
<li><a href="http://breakpoint-sass.com/">Breakpoint</a>: Super simple media queries.</li>
</ul>
</ul>
</div>
| true | Link = require('react-router').Link
module.exports = React.createClass
render: ->
<div>
<h1>Hello world!</h1>
<p>You're looking at the <a href="https://github.com/KyleAMathews/coffee-react-quickstart">Coffeescript React Quickstart</a> project by <a href="https://twitter.com/kylemathews">PI:NAME:<NAME>END_PI</a>.</p>
<p>It has a number of nice goodies baked in including:</p>
<ul>
<li>Live reloading while developing for both CSS <em>and</em> Javascript! This really speeds up development. Live reloading is powered by the <a href="http://webpack.github.io/">Webpack module bundler</a> and <a href="https://github.com/gaearon/react-hot-loader">react-hot-loader</a> projects.</li>
<li>Write your JSX in Coffeescript thanks to <a href="https://github.com/jsdf/coffee-react-transform">coffee-react-transform</a>.</li>
<li>Amazing URL-driven-development (UDD) with the <a href="https://github.com/rackt/react-router">react-router project.</a></li>
<li>Uses <a href="http://gulpjs.com">Gulp</a> for building CSS and Javascript. Run <code>gulp watch</code> for rebuilding css/js on the fly while developing and <code>gulp build</code> to create minified versions for deploying to production.</li>
<li>Easily create a custom font icon! Drop svg files in <code>src/styles/icons</code> and use them like <code>{"<span className='icon-home' />"}</code> (which becomes <span className="icon-home" />) and <code>{"<span className='icon-search' />"}</code> (which becomes <span className="icon-search" />)</li>
<li>Includes <Link to="styleguide">sensible element stylings</Link> and several useful Sass plugins:</li>
<ul>
<li><a href="http://susy.oddbird.net/">Susy</a>: best-of-breed grid system.</li>
<li><a href="https://github.com/Team-Sass/modular-scale">modular-scale</a>: easily create pleasing modular type scales.</li>
<li><a href="https://github.com/jhardy/Sassy-Buttons">Sassy Buttons</a>: flexible button styling.</li>
<li><a href="http://breakpoint-sass.com/">Breakpoint</a>: Super simple media queries.</li>
</ul>
</ul>
</div>
|
[
{
"context": "rackEvent('HoC2018 completed', {\n name: name,\n teacherEmail: teacherEmail,\n ",
"end": 909,
"score": 0.9947576522827148,
"start": 905,
"tag": "NAME",
"value": "name"
}
] | app/views/special_event/HoC2018VictoryModal.coffee | cihatislamdede/codecombat | 4,858 | ModalComponent = require 'views/core/ModalComponent'
HoC2018VictoryComponent = require('./HoC2018VictoryModal.vue').default
module.exports = class HoC2018VictoryModal extends ModalComponent
id: 'hoc-victory-modal'
template: require 'templates/core/modal-base-flat'
closeButton: true
VueComponent: HoC2018VictoryComponent
constructor: (options) ->
super(options)
if not options.shareURL
throw new Error("HoC2018VictoryModal requires shareURL value.")
if not options.campaign
throw new Error("HoC2018VictoryModal requires campaign slug.")
@propsData = {
navigateCertificate: (name, teacherEmail, shareURL) =>
url = "/certificates/#{me.id}/anon?campaign=#{options.campaign}&username=#{name}"
application.router.navigate(url, { trigger: true })
if teacherEmail
window.tracker?.trackEvent('HoC2018 completed', {
name: name,
teacherEmail: teacherEmail,
shareURL: shareURL,
certificateURL: url,
userId: me.id
})
,
shareURL: options.shareURL,
fullName: if me.isAnonymous() then "" else me.broadName()
}
| 35783 | ModalComponent = require 'views/core/ModalComponent'
HoC2018VictoryComponent = require('./HoC2018VictoryModal.vue').default
module.exports = class HoC2018VictoryModal extends ModalComponent
id: 'hoc-victory-modal'
template: require 'templates/core/modal-base-flat'
closeButton: true
VueComponent: HoC2018VictoryComponent
constructor: (options) ->
super(options)
if not options.shareURL
throw new Error("HoC2018VictoryModal requires shareURL value.")
if not options.campaign
throw new Error("HoC2018VictoryModal requires campaign slug.")
@propsData = {
navigateCertificate: (name, teacherEmail, shareURL) =>
url = "/certificates/#{me.id}/anon?campaign=#{options.campaign}&username=#{name}"
application.router.navigate(url, { trigger: true })
if teacherEmail
window.tracker?.trackEvent('HoC2018 completed', {
name: <NAME>,
teacherEmail: teacherEmail,
shareURL: shareURL,
certificateURL: url,
userId: me.id
})
,
shareURL: options.shareURL,
fullName: if me.isAnonymous() then "" else me.broadName()
}
| true | ModalComponent = require 'views/core/ModalComponent'
HoC2018VictoryComponent = require('./HoC2018VictoryModal.vue').default
module.exports = class HoC2018VictoryModal extends ModalComponent
id: 'hoc-victory-modal'
template: require 'templates/core/modal-base-flat'
closeButton: true
VueComponent: HoC2018VictoryComponent
constructor: (options) ->
super(options)
if not options.shareURL
throw new Error("HoC2018VictoryModal requires shareURL value.")
if not options.campaign
throw new Error("HoC2018VictoryModal requires campaign slug.")
@propsData = {
navigateCertificate: (name, teacherEmail, shareURL) =>
url = "/certificates/#{me.id}/anon?campaign=#{options.campaign}&username=#{name}"
application.router.navigate(url, { trigger: true })
if teacherEmail
window.tracker?.trackEvent('HoC2018 completed', {
name: PI:NAME:<NAME>END_PI,
teacherEmail: teacherEmail,
shareURL: shareURL,
certificateURL: url,
userId: me.id
})
,
shareURL: options.shareURL,
fullName: if me.isAnonymous() then "" else me.broadName()
}
|
[
{
"context": " module', ->\n\n hubotHear = (message, userName = 'momo', tempo = 40) ->\n beforeEach (done) ->\n r",
"end": 437,
"score": 0.999289870262146,
"start": 433,
"tag": "USERNAME",
"value": "momo"
},
{
"context": "ut (done), tempo\n\n hubot = (message, userName = 'm... | test/phabs_hear_test.coffee | f0ff886f/hubot-phabs | 22 | require('es6-promise').polyfill()
Helper = require('hubot-test-helper')
Hubot = require('../node_modules/hubot')
# helper loads a specific script if it's a file
helper = new Helper('../scripts/phabs_hear.coffee')
nock = require('nock')
sinon = require('sinon')
expect = require('chai').use(require('sinon-chai')).expect
moment = require 'moment'
room = null
describe 'phabs_hear module', ->
hubotHear = (message, userName = 'momo', tempo = 40) ->
beforeEach (done) ->
room.user.say userName, message
setTimeout (done), tempo
hubot = (message, userName = 'momo') ->
hubotHear "@hubot #{message}", userName
hubotResponse = (i = 1) ->
room.messages[i]?[1]
hubotResponseCount = ->
room.messages.length
beforeEach ->
process.env.PHABRICATOR_URL = 'http://example.com'
process.env.PHABRICATOR_API_KEY = 'xxx'
process.env.PHABRICATOR_BOT_PHID = 'PHID-USER-xxx'
process.env.PHABRICATOR_PROJECTS = 'PHID-PROJ-xxx:proj1,PHID-PCOL-yyy:proj2'
room = helper.createRoom { httpd: false }
room.robot.brain.userForId 'user', {
name: 'user'
}
room.robot.brain.userForId 'user_with_email', {
name: 'user_with_email',
email_address: 'user@example.com'
}
room.robot.brain.userForId 'user_with_phid', {
name: 'user_with_phid',
phid: 'PHID-USER-123456789'
}
room.receive = (userName, message) ->
new Promise (resolve) =>
@messages.push [userName, message]
user = room.robot.brain.userForId userName
@robot.receive(new Hubot.TextMessage(user, message), resolve)
afterEach ->
delete process.env.PHABRICATOR_URL
delete process.env.PHABRICATOR_API_KEY
delete process.env.PHABRICATOR_BOT_PHID
delete process.env.PHABRICATOR_PROJECTS
# ---------------------------------------------------------------------------------
context 'someone talks about a task that is blacklisted', ->
beforeEach ->
do nock.disableNetConnect
room.robot.brain.data.phabricator.blacklist = [ 'T42', 'V3' ]
afterEach ->
nock.cleanAll()
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a type of item that is disabled per configuration', ->
beforeEach ->
do nock.disableNetConnect
process.env.PHABRICATOR_ENABLED_ITEMS = 'P,r'
room = helper.createRoom { httpd: false }
afterEach ->
nock.cleanAll()
delete process.env.PHABRICATOR_ENABLED_ITEMS
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a type of item but the hear is totaly disabled', ->
beforeEach ->
do nock.disableNetConnect
process.env.PHABRICATOR_ENABLED_ITEMS = ''
room = helper.createRoom { httpd: false }
afterEach ->
nock.cleanAll()
delete process.env.PHABRICATOR_ENABLED_ITEMS
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a task', ->
context 'when the task is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { error_info: 'No such Maniphest task exists.' })
afterEach ->
nock.cleanAll()
context 'whatever about T424242 or something', ->
hubot 'whatever about T424242 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'oops T424242 No such Maniphest task exists.'
context 'when it is an open task', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { result: {
status: 'open',
isClosed: false,
title: 'some task',
priority: 'Low',
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'http://example.com/T42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'http://example.com/T42 - some task (Low, open 2 months ago)'
context 'whatever about http://example.com/T42 or something', ->
hubot 'whatever about http://example.com/T42 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'T42 - some task (Low, open 2 months ago)'
context 'when it is a closed task', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { result: {
status: 'resolved',
isClosed: true,
title: 'some task',
dateModified: moment().subtract(2, 'months').unix(),
priority: 'Low',
uri: 'http://example.com/T42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'gives information about the Task, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/T42 - some task (Low, resolved 2 months ago)'
context 'whatever about http://example.com/T42 or something', ->
hubot 'whatever about http://example.com/T42 or something'
it 'gives information about the Task, without uri', ->
expect(hubotResponse()).to.eql 'T42 - some task (Low, resolved 2 months ago)'
# ---------------------------------------------------------------------------------
context 'someone talks about a file', ->
context 'when the file is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/file.info')
.reply(200, { error_info: 'No such file exists.' })
afterEach ->
nock.cleanAll()
context 'whatever about F424242 or something', ->
hubot 'whatever about F424242 or something'
it "warns the user that this File doesn't exist", ->
expect(hubotResponse()).to.eql 'oops F424242 No such file exists.'
context 'when it is an existing file', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/file.info')
.reply(200, { result: {
name: 'image.png',
mimeType: 'image/png',
byteSize: '1409',
uri: 'https://example.com/F42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about F42 or something', ->
hubot 'whatever about F42 or something'
it 'gives information about the File, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/F42 - image.png (image/png 1.38 kB)'
context 'whatever about http://example.com/F42 or something', ->
hubot 'whatever about http://example.com/F42 or something'
it 'gives information about the File, without uri', ->
expect(hubotResponse()).to.eql 'F42 - image.png (image/png 1.38 kB)'
# ---------------------------------------------------------------------------------
context 'someone talks about a paste', ->
context 'when the Paste is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about P424242 or something', ->
hubot 'whatever about P424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops P424242 was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about P424242 or something', ->
hubot 'whatever about P424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops P424242 http error 404'
context 'when it is an existing Paste without a language set', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(200, { result: {
'PHID-PSTE-hdxawtm6psdtsxy3nyzk': {
title: 'file.coffee',
language: null,
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'https://example.com/P42'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about P42 or something', ->
hubot 'whatever about P42 or something'
it 'gives information about the Paste, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/P42 - file.coffee (created 2 months ago)'
context 'whatever about http://example.com/P42 or something', ->
hubot 'whatever about http://example.com/P42 or something'
it 'gives information about the Paste, without uri', ->
expect(hubotResponse()).to.eql 'P42 - file.coffee (created 2 months ago)'
context 'when it is an existing Paste with a language set', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(200, { result: {
'PHID-PSTE-hdxawtm6psdtsxy3nyzk': {
title: 'file.coffee',
language: 'coffee',
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'https://example.com/P42'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about P42 or something', ->
hubot 'whatever about P42 or something'
it 'gives information about the Paste, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/P42 - file.coffee (coffee, created 2 months ago)'
context 'whatever about http://example.com/P42 or something', ->
hubot 'whatever about http://example.com/P42 or something'
it 'gives information about the Paste, without uri', ->
expect(hubotResponse()).to.eql 'P42 - file.coffee (coffee, created 2 months ago)'
# ---------------------------------------------------------------------------------
context 'someone talks about a mock', ->
context 'when the mock is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about M424242 or something', ->
hubot 'whatever about M424242 or something'
it "warns the user that this Mock doesn't exist", ->
expect(hubotResponse()).to.eql 'oops M424242 was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about M424242 or something', ->
hubot 'whatever about M424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops M424242 http error 404'
context 'when it is an existing Mock without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'M42': {
'phid': 'PHID-MOCK-6g6p65ez5ctxudji5twy',
'uri': 'https://example.com/M42',
'typeName': 'Pholio Mock',
'type': 'MOCK',
'name': 'M42',
'fullName': 'M42: Test Mock',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about M42 or something', ->
hubot 'whatever about M42 or something'
it 'gives information about the mock, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/M42 - Test Mock'
context 'whatever about http://example.com/M42 or something', ->
hubot 'whatever about http://example.com/M42 or something'
it 'gives information about the mock, without uri', ->
expect(hubotResponse()).to.eql 'M42: Test Mock'
context 'when it is an existing Mock with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'M42': {
'phid': 'PHID-MOCK-6g6p65ez5ctxudji5twy',
'uri': 'https://example.com/M42',
'typeName': 'Pholio Mock',
'type': 'MOCK',
'name': 'M42',
'fullName': 'M42: Test Mock',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about M42 or something', ->
hubot 'whatever about M42 or something'
it 'gives information about the mock, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/M42 - Test Mock (closed)'
context 'whatever about http://example.com/M42 or something', ->
hubot 'whatever about http://example.com/M42 or something'
it 'gives information about the mock, without uri', ->
expect(hubotResponse()).to.eql 'M42: Test Mock (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a build', ->
context 'when the build is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about B424242 or something', ->
hubot 'whatever about B424242 or something'
it "warns the user that this build doesn't exist", ->
expect(hubotResponse()).to.eql 'oops B424242 was not found.'
context 'when it is an existing build without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'B12999': {
'phid': 'PHID-HMBB-zeg6ru5vnd4fbp744s5f',
'uri': 'https://example.com/B12999',
'typeName': 'Buildable',
'type': 'HMBB',
'name': 'B12999',
'fullName': 'B12999: rP46ceba728fee: (stable) Fix an issue',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about B12999 or something', ->
hubot 'whatever about B12999 or something'
it 'gives information about the build, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/B12999 - rP46ceba728fee: (stable) Fix an issue'
context 'whatever about http://example.com/B12999 or something', ->
hubot 'whatever about http://example.com/B12999 or something'
it 'gives information about the build, without uri', ->
expect(hubotResponse()).to.eql 'B12999: rP46ceba728fee: (stable) Fix an issue'
context 'when it is an existing build with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'B12999': {
'phid': 'PHID-HMBB-zeg6ru5vnd4fbp744s5f',
'uri': 'https://example.com/B12999',
'typeName': 'Buildable',
'type': 'HMBB',
'name': 'B12999',
'fullName': 'B12999: rP46ceba728fee: (stable) Fix an issue',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about B12999 or something', ->
hubot 'whatever about B12999 or something'
it 'gives information about the build, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/B12999 - rP46ceba728fee: (stable) Fix an issue (closed)'
context 'whatever about http://example.com/B12999 or something', ->
hubot 'whatever about http://example.com/B12999 or something'
it 'gives information about the build, without uri', ->
expect(hubotResponse()).to.eql 'B12999: rP46ceba728fee: (stable) Fix an issue (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a question', ->
context 'when the question is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about Q424242 or something', ->
hubot 'whatever about Q424242 or something'
it "warns the user that this question doesn't exist", ->
expect(hubotResponse()).to.eql 'oops Q424242 was not found.'
context 'when it is an existing question without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'Q434': {
'phid': 'PHID-QUES-j22mqmbhb3mbcd2it7zs',
'uri': 'https://example.com/Q434',
'typeName': 'Ponder Question',
'type': 'QUES',
'name': 'Q434',
'fullName': 'Q434: Width in wiki pages',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about Q434 or something', ->
hubot 'whatever about Q434 or something'
it 'gives information about the question, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/Q434 - Width in wiki pages'
context 'whatever about http://example.com/Q434 or something', ->
hubot 'whatever about http://example.com/Q434 or something'
it 'gives information about the question, without uri', ->
expect(hubotResponse()).to.eql 'Q434: Width in wiki pages'
context 'when it is an existing question with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'Q434': {
'phid': 'PHID-QUES-j22mqmbhb3mbcd2it7zs',
'uri': 'https://example.com/Q434',
'typeName': 'Ponder Question',
'type': 'QUES',
'name': 'Q434',
'fullName': 'Q434: Width in wiki pages',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about Q434 or something', ->
hubot 'whatever about Q434 or something'
it 'gives information about the question, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/Q434 - Width in wiki pages (closed)'
context 'whatever about http://example.com/Q434 or something', ->
hubot 'whatever about http://example.com/Q434 or something'
it 'gives information about the question, without uri', ->
expect(hubotResponse()).to.eql 'Q434: Width in wiki pages (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a legalpad', ->
context 'when the legalpad is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about L424242 or something', ->
hubot 'whatever about L424242 or something'
it "warns the user that this legalpad doesn't exist", ->
expect(hubotResponse()).to.eql 'oops L424242 was not found.'
context 'when it is an existing legalpad without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'L38': {
'phid': 'PHID-LEGD-chmhkotszvqaucdrvh5t',
'uri': 'https://example.com/L38',
'typeName': 'Legalpad Document',
'type': 'LEGD',
'name': 'L38 Test',
'fullName': 'L38 Test',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about L38 or something', ->
hubot 'whatever about L38 or something'
it 'gives information about the legalpad, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/L38 - Test'
context 'whatever about http://example.com/L38 or something', ->
hubot 'whatever about http://example.com/L38 or something'
it 'gives information about the legalpad, without uri', ->
expect(hubotResponse()).to.eql 'L38 Test'
context 'when it is an existing legalpad with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'L38': {
'phid': 'PHID-LEGD-chmhkotszvqaucdrvh5t',
'uri': 'https://example.com/L38',
'typeName': 'Legalpad Document',
'type': 'LEGD',
'name': 'L38 Test',
'fullName': 'L38 Test',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about L38 or something', ->
hubot 'whatever about L38 or something'
it 'gives information about the legalpad, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/L38 - Test (closed)'
context 'whatever about http://example.com/L38 or something', ->
hubot 'whatever about http://example.com/L38 or something'
it 'gives information about the legalpad, without uri', ->
expect(hubotResponse()).to.eql 'L38 Test (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a vote', ->
context 'when the vote is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about V424242 or something', ->
hubot 'whatever about V424242 or something'
it "warns the user that this vote doesn't exist", ->
expect(hubotResponse()).to.eql 'oops V424242 was not found.'
context 'when it is an existing vote without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'V30': {
'phid': 'PHID-POLL-hqztsdcva3jkucu4mmv2',
'uri': 'https://example.com/V30',
'typeName': 'Slowvote Poll',
'type': 'POLL',
'name': 'V30',
'fullName': 'V30: This is a poll',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about V30 or something', ->
hubot 'whatever about V30 or something'
it 'gives information about the vote, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/V30 - This is a poll'
context 'whatever about http://example.com/V30 or something', ->
hubot 'whatever about http://example.com/V30 or something'
it 'gives information about the vote, without uri', ->
expect(hubotResponse()).to.eql 'V30: This is a poll'
context 'when it is an existing vote with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'V30': {
'phid': 'PHID-POLL-hqztsdcva3jkucu4mmv2',
'uri': 'https://example.com/V30',
'typeName': 'Slowvote Poll',
'type': 'POLL',
'name': 'V30',
'fullName': 'V30: This is a poll',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about V30 or something', ->
hubot 'whatever about V30 or something'
it 'gives information about the vote, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/V30 - This is a poll (closed)'
context 'whatever about http://example.com/V30 or something', ->
hubot 'whatever about http://example.com/V30 or something'
it 'gives information about the vote, without uri', ->
expect(hubotResponse()).to.eql 'V30: This is a poll (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a diff', ->
context 'when the diff is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about D555555 or something', ->
hubot 'whatever about D555555 or something'
it "warns the user that this Diff doesn't exist", ->
expect(hubotResponse()).to.eql 'oops D555555 was not found.'
context 'when it is an open diff', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'D55': {
'phid': 'PHID-DREV-hqztsdcva3jkucu4mmv2',
'uri': 'http://example.com/D55',
'typeName': 'Differential Revision',
'type': 'DREV',
'name': 'D55',
'fullName': 'D55: some diff',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about D55 or something', ->
hubot 'whatever about D55 or something'
it 'gives information about the open Diff, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/D55 - some diff'
context 'whatever about http://example.com/D55 or something', ->
hubot 'whatever about http://example.com/D55 or something'
it 'gives information about the open Diff, without uri', ->
expect(hubotResponse()).to.eql 'D55: some diff'
context 'when it is a closed diff', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'D55': {
'phid': 'PHID-DREV-hqztsdcva3jkucu4mmv2',
'uri': 'http://example.com/D55',
'typeName': 'Differential Revision',
'type': 'DREV',
'name': 'D55',
'fullName': 'D55: some diff',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about D55 or something', ->
hubot 'whatever about D55 or something'
it 'gives information about the closed Diff, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/D55 - some diff (closed)'
context 'whatever about http://example.com/D55 or something', ->
hubot 'whatever about http://example.com/D55 or something'
it 'gives information about the closed Diff, without uri', ->
expect(hubotResponse()).to.eql 'D55: some diff (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a commit', ->
context 'when the commit is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about rP156f7196453c or something', ->
hubot 'whatever about rP156f7196453c or something'
it "warns the user that this commit doesn't exist", ->
expect(hubotResponse()).to.eql 'oops rP156f7196453c was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops rTULIP156f7196453c http error 404'
context 'when it is an existing commit without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'rTULIP156f7196453c': {
'phid': 'PHID-CMIT-7dpynrtygtd7z3bv7f64',
'uri': 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b',
'typeName': 'Diffusion Commit',
'type': 'CMIT',
'name': 'rTULIP156f7196453c',
'fullName': 'rTULIP156f7196453c: (stable) Promote 2016 Week 28',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it 'gives information about the Commit, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b - ' +
'(stable) Promote 2016 Week 28'
context 'whatever about http://example.com/rTULIP156f7196453c or something', ->
hubot 'whatever about http://example.com/rTULIP156f7196453c or something'
it 'gives information about the Commit, without uri', ->
expect(hubotResponse()).to.eql 'rTULIP156f7196453c: (stable) Promote 2016 Week 28'
context 'when it is an existing commit with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'rTULIP156f7196453c': {
'phid': 'PHID-CMIT-7dpynrtygtd7z3bv7f64',
'uri': 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b',
'typeName': 'Diffusion Commit',
'type': 'CMIT',
'name': 'rTULIP156f7196453c',
'fullName': 'rTULIP156f7196453c: (stable) Promote 2016 Week 28',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it 'gives information about the Commit, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b - ' +
'(stable) Promote 2016 Week 28 (closed)'
context 'whatever about http://example.com/rTULIP156f7196453c or something', ->
hubot 'whatever about http://example.com/rTULIP156f7196453c or something'
it 'gives information about the Commit, without uri', ->
expect(hubotResponse())
.to.eql 'rTULIP156f7196453c: (stable) Promote 2016 Week 28 (closed)'
| 113131 | require('es6-promise').polyfill()
Helper = require('hubot-test-helper')
Hubot = require('../node_modules/hubot')
# helper loads a specific script if it's a file
helper = new Helper('../scripts/phabs_hear.coffee')
nock = require('nock')
sinon = require('sinon')
expect = require('chai').use(require('sinon-chai')).expect
moment = require 'moment'
room = null
# Tests for the phabs_hear module: the robot passively listens for Phabricator
# item ids (T.., F.., P.., M.., B.., Q.., L.., V.., D.., rREPOhash) and expands
# them with details fetched from the Conduit API (HTTP mocked with nock).
describe 'phabs_hear module', ->
  # Says `message` as `userName` in the room, then waits `tempo` ms so the
  # robot's async processing (and the nock round-trip) completes before `it`.
  hubotHear = (message, userName = 'momo', tempo = 40) ->
    beforeEach (done) ->
      room.user.say userName, message
      setTimeout (done), tempo
  # Convenience wrapper: address the message to @hubot.
  hubot = (message, userName = 'momo') ->
    hubotHear "@hubot #{message}", userName
  # The i-th recorded message text (index 0 is the user's own message).
  hubotResponse = (i = 1) ->
    room.messages[i]?[1]
  hubotResponseCount = ->
    room.messages.length
  beforeEach ->
    # Minimal Phabricator configuration required by the script under test.
    process.env.PHABRICATOR_URL = 'http://example.com'
    process.env.PHABRICATOR_API_KEY = 'xxx'
    process.env.PHABRICATOR_BOT_PHID = 'PHID-USER-xxx'
    process.env.PHABRICATOR_PROJECTS = 'PHID-PROJ-xxx:proj1,PHID-PCOL-yyy:proj2'
    room = helper.createRoom { httpd: false }
    # Seed the brain with users holding varying amounts of identity data.
    room.robot.brain.userForId 'user', {
      name: 'user'
    }
    room.robot.brain.userForId 'user_with_email', {
      name: 'user_with_email',
      email_address: '<EMAIL>'
    }
    room.robot.brain.userForId 'user_with_phid', {
      name: 'user_with_phid',
      phid: 'PHID-USER-123456789'
    }
    # Override receive so sent messages are recorded and the promise resolves
    # only once the robot has fully processed the TextMessage.
    room.receive = (userName, message) ->
      new Promise (resolve) =>
        @messages.push [userName, message]
        user = room.robot.brain.userForId userName
        @robot.receive(new Hubot.TextMessage(user, message), resolve)
  afterEach ->
    # Clean the environment so tests do not leak configuration.
    delete process.env.PHABRICATOR_URL
    delete process.env.PHABRICATOR_API_KEY
    delete process.env.PHABRICATOR_BOT_PHID
    delete process.env.PHABRICATOR_PROJECTS
# ---------------------------------------------------------------------------------
  # Ids present in the brain's blacklist must be ignored entirely.
  context 'someone talks about a task that is blacklisted', ->
    beforeEach ->
      do nock.disableNetConnect
      room.robot.brain.data.phabricator.blacklist = [ 'T42', 'V3' ]
    afterEach ->
      nock.cleanAll()
      room.robot.brain.data.phabricator.blacklist = [ ]
    context 'whatever about T42 or something', ->
      hubot 'whatever about T42 or something'
      it 'does not say anything', ->
        # Only the user's own message is in the room; no bot reply at index 1.
        expect(hubotResponseCount()).to.eql 1
        expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
  # Only item types listed in PHABRICATOR_ENABLED_ITEMS ('P,r' here) are
  # expanded, so a T-task mention must be ignored.
  context 'someone talks about a type of item that is disabled per configuration', ->
    beforeEach ->
      do nock.disableNetConnect
      process.env.PHABRICATOR_ENABLED_ITEMS = 'P,r'
      # Re-create the room so the script reloads with the new env var.
      room = helper.createRoom { httpd: false }
    afterEach ->
      nock.cleanAll()
      delete process.env.PHABRICATOR_ENABLED_ITEMS
      room.robot.brain.data.phabricator.blacklist = [ ]
    context 'whatever about T42 or something', ->
      hubot 'whatever about T42 or something'
      it 'does not say anything', ->
        expect(hubotResponseCount()).to.eql 1
        expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a type of item but the hear is totaly disabled', ->
beforeEach ->
do nock.disableNetConnect
process.env.PHABRICATOR_ENABLED_ITEMS = ''
room = helper.createRoom { httpd: false }
afterEach ->
nock.cleanAll()
delete process.env.PHABRICATOR_ENABLED_ITEMS
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a task', ->
context 'when the task is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { error_info: 'No such Maniphest task exists.' })
afterEach ->
nock.cleanAll()
context 'whatever about T424242 or something', ->
hubot 'whatever about T424242 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'oops T424242 No such Maniphest task exists.'
context 'when it is an open task', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { result: {
status: 'open',
isClosed: false,
title: 'some task',
priority: 'Low',
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'http://example.com/T42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'http://example.com/T42 - some task (Low, open 2 months ago)'
context 'whatever about http://example.com/T42 or something', ->
hubot 'whatever about http://example.com/T42 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'T42 - some task (Low, open 2 months ago)'
context 'when it is a closed task', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { result: {
status: 'resolved',
isClosed: true,
title: 'some task',
dateModified: moment().subtract(2, 'months').unix(),
priority: 'Low',
uri: 'http://example.com/T42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'gives information about the Task, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/T42 - some task (Low, resolved 2 months ago)'
context 'whatever about http://example.com/T42 or something', ->
hubot 'whatever about http://example.com/T42 or something'
it 'gives information about the Task, without uri', ->
expect(hubotResponse()).to.eql 'T42 - some task (Low, resolved 2 months ago)'
# ---------------------------------------------------------------------------------
  # File mentions are resolved through file.info; size is humanized (1409 B -> 1.38 kB).
  context 'someone talks about a file', ->
    context 'when the file is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/file.info')
        .reply(200, { error_info: 'No such file exists.' })
      afterEach ->
        nock.cleanAll()
      context 'whatever about F424242 or something', ->
        hubot 'whatever about F424242 or something'
        it "warns the user that this File doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops F424242 No such file exists.'
    context 'when it is an existing file', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/file.info')
        .reply(200, { result: {
          name: 'image.png',
          mimeType: 'image/png',
          byteSize: '1409',
          uri: 'https://example.com/F42'
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about F42 or something', ->
        hubot 'whatever about F42 or something'
        it 'gives information about the File, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/F42 - image.png (image/png 1.38 kB)'
      context 'whatever about http://example.com/F42 or something', ->
        hubot 'whatever about http://example.com/F42 or something'
        it 'gives information about the File, without uri', ->
          expect(hubotResponse()).to.eql 'F42 - image.png (image/png 1.38 kB)'
# ---------------------------------------------------------------------------------
  # Paste mentions are resolved through paste.query; the reply includes the
  # language only when the paste has one set.
  context 'someone talks about a paste', ->
    context 'when the Paste is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/paste.query')
        .reply(200, { result: { } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about P424242 or something', ->
        hubot 'whatever about P424242 or something'
        it "warns the user that this Paste doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops P424242 was not found.'
    # Non-2xx responses surface as 'http error <code>'.
    context 'when the request returns an error', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/paste.query')
        .reply(404, { message: 'not found' })
      afterEach ->
        nock.cleanAll()
      context 'whatever about P424242 or something', ->
        hubot 'whatever about P424242 or something'
        it "warns the user that this Paste doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops P424242 http error 404'
    context 'when it is an existing Paste without a language set', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/paste.query')
        .reply(200, { result: {
          'PHID-PSTE-hdxawtm6psdtsxy3nyzk': {
            title: 'file.coffee',
            language: null,
            dateCreated: moment().subtract(2, 'months').unix(),
            uri: 'https://example.com/P42'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about P42 or something', ->
        hubot 'whatever about P42 or something'
        it 'gives information about the Paste, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/P42 - file.coffee (created 2 months ago)'
      context 'whatever about http://example.com/P42 or something', ->
        hubot 'whatever about http://example.com/P42 or something'
        it 'gives information about the Paste, without uri', ->
          expect(hubotResponse()).to.eql 'P42 - file.coffee (created 2 months ago)'
    context 'when it is an existing Paste with a language set', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/paste.query')
        .reply(200, { result: {
          'PHID-PSTE-hdxawtm6psdtsxy3nyzk': {
            title: 'file.coffee',
            language: 'coffee',
            dateCreated: moment().subtract(2, 'months').unix(),
            uri: 'https://example.com/P42'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about P42 or something', ->
        hubot 'whatever about P42 or something'
        it 'gives information about the Paste, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/P42 - file.coffee (coffee, created 2 months ago)'
      context 'whatever about http://example.com/P42 or something', ->
        hubot 'whatever about http://example.com/P42 or something'
        it 'gives information about the Paste, without uri', ->
          expect(hubotResponse()).to.eql 'P42 - file.coffee (coffee, created 2 months ago)'
# ---------------------------------------------------------------------------------
context 'someone talks about a mock', ->
context 'when the mock is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about M424242 or something', ->
hubot 'whatever about M424242 or something'
it "warns the user that this Mock doesn't exist", ->
expect(hubotResponse()).to.eql 'oops M424242 was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about M424242 or something', ->
hubot 'whatever about M424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops M424242 http error 404'
context 'when it is an existing Mock without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'M42': {
'phid': 'PHID-MOCK-6g6p65ez5ctxudji5twy',
'uri': 'https://example.com/M42',
'typeName': 'Pholio Mock',
'type': 'MOCK',
'name': 'M42',
'fullName': 'M42: Test Mock',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about M42 or something', ->
hubot 'whatever about M42 or something'
it 'gives information about the mock, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/M42 - Test Mock'
context 'whatever about http://example.com/M42 or something', ->
hubot 'whatever about http://example.com/M42 or something'
it 'gives information about the mock, without uri', ->
expect(hubotResponse()).to.eql 'M42: Test Mock'
context 'when it is an existing Mock with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'M42': {
'phid': 'PHID-MOCK-6g6p65ez5ctxudji5twy',
'uri': 'https://example.com/M42',
'typeName': 'Pholio Mock',
'type': 'MOCK',
'name': 'M42',
'fullName': 'M42: Test Mock',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about M42 or something', ->
hubot 'whatever about M42 or something'
it 'gives information about the mock, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/M42 - Test Mock (closed)'
context 'whatever about http://example.com/M42 or something', ->
hubot 'whatever about http://example.com/M42 or something'
it 'gives information about the mock, without uri', ->
expect(hubotResponse()).to.eql 'M42: Test Mock (closed)'
# ---------------------------------------------------------------------------------
  # Harbormaster buildable mentions are resolved through phid.lookup.
  context 'someone talks about a build', ->
    context 'when the build is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: { } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about B424242 or something', ->
        hubot 'whatever about B424242 or something'
        it "warns the user that this build doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops B424242 was not found.'
    context 'when it is an existing build without a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'B12999': {
            'phid': 'PHID-HMBB-zeg6ru5vnd4fbp744s5f',
            'uri': 'https://example.com/B12999',
            'typeName': 'Buildable',
            'type': 'HMBB',
            'name': 'B12999',
            'fullName': 'B12999: rP46ceba728fee: (stable) Fix an issue',
            'status': 'open'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about B12999 or something', ->
        hubot 'whatever about B12999 or something'
        it 'gives information about the build, including uri', ->
          expect(hubotResponse())
            .to.eql 'https://example.com/B12999 - rP46ceba728fee: (stable) Fix an issue'
      context 'whatever about http://example.com/B12999 or something', ->
        hubot 'whatever about http://example.com/B12999 or something'
        it 'gives information about the build, without uri', ->
          expect(hubotResponse()).to.eql 'B12999: rP46ceba728fee: (stable) Fix an issue'
    context 'when it is an existing build with a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'B12999': {
            'phid': 'PHID-HMBB-zeg6ru5vnd4fbp744s5f',
            'uri': 'https://example.com/B12999',
            'typeName': 'Buildable',
            'type': 'HMBB',
            'name': 'B12999',
            'fullName': 'B12999: rP46ceba728fee: (stable) Fix an issue',
            'status': 'closed'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about B12999 or something', ->
        hubot 'whatever about B12999 or something'
        it 'gives information about the build, including uri', ->
          expect(hubotResponse())
            .to.eql 'https://example.com/B12999 - rP46ceba728fee: (stable) Fix an issue (closed)'
      context 'whatever about http://example.com/B12999 or something', ->
        hubot 'whatever about http://example.com/B12999 or something'
        it 'gives information about the build, without uri', ->
          expect(hubotResponse()).to.eql 'B12999: rP46ceba728fee: (stable) Fix an issue (closed)'
# ---------------------------------------------------------------------------------
  # Ponder question mentions are resolved through phid.lookup.
  context 'someone talks about a question', ->
    context 'when the question is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: { } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about Q424242 or something', ->
        hubot 'whatever about Q424242 or something'
        it "warns the user that this question doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops Q424242 was not found.'
    context 'when it is an existing question without a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'Q434': {
            'phid': 'PHID-QUES-j22mqmbhb3mbcd2it7zs',
            'uri': 'https://example.com/Q434',
            'typeName': 'Ponder Question',
            'type': 'QUES',
            'name': 'Q434',
            'fullName': 'Q434: Width in wiki pages',
            'status': 'open'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about Q434 or something', ->
        hubot 'whatever about Q434 or something'
        it 'gives information about the question, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/Q434 - Width in wiki pages'
      context 'whatever about http://example.com/Q434 or something', ->
        hubot 'whatever about http://example.com/Q434 or something'
        it 'gives information about the question, without uri', ->
          expect(hubotResponse()).to.eql 'Q434: Width in wiki pages'
    context 'when it is an existing question with a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'Q434': {
            'phid': 'PHID-QUES-j22mqmbhb3mbcd2it7zs',
            'uri': 'https://example.com/Q434',
            'typeName': 'Ponder Question',
            'type': 'QUES',
            'name': 'Q434',
            'fullName': 'Q434: Width in wiki pages',
            'status': 'closed'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about Q434 or something', ->
        hubot 'whatever about Q434 or something'
        it 'gives information about the question, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/Q434 - Width in wiki pages (closed)'
      context 'whatever about http://example.com/Q434 or something', ->
        hubot 'whatever about http://example.com/Q434 or something'
        it 'gives information about the question, without uri', ->
          expect(hubotResponse()).to.eql 'Q434: Width in wiki pages (closed)'
# ---------------------------------------------------------------------------------
  # Legalpad document mentions are resolved through phid.lookup.
  # Note: legalpad fullNames have no colon ('L38 Test'), unlike other types.
  context 'someone talks about a legalpad', ->
    context 'when the legalpad is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: { } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about L424242 or something', ->
        hubot 'whatever about L424242 or something'
        it "warns the user that this legalpad doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops L424242 was not found.'
    context 'when it is an existing legalpad without a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'L38': {
            'phid': 'PHID-LEGD-chmhkotszvqaucdrvh5t',
            'uri': 'https://example.com/L38',
            'typeName': 'Legalpad Document',
            'type': 'LEGD',
            'name': 'L38 Test',
            'fullName': 'L38 Test',
            'status': 'open'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about L38 or something', ->
        hubot 'whatever about L38 or something'
        it 'gives information about the legalpad, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/L38 - Test'
      context 'whatever about http://example.com/L38 or something', ->
        hubot 'whatever about http://example.com/L38 or something'
        it 'gives information about the legalpad, without uri', ->
          expect(hubotResponse()).to.eql 'L38 Test'
    context 'when it is an existing legalpad with a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'L38': {
            'phid': 'PHID-LEGD-chmhkotszvqaucdrvh5t',
            'uri': 'https://example.com/L38',
            'typeName': 'Legalpad Document',
            'type': 'LEGD',
            'name': 'L38 Test',
            'fullName': 'L38 Test',
            'status': 'closed'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about L38 or something', ->
        hubot 'whatever about L38 or something'
        it 'gives information about the legalpad, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/L38 - Test (closed)'
      context 'whatever about http://example.com/L38 or something', ->
        hubot 'whatever about http://example.com/L38 or something'
        it 'gives information about the legalpad, without uri', ->
          expect(hubotResponse()).to.eql 'L38 Test (closed)'
# ---------------------------------------------------------------------------------
  # Slowvote poll mentions are resolved through phid.lookup.
  context 'someone talks about a vote', ->
    context 'when the vote is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: { } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about V424242 or something', ->
        hubot 'whatever about V424242 or something'
        it "warns the user that this vote doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops V424242 was not found.'
    context 'when it is an existing vote without a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'V30': {
            'phid': 'PHID-POLL-hqztsdcva3jkucu4mmv2',
            'uri': 'https://example.com/V30',
            'typeName': 'Slowvote Poll',
            'type': 'POLL',
            'name': 'V30',
            'fullName': 'V30: This is a poll',
            'status': 'open'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about V30 or something', ->
        hubot 'whatever about V30 or something'
        it 'gives information about the vote, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/V30 - This is a poll'
      context 'whatever about http://example.com/V30 or something', ->
        hubot 'whatever about http://example.com/V30 or something'
        it 'gives information about the vote, without uri', ->
          expect(hubotResponse()).to.eql 'V30: This is a poll'
    context 'when it is an existing vote with a status closed', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'V30': {
            'phid': 'PHID-POLL-hqztsdcva3jkucu4mmv2',
            'uri': 'https://example.com/V30',
            'typeName': 'Slowvote Poll',
            'type': 'POLL',
            'name': 'V30',
            'fullName': 'V30: This is a poll',
            'status': 'closed'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about V30 or something', ->
        hubot 'whatever about V30 or something'
        it 'gives information about the vote, including uri', ->
          expect(hubotResponse()).to.eql 'https://example.com/V30 - This is a poll (closed)'
      context 'whatever about http://example.com/V30 or something', ->
        hubot 'whatever about http://example.com/V30 or something'
        it 'gives information about the vote, without uri', ->
          expect(hubotResponse()).to.eql 'V30: This is a poll (closed)'
# ---------------------------------------------------------------------------------
  # Differential revision mentions are resolved through phid.lookup.
  context 'someone talks about a diff', ->
    context 'when the diff is unknown', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: { } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about D555555 or something', ->
        hubot 'whatever about D555555 or something'
        it "warns the user that this Diff doesn't exist", ->
          expect(hubotResponse()).to.eql 'oops D555555 was not found.'
    context 'when it is an open diff', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'D55': {
            'phid': 'PHID-DREV-hqztsdcva3jkucu4mmv2',
            'uri': 'http://example.com/D55',
            'typeName': 'Differential Revision',
            'type': 'DREV',
            'name': 'D55',
            'fullName': 'D55: some diff',
            'status': 'open'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about D55 or something', ->
        hubot 'whatever about D55 or something'
        it 'gives information about the open Diff, including uri', ->
          expect(hubotResponse()).to.eql 'http://example.com/D55 - some diff'
      context 'whatever about http://example.com/D55 or something', ->
        hubot 'whatever about http://example.com/D55 or something'
        it 'gives information about the open Diff, without uri', ->
          expect(hubotResponse()).to.eql 'D55: some diff'
    context 'when it is a closed diff', ->
      beforeEach ->
        do nock.disableNetConnect
        nock(process.env.PHABRICATOR_URL)
        .get('/api/phid.lookup')
        .reply(200, { result: {
          'D55': {
            'phid': 'PHID-DREV-hqztsdcva3jkucu4mmv2',
            'uri': 'http://example.com/D55',
            'typeName': 'Differential Revision',
            'type': 'DREV',
            'name': 'D55',
            'fullName': 'D55: some diff',
            'status': 'closed'
          }
        } })
      afterEach ->
        nock.cleanAll()
      context 'whatever about D55 or something', ->
        hubot 'whatever about D55 or something'
        it 'gives information about the closed Diff, including uri', ->
          expect(hubotResponse()).to.eql 'http://example.com/D55 - some diff (closed)'
      context 'whatever about http://example.com/D55 or something', ->
        hubot 'whatever about http://example.com/D55 or something'
        it 'gives information about the closed Diff, without uri', ->
          expect(hubotResponse()).to.eql 'D55: some diff (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a commit', ->
context 'when the commit is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about rP156f7196453c or something', ->
hubot 'whatever about rP156f7196453c or something'
it "warns the user that this commit doesn't exist", ->
expect(hubotResponse()).to.eql 'oops rP156f7196453c was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops rTULIP156f7196453c http error 404'
context 'when it is an existing commit without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'rTULIP156f7196453c': {
'phid': 'PHID-CMIT-7dpynrtygtd7z3bv7f64',
'uri': 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b',
'typeName': 'Diffusion Commit',
'type': 'CMIT',
'name': 'rTULIP156f7196453c',
'fullName': 'rTULIP156f7196453c: (stable) Promote 2016 Week 28',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it 'gives information about the Commit, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b - ' +
'(stable) Promote 2016 Week 28'
context 'whatever about http://example.com/rTULIP156f7196453c or something', ->
hubot 'whatever about http://example.com/rTULIP156f7196453c or something'
it 'gives information about the Commit, without uri', ->
expect(hubotResponse()).to.eql 'rTULIP156f7196453c: (stable) Promote 2016 Week 28'
context 'when it is an existing commit with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'rTULIP156f7196453c': {
'phid': 'PHID-CMIT-7dpynrtygtd7z3bv7f64',
'uri': 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b',
'typeName': 'Diffusion Commit',
'type': 'CMIT',
'name': 'rTULIP156f7196453c',
'fullName': 'rTULIP156f7196453c: (stable) Promote 2016 Week 28',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it 'gives information about the Commit, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b - ' +
'(stable) Promote 2016 Week 28 (closed)'
context 'whatever about http://example.com/rTULIP156f7196453c or something', ->
hubot 'whatever about http://example.com/rTULIP156f7196453c or something'
it 'gives information about the Commit, without uri', ->
expect(hubotResponse())
.to.eql 'rTULIP156f7196453c: (stable) Promote 2016 Week 28 (closed)'
require('es6-promise').polyfill()
Helper = require('hubot-test-helper')
Hubot = require('../node_modules/hubot')
# helper loads a specific script if it's a file
helper = new Helper('../scripts/phabs_hear.coffee')
nock = require('nock')
sinon = require('sinon')
expect = require('chai').use(require('sinon-chai')).expect
moment = require 'moment'
room = null
# Tests for the phabs_hear module: the robot passively listens for Phabricator
# item ids and expands them via the Conduit API (HTTP mocked with nock).
describe 'phabs_hear module', ->
  # Says `message` as `userName`, then waits `tempo` ms so the robot's async
  # processing (and the nock round-trip) completes before the `it` runs.
  hubotHear = (message, userName = 'momo', tempo = 40) ->
    beforeEach (done) ->
      room.user.say userName, message
      setTimeout (done), tempo
  # Convenience wrapper: address the message to @hubot.
  hubot = (message, userName = 'momo') ->
    hubotHear "@hubot #{message}", userName
  # The i-th recorded message text (index 0 is the user's own message).
  hubotResponse = (i = 1) ->
    room.messages[i]?[1]
  hubotResponseCount = ->
    room.messages.length
  beforeEach ->
    # Minimal Phabricator configuration required by the script under test.
    process.env.PHABRICATOR_URL = 'http://example.com'
    process.env.PHABRICATOR_API_KEY = 'xxx'
    process.env.PHABRICATOR_BOT_PHID = 'PHID-USER-xxx'
    process.env.PHABRICATOR_PROJECTS = 'PHID-PROJ-xxx:proj1,PHID-PCOL-yyy:proj2'
    room = helper.createRoom { httpd: false }
    # Seed the brain with users holding varying amounts of identity data.
    room.robot.brain.userForId 'user', {
      name: 'user'
    }
    room.robot.brain.userForId 'user_with_email', {
      name: 'user_with_email',
      # NOTE(review): the address below looks like a redaction placeholder,
      # not a valid email — confirm against the original fixture.
      email_address: 'PI:EMAIL:<EMAIL>END_PI'
    }
    room.robot.brain.userForId 'user_with_phid', {
      name: 'user_with_phid',
      phid: 'PHID-USER-123456789'
    }
    # Override receive so sent messages are recorded and the promise resolves
    # only once the robot has fully processed the TextMessage.
    room.receive = (userName, message) ->
      new Promise (resolve) =>
        @messages.push [userName, message]
        user = room.robot.brain.userForId userName
        @robot.receive(new Hubot.TextMessage(user, message), resolve)
  afterEach ->
    # Clean the environment so tests do not leak configuration.
    delete process.env.PHABRICATOR_URL
    delete process.env.PHABRICATOR_API_KEY
    delete process.env.PHABRICATOR_BOT_PHID
    delete process.env.PHABRICATOR_PROJECTS
# ---------------------------------------------------------------------------------
context 'someone talks about a task that is blacklisted', ->
beforeEach ->
do nock.disableNetConnect
room.robot.brain.data.phabricator.blacklist = [ 'T42', 'V3' ]
afterEach ->
nock.cleanAll()
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a type of item that is disabled per configuration', ->
beforeEach ->
do nock.disableNetConnect
process.env.PHABRICATOR_ENABLED_ITEMS = 'P,r'
room = helper.createRoom { httpd: false }
afterEach ->
nock.cleanAll()
delete process.env.PHABRICATOR_ENABLED_ITEMS
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a type of item but the hear is totaly disabled', ->
beforeEach ->
do nock.disableNetConnect
process.env.PHABRICATOR_ENABLED_ITEMS = ''
room = helper.createRoom { httpd: false }
afterEach ->
nock.cleanAll()
delete process.env.PHABRICATOR_ENABLED_ITEMS
room.robot.brain.data.phabricator.blacklist = [ ]
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'does not say anything', ->
expect(hubotResponseCount()).to.eql 1
expect(hubotResponse()).to.be.undefined
# ---------------------------------------------------------------------------------
context 'someone talks about a task', ->
context 'when the task is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { error_info: 'No such Maniphest task exists.' })
afterEach ->
nock.cleanAll()
context 'whatever about T424242 or something', ->
hubot 'whatever about T424242 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'oops T424242 No such Maniphest task exists.'
context 'when it is an open task', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { result: {
status: 'open',
isClosed: false,
title: 'some task',
priority: 'Low',
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'http://example.com/T42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'http://example.com/T42 - some task (Low, open 2 months ago)'
context 'whatever about http://example.com/T42 or something', ->
hubot 'whatever about http://example.com/T42 or something'
it "warns the user that this Task doesn't exist", ->
expect(hubotResponse()).to.eql 'T42 - some task (Low, open 2 months ago)'
context 'when it is a closed task', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/maniphest.info')
.reply(200, { result: {
status: 'resolved',
isClosed: true,
title: 'some task',
dateModified: moment().subtract(2, 'months').unix(),
priority: 'Low',
uri: 'http://example.com/T42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about T42 or something', ->
hubot 'whatever about T42 or something'
it 'gives information about the Task, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/T42 - some task (Low, resolved 2 months ago)'
context 'whatever about http://example.com/T42 or something', ->
hubot 'whatever about http://example.com/T42 or something'
it 'gives information about the Task, without uri', ->
expect(hubotResponse()).to.eql 'T42 - some task (Low, resolved 2 months ago)'
# ---------------------------------------------------------------------------------
context 'someone talks about a file', ->
context 'when the file is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/file.info')
.reply(200, { error_info: 'No such file exists.' })
afterEach ->
nock.cleanAll()
context 'whatever about F424242 or something', ->
hubot 'whatever about F424242 or something'
it "warns the user that this File doesn't exist", ->
expect(hubotResponse()).to.eql 'oops F424242 No such file exists.'
context 'when it is an existing file', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/file.info')
.reply(200, { result: {
name: 'image.png',
mimeType: 'image/png',
byteSize: '1409',
uri: 'https://example.com/F42'
} })
afterEach ->
nock.cleanAll()
context 'whatever about F42 or something', ->
hubot 'whatever about F42 or something'
it 'gives information about the File, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/F42 - image.png (image/png 1.38 kB)'
context 'whatever about http://example.com/F42 or something', ->
hubot 'whatever about http://example.com/F42 or something'
it 'gives information about the File, without uri', ->
expect(hubotResponse()).to.eql 'F42 - image.png (image/png 1.38 kB)'
# ---------------------------------------------------------------------------------
context 'someone talks about a paste', ->
context 'when the Paste is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about P424242 or something', ->
hubot 'whatever about P424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops P424242 was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about P424242 or something', ->
hubot 'whatever about P424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops P424242 http error 404'
context 'when it is an existing Paste without a language set', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(200, { result: {
'PHID-PSTE-hdxawtm6psdtsxy3nyzk': {
title: 'file.coffee',
language: null,
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'https://example.com/P42'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about P42 or something', ->
hubot 'whatever about P42 or something'
it 'gives information about the Paste, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/P42 - file.coffee (created 2 months ago)'
context 'whatever about http://example.com/P42 or something', ->
hubot 'whatever about http://example.com/P42 or something'
it 'gives information about the Paste, without uri', ->
expect(hubotResponse()).to.eql 'P42 - file.coffee (created 2 months ago)'
context 'when it is an existing Paste with a language set', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/paste.query')
.reply(200, { result: {
'PHID-PSTE-hdxawtm6psdtsxy3nyzk': {
title: 'file.coffee',
language: 'coffee',
dateCreated: moment().subtract(2, 'months').unix(),
uri: 'https://example.com/P42'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about P42 or something', ->
hubot 'whatever about P42 or something'
it 'gives information about the Paste, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/P42 - file.coffee (coffee, created 2 months ago)'
context 'whatever about http://example.com/P42 or something', ->
hubot 'whatever about http://example.com/P42 or something'
it 'gives information about the Paste, without uri', ->
expect(hubotResponse()).to.eql 'P42 - file.coffee (coffee, created 2 months ago)'
# ---------------------------------------------------------------------------------
context 'someone talks about a mock', ->
context 'when the mock is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about M424242 or something', ->
hubot 'whatever about M424242 or something'
it "warns the user that this Mock doesn't exist", ->
expect(hubotResponse()).to.eql 'oops M424242 was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about M424242 or something', ->
hubot 'whatever about M424242 or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops M424242 http error 404'
context 'when it is an existing Mock without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'M42': {
'phid': 'PHID-MOCK-6g6p65ez5ctxudji5twy',
'uri': 'https://example.com/M42',
'typeName': 'Pholio Mock',
'type': 'MOCK',
'name': 'M42',
'fullName': 'M42: Test Mock',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about M42 or something', ->
hubot 'whatever about M42 or something'
it 'gives information about the mock, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/M42 - Test Mock'
context 'whatever about http://example.com/M42 or something', ->
hubot 'whatever about http://example.com/M42 or something'
it 'gives information about the mock, without uri', ->
expect(hubotResponse()).to.eql 'M42: Test Mock'
context 'when it is an existing Mock with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'M42': {
'phid': 'PHID-MOCK-6g6p65ez5ctxudji5twy',
'uri': 'https://example.com/M42',
'typeName': 'Pholio Mock',
'type': 'MOCK',
'name': 'M42',
'fullName': 'M42: Test Mock',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about M42 or something', ->
hubot 'whatever about M42 or something'
it 'gives information about the mock, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/M42 - Test Mock (closed)'
context 'whatever about http://example.com/M42 or something', ->
hubot 'whatever about http://example.com/M42 or something'
it 'gives information about the mock, without uri', ->
expect(hubotResponse()).to.eql 'M42: Test Mock (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a build', ->
context 'when the build is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about B424242 or something', ->
hubot 'whatever about B424242 or something'
it "warns the user that this build doesn't exist", ->
expect(hubotResponse()).to.eql 'oops B424242 was not found.'
context 'when it is an existing build without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'B12999': {
'phid': 'PHID-HMBB-zeg6ru5vnd4fbp744s5f',
'uri': 'https://example.com/B12999',
'typeName': 'Buildable',
'type': 'HMBB',
'name': 'B12999',
'fullName': 'B12999: rP46ceba728fee: (stable) Fix an issue',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about B12999 or something', ->
hubot 'whatever about B12999 or something'
it 'gives information about the build, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/B12999 - rP46ceba728fee: (stable) Fix an issue'
context 'whatever about http://example.com/B12999 or something', ->
hubot 'whatever about http://example.com/B12999 or something'
it 'gives information about the build, without uri', ->
expect(hubotResponse()).to.eql 'B12999: rP46ceba728fee: (stable) Fix an issue'
context 'when it is an existing build with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'B12999': {
'phid': 'PHID-HMBB-zeg6ru5vnd4fbp744s5f',
'uri': 'https://example.com/B12999',
'typeName': 'Buildable',
'type': 'HMBB',
'name': 'B12999',
'fullName': 'B12999: rP46ceba728fee: (stable) Fix an issue',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about B12999 or something', ->
hubot 'whatever about B12999 or something'
it 'gives information about the build, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/B12999 - rP46ceba728fee: (stable) Fix an issue (closed)'
context 'whatever about http://example.com/B12999 or something', ->
hubot 'whatever about http://example.com/B12999 or something'
it 'gives information about the build, without uri', ->
expect(hubotResponse()).to.eql 'B12999: rP46ceba728fee: (stable) Fix an issue (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a question', ->
context 'when the question is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about Q424242 or something', ->
hubot 'whatever about Q424242 or something'
it "warns the user that this question doesn't exist", ->
expect(hubotResponse()).to.eql 'oops Q424242 was not found.'
context 'when it is an existing question without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'Q434': {
'phid': 'PHID-QUES-j22mqmbhb3mbcd2it7zs',
'uri': 'https://example.com/Q434',
'typeName': 'Ponder Question',
'type': 'QUES',
'name': 'Q434',
'fullName': 'Q434: Width in wiki pages',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about Q434 or something', ->
hubot 'whatever about Q434 or something'
it 'gives information about the question, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/Q434 - Width in wiki pages'
context 'whatever about http://example.com/Q434 or something', ->
hubot 'whatever about http://example.com/Q434 or something'
it 'gives information about the question, without uri', ->
expect(hubotResponse()).to.eql 'Q434: Width in wiki pages'
context 'when it is an existing question with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'Q434': {
'phid': 'PHID-QUES-j22mqmbhb3mbcd2it7zs',
'uri': 'https://example.com/Q434',
'typeName': 'Ponder Question',
'type': 'QUES',
'name': 'Q434',
'fullName': 'Q434: Width in wiki pages',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about Q434 or something', ->
hubot 'whatever about Q434 or something'
it 'gives information about the question, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/Q434 - Width in wiki pages (closed)'
context 'whatever about http://example.com/Q434 or something', ->
hubot 'whatever about http://example.com/Q434 or something'
it 'gives information about the question, without uri', ->
expect(hubotResponse()).to.eql 'Q434: Width in wiki pages (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a legalpad', ->
context 'when the legalpad is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about L424242 or something', ->
hubot 'whatever about L424242 or something'
it "warns the user that this legalpad doesn't exist", ->
expect(hubotResponse()).to.eql 'oops L424242 was not found.'
context 'when it is an existing legalpad without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'L38': {
'phid': 'PHID-LEGD-chmhkotszvqaucdrvh5t',
'uri': 'https://example.com/L38',
'typeName': 'Legalpad Document',
'type': 'LEGD',
'name': 'L38 Test',
'fullName': 'L38 Test',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about L38 or something', ->
hubot 'whatever about L38 or something'
it 'gives information about the legalpad, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/L38 - Test'
context 'whatever about http://example.com/L38 or something', ->
hubot 'whatever about http://example.com/L38 or something'
it 'gives information about the legalpad, without uri', ->
expect(hubotResponse()).to.eql 'L38 Test'
context 'when it is an existing legalpad with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'L38': {
'phid': 'PHID-LEGD-chmhkotszvqaucdrvh5t',
'uri': 'https://example.com/L38',
'typeName': 'Legalpad Document',
'type': 'LEGD',
'name': 'L38 Test',
'fullName': 'L38 Test',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about L38 or something', ->
hubot 'whatever about L38 or something'
it 'gives information about the legalpad, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/L38 - Test (closed)'
context 'whatever about http://example.com/L38 or something', ->
hubot 'whatever about http://example.com/L38 or something'
it 'gives information about the legalpad, without uri', ->
expect(hubotResponse()).to.eql 'L38 Test (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a vote', ->
context 'when the vote is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about V424242 or something', ->
hubot 'whatever about V424242 or something'
it "warns the user that this vote doesn't exist", ->
expect(hubotResponse()).to.eql 'oops V424242 was not found.'
context 'when it is an existing vote without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'V30': {
'phid': 'PHID-POLL-hqztsdcva3jkucu4mmv2',
'uri': 'https://example.com/V30',
'typeName': 'Slowvote Poll',
'type': 'POLL',
'name': 'V30',
'fullName': 'V30: This is a poll',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about V30 or something', ->
hubot 'whatever about V30 or something'
it 'gives information about the vote, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/V30 - This is a poll'
context 'whatever about http://example.com/V30 or something', ->
hubot 'whatever about http://example.com/V30 or something'
it 'gives information about the vote, without uri', ->
expect(hubotResponse()).to.eql 'V30: This is a poll'
context 'when it is an existing vote with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'V30': {
'phid': 'PHID-POLL-hqztsdcva3jkucu4mmv2',
'uri': 'https://example.com/V30',
'typeName': 'Slowvote Poll',
'type': 'POLL',
'name': 'V30',
'fullName': 'V30: This is a poll',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about V30 or something', ->
hubot 'whatever about V30 or something'
it 'gives information about the vote, including uri', ->
expect(hubotResponse()).to.eql 'https://example.com/V30 - This is a poll (closed)'
context 'whatever about http://example.com/V30 or something', ->
hubot 'whatever about http://example.com/V30 or something'
it 'gives information about the vote, without uri', ->
expect(hubotResponse()).to.eql 'V30: This is a poll (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a diff', ->
context 'when the diff is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about D555555 or something', ->
hubot 'whatever about D555555 or something'
it "warns the user that this Diff doesn't exist", ->
expect(hubotResponse()).to.eql 'oops D555555 was not found.'
context 'when it is an open diff', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'D55': {
'phid': 'PHID-DREV-hqztsdcva3jkucu4mmv2',
'uri': 'http://example.com/D55',
'typeName': 'Differential Revision',
'type': 'DREV',
'name': 'D55',
'fullName': 'D55: some diff',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about D55 or something', ->
hubot 'whatever about D55 or something'
it 'gives information about the open Diff, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/D55 - some diff'
context 'whatever about http://example.com/D55 or something', ->
hubot 'whatever about http://example.com/D55 or something'
it 'gives information about the open Diff, without uri', ->
expect(hubotResponse()).to.eql 'D55: some diff'
context 'when it is a closed diff', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'D55': {
'phid': 'PHID-DREV-hqztsdcva3jkucu4mmv2',
'uri': 'http://example.com/D55',
'typeName': 'Differential Revision',
'type': 'DREV',
'name': 'D55',
'fullName': 'D55: some diff',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about D55 or something', ->
hubot 'whatever about D55 or something'
it 'gives information about the closed Diff, including uri', ->
expect(hubotResponse()).to.eql 'http://example.com/D55 - some diff (closed)'
context 'whatever about http://example.com/D55 or something', ->
hubot 'whatever about http://example.com/D55 or something'
it 'gives information about the closed Diff, without uri', ->
expect(hubotResponse()).to.eql 'D55: some diff (closed)'
# ---------------------------------------------------------------------------------
context 'someone talks about a commit', ->
context 'when the commit is unknown', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: { } })
afterEach ->
nock.cleanAll()
context 'whatever about rP156f7196453c or something', ->
hubot 'whatever about rP156f7196453c or something'
it "warns the user that this commit doesn't exist", ->
expect(hubotResponse()).to.eql 'oops rP156f7196453c was not found.'
context 'when the request returns an error', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(404, { message: 'not found' })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it "warns the user that this Paste doesn't exist", ->
expect(hubotResponse()).to.eql 'oops rTULIP156f7196453c http error 404'
context 'when it is an existing commit without a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'rTULIP156f7196453c': {
'phid': 'PHID-CMIT-7dpynrtygtd7z3bv7f64',
'uri': 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b',
'typeName': 'Diffusion Commit',
'type': 'CMIT',
'name': 'rTULIP156f7196453c',
'fullName': 'rTULIP156f7196453c: (stable) Promote 2016 Week 28',
'status': 'open'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it 'gives information about the Commit, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b - ' +
'(stable) Promote 2016 Week 28'
context 'whatever about http://example.com/rTULIP156f7196453c or something', ->
hubot 'whatever about http://example.com/rTULIP156f7196453c or something'
it 'gives information about the Commit, without uri', ->
expect(hubotResponse()).to.eql 'rTULIP156f7196453c: (stable) Promote 2016 Week 28'
context 'when it is an existing commit with a status closed', ->
beforeEach ->
do nock.disableNetConnect
nock(process.env.PHABRICATOR_URL)
.get('/api/phid.lookup')
.reply(200, { result: {
'rTULIP156f7196453c': {
'phid': 'PHID-CMIT-7dpynrtygtd7z3bv7f64',
'uri': 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b',
'typeName': 'Diffusion Commit',
'type': 'CMIT',
'name': 'rTULIP156f7196453c',
'fullName': 'rTULIP156f7196453c: (stable) Promote 2016 Week 28',
'status': 'closed'
}
} })
afterEach ->
nock.cleanAll()
context 'whatever about rTULIP156f7196453c or something', ->
hubot 'whatever about rTULIP156f7196453c or something'
it 'gives information about the Commit, including uri', ->
expect(hubotResponse())
.to.eql 'https://example.com/rP156f7196453c6612ee90f97e41bb9389e5d6ec0b - ' +
'(stable) Promote 2016 Week 28 (closed)'
context 'whatever about http://example.com/rTULIP156f7196453c or something', ->
hubot 'whatever about http://example.com/rTULIP156f7196453c or something'
it 'gives information about the Commit, without uri', ->
expect(hubotResponse())
.to.eql 'rTULIP156f7196453c: (stable) Promote 2016 Week 28 (closed)'
|
[
{
"context": "ileoverview Tests for react-in-jsx-scope\n# @author Glen Mailer\n###\n\n'use strict'\n\n# ----------------------------",
"end": 71,
"score": 0.999714732170105,
"start": 60,
"tag": "NAME",
"value": "Glen Mailer"
}
] | src/tests/rules/react-in-jsx-scope.coffee | helixbass/eslint-plugin-known-imports | 4 | ###*
# @fileoverview Tests for react-in-jsx-scope
# @author Glen Mailer
###

'use strict'

# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------

rule = require '../../rules/react-in-jsx-scope'
{RuleTester} = require 'eslint'

# Parser configuration shared by every case: modern ES modules with JSX enabled.
parserOptions =
  ecmaVersion: 2018
  sourceType: 'module'
  ecmaFeatures:
    jsx: yes

# Shared-settings variant: overrides the JSX pragma so the rule expects 'Foo'
# to be in scope instead of 'React'.
settings =
  react:
    pragma: 'Foo'

# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------

# Valid cases: the pragma identifier (React, or Foo via /** @jsx */ comment or
# settings) is declared in scope wherever JSX appears.
# Invalid cases: JSX is used without the pragma in scope; fixable cases carry
# an `output` with the auto-inserted import.
ruleTester = new RuleTester {parserOptions}
ruleTester.run 'react-in-jsx-scope', rule,
  valid: [
    'var React, App; <App />;'
    'var React; <img />;'
    'var React; <x-gif />;'
    'var React, App, a=1; <App attr={a} />;'
    'var React, App, a=1; function elem() { return <App attr={a} />; }'
    'var React, App; <App />;'
    '/** @jsx Foo */ var Foo, App; <App />;'
    '/** @jsx Foo.Bar */ var Foo, App; <App />;'
    """
      import React from 'react/addons';
      const Button = createReactClass({
        render() {
          return (
            <button {...this.props}>{this.props.children}</button>
          )
        }
      });
      export default Button;
    """
  ,
    {code: 'var Foo, App; <App />;', settings}
  ]
  invalid: [
    code: 'var App, a = <App />;'
    output: """
      import React from 'react'
      var App, a = <App />;
    """
    errors: [message: "'React' must be in scope when using JSX"]
  ,
    code: 'var a = <App />;'
    errors: [message: "'React' must be in scope when using JSX"]
  ,
    code: 'var a = <img />;'
    errors: [message: "'React' must be in scope when using JSX"]
  ,
    code: '/** @jsx React.DOM */ var a = <img />;'
    errors: [message: "'React' must be in scope when using JSX"]
  ,
    code: '/** @jsx Foo.bar */ var React, a = <img />;'
    errors: [message: "'Foo' must be in scope when using JSX"]
  ,
    code: 'var React, a = <img />;'
    errors: [message: "'Foo' must be in scope when using JSX"]
    settings: settings
  ,
    code: """
      import {Fragment} from 'react'
      var a = <Fragment>b</Fragment>
    """
    output: """
      import React, {Fragment} from 'react'
      var a = <Fragment>b</Fragment>
    """
    errors: [message: "'React' must be in scope when using JSX"]
  ]
| 4559 | ###*
# @fileoverview Tests for react-in-jsx-scope
# @author <NAME>
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require '../../rules/react-in-jsx-scope'
{RuleTester} = require 'eslint'
parserOptions =
ecmaVersion: 2018
sourceType: 'module'
ecmaFeatures:
jsx: yes
settings =
react:
pragma: 'Foo'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester {parserOptions}
ruleTester.run 'react-in-jsx-scope', rule,
valid: [
'var React, App; <App />;'
'var React; <img />;'
'var React; <x-gif />;'
'var React, App, a=1; <App attr={a} />;'
'var React, App, a=1; function elem() { return <App attr={a} />; }'
'var React, App; <App />;'
'/** @jsx Foo */ var Foo, App; <App />;'
'/** @jsx Foo.Bar */ var Foo, App; <App />;'
"""
import React from 'react/addons';
const Button = createReactClass({
render() {
return (
<button {...this.props}>{this.props.children}</button>
)
}
});
export default Button;
"""
,
{code: 'var Foo, App; <App />;', settings}
]
invalid: [
code: 'var App, a = <App />;'
output: """
import React from 'react'
var App, a = <App />;
"""
errors: [message: "'React' must be in scope when using JSX"]
,
code: 'var a = <App />;'
errors: [message: "'React' must be in scope when using JSX"]
,
code: 'var a = <img />;'
errors: [message: "'React' must be in scope when using JSX"]
,
code: '/** @jsx React.DOM */ var a = <img />;'
errors: [message: "'React' must be in scope when using JSX"]
,
code: '/** @jsx Foo.bar */ var React, a = <img />;'
errors: [message: "'Foo' must be in scope when using JSX"]
,
code: 'var React, a = <img />;'
errors: [message: "'Foo' must be in scope when using JSX"]
settings: settings
,
code: """
import {Fragment} from 'react'
var a = <Fragment>b</Fragment>
"""
output: """
import React, {Fragment} from 'react'
var a = <Fragment>b</Fragment>
"""
errors: [message: "'React' must be in scope when using JSX"]
]
| true | ###*
# @fileoverview Tests for react-in-jsx-scope
# @author PI:NAME:<NAME>END_PI
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require '../../rules/react-in-jsx-scope'
{RuleTester} = require 'eslint'
parserOptions =
ecmaVersion: 2018
sourceType: 'module'
ecmaFeatures:
jsx: yes
settings =
react:
pragma: 'Foo'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester {parserOptions}
ruleTester.run 'react-in-jsx-scope', rule,
valid: [
'var React, App; <App />;'
'var React; <img />;'
'var React; <x-gif />;'
'var React, App, a=1; <App attr={a} />;'
'var React, App, a=1; function elem() { return <App attr={a} />; }'
'var React, App; <App />;'
'/** @jsx Foo */ var Foo, App; <App />;'
'/** @jsx Foo.Bar */ var Foo, App; <App />;'
"""
import React from 'react/addons';
const Button = createReactClass({
render() {
return (
<button {...this.props}>{this.props.children}</button>
)
}
});
export default Button;
"""
,
{code: 'var Foo, App; <App />;', settings}
]
invalid: [
code: 'var App, a = <App />;'
output: """
import React from 'react'
var App, a = <App />;
"""
errors: [message: "'React' must be in scope when using JSX"]
,
code: 'var a = <App />;'
errors: [message: "'React' must be in scope when using JSX"]
,
code: 'var a = <img />;'
errors: [message: "'React' must be in scope when using JSX"]
,
code: '/** @jsx React.DOM */ var a = <img />;'
errors: [message: "'React' must be in scope when using JSX"]
,
code: '/** @jsx Foo.bar */ var React, a = <img />;'
errors: [message: "'Foo' must be in scope when using JSX"]
,
code: 'var React, a = <img />;'
errors: [message: "'Foo' must be in scope when using JSX"]
settings: settings
,
code: """
import {Fragment} from 'react'
var a = <Fragment>b</Fragment>
"""
output: """
import React, {Fragment} from 'react'
var a = <Fragment>b</Fragment>
"""
errors: [message: "'React' must be in scope when using JSX"]
]
|
[
{
"context": " has at least 1 keyboard event listener.\n# @author Ethan Cohen\n###\n\n# ------------------------------------------",
"end": 150,
"score": 0.9998871088027954,
"start": 139,
"tag": "NAME",
"value": "Ethan Cohen"
}
] | src/tests/rules/click-events-have-key-events.coffee | danielbayley/eslint-plugin-coffee | 21 | ### eslint-env jest ###
###*
# @fileoverview Enforce a clickable non-interactive element has at least 1 keyboard event listener.
# @author Ethan Cohen
###
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
path = require 'path'
{RuleTester} = require 'eslint'
{
default: parserOptionsMapper
} = require '../eslint-plugin-jsx-a11y-parser-options-mapper'
rule = require 'eslint-plugin-jsx-a11y/lib/rules/click-events-have-key-events'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
errorMessage =
'Visible, non-interactive elements with click handlers must have at least one keyboard listener.'
expectedError =
message: errorMessage
type: 'JSXOpeningElement'
ruleTester.run 'click-events-have-key-events', rule,
valid: [
code: '<div onClick={() => undefined} onKeyDown={foo}/>'
,
code: '<div onClick={() => undefined} onKeyUp={foo} />'
,
code: '<div onClick={() => undefined} onKeyPress={foo}/>'
,
code: '<div onClick={() => undefined} onKeyDown={foo} onKeyUp={bar} />'
,
code: '<div onClick={() => undefined} onKeyDown={foo} {...props} />'
,
code: '<div className="foo" />'
,
code: '<div onClick={() => undefined} aria-hidden />'
,
code: '<div onClick={() => undefined} aria-hidden={true} />'
,
code:
'<div onClick={() => undefined} aria-hidden={false} onKeyDown={foo} />'
,
code:
'<div onClick={() => undefined} onKeyDown={foo} aria-hidden={undefined} />'
,
code: '<input type="text" onClick={() => undefined} />'
,
code: '<input onClick={() => undefined} />'
,
code: '<button onClick={() => undefined} className="foo" />'
,
code: '<option onClick={() => undefined} className="foo" />'
,
code: '<select onClick={() => undefined} className="foo" />'
,
code: '<textarea onClick={() => undefined} className="foo" />'
,
code: '<a onClick={() => undefined} href="http://x.y.z" />'
,
code: '<a onClick={() => undefined} href="http://x.y.z" tabIndex="0" />'
,
code: '<input onClick={() => undefined} type="hidden" />'
,
code: '<div onClick={() => undefined} role="presentation" />'
,
code: '<div onClick={() => undefined} role="none" />'
,
code: '<TestComponent onClick={doFoo} />'
,
code: '<Button onClick={doFoo} />'
].map parserOptionsMapper
invalid: [
code: '<div onClick={() => undefined} />', errors: [expectedError]
,
code: '<div onClick={() => undefined} role={undefined} />'
errors: [expectedError]
,
code: '<div onClick={() => undefined} {...props} />'
errors: [expectedError]
,
code: '<section onClick={() => undefined} />', errors: [expectedError]
,
code: '<main onClick={() => undefined} />', errors: [expectedError]
,
code: '<article onClick={() => undefined} />', errors: [expectedError]
,
code: '<header onClick={() => undefined} />', errors: [expectedError]
,
code: '<footer onClick={() => undefined} />', errors: [expectedError]
,
code: '<div onClick={() => undefined} aria-hidden={false} />'
errors: [expectedError]
,
code: '<a onClick={() => undefined} />', errors: [expectedError]
,
code: '<a tabIndex="0" onClick={() => undefined} />'
errors: [expectedError]
].map parserOptionsMapper
| 219332 | ### eslint-env jest ###
###*
# @fileoverview Enforce a clickable non-interactive element has at least 1 keyboard event listener.
# @author <NAME>
###
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
path = require 'path'
{RuleTester} = require 'eslint'
{
default: parserOptionsMapper
} = require '../eslint-plugin-jsx-a11y-parser-options-mapper'
rule = require 'eslint-plugin-jsx-a11y/lib/rules/click-events-have-key-events'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
errorMessage =
'Visible, non-interactive elements with click handlers must have at least one keyboard listener.'
expectedError =
message: errorMessage
type: 'JSXOpeningElement'
ruleTester.run 'click-events-have-key-events', rule,
valid: [
code: '<div onClick={() => undefined} onKeyDown={foo}/>'
,
code: '<div onClick={() => undefined} onKeyUp={foo} />'
,
code: '<div onClick={() => undefined} onKeyPress={foo}/>'
,
code: '<div onClick={() => undefined} onKeyDown={foo} onKeyUp={bar} />'
,
code: '<div onClick={() => undefined} onKeyDown={foo} {...props} />'
,
code: '<div className="foo" />'
,
code: '<div onClick={() => undefined} aria-hidden />'
,
code: '<div onClick={() => undefined} aria-hidden={true} />'
,
code:
'<div onClick={() => undefined} aria-hidden={false} onKeyDown={foo} />'
,
code:
'<div onClick={() => undefined} onKeyDown={foo} aria-hidden={undefined} />'
,
code: '<input type="text" onClick={() => undefined} />'
,
code: '<input onClick={() => undefined} />'
,
code: '<button onClick={() => undefined} className="foo" />'
,
code: '<option onClick={() => undefined} className="foo" />'
,
code: '<select onClick={() => undefined} className="foo" />'
,
code: '<textarea onClick={() => undefined} className="foo" />'
,
code: '<a onClick={() => undefined} href="http://x.y.z" />'
,
code: '<a onClick={() => undefined} href="http://x.y.z" tabIndex="0" />'
,
code: '<input onClick={() => undefined} type="hidden" />'
,
code: '<div onClick={() => undefined} role="presentation" />'
,
code: '<div onClick={() => undefined} role="none" />'
,
code: '<TestComponent onClick={doFoo} />'
,
code: '<Button onClick={doFoo} />'
].map parserOptionsMapper
invalid: [
code: '<div onClick={() => undefined} />', errors: [expectedError]
,
code: '<div onClick={() => undefined} role={undefined} />'
errors: [expectedError]
,
code: '<div onClick={() => undefined} {...props} />'
errors: [expectedError]
,
code: '<section onClick={() => undefined} />', errors: [expectedError]
,
code: '<main onClick={() => undefined} />', errors: [expectedError]
,
code: '<article onClick={() => undefined} />', errors: [expectedError]
,
code: '<header onClick={() => undefined} />', errors: [expectedError]
,
code: '<footer onClick={() => undefined} />', errors: [expectedError]
,
code: '<div onClick={() => undefined} aria-hidden={false} />'
errors: [expectedError]
,
code: '<a onClick={() => undefined} />', errors: [expectedError]
,
code: '<a tabIndex="0" onClick={() => undefined} />'
errors: [expectedError]
].map parserOptionsMapper
| true | ### eslint-env jest ###
###*
# @fileoverview Enforce a clickable non-interactive element has at least 1 keyboard event listener.
# @author PI:NAME:<NAME>END_PI
###
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
path = require 'path'
{RuleTester} = require 'eslint'
{
default: parserOptionsMapper
} = require '../eslint-plugin-jsx-a11y-parser-options-mapper'
rule = require 'eslint-plugin-jsx-a11y/lib/rules/click-events-have-key-events'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
errorMessage =
'Visible, non-interactive elements with click handlers must have at least one keyboard listener.'
expectedError =
message: errorMessage
type: 'JSXOpeningElement'
ruleTester.run 'click-events-have-key-events', rule,
valid: [
code: '<div onClick={() => undefined} onKeyDown={foo}/>'
,
code: '<div onClick={() => undefined} onKeyUp={foo} />'
,
code: '<div onClick={() => undefined} onKeyPress={foo}/>'
,
code: '<div onClick={() => undefined} onKeyDown={foo} onKeyUp={bar} />'
,
code: '<div onClick={() => undefined} onKeyDown={foo} {...props} />'
,
code: '<div className="foo" />'
,
code: '<div onClick={() => undefined} aria-hidden />'
,
code: '<div onClick={() => undefined} aria-hidden={true} />'
,
code:
'<div onClick={() => undefined} aria-hidden={false} onKeyDown={foo} />'
,
code:
'<div onClick={() => undefined} onKeyDown={foo} aria-hidden={undefined} />'
,
code: '<input type="text" onClick={() => undefined} />'
,
code: '<input onClick={() => undefined} />'
,
code: '<button onClick={() => undefined} className="foo" />'
,
code: '<option onClick={() => undefined} className="foo" />'
,
code: '<select onClick={() => undefined} className="foo" />'
,
code: '<textarea onClick={() => undefined} className="foo" />'
,
code: '<a onClick={() => undefined} href="http://x.y.z" />'
,
code: '<a onClick={() => undefined} href="http://x.y.z" tabIndex="0" />'
,
code: '<input onClick={() => undefined} type="hidden" />'
,
code: '<div onClick={() => undefined} role="presentation" />'
,
code: '<div onClick={() => undefined} role="none" />'
,
code: '<TestComponent onClick={doFoo} />'
,
code: '<Button onClick={doFoo} />'
].map parserOptionsMapper
invalid: [
code: '<div onClick={() => undefined} />', errors: [expectedError]
,
code: '<div onClick={() => undefined} role={undefined} />'
errors: [expectedError]
,
code: '<div onClick={() => undefined} {...props} />'
errors: [expectedError]
,
code: '<section onClick={() => undefined} />', errors: [expectedError]
,
code: '<main onClick={() => undefined} />', errors: [expectedError]
,
code: '<article onClick={() => undefined} />', errors: [expectedError]
,
code: '<header onClick={() => undefined} />', errors: [expectedError]
,
code: '<footer onClick={() => undefined} />', errors: [expectedError]
,
code: '<div onClick={() => undefined} aria-hidden={false} />'
errors: [expectedError]
,
code: '<a onClick={() => undefined} />', errors: [expectedError]
,
code: '<a tabIndex="0" onClick={() => undefined} />'
errors: [expectedError]
].map parserOptionsMapper
|
[
{
"context": "stream, @role, @port, @land ) ->\n @name = 'DriveBar'\n @lastTrip = { name:'' }\n @created = f",
"end": 225,
"score": 0.4192925989627838,
"start": 220,
"tag": "NAME",
"value": "Drive"
},
{
"context": "m, @role, @port, @land ) ->\n @name = 'DriveBar'... | src/augm/tool/DriveBarUC.coffee | axiom6/aug | 0 |
import $ from 'jquery'
import * as d3 from 'd3'
import Util from '../util/Util.js'
class DriveBarUC
# @port [0,0,92,33] @land =[0,0,100,50
constructor:( @stream, @role, @port, @land ) ->
@name = 'DriveBar'
@lastTrip = { name:'' }
@created = false
@screen = null # Set by position() updated by position()
Util.noop( @onScreenTransform )
html:() ->
@htmlId = Util.id(@name,@role) # For createSvg()
"""<div id="#{@htmlId}" class="#{Util.css(@name)}"></div>""" # May or may not need ext for CSS
ready:() ->
@$ = $( @html() )
position:( screen ) ->
# Util.dbg( 'DriveBarUC.position()', @role, screen )
@screen = screen
#@screenOrig = screen
Util.cssPosition( @$, @screen, @port, @land )
[@svg,@$svg,@g,@$g,@gId,@gw,@gh,@y0] = @createSvg( @$, @htmlId, @name, @role, @svgWidth(), @svgHeight(), @barTop() )
@subscribe()
subscribe:() ->
@stream.subscribe( 'Location', 'Deals', (location) => @onLocation( location ) )
@stream.subscribe( 'Screen', 'Deals', (screen) => @onScreen( screen ) )
@stream.subscribe( 'Trip', 'Deals', (trip) => @onTrip( trip ) )
onLocation:( location ) ->
Util.noop( 'DriveBarUC.onLocation()', @role, location )
onTrip:( trip ) =>
if not @created or trip.name isnt @lastTrip.name
@createBars( trip )
else
@updateFills( trip )
@lastTrip = trip
onScreen:( screen ) ->
@screen = screen
Util.cssPosition( @$, @screen, @port, @land )
@svg.attr( "width", @svgWidth() ).attr( 'height', @svgHeight() )
@createBars( @lastTrip )
# Screenlayout changes base on orientation not working
onScreenTransform:( next ) ->
prev = @screen
@screen = next
Util.cssPosition( @$, @screen, @port, @land )
@svg.attr( "width", @svgWidth() ).attr( 'height', @svgHeight() )
xp = 0
yp = 0
xn = 0
yn = 0
[xp,yp] = if prev.orientation is 'Portrait' then [@port[2],@port[3]] else [@land[2],@land[3]]
[xn,yn] = if next.orientation is 'Portrait' then [@port[2],@port[3]] else [@land[2],@land[3]]
xs = next.width * xn / ( prev.width * xp )
ys = next.height * yn / ( prev.height * yp )
@g.attr( 'transform', "scale(#{xs},#{ys})" )
return
# index 2 is width index 3 is height
svgWidth: () -> if @screen.orientation is 'Portrait' then @screen.width * @port[2]/100 else @screen.width * @land[2]/100
svgHeight:() -> if @screen.orientation is 'Portrait' then @screen.height * @port[3]/100 else @screen.height * @land[3]/100
barHeight:() -> @svgHeight() * 0.33
barTop: () -> @svgHeight() * 0.50
# d3 Svg dependency
createSvg:( $, htmlId, name, ext, width, height, barTop ) ->
svgId = Util.svgId( name, ext, 'Svg' )
gId = Util.svgId( name, ext, 'G' )
svg = d3.select('#'+htmlId).append("svg:svg").attr("id",svgId).attr("width",width).attr("height",height)
g = svg.append("svg:g").attr("id",gId) # All tranforms are applied to g
$svg = $.find( '#'+svgId )
$g = $.find( '#'+gId )
[svg,$svg,g,$g,gId,width,height,barTop]
createBars:( trip ) ->
d3.select('#'+@gId).selectAll("*").remove()
@mileBeg = trip.begMile()
@mileEnd = trip.endMile()
@distance = Math.abs( @mileEnd - @mileBeg )
# Util.dbg( 'DriveBarUC.createBars() 1', { mileBeg:@mileBeg, mileEnd:@mileEnd, distance:@distance } )
thick = 1
x = 0
y = @barTop()
w = @svgWidth()
h = @barHeight()
@createTravelTime( trip, @g, x, y, w, h )
@rect( trip, @g, trip.segments[0], @role+'Border', x, y, w, h, 'transparent', 'white', thick*4, '' )
for seg in trip.segments
beg = w * Math.abs( Util.toFloat(seg['StartMileMarker']) - @mileBeg ) / @distance
end = w * Math.abs( Util.toFloat(seg['EndMileMarker']) - @mileBeg ) / @distance
fill = @fillCondition( seg.segId, trip.conditions )
# Util.dbg( 'DriveBarUC.createBars() 2', { segId:seg.segId, beg:beg, end:end, w:Math.abs(end-beg) } )
@rect( trip, @g, seg, seg.segId, beg, y, Math.abs(end-beg), h, fill, 'black', thick, '' )
@created = true
return
createTravelTime:( trip, g, x, y, w, h ) ->
Util.noop( h )
fontSize = 18
fontSizePx = fontSize + 'px'
g.append("svg:text").text(trip.source).attr("x",4).attr("y",y-fontSize).attr('fill','white')
.attr("text-anchor","start").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text('TRAVEL TIME').attr("x",w/2).attr("y",y-fontSize*3.3 ).attr('fill','white')
.attr("text-anchor","middle").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text(trip.etaHoursMins()).attr("x",w/2).attr("y",y-fontSize*2.2 ).attr('fill','white')
.attr("text-anchor","middle").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text(trip.destination).attr("x",w-4).attr("y",y-fontSize ).attr('fill','white')
.attr("text-anchor","end").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
fillCondition:( segId, conditions ) ->
Conditions = @getTheCondition( segId, conditions )
return 'gray' if not Conditions? or not Conditions.AverageSpeed?
@fillSpeed( Conditions.AverageSpeed )
# Brute force array interation
getTheCondition:( segId, conditions ) ->
for condition in conditions
if condition.SegmentId? and condition['Conditions']?
return condition['Conditions'] if segId is condition.SegmentId
undefined
fillSpeed:( speed ) ->
fill = 'gray'
if 50 < speed then fill = 'green'
else if 25 < speed and speed <= 50 then fill = 'yellow'
else if 15 < speed and speed <= 25 then fill = 'red'
else if 0 < speed and speed <= 15 then fill = 'black'
fill
updateFills:( trip ) ->
for condition in trip.conditions
segId = Util.toInt(condition.SegmentId)
fill = @fillSpeed( condition['Conditions'].AverageSpeed )
@updateRectFill( segId, fill )
return
rect:( trip, g, seg, segId, x0, y0, w, h, fill, stroke, thick, text ) ->
svgId = Util.svgId( @name, segId.toString(), @role )
onClick = () =>
`x = d3.mouse(this)[0]`
mile = @mileBeg + (@mileEnd-@mileBeg) * x / @svgWidth()
console.log( 'DriveBar.rect()', { segId:segId, beg:seg['StartMileMarker'], mile:Util.toFixed(mile,1), end:seg['EndMileMarker'] } )
@doSeqmentDeals(trip,segId,mile)
g.append("svg:rect").attr('id',svgId).attr("x",x0).attr("y",y0).attr("width",w).attr("height",h).attr('segId',segId)
.attr("fill",fill).attr("stroke",stroke).attr("stroke-width",thick)
.on('click',onClick) #.on('mouseover',onMouseOver)
if text isnt ''
g.append("svg:text").text(text).attr("x",x0+w/2).attr("y",y0+h/2+2).attr('fill',fill)
.attr("text-anchor","middle").attr("font-size","4px").attr("font-family","Droid Sans")
return
doSeqmentDeals:( trip, segId, mile ) ->
deals = trip.getDealsBySegId( segId )
Util.dbg( 'DriveBarUC.doSeqmentDeals()', deals.length )
if deals.length > 0
deals[0].exit = Util.toInt(mile)
@stream.publish( 'Deals', deals )
updateRectFill:( segId, fill ) ->
rectId = Util.svgId( @name, segId.toString(), @role )
rect = @$svg.find('#'+rectId)
rect.attr( 'fill', fill )
return
`export default DriveBarUC`
| 31860 |
import $ from 'jquery'
import * as d3 from 'd3'
import Util from '../util/Util.js'
class DriveBarUC
# @port [0,0,92,33] @land =[0,0,100,50
constructor:( @stream, @role, @port, @land ) ->
@name = '<NAME>Bar'
@lastTrip = { name:'' }
@created = false
@screen = null # Set by position() updated by position()
Util.noop( @onScreenTransform )
html:() ->
@htmlId = Util.id(@name,@role) # For createSvg()
"""<div id="#{@htmlId}" class="#{Util.css(@name)}"></div>""" # May or may not need ext for CSS
ready:() ->
@$ = $( @html() )
position:( screen ) ->
# Util.dbg( 'DriveBarUC.position()', @role, screen )
@screen = screen
#@screenOrig = screen
Util.cssPosition( @$, @screen, @port, @land )
[@svg,@$svg,@g,@$g,@gId,@gw,@gh,@y0] = @createSvg( @$, @htmlId, @name, @role, @svgWidth(), @svgHeight(), @barTop() )
@subscribe()
subscribe:() ->
@stream.subscribe( 'Location', 'Deals', (location) => @onLocation( location ) )
@stream.subscribe( 'Screen', 'Deals', (screen) => @onScreen( screen ) )
@stream.subscribe( 'Trip', 'Deals', (trip) => @onTrip( trip ) )
onLocation:( location ) ->
Util.noop( 'DriveBarUC.onLocation()', @role, location )
onTrip:( trip ) =>
if not @created or trip.name isnt @lastTrip.name
@createBars( trip )
else
@updateFills( trip )
@lastTrip = trip
onScreen:( screen ) ->
@screen = screen
Util.cssPosition( @$, @screen, @port, @land )
@svg.attr( "width", @svgWidth() ).attr( 'height', @svgHeight() )
@createBars( @lastTrip )
# Screenlayout changes base on orientation not working
onScreenTransform:( next ) ->
prev = @screen
@screen = next
Util.cssPosition( @$, @screen, @port, @land )
@svg.attr( "width", @svgWidth() ).attr( 'height', @svgHeight() )
xp = 0
yp = 0
xn = 0
yn = 0
[xp,yp] = if prev.orientation is 'Portrait' then [@port[2],@port[3]] else [@land[2],@land[3]]
[xn,yn] = if next.orientation is 'Portrait' then [@port[2],@port[3]] else [@land[2],@land[3]]
xs = next.width * xn / ( prev.width * xp )
ys = next.height * yn / ( prev.height * yp )
@g.attr( 'transform', "scale(#{xs},#{ys})" )
return
# index 2 is width index 3 is height
svgWidth: () -> if @screen.orientation is 'Portrait' then @screen.width * @port[2]/100 else @screen.width * @land[2]/100
svgHeight:() -> if @screen.orientation is 'Portrait' then @screen.height * @port[3]/100 else @screen.height * @land[3]/100
barHeight:() -> @svgHeight() * 0.33
barTop: () -> @svgHeight() * 0.50
# d3 Svg dependency
createSvg:( $, htmlId, name, ext, width, height, barTop ) ->
svgId = Util.svgId( name, ext, 'Svg' )
gId = Util.svgId( name, ext, 'G' )
svg = d3.select('#'+htmlId).append("svg:svg").attr("id",svgId).attr("width",width).attr("height",height)
g = svg.append("svg:g").attr("id",gId) # All tranforms are applied to g
$svg = $.find( '#'+svgId )
$g = $.find( '#'+gId )
[svg,$svg,g,$g,gId,width,height,barTop]
createBars:( trip ) ->
d3.select('#'+@gId).selectAll("*").remove()
@mileBeg = trip.begMile()
@mileEnd = trip.endMile()
@distance = Math.abs( @mileEnd - @mileBeg )
# Util.dbg( 'DriveBarUC.createBars() 1', { mileBeg:@mileBeg, mileEnd:@mileEnd, distance:@distance } )
thick = 1
x = 0
y = @barTop()
w = @svgWidth()
h = @barHeight()
@createTravelTime( trip, @g, x, y, w, h )
@rect( trip, @g, trip.segments[0], @role+'Border', x, y, w, h, 'transparent', 'white', thick*4, '' )
for seg in trip.segments
beg = w * Math.abs( Util.toFloat(seg['StartMileMarker']) - @mileBeg ) / @distance
end = w * Math.abs( Util.toFloat(seg['EndMileMarker']) - @mileBeg ) / @distance
fill = @fillCondition( seg.segId, trip.conditions )
# Util.dbg( 'DriveBarUC.createBars() 2', { segId:seg.segId, beg:beg, end:end, w:Math.abs(end-beg) } )
@rect( trip, @g, seg, seg.segId, beg, y, Math.abs(end-beg), h, fill, 'black', thick, '' )
@created = true
return
createTravelTime:( trip, g, x, y, w, h ) ->
Util.noop( h )
fontSize = 18
fontSizePx = fontSize + 'px'
g.append("svg:text").text(trip.source).attr("x",4).attr("y",y-fontSize).attr('fill','white')
.attr("text-anchor","start").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text('TRAVEL TIME').attr("x",w/2).attr("y",y-fontSize*3.3 ).attr('fill','white')
.attr("text-anchor","middle").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text(trip.etaHoursMins()).attr("x",w/2).attr("y",y-fontSize*2.2 ).attr('fill','white')
.attr("text-anchor","middle").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text(trip.destination).attr("x",w-4).attr("y",y-fontSize ).attr('fill','white')
.attr("text-anchor","end").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
fillCondition:( segId, conditions ) ->
Conditions = @getTheCondition( segId, conditions )
return 'gray' if not Conditions? or not Conditions.AverageSpeed?
@fillSpeed( Conditions.AverageSpeed )
# Brute force array interation
getTheCondition:( segId, conditions ) ->
for condition in conditions
if condition.SegmentId? and condition['Conditions']?
return condition['Conditions'] if segId is condition.SegmentId
undefined
fillSpeed:( speed ) ->
fill = 'gray'
if 50 < speed then fill = 'green'
else if 25 < speed and speed <= 50 then fill = 'yellow'
else if 15 < speed and speed <= 25 then fill = 'red'
else if 0 < speed and speed <= 15 then fill = 'black'
fill
updateFills:( trip ) ->
for condition in trip.conditions
segId = Util.toInt(condition.SegmentId)
fill = @fillSpeed( condition['Conditions'].AverageSpeed )
@updateRectFill( segId, fill )
return
rect:( trip, g, seg, segId, x0, y0, w, h, fill, stroke, thick, text ) ->
svgId = Util.svgId( @name, segId.toString(), @role )
onClick = () =>
`x = d3.mouse(this)[0]`
mile = @mileBeg + (@mileEnd-@mileBeg) * x / @svgWidth()
console.log( 'DriveBar.rect()', { segId:segId, beg:seg['StartMileMarker'], mile:Util.toFixed(mile,1), end:seg['EndMileMarker'] } )
@doSeqmentDeals(trip,segId,mile)
g.append("svg:rect").attr('id',svgId).attr("x",x0).attr("y",y0).attr("width",w).attr("height",h).attr('segId',segId)
.attr("fill",fill).attr("stroke",stroke).attr("stroke-width",thick)
.on('click',onClick) #.on('mouseover',onMouseOver)
if text isnt ''
g.append("svg:text").text(text).attr("x",x0+w/2).attr("y",y0+h/2+2).attr('fill',fill)
.attr("text-anchor","middle").attr("font-size","4px").attr("font-family","Droid Sans")
return
doSeqmentDeals:( trip, segId, mile ) ->
deals = trip.getDealsBySegId( segId )
Util.dbg( 'DriveBarUC.doSeqmentDeals()', deals.length )
if deals.length > 0
deals[0].exit = Util.toInt(mile)
@stream.publish( 'Deals', deals )
updateRectFill:( segId, fill ) ->
rectId = Util.svgId( @name, segId.toString(), @role )
rect = @$svg.find('#'+rectId)
rect.attr( 'fill', fill )
return
`export default DriveBarUC`
| true |
import $ from 'jquery'
import * as d3 from 'd3'
import Util from '../util/Util.js'
class DriveBarUC
# @port [0,0,92,33] @land =[0,0,100,50
constructor:( @stream, @role, @port, @land ) ->
@name = 'PI:NAME:<NAME>END_PIBar'
@lastTrip = { name:'' }
@created = false
@screen = null # Set by position() updated by position()
Util.noop( @onScreenTransform )
html:() ->
@htmlId = Util.id(@name,@role) # For createSvg()
"""<div id="#{@htmlId}" class="#{Util.css(@name)}"></div>""" # May or may not need ext for CSS
ready:() ->
@$ = $( @html() )
position:( screen ) ->
# Util.dbg( 'DriveBarUC.position()', @role, screen )
@screen = screen
#@screenOrig = screen
Util.cssPosition( @$, @screen, @port, @land )
[@svg,@$svg,@g,@$g,@gId,@gw,@gh,@y0] = @createSvg( @$, @htmlId, @name, @role, @svgWidth(), @svgHeight(), @barTop() )
@subscribe()
subscribe:() ->
@stream.subscribe( 'Location', 'Deals', (location) => @onLocation( location ) )
@stream.subscribe( 'Screen', 'Deals', (screen) => @onScreen( screen ) )
@stream.subscribe( 'Trip', 'Deals', (trip) => @onTrip( trip ) )
onLocation:( location ) ->
Util.noop( 'DriveBarUC.onLocation()', @role, location )
onTrip:( trip ) =>
if not @created or trip.name isnt @lastTrip.name
@createBars( trip )
else
@updateFills( trip )
@lastTrip = trip
onScreen:( screen ) ->
@screen = screen
Util.cssPosition( @$, @screen, @port, @land )
@svg.attr( "width", @svgWidth() ).attr( 'height', @svgHeight() )
@createBars( @lastTrip )
# Screenlayout changes base on orientation not working
onScreenTransform:( next ) ->
prev = @screen
@screen = next
Util.cssPosition( @$, @screen, @port, @land )
@svg.attr( "width", @svgWidth() ).attr( 'height', @svgHeight() )
xp = 0
yp = 0
xn = 0
yn = 0
[xp,yp] = if prev.orientation is 'Portrait' then [@port[2],@port[3]] else [@land[2],@land[3]]
[xn,yn] = if next.orientation is 'Portrait' then [@port[2],@port[3]] else [@land[2],@land[3]]
xs = next.width * xn / ( prev.width * xp )
ys = next.height * yn / ( prev.height * yp )
@g.attr( 'transform', "scale(#{xs},#{ys})" )
return
# index 2 is width index 3 is height
svgWidth: () -> if @screen.orientation is 'Portrait' then @screen.width * @port[2]/100 else @screen.width * @land[2]/100
svgHeight:() -> if @screen.orientation is 'Portrait' then @screen.height * @port[3]/100 else @screen.height * @land[3]/100
barHeight:() -> @svgHeight() * 0.33
barTop: () -> @svgHeight() * 0.50
# d3 Svg dependency
createSvg:( $, htmlId, name, ext, width, height, barTop ) ->
svgId = Util.svgId( name, ext, 'Svg' )
gId = Util.svgId( name, ext, 'G' )
svg = d3.select('#'+htmlId).append("svg:svg").attr("id",svgId).attr("width",width).attr("height",height)
g = svg.append("svg:g").attr("id",gId) # All tranforms are applied to g
$svg = $.find( '#'+svgId )
$g = $.find( '#'+gId )
[svg,$svg,g,$g,gId,width,height,barTop]
createBars:( trip ) ->
d3.select('#'+@gId).selectAll("*").remove()
@mileBeg = trip.begMile()
@mileEnd = trip.endMile()
@distance = Math.abs( @mileEnd - @mileBeg )
# Util.dbg( 'DriveBarUC.createBars() 1', { mileBeg:@mileBeg, mileEnd:@mileEnd, distance:@distance } )
thick = 1
x = 0
y = @barTop()
w = @svgWidth()
h = @barHeight()
@createTravelTime( trip, @g, x, y, w, h )
@rect( trip, @g, trip.segments[0], @role+'Border', x, y, w, h, 'transparent', 'white', thick*4, '' )
for seg in trip.segments
beg = w * Math.abs( Util.toFloat(seg['StartMileMarker']) - @mileBeg ) / @distance
end = w * Math.abs( Util.toFloat(seg['EndMileMarker']) - @mileBeg ) / @distance
fill = @fillCondition( seg.segId, trip.conditions )
# Util.dbg( 'DriveBarUC.createBars() 2', { segId:seg.segId, beg:beg, end:end, w:Math.abs(end-beg) } )
@rect( trip, @g, seg, seg.segId, beg, y, Math.abs(end-beg), h, fill, 'black', thick, '' )
@created = true
return
createTravelTime:( trip, g, x, y, w, h ) ->
Util.noop( h )
fontSize = 18
fontSizePx = fontSize + 'px'
g.append("svg:text").text(trip.source).attr("x",4).attr("y",y-fontSize).attr('fill','white')
.attr("text-anchor","start").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text('TRAVEL TIME').attr("x",w/2).attr("y",y-fontSize*3.3 ).attr('fill','white')
.attr("text-anchor","middle").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text(trip.etaHoursMins()).attr("x",w/2).attr("y",y-fontSize*2.2 ).attr('fill','white')
.attr("text-anchor","middle").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
g.append("svg:text").text(trip.destination).attr("x",w-4).attr("y",y-fontSize ).attr('fill','white')
.attr("text-anchor","end").attr("font-size",fontSizePx).attr("font-family","Droid Sans")
fillCondition:( segId, conditions ) ->
Conditions = @getTheCondition( segId, conditions )
return 'gray' if not Conditions? or not Conditions.AverageSpeed?
@fillSpeed( Conditions.AverageSpeed )
# Brute force array interation
getTheCondition:( segId, conditions ) ->
for condition in conditions
if condition.SegmentId? and condition['Conditions']?
return condition['Conditions'] if segId is condition.SegmentId
undefined
fillSpeed:( speed ) ->
fill = 'gray'
if 50 < speed then fill = 'green'
else if 25 < speed and speed <= 50 then fill = 'yellow'
else if 15 < speed and speed <= 25 then fill = 'red'
else if 0 < speed and speed <= 15 then fill = 'black'
fill
updateFills:( trip ) ->
for condition in trip.conditions
segId = Util.toInt(condition.SegmentId)
fill = @fillSpeed( condition['Conditions'].AverageSpeed )
@updateRectFill( segId, fill )
return
rect:( trip, g, seg, segId, x0, y0, w, h, fill, stroke, thick, text ) ->
svgId = Util.svgId( @name, segId.toString(), @role )
onClick = () =>
`x = d3.mouse(this)[0]`
mile = @mileBeg + (@mileEnd-@mileBeg) * x / @svgWidth()
console.log( 'DriveBar.rect()', { segId:segId, beg:seg['StartMileMarker'], mile:Util.toFixed(mile,1), end:seg['EndMileMarker'] } )
@doSeqmentDeals(trip,segId,mile)
g.append("svg:rect").attr('id',svgId).attr("x",x0).attr("y",y0).attr("width",w).attr("height",h).attr('segId',segId)
.attr("fill",fill).attr("stroke",stroke).attr("stroke-width",thick)
.on('click',onClick) #.on('mouseover',onMouseOver)
if text isnt ''
g.append("svg:text").text(text).attr("x",x0+w/2).attr("y",y0+h/2+2).attr('fill',fill)
.attr("text-anchor","middle").attr("font-size","4px").attr("font-family","Droid Sans")
return
doSeqmentDeals:( trip, segId, mile ) ->
deals = trip.getDealsBySegId( segId )
Util.dbg( 'DriveBarUC.doSeqmentDeals()', deals.length )
if deals.length > 0
deals[0].exit = Util.toInt(mile)
@stream.publish( 'Deals', deals )
updateRectFill:( segId, fill ) ->
rectId = Util.svgId( @name, segId.toString(), @role )
rect = @$svg.find('#'+rectId)
rect.attr( 'fill', fill )
return
`export default DriveBarUC`
|
[
{
"context": " テスト書いてない - returns a t_wada.png\n#\n# Author:\n# bouzuya <m@bouzuya.net>\n#\nmodule.exports = (robot) ->\n p",
"end": 160,
"score": 0.9996874928474426,
"start": 153,
"tag": "USERNAME",
"value": "bouzuya"
},
{
"context": "い - returns a t_wada.png\n#\n# Author:\n# ... | src/scripts/twada.coffee | bouzuya/hubot-twada | 7 | # Description
# A Hubot script that returns a t_wada.png
#
# Configuration:
# None
#
# Commands:
# テスト書いてない - returns a t_wada.png
#
# Author:
# bouzuya <m@bouzuya.net>
#
module.exports = (robot) ->
pattern = /((?:test|テスト)[をは]?(?:[書か]いてい?ない|[書か]きたくない))$/i
robot.hear pattern, (res) ->
res.send "#{res.match[1]}とかお前それ @t_wada の前でも同じこと言えんの?"
| 12839 | # Description
# A Hubot script that returns a t_wada.png
#
# Configuration:
# None
#
# Commands:
# テスト書いてない - returns a t_wada.png
#
# Author:
# bouzuya <<EMAIL>>
#
module.exports = (robot) ->
pattern = /((?:test|テスト)[をは]?(?:[書か]いてい?ない|[書か]きたくない))$/i
robot.hear pattern, (res) ->
res.send "#{res.match[1]}とかお前それ @t_wada の前でも同じこと言えんの?"
| true | # Description
# A Hubot script that returns a t_wada.png
#
# Configuration:
# None
#
# Commands:
# テスト書いてない - returns a t_wada.png
#
# Author:
# bouzuya <PI:EMAIL:<EMAIL>END_PI>
#
module.exports = (robot) ->
pattern = /((?:test|テスト)[をは]?(?:[書か]いてい?ない|[書か]きたくない))$/i
robot.hear pattern, (res) ->
res.send "#{res.match[1]}とかお前それ @t_wada の前でも同じこと言えんの?"
|
[
{
"context": "###\n QuoJS 2.1\n (c) 2011, 2012 Javi Jiménez Villar (@soyjavi)\n http://quojs.tapquo.com\n###\n\n(($$) -",
"end": 52,
"score": 0.9998633861541748,
"start": 33,
"tag": "NAME",
"value": "Javi Jiménez Villar"
},
{
"context": "#\n QuoJS 2.1\n (c) 2011, 2012 Javi Jimén... | src/QuoJS/src/quo.environment.coffee | biojazzard/kirbout | 2 | ###
QuoJS 2.1
(c) 2011, 2012 Javi Jiménez Villar (@soyjavi)
http://quojs.tapquo.com
###
(($$) ->
_current = null
IS_WEBKIT = /WebKit\/([\d.]+)/
SUPPORTED_OS =
Android: /(Android)\s+([\d.]+)/
ipad: /(iPad).*OS\s([\d_]+)/
iphone: /(iPhone\sOS)\s([\d_]+)/
blackberry: /(BlackBerry).*Version\/([\d.]+)/
webos: /(webOS|hpwOS)[\s\/]([\d.]+)/
$$.isMobile = ->
_current = _current or _detectEnvironment()
_current.isMobile
$$.environment = ->
_current = _current or _detectEnvironment()
_current
$$.isOnline = ->
navigator.onLine
_detectEnvironment = ->
user_agent = navigator.userAgent
environment = {}
environment.browser = _detectBrowser(user_agent)
environment.os = _detectOS(user_agent)
environment.isMobile = !!environment.os
environment.screen = _detectScreen()
environment
_detectBrowser = (user_agent) ->
is_webkit = user_agent.match(IS_WEBKIT)
if is_webkit then is_webkit[0] else user_agent
_detectOS = (user_agent) ->
detected_os = null
for os of SUPPORTED_OS
supported = user_agent.match(SUPPORTED_OS[os])
if supported
detected_os =
name: (if (os is "iphone" or os is "ipad") then "ios" else os)
version: supported[2].replace("_", ".")
break
detected_os
_detectScreen = ->
width: window.innerWidth
height: window.innerHeight
return
) Quo
| 221275 | ###
QuoJS 2.1
(c) 2011, 2012 <NAME> (@soyjavi)
http://quojs.tapquo.com
###
(($$) ->
_current = null
IS_WEBKIT = /WebKit\/([\d.]+)/
SUPPORTED_OS =
Android: /(Android)\s+([\d.]+)/
ipad: /(iPad).*OS\s([\d_]+)/
iphone: /(iPhone\sOS)\s([\d_]+)/
blackberry: /(BlackBerry).*Version\/([\d.]+)/
webos: /(webOS|hpwOS)[\s\/]([\d.]+)/
$$.isMobile = ->
_current = _current or _detectEnvironment()
_current.isMobile
$$.environment = ->
_current = _current or _detectEnvironment()
_current
$$.isOnline = ->
navigator.onLine
_detectEnvironment = ->
user_agent = navigator.userAgent
environment = {}
environment.browser = _detectBrowser(user_agent)
environment.os = _detectOS(user_agent)
environment.isMobile = !!environment.os
environment.screen = _detectScreen()
environment
_detectBrowser = (user_agent) ->
is_webkit = user_agent.match(IS_WEBKIT)
if is_webkit then is_webkit[0] else user_agent
_detectOS = (user_agent) ->
detected_os = null
for os of SUPPORTED_OS
supported = user_agent.match(SUPPORTED_OS[os])
if supported
detected_os =
name: (if (os is "iphone" or os is "ipad") then "ios" else os)
version: supported[2].replace("_", ".")
break
detected_os
_detectScreen = ->
width: window.innerWidth
height: window.innerHeight
return
) Quo
| true | ###
QuoJS 2.1
(c) 2011, 2012 PI:NAME:<NAME>END_PI (@soyjavi)
http://quojs.tapquo.com
###
(($$) ->
_current = null
IS_WEBKIT = /WebKit\/([\d.]+)/
SUPPORTED_OS =
Android: /(Android)\s+([\d.]+)/
ipad: /(iPad).*OS\s([\d_]+)/
iphone: /(iPhone\sOS)\s([\d_]+)/
blackberry: /(BlackBerry).*Version\/([\d.]+)/
webos: /(webOS|hpwOS)[\s\/]([\d.]+)/
$$.isMobile = ->
_current = _current or _detectEnvironment()
_current.isMobile
$$.environment = ->
_current = _current or _detectEnvironment()
_current
$$.isOnline = ->
navigator.onLine
_detectEnvironment = ->
user_agent = navigator.userAgent
environment = {}
environment.browser = _detectBrowser(user_agent)
environment.os = _detectOS(user_agent)
environment.isMobile = !!environment.os
environment.screen = _detectScreen()
environment
_detectBrowser = (user_agent) ->
is_webkit = user_agent.match(IS_WEBKIT)
if is_webkit then is_webkit[0] else user_agent
_detectOS = (user_agent) ->
detected_os = null
for os of SUPPORTED_OS
supported = user_agent.match(SUPPORTED_OS[os])
if supported
detected_os =
name: (if (os is "iphone" or os is "ipad") then "ios" else os)
version: supported[2].replace("_", ".")
break
detected_os
_detectScreen = ->
width: window.innerWidth
height: window.innerHeight
return
) Quo
|
[
{
"context": " end: 2.5,\n# texts: [\n# {text: 'Hum', position: 1.2},\n# {text: 'bewafaa'",
"end": 120,
"score": 0.7531973123550415,
"start": 117,
"tag": "NAME",
"value": "Hum"
},
{
"context": " end: 4.3,\n# texts: [\n# {text: 'Hargiz'... | app/assets/javascripts/strut_builder.js.coffee | shirshendu/kine_type | 1 | window.StrutBuilder ||= {}
# [
# { #slide 1
# start: 0,
# end: 2.5,
# texts: [
# {text: 'Hum', position: 1.2},
# {text: 'bewafaa', position: 1.7}
# ]
# },
# { #slide 2
# start: 2.5,
# end: 4.3,
# texts: [
# {text: 'Hargiz', position: 3.3},
# {text: 'na they', position: 3.8}
# ]
# }
# ]
StrutBuilder.build = (data) ->
slidesdata = (slide for slide in data when slide.data.type == 'segment')
itemdata = (item for item in data when item.data.type == 'segment_item')
for slidedata in slidesdata
items = for item, i in itemdata when (slidedata.start <= item.start and item.start < slidedata.end)
text: item.data.note
position: item.start
animation: item.data.animation
duration: if itemdata[i + 1] then (itemdata[i+1].start - itemdata[i].start - 0.2) else (slidedata.end - itemdata[i].start - 0.3)
slidedata.texts = items
bgs = []
slides = for slide, i in slidesdata
StrutBuilder.Slide.build(slide, i, bgs)
{
slides: slides,
activeSlide: slides[0],
fileName: 'presentation-unnamed',
customStylesheet: '',
deckVersion: '1.0',
customBackgrounds: { bgs: bgs }
}
| 168098 | window.StrutBuilder ||= {}
# [
# { #slide 1
# start: 0,
# end: 2.5,
# texts: [
# {text: '<NAME>', position: 1.2},
# {text: 'bewafaa', position: 1.7}
# ]
# },
# { #slide 2
# start: 2.5,
# end: 4.3,
# texts: [
# {text: '<NAME>', position: 3.3},
# {text: 'na they', position: 3.8}
# ]
# }
# ]
StrutBuilder.build = (data) ->
slidesdata = (slide for slide in data when slide.data.type == 'segment')
itemdata = (item for item in data when item.data.type == 'segment_item')
for slidedata in slidesdata
items = for item, i in itemdata when (slidedata.start <= item.start and item.start < slidedata.end)
text: item.data.note
position: item.start
animation: item.data.animation
duration: if itemdata[i + 1] then (itemdata[i+1].start - itemdata[i].start - 0.2) else (slidedata.end - itemdata[i].start - 0.3)
slidedata.texts = items
bgs = []
slides = for slide, i in slidesdata
StrutBuilder.Slide.build(slide, i, bgs)
{
slides: slides,
activeSlide: slides[0],
fileName: 'presentation-unnamed',
customStylesheet: '',
deckVersion: '1.0',
customBackgrounds: { bgs: bgs }
}
| true | window.StrutBuilder ||= {}
# [
# { #slide 1
# start: 0,
# end: 2.5,
# texts: [
# {text: 'PI:NAME:<NAME>END_PI', position: 1.2},
# {text: 'bewafaa', position: 1.7}
# ]
# },
# { #slide 2
# start: 2.5,
# end: 4.3,
# texts: [
# {text: 'PI:NAME:<NAME>END_PI', position: 3.3},
# {text: 'na they', position: 3.8}
# ]
# }
# ]
StrutBuilder.build = (data) ->
slidesdata = (slide for slide in data when slide.data.type == 'segment')
itemdata = (item for item in data when item.data.type == 'segment_item')
for slidedata in slidesdata
items = for item, i in itemdata when (slidedata.start <= item.start and item.start < slidedata.end)
text: item.data.note
position: item.start
animation: item.data.animation
duration: if itemdata[i + 1] then (itemdata[i+1].start - itemdata[i].start - 0.2) else (slidedata.end - itemdata[i].start - 0.3)
slidedata.texts = items
bgs = []
slides = for slide, i in slidesdata
StrutBuilder.Slide.build(slide, i, bgs)
{
slides: slides,
activeSlide: slides[0],
fileName: 'presentation-unnamed',
customStylesheet: '',
deckVersion: '1.0',
customBackgrounds: { bgs: bgs }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.