entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": " when 'horizontal' then '|'\n key = \"#{orientationChar}:#{children.length}\"\n layout = {}\n ",
"end": 1313,
"score": 0.7196963429450989,
"start": 1291,
"tag": "KEY",
"value": "\"#{orientationChar}:#{"
}
] | lib/main.coffee | t9md/atom-dev | 0 | {Range, CompositeDisposable} = require 'atom'
settings = require './settings'
module.exports =
disposables: null
config: settings.config
activate: ->
@disposables = new CompositeDisposable
# for scope in ['atom-text-editor.vim-mode-plus', 'atom-text-editor', 'atom-workspace']
# @disposables.add atom.commands.add(scope, 'dev:propagate', @propagate)
@disposables.add atom.commands.add 'atom-workspace',
'dev:log-vim-state-mode': => @logVimStateMode()
'dev:set-var-in-dev-tools': => @setVarInDevTools()
'dev:throw-error': => @throwError()
'dev:log-pane-layout': => @logPaneLayout()
logPaneLayout: ->
paneLayoutFor = (root) ->
activePane = atom.workspace.getActivePane()
switch root.constructor.name
when 'Pane'
{activeItem} = root
isActivePane = root is activePane
root.getItems().map (item) ->
title = item.getTitle()
if item is activeItem
title = '*' + title
title = "[[#{title}]]" if isActivePane
title
when 'PaneAxis'
children = root.getChildren()
orientationChar = switch root.getOrientation()
when 'vertical' then '-'
when 'horizontal' then '|'
key = "#{orientationChar}:#{children.length}"
layout = {}
layout[key] = children.map(paneLayoutFor)
layout
root = atom.workspace.getActivePane().getContainer().getRoot()
inspect = require('util').inspect()
console.log inspect(paneLayoutFor(root), depth: 10)
throwError: ->
try
throw new Error('sample Error')
catch error
throw error
setVarInDevTools: ->
atom.openDevTools()
console.clear()
code = """
e = atom.workspace.getActiveTextEditor()
el = atom.views.getView(e)
c = e.getLastCursor()
s = e.getLastSelection()
p = atom.workspace.getActivePane()
"""
compiledCode = require('coffee-script').compile(code, bare: true)
require('vm').runInThisContext(compiledCode)
deactivate: ->
@disposables?.dispose()
consumeVimMode: (@vimModeService) ->
logVimStateMode: ->
vimState = @vimModeService.getEditorState(atom.workspace.getActiveTextEditor())
console.log {mode: vimState.mode, submode: vimState.mode}
| 191707 | {Range, CompositeDisposable} = require 'atom'
settings = require './settings'
module.exports =
disposables: null
config: settings.config
activate: ->
@disposables = new CompositeDisposable
# for scope in ['atom-text-editor.vim-mode-plus', 'atom-text-editor', 'atom-workspace']
# @disposables.add atom.commands.add(scope, 'dev:propagate', @propagate)
@disposables.add atom.commands.add 'atom-workspace',
'dev:log-vim-state-mode': => @logVimStateMode()
'dev:set-var-in-dev-tools': => @setVarInDevTools()
'dev:throw-error': => @throwError()
'dev:log-pane-layout': => @logPaneLayout()
logPaneLayout: ->
paneLayoutFor = (root) ->
activePane = atom.workspace.getActivePane()
switch root.constructor.name
when 'Pane'
{activeItem} = root
isActivePane = root is activePane
root.getItems().map (item) ->
title = item.getTitle()
if item is activeItem
title = '*' + title
title = "[[#{title}]]" if isActivePane
title
when 'PaneAxis'
children = root.getChildren()
orientationChar = switch root.getOrientation()
when 'vertical' then '-'
when 'horizontal' then '|'
key = <KEY>children.length}"
layout = {}
layout[key] = children.map(paneLayoutFor)
layout
root = atom.workspace.getActivePane().getContainer().getRoot()
inspect = require('util').inspect()
console.log inspect(paneLayoutFor(root), depth: 10)
throwError: ->
try
throw new Error('sample Error')
catch error
throw error
setVarInDevTools: ->
atom.openDevTools()
console.clear()
code = """
e = atom.workspace.getActiveTextEditor()
el = atom.views.getView(e)
c = e.getLastCursor()
s = e.getLastSelection()
p = atom.workspace.getActivePane()
"""
compiledCode = require('coffee-script').compile(code, bare: true)
require('vm').runInThisContext(compiledCode)
deactivate: ->
@disposables?.dispose()
consumeVimMode: (@vimModeService) ->
logVimStateMode: ->
vimState = @vimModeService.getEditorState(atom.workspace.getActiveTextEditor())
console.log {mode: vimState.mode, submode: vimState.mode}
| true | {Range, CompositeDisposable} = require 'atom'
settings = require './settings'
module.exports =
disposables: null
config: settings.config
activate: ->
@disposables = new CompositeDisposable
# for scope in ['atom-text-editor.vim-mode-plus', 'atom-text-editor', 'atom-workspace']
# @disposables.add atom.commands.add(scope, 'dev:propagate', @propagate)
@disposables.add atom.commands.add 'atom-workspace',
'dev:log-vim-state-mode': => @logVimStateMode()
'dev:set-var-in-dev-tools': => @setVarInDevTools()
'dev:throw-error': => @throwError()
'dev:log-pane-layout': => @logPaneLayout()
logPaneLayout: ->
paneLayoutFor = (root) ->
activePane = atom.workspace.getActivePane()
switch root.constructor.name
when 'Pane'
{activeItem} = root
isActivePane = root is activePane
root.getItems().map (item) ->
title = item.getTitle()
if item is activeItem
title = '*' + title
title = "[[#{title}]]" if isActivePane
title
when 'PaneAxis'
children = root.getChildren()
orientationChar = switch root.getOrientation()
when 'vertical' then '-'
when 'horizontal' then '|'
key = PI:KEY:<KEY>END_PIchildren.length}"
layout = {}
layout[key] = children.map(paneLayoutFor)
layout
root = atom.workspace.getActivePane().getContainer().getRoot()
inspect = require('util').inspect()
console.log inspect(paneLayoutFor(root), depth: 10)
throwError: ->
try
throw new Error('sample Error')
catch error
throw error
setVarInDevTools: ->
atom.openDevTools()
console.clear()
code = """
e = atom.workspace.getActiveTextEditor()
el = atom.views.getView(e)
c = e.getLastCursor()
s = e.getLastSelection()
p = atom.workspace.getActivePane()
"""
compiledCode = require('coffee-script').compile(code, bare: true)
require('vm').runInThisContext(compiledCode)
deactivate: ->
@disposables?.dispose()
consumeVimMode: (@vimModeService) ->
logVimStateMode: ->
vimState = @vimModeService.getEditorState(atom.workspace.getActiveTextEditor())
console.log {mode: vimState.mode, submode: vimState.mode}
|
[
{
"context": "PO_TEMPLATES =\n github : 'git clone git@github.com:your-organization/reponame.git'\n bitbucket ",
"end": 619,
"score": 0.992059588432312,
"start": 605,
"tag": "EMAIL",
"value": "git@github.com"
},
{
"context": "n/reponame.git'\n bitbucket ... | client/stack-editor/lib/onboarding/onboardingview.coffee | lionheart1022/koding | 0 | $ = require 'jquery'
kd = require 'kd'
hljs = require 'highlight.js'
JView = require 'app/jview'
CodeSetupView = require './codesetupview'
{ jsonToYaml } = require 'app/util/stacks/yamlutils'
GetStartedView = require './getstartedview'
ConfigurationView = require './configurationview'
ProviderSelectionView = require './providerselectionview'
Tracker = require 'app/util/tracker'
CustomLinkView = require 'app/customlinkview'
CLONE_REPO_TEMPLATES =
github : 'git clone git@github.com:your-organization/reponame.git'
bitbucket : 'git clone git@bitbucket.org/your-organization/reponame.git'
gitlab : 'git clone git@gitlab.com/your-organization/reponame.git'
yourgitserver : 'git clone git@yourgitserver.com/reponame.git'
PROVIDER_TEMPLATES =
aws :
aws :
access_key : '${var.aws_access_key}'
secret_key : '${var.aws_secret_key}'
module.exports = class OnboardingView extends JView
constructor: (options = {}, data) ->
options.cssClass = 'stack-onboarding main-content'
super options, data
@createViews()
@createFooter()
@bindPageEvents()
createViews: ->
@getStartedView = new GetStartedView
@codeSetupView = new CodeSetupView { cssClass: 'hidden' }
@configurationView = new ConfigurationView { cssClass: 'hidden' }
@providerSelectionView = new ProviderSelectionView { cssClass: 'hidden' }
@pages = [ @getStartedView, @providerSelectionView, @configurationView, @codeSetupView ]
@currentPage = @getStartedView
@setClass 'get-started'
bindPageEvents: ->
@pages.forEach (page) => page.on 'UpdateStackTemplate', => @updateStackTemplate()
@on 'PageNavigationRequested', @bound 'handlePageNavigationRequested'
@getStartedView.on 'NextPageRequested', =>
@unsetClass 'get-started'
@emit 'PageNavigationRequested', 'next'
@getStartedView.emit 'NextPageRequested' if @getOption 'skipOnboarding'
@providerSelectionView.on 'UpdateStackTemplate', @bound 'handleUpdateStackTemplate'
@configurationView.tabView.on 'PaneAdded', => @codeSetupView.addPane()
@configurationView.tabView.on 'PaneRemoved', =>
@codeSetupView.tabView.removePane @codeSetupView.tabView.panes.last
@configurationView.on 'InstanceTypeChanged', @bound 'handleInstanceTypeChanged'
handlePageNavigationRequested: (direction) ->
pageIndex = @pages.indexOf @currentPage
nextIndex = if direction is 'next' then ++pageIndex else --pageIndex
targetPage = @pages[nextIndex]
# Temporary solution ~ GG
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if direction is 'next' and selectedProvider is 'vagrant'
@onboardingCompleted()
else if targetPage
@currentPage.hide()
targetPage.show()
@setClass 'get-started' if targetPage is @getStartedView
@currentPage = targetPage
else
@onboardingCompleted()
handleUpdateStackTemplate: (isSelected) ->
if isSelected
@nextButton.enable()
@skipLink.show()
else
@nextButton.disable()
@skipLink.hide()
handleInstanceTypeChanged: (type) ->
for pane, index in @configurationView.tabView.panes
label = @codeSetupView.tabView.panes[index]?.instanceTypeLabel
label.updatePartial pane.instanceTypeSelectBox.getValue() if label
createFooter: ->
@cancelButton = new kd.ButtonView
cssClass : 'StackEditor-OnboardingModal--cancel'
title : 'CANCEL'
callback : => @emit 'StackCreationCancelled'
@backButton = new kd.ButtonView
cssClass : 'outline back'
title : 'Back'
callback : => @emit 'PageNavigationRequested', 'prev'
@nextButton = new kd.ButtonView
cssClass : 'outline next'
title : 'Next'
disabled : yes
callback : => @emit 'PageNavigationRequested', 'next'
@skipLink = new CustomLinkView
cssClass : 'HomeAppView--button hidden'
title : 'SKIP GUIDE'
click : =>
@destroy()
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if selectedProvider?
options = { selectedProvider }
if selectedProvider is 'vagrant'
options.template = { content: @stackTemplate }
Tracker.track Tracker.STACKS_SKIP_SETUP
@emit 'StackOnboardingCompleted', options
updateStackTemplate: ->
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if selectedProvider is 'vagrant'
@stackTemplate = @getDefaultStackTemplate selectedProvider, 'json'
return
codeSetupPanes = @codeSetupView.tabView.panes
serverConfigPanes = @configurationView.tabView.panes
selectedInstances = {}
serverConfigPanes.forEach (pane, index) ->
selectedServices = []
{ configView, instanceTypeSelectBox } = pane
{ configurationToggles } = configView
serverConfig = selectedInstances["example_#{++index}"] =
instance_type : instanceTypeSelectBox.getValue()
ami : ''
tags :
Name : '${var.koding_user_username}-${var.koding_group_slug}'
configurationToggles.forEach (toggle) ->
selectedServices.push (toggle.getOption 'package' or toggle.getOption 'name') if toggle.getValue()
if selectedServices.length
serverConfig.user_data = "export DEBIAN_FRONTEND=noninteractive\napt-get update -y\napt-get -y install #{selectedServices.join ' '}"
stackTemplate =
provider : PROVIDER_TEMPLATES[selectedProvider]
resource :
aws_instance : selectedInstances
codeSetupPanes.forEach (pane, index) ->
selectedService = pane.view.selected?.getOption 'service'
cloneText = CLONE_REPO_TEMPLATES[selectedService]
serverConfig = stackTemplate.resource.aws_instance["example_#{++index}"]
groupSlug = kd.singletons.groupsController.getCurrentGroup().slug
user_data = serverConfig?.user_data
if cloneText
cloneText = cloneText.replace 'your-organization', groupSlug
if serverConfig
if user_data
serverConfig.user_data = """
#{user_data}
#{cloneText}
"""
else
serverConfig.user_data = cloneText if serverConfig
{ content, err } = jsonToYaml stackTemplate
if err
return new kd.NotificationView 'Unable to update stack template preview'
@stackTemplate = JSON.stringify stackTemplate
getDefaultStackTemplate: (provider, format = 'json') ->
stackTemplates =
vagrant :
resource :
vagrant_instance :
localvm :
cpus : 2
memory : 2048
box : 'ubuntu/trusty64'
user_data : '''
sudo apt-get install sl -y
touch /tmp/${var.koding_user_username}.txt
'''
stackTemplate = stackTemplates[provider] ? { error: 'Provider not supported' }
if format is 'yaml'
{ content, err } = jsonToYaml stackTemplate
return content
else
return JSON.stringify stackTemplate
onboardingCompleted: ->
@hide()
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
@emit 'StackOnboardingCompleted', {
selectedProvider, template: { content: @stackTemplate }
}
pistachio: ->
return '''
{{> @getStartedView}}
{{> @providerSelectionView}}
{{> @configurationView}}
{{> @codeSetupView}}
<footer>
{{> @backButton}}
{{> @nextButton}}
{{> @skipLink}}
{{> @cancelButton}}
</footer>
'''
| 18809 | $ = require 'jquery'
kd = require 'kd'
hljs = require 'highlight.js'
JView = require 'app/jview'
CodeSetupView = require './codesetupview'
{ jsonToYaml } = require 'app/util/stacks/yamlutils'
GetStartedView = require './getstartedview'
ConfigurationView = require './configurationview'
ProviderSelectionView = require './providerselectionview'
Tracker = require 'app/util/tracker'
CustomLinkView = require 'app/customlinkview'
CLONE_REPO_TEMPLATES =
github : 'git clone <EMAIL>:your-organization/reponame.git'
bitbucket : 'git clone <EMAIL>/your-organization/reponame.git'
gitlab : 'git clone git<EMAIL>@gitlab.com/your-organization/reponame.git'
yourgitserver : 'git clone <EMAIL>/reponame.git'
PROVIDER_TEMPLATES =
aws :
aws :
access_key : '${var.aws_access_key}'
secret_key : '${var.aws_secret_key}'
module.exports = class OnboardingView extends JView
constructor: (options = {}, data) ->
options.cssClass = 'stack-onboarding main-content'
super options, data
@createViews()
@createFooter()
@bindPageEvents()
createViews: ->
@getStartedView = new GetStartedView
@codeSetupView = new CodeSetupView { cssClass: 'hidden' }
@configurationView = new ConfigurationView { cssClass: 'hidden' }
@providerSelectionView = new ProviderSelectionView { cssClass: 'hidden' }
@pages = [ @getStartedView, @providerSelectionView, @configurationView, @codeSetupView ]
@currentPage = @getStartedView
@setClass 'get-started'
bindPageEvents: ->
@pages.forEach (page) => page.on 'UpdateStackTemplate', => @updateStackTemplate()
@on 'PageNavigationRequested', @bound 'handlePageNavigationRequested'
@getStartedView.on 'NextPageRequested', =>
@unsetClass 'get-started'
@emit 'PageNavigationRequested', 'next'
@getStartedView.emit 'NextPageRequested' if @getOption 'skipOnboarding'
@providerSelectionView.on 'UpdateStackTemplate', @bound 'handleUpdateStackTemplate'
@configurationView.tabView.on 'PaneAdded', => @codeSetupView.addPane()
@configurationView.tabView.on 'PaneRemoved', =>
@codeSetupView.tabView.removePane @codeSetupView.tabView.panes.last
@configurationView.on 'InstanceTypeChanged', @bound 'handleInstanceTypeChanged'
handlePageNavigationRequested: (direction) ->
pageIndex = @pages.indexOf @currentPage
nextIndex = if direction is 'next' then ++pageIndex else --pageIndex
targetPage = @pages[nextIndex]
# Temporary solution ~ GG
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if direction is 'next' and selectedProvider is 'vagrant'
@onboardingCompleted()
else if targetPage
@currentPage.hide()
targetPage.show()
@setClass 'get-started' if targetPage is @getStartedView
@currentPage = targetPage
else
@onboardingCompleted()
handleUpdateStackTemplate: (isSelected) ->
if isSelected
@nextButton.enable()
@skipLink.show()
else
@nextButton.disable()
@skipLink.hide()
handleInstanceTypeChanged: (type) ->
for pane, index in @configurationView.tabView.panes
label = @codeSetupView.tabView.panes[index]?.instanceTypeLabel
label.updatePartial pane.instanceTypeSelectBox.getValue() if label
createFooter: ->
@cancelButton = new kd.ButtonView
cssClass : 'StackEditor-OnboardingModal--cancel'
title : 'CANCEL'
callback : => @emit 'StackCreationCancelled'
@backButton = new kd.ButtonView
cssClass : 'outline back'
title : 'Back'
callback : => @emit 'PageNavigationRequested', 'prev'
@nextButton = new kd.ButtonView
cssClass : 'outline next'
title : 'Next'
disabled : yes
callback : => @emit 'PageNavigationRequested', 'next'
@skipLink = new CustomLinkView
cssClass : 'HomeAppView--button hidden'
title : 'SKIP GUIDE'
click : =>
@destroy()
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if selectedProvider?
options = { selectedProvider }
if selectedProvider is 'vagrant'
options.template = { content: @stackTemplate }
Tracker.track Tracker.STACKS_SKIP_SETUP
@emit 'StackOnboardingCompleted', options
updateStackTemplate: ->
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if selectedProvider is 'vagrant'
@stackTemplate = @getDefaultStackTemplate selectedProvider, 'json'
return
codeSetupPanes = @codeSetupView.tabView.panes
serverConfigPanes = @configurationView.tabView.panes
selectedInstances = {}
serverConfigPanes.forEach (pane, index) ->
selectedServices = []
{ configView, instanceTypeSelectBox } = pane
{ configurationToggles } = configView
serverConfig = selectedInstances["example_#{++index}"] =
instance_type : instanceTypeSelectBox.getValue()
ami : ''
tags :
Name : '${var.koding_user_username}-${var.koding_group_slug}'
configurationToggles.forEach (toggle) ->
selectedServices.push (toggle.getOption 'package' or toggle.getOption 'name') if toggle.getValue()
if selectedServices.length
serverConfig.user_data = "export DEBIAN_FRONTEND=noninteractive\napt-get update -y\napt-get -y install #{selectedServices.join ' '}"
stackTemplate =
provider : PROVIDER_TEMPLATES[selectedProvider]
resource :
aws_instance : selectedInstances
codeSetupPanes.forEach (pane, index) ->
selectedService = pane.view.selected?.getOption 'service'
cloneText = CLONE_REPO_TEMPLATES[selectedService]
serverConfig = stackTemplate.resource.aws_instance["example_#{++index}"]
groupSlug = kd.singletons.groupsController.getCurrentGroup().slug
user_data = serverConfig?.user_data
if cloneText
cloneText = cloneText.replace 'your-organization', groupSlug
if serverConfig
if user_data
serverConfig.user_data = """
#{user_data}
#{cloneText}
"""
else
serverConfig.user_data = cloneText if serverConfig
{ content, err } = jsonToYaml stackTemplate
if err
return new kd.NotificationView 'Unable to update stack template preview'
@stackTemplate = JSON.stringify stackTemplate
getDefaultStackTemplate: (provider, format = 'json') ->
stackTemplates =
vagrant :
resource :
vagrant_instance :
localvm :
cpus : 2
memory : 2048
box : 'ubuntu/trusty64'
user_data : '''
sudo apt-get install sl -y
touch /tmp/${var.koding_user_username}.txt
'''
stackTemplate = stackTemplates[provider] ? { error: 'Provider not supported' }
if format is 'yaml'
{ content, err } = jsonToYaml stackTemplate
return content
else
return JSON.stringify stackTemplate
onboardingCompleted: ->
@hide()
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
@emit 'StackOnboardingCompleted', {
selectedProvider, template: { content: @stackTemplate }
}
pistachio: ->
return '''
{{> @getStartedView}}
{{> @providerSelectionView}}
{{> @configurationView}}
{{> @codeSetupView}}
<footer>
{{> @backButton}}
{{> @nextButton}}
{{> @skipLink}}
{{> @cancelButton}}
</footer>
'''
| true | $ = require 'jquery'
kd = require 'kd'
hljs = require 'highlight.js'
JView = require 'app/jview'
CodeSetupView = require './codesetupview'
{ jsonToYaml } = require 'app/util/stacks/yamlutils'
GetStartedView = require './getstartedview'
ConfigurationView = require './configurationview'
ProviderSelectionView = require './providerselectionview'
Tracker = require 'app/util/tracker'
CustomLinkView = require 'app/customlinkview'
CLONE_REPO_TEMPLATES =
github : 'git clone PI:EMAIL:<EMAIL>END_PI:your-organization/reponame.git'
bitbucket : 'git clone PI:EMAIL:<EMAIL>END_PI/your-organization/reponame.git'
gitlab : 'git clone gitPI:EMAIL:<EMAIL>END_PI@gitlab.com/your-organization/reponame.git'
yourgitserver : 'git clone PI:EMAIL:<EMAIL>END_PI/reponame.git'
PROVIDER_TEMPLATES =
aws :
aws :
access_key : '${var.aws_access_key}'
secret_key : '${var.aws_secret_key}'
module.exports = class OnboardingView extends JView
constructor: (options = {}, data) ->
options.cssClass = 'stack-onboarding main-content'
super options, data
@createViews()
@createFooter()
@bindPageEvents()
createViews: ->
@getStartedView = new GetStartedView
@codeSetupView = new CodeSetupView { cssClass: 'hidden' }
@configurationView = new ConfigurationView { cssClass: 'hidden' }
@providerSelectionView = new ProviderSelectionView { cssClass: 'hidden' }
@pages = [ @getStartedView, @providerSelectionView, @configurationView, @codeSetupView ]
@currentPage = @getStartedView
@setClass 'get-started'
bindPageEvents: ->
@pages.forEach (page) => page.on 'UpdateStackTemplate', => @updateStackTemplate()
@on 'PageNavigationRequested', @bound 'handlePageNavigationRequested'
@getStartedView.on 'NextPageRequested', =>
@unsetClass 'get-started'
@emit 'PageNavigationRequested', 'next'
@getStartedView.emit 'NextPageRequested' if @getOption 'skipOnboarding'
@providerSelectionView.on 'UpdateStackTemplate', @bound 'handleUpdateStackTemplate'
@configurationView.tabView.on 'PaneAdded', => @codeSetupView.addPane()
@configurationView.tabView.on 'PaneRemoved', =>
@codeSetupView.tabView.removePane @codeSetupView.tabView.panes.last
@configurationView.on 'InstanceTypeChanged', @bound 'handleInstanceTypeChanged'
handlePageNavigationRequested: (direction) ->
pageIndex = @pages.indexOf @currentPage
nextIndex = if direction is 'next' then ++pageIndex else --pageIndex
targetPage = @pages[nextIndex]
# Temporary solution ~ GG
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if direction is 'next' and selectedProvider is 'vagrant'
@onboardingCompleted()
else if targetPage
@currentPage.hide()
targetPage.show()
@setClass 'get-started' if targetPage is @getStartedView
@currentPage = targetPage
else
@onboardingCompleted()
handleUpdateStackTemplate: (isSelected) ->
if isSelected
@nextButton.enable()
@skipLink.show()
else
@nextButton.disable()
@skipLink.hide()
handleInstanceTypeChanged: (type) ->
for pane, index in @configurationView.tabView.panes
label = @codeSetupView.tabView.panes[index]?.instanceTypeLabel
label.updatePartial pane.instanceTypeSelectBox.getValue() if label
createFooter: ->
@cancelButton = new kd.ButtonView
cssClass : 'StackEditor-OnboardingModal--cancel'
title : 'CANCEL'
callback : => @emit 'StackCreationCancelled'
@backButton = new kd.ButtonView
cssClass : 'outline back'
title : 'Back'
callback : => @emit 'PageNavigationRequested', 'prev'
@nextButton = new kd.ButtonView
cssClass : 'outline next'
title : 'Next'
disabled : yes
callback : => @emit 'PageNavigationRequested', 'next'
@skipLink = new CustomLinkView
cssClass : 'HomeAppView--button hidden'
title : 'SKIP GUIDE'
click : =>
@destroy()
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if selectedProvider?
options = { selectedProvider }
if selectedProvider is 'vagrant'
options.template = { content: @stackTemplate }
Tracker.track Tracker.STACKS_SKIP_SETUP
@emit 'StackOnboardingCompleted', options
updateStackTemplate: ->
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
if selectedProvider is 'vagrant'
@stackTemplate = @getDefaultStackTemplate selectedProvider, 'json'
return
codeSetupPanes = @codeSetupView.tabView.panes
serverConfigPanes = @configurationView.tabView.panes
selectedInstances = {}
serverConfigPanes.forEach (pane, index) ->
selectedServices = []
{ configView, instanceTypeSelectBox } = pane
{ configurationToggles } = configView
serverConfig = selectedInstances["example_#{++index}"] =
instance_type : instanceTypeSelectBox.getValue()
ami : ''
tags :
Name : '${var.koding_user_username}-${var.koding_group_slug}'
configurationToggles.forEach (toggle) ->
selectedServices.push (toggle.getOption 'package' or toggle.getOption 'name') if toggle.getValue()
if selectedServices.length
serverConfig.user_data = "export DEBIAN_FRONTEND=noninteractive\napt-get update -y\napt-get -y install #{selectedServices.join ' '}"
stackTemplate =
provider : PROVIDER_TEMPLATES[selectedProvider]
resource :
aws_instance : selectedInstances
codeSetupPanes.forEach (pane, index) ->
selectedService = pane.view.selected?.getOption 'service'
cloneText = CLONE_REPO_TEMPLATES[selectedService]
serverConfig = stackTemplate.resource.aws_instance["example_#{++index}"]
groupSlug = kd.singletons.groupsController.getCurrentGroup().slug
user_data = serverConfig?.user_data
if cloneText
cloneText = cloneText.replace 'your-organization', groupSlug
if serverConfig
if user_data
serverConfig.user_data = """
#{user_data}
#{cloneText}
"""
else
serverConfig.user_data = cloneText if serverConfig
{ content, err } = jsonToYaml stackTemplate
if err
return new kd.NotificationView 'Unable to update stack template preview'
@stackTemplate = JSON.stringify stackTemplate
getDefaultStackTemplate: (provider, format = 'json') ->
stackTemplates =
vagrant :
resource :
vagrant_instance :
localvm :
cpus : 2
memory : 2048
box : 'ubuntu/trusty64'
user_data : '''
sudo apt-get install sl -y
touch /tmp/${var.koding_user_username}.txt
'''
stackTemplate = stackTemplates[provider] ? { error: 'Provider not supported' }
if format is 'yaml'
{ content, err } = jsonToYaml stackTemplate
return content
else
return JSON.stringify stackTemplate
onboardingCompleted: ->
@hide()
selectedProvider = @providerSelectionView.selected?.getOption 'provider'
@emit 'StackOnboardingCompleted', {
selectedProvider, template: { content: @stackTemplate }
}
pistachio: ->
return '''
{{> @getStartedView}}
{{> @providerSelectionView}}
{{> @configurationView}}
{{> @codeSetupView}}
<footer>
{{> @backButton}}
{{> @nextButton}}
{{> @skipLink}}
{{> @cancelButton}}
</footer>
'''
|
[
{
"context": "r.childNodes\n\n data_to_key = (d, i) ->\n return key_prefix + key_fn d, i\n\n node_to_key = ($n, i) ->\n retur",
"end": 6022,
"score": 0.9043229818344116,
"start": 6010,
"tag": "KEY",
"value": "key_prefix +"
},
{
"context": "ata_to_key = (d, i) ->\n return k... | src/coffee/singult/coffee.coffee | lynaghk/singult | 10 | ################################################################
# Singult
#
# A JavaScript implementation of the Hiccup templating language.
#
goog.require("goog.string")
goog.provide("singult.coffee")
goog.provide("singult.coffee.Unify")
goog.provide("singult.coffee.Ignore")
############
# Helper fns
# (Private)
p = (x) ->
console.log x
x
re_tag = /([^\s\.#]+)(?:#([^\s\.#]+))?(?:\.([^\s#]+))?/
re_svg_tags = /^(svg|g|rect|circle|clipPath|path|line|polygon|polyline|text|textPath)$/
re_whitespace = /^\s+$/
#Namespace separator colon should be greedy so it only splits the first colon.
re_namespace_sep = /:(.+)/
#Prefix for key-fns so there aren't problems when people use numbers as keys.
key_prefix = "\0"
xmlns =
xhtml: "http://www.w3.org/1999/xhtml"
xlink: "http://www.w3.org/1999/xlink"
svg: "http://www.w3.org/2000/svg"
xml: "http://www.w3.org/XML/1998/namespace"
xmlns: "http://www.w3.org/2000/xmlns"
#Determines namespace URI from tag string, defaulting to xhtml. Returns [nsp tag]
namespace_tag = (tag_str) ->
[nsp, tag] = tag_str.split ":"
if tag?
[xmlns[nsp] or nsp, tag]
else
if tag_str.match(re_svg_tags) then [xmlns.svg, tag_str] else [xmlns.xhtml, tag_str]
####################
# EMBRACE THE DUCK!
explode_p = (v) -> v[0] == ":*:"
unify_p = (x) -> x? and (x instanceof singult.coffee.Unify)
ignore_p = (x) -> x? and (x instanceof singult.coffee.Ignore)
array_p = (x) -> x? and x.forEach?
map_p = (x) -> x? and (not array_p x) and (not unify_p x) and (not ignore_p x) and (x instanceof Object)
dom_p = (x) -> x? and x.nodeType?
string_p = (x) -> x? and x.substring?
number_p = (x) -> x? and x.toFixed?
whitespace_node_p = ($n) ->
$n.nodeType == 8 or
($n.nodeType == 3 and $n.textContent.match re_whitespace)
##############################
# DOM helpers (side effects!)
singult.coffee.style = ($e, m) ->
for own k, v of m
$e.style[goog.string.toCamelCase(k)] = v
singult.coffee.properties = ($e, m) ->
for own prop, v of m
$e[prop] = v
singult.coffee.attr = ($e, attr_map) ->
#Special handling of style, properties, and class keys
if attr_map["style"]?
singult.coffee.style $e, attr_map["style"]
delete attr_map["style"]
if attr_map["properties"]?
singult.coffee.properties $e, attr_map["properties"]
delete attr_map["properties"]
if array_p attr_map["class"]
$e.setAttribute "class", attr_map["class"].join(" ")
delete attr_map["class"]
for own k, v of attr_map
if v?
[ns, attr] = k.split re_namespace_sep
if attr?
$e.setAttributeNS (xmlns[ns] or ns), attr, v
else
$e.setAttribute k, v
else
$e.removeAttribute k
singult.coffee.node_data = ($e, d) ->
if d?
$e["__singult_data__"] = d
else
$e["__singult_data__"]
#########################
# Hiccup vector reshaping
singult.coffee.canonicalize = (x) ->
if number_p x
x.toString()
else if array_p x
singult.coffee.canonicalize_hiccup x
else
x
singult.coffee.canonicalize_hiccup = (v) ->
#Destructure vec
tag = v[0]
[attr, children] = if map_p v[1]
[v[1], v[2..]]
else
[{}, v[1..]]
#Merge id/classes from tag str
[_, tag_str, id, cls_str] = tag.match re_tag
if id?
attr["id"] = id
if cls_str?
abbreviated_classes = cls_str.split(".")
if array_p attr["class"]
attr["class"] = attr["class"].concat abbreviated_classes
else if string_p attr["class"]
attr["class"] = abbreviated_classes.concat [attr["class"]]
else if not attr["class"]?
attr["class"] = abbreviated_classes
#Determine namespace from tag
[nsp, tag] = namespace_tag tag_str
canonical_children = []
children.forEach (v) ->
if v?
if explode_p(v)
v[1..].forEach (v) -> canonical_children.push singult.coffee.canonicalize(v)
else
canonical_children.push singult.coffee.canonicalize(v)
canonical =
nsp: nsp
tag: tag
attr: attr
children: canonical_children
return canonical
#Build and return DOM element (and any children) represented by canonical hiccup map m.
singult.coffee.render = (m) ->
if unify_p m
throw new Error("Unify must be the first and only child of its parent.")
else if ignore_p m
return null
else if string_p m #TODO: how to handle raw html?
return document.createTextNode m
else if dom_p m
return m
else #it's a canonical map
$e = document.createElementNS m.nsp, m.tag
singult.coffee.attr $e, m.attr
if unify_p (c = m.children[0])
if c.enter? #Use user-supplied enter fn.
c.data.forEach (d) ->
$el = c.enter d
singult.coffee.node_data $el, d
$e.appendChild $el
else #Construct a node from the mapping procided with the unify.
c.data.forEach (d) ->
$el = singult.coffee.render singult.coffee.canonicalize c.mapping d
singult.coffee.node_data $el, d
$e.appendChild $el
else
m.children.forEach (c) ->
$c = singult.coffee.render c
if $c?
$e.appendChild $c
return $e
#############
# Unification
#Struct-like thing that holds info needed for unification
###* @constructor ###
singult.coffee.Unify = (data, mapping, key_fn, enter, update, exit, force_update_p) ->
@data = data
@mapping = mapping
@key_fn = key_fn
@enter = enter
@update = update
@exit = exit
@force_update_p = force_update_p
return this
###* @constructor ###
singult.coffee.Ignore = -> return this
#Unifies $nodes with data and mapping contained in u.
#
# Performs a keyed diff between $container's current child nodes and u.data:
# datums without a matching node get one created (enter), datums whose key
# matches an existing node get it updated in place (update), and nodes whose
# key no longer appears in the data are removed (exit). Returns null; all
# work happens via DOM side effects on $container.
singult.coffee.unify_ = ($container, u) ->
  # Default lifecycle fns; each may be overridden on the Unify object.
  enter = u.enter or (d) ->
    $el = singult.coffee.render singult.coffee.canonicalize u.mapping d
    $container.appendChild $el
    return $el
  update = u.update or ($n, d) ->
    return singult.coffee.merge $n, singult.coffee.canonicalize u.mapping d
  exit = u.exit or ($n) -> $container.removeChild $n
  key_fn = u.key_fn or (d, idx) -> idx
  # childNodes is a *live* NodeList: inserts/removals below are immediately
  # visible through $nodes.
  $nodes = $container.childNodes
  # Keys are prefixed with key_prefix so numeric keys can't collide with
  # built-in object properties.
  data_to_key = (d, i) ->
    return key_prefix + key_fn d, i
  node_to_key = ($n, i) ->
    return data_to_key singult.coffee.node_data($n), i
  # Update $n from d, skipping the update when the node's stored data is
  # identical to d (unless force_update_p is set).
  maybe_do_update = ($n, d) ->
    if u.force_update_p
      $el = update $n, d
      singult.coffee.node_data $el, d
    else #only update if the data is new
      # Use ClojureScript's IEquiv equality when the datum supports it,
      # falling back to reference equality.
      # NOTE(review): old_data is undefined for nodes never tagged via
      # node_data (e.g. foreign children) — confirm all children originate
      # from enter.
      old_data = singult.coffee.node_data $n
      identical_data_p = if old_data.cljs$core$IEquiv$_equiv$arity$2?
        old_data.cljs$core$IEquiv$_equiv$arity$2(old_data, d)
      else
        old_data == d
      unless identical_data_p
        $el = update $n, d
        singult.coffee.node_data $el, d
  # Place $n at index i, appending when i is past the end.
  insert_at = ($n, i) ->
    if i < $nodes.length
      $container.insertBefore($n, $nodes[i])
    else
      $container.appendChild($n)
  # Index the incoming data by key for membership tests below.
  data_map = {}
  u.data.forEach (d, i) ->
    key = data_to_key d, i
    data_map[key] = d
  #extract nodes-to-keep
  # (existing nodes whose key still appears in the new data)
  nodes_to_keep = {}
  i = 0
  while i < $nodes.length
    key = node_to_key $nodes[i], i
    if data_map[key]
      nodes_to_keep[key] = $nodes[i]
    i += 1
  #iterate: d,i in u.data, for each d,i:
  u.data.forEach (d, i) ->
    # after each step of the forEach, the element at $nodes[i] is
    # matches the input data of d.
    $n = if i < $nodes.length then $nodes[i]
    n_key = if $n then node_to_key $n, i
    d_key = data_to_key d, i
    if !$n?
      # Ran past the existing children: create a fresh node for d.
      $el = enter d
      singult.coffee.node_data $el, d
    else if n_key == d_key
      # Node already in the right slot: update it in place if needed.
      maybe_do_update $nodes[i], d
    else
      # Mismatch: drop the occupant if it's not needed elsewhere, then
      # either move the surviving node for d_key here or create a new one.
      if !nodes_to_keep[n_key]
        exit $n
      if nodes_to_keep[d_key]
        $el = nodes_to_keep[d_key]
        insert_at $el, i
        maybe_do_update $el, d
      else
        $el = enter d
        insert_at $el, i
        singult.coffee.node_data $el, d
  # if we've run out of d, kill everything else
  # NOTE(review): termination relies on exit removing the node so the live
  # $nodes list shrinks; a custom exit that leaves the node attached would
  # loop forever — confirm.
  data_len = u.data.length
  while data_len < $nodes.length
    exit $nodes[data_len]
  return null
#Merge DOM node $e with canonical hiccup map m.
#
# Reconciles $e in place so it matches m: attributes are re-applied,
# whitespace-only child nodes are stripped, and children are merged either
# via unification (when m's first child is a Unify) or positionally by
# index. Returns $e (or undefined for the Unify/Ignore branches).
# Throws when $e's tag differs from m.tag or a child cannot be merged.
singult.coffee.merge = ($e, m) ->
  if unify_p m
    singult.coffee.unify_ $e, m
  else if ignore_p m
    #do nothing
  else
    # Tag mismatch is unrecoverable; log both sides for debugging first.
    if $e.nodeName.toLowerCase() != m.tag.toLowerCase()
      p $e
      p m
      throw new Error("Cannot merge $e into node of different type")
    #Merge attributes
    singult.coffee.attr $e, m.attr
    #Remove whitespace nodes from parent
    # (comments and whitespace-only text nodes would throw off the
    # positional child matching below)
    if $e.hasChildNodes()
      #Need to iterate from end because removing modifies the live collection
      for i in [($e.childNodes.length-1)..0]
        $c = $e.childNodes[i]
        $e.removeChild($c) if whitespace_node_p $c
    if unify_p m.children[0] #the children are data driven; recurse to unify
      singult.coffee.merge $e, m.children[0]
    else #the children are not data-driven; merge, assuming they match up by type & index
      # If the DOM has more children than m, discard them all and rebuild
      # from scratch rather than trying to pair them up.
      if $e.childNodes.length > m.children.length
        # Remove all existing node children (TODO: try to match things up instead of rebuilding everything?)
        for i in [($e.childNodes.length-1)..0]
          $e.removeChild $e.childNodes[i]
      # Walk m.children positionally; $c is the existing node at that index
      # (undefined once we run past the current children).
      i = 0
      while i < m.children.length
        c = m.children[i] or ""
        $c = $e.childNodes[i]
        if string_p c
          # Text child: overwrite existing node's text or append a new one.
          if $c?
            $c.textContent = c
          else
            $e.appendChild document.createTextNode c
        else if ignore_p c
          #do nothing
        else if map_p c
          # Element child: merge recursively, or render fresh if absent.
          if $c?
            singult.coffee.merge $c, c
          else
            $e.appendChild singult.coffee.render c
        else
          # Unmergeable child (e.g. a nested Unify past index 0): log and bail.
          p $c
          p c
          throw new Error("Cannot merge children")
        i += 1
    #Return element
    return $e
| 57368 | ################################################################
# Singult
#
# A JavaScript implementation of the Hiccup templating language.
#
goog.require("goog.string")
goog.provide("singult.coffee")
goog.provide("singult.coffee.Unify")
goog.provide("singult.coffee.Ignore")
############
# Helper fns
# (Private)
p = (x) ->
console.log x
x
re_tag = /([^\s\.#]+)(?:#([^\s\.#]+))?(?:\.([^\s#]+))?/
re_svg_tags = /^(svg|g|rect|circle|clipPath|path|line|polygon|polyline|text|textPath)$/
re_whitespace = /^\s+$/
#Namespace separator colon should be greedy so it only splits the first colon.
re_namespace_sep = /:(.+)/
#Prefix for key-fns so there aren't problems when people use numbers as keys.
key_prefix = "\0"
xmlns =
xhtml: "http://www.w3.org/1999/xhtml"
xlink: "http://www.w3.org/1999/xlink"
svg: "http://www.w3.org/2000/svg"
xml: "http://www.w3.org/XML/1998/namespace"
xmlns: "http://www.w3.org/2000/xmlns"
#Determines namespace URI from tag string, defaulting to xhtml. Returns [nsp tag]
namespace_tag = (tag_str) ->
[nsp, tag] = tag_str.split ":"
if tag?
[xmlns[nsp] or nsp, tag]
else
if tag_str.match(re_svg_tags) then [xmlns.svg, tag_str] else [xmlns.xhtml, tag_str]
####################
# EMBRACE THE DUCK!
explode_p = (v) -> v[0] == ":*:"
unify_p = (x) -> x? and (x instanceof singult.coffee.Unify)
ignore_p = (x) -> x? and (x instanceof singult.coffee.Ignore)
array_p = (x) -> x? and x.forEach?
map_p = (x) -> x? and (not array_p x) and (not unify_p x) and (not ignore_p x) and (x instanceof Object)
dom_p = (x) -> x? and x.nodeType?
string_p = (x) -> x? and x.substring?
number_p = (x) -> x? and x.toFixed?
whitespace_node_p = ($n) ->
$n.nodeType == 8 or
($n.nodeType == 3 and $n.textContent.match re_whitespace)
##############################
# DOM helpers (side effects!)
singult.coffee.style = ($e, m) ->
for own k, v of m
$e.style[goog.string.toCamelCase(k)] = v
singult.coffee.properties = ($e, m) ->
for own prop, v of m
$e[prop] = v
singult.coffee.attr = ($e, attr_map) ->
#Special handling of style, properties, and class keys
if attr_map["style"]?
singult.coffee.style $e, attr_map["style"]
delete attr_map["style"]
if attr_map["properties"]?
singult.coffee.properties $e, attr_map["properties"]
delete attr_map["properties"]
if array_p attr_map["class"]
$e.setAttribute "class", attr_map["class"].join(" ")
delete attr_map["class"]
for own k, v of attr_map
if v?
[ns, attr] = k.split re_namespace_sep
if attr?
$e.setAttributeNS (xmlns[ns] or ns), attr, v
else
$e.setAttribute k, v
else
$e.removeAttribute k
singult.coffee.node_data = ($e, d) ->
if d?
$e["__singult_data__"] = d
else
$e["__singult_data__"]
#########################
# Hiccup vector reshaping
singult.coffee.canonicalize = (x) ->
if number_p x
x.toString()
else if array_p x
singult.coffee.canonicalize_hiccup x
else
x
singult.coffee.canonicalize_hiccup = (v) ->
#Destructure vec
tag = v[0]
[attr, children] = if map_p v[1]
[v[1], v[2..]]
else
[{}, v[1..]]
#Merge id/classes from tag str
[_, tag_str, id, cls_str] = tag.match re_tag
if id?
attr["id"] = id
if cls_str?
abbreviated_classes = cls_str.split(".")
if array_p attr["class"]
attr["class"] = attr["class"].concat abbreviated_classes
else if string_p attr["class"]
attr["class"] = abbreviated_classes.concat [attr["class"]]
else if not attr["class"]?
attr["class"] = abbreviated_classes
#Determine namespace from tag
[nsp, tag] = namespace_tag tag_str
canonical_children = []
children.forEach (v) ->
if v?
if explode_p(v)
v[1..].forEach (v) -> canonical_children.push singult.coffee.canonicalize(v)
else
canonical_children.push singult.coffee.canonicalize(v)
canonical =
nsp: nsp
tag: tag
attr: attr
children: canonical_children
return canonical
#Build and return DOM element (and any children) represented by canonical hiccup map m.
singult.coffee.render = (m) ->
if unify_p m
throw new Error("Unify must be the first and only child of its parent.")
else if ignore_p m
return null
else if string_p m #TODO: how to handle raw html?
return document.createTextNode m
else if dom_p m
return m
else #it's a canonical map
$e = document.createElementNS m.nsp, m.tag
singult.coffee.attr $e, m.attr
if unify_p (c = m.children[0])
if c.enter? #Use user-supplied enter fn.
c.data.forEach (d) ->
$el = c.enter d
singult.coffee.node_data $el, d
$e.appendChild $el
else #Construct a node from the mapping procided with the unify.
c.data.forEach (d) ->
$el = singult.coffee.render singult.coffee.canonicalize c.mapping d
singult.coffee.node_data $el, d
$e.appendChild $el
else
m.children.forEach (c) ->
$c = singult.coffee.render c
if $c?
$e.appendChild $c
return $e
#############
# Unification
#Struct-like thing that holds info needed for unification
###* @constructor ###
singult.coffee.Unify = (data, mapping, key_fn, enter, update, exit, force_update_p) ->
@data = data
@mapping = mapping
@key_fn = key_fn
@enter = enter
@update = update
@exit = exit
@force_update_p = force_update_p
return this
###* @constructor ###
singult.coffee.Ignore = -> return this
#Unifies $nodes with data and mapping contained in u.
singult.coffee.unify_ = ($container, u) ->
enter = u.enter or (d) ->
$el = singult.coffee.render singult.coffee.canonicalize u.mapping d
$container.appendChild $el
return $el
update = u.update or ($n, d) ->
return singult.coffee.merge $n, singult.coffee.canonicalize u.mapping d
exit = u.exit or ($n) -> $container.removeChild $n
key_fn = u.key_fn or (d, idx) -> idx
$nodes = $container.childNodes
data_to_key = (d, i) ->
return <KEY> key_<KEY> d, i
node_to_key = ($n, i) ->
return data_to_key singult.coffee.node_data($n), i
maybe_do_update = ($n, d) ->
if u.force_update_p
$el = update $n, d
singult.coffee.node_data $el, d
else #only update if the data is new
old_data = singult.coffee.node_data $n
identical_data_p = if old_data.cljs$core$IEquiv$_equiv$arity$2?
old_data.cljs$core$IEquiv$_equiv$arity$2(old_data, d)
else
old_data == d
unless identical_data_p
$el = update $n, d
singult.coffee.node_data $el, d
insert_at = ($n, i) ->
if i < $nodes.length
$container.insertBefore($n, $nodes[i])
else
$container.appendChild($n)
data_map = {}
u.data.forEach (d, i) ->
key = data_to_key d, i
data_map[key] = d
#extract nodes-to-keep
nodes_to_keep = {}
i = 0
while i < $nodes.length
key = node<KEY>_to_key $nodes[i], i
if data_map[key]
nodes_to_keep[key] = $nodes[i]
i += 1
#iterate: d,i in u.data, for each d,i:
u.data.forEach (d, i) ->
# after each step of the forEach, the element at $nodes[i] is
# matches the input data of d.
$n = if i < $nodes.length then $nodes[i]
n_key = if $n then node_to_key $n, i
d_key = data_to_key d, i
if !$n?
$el = enter d
singult.coffee.node_data $el, d
else if n_key == d_key
maybe_do_update $nodes[i], d
else
if !nodes_to_keep[n_key]
exit $n
if nodes_to_keep[d_key]
$el = nodes_to_keep[d_key]
insert_at $el, i
maybe_do_update $el, d
else
$el = enter d
insert_at $el, i
singult.coffee.node_data $el, d
# if we've run out of d, kill everything else
data_len = u.data.length
while data_len < $nodes.length
exit $nodes[data_len]
return null
#Merge DOM node $e with canonical hiccup map m.
singult.coffee.merge = ($e, m) ->
if unify_p m
singult.coffee.unify_ $e, m
else if ignore_p m
#do nothing
else
if $e.nodeName.toLowerCase() != m.tag.toLowerCase()
p $e
p m
throw new Error("Cannot merge $e into node of different type")
#Merge attributes
singult.coffee.attr $e, m.attr
#Remove whitespace nodes from parent
if $e.hasChildNodes()
#Need to iterate from end because removing modifies the live collection
for i in [($e.childNodes.length-1)..0]
$c = $e.childNodes[i]
$e.removeChild($c) if whitespace_node_p $c
if unify_p m.children[0] #the children are data driven; recurse to unify
singult.coffee.merge $e, m.children[0]
else #the children are not data-driven; merge, assuming they match up by type & index
if $e.childNodes.length > m.children.length
# Remove all existing node children (TODO: try to match things up instead of rebuilding everything?)
for i in [($e.childNodes.length-1)..0]
$e.removeChild $e.childNodes[i]
i = 0
while i < m.children.length
c = m.children[i] or ""
$c = $e.childNodes[i]
if string_p c
if $c?
$c.textContent = c
else
$e.appendChild document.createTextNode c
else if ignore_p c
#do nothing
else if map_p c
if $c?
singult.coffee.merge $c, c
else
$e.appendChild singult.coffee.render c
else
p $c
p c
throw new Error("Cannot merge children")
i += 1
#Return element
return $e
| true | ################################################################
# Singult
#
# A JavaScript implementation of the Hiccup templating language.
#
goog.require("goog.string")
goog.provide("singult.coffee")
goog.provide("singult.coffee.Unify")
goog.provide("singult.coffee.Ignore")
############
# Helper fns
# (Private)
p = (x) ->
console.log x
x
re_tag = /([^\s\.#]+)(?:#([^\s\.#]+))?(?:\.([^\s#]+))?/
re_svg_tags = /^(svg|g|rect|circle|clipPath|path|line|polygon|polyline|text|textPath)$/
re_whitespace = /^\s+$/
#Namespace separator colon should be greedy so it only splits the first colon.
re_namespace_sep = /:(.+)/
#Prefix for key-fns so there aren't problems when people use numbers as keys.
key_prefix = "\0"
xmlns =
xhtml: "http://www.w3.org/1999/xhtml"
xlink: "http://www.w3.org/1999/xlink"
svg: "http://www.w3.org/2000/svg"
xml: "http://www.w3.org/XML/1998/namespace"
xmlns: "http://www.w3.org/2000/xmlns"
#Determines namespace URI from tag string, defaulting to xhtml. Returns [nsp tag]
namespace_tag = (tag_str) ->
[nsp, tag] = tag_str.split ":"
if tag?
[xmlns[nsp] or nsp, tag]
else
if tag_str.match(re_svg_tags) then [xmlns.svg, tag_str] else [xmlns.xhtml, tag_str]
####################
# EMBRACE THE DUCK!
explode_p = (v) -> v[0] == ":*:"
unify_p = (x) -> x? and (x instanceof singult.coffee.Unify)
ignore_p = (x) -> x? and (x instanceof singult.coffee.Ignore)
array_p = (x) -> x? and x.forEach?
map_p = (x) -> x? and (not array_p x) and (not unify_p x) and (not ignore_p x) and (x instanceof Object)
dom_p = (x) -> x? and x.nodeType?
string_p = (x) -> x? and x.substring?
number_p = (x) -> x? and x.toFixed?
whitespace_node_p = ($n) ->
$n.nodeType == 8 or
($n.nodeType == 3 and $n.textContent.match re_whitespace)
##############################
# DOM helpers (side effects!)
singult.coffee.style = ($e, m) ->
for own k, v of m
$e.style[goog.string.toCamelCase(k)] = v
singult.coffee.properties = ($e, m) ->
for own prop, v of m
$e[prop] = v
singult.coffee.attr = ($e, attr_map) ->
#Special handling of style, properties, and class keys
if attr_map["style"]?
singult.coffee.style $e, attr_map["style"]
delete attr_map["style"]
if attr_map["properties"]?
singult.coffee.properties $e, attr_map["properties"]
delete attr_map["properties"]
if array_p attr_map["class"]
$e.setAttribute "class", attr_map["class"].join(" ")
delete attr_map["class"]
for own k, v of attr_map
if v?
[ns, attr] = k.split re_namespace_sep
if attr?
$e.setAttributeNS (xmlns[ns] or ns), attr, v
else
$e.setAttribute k, v
else
$e.removeAttribute k
singult.coffee.node_data = ($e, d) ->
if d?
$e["__singult_data__"] = d
else
$e["__singult_data__"]
#########################
# Hiccup vector reshaping
singult.coffee.canonicalize = (x) ->
if number_p x
x.toString()
else if array_p x
singult.coffee.canonicalize_hiccup x
else
x
singult.coffee.canonicalize_hiccup = (v) ->
#Destructure vec
tag = v[0]
[attr, children] = if map_p v[1]
[v[1], v[2..]]
else
[{}, v[1..]]
#Merge id/classes from tag str
[_, tag_str, id, cls_str] = tag.match re_tag
if id?
attr["id"] = id
if cls_str?
abbreviated_classes = cls_str.split(".")
if array_p attr["class"]
attr["class"] = attr["class"].concat abbreviated_classes
else if string_p attr["class"]
attr["class"] = abbreviated_classes.concat [attr["class"]]
else if not attr["class"]?
attr["class"] = abbreviated_classes
#Determine namespace from tag
[nsp, tag] = namespace_tag tag_str
canonical_children = []
children.forEach (v) ->
if v?
if explode_p(v)
v[1..].forEach (v) -> canonical_children.push singult.coffee.canonicalize(v)
else
canonical_children.push singult.coffee.canonicalize(v)
canonical =
nsp: nsp
tag: tag
attr: attr
children: canonical_children
return canonical
#Build and return DOM element (and any children) represented by canonical hiccup map m.
singult.coffee.render = (m) ->
if unify_p m
throw new Error("Unify must be the first and only child of its parent.")
else if ignore_p m
return null
else if string_p m #TODO: how to handle raw html?
return document.createTextNode m
else if dom_p m
return m
else #it's a canonical map
$e = document.createElementNS m.nsp, m.tag
singult.coffee.attr $e, m.attr
if unify_p (c = m.children[0])
if c.enter? #Use user-supplied enter fn.
c.data.forEach (d) ->
$el = c.enter d
singult.coffee.node_data $el, d
$e.appendChild $el
else #Construct a node from the mapping procided with the unify.
c.data.forEach (d) ->
$el = singult.coffee.render singult.coffee.canonicalize c.mapping d
singult.coffee.node_data $el, d
$e.appendChild $el
else
m.children.forEach (c) ->
$c = singult.coffee.render c
if $c?
$e.appendChild $c
return $e
#############
# Unification
#Struct-like thing that holds info needed for unification
###* @constructor ###
singult.coffee.Unify = (data, mapping, key_fn, enter, update, exit, force_update_p) ->
@data = data
@mapping = mapping
@key_fn = key_fn
@enter = enter
@update = update
@exit = exit
@force_update_p = force_update_p
return this
###* @constructor ###
singult.coffee.Ignore = -> return this
#Unifies $nodes with data and mapping contained in u.
singult.coffee.unify_ = ($container, u) ->
enter = u.enter or (d) ->
$el = singult.coffee.render singult.coffee.canonicalize u.mapping d
$container.appendChild $el
return $el
update = u.update or ($n, d) ->
return singult.coffee.merge $n, singult.coffee.canonicalize u.mapping d
exit = u.exit or ($n) -> $container.removeChild $n
key_fn = u.key_fn or (d, idx) -> idx
$nodes = $container.childNodes
data_to_key = (d, i) ->
return PI:KEY:<KEY>END_PI key_PI:KEY:<KEY>END_PI d, i
node_to_key = ($n, i) ->
return data_to_key singult.coffee.node_data($n), i
maybe_do_update = ($n, d) ->
if u.force_update_p
$el = update $n, d
singult.coffee.node_data $el, d
else #only update if the data is new
old_data = singult.coffee.node_data $n
identical_data_p = if old_data.cljs$core$IEquiv$_equiv$arity$2?
old_data.cljs$core$IEquiv$_equiv$arity$2(old_data, d)
else
old_data == d
unless identical_data_p
$el = update $n, d
singult.coffee.node_data $el, d
insert_at = ($n, i) ->
if i < $nodes.length
$container.insertBefore($n, $nodes[i])
else
$container.appendChild($n)
data_map = {}
u.data.forEach (d, i) ->
key = data_to_key d, i
data_map[key] = d
#extract nodes-to-keep
nodes_to_keep = {}
i = 0
while i < $nodes.length
key = nodePI:KEY:<KEY>END_PI_to_key $nodes[i], i
if data_map[key]
nodes_to_keep[key] = $nodes[i]
i += 1
#iterate: d,i in u.data, for each d,i:
u.data.forEach (d, i) ->
# after each step of the forEach, the element at $nodes[i] is
# matches the input data of d.
$n = if i < $nodes.length then $nodes[i]
n_key = if $n then node_to_key $n, i
d_key = data_to_key d, i
if !$n?
$el = enter d
singult.coffee.node_data $el, d
else if n_key == d_key
maybe_do_update $nodes[i], d
else
if !nodes_to_keep[n_key]
exit $n
if nodes_to_keep[d_key]
$el = nodes_to_keep[d_key]
insert_at $el, i
maybe_do_update $el, d
else
$el = enter d
insert_at $el, i
singult.coffee.node_data $el, d
# if we've run out of d, kill everything else
data_len = u.data.length
while data_len < $nodes.length
exit $nodes[data_len]
return null
#Merge DOM node $e with canonical hiccup map m.
singult.coffee.merge = ($e, m) ->
if unify_p m
singult.coffee.unify_ $e, m
else if ignore_p m
#do nothing
else
if $e.nodeName.toLowerCase() != m.tag.toLowerCase()
p $e
p m
throw new Error("Cannot merge $e into node of different type")
#Merge attributes
singult.coffee.attr $e, m.attr
#Remove whitespace nodes from parent
if $e.hasChildNodes()
#Need to iterate from end because removing modifies the live collection
for i in [($e.childNodes.length-1)..0]
$c = $e.childNodes[i]
$e.removeChild($c) if whitespace_node_p $c
if unify_p m.children[0] #the children are data driven; recurse to unify
singult.coffee.merge $e, m.children[0]
else #the children are not data-driven; merge, assuming they match up by type & index
if $e.childNodes.length > m.children.length
# Remove all existing node children (TODO: try to match things up instead of rebuilding everything?)
for i in [($e.childNodes.length-1)..0]
$e.removeChild $e.childNodes[i]
i = 0
while i < m.children.length
c = m.children[i] or ""
$c = $e.childNodes[i]
if string_p c
if $c?
$c.textContent = c
else
$e.appendChild document.createTextNode c
else if ignore_p c
#do nothing
else if map_p c
if $c?
singult.coffee.merge $c, c
else
$e.appendChild singult.coffee.render c
else
p $c
p c
throw new Error("Cannot merge children")
i += 1
#Return element
return $e
|
[
{
"context": ")\nopen = require('open')\n\nlastfm =\n api_key: 'cc9f13aac2db0b7bb34c27466debea9a'\n secret: 'c48542a15d29ce7d469a05c5f34b5f39'\n u",
"end": 135,
"score": 0.9997140765190125,
"start": 103,
"tag": "KEY",
"value": "cc9f13aac2db0b7bb34c27466debea9a"
},
{
"context": ... | public/js/lastfm.coffee | trmml/finale | 4 | request = require('request')
util = require('util')
open = require('open')
lastfm =
api_key: 'cc9f13aac2db0b7bb34c27466debea9a'
secret: 'c48542a15d29ce7d469a05c5f34b5f39'
url: 'http://ws.audioscrobbler.com/2.0/'
token = (cb) ->
url = util.format('%s?method=auth.getToken&api_key=%s&api_sig=%s&format=json', lastfm.url, lastfm.api_key, lastfm.secret)
request url, (err, res, body) ->
if !err and res.statusCode == 200
cb JSON.parse(body).token
return
return
auth = (cb) ->
token (token) ->
url = util.format('http://www.last.fm/api/auth/?api_key=%s&token=%s', lastfm.api_key, token)
open(url)
return
| 159573 | request = require('request')
util = require('util')
open = require('open')
lastfm =
api_key: '<KEY>'
secret: '<KEY>'
url: 'http://ws.audioscrobbler.com/2.0/'
token = (cb) ->
url = util.format('%s?method=auth.getToken&api_key=%s&api_sig=%s&format=json', lastfm.url, lastfm.api_key, lastfm.secret)
request url, (err, res, body) ->
if !err and res.statusCode == 200
cb JSON.parse(body).token
return
return
auth = (cb) ->
token (token) ->
url = util.format('http://www.last.fm/api/auth/?api_key=%s&token=%s', lastfm.api_key, token)
open(url)
return
| true | request = require('request')
util = require('util')
open = require('open')
lastfm =
api_key: 'PI:KEY:<KEY>END_PI'
secret: 'PI:KEY:<KEY>END_PI'
url: 'http://ws.audioscrobbler.com/2.0/'
token = (cb) ->
url = util.format('%s?method=auth.getToken&api_key=%s&api_sig=%s&format=json', lastfm.url, lastfm.api_key, lastfm.secret)
request url, (err, res, body) ->
if !err and res.statusCode == 200
cb JSON.parse(body).token
return
return
auth = (cb) ->
token (token) ->
url = util.format('http://www.last.fm/api/auth/?api_key=%s&token=%s', lastfm.api_key, token)
open(url)
return
|
[
{
"context": "', ->\n model = new Model( cafe: { vivace: 'brix' } )\n model.unset('cafe.vivace')\n\n ",
"end": 2481,
"score": 0.7544223070144653,
"start": 2477,
"tag": "NAME",
"value": "brix"
},
{
"context": "', ->\n model = new Model( cafe: { vivace: 'bri... | test/model/model.coffee | saraid/janus | 0 | should = require('should')
Model = require('../../lib/model/model').Model
Issue = require('../../lib/model/issue').Issue
attribute = require('../../lib/model/attribute')
{ Reference, Resolver } = require('../../lib/model/reference')
Varying = require('../../lib/core/varying').Varying
collection = require('../../lib/collection/collection')
describe 'Model', ->
describe 'core', ->
it 'should construct', ->
(new Model()).should.be.an.instanceof(Model)
it 'should construct with an attribute bag', ->
(new Model( test: 'attr' )).attributes.test.should.equal('attr')
describe 'attribute', ->
describe 'get', ->
it 'should be able to get a shallow attribute', ->
model = new Model( vivace: 'brix' )
model.get('vivace').should.equal('brix')
it 'should be able to get a deep attribute', ->
model = new Model( cafe: { vivace: 'brix' } )
model.get('cafe.vivace').should.equal('brix')
it 'should return null on nonexistent attributes', ->
model = new Model( broad: 'way' )
(model.get('vivace') is null).should.be.true
(model.get('cafe.vivace') is null).should.be.true
describe 'set', ->
it 'should be able to set a shallow attribute', ->
model = new Model()
model.set('colman', 'pool')
model.attributes.colman.should.equal('pool')
model.get('colman').should.equal('pool')
it 'should be able to set a deep attribute', ->
model = new Model()
model.set('colman.pool', 'slide')
model.attributes.colman.pool.should.equal('slide')
model.get('colman.pool').should.equal('slide')
it 'should accept a bag of attributes', ->
model = new Model()
model.set( the: 'stranger' )
model.attributes.the.should.equal('stranger')
it 'should deep write all attributes in a given bag', ->
model = new Model( the: { stranger: 'seattle' } )
model.set( the: { joule: 'apartments' }, black: 'dog' )
model.attributes.the.stranger.should.equal('seattle')
model.get('the.stranger').should.equal('seattle')
model.attributes.the.joule.should.equal('apartments')
model.get('the.joule').should.equal('apartments')
model.attributes.black.should.equal('dog')
model.get('black').should.equal('dog')
describe 'unset', ->
it 'should be able to unset an attribute', ->
model = new Model( cafe: { vivace: 'brix' } )
model.unset('cafe.vivace')
(model.get('cafe.vivace') is null).should.be.true
it 'should be able to unset an attribute tree', ->
model = new Model( cafe: { vivace: 'brix' } )
model.unset('cafe')
(model.get('cafe.vivace') is null).should.be.true
(model.get('cafe') is null).should.be.true
describe 'setAll', ->
it 'should set all attributes in the given bag', ->
model = new Model()
model.setAll( the: { stranger: 'seattle', joule: 'apartments' } )
model.attributes.the.stranger.should.equal('seattle')
model.get('the.stranger').should.equal('seattle')
model.attributes.the.joule.should.equal('apartments')
model.get('the.joule').should.equal('apartments')
it 'should clear attributes not in the given bag', ->
model = new Model( una: 'bella', tazza: { di: 'caffe' } )
model.setAll( tazza: { of: 'cafe' } )
should.not.exist(model.attributes.una)
(model.get('una') is null).should.be.true
should.not.exist(model.attributes.tazza.di)
(model.get('tazza.di') is null).should.be.true
model.attributes.tazza.of.should.equal('cafe')
model.get('tazza.of').should.equal('cafe')
describe 'binding', ->
describe 'application', ->
it 'should bind one attribute from another', ->
class TestModel extends Model
@bind('slave').from('master')
model = new TestModel()
should.not.exist(model.get('slave'))
model.set('master', 'commander')
model.get('slave').should.equal('commander')
it 'should iterate into nodes', ->
class TestModel extends Model
@bind('child_id').from('child', 'id')
(new TestModel( child: new Model( id: 1 ) )).get('child_id').should.equal(1)
it 'should flatMap multiple attributes together', ->
class TestModel extends Model
@bind('c').from('a').and('b').flatMap((a, b) -> a + b)
model = new TestModel()
model.set( a: 3, b: 4 )
model.get('c').should.equal(7)
it 'should be able to bind from a Varying', ->
v = new Varying(2)
class TestModel extends Model
@bind('x').fromVarying(-> v)
model = new TestModel()
model.get('x').should.equal(2)
v.setValue(4)
model.get('x').should.equal(4)
it 'should give model as this in Varying bind', ->
called = false
class TestModel extends Model
@bind('y').fromVarying ->
called = true
this.should.be.an.instanceof(TestModel)
new Varying()
new TestModel()
called.should.be.true
it 'should take a fallback', ->
class TestModel extends Model
@bind('z').from('a').fallback('value')
model = new TestModel()
model.get('z').should.equal('value')
model.set('a', 'test')
model.get('z').should.equal('test')
describe 'classtree', ->
it 'should not pollute across classdefs', ->
class TestA extends Model
@bind('a').from('c')
class TestB extends Model
@bind('b').from('c')
a = new TestA()
b = new TestB()
b.set('c', 47)
should.not.exist(b.get('a'))
it 'should not pollute crosstree', ->
class Root extends Model
@bind('root').from('x')
class Left extends Root
@bind('left').from('x')
class Right extends Root
@bind('right').from('x')
root = new Root( x: 'root' )
should.not.exist(root.get('left'))
should.not.exist(root.get('right'))
left = new Left( x: 'left' )
should.not.exist(left.get('right'))
right = new Right( x: 'right' )
should.not.exist(right.get('left'))
it 'should extend downtree', ->
class Root extends Model
@bind('root').from('x')
class Child extends Root
@bind('child').from('x')
(new Child( x: 'test' )).get('root').should.equal('test')
it 'should allow child bind to override parent', ->
class Root extends Model
@bind('contend').from('x')
class Child extends Root
@bind('contend').from('y')
(new Child( x: 1, y: 2 )).get('contend').should.equal(2)
describe 'defined attributes', ->
it 'should be definable and fetchable', ->
class TestModel extends Model
@attribute('attr', attribute.TextAttribute)
(new TestModel()).attribute('attr').should.be.an.instanceof(attribute.TextAttribute)
it 'should inherit down the classtree', ->
class Root extends Model
@attribute('attr', attribute.NumberAttribute)
class Child extends Root
(new Child()).attribute('attr').should.be.an.instanceof(attribute.NumberAttribute)
it 'should not pollute across classdefs', ->
class A extends Model
@attribute('a', attribute.NumberAttribute)
class B extends Model
@attribute('b', attribute.NumberAttribute)
should.not.exist((new A()).attribute('b'))
should.not.exist((new B()).attribute('a'))
it 'should memoize results', ->
class TestModel extends Model
@attribute('attr', attribute.BooleanAttribute)
model = new TestModel()
model.attribute('attr').should.equal(model.attribute('attr'))
# TODO: many noncovered methods
describe 'issues', ->
it 'should return an empty list by default', ->
issues = (new Model()).issues()
issues.should.be.an.instanceof(collection.Collection)
issues.list.length.should.equal(0)
it 'should contain issues from the Model level', ->
issueList = new collection.List()
class TestModel extends Model
_issues: -> issueList
model = new TestModel()
model.issues().list.length.should.equal(0)
issueList.add(new Issue( active: true ))
model.issues().list.length.should.equal(1)
issueList.removeAll()
model.issues().list.length.should.equal(0)
it 'should contain issues from the Attribute level', ->
issueList = new collection.List()
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> issueList
model = new TestModel()
model.issues().list.length.should.equal(0)
issueList.add(new Issue( active: true ))
model.issues().list.length.should.equal(1)
issueList.removeAll()
model.issues().list.length.should.equal(0)
it 'should only contain active issues', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: false )
model.issues().list.length.should.equal(0)
model.set('attr', true)
model.issues().list.length.should.equal(1)
model.set('attr', false)
model.issues().list.length.should.equal(0)
describe 'validity', ->
it 'should return true if no active issues exist', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: false )
model.valid().value.should.equal(true)
it 'should return false if one or more active issues exist', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
@attribute 'attr2', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: true, attr2: false )
model.valid().value.should.equal(false)
model.set('attr2', true)
model.valid().value.should.equal(false)
model.set('attr', false)
model.set('attr2', false)
model.valid().value.should.equal(true)
it 'should take a severity threshold', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: ->
new collection.List([
new Issue( active: this.watchValue().map((val) -> val > 0), severity: 2 )
new Issue( active: this.watchValue().map((val) -> val > 1), severity: 1 )
])
model = new TestModel( attr: 0 )
model.valid().value.should.equal(true)
model.set('attr', 1)
model.valid(1).value.should.equal(true)
model.valid(2).value.should.equal(false)
model.set('attr', 2)
model.valid(1).value.should.equal(false)
model.valid(2).value.should.equal(false)
describe 'shadowing', ->
    # shadow() produces a linked copy of the model; original()/originals()
    # walk back up the shadow chain to the root instance.
    describe 'creation', ->
      it 'should create a new instance of the same model class', ->
        class TestModel extends Model
        model = new TestModel()
        shadow = model.shadow()
        shadow.should.not.equal(model)
        shadow.should.be.an.instanceof(TestModel)
      it 'should return the original of a shadow', ->
        model = new Model()
        model.shadow().original().should.equal(model)
      # original() resolves through multiple shadow layers to the root.
      it 'should return the original of a shadow\'s shadow', ->
        model = new Model()
        model.shadow().shadow().original().should.equal(model)
      # originals() lists every ancestor, nearest parent first.
      it 'should return all shadow parents of a model', ->
        a = new Model()
        b = a.shadow()
        c = b.shadow()
        originals = c.originals()
        originals.length.should.equal(2)
        originals[0].should.equal(b)
        originals[1].should.equal(a)
      it 'should return an empty array if it is an original asked for parents', ->
        (new Model()).originals().should.eql([])
      # A non-shadow model is its own original.
      it 'should return itself as the original if it is not a shadow', ->
        model = new Model()
        model.original().should.equal(model)
describe 'attributes', ->
it 'should return the parent\'s values', ->
model = new Model( test1: 'a' )
shadow = model.shadow()
shadow.get('test1').should.equal('a')
model.set('test2', 'b')
shadow.get('test2').should.equal('b')
it 'should override the parent\'s values with its own', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.get('test').should.equal('x')
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
model.get('test').should.equal('x')
it 'should revert to the parent\'s value on revert()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
shadow.revert('test')
shadow.get('test').should.equal('x')
it 'should return null for values that have been set and unset, even if the parent has values', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
shadow.unset('test')
(shadow.get('test') is null).should.equal(true)
shadow.revert('test')
shadow.get('test').should.equal('x')
it 'should return null for values that have been directly unset, even if the parent has values', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
(shadow.get('test') is null).should.equal(true)
it 'should return a shadow submodel if it sees a model', ->
submodel = new Model()
model = new Model( test: submodel )
shadow = model.shadow()
shadow.get('test').original().should.equal(submodel)
it 'should return a shadow submodel if it sees one in a reference', ->
submodel = new Model()
reference = new Reference('x')
model = new Model( test: reference )
shadow = model.shadow()
shadow.get('test').value.should.be.an.instanceof(Resolver)
reference.setValue(submodel)
shadow.get('test').value.original().should.equal(submodel)
    # Shadows observe their parent's attribute changes — but only for keys
    # the shadow has not itself overridden.
    describe 'events', ->
      it 'should event when an inherited attribute value changes', ->
        model = new Model( test: 'x' )
        shadow = model.shadow()
        evented = false
        shadow.watch('test').react (value) ->
          evented = true
          value.should.equal('y')
        model.set('test', 'y')
        evented.should.equal(true)
      # Once the shadow sets its own value, parent changes no longer fire.
      it 'should not event when an overriden inherited attribute changes', ->
        model = new Model( test: 'x' )
        shadow = model.shadow()
        shadow.set('test', 'y')
        evented = false
        shadow.watch('test').react(-> evented = true)
        model.set('test', 'z')
        evented.should.equal(false)
    # merge() pushes a shadow's overrides (sets, new keys, and unsets)
    # back into its parent model.
    describe 'merging', ->
      it 'should merge overriden changes up to its parent on merge()', ->
        model = new Model( test: 'x' )
        shadow = model.shadow()
        shadow.set('test', 'y')
        shadow.merge()
        model.get('test').should.equal('y')
      it 'should merge new attributes up to its parent on merge()', ->
        model = new Model()
        shadow = model.shadow()
        shadow.set('test', 'x')
        shadow.merge()
        model.get('test').should.equal('x')
      # An unset in the shadow clears the key on the parent after merge.
      it 'should clear unset attributes up to its parent on merge()', ->
        model = new Model( test: 'x' )
        shadow = model.shadow()
        shadow.unset('test')
        shadow.merge()
        should.not.exist(model.get('test'))
describe 'modification detection', ->
it 'should return false if a model has no parent', ->
model = new Model()
model.modified().should.equal(false)
model.attrModified('test').should.equal(false)
describe 'attribute', ->
it 'should return whether an attribute has changed', ->
model = new Model( test: 'x', test2: 'y' )
shadow = model.shadow()
shadow.set('test', 'z')
shadow.attrModified('test').should.equal(true)
shadow.attrModified('test2').should.equal(false)
it 'should handle unset values correctly', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
shadow.attrModified('test').should.equal(true)
shadow.unset('test2')
shadow.attrModified('test2').should.equal(false)
it 'should handle newly set attributes correctly', ->
model = new Model()
shadow = model.shadow()
shadow.set('test', new Model())
shadow.attrModified('test').should.equal(true)
it 'should ignore transient attributes', ->
class TestModel extends Model
@attribute 'test', class extends attribute.Attribute
transient: true
model = new TestModel( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.attrModified('test').should.equal(false)
it 'should compare model reference on shallow compare', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('test2', 'x')
shadow.attrModified('test', false).should.equal(false)
it 'should compare model modified on deep compare', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('test2', 'x')
shadow.attrModified('test', true).should.equal(true)
it 'should call a function to determine deepness with the right params', ->
model = new Model( test: new Model() )
shadow = model.shadow()
nested = new Model()
shadow.set('test', nested)
called = false
isDeep = (obj, path, val) ->
obj.should.equal(shadow)
path.should.equal('test')
val.should.equal(nested)
called = true
shadow.attrModified('test', isDeep)
called.should.equal(true)
it 'should use the result of the function to determine deepness', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('x', 'y')
shadow.attrModified('test', -> true).should.equal(true)
shadow.attrModified('test', -> false).should.equal(false)
it 'should pass the function through if deep', ->
model = new Model( test: new Model( test2: 'x' ) )
shadow = model.shadow()
shadow.get('test').set('test2', 'y')
called = 0
isDeep = -> called += 1; true
shadow.attrModified('test', isDeep)
called.should.equal(2)
it 'should flatten and compare References', ->
submodel = new Model()
reference = new Reference()
model = new Model( test: reference )
shadow = model.shadow()
reference.setValue(submodel)
shadow.attrModified('test', false).should.equal(false)
shadow.attrModified('test', true).should.equal(false)
shadow.get('test').value.set('test2', 'x')
shadow.attrModified('test', false).should.equal(false)
shadow.attrModified('test', true).should.equal(true)
describe 'model', ->
it 'should return whether any attributes have changed', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.modified().should.equal(false)
shadow.set('test2', 'y')
shadow.modified().should.equal(true)
describe 'watch shallow', ->
it 'should vary depending on the modified state', ->
model = new Model()
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified(false).reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.set('test', 'x')
shadow.unset('test')
expected.length.should.equal(0)
it 'should watch nested models shallowly', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(false).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(false)
it 'should watch shallowly if a falsy function is provided', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(-> false).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(false)
describe 'watch deep', ->
it 'should vary depending on own modified state', ->
model = new Model()
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.set('test', 'x')
shadow.unset('test')
expected.length.should.equal(0)
it 'should vary depending on submodel state', ->
model = new Model( test: new Model() )
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.get('test').set('test2', 'x')
shadow.get('test').revert('test2')
it 'should vary depending on new submodel state', ->
model = new Model()
shadow = model.shadow()
evented = false
shadow.watchModified().reactNow((isModified) -> evented = true if isModified)
model.set('test', new Model())
evented.should.equal(false)
shadow.get('test').set('test2', 'x')
evented.should.equal(true)
it 'should not vary depending on discarded submodel state', ->
model = new Model( test: new Model() )
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
submodel = shadow.get('test')
submodel.set('test2', 'x')
shadow.unset('test')
submodel.set('test3', 'y')
it 'should watch deeply if a truish function is provided', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(-> true).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(true)
it 'should pass through the deepness function', ->
nested = new Model( test2: new Model() )
model = new Model( test: nested )
shadow = model.shadow()
evented = false
shadow.watchModified((model) -> model.original() isnt nested).reactNow((value) -> evented = true if value is true)
shadow.get('test').get('test2').set('x', 'y')
evented.should.equal(false)
shadow.get('test').set('a', 'b')
evented.should.equal(true)
it 'should vary when a Reference resolves', ->
varying = new Reference()
model = new Model( test: varying )
shadow = model.shadow()
expected = [ false, true ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
submodel = (new Model()).shadow()
shadow.get('test').setValue(submodel)
submodel.set('testSub', 'y')
should = require('should')
Model = require('../../lib/model/model').Model
Issue = require('../../lib/model/issue').Issue
attribute = require('../../lib/model/attribute')
{ Reference, Resolver } = require('../../lib/model/reference')
Varying = require('../../lib/core/varying').Varying
collection = require('../../lib/collection/collection')
describe 'Model', ->
  # Basic construction: with or without an initial attribute bag.
  describe 'core', ->
    it 'should construct', ->
      (new Model()).should.be.an.instanceof(Model)
    it 'should construct with an attribute bag', ->
      (new Model( test: 'attr' )).attributes.test.should.equal('attr')
describe 'attribute', ->
    # get() supports dot-delimited paths into nested attribute bags and
    # yields null (not undefined) for missing keys.
    describe 'get', ->
      it 'should be able to get a shallow attribute', ->
        model = new Model( vivace: 'brix' )
        model.get('vivace').should.equal('brix')
      it 'should be able to get a deep attribute', ->
        model = new Model( cafe: { vivace: 'brix' } )
        model.get('cafe.vivace').should.equal('brix')
      it 'should return null on nonexistent attributes', ->
        model = new Model( broad: 'way' )
        (model.get('vivace') is null).should.be.true
        (model.get('cafe.vivace') is null).should.be.true
    # set() accepts a key (shallow or dot-path) plus value, or a whole
    # attribute bag; bags are merged deeply into existing attributes.
    describe 'set', ->
      it 'should be able to set a shallow attribute', ->
        model = new Model()
        model.set('colman', 'pool')
        model.attributes.colman.should.equal('pool')
        model.get('colman').should.equal('pool')
      it 'should be able to set a deep attribute', ->
        model = new Model()
        model.set('colman.pool', 'slide')
        model.attributes.colman.pool.should.equal('slide')
        model.get('colman.pool').should.equal('slide')
      it 'should accept a bag of attributes', ->
        model = new Model()
        model.set( the: 'stranger' )
        model.attributes.the.should.equal('stranger')
      # A bag set must not clobber sibling keys in nested objects.
      it 'should deep write all attributes in a given bag', ->
        model = new Model( the: { stranger: 'seattle' } )
        model.set( the: { joule: 'apartments' }, black: 'dog' )
        model.attributes.the.stranger.should.equal('seattle')
        model.get('the.stranger').should.equal('seattle')
        model.attributes.the.joule.should.equal('apartments')
        model.get('the.joule').should.equal('apartments')
        model.attributes.black.should.equal('dog')
        model.get('black').should.equal('dog')
describe 'unset', ->
it 'should be able to unset an attribute', ->
model = new Model( cafe: { vivace: '<NAME>' } )
model.unset('cafe.vivace')
(model.get('cafe.vivace') is null).should.be.true
it 'should be able to unset an attribute tree', ->
model = new Model( cafe: { vivace: '<NAME>' } )
model.unset('cafe')
(model.get('cafe.vivace') is null).should.be.true
(model.get('cafe') is null).should.be.true
    # setAll() replaces the model's entire attribute state with the given
    # bag: new keys are written and absent keys are cleared.
    describe 'setAll', ->
      it 'should set all attributes in the given bag', ->
        model = new Model()
        model.setAll( the: { stranger: 'seattle', joule: 'apartments' } )
        model.attributes.the.stranger.should.equal('seattle')
        model.get('the.stranger').should.equal('seattle')
        model.attributes.the.joule.should.equal('apartments')
        model.get('the.joule').should.equal('apartments')
      # Keys not present in the bag — including nested ones — are removed.
      it 'should clear attributes not in the given bag', ->
        model = new Model( una: 'bella', tazza: { di: 'caffe' } )
        model.setAll( tazza: { of: 'cafe' } )
        should.not.exist(model.attributes.una)
        (model.get('una') is null).should.be.true
        should.not.exist(model.attributes.tazza.di)
        (model.get('tazza.di') is null).should.be.true
        model.attributes.tazza.of.should.equal('cafe')
        model.get('tazza.of').should.equal('cafe')
describe 'binding', ->
    # @bind declares a derived attribute kept in sync from other
    # attributes, nested model paths, or an arbitrary Varying.
    describe 'application', ->
      it 'should bind one attribute from another', ->
        class TestModel extends Model
          @bind('slave').from('master')
        model = new TestModel()
        should.not.exist(model.get('slave'))
        model.set('master', 'commander')
        model.get('slave').should.equal('commander')
      # from(a, b) walks into a submodel: binds from child.get('id').
      it 'should iterate into nodes', ->
        class TestModel extends Model
          @bind('child_id').from('child', 'id')
        (new TestModel( child: new Model( id: 1 ) )).get('child_id').should.equal(1)
      it 'should flatMap multiple attributes together', ->
        class TestModel extends Model
          @bind('c').from('a').and('b').flatMap((a, b) -> a + b)
        model = new TestModel()
        model.set( a: 3, b: 4 )
        model.get('c').should.equal(7)
      # fromVarying tracks an external Varying's value over time.
      it 'should be able to bind from a Varying', ->
        v = new Varying(2)
        class TestModel extends Model
          @bind('x').fromVarying(-> v)
        model = new TestModel()
        model.get('x').should.equal(2)
        v.setValue(4)
        model.get('x').should.equal(4)
      # The fromVarying factory runs with the model instance as `this`.
      it 'should give model as this in Varying bind', ->
        called = false
        class TestModel extends Model
          @bind('y').fromVarying ->
            called = true
            this.should.be.an.instanceof(TestModel)
            new Varying()
        new TestModel()
        called.should.be.true
      # fallback() supplies a value until the bound source produces one.
      it 'should take a fallback', ->
        class TestModel extends Model
          @bind('z').from('a').fallback('value')
        model = new TestModel()
        model.get('z').should.equal('value')
        model.set('a', 'test')
        model.get('z').should.equal('test')
    # Bindings are declared per-class: they must not leak between
    # unrelated classes or across sibling subclasses, but do inherit
    # downward and may be overridden by subclasses.
    describe 'classtree', ->
      it 'should not pollute across classdefs', ->
        class TestA extends Model
          @bind('a').from('c')
        class TestB extends Model
          @bind('b').from('c')
        a = new TestA()
        b = new TestB()
        b.set('c', 47)
        should.not.exist(b.get('a'))
      # Sibling subclasses must not see each other's bindings.
      it 'should not pollute crosstree', ->
        class Root extends Model
          @bind('root').from('x')
        class Left extends Root
          @bind('left').from('x')
        class Right extends Root
          @bind('right').from('x')
        root = new Root( x: 'root' )
        should.not.exist(root.get('left'))
        should.not.exist(root.get('right'))
        left = new Left( x: 'left' )
        should.not.exist(left.get('right'))
        right = new Right( x: 'right' )
        should.not.exist(right.get('left'))
      it 'should extend downtree', ->
        class Root extends Model
          @bind('root').from('x')
        class Child extends Root
          @bind('child').from('x')
        (new Child( x: 'test' )).get('root').should.equal('test')
      # A child re-binding the same key wins over the parent's binding.
      it 'should allow child bind to override parent', ->
        class Root extends Model
          @bind('contend').from('x')
        class Child extends Root
          @bind('contend').from('y')
        (new Child( x: 1, y: 2 )).get('contend').should.equal(2)
  # @attribute registers a typed attribute class under a key; instances
  # are retrieved (and memoized) via model.attribute(key), and the
  # registrations inherit down the class tree without cross-class leaks.
  describe 'defined attributes', ->
    it 'should be definable and fetchable', ->
      class TestModel extends Model
        @attribute('attr', attribute.TextAttribute)
      (new TestModel()).attribute('attr').should.be.an.instanceof(attribute.TextAttribute)
    it 'should inherit down the classtree', ->
      class Root extends Model
        @attribute('attr', attribute.NumberAttribute)
      class Child extends Root
      (new Child()).attribute('attr').should.be.an.instanceof(attribute.NumberAttribute)
    it 'should not pollute across classdefs', ->
      class A extends Model
        @attribute('a', attribute.NumberAttribute)
      class B extends Model
        @attribute('b', attribute.NumberAttribute)
      should.not.exist((new A()).attribute('b'))
      should.not.exist((new B()).attribute('a'))
    # attribute() must return the same instance on repeated calls.
    it 'should memoize results', ->
      class TestModel extends Model
        @attribute('attr', attribute.BooleanAttribute)
      model = new TestModel()
      model.attribute('attr').should.equal(model.attribute('attr'))
# TODO: many noncovered methods
describe 'issues', ->
it 'should return an empty list by default', ->
issues = (new Model()).issues()
issues.should.be.an.instanceof(collection.Collection)
issues.list.length.should.equal(0)
it 'should contain issues from the Model level', ->
issueList = new collection.List()
class TestModel extends Model
_issues: -> issueList
model = new TestModel()
model.issues().list.length.should.equal(0)
issueList.add(new Issue( active: true ))
model.issues().list.length.should.equal(1)
issueList.removeAll()
model.issues().list.length.should.equal(0)
it 'should contain issues from the Attribute level', ->
issueList = new collection.List()
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> issueList
model = new TestModel()
model.issues().list.length.should.equal(0)
issueList.add(new Issue( active: true ))
model.issues().list.length.should.equal(1)
issueList.removeAll()
model.issues().list.length.should.equal(0)
it 'should only contain active issues', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: false )
model.issues().list.length.should.equal(0)
model.set('attr', true)
model.issues().list.length.should.equal(1)
model.set('attr', false)
model.issues().list.length.should.equal(0)
describe 'validity', ->
it 'should return true if no active issues exist', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: false )
model.valid().value.should.equal(true)
it 'should return false if one or more active issues exist', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
@attribute 'attr2', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: true, attr2: false )
model.valid().value.should.equal(false)
model.set('attr2', true)
model.valid().value.should.equal(false)
model.set('attr', false)
model.set('attr2', false)
model.valid().value.should.equal(true)
it 'should take a severity threshold', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: ->
new collection.List([
new Issue( active: this.watchValue().map((val) -> val > 0), severity: 2 )
new Issue( active: this.watchValue().map((val) -> val > 1), severity: 1 )
])
model = new TestModel( attr: 0 )
model.valid().value.should.equal(true)
model.set('attr', 1)
model.valid(1).value.should.equal(true)
model.valid(2).value.should.equal(false)
model.set('attr', 2)
model.valid(1).value.should.equal(false)
model.valid(2).value.should.equal(false)
describe 'shadowing', ->
describe 'creation', ->
it 'should create a new instance of the same model class', ->
class TestModel extends Model
model = new TestModel()
shadow = model.shadow()
shadow.should.not.equal(model)
shadow.should.be.an.instanceof(TestModel)
it 'should return the original of a shadow', ->
model = new Model()
model.shadow().original().should.equal(model)
it 'should return the original of a shadow\'s shadow', ->
model = new Model()
model.shadow().shadow().original().should.equal(model)
it 'should return all shadow parents of a model', ->
a = new Model()
b = a.shadow()
c = b.shadow()
originals = c.originals()
originals.length.should.equal(2)
originals[0].should.equal(b)
originals[1].should.equal(a)
it 'should return an empty array if it is an original asked for parents', ->
(new Model()).originals().should.eql([])
it 'should return itself as the original if it is not a shadow', ->
model = new Model()
model.original().should.equal(model)
describe 'attributes', ->
it 'should return the parent\'s values', ->
model = new Model( test1: 'a' )
shadow = model.shadow()
shadow.get('test1').should.equal('a')
model.set('test2', 'b')
shadow.get('test2').should.equal('b')
it 'should override the parent\'s values with its own', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.get('test').should.equal('x')
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
model.get('test').should.equal('x')
it 'should revert to the parent\'s value on revert()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
shadow.revert('test')
shadow.get('test').should.equal('x')
it 'should return null for values that have been set and unset, even if the parent has values', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
shadow.unset('test')
(shadow.get('test') is null).should.equal(true)
shadow.revert('test')
shadow.get('test').should.equal('x')
it 'should return null for values that have been directly unset, even if the parent has values', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
(shadow.get('test') is null).should.equal(true)
it 'should return a shadow submodel if it sees a model', ->
submodel = new Model()
model = new Model( test: submodel )
shadow = model.shadow()
shadow.get('test').original().should.equal(submodel)
it 'should return a shadow submodel if it sees one in a reference', ->
submodel = new Model()
reference = new Reference('x')
model = new Model( test: reference )
shadow = model.shadow()
shadow.get('test').value.should.be.an.instanceof(Resolver)
reference.setValue(submodel)
shadow.get('test').value.original().should.equal(submodel)
describe 'events', ->
it 'should event when an inherited attribute value changes', ->
model = new Model( test: 'x' )
shadow = model.shadow()
evented = false
shadow.watch('test').react (value) ->
evented = true
value.should.equal('y')
model.set('test', 'y')
evented.should.equal(true)
it 'should not event when an overriden inherited attribute changes', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
evented = false
shadow.watch('test').react(-> evented = true)
model.set('test', 'z')
evented.should.equal(false)
describe 'merging', ->
it 'should merge overriden changes up to its parent on merge()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.merge()
model.get('test').should.equal('y')
it 'should merge new attributes up to its parent on merge()', ->
model = new Model()
shadow = model.shadow()
shadow.set('test', 'x')
shadow.merge()
model.get('test').should.equal('x')
it 'should clear unset attributes up to its parent on merge()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
shadow.merge()
should.not.exist(model.get('test'))
describe 'modification detection', ->
it 'should return false if a model has no parent', ->
model = new Model()
model.modified().should.equal(false)
model.attrModified('test').should.equal(false)
describe 'attribute', ->
it 'should return whether an attribute has changed', ->
model = new Model( test: 'x', test2: 'y' )
shadow = model.shadow()
shadow.set('test', 'z')
shadow.attrModified('test').should.equal(true)
shadow.attrModified('test2').should.equal(false)
it 'should handle unset values correctly', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
shadow.attrModified('test').should.equal(true)
shadow.unset('test2')
shadow.attrModified('test2').should.equal(false)
it 'should handle newly set attributes correctly', ->
model = new Model()
shadow = model.shadow()
shadow.set('test', new Model())
shadow.attrModified('test').should.equal(true)
it 'should ignore transient attributes', ->
class TestModel extends Model
@attribute 'test', class extends attribute.Attribute
transient: true
model = new TestModel( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.attrModified('test').should.equal(false)
it 'should compare model reference on shallow compare', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('test2', 'x')
shadow.attrModified('test', false).should.equal(false)
it 'should compare model modified on deep compare', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('test2', 'x')
shadow.attrModified('test', true).should.equal(true)
it 'should call a function to determine deepness with the right params', ->
model = new Model( test: new Model() )
shadow = model.shadow()
nested = new Model()
shadow.set('test', nested)
called = false
isDeep = (obj, path, val) ->
obj.should.equal(shadow)
path.should.equal('test')
val.should.equal(nested)
called = true
shadow.attrModified('test', isDeep)
called.should.equal(true)
it 'should use the result of the function to determine deepness', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('x', 'y')
shadow.attrModified('test', -> true).should.equal(true)
shadow.attrModified('test', -> false).should.equal(false)
it 'should pass the function through if deep', ->
model = new Model( test: new Model( test2: 'x' ) )
shadow = model.shadow()
shadow.get('test').set('test2', 'y')
called = 0
isDeep = -> called += 1; true
shadow.attrModified('test', isDeep)
called.should.equal(2)
it 'should flatten and compare References', ->
submodel = new Model()
reference = new Reference()
model = new Model( test: reference )
shadow = model.shadow()
reference.setValue(submodel)
shadow.attrModified('test', false).should.equal(false)
shadow.attrModified('test', true).should.equal(false)
shadow.get('test').value.set('test2', 'x')
shadow.attrModified('test', false).should.equal(false)
shadow.attrModified('test', true).should.equal(true)
describe 'model', ->
it 'should return whether any attributes have changed', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.modified().should.equal(false)
shadow.set('test2', 'y')
shadow.modified().should.equal(true)
describe 'watch shallow', ->
it 'should vary depending on the modified state', ->
model = new Model()
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified(false).reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.set('test', 'x')
shadow.unset('test')
expected.length.should.equal(0)
it 'should watch nested models shallowly', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(false).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(false)
it 'should watch shallowly if a falsy function is provided', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(-> false).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(false)
describe 'watch deep', ->
it 'should vary depending on own modified state', ->
model = new Model()
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.set('test', 'x')
shadow.unset('test')
expected.length.should.equal(0)
it 'should vary depending on submodel state', ->
model = new Model( test: new Model() )
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.get('test').set('test2', 'x')
shadow.get('test').revert('test2')
it 'should vary depending on new submodel state', ->
model = new Model()
shadow = model.shadow()
evented = false
shadow.watchModified().reactNow((isModified) -> evented = true if isModified)
model.set('test', new Model())
evented.should.equal(false)
shadow.get('test').set('test2', 'x')
evented.should.equal(true)
it 'should not vary depending on discarded submodel state', ->
model = new Model( test: new Model() )
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
submodel = shadow.get('test')
submodel.set('test2', 'x')
shadow.unset('test')
submodel.set('test3', 'y')
it 'should watch deeply if a truish function is provided', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(-> true).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(true)
it 'should pass through the deepness function', ->
nested = new Model( test2: new Model() )
model = new Model( test: nested )
shadow = model.shadow()
evented = false
shadow.watchModified((model) -> model.original() isnt nested).reactNow((value) -> evented = true if value is true)
shadow.get('test').get('test2').set('x', 'y')
evented.should.equal(false)
shadow.get('test').set('a', 'b')
evented.should.equal(true)
it 'should vary when a Reference resolves', ->
varying = new Reference()
model = new Model( test: varying )
shadow = model.shadow()
expected = [ false, true ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
submodel = (new Model()).shadow()
shadow.get('test').setValue(submodel)
submodel.set('testSub', 'y')
should = require('should')
Model = require('../../lib/model/model').Model
Issue = require('../../lib/model/issue').Issue
attribute = require('../../lib/model/attribute')
{ Reference, Resolver } = require('../../lib/model/reference')
Varying = require('../../lib/core/varying').Varying
collection = require('../../lib/collection/collection')
describe 'Model', ->
describe 'core', ->
it 'should construct', ->
(new Model()).should.be.an.instanceof(Model)
it 'should construct with an attribute bag', ->
(new Model( test: 'attr' )).attributes.test.should.equal('attr')
describe 'attribute', ->
describe 'get', ->
it 'should be able to get a shallow attribute', ->
model = new Model( vivace: 'brix' )
model.get('vivace').should.equal('brix')
it 'should be able to get a deep attribute', ->
model = new Model( cafe: { vivace: 'brix' } )
model.get('cafe.vivace').should.equal('brix')
it 'should return null on nonexistent attributes', ->
model = new Model( broad: 'way' )
(model.get('vivace') is null).should.be.true
(model.get('cafe.vivace') is null).should.be.true
describe 'set', ->
it 'should be able to set a shallow attribute', ->
model = new Model()
model.set('colman', 'pool')
model.attributes.colman.should.equal('pool')
model.get('colman').should.equal('pool')
it 'should be able to set a deep attribute', ->
model = new Model()
model.set('colman.pool', 'slide')
model.attributes.colman.pool.should.equal('slide')
model.get('colman.pool').should.equal('slide')
it 'should accept a bag of attributes', ->
model = new Model()
model.set( the: 'stranger' )
model.attributes.the.should.equal('stranger')
it 'should deep write all attributes in a given bag', ->
model = new Model( the: { stranger: 'seattle' } )
model.set( the: { joule: 'apartments' }, black: 'dog' )
model.attributes.the.stranger.should.equal('seattle')
model.get('the.stranger').should.equal('seattle')
model.attributes.the.joule.should.equal('apartments')
model.get('the.joule').should.equal('apartments')
model.attributes.black.should.equal('dog')
model.get('black').should.equal('dog')
describe 'unset', ->
it 'should be able to unset an attribute', ->
model = new Model( cafe: { vivace: 'PI:NAME:<NAME>END_PI' } )
model.unset('cafe.vivace')
(model.get('cafe.vivace') is null).should.be.true
it 'should be able to unset an attribute tree', ->
model = new Model( cafe: { vivace: 'PI:NAME:<NAME>END_PI' } )
model.unset('cafe')
(model.get('cafe.vivace') is null).should.be.true
(model.get('cafe') is null).should.be.true
describe 'setAll', ->
it 'should set all attributes in the given bag', ->
model = new Model()
model.setAll( the: { stranger: 'seattle', joule: 'apartments' } )
model.attributes.the.stranger.should.equal('seattle')
model.get('the.stranger').should.equal('seattle')
model.attributes.the.joule.should.equal('apartments')
model.get('the.joule').should.equal('apartments')
it 'should clear attributes not in the given bag', ->
model = new Model( una: 'bella', tazza: { di: 'caffe' } )
model.setAll( tazza: { of: 'cafe' } )
should.not.exist(model.attributes.una)
(model.get('una') is null).should.be.true
should.not.exist(model.attributes.tazza.di)
(model.get('tazza.di') is null).should.be.true
model.attributes.tazza.of.should.equal('cafe')
model.get('tazza.of').should.equal('cafe')
describe 'binding', ->
describe 'application', ->
it 'should bind one attribute from another', ->
class TestModel extends Model
@bind('slave').from('master')
model = new TestModel()
should.not.exist(model.get('slave'))
model.set('master', 'commander')
model.get('slave').should.equal('commander')
it 'should iterate into nodes', ->
class TestModel extends Model
@bind('child_id').from('child', 'id')
(new TestModel( child: new Model( id: 1 ) )).get('child_id').should.equal(1)
it 'should flatMap multiple attributes together', ->
class TestModel extends Model
@bind('c').from('a').and('b').flatMap((a, b) -> a + b)
model = new TestModel()
model.set( a: 3, b: 4 )
model.get('c').should.equal(7)
it 'should be able to bind from a Varying', ->
v = new Varying(2)
class TestModel extends Model
@bind('x').fromVarying(-> v)
model = new TestModel()
model.get('x').should.equal(2)
v.setValue(4)
model.get('x').should.equal(4)
it 'should give model as this in Varying bind', ->
called = false
class TestModel extends Model
@bind('y').fromVarying ->
called = true
this.should.be.an.instanceof(TestModel)
new Varying()
new TestModel()
called.should.be.true
it 'should take a fallback', ->
class TestModel extends Model
@bind('z').from('a').fallback('value')
model = new TestModel()
model.get('z').should.equal('value')
model.set('a', 'test')
model.get('z').should.equal('test')
describe 'classtree', ->
it 'should not pollute across classdefs', ->
class TestA extends Model
@bind('a').from('c')
class TestB extends Model
@bind('b').from('c')
a = new TestA()
b = new TestB()
b.set('c', 47)
should.not.exist(b.get('a'))
it 'should not pollute crosstree', ->
class Root extends Model
@bind('root').from('x')
class Left extends Root
@bind('left').from('x')
class Right extends Root
@bind('right').from('x')
root = new Root( x: 'root' )
should.not.exist(root.get('left'))
should.not.exist(root.get('right'))
left = new Left( x: 'left' )
should.not.exist(left.get('right'))
right = new Right( x: 'right' )
should.not.exist(right.get('left'))
it 'should extend downtree', ->
class Root extends Model
@bind('root').from('x')
class Child extends Root
@bind('child').from('x')
(new Child( x: 'test' )).get('root').should.equal('test')
it 'should allow child bind to override parent', ->
class Root extends Model
@bind('contend').from('x')
class Child extends Root
@bind('contend').from('y')
(new Child( x: 1, y: 2 )).get('contend').should.equal(2)
describe 'defined attributes', ->
it 'should be definable and fetchable', ->
class TestModel extends Model
@attribute('attr', attribute.TextAttribute)
(new TestModel()).attribute('attr').should.be.an.instanceof(attribute.TextAttribute)
it 'should inherit down the classtree', ->
class Root extends Model
@attribute('attr', attribute.NumberAttribute)
class Child extends Root
(new Child()).attribute('attr').should.be.an.instanceof(attribute.NumberAttribute)
it 'should not pollute across classdefs', ->
class A extends Model
@attribute('a', attribute.NumberAttribute)
class B extends Model
@attribute('b', attribute.NumberAttribute)
should.not.exist((new A()).attribute('b'))
should.not.exist((new B()).attribute('a'))
it 'should memoize results', ->
class TestModel extends Model
@attribute('attr', attribute.BooleanAttribute)
model = new TestModel()
model.attribute('attr').should.equal(model.attribute('attr'))
# TODO: many noncovered methods
describe 'issues', ->
it 'should return an empty list by default', ->
issues = (new Model()).issues()
issues.should.be.an.instanceof(collection.Collection)
issues.list.length.should.equal(0)
it 'should contain issues from the Model level', ->
issueList = new collection.List()
class TestModel extends Model
_issues: -> issueList
model = new TestModel()
model.issues().list.length.should.equal(0)
issueList.add(new Issue( active: true ))
model.issues().list.length.should.equal(1)
issueList.removeAll()
model.issues().list.length.should.equal(0)
it 'should contain issues from the Attribute level', ->
issueList = new collection.List()
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> issueList
model = new TestModel()
model.issues().list.length.should.equal(0)
issueList.add(new Issue( active: true ))
model.issues().list.length.should.equal(1)
issueList.removeAll()
model.issues().list.length.should.equal(0)
it 'should only contain active issues', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: false )
model.issues().list.length.should.equal(0)
model.set('attr', true)
model.issues().list.length.should.equal(1)
model.set('attr', false)
model.issues().list.length.should.equal(0)
describe 'validity', ->
it 'should return true if no active issues exist', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: false )
model.valid().value.should.equal(true)
it 'should return false if one or more active issues exist', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
@attribute 'attr2', class extends attribute.Attribute
issues: -> new collection.List([ new Issue( active: this.watchValue() ) ])
model = new TestModel( attr: true, attr2: false )
model.valid().value.should.equal(false)
model.set('attr2', true)
model.valid().value.should.equal(false)
model.set('attr', false)
model.set('attr2', false)
model.valid().value.should.equal(true)
it 'should take a severity threshold', ->
class TestModel extends Model
@attribute 'attr', class extends attribute.Attribute
issues: ->
new collection.List([
new Issue( active: this.watchValue().map((val) -> val > 0), severity: 2 )
new Issue( active: this.watchValue().map((val) -> val > 1), severity: 1 )
])
model = new TestModel( attr: 0 )
model.valid().value.should.equal(true)
model.set('attr', 1)
model.valid(1).value.should.equal(true)
model.valid(2).value.should.equal(false)
model.set('attr', 2)
model.valid(1).value.should.equal(false)
model.valid(2).value.should.equal(false)
describe 'shadowing', ->
describe 'creation', ->
it 'should create a new instance of the same model class', ->
class TestModel extends Model
model = new TestModel()
shadow = model.shadow()
shadow.should.not.equal(model)
shadow.should.be.an.instanceof(TestModel)
it 'should return the original of a shadow', ->
model = new Model()
model.shadow().original().should.equal(model)
it 'should return the original of a shadow\'s shadow', ->
model = new Model()
model.shadow().shadow().original().should.equal(model)
it 'should return all shadow parents of a model', ->
a = new Model()
b = a.shadow()
c = b.shadow()
originals = c.originals()
originals.length.should.equal(2)
originals[0].should.equal(b)
originals[1].should.equal(a)
it 'should return an empty array if it is an original asked for parents', ->
(new Model()).originals().should.eql([])
it 'should return itself as the original if it is not a shadow', ->
model = new Model()
model.original().should.equal(model)
describe 'attributes', ->
it 'should return the parent\'s values', ->
model = new Model( test1: 'a' )
shadow = model.shadow()
shadow.get('test1').should.equal('a')
model.set('test2', 'b')
shadow.get('test2').should.equal('b')
it 'should override the parent\'s values with its own', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.get('test').should.equal('x')
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
model.get('test').should.equal('x')
it 'should revert to the parent\'s value on revert()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
shadow.revert('test')
shadow.get('test').should.equal('x')
it 'should return null for values that have been set and unset, even if the parent has values', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.get('test').should.equal('y')
shadow.unset('test')
(shadow.get('test') is null).should.equal(true)
shadow.revert('test')
shadow.get('test').should.equal('x')
it 'should return null for values that have been directly unset, even if the parent has values', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
(shadow.get('test') is null).should.equal(true)
it 'should return a shadow submodel if it sees a model', ->
submodel = new Model()
model = new Model( test: submodel )
shadow = model.shadow()
shadow.get('test').original().should.equal(submodel)
it 'should return a shadow submodel if it sees one in a reference', ->
submodel = new Model()
reference = new Reference('x')
model = new Model( test: reference )
shadow = model.shadow()
shadow.get('test').value.should.be.an.instanceof(Resolver)
reference.setValue(submodel)
shadow.get('test').value.original().should.equal(submodel)
describe 'events', ->
it 'should event when an inherited attribute value changes', ->
model = new Model( test: 'x' )
shadow = model.shadow()
evented = false
shadow.watch('test').react (value) ->
evented = true
value.should.equal('y')
model.set('test', 'y')
evented.should.equal(true)
it 'should not event when an overriden inherited attribute changes', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
evented = false
shadow.watch('test').react(-> evented = true)
model.set('test', 'z')
evented.should.equal(false)
describe 'merging', ->
it 'should merge overriden changes up to its parent on merge()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.merge()
model.get('test').should.equal('y')
it 'should merge new attributes up to its parent on merge()', ->
model = new Model()
shadow = model.shadow()
shadow.set('test', 'x')
shadow.merge()
model.get('test').should.equal('x')
it 'should clear unset attributes up to its parent on merge()', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
shadow.merge()
should.not.exist(model.get('test'))
describe 'modification detection', ->
it 'should return false if a model has no parent', ->
model = new Model()
model.modified().should.equal(false)
model.attrModified('test').should.equal(false)
describe 'attribute', ->
it 'should return whether an attribute has changed', ->
model = new Model( test: 'x', test2: 'y' )
shadow = model.shadow()
shadow.set('test', 'z')
shadow.attrModified('test').should.equal(true)
shadow.attrModified('test2').should.equal(false)
it 'should handle unset values correctly', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.unset('test')
shadow.attrModified('test').should.equal(true)
shadow.unset('test2')
shadow.attrModified('test2').should.equal(false)
it 'should handle newly set attributes correctly', ->
model = new Model()
shadow = model.shadow()
shadow.set('test', new Model())
shadow.attrModified('test').should.equal(true)
it 'should ignore transient attributes', ->
class TestModel extends Model
@attribute 'test', class extends attribute.Attribute
transient: true
model = new TestModel( test: 'x' )
shadow = model.shadow()
shadow.set('test', 'y')
shadow.attrModified('test').should.equal(false)
it 'should compare model reference on shallow compare', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('test2', 'x')
shadow.attrModified('test', false).should.equal(false)
it 'should compare model modified on deep compare', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('test2', 'x')
shadow.attrModified('test', true).should.equal(true)
it 'should call a function to determine deepness with the right params', ->
model = new Model( test: new Model() )
shadow = model.shadow()
nested = new Model()
shadow.set('test', nested)
called = false
isDeep = (obj, path, val) ->
obj.should.equal(shadow)
path.should.equal('test')
val.should.equal(nested)
called = true
shadow.attrModified('test', isDeep)
called.should.equal(true)
it 'should use the result of the function to determine deepness', ->
model = new Model( test: new Model() )
shadow = model.shadow()
shadow.get('test').set('x', 'y')
shadow.attrModified('test', -> true).should.equal(true)
shadow.attrModified('test', -> false).should.equal(false)
it 'should pass the function through if deep', ->
model = new Model( test: new Model( test2: 'x' ) )
shadow = model.shadow()
shadow.get('test').set('test2', 'y')
called = 0
isDeep = -> called += 1; true
shadow.attrModified('test', isDeep)
called.should.equal(2)
it 'should flatten and compare References', ->
submodel = new Model()
reference = new Reference()
model = new Model( test: reference )
shadow = model.shadow()
reference.setValue(submodel)
shadow.attrModified('test', false).should.equal(false)
shadow.attrModified('test', true).should.equal(false)
shadow.get('test').value.set('test2', 'x')
shadow.attrModified('test', false).should.equal(false)
shadow.attrModified('test', true).should.equal(true)
describe 'model', ->
it 'should return whether any attributes have changed', ->
model = new Model( test: 'x' )
shadow = model.shadow()
shadow.modified().should.equal(false)
shadow.set('test2', 'y')
shadow.modified().should.equal(true)
describe 'watch shallow', ->
it 'should vary depending on the modified state', ->
model = new Model()
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified(false).reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.set('test', 'x')
shadow.unset('test')
expected.length.should.equal(0)
it 'should watch nested models shallowly', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(false).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(false)
it 'should watch shallowly if a falsy function is provided', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(-> false).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(false)
describe 'watch deep', ->
it 'should vary depending on own modified state', ->
model = new Model()
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.set('test', 'x')
shadow.unset('test')
expected.length.should.equal(0)
it 'should vary depending on submodel state', ->
model = new Model( test: new Model() )
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
shadow.get('test').set('test2', 'x')
shadow.get('test').revert('test2')
it 'should vary depending on new submodel state', ->
model = new Model()
shadow = model.shadow()
evented = false
shadow.watchModified().reactNow((isModified) -> evented = true if isModified)
model.set('test', new Model())
evented.should.equal(false)
shadow.get('test').set('test2', 'x')
evented.should.equal(true)
it 'should not vary depending on discarded submodel state', ->
model = new Model( test: new Model() )
shadow = model.shadow()
expected = [ false, true, false ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
submodel = shadow.get('test')
submodel.set('test2', 'x')
shadow.unset('test')
submodel.set('test3', 'y')
it 'should watch deeply if a truish function is provided', ->
model = new Model( test: new Model() )
shadow = model.shadow()
evented = false
shadow.watchModified(-> true).reactNow((value) -> evented = true if value is true)
shadow.get('test').set('test2', 'x')
evented.should.equal(true)
it 'should pass through the deepness function', ->
nested = new Model( test2: new Model() )
model = new Model( test: nested )
shadow = model.shadow()
evented = false
shadow.watchModified((model) -> model.original() isnt nested).reactNow((value) -> evented = true if value is true)
shadow.get('test').get('test2').set('x', 'y')
evented.should.equal(false)
shadow.get('test').set('a', 'b')
evented.should.equal(true)
it 'should vary when a Reference resolves', ->
varying = new Reference()
model = new Model( test: varying )
shadow = model.shadow()
expected = [ false, true ]
shadow.watchModified().reactNow((isModified) -> isModified.should.equal(expected.shift()))
submodel = (new Model()).shadow()
shadow.get('test').setValue(submodel)
submodel.set('testSub', 'y')
|
[
{
"context": "lue\"}\n myclient = \"myclient\"\n mysecret = \"mysecret\"\n\n afterEach ->\n restore(httpsMock)\n ",
"end": 752,
"score": 0.5084547400474548,
"start": 746,
"tag": "KEY",
"value": "secret"
}
] | test/auth/postman-spec.coffee | theodorick/ocelot | 13 | assert = require("assert")
sinon = require("sinon")
headers = require("../../src/auth/headers")
postman = require("../../src/auth/postman")
exchange = require("../../src/auth/exchange")
https = require("https")
config = require("config")
httpAgent = require('../../src/http-agent')
[postmanMock, headerMock, configMock, httpsMock, agentMock] = []
createAgent = (andThen) ->
url: ''
typeValue: ''
data: {}
post: (url) ->
@url = url
@
type: (typeValue) ->
@typeValue = typeValue
@
send: (data) ->
@data = merge(data, this.data)
@
then: andThen
describe 'postman', () ->
pingUrl = "http://someurl/like/this"
someQuery = {someKey: "someValue"}
myclient = "myclient"
mysecret = "mysecret"
afterEach ->
restore(httpsMock)
restore(postmanMock)
restore(headerMock)
restore(configMock)
restore(agentMock)
it 'post happy path', (done) ->
configMock = sinon.stub config, 'get'
configMock.withArgs('authentication.token-endpoint').returns pingUrl
agent = createAgent (pass, fail) ->
return Promise.resolve().then () ->
pass({body: {"pass": true}});
agentMock = sinon.stub(httpAgent, 'getAgent');
agentMock.withArgs().returns(agent);
postman.postAs(someQuery, myclient, mysecret).then () ->
assert.equal(agent.typeValue, "form")
assert.equal(agent.data.someKey, "someValue")
assert.equal(agent.data['client_id'], myclient)
assert.equal(agent.data['client_secret'], mysecret)
done()
, () -> done('post failed')
it 'returns error from json response', (done) ->
agent = createAgent (pass, fail) ->
Promise.resolve().then () ->
pass({statusCode: 404, text: '{"error": "error message"}'})
agentMock = sinon.stub httpAgent, 'getAgent'
agentMock.withArgs().returns(agent)
postman.postAs(someQuery, myclient, mysecret).then () ->
done('post should have failed');
, (error) ->
err = error.message
assert.equal(err, 'HTTP 404: {"error": "error message"}');
assert.equal(agent.typeValue, "form");
assert.equal(agent.data.someKey, "someValue");
assert.equal(agent.data['client_id'], myclient);
assert.equal(agent.data['client_secret'], mysecret);
done()
restore = (mockFunc) ->
if (mockFunc && mockFunc.restore)
mockFunc.restore()
merge = (obj1, obj2) ->
obj3 = {}
for name, value of obj1
obj3[name] = value
for name, value of obj2
obj3[name] = value
obj3;
| 58214 | assert = require("assert")
sinon = require("sinon")
headers = require("../../src/auth/headers")
postman = require("../../src/auth/postman")
exchange = require("../../src/auth/exchange")
https = require("https")
config = require("config")
httpAgent = require('../../src/http-agent')
[postmanMock, headerMock, configMock, httpsMock, agentMock] = []
createAgent = (andThen) ->
url: ''
typeValue: ''
data: {}
post: (url) ->
@url = url
@
type: (typeValue) ->
@typeValue = typeValue
@
send: (data) ->
@data = merge(data, this.data)
@
then: andThen
describe 'postman', () ->
pingUrl = "http://someurl/like/this"
someQuery = {someKey: "someValue"}
myclient = "myclient"
mysecret = "my<KEY>"
afterEach ->
restore(httpsMock)
restore(postmanMock)
restore(headerMock)
restore(configMock)
restore(agentMock)
it 'post happy path', (done) ->
configMock = sinon.stub config, 'get'
configMock.withArgs('authentication.token-endpoint').returns pingUrl
agent = createAgent (pass, fail) ->
return Promise.resolve().then () ->
pass({body: {"pass": true}});
agentMock = sinon.stub(httpAgent, 'getAgent');
agentMock.withArgs().returns(agent);
postman.postAs(someQuery, myclient, mysecret).then () ->
assert.equal(agent.typeValue, "form")
assert.equal(agent.data.someKey, "someValue")
assert.equal(agent.data['client_id'], myclient)
assert.equal(agent.data['client_secret'], mysecret)
done()
, () -> done('post failed')
it 'returns error from json response', (done) ->
agent = createAgent (pass, fail) ->
Promise.resolve().then () ->
pass({statusCode: 404, text: '{"error": "error message"}'})
agentMock = sinon.stub httpAgent, 'getAgent'
agentMock.withArgs().returns(agent)
postman.postAs(someQuery, myclient, mysecret).then () ->
done('post should have failed');
, (error) ->
err = error.message
assert.equal(err, 'HTTP 404: {"error": "error message"}');
assert.equal(agent.typeValue, "form");
assert.equal(agent.data.someKey, "someValue");
assert.equal(agent.data['client_id'], myclient);
assert.equal(agent.data['client_secret'], mysecret);
done()
restore = (mockFunc) ->
if (mockFunc && mockFunc.restore)
mockFunc.restore()
merge = (obj1, obj2) ->
obj3 = {}
for name, value of obj1
obj3[name] = value
for name, value of obj2
obj3[name] = value
obj3;
| true | assert = require("assert")
sinon = require("sinon")
headers = require("../../src/auth/headers")
postman = require("../../src/auth/postman")
exchange = require("../../src/auth/exchange")
https = require("https")
config = require("config")
httpAgent = require('../../src/http-agent')
[postmanMock, headerMock, configMock, httpsMock, agentMock] = []
createAgent = (andThen) ->
url: ''
typeValue: ''
data: {}
post: (url) ->
@url = url
@
type: (typeValue) ->
@typeValue = typeValue
@
send: (data) ->
@data = merge(data, this.data)
@
then: andThen
describe 'postman', () ->
pingUrl = "http://someurl/like/this"
someQuery = {someKey: "someValue"}
myclient = "myclient"
mysecret = "myPI:KEY:<KEY>END_PI"
afterEach ->
restore(httpsMock)
restore(postmanMock)
restore(headerMock)
restore(configMock)
restore(agentMock)
it 'post happy path', (done) ->
configMock = sinon.stub config, 'get'
configMock.withArgs('authentication.token-endpoint').returns pingUrl
agent = createAgent (pass, fail) ->
return Promise.resolve().then () ->
pass({body: {"pass": true}});
agentMock = sinon.stub(httpAgent, 'getAgent');
agentMock.withArgs().returns(agent);
postman.postAs(someQuery, myclient, mysecret).then () ->
assert.equal(agent.typeValue, "form")
assert.equal(agent.data.someKey, "someValue")
assert.equal(agent.data['client_id'], myclient)
assert.equal(agent.data['client_secret'], mysecret)
done()
, () -> done('post failed')
it 'returns error from json response', (done) ->
agent = createAgent (pass, fail) ->
Promise.resolve().then () ->
pass({statusCode: 404, text: '{"error": "error message"}'})
agentMock = sinon.stub httpAgent, 'getAgent'
agentMock.withArgs().returns(agent)
postman.postAs(someQuery, myclient, mysecret).then () ->
done('post should have failed');
, (error) ->
err = error.message
assert.equal(err, 'HTTP 404: {"error": "error message"}');
assert.equal(agent.typeValue, "form");
assert.equal(agent.data.someKey, "someValue");
assert.equal(agent.data['client_id'], myclient);
assert.equal(agent.data['client_secret'], mysecret);
done()
restore = (mockFunc) ->
if (mockFunc && mockFunc.restore)
mockFunc.restore()
merge = (obj1, obj2) ->
obj3 = {}
for name, value of obj1
obj3[name] = value
for name, value of obj2
obj3[name] = value
obj3;
|
[
{
"context": "secrets:\n clientId: 'QG5FVMYMDEKZHTGHRRI0EUJLLD5CRE3H315KSNAZTROQ3HOA'\n clientSecret: 'QGXGGD5KV3ROOMRK0BATF1XTTUPVE",
"end": 72,
"score": 0.7519492506980896,
"start": 24,
"tag": "KEY",
"value": "QG5FVMYMDEKZHTGHRRI0EUJLLD5CRE3H315KSNAZTROQ3HOA"
},
{
"context": ... | configuration/foursquare.cson | pandres95/foursquare-yummy | 1 | secrets:
clientId: 'QG5FVMYMDEKZHTGHRRI0EUJLLD5CRE3H315KSNAZTROQ3HOA'
clientSecret: 'QGXGGD5KV3ROOMRK0BATF1XTTUPVERO3FQBXWUZ4ZB2I1FAL'
redirectUrl: 'http://api.yummy.bool.xyz/auth/token'
foursquare:
mode: 'foursquare'
version: '20151101'
locale: 'en'
| 49933 | secrets:
clientId: '<KEY>'
clientSecret: '<KEY>'
redirectUrl: 'http://api.yummy.bool.xyz/auth/token'
foursquare:
mode: 'foursquare'
version: '20151101'
locale: 'en'
| true | secrets:
clientId: 'PI:KEY:<KEY>END_PI'
clientSecret: 'PI:KEY:<KEY>END_PI'
redirectUrl: 'http://api.yummy.bool.xyz/auth/token'
foursquare:
mode: 'foursquare'
version: '20151101'
locale: 'en'
|
[
{
"context": "ackage.email.Email\n# \t\t\t\t\tEmail.send\n# \t\t\t\t\t\tto: 'support@steedos.com'\n# \t\t\t\t\t\tfrom: Accounts.emailTemplates.from\n# \t\t\t",
"end": 842,
"score": 0.9999175667762756,
"start": 823,
"tag": "EMAIL",
"value": "support@steedos.com"
}
] | creator/packages/steedos-base/server/schedule/billing.coffee | yicone/steedos-platform | 42 | # Meteor.startup ->
# if Meteor.settings.cron and Meteor.settings.cron.billing
# schedule = require('node-schedule')
# # 定时执行同步
# rule = Meteor.settings.cron.billing
# schedule.scheduleJob rule, Meteor.bindEnvironment((->
# console.time 'billing'
# spaces = db.spaces.find({is_paid: true}, {fields: {_id: 1}})
# result = []
# spaces.forEach (s) ->
# try
# d = new Date
# accounting_month = moment(new Date(d.getFullYear(), d.getMonth(), 0).getTime()).format("YYYYMM")
# billingManager.caculate_by_accounting_month(accounting_month, s._id)
# catch err
# e = {}
# e._id = s._id
# e.name = s.name
# e.err = err
# result.push e
# if result.length > 0
# console.error result
# try
# Email = Package.email.Email
# Email.send
# to: 'support@steedos.com'
# from: Accounts.emailTemplates.from
# subject: 'billing settleup result'
# text: JSON.stringify('result': result)
# catch err
# console.error err
# console.timeEnd 'billing'
# ), ->
# console.log 'Failed to bind environment'
# ) | 103567 | # Meteor.startup ->
# if Meteor.settings.cron and Meteor.settings.cron.billing
# schedule = require('node-schedule')
# # 定时执行同步
# rule = Meteor.settings.cron.billing
# schedule.scheduleJob rule, Meteor.bindEnvironment((->
# console.time 'billing'
# spaces = db.spaces.find({is_paid: true}, {fields: {_id: 1}})
# result = []
# spaces.forEach (s) ->
# try
# d = new Date
# accounting_month = moment(new Date(d.getFullYear(), d.getMonth(), 0).getTime()).format("YYYYMM")
# billingManager.caculate_by_accounting_month(accounting_month, s._id)
# catch err
# e = {}
# e._id = s._id
# e.name = s.name
# e.err = err
# result.push e
# if result.length > 0
# console.error result
# try
# Email = Package.email.Email
# Email.send
# to: '<EMAIL>'
# from: Accounts.emailTemplates.from
# subject: 'billing settleup result'
# text: JSON.stringify('result': result)
# catch err
# console.error err
# console.timeEnd 'billing'
# ), ->
# console.log 'Failed to bind environment'
# ) | true | # Meteor.startup ->
# if Meteor.settings.cron and Meteor.settings.cron.billing
# schedule = require('node-schedule')
# # 定时执行同步
# rule = Meteor.settings.cron.billing
# schedule.scheduleJob rule, Meteor.bindEnvironment((->
# console.time 'billing'
# spaces = db.spaces.find({is_paid: true}, {fields: {_id: 1}})
# result = []
# spaces.forEach (s) ->
# try
# d = new Date
# accounting_month = moment(new Date(d.getFullYear(), d.getMonth(), 0).getTime()).format("YYYYMM")
# billingManager.caculate_by_accounting_month(accounting_month, s._id)
# catch err
# e = {}
# e._id = s._id
# e.name = s.name
# e.err = err
# result.push e
# if result.length > 0
# console.error result
# try
# Email = Package.email.Email
# Email.send
# to: 'PI:EMAIL:<EMAIL>END_PI'
# from: Accounts.emailTemplates.from
# subject: 'billing settleup result'
# text: JSON.stringify('result': result)
# catch err
# console.error err
# console.timeEnd 'billing'
# ), ->
# console.log 'Failed to bind environment'
# ) |
[
{
"context": "nts of files, using SparkMD5-Library\n#\n# @author Alexander Nickel <mr.alexander.nickel@gmail.com>\n# @param file ",
"end": 95,
"score": 0.9998810291290283,
"start": 79,
"tag": "NAME",
"value": "Alexander Nickel"
},
{
"context": " SparkMD5-Library\n#\n# @author ... | src/nickel.fingerprint.coffee | mralexandernickel/nickel.fingerprint | 0 | #
# Generating MD5-Fingerprints of files, using SparkMD5-Library
#
# @author Alexander Nickel <mr.alexander.nickel@gmail.com>
# @param file the file to generate checksum from...JS-File-Object
# @return promise
#
FingerprintProvider = ($q) ->
# SIZE OF SINGLE CHUNK
CHUNK_SIZE = 2097152
# DEFINE SLICE CONSTRUCTOR DEPENDING ON BROWSER
SLICE = Blob::slice or Blob::mozSlice or Blob::webkitSlice
# GENERATE THE FINGERPRINT
@generate = (file) ->
deferred = $q.defer()
loadNext = ->
fileReader = new FileReader()
fileReader.onload = frOnload
fileReader.onerror = frOnerror
start = currentChunk * CHUNK_SIZE
end = if start + CHUNK_SIZE >= file.size then file.size else start + CHUNK_SIZE
fileReader.readAsArrayBuffer SLICE.call(file, start, end)
chunks = Math.ceil(file.size / CHUNK_SIZE)
chunkPercentage = Math.ceil(100 / chunks)
currentChunk = 0
spark = new SparkMD5.ArrayBuffer()
frOnload = (e) ->
spark.append e.target.result
currentChunk++
if currentChunk < chunks
loadNext()
deferred.notify currentChunk * chunkPercentage
else
result =
filename: file.name
fingerprint: spark.end()
deferred.resolve result
frOnerror = ->
deferred.reject "ERROR"
loadNext()
return deferred.promise
# SIMPLY PREVENT COFFEESCRIPT FROM RETURNING @generate
return
# DEFINE ANGULAR MODULE
angular.module "nickel.fingerprint", []
.service "$fingerprint", FingerprintProvider
| 197884 | #
# Generating MD5-Fingerprints of files, using SparkMD5-Library
#
# @author <NAME> <<EMAIL>>
# @param file the file to generate checksum from...JS-File-Object
# @return promise
#
FingerprintProvider = ($q) ->
# SIZE OF SINGLE CHUNK
CHUNK_SIZE = 2097152
# DEFINE SLICE CONSTRUCTOR DEPENDING ON BROWSER
SLICE = Blob::slice or Blob::mozSlice or Blob::webkitSlice
# GENERATE THE FINGERPRINT
@generate = (file) ->
deferred = $q.defer()
loadNext = ->
fileReader = new FileReader()
fileReader.onload = frOnload
fileReader.onerror = frOnerror
start = currentChunk * CHUNK_SIZE
end = if start + CHUNK_SIZE >= file.size then file.size else start + CHUNK_SIZE
fileReader.readAsArrayBuffer SLICE.call(file, start, end)
chunks = Math.ceil(file.size / CHUNK_SIZE)
chunkPercentage = Math.ceil(100 / chunks)
currentChunk = 0
spark = new SparkMD5.ArrayBuffer()
frOnload = (e) ->
spark.append e.target.result
currentChunk++
if currentChunk < chunks
loadNext()
deferred.notify currentChunk * chunkPercentage
else
result =
filename: file.name
fingerprint: spark.end()
deferred.resolve result
frOnerror = ->
deferred.reject "ERROR"
loadNext()
return deferred.promise
# SIMPLY PREVENT COFFEESCRIPT FROM RETURNING @generate
return
# DEFINE ANGULAR MODULE
angular.module "nickel.fingerprint", []
.service "$fingerprint", FingerprintProvider
| true | #
# Generating MD5-Fingerprints of files, using SparkMD5-Library
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @param file the file to generate checksum from...JS-File-Object
# @return promise
#
FingerprintProvider = ($q) ->
# SIZE OF SINGLE CHUNK
CHUNK_SIZE = 2097152
# DEFINE SLICE CONSTRUCTOR DEPENDING ON BROWSER
SLICE = Blob::slice or Blob::mozSlice or Blob::webkitSlice
# GENERATE THE FINGERPRINT
@generate = (file) ->
deferred = $q.defer()
loadNext = ->
fileReader = new FileReader()
fileReader.onload = frOnload
fileReader.onerror = frOnerror
start = currentChunk * CHUNK_SIZE
end = if start + CHUNK_SIZE >= file.size then file.size else start + CHUNK_SIZE
fileReader.readAsArrayBuffer SLICE.call(file, start, end)
chunks = Math.ceil(file.size / CHUNK_SIZE)
chunkPercentage = Math.ceil(100 / chunks)
currentChunk = 0
spark = new SparkMD5.ArrayBuffer()
frOnload = (e) ->
spark.append e.target.result
currentChunk++
if currentChunk < chunks
loadNext()
deferred.notify currentChunk * chunkPercentage
else
result =
filename: file.name
fingerprint: spark.end()
deferred.resolve result
frOnerror = ->
deferred.reject "ERROR"
loadNext()
return deferred.promise
# SIMPLY PREVENT COFFEESCRIPT FROM RETURNING @generate
return
# DEFINE ANGULAR MODULE
angular.module "nickel.fingerprint", []
.service "$fingerprint", FingerprintProvider
|
[
{
"context": " ->\n beforeEach (done) ->\n room.user.say 'Alisson', 'hubot teach me'\n setTimeout done, 100\n\n ",
"end": 756,
"score": 0.999729573726654,
"start": 749,
"tag": "NAME",
"value": "Alisson"
},
{
"context": ">\n expect(room.messages).to.eql [\n [... | test/minutos-de-sabedoria-test.coffee | alissonbrunosa/minutos-de-sabedoria | 1 | Helper = require('hubot-test-helper')
expect = require('chai').expect
helper = new Helper('../src/minutos-de-sabedoria.coffee')
nock = require('nock')
MESSAGE = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.'
describe 'minutos-de-sabedoria', ->
room = null
beforeEach ->
room = helper.createRoom()
do nock.disableNetConnect
nock('http://aecs.org.br',{
reqheaders: {
'Accept': 'application/json'
}
})
.get('/minutos/ajax/minutos.json')
.reply 200, [{ txt: MESSAGE }]
afterEach ->
room.destroy()
nock.cleanAll()
context 'user asks hubot for a good message', ->
beforeEach (done) ->
room.user.say 'Alisson', 'hubot teach me'
setTimeout done, 100
it 'should receive a good message', ->
expect(room.messages).to.eql [
[ 'Alisson', 'hubot teach me' ]
[ 'hubot', "*#{MESSAGE}*" ]
]
| 136513 | Helper = require('hubot-test-helper')
expect = require('chai').expect
helper = new Helper('../src/minutos-de-sabedoria.coffee')
nock = require('nock')
MESSAGE = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.'
describe 'minutos-de-sabedoria', ->
room = null
beforeEach ->
room = helper.createRoom()
do nock.disableNetConnect
nock('http://aecs.org.br',{
reqheaders: {
'Accept': 'application/json'
}
})
.get('/minutos/ajax/minutos.json')
.reply 200, [{ txt: MESSAGE }]
afterEach ->
room.destroy()
nock.cleanAll()
context 'user asks hubot for a good message', ->
beforeEach (done) ->
room.user.say '<NAME>', 'hubot teach me'
setTimeout done, 100
it 'should receive a good message', ->
expect(room.messages).to.eql [
[ '<NAME>', 'hubot teach me' ]
[ 'hubot', "*#{MESSAGE}*" ]
]
| true | Helper = require('hubot-test-helper')
expect = require('chai').expect
helper = new Helper('../src/minutos-de-sabedoria.coffee')
nock = require('nock')
MESSAGE = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.'
describe 'minutos-de-sabedoria', ->
room = null
beforeEach ->
room = helper.createRoom()
do nock.disableNetConnect
nock('http://aecs.org.br',{
reqheaders: {
'Accept': 'application/json'
}
})
.get('/minutos/ajax/minutos.json')
.reply 200, [{ txt: MESSAGE }]
afterEach ->
room.destroy()
nock.cleanAll()
context 'user asks hubot for a good message', ->
beforeEach (done) ->
room.user.say 'PI:NAME:<NAME>END_PI', 'hubot teach me'
setTimeout done, 100
it 'should receive a good message', ->
expect(room.messages).to.eql [
[ 'PI:NAME:<NAME>END_PI', 'hubot teach me' ]
[ 'hubot', "*#{MESSAGE}*" ]
]
|
[
{
"context": "redential-recovery/forgot-password/:token', token: '@token',\n update: {method: 'PUT'}\n]",
"end": 184,
"score": 0.6669349670410156,
"start": 177,
"tag": "PASSWORD",
"value": "'@token"
}
] | html-app/app/scripts/services/forgot-password-request.coffee | flyingmachine/gratefulplace2 | 28 | 'use strict';
angular.module("gratefulplaceApp").factory "ForgotPasswordRequest", ["$resource", ($resource) ->
$resource '/credential-recovery/forgot-password/:token', token: '@token',
update: {method: 'PUT'}
] | 30459 | 'use strict';
angular.module("gratefulplaceApp").factory "ForgotPasswordRequest", ["$resource", ($resource) ->
$resource '/credential-recovery/forgot-password/:token', token: <PASSWORD>',
update: {method: 'PUT'}
] | true | 'use strict';
angular.module("gratefulplaceApp").factory "ForgotPasswordRequest", ["$resource", ($resource) ->
$resource '/credential-recovery/forgot-password/:token', token: PI:PASSWORD:<PASSWORD>END_PI',
update: {method: 'PUT'}
] |
[
{
"context": "st 'hex'\n\n user.salt = salt\n user.password = hash\n\n Users.findOne(email: user.email).then (u) ->",
"end": 844,
"score": 0.9895761013031006,
"start": 840,
"tag": "PASSWORD",
"value": "hash"
}
] | server/src/controllers/users_controller.coffee | codyseibert/jotit | 1 | models = require '../models/models'
Users = models.Users
Pets = models.Pets
ObjectId = require('mongoose').Types.ObjectId
crypto = require 'crypto'
uuid = require 'node-uuid'
config = require '../config/config'
Joi = require 'joi'
_ = require 'lodash'
log4js = require 'log4js'
logger = log4js.getLogger 'app'
SALT_ROUNDS = 10
createSalt = ->
Math.round((new Date().valueOf() * Math.random())) + ''
module.exports = do ->
show: (req, res) ->
Users.findById(req.params.id).then (user) ->
if not user?
res.status 404
res.send 'user not found'
else
res.status 200
res.send user
post: (req, res) ->
user = req.body
salt = createSalt()
hash = crypto
.createHmac 'sha1', salt
.update user.password
.digest 'hex'
user.salt = salt
user.password = hash
Users.findOne(email: user.email).then (u) ->
if u?
logger.info "a user tried to create a user with an email that already existed email=#{user.email}"
res.status 400
res.send 'user already exists with this email'
else
Users.create(user).then (obj) ->
if not obj?
logger.error "there was an error creating a new user email=#{email}"
res.status 400
res.send 'there was an error creating the user'
else
res.status 200
res.send user
put: (req, res) ->
Users.update(_id: new ObjectId(req.params.id), req.body).then (obj) ->
res.status 200
res.send obj
| 182499 | models = require '../models/models'
Users = models.Users
Pets = models.Pets
ObjectId = require('mongoose').Types.ObjectId
crypto = require 'crypto'
uuid = require 'node-uuid'
config = require '../config/config'
Joi = require 'joi'
_ = require 'lodash'
log4js = require 'log4js'
logger = log4js.getLogger 'app'
SALT_ROUNDS = 10
createSalt = ->
Math.round((new Date().valueOf() * Math.random())) + ''
module.exports = do ->
show: (req, res) ->
Users.findById(req.params.id).then (user) ->
if not user?
res.status 404
res.send 'user not found'
else
res.status 200
res.send user
post: (req, res) ->
user = req.body
salt = createSalt()
hash = crypto
.createHmac 'sha1', salt
.update user.password
.digest 'hex'
user.salt = salt
user.password = <PASSWORD>
Users.findOne(email: user.email).then (u) ->
if u?
logger.info "a user tried to create a user with an email that already existed email=#{user.email}"
res.status 400
res.send 'user already exists with this email'
else
Users.create(user).then (obj) ->
if not obj?
logger.error "there was an error creating a new user email=#{email}"
res.status 400
res.send 'there was an error creating the user'
else
res.status 200
res.send user
put: (req, res) ->
Users.update(_id: new ObjectId(req.params.id), req.body).then (obj) ->
res.status 200
res.send obj
| true | models = require '../models/models'
Users = models.Users
Pets = models.Pets
ObjectId = require('mongoose').Types.ObjectId
crypto = require 'crypto'
uuid = require 'node-uuid'
config = require '../config/config'
Joi = require 'joi'
_ = require 'lodash'
log4js = require 'log4js'
logger = log4js.getLogger 'app'
SALT_ROUNDS = 10
createSalt = ->
Math.round((new Date().valueOf() * Math.random())) + ''
module.exports = do ->
show: (req, res) ->
Users.findById(req.params.id).then (user) ->
if not user?
res.status 404
res.send 'user not found'
else
res.status 200
res.send user
post: (req, res) ->
user = req.body
salt = createSalt()
hash = crypto
.createHmac 'sha1', salt
.update user.password
.digest 'hex'
user.salt = salt
user.password = PI:PASSWORD:<PASSWORD>END_PI
Users.findOne(email: user.email).then (u) ->
if u?
logger.info "a user tried to create a user with an email that already existed email=#{user.email}"
res.status 400
res.send 'user already exists with this email'
else
Users.create(user).then (obj) ->
if not obj?
logger.error "there was an error creating a new user email=#{email}"
res.status 400
res.send 'there was an error creating the user'
else
res.status 200
res.send user
put: (req, res) ->
Users.update(_id: new ObjectId(req.params.id), req.body).then (obj) ->
res.status 200
res.send obj
|
[
{
"context": "###\n * grunt-tankipas\n * https://github.com/Leny/grunt-tankipas\n *\n * Copyright (c) 2014 Leny\n * L",
"end": 48,
"score": 0.9992353916168213,
"start": 44,
"tag": "USERNAME",
"value": "Leny"
},
{
"context": "b.com/Leny/grunt-tankipas\n *\n * Copyright (c) 2014 Leny\n... | src/tankipas.coffee | leny/grunt-tankipas | 0 | ###
* grunt-tankipas
* https://github.com/Leny/grunt-tankipas
*
* Copyright (c) 2014 Leny
* Licensed under the MIT license.
###
"use strict"
tankipas = require "tankipas"
chalk = require "chalk"
error = chalk.bold.red
( spinner = require "simple-spinner" )
.change_sequence [
"◓"
"◑"
"◒"
"◐"
]
module.exports = ( grunt ) ->
tankipasTask = ->
fNext = @async()
oOptions = @options
system: null
gap: 120
user: null
branch: null
commit: null
raw: no
spinner.start 50
tankipas process.cwd(), oOptions, ( oError, iTotal ) ->
spinner.stop()
if oError
grunt.log.error oError
fNext no
if oOptions.raw
grunt.log.writeln iTotal
else
iTotal /= 1000
iMinutes = if ( iMinutes = Math.floor( iTotal / 60 ) ) > 60 then ( iMinutes % 60 ) else iMinutes
iHours = Math.floor iTotal / 3600
sUserString = if oOptions.user then " (for #{ chalk.cyan( oOptions.user ) })" else ""
grunt.log.writeln "Time spent on project#{ sUserString }: ±#{ chalk.yellow( iHours ) } hours & #{ chalk.yellow( iMinutes ) } minutes."
fNext()
if grunt.config.data.tankipas
grunt.registerMultiTask "tankipas", "Compute approximate development time spent on a project, using logs from version control system.", tankipasTask
else
grunt.registerTask "tankipas", "Compute approximate development time spent on a project, using logs from version control system.", tankipasTask
| 150968 | ###
* grunt-tankipas
* https://github.com/Leny/grunt-tankipas
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
"use strict"
tankipas = require "tankipas"
chalk = require "chalk"
error = chalk.bold.red
( spinner = require "simple-spinner" )
.change_sequence [
"◓"
"◑"
"◒"
"◐"
]
module.exports = ( grunt ) ->
tankipasTask = ->
fNext = @async()
oOptions = @options
system: null
gap: 120
user: null
branch: null
commit: null
raw: no
spinner.start 50
tankipas process.cwd(), oOptions, ( oError, iTotal ) ->
spinner.stop()
if oError
grunt.log.error oError
fNext no
if oOptions.raw
grunt.log.writeln iTotal
else
iTotal /= 1000
iMinutes = if ( iMinutes = Math.floor( iTotal / 60 ) ) > 60 then ( iMinutes % 60 ) else iMinutes
iHours = Math.floor iTotal / 3600
sUserString = if oOptions.user then " (for #{ chalk.cyan( oOptions.user ) })" else ""
grunt.log.writeln "Time spent on project#{ sUserString }: ±#{ chalk.yellow( iHours ) } hours & #{ chalk.yellow( iMinutes ) } minutes."
fNext()
if grunt.config.data.tankipas
grunt.registerMultiTask "tankipas", "Compute approximate development time spent on a project, using logs from version control system.", tankipasTask
else
grunt.registerTask "tankipas", "Compute approximate development time spent on a project, using logs from version control system.", tankipasTask
| true | ###
* grunt-tankipas
* https://github.com/Leny/grunt-tankipas
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
"use strict"
tankipas = require "tankipas"
chalk = require "chalk"
error = chalk.bold.red
( spinner = require "simple-spinner" )
.change_sequence [
"◓"
"◑"
"◒"
"◐"
]
module.exports = ( grunt ) ->
tankipasTask = ->
fNext = @async()
oOptions = @options
system: null
gap: 120
user: null
branch: null
commit: null
raw: no
spinner.start 50
tankipas process.cwd(), oOptions, ( oError, iTotal ) ->
spinner.stop()
if oError
grunt.log.error oError
fNext no
if oOptions.raw
grunt.log.writeln iTotal
else
iTotal /= 1000
iMinutes = if ( iMinutes = Math.floor( iTotal / 60 ) ) > 60 then ( iMinutes % 60 ) else iMinutes
iHours = Math.floor iTotal / 3600
sUserString = if oOptions.user then " (for #{ chalk.cyan( oOptions.user ) })" else ""
grunt.log.writeln "Time spent on project#{ sUserString }: ±#{ chalk.yellow( iHours ) } hours & #{ chalk.yellow( iMinutes ) } minutes."
fNext()
if grunt.config.data.tankipas
grunt.registerMultiTask "tankipas", "Compute approximate development time spent on a project, using logs from version control system.", tankipasTask
else
grunt.registerTask "tankipas", "Compute approximate development time spent on a project, using logs from version control system.", tankipasTask
|
[
{
"context": "type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}\n {type: 'tr",
"end": 33983,
"score": 0.8858046531677246,
"start": 33978,
"tag": "NAME",
"value": "zwölf"
},
{
"context": " table =\n id: '001'\n name: 'a... | test/mocha/element/table.coffee | alinex/node-report | 1 | ### eslint-env node, mocha ###
test = require '../test'
async = require 'async'
Report = require '../../../src'
before (cb) -> Report.init cb
Table = require 'alinex-table'
describe "table", ->
describe "examples", ->
it "should make two tables", (cb) ->
test.markdown 'table/align', """
| Left-aligned | Center-aligned | Right-aligned |
| :--- | :---: | ---: |
| 1 | one | eins |
| 2 | two | zwei |
| 3 | three | drei |
| 4 | four | vier |
| 5 | five | fünf |
""", null, true, cb
describe "api", ->
describe "from alinex-table", ->
table = null
beforeEach ->
table = new Table [
['ID', 'English', 'German']
[1, 'one', 'eins']
[2, 'two', '_zwei_']
[12, 'twelve', 'zwölf']
]
it "should create table", (cb) ->
# create report
report = new Report()
report.table table
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: '_zwei_'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should align column", (cb) ->
table.style null, 'ID', {align: 'right'}
report = new Report()
report.table table
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: '_zwei_'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should allow markdown within", (cb) ->
table.style null, 'ID', {align: 'right'}
report = new Report()
report.table table, true
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'emphasis'}, {type: 'text', content: 'zwei'}, {type: 'emphasis'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
describe "from list map", ->
table = [
{id: 1, en: 'one', de: 'eins'}
{id: 2, en: 'two', de: 'zwei'}
{id: 12, en: 'twelve', de: 'zwölf'}
]
it "should create table", (cb) ->
report = new Report()
report.table table
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'id'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'en'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'de'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column map-map", (cb) ->
columns =
id:
title: 'ID'
align: 'right'
de:
title: 'German'
en:
title: 'English'
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column list array", (cb) ->
columns = [
['id', 'en']
['ID', 'English']
]
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column list", (cb) ->
columns = ['ID', 'English', 'German']
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column map", (cb) ->
columns =
id: 'ID'
en: 'English'
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with sort map", (cb) ->
columns =
id:
title: 'ID'
align: 'right'
de:
title: 'German'
en:
title: 'English'
sort = {de: 'desc'}
report = new Report()
report.table table, columns, sort
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with sort list", (cb) ->
columns =
id:
title: 'ID'
align: 'right'
de:
title: 'German'
en:
title: 'English'
sort = ['de']
report = new Report()
report.table table, columns, sort
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
      # The sort argument may be a single column key, which sorts that
      # column ascending (equivalent to ['de'] / {de: 'asc'}).
      it "should create table with sort key", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = 'de'
        report = new Report()
        report.table table, columns, sort
        # check it
        # Expected token stream: rows ordered by the German column
        # ascending (eins, zwei, zwölf).
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
    # Tables given as a plain list of row arrays: the first row is
    # consumed as the heading, the remaining rows become the body.
    describe "from list array", ->
      table = null
      beforeEach ->
        table = [
          [1, 'one', 'eins']
          [2, 'two', 'zwei']
          [3, 'three', 'drei']
          [12, 'twelve', 'zwölf']
        ]
      # Without explicit columns the first data row serves as heading.
      it "should create table", (cb) ->
        report = new Report()
        report.table table
        # check it
        # NOTE(review): the heading cell keeps the raw number 1 while
        # body cells are stringified ('2', '3', ...) — presumably
        # intentional; confirm against the report.table implementation.
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 1}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'one'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'eins'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # An explicit column title list replaces the implicit heading row,
      # so all four data rows appear in the body.
      it "should create table with column list", (cb) ->
        columns = ['ID', 'English', 'German']
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Columns as a pair of lists: first the numeric column indices to
      # keep, then the titles — column 2 (German) is dropped.
      it "should create table with column list-array", (cb) ->
        columns = [
          [0, 1]
          ['ID', 'English']
        ]
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Columns as a list of setting maps (CoffeeScript implicit
      # objects separated by lone commas): per-column title and align.
      it "should create table with column list-map", (cb) ->
        columns = [
          title: 'ID'
          align: 'right'
        ,
          title: 'English'
        ,
          title: 'German'
        ]
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # '', null and undefined cells all render as an empty text token.
      it "should create table with empty fields", (cb) ->
        report = new Report()
        report.table [
          [1, 'one', 'eins']
          [2, '', 'zwei']
          [3, null, 'drei']
          [12, undefined, 'zwölf']
        ]
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 1}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'one'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'eins'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
    # A single plain object becomes a two-column Name/Value table with
    # one body row per key.
    describe "from map", ->
      table = null
      beforeEach ->
        table =
          id: '001'
          name: 'alex'
          position: 'developer'
      # Default headings are 'Name' and 'Value'.
      it "should create table", (cb) ->
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Name'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Value'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'id'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '001'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'alex'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'position'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # A two-element column list overrides the default headings.
      it "should create table with column list", (cb) ->
        columns = ['NAME', 'VALUE']
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'NAME'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'VALUE'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'id'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '001'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'alex'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'position'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Array values are joined with ', '; nested objects are flattened
      # into dotted keys (data.type, data.lang).
      it "should create table with list and object content", (cb) ->
        report = new Report()
        report.table
          number: [1..8]
          name: 'alex'
          data:
            type: 'developer'
            lang: 'javascript'
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Name'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Value'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'number'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1, 2, 3, 4, 5, 6, 7, 8'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'alex'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'data.type'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'data.lang'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'javascript'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
### eslint-env node, mocha ###
test = require '../test'
async = require 'async'
Report = require '../../../src'
before (cb) -> Report.init cb
Table = require 'alinex-table'
describe "table", ->
  # Round-trip example: render the markdown fixture 'table/align' and
  # compare against the expected pipe-table output (heredoc below).
  describe "examples", ->
    it "should make two tables", (cb) ->
      test.markdown 'table/align', """
        | Left-aligned | Center-aligned | Right-aligned |
        | :--- | :---: | ---: |
        | 1 | one | eins |
        | 2 | two | zwei |
        | 3 | three | drei |
        | 4 | four | vier |
        | 5 | five | fünf |
        """, null, true, cb
describe "api", ->
    # Tables given as an alinex-table instance; the instance's first row
    # is the heading and per-column styles (align) are honored.
    describe "from alinex-table", ->
      table = null
      beforeEach ->
        table = new Table [
          ['ID', 'English', 'German']
          [1, 'one', 'eins']
          [2, 'two', '_zwei_']
          [12, 'twelve', 'zwölf']
        ]
      it "should create table", (cb) ->
        # create report
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '_zwei_'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # table.style on the 'ID' column should surface as align: 'right'
      # on the corresponding th token.
      it "should align column", (cb) ->
        table.style null, 'ID', {align: 'right'}
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '_zwei_'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # With the second argument true, cell contents are parsed as
      # markdown: '_zwei_' becomes emphasis tokens around 'zwei'.
      it "should allow markdown within", (cb) ->
        table.style null, 'ID', {align: 'right'}
        report = new Report()
        report.table table, true
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'emphasis'}, {type: 'text', content: 'zwei'}, {type: 'emphasis'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
    # Tables given as a list of record objects: keys become the heading,
    # and column/sort settings select, retitle and order the output.
    describe "from list map", ->
      table = [
        {id: 1, en: 'one', de: 'eins'}
        {id: 2, en: 'two', de: 'zwei'}
        {id: 12, en: 'twelve', de: 'zwölf'}
      ]
      # Without settings the raw keys (id, en, de) are the headings.
      it "should create table", (cb) ->
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'id'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'en'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'de'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Map of record-key -> column settings; the map's key order also
      # defines the column order (id, de, en here).
      it "should create table with column map-map", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Columns as a pair of lists: record keys to keep, then titles —
      # the 'de' column is dropped.
      it "should create table with column list array", (cb) ->
        columns = [
          ['id', 'en']
          ['ID', 'English']
        ]
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # A flat title list maps onto the record keys in their natural
      # order.
      it "should create table with column list", (cb) ->
        columns = ['ID', 'English', 'German']
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Map of record-key -> title: selects and retitles only the
      # listed columns.
      it "should create table with column map", (cb) ->
        columns =
          id: 'ID'
          en: 'English'
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Sort given as a map of column -> direction; 'desc' reverses the
      # row order (zwölf, zwei, eins).
      it "should create table with sort map", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = {de: 'desc'}
        report = new Report()
        report.table table, columns, sort
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Sort given as a list of column keys; default direction is
      # ascending (eins, zwei, zwölf).
      it "should create table with sort list", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = ['de']
        report = new Report()
        report.table table, columns, sort
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # Sort given as a single key string — shorthand for ['de'].
      it "should create table with sort key", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = 'de'
        report = new Report()
        report.table table, columns, sort
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
    # Tables given as an array of row arrays: the expectations below show the
    # first data row being used as the heading when no column spec is given.
    describe "from list array", ->
      table = null
      beforeEach ->
        table = [
          [1, 'one', 'eins']
          [2, 'two', 'zwei']
          [3, 'three', 'drei']
          [12, 'twelve', 'zwölf']
        ]
      # no column spec: first row becomes the header (note the header cell
      # keeps its raw number 1, while body cells are stringified)
      it "should create table", (cb) ->
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 1}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'one'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'eins'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # a flat list of titles replaces the header; all data rows are kept
      it "should create table with column list", (cb) ->
        columns = ['ID', 'English', 'German']
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # pair of lists: column indexes to keep plus their new titles —
      # only columns 0 and 1 appear in the output
      it "should create table with column list-array", (cb) ->
        columns = [
          [0, 1]
          ['ID', 'English']
        ]
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # list of column-definition objects with optional title/align per column
      it "should create table with column list-map", (cb) ->
        columns = [
          title: 'ID'
          align: 'right'
        ,
          title: 'English'
        ,
          title: 'German'
        ]
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # '', null and undefined cells all render as an empty text token
      it "should create table with empty fields", (cb) ->
        report = new Report()
        report.table [
          [1, 'one', 'eins']
          [2, '', 'zwei']
          [3, null, 'drei']
          [12, undefined, 'zwölf']
        ]
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 1}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'one'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'eins'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '<NAME>'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
    # A single plain object is rendered as a two-column key/value table with
    # the default headers 'Name' and 'Value'.
    describe "from map", ->
      table = null
      beforeEach ->
        table =
          id: '001'
          name: '<NAME>'
          position: 'developer'
      it "should create table", (cb) ->
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Name'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Value'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'id'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '001'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '<NAME>'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'position'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # a two-element list overrides the default key/value header titles
      it "should create table with column list", (cb) ->
        columns = ['NAME', 'VALUE']
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'NAME'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'VALUE'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'id'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '001'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '<NAME>'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'position'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # array values are joined with ', '; nested objects are flattened
      # into dotted keys (data.type, data.lang)
      it "should create table with list and object content", (cb) ->
        report = new Report()
        report.table
          number: [1..8]
          name: '<NAME>'
          data:
            type: 'developer'
            lang: 'javascript'
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Name'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Value'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'number'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1, 2, 3, 4, 5, 6, 7, 8'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '<NAME>'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'data.type'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'data.lang'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'javascript'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
### eslint-env node, mocha ###
# Test helpers and the module under test. All requires are grouped here so
# none is stranded between hooks (the Table require previously sat below
# the `before` hook; `require` is synchronous, so reordering is safe).
test = require '../test'
async = require 'async'
Report = require '../../../src'
Table = require 'alinex-table'

# Initialize the report engine once before any test in this file runs.
before (cb) -> Report.init cb
describe "table", ->
describe "examples", ->
    # Round-trip a markdown table with per-column alignment markers
    # (:--- left, :---: center, ---: right) through the 'table/align' fixture.
    it "should make two tables", (cb) ->
      test.markdown 'table/align', """
      | Left-aligned | Center-aligned | Right-aligned |
      | :--- | :---: | ---: |
      | 1 | one | eins |
      | 2 | two | zwei |
      | 3 | three | drei |
      | 4 | four | vier |
      | 5 | five | fünf |
      """, null, true, cb
describe "api", ->
    # Tables given as an alinex-table instance: the first row is the heading
    # and per-column styles (e.g. align) are read from the instance.
    describe "from alinex-table", ->
      table = null
      beforeEach ->
        table = new Table [
          ['ID', 'English', 'German']
          [1, 'one', 'eins']
          [2, 'two', '_zwei_']
          [12, 'twelve', 'zwölf']
        ]
      it "should create table", (cb) ->
        # create report
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          # markdown flag is off, so '_zwei_' stays literal text
          {type: 'td', nesting: 1}, {type: 'text', content: '_zwei_'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # a column align style set on the Table instance ends up on the th token
      it "should align column", (cb) ->
        table.style null, 'ID', {align: 'right'}
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: '_zwei_'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # with the markdown flag true, '_zwei_' is parsed into emphasis tokens
      it "should allow markdown within", (cb) ->
        table.style null, 'ID', {align: 'right'}
        report = new Report()
        report.table table, true
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'emphasis'}, {type: 'text', content: 'zwei'}, {type: 'emphasis'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
    # Tables given as an array of record objects: keys provide the heading,
    # column specs select/rename/align columns, and a sort spec orders rows.
    describe "from list map", ->
      table = [
        {id: 1, en: 'one', de: 'eins'}
        {id: 2, en: 'two', de: 'zwei'}
        {id: 12, en: 'twelve', de: 'zwölf'}
      ]
      # no spec: object keys become the header titles
      it "should create table", (cb) ->
        report = new Report()
        report.table table
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'id'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'en'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'de'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # map of key -> {title, align}; spec order defines the column order
      it "should create table with column map-map", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # pair of lists: record keys to keep plus their display titles
      it "should create table with column list array", (cb) ->
        columns = [
          ['id', 'en']
          ['ID', 'English']
        ]
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # flat list of titles mapped onto the keys in record order
      it "should create table with column list", (cb) ->
        columns = ['ID', 'English', 'German']
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # map of key -> title: selects and renames only the listed columns
      it "should create table with column map", (cb) ->
        columns =
          id: 'ID'
          en: 'English'
        report = new Report()
        report.table table, columns
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # map form of sort: {column: 'desc'} — rows come back zwölf, zwei, eins
      it "should create table with sort map", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = {de: 'desc'}
        report = new Report()
        report.table table, columns, sort
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # list form of sort: ['de'] sorts ascending by that column
      it "should create table with sort list", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = ['de']
        report = new Report()
        report.table table, columns, sort
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
      # string form of sort: same ascending result as the single-entry list
      it "should create table with sort key", (cb) ->
        columns =
          id:
            title: 'ID'
            align: 'right'
          de:
            title: 'German'
          en:
            title: 'English'
        sort = 'de'
        report = new Report()
        report.table table, columns, sort
        # check it
        test.report null, report, [
          {type: 'document', nesting: 1}
          {type: 'table', nesting: 1}
          {type: 'thead', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
          {type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'thead', nesting: -1}
          {type: 'tbody', nesting: 1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tr', nesting: 1}
          {type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
          {type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
          {type: 'tr', nesting: -1}
          {type: 'tbody', nesting: -1}
          {type: 'table', nesting: -1}
          {type: 'document', nesting: -1}
        ], null, cb
describe "from list array", ->
table = null
beforeEach ->
table = [
[1, 'one', 'eins']
[2, 'two', 'zwei']
[3, 'three', 'drei']
[12, 'twelve', 'zwölf']
]
it "should create table", (cb) ->
report = new Report()
report.table table
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 1}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'one'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'eins'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column list", (cb) ->
columns = ['ID', 'English', 'German']
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column list-array", (cb) ->
columns = [
[0, 1]
['ID', 'English']
]
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column list-map", (cb) ->
columns = [
title: 'ID'
align: 'right'
,
title: 'English'
,
title: 'German'
]
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'right'}, {type: 'text', content: 'ID'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'English'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'German'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '1'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'one'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'eins'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'two'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'three'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'twelve'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwölf'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with empty fields", (cb) ->
report = new Report()
report.table [
[1, 'one', 'eins']
[2, '', 'zwei']
[3, null, 'drei']
[12, undefined, 'zwölf']
]
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 1}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'one'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'eins'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '2'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'zwei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '3'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'drei'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: '12'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: ''}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'PI:NAME:<NAME>END_PI'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
describe "from map", ->
table = null
beforeEach ->
table =
id: '001'
name: 'PI:NAME:<NAME>END_PI'
position: 'developer'
it "should create table", (cb) ->
report = new Report()
report.table table
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Name'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Value'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'id'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: '001'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'PI:NAME:<NAME>END_PI'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'position'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with column list", (cb) ->
columns = ['NAME', 'VALUE']
report = new Report()
report.table table, columns
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'NAME'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'VALUE'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'id'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: '001'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'PI:NAME:<NAME>END_PI'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'position'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
it "should create table with list and object content", (cb) ->
report = new Report()
report.table
number: [1..8]
name: 'PI:NAME:<NAME>END_PI'
data:
type: 'developer'
lang: 'javascript'
# check it
test.report null, report, [
{type: 'document', nesting: 1}
{type: 'table', nesting: 1}
{type: 'thead', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Name'}, {type: 'th', nesting: -1}
{type: 'th', nesting: 1, align: 'left'}, {type: 'text', content: 'Value'}, {type: 'th', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'thead', nesting: -1}
{type: 'tbody', nesting: 1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'number'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: '1, 2, 3, 4, 5, 6, 7, 8'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'name'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'PI:NAME:<NAME>END_PI'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'data.type'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'developer'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tr', nesting: 1}
{type: 'td', nesting: 1}, {type: 'text', content: 'data.lang'}, {type: 'td', nesting: -1}
{type: 'td', nesting: 1}, {type: 'text', content: 'javascript'}, {type: 'td', nesting: -1}
{type: 'tr', nesting: -1}
{type: 'tbody', nesting: -1}
{type: 'table', nesting: -1}
{type: 'document', nesting: -1}
], null, cb
|
[
{
"context": "key: 'typographic-quotes'\n\n# http://asciidoctor.org/docs/user-manual/#curv",
"end": 24,
"score": 0.9789977073669434,
"start": 6,
"tag": "KEY",
"value": "typographic-quotes"
}
] | grammars/repositories/inlines/typographic-quotes-grammar.cson | andrewcarver/atom-language-asciidoc | 45 | key: 'typographic-quotes'
# http://asciidoctor.org/docs/user-manual/#curved
patterns: [
# Matches typographic double quotes.
#
# Examples:
#
# "`double-quoted`"
# [bar]"`double-quoted`"
#
name: 'markup.italic.quote.typographic-quotes.asciidoc'
comment: 'double-quoted'
match: '(?:^|(?<!\\p{Word}|;|:))(\\[([^\\]]+?)\\])?(\"\`)(\\S|\\S.*?\\S)(\`\")(?!\\p{Word})'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
,
# Matches typographic single quotes.
#
# Examples:
#
# '`single-quoted`'
# [bar]'`single-quoted`'
#
name: 'markup.italic.quote.typographic-quotes.asciidoc'
comment: 'single-quoted'
match: '(?:^|(?<!\\p{Word}|;|:))(\\[([^\\]]+?)\\])?(\'\`)(\\S|\\S.*?\\S)(\`\')(?!\\p{Word})'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
]
| 117113 | key: '<KEY>'
# http://asciidoctor.org/docs/user-manual/#curved
patterns: [
# Matches typographic double quotes.
#
# Examples:
#
# "`double-quoted`"
# [bar]"`double-quoted`"
#
name: 'markup.italic.quote.typographic-quotes.asciidoc'
comment: 'double-quoted'
match: '(?:^|(?<!\\p{Word}|;|:))(\\[([^\\]]+?)\\])?(\"\`)(\\S|\\S.*?\\S)(\`\")(?!\\p{Word})'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
,
# Matches typographic single quotes.
#
# Examples:
#
# '`single-quoted`'
# [bar]'`single-quoted`'
#
name: 'markup.italic.quote.typographic-quotes.asciidoc'
comment: 'single-quoted'
match: '(?:^|(?<!\\p{Word}|;|:))(\\[([^\\]]+?)\\])?(\'\`)(\\S|\\S.*?\\S)(\`\')(?!\\p{Word})'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
]
| true | key: 'PI:KEY:<KEY>END_PI'
# http://asciidoctor.org/docs/user-manual/#curved
patterns: [
# Matches typographic double quotes.
#
# Examples:
#
# "`double-quoted`"
# [bar]"`double-quoted`"
#
name: 'markup.italic.quote.typographic-quotes.asciidoc'
comment: 'double-quoted'
match: '(?:^|(?<!\\p{Word}|;|:))(\\[([^\\]]+?)\\])?(\"\`)(\\S|\\S.*?\\S)(\`\")(?!\\p{Word})'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
,
# Matches typographic single quotes.
#
# Examples:
#
# '`single-quoted`'
# [bar]'`single-quoted`'
#
name: 'markup.italic.quote.typographic-quotes.asciidoc'
comment: 'single-quoted'
match: '(?:^|(?<!\\p{Word}|;|:))(\\[([^\\]]+?)\\])?(\'\`)(\\S|\\S.*?\\S)(\`\')(?!\\p{Word})'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
]
|
[
{
"context": "# Switcher widget written by Juha Mustonen / SC5\n\n# Switches (reloads to another address) th",
"end": 42,
"score": 0.9998888969421387,
"start": 29,
"tag": "NAME",
"value": "Juha Mustonen"
}
] | dashing-contrib/lib/dashing-contrib/assets/widgets/switcher/switcher.coffee | cusi-dev/SupportDashboard | 3 | # Switcher widget written by Juha Mustonen / SC5
# Switches (reloads to another address) the dashboards in periodic manner
# <div id="container" data-switcher-interval="10000" data-switcher-dashboards="dashboard1 dashboard2">
# <%= yield %>
# </div>
#
class DashboardSwitcher
constructor: () ->
@dashboardNames = []
# Collect the dashboard names from attribute, if provided (otherwise skip switching)
names = $('[data-switcher-dashboards]').first().attr('data-switcher-dashboards') || ''
if names.length > 1
# Get names separated with comma or space
@dashboardNames = (name.trim() for name in names.split(/[ ,]+/).filter(Boolean))
start: (interval=60000) ->
interval = parseInt(interval, 10)
@maxPos = @dashboardNames.length - 1
# Skip switching if no names defined
if @dashboardNames.length == 0
return
# Take the dashboard name from that last part of the path
pathParts = window.location.pathname.split('/')
@curName = pathParts[pathParts.length - 1]
@curPos = @dashboardNames.indexOf(@curName)
# If not found, default to first
if @curPos == -1
@curPos = 0
@curName = @dashboardNames[@curPos]
# instantiate switcher controls for countdown and manual switching
@switcherControls = new DashboardSwitcherControls(interval, @)
@switcherControls.start() if @switcherControls.present()
@startLoop(interval)
startLoop: (interval) ->
self = @
@handle = setTimeout(() ->
# Increase the position or reset back to zero
self.curPos += 1
if self.curPos > self.maxPos
self.curPos = 0
# Switch to new dashboard
self.curName = self.dashboardNames[self.curPos]
window.location.pathname = "/#{self.curName}"
, interval)
stopLoop: () ->
clearTimeout @handle
currentName: () ->
@curName
nextName: () ->
@dashboardNames[@curPos + 1] || @dashboardNames[0]
previousName: () ->
@dashboardNames[@curPos - 1] || @dashboardNames[@dashboardNames.length - 1]
# Switches (hides and shows) elements within on list item
# <li switcher-interval="3000">
# <div widget-1></div>
# <div widget-2></div>
# <div widget-3></div>
# </li>
#
# Supports optional switcher interval, defaults to 5sec
class WidgetSwitcher
constructor: (@elements) ->
@$elements = $(elements)
start: (interval=5000) ->
self = @
@maxPos = @$elements.length - 1;
@curPos = 0
# Show only first at start
self.$elements.slice(1).hide()
# Start loop
@handle = setInterval(()->
# Hide all at first - then show the current and ensure it uses table-cell display type
#self.$elements.hide()
self.$elements.hide(333)
$(self.$elements[self.curPos]).show(333).css('display', 'table-cell')
# Increase the position or reset back to zero
self.curPos += 1
if self.curPos > self.maxPos
self.curPos = 0
, parseInt(interval, 10))
stop: () ->
clearInterval(@handle)
# Adds a countdown timer to show when next dashboard will appear
# TODO:
# - show the name of the next dashboard
# - add controls for manually cycling through dashboards
class DashboardSwitcherControls
arrowContent = "→"
stopTimerContent = "stop timer"
startTimerContent = "start timer"
constructor: (interval=60000, dashboardSwitcher) ->
@currentTime = parseInt(interval, 10)
@interval = parseInt(interval, 10)
@$elements = $('#dc-switcher-controls')
@dashboardSwitcher = dashboardSwitcher
@incrementTime = 1000 # refresh every 1000 milliseconds
@arrowContent = @$elements.data('next-dashboard-content') || DashboardSwitcherControls.arrowContent
@stopTimerContent = @$elements.data('stop-timer-content') || DashboardSwitcherControls.stopTimerContent
@startTimerContent = @$elements.data('start-timer-content') || DashboardSwitcherControls.startTimerContent
@
present: () ->
@$elements.length
start: () ->
@addElements()
@$timer = $.timer(@updateTimer, @incrementTime, true)
addElements: () ->
template = @$elements.find('dashboard-name-template')
if template.length
@$nextDashboardNameTemplate = template
@$nextDashboardNameTemplate.remove()
else
@$nextDashboardNameTemplate = $("<dashboard-name-template>Next dashboard: $nextName in </dashboard-name-template>")
@$nextDashboardNameContainer = $("<span id='dc-switcher-next-name'></span>")
@$countdown = $("<span id='dc-switcher-countdown'></span>")
@$manualSwitcher = $("<span id='dc-switcher-next' class='fa fa-forward'></span>").
html(@arrowContent).
click () =>
location.href = "/#{@dashboardSwitcher.nextName()}"
@$switcherStopper = $("<span id='dc-switcher-pause-reset' class='fa fa-pause'></span>").
html(@stopTimerContent).
click(@pause)
@$elements.
append(@$nextDashboardNameContainer).
append(@$countdown).
append(@$manualSwitcher).
append(@$switcherStopper)
formatTime: (time) ->
time = time / 10;
min = parseInt(time / 6000, 10)
sec = parseInt(time / 100, 10) - (min * 60)
"#{(if min > 0 then @pad(min, 2) else "00")}:#{@pad(sec, 2)}"
pad: (number, length) =>
str = "#{number}"
while str.length < length
str = "0#{str}"
str
pause: () =>
@$timer.toggle()
if @isRunning()
@dashboardSwitcher.stopLoop()
@$switcherStopper.removeClass('fa-pause').addClass('fa-play').html(@startTimerContent)
else
@dashboardSwitcher.startLoop @currentTime
@$switcherStopper.removeClass('fa-play').addClass('fa-pause').html(@stopTimerContent)
isRunning: () =>
@$switcherStopper.hasClass('fa-pause')
resetCountdown: () ->
# Get time from form
newTime = @interval
if newTime > 0
@currentTime = newTime
# Stop and reset timer
@$timer.stop().once()
updateTimer: () =>
# Update dashboard name
@$nextDashboardNameContainer.html(
@$nextDashboardNameTemplate.html().replace('$nextName', @dashboardSwitcher.nextName())
)
# Output timer position
timeString = @formatTime(@currentTime)
@$countdown.html(timeString)
# If timer is complete, trigger alert
if @currentTime is 0
@pause()
@resetCountdown()
return
# Increment timer position
@currentTime -= @incrementTime
if @currentTime < 0
@currentTime = 0
# Dashboard loaded and ready
Dashing.on 'ready', ->
# If multiple widgets per list item, switch them periodically
$('.gridster li').each (index, listItem) ->
$listItem = $(listItem)
# Take the element(s) right under the li
$widgets = $listItem.children('div')
if $widgets.length > 1
switcher = new WidgetSwitcher $widgets
switcher.start($listItem.attr('data-switcher-interval') or 5000)
# If multiple dashboards defined (using data-swticher-dashboards="board1 board2")
$container = $('#container')
ditcher = new DashboardSwitcher()
ditcher.start($container.attr('data-switcher-interval') or 60000)
| 104090 | # Switcher widget written by <NAME> / SC5
# Switches (reloads to another address) the dashboards in periodic manner
# <div id="container" data-switcher-interval="10000" data-switcher-dashboards="dashboard1 dashboard2">
# <%= yield %>
# </div>
#
class DashboardSwitcher
constructor: () ->
@dashboardNames = []
# Collect the dashboard names from attribute, if provided (otherwise skip switching)
names = $('[data-switcher-dashboards]').first().attr('data-switcher-dashboards') || ''
if names.length > 1
# Get names separated with comma or space
@dashboardNames = (name.trim() for name in names.split(/[ ,]+/).filter(Boolean))
start: (interval=60000) ->
interval = parseInt(interval, 10)
@maxPos = @dashboardNames.length - 1
# Skip switching if no names defined
if @dashboardNames.length == 0
return
# Take the dashboard name from that last part of the path
pathParts = window.location.pathname.split('/')
@curName = pathParts[pathParts.length - 1]
@curPos = @dashboardNames.indexOf(@curName)
# If not found, default to first
if @curPos == -1
@curPos = 0
@curName = @dashboardNames[@curPos]
# instantiate switcher controls for countdown and manual switching
@switcherControls = new DashboardSwitcherControls(interval, @)
@switcherControls.start() if @switcherControls.present()
@startLoop(interval)
startLoop: (interval) ->
self = @
@handle = setTimeout(() ->
# Increase the position or reset back to zero
self.curPos += 1
if self.curPos > self.maxPos
self.curPos = 0
# Switch to new dashboard
self.curName = self.dashboardNames[self.curPos]
window.location.pathname = "/#{self.curName}"
, interval)
stopLoop: () ->
clearTimeout @handle
currentName: () ->
@curName
nextName: () ->
@dashboardNames[@curPos + 1] || @dashboardNames[0]
previousName: () ->
@dashboardNames[@curPos - 1] || @dashboardNames[@dashboardNames.length - 1]
# Switches (hides and shows) elements within on list item
# <li switcher-interval="3000">
# <div widget-1></div>
# <div widget-2></div>
# <div widget-3></div>
# </li>
#
# Supports optional switcher interval, defaults to 5sec
class WidgetSwitcher
constructor: (@elements) ->
@$elements = $(elements)
start: (interval=5000) ->
self = @
@maxPos = @$elements.length - 1;
@curPos = 0
# Show only first at start
self.$elements.slice(1).hide()
# Start loop
@handle = setInterval(()->
# Hide all at first - then show the current and ensure it uses table-cell display type
#self.$elements.hide()
self.$elements.hide(333)
$(self.$elements[self.curPos]).show(333).css('display', 'table-cell')
# Increase the position or reset back to zero
self.curPos += 1
if self.curPos > self.maxPos
self.curPos = 0
, parseInt(interval, 10))
stop: () ->
clearInterval(@handle)
# Adds a countdown timer to show when next dashboard will appear
# TODO:
# - show the name of the next dashboard
# - add controls for manually cycling through dashboards
class DashboardSwitcherControls
arrowContent = "→"
stopTimerContent = "stop timer"
startTimerContent = "start timer"
constructor: (interval=60000, dashboardSwitcher) ->
@currentTime = parseInt(interval, 10)
@interval = parseInt(interval, 10)
@$elements = $('#dc-switcher-controls')
@dashboardSwitcher = dashboardSwitcher
@incrementTime = 1000 # refresh every 1000 milliseconds
@arrowContent = @$elements.data('next-dashboard-content') || DashboardSwitcherControls.arrowContent
@stopTimerContent = @$elements.data('stop-timer-content') || DashboardSwitcherControls.stopTimerContent
@startTimerContent = @$elements.data('start-timer-content') || DashboardSwitcherControls.startTimerContent
@
present: () ->
@$elements.length
start: () ->
@addElements()
@$timer = $.timer(@updateTimer, @incrementTime, true)
addElements: () ->
template = @$elements.find('dashboard-name-template')
if template.length
@$nextDashboardNameTemplate = template
@$nextDashboardNameTemplate.remove()
else
@$nextDashboardNameTemplate = $("<dashboard-name-template>Next dashboard: $nextName in </dashboard-name-template>")
@$nextDashboardNameContainer = $("<span id='dc-switcher-next-name'></span>")
@$countdown = $("<span id='dc-switcher-countdown'></span>")
@$manualSwitcher = $("<span id='dc-switcher-next' class='fa fa-forward'></span>").
html(@arrowContent).
click () =>
location.href = "/#{@dashboardSwitcher.nextName()}"
@$switcherStopper = $("<span id='dc-switcher-pause-reset' class='fa fa-pause'></span>").
html(@stopTimerContent).
click(@pause)
@$elements.
append(@$nextDashboardNameContainer).
append(@$countdown).
append(@$manualSwitcher).
append(@$switcherStopper)
formatTime: (time) ->
time = time / 10;
min = parseInt(time / 6000, 10)
sec = parseInt(time / 100, 10) - (min * 60)
"#{(if min > 0 then @pad(min, 2) else "00")}:#{@pad(sec, 2)}"
pad: (number, length) =>
str = "#{number}"
while str.length < length
str = "0#{str}"
str
pause: () =>
@$timer.toggle()
if @isRunning()
@dashboardSwitcher.stopLoop()
@$switcherStopper.removeClass('fa-pause').addClass('fa-play').html(@startTimerContent)
else
@dashboardSwitcher.startLoop @currentTime
@$switcherStopper.removeClass('fa-play').addClass('fa-pause').html(@stopTimerContent)
isRunning: () =>
@$switcherStopper.hasClass('fa-pause')
resetCountdown: () ->
# Get time from form
newTime = @interval
if newTime > 0
@currentTime = newTime
# Stop and reset timer
@$timer.stop().once()
updateTimer: () =>
# Update dashboard name
@$nextDashboardNameContainer.html(
@$nextDashboardNameTemplate.html().replace('$nextName', @dashboardSwitcher.nextName())
)
# Output timer position
timeString = @formatTime(@currentTime)
@$countdown.html(timeString)
# If timer is complete, trigger alert
if @currentTime is 0
@pause()
@resetCountdown()
return
# Increment timer position
@currentTime -= @incrementTime
if @currentTime < 0
@currentTime = 0
# Dashboard loaded and ready
Dashing.on 'ready', ->
# If multiple widgets per list item, switch them periodically
$('.gridster li').each (index, listItem) ->
$listItem = $(listItem)
# Take the element(s) right under the li
$widgets = $listItem.children('div')
if $widgets.length > 1
switcher = new WidgetSwitcher $widgets
switcher.start($listItem.attr('data-switcher-interval') or 5000)
# If multiple dashboards defined (using data-swticher-dashboards="board1 board2")
$container = $('#container')
ditcher = new DashboardSwitcher()
ditcher.start($container.attr('data-switcher-interval') or 60000)
| true | # Switcher widget written by PI:NAME:<NAME>END_PI / SC5
# Switches (reloads to another address) the dashboards in periodic manner
# <div id="container" data-switcher-interval="10000" data-switcher-dashboards="dashboard1 dashboard2">
# <%= yield %>
# </div>
#
# Cycles the browser between several dashboards by navigating to the next
# dashboard path on a fixed interval. The dashboard list is read from the
# data-switcher-dashboards attribute (comma- or space-separated names).
class DashboardSwitcher
  constructor: () ->
    @dashboardNames = []
    # Collect the dashboard names from attribute, if provided (otherwise skip switching)
    names = $('[data-switcher-dashboards]').first().attr('data-switcher-dashboards') || ''
    if names.length > 1
      # Get names separated with comma or space; filter(Boolean) drops empty tokens
      @dashboardNames = (name.trim() for name in names.split(/[ ,]+/).filter(Boolean))
  # Begin the switching loop. `interval` is milliseconds between switches;
  # it may arrive as a string from a data-* attribute, hence parseInt.
  start: (interval=60000) ->
    interval = parseInt(interval, 10)
    @maxPos = @dashboardNames.length - 1
    # Skip switching if no names defined
    if @dashboardNames.length == 0
      return
    # Take the dashboard name from that last part of the path
    pathParts = window.location.pathname.split('/')
    @curName = pathParts[pathParts.length - 1]
    @curPos = @dashboardNames.indexOf(@curName)
    # If not found, default to first
    if @curPos == -1
      @curPos = 0
      @curName = @dashboardNames[@curPos]
    # instantiate switcher controls for countdown and manual switching
    @switcherControls = new DashboardSwitcherControls(interval, @)
    @switcherControls.start() if @switcherControls.present()
    @startLoop(interval)
  # One-shot timeout is enough: assigning window.location.pathname reloads the
  # page, so a fresh DashboardSwitcher (and timer) starts on the next dashboard.
  startLoop: (interval) ->
    self = @
    @handle = setTimeout(() ->
      # Increase the position or reset back to zero
      self.curPos += 1
      if self.curPos > self.maxPos
        self.curPos = 0
      # Switch to new dashboard
      self.curName = self.dashboardNames[self.curPos]
      window.location.pathname = "/#{self.curName}"
    , interval)
  # Cancel a pending switch (used by the pause control).
  stopLoop: () ->
    clearTimeout @handle
  # Name of the dashboard currently shown.
  currentName: () ->
    @curName
  # Name of the next dashboard in the cycle (wraps to the first).
  nextName: () ->
    @dashboardNames[@curPos + 1] || @dashboardNames[0]
  # Name of the previous dashboard in the cycle (wraps to the last).
  previousName: () ->
    @dashboardNames[@curPos - 1] || @dashboardNames[@dashboardNames.length - 1]
# Switches (hides and shows) elements within on list item
# <li switcher-interval="3000">
# <div widget-1></div>
# <div widget-2></div>
# <div widget-3></div>
# </li>
#
# Supports optional switcher interval, defaults to 5sec
# Rotates the widgets inside one list item: all widgets are faded out on each
# tick and only the current one is shown (as a table cell, to keep layout).
class WidgetSwitcher
  constructor: (@elements) ->
    # jQuery wrapper around the raw widget elements
    @$elements = $(elements)
  # Start the rotation; `interval` is ms between switches (string or number).
  start: (interval=5000) ->
    @maxPos = @$elements.length - 1
    @curPos = 0
    # Only the first widget is visible when the rotation begins
    @$elements.slice(1).hide()
    # Fat arrow keeps `this` bound to the switcher inside the timer callback
    tick = =>
      # Fade everything out, then fade the active widget back in
      #@$elements.hide()
      @$elements.hide(333)
      $(@$elements[@curPos]).show(333).css('display', 'table-cell')
      # Advance, wrapping back to the first widget after the last one
      @curPos += 1
      @curPos = 0 if @curPos > @maxPos
    @handle = setInterval(tick, parseInt(interval, 10))
  # Halt the rotation started by start().
  stop: () ->
    clearInterval(@handle)
# Adds a countdown ("Next dashboard: X in mm:ss"), a manual next-dashboard
# control and a pause/resume toggle to the #dc-switcher-controls element.
class DashboardSwitcherControls
  # Default label contents, overridable per page via data-* attributes on the
  # #dc-switcher-controls element. NOTE: in a CoffeeScript class body these
  # assignments compile to closure variables inside the class wrapper, NOT
  # static properties — they must be referenced by bare name.
  arrowContent = "→"
  stopTimerContent = "stop timer"
  startTimerContent = "start timer"
  # interval: countdown length in ms (string or number);
  # dashboardSwitcher: the owning DashboardSwitcher instance.
  constructor: (interval=60000, dashboardSwitcher) ->
    @currentTime = parseInt(interval, 10)
    @interval = parseInt(interval, 10)
    @$elements = $('#dc-switcher-controls')
    @dashboardSwitcher = dashboardSwitcher
    @incrementTime = 1000 # refresh every 1000 milliseconds
    # BUGFIX: the defaults above are class-body closure variables, so the
    # previous `DashboardSwitcherControls.arrowContent` (etc.) lookups always
    # yielded undefined and the fallbacks never applied. Reference the
    # closure variables directly instead.
    @arrowContent = @$elements.data('next-dashboard-content') || arrowContent
    @stopTimerContent = @$elements.data('stop-timer-content') || stopTimerContent
    @startTimerContent = @$elements.data('start-timer-content') || startTimerContent
    @
  # Truthy when the controls container exists on the current page.
  present: () ->
    @$elements.length
  # Build the DOM controls and start the jQuery countdown timer plugin.
  start: () ->
    @addElements()
    @$timer = $.timer(@updateTimer, @incrementTime, true)
  # Create (or adopt) the template and control elements inside the container.
  addElements: () ->
    # A page-provided <dashboard-name-template> wins over the built-in one;
    # it is detached here and re-rendered with $nextName substituted each tick.
    template = @$elements.find('dashboard-name-template')
    if template.length
      @$nextDashboardNameTemplate = template
      @$nextDashboardNameTemplate.remove()
    else
      @$nextDashboardNameTemplate = $("<dashboard-name-template>Next dashboard: $nextName in </dashboard-name-template>")
    @$nextDashboardNameContainer = $("<span id='dc-switcher-next-name'></span>")
    @$countdown = $("<span id='dc-switcher-countdown'></span>")
    # Manual "skip to next dashboard" control
    @$manualSwitcher = $("<span id='dc-switcher-next' class='fa fa-forward'></span>").
      html(@arrowContent).
      click () =>
        location.href = "/#{@dashboardSwitcher.nextName()}"
    # Pause/resume toggle
    @$switcherStopper = $("<span id='dc-switcher-pause-reset' class='fa fa-pause'></span>").
      html(@stopTimerContent).
      click(@pause)
    @$elements.
      append(@$nextDashboardNameContainer).
      append(@$countdown).
      append(@$manualSwitcher).
      append(@$switcherStopper)
  # Format a millisecond count as "mm:ss".
  formatTime: (time) ->
    time = time / 10
    min = parseInt(time / 6000, 10)
    sec = parseInt(time / 100, 10) - (min * 60)
    "#{(if min > 0 then @pad(min, 2) else "00")}:#{@pad(sec, 2)}"
  # Left-pad `number` with zeros to `length` characters.
  pad: (number, length) =>
    str = "#{number}"
    while str.length < length
      str = "0#{str}"
    str
  # Toggle the countdown and the underlying dashboard switch timer.
  pause: () =>
    @$timer.toggle()
    if @isRunning()
      @dashboardSwitcher.stopLoop()
      @$switcherStopper.removeClass('fa-pause').addClass('fa-play').html(@startTimerContent)
    else
      @dashboardSwitcher.startLoop @currentTime
      @$switcherStopper.removeClass('fa-play').addClass('fa-pause').html(@stopTimerContent)
  # Running state is derived from the toggle button's icon class.
  isRunning: () =>
    @$switcherStopper.hasClass('fa-pause')
  # Restore the countdown to the configured interval and restart the timer.
  resetCountdown: () ->
    newTime = @interval
    if newTime > 0
      @currentTime = newTime
    # Stop and reset timer
    @$timer.stop().once()
  # Per-tick callback: refresh the "next dashboard" label and countdown,
  # pausing/resetting once the countdown reaches zero.
  updateTimer: () =>
    # Update dashboard name
    @$nextDashboardNameContainer.html(
      @$nextDashboardNameTemplate.html().replace('$nextName', @dashboardSwitcher.nextName())
    )
    # Output timer position
    timeString = @formatTime(@currentTime)
    @$countdown.html(timeString)
    # If timer is complete, trigger alert
    if @currentTime is 0
      @pause()
      @resetCountdown()
      return
    # Increment timer position
    @currentTime -= @incrementTime
    if @currentTime < 0
      @currentTime = 0
# Dashboard loaded and ready: wire up both switching modes.
Dashing.on 'ready', ->
  # If multiple widgets per list item, switch them periodically
  $('.gridster li').each (index, listItem) ->
    $listItem = $(listItem)
    # Take the element(s) right under the li
    $widgets = $listItem.children('div')
    if $widgets.length > 1
      switcher = new WidgetSwitcher $widgets
      switcher.start($listItem.attr('data-switcher-interval') or 5000)
  # If multiple dashboards defined (using data-switcher-dashboards="board1 board2")
  # on #container, rotate between whole dashboards as well.
  # NOTE(review): "ditcher" looks like a typo for "switcher" — harmless, local only.
  $container = $('#container')
  ditcher = new DashboardSwitcher()
  ditcher.start($container.attr('data-switcher-interval') or 60000)
|
[
{
"context": ") and (keycode isnt 91) and (keycode not in [13, 37, 38, 39, 40])\n if $scope.chatEvents.areWeT",
"end": 6797,
"score": 0.544938325881958,
"start": 6796,
"tag": "KEY",
"value": "7"
}
] | web/app/scripts/controllers/main.coffee | AhmedSoliman/instalk | 2 | 'use strict'
# Index an array of objects by one of their properties. Objects whose `key`
# property is null/undefined are skipped; when several objects share the same
# key value, the last one wins.
Array::toDict = (key) ->
  dict = {}
  for obj in this
    dict[obj[key]] = obj if obj[key]?
  dict
# Minimal Array::filter polyfill for pre-ES5 environments.
# NOTE(review): unlike the native version, the callback receives only the
# element (no index/array arguments) and no thisArg is supported.
unless Array::filter
  Array::filter = (callback) ->
    element for element in this when callback(element)
# Main chat-room controller: connects to the Instalk protocol service, joins
# the room from the route, and keeps $scope in sync with room events
# (join/leave/messages/typing/topic). Also manages reconnection with
# exponential backoff, unread-message title/favicon badges, and auto-scroll.
Instalk.myApp
  .controller 'MainCtrl', ['$scope', 'visibilityApiService', 'faviconService', '$rootScope', '$timeout', '$log', '$routeParams', '$cookies', 'InstalkProtocol', ($scope, visibilityApiService, faviconService, $rootScope, $timeout, $log, $routeParams, $cookies, InstalkProtocol) ->
    $log.debug("Starting up controller...")
    if InstalkProtocol.isInitialised()
      InstalkProtocol.reconnect true
    # --- private controller state (closure-local, not on $scope) ---
    _inRoom = false
    _retrier = null
    _hidden = false
    _autoScrollEnabled = true
    _autoScrollSuspended = false
    # Called by the view when the message list scroll position changes;
    # scrolling away from the bottom disables auto-scroll.
    $scope.scrolledToBottom = ($event, isEnded) ->
      if not _autoScrollSuspended
        if isEnded
          _autoScrollEnabled = true
          stopMarkingMessages()
        else
          startMarkingMessages()
          _autoScrollEnabled = false
    _retryBase = 1
    _unread = 0
    _resetTitle = $rootScope.title
    _titleAnimation = false
    # Sentinel object inserted into the message list to mark the first unread message.
    marker =
      o: 'marker'
    _markerLoc = -1
    $scope.roomId = $routeParams.roomId
    $scope.room =
      topic: ""
    $scope.user = null
    $scope.form = {}
    $scope.members = {}
    $scope.messages = []
    $scope.chatEvents =
      areWeTyping: false
      whoIsTyping: []
    $scope.retryAfter = 0
    # Animate the message pane to the bottom; suspended while animating so the
    # resulting scroll events don't toggle auto-scroll off.
    scrollToBottom = () ->
      if _autoScrollEnabled
        _autoScrollSuspended = true
        $('#messages').animate({
          scrollTop: $('#messages').last()[0].scrollHeight
        }, 150, () ->
          $log.info "Animation Completed"
          _autoScrollSuspended = false
        )
    # Start the marquee-style title animation plus the favicon unread badge.
    enableAnimateTitle = (resetTitle) ->
      _resetTitle = resetTitle
      faviconService.badge(_unread)
      if not _titleAnimation
        _titleAnimation = true
        $rootScope.title = _resetTitle
        animator = () ->
          if _titleAnimation
            $rootScope.title = $rootScope.title.substring(3)
            if $rootScope.title.length is 0
              $rootScope.title = _resetTitle
            $timeout(animator, 1000)
        $timeout(animator, 1000)
    disableAnimateTitle = () ->
      _titleAnimation = false
    # Room sync: full member list + history arrives once we join.
    InstalkProtocol.onRoomWelcome (data) ->
      #actual init...
      $log.debug "Room #{$scope.roomId} Joined, Members:", data.data.members
      _inRoom = true
      $log.debug "SYNC:", data.data
      $scope.members = data.data.members.toDict 'username'
      #debugger
      $scope.messages = data.data.messages
      $scope.room.topic = data.data.topic
      $timeout(scrollToBottom, 500)
    # --- unread-marker helpers ---
    isMarked = () -> _markerLoc > -1
    addMarker = () ->
      if not isMarked()
        $log.info "Adding Marker"
        _markerLoc = ($scope.messages.push marker) - 1
    removeMarkers = () ->
      if isMarked()
        $log.info "Removing Marker", _markerLoc
        $log.info "Messages before:", $scope.messages
        $log.info("Removing:", $scope.messages.splice(_markerLoc, 1))
        $log.info "Messages now:", $scope.messages
        _markerLoc = -1
    setTitle = (title) ->
      $rootScope.title = 'Instalk | #' + $scope.roomId + ' ' + title
    formatTitle = (title) ->
      'Instalk | #' + $scope.roomId + ' ' + title
    # Server acknowledged us: persist identity and join the room.
    InstalkProtocol.onWelcome (user) ->
      if _retrier then $timeout.cancel(_retrier)
      _retryBase = 1
      $log.debug 'Got Welcome...'
      $scope.user = user
      $cookies.userInfo = JSON.stringify user
      InstalkProtocol.joinRoom $scope.roomId
    InstalkProtocol.onJoin (data) ->
      $log.debug "#{data.data.user.username} joined the room"
      $scope.members[data.data.user.username] = data.data.user
      $scope.messages.push data
      scrollToBottom()
    InstalkProtocol.onLeft (data) ->
      delete $scope.members[data.data.user.username]
      $log.debug "User: #{data.data.user.username} Left Room"
      $scope.messages.push data
      scrollToBottom()
    # New chat message; while the tab is hidden, count unread and badge the title.
    InstalkProtocol.onMessage (data) ->
      $log.debug 'Adding Message To History:', data
      if _hidden
        _unread += 1
        addMarker()
        enableAnimateTitle(formatTitle("(#{_unread})"))
      $scope.messages.push data
      scrollToBottom()
    InstalkProtocol.onRoomTopicChange (data) ->
      $scope.messages.push data
      $scope.room.topic = data.data.topic
      scrollToBottom()
    # Typing indicators from other users (our own events are filtered out).
    InstalkProtocol.onBeginTyping (data) ->
      if data.data.sender isnt $scope.user.username
        $log.debug("Someone started typing:", data.data.sender)
        if data.data.sender not in $scope.chatEvents.whoIsTyping
          $log.debug(data.data.sender + " IS typing...")
          $scope.chatEvents.whoIsTyping.push data.data.sender
    InstalkProtocol.onStopTyping (data) ->
      if data.data.sender isnt $scope.user.username
        $log.debug("Someone stopped typing:", data.data.sender)
        i = $scope.chatEvents.whoIsTyping.indexOf(data.data.sender)
        $scope.chatEvents.whoIsTyping.splice(i, 1)
    InstalkProtocol.onUserInfoUpdate (data) ->
      $scope.messages.push data
      #check if it's me or not first
      if $scope.user.username is data.data.originalUsername
        $log.debug 'Updating my own data to ', data.data.newUserInfo
        $scope.user = data.data.newUserInfo
        $cookies.userInfo = JSON.stringify $scope.user
      else
        #search in members
        $log.debug 'Updating a member data to ', data.data.newUserInfo
        delete $scope.members[data.data.originalUsername]
        $scope.members[data.data.newUserInfo.username] = data.data.newUserInfo
      scrollToBottom()
    # Exponential backoff on connection loss: wait 2^_retryBase seconds (capped
    # at 2^7), counting down once per second via retryDecay.
    handleConnectionDrop = () ->
      if _retrier then $timeout.cancel(_retrier)
      $log.debug("We lost connection")
      _retryBase += 1
      if _retryBase > 7
        _retryBase = 7
      $scope.retryAfter = Math.pow(2, _retryBase)
      _retrier = $timeout(retryDecay, 1000)
    InstalkProtocol.onConnectionDrop handleConnectionDrop
    retryDecay = () ->
      if _retrier then $timeout.cancel(_retrier)
      $scope.retryAfter -= 1
      if $scope.retryAfter <= 0
        #it's time to retry
        $scope.reconnect()
      else
        _retrier = $timeout(retryDecay, 1000)
    # We stop advertising "typing" 2s after the last qualifying keystroke.
    scheduleStopTyping = () ->
      $scope.chatEvents.timer = $timeout(stopTyping, 2000)
    $scope.isSomeoneTyping = () -> $scope.chatEvents.whoIsTyping.length > 0
    $scope.whoIsTyping = () ->
      names = $scope.chatEvents.whoIsTyping.map (w) ->
        $scope.members[w]?.info.name
      names.join(', ')
    # Keystroke handler: ignores modifier/navigation keys (the keycode >= 0
    # check is redundant given keycode > 19, kept as-is).
    $scope.beginTyping = (ev) ->
      keycode = ev.which
      if (keycode >= 0) and (keycode > 19) and (keycode isnt 224) and (keycode isnt 91) and (keycode not in [13, 37, 38, 39, 40])
        if $scope.chatEvents.areWeTyping and $scope.chatEvents.timer
          $timeout.cancel($scope.chatEvents.timer)
          scheduleStopTyping()
        else
          $log.debug("We started typing...:", ev)
          InstalkProtocol.beginTyping $scope.roomId
          $scope.chatEvents.areWeTyping = true
          scheduleStopTyping()
    stopTyping = () ->
      $log.debug("We stopped Typing")
      if $scope.chatEvents.timer
        $log.debug("Cancelling timer...")
        $timeout.cancel($scope.chatEvents.timer)
      else
        $log.debug("No timer to cancel")
      $scope.chatEvents.areWeTyping = false
      InstalkProtocol.stopTyping $scope.roomId
    # --- connection-state helpers exposed to the view ---
    $scope.getLag = () -> InstalkProtocol.getLag()
    $scope.isConnecting = () ->
      (InstalkProtocol.currentState() is 'OPEN' or InstalkProtocol.currentState() is 'CONNECTING') and not $scope.isOnline()
    $scope.isDisconnected = () ->
      (InstalkProtocol.currentState() is 'CLOSED') or (InstalkProtocol.currentState() is 'CLOSING')
    $scope.isConnected = () -> InstalkProtocol.currentState() == 'OPEN'
    $scope.isOnline = () -> InstalkProtocol.isOnline() and _inRoom is true
    $scope.reconnect = () ->
      if _retrier then $timeout.cancel(_retrier)
      InstalkProtocol.reconnect()
    $scope.currentState = () -> InstalkProtocol.currentState()
    $scope.initialisationStatus = () ->
      switch InstalkProtocol.currentState()
        when 'OPEN'
          if InstalkProtocol.isInitialised()
            if _inRoom then 'Ready...' else 'Joining Room...'
          else 'Initialising...'
        when 'CONNECTING' then 'Connecting...'
        else 'Unknown...'
    $scope.updateUserInfo = () ->
      InstalkProtocol.updateUserInfo $scope.user.info.name, $scope.user.info.color
    $scope.setRoomTopic = () ->
      $log.info("Updating the room topic to:", $scope.room.topic)
      InstalkProtocol.setRoomTopic $scope.roomId, $scope.room.topic
    $scope.sendMessage = () ->
      stopTyping()
      $log.debug 'Sending: ', $scope.form.msg
      _autoScrollEnabled = true
      InstalkProtocol.sendMessage $scope.roomId, $scope.form.msg
      $scope.form.msg = ''
    $scope.$on '$destroy', () ->
      $log.debug("Controller is dying...")
    # If we arrive here already disconnected, kick off the retry loop immediately.
    if $scope.isDisconnected()
      _retryBase = 1
      if _retrier then $timeout.cancel(_retrier)
      handleConnectionDrop()
    # --- tab-visibility handling: hidden tabs accumulate unread counts ---
    startMarkingMessages = () ->
      _hidden = true
      _autoScrollEnabled = false
    stopMarkingMessages = () ->
      _hidden = false
      _unread = 0
      setTitle('')
      disableAnimateTitle()
      faviconService.reset()
    $scope.$on 'visibilityChanged', (event, isHidden) ->
      $log.info("Visibility Changed", event, isHidden)
      if isHidden
        removeMarkers()
        startMarkingMessages()
      else
        stopMarkingMessages()
      $scope.$apply()
      $rootScope.$apply()
  ]
| 26058 | 'use strict'
Array::toDict = (key) ->
@reduce ((dict, obj) -> dict[ obj[key] ] = obj if obj[key]?; return dict), {}
unless Array::filter
Array::filter = (callback) ->
element for element in this when callback(element)
Instalk.myApp
.controller 'MainCtrl', ['$scope', 'visibilityApiService', 'faviconService', '$rootScope', '$timeout', '$log', '$routeParams', '$cookies', 'InstalkProtocol', ($scope, visibilityApiService, faviconService, $rootScope, $timeout, $log, $routeParams, $cookies, InstalkProtocol) ->
$log.debug("Starting up controller...")
if InstalkProtocol.isInitialised()
InstalkProtocol.reconnect true
_inRoom = false
_retrier = null
_hidden = false
_autoScrollEnabled = true
_autoScrollSuspended = false
$scope.scrolledToBottom = ($event, isEnded) ->
if not _autoScrollSuspended
if isEnded
_autoScrollEnabled = true
stopMarkingMessages()
else
startMarkingMessages()
_autoScrollEnabled = false
_retryBase = 1
_unread = 0
_resetTitle = $rootScope.title
_titleAnimation = false
marker =
o: 'marker'
_markerLoc = -1
$scope.roomId = $routeParams.roomId
$scope.room =
topic: ""
$scope.user = null
$scope.form = {}
$scope.members = {}
$scope.messages = []
$scope.chatEvents =
areWeTyping: false
whoIsTyping: []
$scope.retryAfter = 0
scrollToBottom = () ->
if _autoScrollEnabled
_autoScrollSuspended = true
$('#messages').animate({
scrollTop: $('#messages').last()[0].scrollHeight
}, 150, () ->
$log.info "Animation Completed"
_autoScrollSuspended = false
)
enableAnimateTitle = (resetTitle) ->
_resetTitle = resetTitle
faviconService.badge(_unread)
if not _titleAnimation
_titleAnimation = true
$rootScope.title = _resetTitle
animator = () ->
if _titleAnimation
$rootScope.title = $rootScope.title.substring(3)
if $rootScope.title.length is 0
$rootScope.title = _resetTitle
$timeout(animator, 1000)
$timeout(animator, 1000)
disableAnimateTitle = () ->
_titleAnimation = false
InstalkProtocol.onRoomWelcome (data) ->
#actual init...
$log.debug "Room #{$scope.roomId} Joined, Members:", data.data.members
_inRoom = true
$log.debug "SYNC:", data.data
$scope.members = data.data.members.toDict 'username'
#debugger
$scope.messages = data.data.messages
$scope.room.topic = data.data.topic
$timeout(scrollToBottom, 500)
isMarked = () -> _markerLoc > -1
addMarker = () ->
if not isMarked()
$log.info "Adding Marker"
_markerLoc = ($scope.messages.push marker) - 1
removeMarkers = () ->
if isMarked()
$log.info "Removing Marker", _markerLoc
$log.info "Messages before:", $scope.messages
$log.info("Removing:", $scope.messages.splice(_markerLoc, 1))
$log.info "Messages now:", $scope.messages
_markerLoc = -1
setTitle = (title) ->
$rootScope.title = 'Instalk | #' + $scope.roomId + ' ' + title
formatTitle = (title) ->
'Instalk | #' + $scope.roomId + ' ' + title
InstalkProtocol.onWelcome (user) ->
if _retrier then $timeout.cancel(_retrier)
_retryBase = 1
$log.debug 'Got Welcome...'
$scope.user = user
$cookies.userInfo = JSON.stringify user
InstalkProtocol.joinRoom $scope.roomId
InstalkProtocol.onJoin (data) ->
$log.debug "#{data.data.user.username} joined the room"
$scope.members[data.data.user.username] = data.data.user
$scope.messages.push data
scrollToBottom()
InstalkProtocol.onLeft (data) ->
delete $scope.members[data.data.user.username]
$log.debug "User: #{data.data.user.username} Left Room"
$scope.messages.push data
scrollToBottom()
InstalkProtocol.onMessage (data) ->
$log.debug 'Adding Message To History:', data
if _hidden
_unread += 1
addMarker()
enableAnimateTitle(formatTitle("(#{_unread})"))
$scope.messages.push data
scrollToBottom()
InstalkProtocol.onRoomTopicChange (data) ->
$scope.messages.push data
$scope.room.topic = data.data.topic
scrollToBottom()
InstalkProtocol.onBeginTyping (data) ->
if data.data.sender isnt $scope.user.username
$log.debug("Someone started typing:", data.data.sender)
if data.data.sender not in $scope.chatEvents.whoIsTyping
$log.debug(data.data.sender + " IS typing...")
$scope.chatEvents.whoIsTyping.push data.data.sender
InstalkProtocol.onStopTyping (data) ->
if data.data.sender isnt $scope.user.username
$log.debug("Someone stopped typing:", data.data.sender)
i = $scope.chatEvents.whoIsTyping.indexOf(data.data.sender)
$scope.chatEvents.whoIsTyping.splice(i, 1)
InstalkProtocol.onUserInfoUpdate (data) ->
$scope.messages.push data
#check if it's me or not first
if $scope.user.username is data.data.originalUsername
$log.debug 'Updating my own data to ', data.data.newUserInfo
$scope.user = data.data.newUserInfo
$cookies.userInfo = JSON.stringify $scope.user
else
#search in members
$log.debug 'Updating a member data to ', data.data.newUserInfo
delete $scope.members[data.data.originalUsername]
$scope.members[data.data.newUserInfo.username] = data.data.newUserInfo
scrollToBottom()
handleConnectionDrop = () ->
if _retrier then $timeout.cancel(_retrier)
$log.debug("We lost connection")
_retryBase += 1
if _retryBase > 7
_retryBase = 7
$scope.retryAfter = Math.pow(2, _retryBase)
_retrier = $timeout(retryDecay, 1000)
InstalkProtocol.onConnectionDrop handleConnectionDrop
retryDecay = () ->
if _retrier then $timeout.cancel(_retrier)
$scope.retryAfter -= 1
if $scope.retryAfter <= 0
#it's time to retry
$scope.reconnect()
else
_retrier = $timeout(retryDecay, 1000)
scheduleStopTyping = () ->
$scope.chatEvents.timer = $timeout(stopTyping, 2000)
$scope.isSomeoneTyping = () -> $scope.chatEvents.whoIsTyping.length > 0
$scope.whoIsTyping = () ->
names = $scope.chatEvents.whoIsTyping.map (w) ->
$scope.members[w]?.info.name
names.join(', ')
$scope.beginTyping = (ev) ->
keycode = ev.which
if (keycode >= 0) and (keycode > 19) and (keycode isnt 224) and (keycode isnt 91) and (keycode not in [13, 3<KEY>, 38, 39, 40])
if $scope.chatEvents.areWeTyping and $scope.chatEvents.timer
$timeout.cancel($scope.chatEvents.timer)
scheduleStopTyping()
else
$log.debug("We started typing...:", ev)
InstalkProtocol.beginTyping $scope.roomId
$scope.chatEvents.areWeTyping = true
scheduleStopTyping()
stopTyping = () ->
$log.debug("We stopped Typing")
if $scope.chatEvents.timer
$log.debug("Cancelling timer...")
$timeout.cancel($scope.chatEvents.timer)
else
$log.debug("No timer to cancel")
$scope.chatEvents.areWeTyping = false
InstalkProtocol.stopTyping $scope.roomId
$scope.getLag = () -> InstalkProtocol.getLag()
$scope.isConnecting = () ->
(InstalkProtocol.currentState() is 'OPEN' or InstalkProtocol.currentState() is 'CONNECTING') and not $scope.isOnline()
$scope.isDisconnected = () ->
(InstalkProtocol.currentState() is 'CLOSED') or (InstalkProtocol.currentState() is 'CLOSING')
$scope.isConnected = () -> InstalkProtocol.currentState() == 'OPEN'
$scope.isOnline = () -> InstalkProtocol.isOnline() and _inRoom is true
$scope.reconnect = () ->
if _retrier then $timeout.cancel(_retrier)
InstalkProtocol.reconnect()
$scope.currentState = () -> InstalkProtocol.currentState()
$scope.initialisationStatus = () ->
switch InstalkProtocol.currentState()
when 'OPEN'
if InstalkProtocol.isInitialised()
if _inRoom then 'Ready...' else 'Joining Room...'
else 'Initialising...'
when 'CONNECTING' then 'Connecting...'
else 'Unknown...'
$scope.updateUserInfo = () ->
InstalkProtocol.updateUserInfo $scope.user.info.name, $scope.user.info.color
$scope.setRoomTopic = () ->
$log.info("Updating the room topic to:", $scope.room.topic)
InstalkProtocol.setRoomTopic $scope.roomId, $scope.room.topic
$scope.sendMessage = () ->
stopTyping()
$log.debug 'Sending: ', $scope.form.msg
_autoScrollEnabled = true
InstalkProtocol.sendMessage $scope.roomId, $scope.form.msg
$scope.form.msg = ''
$scope.$on '$destroy', () ->
$log.debug("Controller is dying...")
if $scope.isDisconnected()
_retryBase = 1
if _retrier then $timeout.cancel(_retrier)
handleConnectionDrop()
startMarkingMessages = () ->
_hidden = true
_autoScrollEnabled = false
stopMarkingMessages = () ->
_hidden = false
_unread = 0
setTitle('')
disableAnimateTitle()
faviconService.reset()
$scope.$on 'visibilityChanged', (event, isHidden) ->
$log.info("Visibility Changed", event, isHidden)
if isHidden
removeMarkers()
startMarkingMessages()
else
stopMarkingMessages()
$scope.$apply()
$rootScope.$apply()
]
| true | 'use strict'
Array::toDict = (key) ->
@reduce ((dict, obj) -> dict[ obj[key] ] = obj if obj[key]?; return dict), {}
unless Array::filter
Array::filter = (callback) ->
element for element in this when callback(element)
Instalk.myApp
.controller 'MainCtrl', ['$scope', 'visibilityApiService', 'faviconService', '$rootScope', '$timeout', '$log', '$routeParams', '$cookies', 'InstalkProtocol', ($scope, visibilityApiService, faviconService, $rootScope, $timeout, $log, $routeParams, $cookies, InstalkProtocol) ->
$log.debug("Starting up controller...")
if InstalkProtocol.isInitialised()
InstalkProtocol.reconnect true
_inRoom = false
_retrier = null
_hidden = false
_autoScrollEnabled = true
_autoScrollSuspended = false
$scope.scrolledToBottom = ($event, isEnded) ->
if not _autoScrollSuspended
if isEnded
_autoScrollEnabled = true
stopMarkingMessages()
else
startMarkingMessages()
_autoScrollEnabled = false
_retryBase = 1
_unread = 0
_resetTitle = $rootScope.title
_titleAnimation = false
marker =
o: 'marker'
_markerLoc = -1
$scope.roomId = $routeParams.roomId
$scope.room =
topic: ""
$scope.user = null
$scope.form = {}
$scope.members = {}
$scope.messages = []
$scope.chatEvents =
areWeTyping: false
whoIsTyping: []
$scope.retryAfter = 0
scrollToBottom = () ->
if _autoScrollEnabled
_autoScrollSuspended = true
$('#messages').animate({
scrollTop: $('#messages').last()[0].scrollHeight
}, 150, () ->
$log.info "Animation Completed"
_autoScrollSuspended = false
)
enableAnimateTitle = (resetTitle) ->
_resetTitle = resetTitle
faviconService.badge(_unread)
if not _titleAnimation
_titleAnimation = true
$rootScope.title = _resetTitle
animator = () ->
if _titleAnimation
$rootScope.title = $rootScope.title.substring(3)
if $rootScope.title.length is 0
$rootScope.title = _resetTitle
$timeout(animator, 1000)
$timeout(animator, 1000)
disableAnimateTitle = () ->
_titleAnimation = false
InstalkProtocol.onRoomWelcome (data) ->
#actual init...
$log.debug "Room #{$scope.roomId} Joined, Members:", data.data.members
_inRoom = true
$log.debug "SYNC:", data.data
$scope.members = data.data.members.toDict 'username'
#debugger
$scope.messages = data.data.messages
$scope.room.topic = data.data.topic
$timeout(scrollToBottom, 500)
isMarked = () -> _markerLoc > -1
addMarker = () ->
if not isMarked()
$log.info "Adding Marker"
_markerLoc = ($scope.messages.push marker) - 1
removeMarkers = () ->
if isMarked()
$log.info "Removing Marker", _markerLoc
$log.info "Messages before:", $scope.messages
$log.info("Removing:", $scope.messages.splice(_markerLoc, 1))
$log.info "Messages now:", $scope.messages
_markerLoc = -1
setTitle = (title) ->
$rootScope.title = 'Instalk | #' + $scope.roomId + ' ' + title
formatTitle = (title) ->
'Instalk | #' + $scope.roomId + ' ' + title
InstalkProtocol.onWelcome (user) ->
if _retrier then $timeout.cancel(_retrier)
_retryBase = 1
$log.debug 'Got Welcome...'
$scope.user = user
$cookies.userInfo = JSON.stringify user
InstalkProtocol.joinRoom $scope.roomId
InstalkProtocol.onJoin (data) ->
$log.debug "#{data.data.user.username} joined the room"
$scope.members[data.data.user.username] = data.data.user
$scope.messages.push data
scrollToBottom()
InstalkProtocol.onLeft (data) ->
delete $scope.members[data.data.user.username]
$log.debug "User: #{data.data.user.username} Left Room"
$scope.messages.push data
scrollToBottom()
InstalkProtocol.onMessage (data) ->
$log.debug 'Adding Message To History:', data
if _hidden
_unread += 1
addMarker()
enableAnimateTitle(formatTitle("(#{_unread})"))
$scope.messages.push data
scrollToBottom()
InstalkProtocol.onRoomTopicChange (data) ->
$scope.messages.push data
$scope.room.topic = data.data.topic
scrollToBottom()
InstalkProtocol.onBeginTyping (data) ->
if data.data.sender isnt $scope.user.username
$log.debug("Someone started typing:", data.data.sender)
if data.data.sender not in $scope.chatEvents.whoIsTyping
$log.debug(data.data.sender + " IS typing...")
$scope.chatEvents.whoIsTyping.push data.data.sender
InstalkProtocol.onStopTyping (data) ->
if data.data.sender isnt $scope.user.username
$log.debug("Someone stopped typing:", data.data.sender)
i = $scope.chatEvents.whoIsTyping.indexOf(data.data.sender)
$scope.chatEvents.whoIsTyping.splice(i, 1)
InstalkProtocol.onUserInfoUpdate (data) ->
$scope.messages.push data
#check if it's me or not first
if $scope.user.username is data.data.originalUsername
$log.debug 'Updating my own data to ', data.data.newUserInfo
$scope.user = data.data.newUserInfo
$cookies.userInfo = JSON.stringify $scope.user
else
#search in members
$log.debug 'Updating a member data to ', data.data.newUserInfo
delete $scope.members[data.data.originalUsername]
$scope.members[data.data.newUserInfo.username] = data.data.newUserInfo
scrollToBottom()
handleConnectionDrop = () ->
if _retrier then $timeout.cancel(_retrier)
$log.debug("We lost connection")
_retryBase += 1
if _retryBase > 7
_retryBase = 7
$scope.retryAfter = Math.pow(2, _retryBase)
_retrier = $timeout(retryDecay, 1000)
InstalkProtocol.onConnectionDrop handleConnectionDrop
retryDecay = () ->
if _retrier then $timeout.cancel(_retrier)
$scope.retryAfter -= 1
if $scope.retryAfter <= 0
#it's time to retry
$scope.reconnect()
else
_retrier = $timeout(retryDecay, 1000)
scheduleStopTyping = () ->
$scope.chatEvents.timer = $timeout(stopTyping, 2000)
$scope.isSomeoneTyping = () -> $scope.chatEvents.whoIsTyping.length > 0
$scope.whoIsTyping = () ->
names = $scope.chatEvents.whoIsTyping.map (w) ->
$scope.members[w]?.info.name
names.join(', ')
$scope.beginTyping = (ev) ->
keycode = ev.which
if (keycode >= 0) and (keycode > 19) and (keycode isnt 224) and (keycode isnt 91) and (keycode not in [13, 3PI:KEY:<KEY>END_PI, 38, 39, 40])
if $scope.chatEvents.areWeTyping and $scope.chatEvents.timer
$timeout.cancel($scope.chatEvents.timer)
scheduleStopTyping()
else
$log.debug("We started typing...:", ev)
InstalkProtocol.beginTyping $scope.roomId
$scope.chatEvents.areWeTyping = true
scheduleStopTyping()
stopTyping = () ->
$log.debug("We stopped Typing")
if $scope.chatEvents.timer
$log.debug("Cancelling timer...")
$timeout.cancel($scope.chatEvents.timer)
else
$log.debug("No timer to cancel")
$scope.chatEvents.areWeTyping = false
InstalkProtocol.stopTyping $scope.roomId
$scope.getLag = () -> InstalkProtocol.getLag()
$scope.isConnecting = () ->
(InstalkProtocol.currentState() is 'OPEN' or InstalkProtocol.currentState() is 'CONNECTING') and not $scope.isOnline()
$scope.isDisconnected = () ->
(InstalkProtocol.currentState() is 'CLOSED') or (InstalkProtocol.currentState() is 'CLOSING')
$scope.isConnected = () -> InstalkProtocol.currentState() == 'OPEN'
$scope.isOnline = () -> InstalkProtocol.isOnline() and _inRoom is true
$scope.reconnect = () ->
if _retrier then $timeout.cancel(_retrier)
InstalkProtocol.reconnect()
$scope.currentState = () -> InstalkProtocol.currentState()
$scope.initialisationStatus = () ->
switch InstalkProtocol.currentState()
when 'OPEN'
if InstalkProtocol.isInitialised()
if _inRoom then 'Ready...' else 'Joining Room...'
else 'Initialising...'
when 'CONNECTING' then 'Connecting...'
else 'Unknown...'
$scope.updateUserInfo = () ->
InstalkProtocol.updateUserInfo $scope.user.info.name, $scope.user.info.color
$scope.setRoomTopic = () ->
$log.info("Updating the room topic to:", $scope.room.topic)
InstalkProtocol.setRoomTopic $scope.roomId, $scope.room.topic
$scope.sendMessage = () ->
stopTyping()
$log.debug 'Sending: ', $scope.form.msg
_autoScrollEnabled = true
InstalkProtocol.sendMessage $scope.roomId, $scope.form.msg
$scope.form.msg = ''
$scope.$on '$destroy', () ->
$log.debug("Controller is dying...")
if $scope.isDisconnected()
_retryBase = 1
if _retrier then $timeout.cancel(_retrier)
handleConnectionDrop()
startMarkingMessages = () ->
_hidden = true
_autoScrollEnabled = false
stopMarkingMessages = () ->
_hidden = false
_unread = 0
setTitle('')
disableAnimateTitle()
faviconService.reset()
$scope.$on 'visibilityChanged', (event, isHidden) ->
$log.info("Visibility Changed", event, isHidden)
if isHidden
removeMarkers()
startMarkingMessages()
else
stopMarkingMessages()
$scope.$apply()
$rootScope.$apply()
]
|
[
{
"context": ", text, html)->\n mailOptions =\n from: from #'\"WuSen\" <wusen@yidian-inc.com>' # sender address \n to",
"end": 256,
"score": 0.9931051731109619,
"start": 251,
"tag": "NAME",
"value": "WuSen"
},
{
"context": "html)->\n mailOptions =\n from: from #'\"WuSen\" ... | lib/service/reporter/mailReporter.coffee | vastwu/Codelinter | 0 | nodemailer = require 'nodemailer'
configure = require '../configure'
# Shared SMTP transport built once at module load; host/port come from the
# project-level configure module.
transporter = nodemailer.createTransport
  host: configure.MAIL_HOST
  port: configure.MAIL_PORT
###
testMail = (from, to, subject, text, html)->
mailOptions =
from: from #'"WuSen" <wusen@yidian-inc.com>' # sender address
to: to # 'wusen@yidian-inc.com' # list of receivers
subject: subject #'Hello' # Subject line
text: text # 'plain: Hello world' # plaintext body
html: html # '<b>html: Hello world</b>' # html body
transporter.sendMail mailOptions, (err, info)->
console.log 'maildone', err, info
###
# Render the lint report with the app's mail.jade template and email the
# resulting HTML through the shared transporter.
# data: template locals; app: Express app (provides render); from/to: addresses.
module.exports = (data, app, from, to)->
  app.render 'mail.jade', data, (err, html)->
    # On a render failure the error itself becomes the mail body, so the
    # recipient still sees that something went wrong.
    if err
      html = err
    mailOptions =
      from: from # sender address
      to: to # list of receivers
      cc: 'wusen@yidian-inc.com'
      subject: "CodeLint" # Subject line
      #text: text # plaintext body (unused)
      html: html # html body
    transporter.sendMail mailOptions, (err, info)->
      # Best-effort: the send result is only logged, never propagated.
      console.log 'maildone', err, info
| 68035 | nodemailer = require 'nodemailer'
configure = require '../configure'
transporter = nodemailer.createTransport
host: configure.MAIL_HOST
port: configure.MAIL_PORT
###
testMail = (from, to, subject, text, html)->
mailOptions =
from: from #'"<NAME>" <<EMAIL>>' # sender address
to: to # '<EMAIL>' # list of receivers
subject: subject #'Hello' # Subject line
text: text # 'plain: Hello world' # plaintext body
html: html # '<b>html: Hello world</b>' # html body
transporter.sendMail mailOptions, (err, info)->
console.log 'maildone', err, info
###
module.exports = (data, app, from, to)->
app.render 'mail.jade', data, (err, html)->
#console.log err, html
#res.send html
#return
if err
html = err
mailOptions =
from: from #'"<NAME>" <<EMAIL>>' # sender address
to: to # '<EMAIL>' # list of receivers
cc: '<EMAIL>'
subject: "CodeLint" #'Hello' # Subject line
#text: text # 'plain: Hello world' # plaintext body
html: html # '<b>html: Hello world</b>' # html body
transporter.sendMail mailOptions, (err, info)->
console.log 'maildone', err, info
| true | nodemailer = require 'nodemailer'
configure = require '../configure'
transporter = nodemailer.createTransport
host: configure.MAIL_HOST
port: configure.MAIL_PORT
###
testMail = (from, to, subject, text, html)->
mailOptions =
from: from #'"PI:NAME:<NAME>END_PI" <PI:EMAIL:<EMAIL>END_PI>' # sender address
to: to # 'PI:EMAIL:<EMAIL>END_PI' # list of receivers
subject: subject #'Hello' # Subject line
text: text # 'plain: Hello world' # plaintext body
html: html # '<b>html: Hello world</b>' # html body
transporter.sendMail mailOptions, (err, info)->
console.log 'maildone', err, info
###
module.exports = (data, app, from, to)->
app.render 'mail.jade', data, (err, html)->
#console.log err, html
#res.send html
#return
if err
html = err
mailOptions =
from: from #'"PI:NAME:<NAME>END_PI" <PI:EMAIL:<EMAIL>END_PI>' # sender address
to: to # 'PI:EMAIL:<EMAIL>END_PI' # list of receivers
cc: 'PI:EMAIL:<EMAIL>END_PI'
subject: "CodeLint" #'Hello' # Subject line
#text: text # 'plain: Hello world' # plaintext body
html: html # '<b>html: Hello world</b>' # html body
transporter.sendMail mailOptions, (err, info)->
console.log 'maildone', err, info
|
[
{
"context": "\n doc.id = Utils.random_string()\n doc.name = name\n doc.pair = pair\n doc.email = email\n doc",
"end": 3633,
"score": 0.993546724319458,
"start": 3629,
"tag": "NAME",
"value": "name"
}
] | lib/ajax-cat/public/AjaxCatList.coffee | hypertornado/ajax-cat | 2 | class AjaxCatList
constructor: ->
$('#new-translation-modal').hide()
$('#new-experiment-modal').hide()
$('#new-translation').on('click',@new_translation)
$('#create-new-translation').on('click',@create_new_translation)
@show_translations()
$(".example").click(
(event) =>
text = $(event.currentTarget).data("text")
$('#new-translation-text').val(text)
return false
)
$("#new-experiment-translation").click(
=>
@new_experiment_translation()
)
$("#create-new-experiment").click(
=>
@create_experiment_translation()
)
create_experiment_translation: =>
email = $("#new-experiment-email").val()
pair = $("#new-experiment-pair").val()
filter = /^([a-zA-Z0-9_\.\-])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$/
unless filter.test(email)
alert "Write your email, please."
return
$.cookie("email", email)
$.ajax "/admin/get_experiment"
data:
email: email
pair: pair
success: (data) =>
data = JSON.parse(data)
id = @add_translation(JSON.parse(data.sentences), "EXPERIMENT ##{data.task_id}, #{data.email}", data.pair, data.task_id, data.email, data)
#return
window.location = "/translation.html##{id}"
error: =>
alert "Could not find experiment for you."
new_experiment_translation: =>
$("#new-experiment-pair").html("")
$("#new-experiment-email").val($.cookie("email"))
$.ajax "/api/info"
success: (data) =>
data = JSON.parse(data)
for p in data.pairs
$("#new-experiment-pair").append("<option value='#{p}'>#{p}</option>")
$('#new-experiment-modal').modal('show')
new_translation: =>
$("#new-translation-pair").html("")
$.ajax "/api/info"
success: (data) =>
data = JSON.parse(data)
for p in data.pairs
$("#new-translation-pair").append("<option value='#{p}'>#{p}</option>")
$('#new-translation-name').val('Name')
$('#new-translation-text').val('')
$('#new-translation-modal').modal('show')
$('#new-translation-text').focus()
show_translations: =>
$("#translation-list").html('')
return unless localStorage['ac-data']
ids = JSON.parse(localStorage['ac-data'])
for i in ids
doc = JSON.parse(localStorage[i])
$("#translation-list").append("<tr><td width='100%'><a href='/translation.html##{doc.id}'>#{doc.name}</a></td><td><button data-id='#{doc.id}' class='btn btn-danger btn-mini delete-button'>delete</button></td></tr>")
$(".delete-button").click(
(event) =>
id = $(event.currentTarget).data("id")
if confirm("Delete this translation?")
AjaxCatList.delete_document(id)
@show_translations()
)
@delete_document: (id) =>
return unless localStorage['ac-data']
ids = JSON.parse(localStorage['ac-data'])
new_ids = []
for i in ids
new_ids.push(i) unless i == id
localStorage.removeItem(id)
localStorage.setItem('ac-data', JSON.stringify(new_ids))
create_new_translation: =>
text = $('#new-translation-text').val()
name = $('#new-translation-name').val()
pair = $('#new-translation-pair').val()
@add_translation(text, name, pair)
$('#new-translation-modal').modal('hide')
@show_translations()
add_translation: (text, name, pair, task_id = false, email = false, experiment_data) =>
if localStorage['ac-data']
docs = JSON.parse(localStorage['ac-data'])
else
docs = []
doc = {}
doc.id = Utils.random_string()
doc.name = name
doc.pair = pair
doc.email = email
doc.task_id = task_id
#is experiment
if jQuery.isArray(text)
doc.source = text
doc.options = JSON.parse(experiment_data.options)
else
doc.source = Utils.split_source(text)
doc.target = new Array(doc.source.length)
docs.push(doc.id)
localStorage.setItem('ac-data', JSON.stringify(docs))
localStorage.setItem(doc.id, JSON.stringify(doc))
return doc.id
| 92465 | class AjaxCatList
constructor: ->
$('#new-translation-modal').hide()
$('#new-experiment-modal').hide()
$('#new-translation').on('click',@new_translation)
$('#create-new-translation').on('click',@create_new_translation)
@show_translations()
$(".example").click(
(event) =>
text = $(event.currentTarget).data("text")
$('#new-translation-text').val(text)
return false
)
$("#new-experiment-translation").click(
=>
@new_experiment_translation()
)
$("#create-new-experiment").click(
=>
@create_experiment_translation()
)
create_experiment_translation: =>
email = $("#new-experiment-email").val()
pair = $("#new-experiment-pair").val()
filter = /^([a-zA-Z0-9_\.\-])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$/
unless filter.test(email)
alert "Write your email, please."
return
$.cookie("email", email)
$.ajax "/admin/get_experiment"
data:
email: email
pair: pair
success: (data) =>
data = JSON.parse(data)
id = @add_translation(JSON.parse(data.sentences), "EXPERIMENT ##{data.task_id}, #{data.email}", data.pair, data.task_id, data.email, data)
#return
window.location = "/translation.html##{id}"
error: =>
alert "Could not find experiment for you."
new_experiment_translation: =>
$("#new-experiment-pair").html("")
$("#new-experiment-email").val($.cookie("email"))
$.ajax "/api/info"
success: (data) =>
data = JSON.parse(data)
for p in data.pairs
$("#new-experiment-pair").append("<option value='#{p}'>#{p}</option>")
$('#new-experiment-modal').modal('show')
new_translation: =>
$("#new-translation-pair").html("")
$.ajax "/api/info"
success: (data) =>
data = JSON.parse(data)
for p in data.pairs
$("#new-translation-pair").append("<option value='#{p}'>#{p}</option>")
$('#new-translation-name').val('Name')
$('#new-translation-text').val('')
$('#new-translation-modal').modal('show')
$('#new-translation-text').focus()
show_translations: =>
$("#translation-list").html('')
return unless localStorage['ac-data']
ids = JSON.parse(localStorage['ac-data'])
for i in ids
doc = JSON.parse(localStorage[i])
$("#translation-list").append("<tr><td width='100%'><a href='/translation.html##{doc.id}'>#{doc.name}</a></td><td><button data-id='#{doc.id}' class='btn btn-danger btn-mini delete-button'>delete</button></td></tr>")
$(".delete-button").click(
(event) =>
id = $(event.currentTarget).data("id")
if confirm("Delete this translation?")
AjaxCatList.delete_document(id)
@show_translations()
)
@delete_document: (id) =>
return unless localStorage['ac-data']
ids = JSON.parse(localStorage['ac-data'])
new_ids = []
for i in ids
new_ids.push(i) unless i == id
localStorage.removeItem(id)
localStorage.setItem('ac-data', JSON.stringify(new_ids))
create_new_translation: =>
text = $('#new-translation-text').val()
name = $('#new-translation-name').val()
pair = $('#new-translation-pair').val()
@add_translation(text, name, pair)
$('#new-translation-modal').modal('hide')
@show_translations()
add_translation: (text, name, pair, task_id = false, email = false, experiment_data) =>
if localStorage['ac-data']
docs = JSON.parse(localStorage['ac-data'])
else
docs = []
doc = {}
doc.id = Utils.random_string()
doc.name = <NAME>
doc.pair = pair
doc.email = email
doc.task_id = task_id
#is experiment
if jQuery.isArray(text)
doc.source = text
doc.options = JSON.parse(experiment_data.options)
else
doc.source = Utils.split_source(text)
doc.target = new Array(doc.source.length)
docs.push(doc.id)
localStorage.setItem('ac-data', JSON.stringify(docs))
localStorage.setItem(doc.id, JSON.stringify(doc))
return doc.id
| true | class AjaxCatList
constructor: ->
$('#new-translation-modal').hide()
$('#new-experiment-modal').hide()
$('#new-translation').on('click',@new_translation)
$('#create-new-translation').on('click',@create_new_translation)
@show_translations()
$(".example").click(
(event) =>
text = $(event.currentTarget).data("text")
$('#new-translation-text').val(text)
return false
)
$("#new-experiment-translation").click(
=>
@new_experiment_translation()
)
$("#create-new-experiment").click(
=>
@create_experiment_translation()
)
create_experiment_translation: =>
email = $("#new-experiment-email").val()
pair = $("#new-experiment-pair").val()
filter = /^([a-zA-Z0-9_\.\-])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$/
unless filter.test(email)
alert "Write your email, please."
return
$.cookie("email", email)
$.ajax "/admin/get_experiment"
data:
email: email
pair: pair
success: (data) =>
data = JSON.parse(data)
id = @add_translation(JSON.parse(data.sentences), "EXPERIMENT ##{data.task_id}, #{data.email}", data.pair, data.task_id, data.email, data)
#return
window.location = "/translation.html##{id}"
error: =>
alert "Could not find experiment for you."
new_experiment_translation: =>
$("#new-experiment-pair").html("")
$("#new-experiment-email").val($.cookie("email"))
$.ajax "/api/info"
success: (data) =>
data = JSON.parse(data)
for p in data.pairs
$("#new-experiment-pair").append("<option value='#{p}'>#{p}</option>")
$('#new-experiment-modal').modal('show')
new_translation: =>
$("#new-translation-pair").html("")
$.ajax "/api/info"
success: (data) =>
data = JSON.parse(data)
for p in data.pairs
$("#new-translation-pair").append("<option value='#{p}'>#{p}</option>")
$('#new-translation-name').val('Name')
$('#new-translation-text').val('')
$('#new-translation-modal').modal('show')
$('#new-translation-text').focus()
show_translations: =>
$("#translation-list").html('')
return unless localStorage['ac-data']
ids = JSON.parse(localStorage['ac-data'])
for i in ids
doc = JSON.parse(localStorage[i])
$("#translation-list").append("<tr><td width='100%'><a href='/translation.html##{doc.id}'>#{doc.name}</a></td><td><button data-id='#{doc.id}' class='btn btn-danger btn-mini delete-button'>delete</button></td></tr>")
$(".delete-button").click(
(event) =>
id = $(event.currentTarget).data("id")
if confirm("Delete this translation?")
AjaxCatList.delete_document(id)
@show_translations()
)
@delete_document: (id) =>
return unless localStorage['ac-data']
ids = JSON.parse(localStorage['ac-data'])
new_ids = []
for i in ids
new_ids.push(i) unless i == id
localStorage.removeItem(id)
localStorage.setItem('ac-data', JSON.stringify(new_ids))
create_new_translation: =>
text = $('#new-translation-text').val()
name = $('#new-translation-name').val()
pair = $('#new-translation-pair').val()
@add_translation(text, name, pair)
$('#new-translation-modal').modal('hide')
@show_translations()
add_translation: (text, name, pair, task_id = false, email = false, experiment_data) =>
if localStorage['ac-data']
docs = JSON.parse(localStorage['ac-data'])
else
docs = []
doc = {}
doc.id = Utils.random_string()
doc.name = PI:NAME:<NAME>END_PI
doc.pair = pair
doc.email = email
doc.task_id = task_id
#is experiment
if jQuery.isArray(text)
doc.source = text
doc.options = JSON.parse(experiment_data.options)
else
doc.source = Utils.split_source(text)
doc.target = new Array(doc.source.length)
docs.push(doc.id)
localStorage.setItem('ac-data', JSON.stringify(docs))
localStorage.setItem(doc.id, JSON.stringify(doc))
return doc.id
|
[
{
"context": "\n id: 0\n idGame: 0\n idUser: 0\n name: 'Unknown'\n title: null\n gameTitle: ''\n gameTag: n",
"end": 330,
"score": 0.5309553742408752,
"start": 323,
"tag": "USERNAME",
"value": "Unknown"
},
{
"context": " identObj = new ClientIdentity\n ... | src/server/clientidentity.coffee | Atanamo/IRC-Gateway | 0 |
## Include app modules
db = require './database'
## Abstraction of a client's identity.
## Used to identify clients and to store information to be sent to other clients.
## Instances have to be created by appropriate factory methods of the class.
##
class ClientIdentity
id: 0
idGame: 0
idUser: 0
name: 'Unknown'
title: null
gameTitle: ''
gameTag: null
isIrcClient: false
securityToken: ''
constructor: (data) ->
for key, val of data
@[key] = val
@title = @name unless @title?
@gameTag = @gameTitle unless @gameTag?
@createFromIrcNick: (nickName, idGame=null) ->
identObj = new ClientIdentity
name: nickName
title: "#{nickName} (IRC)"
idGame: idGame
isIrcClient: true
return identObj
@createFromDatabase: (idUser, idGame) ->
promise = db.getClientIdentityData(idUser, idGame)
promise = promise.then (data) =>
return new ClientIdentity
id: data.id
idGame: data.idGame
idUser: data.idUser
name: data.name
title: data.title
gameTitle: data.gameTitle
gameTag: data.gameTag
securityToken: data.token
return promise
getName: ->
return @name
getID: ->
return @id
getGlobalID: ->
return "#{@idGame}_#{@id}"
getUserID: ->
return @idUser
getGameID: ->
return @idGame
getGameTitle: ->
return @gameTitle
getGameTag: ->
return @gameTag
toData: ->
# Filter idUser and securityToken, because these must be secret to clients
return {
@id
@idGame
@name
@title
@gameTitle
@gameTag
@isIrcClient
}
## Export class
module.exports = ClientIdentity
| 37078 |
## Include app modules
db = require './database'
## Abstraction of a client's identity.
## Used to identify clients and to store information to be sent to other clients.
## Instances have to be created by appropriate factory methods of the class.
##
class ClientIdentity
id: 0
idGame: 0
idUser: 0
name: 'Unknown'
title: null
gameTitle: ''
gameTag: null
isIrcClient: false
securityToken: ''
constructor: (data) ->
for key, val of data
@[key] = val
@title = @name unless @title?
@gameTag = @gameTitle unless @gameTag?
@createFromIrcNick: (nickName, idGame=null) ->
identObj = new ClientIdentity
name: <NAME>
title: "#{nickName} (IRC)"
idGame: idGame
isIrcClient: true
return identObj
@createFromDatabase: (idUser, idGame) ->
promise = db.getClientIdentityData(idUser, idGame)
promise = promise.then (data) =>
return new ClientIdentity
id: data.id
idGame: data.idGame
idUser: data.idUser
name: data.name
title: data.title
gameTitle: data.gameTitle
gameTag: data.gameTag
securityToken: data.token
return promise
getName: ->
return @name
getID: ->
return @id
getGlobalID: ->
return "#{@idGame}_#{@id}"
getUserID: ->
return @idUser
getGameID: ->
return @idGame
getGameTitle: ->
return @gameTitle
getGameTag: ->
return @gameTag
toData: ->
# Filter idUser and securityToken, because these must be secret to clients
return {
@id
@idGame
@name
@title
@gameTitle
@gameTag
@isIrcClient
}
## Export class
module.exports = ClientIdentity
| true |
## Include app modules
db = require './database'
## Abstraction of a client's identity.
## Used to identify clients and to store information to be sent to other clients.
## Instances have to be created by appropriate factory methods of the class.
##
class ClientIdentity
id: 0
idGame: 0
idUser: 0
name: 'Unknown'
title: null
gameTitle: ''
gameTag: null
isIrcClient: false
securityToken: ''
constructor: (data) ->
for key, val of data
@[key] = val
@title = @name unless @title?
@gameTag = @gameTitle unless @gameTag?
@createFromIrcNick: (nickName, idGame=null) ->
identObj = new ClientIdentity
name: PI:NAME:<NAME>END_PI
title: "#{nickName} (IRC)"
idGame: idGame
isIrcClient: true
return identObj
@createFromDatabase: (idUser, idGame) ->
promise = db.getClientIdentityData(idUser, idGame)
promise = promise.then (data) =>
return new ClientIdentity
id: data.id
idGame: data.idGame
idUser: data.idUser
name: data.name
title: data.title
gameTitle: data.gameTitle
gameTag: data.gameTag
securityToken: data.token
return promise
getName: ->
return @name
getID: ->
return @id
getGlobalID: ->
return "#{@idGame}_#{@id}"
getUserID: ->
return @idUser
getGameID: ->
return @idGame
getGameTitle: ->
return @gameTitle
getGameTag: ->
return @gameTag
toData: ->
# Filter idUser and securityToken, because these must be secret to clients
return {
@id
@idGame
@name
@title
@gameTitle
@gameTag
@isIrcClient
}
## Export class
module.exports = ClientIdentity
|
[
{
"context": ": version 0.0'\n'fileTypes': [\n 'ivan'\n]\n'name': 'Ivan'\n'patterns': [\n {\n 'captures':\n '1':\n ",
"end": 77,
"score": 0.9621492624282837,
"start": 73,
"tag": "NAME",
"value": "Ivan"
}
] | grammars/ivan.cson | nie-game/language-ivan | 0 | 'comment': 'Ivan Syntax: version 0.0'
'fileTypes': [
'ivan'
]
'name': 'Ivan'
'patterns': [
{
'captures':
'1':
'name': 'keyword.control.ivan'
'2':
'name': 'entity.name.function.scope.ivan'
'3':
'name': 'entity.name.function.ivan'
'4':
'name': 'punctuation.definition.parameters.begin.ivan'
'5':
'name': 'variable.parameter.function.ivan'
'6':
'name': 'punctuation.definition.parameters.end.ivan'
'match': '\\b(function)(?:\\s+([a-zA-Z_.:]+[.:])?([a-zA-Z_]\\w*)\\s*)?(\\()([^)]*)(\\))'
'name': 'meta.function.ivan'
}
{
'match': '(?<![\\d.])\\s0x[a-fA-F\\d]+|\\b\\d+(\\.\\d+)?([eE]-?\\d+)?|\\.\\d+([eE]-?\\d+)?'
'name': 'constant.numeric.ivan'
}
{
'begin': '\''
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '\''
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.single.ivan'
'patterns': [
{
'include': 'punctuation.definition.string.begin.ivan'
}
{
'include': 'punctuation.definition.string.end.ivan'
}
{
'match': '\\\\.'
'name': 'constant.character.escape.ivan'
}
]
}
{
'begin': '"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.double.ivan'
'patterns': [
{
'include': 'punctuation.definition.string.begin.ivan'
}
{
'include': 'punctuation.definition.string.end.ivan'
}
{
'match': '\\\\.'
'name': 'constant.character.escape.ivan'
}
]
}
{
'begin': '(?<=\\.cdef)\\s*(\\[(=*)\\[)'
'beginCaptures':
'0':
'name': 'string.quoted.other.multiline.ivan'
'1':
'name': 'punctuation.definition.string.begin.ivan'
'contentName': 'meta.embedded.ivan'
'end': '(\\]\\2\\])'
'endCaptures':
'0':
'name': 'string.quoted.other.multiline.ivan'
'1':
'name': 'punctuation.definition.string.end.ivan'
'patterns': [
{
'include': 'source.c'
}
]
}
{
'begin': '(?<!--)\\[(=*)\\['
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '\\]\\1\\]'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.other.multiline.ivan'
}
{
'begin': '--\\[(=*)\\['
'captures':
'0':
'name': 'punctuation.definition.comment.ivan'
'end': '\\]\\1\\]'
'name': 'comment.block.ivan'
}
{
'begin': '(^[ \\t]+)?(?=--(?!\\[(=*)\\[))'
'beginCaptures':
'1':
'name': 'punctuation.whitespace.comment.leading.ivan'
'end': '(?!\\G)'
'patterns': [
{
'begin': '--'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.ivan'
'end': '\\n'
'name': 'comment.line.double-dash.ivan'
}
]
}
{
'match': '\\b(and|or|not|break|do|else|for|if|elseif|return|then|repeat|while|until|end|function|local|in|goto|ivan|ivanmethod)\\b'
'name': 'keyword.control.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b([A-Z_]+|false|nil|true|math\\.(pi|huge))\\b|(?<![.])\\.{3}(?!\\.)'
'name': 'constant.language.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(self)\\b'
'name': 'variable.language.self.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(assert|collectgarbage|dofile|error|getfenv|getmetatable|ipairs|loadfile|loadstring|module|next|pairs|pcall|print|rawequal|rawget|rawset|require|select|setfenv|setmetatable|tonumber|tostring|type|unpack|xpcall)\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(coroutine\\.(create|resume|running|status|wrap|yield)|string\\.(byte|char|dump|find|format|gmatch|gsub|len|lower|match|rep|reverse|sub|upper)|table\\.(concat|insert|maxn|remove|sort)|math\\.(abs|acos|asin|atan2?|ceil|cosh?|deg|exp|floor|fmod|frexp|ldexp|log|log10|max|min|modf|pow|rad|random|randomseed|sinh?|sqrt|tanh?)|io\\.(close|flush|input|lines|open|output|popen|read|tmpfile|type|write)|os\\.(clock|date|difftime|execute|exit|getenv|remove|rename|setlocale|time|tmpname)|package\\.(cpath|loaded|loadlib|path|preload|seeall)|debug\\.(debug|[gs]etfenv|[gs]ethook|getinfo|[gs]etlocal|[gs]etmetatable|getregistry|[gs]etupvalue|traceback))\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.library.ivan'
}
{
'match': '\\b([A-Za-z_]\\w*)\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.any-method.ivan'
}
{
'match': '(?<=[^.]\\.|:)\\b([A-Za-z_]\\w*)'
'name': 'variable.other.ivan'
}
{
'match': '\\+|-|%|#|\\*|\\/|\\^|==?|~=|<=?|>=?|(?<!\\.)\\.{2}(?!\\.)'
'name': 'keyword.operator.ivan'
}
]
'scopeName': 'source.ivan'
| 5807 | 'comment': 'Ivan Syntax: version 0.0'
'fileTypes': [
'ivan'
]
'name': '<NAME>'
'patterns': [
{
'captures':
'1':
'name': 'keyword.control.ivan'
'2':
'name': 'entity.name.function.scope.ivan'
'3':
'name': 'entity.name.function.ivan'
'4':
'name': 'punctuation.definition.parameters.begin.ivan'
'5':
'name': 'variable.parameter.function.ivan'
'6':
'name': 'punctuation.definition.parameters.end.ivan'
'match': '\\b(function)(?:\\s+([a-zA-Z_.:]+[.:])?([a-zA-Z_]\\w*)\\s*)?(\\()([^)]*)(\\))'
'name': 'meta.function.ivan'
}
{
'match': '(?<![\\d.])\\s0x[a-fA-F\\d]+|\\b\\d+(\\.\\d+)?([eE]-?\\d+)?|\\.\\d+([eE]-?\\d+)?'
'name': 'constant.numeric.ivan'
}
{
'begin': '\''
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '\''
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.single.ivan'
'patterns': [
{
'include': 'punctuation.definition.string.begin.ivan'
}
{
'include': 'punctuation.definition.string.end.ivan'
}
{
'match': '\\\\.'
'name': 'constant.character.escape.ivan'
}
]
}
{
'begin': '"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.double.ivan'
'patterns': [
{
'include': 'punctuation.definition.string.begin.ivan'
}
{
'include': 'punctuation.definition.string.end.ivan'
}
{
'match': '\\\\.'
'name': 'constant.character.escape.ivan'
}
]
}
{
'begin': '(?<=\\.cdef)\\s*(\\[(=*)\\[)'
'beginCaptures':
'0':
'name': 'string.quoted.other.multiline.ivan'
'1':
'name': 'punctuation.definition.string.begin.ivan'
'contentName': 'meta.embedded.ivan'
'end': '(\\]\\2\\])'
'endCaptures':
'0':
'name': 'string.quoted.other.multiline.ivan'
'1':
'name': 'punctuation.definition.string.end.ivan'
'patterns': [
{
'include': 'source.c'
}
]
}
{
'begin': '(?<!--)\\[(=*)\\['
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '\\]\\1\\]'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.other.multiline.ivan'
}
{
'begin': '--\\[(=*)\\['
'captures':
'0':
'name': 'punctuation.definition.comment.ivan'
'end': '\\]\\1\\]'
'name': 'comment.block.ivan'
}
{
'begin': '(^[ \\t]+)?(?=--(?!\\[(=*)\\[))'
'beginCaptures':
'1':
'name': 'punctuation.whitespace.comment.leading.ivan'
'end': '(?!\\G)'
'patterns': [
{
'begin': '--'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.ivan'
'end': '\\n'
'name': 'comment.line.double-dash.ivan'
}
]
}
{
'match': '\\b(and|or|not|break|do|else|for|if|elseif|return|then|repeat|while|until|end|function|local|in|goto|ivan|ivanmethod)\\b'
'name': 'keyword.control.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b([A-Z_]+|false|nil|true|math\\.(pi|huge))\\b|(?<![.])\\.{3}(?!\\.)'
'name': 'constant.language.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(self)\\b'
'name': 'variable.language.self.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(assert|collectgarbage|dofile|error|getfenv|getmetatable|ipairs|loadfile|loadstring|module|next|pairs|pcall|print|rawequal|rawget|rawset|require|select|setfenv|setmetatable|tonumber|tostring|type|unpack|xpcall)\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(coroutine\\.(create|resume|running|status|wrap|yield)|string\\.(byte|char|dump|find|format|gmatch|gsub|len|lower|match|rep|reverse|sub|upper)|table\\.(concat|insert|maxn|remove|sort)|math\\.(abs|acos|asin|atan2?|ceil|cosh?|deg|exp|floor|fmod|frexp|ldexp|log|log10|max|min|modf|pow|rad|random|randomseed|sinh?|sqrt|tanh?)|io\\.(close|flush|input|lines|open|output|popen|read|tmpfile|type|write)|os\\.(clock|date|difftime|execute|exit|getenv|remove|rename|setlocale|time|tmpname)|package\\.(cpath|loaded|loadlib|path|preload|seeall)|debug\\.(debug|[gs]etfenv|[gs]ethook|getinfo|[gs]etlocal|[gs]etmetatable|getregistry|[gs]etupvalue|traceback))\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.library.ivan'
}
{
'match': '\\b([A-Za-z_]\\w*)\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.any-method.ivan'
}
{
'match': '(?<=[^.]\\.|:)\\b([A-Za-z_]\\w*)'
'name': 'variable.other.ivan'
}
{
'match': '\\+|-|%|#|\\*|\\/|\\^|==?|~=|<=?|>=?|(?<!\\.)\\.{2}(?!\\.)'
'name': 'keyword.operator.ivan'
}
]
'scopeName': 'source.ivan'
| true | 'comment': 'Ivan Syntax: version 0.0'
'fileTypes': [
'ivan'
]
'name': 'PI:NAME:<NAME>END_PI'
'patterns': [
{
'captures':
'1':
'name': 'keyword.control.ivan'
'2':
'name': 'entity.name.function.scope.ivan'
'3':
'name': 'entity.name.function.ivan'
'4':
'name': 'punctuation.definition.parameters.begin.ivan'
'5':
'name': 'variable.parameter.function.ivan'
'6':
'name': 'punctuation.definition.parameters.end.ivan'
'match': '\\b(function)(?:\\s+([a-zA-Z_.:]+[.:])?([a-zA-Z_]\\w*)\\s*)?(\\()([^)]*)(\\))'
'name': 'meta.function.ivan'
}
{
'match': '(?<![\\d.])\\s0x[a-fA-F\\d]+|\\b\\d+(\\.\\d+)?([eE]-?\\d+)?|\\.\\d+([eE]-?\\d+)?'
'name': 'constant.numeric.ivan'
}
{
'begin': '\''
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '\''
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.single.ivan'
'patterns': [
{
'include': 'punctuation.definition.string.begin.ivan'
}
{
'include': 'punctuation.definition.string.end.ivan'
}
{
'match': '\\\\.'
'name': 'constant.character.escape.ivan'
}
]
}
{
'begin': '"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.double.ivan'
'patterns': [
{
'include': 'punctuation.definition.string.begin.ivan'
}
{
'include': 'punctuation.definition.string.end.ivan'
}
{
'match': '\\\\.'
'name': 'constant.character.escape.ivan'
}
]
}
{
'begin': '(?<=\\.cdef)\\s*(\\[(=*)\\[)'
'beginCaptures':
'0':
'name': 'string.quoted.other.multiline.ivan'
'1':
'name': 'punctuation.definition.string.begin.ivan'
'contentName': 'meta.embedded.ivan'
'end': '(\\]\\2\\])'
'endCaptures':
'0':
'name': 'string.quoted.other.multiline.ivan'
'1':
'name': 'punctuation.definition.string.end.ivan'
'patterns': [
{
'include': 'source.c'
}
]
}
{
'begin': '(?<!--)\\[(=*)\\['
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.ivan'
'end': '\\]\\1\\]'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.ivan'
'name': 'string.quoted.other.multiline.ivan'
}
{
'begin': '--\\[(=*)\\['
'captures':
'0':
'name': 'punctuation.definition.comment.ivan'
'end': '\\]\\1\\]'
'name': 'comment.block.ivan'
}
{
'begin': '(^[ \\t]+)?(?=--(?!\\[(=*)\\[))'
'beginCaptures':
'1':
'name': 'punctuation.whitespace.comment.leading.ivan'
'end': '(?!\\G)'
'patterns': [
{
'begin': '--'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.ivan'
'end': '\\n'
'name': 'comment.line.double-dash.ivan'
}
]
}
{
'match': '\\b(and|or|not|break|do|else|for|if|elseif|return|then|repeat|while|until|end|function|local|in|goto|ivan|ivanmethod)\\b'
'name': 'keyword.control.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b([A-Z_]+|false|nil|true|math\\.(pi|huge))\\b|(?<![.])\\.{3}(?!\\.)'
'name': 'constant.language.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(self)\\b'
'name': 'variable.language.self.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(assert|collectgarbage|dofile|error|getfenv|getmetatable|ipairs|loadfile|loadstring|module|next|pairs|pcall|print|rawequal|rawget|rawset|require|select|setfenv|setmetatable|tonumber|tostring|type|unpack|xpcall)\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.ivan'
}
{
'match': '(?<![^.]\\.|:)\\b(coroutine\\.(create|resume|running|status|wrap|yield)|string\\.(byte|char|dump|find|format|gmatch|gsub|len|lower|match|rep|reverse|sub|upper)|table\\.(concat|insert|maxn|remove|sort)|math\\.(abs|acos|asin|atan2?|ceil|cosh?|deg|exp|floor|fmod|frexp|ldexp|log|log10|max|min|modf|pow|rad|random|randomseed|sinh?|sqrt|tanh?)|io\\.(close|flush|input|lines|open|output|popen|read|tmpfile|type|write)|os\\.(clock|date|difftime|execute|exit|getenv|remove|rename|setlocale|time|tmpname)|package\\.(cpath|loaded|loadlib|path|preload|seeall)|debug\\.(debug|[gs]etfenv|[gs]ethook|getinfo|[gs]etlocal|[gs]etmetatable|getregistry|[gs]etupvalue|traceback))\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.library.ivan'
}
{
'match': '\\b([A-Za-z_]\\w*)\\b(?=\\s*(?:[({"\']|\\[\\[))'
'name': 'support.function.any-method.ivan'
}
{
'match': '(?<=[^.]\\.|:)\\b([A-Za-z_]\\w*)'
'name': 'variable.other.ivan'
}
{
'match': '\\+|-|%|#|\\*|\\/|\\^|==?|~=|<=?|>=?|(?<!\\.)\\.{2}(?!\\.)'
'name': 'keyword.operator.ivan'
}
]
'scopeName': 'source.ivan'
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9990363121032715,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-http-contentLength0.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
http = require("http")
# Simple test of Node's HTTP Client choking on a response
# with a 'Content-Length: 0 ' response header.
# I.E. a space character after the 'Content-Length' throws an `error` event.
s = http.createServer((req, res) ->
res.writeHead 200,
"Content-Length": "0 "
res.end()
return
)
s.listen common.PORT, ->
request = http.request(
port: common.PORT
, (response) ->
console.log "STATUS: " + response.statusCode
s.close()
response.resume()
return
)
request.end()
return
| 222703 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
http = require("http")
# Simple test of Node's HTTP Client choking on a response
# with a 'Content-Length: 0 ' response header.
# I.E. a space character after the 'Content-Length' throws an `error` event.
s = http.createServer((req, res) ->
res.writeHead 200,
"Content-Length": "0 "
res.end()
return
)
s.listen common.PORT, ->
request = http.request(
port: common.PORT
, (response) ->
console.log "STATUS: " + response.statusCode
s.close()
response.resume()
return
)
request.end()
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
http = require("http")
# Simple test of Node's HTTP Client choking on a response
# with a 'Content-Length: 0 ' response header.
# I.E. a space character after the 'Content-Length' throws an `error` event.
s = http.createServer((req, res) ->
res.writeHead 200,
"Content-Length": "0 "
res.end()
return
)
s.listen common.PORT, ->
request = http.request(
port: common.PORT
, (response) ->
console.log "STATUS: " + response.statusCode
s.close()
response.resume()
return
)
request.end()
return
|
[
{
"context": "me, password}, scheme).error is null\n @username = username\n @password = password\n @id = ++id\n return @\n\nU",
"end": 250,
"score": 0.9960722923278809,
"start": 242,
"tag": "USERNAME",
"value": "username"
},
{
"context": "error is null\n @username = username\n ... | user.coffee | RoryDuncan/csci2-final-lab | 1 | Joi = require 'Joi'
scheme =
username: Joi.string().max(64).required()
password: Joi.string().max(255).required()
id = 0
User = (username, password) ->
@isValid = Joi.validate({username, password}, scheme).error is null
@username = username
@password = password
@id = ++id
return @
User::save = () ->
throw new Error("Not Implemented")
User::isValid = false
module.exports = User
| 56865 | Joi = require 'Joi'
scheme =
username: Joi.string().max(64).required()
password: Joi.string().max(255).required()
id = 0
User = (username, password) ->
@isValid = Joi.validate({username, password}, scheme).error is null
@username = username
@password = <PASSWORD>
@id = ++id
return @
User::save = () ->
throw new Error("Not Implemented")
User::isValid = false
module.exports = User
| true | Joi = require 'Joi'
scheme =
username: Joi.string().max(64).required()
password: Joi.string().max(255).required()
id = 0
User = (username, password) ->
@isValid = Joi.validate({username, password}, scheme).error is null
@username = username
@password = PI:PASSWORD:<PASSWORD>END_PI
@id = ++id
return @
User::save = () ->
throw new Error("Not Implemented")
User::isValid = false
module.exports = User
|
[
{
"context": "orNames[index]\n creatorNames[index] = \"Anonymous\"\n @simulationStatus += (if index != 0 th",
"end": 3810,
"score": 0.5058824419975281,
"start": 3801,
"tag": "NAME",
"value": "Anonymous"
}
] | app/views/play/ladder_view.coffee | q276188500/codecombat | 1 | RootView = require 'views/kinds/RootView'
Level = require 'models/Level'
Simulator = require 'lib/simulator/Simulator'
LevelSession = require 'models/LevelSession'
CocoCollection = require 'models/CocoCollection'
{teamDataFromLevel} = require './ladder/utils'
{me} = require 'lib/auth'
application = require 'application'
LadderTabView = require './ladder/ladder_tab'
MyMatchesTabView = require './ladder/my_matches_tab'
LadderPlayModal = require './ladder/play_modal'
HIGHEST_SCORE = 1000000
class LevelSessionsCollection extends CocoCollection
url: ''
model: LevelSession
constructor: (levelID) ->
super()
@url = "/db/level/#{levelID}/my_sessions"
module.exports = class LadderView extends RootView
id: 'ladder-view'
template: require 'templates/play/ladder'
subscriptions:
'application:idle-changed': 'onIdleChanged'
events:
'click #simulate-button': 'onSimulateButtonClick'
'click #simulate-all-button': 'onSimulateAllButtonClick'
'click .play-button': 'onClickPlayButton'
'click a': 'onClickedLink'
constructor: (options, @levelID) ->
super(options)
@level = new Level(_id:@levelID)
@level.fetch()
@sessions = new LevelSessionsCollection(levelID)
@sessions.fetch({})
@addResourceToLoad(@sessions, 'your_sessions')
@addResourceToLoad(@level, 'level')
@simulator = new Simulator()
@listenTo(@simulator, 'statusUpdate', @updateSimulationStatus)
@teams = []
onLoaded: ->
@teams = teamDataFromLevel @level
super()
getRenderData: ->
ctx = super()
ctx.level = @level
ctx.link = "/play/level/#{@level.get('name')}"
ctx.simulationStatus = @simulationStatus
ctx.teams = @teams
ctx.levelID = @levelID
ctx.levelDescription = marked(@level.get('description')) if @level.get('description')
ctx
afterRender: ->
super()
return if @loading()
@insertSubView(@ladderTab = new LadderTabView({}, @level, @sessions))
@insertSubView(@myMatchesTab = new MyMatchesTabView({}, @level, @sessions))
@refreshInterval = setInterval(@fetchSessionsAndRefreshViews.bind(@), 10 * 1000)
hash = document.location.hash[1..] if document.location.hash
if hash and not (hash in ['my-matches', 'simulate', 'ladder'])
@showPlayModal(hash) if @sessions.loaded
fetchSessionsAndRefreshViews: ->
return if @destroyed or application.userIsIdle or @$el.find('#simulate.active').length or (new Date() - 2000 < @lastRefreshTime) or @loading()
@sessions.fetch({"success": @refreshViews})
refreshViews: =>
return if @destroyed or application.userIsIdle
@lastRefreshTime = new Date()
@ladderTab.refreshLadder()
@myMatchesTab.refreshMatches()
console.log "Refreshed sessions for ladder and matches."
onIdleChanged: (e) ->
@fetchSessionsAndRefreshViews() unless e.idle
# Simulations
onSimulateAllButtonClick: (e) ->
submitIDs = _.pluck @leaderboards[@teams[0].id].topPlayers.models, "id"
for ID in submitIDs
$.ajax
url: '/queue/scoring'
method: 'POST'
data:
session: ID
$("#simulate-all-button").prop "disabled", true
$("#simulate-all-button").text "Submitted all!"
onSimulateButtonClick: (e) ->
$("#simulate-button").prop "disabled",true
$("#simulate-button").text "Simulating..."
@simulator.fetchAndSimulateTask()
updateSimulationStatus: (simulationStatus, sessions) ->
@simulationStatus = simulationStatus
try
if sessions?
#TODO: Fetch names from Redis, the creatorName is denormalized
creatorNames = (session.creatorName for session in sessions)
@simulationStatus = "Simulating game between "
for index in [0...creatorNames.length]
unless creatorNames[index]
creatorNames[index] = "Anonymous"
@simulationStatus += (if index != 0 then " and " else "") + creatorNames[index]
@simulationStatus += "..."
catch e
console.log "There was a problem with the named simulation status: #{e}"
$("#simulation-status-text").text @simulationStatus
onClickPlayButton: (e) ->
@showPlayModal($(e.target).closest('.play-button').data('team'))
resimulateAllSessions: ->
postData =
originalLevelID: @level.get('original')
levelMajorVersion: @level.get('version').major
console.log postData
$.ajax
url: '/queue/scoring/resimulateAllSessions'
method: 'POST'
data: postData
complete: (jqxhr) ->
console.log jqxhr.responseText
showPlayModal: (teamID) ->
return @showApologeticSignupModal() if me.get('anonymous')
session = (s for s in @sessions.models when s.get('team') is teamID)[0]
modal = new LadderPlayModal({}, @level, session, teamID)
@openModalView modal
showApologeticSignupModal: ->
SignupModal = require 'views/modal/signup_modal'
@openModalView(new SignupModal({showRequiredError:true}))
onClickedLink: (e) ->
link = $(e.target).closest('a').attr('href')
if link?.startsWith('/play/level') and me.get('anonymous')
e.stopPropagation()
e.preventDefault()
@showApologeticSignupModal()
destroy: ->
clearInterval @refreshInterval
@simulator.destroy()
super()
| 83622 | RootView = require 'views/kinds/RootView'
Level = require 'models/Level'
Simulator = require 'lib/simulator/Simulator'
LevelSession = require 'models/LevelSession'
CocoCollection = require 'models/CocoCollection'
{teamDataFromLevel} = require './ladder/utils'
{me} = require 'lib/auth'
application = require 'application'
LadderTabView = require './ladder/ladder_tab'
MyMatchesTabView = require './ladder/my_matches_tab'
LadderPlayModal = require './ladder/play_modal'
HIGHEST_SCORE = 1000000
class LevelSessionsCollection extends CocoCollection
url: ''
model: LevelSession
constructor: (levelID) ->
super()
@url = "/db/level/#{levelID}/my_sessions"
module.exports = class LadderView extends RootView
id: 'ladder-view'
template: require 'templates/play/ladder'
subscriptions:
'application:idle-changed': 'onIdleChanged'
events:
'click #simulate-button': 'onSimulateButtonClick'
'click #simulate-all-button': 'onSimulateAllButtonClick'
'click .play-button': 'onClickPlayButton'
'click a': 'onClickedLink'
constructor: (options, @levelID) ->
super(options)
@level = new Level(_id:@levelID)
@level.fetch()
@sessions = new LevelSessionsCollection(levelID)
@sessions.fetch({})
@addResourceToLoad(@sessions, 'your_sessions')
@addResourceToLoad(@level, 'level')
@simulator = new Simulator()
@listenTo(@simulator, 'statusUpdate', @updateSimulationStatus)
@teams = []
onLoaded: ->
@teams = teamDataFromLevel @level
super()
getRenderData: ->
ctx = super()
ctx.level = @level
ctx.link = "/play/level/#{@level.get('name')}"
ctx.simulationStatus = @simulationStatus
ctx.teams = @teams
ctx.levelID = @levelID
ctx.levelDescription = marked(@level.get('description')) if @level.get('description')
ctx
afterRender: ->
super()
return if @loading()
@insertSubView(@ladderTab = new LadderTabView({}, @level, @sessions))
@insertSubView(@myMatchesTab = new MyMatchesTabView({}, @level, @sessions))
@refreshInterval = setInterval(@fetchSessionsAndRefreshViews.bind(@), 10 * 1000)
hash = document.location.hash[1..] if document.location.hash
if hash and not (hash in ['my-matches', 'simulate', 'ladder'])
@showPlayModal(hash) if @sessions.loaded
fetchSessionsAndRefreshViews: ->
return if @destroyed or application.userIsIdle or @$el.find('#simulate.active').length or (new Date() - 2000 < @lastRefreshTime) or @loading()
@sessions.fetch({"success": @refreshViews})
refreshViews: =>
return if @destroyed or application.userIsIdle
@lastRefreshTime = new Date()
@ladderTab.refreshLadder()
@myMatchesTab.refreshMatches()
console.log "Refreshed sessions for ladder and matches."
onIdleChanged: (e) ->
@fetchSessionsAndRefreshViews() unless e.idle
# Simulations
onSimulateAllButtonClick: (e) ->
submitIDs = _.pluck @leaderboards[@teams[0].id].topPlayers.models, "id"
for ID in submitIDs
$.ajax
url: '/queue/scoring'
method: 'POST'
data:
session: ID
$("#simulate-all-button").prop "disabled", true
$("#simulate-all-button").text "Submitted all!"
onSimulateButtonClick: (e) ->
$("#simulate-button").prop "disabled",true
$("#simulate-button").text "Simulating..."
@simulator.fetchAndSimulateTask()
updateSimulationStatus: (simulationStatus, sessions) ->
@simulationStatus = simulationStatus
try
if sessions?
#TODO: Fetch names from Redis, the creatorName is denormalized
creatorNames = (session.creatorName for session in sessions)
@simulationStatus = "Simulating game between "
for index in [0...creatorNames.length]
unless creatorNames[index]
creatorNames[index] = "<NAME>"
@simulationStatus += (if index != 0 then " and " else "") + creatorNames[index]
@simulationStatus += "..."
catch e
console.log "There was a problem with the named simulation status: #{e}"
$("#simulation-status-text").text @simulationStatus
onClickPlayButton: (e) ->
@showPlayModal($(e.target).closest('.play-button').data('team'))
resimulateAllSessions: ->
postData =
originalLevelID: @level.get('original')
levelMajorVersion: @level.get('version').major
console.log postData
$.ajax
url: '/queue/scoring/resimulateAllSessions'
method: 'POST'
data: postData
complete: (jqxhr) ->
console.log jqxhr.responseText
showPlayModal: (teamID) ->
return @showApologeticSignupModal() if me.get('anonymous')
session = (s for s in @sessions.models when s.get('team') is teamID)[0]
modal = new LadderPlayModal({}, @level, session, teamID)
@openModalView modal
showApologeticSignupModal: ->
SignupModal = require 'views/modal/signup_modal'
@openModalView(new SignupModal({showRequiredError:true}))
onClickedLink: (e) ->
link = $(e.target).closest('a').attr('href')
if link?.startsWith('/play/level') and me.get('anonymous')
e.stopPropagation()
e.preventDefault()
@showApologeticSignupModal()
destroy: ->
clearInterval @refreshInterval
@simulator.destroy()
super()
| true | RootView = require 'views/kinds/RootView'
Level = require 'models/Level'
Simulator = require 'lib/simulator/Simulator'
LevelSession = require 'models/LevelSession'
CocoCollection = require 'models/CocoCollection'
{teamDataFromLevel} = require './ladder/utils'
{me} = require 'lib/auth'
application = require 'application'
LadderTabView = require './ladder/ladder_tab'
MyMatchesTabView = require './ladder/my_matches_tab'
LadderPlayModal = require './ladder/play_modal'
HIGHEST_SCORE = 1000000
class LevelSessionsCollection extends CocoCollection
url: ''
model: LevelSession
constructor: (levelID) ->
super()
@url = "/db/level/#{levelID}/my_sessions"
module.exports = class LadderView extends RootView
id: 'ladder-view'
template: require 'templates/play/ladder'
subscriptions:
'application:idle-changed': 'onIdleChanged'
events:
'click #simulate-button': 'onSimulateButtonClick'
'click #simulate-all-button': 'onSimulateAllButtonClick'
'click .play-button': 'onClickPlayButton'
'click a': 'onClickedLink'
constructor: (options, @levelID) ->
super(options)
@level = new Level(_id:@levelID)
@level.fetch()
@sessions = new LevelSessionsCollection(levelID)
@sessions.fetch({})
@addResourceToLoad(@sessions, 'your_sessions')
@addResourceToLoad(@level, 'level')
@simulator = new Simulator()
@listenTo(@simulator, 'statusUpdate', @updateSimulationStatus)
@teams = []
onLoaded: ->
@teams = teamDataFromLevel @level
super()
getRenderData: ->
ctx = super()
ctx.level = @level
ctx.link = "/play/level/#{@level.get('name')}"
ctx.simulationStatus = @simulationStatus
ctx.teams = @teams
ctx.levelID = @levelID
ctx.levelDescription = marked(@level.get('description')) if @level.get('description')
ctx
afterRender: ->
super()
return if @loading()
@insertSubView(@ladderTab = new LadderTabView({}, @level, @sessions))
@insertSubView(@myMatchesTab = new MyMatchesTabView({}, @level, @sessions))
@refreshInterval = setInterval(@fetchSessionsAndRefreshViews.bind(@), 10 * 1000)
hash = document.location.hash[1..] if document.location.hash
if hash and not (hash in ['my-matches', 'simulate', 'ladder'])
@showPlayModal(hash) if @sessions.loaded
fetchSessionsAndRefreshViews: ->
return if @destroyed or application.userIsIdle or @$el.find('#simulate.active').length or (new Date() - 2000 < @lastRefreshTime) or @loading()
@sessions.fetch({"success": @refreshViews})
refreshViews: =>
return if @destroyed or application.userIsIdle
@lastRefreshTime = new Date()
@ladderTab.refreshLadder()
@myMatchesTab.refreshMatches()
console.log "Refreshed sessions for ladder and matches."
onIdleChanged: (e) ->
@fetchSessionsAndRefreshViews() unless e.idle
# Simulations
onSimulateAllButtonClick: (e) ->
submitIDs = _.pluck @leaderboards[@teams[0].id].topPlayers.models, "id"
for ID in submitIDs
$.ajax
url: '/queue/scoring'
method: 'POST'
data:
session: ID
$("#simulate-all-button").prop "disabled", true
$("#simulate-all-button").text "Submitted all!"
onSimulateButtonClick: (e) ->
$("#simulate-button").prop "disabled",true
$("#simulate-button").text "Simulating..."
@simulator.fetchAndSimulateTask()
updateSimulationStatus: (simulationStatus, sessions) ->
@simulationStatus = simulationStatus
try
if sessions?
#TODO: Fetch names from Redis, the creatorName is denormalized
creatorNames = (session.creatorName for session in sessions)
@simulationStatus = "Simulating game between "
for index in [0...creatorNames.length]
unless creatorNames[index]
creatorNames[index] = "PI:NAME:<NAME>END_PI"
@simulationStatus += (if index != 0 then " and " else "") + creatorNames[index]
@simulationStatus += "..."
catch e
console.log "There was a problem with the named simulation status: #{e}"
$("#simulation-status-text").text @simulationStatus
onClickPlayButton: (e) ->
@showPlayModal($(e.target).closest('.play-button').data('team'))
resimulateAllSessions: ->
postData =
originalLevelID: @level.get('original')
levelMajorVersion: @level.get('version').major
console.log postData
$.ajax
url: '/queue/scoring/resimulateAllSessions'
method: 'POST'
data: postData
complete: (jqxhr) ->
console.log jqxhr.responseText
showPlayModal: (teamID) ->
return @showApologeticSignupModal() if me.get('anonymous')
session = (s for s in @sessions.models when s.get('team') is teamID)[0]
modal = new LadderPlayModal({}, @level, session, teamID)
@openModalView modal
showApologeticSignupModal: ->
SignupModal = require 'views/modal/signup_modal'
@openModalView(new SignupModal({showRequiredError:true}))
onClickedLink: (e) ->
link = $(e.target).closest('a').attr('href')
if link?.startsWith('/play/level') and me.get('anonymous')
e.stopPropagation()
e.preventDefault()
@showApologeticSignupModal()
destroy: ->
clearInterval @refreshInterval
@simulator.destroy()
super()
|
[
{
"context": "ne )->\n\n\t\t\tquery =\n\t\t\t\ttoken:\n\t\t\t\t\t\"startsWith\": \"desfire-801e\"\n\t\t\topt =\n\t\t\t\tlimit: 0\n\t\t\t\t_customQueryFilter:\n\t\t",
"end": 6835,
"score": 0.9815346598625183,
"start": 6823,
"tag": "PASSWORD",
"value": "desfire-801e"
},
{
"context": "n... | _src/test/general.coffee | mpneuried/mysql-factory | 1 | _CONFIG = require './config'
MySQLFactory = require( "../." )
_map = require( "lodash/map" )
_difference = require( "lodash/difference" )
should = require('should')
moment = require('moment')
_startTime = Date.now() - 1000 * 60
_utils = require( "../lib/utils" )
DBFactory = null
cbMulti = ( count, cb )->
return ->
count--
if count is 0
cb()
return
console.log "\nCONFIG:\n", _CONFIG.mysql
describe "----- MySQL Factory TESTS -----", ->
before ( done )->
done()
return
describe 'Initialization', ->
it 'init factory', ( done )->
DBFactory = new MySQLFactory( _CONFIG.mysql, _CONFIG.tables )
done()
return
describe 'Factory Tests', ->
it "List the existing tables", ( done )->
DBFactory.list ( err, tables )->
throw err if err
tables.should.eql( Object.keys( _CONFIG.tables ) )
done()
return
it "Get a table", ( done )->
_cnf = _CONFIG.tables[ _CONFIG.test.singleCreateTableTest ]
_tbl = DBFactory.get( _CONFIG.test.singleCreateTableTest )
_tbl.should.exist
_tbl?.name?.should.eql( _cnf.name )
done()
return
it "Try to get a not existend table", ( done )->
_tbl = DBFactory.get( "notexistend" )
should.not.exist( _tbl )
done()
return
it "has for existend table", ( done )->
_has = DBFactory.has( _CONFIG.test.singleCreateTableTest )
_has.should.be.true
done()
return
it "has for not existend table", ( done )->
_has = DBFactory.has( "notexistend" )
_has.should.be.false
done()
return
return
describe 'Table Tests', ->
tableU = null # user
tableT = null # tokens
tableC = null # contracts
fieldsTest = [ "id", "firstname" ]
allFields = Object.keys( _CONFIG.tables[ _CONFIG.test.getTest.tbl ].fields )
_saveUserId = null
_saveUserT = 0
_testUsers = []
it "get test table `#{_CONFIG.test.getTest.tbl}`", ( done )->
tableU = DBFactory.get( _CONFIG.test.getTest.tbl )
tableU?.name?.should.eql( _CONFIG.test.getTest.tbl )
done()
return
it "get test table `#{_CONFIG.test.tokenTable}`", ( done )->
tableT = DBFactory.get( _CONFIG.test.tokenTable )
tableT?.name?.should.eql( _CONFIG.test.tokenTable )
done()
return
it "get test table `#{_CONFIG.test.contractsTable}`", ( done )->
tableC = DBFactory.get( _CONFIG.test.contractsTable )
tableC?.name?.should.eql( _CONFIG.test.contractsTable )
done()
return
it "TABLE.GET", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get _id, ( err, item )->
throw err if err
should.exist( item.id )
item.id.should.equal( _id )
done()
return
return
it "TABLE.GET fields as array", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: fieldsTest )
return
it "TABLE.GET fields as string", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: fieldsTest.join( ", " ) )
return
it "TABLE.GET fields as set", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: "set:test" )
return
it "TABLE.GET fields `all`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,allFields).should.have.length(0)
done()
return
, fields: "all" )
return
it "TABLE.GET fields `*`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,allFields).should.have.length(0)
done()
return
, fields: "all" )
return
it "TABLE.GET fields `idonly`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,[ "id" ]).should.have.length(0)
done()
return
, fields: "idonly" )
return
it "TABLE.GET fields by filter function", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,[ "id", "_t", "_u" ]).should.have.length(0)
done()
return
, fields: ( (fld)->
fld.name.length <= 2 ) )
return
it "TABLE.MGET", ( done )->
_ids = JSON.parse( JSON.stringify( _CONFIG.test.mgetTest.id ) )
tableU.mget _ids, ( err, items )->
throw err if err
items.should.have.length(2)
_difference(_CONFIG.test.mgetTest.id,_map( items, "id" ) ).should.have.length(0)
done()
return
return
it "TABLE.MGET empty", ( done )->
tableU.mget [], ( err, items )->
throw err if err
items.should.have.length(0)
done()
return
return
it "TABLE.FIND all", ( done )->
@timeout( 6000 )
tableU.find {}, ( err, items )->
throw err if err
items.should.have.length( _CONFIG.tables.Users.limit)
done()
return
return
it "TABLE.FIND query", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find query, ( err, items )->
throw err if err
items.should.have.length( _CONFIG.test.findTest.count )
done()
return
return
it "TABLE.FIND with limit", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
query.limit = 1
tableU.find( query, ( err, items )->
throw err if err
items.should.have.length(1)
done()
return
, {} )
return
it "TABLE.FIND with limit by option", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find( query, ( err, items )->
throw err if err
items.should.have.length(1)
done()
return
, { limit: 1 } )
return
it "TABLE.FIND with `idonly`", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find( query, ( err, items )->
throw err if err
items.should.be.an.instanceOf(Array)
for id in items
id.should.be.a.String
done()
return
, { fields: "idonly" } )
return
it "TABLE.FIND studio tokens with subquery", ( done )->
query =
token:
"startsWith": "desfire-801e"
opt =
limit: 0
_customQueryFilter:
"user_id":
sub:
table: "contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-TOKENS SUB", items.length
done()
return
, opt )
return
it "TABLE.FIND studio users with subquery", ( done )->
query =
"id":
sub:
table: "contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
opt =
limit: 0
tableU.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-USR SUB", items.length
done()
return
, opt )
return
it "TABLE.FIND studio tokens with TABLE.JOIN", ( done )->
query =
token:
"startsWith": "desfire-801e"
opt =
limit: 0
joins:
"user_id":
type: "inner"
table: "Contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-TOKENS JOIN", items.length
done()
return
, opt )
return
it "TABLE.FIND studio users with TABLE.JOIN with table instance", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-USR JOIN", items.length
done()
return
, opt )
return
it "TABLE.JOIN without table", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
#table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "missing-join-table" )
done()
return
, opt )
return
it "TABLE.JOIN without field", ( done )->
query =
studio_id: 1
contracttype: 1
opt =
fields: "*"
limit: 0
joins:
"user_id":
type: "left outer"
table: tableU
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
done()
return
, opt )
return
it "TABLE.JOIN with invalid field", ( done )->
query = {}
opt =
limit: 0
joins:
"_id":
table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-field" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid foreign field", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: tableC
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-foreignfield" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid table", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: "_Contracts"
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-table" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid type", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
type: "foo"
table: tableC
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-type" )
done()
return
, opt )
return
###
it "TABLE.FIND with option `_customQueryFilter`", ( done )->
query = _CONFIG.test.findTest.q
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "deprecated-option" )
done()
return
, { _customQueryFilter: "id = 'abcde'" } )
return
###
it "TABLE.FIND with invalid filter", ( done )->
tableU.find "wrong", ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-filter" )
done()
return
return
it "TABLE.FIND with complex filter", ( done )->
ts = 1381322463000
query =
user_id: _CONFIG.test.mgetTest.id[1]
_t: { ">": ts }
opt =
limit: 3
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
items.should.have.property( "length" ).and.be.below(4)
for item in items
item._t.should.be.above( ts )
done()
return
, opt )
return
it "TABLE.INSERT string-id", ( done )->
data =
firstname: "Test"
lastname: "Test"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}@test.de"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_saveUserId = item.id
_saveUserT = item._t
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT second test case", ( done )->
data =
firstname: "Test2"
lastname: "Test"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}@test.de"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test2" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT third test case", ( done )->
data =
firstname: "Test3"
lastname: "Test"
gender: false
role: "USER"
email: "test.#{_utils.randomString( 5 )}@test.de"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test3" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( false )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT fourth test case", ( done )->
data =
firstname: "Test4"
lastname: "Test"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}@test.de"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test4" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT with createId function", ( done )->
_tbl = DBFactory.get( "Apikeys" )
data =
studio_id: 1
jsonOptions: {}
_tbl.set( data, ( err, item )->
throw err if err
item.should.have.property('apikey').and.match( /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}/ )
item.should.have.property('studio_id').and.equal( 1 )
done()
return
, {} )
return
it "TABLE.INSERT autoincrement-id", ( done )->
data = JSON.parse( JSON.stringify( _CONFIG.test.insertTestToken ) )
tableT.set( data, ( err, item )->
throw err if err
item.should.have.property('id')
item.should.have.property('user_id')
item.should.have.property('studio_id')
item.should.have.property('token')
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT predefined string-id", ( done )->
_id = _utils.randomString( 5 )
data =
id: _id
firstname: "Test"
lastname: "Test"
gender: true
role: "USER"
_t: 0
tableU.set( data, ( err, item )->
# special case. A predefined is could allready exist
if err?.code is "ER_DUP_ENTRY"
done()
return
throw err if err
item.should.have.property('id').and.equal(_id)
item.should.have.property('firstname').and.equal( "Test" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.within( _startTime, +Infinity )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT existing predefined string-id", ( done )->
data =
id: _CONFIG.test.getTest.id
firstname: "Test"
lastname: "Test"
gender: true
role: "USER"
_t: 0
tableU.set( data, ( err, item )->
should.exist( err )
err.code.should.equal( "ER_DUP_ENTRY" )
done()
return
, {} )
return
it "TABLE.UPDATE", ( done )->
data =
lastname: "Update1"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update1" )
item.should.have.property('_u').and.equal( 1 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with crypting passowrd", ( done )->
data =
lastname: "Update2"
password: "test"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update2" )
item.should.have.property('password').and.containEql( "$2b$10$" )
item.should.have.property('_u').and.equal( 2 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with event check", ( done )->
data =
lastname: "Update3"
birthday: new Date( 1950,5,15 )
image: "testimage.jpg"
role: "TRAINER"
_t: _saveUserT
_done = cbMulti 5, ->
tableU.removeListener( "lastname.userchanged", fnEvnt1 )
tableU.removeListener( "birthday.userchanged", fnEvnt2 )
tableU.removeListener( "image.userchanged", fnEvnt3 )
tableU.removeListener( "set", fnEvnt4 )
done()
return
fnEvnt1 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
oldValue.should.equal( "Update2" )
newValue.should.equal( "Update3" )
_done()
return
tableU.on "lastname.userchanged", fnEvnt1
fnEvnt2 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
should.not.exist( oldValue )
newValue.toUTCString().should.equal(new Date( 1950,5,15 ).toUTCString())
_done()
return
tableU.on "birthday.userchanged", fnEvnt2
fnEvnt3 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
should.not.exist( oldValue )
newValue.should.equal( "testimage.jpg" )
_done()
return
tableU.on "image.userchanged", fnEvnt3
fnEvnt4 = ( err, item )->
item.should.have.property('lastname').and.equal( "Update3" )
item.should.have.property('role').and.equal( "TRAINER" )
item.should.have.property('_u').and.equal( 3 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_done()
return
tableU.on "set", fnEvnt4
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update3" )
item.should.have.property('role').and.equal( "TRAINER" )
item.should.have.property('_u').and.equal( 3 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
_done()
return
, {} )
return
it "TABLE.UPDATE with invalid role", ( done )->
data =
lastname: "Update4"
role: "MILON"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "value-not-allowed" )
done()
return
, {} )
return
# Update with a JSON column: `jsonSettings` must round-trip through the
# driver's serialization and come back as the same plain object.
it "TABLE.UPDATE with json object", ( done )->
  data =
    lastname: "Update5"
    jsonSettings:
      a: 123
      b: 456
    _t: _saveUserT
  tableU.set( _saveUserId, data, ( err, item )->
    throw err if err
    item.should.have.property('lastname').and.equal( "Update5" )
    item.should.have.property('jsonSettings').and.eql
      a: 123
      b: 456
    item.should.have.property('_u').and.equal( 4 )
    item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
    done()
    return
  , {} )
  return
it "TABLE.UPDATE with wrong `_t` check", ( done )->
data =
lastname: "Update6"
_t: _startTime
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-notequal" )
should.exist( err.field )
err.field.should.equal( "_t" )
should.exist( err.value )
err.value.should.equal( _startTime )
should.exist( err.curr )
err.curr.should.equal( _saveUserT )
done()
return
, {} )
return
# An update that omits the concurrency token `_t` has to be rejected
# with a "validation-notequal-required" error on the `_t` field.
it "TABLE.UPDATE without `_t`", ( done )->
  data =
    lastname: "Update7b"
  tableU.set( _saveUserId, data, ( err, item )->
    should.exist( err )
    should.exist( err.name )
    err.name.should.equal( "validation-notequal-required" )
    should.exist( err.field )
    err.field.should.equal( "_t" )
    done()
    return
  , {} )
  return
it "TABLE.UPDATE try a manual of `_u`", ( done )->
data =
lastname: "Update7"
_u: 99
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update7" )
item.should.have.property('_u').and.equal( 5 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with existing `mail`", ( done )->
data =
lastname: "Update7"
email: " "
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-already-existend" )
should.exist( err.field )
err.field.should.equal( "email" )
should.exist( err.value )
err.value.should.equal( " " )
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Date`", ( done )->
_date = new Date()
data =
lastname: "Update8"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( Math.round( _date.getTime() / 1000 ) )
item.should.have.property('deletedate').and.equal( _date.getTime() )
item.should.have.property('lastname').and.equal( "Update8" )
item.should.have.property('_u').and.equal( 6 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Number` in ms", ( done )->
_date = new Date().getTime()
data =
lastname: "Update9"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( Math.round( _date / 1000 ) )
item.should.have.property('deletedate').and.equal( _date )
item.should.have.property('lastname').and.equal( "Update9" )
item.should.have.property('_u').and.equal( 7 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Number` in s", ( done )->
_date = Math.round( new Date().getTime() / 1000 )
data =
lastname: "Update10"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( _date )
item.should.have.property('deletedate').and.equal( _date * 1000 )
item.should.have.property('lastname').and.equal( "Update10" )
item.should.have.property('_u').and.equal( 8 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `String`", ( done )->
_date = moment( moment().format( "YYYY-MM-DD HH:mm" ), "YYYY-MM-DD HH:mm" )
data =
lastname: "Update11"
lastlogin: _date.format( "YYYY-MM-DD HH:mm" )
deletedate: _date.format( "YYYY-MM-DD HH:mm" )
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( _date.unix() )
item.should.have.property('deletedate').and.equal( _date.valueOf() )
item.should.have.property('lastname').and.equal( "Update11" )
item.should.have.property('_u').and.equal( 9 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.HAS", ( done )->
tableU.has( _saveUserId, ( err, existend )->
throw err if err
existend.should.be.ok
done()
return
, {} )
return
it "TABLE.HAS not existend", ( done )->
tableU.has( "notexist", ( err, existend )->
throw err if err
existend.should.not.be.ok
done()
return
, {} )
return
it "TABLE.COUNT", ( done )->
filter = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.count( filter, ( err, count )->
throw err if err
should.exist( count )
count.should.equal( _CONFIG.test.findTest.count )
done()
return
, {} )
return
it "TABLE.COUNT empty", ( done )->
filter =
firstname: "Maxi"
role: "INVALIDROLE"
tableU.count( filter, ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 0 )
done()
return
, {} )
return
it "TABLE.INCREMENT", ( done )->
tableU.increment( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 1 )
done()
return
, {} )
return
it "TABLE.INCREMENT second increment", ( done )->
tableU.increment( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 2 )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown field", ( done )->
tableU.increment( _saveUserId, "unknown", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-field" )
should.exist( err.field )
err.field.should.equal( "unknown" )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown id", ( done )->
tableU.increment( "unknown", "plansversion", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DECREMENT", ( done )->
tableU.decrement( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 1 )
done()
return
, {} )
return
# Decrementing a field that is not defined for the table must fail.
# (Title fixed: the body exercises `decrement`, not `increment`.)
it "TABLE.DECREMENT unknown field", ( done )->
  tableU.decrement( _saveUserId, "unknown", ( err, count )->
    should.exist( err )
    should.exist( err.name )
    err.name.should.equal( "invalid-field" )
    should.exist( err.field )
    err.field.should.equal( "unknown" )
    done()
    return
  , {} )
  return
# Decrementing on a non-existing record id must yield a "not-found" error.
# (Title fixed: the body exercises `decrement`, not `increment`.)
it "TABLE.DECREMENT unknown id", ( done )->
  tableU.decrement( "unknown", "plansversion", ( err, count )->
    should.exist( err )
    should.exist( err.name )
    err.name.should.equal( "not-found" )
    done()
    return
  , {} )
  return
it "TABLE.DEL", ( done )->
_usr = _testUsers[ 0 ]
tableU.del( _usr.id, ( err, item )->
throw err if err
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test2" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.GET deleted", ( done )->
_usr = _testUsers[ 0 ]
tableU.get( _usr.id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DEL deleted", ( done )->
_usr = _testUsers[ 0 ]
tableU.del( _usr.id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.MDEL invalid filter", ( done )->
_usrA = _testUsers[ 1 ]
_usrB = _testUsers[ 2 ]
ids = [ _usrA.id, _usrB.id ]
tableU.mdel( user_id: ids, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "no-filter" )
done()
return
, {} )
return
it "TABLE.MDEL", ( done )->
_usrA = _testUsers[ 1 ]
_usrB = _testUsers[ 2 ]
ids = [ _usrA.id, _usrB.id ]
tableU.mdel( id: ids, ( err, items )->
throw err if err
_difference(ids,_map( items, "id" ) ).should.have.length(0)
done()
return
, {} )
return
it "TABLE.GET", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get _testUsers[ 1 ].id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
return
_CONFIG = require './config'
MySQLFactory = require( "../." )
_map = require( "lodash/map" )
_difference = require( "lodash/difference" )
should = require('should')
moment = require('moment')
_startTime = Date.now() - 1000 * 60
_utils = require( "../lib/utils" )
DBFactory = null
# Build a gate callback: the returned function counts its own invocations
# and fires `cb` exactly once, on the call that brings the counter to zero.
# Calls beyond that point are ignored (the counter just keeps dropping).
#
# @param { Number } count - how many invocations are required before `cb` runs
# @param { Function } cb  - function to execute once the count is exhausted
# @return { Function } the counting callback to hand out
cbMulti = ( count, cb )->
  remaining = count
  return ->
    remaining -= 1
    cb() if remaining is 0
    return
console.log "\nCONFIG:\n", _CONFIG.mysql
describe "----- MySQL Factory TESTS -----", ->
before ( done )->
done()
return
describe 'Initialization', ->
it 'init factory', ( done )->
DBFactory = new MySQLFactory( _CONFIG.mysql, _CONFIG.tables )
done()
return
describe 'Factory Tests', ->
it "List the existing tables", ( done )->
DBFactory.list ( err, tables )->
throw err if err
tables.should.eql( Object.keys( _CONFIG.tables ) )
done()
return
it "Get a table", ( done )->
_cnf = _CONFIG.tables[ _CONFIG.test.singleCreateTableTest ]
_tbl = DBFactory.get( _CONFIG.test.singleCreateTableTest )
_tbl.should.exist
_tbl?.name?.should.eql( _cnf.name )
done()
return
it "Try to get a not existend table", ( done )->
_tbl = DBFactory.get( "notexistend" )
should.not.exist( _tbl )
done()
return
it "has for existend table", ( done )->
_has = DBFactory.has( _CONFIG.test.singleCreateTableTest )
_has.should.be.true
done()
return
it "has for not existend table", ( done )->
_has = DBFactory.has( "notexistend" )
_has.should.be.false
done()
return
return
describe 'Table Tests', ->
tableU = null # user
tableT = null # tokens
tableC = null # contracts
fieldsTest = [ "id", "firstname" ]
allFields = Object.keys( _CONFIG.tables[ _CONFIG.test.getTest.tbl ].fields )
_saveUserId = null
_saveUserT = 0
_testUsers = []
it "get test table `#{_CONFIG.test.getTest.tbl}`", ( done )->
tableU = DBFactory.get( _CONFIG.test.getTest.tbl )
tableU?.name?.should.eql( _CONFIG.test.getTest.tbl )
done()
return
it "get test table `#{_CONFIG.test.tokenTable}`", ( done )->
tableT = DBFactory.get( _CONFIG.test.tokenTable )
tableT?.name?.should.eql( _CONFIG.test.tokenTable )
done()
return
it "get test table `#{_CONFIG.test.contractsTable}`", ( done )->
tableC = DBFactory.get( _CONFIG.test.contractsTable )
tableC?.name?.should.eql( _CONFIG.test.contractsTable )
done()
return
it "TABLE.GET", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get _id, ( err, item )->
throw err if err
should.exist( item.id )
item.id.should.equal( _id )
done()
return
return
it "TABLE.GET fields as array", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: fieldsTest )
return
it "TABLE.GET fields as string", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: fieldsTest.join( ", " ) )
return
it "TABLE.GET fields as set", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: "set:test" )
return
it "TABLE.GET fields `all`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,allFields).should.have.length(0)
done()
return
, fields: "all" )
return
it "TABLE.GET fields `*`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,allFields).should.have.length(0)
done()
return
, fields: "all" )
return
it "TABLE.GET fields `idonly`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,[ "id" ]).should.have.length(0)
done()
return
, fields: "idonly" )
return
it "TABLE.GET fields by filter function", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,[ "id", "_t", "_u" ]).should.have.length(0)
done()
return
, fields: ( (fld)->
fld.name.length <= 2 ) )
return
it "TABLE.MGET", ( done )->
_ids = JSON.parse( JSON.stringify( _CONFIG.test.mgetTest.id ) )
tableU.mget _ids, ( err, items )->
throw err if err
items.should.have.length(2)
_difference(_CONFIG.test.mgetTest.id,_map( items, "id" ) ).should.have.length(0)
done()
return
return
it "TABLE.MGET empty", ( done )->
tableU.mget [], ( err, items )->
throw err if err
items.should.have.length(0)
done()
return
return
it "TABLE.FIND all", ( done )->
@timeout( 6000 )
tableU.find {}, ( err, items )->
throw err if err
items.should.have.length( _CONFIG.tables.Users.limit)
done()
return
return
it "TABLE.FIND query", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find query, ( err, items )->
throw err if err
items.should.have.length( _CONFIG.test.findTest.count )
done()
return
return
it "TABLE.FIND with limit", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
query.limit = 1
tableU.find( query, ( err, items )->
throw err if err
items.should.have.length(1)
done()
return
, {} )
return
it "TABLE.FIND with limit by option", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find( query, ( err, items )->
throw err if err
items.should.have.length(1)
done()
return
, { limit: 1 } )
return
it "TABLE.FIND with `idonly`", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find( query, ( err, items )->
throw err if err
items.should.be.an.instanceOf(Array)
for id in items
id.should.be.a.String
done()
return
, { fields: "idonly" } )
return
it "TABLE.FIND studio tokens with subquery", ( done )->
query =
token:
"startsWith": "<PASSWORD>"
opt =
limit: 0
_customQueryFilter:
"user_id":
sub:
table: "contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-TOKENS SUB", items.length
done()
return
, opt )
return
it "TABLE.FIND studio users with subquery", ( done )->
query =
"id":
sub:
table: "contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
opt =
limit: 0
tableU.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-USR SUB", items.length
done()
return
, opt )
return
it "TABLE.FIND studio tokens with TABLE.JOIN", ( done )->
query =
token:
"startsWith": "<PASSWORD>"
opt =
limit: 0
joins:
"user_id":
type: "inner"
table: "Contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-TOKENS JOIN", items.length
done()
return
, opt )
return
it "TABLE.FIND studio users with TABLE.JOIN with table instance", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-USR JOIN", items.length
done()
return
, opt )
return
it "TABLE.JOIN without table", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
#table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "missing-join-table" )
done()
return
, opt )
return
it "TABLE.JOIN without field", ( done )->
query =
studio_id: 1
contracttype: 1
opt =
fields: "*"
limit: 0
joins:
"user_id":
type: "left outer"
table: tableU
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
done()
return
, opt )
return
it "TABLE.JOIN with invalid field", ( done )->
query = {}
opt =
limit: 0
joins:
"_id":
table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-field" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid foreign field", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: tableC
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-foreignfield" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid table", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: "_Contracts"
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-table" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid type", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
type: "foo"
table: tableC
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-type" )
done()
return
, opt )
return
###
it "TABLE.FIND with option `_customQueryFilter`", ( done )->
query = _CONFIG.test.findTest.q
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "deprecated-option" )
done()
return
, { _customQueryFilter: "id = 'abcde'" } )
return
###
it "TABLE.FIND with invalid filter", ( done )->
tableU.find "wrong", ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-filter" )
done()
return
return
it "TABLE.FIND with complex filter", ( done )->
ts = 1381322463000
query =
user_id: _CONFIG.test.mgetTest.id[1]
_t: { ">": ts }
opt =
limit: 3
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
items.should.have.property( "length" ).and.be.below(4)
for item in items
item._t.should.be.above( ts )
done()
return
, opt )
return
it "TABLE.INSERT string-id", ( done )->
data =
firstname: "<NAME>"
lastname: "<NAME>"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}<EMAIL>"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_saveUserId = item.id
_saveUserT = item._t
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "<NAME>" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT second test case", ( done )->
data =
firstname: "<NAME>"
lastname: "<NAME>"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}<EMAIL>"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test2" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT third test case", ( done )->
data =
firstname: "<NAME>"
lastname: "<NAME>"
gender: false
role: "USER"
email: "test.#{_utils.randomString( 5 )}<EMAIL>"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "<NAME>" )
item.should.have.property('lastname').and.equal( "<NAME>" )
item.should.have.property('gender').and.equal( false )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT fourth test case", ( done )->
data =
firstname: "<NAME>"
lastname: "<NAME>"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}<EMAIL>"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test<NAME>" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT with createId function", ( done )->
_tbl = DBFactory.get( "Apikeys" )
data =
studio_id: 1
jsonOptions: {}
_tbl.set( data, ( err, item )->
throw err if err
item.should.have.property('apikey').and.match( /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}/ )
item.should.have.property('studio_id').and.equal( 1 )
done()
return
, {} )
return
it "TABLE.INSERT autoincrement-id", ( done )->
data = JSON.parse( JSON.stringify( _CONFIG.test.insertTestToken ) )
tableT.set( data, ( err, item )->
throw err if err
item.should.have.property('id')
item.should.have.property('user_id')
item.should.have.property('studio_id')
item.should.have.property('token')
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT predefined string-id", ( done )->
_id = _utils.randomString( 5 )
data =
id: _id
firstname: "<NAME>"
lastname: "<NAME>"
gender: true
role: "USER"
_t: 0
tableU.set( data, ( err, item )->
# special case. A predefined is could allready exist
if err?.code is "ER_DUP_ENTRY"
done()
return
throw err if err
item.should.have.property('id').and.equal(_id)
item.should.have.property('firstname').and.equal( "<NAME>" )
item.should.have.property('lastname').and.equal( "<NAME>" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.within( _startTime, +Infinity )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT existing predefined string-id", ( done )->
data =
id: _CONFIG.test.getTest.id
firstname: "Test"
lastname: "Test"
gender: true
role: "USER"
_t: 0
tableU.set( data, ( err, item )->
should.exist( err )
err.code.should.equal( "ER_DUP_ENTRY" )
done()
return
, {} )
return
it "TABLE.UPDATE", ( done )->
data =
lastname: "Update1"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update1" )
item.should.have.property('_u').and.equal( 1 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
# Update that includes a password: the table must store a bcrypt hash
# (recognizable by the "$2b$10$" algorithm/cost prefix), never the plain text.
# Literals restored from the intact first copy of this test; the scrubbed
# `containEql( <PASSWORD>$" )` line was a syntax error.
it "TABLE.UPDATE with crypting password", ( done )->
  data =
    lastname: "Update2"
    password: "test"
    _t: _saveUserT
  tableU.set( _saveUserId, data, ( err, item )->
    throw err if err
    item.should.have.property('lastname').and.equal( "Update2" )
    item.should.have.property('password').and.containEql( "$2b$10$" )
    item.should.have.property('_u').and.equal( 2 )
    item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
    # remember the new modification stamp for the following update tests
    _saveUserT = item._t
    done()
    return
  , {} )
  return
it "TABLE.UPDATE with event check", ( done )->
data =
lastname: "Update3"
birthday: new Date( 1950,5,15 )
image: "testimage.jpg"
role: "TRAINER"
_t: _saveUserT
_done = cbMulti 5, ->
tableU.removeListener( "lastname.userchanged", fnEvnt1 )
tableU.removeListener( "birthday.userchanged", fnEvnt2 )
tableU.removeListener( "image.userchanged", fnEvnt3 )
tableU.removeListener( "set", fnEvnt4 )
done()
return
fnEvnt1 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
oldValue.should.equal( "Update2" )
newValue.should.equal( "Update3" )
_done()
return
tableU.on "lastname.userchanged", fnEvnt1
fnEvnt2 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
should.not.exist( oldValue )
newValue.toUTCString().should.equal(new Date( 1950,5,15 ).toUTCString())
_done()
return
tableU.on "birthday.userchanged", fnEvnt2
fnEvnt3 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
should.not.exist( oldValue )
newValue.should.equal( "testimage.jpg" )
_done()
return
tableU.on "image.userchanged", fnEvnt3
fnEvnt4 = ( err, item )->
item.should.have.property('lastname').and.equal( "Update3" )
item.should.have.property('role').and.equal( "TRAINER" )
item.should.have.property('_u').and.equal( 3 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_done()
return
tableU.on "set", fnEvnt4
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update3" )
item.should.have.property('role').and.equal( "TRAINER" )
item.should.have.property('_u').and.equal( 3 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
_done()
return
, {} )
return
it "TABLE.UPDATE with invalid role", ( done )->
data =
lastname: "Update4"
role: "MILON"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "value-not-allowed" )
done()
return
, {} )
return
# Update with a JSON column: `jsonSettings` must round-trip through the
# driver's serialization and come back as the same plain object.
it "TABLE.UPDATE with json object", ( done )->
  data =
    lastname: "Update5"
    jsonSettings:
      a: 123
      b: 456
    _t: _saveUserT
  tableU.set( _saveUserId, data, ( err, item )->
    throw err if err
    item.should.have.property('lastname').and.equal( "Update5" )
    item.should.have.property('jsonSettings').and.eql
      a: 123
      b: 456
    item.should.have.property('_u').and.equal( 4 )
    item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
    done()
    return
  , {} )
  return
it "TABLE.UPDATE with wrong `_t` check", ( done )->
data =
lastname: "Update6"
_t: _startTime
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-notequal" )
should.exist( err.field )
err.field.should.equal( "_t" )
should.exist( err.value )
err.value.should.equal( _startTime )
should.exist( err.curr )
err.curr.should.equal( _saveUserT )
done()
return
, {} )
return
# An update that omits the concurrency token `_t` has to be rejected
# with a "validation-notequal-required" error on the `_t` field.
it "TABLE.UPDATE without `_t`", ( done )->
  data =
    lastname: "Update7b"
  tableU.set( _saveUserId, data, ( err, item )->
    should.exist( err )
    should.exist( err.name )
    err.name.should.equal( "validation-notequal-required" )
    should.exist( err.field )
    err.field.should.equal( "_t" )
    done()
    return
  , {} )
  return
it "TABLE.UPDATE try a manual of `_u`", ( done )->
data =
lastname: "Update7"
_u: 99
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update7" )
item.should.have.property('_u').and.equal( 5 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with existing `mail`", ( done )->
data =
lastname: "Update7"
email: " "
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-already-existend" )
should.exist( err.field )
err.field.should.equal( "email" )
should.exist( err.value )
err.value.should.equal( " " )
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Date`", ( done )->
_date = new Date()
data =
lastname: "Update8"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( Math.round( _date.getTime() / 1000 ) )
item.should.have.property('deletedate').and.equal( _date.getTime() )
item.should.have.property('lastname').and.equal( "Update8" )
item.should.have.property('_u').and.equal( 6 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Number` in ms", ( done )->
_date = new Date().getTime()
data =
lastname: "Update9"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( Math.round( _date / 1000 ) )
item.should.have.property('deletedate').and.equal( _date )
item.should.have.property('lastname').and.equal( "Update9" )
item.should.have.property('_u').and.equal( 7 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Number` in s", ( done )->
_date = Math.round( new Date().getTime() / 1000 )
data =
lastname: "Update10"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( _date )
item.should.have.property('deletedate').and.equal( _date * 1000 )
item.should.have.property('lastname').and.equal( "Update10" )
item.should.have.property('_u').and.equal( 8 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `String`", ( done )->
_date = moment( moment().format( "YYYY-MM-DD HH:mm" ), "YYYY-MM-DD HH:mm" )
data =
lastname: "Update11"
lastlogin: _date.format( "YYYY-MM-DD HH:mm" )
deletedate: _date.format( "YYYY-MM-DD HH:mm" )
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( _date.unix() )
item.should.have.property('deletedate').and.equal( _date.valueOf() )
item.should.have.property('lastname').and.equal( "Update11" )
item.should.have.property('_u').and.equal( 9 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.HAS", ( done )->
tableU.has( _saveUserId, ( err, existend )->
throw err if err
existend.should.be.ok
done()
return
, {} )
return
it "TABLE.HAS not existend", ( done )->
tableU.has( "notexist", ( err, existend )->
throw err if err
existend.should.not.be.ok
done()
return
, {} )
return
it "TABLE.COUNT", ( done )->
filter = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.count( filter, ( err, count )->
throw err if err
should.exist( count )
count.should.equal( _CONFIG.test.findTest.count )
done()
return
, {} )
return
it "TABLE.COUNT empty", ( done )->
filter =
firstname: "<NAME>"
role: "INVALIDROLE"
tableU.count( filter, ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 0 )
done()
return
, {} )
return
it "TABLE.INCREMENT", ( done )->
tableU.increment( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 1 )
done()
return
, {} )
return
it "TABLE.INCREMENT second increment", ( done )->
tableU.increment( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 2 )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown field", ( done )->
tableU.increment( _saveUserId, "unknown", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-field" )
should.exist( err.field )
err.field.should.equal( "unknown" )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown id", ( done )->
tableU.increment( "unknown", "plansversion", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DECREMENT", ( done )->
tableU.decrement( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 1 )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown field", ( done )->
tableU.decrement( _saveUserId, "unknown", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-field" )
should.exist( err.field )
err.field.should.equal( "unknown" )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown id", ( done )->
tableU.decrement( "unknown", "plansversion", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DEL", ( done )->
_usr = _testUsers[ 0 ]
tableU.del( _usr.id, ( err, item )->
throw err if err
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test2" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.GET deleted", ( done )->
_usr = _testUsers[ 0 ]
tableU.get( _usr.id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DEL deleted", ( done )->
_usr = _testUsers[ 0 ]
tableU.del( _usr.id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.MDEL invalid filter", ( done )->
_usrA = _testUsers[ 1 ]
_usrB = _testUsers[ 2 ]
ids = [ _usrA.id, _usrB.id ]
tableU.mdel( user_id: ids, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "no-filter" )
done()
return
, {} )
return
it "TABLE.MDEL", ( done )->
_usrA = _testUsers[ 1 ]
_usrB = _testUsers[ 2 ]
ids = [ _usrA.id, _usrB.id ]
tableU.mdel( id: ids, ( err, items )->
throw err if err
_difference(ids,_map( items, "id" ) ).should.have.length(0)
done()
return
, {} )
return
it "TABLE.GET", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get _testUsers[ 1 ].id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
return
| true | _CONFIG = require './config'
MySQLFactory = require( "../." )
_map = require( "lodash/map" )
_difference = require( "lodash/difference" )
should = require('should')
moment = require('moment')
_startTime = Date.now() - 1000 * 60
_utils = require( "../lib/utils" )
DBFactory = null
cbMulti = ( count, cb )->
return ->
count--
if count is 0
cb()
return
console.log "\nCONFIG:\n", _CONFIG.mysql
describe "----- MySQL Factory TESTS -----", ->
before ( done )->
done()
return
describe 'Initialization', ->
it 'init factory', ( done )->
DBFactory = new MySQLFactory( _CONFIG.mysql, _CONFIG.tables )
done()
return
describe 'Factory Tests', ->
it "List the existing tables", ( done )->
DBFactory.list ( err, tables )->
throw err if err
tables.should.eql( Object.keys( _CONFIG.tables ) )
done()
return
it "Get a table", ( done )->
_cnf = _CONFIG.tables[ _CONFIG.test.singleCreateTableTest ]
_tbl = DBFactory.get( _CONFIG.test.singleCreateTableTest )
_tbl.should.exist
_tbl?.name?.should.eql( _cnf.name )
done()
return
it "Try to get a not existend table", ( done )->
_tbl = DBFactory.get( "notexistend" )
should.not.exist( _tbl )
done()
return
it "has for existend table", ( done )->
_has = DBFactory.has( _CONFIG.test.singleCreateTableTest )
_has.should.be.true
done()
return
it "has for not existend table", ( done )->
_has = DBFactory.has( "notexistend" )
_has.should.be.false
done()
return
return
describe 'Table Tests', ->
tableU = null # user
tableT = null # tokens
tableC = null # contracts
fieldsTest = [ "id", "firstname" ]
allFields = Object.keys( _CONFIG.tables[ _CONFIG.test.getTest.tbl ].fields )
_saveUserId = null
_saveUserT = 0
_testUsers = []
it "get test table `#{_CONFIG.test.getTest.tbl}`", ( done )->
tableU = DBFactory.get( _CONFIG.test.getTest.tbl )
tableU?.name?.should.eql( _CONFIG.test.getTest.tbl )
done()
return
it "get test table `#{_CONFIG.test.tokenTable}`", ( done )->
tableT = DBFactory.get( _CONFIG.test.tokenTable )
tableT?.name?.should.eql( _CONFIG.test.tokenTable )
done()
return
it "get test table `#{_CONFIG.test.contractsTable}`", ( done )->
tableC = DBFactory.get( _CONFIG.test.contractsTable )
tableC?.name?.should.eql( _CONFIG.test.contractsTable )
done()
return
it "TABLE.GET", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get _id, ( err, item )->
throw err if err
should.exist( item.id )
item.id.should.equal( _id )
done()
return
return
it "TABLE.GET fields as array", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: fieldsTest )
return
it "TABLE.GET fields as string", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: fieldsTest.join( ", " ) )
return
it "TABLE.GET fields as set", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,fieldsTest).should.have.length(0)
done()
return
, fields: "set:test" )
return
it "TABLE.GET fields `all`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,allFields).should.have.length(0)
done()
return
, fields: "all" )
return
it "TABLE.GET fields `*`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,allFields).should.have.length(0)
done()
return
, fields: "all" )
return
it "TABLE.GET fields `idonly`", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,[ "id" ]).should.have.length(0)
done()
return
, fields: "idonly" )
return
it "TABLE.GET fields by filter function", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get( _id, ( err, item )->
throw err if err
should.exist( item.id )
_keys = Object.keys( item )
_difference(_keys,[ "id", "_t", "_u" ]).should.have.length(0)
done()
return
, fields: ( (fld)->
fld.name.length <= 2 ) )
return
it "TABLE.MGET", ( done )->
_ids = JSON.parse( JSON.stringify( _CONFIG.test.mgetTest.id ) )
tableU.mget _ids, ( err, items )->
throw err if err
items.should.have.length(2)
_difference(_CONFIG.test.mgetTest.id,_map( items, "id" ) ).should.have.length(0)
done()
return
return
it "TABLE.MGET empty", ( done )->
tableU.mget [], ( err, items )->
throw err if err
items.should.have.length(0)
done()
return
return
it "TABLE.FIND all", ( done )->
@timeout( 6000 )
tableU.find {}, ( err, items )->
throw err if err
items.should.have.length( _CONFIG.tables.Users.limit)
done()
return
return
it "TABLE.FIND query", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find query, ( err, items )->
throw err if err
items.should.have.length( _CONFIG.test.findTest.count )
done()
return
return
it "TABLE.FIND with limit", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
query.limit = 1
tableU.find( query, ( err, items )->
throw err if err
items.should.have.length(1)
done()
return
, {} )
return
it "TABLE.FIND with limit by option", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find( query, ( err, items )->
throw err if err
items.should.have.length(1)
done()
return
, { limit: 1 } )
return
it "TABLE.FIND with `idonly`", ( done )->
@timeout( 6000 )
query = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.find( query, ( err, items )->
throw err if err
items.should.be.an.instanceOf(Array)
for id in items
id.should.be.a.String
done()
return
, { fields: "idonly" } )
return
it "TABLE.FIND studio tokens with subquery", ( done )->
query =
token:
"startsWith": "PI:PASSWORD:<PASSWORD>END_PI"
opt =
limit: 0
_customQueryFilter:
"user_id":
sub:
table: "contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-TOKENS SUB", items.length
done()
return
, opt )
return
it "TABLE.FIND studio users with subquery", ( done )->
query =
"id":
sub:
table: "contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
opt =
limit: 0
tableU.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-USR SUB", items.length
done()
return
, opt )
return
it "TABLE.FIND studio tokens with TABLE.JOIN", ( done )->
query =
token:
"startsWith": "PI:PASSWORD:<PASSWORD>END_PI"
opt =
limit: 0
joins:
"user_id":
type: "inner"
table: "Contracts"
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-TOKENS JOIN", items.length
done()
return
, opt )
return
it "TABLE.FIND studio users with TABLE.JOIN with table instance", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
#console.log "ITEMS STD-USR JOIN", items.length
done()
return
, opt )
return
it "TABLE.JOIN without table", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
#table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "missing-join-table" )
done()
return
, opt )
return
it "TABLE.JOIN without field", ( done )->
query =
studio_id: 1
contracttype: 1
opt =
fields: "*"
limit: 0
joins:
"user_id":
type: "left outer"
table: tableU
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
done()
return
, opt )
return
it "TABLE.JOIN with invalid field", ( done )->
query = {}
opt =
limit: 0
joins:
"_id":
table: tableC
field: "user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-field" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid foreign field", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: tableC
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-foreignfield" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid table", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
table: "_Contracts"
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-table" )
done()
return
, opt )
return
it "TABLE.JOIN with invalid type", ( done )->
query = {}
opt =
limit: 0
joins:
"id":
type: "foo"
table: tableC
field: "_user_id"
filter:
studio_id: 1
contracttype: 1
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-join-type" )
done()
return
, opt )
return
###
it "TABLE.FIND with option `_customQueryFilter`", ( done )->
query = _CONFIG.test.findTest.q
tableU.find( query, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "deprecated-option" )
done()
return
, { _customQueryFilter: "id = 'abcde'" } )
return
###
it "TABLE.FIND with invalid filter", ( done )->
tableU.find "wrong", ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-filter" )
done()
return
return
it "TABLE.FIND with complex filter", ( done )->
ts = 1381322463000
query =
user_id: _CONFIG.test.mgetTest.id[1]
_t: { ">": ts }
opt =
limit: 3
tableT.find( query, ( err, items )->
throw err if err
items.should.have.property( "length" ).and.be.above(1)
items.should.have.property( "length" ).and.be.below(4)
for item in items
item._t.should.be.above( ts )
done()
return
, opt )
return
it "TABLE.INSERT string-id", ( done )->
data =
firstname: "PI:NAME:<NAME>END_PI"
lastname: "PI:NAME:<NAME>END_PI"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}PI:EMAIL:<EMAIL>END_PI"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_saveUserId = item.id
_saveUserT = item._t
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "PI:NAME:<NAME>END_PI" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT second test case", ( done )->
data =
firstname: "PI:NAME:<NAME>END_PI"
lastname: "PI:NAME:<NAME>END_PI"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}PI:EMAIL:<EMAIL>END_PI"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test2" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT third test case", ( done )->
data =
firstname: "PI:NAME:<NAME>END_PI"
lastname: "PI:NAME:<NAME>END_PI"
gender: false
role: "USER"
email: "test.#{_utils.randomString( 5 )}PI:EMAIL:<EMAIL>END_PI"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "PI:NAME:<NAME>END_PI" )
item.should.have.property('lastname').and.equal( "PI:NAME:<NAME>END_PI" )
item.should.have.property('gender').and.equal( false )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT fourth test case", ( done )->
data =
firstname: "PI:NAME:<NAME>END_PI"
lastname: "PI:NAME:<NAME>END_PI"
gender: true
role: "USER"
email: "test.#{_utils.randomString( 5 )}PI:EMAIL:<EMAIL>END_PI"
_t: 0
tableU.set( data, ( err, item )->
throw err if err
_testUsers.push item
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "TestPI:NAME:<NAME>END_PI" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT with createId function", ( done )->
_tbl = DBFactory.get( "Apikeys" )
data =
studio_id: 1
jsonOptions: {}
_tbl.set( data, ( err, item )->
throw err if err
item.should.have.property('apikey').and.match( /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}/ )
item.should.have.property('studio_id').and.equal( 1 )
done()
return
, {} )
return
it "TABLE.INSERT autoincrement-id", ( done )->
data = JSON.parse( JSON.stringify( _CONFIG.test.insertTestToken ) )
tableT.set( data, ( err, item )->
throw err if err
item.should.have.property('id')
item.should.have.property('user_id')
item.should.have.property('studio_id')
item.should.have.property('token')
item.should.have.property('_t').and.be.above( _startTime )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT predefined string-id", ( done )->
_id = _utils.randomString( 5 )
data =
id: _id
firstname: "PI:NAME:<NAME>END_PI"
lastname: "PI:NAME:<NAME>END_PI"
gender: true
role: "USER"
_t: 0
tableU.set( data, ( err, item )->
# special case. A predefined is could allready exist
if err?.code is "ER_DUP_ENTRY"
done()
return
throw err if err
item.should.have.property('id').and.equal(_id)
item.should.have.property('firstname').and.equal( "PI:NAME:<NAME>END_PI" )
item.should.have.property('lastname').and.equal( "PI:NAME:<NAME>END_PI" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_t').and.be.within( _startTime, +Infinity )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.INSERT existing predefined string-id", ( done )->
data =
id: _CONFIG.test.getTest.id
firstname: "Test"
lastname: "Test"
gender: true
role: "USER"
_t: 0
tableU.set( data, ( err, item )->
should.exist( err )
err.code.should.equal( "ER_DUP_ENTRY" )
done()
return
, {} )
return
it "TABLE.UPDATE", ( done )->
data =
lastname: "Update1"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update1" )
item.should.have.property('_u').and.equal( 1 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with crypting passowrd", ( done )->
data =
lastname: "Update2"
password: "PI:PASSWORD:<PASSWORD>END_PI"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update2" )
item.should.have.property('password').and.containEql( PI:PASSWORD:<PASSWORD>END_PI$" )
item.should.have.property('_u').and.equal( 2 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with event check", ( done )->
data =
lastname: "Update3"
birthday: new Date( 1950,5,15 )
image: "testimage.jpg"
role: "TRAINER"
_t: _saveUserT
_done = cbMulti 5, ->
tableU.removeListener( "lastname.userchanged", fnEvnt1 )
tableU.removeListener( "birthday.userchanged", fnEvnt2 )
tableU.removeListener( "image.userchanged", fnEvnt3 )
tableU.removeListener( "set", fnEvnt4 )
done()
return
fnEvnt1 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
oldValue.should.equal( "Update2" )
newValue.should.equal( "Update3" )
_done()
return
tableU.on "lastname.userchanged", fnEvnt1
fnEvnt2 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
should.not.exist( oldValue )
newValue.toUTCString().should.equal(new Date( 1950,5,15 ).toUTCString())
_done()
return
tableU.on "birthday.userchanged", fnEvnt2
fnEvnt3 = ( oldValue, newValue, id )->
id.should.equal( _saveUserId )
should.not.exist( oldValue )
newValue.should.equal( "testimage.jpg" )
_done()
return
tableU.on "image.userchanged", fnEvnt3
fnEvnt4 = ( err, item )->
item.should.have.property('lastname').and.equal( "Update3" )
item.should.have.property('role').and.equal( "TRAINER" )
item.should.have.property('_u').and.equal( 3 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_done()
return
tableU.on "set", fnEvnt4
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update3" )
item.should.have.property('role').and.equal( "TRAINER" )
item.should.have.property('_u').and.equal( 3 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
_done()
return
, {} )
return
it "TABLE.UPDATE with invalid role", ( done )->
data =
lastname: "Update4"
role: "MILON"
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "value-not-allowed" )
done()
return
, {} )
return
it "TABLE.UPDATE with with json object", ( done )->
data =
lastname: "Update5"
jsonSettings:
a: 123
b: 456
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update5" )
item.should.have.property('jsonSettings').and.eql
a: 123
b: 456
item.should.have.property('_u').and.equal( 4 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
done()
return
, {} )
return
it "TABLE.UPDATE with wrong `_t` check", ( done )->
data =
lastname: "Update6"
_t: _startTime
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-notequal" )
should.exist( err.field )
err.field.should.equal( "_t" )
should.exist( err.value )
err.value.should.equal( _startTime )
should.exist( err.curr )
err.curr.should.equal( _saveUserT )
done()
return
, {} )
return
it "TABLE.UPDATE without `_t", ( done )->
data =
lastname: "Update7b"
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-notequal-required" )
should.exist( err.field )
err.field.should.equal( "_t" )
done()
return
, {} )
return
it "TABLE.UPDATE try a manual of `_u`", ( done )->
data =
lastname: "Update7"
_u: 99
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastname').and.equal( "Update7" )
item.should.have.property('_u').and.equal( 5 )
item.should.have.property('_t').and.be.within( _saveUserT, +Infinity )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE with existing `mail`", ( done )->
data =
lastname: "Update7"
email: " "
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "validation-already-existend" )
should.exist( err.field )
err.field.should.equal( "email" )
should.exist( err.value )
err.value.should.equal( " " )
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Date`", ( done )->
_date = new Date()
data =
lastname: "Update8"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( Math.round( _date.getTime() / 1000 ) )
item.should.have.property('deletedate').and.equal( _date.getTime() )
item.should.have.property('lastname').and.equal( "Update8" )
item.should.have.property('_u').and.equal( 6 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Number` in ms", ( done )->
_date = new Date().getTime()
data =
lastname: "Update9"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( Math.round( _date / 1000 ) )
item.should.have.property('deletedate').and.equal( _date )
item.should.have.property('lastname').and.equal( "Update9" )
item.should.have.property('_u').and.equal( 7 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `Number` in s", ( done )->
_date = Math.round( new Date().getTime() / 1000 )
data =
lastname: "Update10"
lastlogin: _date
deletedate: _date
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( _date )
item.should.have.property('deletedate').and.equal( _date * 1000 )
item.should.have.property('lastname').and.equal( "Update10" )
item.should.have.property('_u').and.equal( 8 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.UPDATE date fields as `String`", ( done )->
_date = moment( moment().format( "YYYY-MM-DD HH:mm" ), "YYYY-MM-DD HH:mm" )
data =
lastname: "Update11"
lastlogin: _date.format( "YYYY-MM-DD HH:mm" )
deletedate: _date.format( "YYYY-MM-DD HH:mm" )
_t: _saveUserT
tableU.set( _saveUserId, data, ( err, item )->
throw err if err
item.should.have.property('lastlogin').and.equal( _date.unix() )
item.should.have.property('deletedate').and.equal( _date.valueOf() )
item.should.have.property('lastname').and.equal( "Update11" )
item.should.have.property('_u').and.equal( 9 )
_saveUserT = item._t
done()
return
, {} )
return
it "TABLE.HAS", ( done )->
tableU.has( _saveUserId, ( err, existend )->
throw err if err
existend.should.be.ok
done()
return
, {} )
return
it "TABLE.HAS not existend", ( done )->
tableU.has( "notexist", ( err, existend )->
throw err if err
existend.should.not.be.ok
done()
return
, {} )
return
it "TABLE.COUNT", ( done )->
filter = JSON.parse( JSON.stringify( _CONFIG.test.findTest.q ) )
tableU.count( filter, ( err, count )->
throw err if err
should.exist( count )
count.should.equal( _CONFIG.test.findTest.count )
done()
return
, {} )
return
it "TABLE.COUNT empty", ( done )->
filter =
firstname: "PI:NAME:<NAME>END_PI"
role: "INVALIDROLE"
tableU.count( filter, ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 0 )
done()
return
, {} )
return
it "TABLE.INCREMENT", ( done )->
tableU.increment( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 1 )
done()
return
, {} )
return
it "TABLE.INCREMENT second increment", ( done )->
tableU.increment( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 2 )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown field", ( done )->
tableU.increment( _saveUserId, "unknown", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-field" )
should.exist( err.field )
err.field.should.equal( "unknown" )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown id", ( done )->
tableU.increment( "unknown", "plansversion", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DECREMENT", ( done )->
tableU.decrement( _saveUserId, "plansversion", ( err, count )->
throw err if err
should.exist( count )
count.should.equal( 1 )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown field", ( done )->
tableU.decrement( _saveUserId, "unknown", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "invalid-field" )
should.exist( err.field )
err.field.should.equal( "unknown" )
done()
return
, {} )
return
it "TABLE.INCREMENT unknown id", ( done )->
tableU.decrement( "unknown", "plansversion", ( err, count )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DEL", ( done )->
_usr = _testUsers[ 0 ]
tableU.del( _usr.id, ( err, item )->
throw err if err
item.should.have.property('id')
item.should.have.property('firstname').and.equal( "Test2" )
item.should.have.property('lastname').and.equal( "Test" )
item.should.have.property('gender').and.equal( true )
item.should.have.property('role').and.equal( "USER" )
item.should.have.property('_u')
done()
return
, {} )
return
it "TABLE.GET deleted", ( done )->
_usr = _testUsers[ 0 ]
tableU.get( _usr.id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.DEL deleted", ( done )->
_usr = _testUsers[ 0 ]
tableU.del( _usr.id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
, {} )
return
it "TABLE.MDEL invalid filter", ( done )->
_usrA = _testUsers[ 1 ]
_usrB = _testUsers[ 2 ]
ids = [ _usrA.id, _usrB.id ]
tableU.mdel( user_id: ids, ( err, items )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "no-filter" )
done()
return
, {} )
return
it "TABLE.MDEL", ( done )->
_usrA = _testUsers[ 1 ]
_usrB = _testUsers[ 2 ]
ids = [ _usrA.id, _usrB.id ]
tableU.mdel( id: ids, ( err, items )->
throw err if err
_difference(ids,_map( items, "id" ) ).should.have.length(0)
done()
return
, {} )
return
it "TABLE.GET", ( done )->
_id = _CONFIG.test.getTest.id
tableU.get _testUsers[ 1 ].id, ( err, item )->
should.exist( err )
should.exist( err.name )
err.name.should.equal( "not-found" )
done()
return
return
|
[
{
"context": " new LocalStrategy\n usernameField: \"username\",\n passReqToCallback: true,\n ",
"end": 419,
"score": 0.8650732636451721,
"start": 411,
"tag": "USERNAME",
"value": "username"
},
{
"context": " User.findOne\n u... | src/server/middleware/passport.coffee | phnz/vpnht-frontend | 1 | LocalStrategy = require("passport-local").Strategy
User = require("../models/user")
module.exports = (passport) ->
# some helpers
passport.serializeUser (user, done) ->
done null, user.id
passport.deserializeUser (id, done) ->
User.findById id, (err, user) ->
done err, user
# login
passport.use "login",
new LocalStrategy
usernameField: "username",
passReqToCallback: true,
(req, username, password, done) ->
User.findOne
username: username,
(err, user) ->
return done(err) if err
return done(null, false, req.flash("error", "User not found")) unless user
# compare password
user.comparePassword password, (err, isMatch) ->
if isMatch
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user, req.flash("success", "Successfully logged in.")
else
done null, false, req.flash("error", "Invalid Password")
passport.use "signup", new LocalStrategy(
usernameField: "username",
passReqToCallback: true,
(req, username, password, done) ->
findOrCreateUser = ->
# try to find a user by username
User.findOne
username: req.body.username,
(err, existingUser) ->
if existingUser
req.flash "form",
email: req.body.email
return done(null, false, req.flash("error", "An account with that username already exists."))
# try to find a user by mail
User.findOne
email: req.body.email,
(err, existingUser) ->
if existingUser
req.flash "form",
email: req.body.email
return done(null, false, req.flash("error", "An account with that email address already exists."))
# ok we can create and save our user
user = new User
email: req.body.email,
username: req.body.username,
password: req.body.password,
user.save (err) ->
return done(err, false, req.flash("error", "Error... please contact us at support@vpn.ht")) if err
# if we have a coupon
if req.body.coupon
user.setCoupon req.body.coupon,
(err) ->
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user
else
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user
process.nextTick findOrCreateUser
)
| 168990 | LocalStrategy = require("passport-local").Strategy
User = require("../models/user")
module.exports = (passport) ->
# some helpers
passport.serializeUser (user, done) ->
done null, user.id
passport.deserializeUser (id, done) ->
User.findById id, (err, user) ->
done err, user
# login
passport.use "login",
new LocalStrategy
usernameField: "username",
passReqToCallback: true,
(req, username, password, done) ->
User.findOne
username: username,
(err, user) ->
return done(err) if err
return done(null, false, req.flash("error", "User not found")) unless user
# compare password
user.comparePassword password, (err, isMatch) ->
if isMatch
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user, req.flash("success", "Successfully logged in.")
else
done null, false, req.flash("error", "Invalid Password")
passport.use "signup", new LocalStrategy(
usernameField: "username",
passReqToCallback: true,
(req, username, password, done) ->
findOrCreateUser = ->
# try to find a user by username
User.findOne
username: req.body.username,
(err, existingUser) ->
if existingUser
req.flash "form",
email: req.body.email
return done(null, false, req.flash("error", "An account with that username already exists."))
# try to find a user by mail
User.findOne
email: req.body.email,
(err, existingUser) ->
if existingUser
req.flash "form",
email: req.body.email
return done(null, false, req.flash("error", "An account with that email address already exists."))
# ok we can create and save our user
user = new User
email: req.body.email,
username: req.body.username,
password: <PASSWORD>,
user.save (err) ->
return done(err, false, req.flash("error", "Error... please contact us at <EMAIL>")) if err
# if we have a coupon
if req.body.coupon
user.setCoupon req.body.coupon,
(err) ->
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user
else
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user
process.nextTick findOrCreateUser
)
| true | LocalStrategy = require("passport-local").Strategy
User = require("../models/user")
module.exports = (passport) ->
# some helpers
passport.serializeUser (user, done) ->
done null, user.id
passport.deserializeUser (id, done) ->
User.findById id, (err, user) ->
done err, user
# login
passport.use "login",
new LocalStrategy
usernameField: "username",
passReqToCallback: true,
(req, username, password, done) ->
User.findOne
username: username,
(err, user) ->
return done(err) if err
return done(null, false, req.flash("error", "User not found")) unless user
# compare password
user.comparePassword password, (err, isMatch) ->
if isMatch
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user, req.flash("success", "Successfully logged in.")
else
done null, false, req.flash("error", "Invalid Password")
passport.use "signup", new LocalStrategy(
usernameField: "username",
passReqToCallback: true,
(req, username, password, done) ->
findOrCreateUser = ->
# try to find a user by username
User.findOne
username: req.body.username,
(err, existingUser) ->
if existingUser
req.flash "form",
email: req.body.email
return done(null, false, req.flash("error", "An account with that username already exists."))
# try to find a user by mail
User.findOne
email: req.body.email,
(err, existingUser) ->
if existingUser
req.flash "form",
email: req.body.email
return done(null, false, req.flash("error", "An account with that email address already exists."))
# ok we can create and save our user
user = new User
email: req.body.email,
username: req.body.username,
password: PI:PASSWORD:<PASSWORD>END_PI,
user.save (err) ->
return done(err, false, req.flash("error", "Error... please contact us at PI:EMAIL:<EMAIL>END_PI")) if err
# if we have a coupon
if req.body.coupon
user.setCoupon req.body.coupon,
(err) ->
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user
else
time = 14 * 24 * 3600000
req.session.cookie.maxAge = time #2 weeks
req.session.cookie.expires = new Date(Date.now() + time)
req.session.touch()
done null, user
process.nextTick findOrCreateUser
)
|
[
{
"context": "tructor: (@key, iv = '554f0cafd67ddcaa', salt = '846cea3ae6a33474d6ae2221d8563eaaba73ef9ea20e1803') ->\n @_params =\n v: 1\n iter: 1000\n ",
"end": 223,
"score": 0.6187644004821777,
"start": 176,
"tag": "PASSWORD",
"value": "46cea3ae6a33474d6ae2221d8563eaaba73ef9ea... | app/src/utils/crypto/aes.coffee | doged/ledger-wallet-doged-chrome | 1 | @ledger.crypto ?= {}
# Wrapper around Stanford AES Library for encyrpt/decrypt data with AES
class @ledger.crypto.AES
constructor: (@key, iv = '554f0cafd67ddcaa', salt = '846cea3ae6a33474d6ae2221d8563eaaba73ef9ea20e1803') ->
@_params =
v: 1
iter: 1000
ks: 256
ts: 128
mode: 'ccm'
adata: ''
cipher: 'aes'
iv: sjcl.codec.base64.toBits(iv)
salt: sjcl.codec.base64.toBits(salt)
# Encrypts the given string using AES-256
# @param [String] data Data to encrypt
encrypt: (data) ->
encryption = sjcl.json._encrypt(@key, data, @_params)
sjcl.codec.base64.fromBits(encryption.ct,0)
# Decrypts the given encrypted data
# @param [String] encryptedData An encrypted string
decrypt: (encryptedData) ->
params = _.clone(@_params)
params.ct = sjcl.codec.base64.toBits(encryptedData)
sjcl.json._decrypt(@key, params) | 13152 | @ledger.crypto ?= {}
# Wrapper around Stanford AES Library for encyrpt/decrypt data with AES
class @ledger.crypto.AES
constructor: (@key, iv = '554f0cafd67ddcaa', salt = '8<PASSWORD>') ->
@_params =
v: 1
iter: 1000
ks: 256
ts: 128
mode: 'ccm'
adata: ''
cipher: 'aes'
iv: sjcl.codec.base64.toBits(iv)
salt: sjcl.codec.base64.toBits(salt)
# Encrypts the given string using AES-256
# @param [String] data Data to encrypt
encrypt: (data) ->
encryption = sjcl.json._encrypt(@key, data, @_params)
sjcl.codec.base64.fromBits(encryption.ct,0)
# Decrypts the given encrypted data
# @param [String] encryptedData An encrypted string
decrypt: (encryptedData) ->
params = _.clone(@_params)
params.ct = sjcl.codec.base64.toBits(encryptedData)
sjcl.json._decrypt(@key, params) | true | @ledger.crypto ?= {}
# Wrapper around Stanford AES Library for encyrpt/decrypt data with AES
class @ledger.crypto.AES
constructor: (@key, iv = '554f0cafd67ddcaa', salt = '8PI:PASSWORD:<PASSWORD>END_PI') ->
@_params =
v: 1
iter: 1000
ks: 256
ts: 128
mode: 'ccm'
adata: ''
cipher: 'aes'
iv: sjcl.codec.base64.toBits(iv)
salt: sjcl.codec.base64.toBits(salt)
# Encrypts the given string using AES-256
# @param [String] data Data to encrypt
encrypt: (data) ->
encryption = sjcl.json._encrypt(@key, data, @_params)
sjcl.codec.base64.fromBits(encryption.ct,0)
# Decrypts the given encrypted data
# @param [String] encryptedData An encrypted string
decrypt: (encryptedData) ->
params = _.clone(@_params)
params.ct = sjcl.codec.base64.toBits(encryptedData)
sjcl.json._decrypt(@key, params) |
[
{
"context": "o_branch = \"feat_zhaoli03\"\r\ninfo.owner = \"Li Zhao\"\r\ninfo.git_dir = 'C:/git/leandi/cvom_html'\r\n\r",
"end": 293,
"score": 0.9998237490653992,
"start": 286,
"tag": "NAME",
"value": "Li Zhao"
}
] | js/npm_try/gift/edit_new_branch_gitreview_via_npmgift.coffee | zhaoace/codecraft | 0 | git = require "gift"
replace = require "replace"
# utils
{exec} = require 'child_process'
p = (x) -> console.log x
s = {} #results
# utils
info = {}
info.info = "Default info."
info.from_branch = "feat_50"
info.to_branch = "feat_zhaoli03"
info.owner = "Li Zhao"
info.git_dir = 'C:/git/leandi/cvom_html'
# https://git.wdf.sap.corp:8080/#/admin/projects/SV/cvom.html,branches
repo = git info.git_dir
clean_repo = (info, callback) ->
p "clean_repo called with '#{info.info}'"
p repo.add "*" , (err) ->
p repo.reset "HEAD" , { hard: true } , (err) ->
return callback err if err?
return callback null
fetch_remote = (info, callback) ->
p "fetch_remote called with '#{info.info}'"
p repo.remote_fetch "origin" , (err, msg) ->
return callback err if err?
return callback null
clean_local_branches = (info, callback) ->
p "clean_local_branches called with '#{info.info}'"
p repo.branches (err,branches) ->
for branch in branches
if branch.name != 'master'
p repo.delete_branch "'#{branch.name}'" , (err, msg) ->
return null
update_gitreview = (info, callback) ->
p "update_gitreview called with '#{info.info}'"
replace({
regex: "defaultbranch=.*",
replacement: "defaultbranch=#{info.to_branch}",
paths: ["#{info.git_dir}/.gitreview"],
recursive: true,
silent: true,});
return callback err if err?
return callback null
reset_and_checkout_to = (info, callback) ->
p "reset_and_checkout_to called with '#{info.info}'"
p repo.checkout info.to_branch , (err,head) ->
p repo.reset "HEAD" , { hard: true } , (err) ->
return callback err if err?
return callback null
commit_new_gitreview = (info, callback) ->
p "commit_new_gitreview called with '#{info.info}'"
p repo.add "#{info.git_dir}/.gitreview" , (err) ->
p repo.commit "Owner: #{info.owner} ; From: #{info.from_branch} ", (err) ->
return callback err if err?
return callback null
publish_commits_to_gerrit = (info, callback) ->
# "C:\Program Files (x86)\Git\bin\git.exe" push -u --progress "origin" HEAD:refs/publish/feat_zhaoli
p repo.remote_push "origin","HEAD:refs/publish/#{info.to_branch}" , (err) ->
return callback err if err?
return callback null
if false
fetch_remote info, (err) ->
clean_repo info , (err) ->
reset_and_checkout_to info, (err) ->
update_gitreview info, (err) ->
commit_new_gitreview info, (err) ->
publish_commits_to_gerrit info, (err) ->
return err if err?
if true
info.to_branch = "master"
reset_and_checkout_to info, (err) ->
clean_local_branches (err) ->
return err if err?
| 92199 | git = require "gift"
replace = require "replace"
# utils
{exec} = require 'child_process'
p = (x) -> console.log x
s = {} #results
# utils
info = {}
info.info = "Default info."
info.from_branch = "feat_50"
info.to_branch = "feat_zhaoli03"
info.owner = "<NAME>"
info.git_dir = 'C:/git/leandi/cvom_html'
# https://git.wdf.sap.corp:8080/#/admin/projects/SV/cvom.html,branches
repo = git info.git_dir
clean_repo = (info, callback) ->
p "clean_repo called with '#{info.info}'"
p repo.add "*" , (err) ->
p repo.reset "HEAD" , { hard: true } , (err) ->
return callback err if err?
return callback null
fetch_remote = (info, callback) ->
p "fetch_remote called with '#{info.info}'"
p repo.remote_fetch "origin" , (err, msg) ->
return callback err if err?
return callback null
clean_local_branches = (info, callback) ->
p "clean_local_branches called with '#{info.info}'"
p repo.branches (err,branches) ->
for branch in branches
if branch.name != 'master'
p repo.delete_branch "'#{branch.name}'" , (err, msg) ->
return null
update_gitreview = (info, callback) ->
p "update_gitreview called with '#{info.info}'"
replace({
regex: "defaultbranch=.*",
replacement: "defaultbranch=#{info.to_branch}",
paths: ["#{info.git_dir}/.gitreview"],
recursive: true,
silent: true,});
return callback err if err?
return callback null
reset_and_checkout_to = (info, callback) ->
p "reset_and_checkout_to called with '#{info.info}'"
p repo.checkout info.to_branch , (err,head) ->
p repo.reset "HEAD" , { hard: true } , (err) ->
return callback err if err?
return callback null
commit_new_gitreview = (info, callback) ->
p "commit_new_gitreview called with '#{info.info}'"
p repo.add "#{info.git_dir}/.gitreview" , (err) ->
p repo.commit "Owner: #{info.owner} ; From: #{info.from_branch} ", (err) ->
return callback err if err?
return callback null
publish_commits_to_gerrit = (info, callback) ->
# "C:\Program Files (x86)\Git\bin\git.exe" push -u --progress "origin" HEAD:refs/publish/feat_zhaoli
p repo.remote_push "origin","HEAD:refs/publish/#{info.to_branch}" , (err) ->
return callback err if err?
return callback null
if false
fetch_remote info, (err) ->
clean_repo info , (err) ->
reset_and_checkout_to info, (err) ->
update_gitreview info, (err) ->
commit_new_gitreview info, (err) ->
publish_commits_to_gerrit info, (err) ->
return err if err?
if true
info.to_branch = "master"
reset_and_checkout_to info, (err) ->
clean_local_branches (err) ->
return err if err?
| true | git = require "gift"
replace = require "replace"
# utils
{exec} = require 'child_process'
p = (x) -> console.log x
s = {} #results
# utils
info = {}
info.info = "Default info."
info.from_branch = "feat_50"
info.to_branch = "feat_zhaoli03"
info.owner = "PI:NAME:<NAME>END_PI"
info.git_dir = 'C:/git/leandi/cvom_html'
# https://git.wdf.sap.corp:8080/#/admin/projects/SV/cvom.html,branches
repo = git info.git_dir
clean_repo = (info, callback) ->
p "clean_repo called with '#{info.info}'"
p repo.add "*" , (err) ->
p repo.reset "HEAD" , { hard: true } , (err) ->
return callback err if err?
return callback null
fetch_remote = (info, callback) ->
p "fetch_remote called with '#{info.info}'"
p repo.remote_fetch "origin" , (err, msg) ->
return callback err if err?
return callback null
clean_local_branches = (info, callback) ->
p "clean_local_branches called with '#{info.info}'"
p repo.branches (err,branches) ->
for branch in branches
if branch.name != 'master'
p repo.delete_branch "'#{branch.name}'" , (err, msg) ->
return null
update_gitreview = (info, callback) ->
p "update_gitreview called with '#{info.info}'"
replace({
regex: "defaultbranch=.*",
replacement: "defaultbranch=#{info.to_branch}",
paths: ["#{info.git_dir}/.gitreview"],
recursive: true,
silent: true,});
return callback err if err?
return callback null
reset_and_checkout_to = (info, callback) ->
p "reset_and_checkout_to called with '#{info.info}'"
p repo.checkout info.to_branch , (err,head) ->
p repo.reset "HEAD" , { hard: true } , (err) ->
return callback err if err?
return callback null
commit_new_gitreview = (info, callback) ->
p "commit_new_gitreview called with '#{info.info}'"
p repo.add "#{info.git_dir}/.gitreview" , (err) ->
p repo.commit "Owner: #{info.owner} ; From: #{info.from_branch} ", (err) ->
return callback err if err?
return callback null
publish_commits_to_gerrit = (info, callback) ->
# "C:\Program Files (x86)\Git\bin\git.exe" push -u --progress "origin" HEAD:refs/publish/feat_zhaoli
p repo.remote_push "origin","HEAD:refs/publish/#{info.to_branch}" , (err) ->
return callback err if err?
return callback null
if false
fetch_remote info, (err) ->
clean_repo info , (err) ->
reset_and_checkout_to info, (err) ->
update_gitreview info, (err) ->
commit_new_gitreview info, (err) ->
publish_commits_to_gerrit info, (err) ->
return err if err?
if true
info.to_branch = "master"
reset_and_checkout_to info, (err) ->
clean_local_branches (err) ->
return err if err?
|
[
{
"context": " autocomplete mentions\n http://ichord.github.com/At.js\n\n Copyright (c) 2013 chord.luo@gmail.com\n Licen",
"end": 82,
"score": 0.9969876408576965,
"start": 77,
"tag": "USERNAME",
"value": "At.js"
},
{
"context": "tp://ichord.github.com/At.js\n\n Copyright (c) 2013... | public/bower_components/Caret.js/src/jquery.caret.coffee | sujthk/thermax | 1 | ###
Implement Github like autocomplete mentions
http://ichord.github.com/At.js
Copyright (c) 2013 chord.luo@gmail.com
Licensed under the MIT license.
###
###
本插件操作 textarea 或者 input 内的插入符
只实现了获得插入符在文本框中的位置,我设置
插入符的位置.
###
"use strict";
pluginName = 'caret'
class EditableCaret
constructor: (@$inputor) ->
@domInputor = @$inputor[0]
# NOTE: Duck type
setPos: (pos) -> @domInputor
getIEPosition: -> this.getPosition()
getPosition: ->
offset = this.getOffset()
inputor_offset = @$inputor.offset()
offset.left -= inputor_offset.left
offset.top -= inputor_offset.top
offset
getOldIEPos: ->
textRange = oDocument.selection.createRange()
preCaretTextRange = oDocument.body.createTextRange()
preCaretTextRange.moveToElementText(@domInputor)
preCaretTextRange.setEndPoint("EndToEnd", textRange)
preCaretTextRange.text.length
getPos: ->
if range = this.range() # Major Browser and IE > 10
clonedRange = range.cloneRange()
clonedRange.selectNodeContents(@domInputor)
clonedRange.setEnd(range.endContainer, range.endOffset)
pos = clonedRange.toString().length
clonedRange.detach()
pos
else if oDocument.selection #IE < 9
this.getOldIEPos()
getOldIEOffset: ->
range = oDocument.selection.createRange().duplicate()
range.moveStart "character", -1
rect = range.getBoundingClientRect()
{ height: rect.bottom - rect.top, left: rect.left, top: rect.top }
getOffset: (pos) ->
if oWindow.getSelection and range = this.range()
# endContainer would be the inputor in Firefox at the begnning of a line
if range.endOffset - 1 > 0 and range.endContainer is not @domInputor
clonedRange = range.cloneRange()
clonedRange.setStart(range.endContainer, range.endOffset - 1)
clonedRange.setEnd(range.endContainer, range.endOffset)
rect = clonedRange.getBoundingClientRect()
offset = { height: rect.height, left: rect.left + rect.width, top: rect.top }
clonedRange.detach()
# At the begnning of the inputor, the offset height is 0 in Chrome and Safari
# This work fine in all browers but except while the inputor break a line into two (wrapped line).
# so we can't use it in all cases.
if !offset or offset?.height == 0
clonedRange = range.cloneRange()
shadowCaret = $ oDocument.createTextNode "|"
clonedRange.insertNode shadowCaret[0]
clonedRange.selectNode shadowCaret[0]
rect = clonedRange.getBoundingClientRect()
offset = {height: rect.height, left: rect.left, top: rect.top }
shadowCaret.remove()
clonedRange.detach()
else if oDocument.selection # ie < 9
offset = this.getOldIEOffset()
if offset
offset.top += $(oWindow).scrollTop()
offset.left += $(oWindow).scrollLeft()
offset
range: ->
return unless oWindow.getSelection
sel = oWindow.getSelection()
if sel.rangeCount > 0 then sel.getRangeAt(0) else null
class InputCaret
constructor: (@$inputor) ->
@domInputor = @$inputor[0]
getIEPos: ->
# https://github.com/ichord/Caret.js/wiki/Get-pos-of-caret-in-IE
inputor = @domInputor
range = oDocument.selection.createRange()
pos = 0
# selection should in the inputor.
if range and range.parentElement() is inputor
normalizedValue = inputor.value.replace /\r\n/g, "\n"
len = normalizedValue.length
textInputRange = inputor.createTextRange()
textInputRange.moveToBookmark range.getBookmark()
endRange = inputor.createTextRange()
endRange.collapse false
if textInputRange.compareEndPoints("StartToEnd", endRange) > -1
pos = len
else
pos = -textInputRange.moveStart "character", -len
pos
getPos: ->
if oDocument.selection then this.getIEPos() else @domInputor.selectionStart
setPos: (pos) ->
inputor = @domInputor
if oDocument.selection #IE
range = inputor.createTextRange()
range.move "character", pos
range.select()
else if inputor.setSelectionRange
inputor.setSelectionRange pos, pos
inputor
getIEOffset: (pos) ->
textRange = @domInputor.createTextRange()
pos ||= this.getPos()
textRange.move('character', pos)
x = textRange.boundingLeft
y = textRange.boundingTop
h = textRange.boundingHeight
{left: x, top: y, height: h}
getOffset: (pos) ->
$inputor = @$inputor
if oDocument.selection
offset = this.getIEOffset(pos)
offset.top += $(oWindow).scrollTop() + $inputor.scrollTop()
offset.left += $(oWindow).scrollLeft() + $inputor.scrollLeft()
offset
else
offset = $inputor.offset()
position = this.getPosition(pos)
offset =
left: offset.left + position.left - $inputor.scrollLeft()
top: offset.top + position.top - $inputor.scrollTop()
height: position.height
getPosition: (pos)->
$inputor = @$inputor
format = (value) ->
value = value.replace(/<|>|`|"|&/g, '?').replace(/\r\n|\r|\n/g,"<br/>")
if /firefox/i.test navigator.userAgent
value = value.replace(/\s/g, ' ')
value
pos = this.getPos() if pos is undefined
start_range = $inputor.val().slice(0, pos)
end_range = $inputor.val().slice(pos)
html = "<span style='position: relative; display: inline;'>"+format(start_range)+"</span>"
html += "<span id='caret' style='position: relative; display: inline;'>|</span>"
html += "<span style='position: relative; display: inline;'>"+format(end_range)+"</span>"
mirror = new Mirror($inputor)
at_rect = mirror.create(html).rect()
getIEPosition: (pos) ->
offset = this.getIEOffset pos
inputorOffset = @$inputor.offset()
x = offset.left - inputorOffset.left
y = offset.top - inputorOffset.top
h = offset.height
{left: x, top: y, height: h}
# @example
# mirror = new Mirror($("textarea#inputor"))
# html = "<p>We will get the rect of <span>@</span>icho</p>"
# mirror.create(html).rect()
class Mirror
css_attr: [
"borderBottomWidth",
"borderLeftWidth",
"borderRightWidth",
"borderTopStyle",
"borderRightStyle",
"borderBottomStyle",
"borderLeftStyle",
"borderTopWidth",
"boxSizing",
"fontFamily",
"fontSize",
"fontWeight",
"height",
"letterSpacing",
"lineHeight",
"marginBottom",
"marginLeft",
"marginRight",
"marginTop",
"outlineWidth",
"overflow",
"overflowX",
"overflowY",
"paddingBottom",
"paddingLeft",
"paddingRight",
"paddingTop",
"textAlign",
"textOverflow",
"textTransform",
"whiteSpace",
"wordBreak",
"wordWrap",
]
constructor: (@$inputor) ->
mirrorCss: ->
css =
position: 'absolute'
left: -9999
top: 0
zIndex: -20000
if @$inputor.prop( 'tagName' ) == 'TEXTAREA'
@css_attr.push( 'width' )
$.each @css_attr, (i,p) =>
css[p] = @$inputor.css p
css
create: (html) ->
@$mirror = $('<div></div>')
@$mirror.css this.mirrorCss()
@$mirror.html(html)
@$inputor.after(@$mirror)
this
# 获得标记的位置
#
# @return [Object] 标记的坐标
# {left: 0, top: 0, bottom: 0}
rect: ->
$flag = @$mirror.find "#caret"
pos = $flag.position()
rect = {left: pos.left, top: pos.top, height: $flag.height() }
@$mirror.remove()
rect
Utils =
contentEditable: ($inputor)->
!!($inputor[0].contentEditable && $inputor[0].contentEditable == 'true')
methods =
pos: (pos) ->
if pos or pos == 0
this.setPos pos
else
this.getPos()
position: (pos) ->
if oDocument.selection then this.getIEPosition pos else this.getPosition pos
offset: (pos) ->
offset = this.getOffset(pos)
offset
oDocument = null
oWindow = null
oFrame = null
setContextBy = (settings) ->
if iframe = settings?.iframe
oFrame = iframe
oWindow = iframe.contentWindow
oDocument = iframe.contentDocument || oWindow.document
else
oFrame = undefined
oWindow = window
oDocument = document
discoveryIframeOf = ($dom) ->
oDocument = $dom[0].ownerDocument
oWindow = oDocument.defaultView || oDocument.parentWindow
try
oFrame = oWindow.frameElement
catch error
# throws error in cross-domain iframes
$.fn.caret = (method, value, settings) ->
# http://stackoverflow.com/questions/16010204/get-reference-of-window-object-from-a-dom-element
if methods[method]
if $.isPlainObject(value)
setContextBy value
value = undefined
else
setContextBy settings
caret = if Utils.contentEditable(this) then new EditableCaret(this) else new InputCaret(this)
methods[method].apply caret, [value]
else
$.error "Method #{method} does not exist on jQuery.caret"
$.fn.caret.EditableCaret = EditableCaret
$.fn.caret.InputCaret = InputCaret
$.fn.caret.Utils = Utils
$.fn.caret.apis = methods
| 39859 | ###
Implement Github like autocomplete mentions
http://ichord.github.com/At.js
Copyright (c) 2013 <EMAIL>
Licensed under the MIT license.
###
###
本插件操作 textarea 或者 input 内的插入符
只实现了获得插入符在文本框中的位置,我设置
插入符的位置.
###
"use strict";
pluginName = 'caret'
class EditableCaret
constructor: (@$inputor) ->
@domInputor = @$inputor[0]
# NOTE: Duck type
setPos: (pos) -> @domInputor
getIEPosition: -> this.getPosition()
getPosition: ->
offset = this.getOffset()
inputor_offset = @$inputor.offset()
offset.left -= inputor_offset.left
offset.top -= inputor_offset.top
offset
getOldIEPos: ->
textRange = oDocument.selection.createRange()
preCaretTextRange = oDocument.body.createTextRange()
preCaretTextRange.moveToElementText(@domInputor)
preCaretTextRange.setEndPoint("EndToEnd", textRange)
preCaretTextRange.text.length
getPos: ->
if range = this.range() # Major Browser and IE > 10
clonedRange = range.cloneRange()
clonedRange.selectNodeContents(@domInputor)
clonedRange.setEnd(range.endContainer, range.endOffset)
pos = clonedRange.toString().length
clonedRange.detach()
pos
else if oDocument.selection #IE < 9
this.getOldIEPos()
getOldIEOffset: ->
range = oDocument.selection.createRange().duplicate()
range.moveStart "character", -1
rect = range.getBoundingClientRect()
{ height: rect.bottom - rect.top, left: rect.left, top: rect.top }
getOffset: (pos) ->
if oWindow.getSelection and range = this.range()
# endContainer would be the inputor in Firefox at the begnning of a line
if range.endOffset - 1 > 0 and range.endContainer is not @domInputor
clonedRange = range.cloneRange()
clonedRange.setStart(range.endContainer, range.endOffset - 1)
clonedRange.setEnd(range.endContainer, range.endOffset)
rect = clonedRange.getBoundingClientRect()
offset = { height: rect.height, left: rect.left + rect.width, top: rect.top }
clonedRange.detach()
# At the begnning of the inputor, the offset height is 0 in Chrome and Safari
# This work fine in all browers but except while the inputor break a line into two (wrapped line).
# so we can't use it in all cases.
if !offset or offset?.height == 0
clonedRange = range.cloneRange()
shadowCaret = $ oDocument.createTextNode "|"
clonedRange.insertNode shadowCaret[0]
clonedRange.selectNode shadowCaret[0]
rect = clonedRange.getBoundingClientRect()
offset = {height: rect.height, left: rect.left, top: rect.top }
shadowCaret.remove()
clonedRange.detach()
else if oDocument.selection # ie < 9
offset = this.getOldIEOffset()
if offset
offset.top += $(oWindow).scrollTop()
offset.left += $(oWindow).scrollLeft()
offset
range: ->
return unless oWindow.getSelection
sel = oWindow.getSelection()
if sel.rangeCount > 0 then sel.getRangeAt(0) else null
class InputCaret
constructor: (@$inputor) ->
@domInputor = @$inputor[0]
getIEPos: ->
# https://github.com/ichord/Caret.js/wiki/Get-pos-of-caret-in-IE
inputor = @domInputor
range = oDocument.selection.createRange()
pos = 0
# selection should in the inputor.
if range and range.parentElement() is inputor
normalizedValue = inputor.value.replace /\r\n/g, "\n"
len = normalizedValue.length
textInputRange = inputor.createTextRange()
textInputRange.moveToBookmark range.getBookmark()
endRange = inputor.createTextRange()
endRange.collapse false
if textInputRange.compareEndPoints("StartToEnd", endRange) > -1
pos = len
else
pos = -textInputRange.moveStart "character", -len
pos
getPos: ->
if oDocument.selection then this.getIEPos() else @domInputor.selectionStart
setPos: (pos) ->
inputor = @domInputor
if oDocument.selection #IE
range = inputor.createTextRange()
range.move "character", pos
range.select()
else if inputor.setSelectionRange
inputor.setSelectionRange pos, pos
inputor
getIEOffset: (pos) ->
textRange = @domInputor.createTextRange()
pos ||= this.getPos()
textRange.move('character', pos)
x = textRange.boundingLeft
y = textRange.boundingTop
h = textRange.boundingHeight
{left: x, top: y, height: h}
getOffset: (pos) ->
$inputor = @$inputor
if oDocument.selection
offset = this.getIEOffset(pos)
offset.top += $(oWindow).scrollTop() + $inputor.scrollTop()
offset.left += $(oWindow).scrollLeft() + $inputor.scrollLeft()
offset
else
offset = $inputor.offset()
position = this.getPosition(pos)
offset =
left: offset.left + position.left - $inputor.scrollLeft()
top: offset.top + position.top - $inputor.scrollTop()
height: position.height
getPosition: (pos)->
$inputor = @$inputor
format = (value) ->
value = value.replace(/<|>|`|"|&/g, '?').replace(/\r\n|\r|\n/g,"<br/>")
if /firefox/i.test navigator.userAgent
value = value.replace(/\s/g, ' ')
value
pos = this.getPos() if pos is undefined
start_range = $inputor.val().slice(0, pos)
end_range = $inputor.val().slice(pos)
html = "<span style='position: relative; display: inline;'>"+format(start_range)+"</span>"
html += "<span id='caret' style='position: relative; display: inline;'>|</span>"
html += "<span style='position: relative; display: inline;'>"+format(end_range)+"</span>"
mirror = new Mirror($inputor)
at_rect = mirror.create(html).rect()
getIEPosition: (pos) ->
offset = this.getIEOffset pos
inputorOffset = @$inputor.offset()
x = offset.left - inputorOffset.left
y = offset.top - inputorOffset.top
h = offset.height
{left: x, top: y, height: h}
# @example
# mirror = new Mirror($("textarea#inputor"))
# html = "<p>We will get the rect of <span>@</span>icho</p>"
# mirror.create(html).rect()
class Mirror
css_attr: [
"borderBottomWidth",
"borderLeftWidth",
"borderRightWidth",
"borderTopStyle",
"borderRightStyle",
"borderBottomStyle",
"borderLeftStyle",
"borderTopWidth",
"boxSizing",
"fontFamily",
"fontSize",
"fontWeight",
"height",
"letterSpacing",
"lineHeight",
"marginBottom",
"marginLeft",
"marginRight",
"marginTop",
"outlineWidth",
"overflow",
"overflowX",
"overflowY",
"paddingBottom",
"paddingLeft",
"paddingRight",
"paddingTop",
"textAlign",
"textOverflow",
"textTransform",
"whiteSpace",
"wordBreak",
"wordWrap",
]
constructor: (@$inputor) ->
mirrorCss: ->
css =
position: 'absolute'
left: -9999
top: 0
zIndex: -20000
if @$inputor.prop( 'tagName' ) == 'TEXTAREA'
@css_attr.push( 'width' )
$.each @css_attr, (i,p) =>
css[p] = @$inputor.css p
css
create: (html) ->
@$mirror = $('<div></div>')
@$mirror.css this.mirrorCss()
@$mirror.html(html)
@$inputor.after(@$mirror)
this
# 获得标记的位置
#
# @return [Object] 标记的坐标
# {left: 0, top: 0, bottom: 0}
rect: ->
$flag = @$mirror.find "#caret"
pos = $flag.position()
rect = {left: pos.left, top: pos.top, height: $flag.height() }
@$mirror.remove()
rect
Utils =
contentEditable: ($inputor)->
!!($inputor[0].contentEditable && $inputor[0].contentEditable == 'true')
methods =
pos: (pos) ->
if pos or pos == 0
this.setPos pos
else
this.getPos()
position: (pos) ->
if oDocument.selection then this.getIEPosition pos else this.getPosition pos
offset: (pos) ->
offset = this.getOffset(pos)
offset
oDocument = null
oWindow = null
oFrame = null
setContextBy = (settings) ->
if iframe = settings?.iframe
oFrame = iframe
oWindow = iframe.contentWindow
oDocument = iframe.contentDocument || oWindow.document
else
oFrame = undefined
oWindow = window
oDocument = document
discoveryIframeOf = ($dom) ->
oDocument = $dom[0].ownerDocument
oWindow = oDocument.defaultView || oDocument.parentWindow
try
oFrame = oWindow.frameElement
catch error
# throws error in cross-domain iframes
$.fn.caret = (method, value, settings) ->
# http://stackoverflow.com/questions/16010204/get-reference-of-window-object-from-a-dom-element
if methods[method]
if $.isPlainObject(value)
setContextBy value
value = undefined
else
setContextBy settings
caret = if Utils.contentEditable(this) then new EditableCaret(this) else new InputCaret(this)
methods[method].apply caret, [value]
else
$.error "Method #{method} does not exist on jQuery.caret"
$.fn.caret.EditableCaret = EditableCaret
$.fn.caret.InputCaret = InputCaret
$.fn.caret.Utils = Utils
$.fn.caret.apis = methods
| true | ###
Implement Github like autocomplete mentions
http://ichord.github.com/At.js
Copyright (c) 2013 PI:EMAIL:<EMAIL>END_PI
Licensed under the MIT license.
###
###
本插件操作 textarea 或者 input 内的插入符
只实现了获得插入符在文本框中的位置,我设置
插入符的位置.
###
"use strict";
pluginName = 'caret'
class EditableCaret
constructor: (@$inputor) ->
@domInputor = @$inputor[0]
# NOTE: Duck type
setPos: (pos) -> @domInputor
getIEPosition: -> this.getPosition()
getPosition: ->
offset = this.getOffset()
inputor_offset = @$inputor.offset()
offset.left -= inputor_offset.left
offset.top -= inputor_offset.top
offset
getOldIEPos: ->
textRange = oDocument.selection.createRange()
preCaretTextRange = oDocument.body.createTextRange()
preCaretTextRange.moveToElementText(@domInputor)
preCaretTextRange.setEndPoint("EndToEnd", textRange)
preCaretTextRange.text.length
getPos: ->
if range = this.range() # Major Browser and IE > 10
clonedRange = range.cloneRange()
clonedRange.selectNodeContents(@domInputor)
clonedRange.setEnd(range.endContainer, range.endOffset)
pos = clonedRange.toString().length
clonedRange.detach()
pos
else if oDocument.selection #IE < 9
this.getOldIEPos()
getOldIEOffset: ->
range = oDocument.selection.createRange().duplicate()
range.moveStart "character", -1
rect = range.getBoundingClientRect()
{ height: rect.bottom - rect.top, left: rect.left, top: rect.top }
getOffset: (pos) ->
if oWindow.getSelection and range = this.range()
# endContainer would be the inputor in Firefox at the begnning of a line
if range.endOffset - 1 > 0 and range.endContainer is not @domInputor
clonedRange = range.cloneRange()
clonedRange.setStart(range.endContainer, range.endOffset - 1)
clonedRange.setEnd(range.endContainer, range.endOffset)
rect = clonedRange.getBoundingClientRect()
offset = { height: rect.height, left: rect.left + rect.width, top: rect.top }
clonedRange.detach()
# At the begnning of the inputor, the offset height is 0 in Chrome and Safari
# This work fine in all browers but except while the inputor break a line into two (wrapped line).
# so we can't use it in all cases.
if !offset or offset?.height == 0
clonedRange = range.cloneRange()
shadowCaret = $ oDocument.createTextNode "|"
clonedRange.insertNode shadowCaret[0]
clonedRange.selectNode shadowCaret[0]
rect = clonedRange.getBoundingClientRect()
offset = {height: rect.height, left: rect.left, top: rect.top }
shadowCaret.remove()
clonedRange.detach()
else if oDocument.selection # ie < 9
offset = this.getOldIEOffset()
if offset
offset.top += $(oWindow).scrollTop()
offset.left += $(oWindow).scrollLeft()
offset
range: ->
return unless oWindow.getSelection
sel = oWindow.getSelection()
if sel.rangeCount > 0 then sel.getRangeAt(0) else null
class InputCaret
constructor: (@$inputor) ->
@domInputor = @$inputor[0]
getIEPos: ->
# https://github.com/ichord/Caret.js/wiki/Get-pos-of-caret-in-IE
inputor = @domInputor
range = oDocument.selection.createRange()
pos = 0
# selection should in the inputor.
if range and range.parentElement() is inputor
normalizedValue = inputor.value.replace /\r\n/g, "\n"
len = normalizedValue.length
textInputRange = inputor.createTextRange()
textInputRange.moveToBookmark range.getBookmark()
endRange = inputor.createTextRange()
endRange.collapse false
if textInputRange.compareEndPoints("StartToEnd", endRange) > -1
pos = len
else
pos = -textInputRange.moveStart "character", -len
pos
getPos: ->
if oDocument.selection then this.getIEPos() else @domInputor.selectionStart
setPos: (pos) ->
inputor = @domInputor
if oDocument.selection #IE
range = inputor.createTextRange()
range.move "character", pos
range.select()
else if inputor.setSelectionRange
inputor.setSelectionRange pos, pos
inputor
getIEOffset: (pos) ->
textRange = @domInputor.createTextRange()
pos ||= this.getPos()
textRange.move('character', pos)
x = textRange.boundingLeft
y = textRange.boundingTop
h = textRange.boundingHeight
{left: x, top: y, height: h}
getOffset: (pos) ->
$inputor = @$inputor
if oDocument.selection
offset = this.getIEOffset(pos)
offset.top += $(oWindow).scrollTop() + $inputor.scrollTop()
offset.left += $(oWindow).scrollLeft() + $inputor.scrollLeft()
offset
else
offset = $inputor.offset()
position = this.getPosition(pos)
offset =
left: offset.left + position.left - $inputor.scrollLeft()
top: offset.top + position.top - $inputor.scrollTop()
height: position.height
getPosition: (pos)->
$inputor = @$inputor
format = (value) ->
value = value.replace(/<|>|`|"|&/g, '?').replace(/\r\n|\r|\n/g,"<br/>")
if /firefox/i.test navigator.userAgent
value = value.replace(/\s/g, ' ')
value
pos = this.getPos() if pos is undefined
start_range = $inputor.val().slice(0, pos)
end_range = $inputor.val().slice(pos)
html = "<span style='position: relative; display: inline;'>"+format(start_range)+"</span>"
html += "<span id='caret' style='position: relative; display: inline;'>|</span>"
html += "<span style='position: relative; display: inline;'>"+format(end_range)+"</span>"
mirror = new Mirror($inputor)
at_rect = mirror.create(html).rect()
getIEPosition: (pos) ->
offset = this.getIEOffset pos
inputorOffset = @$inputor.offset()
x = offset.left - inputorOffset.left
y = offset.top - inputorOffset.top
h = offset.height
{left: x, top: y, height: h}
# Off-screen clone ("mirror") of the inputor, styled with the same
# layout-affecting CSS so rendered text has identical metrics. Used to
# find where the caret marker lands inside the text.
#
# @example
#   mirror = new Mirror($("textarea#inputor"))
#   html = "<p>We will get the rect of <span>@</span>icho</p>"
#   mirror.create(html).rect()
class Mirror
  # CSS properties copied from the inputor so text in the mirror wraps
  # and measures exactly like text in the real element.
  css_attr: [
    "borderBottomWidth",
    "borderLeftWidth",
    "borderRightWidth",
    "borderTopStyle",
    "borderRightStyle",
    "borderBottomStyle",
    "borderLeftStyle",
    "borderTopWidth",
    "boxSizing",
    "fontFamily",
    "fontSize",
    "fontWeight",
    "height",
    "letterSpacing",
    "lineHeight",
    "marginBottom",
    "marginLeft",
    "marginRight",
    "marginTop",
    "outlineWidth",
    "overflow",
    "overflowX",
    "overflowY",
    "paddingBottom",
    "paddingLeft",
    "paddingRight",
    "paddingTop",
    "textAlign",
    "textOverflow",
    "textTransform",
    "whiteSpace",
    "wordBreak",
    "wordWrap",
  ]

  constructor: (@$inputor) ->

  # Build the inline CSS for the mirror: parked far off-screen, with
  # every property in css_attr copied from the inputor.
  mirrorCss: ->
    css =
      position: 'absolute'
      left: -9999
      top: 0
      zIndex: -20000
    # Textareas wrap lines, so their width must be mirrored too.
    # Guard the push: css_attr lives on the prototype, so pushing
    # unconditionally would append a duplicate 'width' entry to the
    # shared array on every call.
    if @$inputor.prop( 'tagName' ) == 'TEXTAREA' and 'width' not in @css_attr
      @css_attr.push( 'width' )
    $.each @css_attr, (i,p) =>
      css[p] = @$inputor.css p
    css

  # Render the given HTML in the mirror and attach it after the inputor.
  # Returns `this` so calls can be chained into rect().
  create: (html) ->
    @$mirror = $('<div></div>')
    @$mirror.css this.mirrorCss()
    @$mirror.html(html)
    @$inputor.after(@$mirror)
    this

  # Measure the #caret marker's position inside the mirror, then remove
  # the mirror from the DOM.
  #
  # @return [Object] marker coordinates: {left: 0, top: 0, height: 0}
  rect: ->
    $flag = @$mirror.find "#caret"
    pos = $flag.position()
    rect = {left: pos.left, top: pos.top, height: $flag.height() }
    @$mirror.remove()
    rect
Utils =
  # Whether the wrapped element is an active contenteditable host
  # (the attribute present and explicitly set to the string 'true').
  contentEditable: ($inputor) ->
    editable = $inputor[0].contentEditable
    !!(editable and editable is 'true')
# Public API dispatched by $.fn.caret; each function runs with an
# EditableCaret or InputCaret instance bound as `this`.
methods =
  # Getter/setter: any truthy position (or an explicit 0) moves the
  # caret; otherwise report the current position.
  pos: (pos) ->
    if pos or pos is 0 then @setPos(pos) else @getPos()

  # Caret coordinates relative to the inputor; old IE (document.selection)
  # needs its own measurement path.
  position: (pos) ->
    if oDocument.selection then @getIEPosition(pos) else @getPosition(pos)

  # Caret coordinates relative to the document.
  offset: (pos) ->
    @getOffset(pos)
# Shared browsing-context handles used by the caret helpers. They are
# rebound (via setContextBy / discoveryIframeOf) to an iframe's own
# document and window when one is in play, so all measurements happen
# in the correct context.
oDocument = null
oWindow = null
oFrame = null
# Point the shared context handles at the iframe supplied in
# `settings.iframe`, or reset them to the top-level window/document.
setContextBy = (settings) ->
  frame = settings?.iframe
  if frame
    oFrame = frame
    oWindow = frame.contentWindow
    oDocument = frame.contentDocument or oWindow.document
  else
    oFrame = undefined
    oWindow = window
    oDocument = document
# Derive the browsing context directly from the element itself, for
# callers that never passed an iframe explicitly.
discoveryIframeOf = ($dom) ->
  oDocument = $dom[0].ownerDocument
  # parentWindow is the legacy-IE spelling of defaultView.
  oWindow = oDocument.defaultView or oDocument.parentWindow
  try
    oFrame = oWindow.frameElement
  catch error
    # accessing frameElement throws in cross-domain iframes; leave as-is
# jQuery plugin entry point: $(el).caret('pos'), $(el).caret('position', n), …
# A settings object may arrive either in the `value` slot (two-argument
# form) or as the trailing `settings` argument.
$.fn.caret = (method, value, settings) ->
  # http://stackoverflow.com/questions/16010204/get-reference-of-window-object-from-a-dom-element
  unless methods[method]
    return $.error "Method #{method} does not exist on jQuery.caret"
  if $.isPlainObject(value)
    # Two-argument form: the "value" slot actually carries settings.
    setContextBy value
    value = undefined
  else
    setContextBy settings
  impl = if Utils.contentEditable(this) then new EditableCaret(this) else new InputCaret(this)
  methods[method].apply impl, [value]
# Expose the implementation classes and the raw method table on the
# plugin function, mainly for testing and extension.
$.fn.caret.EditableCaret = EditableCaret
$.fn.caret.InputCaret = InputCaret
$.fn.caret.Utils = Utils
$.fn.caret.apis = methods
|
[
{
"context": "(Darwin)\nComment: GPGTools - https://gpgtools.org\n\nmQINBFLyu3wBEAC1zq7+3kmHy1hF9aCr47PCPBkkbADzNAEp5KB0/9p4DOmTcDnW\n5AQW/rh9wH8ilDhZKPPH/xOqlKa0XSn7JscT/KpigweYu9WvpnB2nnPpX2j7tBD/\nx8/jtoroJxrni+s5grZo0Md3q5MsePOwFdJCrr8ezQHaBAVVg8LNVMcY37H3+UbN\n/NzC8iUYl5+VNA3eap/bHRi6gWK2RFADL/ECSxcxcvoTBC... | test/files/sig_various_hashes.iced | thinq4yourself/kbpgp | 1 | {KeyManager} = require '../../lib/keymanager'
{do_message,Processor} = require '../../lib/openpgp/processor'
#==================================================================
key = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
mQINBFLyu3wBEAC1zq7+3kmHy1hF9aCr47PCPBkkbADzNAEp5KB0/9p4DOmTcDnW
5AQW/rh9wH8ilDhZKPPH/xOqlKa0XSn7JscT/KpigweYu9WvpnB2nnPpX2j7tBD/
x8/jtoroJxrni+s5grZo0Md3q5MsePOwFdJCrr8ezQHaBAVVg8LNVMcY37H3+UbN
/NzC8iUYl5+VNA3eap/bHRi6gWK2RFADL/ECSxcxcvoTBCwo/f2UXs8VGy229lHG
Yc4K7VWcIUOdSdUVJ2MA/5HizgEUte9GLBfDpRKm599OMwiQTbo4IRleUPYT6/0a
klsh9mtPzneNWXa1qEJ5ei+Wk7ZiXt0ujAL9Ynk5DGo6qCBWU7hMv7KOeEjhHr01
JVof+i3g286KUQYk0N6do4E9hE5jRwJQp+50sj9E5yLj0+pEWQ0x/+2C3uuf9otr
vRWYk6XC799ZvI3C+0tPEDsTakgTQJm6ceUtUXGtK/TPAen7hwAM4x9VXjQc7dCZ
BZijo8GR1iMaktQpysva0N9ewN86+FiddXtyad6K4WelZQQRrj5tizehjLTm18G1
Gv/R4BCMIFgbE8naBBB+1fcLDc7SiK5wUWv00YDRilX8zjh/3/0dBZwY7Alz9jtw
XRA1Tbjlr5FSc5x5woCrSX5cyCwWfMrODN+uoTSn4Awt8T01pioSgHVp1wARAQAB
tChrZXliYXNlLmlvL21heCAodjAuMC4xKSA8bWF4QGtleWJhc2UuaW8+iQI2BBAB
CgAgAhsvBQkSzAMAAwsJBwMVCggCHgECF4AFAlMjPAICGQEACgkQYFKyrTGmYxwQ
NhAAjetKZUC2wPQPAMRGz2ROE1CX2Z2Smndyp7fSijhG2GsD4OP5w8Mj5lUoOyOX
B8Bo3rlMwL+rH2eHgyP6D0an5qj8GbGRuiqSngIpfxvtkfiiZYMYy2+6H7pK58ly
y9qgTjx6sHuryWOkvxE7PpavUlFdJXqV9bbnRDoOSNWjCI16nd4V0VErdlLsJCcn
9KMOXz9T1nLjpX/Lg0xiuGNu4IXH9AaJtWTqs7E8kJIbnxux8SB4pQzBcgYybKgg
VWebqJQNMgUNnzKlgH3RV0PzulCt39eKfT2k1eangCzotk50bhViJWcHpuWSArKE
EFUdTiv9s3w1QZCtXWF6enIyxHo4z3bkmN+ddsraXCkboFeT/vwNHzkNxWv1ELmN
x5UzsmNURo3Iegs1tal7kRuFLHL+Z0Kh7ag7z+MTIXFCwZhn1pSWQwKVgEsgVvAR
AFArXOr3PRkkDfx9cd6qq2I7kwJl4bMzYusOSMqRZu86l9vktAcb11HZkPaFvpZS
7vvhuuYLW95CMA01bDCxhriERjpZqw19e2DktFPnQ5DpKzmLjB1eAmJQ1h9atsCw
UV37IA4hFz8xSySqdRRZ0D9QPZ6AmHsLS8qXzbARlPQx5k+jSPTtFBaSm8uoTN44
P0L1UfztgFPEJqKnk1deG3daFXhtUjax4vq/KhC32tOQgMe0IE1heHdlbGwgS3Jv
aG4gPHRoZW1heEBnbWFpbC5jb20+iQI9BBMBCgAnBQJTIzvgAhsvBQkSzAMABQsJ
CAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEGBSsq0xpmMcaCoP+wTS5G4zUI8BotXi
16CR9ELGkMJbnVmWVLBTFqdnv63BzFQ0HruBE1qr/b5xPQsEFvAgjQ9F9TbajaLH
7qpTxjlPgdAl0Sb3lwycMZ9pe/04xRcCJwY3RdMPIv2ByW/3k0GJ5/c3rebLk/8d
0UqFYwZ3ZVnuGGP5vltuk8aPoXnLs4LJdCDESTFj99TEq1+0VzDfkQJf5WpCbzJZ
02g/v/JgaMKlia44EdVihbVh45Bj9Rwd4MPV0gE4PdQXKvijIva8NL30KGqJjswu
rQDpMmTO92fkv7G7DgxhP/BeDytmEBAPMz4SubHzKQj2gjRdlWqDIGe8yg4zHY9X
Efyj+9oj9d+6ezmSRplV/8HVNOsR5DxUHQMc2vRDxXqMD0AK8/k7VtA40JVpNOYH
Hih6rmM3EskLZNAwUoudGOTud8BM2JCMtMgRj3Yrc7BfoWcZ6Ck8eNZkj4AW4+FZ
ETu77LoU3s1wVbCcAb/ip+hQcdb1colUO/vYIivTuz72NbxxwPxWFgbFhXxmtuR2
ChuO250hF1JrjpqpqiOCN2wVIuxU5hOIJhCHKTTHhJRVMrA1zHJndRxkp0PKQjnC
bKUKELF4NebuejOMJXIGw7EYNZGxukeDCwFmF49nFXkeRxalpilsEGBGEng2Cuzj
Qz6yn/VoBxH0/o78PXCNxh+b+uP6tBxNYXh3ZWxsIEtyb2huIDxtYXhAbWF4ay5v
cmc+iQI9BBMBCgAnBQJTIzv6AhsvBQkSzAMABQsJCAcDBRUKCQgLBRYCAwEAAh4B
AheAAAoJEGBSsq0xpmMcPk0P/i0Dax4AuTswj0MxvYBjTAncNjHdNEnJmYy1PNPK
WjtQRS9LyRQ9MpZadQpEsWeb5FjQcxoSgJ1DGa6NTrAXmhKxOlWBLLJ1IuqFS8kl
pM5ybFSGEBdgwPgWIACpxXQuVGkzR+8lCncnQ9+tOY2mfcXLkiGaBYEl6FCaZZso
f6JWStCOEp5GCyMg1k1P78V+52E878UPcYohGJycZPwGfAg1F2ogfqj5C8QR4FVF
6EMUOmLu9+qEcaVYIMBYhbvURjZn8nfHSzru4FmOmGoRIhr4s/2VmISeNjxmwl6F
WC+415x/4pXzOgZ+TPeDXiWHQULtKrklRUHlo6x466aK0oLZIsZfGcDdj/56wlOH
qi4QBhVHNVimcAIYScRihly5U/jhzA+xlkf2GdwAOEq1EIzK9Oo14Yrzmbv4tCzB
3G/w+e6SQXzrdWEQMZjuovpk6vAWxcnbQld+RclBXYSRw4z4rSnzgng7UpCvk6Kp
xf2/mBxKB1ukKpEAlUIbu3K9By4SJYFq/2OnMSAMQYkVorFVz/R593WI9t//Htnt
LN0LShkhLOcQpP0mXNYRJs0Jti7LnPUuAS3xjPm6Nwz062BBO7eXjKq/KnkExV+H
oyXp1Kii6bsE4AX9WjuXF86/KrO5it7LjiXnvxH3MYqelrcAEZt0uN/MvYxZc+4b
c3rztB1NYXh3ZWxsIEtyb2huIDxrcm9obkBtaXQuZWR1PokCPQQTAQoAJwUCUzBx
VwIbLwUJEswDAAULCQgHAwUVCgkICwUWAgMBAAIeAQIXgAAKCRBgUrKtMaZjHImA
EACVkhpJbAC0r26HndJQ/m9CiKLxAJhqZPbonGTIzxQU8ZvBnyvYwI2MMD4foTyM
eJvDvIdFEArV2eV7t+nvewezrsNFMTisoGS9omtQFVp8Y00h5/vm3f+RsG0Tkx3L
yn6iPFDxQUm73va62dev6E8eyr+4nAHSulLj4HZJgTQzsHdyZLzPvoGLV0RSKj4e
9zHU8e2DTqZWaYlZCm/Rv6w7m+CWFzp6XNC1ujEblCRaZ7X4D7G7ssxm7MCuTZXE
f6OEpSn206lP73LtYXlj/Xi78NRsacrIn8fnxc6Elk7LjeUuqFAAf9yN0N6TdV6w
18ra0mAddk6qtVmfG7Pn7nFMJh0G5mu8iECNI0tv18RCigyZd93YCmwIYQViJ1A+
HrCZ7Y72b3Yq3ERs10F704JLH0VqTEMfQAhsaaGaL81orvLJR7K7/icvoFedLrM9
qiakOQLyL4Ngr5VqVy6bV8/CYUV5MvBtJpdmLi/wlK6Nw7cEO4OFAYIvP47zgKEf
mRdpAlOj5bZtGGy6H+eJJxcH60YXQ+QApOAAXnUW+elsmwq/XeFv9cop8DNJLn5h
PC9emD/tF0VAveIz/J1NIt8c3uOr4hHL5g3fZopNXI1lo3OkJlQO8+cS4hSMDGpF
Wn4j8ACdLx5nNzLiPOBH4DrEDPujog8OtnSk5DxBcl8ytrQmTWF4d2VsbCBLcm9o
biA8a3JvaG5AcG9zdC5oYXJ2YXJkLmVkdT6JAj0EEwEKACcFAlMwcXQCGy8FCRLM
AwAFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQYFKyrTGmYxxG6g/9HvzgQFXJ
RdcjMdRKKDewgBRjqbPhxEUmBOHGLMKbDCfIXWXWV7DmkGc9aeVprgwszYbaH+fo
0HcT+g+MhK1dPGtWq+G3E6L/LhMBhHBa5AqW3FWaN0fWiZjCqHnD5DtGkXLBxcju
SMZ08rSnPEAoqv+20vepp2yECiuNGo+3SYIGJWRmmNxS56wTDImXnIA8+hNrTAeL
UWMcwqXpgo8lPRHHkDqBsJUo/1sfFBT8qbsVfisAXAeoFN1U0sZK2jRB8b/O7fEy
var/W9mT7pz5DEVfTXpbVONPAb1TMsRb4LvGJvMRc3Yy1KMAz2vRwpEph3AOu2Mr
N+T1hGJddeJ4Goc4K7lGeWYwxlmOqVD2BVL4j29IinBt2GXaitA4YStx4H7WJEyd
rF992iGCoEhtQCgXnXJE30g9dkn+CPIlZ2jth1gkwUJrkdpRZ8zlMZE6yx4nTmIh
MfzZ+YAszqumcnEZH0Iz5BFbXGJJ+OfG1r4zWI8zzU6pZXmK9pSRR+OhB16WQUX/
a1hxTmbe3MPrpFksdXzT3URm6vZ3DFhG/2gnoQCGj83bW1szv7jrCIZnFHz0VPQy
DkMGZf/O58xoZO1ETHQERspau0H74542V7wWA7y9W7DAQHrylqGCHf8Sw2vc5tUd
khx5LolObkVS3ElxNnvBX9EYgu1rJjLDRMG0Ik1heHdlbGwgS3JvaG4gPGtyb2hu
QGFsdW0ubWl0LmVkdT6JAj0EEwEKACcFAlMwcbsCGy8FCRLMAwAFCwkIBwMFFQoJ
CAsFFgIDAQACHgECF4AACgkQYFKyrTGmYxzhaRAAitV34CP2hSEAm/EtQalqzu7o
TuMDiuLAxhJ0UMh9TdhJpZ8b9/eA1I219GWQfAyggkorPVB+L6sN8s4gTmSmOXVb
tmbo/TbuOSYARriPc6yx4nai5EMjuXJrD2L5956dYrd6cyvMO9eIZroaPBc7sIMF
3YkMiAJP5wNuH1eWcMB8f+GZWGkynBiYP2AE+pOXXJkgyu2wnuJmWmpiYsoyW1ul
fGrSt8wFoDoLQZ+wIPhckGrBfa/TZvuxE9lz+tS2MLMLbyCEdn3s8qSAwi++7GMG
SXHiUeopzs6hto9Egwhv5934+kCkgL3PWdSCNu1mg1aNTsauomzG5QsDM4tNTZQ8
ItsfksMVOhgVdAXtQlHPewVZ6liabyaQTYQPzUU29HM3u7WkxNvImImVz+bR6Ru5
CU2toDKs/OtxQwMat1p3a8yENn4T8030EFAUi5/hL4zvu27YEyJQc28o3a3rd30+
tihjJ86UguBvk1AJXKnmy/fr5tFuw1Izy7zfjznyyh4mhb7uA4hZElz9rR68c6aT
eAyK/kjXVIhfteKe0El3rLJRDY7W56fv/NIxF4ozKqliarLhclP4WArIo8WVO6m1
tglyulpRppQpvFOaDXmYZufVKNMMOtQlSyLEMBk02EhzFf7MxupsOHsuxS1zZ17k
xrSUvn7r//I6BOvdXQO5AQ0EUvK7fAEIAMw3F9CU/IaCeOneMAiHAUlsrkMgmBk5
0KH8h23I+zLK+jxLWKtohsmGn0jnczn0p4uiEdhqRE464T/emSFHEbAQd8r9bgcJ
E033hKJ3FXrm1HnAeCeFwVNxiS2cWRgnUP6w17YXk2Zdq2X9uDPyKUhp2pRKlic/
FkhEpz1makzKvm6lUUptq9/xUzYpXUDo2xqqT4fAf0Dwv0h4um5jd87irXZ1Txc0
QMBeFyWWuKvmnL5bdCGWedLyTp3ULCXexuu7Gd7AdDUU5icDLSe/Hpyst/Ss9Us4
vTZu6hiKsLBnrR/O/4VREnnmEAmJMmx1pZvYFSVoXDzWBirG4LhMcAsAEQEAAYkD
RAQYAQoADwUCUvK7fAUJAeEzgAIbLgEpCRBgUrKtMaZjHMBdIAQZAQoABgUCUvK7
fAAKCRCYCj8NAf4E34z7B/9Xj+EEWbn01l2cvPMMmnsH5vFYqHkbvk9T8CXE/QeJ
uyMgldPj0LKGHCcAP7pUlUxLlAMNAJE8Nd+evzXaNsTqjF3akidZnqjKX/URoj1d
lRVHet7u01sAjzNYiMa3ysWRB6FDrQsp36iz1kR+lGVpieZNn8gC+ylQz09SVHFT
rZux6XHHJe23GrFZOmjyJniTDfQ/qNtnoAOK/T1lHXgec6lxB9rcah1ggBKhier/
+s19dLUnwwTG16z9f5dtGD7k4vL+IYSkZkRLTNW3eKiIRJd7fthHRsVOPDhOCAzP
2b82p7KiR7EtrMDFiHpXxKcBQVjeOscss7oUurX7lhKFp/gP/0B+mdvSPXF5axkr
ITrwmjkW760yJgs24qKmudnUqisNBQtkWYeNUW+/ws3zL2uH/1xwKQpRjRdtxDhK
aREZSzIUGxKVW1ztuwnZyUHDBDitzSSqXjRf0Y46zsTaK6VxpvH2DQrXkM7sDmqe
HLGe0mEHrzrhm731ZTaAFEp9+hUWOdnUHGjMX4LjWVwR44qW9gaZkJEpWJpCtEhC
guz/LvVoQcX0zbx9ke8SmREyNydwvVFrX8v8LTYOFUjpZDMmaaV/KF9E/5EL5m0Y
KYR3pXrRykJnJVeRqomhpz1vdP8tsYqK8+kPSJCFDn5bhqlBnFLtJWaFAKAsesFC
6/C1DUKuHhGH9eKRDEOhLjNX2Fc7D5t+Oni9AVyJ16Qku3sigo3E8IynXobEgYxg
xdzDAKKugxnXx+jHE04zt0WWoxZJ99GVyRuZwBoJ0tVxWEaI40Tz0zqcqgFw1f19
5yQY9MysQh2Pr/Zlec4Y3dvW4+wMJKnijwKoCFHRr9FbQogvPnyKAFX1xYMxrS9v
1eWTQ9DN7bsS86d2BvqoOIX+Th7/7oBWvDXOJK0NGjQ8m4uL2B5a3CTdABlYdKb6
h/fESNPk3DN7y8horMo6LbCytKzttBKcPZzjVoqrlM+TJLElpe/iG4ReyMzfHwBx
M3ORWFncu87+HH//p4VSw0MfvjIz
=lhzE
-----END PGP PUBLIC KEY BLOCK-----
"""
#================================================================================
sigs = [
{
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSgG1/pANAwAJAZgKPw0B/gTfAcsaYgBTS9bpdGhpcyBpcyB3aXRoIFNIQTM4
NAqJARwEAAEJAAYFAlNL1ukACgkQmAo/DQH+BN+31Qf/Wz3ZNNmA8ptd6uSxb38K
F7dwO3Blt/GJ46bOm7kDZc1+kv3/lkJg1Gl7NDfEySfXAAIJXbPiZxALwMkuR5jj
Wgif9qJgKfwSeMdjTBKKLtlRprwee6unSgTQ8zL2Q1BUGlqThWyTGj3kUWnoIA1w
rnwFQL0SBSu8Fg7HKEIEc1hMoP3R0RvjVmkzmA80d5d8xK+j+qqAGTZY3uTsV7XJ
VLVd31euXaOXXs+/ZPF6bRmC2hf73WIdC7MjtUO4VHRgAEGCprtRbPAVE5aX9LDY
dPR6bmTn5B4yI7hFkZg+ga3uSBcU5ay7jd3sAUeI1g9rp4bg36RTAsdsCC+ikBtS
qw==
=ccex
-----END PGP MESSAGE-----
""",
which : "SHA384"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSgG1/pANAwALAZgKPw0B/gTfAcsaYgBTS9isdGhpcyBpcyB3aXRoIFNIQTIy
NAqJARwEAAELAAYFAlNL2KwACgkQmAo/DQH+BN/Nggf/cFQvzcTwZ6R4F9zxqkVV
kmSfgCpBqMoZlv5e4o/pcCnoNGxAqk6U1+BUSWyMuZO6q/0h4y9NeOZVqViGs6Ma
JwwCjnrzQOc8BJEzJjCDjDkxelOkLRY2NzyJzwP6RUPIvF0GMIVOZIvSnmrCMdAY
lcAmSlz9RPYcWSkaStONi/9H8t5Ecih54sPR9iD24/VBgxfR7VNKCyOR85pf1qnE
Vt5VhPor4vTR4JzxLqysi8Lk+ghi4mzmAG4iPeSQCgJk1IhaFIIdY/2KxSN5QZIH
vceDHXDE0fW3ekPLm8+n30kheTlcllG2YJUYYE2LSpLPq8WCn65E74Ixp1a8wxLn
6g==
=gxp2
-----END PGP MESSAGE-----
""",
which : "SHA224"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSgG1/pANAwAIAZgKPw0B/gTfAcsaYgBTS9pedGhpcyBpcyB3aXRoIFNIQTI1
NgqJARwEAAEIAAYFAlNL2l4ACgkQmAo/DQH+BN8SxAgAjuJmx3EraPjJ86g+C7Ma
jB0fovW2DsHKULdXiawzUiAPeBBP+gweReCnV9aIVgnxz5yDxxvQZKYxQ+0oD47f
hlUN4orhyI5Pbmq+uj7dDVuzwG53W1doNNeyJggO8OtEBlmXtMbDuf6J23adorij
hPRVkjLBKAmjz5ZN9TIznWcY12VMsbon9gJvzZ+8py7TlZlawxy02Q4/GnzvDje1
qx+sGKqNdwN+wqWU5K+T/ofbLfvAH2kfiKqoCYVMHutUgUC5x3tWPEdeZKgkCAa1
FhaSZ+DdsZJs77qAM0hIQfAP65F+8iJiBsRZ8XzD+wn2gZv7k8SR6WgbtvvDgJ6l
xg==
=jK3a
-----END PGP MESSAGE-----
""",
which : "SHA256"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSAG3/pANAwACAZgKPw0B/gTfAcsYYgBTS9qXdGhpcyBpcyB3aXRoIFNIQTEK
iQEcBAABAgAGBQJTS9qXAAoJEJgKPw0B/gTf9FoH/1MO7IFFxBnpLdnWcolVhxT/
R7ZvU2hUTllvhquDG3+3lRECJEcPhCmlfr0tw5CDQjCNOHwqbbiXbAVAq+I3S1J2
EEzneoGzjFEdJiUBMHzbOo4UnIE4TUssiWxnCt5BYWKptjnZ5D416GoIxaSq0vZo
IRw7QlFoIXMfQcsL8cROOhuEhmqtGHck0W/tarbO3ezbHxtcZxiWBXj6UesXlwyL
4mxxLOreEsShBolKz+urofr7OM4ADlrqRYcDTsRCuX8JrhS1G4Uh7KOmW56k/ld2
sR6yXPI2RbMiMyKMS9Z7pDq99d/Wn8VWahU1pN72rI6B65Qcxb4XWqeUFsRfsQU=
=SqWv
-----END PGP MESSAGE-----
""",
which : "SHA1"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSgG1/pANAwAKAZgKPw0B/gTfAcsaYgBTS9rOdGhpcyBpcyB3aXRoIFNIQTUx
MgqJARwEAAEKAAYFAlNL2s4ACgkQmAo/DQH+BN/sWQgAsoDbzGtMZKvCOmE8/Eiw
mq3d/JbihGY77I7SM6HLOkxqlO6cXXTve3rzUuOPZTmJ/oSscUZPethrYgS/R+AJ
Eb2Cndmzv65HT165Fo+JcN9hC5jJjQZRgqiJxii36OxTcna8mgcNytM0GGkATNEA
SKdRp2uZI+plQogmDjYXip76Io4UNApWFxpKk0N6HN4ns90z+P5Pwv90ulWEwBCx
9EUag/YlYX+SqZoavyhEa6fnEmkQvNOFHScQBRnSnqE6Nl5Uanz5wU+XfTL10Rtj
VRN8iOqxkReADkY+tX+Qj33rVNTjrhrmZ/+i8YGMVvkeNKyrUtxhV0UuSCmz8M2w
/A==
=+r2b
-----END PGP MESSAGE-----
""",
which : "SHA512"
}
]
#==================================================================
verify = ({sig,which}, T,cb) ->
await KeyManager.import_from_armored_pgp { raw : key }, defer err, km
T.no_error err
await do_message { armored : sig , keyfetch : km }, defer err
T.no_error err
T.waypoint which
cb()
#--------------------------------
exports.verify = (T,cb) ->
for sig in sigs
await verify sig, T, defer()
cb()
#==================================================================
| 120999 | {KeyManager} = require '../../lib/keymanager'
{do_message,Processor} = require '../../lib/openpgp/processor'
#==================================================================
key = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
<KEY>
<KEY>
-----END PGP PUBLIC KEY BLOCK-----
"""
#================================================================================
sigs = [
{
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSgG1/<KEY>
=ccex
-----END PGP MESSAGE-----
""",
which : "SHA384"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
<KEY>
<KEY>
-----END PGP MESSAGE-----
""",
which : "SHA224"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
<KEY>
<KEY>=jK<KEY>a
-----END PGP MESSAGE-----
""",
which : "SHA256"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
<KEY>
-----END PGP MESSAGE-----
""",
which : "SHA1"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpg<KEY>
<KEY>
/A==
=+r2b
-----END PGP MESSAGE-----
""",
which : "SHA512"
}
]
#==================================================================
verify = ({sig,which}, T,cb) ->
await KeyManager.import_from_armored_pgp { raw : key }, defer err, km
T.no_error err
await do_message { armored : sig , keyfetch : km }, defer err
T.no_error err
T.waypoint which
cb()
#--------------------------------
exports.verify = (T,cb) ->
for sig in sigs
await verify sig, T, defer()
cb()
#==================================================================
| true | {KeyManager} = require '../../lib/keymanager'
{do_message,Processor} = require '../../lib/openpgp/processor'
#==================================================================
key = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
PI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PI
-----END PGP PUBLIC KEY BLOCK-----
"""
#================================================================================
sigs = [
{
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
owEBSgG1/PI:KEY:<KEY>END_PI
=ccex
-----END PGP MESSAGE-----
""",
which : "SHA384"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
PI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PI
-----END PGP MESSAGE-----
""",
which : "SHA224"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
PI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PI=jKPI:KEY:<KEY>END_PIa
-----END PGP MESSAGE-----
""",
which : "SHA256"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
PI:KEY:<KEY>END_PI
-----END PGP MESSAGE-----
""",
which : "SHA1"
}, {
sig : """
-----BEGIN PGP MESSAGE-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgPI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PI
/A==
=+r2b
-----END PGP MESSAGE-----
""",
which : "SHA512"
}
]
#==================================================================
verify = ({sig,which}, T,cb) ->
await KeyManager.import_from_armored_pgp { raw : key }, defer err, km
T.no_error err
await do_message { armored : sig , keyfetch : km }, defer err
T.no_error err
T.waypoint which
cb()
#--------------------------------
exports.verify = (T,cb) ->
for sig in sigs
await verify sig, T, defer()
cb()
#==================================================================
|
[
{
"context": "# times - times loop for your coffee.\n#\n# Author: Veselin Todorov <hi@vesln.com>\n# Licensed under the MIT License.\n",
"end": 65,
"score": 0.999873161315918,
"start": 50,
"tag": "NAME",
"value": "Veselin Todorov"
},
{
"context": "oop for your coffee.\n#\n# Author: Ve... | examples/index.coffee | vesln/times | 1 | # times - times loop for your coffee.
#
# Author: Veselin Todorov <hi@vesln.com>
# Licensed under the MIT License.
require '../'
5.times ->
console.log 'win'
9.times (i) ->
console.log "Cats have #{i} lives"
console.log 9.times (i) ->
"win #{i}"
console.log 6.times('meow')
console.log 6.times(1)
| 110441 | # times - times loop for your coffee.
#
# Author: <NAME> <<EMAIL>>
# Licensed under the MIT License.
require '../'
5.times ->
console.log 'win'
9.times (i) ->
console.log "Cats have #{i} lives"
console.log 9.times (i) ->
"win #{i}"
console.log 6.times('meow')
console.log 6.times(1)
| true | # times - times loop for your coffee.
#
# Author: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Licensed under the MIT License.
require '../'
5.times ->
console.log 'win'
9.times (i) ->
console.log "Cats have #{i} lives"
console.log 9.times (i) ->
"win #{i}"
console.log 6.times('meow')
console.log 6.times(1)
|
[
{
"context": "pour hard - Grab me a specific beer\n#\n# Author:\n# John Sullivan\n\nnitros = [\n \"https://www.premiergourmet.com/m",
"end": 167,
"score": 0.9998847246170044,
"start": 154,
"tag": "NAME",
"value": "John Sullivan"
}
] | scripts/pourhard.coffee | jsullivan3/pinkybot | 1 | # Description:
# None
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot pour hard - Grab me a specific beer
#
# Author:
# John Sullivan
nitros = [
"https://www.premiergourmet.com/media/catalog/product/cache/1/image/300x261/9df78eab33525d08d6e5fb8d27136e95/m/i/milk_stout_nitro_bottle_and_glass_795556920.png"
"https://i.ytimg.com/vi/dY-ti2-bBG0/hqdefault.jpg"
"http://archive.boston.com/lifestyle/food/blogs/99bottles/milk%20stout%20nitro%20005.jpg"
"https://i.pinimg.com/originals/25/ac/b8/25acb809605474e5159d80901efcfcee.jpg"
"https://ewscripps.brightspotcdn.com/dims4/default/4ea7cda/2147483647/strip/true/crop/640x360+0+33/resize/1280x720!/quality/90/?url=https%3A%2F%2Fmediaassets.thedenverchannel.com%2Fphoto%2F2017%2F06%2F21%2Fleft%20hand%20milk%20stout%20nitro_1498065114798_61558441_ver1.0_640_480.jpg"
"http://thebeerrunkingsport.com/wp-content/uploads/2017/06/a782b012_MilkStoutNitro1.jpeg"
"http://www.lefthandbrewing.com/wp/wp-content/uploads/2013/11/Nitro-3-way-pour.jpg"
]
module.exports = (robot) ->
robot.hear /.*(pour hard).*/i, (msg) ->
msg.send msg.random nitros
| 98977 | # Description:
# None
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot pour hard - Grab me a specific beer
#
# Author:
# <NAME>
nitros = [
"https://www.premiergourmet.com/media/catalog/product/cache/1/image/300x261/9df78eab33525d08d6e5fb8d27136e95/m/i/milk_stout_nitro_bottle_and_glass_795556920.png"
"https://i.ytimg.com/vi/dY-ti2-bBG0/hqdefault.jpg"
"http://archive.boston.com/lifestyle/food/blogs/99bottles/milk%20stout%20nitro%20005.jpg"
"https://i.pinimg.com/originals/25/ac/b8/25acb809605474e5159d80901efcfcee.jpg"
"https://ewscripps.brightspotcdn.com/dims4/default/4ea7cda/2147483647/strip/true/crop/640x360+0+33/resize/1280x720!/quality/90/?url=https%3A%2F%2Fmediaassets.thedenverchannel.com%2Fphoto%2F2017%2F06%2F21%2Fleft%20hand%20milk%20stout%20nitro_1498065114798_61558441_ver1.0_640_480.jpg"
"http://thebeerrunkingsport.com/wp-content/uploads/2017/06/a782b012_MilkStoutNitro1.jpeg"
"http://www.lefthandbrewing.com/wp/wp-content/uploads/2013/11/Nitro-3-way-pour.jpg"
]
module.exports = (robot) ->
robot.hear /.*(pour hard).*/i, (msg) ->
msg.send msg.random nitros
| true | # Description:
# None
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot pour hard - Grab me a specific beer
#
# Author:
# PI:NAME:<NAME>END_PI
nitros = [
"https://www.premiergourmet.com/media/catalog/product/cache/1/image/300x261/9df78eab33525d08d6e5fb8d27136e95/m/i/milk_stout_nitro_bottle_and_glass_795556920.png"
"https://i.ytimg.com/vi/dY-ti2-bBG0/hqdefault.jpg"
"http://archive.boston.com/lifestyle/food/blogs/99bottles/milk%20stout%20nitro%20005.jpg"
"https://i.pinimg.com/originals/25/ac/b8/25acb809605474e5159d80901efcfcee.jpg"
"https://ewscripps.brightspotcdn.com/dims4/default/4ea7cda/2147483647/strip/true/crop/640x360+0+33/resize/1280x720!/quality/90/?url=https%3A%2F%2Fmediaassets.thedenverchannel.com%2Fphoto%2F2017%2F06%2F21%2Fleft%20hand%20milk%20stout%20nitro_1498065114798_61558441_ver1.0_640_480.jpg"
"http://thebeerrunkingsport.com/wp-content/uploads/2017/06/a782b012_MilkStoutNitro1.jpeg"
"http://www.lefthandbrewing.com/wp/wp-content/uploads/2013/11/Nitro-3-way-pour.jpg"
]
module.exports = (robot) ->
robot.hear /.*(pour hard).*/i, (msg) ->
msg.send msg.random nitros
|
[
{
"context": "\n stateSave: true\n columns: [\n {data: 'farmer_name'}\n {data: 'value'}\n {data: 'account_id'",
"end": 223,
"score": 0.9925116300582886,
"start": 212,
"tag": "NAME",
"value": "farmer_name"
}
] | app/assets/javascripts/payments_table.coffee | wagura-maurice/glowing-umbrella | 0 | ready = ->
$('#payments-table').dataTable
processing: true
serverSide: true
ajax: $('#payments-table').data('source')
pagingType: 'full_numbers'
stateSave: true
columns: [
{data: 'farmer_name'}
{data: 'value'}
{data: 'account_id'}
{data: 'completed_at'}
{data: 'name'}
{data: 'txn_type'}
{data: 'phone_number'}
]
$(document).ready(ready)
$(document).on('turbolinks:load', ready) | 112236 | ready = ->
$('#payments-table').dataTable
processing: true
serverSide: true
ajax: $('#payments-table').data('source')
pagingType: 'full_numbers'
stateSave: true
columns: [
{data: '<NAME>'}
{data: 'value'}
{data: 'account_id'}
{data: 'completed_at'}
{data: 'name'}
{data: 'txn_type'}
{data: 'phone_number'}
]
$(document).ready(ready)
$(document).on('turbolinks:load', ready) | true | ready = ->
$('#payments-table').dataTable
processing: true
serverSide: true
ajax: $('#payments-table').data('source')
pagingType: 'full_numbers'
stateSave: true
columns: [
{data: 'PI:NAME:<NAME>END_PI'}
{data: 'value'}
{data: 'account_id'}
{data: 'completed_at'}
{data: 'name'}
{data: 'txn_type'}
{data: 'phone_number'}
]
$(document).ready(ready)
$(document).on('turbolinks:load', ready) |
[
{
"context": "1)\n )\n\n return (repo, revision) ->\n key = \"#{repo.git_dir}:#{revision}\"\n\n # Only spawn a new GitRevParse if one isn't ",
"end": 1312,
"score": 0.9979553818702698,
"start": 1283,
"tag": "KEY",
"value": "\"#{repo.git_dir}:#{revision}\""
}
] | lib/git-util.coffee | etaoins/git-tree-srv | 0 | {EventEmitter} = require 'events'
child_process = require 'child_process'
gitArgs = (repo, subcommand, cmdArgs = []) ->
["--git-dir=#{repo.git_dir}", subcommand].concat(cmdArgs)
# Spawns Git and returns the process
spawnGit = (repo, subcommand, cmdArgs = []) ->
gitProcess = child_process.spawn('git', gitArgs(repo, subcommand, cmdArgs))
# Dump any badness to the console
gitProcess.stderr.on 'data', (data) ->
console.warn("git: #{data}")
return gitProcess
# Execs Git and invokes the callback with the contents of stdout on success
# or null on error
gitOutput = (repo, subcommand, cmdArgs, callback) ->
args = gitArgs(repo, subcommand, cmdArgs)
child_process.execFile('git', args, {}, (error, stdout, stderr) ->
if error?
console.warn("git: #{stderr}")
callback(null)
else
# Trim off the newline
callback(stdout.trimRight())
)
# Parses a given repo and tree revision to a tree SHA-1
parseRevision = do ->
# Map of running GitRevParse instances indexed by repo:revision
revParseCommands = {}
class GitRevParse extends EventEmitter
constructor: (repo, revision) ->
gitOutput(repo, 'rev-parse', ['--verify', revision], (sha1) =>
@emit('finish', sha1)
)
return (repo, revision) ->
key = "#{repo.git_dir}:#{revision}"
# Only spawn a new GitRevParse if one isn't running
unless revParseCommands[key]?
revParse = new GitRevParse(repo, revision)
revParseCommands[key] = revParse
# Forget about the GitRevParse once it completes
revParse.once 'finish', ->
delete revParseCommands[key]
return revParseCommands[key]
module.exports.spawnGit = spawnGit
module.exports.gitOutput = gitOutput
module.exports.parseRevision = parseRevision
| 183608 | {EventEmitter} = require 'events'
child_process = require 'child_process'
gitArgs = (repo, subcommand, cmdArgs = []) ->
["--git-dir=#{repo.git_dir}", subcommand].concat(cmdArgs)
# Spawns Git and returns the process
spawnGit = (repo, subcommand, cmdArgs = []) ->
gitProcess = child_process.spawn('git', gitArgs(repo, subcommand, cmdArgs))
# Dump any badness to the console
gitProcess.stderr.on 'data', (data) ->
console.warn("git: #{data}")
return gitProcess
# Execs Git and invokes the callback with the contents of stdout on success
# or null on error
gitOutput = (repo, subcommand, cmdArgs, callback) ->
args = gitArgs(repo, subcommand, cmdArgs)
child_process.execFile('git', args, {}, (error, stdout, stderr) ->
if error?
console.warn("git: #{stderr}")
callback(null)
else
# Trim off the newline
callback(stdout.trimRight())
)
# Parses a given repo and tree revision to a tree SHA-1
parseRevision = do ->
# Map of running GitRevParse instances indexed by repo:revision
revParseCommands = {}
class GitRevParse extends EventEmitter
constructor: (repo, revision) ->
gitOutput(repo, 'rev-parse', ['--verify', revision], (sha1) =>
@emit('finish', sha1)
)
return (repo, revision) ->
key = <KEY>
# Only spawn a new GitRevParse if one isn't running
unless revParseCommands[key]?
revParse = new GitRevParse(repo, revision)
revParseCommands[key] = revParse
# Forget about the GitRevParse once it completes
revParse.once 'finish', ->
delete revParseCommands[key]
return revParseCommands[key]
module.exports.spawnGit = spawnGit
module.exports.gitOutput = gitOutput
module.exports.parseRevision = parseRevision
| true | {EventEmitter} = require 'events'
child_process = require 'child_process'
gitArgs = (repo, subcommand, cmdArgs = []) ->
["--git-dir=#{repo.git_dir}", subcommand].concat(cmdArgs)
# Spawns Git and returns the process
spawnGit = (repo, subcommand, cmdArgs = []) ->
gitProcess = child_process.spawn('git', gitArgs(repo, subcommand, cmdArgs))
# Dump any badness to the console
gitProcess.stderr.on 'data', (data) ->
console.warn("git: #{data}")
return gitProcess
# Execs Git and invokes the callback with the contents of stdout on success
# or null on error
gitOutput = (repo, subcommand, cmdArgs, callback) ->
args = gitArgs(repo, subcommand, cmdArgs)
child_process.execFile('git', args, {}, (error, stdout, stderr) ->
if error?
console.warn("git: #{stderr}")
callback(null)
else
# Trim off the newline
callback(stdout.trimRight())
)
# Parses a given repo and tree revision to a tree SHA-1
parseRevision = do ->
# Map of running GitRevParse instances indexed by repo:revision
revParseCommands = {}
class GitRevParse extends EventEmitter
constructor: (repo, revision) ->
gitOutput(repo, 'rev-parse', ['--verify', revision], (sha1) =>
@emit('finish', sha1)
)
return (repo, revision) ->
key = PI:KEY:<KEY>END_PI
# Only spawn a new GitRevParse if one isn't running
unless revParseCommands[key]?
revParse = new GitRevParse(repo, revision)
revParseCommands[key] = revParse
# Forget about the GitRevParse once it completes
revParse.once 'finish', ->
delete revParseCommands[key]
return revParseCommands[key]
module.exports.spawnGit = spawnGit
module.exports.gitOutput = gitOutput
module.exports.parseRevision = parseRevision
|
[
{
"context": "buteEditView(new TestAttribute(new Model({ test: 'charlie' }), 'test'))).artifact()\n select.childre",
"end": 4491,
"score": 0.7811277508735657,
"start": 4487,
"tag": "NAME",
"value": "char"
},
{
"context": "harlie', 'delta' ])\n\n m = new Model({ test: 'charl... | stdlib/test/view/enum-attribute.coffee | issa-tseng/janus | 11 | should = require('should')
{ Varying, Model, attribute, List, from } = require('janus')
{ EnumAttributeEditView } = require('../../lib/view/enum-attribute')
$ = require('../../lib/view/dollar')
checkText = (select, expected) -> select.children().eq(idx).text().should.equal(text) for text, idx in expected
describe 'view', ->
describe 'enum attribute (select)', ->
it 'renders a select tag', ->
select = (new EnumAttributeEditView(new attribute.Enum(new Model(), 'test'))).artifact()
select.is('select').should.equal(true)
it 'renders an option tag for each value', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
it 'renders appropriate text given primitive values', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test', '1', 'true', 'false' ])
it 'renders appropriate text given options.stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify = (x) -> "#{x}!"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
it 'renders appropriate text given attribute#stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify: (x) -> "#{x}?"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test?', '1?', 'true?', 'false?' ])
it 'prefers options.stringify over attribute#stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify: (x) -> "#{x}?"
stringify = (x) -> "#{x}!"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
it 'updates text values if given a Varying text', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
v = new Varying('!')
stringify = (x) -> v.map((y) -> "#{x}#{y}")
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
v.set('?')
checkText(select, [ 'test?', '1?', 'true?', 'false?' ])
it 'renders additional option tags for new values', ->
values = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> values
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
values.add('delta')
select.children().length.should.equal(4)
checkText(select, [ 'alpha', 'bravo', 'charlie', 'delta' ])
values.add('nonsequitor', 2)
select.children().length.should.equal(5)
checkText(select, [ 'alpha', 'bravo', 'nonsequitor', 'charlie', 'delta' ])
it 'removes option tags for removed values', ->
values = new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
class TestAttribute extends attribute.Enum
_values: -> values
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
values.remove('bravo')
select.children().length.should.equal(3)
checkText(select, [ 'alpha', 'charlie', 'delta' ])
values.remove('alpha')
select.children().length.should.equal(2)
checkText(select, [ 'charlie', 'delta' ])
it 'populates the select with the correct value initially', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
select = (new EnumAttributeEditView(new TestAttribute(new Model({ test: 'charlie' }), 'test'))).artifact()
select.children(':selected').length.should.equal(1)
select.children(':selected').text().should.equal('charlie')
it 'updates the selected value if the model changes', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
m = new Model({ test: 'charlie' })
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
m.set('test', 'bravo')
select.children(':selected').length.should.equal(1)
select.children(':selected').text().should.equal('bravo')
# originally this test also tried {}, but this doesn't work as when you
# go to set the value it just thinks you want to set a bag of nothing.
it 'knows how to set the value for fringe data types', ->
mval = new Model()
arrval = []
class TestAttribute extends attribute.Enum
_values: -> new List([ mval, arrval ])
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
select.val(select.children().eq(1).val())
select.trigger('change')
m.get_('test').should.equal(arrval)
select.val(select.children().eq(0).val())
select.trigger('change')
m.get_('test').should.equal(mval)
it 'updates the model if the selected value changes', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
m = new Model({ test: 'charlie' })
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
select.val('bravo')
select.trigger('change')
m.get_('test').should.equal('bravo')
select.val('charlie')
select.trigger('change')
m.get_('test').should.equal('charlie')
it 'sets the model value upon event wiring to the apparent value', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie', 'delta' ]
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
m.get_('test').should.equal('alpha')
it 'inserts a blank placeholder if the field is declared nullable', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
nullable: true
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ '', 'alpha', 'bravo', 'charlie' ])
it 'deals well with a Varying values list ref changing wholesale', ->
v = new Varying([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> v
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
checkText(select, [ 'alpha', 'bravo', 'charlie' ])
v.set([ 'puppies', 'kittens', 'ducklings' ])
select.children().length.should.equal(3)
checkText(select, [ 'puppies', 'kittens', 'ducklings' ])
it 'deals well with a from expression', ->
class TestAttribute extends attribute.Enum
_values: -> from('list')
model = new Model({ list: new List([ 'xray', 'yankee', 'zulu' ]) })
select = (new EnumAttributeEditView(new TestAttribute(model, 'test'))).artifact()
select.children().length.should.equal(3)
checkText(select, [ 'xray', 'yankee', 'zulu' ])
model.get_('list').add('missingno')
checkText(select, [ 'xray', 'yankee', 'zulu', 'missingno' ])
it 'inserts a blank placeholder if the field is declared nullable', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
nullable: true
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ '', 'alpha', 'bravo', 'charlie' ])
describe 'attach', ->
it 'should leave the initial markup alone', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
dom = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
(new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).attach(dom)
dom.children().eq(0).text().should.equal('a')
dom.children().eq(1).text().should.equal('b')
dom.children().eq(2).text().should.equal('c')
it 'should still set the correct model values on change', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
view.attach(select)
view.wireEvents()
select.val('bravo')
select.trigger('change')
m.get_('test').should.equal('bravo')
it 'should replace options correctly', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test')))
view.attach(select)
l.remove('bravo')
select.children().length.should.equal(2)
select.children().eq(1).attr('value').should.equal('charlie')
select.children().eq(1).text().should.equal('c')
l.add('delta')
select.children().length.should.equal(3)
select.children().eq(2).attr('value').should.equal('delta')
select.children().eq(2).text().should.equal('delta')
it 'should update label text on stringify change', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
btext = new Varying('bravo')
stringify = (x) -> if x is 'bravo' then btext else x
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify }))
view.attach(select)
btext.set('bueno')
select.children().eq(1).text().should.equal('bueno')
it 'should bind new options correctly', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
dtext = new Varying('delta')
stringify = (x) -> if x is 'delta' then dtext else x
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify }))
view.attach(select)
l.add('delta')
dtext.set('delightful')
select.children().eq(3).text().should.equal('delightful')
| 121673 | should = require('should')
{ Varying, Model, attribute, List, from } = require('janus')
{ EnumAttributeEditView } = require('../../lib/view/enum-attribute')
$ = require('../../lib/view/dollar')
checkText = (select, expected) -> select.children().eq(idx).text().should.equal(text) for text, idx in expected
describe 'view', ->
describe 'enum attribute (select)', ->
it 'renders a select tag', ->
select = (new EnumAttributeEditView(new attribute.Enum(new Model(), 'test'))).artifact()
select.is('select').should.equal(true)
it 'renders an option tag for each value', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
it 'renders appropriate text given primitive values', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test', '1', 'true', 'false' ])
it 'renders appropriate text given options.stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify = (x) -> "#{x}!"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
it 'renders appropriate text given attribute#stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify: (x) -> "#{x}?"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test?', '1?', 'true?', 'false?' ])
it 'prefers options.stringify over attribute#stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify: (x) -> "#{x}?"
stringify = (x) -> "#{x}!"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
it 'updates text values if given a Varying text', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
v = new Varying('!')
stringify = (x) -> v.map((y) -> "#{x}#{y}")
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
v.set('?')
checkText(select, [ 'test?', '1?', 'true?', 'false?' ])
it 'renders additional option tags for new values', ->
values = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> values
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
values.add('delta')
select.children().length.should.equal(4)
checkText(select, [ 'alpha', 'bravo', 'charlie', 'delta' ])
values.add('nonsequitor', 2)
select.children().length.should.equal(5)
checkText(select, [ 'alpha', 'bravo', 'nonsequitor', 'charlie', 'delta' ])
it 'removes option tags for removed values', ->
values = new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
class TestAttribute extends attribute.Enum
_values: -> values
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
values.remove('bravo')
select.children().length.should.equal(3)
checkText(select, [ 'alpha', 'charlie', 'delta' ])
values.remove('alpha')
select.children().length.should.equal(2)
checkText(select, [ 'charlie', 'delta' ])
it 'populates the select with the correct value initially', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
select = (new EnumAttributeEditView(new TestAttribute(new Model({ test: '<NAME>lie' }), 'test'))).artifact()
select.children(':selected').length.should.equal(1)
select.children(':selected').text().should.equal('charlie')
it 'updates the selected value if the model changes', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
m = new Model({ test: '<NAME>lie' })
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
m.set('test', 'bravo')
select.children(':selected').length.should.equal(1)
select.children(':selected').text().should.equal('bravo')
# originally this test also tried {}, but this doesn't work as when you
# go to set the value it just thinks you want to set a bag of nothing.
it 'knows how to set the value for fringe data types', ->
mval = new Model()
arrval = []
class TestAttribute extends attribute.Enum
_values: -> new List([ mval, arrval ])
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
select.val(select.children().eq(1).val())
select.trigger('change')
m.get_('test').should.equal(arrval)
select.val(select.children().eq(0).val())
select.trigger('change')
m.get_('test').should.equal(mval)
it 'updates the model if the selected value changes', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
m = new Model({ test: '<NAME>' })
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
select.val('<NAME>')
select.trigger('change')
m.get_('test').should.equal('bravo')
select.val('<NAME>')
select.trigger('change')
m.get_('test').should.equal('charlie')
it 'sets the model value upon event wiring to the apparent value', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie', 'delta' ]
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
m.get_('test').should.equal('alpha')
it 'inserts a blank placeholder if the field is declared nullable', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
nullable: true
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ '', 'alpha', 'bravo', 'charlie' ])
it 'deals well with a Varying values list ref changing wholesale', ->
v = new Varying([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> v
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
checkText(select, [ 'alpha', 'bravo', 'charlie' ])
v.set([ 'puppies', 'kittens', 'ducklings' ])
select.children().length.should.equal(3)
checkText(select, [ 'puppies', 'kittens', 'ducklings' ])
it 'deals well with a from expression', ->
class TestAttribute extends attribute.Enum
_values: -> from('list')
model = new Model({ list: new List([ '<NAME>', '<NAME>', 'z<NAME>' ]) })
select = (new EnumAttributeEditView(new TestAttribute(model, 'test'))).artifact()
select.children().length.should.equal(3)
checkText(select, [ 'x<NAME>', 'yankee', 'zulu' ])
model.get_('list').add('missingno')
checkText(select, [ 'x<NAME>', 'yankee', 'zulu', 'missingno' ])
it 'inserts a blank placeholder if the field is declared nullable', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
nullable: true
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ '', 'alpha', 'bravo', 'charlie' ])
describe 'attach', ->
it 'should leave the initial markup alone', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
dom = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
(new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).attach(dom)
dom.children().eq(0).text().should.equal('a')
dom.children().eq(1).text().should.equal('b')
dom.children().eq(2).text().should.equal('c')
it 'should still set the correct model values on change', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
view.attach(select)
view.wireEvents()
select.val('bravo')
select.trigger('change')
m.get_('test').should.equal('bravo')
it 'should replace options correctly', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test')))
view.attach(select)
l.remove('bravo')
select.children().length.should.equal(2)
select.children().eq(1).attr('value').should.equal('charlie')
select.children().eq(1).text().should.equal('c')
l.add('delta')
select.children().length.should.equal(3)
select.children().eq(2).attr('value').should.equal('delta')
select.children().eq(2).text().should.equal('delta')
it 'should update label text on stringify change', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
btext = new Varying('bravo')
stringify = (x) -> if x is 'bravo' then btext else x
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify }))
view.attach(select)
btext.set('bueno')
select.children().eq(1).text().should.equal('bueno')
it 'should bind new options correctly', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="<NAME>">c</option></select>')
dtext = new Varying('delta')
stringify = (x) -> if x is 'delta' then dtext else x
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify }))
view.attach(select)
l.add('delta')
dtext.set('delightful')
select.children().eq(3).text().should.equal('delightful')
| true | should = require('should')
{ Varying, Model, attribute, List, from } = require('janus')
{ EnumAttributeEditView } = require('../../lib/view/enum-attribute')
$ = require('../../lib/view/dollar')
checkText = (select, expected) -> select.children().eq(idx).text().should.equal(text) for text, idx in expected
describe 'view', ->
describe 'enum attribute (select)', ->
it 'renders a select tag', ->
select = (new EnumAttributeEditView(new attribute.Enum(new Model(), 'test'))).artifact()
select.is('select').should.equal(true)
it 'renders an option tag for each value', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
it 'renders appropriate text given primitive values', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test', '1', 'true', 'false' ])
it 'renders appropriate text given options.stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify = (x) -> "#{x}!"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
it 'renders appropriate text given attribute#stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify: (x) -> "#{x}?"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test?', '1?', 'true?', 'false?' ])
it 'prefers options.stringify over attribute#stringify', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
stringify: (x) -> "#{x}?"
stringify = (x) -> "#{x}!"
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
it 'updates text values if given a Varying text', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'test', 1, true, false ]
v = new Varying('!')
stringify = (x) -> v.map((y) -> "#{x}#{y}")
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify })).artifact()
select.children().length.should.equal(4)
checkText(select, [ 'test!', '1!', 'true!', 'false!' ])
v.set('?')
checkText(select, [ 'test?', '1?', 'true?', 'false?' ])
it 'renders additional option tags for new values', ->
values = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> values
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
values.add('delta')
select.children().length.should.equal(4)
checkText(select, [ 'alpha', 'bravo', 'charlie', 'delta' ])
values.add('nonsequitor', 2)
select.children().length.should.equal(5)
checkText(select, [ 'alpha', 'bravo', 'nonsequitor', 'charlie', 'delta' ])
it 'removes option tags for removed values', ->
values = new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
class TestAttribute extends attribute.Enum
_values: -> values
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
values.remove('bravo')
select.children().length.should.equal(3)
checkText(select, [ 'alpha', 'charlie', 'delta' ])
values.remove('alpha')
select.children().length.should.equal(2)
checkText(select, [ 'charlie', 'delta' ])
it 'populates the select with the correct value initially', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
select = (new EnumAttributeEditView(new TestAttribute(new Model({ test: 'PI:NAME:<NAME>END_PIlie' }), 'test'))).artifact()
select.children(':selected').length.should.equal(1)
select.children(':selected').text().should.equal('charlie')
it 'updates the selected value if the model changes', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
m = new Model({ test: 'PI:NAME:<NAME>END_PIlie' })
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
m.set('test', 'bravo')
select.children(':selected').length.should.equal(1)
select.children(':selected').text().should.equal('bravo')
# originally this test also tried {}, but this doesn't work as when you
# go to set the value it just thinks you want to set a bag of nothing.
it 'knows how to set the value for fringe data types', ->
mval = new Model()
arrval = []
class TestAttribute extends attribute.Enum
_values: -> new List([ mval, arrval ])
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
select.val(select.children().eq(1).val())
select.trigger('change')
m.get_('test').should.equal(arrval)
select.val(select.children().eq(0).val())
select.trigger('change')
m.get_('test').should.equal(mval)
it 'updates the model if the selected value changes', ->
class TestAttribute extends attribute.Enum
_values: -> new List([ 'alpha', 'bravo', 'charlie', 'delta' ])
m = new Model({ test: 'PI:NAME:<NAME>END_PI' })
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
select.val('PI:NAME:<NAME>END_PI')
select.trigger('change')
m.get_('test').should.equal('bravo')
select.val('PI:NAME:<NAME>END_PI')
select.trigger('change')
m.get_('test').should.equal('charlie')
it 'sets the model value upon event wiring to the apparent value', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie', 'delta' ]
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
select = view.artifact()
view.wireEvents()
m.get_('test').should.equal('alpha')
it 'inserts a blank placeholder if the field is declared nullable', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
nullable: true
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ '', 'alpha', 'bravo', 'charlie' ])
it 'deals well with a Varying values list ref changing wholesale', ->
v = new Varying([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> v
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(3)
checkText(select, [ 'alpha', 'bravo', 'charlie' ])
v.set([ 'puppies', 'kittens', 'ducklings' ])
select.children().length.should.equal(3)
checkText(select, [ 'puppies', 'kittens', 'ducklings' ])
it 'deals well with a from expression', ->
class TestAttribute extends attribute.Enum
_values: -> from('list')
model = new Model({ list: new List([ 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'zPI:NAME:<NAME>END_PI' ]) })
select = (new EnumAttributeEditView(new TestAttribute(model, 'test'))).artifact()
select.children().length.should.equal(3)
checkText(select, [ 'xPI:NAME:<NAME>END_PI', 'yankee', 'zulu' ])
model.get_('list').add('missingno')
checkText(select, [ 'xPI:NAME:<NAME>END_PI', 'yankee', 'zulu', 'missingno' ])
it 'inserts a blank placeholder if the field is declared nullable', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
nullable: true
select = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).artifact()
select.children().length.should.equal(4)
checkText(select, [ '', 'alpha', 'bravo', 'charlie' ])
describe 'attach', ->
it 'should leave the initial markup alone', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
dom = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
(new EnumAttributeEditView(new TestAttribute(new Model(), 'test'))).attach(dom)
dom.children().eq(0).text().should.equal('a')
dom.children().eq(1).text().should.equal('b')
dom.children().eq(2).text().should.equal('c')
it 'should still set the correct model values on change', ->
class TestAttribute extends attribute.Enum
_values: -> [ 'alpha', 'bravo', 'charlie' ]
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
m = new Model()
view = new EnumAttributeEditView(new TestAttribute(m, 'test'))
view.attach(select)
view.wireEvents()
select.val('bravo')
select.trigger('change')
m.get_('test').should.equal('bravo')
it 'should replace options correctly', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test')))
view.attach(select)
l.remove('bravo')
select.children().length.should.equal(2)
select.children().eq(1).attr('value').should.equal('charlie')
select.children().eq(1).text().should.equal('c')
l.add('delta')
select.children().length.should.equal(3)
select.children().eq(2).attr('value').should.equal('delta')
select.children().eq(2).text().should.equal('delta')
it 'should update label text on stringify change', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="charlie">c</option></select>')
btext = new Varying('bravo')
stringify = (x) -> if x is 'bravo' then btext else x
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify }))
view.attach(select)
btext.set('bueno')
select.children().eq(1).text().should.equal('bueno')
it 'should bind new options correctly', ->
l = new List([ 'alpha', 'bravo', 'charlie' ])
class TestAttribute extends attribute.Enum
_values: -> l
select = $('<select><option value="alpha">a</option><option value="bravo">b</option><option value="PI:NAME:<NAME>END_PI">c</option></select>')
dtext = new Varying('delta')
stringify = (x) -> if x is 'delta' then dtext else x
view = (new EnumAttributeEditView(new TestAttribute(new Model(), 'test'), { stringify }))
view.attach(select)
l.add('delta')
dtext.set('delightful')
select.children().eq(3).text().should.equal('delightful')
|
[
{
"context": "verview Tests for no-throw-literal rule.\n# @author Dieter Oberkofler\n###\n\n'use strict'\n\n#-----------------------------",
"end": 81,
"score": 0.9998484253883362,
"start": 64,
"tag": "NAME",
"value": "Dieter Oberkofler"
}
] | src/tests/rules/no-throw-literal.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-throw-literal rule.
# @author Dieter Oberkofler
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-throw-literal'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
### eslint-disable coffee/no-template-curly-in-string ###
ruleTester.run 'no-throw-literal', rule,
valid: [
'throw new Error()'
"throw new Error('error')"
"throw Error('error')"
'''
e = new Error()
throw e
'''
'''
try
throw new Error()
catch e
throw e
'''
'throw a' # Identifier
'throw foo()' # CallExpression
'throw new foo()' # NewExpression
'throw foo.bar' # MemberExpression
'throw foo[bar]' # MemberExpression
'throw foo = new Error()' # AssignmentExpression
'throw (1; 2; new Error())' # SequenceExpression
"throw 'literal' && new Error()" # LogicalExpression (right)
"throw new Error() || 'literal'" # LogicalExpression (left)
"throw if foo then new Error() else 'literal'" # ConditionalExpression (consequent)
"throw if foo then 'literal' else new Error()" # ConditionalExpression (alternate)
,
code: 'throw tag"#{foo}"' # TaggedTemplateExpression
,
code: '''
->
index = 0
throw yield index++
''' # YieldExpression
,
code: '-> throw await bar' # AwaitExpression
]
invalid: [
code: "throw 'error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw 0'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw false'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw null'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw undefined'
errors: [
message: 'Do not throw undefined.'
type: 'ThrowStatement'
]
,
# String concatenation
code: "throw 'a' + 'b'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: '''
b = new Error()
throw 'a' + b
'''
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# AssignmentExpression
code: "throw foo = 'error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# SequenceExpression
code: 'throw (new Error(); 1; 2; 3)'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# LogicalExpression
code: "throw 'literal' and 'not an Error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# ConditionalExpression
code: "throw if foo then 'not an Error' else 'literal'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# TemplateLiteral
code: 'throw "#{err}"'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
]
| 58944 | ###*
# @fileoverview Tests for no-throw-literal rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-throw-literal'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
### eslint-disable coffee/no-template-curly-in-string ###
ruleTester.run 'no-throw-literal', rule,
valid: [
'throw new Error()'
"throw new Error('error')"
"throw Error('error')"
'''
e = new Error()
throw e
'''
'''
try
throw new Error()
catch e
throw e
'''
'throw a' # Identifier
'throw foo()' # CallExpression
'throw new foo()' # NewExpression
'throw foo.bar' # MemberExpression
'throw foo[bar]' # MemberExpression
'throw foo = new Error()' # AssignmentExpression
'throw (1; 2; new Error())' # SequenceExpression
"throw 'literal' && new Error()" # LogicalExpression (right)
"throw new Error() || 'literal'" # LogicalExpression (left)
"throw if foo then new Error() else 'literal'" # ConditionalExpression (consequent)
"throw if foo then 'literal' else new Error()" # ConditionalExpression (alternate)
,
code: 'throw tag"#{foo}"' # TaggedTemplateExpression
,
code: '''
->
index = 0
throw yield index++
''' # YieldExpression
,
code: '-> throw await bar' # AwaitExpression
]
invalid: [
code: "throw 'error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw 0'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw false'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw null'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw undefined'
errors: [
message: 'Do not throw undefined.'
type: 'ThrowStatement'
]
,
# String concatenation
code: "throw 'a' + 'b'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: '''
b = new Error()
throw 'a' + b
'''
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# AssignmentExpression
code: "throw foo = 'error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# SequenceExpression
code: 'throw (new Error(); 1; 2; 3)'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# LogicalExpression
code: "throw 'literal' and 'not an Error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# ConditionalExpression
code: "throw if foo then 'not an Error' else 'literal'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# TemplateLiteral
code: 'throw "#{err}"'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
]
| true | ###*
# @fileoverview Tests for no-throw-literal rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-throw-literal'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
### eslint-disable coffee/no-template-curly-in-string ###
ruleTester.run 'no-throw-literal', rule,
valid: [
'throw new Error()'
"throw new Error('error')"
"throw Error('error')"
'''
e = new Error()
throw e
'''
'''
try
throw new Error()
catch e
throw e
'''
'throw a' # Identifier
'throw foo()' # CallExpression
'throw new foo()' # NewExpression
'throw foo.bar' # MemberExpression
'throw foo[bar]' # MemberExpression
'throw foo = new Error()' # AssignmentExpression
'throw (1; 2; new Error())' # SequenceExpression
"throw 'literal' && new Error()" # LogicalExpression (right)
"throw new Error() || 'literal'" # LogicalExpression (left)
"throw if foo then new Error() else 'literal'" # ConditionalExpression (consequent)
"throw if foo then 'literal' else new Error()" # ConditionalExpression (alternate)
,
code: 'throw tag"#{foo}"' # TaggedTemplateExpression
,
code: '''
->
index = 0
throw yield index++
''' # YieldExpression
,
code: '-> throw await bar' # AwaitExpression
]
invalid: [
code: "throw 'error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw 0'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw false'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw null'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: 'throw undefined'
errors: [
message: 'Do not throw undefined.'
type: 'ThrowStatement'
]
,
# String concatenation
code: "throw 'a' + 'b'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
code: '''
b = new Error()
throw 'a' + b
'''
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# AssignmentExpression
code: "throw foo = 'error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# SequenceExpression
code: 'throw (new Error(); 1; 2; 3)'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# LogicalExpression
code: "throw 'literal' and 'not an Error'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# ConditionalExpression
code: "throw if foo then 'not an Error' else 'literal'"
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
,
# TemplateLiteral
code: 'throw "#{err}"'
errors: [
message: 'Expected an error object to be thrown.'
type: 'ThrowStatement'
]
]
|
[
{
"context": "scp', [\n '/home/panos/kmeans/kmeans.html'\n 'root@vlantis.gr:/var/www/html/static/'\n ]\n",
"end": 4464,
"score": 0.9521909952163696,
"start": 4460,
"tag": "EMAIL",
"value": "root"
},
{
"context": " [\n '/home/panos/kmeans/kmeans.html'\n 'root@vlantis.gr:/... | gulpfile.coffee | panayiotis/kMeans | 0 | gulp = require 'gulp'
gutil = require 'gulp-util'
concat = require 'gulp-concat'
prefix = require 'gulp-autoprefixer'
coffee = require 'gulp-coffee'
sourcemaps = require 'gulp-sourcemaps'
nodemon = require 'gulp-nodemon'
sass = require 'gulp-sass'
uglify = require 'gulp-uglify'
jasmine = require 'gulp-jasmine'
browserSync = require 'browser-sync'
spawn = require('child_process').spawn
exec = require('child_process').exec
reload = browserSync.reload
chalk = gutil.colors
# Paths
paths =
coffee: [ './app/javascripts/*.coffee' ]
spec: [ './spec/*' ]
js: [
'vendor/components/lodash/lodash.min.js'
'vendor/components/d3/d3.min.js'
'vendor/components/highlightjs/highlight.coffee.js'
'public/*.js'
'!public/application.js'
]
sass: [
'vendor/components/foundation/scss/*.scss'
'app/stylesheets/*.scss'
]
sass_paths: [
'vendor/components/foundation/scss/'
'app/stylesheets/'
]
css: [
'vendor/components/highlightjs/styles/tomorrow.css'
'./public/*.css'
'!./public/application.css'
]
views: [ 'views/*.jade' ]
# Javascript
# concaternate javascripts to
gulp.task 'javascript', ['coffee'], ->
gulp.src paths.js
.pipe concat('application.js')
.pipe gulp.dest('./public/')
# Coffeescript
# compile .coffee files to js
gulp.task 'coffee', ->
gulp.src paths.coffee
.pipe sourcemaps.init()
.pipe coffee({bare: true}).on 'error', (error) ->
gutil.log(
chalk.red(error.name),
"from #{error.plugin}\n",
"#{chalk.yellow(error.message)}\n",
"\n#{chalk.yellow('Error Stack')}\n#{error.stack}"
)
spawn 'notify-send', [
'--urgency=low'
'--expire-time=5'
'--icon=/home/panos/Pictures/coffeescript.svg'
"Coffeescript " + error.name
error.message
]
#.pipe uglify()
.pipe sourcemaps.write()
.pipe gulp.dest('./public/')
# Stylesheets
# concaternate css together
gulp.task 'stylesheets', ['sass'], ->
gulp.src paths.css
.pipe concat('application.css')
.pipe gulp.dest('public')
.pipe reload(stream: true)
# Sass
# compile .scss files to css
gulp.task 'sass', ->
gulp.src paths.sass
.pipe sass(
includePaths: paths.sass_paths
outputStyle: 'expanded'
, errLogToConsole: true)
.pipe prefix 'last 2 versions','> 1%','ie 8','Android 2','Firefox ESR'
.pipe gulp.dest('public/')
.pipe reload(stream: true)
gulp.task 'browser-sync', [ 'nodemon' ], ->
browserSync.init
# Express server starts on port 5000
proxy: 'localhost:5000'
browser: ['google-chrome']
gulp.task 'default', [ 'watch', 'javascript', 'stylesheets', 'browser-sync']
gulp.task 'watch', ->
console.reset()
gulp.watch paths.sass, ['stylesheets']
gulp.watch paths.coffee, ['javascript']
gulp.watch paths.views, reload
gulp.watch ['./public/application.css'], reload
gulp.watch ['./public/application.js'], reload
gulp.task 'wget', ->
console.log('wget html page')
spawn 'wget', ['http://localhost:5000/', '-O', 'kmeans.html']
gulp.task 'nodemon', (cb) ->
# clear terminal
process.stdout.write('\x1b[2J\x1b[1;1H')
called = false
nodemon
# nodemon our expressjs server
script: 'app.coffee'
# watch core server file(s) that require server restart on change
watch: ['app.coffee']
.on 'start', ->
# ensure start only got called once
if !called
called = true
cb()
.on 'restart', ->
# reload connected browsers after a slight delay
setTimeout (->
reload stream: false
), 500
gulp.task 'jasmine', ->
console.reset()
gulp.src paths.coffee.concat(paths.specs)
.pipe jasmine(includeStackTrace: true)
.on('error', (error) ->
gutil.log(
chalk.red(error.name),
"from #{error.plugin}\n",
"#{chalk.yellow(error.message)}\n",
"\n#{chalk.yellow('Error Stack')}\n#{error.stack}"
)
)
gulp.task 'test', ['jasmine'], ->
gulp.watch [ 'app/javascripts/kmeans.coffee', 'spec/*' ], [ 'jasmine' ]
gulp.task 'deploy', ['wget'], ->
exec 'scp', [
'/home/panos/kmeans/kmeans.html'
'root@vlantis.gr:/var/www/html/static/'
]
| 184624 | gulp = require 'gulp'
gutil = require 'gulp-util'
concat = require 'gulp-concat'
prefix = require 'gulp-autoprefixer'
coffee = require 'gulp-coffee'
sourcemaps = require 'gulp-sourcemaps'
nodemon = require 'gulp-nodemon'
sass = require 'gulp-sass'
uglify = require 'gulp-uglify'
jasmine = require 'gulp-jasmine'
browserSync = require 'browser-sync'
spawn = require('child_process').spawn
exec = require('child_process').exec
reload = browserSync.reload
chalk = gutil.colors
# Paths
paths =
coffee: [ './app/javascripts/*.coffee' ]
spec: [ './spec/*' ]
js: [
'vendor/components/lodash/lodash.min.js'
'vendor/components/d3/d3.min.js'
'vendor/components/highlightjs/highlight.coffee.js'
'public/*.js'
'!public/application.js'
]
sass: [
'vendor/components/foundation/scss/*.scss'
'app/stylesheets/*.scss'
]
sass_paths: [
'vendor/components/foundation/scss/'
'app/stylesheets/'
]
css: [
'vendor/components/highlightjs/styles/tomorrow.css'
'./public/*.css'
'!./public/application.css'
]
views: [ 'views/*.jade' ]
# Javascript
# concaternate javascripts to
gulp.task 'javascript', ['coffee'], ->
gulp.src paths.js
.pipe concat('application.js')
.pipe gulp.dest('./public/')
# Coffeescript
# compile .coffee files to js
gulp.task 'coffee', ->
gulp.src paths.coffee
.pipe sourcemaps.init()
.pipe coffee({bare: true}).on 'error', (error) ->
gutil.log(
chalk.red(error.name),
"from #{error.plugin}\n",
"#{chalk.yellow(error.message)}\n",
"\n#{chalk.yellow('Error Stack')}\n#{error.stack}"
)
spawn 'notify-send', [
'--urgency=low'
'--expire-time=5'
'--icon=/home/panos/Pictures/coffeescript.svg'
"Coffeescript " + error.name
error.message
]
#.pipe uglify()
.pipe sourcemaps.write()
.pipe gulp.dest('./public/')
# Stylesheets
# concaternate css together
gulp.task 'stylesheets', ['sass'], ->
gulp.src paths.css
.pipe concat('application.css')
.pipe gulp.dest('public')
.pipe reload(stream: true)
# Sass
# compile .scss files to css
gulp.task 'sass', ->
gulp.src paths.sass
.pipe sass(
includePaths: paths.sass_paths
outputStyle: 'expanded'
, errLogToConsole: true)
.pipe prefix 'last 2 versions','> 1%','ie 8','Android 2','Firefox ESR'
.pipe gulp.dest('public/')
.pipe reload(stream: true)
gulp.task 'browser-sync', [ 'nodemon' ], ->
browserSync.init
# Express server starts on port 5000
proxy: 'localhost:5000'
browser: ['google-chrome']
gulp.task 'default', [ 'watch', 'javascript', 'stylesheets', 'browser-sync']
gulp.task 'watch', ->
console.reset()
gulp.watch paths.sass, ['stylesheets']
gulp.watch paths.coffee, ['javascript']
gulp.watch paths.views, reload
gulp.watch ['./public/application.css'], reload
gulp.watch ['./public/application.js'], reload
gulp.task 'wget', ->
console.log('wget html page')
spawn 'wget', ['http://localhost:5000/', '-O', 'kmeans.html']
gulp.task 'nodemon', (cb) ->
# clear terminal
process.stdout.write('\x1b[2J\x1b[1;1H')
called = false
nodemon
# nodemon our expressjs server
script: 'app.coffee'
# watch core server file(s) that require server restart on change
watch: ['app.coffee']
.on 'start', ->
# ensure start only got called once
if !called
called = true
cb()
.on 'restart', ->
# reload connected browsers after a slight delay
setTimeout (->
reload stream: false
), 500
gulp.task 'jasmine', ->
console.reset()
gulp.src paths.coffee.concat(paths.specs)
.pipe jasmine(includeStackTrace: true)
.on('error', (error) ->
gutil.log(
chalk.red(error.name),
"from #{error.plugin}\n",
"#{chalk.yellow(error.message)}\n",
"\n#{chalk.yellow('Error Stack')}\n#{error.stack}"
)
)
gulp.task 'test', ['jasmine'], ->
gulp.watch [ 'app/javascripts/kmeans.coffee', 'spec/*' ], [ 'jasmine' ]
gulp.task 'deploy', ['wget'], ->
exec 'scp', [
'/home/panos/kmeans/kmeans.html'
'<EMAIL>@<EMAIL>:/var/www/html/static/'
]
| true | gulp = require 'gulp'
gutil = require 'gulp-util'
concat = require 'gulp-concat'
prefix = require 'gulp-autoprefixer'
coffee = require 'gulp-coffee'
sourcemaps = require 'gulp-sourcemaps'
nodemon = require 'gulp-nodemon'
sass = require 'gulp-sass'
uglify = require 'gulp-uglify'
jasmine = require 'gulp-jasmine'
browserSync = require 'browser-sync'
spawn = require('child_process').spawn
exec = require('child_process').exec
reload = browserSync.reload
chalk = gutil.colors
# Paths
paths =
coffee: [ './app/javascripts/*.coffee' ]
spec: [ './spec/*' ]
js: [
'vendor/components/lodash/lodash.min.js'
'vendor/components/d3/d3.min.js'
'vendor/components/highlightjs/highlight.coffee.js'
'public/*.js'
'!public/application.js'
]
sass: [
'vendor/components/foundation/scss/*.scss'
'app/stylesheets/*.scss'
]
sass_paths: [
'vendor/components/foundation/scss/'
'app/stylesheets/'
]
css: [
'vendor/components/highlightjs/styles/tomorrow.css'
'./public/*.css'
'!./public/application.css'
]
views: [ 'views/*.jade' ]
# Javascript
# concaternate javascripts to
gulp.task 'javascript', ['coffee'], ->
gulp.src paths.js
.pipe concat('application.js')
.pipe gulp.dest('./public/')
# Coffeescript
# compile .coffee files to js
gulp.task 'coffee', ->
gulp.src paths.coffee
.pipe sourcemaps.init()
.pipe coffee({bare: true}).on 'error', (error) ->
gutil.log(
chalk.red(error.name),
"from #{error.plugin}\n",
"#{chalk.yellow(error.message)}\n",
"\n#{chalk.yellow('Error Stack')}\n#{error.stack}"
)
spawn 'notify-send', [
'--urgency=low'
'--expire-time=5'
'--icon=/home/panos/Pictures/coffeescript.svg'
"Coffeescript " + error.name
error.message
]
#.pipe uglify()
.pipe sourcemaps.write()
.pipe gulp.dest('./public/')
# Stylesheets
# concaternate css together
gulp.task 'stylesheets', ['sass'], ->
gulp.src paths.css
.pipe concat('application.css')
.pipe gulp.dest('public')
.pipe reload(stream: true)
# Sass
# compile .scss files to css
gulp.task 'sass', ->
gulp.src paths.sass
.pipe sass(
includePaths: paths.sass_paths
outputStyle: 'expanded'
, errLogToConsole: true)
.pipe prefix 'last 2 versions','> 1%','ie 8','Android 2','Firefox ESR'
.pipe gulp.dest('public/')
.pipe reload(stream: true)
gulp.task 'browser-sync', [ 'nodemon' ], ->
browserSync.init
# Express server starts on port 5000
proxy: 'localhost:5000'
browser: ['google-chrome']
gulp.task 'default', [ 'watch', 'javascript', 'stylesheets', 'browser-sync']
gulp.task 'watch', ->
console.reset()
gulp.watch paths.sass, ['stylesheets']
gulp.watch paths.coffee, ['javascript']
gulp.watch paths.views, reload
gulp.watch ['./public/application.css'], reload
gulp.watch ['./public/application.js'], reload
gulp.task 'wget', ->
console.log('wget html page')
spawn 'wget', ['http://localhost:5000/', '-O', 'kmeans.html']
gulp.task 'nodemon', (cb) ->
# clear terminal
process.stdout.write('\x1b[2J\x1b[1;1H')
called = false
nodemon
# nodemon our expressjs server
script: 'app.coffee'
# watch core server file(s) that require server restart on change
watch: ['app.coffee']
.on 'start', ->
# ensure start only got called once
if !called
called = true
cb()
.on 'restart', ->
# reload connected browsers after a slight delay
setTimeout (->
reload stream: false
), 500
gulp.task 'jasmine', ->
console.reset()
gulp.src paths.coffee.concat(paths.specs)
.pipe jasmine(includeStackTrace: true)
.on('error', (error) ->
gutil.log(
chalk.red(error.name),
"from #{error.plugin}\n",
"#{chalk.yellow(error.message)}\n",
"\n#{chalk.yellow('Error Stack')}\n#{error.stack}"
)
)
gulp.task 'test', ['jasmine'], ->
gulp.watch [ 'app/javascripts/kmeans.coffee', 'spec/*' ], [ 'jasmine' ]
gulp.task 'deploy', ['wget'], ->
exec 'scp', [
'/home/panos/kmeans/kmeans.html'
'PI:EMAIL:<EMAIL>END_PI@PI:EMAIL:<EMAIL>END_PI:/var/www/html/static/'
]
|
[
{
"context": "tml(\"\").fullCalendar\n googleCalendarApiKey: \"AIzaSyCaiegXWqtrKqG4rDT9odrd-NyFKEc8MV8\"\n events:\n googleCalendarId: calId\n ",
"end": 1434,
"score": 0.9997697472572327,
"start": 1395,
"tag": "KEY",
"value": "AIzaSyCaiegXWqtrKqG4rDT9odrd-NyFKEc8MV8"
}
] | app/assets/javascripts/main.js.coffee | samrayner/sheffield-ultimate | 0 | class Viewport
@getWidth: ->
size = window
.getComputedStyle(document.body,':after')
.getPropertyValue('content')
size.replace(/\"/g, '')
class Lightbox
@init: ->
$imageLinks = $("a[href$='.jpg'],a[href$='.png'],a[href$='.jpeg']")
.filter ->
$link = $(this)
$images = $link.children('img[src="'+$link.attr("href")+'"]')
if $images.length == 1
$link.attr("title", $images.attr("title"))
else
false
$imageLinks.fancybox
padding: 0
helpers:
title:
type: 'over'
thumbs: $imageLinks.length > 1
class FluidVideos
@init: (container=null, $videos=null) ->
$videos ||= $("iframe[src*='vimeo.com'], iframe[src*='youtube.com']")
$videos.each ->
$(this)
.data('aspectRatio', this.height/this.width)
.removeAttr('height')
.removeAttr('width')
$(window).resize ->
$videos.each ->
$elm = $(this)
$parent = if container then $(container) else $elm.parent()
newWidth = $parent.width()
$elm
.width(newWidth)
.height(newWidth * $elm.data('aspectRatio'))
$(window).resize()
class Calendar
@init: ->
$placeholder = $('#fullcalendar')
calId = $placeholder.data("calendar-id")
$placeholder.html("").fullCalendar
googleCalendarApiKey: "AIzaSyCaiegXWqtrKqG4rDT9odrd-NyFKEc8MV8"
events:
googleCalendarId: calId
firstDay: 1
timeFormat: 'h(:mm)a'
eventRender: (event, element) ->
if event.location && event.location.length
element.tooltip(title: event.location, container: "body")
return
eventClick: (event) ->
if event.description && event.description.indexOf("http") == 0
window.open(event.description)
return false
if event.location && event.location.length
window.open("https://www.google.co.uk/maps/preview?q="+encodeURIComponent(event.location))
return false
class Results
constructor: (@traceClass="trace", @collapsedClass="collapsed") ->
@$tables = $("#results table")
@$ths = @$tables.find("th")
clearTraces: =>
@$ths.removeClass(@traceClass)
trace: (e) =>
@clearTraces()
@traceRow(e.currentTarget)
@traceCol(e.currentTarget)
traceRow: (cell) ->
$(cell).closest("tr").find("th").addClass(@traceClass)
traceCol: (cell) ->
cellIndex = $(cell).closest("tr").find("th,td").index(cell)+1
$(cell).closest("table").find("th:nth-child("+cellIndex+")").addClass(@traceClass)
collapseMisc: ->
$("#misc-results").addClass(@collapsedClass)
$("#misc-results h1").click(@expandParent)
expandParent: (e) =>
$(e.currentTarget).closest("."+@collapsedClass).toggleClass(@collapsedClass)
init: ->
@collapseMisc()
return false unless @$tables.length
@$tables.mouseout(@clearTraces)
@$tables.find("td").hover(@trace)
$ ->
Calendar.init()
FluidVideos.init()
results = new Results()
results.init()
Lightbox.init() if Viewport.getWidth() == "wide"
| 64512 | class Viewport
@getWidth: ->
size = window
.getComputedStyle(document.body,':after')
.getPropertyValue('content')
size.replace(/\"/g, '')
class Lightbox
@init: ->
$imageLinks = $("a[href$='.jpg'],a[href$='.png'],a[href$='.jpeg']")
.filter ->
$link = $(this)
$images = $link.children('img[src="'+$link.attr("href")+'"]')
if $images.length == 1
$link.attr("title", $images.attr("title"))
else
false
$imageLinks.fancybox
padding: 0
helpers:
title:
type: 'over'
thumbs: $imageLinks.length > 1
class FluidVideos
@init: (container=null, $videos=null) ->
$videos ||= $("iframe[src*='vimeo.com'], iframe[src*='youtube.com']")
$videos.each ->
$(this)
.data('aspectRatio', this.height/this.width)
.removeAttr('height')
.removeAttr('width')
$(window).resize ->
$videos.each ->
$elm = $(this)
$parent = if container then $(container) else $elm.parent()
newWidth = $parent.width()
$elm
.width(newWidth)
.height(newWidth * $elm.data('aspectRatio'))
$(window).resize()
class Calendar
@init: ->
$placeholder = $('#fullcalendar')
calId = $placeholder.data("calendar-id")
$placeholder.html("").fullCalendar
googleCalendarApiKey: "<KEY>"
events:
googleCalendarId: calId
firstDay: 1
timeFormat: 'h(:mm)a'
eventRender: (event, element) ->
if event.location && event.location.length
element.tooltip(title: event.location, container: "body")
return
eventClick: (event) ->
if event.description && event.description.indexOf("http") == 0
window.open(event.description)
return false
if event.location && event.location.length
window.open("https://www.google.co.uk/maps/preview?q="+encodeURIComponent(event.location))
return false
class Results
constructor: (@traceClass="trace", @collapsedClass="collapsed") ->
@$tables = $("#results table")
@$ths = @$tables.find("th")
clearTraces: =>
@$ths.removeClass(@traceClass)
trace: (e) =>
@clearTraces()
@traceRow(e.currentTarget)
@traceCol(e.currentTarget)
traceRow: (cell) ->
$(cell).closest("tr").find("th").addClass(@traceClass)
traceCol: (cell) ->
cellIndex = $(cell).closest("tr").find("th,td").index(cell)+1
$(cell).closest("table").find("th:nth-child("+cellIndex+")").addClass(@traceClass)
collapseMisc: ->
$("#misc-results").addClass(@collapsedClass)
$("#misc-results h1").click(@expandParent)
expandParent: (e) =>
$(e.currentTarget).closest("."+@collapsedClass).toggleClass(@collapsedClass)
init: ->
@collapseMisc()
return false unless @$tables.length
@$tables.mouseout(@clearTraces)
@$tables.find("td").hover(@trace)
$ ->
Calendar.init()
FluidVideos.init()
results = new Results()
results.init()
Lightbox.init() if Viewport.getWidth() == "wide"
| true | class Viewport
@getWidth: ->
size = window
.getComputedStyle(document.body,':after')
.getPropertyValue('content')
size.replace(/\"/g, '')
class Lightbox
@init: ->
$imageLinks = $("a[href$='.jpg'],a[href$='.png'],a[href$='.jpeg']")
.filter ->
$link = $(this)
$images = $link.children('img[src="'+$link.attr("href")+'"]')
if $images.length == 1
$link.attr("title", $images.attr("title"))
else
false
$imageLinks.fancybox
padding: 0
helpers:
title:
type: 'over'
thumbs: $imageLinks.length > 1
class FluidVideos
@init: (container=null, $videos=null) ->
$videos ||= $("iframe[src*='vimeo.com'], iframe[src*='youtube.com']")
$videos.each ->
$(this)
.data('aspectRatio', this.height/this.width)
.removeAttr('height')
.removeAttr('width')
$(window).resize ->
$videos.each ->
$elm = $(this)
$parent = if container then $(container) else $elm.parent()
newWidth = $parent.width()
$elm
.width(newWidth)
.height(newWidth * $elm.data('aspectRatio'))
$(window).resize()
class Calendar
@init: ->
$placeholder = $('#fullcalendar')
calId = $placeholder.data("calendar-id")
$placeholder.html("").fullCalendar
googleCalendarApiKey: "PI:KEY:<KEY>END_PI"
events:
googleCalendarId: calId
firstDay: 1
timeFormat: 'h(:mm)a'
eventRender: (event, element) ->
if event.location && event.location.length
element.tooltip(title: event.location, container: "body")
return
eventClick: (event) ->
if event.description && event.description.indexOf("http") == 0
window.open(event.description)
return false
if event.location && event.location.length
window.open("https://www.google.co.uk/maps/preview?q="+encodeURIComponent(event.location))
return false
class Results
constructor: (@traceClass="trace", @collapsedClass="collapsed") ->
@$tables = $("#results table")
@$ths = @$tables.find("th")
clearTraces: =>
@$ths.removeClass(@traceClass)
trace: (e) =>
@clearTraces()
@traceRow(e.currentTarget)
@traceCol(e.currentTarget)
traceRow: (cell) ->
$(cell).closest("tr").find("th").addClass(@traceClass)
traceCol: (cell) ->
cellIndex = $(cell).closest("tr").find("th,td").index(cell)+1
$(cell).closest("table").find("th:nth-child("+cellIndex+")").addClass(@traceClass)
collapseMisc: ->
$("#misc-results").addClass(@collapsedClass)
$("#misc-results h1").click(@expandParent)
expandParent: (e) =>
$(e.currentTarget).closest("."+@collapsedClass).toggleClass(@collapsedClass)
init: ->
@collapseMisc()
return false unless @$tables.length
@$tables.mouseout(@clearTraces)
@$tables.find("td").hover(@trace)
$ ->
Calendar.init()
FluidVideos.init()
results = new Results()
results.init()
Lightbox.init() if Viewport.getWidth() == "wide"
|
[
{
"context": "./src/team'\n client = new Client accessToken: 'dummy'\n clock = sinon.useFakeTimers currentTime\n ",
"end": 448,
"score": 0.9812232851982117,
"start": 443,
"tag": "KEY",
"value": "dummy"
},
{
"context": " team = new Team client, 'oneteam',\n name: 'Onete... | test/team_test.coffee | oneteam-dev/node-oneteam-client | 1 | {EventEmitter} = require 'events'
{expect} = require 'chai'
nock = require 'nock'
sinon = require 'sinon'
describe 'team', ->
client = null
clock = null
nockScope = null
currentTime = 1455008759942
Team = null
Client = null
team = null
beforeEach ->
process.env.ONETEAM_BASE_API_URL = 'https://api.one-team.test'
Client = require '../src/client'
Team = require '../src/team'
client = new Client accessToken: 'dummy'
clock = sinon.useFakeTimers currentTime
nockScope = nock 'https://api.one-team.test'
nock.disableNetConnect()
team = new Team client, 'oneteam',
name: 'Oneteam Inc.'
timezone_offset: 9
profile_photo: { url: 'hoge' }
locale: 'ja'
afterEach ->
nock.cleanAll()
describe 'constructor', ->
it 'retains options in argument', ->
expect(team.name).to.equal 'Oneteam Inc.'
expect(team.teamName).to.equal 'oneteam'
expect(team.timezoneOffset).to.equal 9
expect(team.profilePhoto).to.deep.equal { url: 'hoge' }
expect(team.locale).to.equal 'ja'
describe 'createTopic', ->
beforeEach ->
nockScope
.post '/teams/oneteam/topics'
.reply 201,
title: 'qwerty'
key: 'asdfe1234'
body: 'YO'
html_body: '<p>YO</p>'
it 'sends request', (done) ->
team.createTopic { title: 'foo', body: 'hello' }, (err, res, topic) ->
return done err if err
expect(topic.title).to.equal 'qwerty'
do done
| 15326 | {EventEmitter} = require 'events'
{expect} = require 'chai'
nock = require 'nock'
sinon = require 'sinon'
describe 'team', ->
client = null
clock = null
nockScope = null
currentTime = 1455008759942
Team = null
Client = null
team = null
beforeEach ->
process.env.ONETEAM_BASE_API_URL = 'https://api.one-team.test'
Client = require '../src/client'
Team = require '../src/team'
client = new Client accessToken: '<KEY>'
clock = sinon.useFakeTimers currentTime
nockScope = nock 'https://api.one-team.test'
nock.disableNetConnect()
team = new Team client, 'oneteam',
name: '<NAME>.'
timezone_offset: 9
profile_photo: { url: 'hoge' }
locale: 'ja'
afterEach ->
nock.cleanAll()
describe 'constructor', ->
it 'retains options in argument', ->
expect(team.name).to.equal 'Oneteam Inc.'
expect(team.teamName).to.equal 'oneteam'
expect(team.timezoneOffset).to.equal 9
expect(team.profilePhoto).to.deep.equal { url: 'hoge' }
expect(team.locale).to.equal 'ja'
describe 'createTopic', ->
beforeEach ->
nockScope
.post '/teams/oneteam/topics'
.reply 201,
title: 'qwerty'
key: '<KEY>'
body: 'YO'
html_body: '<p>YO</p>'
it 'sends request', (done) ->
team.createTopic { title: 'foo', body: 'hello' }, (err, res, topic) ->
return done err if err
expect(topic.title).to.equal 'qwerty'
do done
| true | {EventEmitter} = require 'events'
{expect} = require 'chai'
nock = require 'nock'
sinon = require 'sinon'
describe 'team', ->
client = null
clock = null
nockScope = null
currentTime = 1455008759942
Team = null
Client = null
team = null
beforeEach ->
process.env.ONETEAM_BASE_API_URL = 'https://api.one-team.test'
Client = require '../src/client'
Team = require '../src/team'
client = new Client accessToken: 'PI:KEY:<KEY>END_PI'
clock = sinon.useFakeTimers currentTime
nockScope = nock 'https://api.one-team.test'
nock.disableNetConnect()
team = new Team client, 'oneteam',
name: 'PI:NAME:<NAME>END_PI.'
timezone_offset: 9
profile_photo: { url: 'hoge' }
locale: 'ja'
afterEach ->
nock.cleanAll()
describe 'constructor', ->
it 'retains options in argument', ->
expect(team.name).to.equal 'Oneteam Inc.'
expect(team.teamName).to.equal 'oneteam'
expect(team.timezoneOffset).to.equal 9
expect(team.profilePhoto).to.deep.equal { url: 'hoge' }
expect(team.locale).to.equal 'ja'
describe 'createTopic', ->
beforeEach ->
nockScope
.post '/teams/oneteam/topics'
.reply 201,
title: 'qwerty'
key: 'PI:KEY:<KEY>END_PI'
body: 'YO'
html_body: '<p>YO</p>'
it 'sends request', (done) ->
team.createTopic { title: 'foo', body: 'hello' }, (err, res, topic) ->
return done err if err
expect(topic.title).to.equal 'qwerty'
do done
|
[
{
"context": "[\n {\n key: \"username\"\n }\n {\n key: \"password\"\n type: \"password\"",
"end": 24,
"score": 0.9981058239936829,
"start": 16,
"tag": "USERNAME",
"value": "username"
},
{
"context": "[\n {\n key: \"username\"\n }\n {\n key: \"password\"\n type... | schemas/api-authentication-form-schema.cson | octoblu/endo-sendgrid | 0 | [
{
key: "username"
}
{
key: "password"
type: "password"
}
]
| 17638 | [
{
key: "username"
}
{
key: "<KEY>"
type: "password"
}
]
| true | [
{
key: "username"
}
{
key: "PI:KEY:<KEY>END_PI"
type: "password"
}
]
|
[
{
"context": "freely available on \n# [github](http://github.com/matehat/brewer.js).*\n#\n# This is the module obtained when",
"end": 140,
"score": 0.9992620944976807,
"start": 133,
"tag": "USERNAME",
"value": "matehat"
},
{
"context": "file\n\n##### MIT License\n#\n# Copyright (... | src/index.coffee | matehat/brewer.js | 4 | ##### Brewer.js, your asset management friend
#
# *Version 0.2.2 - Source code is freely available on
# [github](http://github.com/matehat/brewer.js).*
#
# This is the module obtained when importing `brewer`.
# It exports the main classes from [project](project.html), [package](package.html),
# [source](source.html) and [file](file.html).
{@Package} = require './package'
{@Project} = require './project'
{@Source} = require './source'
{@File} = require './file'
@watchers = {
count: 0
incr: -> @count++
decr: -> @count--
}
# It also parses the _extensions/_ directory to find modules that extends **brewer.js**
# functionality.
for file in (require 'fs').readdirSync((require 'path').resolve(__dirname, './extensions'))
if file[0] != '.'
require './extensions/' + file
##### MIT License
#
# Copyright (c) 2012 Mathieu D'Amours
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to
# whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE. | 147919 | ##### Brewer.js, your asset management friend
#
# *Version 0.2.2 - Source code is freely available on
# [github](http://github.com/matehat/brewer.js).*
#
# This is the module obtained when importing `brewer`.
# It exports the main classes from [project](project.html), [package](package.html),
# [source](source.html) and [file](file.html).
{@Package} = require './package'
{@Project} = require './project'
{@Source} = require './source'
{@File} = require './file'
@watchers = {
count: 0
incr: -> @count++
decr: -> @count--
}
# It also parses the _extensions/_ directory to find modules that extends **brewer.js**
# functionality.
for file in (require 'fs').readdirSync((require 'path').resolve(__dirname, './extensions'))
if file[0] != '.'
require './extensions/' + file
##### MIT License
#
# Copyright (c) 2012 <NAME>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to
# whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE. | true | ##### Brewer.js, your asset management friend
#
# *Version 0.2.2 - Source code is freely available on
# [github](http://github.com/matehat/brewer.js).*
#
# This is the module obtained when importing `brewer`.
# It exports the main classes from [project](project.html), [package](package.html),
# [source](source.html) and [file](file.html).
{@Package} = require './package'
{@Project} = require './project'
{@Source} = require './source'
{@File} = require './file'
@watchers = {
count: 0
incr: -> @count++
decr: -> @count--
}
# It also parses the _extensions/_ directory to find modules that extends **brewer.js**
# functionality.
for file in (require 'fs').readdirSync((require 'path').resolve(__dirname, './extensions'))
if file[0] != '.'
require './extensions/' + file
##### MIT License
#
# Copyright (c) 2012 PI:NAME:<NAME>END_PI
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to
# whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE. |
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999104738235474,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/beatmapset-download-observer.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapsetDownloadObserver
targetSelector: '.support-osu-popup'
container: '#popup-container'
wrapperClass: 'empty-popup'
constructor: ->
$(document).on 'click mouseup', '.js-beatmapset-download-link', @quotaCheck
quotaCheck: (e) =>
return if currentUser?.is_supporter
return if (e.type == 'mouseup' && e.which != 2) # we only use mouseup to catch middle-click
$.ajax laroute.route('download-quota-check')
.done (json) =>
downloaded = json.quota_used
# after 20 downloads and every multiple of 50 thereafter, maybe move this to a config var later?
if (downloaded == 20 || (downloaded > 0 && downloaded % 50 == 0))
@loadAndShowPopup
loadAndShowPopup: =>
if $(@targetSelector).length == 0
$.get laroute.route('support-osu-popup'), (data) =>
@createPopup data
@showPopup()
else
@showPopup()
createPopup: (content) =>
return if content is undefined
$popup = $(".#{@wrapperClass}--clone").clone()
$popup.removeClass "#{@wrapperClass}--clone"
$popup.find('.popup-content').html content
$popup.find('.support-osu-popup__close-button').on 'click', (e) ->
e.preventDefault()
$popup.fadeOut()
Blackout.hide()
$popup.appendTo($(@container))
showPopup: =>
document.activeElement.blur?()
Blackout.show()
$(@targetSelector).parents(".#{@wrapperClass}").fadeIn()
| 74669 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapsetDownloadObserver
targetSelector: '.support-osu-popup'
container: '#popup-container'
wrapperClass: 'empty-popup'
constructor: ->
$(document).on 'click mouseup', '.js-beatmapset-download-link', @quotaCheck
quotaCheck: (e) =>
return if currentUser?.is_supporter
return if (e.type == 'mouseup' && e.which != 2) # we only use mouseup to catch middle-click
$.ajax laroute.route('download-quota-check')
.done (json) =>
downloaded = json.quota_used
# after 20 downloads and every multiple of 50 thereafter, maybe move this to a config var later?
if (downloaded == 20 || (downloaded > 0 && downloaded % 50 == 0))
@loadAndShowPopup
loadAndShowPopup: =>
if $(@targetSelector).length == 0
$.get laroute.route('support-osu-popup'), (data) =>
@createPopup data
@showPopup()
else
@showPopup()
createPopup: (content) =>
return if content is undefined
$popup = $(".#{@wrapperClass}--clone").clone()
$popup.removeClass "#{@wrapperClass}--clone"
$popup.find('.popup-content').html content
$popup.find('.support-osu-popup__close-button').on 'click', (e) ->
e.preventDefault()
$popup.fadeOut()
Blackout.hide()
$popup.appendTo($(@container))
showPopup: =>
document.activeElement.blur?()
Blackout.show()
$(@targetSelector).parents(".#{@wrapperClass}").fadeIn()
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapsetDownloadObserver
targetSelector: '.support-osu-popup'
container: '#popup-container'
wrapperClass: 'empty-popup'
constructor: ->
$(document).on 'click mouseup', '.js-beatmapset-download-link', @quotaCheck
quotaCheck: (e) =>
return if currentUser?.is_supporter
return if (e.type == 'mouseup' && e.which != 2) # we only use mouseup to catch middle-click
$.ajax laroute.route('download-quota-check')
.done (json) =>
downloaded = json.quota_used
# after 20 downloads and every multiple of 50 thereafter, maybe move this to a config var later?
if (downloaded == 20 || (downloaded > 0 && downloaded % 50 == 0))
@loadAndShowPopup
loadAndShowPopup: =>
if $(@targetSelector).length == 0
$.get laroute.route('support-osu-popup'), (data) =>
@createPopup data
@showPopup()
else
@showPopup()
createPopup: (content) =>
return if content is undefined
$popup = $(".#{@wrapperClass}--clone").clone()
$popup.removeClass "#{@wrapperClass}--clone"
$popup.find('.popup-content').html content
$popup.find('.support-osu-popup__close-button').on 'click', (e) ->
e.preventDefault()
$popup.fadeOut()
Blackout.hide()
$popup.appendTo($(@container))
showPopup: =>
document.activeElement.blur?()
Blackout.show()
$(@targetSelector).parents(".#{@wrapperClass}").fadeIn()
|
[
{
"context": "'form'), @gplusAttrs)\n for field in ['email', 'firstName', 'lastName']\n input = @$(\"input[name='#{fie",
"end": 12883,
"score": 0.7938928604125977,
"start": 12874,
"tag": "NAME",
"value": "firstName"
},
{
"context": "usAttrs)\n for field in ['email', 'first... | app/views/teachers/CreateTeacherAccountView.coffee | jongio/codecombat | 0 | RootView = require 'views/core/RootView'
forms = require 'core/forms'
TrialRequest = require 'models/TrialRequest'
TrialRequests = require 'collections/TrialRequests'
AuthModal = require 'views/core/AuthModal'
errors = require 'core/errors'
User = require 'models/User'
algolia = require 'core/services/algolia'
State = require 'models/State'
loadSegment = require('core/services/segment')
SIGNUP_REDIRECT = '/teachers/classes'
DISTRICT_NCES_KEYS = ['district', 'district_id', 'district_schools', 'district_students', 'phone']
SCHOOL_NCES_KEYS = DISTRICT_NCES_KEYS.concat(['id', 'name', 'students'])
module.exports = class CreateTeacherAccountView extends RootView
id: 'create-teacher-account-view'
template: require 'templates/teachers/create-teacher-account-view'
events:
'click .login-link': 'onClickLoginLink'
'change form#signup-form': 'onChangeForm'
'submit form#signup-form': 'onSubmitForm'
'click #gplus-signup-btn': 'onClickGPlusSignupButton'
'click #facebook-signup-btn': 'onClickFacebookSignupButton'
'change input[name="city"]': 'invalidateNCES'
'change input[name="state"]': 'invalidateNCES'
'change input[name="district"]': 'invalidateNCES'
'change input[name="country"]': 'invalidateNCES'
'change input[name="email"]': 'onChangeEmail'
'change input[name="name"]': 'onChangeName'
initialize: ->
@trialRequest = new TrialRequest()
@trialRequests = new TrialRequests()
@trialRequests.fetchOwn()
@supermodel.trackCollection(@trialRequests)
window.tracker?.trackEvent 'Teachers Create Account Loaded', category: 'Teachers', ['Mixpanel']
@state = new State {
suggestedNameText: '...'
checkEmailState: 'standby' # 'checking', 'exists', 'available'
checkEmailValue: null
checkEmailPromise: null
checkNameState: 'standby' # same
checkNameValue: null
checkNamePromise: null
authModalInitialValues: {}
}
@listenTo @state, 'change:checkEmailState', -> @renderSelectors('.email-check')
@listenTo @state, 'change:checkNameState', -> @renderSelectors('.name-check')
@listenTo @state, 'change:error', -> @renderSelectors('.error-area')
loadSegment() unless @segmentLoaded
onLeaveMessage: ->
if @formChanged
return 'Your account has not been created! If you continue, your changes will be lost.'
onLoaded: ->
if @trialRequests.size()
@trialRequest = @trialRequests.first()
@state.set({
authModalInitialValues: {
email: @trialRequest?.get('properties')?.email
}
})
super()
invalidateNCES: ->
for key in SCHOOL_NCES_KEYS
@$('input[name="nces_' + key + '"]').val ''
afterRender: ->
super()
# apply existing trial request on form
properties = @trialRequest.get('properties')
if properties
forms.objectToForm(@$('form'), properties)
commonLevels = _.map @$('[name="educationLevel"]'), (el) -> $(el).val()
submittedLevels = properties.educationLevel or []
otherLevel = _.first(_.difference(submittedLevels, commonLevels)) or ''
@$('#other-education-level-checkbox').attr('checked', !!otherLevel)
@$('#other-education-level-input').val(otherLevel)
$("#organization-control").algolia_autocomplete({hint: false}, [
source: (query, callback) ->
algolia.schoolsIndex.search(query, { hitsPerPage: 5, aroundLatLngViaIP: false }).then (answer) ->
callback answer.hits
, ->
callback []
displayKey: 'name',
templates:
suggestion: (suggestion) ->
hr = suggestion._highlightResult
"<div class='school'> #{hr.name.value} </div>" +
"<div class='district'>#{hr.district.value}, " +
"<span>#{hr.city?.value}, #{hr.state.value}</span></div>"
]).on 'autocomplete:selected', (event, suggestion, dataset) =>
# Tell Algolioa about the change but don't open the suggestion dropdown
@$('input[name="district"]').val(suggestion.district).trigger('input').trigger('blur')
@$('input[name="city"]').val suggestion.city
@$('input[name="state"]').val suggestion.state
@$('input[name="country"]').val 'USA'
for key in SCHOOL_NCES_KEYS
@$('input[name="nces_' + key + '"]').val suggestion[key]
@onChangeForm()
$("#district-control").algolia_autocomplete({hint: false}, [
source: (query, callback) ->
algolia.schoolsIndex.search(query, { hitsPerPage: 5, aroundLatLngViaIP: false }).then (answer) ->
callback answer.hits
, ->
callback []
displayKey: 'district',
templates:
suggestion: (suggestion) ->
hr = suggestion._highlightResult
"<div class='district'>#{hr.district.value}, " +
"<span>#{hr.city?.value}, #{hr.state.value}</span></div>"
]).on 'autocomplete:selected', (event, suggestion, dataset) =>
@$('input[name="organization"]').val('').trigger('input').trigger('blur')
@$('input[name="city"]').val suggestion.city
@$('input[name="state"]').val suggestion.state
@$('input[name="country"]').val 'USA'
for key in DISTRICT_NCES_KEYS
@$('input[name="nces_' + key + '"]').val suggestion[key]
@onChangeForm()
onClickLoginLink: ->
@openModalView(new AuthModal({ initialValues: @state.get('authModalInitialValues') }))
onChangeForm: ->
unless @formChanged
window.tracker?.trackEvent 'Teachers Create Account Form Started', category: 'Teachers', ['Mixpanel']
@formChanged = true
onSubmitForm: (e) ->
e.preventDefault()
# Creating Trial Request first, validate user attributes but do not use them
form = @$('form')
allAttrs = forms.formToObject(form)
trialRequestAttrs = _.omit(allAttrs, 'name', 'password1', 'password2')
# Don't save n/a district entries, but do validate required district client-side
trialRequestAttrs = _.omit(trialRequestAttrs, 'district') if trialRequestAttrs.district?.replace(/\s/ig, '').match(/n\/a/ig)
if @$('#other-education-level-checkbox').is(':checked')
val = @$('#other-education-level-input').val()
trialRequestAttrs.educationLevel.push(val) if val
forms.clearFormAlerts(form)
tv4.addFormat({
'phoneNumber': (phoneNumber) ->
if forms.validatePhoneNumber(phoneNumber)
return null
else
return {code: tv4.errorCodes.FORMAT_CUSTOM, message: 'Please enter a valid phone number, including area code.'}
})
result = tv4.validateMultiple(trialRequestAttrs, formSchema)
error = false
if not result.valid
forms.applyErrorsToForm(form, result.errors)
error = true
if not error and not forms.validateEmail(trialRequestAttrs.email)
forms.setErrorToProperty(form, 'email', 'invalid email')
error = true
if not error and forms.validateEmail(allAttrs.name)
forms.setErrorToProperty(form, 'name', 'username may not be an email')
error = true
if not _.size(trialRequestAttrs.educationLevel)
forms.setErrorToProperty(form, 'educationLevel', 'include at least one')
error = true
if not allAttrs.name
forms.setErrorToProperty(form, 'name', $.i18n.t('common.required_field'))
error = true
unless allAttrs.district
forms.setErrorToProperty(form, 'district', $.i18n.t('common.required_field'))
error = true
unless @gplusAttrs or @facebookAttrs
if not allAttrs.password1
forms.setErrorToProperty(form, 'password1', $.i18n.t('common.required_field'))
error = true
else if not allAttrs.password2
forms.setErrorToProperty(form, 'password2', $.i18n.t('common.required_field'))
error = true
else if allAttrs.password1 isnt allAttrs.password2
forms.setErrorToProperty(form, 'password1', 'Password fields are not equivalent')
error = true
if error
forms.scrollToFirstError()
return
trialRequestAttrs['siteOrigin'] = 'create teacher'
@trialRequest = new TrialRequest({
type: 'course'
properties: trialRequestAttrs
})
@trialRequest.notyErrors = false
@$('#create-account-btn').text('Sending').attr('disabled', true)
@trialRequest.save()
@trialRequest.on 'sync', @onTrialRequestSubmit, @
@trialRequest.on 'error', @onTrialRequestError, @
onTrialRequestError: (model, jqxhr) ->
@$('#create-account-btn').text('Submit').attr('disabled', false)
if jqxhr.status is 409
userExists = $.i18n.t('teachers_quote.email_exists')
logIn = $.i18n.t('login.log_in')
@$('#email-form-group')
.addClass('has-error')
.append($("<div class='help-block error-help-block'>#{userExists} <a class='login-link'>#{logIn}</a>"))
forms.scrollToFirstError()
else
errors.showNotyNetworkError(arguments...)
onTrialRequestSubmit: ->
window.tracker?.trackEvent 'Teachers Create Account Submitted', category: 'Teachers', ['Mixpanel']
@formChanged = false
Promise.resolve()
.then =>
attrs = _.pick(forms.formToObject(@$('form')), 'role', 'firstName', 'lastName')
attrs.role = attrs.role.toLowerCase()
me.set(attrs)
me.set(_.omit(@gplusAttrs, 'gplusID', 'email')) if @gplusAttrs
me.set(_.omit(@facebookAttrs, 'facebookID', 'email')) if @facebookAttrs
jqxhr = me.save()
if not jqxhr
throw new Error('Could not save user')
@trigger 'update-settings'
return jqxhr
.then =>
{ name, email } = forms.formToObject(@$('form'))
if @gplusAttrs
{ email, gplusID } = @gplusAttrs
{ name } = forms.formToObject(@$el)
jqxhr = me.signupWithGPlus(name, email, @gplusAttrs.gplusID)
else if @facebookAttrs
{ email, facebookID } = @facebookAttrs
{ name } = forms.formToObject(@$el)
jqxhr = me.signupWithFacebook(name, email, facebookID)
else
{ name, email, password1 } = forms.formToObject(@$el)
jqxhr = me.signupWithPassword(name, email, password1)
@trigger 'signup'
return jqxhr
.then =>
trialRequestIntercomData = _.pick @trialRequest.attributes.properties, ["siteOrigin", "marketingReferrer", "referrer", "notes", "numStudentsTotal", "numStudents", "purchaserRole", "role", "phoneNumber", "country", "state", "city", "district", "organization", "nces_students", "nces_name", "nces_id", "nces_phone", "nces_district_students", "nces_district_schools", "nces_district_id", "nces_district"]
trialRequestIntercomData.educationLevel_elementary = _.contains @trialRequest.attributes.properties.educationLevel, "Elementary"
trialRequestIntercomData.educationLevel_middle = _.contains @trialRequest.attributes.properties.educationLevel, "Middle"
trialRequestIntercomData.educationLevel_high = _.contains @trialRequest.attributes.properties.educationLevel, "High"
trialRequestIntercomData.educationLevel_college = _.contains @trialRequest.attributes.properties.educationLevel, "College+"
application.tracker.updateTrialRequestData trialRequestIntercomData
.then =>
application.router.navigate(SIGNUP_REDIRECT, { trigger: true })
application.router.reload()
.catch (e) =>
if e instanceof Error
noty {
text: e.message
layout: 'topCenter'
type: 'error'
timeout: 5000
killer: false,
dismissQueue: true
}
else
errors.showNotyNetworkError(arguments...)
@$('#create-account-btn').text('Submit').attr('disabled', false)
# GPlus signup
onClickGPlusSignupButton: ->
btn = @$('#gplus-signup-btn')
btn.attr('disabled', true)
application.gplusHandler.loadAPI({
success: =>
btn.attr('disabled', false)
application.gplusHandler.connect({
success: =>
btn.find('.sign-in-blurb').text($.i18n.t('signup.creating'))
btn.attr('disabled', true)
application.gplusHandler.loadPerson({
success: (@gplusAttrs) =>
existingUser = new User()
existingUser.fetchGPlusUser(@gplusAttrs.gplusID, {
error: (user, jqxhr) =>
if jqxhr.status is 404
@onGPlusConnected()
else
errors.showNotyNetworkError(jqxhr)
success: =>
me.loginGPlusUser(@gplusAttrs.gplusID, {
success: ->
application.router.navigate('/teachers/update-account', {trigger: true})
error: errors.showNotyNetworkError
})
})
})
})
})
onGPlusConnected: ->
@formChanged = true
forms.objectToForm(@$('form'), @gplusAttrs)
for field in ['email', 'firstName', 'lastName']
input = @$("input[name='#{field}']")
if input.val()
input.attr('disabled', true)
@$('input[type="password"]').attr('disabled', true)
@$('#gplus-logged-in-row, #social-network-signups').toggleClass('hide')
# Facebook signup
onClickFacebookSignupButton: ->
btn = @$('#facebook-signup-btn')
btn.attr('disabled', true)
application.facebookHandler.loadAPI({
success: =>
btn.attr('disabled', false)
application.facebookHandler.connect({
success: =>
btn.find('.sign-in-blurb').text($.i18n.t('signup.creating'))
btn.attr('disabled', true)
application.facebookHandler.loadPerson({
success: (@facebookAttrs) =>
existingUser = new User()
existingUser.fetchFacebookUser(@facebookAttrs.facebookID, {
error: (user, jqxhr) =>
if jqxhr.status is 404
@onFacebookConnected()
else
errors.showNotyNetworkError(jqxhr)
success: =>
me.loginFacebookUser(@facebookAttrs.facebookID, {
success: ->
application.router.navigate('/teachers/update-account', {trigger: true})
error: errors.showNotyNetworkError
})
})
})
})
})
onFacebookConnected: ->
@formChanged = true
forms.objectToForm(@$('form'), @facebookAttrs)
for field in ['email', 'firstName', 'lastName']
input = @$("input[name='#{field}']")
if input.val()
input.attr('disabled', true)
@$('input[type="password"]').attr('disabled', true)
@$('#facebook-logged-in-row, #social-network-signups').toggleClass('hide')
updateAuthModalInitialValues: (values) ->
@state.set {
authModalInitialValues: _.merge @state.get('authModalInitialValues'), values
}, { silent: true }
onChangeName: (e) ->
@updateAuthModalInitialValues { name: @$(e.currentTarget).val() }
@checkName()
checkName: ->
name = @$('input[name="name"]').val()
if name is @state.get('checkNameValue')
return @state.get('checkNamePromise')
if not name
@state.set({
checkNameState: 'standby'
checkNameValue: name
checkNamePromise: null
})
return Promise.resolve()
@state.set({
checkNameState: 'checking'
checkNameValue: name
checkNamePromise: (User.checkNameConflicts(name)
.then ({ suggestedName, conflicts }) =>
return unless name is @$('input[name="name"]').val()
if conflicts
suggestedNameText = $.i18n.t('signup.name_taken').replace('{{suggestedName}}', suggestedName)
@state.set({ checkNameState: 'exists', suggestedNameText })
else
@state.set { checkNameState: 'available' }
.catch (error) =>
@state.set('checkNameState', 'standby')
throw error
)
})
return @state.get('checkNamePromise')
onChangeEmail: (e) ->
@updateAuthModalInitialValues { email: @$(e.currentTarget).val() }
@checkEmail()
checkEmail: ->
email = @$('[name="email"]').val()
if not _.isEmpty(email) and email is @state.get('checkEmailValue')
return @state.get('checkEmailPromise')
if not (email and forms.validateEmail(email))
@state.set({
checkEmailState: 'standby'
checkEmailValue: email
checkEmailPromise: null
})
return Promise.resolve()
@state.set({
checkEmailState: 'checking'
checkEmailValue: email
checkEmailPromise: (User.checkEmailExists(email)
.then ({exists}) =>
return unless email is @$('[name="email"]').val()
if exists
@state.set('checkEmailState', 'exists')
else
@state.set('checkEmailState', 'available')
.catch (e) =>
@state.set('checkEmailState', 'standby')
throw e
)
})
return @state.get('checkEmailPromise')
formSchema = {
type: 'object'
required: ['firstName', 'lastName', 'email', 'role', 'numStudents', 'numStudentsTotal', 'city', 'state', 'country', 'phoneNumber']
properties:
password1: { type: 'string' }
password2: { type: 'string' }
firstName: { type: 'string' }
lastName: { type: 'string' }
name: { type: 'string', minLength: 1 }
email: { type: 'string', format: 'email' }
phoneNumber: { type: 'string', format: 'phoneNumber' }
role: { type: 'string' }
organization: { type: 'string' }
district: { type: 'string' }
city: { type: 'string' }
state: { type: 'string' }
country: { type: 'string' }
numStudents: { type: 'string' }
numStudentsTotal: { type: 'string' }
educationLevel: {
type: 'array'
items: { type: 'string' }
}
notes: { type: 'string' }
}
for key in SCHOOL_NCES_KEYS
formSchema['nces_' + key] = type: 'string'
| 3468 | RootView = require 'views/core/RootView'
forms = require 'core/forms'
TrialRequest = require 'models/TrialRequest'
TrialRequests = require 'collections/TrialRequests'
AuthModal = require 'views/core/AuthModal'
errors = require 'core/errors'
User = require 'models/User'
algolia = require 'core/services/algolia'
State = require 'models/State'
loadSegment = require('core/services/segment')
SIGNUP_REDIRECT = '/teachers/classes'
DISTRICT_NCES_KEYS = ['district', 'district_id', 'district_schools', 'district_students', 'phone']
SCHOOL_NCES_KEYS = DISTRICT_NCES_KEYS.concat(['id', 'name', 'students'])
module.exports = class CreateTeacherAccountView extends RootView
id: 'create-teacher-account-view'
template: require 'templates/teachers/create-teacher-account-view'
events:
'click .login-link': 'onClickLoginLink'
'change form#signup-form': 'onChangeForm'
'submit form#signup-form': 'onSubmitForm'
'click #gplus-signup-btn': 'onClickGPlusSignupButton'
'click #facebook-signup-btn': 'onClickFacebookSignupButton'
'change input[name="city"]': 'invalidateNCES'
'change input[name="state"]': 'invalidateNCES'
'change input[name="district"]': 'invalidateNCES'
'change input[name="country"]': 'invalidateNCES'
'change input[name="email"]': 'onChangeEmail'
'change input[name="name"]': 'onChangeName'
initialize: ->
@trialRequest = new TrialRequest()
@trialRequests = new TrialRequests()
@trialRequests.fetchOwn()
@supermodel.trackCollection(@trialRequests)
window.tracker?.trackEvent 'Teachers Create Account Loaded', category: 'Teachers', ['Mixpanel']
@state = new State {
suggestedNameText: '...'
checkEmailState: 'standby' # 'checking', 'exists', 'available'
checkEmailValue: null
checkEmailPromise: null
checkNameState: 'standby' # same
checkNameValue: null
checkNamePromise: null
authModalInitialValues: {}
}
@listenTo @state, 'change:checkEmailState', -> @renderSelectors('.email-check')
@listenTo @state, 'change:checkNameState', -> @renderSelectors('.name-check')
@listenTo @state, 'change:error', -> @renderSelectors('.error-area')
loadSegment() unless @segmentLoaded
onLeaveMessage: ->
if @formChanged
return 'Your account has not been created! If you continue, your changes will be lost.'
onLoaded: ->
if @trialRequests.size()
@trialRequest = @trialRequests.first()
@state.set({
authModalInitialValues: {
email: @trialRequest?.get('properties')?.email
}
})
super()
invalidateNCES: ->
for key in SCHOOL_NCES_KEYS
@$('input[name="nces_' + key + '"]').val ''
afterRender: ->
super()
# apply existing trial request on form
properties = @trialRequest.get('properties')
if properties
forms.objectToForm(@$('form'), properties)
commonLevels = _.map @$('[name="educationLevel"]'), (el) -> $(el).val()
submittedLevels = properties.educationLevel or []
otherLevel = _.first(_.difference(submittedLevels, commonLevels)) or ''
@$('#other-education-level-checkbox').attr('checked', !!otherLevel)
@$('#other-education-level-input').val(otherLevel)
$("#organization-control").algolia_autocomplete({hint: false}, [
source: (query, callback) ->
algolia.schoolsIndex.search(query, { hitsPerPage: 5, aroundLatLngViaIP: false }).then (answer) ->
callback answer.hits
, ->
callback []
displayKey: 'name',
templates:
suggestion: (suggestion) ->
hr = suggestion._highlightResult
"<div class='school'> #{hr.name.value} </div>" +
"<div class='district'>#{hr.district.value}, " +
"<span>#{hr.city?.value}, #{hr.state.value}</span></div>"
]).on 'autocomplete:selected', (event, suggestion, dataset) =>
# Tell Algolioa about the change but don't open the suggestion dropdown
@$('input[name="district"]').val(suggestion.district).trigger('input').trigger('blur')
@$('input[name="city"]').val suggestion.city
@$('input[name="state"]').val suggestion.state
@$('input[name="country"]').val 'USA'
for key in SCHOOL_NCES_KEYS
@$('input[name="nces_' + key + '"]').val suggestion[key]
@onChangeForm()
$("#district-control").algolia_autocomplete({hint: false}, [
source: (query, callback) ->
algolia.schoolsIndex.search(query, { hitsPerPage: 5, aroundLatLngViaIP: false }).then (answer) ->
callback answer.hits
, ->
callback []
displayKey: 'district',
templates:
suggestion: (suggestion) ->
hr = suggestion._highlightResult
"<div class='district'>#{hr.district.value}, " +
"<span>#{hr.city?.value}, #{hr.state.value}</span></div>"
]).on 'autocomplete:selected', (event, suggestion, dataset) =>
@$('input[name="organization"]').val('').trigger('input').trigger('blur')
@$('input[name="city"]').val suggestion.city
@$('input[name="state"]').val suggestion.state
@$('input[name="country"]').val 'USA'
for key in DISTRICT_NCES_KEYS
@$('input[name="nces_' + key + '"]').val suggestion[key]
@onChangeForm()
onClickLoginLink: ->
@openModalView(new AuthModal({ initialValues: @state.get('authModalInitialValues') }))
onChangeForm: ->
unless @formChanged
window.tracker?.trackEvent 'Teachers Create Account Form Started', category: 'Teachers', ['Mixpanel']
@formChanged = true
onSubmitForm: (e) ->
e.preventDefault()
# Creating Trial Request first, validate user attributes but do not use them
form = @$('form')
allAttrs = forms.formToObject(form)
trialRequestAttrs = _.omit(allAttrs, 'name', 'password1', 'password2')
# Don't save n/a district entries, but do validate required district client-side
trialRequestAttrs = _.omit(trialRequestAttrs, 'district') if trialRequestAttrs.district?.replace(/\s/ig, '').match(/n\/a/ig)
if @$('#other-education-level-checkbox').is(':checked')
val = @$('#other-education-level-input').val()
trialRequestAttrs.educationLevel.push(val) if val
forms.clearFormAlerts(form)
tv4.addFormat({
'phoneNumber': (phoneNumber) ->
if forms.validatePhoneNumber(phoneNumber)
return null
else
return {code: tv4.errorCodes.FORMAT_CUSTOM, message: 'Please enter a valid phone number, including area code.'}
})
result = tv4.validateMultiple(trialRequestAttrs, formSchema)
error = false
if not result.valid
forms.applyErrorsToForm(form, result.errors)
error = true
if not error and not forms.validateEmail(trialRequestAttrs.email)
forms.setErrorToProperty(form, 'email', 'invalid email')
error = true
if not error and forms.validateEmail(allAttrs.name)
forms.setErrorToProperty(form, 'name', 'username may not be an email')
error = true
if not _.size(trialRequestAttrs.educationLevel)
forms.setErrorToProperty(form, 'educationLevel', 'include at least one')
error = true
if not allAttrs.name
forms.setErrorToProperty(form, 'name', $.i18n.t('common.required_field'))
error = true
unless allAttrs.district
forms.setErrorToProperty(form, 'district', $.i18n.t('common.required_field'))
error = true
unless @gplusAttrs or @facebookAttrs
if not allAttrs.password1
forms.setErrorToProperty(form, 'password1', $.i18n.t('common.required_field'))
error = true
else if not allAttrs.password2
forms.setErrorToProperty(form, 'password2', $.i18n.t('common.required_field'))
error = true
else if allAttrs.password1 isnt allAttrs.password2
forms.setErrorToProperty(form, 'password1', 'Password fields are not equivalent')
error = true
if error
forms.scrollToFirstError()
return
trialRequestAttrs['siteOrigin'] = 'create teacher'
@trialRequest = new TrialRequest({
type: 'course'
properties: trialRequestAttrs
})
@trialRequest.notyErrors = false
@$('#create-account-btn').text('Sending').attr('disabled', true)
@trialRequest.save()
@trialRequest.on 'sync', @onTrialRequestSubmit, @
@trialRequest.on 'error', @onTrialRequestError, @
onTrialRequestError: (model, jqxhr) ->
@$('#create-account-btn').text('Submit').attr('disabled', false)
if jqxhr.status is 409
userExists = $.i18n.t('teachers_quote.email_exists')
logIn = $.i18n.t('login.log_in')
@$('#email-form-group')
.addClass('has-error')
.append($("<div class='help-block error-help-block'>#{userExists} <a class='login-link'>#{logIn}</a>"))
forms.scrollToFirstError()
else
errors.showNotyNetworkError(arguments...)
onTrialRequestSubmit: ->
window.tracker?.trackEvent 'Teachers Create Account Submitted', category: 'Teachers', ['Mixpanel']
@formChanged = false
Promise.resolve()
.then =>
attrs = _.pick(forms.formToObject(@$('form')), 'role', 'firstName', 'lastName')
attrs.role = attrs.role.toLowerCase()
me.set(attrs)
me.set(_.omit(@gplusAttrs, 'gplusID', 'email')) if @gplusAttrs
me.set(_.omit(@facebookAttrs, 'facebookID', 'email')) if @facebookAttrs
jqxhr = me.save()
if not jqxhr
throw new Error('Could not save user')
@trigger 'update-settings'
return jqxhr
.then =>
{ name, email } = forms.formToObject(@$('form'))
if @gplusAttrs
{ email, gplusID } = @gplusAttrs
{ name } = forms.formToObject(@$el)
jqxhr = me.signupWithGPlus(name, email, @gplusAttrs.gplusID)
else if @facebookAttrs
{ email, facebookID } = @facebookAttrs
{ name } = forms.formToObject(@$el)
jqxhr = me.signupWithFacebook(name, email, facebookID)
else
{ name, email, password1 } = forms.formToObject(@$el)
jqxhr = me.signupWithPassword(name, email, password1)
@trigger 'signup'
return jqxhr
.then =>
trialRequestIntercomData = _.pick @trialRequest.attributes.properties, ["siteOrigin", "marketingReferrer", "referrer", "notes", "numStudentsTotal", "numStudents", "purchaserRole", "role", "phoneNumber", "country", "state", "city", "district", "organization", "nces_students", "nces_name", "nces_id", "nces_phone", "nces_district_students", "nces_district_schools", "nces_district_id", "nces_district"]
trialRequestIntercomData.educationLevel_elementary = _.contains @trialRequest.attributes.properties.educationLevel, "Elementary"
trialRequestIntercomData.educationLevel_middle = _.contains @trialRequest.attributes.properties.educationLevel, "Middle"
trialRequestIntercomData.educationLevel_high = _.contains @trialRequest.attributes.properties.educationLevel, "High"
trialRequestIntercomData.educationLevel_college = _.contains @trialRequest.attributes.properties.educationLevel, "College+"
application.tracker.updateTrialRequestData trialRequestIntercomData
.then =>
application.router.navigate(SIGNUP_REDIRECT, { trigger: true })
application.router.reload()
.catch (e) =>
if e instanceof Error
noty {
text: e.message
layout: 'topCenter'
type: 'error'
timeout: 5000
killer: false,
dismissQueue: true
}
else
errors.showNotyNetworkError(arguments...)
@$('#create-account-btn').text('Submit').attr('disabled', false)
# GPlus signup
onClickGPlusSignupButton: ->
btn = @$('#gplus-signup-btn')
btn.attr('disabled', true)
application.gplusHandler.loadAPI({
success: =>
btn.attr('disabled', false)
application.gplusHandler.connect({
success: =>
btn.find('.sign-in-blurb').text($.i18n.t('signup.creating'))
btn.attr('disabled', true)
application.gplusHandler.loadPerson({
success: (@gplusAttrs) =>
existingUser = new User()
existingUser.fetchGPlusUser(@gplusAttrs.gplusID, {
error: (user, jqxhr) =>
if jqxhr.status is 404
@onGPlusConnected()
else
errors.showNotyNetworkError(jqxhr)
success: =>
me.loginGPlusUser(@gplusAttrs.gplusID, {
success: ->
application.router.navigate('/teachers/update-account', {trigger: true})
error: errors.showNotyNetworkError
})
})
})
})
})
onGPlusConnected: ->
@formChanged = true
forms.objectToForm(@$('form'), @gplusAttrs)
for field in ['email', '<NAME>', '<NAME>']
input = @$("input[name='#{field}']")
if input.val()
input.attr('disabled', true)
@$('input[type="password"]').attr('disabled', true)
@$('#gplus-logged-in-row, #social-network-signups').toggleClass('hide')
# Facebook signup
onClickFacebookSignupButton: ->
btn = @$('#facebook-signup-btn')
btn.attr('disabled', true)
application.facebookHandler.loadAPI({
success: =>
btn.attr('disabled', false)
application.facebookHandler.connect({
success: =>
btn.find('.sign-in-blurb').text($.i18n.t('signup.creating'))
btn.attr('disabled', true)
application.facebookHandler.loadPerson({
success: (@facebookAttrs) =>
existingUser = new User()
existingUser.fetchFacebookUser(@facebookAttrs.facebookID, {
error: (user, jqxhr) =>
if jqxhr.status is 404
@onFacebookConnected()
else
errors.showNotyNetworkError(jqxhr)
success: =>
me.loginFacebookUser(@facebookAttrs.facebookID, {
success: ->
application.router.navigate('/teachers/update-account', {trigger: true})
error: errors.showNotyNetworkError
})
})
})
})
})
onFacebookConnected: ->
@formChanged = true
forms.objectToForm(@$('form'), @facebookAttrs)
for field in ['email', '<NAME>', '<NAME>']
input = @$("input[name='#{field}']")
if input.val()
input.attr('disabled', true)
@$('input[type="password"]').attr('disabled', true)
@$('#facebook-logged-in-row, #social-network-signups').toggleClass('hide')
updateAuthModalInitialValues: (values) ->
@state.set {
authModalInitialValues: _.merge @state.get('authModalInitialValues'), values
}, { silent: true }
onChangeName: (e) ->
@updateAuthModalInitialValues { name: @$(e.currentTarget).val() }
@checkName()
checkName: ->
name = @$('input[name="name"]').val()
if name is @state.get('checkNameValue')
return @state.get('checkNamePromise')
if not name
@state.set({
checkNameState: 'standby'
checkNameValue: name
checkNamePromise: null
})
return Promise.resolve()
@state.set({
checkNameState: 'checking'
checkNameValue: name
checkNamePromise: (User.checkNameConflicts(name)
.then ({ suggestedName, conflicts }) =>
return unless name is @$('input[name="name"]').val()
if conflicts
suggestedNameText = $.i18n.t('signup.name_taken').replace('{{suggestedName}}', suggestedName)
@state.set({ checkNameState: 'exists', suggestedNameText })
else
@state.set { checkNameState: 'available' }
.catch (error) =>
@state.set('checkNameState', 'standby')
throw error
)
})
return @state.get('checkNamePromise')
onChangeEmail: (e) ->
@updateAuthModalInitialValues { email: @$(e.currentTarget).val() }
@checkEmail()
checkEmail: ->
email = @$('[name="email"]').val()
if not _.isEmpty(email) and email is @state.get('checkEmailValue')
return @state.get('checkEmailPromise')
if not (email and forms.validateEmail(email))
@state.set({
checkEmailState: 'standby'
checkEmailValue: email
checkEmailPromise: null
})
return Promise.resolve()
@state.set({
checkEmailState: 'checking'
checkEmailValue: email
checkEmailPromise: (User.checkEmailExists(email)
.then ({exists}) =>
return unless email is @$('[name="email"]').val()
if exists
@state.set('checkEmailState', 'exists')
else
@state.set('checkEmailState', 'available')
.catch (e) =>
@state.set('checkEmailState', 'standby')
throw e
)
})
return @state.get('checkEmailPromise')
formSchema = {
type: 'object'
required: ['<NAME>', '<NAME>', 'email', 'role', 'numStudents', 'numStudentsTotal', 'city', 'state', 'country', 'phoneNumber']
properties:
password1: { type: 'string' }
password2: { type: 'string' }
firstName: { type: 'string' }
lastName: { type: 'string' }
name: { type: 'string', minLength: 1 }
email: { type: 'string', format: 'email' }
phoneNumber: { type: 'string', format: 'phoneNumber' }
role: { type: 'string' }
organization: { type: 'string' }
district: { type: 'string' }
city: { type: 'string' }
state: { type: 'string' }
country: { type: 'string' }
numStudents: { type: 'string' }
numStudentsTotal: { type: 'string' }
educationLevel: {
type: 'array'
items: { type: 'string' }
}
notes: { type: 'string' }
}
for key in SCHOOL_NCES_KEYS
formSchema['nces_' + key] = type: 'string'
| true | RootView = require 'views/core/RootView'
forms = require 'core/forms'
TrialRequest = require 'models/TrialRequest'
TrialRequests = require 'collections/TrialRequests'
AuthModal = require 'views/core/AuthModal'
errors = require 'core/errors'
User = require 'models/User'
algolia = require 'core/services/algolia'
State = require 'models/State'
loadSegment = require('core/services/segment')
SIGNUP_REDIRECT = '/teachers/classes'
DISTRICT_NCES_KEYS = ['district', 'district_id', 'district_schools', 'district_students', 'phone']
SCHOOL_NCES_KEYS = DISTRICT_NCES_KEYS.concat(['id', 'name', 'students'])
module.exports = class CreateTeacherAccountView extends RootView
id: 'create-teacher-account-view'
template: require 'templates/teachers/create-teacher-account-view'
events:
'click .login-link': 'onClickLoginLink'
'change form#signup-form': 'onChangeForm'
'submit form#signup-form': 'onSubmitForm'
'click #gplus-signup-btn': 'onClickGPlusSignupButton'
'click #facebook-signup-btn': 'onClickFacebookSignupButton'
'change input[name="city"]': 'invalidateNCES'
'change input[name="state"]': 'invalidateNCES'
'change input[name="district"]': 'invalidateNCES'
'change input[name="country"]': 'invalidateNCES'
'change input[name="email"]': 'onChangeEmail'
'change input[name="name"]': 'onChangeName'
initialize: ->
@trialRequest = new TrialRequest()
@trialRequests = new TrialRequests()
@trialRequests.fetchOwn()
@supermodel.trackCollection(@trialRequests)
window.tracker?.trackEvent 'Teachers Create Account Loaded', category: 'Teachers', ['Mixpanel']
@state = new State {
suggestedNameText: '...'
checkEmailState: 'standby' # 'checking', 'exists', 'available'
checkEmailValue: null
checkEmailPromise: null
checkNameState: 'standby' # same
checkNameValue: null
checkNamePromise: null
authModalInitialValues: {}
}
@listenTo @state, 'change:checkEmailState', -> @renderSelectors('.email-check')
@listenTo @state, 'change:checkNameState', -> @renderSelectors('.name-check')
@listenTo @state, 'change:error', -> @renderSelectors('.error-area')
loadSegment() unless @segmentLoaded
onLeaveMessage: ->
if @formChanged
return 'Your account has not been created! If you continue, your changes will be lost.'
onLoaded: ->
if @trialRequests.size()
@trialRequest = @trialRequests.first()
@state.set({
authModalInitialValues: {
email: @trialRequest?.get('properties')?.email
}
})
super()
invalidateNCES: ->
for key in SCHOOL_NCES_KEYS
@$('input[name="nces_' + key + '"]').val ''
afterRender: ->
super()
# apply existing trial request on form
properties = @trialRequest.get('properties')
if properties
forms.objectToForm(@$('form'), properties)
commonLevels = _.map @$('[name="educationLevel"]'), (el) -> $(el).val()
submittedLevels = properties.educationLevel or []
otherLevel = _.first(_.difference(submittedLevels, commonLevels)) or ''
@$('#other-education-level-checkbox').attr('checked', !!otherLevel)
@$('#other-education-level-input').val(otherLevel)
$("#organization-control").algolia_autocomplete({hint: false}, [
source: (query, callback) ->
algolia.schoolsIndex.search(query, { hitsPerPage: 5, aroundLatLngViaIP: false }).then (answer) ->
callback answer.hits
, ->
callback []
displayKey: 'name',
templates:
suggestion: (suggestion) ->
hr = suggestion._highlightResult
"<div class='school'> #{hr.name.value} </div>" +
"<div class='district'>#{hr.district.value}, " +
"<span>#{hr.city?.value}, #{hr.state.value}</span></div>"
]).on 'autocomplete:selected', (event, suggestion, dataset) =>
# Tell Algolioa about the change but don't open the suggestion dropdown
@$('input[name="district"]').val(suggestion.district).trigger('input').trigger('blur')
@$('input[name="city"]').val suggestion.city
@$('input[name="state"]').val suggestion.state
@$('input[name="country"]').val 'USA'
for key in SCHOOL_NCES_KEYS
@$('input[name="nces_' + key + '"]').val suggestion[key]
@onChangeForm()
$("#district-control").algolia_autocomplete({hint: false}, [
source: (query, callback) ->
algolia.schoolsIndex.search(query, { hitsPerPage: 5, aroundLatLngViaIP: false }).then (answer) ->
callback answer.hits
, ->
callback []
displayKey: 'district',
templates:
suggestion: (suggestion) ->
hr = suggestion._highlightResult
"<div class='district'>#{hr.district.value}, " +
"<span>#{hr.city?.value}, #{hr.state.value}</span></div>"
]).on 'autocomplete:selected', (event, suggestion, dataset) =>
@$('input[name="organization"]').val('').trigger('input').trigger('blur')
@$('input[name="city"]').val suggestion.city
@$('input[name="state"]').val suggestion.state
@$('input[name="country"]').val 'USA'
for key in DISTRICT_NCES_KEYS
@$('input[name="nces_' + key + '"]').val suggestion[key]
@onChangeForm()
onClickLoginLink: ->
@openModalView(new AuthModal({ initialValues: @state.get('authModalInitialValues') }))
onChangeForm: ->
unless @formChanged
window.tracker?.trackEvent 'Teachers Create Account Form Started', category: 'Teachers', ['Mixpanel']
@formChanged = true
onSubmitForm: (e) ->
e.preventDefault()
# Creating Trial Request first, validate user attributes but do not use them
form = @$('form')
allAttrs = forms.formToObject(form)
trialRequestAttrs = _.omit(allAttrs, 'name', 'password1', 'password2')
# Don't save n/a district entries, but do validate required district client-side
trialRequestAttrs = _.omit(trialRequestAttrs, 'district') if trialRequestAttrs.district?.replace(/\s/ig, '').match(/n\/a/ig)
if @$('#other-education-level-checkbox').is(':checked')
val = @$('#other-education-level-input').val()
trialRequestAttrs.educationLevel.push(val) if val
forms.clearFormAlerts(form)
tv4.addFormat({
'phoneNumber': (phoneNumber) ->
if forms.validatePhoneNumber(phoneNumber)
return null
else
return {code: tv4.errorCodes.FORMAT_CUSTOM, message: 'Please enter a valid phone number, including area code.'}
})
result = tv4.validateMultiple(trialRequestAttrs, formSchema)
error = false
if not result.valid
forms.applyErrorsToForm(form, result.errors)
error = true
if not error and not forms.validateEmail(trialRequestAttrs.email)
forms.setErrorToProperty(form, 'email', 'invalid email')
error = true
if not error and forms.validateEmail(allAttrs.name)
forms.setErrorToProperty(form, 'name', 'username may not be an email')
error = true
if not _.size(trialRequestAttrs.educationLevel)
forms.setErrorToProperty(form, 'educationLevel', 'include at least one')
error = true
if not allAttrs.name
forms.setErrorToProperty(form, 'name', $.i18n.t('common.required_field'))
error = true
unless allAttrs.district
forms.setErrorToProperty(form, 'district', $.i18n.t('common.required_field'))
error = true
unless @gplusAttrs or @facebookAttrs
if not allAttrs.password1
forms.setErrorToProperty(form, 'password1', $.i18n.t('common.required_field'))
error = true
else if not allAttrs.password2
forms.setErrorToProperty(form, 'password2', $.i18n.t('common.required_field'))
error = true
else if allAttrs.password1 isnt allAttrs.password2
forms.setErrorToProperty(form, 'password1', 'Password fields are not equivalent')
error = true
if error
forms.scrollToFirstError()
return
trialRequestAttrs['siteOrigin'] = 'create teacher'
@trialRequest = new TrialRequest({
type: 'course'
properties: trialRequestAttrs
})
@trialRequest.notyErrors = false
@$('#create-account-btn').text('Sending').attr('disabled', true)
@trialRequest.save()
@trialRequest.on 'sync', @onTrialRequestSubmit, @
@trialRequest.on 'error', @onTrialRequestError, @
onTrialRequestError: (model, jqxhr) ->
@$('#create-account-btn').text('Submit').attr('disabled', false)
if jqxhr.status is 409
userExists = $.i18n.t('teachers_quote.email_exists')
logIn = $.i18n.t('login.log_in')
@$('#email-form-group')
.addClass('has-error')
.append($("<div class='help-block error-help-block'>#{userExists} <a class='login-link'>#{logIn}</a>"))
forms.scrollToFirstError()
else
errors.showNotyNetworkError(arguments...)
onTrialRequestSubmit: ->
window.tracker?.trackEvent 'Teachers Create Account Submitted', category: 'Teachers', ['Mixpanel']
@formChanged = false
Promise.resolve()
.then =>
attrs = _.pick(forms.formToObject(@$('form')), 'role', 'firstName', 'lastName')
attrs.role = attrs.role.toLowerCase()
me.set(attrs)
me.set(_.omit(@gplusAttrs, 'gplusID', 'email')) if @gplusAttrs
me.set(_.omit(@facebookAttrs, 'facebookID', 'email')) if @facebookAttrs
jqxhr = me.save()
if not jqxhr
throw new Error('Could not save user')
@trigger 'update-settings'
return jqxhr
.then =>
{ name, email } = forms.formToObject(@$('form'))
if @gplusAttrs
{ email, gplusID } = @gplusAttrs
{ name } = forms.formToObject(@$el)
jqxhr = me.signupWithGPlus(name, email, @gplusAttrs.gplusID)
else if @facebookAttrs
{ email, facebookID } = @facebookAttrs
{ name } = forms.formToObject(@$el)
jqxhr = me.signupWithFacebook(name, email, facebookID)
else
{ name, email, password1 } = forms.formToObject(@$el)
jqxhr = me.signupWithPassword(name, email, password1)
@trigger 'signup'
return jqxhr
.then =>
trialRequestIntercomData = _.pick @trialRequest.attributes.properties, ["siteOrigin", "marketingReferrer", "referrer", "notes", "numStudentsTotal", "numStudents", "purchaserRole", "role", "phoneNumber", "country", "state", "city", "district", "organization", "nces_students", "nces_name", "nces_id", "nces_phone", "nces_district_students", "nces_district_schools", "nces_district_id", "nces_district"]
trialRequestIntercomData.educationLevel_elementary = _.contains @trialRequest.attributes.properties.educationLevel, "Elementary"
trialRequestIntercomData.educationLevel_middle = _.contains @trialRequest.attributes.properties.educationLevel, "Middle"
trialRequestIntercomData.educationLevel_high = _.contains @trialRequest.attributes.properties.educationLevel, "High"
trialRequestIntercomData.educationLevel_college = _.contains @trialRequest.attributes.properties.educationLevel, "College+"
application.tracker.updateTrialRequestData trialRequestIntercomData
.then =>
application.router.navigate(SIGNUP_REDIRECT, { trigger: true })
application.router.reload()
.catch (e) =>
if e instanceof Error
noty {
text: e.message
layout: 'topCenter'
type: 'error'
timeout: 5000
killer: false,
dismissQueue: true
}
else
errors.showNotyNetworkError(arguments...)
@$('#create-account-btn').text('Submit').attr('disabled', false)
# GPlus signup
onClickGPlusSignupButton: ->
btn = @$('#gplus-signup-btn')
btn.attr('disabled', true)
application.gplusHandler.loadAPI({
success: =>
btn.attr('disabled', false)
application.gplusHandler.connect({
success: =>
btn.find('.sign-in-blurb').text($.i18n.t('signup.creating'))
btn.attr('disabled', true)
application.gplusHandler.loadPerson({
success: (@gplusAttrs) =>
existingUser = new User()
existingUser.fetchGPlusUser(@gplusAttrs.gplusID, {
error: (user, jqxhr) =>
if jqxhr.status is 404
@onGPlusConnected()
else
errors.showNotyNetworkError(jqxhr)
success: =>
me.loginGPlusUser(@gplusAttrs.gplusID, {
success: ->
application.router.navigate('/teachers/update-account', {trigger: true})
error: errors.showNotyNetworkError
})
})
})
})
})
onGPlusConnected: ->
@formChanged = true
forms.objectToForm(@$('form'), @gplusAttrs)
for field in ['email', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
input = @$("input[name='#{field}']")
if input.val()
input.attr('disabled', true)
@$('input[type="password"]').attr('disabled', true)
@$('#gplus-logged-in-row, #social-network-signups').toggleClass('hide')
# Facebook signup
onClickFacebookSignupButton: ->
btn = @$('#facebook-signup-btn')
btn.attr('disabled', true)
application.facebookHandler.loadAPI({
success: =>
btn.attr('disabled', false)
application.facebookHandler.connect({
success: =>
btn.find('.sign-in-blurb').text($.i18n.t('signup.creating'))
btn.attr('disabled', true)
application.facebookHandler.loadPerson({
success: (@facebookAttrs) =>
existingUser = new User()
existingUser.fetchFacebookUser(@facebookAttrs.facebookID, {
error: (user, jqxhr) =>
if jqxhr.status is 404
@onFacebookConnected()
else
errors.showNotyNetworkError(jqxhr)
success: =>
me.loginFacebookUser(@facebookAttrs.facebookID, {
success: ->
application.router.navigate('/teachers/update-account', {trigger: true})
error: errors.showNotyNetworkError
})
})
})
})
})
onFacebookConnected: ->
@formChanged = true
forms.objectToForm(@$('form'), @facebookAttrs)
for field in ['email', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
input = @$("input[name='#{field}']")
if input.val()
input.attr('disabled', true)
@$('input[type="password"]').attr('disabled', true)
@$('#facebook-logged-in-row, #social-network-signups').toggleClass('hide')
updateAuthModalInitialValues: (values) ->
@state.set {
authModalInitialValues: _.merge @state.get('authModalInitialValues'), values
}, { silent: true }
onChangeName: (e) ->
@updateAuthModalInitialValues { name: @$(e.currentTarget).val() }
@checkName()
checkName: ->
name = @$('input[name="name"]').val()
if name is @state.get('checkNameValue')
return @state.get('checkNamePromise')
if not name
@state.set({
checkNameState: 'standby'
checkNameValue: name
checkNamePromise: null
})
return Promise.resolve()
@state.set({
checkNameState: 'checking'
checkNameValue: name
checkNamePromise: (User.checkNameConflicts(name)
.then ({ suggestedName, conflicts }) =>
return unless name is @$('input[name="name"]').val()
if conflicts
suggestedNameText = $.i18n.t('signup.name_taken').replace('{{suggestedName}}', suggestedName)
@state.set({ checkNameState: 'exists', suggestedNameText })
else
@state.set { checkNameState: 'available' }
.catch (error) =>
@state.set('checkNameState', 'standby')
throw error
)
})
return @state.get('checkNamePromise')
onChangeEmail: (e) ->
@updateAuthModalInitialValues { email: @$(e.currentTarget).val() }
@checkEmail()
checkEmail: ->
email = @$('[name="email"]').val()
if not _.isEmpty(email) and email is @state.get('checkEmailValue')
return @state.get('checkEmailPromise')
if not (email and forms.validateEmail(email))
@state.set({
checkEmailState: 'standby'
checkEmailValue: email
checkEmailPromise: null
})
return Promise.resolve()
@state.set({
checkEmailState: 'checking'
checkEmailValue: email
checkEmailPromise: (User.checkEmailExists(email)
.then ({exists}) =>
return unless email is @$('[name="email"]').val()
if exists
@state.set('checkEmailState', 'exists')
else
@state.set('checkEmailState', 'available')
.catch (e) =>
@state.set('checkEmailState', 'standby')
throw e
)
})
return @state.get('checkEmailPromise')
formSchema = {
type: 'object'
required: ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'email', 'role', 'numStudents', 'numStudentsTotal', 'city', 'state', 'country', 'phoneNumber']
properties:
password1: { type: 'string' }
password2: { type: 'string' }
firstName: { type: 'string' }
lastName: { type: 'string' }
name: { type: 'string', minLength: 1 }
email: { type: 'string', format: 'email' }
phoneNumber: { type: 'string', format: 'phoneNumber' }
role: { type: 'string' }
organization: { type: 'string' }
district: { type: 'string' }
city: { type: 'string' }
state: { type: 'string' }
country: { type: 'string' }
numStudents: { type: 'string' }
numStudentsTotal: { type: 'string' }
educationLevel: {
type: 'array'
items: { type: 'string' }
}
notes: { type: 'string' }
}
for key in SCHOOL_NCES_KEYS
formSchema['nces_' + key] = type: 'string'
|
[
{
"context": " model of an\n# analogue ring-modulator proposed by Julian Parker. (Julian Parker.\n# [A Simple Digital Model Of The",
"end": 803,
"score": 0.9998568296432495,
"start": 790,
"tag": "NAME",
"value": "Julian Parker"
},
{
"context": "alogue ring-modulator proposed by Julia... | src/ring-modulator.coffee | bbc/webaudio.prototyping.bbc.co.uk | 3 | # # Ring Modulator
#
# [Ring Modulation](http://en.wikipedia.org/wiki/Ring_modulation) was
# one of the most recognisable effects used by the Radiophonic
# Workshop. It was the effect used to create the voices of both the
# Cybermen and The Daleks for Dr Who.
#
# A simple way to achieve a Ring Modulation effect is to simply
# multiply the input signal by the carrier signal. This approach
# doesn't allow for the characteristic distortion sound that was
# present in early analogue ring modulators which used a "ring" of
# diodes to achieve the multiplication of the signals.
#
# 
#
# To create a more realistic sound we use the digital model of an
# analogue ring-modulator proposed by Julian Parker. (Julian Parker.
# [A Simple Digital Model Of The Diode-Based
# Ring-Modulator](http://recherche.ircam.fr/pub/dafx11/Papers/66_e.pdf).
# Proc. 14th Int. Conf. Digital Audio Effects, Paris, France, 2011.)
#
# To create the voice of the Daleks the Workshop used a 30Hz sine wave
# as the modulating signal - this was recorded onto a tape loop and
# connected to one input. A microphone was connected to the second
# (carrier) input. The actor could then use the effect live on the set
# of Dr Who. In our demo we allow you to change the frequency (by
# modifying the playback speed of the tape machine). The tape machines
# used originally did not playback at a constant speed - this
# contributed to the distinctive sound of the early Daleks.
# # Preamble
#
# We use jQuery, backbone.js and some custom UI elements (namely a
# [knob](/docs/knob.html) a [speech bubble](/docs/speechbubble.html) and a
# [switch](/docs/switch.html)) in this application. We make these libraries
# available to our application using
# [require.js](http://requirejs.org/)
require(["jquery", "backbone", "knob", "speechbubble", "switch"], ($, Backbone, Knob, SpeechBubble, Switch) ->
$(document).ready ->
# # SamplePlayer
#
# When a speech bubble is clicked we load a sample using an AJAX
# request and put it into the buffer of an
# [AudioBufferSourceNode](https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode).
# The sample is then triggered and looped. The `SamplePlayer`
# class encapsulates this operation.
class SamplePlayer extends Backbone.View
# Instances require the AudioContext in order to create a
# source buffer.
constructor: (context) ->
super()
@context = context
play: () ->
this.stop()
# Create a new source
@source = @context.createBufferSource()
# Assign the loaded buffer to the source
@source.buffer = @buffer
# Enable looping
@source.loop = true
# Connect the source to the node's destination
@source.connect(@destination)
# Play immediately
@source.start(0)
stop: ->
if @source
# Stop the source from playing
@source.stop(0)
@source.disconnect
@source = null
# We provide a connect method so that it can
# be connected to other nodes in a consistant way.
connect: (destination) ->
if (typeof destination.node == 'object')
@destination = destination.node
else
@destination = destination
# Make a request for the sound file to load into this buffer,
# decode it and set the buffer contents
loadBuffer: (url) ->
self = this
request = new XMLHttpRequest()
request.open('GET', url, true)
request.responseType = 'arraybuffer'
request.onload = =>
onsuccess = (buffer) ->
self.buffer = buffer
self.trigger('bufferLoaded')
onerror = -> alert "Could not decode #{self.url}"
@context.decodeAudioData request.response, onsuccess, onerror
request.send()
# # DiodeNode
#
# This class implements the diode described in Parker's paper
# using the Web Audio API's
# [WaveShaperNode](https://webaudio.github.io/web-audio-api/#WaveShaperNode)
# interface.
class DiodeNode
constructor: (@context) ->
@node = @context.createWaveShaper()
# three initial parameters controlling the shape of the curve
@vb = 0.2
@vl = 0.4
@h = 1
this.setCurve()
setDistortion: (distortion) ->
# We increase the distortion by increasing the gradient of the
# linear portion of the waveshaper's curve.
@h = distortion
this.setCurve()
setCurve: ->
# The non-linear waveshaper curve describes the transformation
# between an input signal and an output signal. We calculate a
# 1024-point curve following equation (2) from Parker's paper.
samples = 1024;
wsCurve = new Float32Array(samples);
for i in [0...wsCurve.length]
# Convert the index to a voltage of range -1 to 1.
v = (i - samples/2) / (samples/2)
v = Math.abs(v)
if (v <= @vb)
value = 0
else if ((@vb < v) && (v <= @vl))
value = @h * ((Math.pow(v - @vb, 2)) / (2 * @vl - 2 * @vb))
else
value = @h * v - @h * @vl + (@h * ((Math.pow(@vl - @vb, 2)) / (2 * @vl - 2 * @vb)))
wsCurve[i] = value
@node.curve = wsCurve
# We provide a connect method so that instances of this class
# can be connected to other nodes in a consistent way.
connect: (destination) ->
@node.connect(destination)
# # Connect the graph
#
# The following graph layout is proposed by Parker:
#
# 
#
# Where `Vin` is the modulation oscillator input and `Vc` is the voice
# input.
#
# Signal addition is shown with a `+` and signal gain by a triangle.
# The 4 rectangular boxes are non-linear waveshapers which model the
# diodes in the ring modulator.
#
# We implement this graph as in the diagram with the following
# correspondences:
#
# - A triangle is implemented with a [GainNode](https://webaudio.github.io/web-audio-api/#idl-def-GainNode)
# - Addition is achieved by noting that WebAudio nodes sum their inputs
# - The diodes are implemented in the DiodeNode class
#
    # One shared AudioContext for the whole signal graph.
    context = new AudioContext
    # First we create the objects on the Vin side of the graph.
    # The oscillator stands in for the Workshop's 30Hz tape loop
    # (see the introduction above).
    vIn = context.createOscillator()
    vIn.frequency.value = 30
    vIn.start(0)
    vInGain = context.createGain()
    vInGain.gain.value = 0.5
    # GainNodes can take negative gain which represents phase
    # inversion.
    vInInverter1 = context.createGain()
    vInInverter1.gain.value = -1
    vInInverter2 = context.createGain()
    vInInverter2.gain.value = -1
    vInDiode1 = new DiodeNode(context)
    vInDiode2 = new DiodeNode(context)
    vInInverter3 = context.createGain()
    vInInverter3.gain.value = -1
    # Now we create the objects on the Vc side of the graph.
    player = new SamplePlayer(context)
    vcInverter1 = context.createGain()
    vcInverter1.gain.value = -1
    vcDiode3 = new DiodeNode(context)
    vcDiode4 = new DiodeNode(context)
    # A gain node to control master output levels.
    outGain = context.createGain()
    outGain.gain.value = 4
    # A small addition to the graph given in Parker's paper is a
    # compressor node immediately before the output. This ensures that
    # the user's volume remains somewhat constant when the distortion
    # is increased.
    compressor = context.createDynamicsCompressor()
    compressor.threshold.value = -12
    # Now we connect up the graph following the block diagram above.
    # When working on complex graphs it helps to have a pen and paper
    # handy!
    # First the Vc side,
    player.connect(vcInverter1)
    player.connect(vcDiode4)
    # (DiodeNode wraps a native node, so connections *into* a diode
    # target its `.node` property.)
    vcInverter1.connect(vcDiode3.node)
    # then the Vin side.
    vIn.connect(vInGain)
    vInGain.connect(vInInverter1)
    vInGain.connect(vcInverter1)
    vInGain.connect(vcDiode4.node)
    vInInverter1.connect(vInInverter2)
    vInInverter1.connect(vInDiode2.node)
    vInInverter2.connect(vInDiode1.node)
    # Finally connect the four diodes to the destination via the
    # output-stage compressor and master gain node. (WebAudio nodes
    # sum their inputs, which gives us the `+` junctions for free.)
    vInDiode1.connect(vInInverter3)
    vInDiode2.connect(vInInverter3)
    vInInverter3.connect(compressor)
    vcDiode3.connect(compressor)
    vcDiode4.connect(compressor)
    compressor.connect(outGain)
    outGain.connect(context.destination)
# # User Interface
# A [speech bubble](/docs/speechbubble.html) is a simple
# backbone.js view with a toggle and hover state.
bubble1 = new SpeechBubble(el: $("#voice1"))
bubble2 = new SpeechBubble(el: $("#voice2"))
bubble3 = new SpeechBubble(el: $("#voice3"))
bubble4 = new SpeechBubble(el: $("#voice4"))
# [Knobs](/docs/knob.html) for the oscillator frequency,
speedKnob = new Knob(
el: "#tape-speed"
initial_value: 30
valueMin: 0
valueMax: 2000
)
# and the distortion control.
distortionKnob = new Knob(
el: "#mod-distortion",
initial_value: 1
valueMin: 0.2
valueMax: 50
)
# Map events that are fired when user interface objects are
# interacted with to the corresponding parameters in the ring
# modulator.
distortionKnob.on('valueChanged', (v) =>
_.each([vInDiode1, vInDiode2, vcDiode3, vcDiode4], (diode) -> diode.setDistortion(v))
)
speedKnob.on('valueChanged', (v) =>
vIn.frequency.value = v
)
# For each speech bubble, when clicked we stop any currently
# playing buffers and play the sample associated with this buffer.
bubble1.on('on', ->
_.each([bubble2, bubble3, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_exterminate.wav")
player.on('bufferLoaded', -> player.play())
)
bubble1.on('off', ->
player.stop()
)
bubble2.on('on', ->
_.each([bubble1, bubble3, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_good-dalek.wav")
player.on('bufferLoaded', -> player.play())
)
bubble2.on('off', ->
player.stop()
)
bubble3.on('on', ->
_.each([bubble1, bubble2, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_upgrading.wav")
player.on('bufferLoaded', -> player.play())
)
bubble3.on('off', ->
player.stop()
)
bubble4.on('on', ->
_.each([bubble1, bubble2, bubble3], (o) -> o.turnOff() )
player.loadBuffer("/audio/ringmod_delete.wav")
player.on('bufferLoaded', -> player.play())
)
bubble4.on('off', ->
player.stop()
)
# # Experimental! Microphone input support
#
# This will only work on Chrome Canary builds on OS X and Windows.
# [HTML5
# Rocks](http://updates.html5rocks.com/2012/09/Live-Web-Audio-Input-Enabled)
# has the information you'll need to try this feature out.
liveInputGain = context.createGain()
liveInput = null
# There's no easy way to feature detect if this is supported so
# we have to browser detect the version of Chrome.
isLiveInputSupported = ->
isSupported = false
browser = $.browser
if browser.chrome
majorVersion = parseInt( browser.version.split('.')[0] )
isSupported = true if majorVersion >= 23
isSupported
getLive = =>
navigator.webkitGetUserMedia({ audio: true }, gotStream)
gotStream = (stream) =>
liveInput = context.createMediaStreamSource(stream)
liveInput.connect(liveInputGain)
liveInputGain.connect(vcInverter1)
liveInputGain.gain.value = 1.0
class KonamiCode
constructor: () ->
# ↑ ↑ ↓ ↓ ← → ← → B A
@konami = [38,38,40,40,37,39,37,39,66,65];
@keys = []
@callback = null
$(document).keydown(@keydown)
onPowerup: (callback) =>
@callback = callback
keydown: (e) =>
@keys.push(e.keyCode)
isCorrectCode = @keys.join(',').indexOf(@konami.join(',')) >= 0
if isCorrectCode
@callback() if @callback?
@keys = []
else if @keys.length == @konami.length
@keys = []
    konami = new KonamiCode()
    # Live microphone input is an easter egg: the mic switch is only
    # activated once the Konami code has been typed.
    konami.onPowerup ->
      console.log("powerup")
      activateLiveMicButton()
    # Wire up the tape-machine style switch that toggles the mic.
    activateLiveMicButton = ->
      tapeswitch = new Switch(el: '#live-input')
      tapeswitch.on('off', ->
        # Mute rather than disconnect, so switching back on is instant.
        liveInputGain.gain.value = 0
      )
      tapeswitch.on('on', ->
        getLive()
      )
)
| 199162 | # # Ring Modulator
#
# [Ring Modulation](http://en.wikipedia.org/wiki/Ring_modulation) was
# one of the most recognisable effects used by the Radiophonic
# Workshop. It was the effect used to create the voices of both the
# Cybermen and The Daleks for Dr Who.
#
# A simple way to achieve a Ring Modulation effect is to simply
# multiply the input signal by the carrier signal. This approach
# doesn't allow for the characteristic distortion sound that was
# present in early analogue ring modulators which used a "ring" of
# diodes to achieve the multiplication of the signals.
#
# 
#
# To create a more realistic sound we use the digital model of an
# analogue ring-modulator proposed by <NAME>. (<NAME>.
# [A Simple Digital Model Of The Diode-Based
# Ring-Modulator](http://recherche.ircam.fr/pub/dafx11/Papers/66_e.pdf).
# Proc. 14th Int. Conf. Digital Audio Effects, Paris, France, 2011.)
#
# To create the voice of the Daleks the Workshop used a 30Hz sine wave
# as the modulating signal - this was recorded onto a tape loop and
# connected to one input. A microphone was connected to the second
# (carrier) input. The actor could then use the effect live on the set
# of Dr Who. In our demo we allow you to change the frequency (by
# modifying the playback speed of the tape machine). The tape machines
# used originally did not playback at a constant speed - this
# contributed to the distinctive sound of the early Daleks.
# # Preamble
#
# We use jQuery, backbone.js and some custom UI elements (namely a
# [knob](/docs/knob.html) a [speech bubble](/docs/speechbubble.html) and a
# [switch](/docs/switch.html)) in this application. We make these libraries
# available to our application using
# [require.js](http://requirejs.org/)
require(["jquery", "backbone", "knob", "speechbubble", "switch"], ($, Backbone, Knob, SpeechBubble, Switch) ->
$(document).ready ->
# # SamplePlayer
#
# When a speech bubble is clicked we load a sample using an AJAX
# request and put it into the buffer of an
# [AudioBufferSourceNode](https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode).
# The sample is then triggered and looped. The `SamplePlayer`
# class encapsulates this operation.
class SamplePlayer extends Backbone.View
# Instances require the AudioContext in order to create a
# source buffer.
constructor: (context) ->
super()
@context = context
play: () ->
this.stop()
# Create a new source
@source = @context.createBufferSource()
# Assign the loaded buffer to the source
@source.buffer = @buffer
# Enable looping
@source.loop = true
# Connect the source to the node's destination
@source.connect(@destination)
# Play immediately
@source.start(0)
stop: ->
if @source
# Stop the source from playing
@source.stop(0)
@source.disconnect
@source = null
# We provide a connect method so that it can
# be connected to other nodes in a consistant way.
connect: (destination) ->
if (typeof destination.node == 'object')
@destination = destination.node
else
@destination = destination
# Make a request for the sound file to load into this buffer,
# decode it and set the buffer contents
loadBuffer: (url) ->
self = this
request = new XMLHttpRequest()
request.open('GET', url, true)
request.responseType = 'arraybuffer'
request.onload = =>
onsuccess = (buffer) ->
self.buffer = buffer
self.trigger('bufferLoaded')
onerror = -> alert "Could not decode #{self.url}"
@context.decodeAudioData request.response, onsuccess, onerror
request.send()
# # DiodeNode
#
# This class implements the diode described in Parker's paper
# using the Web Audio API's
# [WaveShaperNode](https://webaudio.github.io/web-audio-api/#WaveShaperNode)
# interface.
class DiodeNode
constructor: (@context) ->
@node = @context.createWaveShaper()
# three initial parameters controlling the shape of the curve
@vb = 0.2
@vl = 0.4
@h = 1
this.setCurve()
setDistortion: (distortion) ->
# We increase the distortion by increasing the gradient of the
# linear portion of the waveshaper's curve.
@h = distortion
this.setCurve()
setCurve: ->
# The non-linear waveshaper curve describes the transformation
# between an input signal and an output signal. We calculate a
# 1024-point curve following equation (2) from Parker's paper.
samples = 1024;
wsCurve = new Float32Array(samples);
for i in [0...wsCurve.length]
# Convert the index to a voltage of range -1 to 1.
v = (i - samples/2) / (samples/2)
v = Math.abs(v)
if (v <= @vb)
value = 0
else if ((@vb < v) && (v <= @vl))
value = @h * ((Math.pow(v - @vb, 2)) / (2 * @vl - 2 * @vb))
else
value = @h * v - @h * @vl + (@h * ((Math.pow(@vl - @vb, 2)) / (2 * @vl - 2 * @vb)))
wsCurve[i] = value
@node.curve = wsCurve
# We provide a connect method so that instances of this class
# can be connected to other nodes in a consistent way.
connect: (destination) ->
@node.connect(destination)
# # Connect the graph
#
# The following graph layout is proposed by Parker:
#
# 
#
# Where `Vin` is the modulation oscillator input and `Vc` is the voice
# input.
#
# Signal addition is shown with a `+` and signal gain by a triangle.
# The 4 rectangular boxes are non-linear waveshapers which model the
# diodes in the ring modulator.
#
# We implement this graph as in the diagram with the following
# correspondences:
#
# - A triangle is implemented with a [GainNode](https://webaudio.github.io/web-audio-api/#idl-def-GainNode)
# - Addition is achieved by noting that WebAudio nodes sum their inputs
# - The diodes are implemented in the DiodeNode class
#
context = new AudioContext
# First we create the objects on the Vin side of the graph.
vIn = context.createOscillator()
vIn.frequency.value = 30
vIn.start(0)
vInGain = context.createGain()
vInGain.gain.value = 0.5
# GainNodes can take negative gain which represents phase
# inversion.
vInInverter1 = context.createGain()
vInInverter1.gain.value = -1
vInInverter2 = context.createGain()
vInInverter2.gain.value = -1
vInDiode1 = new DiodeNode(context)
vInDiode2 = new DiodeNode(context)
vInInverter3 = context.createGain()
vInInverter3.gain.value = -1
# Now we create the objects on the Vc side of the graph.
player = new SamplePlayer(context)
vcInverter1 = context.createGain()
vcInverter1.gain.value = -1
vcDiode3 = new DiodeNode(context)
vcDiode4 = new DiodeNode(context)
# A gain node to control master output levels.
outGain = context.createGain()
outGain.gain.value = 4
# A small addition to the graph given in Parker's paper is a
# compressor node immediately before the output. This ensures that
# the user's volume remains somewhat constant when the distortion
# is increased.
compressor = context.createDynamicsCompressor()
compressor.threshold.value = -12
# Now we connect up the graph following the block diagram above.
# When working on complex graphs it helps to have a pen and paper
# handy!
# First the Vc side,
player.connect(vcInverter1)
player.connect(vcDiode4)
vcInverter1.connect(vcDiode3.node)
# then the Vin side.
vIn.connect(vInGain)
vInGain.connect(vInInverter1)
vInGain.connect(vcInverter1)
vInGain.connect(vcDiode4.node)
vInInverter1.connect(vInInverter2)
vInInverter1.connect(vInDiode2.node)
vInInverter2.connect(vInDiode1.node)
# Finally connect the four diodes to the destination via the
# output-stage compressor and master gain node.
vInDiode1.connect(vInInverter3)
vInDiode2.connect(vInInverter3)
vInInverter3.connect(compressor)
vcDiode3.connect(compressor)
vcDiode4.connect(compressor)
compressor.connect(outGain)
outGain.connect(context.destination)
# # User Interface
# A [speech bubble](/docs/speechbubble.html) is a simple
# backbone.js view with a toggle and hover state.
bubble1 = new SpeechBubble(el: $("#voice1"))
bubble2 = new SpeechBubble(el: $("#voice2"))
bubble3 = new SpeechBubble(el: $("#voice3"))
bubble4 = new SpeechBubble(el: $("#voice4"))
# [Knobs](/docs/knob.html) for the oscillator frequency,
speedKnob = new Knob(
el: "#tape-speed"
initial_value: 30
valueMin: 0
valueMax: 2000
)
# and the distortion control.
distortionKnob = new Knob(
el: "#mod-distortion",
initial_value: 1
valueMin: 0.2
valueMax: 50
)
# Map events that are fired when user interface objects are
# interacted with to the corresponding parameters in the ring
# modulator.
distortionKnob.on('valueChanged', (v) =>
_.each([vInDiode1, vInDiode2, vcDiode3, vcDiode4], (diode) -> diode.setDistortion(v))
)
speedKnob.on('valueChanged', (v) =>
vIn.frequency.value = v
)
# For each speech bubble, when clicked we stop any currently
# playing buffers and play the sample associated with this buffer.
bubble1.on('on', ->
_.each([bubble2, bubble3, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_exterminate.wav")
player.on('bufferLoaded', -> player.play())
)
bubble1.on('off', ->
player.stop()
)
bubble2.on('on', ->
_.each([bubble1, bubble3, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_good-dalek.wav")
player.on('bufferLoaded', -> player.play())
)
bubble2.on('off', ->
player.stop()
)
bubble3.on('on', ->
_.each([bubble1, bubble2, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_upgrading.wav")
player.on('bufferLoaded', -> player.play())
)
bubble3.on('off', ->
player.stop()
)
bubble4.on('on', ->
_.each([bubble1, bubble2, bubble3], (o) -> o.turnOff() )
player.loadBuffer("/audio/ringmod_delete.wav")
player.on('bufferLoaded', -> player.play())
)
bubble4.on('off', ->
player.stop()
)
# # Experimental! Microphone input support
#
# This will only work on Chrome Canary builds on OS X and Windows.
# [HTML5
# Rocks](http://updates.html5rocks.com/2012/09/Live-Web-Audio-Input-Enabled)
# has the information you'll need to try this feature out.
liveInputGain = context.createGain()
liveInput = null
# There's no easy way to feature detect if this is supported so
# we have to browser detect the version of Chrome.
isLiveInputSupported = ->
isSupported = false
browser = $.browser
if browser.chrome
majorVersion = parseInt( browser.version.split('.')[0] )
isSupported = true if majorVersion >= 23
isSupported
getLive = =>
navigator.webkitGetUserMedia({ audio: true }, gotStream)
gotStream = (stream) =>
liveInput = context.createMediaStreamSource(stream)
liveInput.connect(liveInputGain)
liveInputGain.connect(vcInverter1)
liveInputGain.gain.value = 1.0
class KonamiCode
constructor: () ->
# ↑ ↑ ↓ ↓ ← → ← → B A
@konami = [38,38,40,40,37,39,37,39,66,65];
@keys = []
@callback = null
$(document).keydown(@keydown)
onPowerup: (callback) =>
@callback = callback
keydown: (e) =>
@keys.push(e.keyCode)
isCorrectCode = @keys.join(',').indexOf(@konami.join(',')) >= 0
if isCorrectCode
@callback() if @callback?
@keys = []
else if @keys.length == @konami.length
@keys = []
konami = new KonamiCode()
konami.onPowerup ->
console.log("powerup")
activateLiveMicButton()
activateLiveMicButton = ->
tapeswitch = new Switch(el: '#live-input')
tapeswitch.on('off', ->
liveInputGain.gain.value = 0
)
tapeswitch.on('on', ->
getLive()
)
)
| true | # # Ring Modulator
#
# [Ring Modulation](http://en.wikipedia.org/wiki/Ring_modulation) was
# one of the most recognisable effects used by the Radiophonic
# Workshop. It was the effect used to create the voices of both the
# Cybermen and The Daleks for Dr Who.
#
# A simple way to achieve a Ring Modulation effect is to simply
# multiply the input signal by the carrier signal. This approach
# doesn't allow for the characteristic distortion sound that was
# present in early analogue ring modulators which used a "ring" of
# diodes to achieve the multiplication of the signals.
#
# 
#
# To create a more realistic sound we use the digital model of an
# analogue ring-modulator proposed by PI:NAME:<NAME>END_PI. (PI:NAME:<NAME>END_PI.
# [A Simple Digital Model Of The Diode-Based
# Ring-Modulator](http://recherche.ircam.fr/pub/dafx11/Papers/66_e.pdf).
# Proc. 14th Int. Conf. Digital Audio Effects, Paris, France, 2011.)
#
# To create the voice of the Daleks the Workshop used a 30Hz sine wave
# as the modulating signal - this was recorded onto a tape loop and
# connected to one input. A microphone was connected to the second
# (carrier) input. The actor could then use the effect live on the set
# of Dr Who. In our demo we allow you to change the frequency (by
# modifying the playback speed of the tape machine). The tape machines
# used originally did not playback at a constant speed - this
# contributed to the distinctive sound of the early Daleks.
# # Preamble
#
# We use jQuery, backbone.js and some custom UI elements (namely a
# [knob](/docs/knob.html) a [speech bubble](/docs/speechbubble.html) and a
# [switch](/docs/switch.html)) in this application. We make these libraries
# available to our application using
# [require.js](http://requirejs.org/)
require(["jquery", "backbone", "knob", "speechbubble", "switch"], ($, Backbone, Knob, SpeechBubble, Switch) ->
$(document).ready ->
# # SamplePlayer
#
# When a speech bubble is clicked we load a sample using an AJAX
# request and put it into the buffer of an
# [AudioBufferSourceNode](https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode).
# The sample is then triggered and looped. The `SamplePlayer`
# class encapsulates this operation.
class SamplePlayer extends Backbone.View
# Instances require the AudioContext in order to create a
# source buffer.
constructor: (context) ->
super()
@context = context
play: () ->
this.stop()
# Create a new source
@source = @context.createBufferSource()
# Assign the loaded buffer to the source
@source.buffer = @buffer
# Enable looping
@source.loop = true
# Connect the source to the node's destination
@source.connect(@destination)
# Play immediately
@source.start(0)
stop: ->
if @source
# Stop the source from playing
@source.stop(0)
@source.disconnect
@source = null
# We provide a connect method so that it can
# be connected to other nodes in a consistant way.
connect: (destination) ->
if (typeof destination.node == 'object')
@destination = destination.node
else
@destination = destination
# Make a request for the sound file to load into this buffer,
# decode it and set the buffer contents
loadBuffer: (url) ->
self = this
request = new XMLHttpRequest()
request.open('GET', url, true)
request.responseType = 'arraybuffer'
request.onload = =>
onsuccess = (buffer) ->
self.buffer = buffer
self.trigger('bufferLoaded')
onerror = -> alert "Could not decode #{self.url}"
@context.decodeAudioData request.response, onsuccess, onerror
request.send()
# # DiodeNode
#
# This class implements the diode described in Parker's paper
# using the Web Audio API's
# [WaveShaperNode](https://webaudio.github.io/web-audio-api/#WaveShaperNode)
# interface.
class DiodeNode
constructor: (@context) ->
@node = @context.createWaveShaper()
# three initial parameters controlling the shape of the curve
@vb = 0.2
@vl = 0.4
@h = 1
this.setCurve()
setDistortion: (distortion) ->
# We increase the distortion by increasing the gradient of the
# linear portion of the waveshaper's curve.
@h = distortion
this.setCurve()
setCurve: ->
# The non-linear waveshaper curve describes the transformation
# between an input signal and an output signal. We calculate a
# 1024-point curve following equation (2) from Parker's paper.
samples = 1024;
wsCurve = new Float32Array(samples);
for i in [0...wsCurve.length]
# Convert the index to a voltage of range -1 to 1.
v = (i - samples/2) / (samples/2)
v = Math.abs(v)
if (v <= @vb)
value = 0
else if ((@vb < v) && (v <= @vl))
value = @h * ((Math.pow(v - @vb, 2)) / (2 * @vl - 2 * @vb))
else
value = @h * v - @h * @vl + (@h * ((Math.pow(@vl - @vb, 2)) / (2 * @vl - 2 * @vb)))
wsCurve[i] = value
@node.curve = wsCurve
# We provide a connect method so that instances of this class
# can be connected to other nodes in a consistent way.
connect: (destination) ->
@node.connect(destination)
# # Connect the graph
#
# The following graph layout is proposed by Parker:
#
# 
#
# Where `Vin` is the modulation oscillator input and `Vc` is the voice
# input.
#
# Signal addition is shown with a `+` and signal gain by a triangle.
# The 4 rectangular boxes are non-linear waveshapers which model the
# diodes in the ring modulator.
#
# We implement this graph as in the diagram with the following
# correspondences:
#
# - A triangle is implemented with a [GainNode](https://webaudio.github.io/web-audio-api/#idl-def-GainNode)
# - Addition is achieved by noting that WebAudio nodes sum their inputs
# - The diodes are implemented in the DiodeNode class
#
context = new AudioContext
# First we create the objects on the Vin side of the graph.
vIn = context.createOscillator()
vIn.frequency.value = 30
vIn.start(0)
vInGain = context.createGain()
vInGain.gain.value = 0.5
# GainNodes can take negative gain which represents phase
# inversion.
vInInverter1 = context.createGain()
vInInverter1.gain.value = -1
vInInverter2 = context.createGain()
vInInverter2.gain.value = -1
vInDiode1 = new DiodeNode(context)
vInDiode2 = new DiodeNode(context)
vInInverter3 = context.createGain()
vInInverter3.gain.value = -1
# Now we create the objects on the Vc side of the graph.
player = new SamplePlayer(context)
vcInverter1 = context.createGain()
vcInverter1.gain.value = -1
vcDiode3 = new DiodeNode(context)
vcDiode4 = new DiodeNode(context)
# A gain node to control master output levels.
outGain = context.createGain()
outGain.gain.value = 4
# A small addition to the graph given in Parker's paper is a
# compressor node immediately before the output. This ensures that
# the user's volume remains somewhat constant when the distortion
# is increased.
compressor = context.createDynamicsCompressor()
compressor.threshold.value = -12
# Now we connect up the graph following the block diagram above.
# When working on complex graphs it helps to have a pen and paper
# handy!
# First the Vc side,
player.connect(vcInverter1)
player.connect(vcDiode4)
vcInverter1.connect(vcDiode3.node)
# then the Vin side.
vIn.connect(vInGain)
vInGain.connect(vInInverter1)
vInGain.connect(vcInverter1)
vInGain.connect(vcDiode4.node)
vInInverter1.connect(vInInverter2)
vInInverter1.connect(vInDiode2.node)
vInInverter2.connect(vInDiode1.node)
# Finally connect the four diodes to the destination via the
# output-stage compressor and master gain node.
vInDiode1.connect(vInInverter3)
vInDiode2.connect(vInInverter3)
vInInverter3.connect(compressor)
vcDiode3.connect(compressor)
vcDiode4.connect(compressor)
compressor.connect(outGain)
outGain.connect(context.destination)
# # User Interface
# A [speech bubble](/docs/speechbubble.html) is a simple
# backbone.js view with a toggle and hover state.
bubble1 = new SpeechBubble(el: $("#voice1"))
bubble2 = new SpeechBubble(el: $("#voice2"))
bubble3 = new SpeechBubble(el: $("#voice3"))
bubble4 = new SpeechBubble(el: $("#voice4"))
# [Knobs](/docs/knob.html) for the oscillator frequency,
speedKnob = new Knob(
el: "#tape-speed"
initial_value: 30
valueMin: 0
valueMax: 2000
)
# and the distortion control.
distortionKnob = new Knob(
el: "#mod-distortion",
initial_value: 1
valueMin: 0.2
valueMax: 50
)
# Map events that are fired when user interface objects are
# interacted with to the corresponding parameters in the ring
# modulator.
distortionKnob.on('valueChanged', (v) =>
_.each([vInDiode1, vInDiode2, vcDiode3, vcDiode4], (diode) -> diode.setDistortion(v))
)
speedKnob.on('valueChanged', (v) =>
vIn.frequency.value = v
)
# For each speech bubble, when clicked we stop any currently
# playing buffers and play the sample associated with this buffer.
bubble1.on('on', ->
_.each([bubble2, bubble3, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_exterminate.wav")
player.on('bufferLoaded', -> player.play())
)
bubble1.on('off', ->
player.stop()
)
bubble2.on('on', ->
_.each([bubble1, bubble3, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_good-dalek.wav")
player.on('bufferLoaded', -> player.play())
)
bubble2.on('off', ->
player.stop()
)
bubble3.on('on', ->
_.each([bubble1, bubble2, bubble4], (o) -> o.turnOff())
player.loadBuffer("/audio/ringmod_upgrading.wav")
player.on('bufferLoaded', -> player.play())
)
bubble3.on('off', ->
player.stop()
)
bubble4.on('on', ->
_.each([bubble1, bubble2, bubble3], (o) -> o.turnOff() )
player.loadBuffer("/audio/ringmod_delete.wav")
player.on('bufferLoaded', -> player.play())
)
bubble4.on('off', ->
player.stop()
)
# # Experimental! Microphone input support
#
# This will only work on Chrome Canary builds on OS X and Windows.
# [HTML5
# Rocks](http://updates.html5rocks.com/2012/09/Live-Web-Audio-Input-Enabled)
# has the information you'll need to try this feature out.
liveInputGain = context.createGain()
liveInput = null
# There's no easy way to feature detect if this is supported so
# we have to browser detect the version of Chrome.
isLiveInputSupported = ->
isSupported = false
browser = $.browser
if browser.chrome
majorVersion = parseInt( browser.version.split('.')[0] )
isSupported = true if majorVersion >= 23
isSupported
getLive = =>
navigator.webkitGetUserMedia({ audio: true }, gotStream)
gotStream = (stream) =>
liveInput = context.createMediaStreamSource(stream)
liveInput.connect(liveInputGain)
liveInputGain.connect(vcInverter1)
liveInputGain.gain.value = 1.0
class KonamiCode
constructor: () ->
# ↑ ↑ ↓ ↓ ← → ← → B A
@konami = [38,38,40,40,37,39,37,39,66,65];
@keys = []
@callback = null
$(document).keydown(@keydown)
onPowerup: (callback) =>
@callback = callback
keydown: (e) =>
@keys.push(e.keyCode)
isCorrectCode = @keys.join(',').indexOf(@konami.join(',')) >= 0
if isCorrectCode
@callback() if @callback?
@keys = []
else if @keys.length == @konami.length
@keys = []
konami = new KonamiCode()
konami.onPowerup ->
console.log("powerup")
activateLiveMicButton()
activateLiveMicButton = ->
tapeswitch = new Switch(el: '#live-input')
tapeswitch.on('off', ->
liveInputGain.gain.value = 0
)
tapeswitch.on('on', ->
getLive()
)
)
|
[
{
"context": "y is not required when using this method.\r\n@author Nathan Klick\r\n@copyright QRef 2012\r\n###\r\nclass RegisterAccount",
"end": 229,
"score": 0.999838650226593,
"start": 217,
"tag": "NAME",
"value": "Nathan Klick"
}
] | Workspace/QRef/NodeServer/src/specification/request/rpc/RegisterAccountRpcRequest.coffee | qrefdev/qref | 0 | RpcRequest = require('../../../serialization/RpcRequest')
###
Object sent as the body of an HTTP POST request to create a new user account.
@note The token property is not required when using this method.
@author Nathan Klick
@copyright QRef 2012
###
class RegisterAccountRpcRequest extends RpcRequest
###
@property [String] (Required) The username used to perform authentication. This should always be the user's email address.
###
userName:
type: String
required: true
unique: true
###
@property [String] (Required) The clear text version of the user's password.
###
password:
type: String
required: true
module.exports = RegisterAccountRpcRequest | 60204 | RpcRequest = require('../../../serialization/RpcRequest')
###
Object sent as the body of an HTTP POST request to create a new user account.
@note The token property is not required when using this method.
@author <NAME>
@copyright QRef 2012
###
class RegisterAccountRpcRequest extends RpcRequest
###
@property [String] (Required) The username used to perform authentication. This should always be the user's email address.
###
userName:
type: String
required: true
unique: true
###
@property [String] (Required) The clear text version of the user's password.
###
password:
type: String
required: true
module.exports = RegisterAccountRpcRequest | true | RpcRequest = require('../../../serialization/RpcRequest')
###
Object sent as the body of an HTTP POST request to create a new user account.
@note The token property is not required when using this method.
@author PI:NAME:<NAME>END_PI
@copyright QRef 2012
###
class RegisterAccountRpcRequest extends RpcRequest
###
@property [String] (Required) The username used to perform authentication. This should always be the user's email address.
###
userName:
type: String
required: true
unique: true
###
@property [String] (Required) The clear text version of the user's password.
###
password:
type: String
required: true
module.exports = RegisterAccountRpcRequest |
[
{
"context": "# GainText\n#\n# Martin Waitz <tali@admingilde.org>\n\nmona = require 'mona-parse",
"end": 27,
"score": 0.9997928738594055,
"start": 15,
"tag": "NAME",
"value": "Martin Waitz"
},
{
"context": "# GainText\n#\n# Martin Waitz <tali@admingilde.org>\n\nmona = require 'mona-p... | src/block.coffee | gaintext/gaintext.js | 0 | # GainText
#
# Martin Waitz <tali@admingilde.org>
mona = require 'mona-parser'
{
copy, collect, collectText,
newline, vskip, hskip,
noWhitespace
} = require './parserutils'
class ParserScope
constructor: (blocks=[], spans=[]) ->
@blockParsers = (@createParser element for element in blocks)
@spanParsers = (@createParser element for element in spans)
createParser: (element) ->
p = element.parser()
if typeof p != 'function'
throw new Error "parser() returned #{p}"
return p
addBlock: (element) ->
@blockParsers.push @createParser element
addSpan: (element) ->
@spanParsers.push @createParser element
blockParserList: ->
return @blockParsers
spanParserList: ->
return @spanParsers
blockParser: (extra=[]) ->
parsers = @blockParserList().concat(extra)
if not parsers.length
throw new Error "empty parser list"
return mona.or parsers...
spanParser: (extra=[]) ->
debugger
parsers = @spanParserList().concat(extra)
if not parsers.length
throw new Error "empty parser list"
return mona.or parsers...
exports.globalScope = globalScope = new ParserScope()
class NestedParserScope extends ParserScope
constructor: (@parent=globalScope, schema=[]) ->
super(schema) # XXX
blockParserList: ->
if @parent
return @blockParsers.concat @parent.blockParserList()
else return @blockParsers
spanParserList: ->
if @parent
return @spanParsers.concat @parent.spanParserList()
else return @spanParsers
class Element
constructor: (@schema=[]) ->
setScope: (scope) ->
return (parserState) ->
newState = copy(parserState)
newState.scope = scope
newState.value = scope
return newState
newScope: ->
return (parserState) =>
parent = parserState.scope
scope = new NestedParserScope(parent, @schema)
return @setScope(scope)(parserState)
collect: (parser) ->
return mona.or mona.collect(parser, min: 1),
mona.value []
createElement: (name, title, content) ->
return name: name, title: title, content: content
class NamedElement extends Element
constructor: (@nameParser, @schema=[]) ->
if typeof @nameParser == 'string'
@nameParser = mona.string @nameParser
if typeof @nameParser != 'function'
throw new Error "@nameParser is #{@nameParser}"
class NamedBlockElement extends NamedElement
indentedContentParser: ->
return mona.sequence (parse) =>
parse vskip
parse sameIndent
name = parse @nameParser
parse mona.string(':')
parse hskip
title = parse mona.text(mona.noneOf '\r\n')
parse newline
parse vskip
# TBD: move the newScope into a new parser
# which is called inside indentedBlock?
scope = parse @newScope()
content = parse @collect (indentedBlock scope.blockParser())
return mona.value @createElement(name, title, content)
underlinedTitleParser: ->
return mona.sequence (parse) =>
# TBD
return mona.fail()
parser: ->
return mona.or(
@indentedContentParser(),
@underlinedTitleParser(),
)
class SymbolicBlockElement extends Element
constructor: (@symbol, @name) ->
parser: ->
return mona.sequence (parse) =>
parse vskip
parse sameIndent
parse mona.string @symbol
parse hskip
title = parse mona.text(mona.noneOf '\r\n')
parse newline
parse vskip
scope = parse @newScope()
content = parse @collect (indentedBlock scope.blockParser())
return mona.value @createElement(@name, title, content)
class NamedSpanElement extends NamedElement
parser: ->
return mona.sequence (parse) =>
parse mona.string '['
name = parse @nameParser
parse hskip
attributes = parse mona.text mona.noneOf(':]\r\n')
# XXX: parse attributes
if parse mona.maybe mona.string ':'
parse hskip
scope = parse @newScope()
content = parse collectText scope.spanParser [mona.noneOf ']\r\n']
else
content = []
parse mona.string ']'
# TBD: parse attributes
return mona.value @createElement(name, attributes, content)
class Paragraph extends Element
constructor: ->
@normalText = mona.noneOf('\r\n')
parser: ->
return mona.sequence (parse) =>
parse vskip
scope = parse @newScope()
textInLine = collectText scope.spanParser [@normalText]
textLine = mona.followedBy(
mona.and(sameIndent,
noWhitespace,
textInLine),
newline)
return collect textLine
exports.Element = Element
exports.NamedBlockElement = NamedBlockElement
exports.NamedSpanElement = NamedSpanElement
exports.SymbolicBlockElement = SymbolicBlockElement
exports.Paragraph = Paragraph
indentation = mona.followedBy mona.text(mona.oneOf(' \t'), min: 1),
mona.noneOf('\n')
exports.indentation = indentation = mona.label indentation, 'indentation'
exports.getIndentLevel = getIndentLevel = (parserState) ->
levels = parserState.indentLevels || [""]
curLevel = levels[levels.length-1]
newState = copy(parserState)
newState.value = curLevel
return newState
exports.pushIndentLevel = pushIndentLevel = (level) ->
return (parserState) ->
levels = copy(parserState.indentLevels || [""])
levels.push(level)
parserState = copy(parserState)
parserState.indentLevels = levels
return parserState
exports.popIndentLevel = popIndentLevel = (parserState) ->
parserState = copy(parserState)
parserState.indentLevels = copy(parserState.indentLevels)
parserState.indentLevels.pop()
return parserState
exports.indentedBlock = indentedBlock = (content, indent=indentation) ->
return mona.sequence (parse) ->
parse vskip
i = parse(mona.lookAhead(indent))
if not i? or i == ''
# no indentation at all
return mona.fail()
level = parse(getIndentLevel)
if i.substr(0, level.length) != level
# not indented enough
return mona.fail()
parse(pushIndentLevel(i))
c = parse(content)
parse(popIndentLevel)
return mona.value c
exports.sameIndent = sameIndent = mona.sequence (parse) ->
level = parse(getIndentLevel)
if level == ''
return mona.value ''
else
return mona.string level
| 2045 | # GainText
#
# <NAME> <<EMAIL>>
mona = require 'mona-parser'
{
copy, collect, collectText,
newline, vskip, hskip,
noWhitespace
} = require './parserutils'
class ParserScope
constructor: (blocks=[], spans=[]) ->
@blockParsers = (@createParser element for element in blocks)
@spanParsers = (@createParser element for element in spans)
createParser: (element) ->
p = element.parser()
if typeof p != 'function'
throw new Error "parser() returned #{p}"
return p
addBlock: (element) ->
@blockParsers.push @createParser element
addSpan: (element) ->
@spanParsers.push @createParser element
blockParserList: ->
return @blockParsers
spanParserList: ->
return @spanParsers
blockParser: (extra=[]) ->
parsers = @blockParserList().concat(extra)
if not parsers.length
throw new Error "empty parser list"
return mona.or parsers...
spanParser: (extra=[]) ->
debugger
parsers = @spanParserList().concat(extra)
if not parsers.length
throw new Error "empty parser list"
return mona.or parsers...
exports.globalScope = globalScope = new ParserScope()
class NestedParserScope extends ParserScope
constructor: (@parent=globalScope, schema=[]) ->
super(schema) # XXX
blockParserList: ->
if @parent
return @blockParsers.concat @parent.blockParserList()
else return @blockParsers
spanParserList: ->
if @parent
return @spanParsers.concat @parent.spanParserList()
else return @spanParsers
class Element
constructor: (@schema=[]) ->
setScope: (scope) ->
return (parserState) ->
newState = copy(parserState)
newState.scope = scope
newState.value = scope
return newState
newScope: ->
return (parserState) =>
parent = parserState.scope
scope = new NestedParserScope(parent, @schema)
return @setScope(scope)(parserState)
collect: (parser) ->
return mona.or mona.collect(parser, min: 1),
mona.value []
createElement: (name, title, content) ->
return name: name, title: title, content: content
class NamedElement extends Element
constructor: (@nameParser, @schema=[]) ->
if typeof @nameParser == 'string'
@nameParser = mona.string @nameParser
if typeof @nameParser != 'function'
throw new Error "@nameParser is #{@nameParser}"
class NamedBlockElement extends NamedElement
indentedContentParser: ->
return mona.sequence (parse) =>
parse vskip
parse sameIndent
name = parse @nameParser
parse mona.string(':')
parse hskip
title = parse mona.text(mona.noneOf '\r\n')
parse newline
parse vskip
# TBD: move the newScope into a new parser
# which is called inside indentedBlock?
scope = parse @newScope()
content = parse @collect (indentedBlock scope.blockParser())
return mona.value @createElement(name, title, content)
underlinedTitleParser: ->
return mona.sequence (parse) =>
# TBD
return mona.fail()
parser: ->
return mona.or(
@indentedContentParser(),
@underlinedTitleParser(),
)
class SymbolicBlockElement extends Element
constructor: (@symbol, @name) ->
parser: ->
return mona.sequence (parse) =>
parse vskip
parse sameIndent
parse mona.string @symbol
parse hskip
title = parse mona.text(mona.noneOf '\r\n')
parse newline
parse vskip
scope = parse @newScope()
content = parse @collect (indentedBlock scope.blockParser())
return mona.value @createElement(@name, title, content)
class NamedSpanElement extends NamedElement
parser: ->
return mona.sequence (parse) =>
parse mona.string '['
name = parse @nameParser
parse hskip
attributes = parse mona.text mona.noneOf(':]\r\n')
# XXX: parse attributes
if parse mona.maybe mona.string ':'
parse hskip
scope = parse @newScope()
content = parse collectText scope.spanParser [mona.noneOf ']\r\n']
else
content = []
parse mona.string ']'
# TBD: parse attributes
return mona.value @createElement(name, attributes, content)
class Paragraph extends Element
constructor: ->
@normalText = mona.noneOf('\r\n')
parser: ->
return mona.sequence (parse) =>
parse vskip
scope = parse @newScope()
textInLine = collectText scope.spanParser [@normalText]
textLine = mona.followedBy(
mona.and(sameIndent,
noWhitespace,
textInLine),
newline)
return collect textLine
exports.Element = Element
exports.NamedBlockElement = NamedBlockElement
exports.NamedSpanElement = NamedSpanElement
exports.SymbolicBlockElement = SymbolicBlockElement
exports.Paragraph = Paragraph
indentation = mona.followedBy mona.text(mona.oneOf(' \t'), min: 1),
mona.noneOf('\n')
exports.indentation = indentation = mona.label indentation, 'indentation'
exports.getIndentLevel = getIndentLevel = (parserState) ->
levels = parserState.indentLevels || [""]
curLevel = levels[levels.length-1]
newState = copy(parserState)
newState.value = curLevel
return newState
exports.pushIndentLevel = pushIndentLevel = (level) ->
return (parserState) ->
levels = copy(parserState.indentLevels || [""])
levels.push(level)
parserState = copy(parserState)
parserState.indentLevels = levels
return parserState
exports.popIndentLevel = popIndentLevel = (parserState) ->
parserState = copy(parserState)
parserState.indentLevels = copy(parserState.indentLevels)
parserState.indentLevels.pop()
return parserState
exports.indentedBlock = indentedBlock = (content, indent=indentation) ->
return mona.sequence (parse) ->
parse vskip
i = parse(mona.lookAhead(indent))
if not i? or i == ''
# no indentation at all
return mona.fail()
level = parse(getIndentLevel)
if i.substr(0, level.length) != level
# not indented enough
return mona.fail()
parse(pushIndentLevel(i))
c = parse(content)
parse(popIndentLevel)
return mona.value c
exports.sameIndent = sameIndent = mona.sequence (parse) ->
level = parse(getIndentLevel)
if level == ''
return mona.value ''
else
return mona.string level
| true | # GainText
#
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
mona = require 'mona-parser'
{
copy, collect, collectText,
newline, vskip, hskip,
noWhitespace
} = require './parserutils'
class ParserScope
constructor: (blocks=[], spans=[]) ->
@blockParsers = (@createParser element for element in blocks)
@spanParsers = (@createParser element for element in spans)
createParser: (element) ->
p = element.parser()
if typeof p != 'function'
throw new Error "parser() returned #{p}"
return p
addBlock: (element) ->
@blockParsers.push @createParser element
addSpan: (element) ->
@spanParsers.push @createParser element
blockParserList: ->
return @blockParsers
spanParserList: ->
return @spanParsers
blockParser: (extra=[]) ->
parsers = @blockParserList().concat(extra)
if not parsers.length
throw new Error "empty parser list"
return mona.or parsers...
spanParser: (extra=[]) ->
debugger
parsers = @spanParserList().concat(extra)
if not parsers.length
throw new Error "empty parser list"
return mona.or parsers...
exports.globalScope = globalScope = new ParserScope()
class NestedParserScope extends ParserScope
constructor: (@parent=globalScope, schema=[]) ->
super(schema) # XXX
blockParserList: ->
if @parent
return @blockParsers.concat @parent.blockParserList()
else return @blockParsers
spanParserList: ->
if @parent
return @spanParsers.concat @parent.spanParserList()
else return @spanParsers
class Element
constructor: (@schema=[]) ->
setScope: (scope) ->
return (parserState) ->
newState = copy(parserState)
newState.scope = scope
newState.value = scope
return newState
newScope: ->
return (parserState) =>
parent = parserState.scope
scope = new NestedParserScope(parent, @schema)
return @setScope(scope)(parserState)
collect: (parser) ->
return mona.or mona.collect(parser, min: 1),
mona.value []
createElement: (name, title, content) ->
return name: name, title: title, content: content
class NamedElement extends Element
constructor: (@nameParser, @schema=[]) ->
if typeof @nameParser == 'string'
@nameParser = mona.string @nameParser
if typeof @nameParser != 'function'
throw new Error "@nameParser is #{@nameParser}"
class NamedBlockElement extends NamedElement
indentedContentParser: ->
return mona.sequence (parse) =>
parse vskip
parse sameIndent
name = parse @nameParser
parse mona.string(':')
parse hskip
title = parse mona.text(mona.noneOf '\r\n')
parse newline
parse vskip
# TBD: move the newScope into a new parser
# which is called inside indentedBlock?
scope = parse @newScope()
content = parse @collect (indentedBlock scope.blockParser())
return mona.value @createElement(name, title, content)
underlinedTitleParser: ->
return mona.sequence (parse) =>
# TBD
return mona.fail()
parser: ->
return mona.or(
@indentedContentParser(),
@underlinedTitleParser(),
)
class SymbolicBlockElement extends Element
constructor: (@symbol, @name) ->
parser: ->
return mona.sequence (parse) =>
parse vskip
parse sameIndent
parse mona.string @symbol
parse hskip
title = parse mona.text(mona.noneOf '\r\n')
parse newline
parse vskip
scope = parse @newScope()
content = parse @collect (indentedBlock scope.blockParser())
return mona.value @createElement(@name, title, content)
class NamedSpanElement extends NamedElement
parser: ->
return mona.sequence (parse) =>
parse mona.string '['
name = parse @nameParser
parse hskip
attributes = parse mona.text mona.noneOf(':]\r\n')
# XXX: parse attributes
if parse mona.maybe mona.string ':'
parse hskip
scope = parse @newScope()
content = parse collectText scope.spanParser [mona.noneOf ']\r\n']
else
content = []
parse mona.string ']'
# TBD: parse attributes
return mona.value @createElement(name, attributes, content)
class Paragraph extends Element
constructor: ->
@normalText = mona.noneOf('\r\n')
parser: ->
return mona.sequence (parse) =>
parse vskip
scope = parse @newScope()
textInLine = collectText scope.spanParser [@normalText]
textLine = mona.followedBy(
mona.and(sameIndent,
noWhitespace,
textInLine),
newline)
return collect textLine
exports.Element = Element
exports.NamedBlockElement = NamedBlockElement
exports.NamedSpanElement = NamedSpanElement
exports.SymbolicBlockElement = SymbolicBlockElement
exports.Paragraph = Paragraph
indentation = mona.followedBy mona.text(mona.oneOf(' \t'), min: 1),
mona.noneOf('\n')
exports.indentation = indentation = mona.label indentation, 'indentation'
exports.getIndentLevel = getIndentLevel = (parserState) ->
levels = parserState.indentLevels || [""]
curLevel = levels[levels.length-1]
newState = copy(parserState)
newState.value = curLevel
return newState
exports.pushIndentLevel = pushIndentLevel = (level) ->
return (parserState) ->
levels = copy(parserState.indentLevels || [""])
levels.push(level)
parserState = copy(parserState)
parserState.indentLevels = levels
return parserState
exports.popIndentLevel = popIndentLevel = (parserState) ->
parserState = copy(parserState)
parserState.indentLevels = copy(parserState.indentLevels)
parserState.indentLevels.pop()
return parserState
exports.indentedBlock = indentedBlock = (content, indent=indentation) ->
return mona.sequence (parse) ->
parse vskip
i = parse(mona.lookAhead(indent))
if not i? or i == ''
# no indentation at all
return mona.fail()
level = parse(getIndentLevel)
if i.substr(0, level.length) != level
# not indented enough
return mona.fail()
parse(pushIndentLevel(i))
c = parse(content)
parse(popIndentLevel)
return mona.value c
exports.sameIndent = sameIndent = mona.sequence (parse) ->
level = parse(getIndentLevel)
if level == ''
return mona.value ''
else
return mona.string level
|
[
{
"context": "# Bill Cospy says NO\n#\nmodule.exports = (robot) ->\n robot.hea",
"end": 12,
"score": 0.995614230632782,
"start": 2,
"tag": "NAME",
"value": "Bill Cospy"
}
] | scripts/cosby-no.coffee | RiotGamesMinions/lefay | 7 | # Bill Cospy says NO
#
module.exports = (robot) ->
robot.hear /uh uh/i, (msg) ->
msg.send "http://mlkshk.com/r/ISTL.gif"
| 167772 | # <NAME> says NO
#
module.exports = (robot) ->
robot.hear /uh uh/i, (msg) ->
msg.send "http://mlkshk.com/r/ISTL.gif"
| true | # PI:NAME:<NAME>END_PI says NO
#
module.exports = (robot) ->
robot.hear /uh uh/i, (msg) ->
msg.send "http://mlkshk.com/r/ISTL.gif"
|
[
{
"context": " member.teamId = team.id\n member.firstName = 'Test'\n teamsnap.saveMember member, (err, result) ->",
"end": 180,
"score": 0.9990757703781128,
"start": 176,
"tag": "NAME",
"value": "Test"
}
] | test/memberPhoneNumbers.coffee | teamsnap/teamsnap-javascript-sdk | 9 | describe 'Member Phone Numbers', ->
member = null
phone = null
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = 'Test'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to load all member phones for team', (done) ->
teamsnap.loadMemberPhoneNumbers team.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to create a member phone', (done) ->
phone = teamsnap.createMemberPhoneNumber()
phone.memberId = member.id
phone.url = 'http://example.com'
phone.phoneNumber = value = 'An example'
teamsnap.saveMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
result.should.have.property('phoneNumber', value)
teamsnap.loadMemberPhoneNumbers memberId: member.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
result.should.have.property('length', 1)
done()
it 'should be able to update a member phone', (done) ->
phone.phoneNumber = value = 'Changed text'
teamsnap.saveMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
result.should.have.property('phoneNumber', value)
done()
it 'should be able to delete a member phone', (done) ->
teamsnap.deleteMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
teamsnap.loadMemberPhoneNumbers memberId: member.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
result.should.have.property('length', 0)
done()
| 97843 | describe 'Member Phone Numbers', ->
member = null
phone = null
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = '<NAME>'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to load all member phones for team', (done) ->
teamsnap.loadMemberPhoneNumbers team.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to create a member phone', (done) ->
phone = teamsnap.createMemberPhoneNumber()
phone.memberId = member.id
phone.url = 'http://example.com'
phone.phoneNumber = value = 'An example'
teamsnap.saveMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
result.should.have.property('phoneNumber', value)
teamsnap.loadMemberPhoneNumbers memberId: member.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
result.should.have.property('length', 1)
done()
it 'should be able to update a member phone', (done) ->
phone.phoneNumber = value = 'Changed text'
teamsnap.saveMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
result.should.have.property('phoneNumber', value)
done()
it 'should be able to delete a member phone', (done) ->
teamsnap.deleteMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
teamsnap.loadMemberPhoneNumbers memberId: member.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
result.should.have.property('length', 0)
done()
| true | describe 'Member Phone Numbers', ->
member = null
phone = null
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = 'PI:NAME:<NAME>END_PI'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to load all member phones for team', (done) ->
teamsnap.loadMemberPhoneNumbers team.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to create a member phone', (done) ->
phone = teamsnap.createMemberPhoneNumber()
phone.memberId = member.id
phone.url = 'http://example.com'
phone.phoneNumber = value = 'An example'
teamsnap.saveMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
result.should.have.property('phoneNumber', value)
teamsnap.loadMemberPhoneNumbers memberId: member.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
result.should.have.property('length', 1)
done()
it 'should be able to update a member phone', (done) ->
phone.phoneNumber = value = 'Changed text'
teamsnap.saveMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
result.should.have.property('phoneNumber', value)
done()
it 'should be able to delete a member phone', (done) ->
teamsnap.deleteMemberPhoneNumber phone, (err, result) ->
expect(err).to.be.null
teamsnap.loadMemberPhoneNumbers memberId: member.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
result.should.have.property('length', 0)
done()
|
[
{
"context": ") ->\n super( @alias, @hostname, @directory, @username, @port, @localFiles, @usePassword, @lastOpenDirec",
"end": 892,
"score": 0.5870793461799622,
"start": 884,
"tag": "USERNAME",
"value": "username"
},
{
"context": "@hostname, port: @port, user: @username, passwo... | lib/model/ftp-host.coffee | fojtCz/remote-edit | 1 | Host = require './host'
RemoteFile = require './remote-file'
LocalFile = require './local-file'
async = require 'async'
filesize = require 'file-size'
moment = require 'moment'
ftp = require 'ftp'
Serializable = require 'serializable'
Path = require 'path'
_ = require 'underscore-plus'
fs = require 'fs-plus'
try
keytar = require 'keytar'
catch err
console.debug 'Keytar could not be loaded! Passwords will be stored in cleartext to remoteEdit.json!'
keytar = undefined
module.exports =
class FtpHost extends Host
Serializable.includeInto(this)
atom.deserializers.add(this)
Host.registerDeserializers(FtpHost)
connection: undefined
protocol: "ftp"
constructor: (@alias = null, @hostname, @directory, @username, @port = "21", @localFiles = [], @usePassword = true, @password, @lastOpenDirectory) ->
super( @alias, @hostname, @directory, @username, @port, @localFiles, @usePassword, @lastOpenDirectory )
createRemoteFileFromListObj: (name, item) ->
unless item.name? and item.name isnt '..' and item.name isnt '.'
return undefined
remoteFile = new RemoteFile(Path.normalize((name + '/' + item.name)).split(Path.sep).join('/'), false, false, false, filesize(item.size).human(), null, null)
if item.type == "d"
remoteFile.isDir = true
else if item.type == "-"
remoteFile.isFile = true
else if item.type == 'l'
remoteFile.isLink = true
if item.rights?
remoteFile.permissions = (@convertRWXToNumber(item.rights.user) + @convertRWXToNumber(item.rights.group) + @convertRWXToNumber(item.rights.other))
if item.date?
remoteFile.lastModified = moment(item.date).format("HH:mm:ss DD/MM/YYYY")
return remoteFile
convertRWXToNumber: (str) ->
toreturn = 0
for i in str
if i == 'r'
toreturn += 4
else if i == 'w'
toreturn += 2
else if i == 'x'
toreturn += 1
return toreturn.toString()
getServiceNamePassword: ->
"atom.remote-edit.ftp.password"
####################
# Overridden methods
getConnectionString: (connectionOptions) ->
if atom.config.get('remote-edit.storePasswordsUsingKeytar') and (keytar?)
keytarPassword = keytar.getPassword(@getServiceNamePassword(), @getServiceAccount())
_.extend({host: @hostname, port: @port, user: @username, password: keytarPassword}, connectionOptions)
else
_.extend({host: @hostname, port: @port, user: @username, password: @password}, connectionOptions)
close: (callback) ->
@connection?.end()
callback?(null)
connect: (callback, connectionOptions = {}) ->
@emitter.emit 'info', {message: "Connecting to ftp://#{@username}@#{@hostname}:#{@port}", type: 'info'}
async.waterfall([
(callback) =>
@connection = new ftp()
@connection.on 'error', (err) =>
@connection.end()
@emitter.emit 'info', {message: "Error occured when connecting to ftp://#{@username}@#{@hostname}:#{@port}", type: 'error'}
callback?(err)
@connection.on 'ready', =>
@emitter.emit 'info', {message: "Successfully connected to ftp://#{@username}@#{@hostname}:#{@port}", type: 'success'}
callback(null)
@connection.connect(@getConnectionString(connectionOptions))
], (err) ->
callback?(err)
)
isConnected: ->
@connection? and @connection.connected
getFilesMetadata: (path, callback) ->
async.waterfall([
(callback) =>
@connection.list(path, callback)
(files, callback) =>
async.map(files, ((item, callback) => callback(null, @createRemoteFileFromListObj(path, item))), callback)
(objects, callback) ->
async.filter(objects, ((item, callback) -> callback(item?)), ((result) -> callback(null, result)))
(objects, callback) ->
objects.push(new RemoteFile((path + "/.."), false, true, false, null, null, null))
if atom.config.get 'remote-edit.showHiddenFiles'
callback(null, objects)
else
async.filter(objects, ((item, callback) -> item.isHidden(callback)), ((result) -> callback(null, result)))
], (err, result) =>
if err?
@emitter.emit('info', {message: "Error occured when reading remote directory ftp://#{@username}@#{@hostname}:#{@port}:#{path}", type: 'error'} )
console.error err if err?
callback?(err)
else
callback?(err, (result.sort (a, b) -> return if a.name.toLowerCase() >= b.name.toLowerCase() then 1 else -1))
)
getFile: (localFile, callback) ->
@emitter.emit('info', {message: "Getting remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'info'})
async.waterfall([
(callback) =>
@connection.get(localFile.remoteFile.path, callback)
(readableStream, callback) =>
writableStream = fs.createWriteStream(localFile.path)
readableStream.pipe(writableStream)
readableStream.on 'end', -> callback(null)
], (err) =>
if err?
@emitter.emit('info', {message: "Error when reading remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'error'})
callback?(err, localFile)
else
@emitter.emit('info', {message: "Successfully read remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'success'})
callback?(null, localFile)
)
writeFile: (localFile, callback) ->
@emitter.emit 'info', {message: "Writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'info'}
async.waterfall([
(callback) =>
@connection.put(localFile.path, localFile.remoteFile.path, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occured when writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully wrote remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'success'})
@close()
callback?(err)
)
serializeParams: ->
{
@alias
@hostname
@directory
@username
@port
localFiles: localFile.serialize() for localFile in @localFiles
@usePassword
@password
@lastOpenDirectory
}
deserializeParams: (params) ->
tmpArray = []
tmpArray.push(LocalFile.deserialize(localFile, host: this)) for localFile in params.localFiles
params.localFiles = tmpArray
params
createFolder: (folderpath, callback) ->
async.waterfall([
(callback) =>
@connection.mkdir(folderpath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when creating remote folder ftp://#{@username}@#{@hostname}:#{@port}#{folderpath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully created remote folder ftp://#{@username}@#{@hostname}:#{@port}#{folderpath}", type: 'success'})
callback?(err)
)
createFile: (filepath, callback) ->
if filepath.indexOf(".") == -1
@emitter.emit('info', {message: "Invalid file name", type: 'error'})
else
@connection.get(filepath, (err, result) =>
if result
@emitter.emit('info', {message: "File already exists", type: 'error'})
else
async.waterfall([
(callback) =>
@connection.put(new Buffer(''), filepath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{filepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully wrote remote file ftp://#{@username}@#{@hostname}:#{@port}#{filepath}", type: 'success'})
callback?(err)
)
)
renameFolderFile: (path, oldName, newName, isFolder, callback) ->
if oldName == newName
@emitter.emit('info', {message: "The new name is same as the old", type: 'error'})
else
oldPath = path + "/" + oldName
newPath = path + "/" + newName
async.waterfall([
(callback) =>
if(isFolder)
@connection.list(newPath, callback)
else
@connection.get(newPath, callback)
], (err, result) =>
if (isFolder and result.length > 0) or (!isFolder and result)
@emitter.emit('info', {message: "#{if isFolder then 'Folder' else 'File'} already exists", type: 'error'})
else
async.waterfall([
(callback) =>
@connection.rename(oldPath, newPath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when renaming remote folder/file ftp://#{@username}@#{@hostname}:#{@port}#{oldPath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully renamed remote folder/file ftp://#{@username}@#{@hostname}:#{@port}#{oldPath}", type: 'success'})
callback?(err)
)
)
deleteFolderFile: (deletepath, isFolder, callback) ->
if isFolder
@connection.rmdir(deletepath, (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when deleting remote folder ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully deleted remote folder ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'success'})
callback?(err)
)
else
@connection.delete(deletepath, (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when deleting remote file ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully deleted remote file ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'success'})
callback?(err)
)
| 22321 | Host = require './host'
RemoteFile = require './remote-file'
LocalFile = require './local-file'
async = require 'async'
filesize = require 'file-size'
moment = require 'moment'
ftp = require 'ftp'
Serializable = require 'serializable'
Path = require 'path'
_ = require 'underscore-plus'
fs = require 'fs-plus'
try
keytar = require 'keytar'
catch err
console.debug 'Keytar could not be loaded! Passwords will be stored in cleartext to remoteEdit.json!'
keytar = undefined
module.exports =
class FtpHost extends Host
Serializable.includeInto(this)
atom.deserializers.add(this)
Host.registerDeserializers(FtpHost)
connection: undefined
protocol: "ftp"
constructor: (@alias = null, @hostname, @directory, @username, @port = "21", @localFiles = [], @usePassword = true, @password, @lastOpenDirectory) ->
super( @alias, @hostname, @directory, @username, @port, @localFiles, @usePassword, @lastOpenDirectory )
createRemoteFileFromListObj: (name, item) ->
unless item.name? and item.name isnt '..' and item.name isnt '.'
return undefined
remoteFile = new RemoteFile(Path.normalize((name + '/' + item.name)).split(Path.sep).join('/'), false, false, false, filesize(item.size).human(), null, null)
if item.type == "d"
remoteFile.isDir = true
else if item.type == "-"
remoteFile.isFile = true
else if item.type == 'l'
remoteFile.isLink = true
if item.rights?
remoteFile.permissions = (@convertRWXToNumber(item.rights.user) + @convertRWXToNumber(item.rights.group) + @convertRWXToNumber(item.rights.other))
if item.date?
remoteFile.lastModified = moment(item.date).format("HH:mm:ss DD/MM/YYYY")
return remoteFile
convertRWXToNumber: (str) ->
toreturn = 0
for i in str
if i == 'r'
toreturn += 4
else if i == 'w'
toreturn += 2
else if i == 'x'
toreturn += 1
return toreturn.toString()
getServiceNamePassword: ->
"atom.remote-edit.ftp.password"
####################
# Overridden methods
getConnectionString: (connectionOptions) ->
if atom.config.get('remote-edit.storePasswordsUsingKeytar') and (keytar?)
keytarPassword = keytar.getPassword(@getServiceNamePassword(), @getServiceAccount())
_.extend({host: @hostname, port: @port, user: @username, password: keytarPassword}, connectionOptions)
else
_.extend({host: @hostname, port: @port, user: @username, password: <PASSWORD>}, connectionOptions)
close: (callback) ->
@connection?.end()
callback?(null)
connect: (callback, connectionOptions = {}) ->
@emitter.emit 'info', {message: "Connecting to ftp://#{@username}@#{@hostname}:#{@port}", type: 'info'}
async.waterfall([
(callback) =>
@connection = new ftp()
@connection.on 'error', (err) =>
@connection.end()
@emitter.emit 'info', {message: "Error occured when connecting to ftp://#{@username}@#{@hostname}:#{@port}", type: 'error'}
callback?(err)
@connection.on 'ready', =>
@emitter.emit 'info', {message: "Successfully connected to ftp://#{@username}@#{@hostname}:#{@port}", type: 'success'}
callback(null)
@connection.connect(@getConnectionString(connectionOptions))
], (err) ->
callback?(err)
)
isConnected: ->
@connection? and @connection.connected
getFilesMetadata: (path, callback) ->
async.waterfall([
(callback) =>
@connection.list(path, callback)
(files, callback) =>
async.map(files, ((item, callback) => callback(null, @createRemoteFileFromListObj(path, item))), callback)
(objects, callback) ->
async.filter(objects, ((item, callback) -> callback(item?)), ((result) -> callback(null, result)))
(objects, callback) ->
objects.push(new RemoteFile((path + "/.."), false, true, false, null, null, null))
if atom.config.get 'remote-edit.showHiddenFiles'
callback(null, objects)
else
async.filter(objects, ((item, callback) -> item.isHidden(callback)), ((result) -> callback(null, result)))
], (err, result) =>
if err?
@emitter.emit('info', {message: "Error occured when reading remote directory ftp://#{@username}@#{@hostname}:#{@port}:#{path}", type: 'error'} )
console.error err if err?
callback?(err)
else
callback?(err, (result.sort (a, b) -> return if a.name.toLowerCase() >= b.name.toLowerCase() then 1 else -1))
)
getFile: (localFile, callback) ->
@emitter.emit('info', {message: "Getting remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'info'})
async.waterfall([
(callback) =>
@connection.get(localFile.remoteFile.path, callback)
(readableStream, callback) =>
writableStream = fs.createWriteStream(localFile.path)
readableStream.pipe(writableStream)
readableStream.on 'end', -> callback(null)
], (err) =>
if err?
@emitter.emit('info', {message: "Error when reading remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'error'})
callback?(err, localFile)
else
@emitter.emit('info', {message: "Successfully read remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'success'})
callback?(null, localFile)
)
writeFile: (localFile, callback) ->
@emitter.emit 'info', {message: "Writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'info'}
async.waterfall([
(callback) =>
@connection.put(localFile.path, localFile.remoteFile.path, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occured when writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully wrote remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'success'})
@close()
callback?(err)
)
serializeParams: ->
{
@alias
@hostname
@directory
@username
@port
localFiles: localFile.serialize() for localFile in @localFiles
@usePassword
@password
@lastOpenDirectory
}
deserializeParams: (params) ->
tmpArray = []
tmpArray.push(LocalFile.deserialize(localFile, host: this)) for localFile in params.localFiles
params.localFiles = tmpArray
params
createFolder: (folderpath, callback) ->
async.waterfall([
(callback) =>
@connection.mkdir(folderpath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when creating remote folder ftp://#{@username}@#{@hostname}:#{@port}#{folderpath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully created remote folder ftp://#{@username}@#{@hostname}:#{@port}#{folderpath}", type: 'success'})
callback?(err)
)
createFile: (filepath, callback) ->
if filepath.indexOf(".") == -1
@emitter.emit('info', {message: "Invalid file name", type: 'error'})
else
@connection.get(filepath, (err, result) =>
if result
@emitter.emit('info', {message: "File already exists", type: 'error'})
else
async.waterfall([
(callback) =>
@connection.put(new Buffer(''), filepath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{filepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully wrote remote file ftp://#{@username}@#{@hostname}:#{@port}#{filepath}", type: 'success'})
callback?(err)
)
)
renameFolderFile: (path, oldName, newName, isFolder, callback) ->
if oldName == newName
@emitter.emit('info', {message: "The new name is same as the old", type: 'error'})
else
oldPath = path + "/" + oldName
newPath = path + "/" + newName
async.waterfall([
(callback) =>
if(isFolder)
@connection.list(newPath, callback)
else
@connection.get(newPath, callback)
], (err, result) =>
if (isFolder and result.length > 0) or (!isFolder and result)
@emitter.emit('info', {message: "#{if isFolder then 'Folder' else 'File'} already exists", type: 'error'})
else
async.waterfall([
(callback) =>
@connection.rename(oldPath, newPath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when renaming remote folder/file ftp://#{@username}@#{@hostname}:#{@port}#{oldPath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully renamed remote folder/file ftp://#{@username}@#{@hostname}:#{@port}#{oldPath}", type: 'success'})
callback?(err)
)
)
deleteFolderFile: (deletepath, isFolder, callback) ->
if isFolder
@connection.rmdir(deletepath, (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when deleting remote folder ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully deleted remote folder ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'success'})
callback?(err)
)
else
@connection.delete(deletepath, (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when deleting remote file ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully deleted remote file ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'success'})
callback?(err)
)
| true | Host = require './host'
RemoteFile = require './remote-file'
LocalFile = require './local-file'
async = require 'async'
filesize = require 'file-size'
moment = require 'moment'
ftp = require 'ftp'
Serializable = require 'serializable'
Path = require 'path'
_ = require 'underscore-plus'
fs = require 'fs-plus'
try
keytar = require 'keytar'
catch err
console.debug 'Keytar could not be loaded! Passwords will be stored in cleartext to remoteEdit.json!'
keytar = undefined
module.exports =
class FtpHost extends Host
Serializable.includeInto(this)
atom.deserializers.add(this)
Host.registerDeserializers(FtpHost)
connection: undefined
protocol: "ftp"
constructor: (@alias = null, @hostname, @directory, @username, @port = "21", @localFiles = [], @usePassword = true, @password, @lastOpenDirectory) ->
super( @alias, @hostname, @directory, @username, @port, @localFiles, @usePassword, @lastOpenDirectory )
createRemoteFileFromListObj: (name, item) ->
unless item.name? and item.name isnt '..' and item.name isnt '.'
return undefined
remoteFile = new RemoteFile(Path.normalize((name + '/' + item.name)).split(Path.sep).join('/'), false, false, false, filesize(item.size).human(), null, null)
if item.type == "d"
remoteFile.isDir = true
else if item.type == "-"
remoteFile.isFile = true
else if item.type == 'l'
remoteFile.isLink = true
if item.rights?
remoteFile.permissions = (@convertRWXToNumber(item.rights.user) + @convertRWXToNumber(item.rights.group) + @convertRWXToNumber(item.rights.other))
if item.date?
remoteFile.lastModified = moment(item.date).format("HH:mm:ss DD/MM/YYYY")
return remoteFile
convertRWXToNumber: (str) ->
toreturn = 0
for i in str
if i == 'r'
toreturn += 4
else if i == 'w'
toreturn += 2
else if i == 'x'
toreturn += 1
return toreturn.toString()
getServiceNamePassword: ->
"atom.remote-edit.ftp.password"
####################
# Overridden methods
getConnectionString: (connectionOptions) ->
if atom.config.get('remote-edit.storePasswordsUsingKeytar') and (keytar?)
keytarPassword = keytar.getPassword(@getServiceNamePassword(), @getServiceAccount())
_.extend({host: @hostname, port: @port, user: @username, password: keytarPassword}, connectionOptions)
else
_.extend({host: @hostname, port: @port, user: @username, password: PI:PASSWORD:<PASSWORD>END_PI}, connectionOptions)
close: (callback) ->
@connection?.end()
callback?(null)
connect: (callback, connectionOptions = {}) ->
@emitter.emit 'info', {message: "Connecting to ftp://#{@username}@#{@hostname}:#{@port}", type: 'info'}
async.waterfall([
(callback) =>
@connection = new ftp()
@connection.on 'error', (err) =>
@connection.end()
@emitter.emit 'info', {message: "Error occured when connecting to ftp://#{@username}@#{@hostname}:#{@port}", type: 'error'}
callback?(err)
@connection.on 'ready', =>
@emitter.emit 'info', {message: "Successfully connected to ftp://#{@username}@#{@hostname}:#{@port}", type: 'success'}
callback(null)
@connection.connect(@getConnectionString(connectionOptions))
], (err) ->
callback?(err)
)
isConnected: ->
@connection? and @connection.connected
getFilesMetadata: (path, callback) ->
async.waterfall([
(callback) =>
@connection.list(path, callback)
(files, callback) =>
async.map(files, ((item, callback) => callback(null, @createRemoteFileFromListObj(path, item))), callback)
(objects, callback) ->
async.filter(objects, ((item, callback) -> callback(item?)), ((result) -> callback(null, result)))
(objects, callback) ->
objects.push(new RemoteFile((path + "/.."), false, true, false, null, null, null))
if atom.config.get 'remote-edit.showHiddenFiles'
callback(null, objects)
else
async.filter(objects, ((item, callback) -> item.isHidden(callback)), ((result) -> callback(null, result)))
], (err, result) =>
if err?
@emitter.emit('info', {message: "Error occured when reading remote directory ftp://#{@username}@#{@hostname}:#{@port}:#{path}", type: 'error'} )
console.error err if err?
callback?(err)
else
callback?(err, (result.sort (a, b) -> return if a.name.toLowerCase() >= b.name.toLowerCase() then 1 else -1))
)
getFile: (localFile, callback) ->
@emitter.emit('info', {message: "Getting remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'info'})
async.waterfall([
(callback) =>
@connection.get(localFile.remoteFile.path, callback)
(readableStream, callback) =>
writableStream = fs.createWriteStream(localFile.path)
readableStream.pipe(writableStream)
readableStream.on 'end', -> callback(null)
], (err) =>
if err?
@emitter.emit('info', {message: "Error when reading remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'error'})
callback?(err, localFile)
else
@emitter.emit('info', {message: "Successfully read remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'success'})
callback?(null, localFile)
)
writeFile: (localFile, callback) ->
@emitter.emit 'info', {message: "Writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'info'}
async.waterfall([
(callback) =>
@connection.put(localFile.path, localFile.remoteFile.path, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occured when writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully wrote remote file ftp://#{@username}@#{@hostname}:#{@port}#{localFile.remoteFile.path}", type: 'success'})
@close()
callback?(err)
)
serializeParams: ->
{
@alias
@hostname
@directory
@username
@port
localFiles: localFile.serialize() for localFile in @localFiles
@usePassword
@password
@lastOpenDirectory
}
deserializeParams: (params) ->
tmpArray = []
tmpArray.push(LocalFile.deserialize(localFile, host: this)) for localFile in params.localFiles
params.localFiles = tmpArray
params
createFolder: (folderpath, callback) ->
async.waterfall([
(callback) =>
@connection.mkdir(folderpath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when creating remote folder ftp://#{@username}@#{@hostname}:#{@port}#{folderpath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully created remote folder ftp://#{@username}@#{@hostname}:#{@port}#{folderpath}", type: 'success'})
callback?(err)
)
createFile: (filepath, callback) ->
if filepath.indexOf(".") == -1
@emitter.emit('info', {message: "Invalid file name", type: 'error'})
else
@connection.get(filepath, (err, result) =>
if result
@emitter.emit('info', {message: "File already exists", type: 'error'})
else
async.waterfall([
(callback) =>
@connection.put(new Buffer(''), filepath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when writing remote file ftp://#{@username}@#{@hostname}:#{@port}#{filepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully wrote remote file ftp://#{@username}@#{@hostname}:#{@port}#{filepath}", type: 'success'})
callback?(err)
)
)
renameFolderFile: (path, oldName, newName, isFolder, callback) ->
if oldName == newName
@emitter.emit('info', {message: "The new name is same as the old", type: 'error'})
else
oldPath = path + "/" + oldName
newPath = path + "/" + newName
async.waterfall([
(callback) =>
if(isFolder)
@connection.list(newPath, callback)
else
@connection.get(newPath, callback)
], (err, result) =>
if (isFolder and result.length > 0) or (!isFolder and result)
@emitter.emit('info', {message: "#{if isFolder then 'Folder' else 'File'} already exists", type: 'error'})
else
async.waterfall([
(callback) =>
@connection.rename(oldPath, newPath, callback)
], (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when renaming remote folder/file ftp://#{@username}@#{@hostname}:#{@port}#{oldPath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully renamed remote folder/file ftp://#{@username}@#{@hostname}:#{@port}#{oldPath}", type: 'success'})
callback?(err)
)
)
deleteFolderFile: (deletepath, isFolder, callback) ->
if isFolder
@connection.rmdir(deletepath, (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when deleting remote folder ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully deleted remote folder ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'success'})
callback?(err)
)
else
@connection.delete(deletepath, (err) =>
if err?
@emitter.emit('info', {message: "Error occurred when deleting remote file ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'error'})
console.error err if err?
else
@emitter.emit('info', {message: "Successfully deleted remote file ftp://#{@username}@#{@hostname}:#{@port}#{deletepath}", type: 'success'})
callback?(err)
)
|
[
{
"context": " describe 'with ascii values', ->\n key = \"Hello\"\n val = \"World\"\n test key, val\n de",
"end": 724,
"score": 0.9540519118309021,
"start": 719,
"tag": "KEY",
"value": "Hello"
},
{
"context": "scii values', ->\n key = \"Hello\"\n val ... | test/handle-test.coffee | kyledrake/node-leveldb | 1 | assert = require 'assert'
crypto = require 'crypto'
leveldb = require '../lib'
path = require 'path'
fs = require 'fs'
exists = fs.existsSync || path.existsSync
#Test sync open -- Jus t test a single get/put cycle to verify it's a valid handle
#As the code is basically the same as the async version's code, all subsequ
describe 'syncOpen', ->
filename = "#{__dirname}/../tmp/open-sync-test-file"
db = null
beforeEach (done) ->
db = leveldb.openSync filename, {create_if_missing: true, error_if_exists: true}
done()
afterEach (done) ->
db = null
iterator = null
leveldb.destroy filename, done
itShouldBehave = (test) ->
describe 'with ascii values', ->
key = "Hello"
val = "World"
test key, val
describe 'as_buffer', -> test key, val, true
describe 'with buffer values', ->
key = new Buffer [1,9,9,9]
val = new Buffer [1,2,3,4]
test key, val
describe 'as_buffer', -> test key, val, true
itShouldBehave (key, val, asBuffer) ->
it 'should put key/value pair', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.get key, as_buffer: asBuffer, (err, value) ->
assert.ifError err
assert Buffer.isBuffer value if asBuffer
assert.equal val.toString(), value.toString()
done()
describe 'Handle', ->
filename = "#{__dirname}/../tmp/handle-test-file"
db = null
itShouldBehave = (test) ->
describe 'with ascii values', ->
key = "Hello"
val = "World"
test key, val
describe 'as_buffer', -> test key, val, true
describe 'with buffer values', ->
key = new Buffer [1,9,9,9]
val = new Buffer [1,2,3,4]
test key, val
describe 'as_buffer', -> test key, val, true
beforeEach (done) ->
leveldb.open filename,
create_if_missing: true, error_if_exists: true, (err, handle) ->
assert.ifError err
db = handle
done()
afterEach (done) ->
db = null
iterator = null
leveldb.destroy filename, done
it 'should repair database', (done) ->
leveldb.repair filename, (err) ->
assert.ifError err
assert exists filename
done()
it 'should get property', (done) ->
db.property 'leveldb.stats', (err, value) ->
assert.ifError err
assert value
done err
it 'should not property', (done) ->
db.property '', (err, value) ->
assert.ifError value
done err
it 'should get approximate size of 0', (done) ->
db.approximateSizes ['0', '1'], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert.ifError sizes[0]
db.approximateSizes [['0', '1']], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert.ifError sizes[0]
done()
it 'should get approximate size of range', (done) ->
batch = db.batch()
batch.put "#{i}", crypto.randomBytes 1024 for i in [10..99]
batch.write (err) ->
assert.ifError err
# reopen database for accurate sizes
leveldb.open filename, (err, handle) ->
assert.ifError err
db = handle
db.approximateSizes ['10', '99'], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert sizes[0]
db.approximateSizes [['10', '99']], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert sizes[0]
db.approximateSizes [['10', '49'], ['50', '99']], (err, sizes) ->
assert.ifError err
assert.equal 2, sizes.length
assert sizes[0]
assert sizes[1]
done()
itShouldBehave (key, val, asBuffer) ->
it 'should put key/value pair', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.get key, as_buffer: asBuffer, (err, value) ->
assert.ifError err
assert Buffer.isBuffer value if asBuffer
assert.equal val.toString(), value.toString()
done()
it 'should delete key', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.del key, (err) ->
assert.ifError err
db.get key, (err, value) ->
assert.ifError value
done err
| 139934 | assert = require 'assert'
crypto = require 'crypto'
leveldb = require '../lib'
path = require 'path'
fs = require 'fs'
exists = fs.existsSync || path.existsSync
#Test sync open -- Jus t test a single get/put cycle to verify it's a valid handle
#As the code is basically the same as the async version's code, all subsequ
describe 'syncOpen', ->
filename = "#{__dirname}/../tmp/open-sync-test-file"
db = null
beforeEach (done) ->
db = leveldb.openSync filename, {create_if_missing: true, error_if_exists: true}
done()
afterEach (done) ->
db = null
iterator = null
leveldb.destroy filename, done
itShouldBehave = (test) ->
describe 'with ascii values', ->
key = "<KEY>"
val = "<KEY>"
test key, val
describe 'as_buffer', -> test key, val, true
describe 'with buffer values', ->
key = new Buffer [<KEY>]
val = new Buffer [1,2,3,4]
test key, val
describe 'as_buffer', -> test key, val, true
itShouldBehave (key, val, asBuffer) ->
it 'should put key/value pair', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.get key, as_buffer: asBuffer, (err, value) ->
assert.ifError err
assert Buffer.isBuffer value if asBuffer
assert.equal val.toString(), value.toString()
done()
describe 'Handle', ->
filename = "#{__dirname}/../tmp/handle-test-file"
db = null
itShouldBehave = (test) ->
describe 'with ascii values', ->
key = "<KEY>"
val = "<KEY>"
test key, val
describe 'as_buffer', -> test key, val, true
describe 'with buffer values', ->
key = new Buffer [<KEY>]
val = new Buffer [1,2,3,4]
test key, val
describe 'as_buffer', -> test key, val, true
beforeEach (done) ->
leveldb.open filename,
create_if_missing: true, error_if_exists: true, (err, handle) ->
assert.ifError err
db = handle
done()
afterEach (done) ->
db = null
iterator = null
leveldb.destroy filename, done
it 'should repair database', (done) ->
leveldb.repair filename, (err) ->
assert.ifError err
assert exists filename
done()
it 'should get property', (done) ->
db.property 'leveldb.stats', (err, value) ->
assert.ifError err
assert value
done err
it 'should not property', (done) ->
db.property '', (err, value) ->
assert.ifError value
done err
it 'should get approximate size of 0', (done) ->
db.approximateSizes ['0', '1'], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert.ifError sizes[0]
db.approximateSizes [['0', '1']], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert.ifError sizes[0]
done()
it 'should get approximate size of range', (done) ->
batch = db.batch()
batch.put "#{i}", crypto.randomBytes 1024 for i in [10..99]
batch.write (err) ->
assert.ifError err
# reopen database for accurate sizes
leveldb.open filename, (err, handle) ->
assert.ifError err
db = handle
db.approximateSizes ['10', '99'], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert sizes[0]
db.approximateSizes [['10', '99']], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert sizes[0]
db.approximateSizes [['10', '49'], ['50', '99']], (err, sizes) ->
assert.ifError err
assert.equal 2, sizes.length
assert sizes[0]
assert sizes[1]
done()
itShouldBehave (key, val, asBuffer) ->
it 'should put key/value pair', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.get key, as_buffer: asBuffer, (err, value) ->
assert.ifError err
assert Buffer.isBuffer value if asBuffer
assert.equal val.toString(), value.toString()
done()
it 'should delete key', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.del key, (err) ->
assert.ifError err
db.get key, (err, value) ->
assert.ifError value
done err
| true | assert = require 'assert'
crypto = require 'crypto'
leveldb = require '../lib'
path = require 'path'
fs = require 'fs'
exists = fs.existsSync || path.existsSync
#Test sync open -- Jus t test a single get/put cycle to verify it's a valid handle
#As the code is basically the same as the async version's code, all subsequ
describe 'syncOpen', ->
filename = "#{__dirname}/../tmp/open-sync-test-file"
db = null
beforeEach (done) ->
db = leveldb.openSync filename, {create_if_missing: true, error_if_exists: true}
done()
afterEach (done) ->
db = null
iterator = null
leveldb.destroy filename, done
itShouldBehave = (test) ->
describe 'with ascii values', ->
key = "PI:KEY:<KEY>END_PI"
val = "PI:KEY:<KEY>END_PI"
test key, val
describe 'as_buffer', -> test key, val, true
describe 'with buffer values', ->
key = new Buffer [PI:KEY:<KEY>END_PI]
val = new Buffer [1,2,3,4]
test key, val
describe 'as_buffer', -> test key, val, true
itShouldBehave (key, val, asBuffer) ->
it 'should put key/value pair', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.get key, as_buffer: asBuffer, (err, value) ->
assert.ifError err
assert Buffer.isBuffer value if asBuffer
assert.equal val.toString(), value.toString()
done()
describe 'Handle', ->
filename = "#{__dirname}/../tmp/handle-test-file"
db = null
itShouldBehave = (test) ->
describe 'with ascii values', ->
key = "PI:KEY:<KEY>END_PI"
val = "PI:KEY:<KEY>END_PI"
test key, val
describe 'as_buffer', -> test key, val, true
describe 'with buffer values', ->
key = new Buffer [PI:KEY:<KEY>END_PI]
val = new Buffer [1,2,3,4]
test key, val
describe 'as_buffer', -> test key, val, true
beforeEach (done) ->
leveldb.open filename,
create_if_missing: true, error_if_exists: true, (err, handle) ->
assert.ifError err
db = handle
done()
afterEach (done) ->
db = null
iterator = null
leveldb.destroy filename, done
it 'should repair database', (done) ->
leveldb.repair filename, (err) ->
assert.ifError err
assert exists filename
done()
it 'should get property', (done) ->
db.property 'leveldb.stats', (err, value) ->
assert.ifError err
assert value
done err
it 'should not property', (done) ->
db.property '', (err, value) ->
assert.ifError value
done err
it 'should get approximate size of 0', (done) ->
db.approximateSizes ['0', '1'], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert.ifError sizes[0]
db.approximateSizes [['0', '1']], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert.ifError sizes[0]
done()
it 'should get approximate size of range', (done) ->
batch = db.batch()
batch.put "#{i}", crypto.randomBytes 1024 for i in [10..99]
batch.write (err) ->
assert.ifError err
# reopen database for accurate sizes
leveldb.open filename, (err, handle) ->
assert.ifError err
db = handle
db.approximateSizes ['10', '99'], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert sizes[0]
db.approximateSizes [['10', '99']], (err, sizes) ->
assert.ifError err
assert.equal 1, sizes.length
assert sizes[0]
db.approximateSizes [['10', '49'], ['50', '99']], (err, sizes) ->
assert.ifError err
assert.equal 2, sizes.length
assert sizes[0]
assert sizes[1]
done()
itShouldBehave (key, val, asBuffer) ->
it 'should put key/value pair', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.get key, as_buffer: asBuffer, (err, value) ->
assert.ifError err
assert Buffer.isBuffer value if asBuffer
assert.equal val.toString(), value.toString()
done()
it 'should delete key', (done) ->
db.put key, val, (err) ->
assert.ifError err
db.del key, (err) ->
assert.ifError err
db.get key, (err, value) ->
assert.ifError value
done err
|
[
{
"context": "\n date: 'Fri Mar 9 16:22:13 2018'\n author: 'Jonas Treu'\n }\n {\n commit: '08e7197'\n date: 'Thu Mar",
"end": 104,
"score": 0.9998968243598938,
"start": 94,
"tag": "NAME",
"value": "Jonas Treu"
},
{
"context": "\n date: 'Thu Mar 8 16:19:36 2018'\n ... | prototypes/framerBuilds.framer/modules/builds.coffee | davo/FramerMunichMeetup | 2 | exports.builds = [
{
commit: 'c042de3'
date: 'Fri Mar 9 16:22:13 2018'
author: 'Jonas Treu'
}
{
commit: '08e7197'
date: 'Thu Mar 8 16:19:36 2018'
author: 'Jonas Treu'
}
{
commit: 'a426bbe'
date: 'Wed Mar 7 15:43:02 2018'
author: 'Jonas Treu'
}
{
commit: '7cd32b4'
date: 'Fri Mar 2 13:25:08 2018'
author: 'Niels van Hoor'
}
{
commit: '3788e18'
date: 'Fri Mar 2 12:18:38 2018'
author: 'Niels van Hoor'
}
{
commit: '1206293'
date: 'Fri Feb 23 11:47:58 2018'
author: 'Niels van Hoor'
}
{
commit: 'f32dd1f'
date: 'Tue Feb 20 13:03:00 2018'
author: 'Niels van Hoor'
}
{
commit: '5cf9935'
date: 'Tue Feb 20 13:01:11 2018'
author: 'Niels van Hoor'
}
{
commit: '03ae78d'
date: 'Tue Feb 20 09:41:19 2018'
author: 'Niels van Hoor'
}
{
commit: '8616043'
date: 'Tue Feb 20 09:32:19 2018'
author: 'Eelco Lempsink'
}
{
commit: 'c110c3b'
date: 'Mon Feb 19 14:14:33 2018'
author: 'Onne'
}
{
commit: 'aaf80b0'
date: 'Fri Feb 16 15:38:13 2018'
author: 'Niels van Hoor'
}
{
commit: '64a7037'
date: 'Fri Feb 16 15:33:36 2018'
author: 'Niels van Hoor'
}
{
commit: '454aef4'
date: 'Fri Feb 16 10:20:27 2018'
author: 'Eelco Lempsink'
}
{
commit: '5ce33ab'
date: 'Mon Feb 12 13:20:46 2018'
author: 'Niels van Hoor'
}
{
commit: '94daa97'
date: 'Mon Feb 12 13:05:12 2018'
author: 'Niels van Hoor'
}
{
commit: 'a32809d'
date: 'Mon Feb 12 12:24:54 2018'
author: 'Niels van Hoor'
}
{
commit: '0590c8c'
date: 'Thu Feb 1 23:24:17 2018'
author: 'Niels van Hoor'
}
{
commit: 'e210fb1'
date: 'Thu Feb 1 23:05:19 2018'
author: 'Niels van Hoor'
}
{
commit: '90d9896'
date: 'Thu Feb 1 18:47:35 2018'
author: 'Niels van Hoor'
}
{
commit: 'e249eb0'
date: 'Thu Feb 1 15:34:44 2018'
author: 'Niels van Hoor'
}
{
commit: '08c3304'
date: 'Thu Feb 1 11:55:58 2018'
author: 'Niels van Hoor'
}
{
commit: '2d58487'
date: 'Thu Feb 1 11:55:42 2018'
author: 'Niels van Hoor'
}
{
commit: '9f07664'
date: 'Thu Feb 1 09:47:28 2018'
author: 'Niels van Hoor'
}
{
commit: '35dd174'
date: 'Thu Feb 1 09:47:15 2018'
author: 'Niels van Hoor'
}
{
commit: '8cc82e7'
date: 'Thu Feb 1 09:18:03 2018'
author: 'Niels van Hoor'
}
{
commit: 'e1711bd'
date: 'Thu Feb 1 00:08:08 2018'
author: 'Niels van Hoor'
}
{
commit: '29e1659'
date: 'Wed Jan 31 23:29:27 2018'
author: 'Niels van Hoor'
}
{
commit: '0ff94ee'
date: 'Wed Jan 31 23:19:56 2018'
author: 'Niels van Hoor'
}
{
commit: '8b1c6f3'
date: 'Wed Jan 31 22:46:38 2018'
author: 'Niels van Hoor'
}
{
commit: '77dbf87'
date: 'Wed Jan 31 22:41:34 2018'
author: 'Niels van Hoor'
}
{
commit: '23da2bf'
date: 'Sun Jan 28 01:11:53 2018'
author: 'Niels van Hoor'
}
{
commit: '0b25219'
date: 'Fri Jan 26 17:35:28 2018'
author: 'Niels van Hoor'
}
{
commit: 'd9d5ef5'
date: 'Fri Jan 26 16:34:42 2018'
author: 'Niels van Hoor'
}
{
commit: 'e8c41ca'
date: 'Fri Jan 26 16:34:28 2018'
author: 'Niels van Hoor'
}
{
commit: '5d18f8a'
date: 'Fri Jan 26 16:20:39 2018'
author: 'Niels van Hoor'
}
{
commit: '8bb17ec'
date: 'Fri Jan 26 16:19:31 2018'
author: 'Niels van Hoor'
}
{
commit: '8744c3e'
date: 'Wed Jan 24 12:12:02 2018'
author: 'Niels van Hoor'
}
{
commit: 'a2342fc'
date: 'Wed Jan 24 12:11:50 2018'
author: 'Niels van Hoor'
}
{
commit: '370f5c8'
date: 'Wed Jan 24 12:11:18 2018'
author: 'Niels van Hoor'
}
{
commit: '044f470'
date: 'Wed Jan 24 12:07:05 2018'
author: 'Niels van Hoor'
}
{
commit: '8df9369'
date: 'Tue Jan 23 13:39:41 2018'
author: 'Niels van Hoor'
}
{
commit: '73e3afb'
date: 'Tue Jan 23 13:26:17 2018'
author: 'Niels van Hoor'
}
{
commit: 'b6af600'
date: 'Tue Jan 23 13:23:57 2018'
author: 'Edwin van Rijkom'
}
{
commit: 'f6146fa'
date: 'Mon Jan 22 16:57:11 2018'
author: 'Niels van Hoor'
}
{
commit: 'c3e2c64'
date: 'Mon Jan 22 16:56:59 2018'
author: 'Niels van Hoor'
}
{
commit: '97d82c5'
date: 'Mon Jan 22 16:16:30 2018'
author: 'Niels van Hoor'
}
{
commit: '7803f03'
date: 'Mon Jan 22 13:57:39 2018'
author: 'Niels van Hoor'
}
{
commit: 'a15f981'
date: 'Fri Jan 19 17:38:50 2018'
author: 'Niels van Hoor'
}
{
commit: '3118f37'
date: 'Fri Jan 19 16:39:35 2018'
author: 'Niels van Hoor'
}
{
commit: '5ae3756'
date: 'Fri Jan 19 16:39:24 2018'
author: 'Niels van Hoor'
}
{
commit: '3b115bf'
date: 'Fri Jan 19 13:51:22 2018'
author: 'Niels van Hoor'
}
{
commit: 'd35606e'
date: 'Fri Jan 19 13:45:08 2018'
author: 'Niels van Hoor'
}
{
commit: 'ee99adf'
date: 'Fri Jan 19 13:02:30 2018'
author: 'Niels van Hoor'
}
{
commit: '55a881b'
date: 'Fri Jan 19 12:29:14 2018'
author: 'Niels van Hoor'
}
{
commit: '875e2ba'
date: 'Fri Jan 19 12:15:26 2018'
author: 'Niels van Hoor'
}
{
commit: 'c404ded'
date: 'Fri Jan 19 12:07:22 2018'
author: 'Niels van Hoor'
}
{
commit: '37e97cb'
date: 'Thu Jan 18 15:19:33 2018'
author: 'Edwin van Rijkom'
}
{
commit: 'c40be67'
date: 'Thu Jan 18 12:51:14 2018'
author: 'Niels van Hoor'
}
{
commit: '6cdc580'
date: 'Thu Jan 18 12:06:37 2018'
author: 'Niels van Hoor'
}
{
commit: '60e5d26'
date: 'Thu Jan 18 11:40:23 2018'
author: 'Niels van Hoor'
}
{
commit: '6d165dd'
date: 'Thu Jan 18 09:58:52 2018'
author: 'Niels van Hoor'
}
{
commit: 'fed1f13'
date: 'Wed Jan 17 16:56:16 2018'
author: 'Niels van Hoor'
}
{
commit: 'f89a6b9'
date: 'Wed Jan 17 13:23:55 2018'
author: 'Niels van Hoor'
}
{
commit: 'd19ad1e'
date: 'Wed Jan 17 12:27:32 2018'
author: 'Niels van Hoor'
}
{
commit: 'cea1e51'
date: 'Wed Jan 17 10:59:18 2018'
author: 'Niels van Hoor'
}
{
commit: 'ad45c89'
date: 'Wed Jan 17 10:58:52 2018'
author: 'Niels van Hoor'
}
{
commit: '6ffdfe9'
date: 'Wed Jan 17 09:53:15 2018'
author: 'Niels van Hoor'
}
{
commit: '10af20c'
date: 'Mon Jan 15 15:16:26 2018'
author: 'Edwin van Rijkom'
}
{
commit: 'd047b30'
date: 'Wed Jan 17 09:21:32 2018'
author: 'Niels van Hoor'
}
{
commit: '263476e'
date: 'Wed Jan 17 09:21:13 2018'
author: 'Niels van Hoor'
}
{
commit: '75be5e7'
date: 'Tue Jan 16 23:13:22 2018'
author: 'Niels van Hoor'
}
{
commit: '43fd2a4'
date: 'Tue Jan 16 23:12:40 2018'
author: 'Niels van Hoor'
}
{
commit: 'efbc233'
date: 'Tue Jan 16 17:28:16 2018'
author: 'Niels van Hoor'
}
{
commit: 'e80c465'
date: 'Tue Jan 16 14:16:21 2018'
author: 'Niels van Hoor'
}
{
commit: '1aec23d'
date: 'Tue Jan 16 14:15:50 2018'
author: 'Niels van Hoor'
}
{
commit: 'e3a10c8'
date: 'Tue Jan 16 12:39:11 2018'
author: 'Onne'
}
{
commit: 'ca62c90'
date: 'Tue Jan 16 11:01:50 2018'
author: 'Niels van Hoor'
}
{
commit: '42cc2a3'
date: 'Mon Jan 15 18:26:31 2018'
author: 'Niels van Hoor'
}
{
commit: 'a0030c0'
date: 'Mon Jan 15 16:22:43 2018'
author: 'Niels van Hoor'
}
{
commit: 'a909132'
date: 'Mon Jan 15 14:27:18 2018'
author: 'Niels van Hoor'
}
{
commit: '07ac667'
date: 'Mon Jan 15 14:26:59 2018'
author: 'Niels van Hoor'
}
{
commit: '3413e12'
date: 'Mon Jan 15 14:22:27 2018'
author: 'Niels van Hoor'
}
{
commit: '00df8de'
date: 'Mon Jan 15 14:22:14 2018'
author: 'Niels van Hoor'
}
{
commit: 'd90e148'
date: 'Mon Jan 15 12:39:26 2018'
author: 'Niels van Hoor'
}
{
commit: '6fd6c54'
date: 'Mon Jan 15 12:33:39 2018'
author: 'Niels van Hoor'
}
{
commit: '939b4df'
date: 'Mon Jan 15 12:21:22 2018'
author: 'Niels van Hoor'
}
{
commit: '206fb0e'
date: 'Thu Dec 21 14:46:04 2017'
author: 'Niels van Hoor'
}
{
commit: '3d6094a'
date: 'Tue Dec 19 23:07:35 2017'
author: 'Niels van Hoor'
}
{
commit: '7173b2d'
date: 'Tue Dec 19 23:07:17 2017'
author: 'Niels van Hoor'
}
{
commit: '549b1d0'
date: 'Tue Dec 19 22:22:35 2017'
author: 'Niels van Hoor'
}
{
commit: '3575b95'
date: 'Sun Dec 17 21:37:31 2017'
author: 'Niels van Hoor'
}
{
commit: '2aa68af'
date: 'Sun Dec 17 21:37:11 2017'
author: 'Niels van Hoor'
}
{
commit: '0821535'
date: 'Wed Dec 13 14:10:56 2017'
author: 'Niels van Hoor'
}
{
commit: 'a557f66'
date: 'Wed Dec 13 14:10:42 2017'
author: 'Niels van Hoor'
}
{
commit: 'a740c2d'
date: 'Tue Dec 12 18:12:26 2017'
author: 'Niels van Hoor'
}
{
commit: 'd6fd324'
date: 'Tue Dec 12 17:02:02 2017'
author: 'Niels van Hoor'
}
{
commit: '6f33b51'
date: 'Mon Dec 11 21:12:55 2017'
author: 'Niels van Hoor'
}
{
commit: 'c93bf72'
date: 'Mon Dec 11 14:18:31 2017'
author: 'Niels van Hoor'
}
{
commit: '651439b'
date: 'Fri Dec 8 12:26:06 2017'
author: 'Niels van Hoor'
}
{
commit: '5fbb70e'
date: 'Thu Dec 7 15:16:08 2017'
author: 'Niels van Hoor'
}
{
commit: '6e31668'
date: 'Thu Dec 7 15:17:53 2017'
author: 'Niels van Hoor'
}
{
commit: '58d7faf'
date: 'Fri Nov 17 16:20:40 2017'
author: 'Niels van Hoor'
}
{
commit: '021b060'
date: 'Wed Nov 15 23:51:41 2017'
author: 'Niels van Hoor'
}
{
commit: 'a206425'
date: 'Wed Nov 15 17:38:06 2017'
author: 'Niels van Hoor'
}
{
commit: 'd3ed9ec'
date: 'Wed Nov 15 17:13:43 2017'
author: 'Niels van Hoor'
}
{
commit: '4939336'
date: 'Wed Nov 15 13:48:23 2017'
author: 'Niels van Hoor'
}
{
commit: '863dc71'
date: 'Wed Nov 15 12:07:10 2017'
author: 'Niels van Hoor'
}
{
commit: 'f5a45ee'
date: 'Wed Nov 15 11:34:28 2017'
author: 'Niels van Hoor'
}
{
commit: '8d833db'
date: 'Wed Nov 15 10:28:16 2017'
author: 'Niels van Hoor'
}
{
commit: '613c07b'
date: 'Wed Nov 15 10:27:50 2017'
author: 'Niels van Hoor'
}
{
commit: 'd1ffabc'
date: 'Wed Nov 8 12:40:42 2017'
author: 'Niels van Hoor'
}
{
commit: '9dd8b13'
date: 'Wed Nov 8 11:36:59 2017'
author: 'Niels van Hoor'
}
{
commit: '76769d6'
date: 'Wed Nov 8 11:11:04 2017'
author: 'Niels van Hoor'
}
{
commit: 'b0ada03'
date: 'Wed Nov 8 10:16:47 2017'
author: 'Niels van Hoor'
}
{
commit: '87660fa'
date: 'Tue Nov 7 17:13:49 2017'
author: 'Niels van Hoor'
}
{
commit: '56878c5'
date: 'Tue Nov 7 14:03:36 2017'
author: 'Niels van Hoor'
}
{
commit: 'a4be0ab'
date: 'Tue Nov 7 13:59:00 2017'
author: 'Niels van Hoor'
}
{
commit: '7252e66'
date: 'Tue Nov 7 12:28:52 2017'
author: 'Niels van Hoor'
}
{
commit: 'a03d1f7'
date: 'Mon Nov 6 16:30:24 2017'
author: 'Niels van Hoor'
}
{
commit: 'f622504'
date: 'Sat Nov 4 22:07:07 2017'
author: 'Niels van Hoor'
}
{
commit: '7a25c71'
date: 'Sat Nov 4 21:16:05 2017'
author: 'Niels van Hoor'
}
{
commit: '2e42846'
date: 'Sat Nov 4 21:03:13 2017'
author: 'Niels van Hoor'
}
{
commit: '088eddd'
date: 'Sat Nov 4 20:11:11 2017'
author: 'Niels van Hoor'
}
{
commit: 'aa5e733'
date: 'Sat Nov 4 20:02:05 2017'
author: 'Niels van Hoor'
}
{
commit: '4a61eae'
date: 'Sat Nov 4 18:13:40 2017'
author: 'Niels van Hoor'
}
{
commit: '44d206b'
date: 'Sat Nov 4 17:26:20 2017'
author: 'Niels van Hoor'
}
{
commit: '04e95d1'
date: 'Sat Nov 4 15:01:26 2017'
author: 'Niels van Hoor'
}
{
commit: '975adc6'
date: 'Sat Nov 4 14:14:30 2017'
author: 'Niels van Hoor'
}
{
commit: '0f34f5c'
date: 'Fri Oct 27 15:58:47 2017'
author: 'Niels van Hoor'
}
{
commit: '54e9bcb'
date: 'Fri Oct 20 13:51:48 2017'
author: 'Niels van Hoor'
}
{
commit: '4af09bc'
date: 'Thu Oct 19 14:19:50 2017'
author: 'Niels van Hoor'
}
{
commit: '91579c4'
date: 'Thu Oct 19 14:10:17 2017'
author: 'Niels van Hoor'
}
{
commit: 'bac4ce9'
date: 'Mon Oct 9 10:44:22 2017'
author: 'Niels van Hoor'
}
{
commit: '0524216'
date: 'Mon Oct 9 09:00:51 2017'
author: 'Niels van Hoor'
}
{
commit: 'b274bf2'
date: 'Tue Oct 3 15:07:46 2017'
author: 'Niels van Hoor'
}
{
commit: 'e5c0723'
date: 'Mon Oct 2 13:32:43 2017'
author: 'Niels van Hoor'
}
{
commit: '2a91ec1'
date: 'Mon Oct 2 11:43:47 2017'
author: 'Niels van Hoor'
}
{
commit: 'f057af6'
date: 'Thu Sep 28 15:59:32 2017'
author: 'Niels van Hoor'
}
{
commit: '6e1d21b'
date: 'Thu Sep 28 12:48:05 2017'
author: 'Niels van Hoor'
}
{
commit: '86e248f'
date: 'Thu Sep 28 12:45:09 2017'
author: 'Niels van Hoor'
}
{
commit: 'df8942c'
date: 'Thu Sep 28 12:44:30 2017'
author: 'Niels van Hoor'
}
{
commit: '790e32e'
date: 'Thu Sep 28 11:33:03 2017'
author: 'Eelco Lempsink'
}
{
commit: 'a61783d'
date: 'Tue Sep 19 14:11:22 2017'
author: 'Niels van Hoor'
}
{
commit: '266f591'
date: 'Mon Sep 18 17:53:43 2017'
author: 'Niels van Hoor'
}
{
commit: 'f14b32f'
date: 'Mon Sep 18 16:28:25 2017'
author: 'Niels van Hoor'
}
{
commit: '7f67e44'
date: 'Mon Sep 18 10:59:14 2017'
author: 'Niels van Hoor'
}
{
commit: 'a3b0c61'
date: 'Mon Sep 18 10:34:53 2017'
author: 'Niels van Hoor'
}
{
commit: 'a32d208'
date: 'Mon Sep 18 10:04:56 2017'
author: 'Niels van Hoor'
}
{
commit: '70e3606'
date: 'Wed Sep 13 16:56:00 2017'
author: 'Niels van Hoor'
}
{
commit: 'adda6dd'
date: 'Wed Sep 13 16:13:06 2017'
author: 'Niels van Hoor'
}
{
commit: '5b6867b'
date: 'Wed Sep 13 11:52:14 2017'
author: 'Eelco Lempsink'
}
{
commit: '670822c'
date: 'Tue Sep 12 11:47:22 2017'
author: 'Niels van Hoor'
}
{
commit: '16ca97e'
date: 'Mon Sep 11 23:25:30 2017'
author: 'Eelco Lempsink'
}
{
commit: '5745d60'
date: 'Mon Sep 11 12:12:47 2017'
author: 'Eelco Lempsink'
}
{
commit: '9f0dc99'
date: 'Fri Sep 8 09:17:43 2017'
author: 'Niels van Hoor'
}
{
commit: 'e399eba'
date: 'Fri Sep 8 09:17:28 2017'
author: 'Niels van Hoor'
}
{
commit: '98a128c'
date: 'Fri Sep 8 09:03:01 2017'
author: 'Niels van Hoor'
}
{
commit: '53fd1ee'
date: 'Fri Sep 8 09:02:18 2017'
author: 'Niels van Hoor'
}
{
commit: '7845534'
date: 'Fri Sep 8 09:01:52 2017'
author: 'Niels van Hoor'
}
{
commit: '9d9f59d'
date: 'Fri Sep 8 08:24:28 2017'
author: 'Niels van Hoor'
}
{
commit: '48e7b78'
date: 'Thu Sep 7 18:42:20 2017'
author: 'Niels van Hoor'
}
{
commit: '865771b'
date: 'Thu Sep 7 18:28:29 2017'
author: 'Niels van Hoor'
}
{
commit: '3ec195f'
date: 'Thu Sep 7 18:27:27 2017'
author: 'Niels van Hoor'
}
{
commit: 'd0a2b52'
date: 'Tue Sep 5 14:33:56 2017'
author: 'Niels van Hoor'
}
{
commit: 'ac2d0b2'
date: 'Tue Sep 5 14:18:53 2017'
author: 'Niels van Hoor'
}
{
commit: '7994d36'
date: 'Tue Sep 5 12:28:17 2017'
author: 'Niels van Hoor'
}
{
commit: '242f61d'
date: 'Tue Sep 5 11:50:03 2017'
author: 'Niels van Hoor'
}
{
commit: 'fa5ddfa'
date: 'Fri Sep 1 11:21:38 2017'
author: 'Niels van Hoor'
}
{
commit: 'e982caf'
date: 'Fri Sep 1 10:21:58 2017'
author: 'Niels van Hoor'
}
{
commit: '4fdc139'
date: 'Tue Aug 29 16:07:45 2017'
author: 'Koen Bok'
}
{
commit: '0c8e7c2'
date: 'Tue Aug 29 15:18:55 2017'
author: 'Koen Bok'
}
{
commit: 'e0abfdf'
date: 'Tue Aug 29 13:52:24 2017'
author: 'Onne Gorte'
}
{
commit: '76592c1'
date: 'Tue Aug 29 13:49:51 2017'
author: 'Koen Bok'
}
{
commit: '1d564f1'
date: 'Thu Aug 24 14:12:16 2017'
author: 'Niels van Hoor'
}
{
commit: 'd526e51'
date: 'Thu Aug 24 13:02:48 2017'
author: 'Niels van Hoor'
}
{
commit: 'a9c2efe'
date: 'Wed Aug 23 13:11:23 2017'
author: 'Niels van Hoor'
}
{
commit: 'ee120e5'
date: 'Thu Jul 27 12:16:28 2017'
author: 'Onne Gorte'
}
{
commit: 'b6b792c'
date: 'Thu Jul 27 11:51:19 2017'
author: 'Onne Gorte'
}
{
commit: '25a34cf'
date: 'Thu Jul 27 08:46:01 2017'
author: 'Onne Gorte'
}
{
commit: '6bc3024'
date: 'Thu Jul 27 08:25:49 2017'
author: 'Onne Gorte'
}
{
commit: 'f75a057'
date: 'Mon Jul 24 17:02:14 2017'
author: 'Jonas Treu'
}
{
commit: 'fcf05a7'
date: 'Mon Jul 24 17:01:49 2017'
author: 'Jonas Treu'
}
{
commit: '76ca229'
date: 'Mon Jul 24 17:01:24 2017'
author: 'Jonas Treu'
}
{
commit: 'd231231'
date: 'Mon Jul 24 11:29:59 2017'
author: 'Jonas Treu'
}
{
commit: 'bb2ef14'
date: 'Mon Jul 24 11:13:46 2017'
author: 'Jonas Treu'
}
{
commit: '369b1c8'
date: 'Sat Jul 22 01:12:11 2017'
author: 'Niels van Hoor'
}
{
commit: '32df3a8'
date: 'Fri Jul 21 16:11:21 2017'
author: 'Niels van Hoor'
}
{
commit: '212874c'
date: 'Fri Jul 21 16:11:08 2017'
author: 'Niels van Hoor'
}
{
commit: '72ed711'
date: 'Fri Jul 21 13:33:04 2017'
author: 'Niels van Hoor'
}
{
commit: 'b39ecb6'
date: 'Fri Jul 21 13:32:51 2017'
author: 'Niels van Hoor'
}
{
commit: '364ccbf'
date: 'Fri Jul 21 12:06:19 2017'
author: 'Niels van Hoor'
}
{
commit: '9ab9796'
date: 'Fri Jul 21 10:09:01 2017'
author: 'Niels van Hoor'
}
{
commit: '4059d42'
date: 'Fri Jul 21 11:18:30 2017'
author: 'Niels van Hoor'
}
{
commit: '60c2b63'
date: 'Fri Jul 21 10:47:00 2017'
author: 'Niels van Hoor'
}
{
commit: '37d64cf'
date: 'Fri Jul 21 09:22:45 2017'
author: 'Niels van Hoor'
}
{
commit: 'f1e64b9'
date: 'Thu Jul 20 19:59:08 2017'
author: 'Niels van Hoor'
}
{
commit: '6c82841'
date: 'Thu Jul 20 19:54:44 2017'
author: 'Niels van Hoor'
}
{
commit: '4198d50'
date: 'Thu Jul 20 17:09:14 2017'
author: 'Niels van Hoor'
}
{
commit: 'a30cfcb'
date: 'Wed Jul 19 16:32:27 2017'
author: 'Niels van Hoor'
}
{
commit: 'eac82f2'
date: 'Wed Jul 19 15:11:59 2017'
author: 'Niels van Hoor'
}
{
commit: '9f17f49'
date: 'Wed Jul 19 10:08:58 2017'
author: 'Niels van Hoor'
}
{
commit: '0343ada'
date: 'Wed Jul 19 10:08:42 2017'
author: 'Niels van Hoor'
}
{
commit: 'eb512c2'
date: 'Mon Jul 17 18:31:05 2017'
author: 'Niels van Hoor'
}
{
commit: 'bac9d60'
date: 'Mon Jul 17 18:15:07 2017'
author: 'Niels van Hoor'
}
{
commit: '962b105'
date: 'Mon Jul 17 15:18:51 2017'
author: 'Niels van Hoor'
}
{
commit: '9fe65a5'
date: 'Fri Jul 14 15:42:40 2017'
author: 'Niels van Hoor'
}
{
commit: '32da80b'
date: 'Fri Jul 14 15:42:31 2017'
author: 'Niels van Hoor'
}
{
commit: '60674ee'
date: 'Fri Jul 14 13:21:32 2017'
author: 'Niels van Hoor'
}
{
commit: '026c816'
date: 'Thu Jul 13 19:30:51 2017'
author: 'Niels van Hoor'
}
{
commit: 'da39dc3'
date: 'Thu Jul 13 16:13:36 2017'
author: 'Niels van Hoor'
}
{
commit: 'fc6bd36'
date: 'Thu Jul 13 16:03:37 2017'
author: 'Niels van Hoor'
}
{
commit: '3c5bbb6'
date: 'Thu Jul 13 15:54:33 2017'
author: 'Niels van Hoor'
}
{
commit: 'c7d4cda'
date: 'Thu Jul 13 14:43:16 2017'
author: 'Niels van Hoor'
}
{
commit: '6229106'
date: 'Thu Jul 13 14:39:49 2017'
author: 'Niels van Hoor'
}
{
commit: '7f807e6'
date: 'Wed Jul 12 14:12:24 2017'
author: 'Niels van Hoor'
}
{
commit: '9e9ce72'
date: 'Wed Jul 12 11:43:39 2017'
author: 'Niels van Hoor'
}
{
commit: 'b0ae258'
date: 'Wed Jul 12 11:42:46 2017'
author: 'Niels van Hoor'
}
{
commit: '0697f93'
date: 'Tue Jul 11 22:22:37 2017'
author: 'Niels van Hoor'
}
{
commit: '4ce455d'
date: 'Tue Jul 11 17:41:08 2017'
author: 'Niels van Hoor'
}
{
commit: 'b1cb2ca'
date: 'Tue Jul 11 15:57:46 2017'
author: 'Jonas Treu'
}
{
commit: '687d7c2'
date: 'Fri Jul 7 17:05:06 2017'
author: 'Niels van Hoor'
}
{
commit: '1cb646c'
date: 'Fri Jul 7 15:38:14 2017'
author: 'Niels van Hoor'
}
{
commit: '5317cbc'
date: 'Thu Jul 6 14:30:48 2017'
author: 'Eelco Lempsink'
}
{
commit: 'e1e8316'
date: 'Thu Jul 6 13:29:28 2017'
author: 'Eelco Lempsink'
}
{
commit: '6e48e05'
date: 'Thu Jul 6 11:29:24 2017'
author: 'Jonas Treu'
}
{
commit: 'b3764ff'
date: 'Wed Jul 5 10:23:51 2017'
author: 'Eelco Lempsink'
}
{
commit: '3430da7'
date: 'Fri Jun 30 11:48:52 2017'
author: 'Eelco Lempsink'
}
{
commit: '635adc9'
date: 'Thu Jun 29 16:25:35 2017'
author: 'Jonas Treu'
}
{
commit: 'dee606e'
date: 'Thu Jun 29 16:21:26 2017'
author: 'Jonas Treu'
}
{
commit: '338f840'
date: 'Thu Jun 29 16:20:31 2017'
author: 'Jonas Treu'
}
{
commit: 'cfa6db3'
date: 'Wed Jun 28 18:18:17 2017'
author: 'Eelco Lempsink'
}
{
commit: '272bb60'
date: 'Wed Jun 28 12:35:01 2017'
author: 'Jonas Treu'
}
{
commit: 'ae0b628'
date: 'Wed Jun 28 05:26:16 2017'
author: 'Eelco Lempsink'
}
{
commit: '0d40d08'
date: 'Tue Jun 27 14:06:46 2017'
author: 'Jonas Treu'
}
{
commit: '5b4c3bc'
date: 'Tue Jun 27 13:50:53 2017'
author: 'Jonas Treu'
}
{
commit: '8e73cb1'
date: 'Tue Jun 27 13:46:57 2017'
author: 'Jonas Treu'
}
{
commit: '16e23b2'
date: 'Tue Jun 27 11:29:44 2017'
author: 'Jonas Treu'
}
{
commit: 'a23f50d'
date: 'Tue Jun 20 16:40:23 2017'
author: 'Eelco Lempsink'
}
{
commit: '7aacaa8'
date: 'Tue Jun 20 16:38:59 2017'
author: 'Eelco Lempsink'
}
{
commit: '6d4efa6'
date: 'Tue Jun 20 14:37:03 2017'
author: 'Eelco Lempsink'
}
{
commit: 'f32e84e'
date: 'Tue Jun 13 16:47:36 2017'
author: 'Jonas Treu'
}
{
commit: '9e3aeda'
date: 'Tue Jun 13 16:39:01 2017'
author: 'Jonas Treu'
}
{
commit: '8df882d'
date: 'Tue Jun 13 13:20:08 2017'
author: 'Jonas Treu'
}
{
commit: '34d048e'
date: 'Mon Jun 12 22:36:48 2017'
author: 'Jonas Treu'
}
{
commit: 'e65f583'
date: 'Mon Jun 12 16:57:22 2017'
author: 'Jonas Treu'
}
{
commit: '714bc24'
date: 'Mon Jun 12 16:54:50 2017'
author: 'Jonas Treu'
}
{
commit: 'bfaf154'
date: 'Mon Jun 12 16:33:01 2017'
author: 'Jonas Treu'
}
{
commit: '7cea533'
date: 'Mon Jun 12 16:06:24 2017'
author: 'Jonas Treu'
}
{
commit: 'cc7d5b1'
date: 'Mon Jun 12 13:59:33 2017'
author: 'Jonas Treu'
}
{
commit: '289e98b'
date: 'Mon Jun 12 13:38:01 2017'
author: 'Jonas Treu'
}
{
commit: '3fb7b5d'
date: 'Mon Jun 12 11:56:50 2017'
author: 'Jonas Treu'
}
{
commit: '266546d'
date: 'Mon Jun 12 11:56:19 2017'
author: 'Jonas Treu'
}
{
commit: 'e42c966'
date: 'Wed Jun 28 17:24:09 2017'
author: 'Niels van Hoor'
}
{
commit: 'eaca3b9'
date: 'Wed Jun 28 17:23:23 2017'
author: 'Niels van Hoor'
}
{
commit: '8c43ee2'
date: 'Wed Jun 28 17:22:09 2017'
author: 'Niels van Hoor'
}
{
commit: '4db0275'
date: 'Wed Jun 28 17:21:30 2017'
author: 'Niels van Hoor'
}
{
commit: '950b059'
date: 'Sat Jun 24 17:22:04 2017'
author: 'Eelco Lempsink'
}
{
commit: 'fd7851c'
date: 'Thu Jun 22 13:45:44 2017'
author: 'Onne Gorte'
}
{
commit: 'ec9817d'
date: 'Fri Jun 16 12:22:03 2017'
author: 'Eelco Lempsink'
}
{
commit: '280d7df'
date: 'Thu Jun 15 11:49:07 2017'
author: 'Niels van Hoor'
}
{
commit: '8ca7f3c'
date: 'Thu Jun 15 11:49:00 2017'
author: 'Niels van Hoor'
}
{
commit: '76f4f25'
date: 'Thu Apr 27 14:05:59 2017'
author: 'Marc Krenn'
}
{
commit: 'dc0728f'
date: 'Thu Jun 15 10:46:05 2017'
author: 'Niels van Hoor'
}
{
commit: '86844fd'
date: 'Thu Jun 15 10:41:46 2017'
author: 'Eelco Lempsink'
}
{
commit: '0c93b70'
date: 'Thu Jun 15 10:07:14 2017'
author: 'Niels van Hoor'
}
{
commit: '582d861'
date: 'Wed Jun 14 17:00:22 2017'
author: 'Niels van Hoor'
}
{
commit: '67e7f9e'
date: 'Mon Jun 12 18:10:38 2017'
author: 'Eelco Lempsink'
}
{
commit: 'e5f8f39'
date: 'Mon Jun 12 17:58:51 2017'
author: 'Eelco Lempsink'
}
{
commit: '580846c'
date: 'Fri Jun 9 16:58:36 2017'
author: 'Niels van Hoor'
}
{
commit: 'c9450ed'
date: 'Fri Jun 9 14:27:35 2017'
author: 'Niels van Hoor'
}
{
commit: 'd511624'
date: 'Wed Jun 7 14:25:06 2017'
author: 'Eelco Lempsink'
}
{
commit: '7b8793e'
date: 'Fri Jun 2 10:59:56 2017'
author: 'Eelco Lempsink'
}
{
commit: 'f52c855'
date: 'Wed May 31 17:08:25 2017'
author: 'Jurre Houtkamp'
}
{
commit: '0ee68bd'
date: 'Wed May 31 15:27:31 2017'
author: 'Niels van Hoor'
}
{
commit: '2c2404c'
date: 'Wed Apr 26 16:55:20 2017'
author: 'Jurre Houtkamp'
}
{
commit: '812dda3'
date: 'Wed Apr 26 14:29:54 2017'
author: 'Niels van Hoor'
}
{
commit: 'cc24de1'
date: 'Tue Apr 25 19:10:50 2017'
author: 'Niels van Hoor'
}
{
commit: 'd083f4e'
date: 'Tue Apr 25 15:25:50 2017'
author: 'Koen Bok'
}
{
commit: 'ce38df8'
date: 'Tue Apr 25 15:20:58 2017'
author: 'Koen Bok'
}
{
commit: '2aedf22'
date: 'Tue Apr 25 15:13:29 2017'
author: 'Koen Bok'
}
{
commit: 'bce05cc'
date: 'Mon Apr 24 11:36:11 2017'
author: 'Niels van Hoor'
}
{
commit: '63333cf'
date: 'Fri Apr 21 11:21:24 2017'
author: 'Eelco Lempsink'
}
{
commit: 'fc5f1f0'
date: 'Thu Apr 20 17:20:06 2017'
author: 'Eelco Lempsink'
}
{
commit: '2979f9b'
date: 'Tue Apr 18 13:49:54 2017'
author: 'Edwin van Rijkom'
}
{
commit: '376bc04'
date: 'Sun Apr 16 13:45:28 2017'
author: 'Niels van Hoor'
}
{
commit: 'b9956e9'
date: 'Sun Apr 16 13:07:02 2017'
author: 'Niels van Hoor'
}
{
commit: '1e0c2ce'
date: 'Wed Apr 12 18:40:52 2017'
author: 'Niels van Hoor'
}
{
commit: '17d75f7'
date: 'Thu Mar 30 17:19:19 2017'
author: 'Edwin van Rijkom'
}
{
commit: 'c80ec3b'
date: 'Thu Mar 30 13:07:58 2017'
author: 'Koen Bok'
}
{
commit: 'a4c59b4'
date: 'Wed Mar 29 22:05:33 2017'
author: 'Niels van Hoor'
}
{
commit: 'f506fc0'
date: 'Tue Mar 28 11:30:57 2017'
author: 'Eelco Lempsink'
}
{
commit: '413c18d'
date: 'Fri Mar 24 14:51:23 2017'
author: 'Niels van Hoor'
}
{
commit: '4428af1'
date: 'Fri Mar 24 10:54:57 2017'
author: 'Niels van Hoor'
}
{
commit: '095e0ba'
date: 'Fri Mar 24 08:50:42 2017'
author: 'Niels van Hoor'
}
{
commit: '7ab0a55'
date: 'Fri Mar 24 08:50:04 2017'
author: 'Niels van Hoor'
}
{
commit: 'f71bb1c'
date: 'Thu Mar 23 17:22:20 2017'
author: 'Niels van Hoor'
}
{
commit: 'f989af9'
date: 'Thu Mar 23 14:40:07 2017'
author: 'Niels van Hoor'
}
{
commit: 'e5a244c'
date: 'Thu Mar 23 14:23:34 2017'
author: 'Niels van Hoor'
}
{
commit: 'ee976ab'
date: 'Thu Mar 23 14:23:12 2017'
author: 'Niels van Hoor'
}
{
commit: 'cd53f5d'
date: 'Thu Mar 23 14:22:47 2017'
author: 'Niels van Hoor'
}
{
commit: 'ed66ccb'
date: 'Thu Mar 23 14:18:57 2017'
author: 'Niels van Hoor'
}
{
commit: '1f2524b'
date: 'Wed Mar 22 20:25:18 2017'
author: 'Niels van Hoor'
}
{
commit: '6ddff8f'
date: 'Wed Mar 22 20:25:01 2017'
author: 'Niels van Hoor'
}
{
commit: '570aa20'
date: 'Wed Mar 22 17:11:12 2017'
author: 'Niels van Hoor'
}
{
commit: '1c7d70d'
date: 'Wed Mar 22 17:10:41 2017'
author: 'Niels van Hoor'
}
{
commit: 'af1246a'
date: 'Wed Mar 22 15:45:49 2017'
author: 'Benjamin'
}
{
commit: 'cdfc21c'
date: 'Mon Mar 20 15:45:47 2017'
author: 'Niels van Hoor'
}
{
commit: '6e1ae6a'
date: 'Mon Mar 20 15:45:33 2017'
author: 'Niels van Hoor'
}
{
commit: '8b3d89a'
date: 'Mon Mar 20 11:48:40 2017'
author: 'Niels van Hoor'
}
{
commit: 'd778b25'
date: 'Sat Mar 11 11:34:09 2017'
author: 'Koen Bok'
}
{
commit: '4c68120'
date: 'Sat Mar 11 11:26:52 2017'
author: 'Koen Bok'
}
{
commit: '8774421'
date: 'Fri Mar 10 17:06:32 2017'
author: 'Niels van Hoor'
}
{
commit: '9bc528c'
date: 'Fri Mar 10 16:36:18 2017'
author: 'Koen Bok'
}
{
commit: '475253a'
date: 'Fri Mar 10 15:59:50 2017'
author: 'Niels van Hoor'
}
{
commit: 'fc55495'
date: 'Thu Mar 9 18:11:30 2017'
author: 'Niels van Hoor'
}
{
commit: '31c72c4'
date: 'Thu Mar 9 17:22:06 2017'
author: 'Niels van Hoor'
}
{
commit: '4c3321f'
date: 'Thu Mar 9 17:06:42 2017'
author: 'Niels van Hoor'
}
{
commit: '3ee7a06'
date: 'Thu Mar 9 17:05:09 2017'
author: 'Niels van Hoor'
}
{
commit: '6281e9b'
date: 'Wed Mar 8 14:22:13 2017'
author: 'Benjamin den Boe'
}
{
commit: '2cad79b'
date: 'Wed Mar 8 10:34:50 2017'
author: 'Niels van Hoor'
}
{
commit: '44cef0d'
date: 'Tue Mar 7 16:33:52 2017'
author: 'Benjamin den Boe'
}
{
commit: 'd247967'
date: 'Tue Mar 7 16:29:17 2017'
author: 'Benjamin den Boe'
}
{
commit: 'e6fb645'
date: 'Tue Mar 7 16:25:03 2017'
author: 'Benjamin den Boe'
}
{
commit: 'b9be679'
date: 'Tue Mar 7 16:17:33 2017'
author: 'Benjamin den Boe'
}
{
commit: '3778d6e'
date: 'Tue Mar 7 15:57:12 2017'
author: 'Benjamin den Boe'
}
{
commit: '4408f89'
date: 'Tue Mar 7 15:45:10 2017'
author: 'Benjamin den Boe'
}
{
commit: 'db7aea1'
date: 'Tue Mar 7 00:06:02 2017'
author: 'Niels van Hoor'
}
{
commit: '9223f5c'
date: 'Fri Mar 3 15:50:44 2017'
author: 'Benjamin den Boe'
}
{
commit: '6d64ea9'
date: 'Thu Mar 2 13:55:25 2017'
author: 'Benjamin den Boe'
}
{
commit: '82f1159'
date: 'Thu Mar 2 11:05:00 2017'
author: 'Benjamin den Boe'
}
{
commit: 'b9bddcb'
date: 'Tue Feb 28 12:29:54 2017'
author: 'Koen Bok'
}
{
commit: '6d899a4'
date: 'Tue Feb 28 12:29:28 2017'
author: 'Koen Bok'
}
{
commit: '73f35e5'
date: 'Tue Feb 28 12:28:48 2017'
author: 'Koen Bok'
}
{
commit: '66eb6d4'
date: 'Wed Feb 22 12:08:13 2017'
author: 'Niels van Hoor'
}
{
commit: '8db1dc8'
date: 'Fri Feb 17 11:51:38 2017'
author: 'Niels van Hoor'
}
{
commit: '9d08997'
date: 'Fri Feb 17 11:51:24 2017'
author: 'Niels van Hoor'
}
{
commit: '6d8bc7e'
date: 'Wed Feb 15 16:49:27 2017'
author: 'Niels van Hoor'
}
{
commit: 'eb38604'
date: 'Wed Feb 15 16:34:07 2017'
author: 'Niels van Hoor'
}
{
commit: 'e1ddd6f'
date: 'Wed Feb 15 16:15:06 2017'
author: 'Niels van Hoor'
}
{
commit: '31c303b'
date: 'Wed Feb 15 16:13:03 2017'
author: 'Niels van Hoor'
}
{
commit: '2f2e5d3'
date: 'Wed Feb 15 16:11:42 2017'
author: 'Niels van Hoor'
}
{
commit: '0b17a69'
date: 'Mon Feb 13 16:49:01 2017'
author: 'Niels van Hoor'
}
{
commit: '5814544'
date: 'Sat Feb 11 00:20:04 2017'
author: 'Niels van Hoor'
}
{
commit: 'd50c3f8'
date: 'Fri Feb 10 23:41:42 2017'
author: 'Niels van Hoor'
}
{
commit: 'ad9fa7e'
date: 'Tue Jan 24 10:03:01 2017'
author: 'Niels van Hoor'
}
{
commit: 'f4db660'
date: 'Thu Jan 19 12:02:23 2017'
author: 'Eelco Lempsink'
}
{
commit: 'f3a1c68'
date: 'Mon Jan 2 10:58:53 2017'
author: 'Onne'
}
{
commit: '3821f65'
date: 'Fri Dec 23 14:21:05 2016'
author: 'Eelco Lempsink'
}
{
commit: '386a97a'
date: 'Fri Dec 23 13:31:45 2016'
author: 'Niels van Hoor'
}
{
commit: '97a28ce'
date: 'Thu Dec 15 16:16:05 2016'
author: 'Niels van Hoor'
}
{
commit: '876ed67'
date: 'Thu Dec 15 16:10:08 2016'
author: 'Niels van Hoor'
}
{
commit: 'ecb4b1c'
date: 'Thu Dec 15 16:09:29 2016'
author: 'Niels van Hoor'
}
{
commit: 'cc7f6c8'
date: 'Thu Dec 15 16:08:58 2016'
author: 'Niels van Hoor'
}
{
commit: 'd8c976d'
date: 'Thu Dec 15 13:51:08 2016'
author: 'Niels van Hoor'
}
{
commit: '8efe84b'
date: 'Thu Dec 8 13:14:41 2016'
author: 'Eelco Lempsink'
}
{
commit: '957efc8'
date: 'Thu Dec 8 12:19:06 2016'
author: 'Eelco Lempsink'
}
{
commit: 'e9d7ba1'
date: 'Thu Dec 1 17:06:50 2016'
author: 'Koen Bok'
}
{
commit: '239131b'
date: 'Thu Dec 1 17:04:12 2016'
author: 'Koen Bok'
}
{
commit: 'e10cdbb'
date: 'Thu Dec 1 15:01:55 2016'
author: 'Koen Bok'
}
{
commit: 'f1ee215'
date: 'Thu Dec 1 14:18:25 2016'
author: 'Koen Bok'
}
{
commit: '73363ec'
date: 'Thu Dec 1 14:15:10 2016'
author: 'Koen Bok'
}
{
commit: '30bfb04'
date: 'Thu Dec 1 14:08:20 2016'
author: 'Koen Bok'
}
{
commit: '49f4ce1'
date: 'Thu Dec 1 12:30:26 2016'
author: 'Koen Bok'
}
{
commit: 'c97d630'
date: 'Thu Dec 1 11:54:35 2016'
author: 'Koen Bok'
}
{
commit: '367de0b'
date: 'Thu Dec 1 11:16:00 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a65ffb2'
date: 'Wed Nov 30 23:44:26 2016'
author: 'Koen Bok'
}
{
commit: '13e72ba'
date: 'Wed Nov 30 23:29:15 2016'
author: 'Koen Bok'
}
{
commit: '0894861'
date: 'Wed Nov 30 17:26:06 2016'
author: 'Koen Bok'
}
{
commit: '93705be'
date: 'Wed Nov 30 16:02:50 2016'
author: 'Koen Bok'
}
{
commit: '8276a2f'
date: 'Wed Nov 30 15:27:04 2016'
author: 'Koen Bok'
}
{
commit: '0de5c81'
date: 'Wed Nov 30 15:26:28 2016'
author: 'Koen Bok'
}
{
commit: 'f7330d4'
date: 'Wed Nov 30 15:21:09 2016'
author: 'Koen Bok'
}
{
commit: '5b5fa2f'
date: 'Wed Nov 30 14:30:43 2016'
author: 'Koen Bok'
}
{
commit: '64e9597'
date: 'Mon Nov 28 15:32:52 2016'
author: 'Koen Bok'
}
{
commit: '9eeeb09'
date: 'Fri Nov 25 10:53:02 2016'
author: 'Onne Gorte'
}
{
commit: '13d5193'
date: 'Thu Nov 24 12:40:29 2016'
author: 'Onne Gorte'
}
{
commit: '1b11b42'
date: 'Thu Nov 24 17:56:59 2016'
author: 'Niels van Hoor'
}
{
commit: '2ddfe04'
date: 'Thu Nov 24 17:56:44 2016'
author: 'Niels van Hoor'
}
{
commit: '31b9b31'
date: 'Wed Nov 23 18:17:59 2016'
author: 'Niels van Hoor'
}
{
commit: 'b6fbac8'
date: 'Wed Nov 23 17:27:11 2016'
author: 'Niels van Hoor'
}
{
commit: '50b9e61'
date: 'Wed Nov 23 17:26:43 2016'
author: 'Niels van Hoor'
}
{
commit: 'f2d6d77'
date: 'Tue Nov 22 20:46:07 2016'
author: 'Eelco Lempsink'
}
{
commit: '26d9511'
date: 'Tue Nov 22 17:59:10 2016'
author: 'Eelco Lempsink'
}
{
commit: '9d8b135'
date: 'Tue Nov 22 09:50:16 2016'
author: 'Eelco Lempsink'
}
{
commit: 'f8391b4'
date: 'Tue Nov 22 09:49:25 2016'
author: 'Eelco Lempsink'
}
{
commit: '6c4df14'
date: 'Tue Nov 22 09:47:23 2016'
author: 'Eelco Lempsink'
}
{
commit: '1cb21a3'
date: 'Wed Nov 16 11:33:54 2016'
author: 'Koen Bok'
}
{
commit: '9d86e5d'
date: 'Tue Nov 8 16:09:43 2016'
author: 'Koen Bok'
}
{
commit: '416640c'
date: 'Tue Nov 8 16:03:28 2016'
author: 'Koen Bok'
}
{
commit: '5901216'
date: 'Tue Nov 8 15:21:59 2016'
author: 'Koen Bok'
}
{
commit: '56a6d73'
date: 'Tue Nov 8 15:21:52 2016'
author: 'Koen Bok'
}
{
commit: '7de77b6'
date: 'Tue Nov 8 15:21:16 2016'
author: 'Koen Bok'
}
{
commit: 'ebdd729'
date: 'Mon Nov 7 17:25:16 2016'
author: 'Niels van Hoor'
}
{
commit: '8284723'
date: 'Mon Nov 7 11:59:38 2016'
author: 'Niels van Hoor'
}
{
commit: '95260da'
date: 'Mon Nov 7 09:37:25 2016'
author: 'Niels van Hoor'
}
{
commit: 'cfadfd6'
date: 'Fri Nov 4 14:43:14 2016'
author: 'Niels van Hoor'
}
{
commit: 'a7c60c6'
date: 'Fri Nov 4 14:42:04 2016'
author: 'Niels van Hoor'
}
{
commit: '8477984'
date: 'Thu Nov 3 15:12:32 2016'
author: 'Niels van Hoor'
}
{
commit: 'a4e959e'
date: 'Wed Nov 2 11:00:50 2016'
author: 'Niels van Hoor'
}
{
commit: '92c9d75'
date: 'Wed Nov 2 10:44:40 2016'
author: 'Niels van Hoor'
}
{
commit: '64cf695'
date: 'Wed Nov 2 10:41:29 2016'
author: 'Niels van Hoor'
}
{
commit: '7c96f4e'
date: 'Wed Nov 2 10:31:56 2016'
author: 'Niels van Hoor'
}
{
commit: '14420bf'
date: 'Wed Nov 2 09:26:50 2016'
author: 'Niels van Hoor'
}
{
commit: '03eaa69'
date: 'Wed Nov 2 09:23:14 2016'
author: 'Niels van Hoor'
}
{
commit: '3fcd7c7'
date: 'Wed Nov 2 09:19:29 2016'
author: 'Niels van Hoor'
}
{
commit: '248073d'
date: 'Thu Oct 27 18:13:57 2016'
author: 'Niels van Hoor'
}
{
commit: 'ff34779'
date: 'Thu Oct 27 18:13:47 2016'
author: 'Niels van Hoor'
}
{
commit: 'bdd0656'
date: 'Thu Oct 27 18:13:27 2016'
author: 'Niels van Hoor'
}
{
commit: '8b67479'
date: 'Wed Oct 19 12:57:18 2016'
author: 'Niels van Hoor'
}
{
commit: 'fb240f0'
date: 'Mon Oct 17 15:22:34 2016'
author: 'Niels van Hoor'
}
{
commit: '578bb81'
date: 'Mon Oct 17 15:22:10 2016'
author: 'Niels van Hoor'
}
{
commit: '7638378'
date: 'Mon Oct 17 14:38:04 2016'
author: 'Niels van Hoor'
}
{
commit: '4cef118'
date: 'Mon Oct 17 14:27:53 2016'
author: 'Niels van Hoor'
}
{
commit: 'ae20dc7'
date: 'Wed Oct 12 18:49:38 2016'
author: 'Niels van Hoor'
}
{
commit: '13eae19'
date: 'Wed Oct 12 00:27:48 2016'
author: 'Niels van Hoor'
}
{
commit: '8753f2a'
date: 'Tue Oct 11 15:44:48 2016'
author: 'Niels van Hoor'
}
{
commit: 'a8d00eb'
date: 'Tue Oct 11 15:39:34 2016'
author: 'Niels van Hoor'
}
{
commit: '61c5401'
date: 'Mon Oct 10 13:25:06 2016'
author: 'Eelco Lempsink'
}
{
commit: '81ca90f'
date: 'Mon Oct 10 12:15:10 2016'
author: 'Niels van Hoor'
}
{
commit: '27d0b00'
date: 'Mon Oct 10 12:12:21 2016'
author: 'Niels van Hoor'
}
{
commit: '641be2d'
date: 'Mon Oct 10 11:05:26 2016'
author: 'Niels van Hoor'
}
{
commit: '8027175'
date: 'Mon Oct 10 11:02:26 2016'
author: 'Niels van Hoor'
}
{
commit: '6384ea9'
date: 'Sat Oct 8 14:57:29 2016'
author: 'Niels van Hoor'
}
{
commit: '4af7e9a'
date: 'Fri Oct 7 17:40:29 2016'
author: 'Niels van Hoor'
}
{
commit: '35513f1'
date: 'Fri Oct 7 17:40:05 2016'
author: 'Niels van Hoor'
}
{
commit: 'a9e75d4'
date: 'Fri Oct 7 17:39:21 2016'
author: 'Niels van Hoor'
}
{
commit: '705546c'
date: 'Fri Oct 7 17:38:59 2016'
author: 'Niels van Hoor'
}
{
commit: '24d1b38'
date: 'Fri Oct 7 17:37:58 2016'
author: 'Niels van Hoor'
}
{
commit: '9f8be26'
date: 'Fri Oct 7 16:15:02 2016'
author: 'Niels van Hoor'
}
{
commit: 'bed29aa'
date: 'Thu Oct 6 22:10:42 2016'
author: 'Niels van Hoor'
}
{
commit: 'f2f00ea'
date: 'Thu Oct 6 22:07:40 2016'
author: 'Niels van Hoor'
}
{
commit: '137767c'
date: 'Thu Oct 6 19:07:52 2016'
author: 'Niels van Hoor'
}
{
commit: '16a1777'
date: 'Thu Oct 6 18:35:27 2016'
author: 'Niels van Hoor'
}
{
commit: 'e5584ac'
date: 'Thu Oct 6 15:10:08 2016'
author: 'Niels van Hoor'
}
{
commit: '554908c'
date: 'Thu Oct 6 14:24:09 2016'
author: 'Niels van Hoor'
}
{
commit: '6e5b932'
date: 'Thu Oct 6 08:24:02 2016'
author: 'Niels van Hoor'
}
{
commit: '1f18a6c'
date: 'Wed Oct 5 08:54:55 2016'
author: 'Niels van Hoor'
}
{
commit: '2a7ad6c'
date: 'Tue Oct 4 14:26:23 2016'
author: 'Niels van Hoor'
}
{
commit: 'ea73dcd'
date: 'Tue Oct 4 00:37:10 2016'
author: 'Niels van Hoor'
}
{
commit: 'a9c5455'
date: 'Tue Oct 4 00:34:19 2016'
author: 'Niels van Hoor'
}
{
commit: '32000eb'
date: 'Mon Oct 3 17:01:07 2016'
author: 'Niels van Hoor'
}
{
commit: 'c8f66f3'
date: 'Mon Oct 3 14:29:59 2016'
author: 'Niels van Hoor'
}
{
commit: '8695f71'
date: 'Mon Oct 3 13:46:11 2016'
author: 'Niels van Hoor'
}
{
commit: '69cc7a6'
date: 'Fri Sep 30 10:49:08 2016'
author: 'Niels van Hoor'
}
{
commit: '26585ca'
date: 'Fri Sep 30 10:42:41 2016'
author: 'Niels van Hoor'
}
{
commit: '5e2efb4'
date: 'Fri Sep 30 10:01:13 2016'
author: 'Eelco Lempsink'
}
{
commit: '47bbd9c'
date: 'Fri Sep 30 09:03:00 2016'
author: 'Niels van Hoor'
}
{
commit: 'db56e33'
date: 'Fri Sep 30 09:02:26 2016'
author: 'Niels van Hoor'
}
{
commit: '7e06984'
date: 'Fri Sep 30 09:02:10 2016'
author: 'Niels van Hoor'
}
{
commit: 'f027d89'
date: 'Fri Sep 30 09:02:00 2016'
author: 'Niels van Hoor'
}
{
commit: '28ebf17'
date: 'Fri Sep 30 09:01:21 2016'
author: 'Niels van Hoor'
}
{
commit: '2f6e520'
date: 'Thu Sep 29 22:23:55 2016'
author: 'Niels van Hoor'
}
{
commit: 'e339750'
date: 'Thu Sep 29 16:27:48 2016'
author: 'Eelco Lempsink'
}
{
commit: '9b99341'
date: 'Thu Sep 29 14:31:37 2016'
author: 'Eelco Lempsink'
}
{
commit: 'ae6d992'
date: 'Thu Sep 29 13:55:24 2016'
author: 'Eelco Lempsink'
}
{
commit: 'b8fbf27'
date: 'Wed Sep 28 23:06:18 2016'
author: 'Niels van Hoor'
}
{
commit: 'aefab7c'
date: 'Thu Sep 22 09:41:28 2016'
author: 'Niels van Hoor'
}
{
commit: '3bf7734'
date: 'Thu Sep 22 09:40:41 2016'
author: 'Niels van Hoor'
}
{
commit: '2b07047'
date: 'Thu Sep 22 09:40:22 2016'
author: 'Niels van Hoor'
}
{
commit: '348fe2a'
date: 'Wed Sep 21 17:37:57 2016'
author: 'Niels van Hoor'
}
{
commit: '6c8224f'
date: 'Wed Sep 28 22:38:56 2016'
author: 'Niels van Hoor'
}
{
commit: '37245c3'
date: 'Wed Sep 28 11:11:47 2016'
author: 'Eelco Lempsink'
}
{
commit: '6c5600f'
date: 'Wed Sep 28 00:19:56 2016'
author: 'Niels van Hoor'
}
{
commit: '038d633'
date: 'Wed Sep 28 00:19:34 2016'
author: 'Niels van Hoor'
}
{
commit: '062f912'
date: 'Tue Sep 27 16:06:07 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd254681'
date: 'Tue Sep 27 13:47:34 2016'
author: 'Niels van Hoor'
}
{
commit: '3220f92'
date: 'Tue Sep 27 10:38:53 2016'
author: 'Niels van Hoor'
}
{
commit: 'ec784c9'
date: 'Mon Sep 26 21:53:34 2016'
author: 'Eelco Lempsink'
}
{
commit: '4b9442d'
date: 'Mon Sep 26 15:47:46 2016'
author: 'Eelco Lempsink'
}
{
commit: '1955a99'
date: 'Mon Sep 26 15:44:12 2016'
author: 'Eelco Lempsink'
}
{
commit: '20ae075'
date: 'Mon Sep 26 15:00:35 2016'
author: 'Koen Bok'
}
{
commit: '48cfcef'
date: 'Mon Sep 26 14:39:17 2016'
author: 'Koen Bok'
}
{
commit: '9b6ae19'
date: 'Mon Sep 26 14:36:08 2016'
author: 'Koen Bok'
}
{
commit: '03675e7'
date: 'Mon Sep 26 14:31:37 2016'
author: 'Koen Bok'
}
{
commit: '3a113df'
date: 'Mon Sep 26 09:57:37 2016'
author: 'Niels van Hoor'
}
{
commit: '99f0882'
date: 'Wed Sep 21 13:49:35 2016'
author: 'Koen Bok'
}
{
commit: '8d9f4ea'
date: 'Wed Sep 21 13:27:27 2016'
author: 'Koen Bok'
}
{
commit: '5b3372d'
date: 'Fri Sep 16 17:46:34 2016'
author: 'Koen Bok'
}
{
commit: '23a313a'
date: 'Thu Sep 15 15:36:54 2016'
author: 'Niels van Hoor'
}
{
commit: 'cd057d1'
date: 'Thu Sep 15 13:45:18 2016'
author: 'Niels van Hoor'
}
{
commit: 'aedf25d'
date: 'Thu Sep 15 13:26:52 2016'
author: 'Niels van Hoor'
}
{
commit: 'cd5cbfa'
date: 'Thu Sep 15 11:05:32 2016'
author: 'Niels van Hoor'
}
{
commit: '0970fe9'
date: 'Thu Sep 15 11:03:46 2016'
author: 'Niels van Hoor'
}
{
commit: '0b599de'
date: 'Thu Sep 15 11:03:21 2016'
author: 'Niels van Hoor'
}
{
commit: '15f8ce3'
date: 'Thu Sep 15 11:01:38 2016'
author: 'Niels van Hoor'
}
{
commit: '7e30a0f'
date: 'Thu Sep 15 10:28:33 2016'
author: 'Niels van Hoor'
}
{
commit: '6c644c5'
date: 'Thu Sep 15 10:28:05 2016'
author: 'Niels van Hoor'
}
{
commit: 'eac0aa8'
date: 'Wed Sep 14 17:25:19 2016'
author: 'Niels van Hoor'
}
{
commit: '32ac81b'
date: 'Wed Sep 14 17:25:10 2016'
author: 'Niels van Hoor'
}
{
commit: '8d510b0'
date: 'Wed Sep 14 17:24:55 2016'
author: 'Niels van Hoor'
}
{
commit: 'cc93537'
date: 'Wed Sep 14 17:24:40 2016'
author: 'Niels van Hoor'
}
{
commit: 'ec88f94'
date: 'Wed Sep 14 17:23:21 2016'
author: 'Niels van Hoor'
}
{
commit: '8c0992e'
date: 'Wed Sep 14 14:32:08 2016'
author: 'Niels van Hoor'
}
{
commit: '07040f2'
date: 'Wed Sep 14 14:17:50 2016'
author: 'Niels van Hoor'
}
{
commit: '40b20be'
date: 'Wed Sep 14 14:17:26 2016'
author: 'Niels van Hoor'
}
{
commit: '6c3e0ca'
date: 'Wed Sep 14 14:17:11 2016'
author: 'Niels van Hoor'
}
{
commit: 'd80347c'
date: 'Wed Sep 14 13:46:14 2016'
author: 'Niels van Hoor'
}
{
commit: 'cbd8339'
date: 'Wed Sep 14 13:25:14 2016'
author: 'Niels van Hoor'
}
{
commit: '38a56ff'
date: 'Wed Sep 14 10:05:42 2016'
author: 'Niels van Hoor'
}
{
commit: 'fab4a97'
date: 'Wed Sep 14 08:58:59 2016'
author: 'Niels van Hoor'
}
{
commit: '4500db2'
date: 'Tue Sep 13 21:08:17 2016'
author: 'Niels van Hoor'
}
{
commit: 'f79eafd'
date: 'Tue Sep 13 21:08:01 2016'
author: 'Niels van Hoor'
}
{
commit: '86c4fa9'
date: 'Tue Sep 13 18:18:10 2016'
author: 'Niels van Hoor'
}
{
commit: '209fec6'
date: 'Tue Sep 13 18:15:52 2016'
author: 'Niels van Hoor'
}
{
commit: '44253ef'
date: 'Tue Sep 13 18:15:20 2016'
author: 'Niels van Hoor'
}
{
commit: 'b94edd2'
date: 'Mon Sep 12 12:30:43 2016'
author: 'Niels van Hoor'
}
{
commit: '81120f0'
date: 'Mon Sep 12 12:30:02 2016'
author: 'Niels van Hoor'
}
{
commit: '5f9fd64'
date: 'Mon Sep 12 12:29:49 2016'
author: 'Niels van Hoor'
}
{
commit: '9e65920'
date: 'Mon Sep 12 12:29:05 2016'
author: 'Niels van Hoor'
}
{
commit: 'f777e28'
date: 'Mon Sep 12 11:43:41 2016'
author: 'Niels van Hoor'
}
{
commit: 'cb4555c'
date: 'Wed Aug 17 14:50:02 2016'
author: 'Niels van Hoor'
}
{
commit: 'afbff06'
date: 'Wed Aug 17 14:49:40 2016'
author: 'Niels van Hoor'
}
{
commit: 'f8e4c6b'
date: 'Tue Aug 16 11:15:20 2016'
author: 'Niels van Hoor'
}
{
commit: '080d69b'
date: 'Tue Aug 16 10:59:53 2016'
author: 'Niels van Hoor'
}
{
commit: '24a3a06'
date: 'Tue Aug 16 10:59:35 2016'
author: 'Niels van Hoor'
}
{
commit: 'ed0057e'
date: 'Tue Aug 16 10:53:08 2016'
author: 'Niels van Hoor'
}
{
commit: 'd5a82c2'
date: 'Tue Aug 16 10:52:50 2016'
author: 'Niels van Hoor'
}
{
commit: 'f9d6ef3'
date: 'Mon Aug 1 21:31:27 2016'
author: 'Niels van Hoor'
}
{
commit: 'e6df5eb'
date: 'Mon Aug 1 21:30:55 2016'
author: 'Niels van Hoor'
}
{
commit: '4936620'
date: 'Sun Jul 31 17:55:49 2016'
author: 'Niels van Hoor'
}
{
commit: 'c8c0361'
date: 'Sun Jul 31 17:55:17 2016'
author: 'Niels van Hoor'
}
{
commit: 'be9f7c9'
date: 'Sun Jul 31 17:54:25 2016'
author: 'Niels van Hoor'
}
{
commit: '5c3b369'
date: 'Sun Jul 31 17:54:01 2016'
author: 'Niels van Hoor'
}
{
commit: '7e36874'
date: 'Sun Jul 31 17:53:32 2016'
author: 'Niels van Hoor'
}
{
commit: '4511a7a'
date: 'Tue Jul 26 17:25:45 2016'
author: 'Niels van Hoor'
}
{
commit: '68fc9ba'
date: 'Tue Jul 26 17:14:40 2016'
author: 'Niels van Hoor'
}
{
commit: '7179178'
date: 'Tue Jul 26 17:14:23 2016'
author: 'Niels van Hoor'
}
{
commit: '3d868d0'
date: 'Tue Jul 26 16:55:03 2016'
author: 'Niels van Hoor'
}
{
commit: '7b96385'
date: 'Tue Jul 26 16:22:22 2016'
author: 'Niels van Hoor'
}
{
commit: 'dd49a2e'
date: 'Tue Jul 26 16:22:05 2016'
author: 'Niels van Hoor'
}
{
commit: '16a3107'
date: 'Tue Jul 26 15:58:39 2016'
author: 'Niels van Hoor'
}
{
commit: 'd74291b'
date: 'Tue Jul 26 15:20:10 2016'
author: 'Niels van Hoor'
}
{
commit: '5ee2864'
date: 'Tue Jul 26 15:20:02 2016'
author: 'Niels van Hoor'
}
{
commit: '862b33b'
date: 'Tue Jul 26 15:19:47 2016'
author: 'Niels van Hoor'
}
{
commit: 'b29eddc'
date: 'Tue Jul 26 15:19:34 2016'
author: 'Niels van Hoor'
}
{
commit: '9cf7b44'
date: 'Tue Jul 26 15:18:42 2016'
author: 'Niels van Hoor'
}
{
commit: '1cb9455'
date: 'Tue Jul 26 15:18:15 2016'
author: 'Niels van Hoor'
}
{
commit: '2922fd8'
date: 'Tue Jul 26 15:18:09 2016'
author: 'Niels van Hoor'
}
{
commit: '358a3f5'
date: 'Tue Jul 26 15:17:59 2016'
author: 'Niels van Hoor'
}
{
commit: '1fc72b8'
date: 'Tue Jul 26 15:17:40 2016'
author: 'Niels van Hoor'
}
{
commit: 'b0b408e'
date: 'Tue Jul 26 15:17:25 2016'
author: 'Niels van Hoor'
}
{
commit: '5b8ca53'
date: 'Tue Jul 26 15:16:45 2016'
author: 'Niels van Hoor'
}
{
commit: '6c25ab8'
date: 'Tue Jul 26 15:16:05 2016'
author: 'Niels van Hoor'
}
{
commit: '2d8e0d3'
date: 'Tue Jul 26 15:15:36 2016'
author: 'Niels van Hoor'
}
{
commit: 'a96fa22'
date: 'Fri Jul 22 16:44:29 2016'
author: 'Niels van Hoor'
}
{
commit: '44b09ae'
date: 'Fri Jul 22 16:44:21 2016'
author: 'Niels van Hoor'
}
{
commit: '66801fc'
date: 'Fri Jul 22 16:40:53 2016'
author: 'Niels van Hoor'
}
{
commit: '1047b37'
date: 'Fri Jul 22 16:36:41 2016'
author: 'Niels van Hoor'
}
{
commit: '3249596'
date: 'Fri Jul 22 16:36:28 2016'
author: 'Niels van Hoor'
}
{
commit: '16ed3bd'
date: 'Fri Jul 22 16:35:31 2016'
author: 'Niels van Hoor'
}
{
commit: '7835f82'
date: 'Fri Jul 22 16:27:50 2016'
author: 'Niels van Hoor'
}
{
commit: '6d8b172'
date: 'Fri Jul 22 16:26:06 2016'
author: 'Niels van Hoor'
}
{
commit: 'e9a76d2'
date: 'Fri Jul 22 16:25:23 2016'
author: 'Niels van Hoor'
}
{
commit: '4864241'
date: 'Fri Jul 22 16:25:00 2016'
author: 'Niels van Hoor'
}
{
commit: 'a7367a7'
date: 'Wed Sep 14 14:53:12 2016'
author: 'Koen Bok'
}
{
commit: 'dc920bd'
date: 'Wed Sep 14 09:05:59 2016'
author: 'Eelco Lempsink'
}
{
commit: 'cd149e4'
date: 'Fri Sep 9 14:04:20 2016'
author: 'J.P.P Treu'
}
{
commit: 'e96125a'
date: 'Fri Sep 9 11:16:33 2016'
author: 'J.P.P Treu'
}
{
commit: 'cfa63d5'
date: 'Fri Sep 9 11:08:56 2016'
author: 'J.P.P Treu'
}
{
commit: 'ab13543'
date: 'Wed Sep 7 13:48:43 2016'
author: 'J.P.P Treu'
}
{
commit: '2de07a1'
date: 'Wed Sep 7 13:46:37 2016'
author: 'J.P.P Treu'
}
{
commit: 'a5e37a1'
date: 'Tue Sep 6 11:41:24 2016'
author: 'Benjamin den Boe'
}
{
commit: 'ef8aefc'
date: 'Mon Sep 5 18:36:03 2016'
author: 'J.P.P Treu'
}
{
commit: 'ebf58d2'
date: 'Mon Sep 5 18:31:11 2016'
author: 'J.P.P Treu'
}
{
commit: '11296da'
date: 'Mon Sep 5 12:06:10 2016'
author: 'Benjamin den Boe'
}
{
commit: '94ccc04'
date: 'Sat Sep 3 12:53:19 2016'
author: 'Jonas Treu'
}
{
commit: 'f183c8c'
date: 'Fri Sep 2 15:22:31 2016'
author: 'Eelco Lempsink'
}
{
commit: '6f3fa61'
date: 'Fri Sep 2 15:17:28 2016'
author: 'Eelco Lempsink'
}
{
commit: '0d422ff'
date: 'Fri Sep 2 10:41:30 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd5c63b3'
date: 'Tue Aug 30 16:58:42 2016'
author: 'Eelco Lempsink'
}
{
commit: 'dca9008'
date: 'Tue Aug 30 07:43:42 2016'
author: 'Niels van Hoor'
}
{
commit: 'bfb8543'
date: 'Mon Aug 29 10:50:39 2016'
author: 'J.P.P Treu'
}
{
commit: '5093eed'
date: 'Mon Aug 29 10:20:13 2016'
author: 'Koen Bok'
}
{
commit: 'f87b3ab'
date: 'Thu Aug 25 15:20:04 2016'
author: 'Eelco Lempsink'
}
{
commit: '7687398'
date: 'Wed Aug 24 16:19:47 2016'
author: 'Jonas Treu'
}
{
commit: '8488d33'
date: 'Tue Aug 23 10:46:07 2016'
author: 'Koen Bok'
}
{
commit: 'a33d1be'
date: 'Tue Aug 23 10:45:28 2016'
author: 'Koen Bok'
}
{
commit: '852cfe5'
date: 'Mon Aug 15 09:29:56 2016'
author: 'Koen Bok'
}
{
commit: '694b407'
date: 'Fri Aug 12 15:07:39 2016'
author: 'Eelco Lempsink'
}
{
commit: '83a0477'
date: 'Fri Aug 12 14:30:06 2016'
author: 'Eelco Lempsink'
}
{
commit: 'b5b4642'
date: 'Fri Aug 12 14:29:31 2016'
author: 'Eelco Lempsink'
}
{
commit: '7d48128'
date: 'Thu Aug 4 17:38:06 2016'
author: 'Koen Bok'
}
{
commit: '7f842d9'
date: 'Thu Aug 4 16:25:17 2016'
author: 'Floris Verloop'
}
{
commit: '2e96fb6'
date: 'Thu Aug 4 15:02:54 2016'
author: 'Floris Verloop'
}
{
commit: '778a01b'
date: 'Thu Aug 4 14:39:51 2016'
author: 'Koen Bok'
}
{
commit: 'c8f1ade'
date: 'Thu Aug 4 14:09:40 2016'
author: 'Floris Verloop'
}
{
commit: 'fc80069'
date: 'Thu Aug 4 10:25:18 2016'
author: 'Koen Bok'
}
{
commit: '31a7bf9'
date: 'Wed Aug 3 23:21:34 2016'
author: 'Koen Bok'
}
{
commit: 'b69ac08'
date: 'Wed Aug 3 22:00:09 2016'
author: 'Koen Bok'
}
{
commit: 'b2763d2'
date: 'Wed Aug 3 20:05:51 2016'
author: 'Koen Bok'
}
{
commit: 'ca3afd5'
date: 'Wed Aug 3 17:55:27 2016'
author: 'Floris Verloop'
}
{
commit: '248f2f8'
date: 'Wed Aug 3 16:12:16 2016'
author: 'Floris Verloop'
}
{
commit: '293816d'
date: 'Wed Aug 3 15:30:56 2016'
author: 'Floris Verloop'
}
{
commit: '6870ecb'
date: 'Wed Aug 3 15:20:55 2016'
author: 'Floris Verloop'
}
{
commit: '030be1f'
date: 'Wed Aug 3 15:05:55 2016'
author: 'Floris Verloop'
}
{
commit: 'db95db3'
date: 'Wed Aug 3 14:49:03 2016'
author: 'Niels van Hoor'
}
{
commit: '2cb1e4a'
date: 'Wed Aug 3 14:45:13 2016'
author: 'Niels van Hoor'
}
{
commit: 'c29cbc1'
date: 'Wed Aug 3 12:16:08 2016'
author: 'Floris Verloop'
}
{
commit: 'eb31c27'
date: 'Wed Aug 3 12:12:21 2016'
author: 'Floris Verloop'
}
{
commit: '781cc50'
date: 'Wed Aug 3 12:01:15 2016'
author: 'Floris Verloop'
}
{
commit: 'd34658d'
date: 'Wed Aug 3 11:54:56 2016'
author: 'Koen Bok'
}
{
commit: 'daa910b'
date: 'Wed Aug 3 11:54:45 2016'
author: 'Floris Verloop'
}
{
commit: '440e75b'
date: 'Wed Aug 3 11:41:01 2016'
author: 'Floris Verloop'
}
{
commit: 'f11ff0e'
date: 'Wed Aug 3 10:46:07 2016'
author: 'Floris Verloop'
}
{
commit: 'dd76064'
date: 'Tue Aug 2 17:20:08 2016'
author: 'Floris Verloop'
}
{
commit: '4a17b7c'
date: 'Tue Aug 2 17:15:26 2016'
author: 'Floris Verloop'
}
{
commit: '297edea'
date: 'Tue Aug 2 17:08:40 2016'
author: 'Floris Verloop'
}
{
commit: '4b6d92a'
date: 'Tue Aug 2 16:36:05 2016'
author: 'Koen Bok'
}
{
commit: 'acaa7d4'
date: 'Tue Aug 2 16:27:38 2016'
author: 'Niels van Hoor'
}
{
commit: 'dc847a4'
date: 'Tue Aug 2 16:09:30 2016'
author: 'Niels van Hoor'
}
{
commit: '2a84e68'
date: 'Tue Aug 2 15:42:59 2016'
author: 'Koen Bok'
}
{
commit: 'f75ec6a'
date: 'Tue Aug 2 14:57:55 2016'
author: 'Floris Verloop'
}
{
commit: '4773f8f'
date: 'Tue Aug 2 14:38:38 2016'
author: 'Niels van Hoor'
}
{
commit: '52ee4d8'
date: 'Tue Aug 2 14:35:20 2016'
author: 'Niels van Hoor'
}
{
commit: '5297f1f'
date: 'Tue Aug 2 14:32:52 2016'
author: 'Niels van Hoor'
}
{
commit: '5d1fcb4'
date: 'Tue Aug 2 14:32:37 2016'
author: 'Koen Bok'
}
{
commit: 'fd32c4c'
date: 'Tue Aug 2 14:21:09 2016'
author: 'Niels van Hoor'
}
{
commit: 'bb2ad29'
date: 'Tue Aug 2 14:15:44 2016'
author: 'Koen Bok'
}
{
commit: '1280168'
date: 'Tue Aug 2 14:14:18 2016'
author: 'Koen Bok'
}
{
commit: '37893f6'
date: 'Mon Aug 1 16:52:23 2016'
author: 'Koen Bok'
}
{
commit: '2b75dd9'
date: 'Mon Aug 1 16:49:10 2016'
author: 'Koen Bok'
}
{
commit: '15027e5'
date: 'Mon Aug 1 23:38:58 2016'
author: 'Niels van Hoor'
}
{
commit: 'ca22880'
date: 'Mon Aug 1 21:59:41 2016'
author: 'Niels van Hoor'
}
{
commit: '6e9d16c'
date: 'Mon Aug 1 21:53:13 2016'
author: 'Niels van Hoor'
}
{
commit: '5ee40e2'
date: 'Mon Aug 1 20:50:32 2016'
author: 'Koen Bok'
}
{
commit: '9cf0155'
date: 'Mon Aug 1 20:49:32 2016'
author: 'Koen Bok'
}
{
commit: '582615b'
date: 'Mon Aug 1 18:16:52 2016'
author: 'Niels van Hoor'
}
{
commit: '364e1a2'
date: 'Mon Aug 1 16:40:35 2016'
author: 'Niels van Hoor'
}
{
commit: '61d6312'
date: 'Mon Aug 1 16:21:17 2016'
author: 'Niels van Hoor'
}
{
commit: 'd6b4b03'
date: 'Mon Aug 1 15:57:22 2016'
author: 'Jonas Treu'
}
{
commit: 'd1b04b4'
date: 'Mon Aug 1 15:30:22 2016'
author: 'Niels van Hoor'
}
{
commit: '36e778e'
date: 'Mon Aug 1 13:24:57 2016'
author: 'Koen Bok'
}
{
commit: '0598f10'
date: 'Sat Jul 30 12:22:32 2016'
author: 'J.P.P Treu'
}
{
commit: 'd346edc'
date: 'Fri Jul 29 17:51:41 2016'
author: 'Eelco Lempsink'
}
{
commit: 'bc1237c'
date: 'Fri Jul 29 17:31:54 2016'
author: 'Eelco Lempsink'
}
{
commit: 'f7736fe'
date: 'Fri Jul 29 17:18:09 2016'
author: 'Eelco Lempsink'
}
{
commit: 'ef12b68'
date: 'Fri Jul 29 15:12:50 2016'
author: 'Eelco Lempsink'
}
{
commit: '0398085'
date: 'Fri Jul 29 14:56:50 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a44cf99'
date: 'Fri Jul 29 13:49:33 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd86e1ec'
date: 'Fri Jul 29 11:01:12 2016'
author: 'Eelco Lempsink'
}
{
commit: '39bf120'
date: 'Thu Jul 28 23:27:54 2016'
author: 'Eelco Lempsink'
}
{
commit: '9ff1da3'
date: 'Thu Jul 28 23:07:37 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a75ebf1'
date: 'Thu Jul 28 23:03:38 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd5604c3'
date: 'Thu Jul 28 16:57:42 2016'
author: 'Eelco Lempsink'
}
{
commit: '4daa11b'
date: 'Thu Jul 28 16:53:56 2016'
author: 'Koen Bok'
}
{
commit: '04a5d8c'
date: 'Thu Jul 28 16:49:00 2016'
author: 'Eelco Lempsink'
}
{
commit: '0d9f714'
date: 'Thu Jul 28 16:39:41 2016'
author: 'Eelco Lempsink'
}
{
commit: '77fee3e'
date: 'Thu Jul 28 16:21:35 2016'
author: 'Eelco Lempsink'
}
{
commit: '616019b'
date: 'Thu Jul 28 16:11:53 2016'
author: 'Eelco Lempsink'
}
{
commit: '59d1622'
date: 'Thu Jul 28 16:08:52 2016'
author: 'Eelco Lempsink'
}
{
commit: 'f3cfea6'
date: 'Thu Jul 28 15:08:37 2016'
author: 'Koen Bok'
}
{
commit: 'b9e8d14'
date: 'Thu Jul 28 14:36:54 2016'
author: 'Eelco Lempsink'
}
{
commit: '046b50e'
date: 'Thu Jul 28 14:04:03 2016'
author: 'Eelco Lempsink'
}
{
commit: '1bb9b56'
date: 'Thu Jul 28 12:33:37 2016'
author: 'Koen Bok'
}
{
commit: 'cc13ef9'
date: 'Thu Jul 28 11:42:22 2016'
author: 'Koen Bok'
}
{
commit: 'd071ba4'
date: 'Thu Jul 28 11:39:07 2016'
author: 'Koen Bok'
}
{
commit: '27d8ed3'
date: 'Thu Jul 28 11:23:27 2016'
author: 'J.P.P Treu'
}
{
commit: '418b162'
date: 'Thu Jul 28 11:17:34 2016'
author: 'J.P.P Treu'
}
{
commit: 'ada2943'
date: 'Thu Jul 28 09:48:40 2016'
author: 'Eelco Lempsink'
}
{
commit: '8463a44'
date: 'Wed Jul 27 20:57:30 2016'
author: 'Koen Bok'
}
{
commit: '27d2d35'
date: 'Wed Jul 27 17:42:22 2016'
author: 'Koen Bok'
}
{
commit: 'f74b3cb'
date: 'Wed Jul 27 13:35:41 2016'
author: 'Jonas Treu'
}
{
commit: '85b6df4'
date: 'Wed Jul 27 12:23:47 2016'
author: 'Koen Bok'
}
{
commit: '58aeab5'
date: 'Wed Jul 27 12:23:33 2016'
author: 'Koen Bok'
}
{
commit: '0b12542'
date: 'Wed Jul 27 09:19:08 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a56c591'
date: 'Mon Jul 25 16:40:54 2016'
author: 'Eelco Lempsink'
}
{
commit: 'c9a02f5'
date: 'Mon Jul 25 16:12:03 2016'
author: 'Koen Bok'
}
{
commit: 'c997102'
date: 'Mon Jul 25 16:07:33 2016'
author: 'Koen Bok'
}
{
commit: 'f0f7e2d'
date: 'Mon Jul 25 15:21:56 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a545058'
date: 'Mon Jul 25 15:21:23 2016'
author: 'Eelco Lempsink'
}
{
commit: 'cf7b2b7'
date: 'Mon Jul 25 15:21:00 2016'
author: 'Eelco Lempsink'
}
{
commit: '955f744'
date: 'Mon Jul 25 15:12:17 2016'
author: 'Eelco Lempsink'
}
{
commit: '193f74c'
date: 'Mon Jul 25 14:19:34 2016'
author: 'Koen Bok'
}
{
commit: 'ecb9956'
date: 'Mon Jul 25 14:19:20 2016'
author: 'Koen Bok'
}
{
commit: 'b5515a3'
date: 'Mon Jul 25 14:18:23 2016'
author: 'Koen Bok'
}
{
commit: '5199601'
date: 'Mon Jul 25 13:27:33 2016'
author: 'Eelco Lempsink'
}
{
commit: '5c3994f'
date: 'Fri Jul 22 16:45:39 2016'
author: 'Niels van Hoor'
}
{
commit: '67ae9bd'
date: 'Thu Jul 21 19:22:52 2016'
author: 'Koen Bok'
}
{
commit: '2416d2f'
date: 'Wed Jul 20 17:25:37 2016'
author: 'Niels van Hoor'
}
{
commit: 'e9e49d6'
date: 'Wed Jul 20 15:19:49 2016'
author: 'Niels van Hoor'
}
{
commit: '066e6d8'
date: 'Wed Jul 20 15:00:27 2016'
author: 'Niels van Hoor'
}
{
commit: 'fe056b0'
date: 'Wed Jul 20 14:50:26 2016'
author: 'Niels van Hoor'
}
{
commit: 'de39148'
date: 'Wed Jul 20 14:41:11 2016'
author: 'J.P.P Treu'
}
{
commit: 'a252925'
date: 'Wed Jul 20 14:30:22 2016'
author: 'J.P.P Treu'
}
{
commit: '7143e64'
date: 'Tue Jul 19 10:35:01 2016'
author: 'Koen Bok'
}
{
commit: '997b336'
date: 'Mon Jul 18 09:23:12 2016'
author: 'Niels van Hoor'
}
{
commit: 'a303c80'
date: 'Fri Jul 15 14:41:07 2016'
author: 'Koen Bok'
}
{
commit: '5714727'
date: 'Fri Jul 15 11:57:59 2016'
author: 'Niels van Hoor'
}
{
commit: '1141772'
date: 'Fri Jul 15 11:56:13 2016'
author: 'Koen Bok'
}
{
commit: 'c13d10a'
date: 'Fri Jul 15 11:47:03 2016'
author: 'Koen Bok'
}
{
commit: '621479a'
date: 'Tue Jul 5 16:53:52 2016'
author: 'Niels van Hoor'
}
{
commit: 'db93886'
date: 'Mon Jul 4 17:23:45 2016'
author: 'Koen Bok'
}
{
commit: '6af1b8d'
date: 'Mon Jul 4 15:20:08 2016'
author: 'Eelco Lempsink'
}
{
commit: 'ff8a671'
date: 'Thu Jun 30 18:14:41 2016'
author: 'Niels van Hoor'
}
{
commit: '6d9790d'
date: 'Thu Jun 30 18:14:16 2016'
author: 'Niels van Hoor'
}
{
commit: 'b032241'
date: 'Thu Jun 30 13:14:17 2016'
author: 'Eelco Lempsink'
}
{
commit: '340612e'
date: 'Thu Jun 30 14:04:42 2016'
author: 'Koen Bok'
}
{
commit: 'adf5b33'
date: 'Thu Jun 30 08:35:47 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd6e849a'
date: 'Tue Jun 28 15:12:10 2016'
author: 'Eelco Lempsink'
}
{
commit: 'f93ae7f'
date: 'Tue Jun 28 14:25:05 2016'
author: 'Jonas Treu'
}
{
commit: '0dff457'
date: 'Mon Jun 27 10:25:20 2016'
author: 'Eelco Lempsink'
}
{
commit: '4fb83cf'
date: 'Mon Jun 27 10:16:31 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a81f32f'
date: 'Mon Jun 27 10:09:10 2016'
author: 'Eelco Lempsink'
}
{
commit: 'a7a6de0'
date: 'Mon Jun 27 09:53:20 2016'
author: 'Eelco Lempsink'
}
{
commit: '99980ec'
date: 'Mon Jun 27 09:27:07 2016'
author: 'Eelco Lempsink'
}
{
commit: 'f254db6'
date: 'Thu Jun 23 13:39:55 2016'
author: 'Niels van Hoor'
}
{
commit: 'ab0f21b'
date: 'Fri Jun 10 14:08:47 2016'
author: 'Eelco Lempsink'
}
{
commit: '72fb2a8'
date: 'Fri Jun 10 01:17:24 2016'
author: 'Eelco Lempsink'
}
{
commit: '926ac01'
date: 'Fri Jun 10 01:13:09 2016'
author: 'Eelco Lempsink'
}
{
commit: 'b36dbd6'
date: 'Fri Jun 10 00:21:35 2016'
author: 'Eelco Lempsink'
}
{
commit: '2cc6935'
date: 'Thu Jun 9 10:43:42 2016'
author: 'Koen Bok'
}
{
commit: 'c48360d'
date: 'Wed Jun 1 14:40:00 2016'
author: 'Koen Bok'
}
{
commit: '5cc7331'
date: 'Wed Jun 1 14:27:58 2016'
author: 'Koen Bok'
}
{
commit: 'f508b18'
date: 'Tue May 31 23:01:52 2016'
author: 'Eelco Lempsink'
}
{
commit: 'b9c33da'
date: 'Sun May 29 00:48:08 2016'
author: 'Koen Bok'
}
{
commit: 'a52a360'
date: 'Sun May 29 00:30:46 2016'
author: 'Koen Bok'
}
{
commit: 'c28c450'
date: 'Sun May 29 00:17:06 2016'
author: 'Koen Bok'
}
{
commit: '3fba050'
date: 'Sun May 29 00:16:58 2016'
author: 'Koen Bok'
}
{
commit: '08297a9'
date: 'Sun May 29 00:11:00 2016'
author: 'Koen Bok'
}
{
commit: '1608171'
date: 'Sun May 29 00:05:46 2016'
author: 'Koen Bok'
}
{
commit: 'b7021df'
date: 'Sun May 1 13:21:29 2016'
author: 'Koen Bok'
}
{
commit: '7c9e47e'
date: 'Sat Apr 30 23:43:29 2016'
author: 'Koen Bok'
}
{
commit: 'fffa5bc'
date: 'Sat Apr 30 23:43:07 2016'
author: 'Koen Bok'
}
{
commit: 'd6cb31e'
date: 'Tue May 17 17:09:13 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd88e2f3'
date: 'Tue May 17 13:36:51 2016'
author: 'Eelco Lempsink'
}
{
commit: 'edf7c5c'
date: 'Mon May 16 15:05:49 2016'
author: 'Koen Bok'
}
{
commit: 'e24a5bc'
date: 'Mon May 16 15:02:14 2016'
author: 'Koen Bok'
}
{
commit: 'b33e4ec'
date: 'Mon May 16 14:57:53 2016'
author: 'Koen Bok'
}
{
commit: '417ae11'
date: 'Sat May 14 13:32:48 2016'
author: 'Koen Bok'
}
{
commit: '4adc7fa'
date: 'Sat May 14 13:32:42 2016'
author: 'Koen Bok'
}
{
commit: '3aa1421'
date: 'Sat May 14 13:32:32 2016'
author: 'Koen Bok'
}
{
commit: 'e0ce1dc'
date: 'Fri May 13 23:54:36 2016'
author: 'Koen Bok'
}
{
commit: 'b4c7079'
date: 'Fri May 13 23:40:24 2016'
author: 'Koen Bok'
}
{
commit: 'd903cd5'
date: 'Fri May 13 23:39:50 2016'
author: 'Koen Bok'
}
{
commit: '64b09bb'
date: 'Thu May 12 10:28:11 2016'
author: 'Koen Bok'
}
{
commit: '2848823'
date: 'Wed May 11 15:22:12 2016'
author: 'Koen Bok'
}
{
commit: '3d7e40f'
date: 'Wed May 11 15:19:51 2016'
author: 'Eelco Lempsink'
}
{
commit: 'decc0c0'
date: 'Wed May 11 13:32:35 2016'
author: 'Koen Bok'
}
{
commit: '6e06e20'
date: 'Wed May 11 13:15:04 2016'
author: 'Koen Bok'
}
{
commit: '0430a7a'
date: 'Wed May 11 13:14:56 2016'
author: 'Koen Bok'
}
{
commit: '9c7c945'
date: 'Wed May 11 13:14:46 2016'
author: 'Koen Bok'
}
{
commit: 'b5013f8'
date: 'Wed May 11 13:14:17 2016'
author: 'Koen Bok'
}
{
commit: '358bf74'
date: 'Wed May 11 13:13:50 2016'
author: 'Koen Bok'
}
{
commit: '51ebe97'
date: 'Wed May 11 13:13:33 2016'
author: 'Koen Bok'
}
{
commit: '9e60469'
date: 'Wed May 11 13:13:12 2016'
author: 'Koen Bok'
}
{
commit: '937b088'
date: 'Tue May 10 20:59:50 2016'
author: 'Koen Bok'
}
{
commit: 'd85c0fa'
date: 'Tue May 10 20:50:49 2016'
author: 'Koen Bok'
}
{
commit: '7d32c8d'
date: 'Tue May 10 17:38:37 2016'
author: 'Koen Bok'
}
{
commit: 'ea52737'
date: 'Tue May 10 17:25:43 2016'
author: 'Koen Bok'
}
{
commit: '9bc5c0e'
date: 'Tue May 10 17:23:14 2016'
author: 'Koen Bok'
}
{
commit: '02643eb'
date: 'Tue May 10 12:06:15 2016'
author: 'Koen Bok'
}
{
commit: '9070903'
date: 'Tue May 10 12:06:08 2016'
author: 'Koen Bok'
}
{
commit: '528a1cc'
date: 'Tue May 10 11:16:41 2016'
author: 'Koen Bok'
}
{
commit: '8ce9687'
date: 'Sat May 7 16:57:40 2016'
author: 'Eelco Lempsink'
}
{
commit: 'd80a019'
date: 'Fri May 6 11:50:18 2016'
author: 'Eelco Lempsink'
}
{
commit: '0e4aac0'
date: 'Fri May 6 10:11:08 2016'
author: 'Eelco Lempsink'
}
{
commit: '94e779d'
date: 'Tue May 3 15:22:30 2016'
author: 'Koen Bok'
}
{
commit: '908bfd0'
date: 'Tue May 3 15:21:13 2016'
author: 'Koen Bok'
}
{
commit: '03ec3a8'
date: 'Mon May 2 00:30:30 2016'
author: 'Koen Bok'
}
{
commit: 'a699ef8'
date: 'Mon May 2 00:15:05 2016'
author: 'Koen Bok'
}
{
commit: '9091c1a'
date: 'Mon May 2 00:07:18 2016'
author: 'Koen Bok'
}
{
commit: '9fabdcd'
date: 'Sun May 1 23:47:25 2016'
author: 'Koen Bok'
}
{
commit: 'ed45094'
date: 'Sun May 1 23:47:08 2016'
author: 'Koen Bok'
}
{
commit: '4854bad'
date: 'Sun May 1 23:45:27 2016'
author: 'Koen Bok'
}
{
commit: '9900f3d'
date: 'Sun May 1 23:45:07 2016'
author: 'Koen Bok'
}
{
commit: '9862387'
date: 'Sun May 1 23:42:56 2016'
author: 'Koen Bok'
}
{
commit: 'ebafc7e'
date: 'Sun May 1 23:42:33 2016'
author: 'Koen Bok'
}
{
commit: '29857bb'
date: 'Sun May 1 23:41:48 2016'
author: 'Koen Bok'
}
{
commit: '1ec226d'
date: 'Sun May 1 23:40:17 2016'
author: 'Koen Bok'
}
{
commit: '289740d'
date: 'Sun May 1 18:05:08 2016'
author: 'Koen Bok'
}
{
commit: 'e0f6509'
date: 'Sun May 1 13:22:06 2016'
author: 'Koen Bok'
}
{
commit: '1dbda74'
date: 'Sun May 1 13:21:29 2016'
author: 'Koen Bok'
}
{
commit: '5b548b4'
date: 'Sun May 1 17:27:42 2016'
author: 'Koen Bok'
}
{
commit: '445b074'
date: 'Sun May 1 17:17:56 2016'
author: 'Koen Bok'
}
{
commit: 'b85d82c'
date: 'Sat Apr 30 23:43:07 2016'
author: 'Koen Bok'
}
{
commit: 'd59ba69'
date: 'Sat Apr 30 23:43:29 2016'
author: 'Koen Bok'
}
{
commit: 'ae6df89'
date: 'Sat Apr 30 23:08:57 2016'
author: 'Koen Bok'
}
{
commit: 'ff8eb4d'
date: 'Sat Apr 30 23:08:40 2016'
author: 'Koen Bok'
}
{
commit: '645653b'
date: 'Sat Apr 30 22:07:59 2016'
author: 'Koen Bok'
}
{
commit: '4cf5611'
date: 'Fri Apr 29 15:57:39 2016'
author: 'Koen Bok'
}
{
commit: '1a6bb08'
date: 'Fri Apr 29 15:57:29 2016'
author: 'Koen Bok'
}
{
commit: '9a116e2'
date: 'Fri Apr 29 15:55:18 2016'
author: 'Koen Bok'
}
{
commit: '519bbfa'
date: 'Wed Apr 27 22:25:23 2016'
author: 'Koen Bok'
}
{
commit: '996220d'
date: 'Tue Apr 26 11:11:33 2016'
author: 'Niels van Hoor'
}
{
commit: 'b12a41f'
date: 'Tue Apr 26 11:11:15 2016'
author: 'Niels van Hoor'
}
{
commit: '7ab7e4e'
date: 'Sat Apr 23 20:30:55 2016'
author: 'Koen Bok'
}
{
commit: 'b0ed473'
date: 'Wed Apr 20 14:06:29 2016'
author: 'Eelco Lempsink'
}
{
commit: 'c1d403f'
date: 'Tue Apr 19 14:07:29 2016'
author: 'Koen Bok'
}
{
commit: 'dc4d622'
date: 'Thu Apr 14 13:45:10 2016'
author: 'Niels van Hoor'
}
{
commit: 'bd79c54'
date: 'Tue Apr 12 17:03:55 2016'
author: 'Koen Bok'
}
{
commit: 'e32a426'
date: 'Tue Apr 12 13:20:07 2016'
author: 'Niels van Hoor'
}
{
commit: '04ee164'
date: 'Tue Apr 12 11:45:41 2016'
author: 'Niels van Hoor'
}
{
commit: '7aefc85'
date: 'Mon Apr 11 21:23:18 2016'
author: 'Koen Bok'
}
{
commit: '01e2794'
date: 'Mon Apr 11 20:50:20 2016'
author: 'Koen Bok'
}
{
commit: 'c526d80'
date: 'Mon Apr 11 20:49:52 2016'
author: 'Koen Bok'
}
{
commit: '51ede8f'
date: 'Sat Apr 9 14:49:09 2016'
author: 'Koen Bok'
}
{
commit: 'ca477ed'
date: 'Sat Apr 9 14:02:04 2016'
author: 'Koen Bok'
}
{
commit: 'a9f4263'
date: 'Sat Apr 9 13:41:26 2016'
author: 'Koen Bok'
}
{
commit: '0e405b4'
date: 'Sat Apr 9 13:35:53 2016'
author: 'Koen Bok'
}
{
commit: '320b6dc'
date: 'Sat Apr 9 13:30:25 2016'
author: 'Koen Bok'
}
{
commit: 'c849544'
date: 'Sat Apr 9 13:08:03 2016'
author: 'Koen Bok'
}
{
commit: '925b0c4'
date: 'Sat Apr 9 01:24:23 2016'
author: 'Koen Bok'
}
{
commit: '56aef8f'
date: 'Fri Apr 8 15:10:29 2016'
author: 'Koen Bok'
}
{
commit: '10d6ed2'
date: 'Fri Apr 8 15:06:50 2016'
author: 'Koen Bok'
}
{
commit: '24fa7ed'
date: 'Tue Apr 5 22:01:26 2016'
author: 'Koen Bok'
}
{
commit: '6e5fe80'
date: 'Mon Apr 4 15:56:52 2016'
author: 'Koen Bok'
}
{
commit: '5c1930f'
date: 'Thu Mar 31 22:31:32 2016'
author: 'Koen Bok'
}
{
commit: '0c1d135'
date: 'Thu Mar 24 23:15:50 2016'
author: 'Koen Bok'
}
{
commit: '4e762bf'
date: 'Thu Mar 24 23:07:14 2016'
author: 'Koen Bok'
}
{
commit: 'f4c7b16'
date: 'Thu Mar 24 22:50:22 2016'
author: 'Koen Bok'
}
{
commit: '084b0bc'
date: 'Thu Mar 24 16:12:48 2016'
author: 'Koen Bok'
}
{
commit: 'e7246ec'
date: 'Thu Mar 24 11:58:20 2016'
author: 'Koen Bok'
}
{
commit: '25f28a7'
date: 'Tue Mar 22 15:18:16 2016'
author: 'Koen Bok'
}
{
commit: '3722567'
date: 'Mon Mar 21 15:56:10 2016'
author: 'J.P.P Treu'
}
{
commit: 'a85d1df'
date: 'Sat Mar 19 01:42:20 2016'
author: 'J.P.P Treu'
}
{
commit: '489695d'
date: 'Sat Mar 19 01:15:25 2016'
author: 'J.P.P Treu'
}
{
commit: '42e6aa4'
date: 'Sat Mar 19 01:05:00 2016'
author: 'J.P.P Treu'
}
{
commit: '9ea07a4'
date: 'Thu Mar 17 14:17:51 2016'
author: 'J.P.P Treu'
}
{
commit: 'b1064fc'
date: 'Thu Mar 17 14:14:43 2016'
author: 'J.P.P Treu'
}
{
commit: '636c0e5'
date: 'Tue Mar 15 15:05:18 2016'
author: 'J.P.P Treu'
}
{
commit: '0f82601'
date: 'Tue Mar 15 14:57:50 2016'
author: 'J.P.P Treu'
}
{
commit: '75c0dc2'
date: 'Tue Mar 15 10:43:02 2016'
author: 'J.P.P Treu'
}
{
commit: 'b961fbf'
date: 'Mon Mar 14 16:50:01 2016'
author: 'J.P.P Treu'
}
{
commit: '949e6b3'
date: 'Mon Mar 14 16:10:15 2016'
author: 'J.P.P Treu'
}
{
commit: 'cd8bd60'
date: 'Thu Mar 10 17:39:02 2016'
author: 'J.P.P Treu'
}
{
commit: '35a1e52'
date: 'Tue Mar 1 17:59:20 2016'
author: 'Koen Bok'
}
{
commit: 'f1cc5ca'
date: 'Tue Mar 1 15:17:44 2016'
author: 'Koen Bok'
}
{
commit: 'dd2301c'
date: 'Tue Mar 1 13:21:51 2016'
author: 'Koen Bok'
}
{
commit: '460cf79'
date: 'Mon Feb 29 16:04:42 2016'
author: 'Koen Bok'
}
{
commit: '32b26ed'
date: 'Thu Feb 25 13:13:03 2016'
author: 'J.P.P Treu'
}
{
commit: '04f59b5'
date: 'Thu Feb 25 13:02:23 2016'
author: 'J.P.P Treu'
}
{
commit: '233ad6d'
date: 'Wed Feb 24 00:49:46 2016'
author: 'Koen Bok'
}
{
commit: '4f9f713'
date: 'Tue Feb 23 13:28:51 2016'
author: 'J.P.P Treu'
}
{
commit: '13042bc'
date: 'Tue Feb 23 10:50:45 2016'
author: 'J.P.P Treu'
}
{
commit: 'b28335d'
date: 'Thu Feb 18 16:51:53 2016'
author: 'J.P.P Treu'
}
{
commit: '9f41c24'
date: 'Wed Feb 17 16:58:15 2016'
author: 'Koen Bok'
}
{
commit: 'b7039f5'
date: 'Wed Feb 17 16:43:09 2016'
author: 'Koen Bok'
}
{
commit: '195bc4d'
date: 'Wed Feb 17 16:30:32 2016'
author: 'J.P.P Treu'
}
{
commit: '5c72995'
date: 'Wed Feb 17 14:28:03 2016'
author: 'Floris Verloop'
}
{
commit: 'e2e7852'
date: 'Wed Feb 17 12:18:35 2016'
author: 'Koen Bok'
}
{
commit: '700cb8e'
date: 'Wed Feb 17 12:17:58 2016'
author: 'Koen Bok'
}
{
commit: 'a3d0603'
date: 'Tue Feb 16 11:35:45 2016'
author: 'Koen Bok'
}
{
commit: 'c6b4f3f'
date: 'Tue Feb 16 11:31:26 2016'
author: 'Koen Bok'
}
{
commit: 'df4f12a'
date: 'Tue Feb 9 22:40:51 2016'
author: 'Koen Bok'
}
{
commit: '4792322'
date: 'Tue Feb 9 15:16:31 2016'
author: 'Koen Bok'
}
{
commit: '0563b21'
date: 'Tue Feb 9 12:20:22 2016'
author: 'Koen Bok'
}
{
commit: '9453b24'
date: 'Tue Feb 9 12:12:44 2016'
author: 'Koen Bok'
}
{
commit: '40a0c99'
date: 'Tue Feb 9 12:12:03 2016'
author: 'Koen Bok'
}
{
commit: '780e9b3'
date: 'Mon Feb 8 22:11:46 2016'
author: 'Koen Bok'
}
{
commit: '3648250'
date: 'Mon Feb 8 16:31:51 2016'
author: 'Koen Bok'
}
{
commit: '0564c0e'
date: 'Mon Feb 8 16:15:06 2016'
author: 'Koen Bok'
}
{
commit: '094286c'
date: 'Mon Feb 8 15:52:13 2016'
author: 'Koen Bok'
}
{
commit: '8688f84'
date: 'Mon Feb 8 15:39:22 2016'
author: 'Koen Bok'
}
{
commit: '0af5d00'
date: 'Mon Feb 8 14:21:55 2016'
author: 'Koen Bok'
}
{
commit: '54f231d'
date: 'Mon Feb 8 14:14:12 2016'
author: 'Koen Bok'
}
{
commit: '4cae87f'
date: 'Mon Feb 8 13:42:49 2016'
author: 'Koen Bok'
}
{
commit: 'c32e406'
date: 'Mon Feb 8 13:09:42 2016'
author: 'Koen Bok'
}
{
commit: '8435391'
date: 'Sun Feb 7 21:17:53 2016'
author: 'J.P.P Treu'
}
{
commit: '71d886e'
date: 'Fri Feb 5 10:54:13 2016'
author: 'Koen Bok'
}
{
commit: '1bc5caa'
date: 'Thu Feb 4 17:55:59 2016'
author: 'J.P.P Treu'
}
{
commit: 'e57ec36'
date: 'Thu Feb 4 13:18:59 2016'
author: 'Koen Bok'
}
{
commit: '9876ddd'
date: 'Thu Feb 4 00:01:35 2016'
author: 'Koen Bok'
}
{
commit: 'a23f2cd'
date: 'Thu Feb 4 00:00:44 2016'
author: 'Koen Bok'
}
{
commit: '74b8b3b'
date: 'Wed Feb 3 23:04:11 2016'
author: 'Koen Bok'
}
{
commit: 'dbec9ae'
date: 'Wed Feb 3 22:26:49 2016'
author: 'Koen Bok'
}
{
commit: '63e19c9'
date: 'Wed Feb 3 17:13:58 2016'
author: 'Koen Bok'
}
{
commit: '7f04ad8'
date: 'Wed Feb 3 16:21:24 2016'
author: 'Koen Bok'
}
{
commit: '4795a1b'
date: 'Wed Feb 3 16:13:19 2016'
author: 'Koen Bok'
}
{
commit: '267cd7e'
date: 'Wed Feb 3 16:11:34 2016'
author: 'J.P.P Treu'
}
{
commit: '68b52f4'
date: 'Wed Feb 3 13:56:16 2016'
author: 'Jonas Treu'
}
{
commit: 'c182568'
date: 'Wed Feb 3 13:55:11 2016'
author: 'J.P.P Treu'
}
{
commit: 'b1ec9bc'
date: 'Tue Feb 2 12:13:24 2016'
author: 'Koen Bok'
}
{
commit: '941bbdc'
date: 'Mon Feb 1 16:54:19 2016'
author: 'Koen Bok'
}
{
commit: '50f0eed'
date: 'Mon Feb 1 16:51:17 2016'
author: 'Koen Bok'
}
{
commit: '026e006'
date: 'Mon Feb 1 16:44:29 2016'
author: 'Koen Bok'
}
{
commit: '89aad75'
date: 'Mon Feb 1 16:33:08 2016'
author: 'Koen Bok'
}
{
commit: '11abb58'
date: 'Mon Feb 1 14:29:12 2016'
author: 'Koen Bok'
}
{
commit: '08fd95c'
date: 'Mon Feb 1 14:25:09 2016'
author: 'Koen Bok'
}
{
commit: '9d7c8ee'
date: 'Fri Jan 29 00:29:35 2016'
author: 'Koen Bok'
}
{
commit: 'b2b197d'
date: 'Fri Jan 29 00:12:21 2016'
author: 'Koen Bok'
}
{
commit: '6c1eee0'
date: 'Fri Jan 29 00:05:22 2016'
author: 'Koen Bok'
}
{
commit: '17eeb59'
date: 'Fri Jan 29 00:03:03 2016'
author: 'Koen Bok'
}
{
commit: '3315377'
date: 'Thu Jan 28 23:47:10 2016'
author: 'Koen Bok'
}
{
commit: '4581bf2'
date: 'Thu Jan 28 23:09:14 2016'
author: 'Koen Bok'
}
{
commit: '719b8d5'
date: 'Thu Jan 28 23:09:00 2016'
author: 'Koen Bok'
}
{
commit: 'f201e2f'
date: 'Thu Jan 28 18:08:14 2016'
author: 'Koen Bok'
}
{
commit: 'c82d1b2'
date: 'Thu Jan 28 10:52:31 2016'
author: 'Benjamin den Boe'
}
{
commit: 'f06b12d'
date: 'Wed Jan 27 23:13:06 2016'
author: 'Koen Bok'
}
{
commit: '0660faa'
date: 'Wed Jan 27 23:06:22 2016'
author: 'Koen Bok'
}
{
commit: 'eb1315d'
date: 'Wed Jan 27 23:05:55 2016'
author: 'Koen Bok'
}
{
commit: 'dd9c30e'
date: 'Wed Jan 27 22:41:29 2016'
author: 'Koen Bok'
}
{
commit: 'c420611'
date: 'Wed Jan 27 18:45:42 2016'
author: 'Koen Bok'
}
{
commit: '75a62be'
date: 'Wed Jan 27 17:52:20 2016'
author: 'Koen Bok'
}
{
commit: '575fadc'
date: 'Wed Jan 27 17:02:31 2016'
author: 'Koen Bok'
}
{
commit: '111572b'
date: 'Wed Jan 27 17:02:03 2016'
author: 'Koen Bok'
}
{
commit: 'ba0f3ad'
date: 'Wed Jan 27 14:22:51 2016'
author: 'Koen Bok'
}
{
commit: '1650f6b'
date: 'Wed Jan 27 14:20:48 2016'
author: 'Koen Bok'
}
{
commit: '475604f'
date: 'Wed Jan 27 11:36:24 2016'
author: 'Koen Bok'
}
{
commit: '15cffc4'
date: 'Wed Jan 27 11:35:10 2016'
author: 'Koen Bok'
}
{
commit: '3d977bc'
date: 'Tue Jan 26 18:11:45 2016'
author: 'Koen Bok'
}
{
commit: '5c031e0'
date: 'Tue Jan 26 17:11:51 2016'
author: 'Koen Bok'
}
{
commit: '790d6ce'
date: 'Sat Jan 23 16:48:48 2016'
author: 'Koen Bok'
}
{
commit: 'ac27782'
date: 'Thu Jan 21 18:37:49 2016'
author: 'Koen Bok'
}
{
commit: 'f985fbb'
date: 'Thu Jan 21 18:35:43 2016'
author: 'J.P.P Treu'
}
{
commit: '56b653a'
date: 'Thu Jan 21 18:35:21 2016'
author: 'J.P.P Treu'
}
{
commit: '92d6d90'
date: 'Thu Jan 21 17:37:03 2016'
author: 'Benjamin den Boe'
}
{
commit: '21f6504'
date: 'Thu Jan 21 15:41:21 2016'
author: 'Benjamin den Boe'
}
{
commit: '6be8955'
date: 'Thu Jan 21 15:23:48 2016'
author: 'Benjamin den Boe'
}
{
commit: '2dd0385'
date: 'Thu Jan 21 14:11:22 2016'
author: 'Benjamin den Boe'
}
{
commit: 'c7c99d1'
date: 'Thu Jan 21 13:48:27 2016'
author: 'Benjamin den Boe'
}
{
commit: 'fde7c54'
date: 'Wed Jan 20 23:44:16 2016'
author: 'Koen Bok'
}
{
commit: 'b5a7beb'
date: 'Tue Jan 19 22:34:54 2016'
author: 'Koen Bok'
}
{
commit: '82ccc3e'
date: 'Tue Jan 19 22:33:57 2016'
author: 'Koen Bok'
}
{
commit: 'ba7cf3c'
date: 'Mon Jan 18 17:23:20 2016'
author: 'Koen Bok'
}
{
commit: '4d0ce3e'
date: 'Mon Jan 18 15:28:42 2016'
author: 'Koen Bok'
}
{
commit: '0fb0831'
date: 'Mon Jan 18 13:56:41 2016'
author: 'Koen Bok'
}
{
commit: '325775b'
date: 'Mon Jan 18 13:08:17 2016'
author: 'Koen Bok'
}
{
commit: '8e8fda5'
date: 'Mon Jan 18 12:24:18 2016'
author: 'J.P.P Treu'
}
{
commit: '45633ea'
date: 'Mon Jan 18 11:46:29 2016'
author: 'Koen Bok'
}
{
commit: '78724ef'
date: 'Mon Jan 18 11:35:45 2016'
author: 'Koen Bok'
}
{
commit: 'fb78409'
date: 'Mon Jan 18 11:20:43 2016'
author: 'J.P.P Treu'
}
{
commit: '757977c'
date: 'Sun Jan 17 14:17:55 2016'
author: 'Koen Bok'
}
{
commit: '6020e74'
date: 'Sun Jan 17 14:11:12 2016'
author: 'Koen Bok'
}
{
commit: '42de49f'
date: 'Sun Jan 17 12:40:36 2016'
author: 'Koen Bok'
}
{
commit: '27b7647'
date: 'Sat Jan 16 11:21:00 2016'
author: 'Koen Bok'
}
{
commit: '5e126c4'
date: 'Fri Jan 15 16:38:29 2016'
author: 'Koen Bok'
}
{
commit: '7aca265'
date: 'Thu Jan 14 13:16:49 2016'
author: 'J.P.P Treu'
}
{
commit: 'c65dff5'
date: 'Tue Jan 12 18:02:51 2016'
author: 'Koen Bok'
}
{
commit: 'd4c15a4'
date: 'Tue Jan 12 17:00:35 2016'
author: 'Koen Bok'
}
{
commit: '631b47b'
date: 'Tue Jan 12 14:24:49 2016'
author: 'Koen Bok'
}
{
commit: 'dea63db'
date: 'Tue Jan 12 14:12:40 2016'
author: 'Koen Bok'
}
{
commit: '7399c60'
date: 'Tue Jan 12 14:10:52 2016'
author: 'Koen Bok'
}
{
commit: '1ab06fa'
date: 'Tue Jan 12 14:10:27 2016'
author: 'Koen Bok'
}
{
commit: '7296de7'
date: 'Mon Jan 11 23:59:05 2016'
author: 'Koen Bok'
}
{
commit: '83b7c3d'
date: 'Sun Jan 10 21:37:53 2016'
author: 'Koen Bok'
}
{
commit: 'd7571f8'
date: 'Fri Jan 8 16:20:26 2016'
author: 'Koen Bok'
}
{
commit: '36193ac'
date: 'Thu Jan 7 17:10:37 2016'
author: 'Koen Bok'
}
{
commit: '3ee6d07'
date: 'Thu Jan 7 14:08:59 2016'
author: 'Koen Bok'
}
{
commit: 'f797a36'
date: 'Mon Jan 4 14:39:21 2016'
author: 'J.P.P Treu'
}
{
commit: '209a50d'
date: 'Tue Dec 29 23:44:13 2015'
author: 'Koen Bok'
}
{
commit: 'd8ac785'
date: 'Tue Dec 29 14:01:18 2015'
author: 'Koen Bok'
}
{
commit: '06b2cbb'
date: 'Mon Dec 28 21:57:32 2015'
author: 'Koen Bok'
}
{
commit: '643b636'
date: 'Mon Dec 28 21:35:07 2015'
author: 'Koen Bok'
}
{
commit: '59923c4'
date: 'Mon Dec 28 18:19:29 2015'
author: 'Koen Bok'
}
{
commit: 'b71627c'
date: 'Mon Dec 28 17:48:43 2015'
author: 'Koen Bok'
}
{
commit: '57a342d'
date: 'Sun Dec 27 15:21:12 2015'
author: 'Koen Bok'
}
{
commit: '6a39d85'
date: 'Sun Dec 27 15:09:02 2015'
author: 'Koen Bok'
}
{
commit: '1f0def7'
date: 'Sun Dec 27 14:54:36 2015'
author: 'Koen Bok'
}
{
commit: '31baf97'
date: 'Sun Dec 27 14:46:26 2015'
author: 'Koen Bok'
}
{
commit: '91e8b76'
date: 'Sun Dec 27 14:46:06 2015'
author: 'Koen Bok'
}
{
commit: 'cc17f24'
date: 'Sun Dec 27 12:36:03 2015'
author: 'Koen Bok'
}
{
commit: 'aa5f73b'
date: 'Tue Dec 22 21:41:00 2015'
author: 'J.P.P Treu'
}
{
commit: '44ccc0b'
date: 'Tue Dec 22 21:25:34 2015'
author: 'J.P.P Treu'
}
{
commit: '7ed24b6'
date: 'Tue Dec 22 16:22:35 2015'
author: 'J.P.P Treu'
}
{
commit: '9d5a69c'
date: 'Mon Dec 21 14:53:24 2015'
author: 'Koen Bok'
}
{
commit: '10f79a7'
date: 'Sat Dec 19 14:05:47 2015'
author: 'Koen Bok'
}
{
commit: '6b2f999'
date: 'Fri Dec 18 17:08:39 2015'
author: 'Koen Bok'
}
{
commit: 'd8cabe8'
date: 'Fri Dec 18 15:52:46 2015'
author: 'Koen Bok'
}
{
commit: '3f2c0e2'
date: 'Fri Dec 18 15:09:57 2015'
author: 'Koen Bok'
}
{
commit: '58ad551'
date: 'Thu Dec 17 22:25:31 2015'
author: 'Koen Bok'
}
{
commit: 'aa6f4fb'
date: 'Thu Dec 17 14:24:14 2015'
author: 'Koen Bok'
}
{
commit: 'a7df8f4'
date: 'Thu Dec 17 14:03:24 2015'
author: 'Koen Bok'
}
{
commit: 'aefa54a'
date: 'Thu Dec 17 12:30:12 2015'
author: 'Koen Bok'
}
{
commit: '920106e'
date: 'Thu Dec 17 10:44:06 2015'
author: 'Koen Bok'
}
{
commit: 'b4583d4'
date: 'Wed Dec 16 17:17:44 2015'
author: 'Koen Bok'
}
{
commit: '864d7d5'
date: 'Wed Dec 16 17:05:37 2015'
author: 'Koen Bok'
}
{
commit: '893cb5d'
date: 'Wed Dec 16 15:58:21 2015'
author: 'Koen Bok'
}
{
commit: '80a0cba'
date: 'Wed Dec 16 15:44:29 2015'
author: 'Koen Bok'
}
{
commit: '4e8be92'
date: 'Wed Dec 16 15:40:16 2015'
author: 'Koen Bok'
}
{
commit: '0ac463d'
date: 'Wed Dec 16 13:57:18 2015'
author: 'Koen Bok'
}
{
commit: '9bfae6d'
date: 'Wed Dec 16 13:33:37 2015'
author: 'Koen Bok'
}
{
commit: 'f434b99'
date: 'Tue Dec 15 23:24:03 2015'
author: 'Koen Bok'
}
{
commit: '7089129'
date: 'Tue Dec 15 23:14:41 2015'
author: 'Koen Bok'
}
{
commit: '81cf4a1'
date: 'Tue Dec 15 23:04:11 2015'
author: 'Koen Bok'
}
{
commit: '0f9bd46'
date: 'Tue Dec 15 22:52:34 2015'
author: 'Koen Bok'
}
{
commit: 'abcfbcf'
date: 'Tue Dec 15 22:44:47 2015'
author: 'Koen Bok'
}
{
commit: 'aeb2d6c'
date: 'Tue Dec 15 22:40:34 2015'
author: 'Koen Bok'
}
{
commit: '025ea9b'
date: 'Tue Dec 15 15:01:43 2015'
author: 'Koen Bok'
}
{
commit: '32cc787'
date: 'Tue Dec 15 14:57:23 2015'
author: 'Koen Bok'
}
{
commit: '2ce0737'
date: 'Tue Dec 15 14:53:58 2015'
author: 'Koen Bok'
}
{
commit: '9cca44c'
date: 'Tue Dec 15 14:46:24 2015'
author: 'Koen Bok'
}
{
commit: '601e1e5'
date: 'Tue Dec 15 14:43:15 2015'
author: 'Koen Bok'
}
{
commit: 'ca8aa71'
date: 'Tue Dec 15 14:34:44 2015'
author: 'Koen Bok'
}
{
commit: '2c078d2'
date: 'Tue Dec 15 14:13:03 2015'
author: 'Koen Bok'
}
{
commit: '00df8b6'
date: 'Tue Dec 15 13:54:03 2015'
author: 'Koen Bok'
}
{
commit: '8ad78d8'
date: 'Tue Dec 15 13:44:29 2015'
author: 'Koen Bok'
}
{
commit: '89bfd2e'
date: 'Tue Dec 15 13:38:09 2015'
author: 'Koen Bok'
}
{
commit: '4cb63a5'
date: 'Tue Dec 15 13:25:26 2015'
author: 'Koen Bok'
}
{
commit: 'fb55e60'
date: 'Tue Dec 15 13:24:54 2015'
author: 'Koen Bok'
}
{
commit: '0f3b196'
date: 'Tue Dec 15 11:47:08 2015'
author: 'Koen Bok'
}
{
commit: '5a95739'
date: 'Tue Dec 15 11:40:52 2015'
author: 'Koen Bok'
}
{
commit: '674c81c'
date: 'Tue Dec 15 11:25:33 2015'
author: 'Koen Bok'
}
{
commit: 'd38fcf7'
date: 'Tue Dec 15 11:25:18 2015'
author: 'Koen Bok'
}
{
commit: '6cf8a83'
date: 'Tue Dec 15 10:37:01 2015'
author: 'Koen Bok'
}
{
commit: '672b250'
date: 'Mon Dec 14 17:10:57 2015'
author: 'Koen Bok'
}
{
commit: '982a065'
date: 'Mon Dec 14 14:50:23 2015'
author: 'Koen Bok'
}
{
commit: '84dc662'
date: 'Mon Dec 14 14:49:42 2015'
author: 'Koen Bok'
}
{
commit: '304f85a'
date: 'Mon Dec 14 14:46:26 2015'
author: 'Koen Bok'
}
{
commit: '605eb08'
date: 'Mon Dec 14 14:03:25 2015'
author: 'Koen Bok'
}
{
commit: '4a3b67f'
date: 'Mon Dec 14 13:54:58 2015'
author: 'Koen Bok'
}
{
commit: 'cfb7d76'
date: 'Thu Dec 10 16:18:04 2015'
author: 'Koen Bok'
}
{
commit: 'd443397'
date: 'Tue Dec 8 18:42:55 2015'
author: 'Koen Bok'
}
{
commit: 'ff8ab5f'
date: 'Tue Dec 8 15:32:47 2015'
author: 'J.P.P Treu'
}
{
commit: 'f947fee'
date: 'Fri Dec 4 16:49:34 2015'
author: 'Koen Bok'
}
{
commit: 'c5e779e'
date: 'Mon Nov 30 12:00:03 2015'
author: 'Koen Bok'
}
{
commit: 'f3c4b01'
date: 'Fri Nov 27 13:52:22 2015'
author: 'Benjamin den Boe'
}
{
commit: '0b7d7ef'
date: 'Tue Nov 17 14:06:27 2015'
author: 'Koen Bok'
}
{
commit: '9ca69d1'
date: 'Tue Nov 17 13:48:38 2015'
author: 'Koen Bok'
}
{
commit: '0d84b03'
date: 'Mon Nov 16 14:36:35 2015'
author: 'Benjamin den Boe'
}
{
commit: 'a22ecf0'
date: 'Fri Nov 6 14:30:25 2015'
author: 'Benjamin den Boe'
}
{
commit: '858494c'
date: 'Fri Nov 6 14:24:17 2015'
author: 'Benjamin den Boe'
}
{
commit: '0e4d9bb'
date: 'Mon Nov 2 13:41:00 2015'
author: 'J.P.P Treu'
}
{
commit: '3b32854'
date: 'Mon Nov 2 12:02:59 2015'
author: 'J.P.P Treu'
}
{
commit: 'b1b3b23'
date: 'Tue Oct 27 14:51:07 2015'
author: 'J.P.P Treu'
}
{
commit: 'f749b23'
date: 'Tue Oct 27 14:47:01 2015'
author: 'J.P.P Treu'
}
{
commit: '8c8bac4'
date: 'Mon Oct 26 16:26:15 2015'
author: 'Koen Bok'
}
{
commit: '42b7a4b'
date: 'Mon Oct 26 16:25:42 2015'
author: 'Koen Bok'
}
{
commit: '6c1c974'
date: 'Wed Oct 7 08:34:50 2015'
author: 'Edwin van Rijkom'
}
{
commit: 'b22d277'
date: 'Sun Sep 13 12:52:34 2015'
author: 'Benjamin den Boe'
}
{
commit: 'ef74161'
date: 'Tue Sep 8 17:16:56 2015'
author: 'Koen Bok'
}
{
commit: 'a6eb697'
date: 'Mon Sep 7 16:11:58 2015'
author: 'Koen Bok'
}
{
commit: 'd017d6e'
date: 'Fri Sep 4 12:02:32 2015'
author: 'Edwin van Rijkom'
}
{
commit: '0db522a'
date: 'Fri Sep 4 11:30:03 2015'
author: 'Koen Bok'
}
{
commit: 'e45456b'
date: 'Fri Sep 4 11:26:52 2015'
author: 'Koen Bok'
}
{
commit: 'ecf3c54'
date: 'Wed Aug 19 16:21:22 2015'
author: 'Edwin van Rijkom'
}
{
commit: 'c966d49'
date: 'Fri Aug 14 14:34:58 2015'
author: 'Koen Bok'
}
{
commit: '6360b78'
date: 'Wed Aug 12 19:40:51 2015'
author: 'Koen Bok'
}
{
commit: 'daa25a5'
date: 'Wed Aug 12 19:40:06 2015'
author: 'Koen Bok'
}
{
commit: 'd129b20'
date: 'Thu Aug 6 12:11:29 2015'
author: 'J.P.P Treu'
}
{
commit: '62ac19e'
date: 'Wed Jul 22 11:44:04 2015'
author: 'J.P.P Treu'
}
{
commit: 'effb399'
date: 'Tue Jul 21 16:23:39 2015'
author: 'J.P.P Treu'
}
{
commit: 'c181296'
date: 'Tue Jul 21 15:17:23 2015'
author: 'J.P.P Treu'
}
{
commit: 'a80a61c'
date: 'Tue Jul 21 14:02:31 2015'
author: 'J.P.P Treu'
}
{
commit: 'f69ced3'
date: 'Tue Jul 21 13:46:40 2015'
author: 'J.P.P Treu'
}
{
commit: '8972740'
date: 'Tue Jul 21 13:31:56 2015'
author: 'J.P.P Treu'
}
{
commit: '9e98ef0'
date: 'Tue Jul 21 10:56:56 2015'
author: 'Koen Bok'
}
{
commit: 'b34eca1'
date: 'Fri Jul 17 23:41:32 2015'
author: 'Koen Bok'
}
{
commit: '5c47bd5'
date: 'Fri Jul 17 23:40:25 2015'
author: 'Koen Bok'
}
{
commit: 'db96a5e'
date: 'Wed Jul 15 17:04:42 2015'
author: 'Koen Bok'
}
{
commit: 'c2f0066'
date: 'Wed Jul 15 16:03:24 2015'
author: 'Koen Bok'
}
{
commit: '6438f75'
date: 'Wed Jul 15 16:02:40 2015'
author: 'Koen Bok'
}
{
commit: '8097266'
date: 'Thu Jul 9 13:38:00 2015'
author: 'Koen Bok'
}
{
commit: '1c5bb79'
date: 'Thu Jul 9 13:37:48 2015'
author: 'Koen Bok'
}
{
commit: '7c13491'
date: 'Tue Jun 30 16:16:53 2015'
author: 'Koen Bok'
}
{
commit: '194c6c1'
date: 'Sun Jun 28 00:05:21 2015'
author: 'Koen Bok'
}
{
commit: 'd2152f3'
date: 'Sun Jun 28 00:03:46 2015'
author: 'Koen Bok'
}
{
commit: 'c4371ee'
date: 'Wed Jun 24 16:58:38 2015'
author: 'Koen Bok'
}
{
commit: '2c7d9b6'
date: 'Wed Jun 24 16:58:28 2015'
author: 'Koen Bok'
}
{
commit: '7cb7f1f'
date: 'Tue Jun 23 16:07:56 2015'
author: 'Koen Bok'
}
{
commit: '11139a7'
date: 'Wed Jun 10 14:47:44 2015'
author: 'Benjamin den Boe'
}
{
commit: '21b9efa'
date: 'Mon Jun 8 16:16:45 2015'
author: 'Koen Bok'
}
{
commit: 'f0e040f'
date: 'Mon Jun 8 16:07:54 2015'
author: 'Koen Bok'
}
{
commit: '067cc97'
date: 'Mon Jun 8 15:56:42 2015'
author: 'Koen Bok'
}
{
commit: 'f08d7f6'
date: 'Mon Jun 8 15:49:33 2015'
author: 'Koen Bok'
}
{
commit: '6bb2038'
date: 'Thu May 21 12:13:40 2015'
author: 'Koen Bok'
}
{
commit: '9f69cd4'
date: 'Thu May 21 12:12:53 2015'
author: 'Koen Bok'
}
{
commit: '9ee696d'
date: 'Tue May 12 17:04:55 2015'
author: 'Koen Bok'
}
{
commit: '6707a25'
date: 'Tue May 12 17:04:51 2015'
author: 'Koen Bok'
}
{
commit: '53b28cf'
date: 'Tue May 12 17:02:48 2015'
author: 'Koen Bok'
}
{
commit: '3fc11e7'
date: 'Thu May 7 15:34:24 2015'
author: 'Koen Bok'
}
{
commit: '27c6950'
date: 'Thu May 7 14:38:33 2015'
author: 'Koen Bok'
}
{
commit: '997378f'
date: 'Tue May 5 16:13:05 2015'
author: 'Koen Bok'
}
{
commit: '5797cc7'
date: 'Fri May 1 20:35:00 2015'
author: 'Benjamin den Boe'
}
{
commit: 'd85846c'
date: 'Fri May 1 12:16:52 2015'
author: 'Benjamin den Boe'
}
{
commit: 'ab89ee4'
date: 'Fri May 1 12:13:51 2015'
author: 'Benjamin den Boe'
}
{
commit: '0e755dc'
date: 'Fri May 1 12:12:54 2015'
author: 'Benjamin den Boe'
}
{
commit: '7504bd8'
date: 'Fri May 1 11:55:55 2015'
author: 'Benjamin den Boe'
}
{
commit: '81a3f2f'
date: 'Fri May 1 10:28:54 2015'
author: 'Benjamin den Boe'
}
{
commit: '238f18e'
date: 'Thu Apr 30 14:17:19 2015'
author: 'Koen Bok'
}
{
commit: 'f755586'
date: 'Thu Apr 30 12:41:32 2015'
author: 'Koen Bok'
}
{
commit: 'ce79b87'
date: 'Thu Apr 30 12:41:20 2015'
author: 'Koen Bok'
}
{
commit: '7f8a9de'
date: 'Thu Apr 23 12:02:27 2015'
author: 'Benjamin den Boe'
}
{
commit: 'a063536'
date: 'Thu Apr 23 11:37:49 2015'
author: 'Benjamin den Boe'
}
{
commit: 'ced648f'
date: 'Wed Apr 22 17:12:01 2015'
author: 'Benjamin den Boe'
}
{
commit: '66eea95'
date: 'Wed Apr 22 17:00:46 2015'
author: 'Benjamin den Boe'
}
{
commit: '4e182d7'
date: 'Mon Apr 20 16:21:58 2015'
author: 'Koen Bok'
}
{
commit: 'e65ea33'
date: 'Mon Apr 20 16:21:39 2015'
author: 'Koen Bok'
}
{
commit: '6e913b4'
date: 'Tue Apr 14 17:33:29 2015'
author: 'Edwin van Rijkom'
}
{
commit: '9c12003'
date: 'Thu Apr 9 11:02:27 2015'
author: 'Koen Bok'
}
{
commit: '7ff0515'
date: 'Thu Apr 9 11:01:30 2015'
author: 'Koen Bok'
}
{
commit: 'bf78d4f'
date: 'Tue Apr 7 22:43:26 2015'
author: 'Koen Bok'
}
{
commit: '16520a3'
date: 'Tue Apr 7 21:58:55 2015'
author: 'Koen Bok'
}
{
commit: '2dd9a9c'
date: 'Tue Apr 7 21:37:03 2015'
author: 'Koen Bok'
}
{
commit: 'e6d5cda'
date: 'Tue Apr 7 21:12:34 2015'
author: 'Koen Bok'
}
{
commit: '64896a8'
date: 'Tue Apr 7 20:55:25 2015'
author: 'Koen Bok'
}
{
commit: '0e65db3'
date: 'Tue Apr 7 20:55:09 2015'
author: 'Koen Bok'
}
{
commit: '1b293bf'
date: 'Tue Apr 7 17:44:36 2015'
author: 'J.P.P Treu'
}
{
commit: 'bab3232'
date: 'Fri Apr 3 15:02:35 2015'
author: 'Edwin van Rijkom'
}
{
commit: '58ff015'
date: 'Fri Apr 3 13:23:20 2015'
author: 'Edwin van Rijkom'
}
{
commit: 'c9b3cb0'
date: 'Fri Apr 3 11:13:18 2015'
author: 'Koen Bok'
}
{
commit: '1e8c7c1'
date: 'Fri Apr 3 10:28:21 2015'
author: 'Koen Bok'
}
{
commit: 'c518763'
date: 'Fri Apr 3 10:19:14 2015'
author: 'Koen Bok'
}
{
commit: '2b7855d'
date: 'Fri Apr 3 10:10:56 2015'
author: 'Koen Bok'
}
{
commit: '6dda6c6'
date: 'Fri Apr 3 10:04:46 2015'
author: 'Koen Bok'
}
{
commit: 'a5e1a1a'
date: 'Thu Apr 2 17:41:25 2015'
author: 'Koen Bok'
}
{
commit: '3098d23'
date: 'Thu Apr 2 17:35:41 2015'
author: 'Koen Bok'
}
{
commit: '42b6bf1'
date: 'Thu Apr 2 17:24:28 2015'
author: 'Koen Bok'
}
{
commit: '658933b'
date: 'Thu Apr 2 16:02:18 2015'
author: 'Koen Bok'
}
{
commit: '29a8452'
date: 'Thu Apr 2 15:12:12 2015'
author: 'Koen Bok'
}
{
commit: '705a6d1'
date: 'Thu Apr 2 14:10:59 2015'
author: 'Koen Bok'
}
{
commit: '9da6ec3'
date: 'Thu Apr 2 13:52:13 2015'
author: 'Koen Bok'
}
{
commit: '7e4f83b'
date: 'Thu Apr 2 12:58:55 2015'
author: 'Koen Bok'
}
{
commit: '1febf8d'
date: 'Thu Apr 2 12:25:41 2015'
author: 'Koen Bok'
}
{
commit: 'e4c08c6'
date: 'Thu Apr 2 12:22:38 2015'
author: 'Koen Bok'
}
{
commit: '654779d'
date: 'Thu Apr 2 12:15:36 2015'
author: 'Koen Bok'
}
{
commit: '4cb562b'
date: 'Thu Apr 2 12:02:34 2015'
author: 'Koen Bok'
}
{
commit: '86b7691'
date: 'Fri Mar 27 11:00:36 2015'
author: 'Benjamin den Boe'
}
{
commit: 'e798718'
date: 'Thu Mar 26 11:34:38 2015'
author: 'Koen Bok'
}
{
commit: 'e6cd862'
date: 'Tue Mar 10 12:32:34 2015'
author: 'Koen Bok'
}
{
commit: '3b25bba'
date: 'Tue Mar 10 11:22:23 2015'
author: 'Koen Bok'
}
{
commit: 'd6a3894'
date: 'Tue Mar 10 11:05:11 2015'
author: 'Koen Bok'
}
{
commit: 'bd14b72'
date: 'Tue Mar 10 11:03:59 2015'
author: 'Koen Bok'
}
{
commit: '5caa36d'
date: 'Mon Mar 9 12:27:38 2015'
author: 'Koen Bok'
}
{
commit: '42dfba9'
date: 'Mon Mar 9 12:27:26 2015'
author: 'Koen Bok'
}
{
commit: '7356bf5'
date: 'Fri Feb 27 16:28:48 2015'
author: 'Koen Bok'
}
{
commit: '22c2e4e'
date: 'Tue Feb 24 17:26:11 2015'
author: 'Koen Bok'
}
{
commit: 'efc5aa4'
date: 'Sun Feb 15 23:18:42 2015'
author: 'Koen Bok'
}
{
commit: 'd69bbfe'
date: 'Sun Feb 15 23:11:07 2015'
author: 'Koen Bok'
}
{
commit: 'ffcdbec'
date: 'Tue Feb 10 16:36:34 2015'
author: 'Koen Bok'
}
{
commit: 'adcca17'
date: 'Tue Feb 10 16:16:47 2015'
author: 'Koen Bok'
}
{
commit: 'c659f32'
date: 'Tue Feb 10 14:36:06 2015'
author: 'Koen Bok'
}
{
commit: '5f36189'
date: 'Mon Feb 2 11:29:38 2015'
author: 'Koen Bok'
}
{
commit: 'f6abac2'
date: 'Fri Jan 30 17:28:40 2015'
author: 'Koen Bok'
}
{
commit: '2aafdc0'
date: 'Fri Jan 30 16:46:50 2015'
author: 'Koen Bok'
}
{
commit: '2224d2c'
date: 'Fri Jan 30 16:37:12 2015'
author: 'Koen Bok'
}
{
commit: '3edbb0f'
date: 'Mon Jan 26 23:04:47 2015'
author: 'Koen Bok'
}
{
commit: '3d49915'
date: 'Sun Jan 18 23:04:40 2015'
author: 'Koen Bok'
}
{
commit: '7f60bc9'
date: 'Sun Jan 18 23:02:16 2015'
author: 'Koen Bok'
}
{
commit: '52df7dc'
date: 'Sun Jan 18 23:01:46 2015'
author: 'Koen Bok'
}
{
commit: '45d792d'
date: 'Wed Jan 14 11:41:50 2015'
author: 'Koen Bok'
}
{
commit: 'f932e94'
date: 'Mon Jan 5 18:07:01 2015'
author: 'Koen Bok'
}
{
commit: '75e5990'
date: 'Mon Jan 5 18:06:48 2015'
author: 'Koen Bok'
}
{
commit: '0d76609'
date: 'Sat Dec 13 21:14:41 2014'
author: 'Koen Bok'
}
{
commit: '8e59102'
date: 'Fri Dec 12 14:52:38 2014'
author: 'Koen Bok'
}
{
commit: 'd400c9c'
date: 'Fri Dec 12 14:48:17 2014'
author: 'Koen Bok'
}
{
commit: '47f8be6'
date: 'Fri Dec 12 12:54:11 2014'
author: 'Koen Bok'
}
{
commit: '3f9411e'
date: 'Fri Dec 12 11:49:49 2014'
author: 'Koen Bok'
}
{
commit: '6cc6def'
date: 'Fri Dec 12 11:48:00 2014'
author: 'Koen Bok'
}
{
commit: '0d59690'
date: 'Sat Dec 6 11:21:49 2014'
author: 'Koen Bok'
}
{
commit: '135a729'
date: 'Sat Dec 6 11:19:23 2014'
author: 'Koen Bok'
}
{
commit: '24de214'
date: 'Wed Nov 26 19:15:48 2014'
author: 'Koen Bok'
}
{
commit: 'e33a47b'
date: 'Wed Nov 26 19:02:02 2014'
author: 'Koen Bok'
}
{
commit: '4b1f2d2'
date: 'Wed Nov 26 14:50:35 2014'
author: 'Koen Bok'
}
{
commit: '92b4ee1'
date: 'Tue Nov 11 12:14:06 2014'
author: 'Koen Bok'
}
{
commit: 'af57e3d'
date: 'Sun Nov 2 11:11:58 2014'
author: 'Koen Bok'
}
{
commit: 'd147a23'
date: 'Mon Oct 27 22:25:03 2014'
author: 'Koen Bok'
}
{
commit: '62e18b8'
date: 'Mon Oct 27 22:14:38 2014'
author: 'Koen Bok'
}
{
commit: '0f8c3bb'
date: 'Mon Oct 27 22:01:12 2014'
author: 'Koen Bok'
}
{
commit: 'ef112cd'
date: 'Mon Oct 27 21:36:35 2014'
author: 'Koen Bok'
}
{
commit: '6c287fe'
date: 'Mon Oct 27 21:18:39 2014'
author: 'Koen Bok'
}
{
commit: 'ecfa572'
date: 'Mon Oct 27 20:59:38 2014'
author: 'Koen Bok'
}
{
commit: '8d4b1bd'
date: 'Mon Oct 27 20:48:41 2014'
author: 'Koen Bok'
}
{
commit: 'a82c4fb'
date: 'Mon Oct 27 19:18:18 2014'
author: 'Koen Bok'
}
{
commit: '2e07617'
date: 'Mon Oct 27 17:08:19 2014'
author: 'Koen Bok'
}
{
commit: 'f7271db'
date: 'Mon Oct 27 17:06:39 2014'
author: 'Koen Bok'
}
{
commit: 'f86b69a'
date: 'Mon Oct 27 17:06:23 2014'
author: 'Koen Bok'
}
{
commit: 'f6816e1'
date: 'Fri Oct 24 12:09:44 2014'
author: 'Benjamin den Boe'
}
{
commit: '093bdb7'
date: 'Fri Oct 24 12:03:30 2014'
author: 'Benjamin den Boe'
}
{
commit: '5121cf5'
date: 'Fri Oct 24 11:57:45 2014'
author: 'Benjamin den Boe'
}
{
commit: '1b13e7f'
date: 'Mon Oct 20 23:06:22 2014'
author: 'Koen Bok'
}
{
commit: '5b6aed1'
date: 'Sat Oct 18 14:40:46 2014'
author: 'Koen Bok'
}
{
commit: 'db9ffd1'
date: 'Sat Oct 18 12:08:21 2014'
author: 'Koen Bok'
}
{
commit: 'd89500c'
date: 'Sat Oct 11 16:15:04 2014'
author: 'Koen Bok'
}
{
commit: 'ef2f738'
date: 'Fri Oct 10 14:26:21 2014'
author: 'Koen Bok'
}
{
commit: '9b7b806'
date: 'Fri Oct 10 11:14:05 2014'
author: 'Koen Bok'
}
{
commit: '0f0a7fe'
date: 'Wed Oct 8 22:59:05 2014'
author: 'Koen Bok'
}
{
commit: '5afa23f'
date: 'Mon Oct 6 14:23:09 2014'
author: 'Koen Bok'
}
{
commit: '00ab257'
date: 'Mon Oct 6 11:50:28 2014'
author: 'Koen Bok'
}
{
commit: '2db13aa'
date: 'Mon Oct 6 10:49:23 2014'
author: 'Koen Bok'
}
{
commit: '0a61fc4'
date: 'Sun Oct 5 16:35:21 2014'
author: 'Koen Bok'
}
{
commit: 'fbde7a8'
date: 'Sun Oct 5 16:24:15 2014'
author: 'Koen Bok'
}
{
commit: '2a0439a'
date: 'Sun Oct 5 16:23:05 2014'
author: 'Koen Bok'
}
{
commit: '4eb86e9'
date: 'Sun Oct 5 16:22:47 2014'
author: 'Koen Bok'
}
{
commit: '63841e7'
date: 'Sun Oct 5 16:22:41 2014'
author: 'Koen Bok'
}
{
commit: 'a04b76d'
date: 'Tue Sep 30 16:21:33 2014'
author: 'Koen Bok'
}
{
commit: 'c548c39'
date: 'Tue Sep 30 16:17:00 2014'
author: 'Koen Bok'
}
{
commit: 'e9c3c4a'
date: 'Tue Sep 30 16:13:51 2014'
author: 'Koen Bok'
}
{
commit: 'de60b1b'
date: 'Tue Sep 30 14:34:39 2014'
author: 'Koen Bok'
}
{
commit: 'e81fc33'
date: 'Tue Sep 30 14:25:45 2014'
author: 'Koen Bok'
}
{
commit: 'ae4fc9b'
date: 'Tue Sep 30 14:03:35 2014'
author: 'Koen Bok'
}
{
commit: '1d521bd'
date: 'Tue Sep 30 12:49:24 2014'
author: 'Koen Bok'
}
{
commit: 'a2613e7'
date: 'Tue Sep 30 12:38:32 2014'
author: 'Koen Bok'
}
{
commit: 'ebcb76b'
date: 'Tue Sep 30 12:38:18 2014'
author: 'Koen Bok'
}
{
commit: '410b06f'
date: 'Tue Sep 30 11:36:05 2014'
author: 'Koen Bok'
}
{
commit: 'dc36471'
date: 'Fri Sep 26 14:55:30 2014'
author: 'Koen Bok'
}
{
commit: 'bd01fd8'
date: 'Fri Sep 26 14:44:28 2014'
author: 'Koen Bok'
}
{
commit: '9a04efe'
date: 'Fri Sep 26 14:11:58 2014'
author: 'Koen Bok'
}
{
commit: 'e13feb4'
date: 'Fri Sep 26 14:11:36 2014'
author: 'Koen Bok'
}
{
commit: '570630a'
date: 'Fri Sep 26 14:11:21 2014'
author: 'Koen Bok'
}
{
commit: '82d933f'
date: 'Fri Sep 26 13:59:26 2014'
author: 'Koen Bok'
}
{
commit: '1e64425'
date: 'Thu Sep 25 19:01:01 2014'
author: 'Koen Bok'
}
{
commit: 'bf0e721'
date: 'Thu Sep 25 18:34:14 2014'
author: 'Koen Bok'
}
{
commit: 'd8c6123'
date: 'Thu Sep 25 18:16:04 2014'
author: 'Koen Bok'
}
{
commit: '9e116b5'
date: 'Thu Sep 25 16:59:41 2014'
author: 'Koen Bok'
}
{
commit: 'c1ad81e'
date: 'Thu Sep 25 16:48:56 2014'
author: 'Koen Bok'
}
{
commit: 'ffd106e'
date: 'Thu Sep 25 15:56:49 2014'
author: 'Koen Bok'
}
{
commit: 'fe9fa63'
date: 'Thu Sep 25 15:39:48 2014'
author: 'Koen Bok'
}
{
commit: '48028d3'
date: 'Thu Sep 25 15:38:25 2014'
author: 'Koen Bok'
}
{
commit: '4686f4f'
date: 'Thu Sep 25 15:31:20 2014'
author: 'Koen Bok'
}
{
commit: '9552f44'
date: 'Thu Sep 25 15:31:06 2014'
author: 'Koen Bok'
}
{
commit: '962f45d'
date: 'Thu Sep 25 15:30:51 2014'
author: 'Koen Bok'
}
{
commit: '2f8594f'
date: 'Thu Sep 25 15:30:33 2014'
author: 'Koen Bok'
}
{
commit: '40f024f'
date: 'Thu Sep 25 15:04:35 2014'
author: 'Koen Bok'
}
{
commit: 'e5ce332'
date: 'Thu Sep 25 11:14:52 2014'
author: 'Koen Bok'
}
{
commit: 'fce1baa'
date: 'Wed Sep 24 22:34:07 2014'
author: 'Koen Bok'
}
{
commit: 'b9ae0ad'
date: 'Tue Sep 23 14:55:19 2014'
author: 'Koen Bok'
}
{
commit: '814d193'
date: 'Mon Sep 22 21:23:04 2014'
author: 'Koen Bok'
}
{
commit: '065fa18'
date: 'Mon Sep 22 21:14:32 2014'
author: 'Koen Bok'
}
{
commit: '4a29f4e'
date: 'Thu Sep 18 15:02:01 2014'
author: 'Benjamin den Boe'
}
{
commit: '1deb452'
date: 'Thu Sep 18 14:58:35 2014'
author: 'Benjamin den Boe'
}
{
commit: 'b8abf40'
date: 'Thu Sep 18 14:58:02 2014'
author: 'Benjamin den Boe'
}
{
commit: '66e8ba2'
date: 'Thu Sep 18 14:56:21 2014'
author: 'Benjamin den Boe'
}
{
commit: 'd7c40ce'
date: 'Thu Sep 18 14:55:36 2014'
author: 'Benjamin den Boe'
}
{
commit: 'bf11c56'
date: 'Thu Sep 18 14:49:14 2014'
author: 'Benjamin den Boe'
}
{
commit: '5b9777d'
date: 'Thu Sep 18 14:47:43 2014'
author: 'Benjamin den Boe'
}
{
commit: 'cc66c7b'
date: 'Thu Sep 18 14:43:08 2014'
author: 'Benjamin den Boe'
}
{
commit: '06a4df9'
date: 'Sun Aug 24 14:54:37 2014'
author: 'Koen Bok'
}
{
commit: 'e61b373'
date: 'Wed Aug 20 15:35:10 2014'
author: 'Koen Bok'
}
{
commit: '6059202'
date: 'Tue Aug 19 14:14:45 2014'
author: 'Koen Bok'
}
{
commit: 'bccd4e3'
date: 'Tue Aug 19 14:05:24 2014'
author: 'Koen Bok'
}
{
commit: 'f13bd7f'
date: 'Tue Aug 19 14:04:45 2014'
author: 'Koen Bok'
}
{
commit: '689cbb4'
date: 'Sun Aug 17 14:39:32 2014'
author: 'Koen Bok'
}
{
commit: 'daa11db'
date: 'Sun Aug 17 11:54:23 2014'
author: 'Koen Bok'
}
{
commit: 'c73c31f'
date: 'Thu Aug 14 06:44:14 2014'
author: 'Koen Bok'
}
{
commit: '51041c6'
date: 'Tue Jul 22 14:17:04 2014'
author: 'Koen Bok'
}
{
commit: '20a26c1'
date: 'Tue Jul 22 14:08:44 2014'
author: 'Koen Bok'
}
{
commit: 'f65e89d'
date: 'Tue Jul 22 14:05:47 2014'
author: 'Koen Bok'
}
{
commit: 'd14e883'
date: 'Thu Jul 10 12:38:40 2014'
author: 'Koen Bok'
}
{
commit: 'd907147'
date: 'Tue Jul 1 15:06:41 2014'
author: 'Koen Bok'
}
{
commit: '6ae8769'
date: 'Tue Jul 1 14:36:00 2014'
author: 'Koen Bok'
}
{
commit: 'e3be363'
date: 'Fri Jun 27 11:23:23 2014'
author: 'Koen Bok'
}
{
commit: '0839b4b'
date: 'Fri Jun 27 10:42:30 2014'
author: 'Koen Bok'
}
{
commit: 'f1ab831'
date: 'Wed Jun 25 00:21:40 2014'
author: 'Koen Bok'
}
{
commit: 'cef8706'
date: 'Thu Jun 19 16:21:10 2014'
author: 'Koen Bok'
}
{
commit: '4235f0c'
date: 'Thu Jun 19 16:20:55 2014'
author: 'Koen Bok'
}
{
commit: 'e3e38a1'
date: 'Thu Jun 19 15:49:02 2014'
author: 'Koen Bok'
}
{
commit: '9a0f05e'
date: 'Tue Jun 17 11:15:25 2014'
author: 'Koen Bok'
}
{
commit: '536cf90'
date: 'Tue Jun 17 11:13:49 2014'
author: 'Koen Bok'
}
{
commit: '1fb5c9e'
date: 'Tue Jun 17 01:02:39 2014'
author: 'Koen Bok'
}
{
commit: 'fafb016'
date: 'Tue Jun 17 01:02:22 2014'
author: 'Koen Bok'
}
{
commit: 'f34db1c'
date: 'Tue Jun 17 01:01:51 2014'
author: 'Koen Bok'
}
{
commit: '2ed6bab'
date: 'Wed Jun 4 12:40:06 2014'
author: 'Koen Bok'
}
{
commit: '5476611'
date: 'Fri May 30 14:03:40 2014'
author: 'Koen Bok'
}
{
commit: '53391c3'
date: 'Fri May 30 14:02:27 2014'
author: 'Koen Bok'
}
{
commit: 'ccc99d4'
date: 'Fri May 30 12:18:04 2014'
author: 'Koen Bok'
}
{
commit: 'd16326a'
date: 'Fri May 30 12:14:21 2014'
author: 'Koen Bok'
}
{
commit: 'bb58df1'
date: 'Mon May 26 16:09:09 2014'
author: 'Koen Bok'
}
{
commit: '975b2b1'
date: 'Wed May 21 16:54:11 2014'
author: 'Koen Bok'
}
{
commit: '21a49cc'
date: 'Wed May 21 16:52:35 2014'
author: 'Koen Bok'
}
{
commit: 'fe67bbc'
date: 'Wed May 21 16:39:48 2014'
author: 'Koen Bok'
}
{
commit: '0a8a768'
date: 'Wed May 21 16:20:50 2014'
author: 'Koen Bok'
}
{
commit: '20decd2'
date: 'Sun May 18 16:18:30 2014'
author: 'Koen Bok'
}
{
commit: 'd7c2135'
date: 'Sun May 18 16:18:12 2014'
author: 'Koen Bok'
}
{
commit: '69c4d63'
date: 'Sat May 17 21:26:31 2014'
author: 'Koen Bok'
}
{
commit: 'e86abfa'
date: 'Thu May 15 23:57:06 2014'
author: 'Koen Bok'
}
{
commit: '216a271'
date: 'Thu May 15 23:33:22 2014'
author: 'Koen Bok'
}
{
commit: 'a05732a'
date: 'Thu May 15 23:20:17 2014'
author: 'Koen Bok'
}
{
commit: '7f31ec8'
date: 'Thu May 15 23:03:48 2014'
author: 'Koen Bok'
}
{
commit: '3d42703'
date: 'Thu May 15 16:16:05 2014'
author: 'Koen Bok'
}
{
commit: 'c46ce9e'
date: 'Thu May 15 15:33:04 2014'
author: 'Koen Bok'
}
{
commit: '80ea2f8'
date: 'Thu May 15 15:32:43 2014'
author: 'Koen Bok'
}
{
commit: '943df9a'
date: 'Thu May 15 13:39:22 2014'
author: 'Koen Bok'
}
{
commit: 'a9fd591'
date: 'Thu May 15 11:31:36 2014'
author: 'Koen Bok'
}
{
commit: 'b31938a'
date: 'Thu May 15 10:32:30 2014'
author: 'Koen Bok'
}
{
commit: '4630242'
date: 'Thu May 15 10:24:30 2014'
author: 'Koen Bok'
}
{
commit: '30e688e'
date: 'Thu May 15 10:22:15 2014'
author: 'Koen Bok'
}
{
commit: 'bb15649'
date: 'Thu May 15 10:03:24 2014'
author: 'Koen Bok'
}
{
commit: '293c7a7'
date: 'Tue May 13 19:16:59 2014'
author: 'Koen Bok'
}
{
commit: '2bbd10a'
date: 'Tue May 13 19:15:51 2014'
author: 'Koen Bok'
}
{
commit: 'c16f69f'
date: 'Tue May 13 19:13:41 2014'
author: 'Koen Bok'
}
{
commit: '2205ba0'
date: 'Sun Apr 27 00:10:24 2014'
author: 'Koen Bok'
}
{
commit: 'e175596'
date: 'Sun Apr 27 00:10:15 2014'
author: 'Koen Bok'
}
{
commit: '68e7cd4'
date: 'Sun Apr 20 11:19:07 2014'
author: 'Koen Bok'
}
{
commit: 'e95bf6b'
date: 'Sun Apr 20 11:18:52 2014'
author: 'Koen Bok'
}
{
commit: 'fad01d1'
date: 'Sat Apr 19 13:20:25 2014'
author: 'Koen Bok'
}
{
commit: '0999d52'
date: 'Sat Apr 19 13:14:47 2014'
author: 'Koen Bok'
}
{
commit: '449b155'
date: 'Sat Apr 19 12:46:04 2014'
author: 'Koen Bok'
}
{
commit: '958bf94'
date: 'Sat Apr 19 12:21:20 2014'
author: 'Koen Bok'
}
{
commit: '2c1fd6d'
date: 'Sat Apr 19 12:16:49 2014'
author: 'Koen Bok'
}
{
commit: 'b78ddb1'
date: 'Sat Apr 19 12:11:55 2014'
author: 'Koen Bok'
}
{
commit: '4b76906'
date: 'Sat Apr 19 11:45:36 2014'
author: 'Koen Bok'
}
{
commit: '19d4ede'
date: 'Sat Apr 19 11:45:12 2014'
author: 'Koen Bok'
}
{
commit: '1f3e010'
date: 'Fri Nov 22 13:52:01 2013'
author: 'Koen Bok'
}
{
commit: 'ae7c991'
date: 'Thu Nov 14 11:38:59 2013'
author: 'Koen Bok'
}
{
commit: '5fce264'
date: 'Thu Nov 14 11:38:48 2013'
author: 'Koen Bok'
}
{
commit: 'e4c5467'
date: 'Tue Nov 5 18:41:28 2013'
author: 'Koen Bok'
}
{
commit: '01172c3'
date: 'Tue Nov 5 18:38:20 2013'
author: 'Koen Bok'
}
{
commit: '7942d46'
date: 'Tue Oct 29 01:06:30 2013'
author: 'Koen Bok'
}
{
commit: '352e090'
date: 'Tue Oct 29 01:04:13 2013'
author: 'Koen Bok'
}
{
commit: '5d463ce'
date: 'Thu Oct 17 14:04:55 2013'
author: 'Koen Bok'
}
{
commit: '25a47eb'
date: 'Wed Oct 16 20:47:21 2013'
author: 'Koen Bok'
}
{
commit: '1d8cf24'
date: 'Tue Oct 15 15:07:33 2013'
author: 'Koen Bok'
}
{
commit: '71678bf'
date: 'Tue Oct 15 14:57:11 2013'
author: 'Koen Bok'
}
{
commit: '4a289c5'
date: 'Tue Oct 15 12:54:50 2013'
author: 'Koen Bok'
}
{
commit: '6c102c2'
date: 'Tue Oct 15 12:42:45 2013'
author: 'Koen Bok'
}
{
commit: 'b629549'
date: 'Tue Oct 15 12:05:06 2013'
author: 'Koen Bok'
}
{
commit: '78f3d91'
date: 'Tue Oct 15 03:02:06 2013'
author: 'Koen Bok'
}
{
commit: 'bad5a94'
date: 'Sun Oct 13 20:49:24 2013'
author: 'Koen Bok'
}
{
commit: '3e4051b'
date: 'Sun Oct 13 14:53:43 2013'
author: 'Koen Bok'
}
{
commit: '6eefeb0'
date: 'Sun Oct 13 14:38:53 2013'
author: 'Koen Bok'
}
{
commit: 'f367c76'
date: 'Sat Oct 12 17:53:50 2013'
author: 'Koen Bok'
}
{
commit: 'aa847ac'
date: 'Fri Oct 11 10:21:30 2013'
author: 'Koen Bok'
}
{
commit: 'd21e7d0'
date: 'Tue Oct 1 07:42:38 2013'
author: 'Koen Bok'
}
{
commit: '0cbe0a6'
date: 'Fri Sep 13 17:15:00 2013'
author: 'Koen Bok'
}
{
commit: 'e4ed1d8'
date: 'Fri Sep 13 08:03:53 2013'
author: 'Koen Bok'
}
{
commit: '3f01dfe'
date: 'Sun Sep 8 02:54:43 2013'
author: 'Koen Bok'
}
{
commit: '9bd31bf'
date: 'Sat Jul 13 11:37:26 2013'
author: 'Koen Bok'
}
{
commit: 'ae64324'
date: 'Thu May 30 16:16:25 2013'
author: 'Koen Bok'
}
{
commit: '4b8be87'
date: 'Thu May 30 16:06:44 2013'
author: 'Koen Bok'
}
{
commit: '538806f'
date: 'Wed May 29 15:58:48 2013'
author: 'Koen Bok'
}
{
commit: '2d59000'
date: 'Wed May 29 15:45:28 2013'
author: 'Koen Bok'
}
{
commit: 'dcd4faa'
date: 'Fri May 24 14:41:27 2013'
author: 'Koen Bok'
}
{
commit: '150f969'
date: 'Fri May 24 14:09:31 2013'
author: 'Koen Bok'
}
{
commit: '2322236'
date: 'Fri May 24 13:00:41 2013'
author: 'Koen Bok'
}
{
commit: '3d6b2d5'
date: 'Mon May 20 23:53:41 2013'
author: 'Koen Bok'
}
{
commit: 'ee67a19'
date: 'Mon May 20 23:53:35 2013'
author: 'Koen Bok'
}
{
commit: '0b97774'
date: 'Thu May 16 13:24:58 2013'
author: 'Koen Bok'
}
{
commit: '6bfb912'
date: 'Thu May 16 13:24:20 2013'
author: 'Koen Bok'
}
{
commit: '6fd264c'
date: 'Wed May 15 16:22:27 2013'
author: 'Koen Bok'
}
{
commit: 'c45dbb7'
date: 'Mon May 13 20:55:56 2013'
author: 'Koen Bok'
}
{
commit: 'a0fd659'
date: 'Mon May 13 20:55:50 2013'
author: 'Koen Bok'
}
{
commit: '6ce75ff'
date: 'Sun May 12 00:24:11 2013'
author: 'Koen Bok'
}
{
commit: '3e381c6'
date: 'Sun May 12 00:23:55 2013'
author: 'Koen Bok'
}
{
commit: '8a8f4ae'
date: 'Sun May 12 00:23:40 2013'
author: 'Koen Bok'
}
{
commit: '167722f'
date: 'Sun May 12 00:23:13 2013'
author: 'Koen Bok'
}
{
commit: 'ec11e8b'
date: 'Sun May 12 00:23:06 2013'
author: 'Koen Bok'
}
{
commit: '5728949'
date: 'Sun May 12 00:22:53 2013'
author: 'Koen Bok'
}
{
commit: '318b135'
date: 'Wed May 8 20:13:38 2013'
author: 'Koen Bok'
}
{
commit: '1296d57'
date: 'Tue May 7 22:45:07 2013'
author: 'Koen Bok'
}
{
commit: '56dc7c1'
date: 'Tue May 7 22:44:42 2013'
author: 'Koen Bok'
}
{
commit: '8b12dcb'
date: 'Sun May 5 11:45:10 2013'
author: 'Koen Bok'
}
{
commit: 'ed0c03b'
date: 'Fri May 3 00:07:53 2013'
author: 'Koen Bok'
}
{
commit: '2ebf5b3'
date: 'Fri May 3 00:02:23 2013'
author: 'Koen Bok'
}
{
commit: '3ad0dc0'
date: 'Thu May 2 22:53:36 2013'
author: 'Koen Bok'
}
{
commit: '5b23d4e'
date: 'Thu May 2 22:50:16 2013'
author: 'Koen Bok'
}
{
commit: '8001e3a'
date: 'Thu May 2 22:50:11 2013'
author: 'Koen Bok'
}
{
commit: 'c02eef3'
date: 'Thu May 2 22:48:52 2013'
author: 'Koen Bok'
}
{
commit: 'acb0e29'
date: 'Thu May 2 22:48:35 2013'
author: 'Koen Bok'
}
{
commit: 'c3de315'
date: 'Thu May 2 22:48:25 2013'
author: 'Koen Bok'
}
{
commit: 'ff7e8c0'
date: 'Thu May 2 22:48:15 2013'
author: 'Koen Bok'
}
{
commit: '4017411'
date: 'Thu May 2 22:48:05 2013'
author: 'Koen Bok'
}
{
commit: 'e494459'
date: 'Thu May 2 22:44:56 2013'
author: 'Koen Bok'
}
{
commit: '2b3accc'
date: 'Thu May 2 22:44:33 2013'
author: 'Koen Bok'
}
{
commit: '1ec6a88'
date: 'Thu May 2 22:44:15 2013'
author: 'Koen Bok'
}
{
commit: '3e2c579'
date: 'Thu May 2 22:44:01 2013'
author: 'Koen Bok'
}
{
commit: '9b9dadd'
date: 'Thu May 2 22:43:41 2013'
author: 'Koen Bok'
}
{
commit: '5da20c9'
date: 'Wed May 1 20:43:29 2013'
author: 'Koen Bok'
}
{
commit: '078e2d3'
date: 'Tue Apr 30 11:29:15 2013'
author: 'Koen Bok'
}
{
commit: '578a6c5'
date: 'Tue Apr 30 11:29:08 2013'
author: 'Koen Bok'
}
{
commit: '518df87'
date: 'Tue Apr 30 11:28:28 2013'
author: 'Koen Bok'
}
{
commit: '62dfbf3'
date: 'Tue Apr 30 11:28:13 2013'
author: 'Koen Bok'
}
{
commit: '5e5b065'
date: 'Tue Apr 30 11:27:54 2013'
author: 'Koen Bok'
}
{
commit: '09f407a'
date: 'Tue Apr 30 11:25:30 2013'
author: 'Koen Bok'
}
{
commit: 'a85039b'
date: 'Tue Apr 30 11:24:53 2013'
author: 'Koen Bok'
}
{
commit: '524606a'
date: 'Tue Apr 30 11:23:59 2013'
author: 'Koen Bok'
}
{
commit: '2870f60'
date: 'Sat Apr 27 12:51:52 2013'
author: 'Koen Bok'
}
{
commit: 'a258ebf'
date: 'Sat Apr 27 12:00:17 2013'
author: 'Koen Bok'
}
{
commit: '342a117'
date: 'Sat Apr 27 11:59:44 2013'
author: 'Koen Bok'
}
{
commit: '66501bb'
date: 'Sat Apr 27 11:04:06 2013'
author: 'Koen Bok'
}
{
commit: '23f141a'
date: 'Sat Apr 27 10:46:39 2013'
author: 'Koen Bok'
}
{
commit: 'b480b5f'
date: 'Sat Apr 27 10:45:45 2013'
author: 'Koen Bok'
}
{
commit: '18773c5'
date: 'Sat Apr 27 10:45:33 2013'
author: 'Koen Bok'
}
{
commit: '0acf5ea'
date: 'Thu Apr 25 17:39:23 2013'
author: 'Koen Bok'
}
{
commit: '2bf250b'
date: 'Thu Apr 25 17:27:34 2013'
author: 'Koen Bok'
}
{
commit: '949744f'
date: 'Thu Apr 25 17:27:17 2013'
author: 'Koen Bok'
}
{
commit: '987d99d'
date: 'Thu Apr 25 17:26:29 2013'
author: 'Koen Bok'
}
{
commit: 'a9a6a92'
date: 'Thu Apr 25 17:11:23 2013'
author: 'Koen Bok'
}
{
commit: '7469d1f'
date: 'Thu Apr 25 15:02:35 2013'
author: 'Koen Bok'
}
{
commit: '872f5c2'
date: 'Thu Apr 25 14:59:08 2013'
author: 'Koen Bok'
}
{
commit: 'ab6a379'
date: 'Thu Apr 25 14:57:13 2013'
author: 'Koen Bok'
}
{
commit: '482d86a'
date: 'Thu Apr 25 14:38:41 2013'
author: 'Koen Bok'
}
{
commit: 'a240ff4'
date: 'Thu Apr 25 14:38:24 2013'
author: 'Koen Bok'
}
{
commit: '378e7ff'
date: 'Thu Apr 25 14:37:55 2013'
author: 'Koen Bok'
}
{
commit: '8f4385d'
date: 'Thu Apr 25 14:36:54 2013'
author: 'Koen Bok'
}
{
commit: 'efaef61'
date: 'Thu Apr 25 14:36:10 2013'
author: 'Koen Bok'
}
{
commit: '48beba7'
date: 'Thu Apr 25 14:35:57 2013'
author: 'Koen Bok'
}
{
commit: '8f0ef3f'
date: 'Thu Apr 25 14:35:42 2013'
author: 'Koen Bok'
}
{
commit: '5b1e121'
date: 'Thu Apr 25 14:35:03 2013'
author: 'Koen Bok'
}
{
commit: 'c05bdd1'
date: 'Sun Apr 21 19:01:19 2013'
author: 'Koen Bok'
}
{
commit: 'bb67d68'
date: 'Sun Apr 21 19:01:03 2013'
author: 'Koen Bok'
}
{
commit: 'b00dc5a'
date: 'Sun Apr 21 18:53:03 2013'
author: 'Koen Bok'
}
{
commit: '80bd8e7'
date: 'Sun Apr 21 18:52:42 2013'
author: 'Koen Bok'
}
{
commit: 'd9fff17'
date: 'Sun Apr 21 18:52:22 2013'
author: 'Koen Bok'
}
{
commit: 'b22ccc9'
date: 'Sun Apr 21 18:51:58 2013'
author: 'Koen Bok'
}
{
commit: 'b1fbf81'
date: 'Sun Apr 21 18:51:39 2013'
author: 'Koen Bok'
}
{
commit: '88c3ccb'
date: 'Sun Apr 21 18:51:04 2013'
author: 'Koen Bok'
}
{
commit: '816cfa7'
date: 'Sat Apr 20 11:28:37 2013'
author: 'Koen Bok'
}
{
commit: '33d86fb'
date: 'Sat Apr 20 11:28:05 2013'
author: 'Koen Bok'
}
{
commit: '4e8f467'
date: 'Sat Apr 20 11:26:26 2013'
author: 'Koen Bok'
}
{
commit: 'c0207c0'
date: 'Sat Apr 20 11:25:44 2013'
author: 'Koen Bok'
}
{
commit: '20c81a1'
date: 'Fri Apr 12 23:02:20 2013'
author: 'Koen Bok'
}
{
commit: '6da090f'
date: 'Fri Apr 12 23:02:07 2013'
author: 'Koen Bok'
}
{
commit: 'd840b70'
date: 'Wed Apr 10 00:48:14 2013'
author: 'Koen Bok'
}
{
commit: '2aaea25'
date: 'Wed Apr 10 00:38:01 2013'
author: 'Koen Bok'
}
{
commit: '3fea5e0'
date: 'Thu Apr 4 00:39:13 2013'
author: 'Koen Bok'
}
{
commit: '8fe4409'
date: 'Thu Apr 4 00:39:06 2013'
author: 'Koen Bok'
}
{
commit: 'f149a9a'
date: 'Thu Apr 4 00:38:45 2013'
author: 'Koen Bok'
}
{
commit: '64dee09'
date: 'Thu Apr 4 00:38:33 2013'
author: 'Koen Bok'
}
{
commit: '1c91f4a'
date: 'Sat Mar 23 16:02:21 2013'
author: 'Koen Bok'
}
{
commit: '685804a'
date: 'Sat Mar 23 15:59:21 2013'
author: 'Koen Bok'
}
{
commit: 'cda5559'
date: 'Fri Mar 22 13:50:20 2013'
author: 'Koen Bok'
}
{
commit: '1018b69'
date: 'Fri Mar 22 13:50:04 2013'
author: 'Koen Bok'
}
{
commit: '37e04d5'
date: 'Tue Mar 19 18:32:22 2013'
author: 'Koen Bok'
}
{
commit: '110b2b5'
date: 'Tue Mar 19 18:31:24 2013'
author: 'Koen Bok'
}
{
commit: '78c16ef'
date: 'Tue Mar 19 18:30:43 2013'
author: 'Koen Bok'
}
{
commit: '2f345ec'
date: 'Mon Mar 18 21:35:08 2013'
author: 'Koen Bok'
}
{
commit: 'cd3a9e5'
date: 'Mon Mar 18 21:34:59 2013'
author: 'Koen Bok'
}
{
commit: 'c099ddd'
date: 'Mon Mar 18 21:34:29 2013'
author: 'Koen Bok'
}
{
commit: '5192cec'
date: 'Mon Mar 18 16:23:45 2013'
author: 'Koen Bok'
}
{
commit: 'd6348db'
date: 'Sun Mar 17 19:28:41 2013'
author: 'Koen Bok'
}
{
commit: 'e73e07a'
date: 'Sun Mar 17 19:16:27 2013'
author: 'Koen Bok'
}
{
commit: '98ca654'
date: 'Sun Mar 17 17:20:52 2013'
author: 'Koen Bok'
}
{
commit: '37cd9ec'
date: 'Sun Mar 17 16:45:26 2013'
author: 'Koen Bok'
}
{
commit: 'f74c9eb'
date: 'Sun Mar 17 15:54:12 2013'
author: 'Koen Bok'
}
{
commit: '5f9b84f'
date: 'Thu Mar 14 09:16:37 2013'
author: 'Koen Bok'
}
{
commit: '6e29b46'
date: 'Wed Mar 13 17:05:41 2013'
author: 'Koen Bok'
}
{
commit: '0dc7c7f'
date: 'Wed Mar 13 17:05:06 2013'
author: 'Koen Bok'
}
{
commit: 'cf5d564'
date: 'Wed Mar 13 16:53:07 2013'
author: 'Koen Bok'
}
{
commit: 'bde291d'
date: 'Wed Mar 13 12:13:38 2013'
author: 'Koen Bok'
}
{
commit: 'e2e5546'
date: 'Wed Mar 13 12:13:14 2013'
author: 'Koen Bok'
}
{
commit: '67f1220'
date: 'Wed Mar 13 12:12:36 2013'
author: 'Koen Bok'
}
{
commit: 'ec69c6d'
date: 'Tue Mar 12 16:23:01 2013'
author: 'Koen Bok'
}
{
commit: '08504e6'
date: 'Tue Mar 12 15:58:56 2013'
author: 'Koen Bok'
}
{
commit: '3a44655'
date: 'Tue Mar 12 15:54:51 2013'
author: 'Koen Bok'
}
{
commit: '2556b6f'
date: 'Tue Mar 12 15:02:47 2013'
author: 'Koen Bok'
}
{
commit: '250cb04'
date: 'Tue Mar 12 15:02:01 2013'
author: 'Koen Bok'
}
{
commit: 'a379acc'
date: 'Mon Mar 11 15:13:03 2013'
author: 'Koen Bok'
}
{
commit: 'b1d8b15'
date: 'Mon Mar 11 00:41:22 2013'
author: 'Koen Bok'
}
{
commit: '589a47e'
date: 'Sun Mar 10 23:39:28 2013'
author: 'Koen Bok'
}
{
commit: '7d65cab'
date: 'Fri Mar 1 15:29:59 2013'
author: 'Koen Bok'
}
{
commit: 'd0106fb'
date: 'Fri Mar 1 15:27:37 2013'
author: 'Koen Bok'
}
{
commit: '6d0c7c9'
date: 'Fri Mar 1 05:00:43 2013'
author: 'Koen Bok'
}
{
commit: '913c290'
date: 'Tue Feb 26 14:30:41 2013'
author: 'Koen Bok'
}
{
commit: '27de5fe'
date: 'Sun Feb 17 21:45:19 2013'
author: 'Koen Bok'
}
{
commit: 'eaa81c4'
date: 'Sat Feb 16 15:19:53 2013'
author: 'Koen Bok'
}
{
commit: '27f5914'
date: 'Sat Feb 16 15:11:28 2013'
author: 'Koen Bok'
}
{
commit: '57f3631'
date: 'Fri Jan 25 10:26:54 2013'
author: 'Koen Bok'
}
{
commit: '5a947f5'
date: 'Mon Jan 14 18:51:10 2013'
author: 'Koen Bok'
}
{
commit: 'b28681d'
date: 'Mon Jan 14 14:20:33 2013'
author: 'Koen Bok'
}
{
commit: 'c1ab12d'
date: 'Mon Jan 14 14:20:26 2013'
author: 'Koen Bok'
}
{
commit: '53fd22e'
date: 'Mon Jan 14 14:19:30 2013'
author: 'Koen Bok'
}
{
commit: '54e88cc'
date: 'Mon Jan 14 14:18:50 2013'
author: 'Koen Bok'
}
{
commit: 'f975305'
date: 'Wed Jan 2 16:20:57 2013'
author: 'Koen Bok'
}
{
commit: 'bb1d239'
date: 'Mon Dec 31 11:54:33 2012'
author: 'Koen Bok'
}
{
commit: '1677a43'
date: 'Wed Dec 26 02:15:58 2012'
author: 'Koen Bok'
}
{
commit: '818a4a2'
date: 'Tue Dec 25 17:33:03 2012'
author: 'Koen Bok'
}
{
commit: '0c64794'
date: 'Tue Dec 25 17:32:37 2012'
author: 'Koen Bok'
}
{
commit: '3c2d257'
date: 'Tue Dec 25 17:32:01 2012'
author: 'Koen Bok'
}
{
commit: '075b827'
date: 'Tue Dec 25 17:31:21 2012'
author: 'Koen Bok'
}
{
commit: 'd82fc4e'
date: 'Tue Dec 25 17:30:56 2012'
author: 'Koen Bok'
}
{
commit: '1358741'
date: 'Tue Dec 25 17:30:37 2012'
author: 'Koen Bok'
}
{
commit: 'bfdceaf'
date: 'Fri Dec 21 14:05:21 2012'
author: 'Koen Bok'
}
{
commit: '5bdeb37'
date: 'Thu Dec 20 16:08:05 2012'
author: 'Koen Bok'
}
{
commit: '35ad7df'
date: 'Thu Dec 20 16:06:57 2012'
author: 'Koen Bok'
}
{
commit: 'bcd1a1f'
date: 'Thu Dec 20 15:59:48 2012'
author: 'Koen Bok'
}
] | 140014 | exports.builds = [
{
commit: 'c042de3'
date: 'Fri Mar 9 16:22:13 2018'
author: '<NAME>'
}
{
commit: '08e7197'
date: 'Thu Mar 8 16:19:36 2018'
author: '<NAME>'
}
{
commit: 'a426bbe'
date: 'Wed Mar 7 15:43:02 2018'
author: '<NAME>'
}
{
commit: '7cd32b4'
date: 'Fri Mar 2 13:25:08 2018'
author: '<NAME>'
}
{
commit: '3788e18'
date: 'Fri Mar 2 12:18:38 2018'
author: '<NAME>'
}
{
commit: '1206293'
date: 'Fri Feb 23 11:47:58 2018'
author: '<NAME>'
}
{
commit: 'f32dd1f'
date: 'Tue Feb 20 13:03:00 2018'
author: '<NAME>'
}
{
commit: '5cf9935'
date: 'Tue Feb 20 13:01:11 2018'
author: '<NAME>'
}
{
commit: '03ae78d'
date: 'Tue Feb 20 09:41:19 2018'
author: '<NAME>'
}
{
commit: '8616043'
date: 'Tue Feb 20 09:32:19 2018'
author: '<NAME>'
}
{
commit: 'c110c3b'
date: 'Mon Feb 19 14:14:33 2018'
author: '<NAME>'
}
{
commit: 'aaf80b0'
date: 'Fri Feb 16 15:38:13 2018'
author: '<NAME>'
}
{
commit: '64a7037'
date: 'Fri Feb 16 15:33:36 2018'
author: '<NAME>'
}
{
commit: '454aef4'
date: 'Fri Feb 16 10:20:27 2018'
author: '<NAME>'
}
{
commit: '5ce33ab'
date: 'Mon Feb 12 13:20:46 2018'
author: '<NAME>'
}
{
commit: '94daa97'
date: 'Mon Feb 12 13:05:12 2018'
author: '<NAME>'
}
{
commit: 'a32809d'
date: 'Mon Feb 12 12:24:54 2018'
author: '<NAME>'
}
{
commit: '0590c8c'
date: 'Thu Feb 1 23:24:17 2018'
author: '<NAME>'
}
{
commit: 'e210fb1'
date: 'Thu Feb 1 23:05:19 2018'
author: '<NAME>'
}
{
commit: '90d9896'
date: 'Thu Feb 1 18:47:35 2018'
author: '<NAME>'
}
{
commit: 'e249eb0'
date: 'Thu Feb 1 15:34:44 2018'
author: '<NAME>'
}
{
commit: '08c3304'
date: 'Thu Feb 1 11:55:58 2018'
author: '<NAME>'
}
{
commit: '2d58487'
date: 'Thu Feb 1 11:55:42 2018'
author: '<NAME>'
}
{
commit: '9f07664'
date: 'Thu Feb 1 09:47:28 2018'
author: '<NAME>'
}
{
commit: '35dd174'
date: 'Thu Feb 1 09:47:15 2018'
author: '<NAME>'
}
{
commit: '8cc82e7'
date: 'Thu Feb 1 09:18:03 2018'
author: '<NAME>'
}
{
commit: 'e1711bd'
date: 'Thu Feb 1 00:08:08 2018'
author: '<NAME>'
}
{
commit: '29e1659'
date: 'Wed Jan 31 23:29:27 2018'
author: '<NAME>'
}
{
commit: '0ff94ee'
date: 'Wed Jan 31 23:19:56 2018'
author: '<NAME>'
}
{
commit: '8b1c6f3'
date: 'Wed Jan 31 22:46:38 2018'
author: '<NAME>'
}
{
commit: '77dbf87'
date: 'Wed Jan 31 22:41:34 2018'
author: '<NAME>'
}
{
commit: '23da2bf'
date: 'Sun Jan 28 01:11:53 2018'
author: '<NAME>'
}
{
commit: '0b25219'
date: 'Fri Jan 26 17:35:28 2018'
author: '<NAME>'
}
{
commit: 'd9d5ef5'
date: 'Fri Jan 26 16:34:42 2018'
author: '<NAME>'
}
{
commit: 'e8c41ca'
date: 'Fri Jan 26 16:34:28 2018'
author: '<NAME>'
}
{
commit: '5d18f8a'
date: 'Fri Jan 26 16:20:39 2018'
author: '<NAME>'
}
{
commit: '8bb17ec'
date: 'Fri Jan 26 16:19:31 2018'
author: '<NAME>'
}
{
commit: '8744c3e'
date: 'Wed Jan 24 12:12:02 2018'
author: '<NAME>'
}
{
commit: 'a2342fc'
date: 'Wed Jan 24 12:11:50 2018'
author: '<NAME>'
}
{
commit: '370f5c8'
date: 'Wed Jan 24 12:11:18 2018'
author: '<NAME>'
}
{
commit: '044f470'
date: 'Wed Jan 24 12:07:05 2018'
author: '<NAME>'
}
{
commit: '8df9369'
date: 'Tue Jan 23 13:39:41 2018'
author: '<NAME>'
}
{
commit: '73e3afb'
date: 'Tue Jan 23 13:26:17 2018'
author: '<NAME>'
}
{
commit: 'b6af600'
date: 'Tue Jan 23 13:23:57 2018'
author: '<NAME>'
}
{
commit: 'f6146fa'
date: 'Mon Jan 22 16:57:11 2018'
author: '<NAME>'
}
{
commit: 'c3e2c64'
date: 'Mon Jan 22 16:56:59 2018'
author: '<NAME>'
}
{
commit: '97d82c5'
date: 'Mon Jan 22 16:16:30 2018'
author: '<NAME>'
}
{
commit: '7803f03'
date: 'Mon Jan 22 13:57:39 2018'
author: '<NAME>'
}
{
commit: 'a15f981'
date: 'Fri Jan 19 17:38:50 2018'
author: '<NAME>'
}
{
commit: '3118f37'
date: 'Fri Jan 19 16:39:35 2018'
author: '<NAME>'
}
{
commit: '5ae3756'
date: 'Fri Jan 19 16:39:24 2018'
author: '<NAME>'
}
{
commit: '3b115bf'
date: 'Fri Jan 19 13:51:22 2018'
author: '<NAME>'
}
{
commit: 'd35606e'
date: 'Fri Jan 19 13:45:08 2018'
author: '<NAME>'
}
{
commit: 'ee99adf'
date: 'Fri Jan 19 13:02:30 2018'
author: '<NAME>'
}
{
commit: '55a881b'
date: 'Fri Jan 19 12:29:14 2018'
author: '<NAME>'
}
{
commit: '875e2ba'
date: 'Fri Jan 19 12:15:26 2018'
author: '<NAME>'
}
{
commit: 'c404ded'
date: 'Fri Jan 19 12:07:22 2018'
author: '<NAME>'
}
{
commit: '37e97cb'
date: 'Thu Jan 18 15:19:33 2018'
author: '<NAME>'
}
{
commit: 'c40be67'
date: 'Thu Jan 18 12:51:14 2018'
author: '<NAME>'
}
{
commit: '6cdc580'
date: 'Thu Jan 18 12:06:37 2018'
author: '<NAME>'
}
{
commit: '60e5d26'
date: 'Thu Jan 18 11:40:23 2018'
author: '<NAME>'
}
{
commit: '6d165dd'
date: 'Thu Jan 18 09:58:52 2018'
author: '<NAME>'
}
{
commit: 'fed1f13'
date: 'Wed Jan 17 16:56:16 2018'
author: '<NAME>'
}
{
commit: 'f89a6b9'
date: 'Wed Jan 17 13:23:55 2018'
author: '<NAME>'
}
{
commit: 'd19ad1e'
date: 'Wed Jan 17 12:27:32 2018'
author: '<NAME>'
}
{
commit: 'cea1e51'
date: 'Wed Jan 17 10:59:18 2018'
author: '<NAME>'
}
{
commit: 'ad45c89'
date: 'Wed Jan 17 10:58:52 2018'
author: '<NAME>'
}
{
commit: '6ffdfe9'
date: 'Wed Jan 17 09:53:15 2018'
author: '<NAME>'
}
{
commit: '10af20c'
date: 'Mon Jan 15 15:16:26 2018'
author: '<NAME>'
}
{
commit: 'd047b30'
date: 'Wed Jan 17 09:21:32 2018'
author: '<NAME>'
}
{
commit: '263476e'
date: 'Wed Jan 17 09:21:13 2018'
author: '<NAME>'
}
{
commit: '75be5e7'
date: 'Tue Jan 16 23:13:22 2018'
author: '<NAME>'
}
{
commit: '43fd2a4'
date: 'Tue Jan 16 23:12:40 2018'
author: '<NAME>'
}
{
commit: 'efbc233'
date: 'Tue Jan 16 17:28:16 2018'
author: '<NAME>'
}
{
commit: 'e80c465'
date: 'Tue Jan 16 14:16:21 2018'
author: '<NAME>'
}
{
commit: '1aec23d'
date: 'Tue Jan 16 14:15:50 2018'
author: '<NAME>'
}
{
commit: 'e3a10c8'
date: 'Tue Jan 16 12:39:11 2018'
author: '<NAME>'
}
{
commit: 'ca62c90'
date: 'Tue Jan 16 11:01:50 2018'
author: '<NAME>'
}
{
commit: '42cc2a3'
date: 'Mon Jan 15 18:26:31 2018'
author: '<NAME>'
}
{
commit: 'a0030c0'
date: 'Mon Jan 15 16:22:43 2018'
author: '<NAME>'
}
{
commit: 'a909132'
date: 'Mon Jan 15 14:27:18 2018'
author: '<NAME>'
}
{
commit: '07ac667'
date: 'Mon Jan 15 14:26:59 2018'
author: '<NAME>'
}
{
commit: '3413e12'
date: 'Mon Jan 15 14:22:27 2018'
author: '<NAME>'
}
{
commit: '00df8de'
date: 'Mon Jan 15 14:22:14 2018'
author: '<NAME>'
}
{
commit: 'd90e148'
date: 'Mon Jan 15 12:39:26 2018'
author: '<NAME>'
}
{
commit: '6fd6c54'
date: 'Mon Jan 15 12:33:39 2018'
author: '<NAME>'
}
{
commit: '939b4df'
date: 'Mon Jan 15 12:21:22 2018'
author: '<NAME>'
}
{
commit: '206fb0e'
date: 'Thu Dec 21 14:46:04 2017'
author: '<NAME>'
}
{
commit: '3d6094a'
date: 'Tue Dec 19 23:07:35 2017'
author: '<NAME>'
}
{
commit: '7173b2d'
date: 'Tue Dec 19 23:07:17 2017'
author: '<NAME>'
}
{
commit: '549b1d0'
date: 'Tue Dec 19 22:22:35 2017'
author: '<NAME>'
}
{
commit: '3575b95'
date: 'Sun Dec 17 21:37:31 2017'
author: '<NAME>'
}
{
commit: '2aa68af'
date: 'Sun Dec 17 21:37:11 2017'
author: '<NAME>'
}
{
commit: '0821535'
date: 'Wed Dec 13 14:10:56 2017'
author: '<NAME>'
}
{
commit: 'a557f66'
date: 'Wed Dec 13 14:10:42 2017'
author: '<NAME>'
}
{
commit: 'a740c2d'
date: 'Tue Dec 12 18:12:26 2017'
author: '<NAME>'
}
{
commit: 'd6fd324'
date: 'Tue Dec 12 17:02:02 2017'
author: '<NAME>'
}
{
commit: '6f33b51'
date: 'Mon Dec 11 21:12:55 2017'
author: '<NAME>'
}
{
commit: 'c93bf72'
date: 'Mon Dec 11 14:18:31 2017'
author: '<NAME>'
}
{
commit: '651439b'
date: 'Fri Dec 8 12:26:06 2017'
author: '<NAME>'
}
{
commit: '5fbb70e'
date: 'Thu Dec 7 15:16:08 2017'
author: '<NAME>'
}
{
commit: '6e31668'
date: 'Thu Dec 7 15:17:53 2017'
author: '<NAME>'
}
{
commit: '58d7faf'
date: 'Fri Nov 17 16:20:40 2017'
author: '<NAME>'
}
{
commit: '021b060'
date: 'Wed Nov 15 23:51:41 2017'
author: '<NAME>'
}
{
commit: 'a206425'
date: 'Wed Nov 15 17:38:06 2017'
author: '<NAME>'
}
{
commit: 'd3ed9ec'
date: 'Wed Nov 15 17:13:43 2017'
author: '<NAME>'
}
{
commit: '4939336'
date: 'Wed Nov 15 13:48:23 2017'
author: '<NAME>'
}
{
commit: '863dc71'
date: 'Wed Nov 15 12:07:10 2017'
author: '<NAME>'
}
{
commit: 'f5a45ee'
date: 'Wed Nov 15 11:34:28 2017'
author: '<NAME>'
}
{
commit: '8d833db'
date: 'Wed Nov 15 10:28:16 2017'
author: '<NAME>'
}
{
commit: '613c07b'
date: 'Wed Nov 15 10:27:50 2017'
author: '<NAME>'
}
{
commit: 'd1ffabc'
date: 'Wed Nov 8 12:40:42 2017'
author: '<NAME>'
}
{
commit: '9dd8b13'
date: 'Wed Nov 8 11:36:59 2017'
author: '<NAME>'
}
{
commit: '76769d6'
date: 'Wed Nov 8 11:11:04 2017'
author: '<NAME>'
}
{
commit: 'b0ada03'
date: 'Wed Nov 8 10:16:47 2017'
author: '<NAME>'
}
{
commit: '87660fa'
date: 'Tue Nov 7 17:13:49 2017'
author: '<NAME>'
}
{
commit: '56878c5'
date: 'Tue Nov 7 14:03:36 2017'
author: '<NAME>'
}
{
commit: 'a4be0ab'
date: 'Tue Nov 7 13:59:00 2017'
author: '<NAME>'
}
{
commit: '7252e66'
date: 'Tue Nov 7 12:28:52 2017'
author: '<NAME>'
}
{
commit: 'a03d1f7'
date: 'Mon Nov 6 16:30:24 2017'
author: '<NAME>'
}
{
commit: 'f622504'
date: 'Sat Nov 4 22:07:07 2017'
author: '<NAME>'
}
{
commit: '7a25c71'
date: 'Sat Nov 4 21:16:05 2017'
author: '<NAME>'
}
{
commit: '2e42846'
date: 'Sat Nov 4 21:03:13 2017'
author: '<NAME>'
}
{
commit: '088eddd'
date: 'Sat Nov 4 20:11:11 2017'
author: '<NAME>'
}
{
commit: 'aa5e733'
date: 'Sat Nov 4 20:02:05 2017'
author: '<NAME>'
}
{
commit: '4a61eae'
date: 'Sat Nov 4 18:13:40 2017'
author: '<NAME>'
}
{
commit: '44d206b'
date: 'Sat Nov 4 17:26:20 2017'
author: '<NAME>'
}
{
commit: '04e95d1'
date: 'Sat Nov 4 15:01:26 2017'
author: '<NAME>'
}
{
commit: '975adc6'
date: 'Sat Nov 4 14:14:30 2017'
author: '<NAME>'
}
{
commit: '0f34f5c'
date: 'Fri Oct 27 15:58:47 2017'
author: '<NAME>'
}
{
commit: '54e9bcb'
date: 'Fri Oct 20 13:51:48 2017'
author: '<NAME>'
}
{
commit: '4af09bc'
date: 'Thu Oct 19 14:19:50 2017'
author: '<NAME>'
}
{
commit: '91579c4'
date: 'Thu Oct 19 14:10:17 2017'
author: '<NAME>'
}
{
commit: 'bac4ce9'
date: 'Mon Oct 9 10:44:22 2017'
author: '<NAME>'
}
{
commit: '0524216'
date: 'Mon Oct 9 09:00:51 2017'
author: '<NAME>'
}
{
commit: 'b274bf2'
date: 'Tue Oct 3 15:07:46 2017'
author: '<NAME>'
}
{
commit: 'e5c0723'
date: 'Mon Oct 2 13:32:43 2017'
author: '<NAME>'
}
{
commit: '2a91ec1'
date: 'Mon Oct 2 11:43:47 2017'
author: '<NAME>'
}
{
commit: 'f057af6'
date: 'Thu Sep 28 15:59:32 2017'
author: '<NAME>'
}
{
commit: '6e1d21b'
date: 'Thu Sep 28 12:48:05 2017'
author: '<NAME>'
}
{
commit: '86e248f'
date: 'Thu Sep 28 12:45:09 2017'
author: '<NAME>'
}
{
commit: 'df8942c'
date: 'Thu Sep 28 12:44:30 2017'
author: '<NAME>'
}
{
commit: '790e32e'
date: 'Thu Sep 28 11:33:03 2017'
author: '<NAME>'
}
{
commit: 'a61783d'
date: 'Tue Sep 19 14:11:22 2017'
author: '<NAME>'
}
{
commit: '266f591'
date: 'Mon Sep 18 17:53:43 2017'
author: '<NAME>'
}
{
commit: 'f14b32f'
date: 'Mon Sep 18 16:28:25 2017'
author: '<NAME>'
}
{
commit: '7f67e44'
date: 'Mon Sep 18 10:59:14 2017'
author: '<NAME>'
}
{
commit: 'a3b0c61'
date: 'Mon Sep 18 10:34:53 2017'
author: '<NAME>'
}
{
commit: 'a32d208'
date: 'Mon Sep 18 10:04:56 2017'
author: '<NAME>'
}
{
commit: '70e3606'
date: 'Wed Sep 13 16:56:00 2017'
author: '<NAME>'
}
{
commit: 'adda6dd'
date: 'Wed Sep 13 16:13:06 2017'
author: '<NAME>'
}
{
commit: '5b6867b'
date: 'Wed Sep 13 11:52:14 2017'
author: '<NAME>'
}
{
commit: '670822c'
date: 'Tue Sep 12 11:47:22 2017'
author: '<NAME>'
}
{
commit: '16ca97e'
date: 'Mon Sep 11 23:25:30 2017'
author: '<NAME>'
}
{
commit: '5745d60'
date: 'Mon Sep 11 12:12:47 2017'
author: '<NAME>'
}
{
commit: '9f0dc99'
date: 'Fri Sep 8 09:17:43 2017'
author: '<NAME>'
}
{
commit: 'e399eba'
date: 'Fri Sep 8 09:17:28 2017'
author: '<NAME>'
}
{
commit: '98a128c'
date: 'Fri Sep 8 09:03:01 2017'
author: '<NAME>'
}
{
commit: '53fd1ee'
date: 'Fri Sep 8 09:02:18 2017'
author: '<NAME>'
}
{
commit: '7845534'
date: 'Fri Sep 8 09:01:52 2017'
author: '<NAME>'
}
{
commit: '9d9f59d'
date: 'Fri Sep 8 08:24:28 2017'
author: '<NAME>'
}
{
commit: '48e7b78'
date: 'Thu Sep 7 18:42:20 2017'
author: '<NAME>'
}
{
commit: '865771b'
date: 'Thu Sep 7 18:28:29 2017'
author: '<NAME>'
}
{
commit: '3ec195f'
date: 'Thu Sep 7 18:27:27 2017'
author: '<NAME>'
}
{
commit: 'd0a2b52'
date: 'Tue Sep 5 14:33:56 2017'
author: '<NAME>'
}
{
commit: 'ac2d0b2'
date: 'Tue Sep 5 14:18:53 2017'
author: '<NAME>'
}
{
commit: '7994d36'
date: 'Tue Sep 5 12:28:17 2017'
author: '<NAME>'
}
{
commit: '242f61d'
date: 'Tue Sep 5 11:50:03 2017'
author: '<NAME>'
}
{
commit: 'fa5ddfa'
date: 'Fri Sep 1 11:21:38 2017'
author: '<NAME>'
}
{
commit: 'e982caf'
date: 'Fri Sep 1 10:21:58 2017'
author: '<NAME>'
}
{
commit: '4fdc139'
date: 'Tue Aug 29 16:07:45 2017'
author: '<NAME>'
}
{
commit: '0c8e7c2'
date: 'Tue Aug 29 15:18:55 2017'
author: '<NAME>'
}
{
commit: 'e0abfdf'
date: 'Tue Aug 29 13:52:24 2017'
author: '<NAME>'
}
{
commit: '76592c1'
date: 'Tue Aug 29 13:49:51 2017'
author: '<NAME>'
}
{
commit: '1d564f1'
date: 'Thu Aug 24 14:12:16 2017'
author: '<NAME>'
}
{
commit: 'd526e51'
date: 'Thu Aug 24 13:02:48 2017'
author: '<NAME>'
}
{
commit: 'a9c2efe'
date: 'Wed Aug 23 13:11:23 2017'
author: '<NAME>'
}
{
commit: 'ee120e5'
date: 'Thu Jul 27 12:16:28 2017'
author: '<NAME>'
}
{
commit: 'b6b792c'
date: 'Thu Jul 27 11:51:19 2017'
author: '<NAME>'
}
{
commit: '25a34cf'
date: 'Thu Jul 27 08:46:01 2017'
author: '<NAME>'
}
{
commit: '6bc3024'
date: 'Thu Jul 27 08:25:49 2017'
author: '<NAME>'
}
{
commit: 'f75a057'
date: 'Mon Jul 24 17:02:14 2017'
author: '<NAME>'
}
{
commit: 'fcf05a7'
date: 'Mon Jul 24 17:01:49 2017'
author: '<NAME>'
}
{
commit: '76ca229'
date: 'Mon Jul 24 17:01:24 2017'
author: '<NAME>'
}
{
commit: 'd231231'
date: 'Mon Jul 24 11:29:59 2017'
author: '<NAME>'
}
{
commit: 'bb2ef14'
date: 'Mon Jul 24 11:13:46 2017'
author: '<NAME>'
}
{
commit: '369b1c8'
date: 'Sat Jul 22 01:12:11 2017'
author: '<NAME>'
}
{
commit: '32df3a8'
date: 'Fri Jul 21 16:11:21 2017'
author: '<NAME>'
}
{
commit: '212874c'
date: 'Fri Jul 21 16:11:08 2017'
author: '<NAME>'
}
{
commit: '72ed711'
date: 'Fri Jul 21 13:33:04 2017'
author: '<NAME>'
}
{
commit: 'b39ecb6'
date: 'Fri Jul 21 13:32:51 2017'
author: '<NAME>'
}
{
commit: '364ccbf'
date: 'Fri Jul 21 12:06:19 2017'
author: '<NAME>'
}
{
commit: '9ab9796'
date: 'Fri Jul 21 10:09:01 2017'
author: '<NAME>'
}
{
commit: '4059d42'
date: 'Fri Jul 21 11:18:30 2017'
author: '<NAME>'
}
{
commit: '60c2b63'
date: 'Fri Jul 21 10:47:00 2017'
author: '<NAME>'
}
{
commit: '37d64cf'
date: 'Fri Jul 21 09:22:45 2017'
author: '<NAME>'
}
{
commit: 'f1e64b9'
date: 'Thu Jul 20 19:59:08 2017'
author: '<NAME>'
}
{
commit: '6c82841'
date: 'Thu Jul 20 19:54:44 2017'
author: '<NAME>'
}
{
commit: '4198d50'
date: 'Thu Jul 20 17:09:14 2017'
author: '<NAME>'
}
{
commit: 'a30cfcb'
date: 'Wed Jul 19 16:32:27 2017'
author: '<NAME>'
}
{
commit: 'eac82f2'
date: 'Wed Jul 19 15:11:59 2017'
author: '<NAME>'
}
{
commit: '9f17f49'
date: 'Wed Jul 19 10:08:58 2017'
author: '<NAME>'
}
{
commit: '0343ada'
date: 'Wed Jul 19 10:08:42 2017'
author: '<NAME>'
}
{
commit: 'eb512c2'
date: 'Mon Jul 17 18:31:05 2017'
author: '<NAME>'
}
{
commit: 'bac9d60'
date: 'Mon Jul 17 18:15:07 2017'
author: '<NAME>'
}
{
commit: '962b105'
date: 'Mon Jul 17 15:18:51 2017'
author: '<NAME>'
}
{
commit: '9fe65a5'
date: 'Fri Jul 14 15:42:40 2017'
author: '<NAME>'
}
{
commit: '32da80b'
date: 'Fri Jul 14 15:42:31 2017'
author: '<NAME>'
}
{
commit: '60674ee'
date: 'Fri Jul 14 13:21:32 2017'
author: '<NAME>'
}
{
commit: '026c816'
date: 'Thu Jul 13 19:30:51 2017'
author: '<NAME>'
}
{
commit: 'da39dc3'
date: 'Thu Jul 13 16:13:36 2017'
author: '<NAME>'
}
{
commit: 'fc6bd36'
date: 'Thu Jul 13 16:03:37 2017'
author: '<NAME>'
}
{
commit: '3c5bbb6'
date: 'Thu Jul 13 15:54:33 2017'
author: '<NAME>'
}
{
commit: 'c7d4cda'
date: 'Thu Jul 13 14:43:16 2017'
author: '<NAME>'
}
{
commit: '6229106'
date: 'Thu Jul 13 14:39:49 2017'
author: '<NAME>'
}
{
commit: '7f807e6'
date: 'Wed Jul 12 14:12:24 2017'
author: '<NAME>'
}
{
commit: '9e9ce72'
date: 'Wed Jul 12 11:43:39 2017'
author: '<NAME>'
}
{
commit: 'b0ae258'
date: 'Wed Jul 12 11:42:46 2017'
author: '<NAME>'
}
{
commit: '0697f93'
date: 'Tue Jul 11 22:22:37 2017'
author: '<NAME>'
}
{
commit: '4ce455d'
date: 'Tue Jul 11 17:41:08 2017'
author: '<NAME>'
}
{
commit: 'b1cb2ca'
date: 'Tue Jul 11 15:57:46 2017'
author: '<NAME>'
}
{
commit: '687d7c2'
date: 'Fri Jul 7 17:05:06 2017'
author: '<NAME>'
}
{
commit: '1cb646c'
date: 'Fri Jul 7 15:38:14 2017'
author: '<NAME>'
}
{
commit: '5317cbc'
date: 'Thu Jul 6 14:30:48 2017'
author: '<NAME>'
}
{
commit: 'e1e8316'
date: 'Thu Jul 6 13:29:28 2017'
author: '<NAME>'
}
{
commit: '6e48e05'
date: 'Thu Jul 6 11:29:24 2017'
author: '<NAME>'
}
{
commit: 'b3764ff'
date: 'Wed Jul 5 10:23:51 2017'
author: '<NAME>'
}
{
commit: '3430da7'
date: 'Fri Jun 30 11:48:52 2017'
author: '<NAME>'
}
{
commit: '635adc9'
date: 'Thu Jun 29 16:25:35 2017'
author: '<NAME>'
}
{
commit: 'dee606e'
date: 'Thu Jun 29 16:21:26 2017'
author: '<NAME>'
}
{
commit: '338f840'
date: 'Thu Jun 29 16:20:31 2017'
author: '<NAME>'
}
{
commit: 'cfa6db3'
date: 'Wed Jun 28 18:18:17 2017'
author: '<NAME>'
}
{
commit: '272bb60'
date: 'Wed Jun 28 12:35:01 2017'
author: '<NAME>'
}
{
commit: 'ae0b628'
date: 'Wed Jun 28 05:26:16 2017'
author: '<NAME>'
}
{
commit: '0d40d08'
date: 'Tue Jun 27 14:06:46 2017'
author: '<NAME>'
}
{
commit: '5b4c3bc'
date: 'Tue Jun 27 13:50:53 2017'
author: '<NAME>'
}
{
commit: '8e73cb1'
date: 'Tue Jun 27 13:46:57 2017'
author: '<NAME>'
}
{
commit: '16e23b2'
date: 'Tue Jun 27 11:29:44 2017'
author: '<NAME>'
}
{
commit: 'a23f50d'
date: 'Tue Jun 20 16:40:23 2017'
author: '<NAME>'
}
{
commit: '7aacaa8'
date: 'Tue Jun 20 16:38:59 2017'
author: '<NAME>'
}
{
commit: '6d4efa6'
date: 'Tue Jun 20 14:37:03 2017'
author: '<NAME>'
}
{
commit: 'f32e84e'
date: 'Tue Jun 13 16:47:36 2017'
author: '<NAME>'
}
{
commit: '9e3aeda'
date: 'Tue Jun 13 16:39:01 2017'
author: '<NAME>'
}
{
commit: '8df882d'
date: 'Tue Jun 13 13:20:08 2017'
author: '<NAME>'
}
{
commit: '34d048e'
date: 'Mon Jun 12 22:36:48 2017'
author: '<NAME>'
}
{
commit: 'e65f583'
date: 'Mon Jun 12 16:57:22 2017'
author: '<NAME>'
}
{
commit: '714bc24'
date: 'Mon Jun 12 16:54:50 2017'
author: '<NAME>'
}
{
commit: 'bfaf154'
date: 'Mon Jun 12 16:33:01 2017'
author: '<NAME>'
}
{
commit: '7cea533'
date: 'Mon Jun 12 16:06:24 2017'
author: '<NAME>'
}
{
commit: 'cc7d5b1'
date: 'Mon Jun 12 13:59:33 2017'
author: '<NAME>'
}
{
commit: '289e98b'
date: 'Mon Jun 12 13:38:01 2017'
author: '<NAME>'
}
{
commit: '3fb7b5d'
date: 'Mon Jun 12 11:56:50 2017'
author: '<NAME>'
}
{
commit: '266546d'
date: 'Mon Jun 12 11:56:19 2017'
author: '<NAME>'
}
{
commit: 'e42c966'
date: 'Wed Jun 28 17:24:09 2017'
author: '<NAME>'
}
{
commit: 'eaca3b9'
date: 'Wed Jun 28 17:23:23 2017'
author: '<NAME>'
}
{
commit: '8c43ee2'
date: 'Wed Jun 28 17:22:09 2017'
author: '<NAME>'
}
{
commit: '4db0275'
date: 'Wed Jun 28 17:21:30 2017'
author: '<NAME>'
}
{
commit: '950b059'
date: 'Sat Jun 24 17:22:04 2017'
author: '<NAME>'
}
{
commit: 'fd7851c'
date: 'Thu Jun 22 13:45:44 2017'
author: '<NAME>'
}
{
commit: 'ec9817d'
date: 'Fri Jun 16 12:22:03 2017'
author: '<NAME>'
}
{
commit: '280d7df'
date: 'Thu Jun 15 11:49:07 2017'
author: '<NAME>'
}
{
commit: '8ca7f3c'
date: 'Thu Jun 15 11:49:00 2017'
author: '<NAME>'
}
{
commit: '76f4f25'
date: 'Thu Apr 27 14:05:59 2017'
author: '<NAME>'
}
{
commit: 'dc0728f'
date: 'Thu Jun 15 10:46:05 2017'
author: '<NAME>'
}
{
commit: '86844fd'
date: 'Thu Jun 15 10:41:46 2017'
author: '<NAME>'
}
{
commit: '0c93b70'
date: 'Thu Jun 15 10:07:14 2017'
author: '<NAME>'
}
{
commit: '582d861'
date: 'Wed Jun 14 17:00:22 2017'
author: '<NAME>'
}
{
commit: '67e7f9e'
date: 'Mon Jun 12 18:10:38 2017'
author: '<NAME>'
}
{
commit: 'e5f8f39'
date: 'Mon Jun 12 17:58:51 2017'
author: '<NAME>'
}
{
commit: '580846c'
date: 'Fri Jun 9 16:58:36 2017'
author: '<NAME>'
}
{
commit: 'c9450ed'
date: 'Fri Jun 9 14:27:35 2017'
author: '<NAME>'
}
{
commit: 'd511624'
date: 'Wed Jun 7 14:25:06 2017'
author: '<NAME>'
}
{
commit: '7b8793e'
date: 'Fri Jun 2 10:59:56 2017'
author: '<NAME>'
}
{
commit: 'f52c855'
date: 'Wed May 31 17:08:25 2017'
author: '<NAME>'
}
{
commit: '0ee68bd'
date: 'Wed May 31 15:27:31 2017'
author: '<NAME>'
}
{
commit: '2c2404c'
date: 'Wed Apr 26 16:55:20 2017'
author: '<NAME>'
}
{
commit: '812dda3'
date: 'Wed Apr 26 14:29:54 2017'
author: '<NAME>'
}
{
commit: 'cc24de1'
date: 'Tue Apr 25 19:10:50 2017'
author: '<NAME>'
}
{
commit: 'd083f4e'
date: 'Tue Apr 25 15:25:50 2017'
author: '<NAME>'
}
{
commit: 'ce38df8'
date: 'Tue Apr 25 15:20:58 2017'
author: '<NAME>'
}
{
commit: '2aedf22'
date: 'Tue Apr 25 15:13:29 2017'
author: '<NAME>'
}
{
commit: 'bce05cc'
date: 'Mon Apr 24 11:36:11 2017'
author: '<NAME>'
}
{
commit: '63333cf'
date: 'Fri Apr 21 11:21:24 2017'
author: '<NAME>'
}
{
commit: 'fc5f1f0'
date: 'Thu Apr 20 17:20:06 2017'
author: '<NAME>'
}
{
commit: '2979f9b'
date: 'Tue Apr 18 13:49:54 2017'
author: '<NAME>'
}
{
commit: '376bc04'
date: 'Sun Apr 16 13:45:28 2017'
author: '<NAME>'
}
{
commit: 'b9956e9'
date: 'Sun Apr 16 13:07:02 2017'
author: '<NAME>'
}
{
commit: '1e0c2ce'
date: 'Wed Apr 12 18:40:52 2017'
author: '<NAME>'
}
{
commit: '17d75f7'
date: 'Thu Mar 30 17:19:19 2017'
author: '<NAME>'
}
{
commit: 'c80ec3b'
date: 'Thu Mar 30 13:07:58 2017'
author: '<NAME>'
}
{
commit: 'a4c59b4'
date: 'Wed Mar 29 22:05:33 2017'
author: '<NAME>'
}
{
commit: 'f506fc0'
date: 'Tue Mar 28 11:30:57 2017'
author: '<NAME>'
}
{
commit: '413c18d'
date: 'Fri Mar 24 14:51:23 2017'
author: '<NAME>'
}
{
commit: '4428af1'
date: 'Fri Mar 24 10:54:57 2017'
author: '<NAME>'
}
{
commit: '095e0ba'
date: 'Fri Mar 24 08:50:42 2017'
author: '<NAME>'
}
{
commit: '7ab0a55'
date: 'Fri Mar 24 08:50:04 2017'
author: '<NAME>'
}
{
commit: 'f71bb1c'
date: 'Thu Mar 23 17:22:20 2017'
author: '<NAME>'
}
{
commit: 'f989af9'
date: 'Thu Mar 23 14:40:07 2017'
author: '<NAME>'
}
{
commit: 'e5a244c'
date: 'Thu Mar 23 14:23:34 2017'
author: '<NAME>'
}
{
commit: 'ee976ab'
date: 'Thu Mar 23 14:23:12 2017'
author: '<NAME>'
}
{
commit: 'cd53f5d'
date: 'Thu Mar 23 14:22:47 2017'
author: '<NAME>'
}
{
commit: 'ed66ccb'
date: 'Thu Mar 23 14:18:57 2017'
author: '<NAME>'
}
{
commit: '1f2524b'
date: 'Wed Mar 22 20:25:18 2017'
author: '<NAME>'
}
{
commit: '6ddff8f'
date: 'Wed Mar 22 20:25:01 2017'
author: '<NAME>'
}
{
commit: '570aa20'
date: 'Wed Mar 22 17:11:12 2017'
author: '<NAME>'
}
{
commit: '1c7d70d'
date: 'Wed Mar 22 17:10:41 2017'
author: '<NAME>'
}
{
commit: 'af1246a'
date: 'Wed Mar 22 15:45:49 2017'
author: '<NAME>'
}
{
commit: 'cdfc21c'
date: 'Mon Mar 20 15:45:47 2017'
author: '<NAME>'
}
{
commit: '6e1ae6a'
date: 'Mon Mar 20 15:45:33 2017'
author: '<NAME>'
}
{
commit: '8b3d89a'
date: 'Mon Mar 20 11:48:40 2017'
author: '<NAME>'
}
{
commit: 'd778b25'
date: 'Sat Mar 11 11:34:09 2017'
author: '<NAME>'
}
{
commit: '4c68120'
date: 'Sat Mar 11 11:26:52 2017'
author: '<NAME>'
}
{
commit: '8774421'
date: 'Fri Mar 10 17:06:32 2017'
author: '<NAME>'
}
{
commit: '9bc528c'
date: 'Fri Mar 10 16:36:18 2017'
author: '<NAME>'
}
{
commit: '475253a'
date: 'Fri Mar 10 15:59:50 2017'
author: '<NAME>'
}
{
commit: 'fc55495'
date: 'Thu Mar 9 18:11:30 2017'
author: '<NAME>'
}
{
commit: '31c72c4'
date: 'Thu Mar 9 17:22:06 2017'
author: '<NAME>'
}
{
commit: '4c3321f'
date: 'Thu Mar 9 17:06:42 2017'
author: '<NAME>'
}
{
commit: '3ee7a06'
date: 'Thu Mar 9 17:05:09 2017'
author: '<NAME>'
}
{
commit: '6281e9b'
date: 'Wed Mar 8 14:22:13 2017'
author: '<NAME>'
}
{
commit: '2cad79b'
date: 'Wed Mar 8 10:34:50 2017'
author: '<NAME>'
}
{
commit: '44cef0d'
date: 'Tue Mar 7 16:33:52 2017'
author: '<NAME>'
}
{
commit: 'd247967'
date: 'Tue Mar 7 16:29:17 2017'
author: '<NAME>'
}
{
commit: 'e6fb645'
date: 'Tue Mar 7 16:25:03 2017'
author: '<NAME>'
}
{
commit: 'b9be679'
date: 'Tue Mar 7 16:17:33 2017'
author: '<NAME>'
}
{
commit: '3778d6e'
date: 'Tue Mar 7 15:57:12 2017'
author: '<NAME>'
}
{
commit: '4408f89'
date: 'Tue Mar 7 15:45:10 2017'
author: '<NAME>'
}
{
commit: 'db7aea1'
date: 'Tue Mar 7 00:06:02 2017'
author: '<NAME>'
}
{
commit: '9223f5c'
date: 'Fri Mar 3 15:50:44 2017'
author: '<NAME>'
}
{
commit: '6d64ea9'
date: 'Thu Mar 2 13:55:25 2017'
author: '<NAME>'
}
{
commit: '82f1159'
date: 'Thu Mar 2 11:05:00 2017'
author: '<NAME>'
}
{
commit: 'b9bddcb'
date: 'Tue Feb 28 12:29:54 2017'
author: '<NAME>'
}
{
commit: '6d899a4'
date: 'Tue Feb 28 12:29:28 2017'
author: '<NAME>'
}
{
commit: '73f35e5'
date: 'Tue Feb 28 12:28:48 2017'
author: '<NAME>'
}
{
commit: '66eb6d4'
date: 'Wed Feb 22 12:08:13 2017'
author: '<NAME>'
}
{
commit: '8db1dc8'
date: 'Fri Feb 17 11:51:38 2017'
author: '<NAME>'
}
{
commit: '9d08997'
date: 'Fri Feb 17 11:51:24 2017'
author: '<NAME>'
}
{
commit: '6d8bc7e'
date: 'Wed Feb 15 16:49:27 2017'
author: '<NAME>'
}
{
commit: 'eb38604'
date: 'Wed Feb 15 16:34:07 2017'
author: '<NAME>'
}
{
commit: 'e1ddd6f'
date: 'Wed Feb 15 16:15:06 2017'
author: '<NAME>'
}
{
commit: '31c303b'
date: 'Wed Feb 15 16:13:03 2017'
author: '<NAME>'
}
{
commit: '2f2e5d3'
date: 'Wed Feb 15 16:11:42 2017'
author: '<NAME>'
}
{
commit: '0b17a69'
date: 'Mon Feb 13 16:49:01 2017'
author: '<NAME>'
}
{
commit: '5814544'
date: 'Sat Feb 11 00:20:04 2017'
author: '<NAME>'
}
{
commit: 'd50c3f8'
date: 'Fri Feb 10 23:41:42 2017'
author: '<NAME>'
}
{
commit: 'ad9fa7e'
date: 'Tue Jan 24 10:03:01 2017'
author: '<NAME>'
}
{
commit: 'f4db660'
date: 'Thu Jan 19 12:02:23 2017'
author: '<NAME>'
}
{
commit: 'f3a1c68'
date: 'Mon Jan 2 10:58:53 2017'
author: '<NAME>'
}
{
commit: '3821f65'
date: 'Fri Dec 23 14:21:05 2016'
author: '<NAME>'
}
{
commit: '386a97a'
date: 'Fri Dec 23 13:31:45 2016'
author: '<NAME>'
}
{
commit: '97a28ce'
date: 'Thu Dec 15 16:16:05 2016'
author: '<NAME>'
}
{
commit: '876ed67'
date: 'Thu Dec 15 16:10:08 2016'
author: '<NAME>'
}
{
commit: 'ecb4b1c'
date: 'Thu Dec 15 16:09:29 2016'
author: '<NAME>'
}
{
commit: 'cc7f6c8'
date: 'Thu Dec 15 16:08:58 2016'
author: '<NAME>'
}
{
commit: 'd8c976d'
date: 'Thu Dec 15 13:51:08 2016'
author: '<NAME>'
}
{
commit: '8efe84b'
date: 'Thu Dec 8 13:14:41 2016'
author: '<NAME>'
}
{
commit: '957efc8'
date: 'Thu Dec 8 12:19:06 2016'
author: '<NAME>'
}
{
commit: 'e9d7ba1'
date: 'Thu Dec 1 17:06:50 2016'
author: '<NAME>'
}
{
commit: '239131b'
date: 'Thu Dec 1 17:04:12 2016'
author: '<NAME>'
}
{
commit: 'e10cdbb'
date: 'Thu Dec 1 15:01:55 2016'
author: '<NAME>'
}
{
commit: 'f1ee215'
date: 'Thu Dec 1 14:18:25 2016'
author: '<NAME>'
}
{
commit: '73363ec'
date: 'Thu Dec 1 14:15:10 2016'
author: '<NAME>'
}
{
commit: '30bfb04'
date: 'Thu Dec 1 14:08:20 2016'
author: '<NAME>'
}
{
commit: '49f4ce1'
date: 'Thu Dec 1 12:30:26 2016'
author: '<NAME>'
}
{
commit: 'c97d630'
date: 'Thu Dec 1 11:54:35 2016'
author: '<NAME>'
}
{
commit: '367de0b'
date: 'Thu Dec 1 11:16:00 2016'
author: '<NAME>'
}
{
commit: 'a65ffb2'
date: 'Wed Nov 30 23:44:26 2016'
author: '<NAME>'
}
{
commit: '13e72ba'
date: 'Wed Nov 30 23:29:15 2016'
author: '<NAME>'
}
{
commit: '0894861'
date: 'Wed Nov 30 17:26:06 2016'
author: '<NAME>'
}
{
commit: '93705be'
date: 'Wed Nov 30 16:02:50 2016'
author: '<NAME>'
}
{
commit: '8276a2f'
date: 'Wed Nov 30 15:27:04 2016'
author: '<NAME>'
}
{
commit: '0de5c81'
date: 'Wed Nov 30 15:26:28 2016'
author: '<NAME>'
}
{
commit: 'f7330d4'
date: 'Wed Nov 30 15:21:09 2016'
author: '<NAME>'
}
{
commit: '5b5fa2f'
date: 'Wed Nov 30 14:30:43 2016'
author: '<NAME>'
}
{
commit: '64e9597'
date: 'Mon Nov 28 15:32:52 2016'
author: '<NAME>'
}
{
commit: '9eeeb09'
date: 'Fri Nov 25 10:53:02 2016'
author: '<NAME>'
}
{
commit: '13d5193'
date: 'Thu Nov 24 12:40:29 2016'
author: '<NAME>'
}
{
commit: '1b11b42'
date: 'Thu Nov 24 17:56:59 2016'
author: '<NAME>'
}
{
commit: '2ddfe04'
date: 'Thu Nov 24 17:56:44 2016'
author: '<NAME>'
}
{
commit: '31b9b31'
date: 'Wed Nov 23 18:17:59 2016'
author: '<NAME>'
}
{
commit: 'b6fbac8'
date: 'Wed Nov 23 17:27:11 2016'
author: '<NAME>'
}
{
commit: '50b9e61'
date: 'Wed Nov 23 17:26:43 2016'
author: '<NAME>'
}
{
commit: 'f2d6d77'
date: 'Tue Nov 22 20:46:07 2016'
author: '<NAME>'
}
{
commit: '26d9511'
date: 'Tue Nov 22 17:59:10 2016'
author: '<NAME>'
}
{
commit: '9d8b135'
date: 'Tue Nov 22 09:50:16 2016'
author: '<NAME>'
}
{
commit: 'f8391b4'
date: 'Tue Nov 22 09:49:25 2016'
author: '<NAME>'
}
{
commit: '6c4df14'
date: 'Tue Nov 22 09:47:23 2016'
author: '<NAME>'
}
{
commit: '1cb21a3'
date: 'Wed Nov 16 11:33:54 2016'
author: '<NAME>'
}
{
commit: '9d86e5d'
date: 'Tue Nov 8 16:09:43 2016'
author: '<NAME>'
}
{
commit: '416640c'
date: 'Tue Nov 8 16:03:28 2016'
author: '<NAME>'
}
{
commit: '5901216'
date: 'Tue Nov 8 15:21:59 2016'
author: '<NAME>'
}
{
commit: '56a6d73'
date: 'Tue Nov 8 15:21:52 2016'
author: '<NAME>'
}
{
commit: '7de77b6'
date: 'Tue Nov 8 15:21:16 2016'
author: '<NAME>'
}
{
commit: 'ebdd729'
date: 'Mon Nov 7 17:25:16 2016'
author: '<NAME>'
}
{
commit: '8284723'
date: 'Mon Nov 7 11:59:38 2016'
author: '<NAME>'
}
{
commit: '95260da'
date: 'Mon Nov 7 09:37:25 2016'
author: '<NAME>'
}
{
commit: 'cfadfd6'
date: 'Fri Nov 4 14:43:14 2016'
author: '<NAME>'
}
{
commit: 'a7c60c6'
date: 'Fri Nov 4 14:42:04 2016'
author: '<NAME>'
}
{
commit: '8477984'
date: 'Thu Nov 3 15:12:32 2016'
author: '<NAME>'
}
{
commit: 'a4e959e'
date: 'Wed Nov 2 11:00:50 2016'
author: '<NAME>'
}
{
commit: '92c9d75'
date: 'Wed Nov 2 10:44:40 2016'
author: '<NAME>'
}
{
commit: '64cf695'
date: 'Wed Nov 2 10:41:29 2016'
author: '<NAME>'
}
{
commit: '7c96f4e'
date: 'Wed Nov 2 10:31:56 2016'
author: '<NAME>'
}
{
commit: '14420bf'
date: 'Wed Nov 2 09:26:50 2016'
author: '<NAME>'
}
{
commit: '03eaa69'
date: 'Wed Nov 2 09:23:14 2016'
author: '<NAME>'
}
{
commit: '3fcd7c7'
date: 'Wed Nov 2 09:19:29 2016'
author: '<NAME>'
}
{
commit: '248073d'
date: 'Thu Oct 27 18:13:57 2016'
author: '<NAME>'
}
{
commit: 'ff34779'
date: 'Thu Oct 27 18:13:47 2016'
author: '<NAME>'
}
{
commit: 'bdd0656'
date: 'Thu Oct 27 18:13:27 2016'
author: '<NAME>'
}
{
commit: '8b67479'
date: 'Wed Oct 19 12:57:18 2016'
author: '<NAME>'
}
{
commit: 'fb240f0'
date: 'Mon Oct 17 15:22:34 2016'
author: '<NAME>'
}
{
commit: '578bb81'
date: 'Mon Oct 17 15:22:10 2016'
author: '<NAME>'
}
{
commit: '7638378'
date: 'Mon Oct 17 14:38:04 2016'
author: '<NAME>'
}
{
commit: '4cef118'
date: 'Mon Oct 17 14:27:53 2016'
author: '<NAME>'
}
{
commit: 'ae20dc7'
date: 'Wed Oct 12 18:49:38 2016'
author: '<NAME>'
}
{
commit: '13eae19'
date: 'Wed Oct 12 00:27:48 2016'
author: '<NAME>'
}
{
commit: '8753f2a'
date: 'Tue Oct 11 15:44:48 2016'
author: '<NAME>'
}
{
commit: 'a8d00eb'
date: 'Tue Oct 11 15:39:34 2016'
author: '<NAME>'
}
{
commit: '61c5401'
date: 'Mon Oct 10 13:25:06 2016'
author: '<NAME>'
}
{
commit: '81ca90f'
date: 'Mon Oct 10 12:15:10 2016'
author: '<NAME>'
}
{
commit: '27d0b00'
date: 'Mon Oct 10 12:12:21 2016'
author: '<NAME>'
}
{
commit: '641be2d'
date: 'Mon Oct 10 11:05:26 2016'
author: '<NAME>'
}
{
commit: '8027175'
date: 'Mon Oct 10 11:02:26 2016'
author: '<NAME>'
}
{
commit: '6384ea9'
date: 'Sat Oct 8 14:57:29 2016'
author: '<NAME>'
}
{
commit: '4af7e9a'
date: 'Fri Oct 7 17:40:29 2016'
author: '<NAME>'
}
{
commit: '35513f1'
date: 'Fri Oct 7 17:40:05 2016'
author: '<NAME>'
}
{
commit: 'a9e75d4'
date: 'Fri Oct 7 17:39:21 2016'
author: '<NAME>'
}
{
commit: '705546c'
date: 'Fri Oct 7 17:38:59 2016'
author: '<NAME>'
}
{
commit: '24d1b38'
date: 'Fri Oct 7 17:37:58 2016'
author: '<NAME>'
}
{
commit: '9f8be26'
date: 'Fri Oct 7 16:15:02 2016'
author: '<NAME>'
}
{
commit: 'bed29aa'
date: 'Thu Oct 6 22:10:42 2016'
author: '<NAME>'
}
{
commit: 'f2f00ea'
date: 'Thu Oct 6 22:07:40 2016'
author: '<NAME>'
}
{
commit: '137767c'
date: 'Thu Oct 6 19:07:52 2016'
author: '<NAME>'
}
{
commit: '16a1777'
date: 'Thu Oct 6 18:35:27 2016'
author: '<NAME>'
}
{
commit: 'e5584ac'
date: 'Thu Oct 6 15:10:08 2016'
author: '<NAME>'
}
{
commit: '554908c'
date: 'Thu Oct 6 14:24:09 2016'
author: '<NAME>'
}
{
commit: '6e5b932'
date: 'Thu Oct 6 08:24:02 2016'
author: '<NAME>'
}
{
commit: '1f18a6c'
date: 'Wed Oct 5 08:54:55 2016'
author: '<NAME>'
}
{
commit: '2a7ad6c'
date: 'Tue Oct 4 14:26:23 2016'
author: '<NAME>'
}
{
commit: 'ea73dcd'
date: 'Tue Oct 4 00:37:10 2016'
author: '<NAME>'
}
{
commit: 'a9c5455'
date: 'Tue Oct 4 00:34:19 2016'
author: '<NAME>'
}
{
commit: '32000eb'
date: 'Mon Oct 3 17:01:07 2016'
author: '<NAME>'
}
{
commit: 'c8f66f3'
date: 'Mon Oct 3 14:29:59 2016'
author: '<NAME>'
}
{
commit: '8695f71'
date: 'Mon Oct 3 13:46:11 2016'
author: '<NAME>'
}
{
commit: '69cc7a6'
date: 'Fri Sep 30 10:49:08 2016'
author: '<NAME>'
}
{
commit: '26585ca'
date: 'Fri Sep 30 10:42:41 2016'
author: '<NAME>'
}
{
commit: '5e2efb4'
date: 'Fri Sep 30 10:01:13 2016'
author: '<NAME>'
}
{
commit: '47bbd9c'
date: 'Fri Sep 30 09:03:00 2016'
author: '<NAME>'
}
{
commit: 'db56e33'
date: 'Fri Sep 30 09:02:26 2016'
author: '<NAME>'
}
{
commit: '7e06984'
date: 'Fri Sep 30 09:02:10 2016'
author: '<NAME>'
}
{
commit: 'f027d89'
date: 'Fri Sep 30 09:02:00 2016'
author: '<NAME>'
}
{
commit: '28ebf17'
date: 'Fri Sep 30 09:01:21 2016'
author: '<NAME>'
}
{
commit: '2f6e520'
date: 'Thu Sep 29 22:23:55 2016'
author: '<NAME>'
}
{
commit: 'e339750'
date: 'Thu Sep 29 16:27:48 2016'
author: '<NAME>'
}
{
commit: '9b99341'
date: 'Thu Sep 29 14:31:37 2016'
author: '<NAME>'
}
{
commit: 'ae6d992'
date: 'Thu Sep 29 13:55:24 2016'
author: '<NAME>'
}
{
commit: 'b8fbf27'
date: 'Wed Sep 28 23:06:18 2016'
author: '<NAME>'
}
{
commit: 'aefab7c'
date: 'Thu Sep 22 09:41:28 2016'
author: '<NAME>'
}
{
commit: '3bf7734'
date: 'Thu Sep 22 09:40:41 2016'
author: '<NAME>'
}
{
commit: '2b07047'
date: 'Thu Sep 22 09:40:22 2016'
author: '<NAME>'
}
{
commit: '348fe2a'
date: 'Wed Sep 21 17:37:57 2016'
author: '<NAME>'
}
{
commit: '6c8224f'
date: 'Wed Sep 28 22:38:56 2016'
author: '<NAME>'
}
{
commit: '37245c3'
date: 'Wed Sep 28 11:11:47 2016'
author: '<NAME>'
}
{
commit: '6c5600f'
date: 'Wed Sep 28 00:19:56 2016'
author: '<NAME>'
}
{
commit: '038d633'
date: 'Wed Sep 28 00:19:34 2016'
author: '<NAME>'
}
{
commit: '062f912'
date: 'Tue Sep 27 16:06:07 2016'
author: '<NAME>'
}
{
commit: 'd254681'
date: 'Tue Sep 27 13:47:34 2016'
author: '<NAME>'
}
{
commit: '3220f92'
date: 'Tue Sep 27 10:38:53 2016'
author: '<NAME>'
}
{
commit: 'ec784c9'
date: 'Mon Sep 26 21:53:34 2016'
author: '<NAME>'
}
{
commit: '4b9442d'
date: 'Mon Sep 26 15:47:46 2016'
author: '<NAME>'
}
{
commit: '1955a99'
date: 'Mon Sep 26 15:44:12 2016'
author: '<NAME>'
}
{
commit: '20ae075'
date: 'Mon Sep 26 15:00:35 2016'
author: '<NAME>'
}
{
commit: '48cfcef'
date: 'Mon Sep 26 14:39:17 2016'
author: '<NAME>'
}
{
commit: '9b6ae19'
date: 'Mon Sep 26 14:36:08 2016'
author: '<NAME>'
}
{
commit: '03675e7'
date: 'Mon Sep 26 14:31:37 2016'
author: '<NAME>'
}
{
commit: '3a113df'
date: 'Mon Sep 26 09:57:37 2016'
author: '<NAME>'
}
{
commit: '99f0882'
date: 'Wed Sep 21 13:49:35 2016'
author: '<NAME>'
}
{
commit: '8d9f4ea'
date: 'Wed Sep 21 13:27:27 2016'
author: '<NAME>'
}
{
commit: '5b3372d'
date: 'Fri Sep 16 17:46:34 2016'
author: '<NAME>'
}
{
commit: '23a313a'
date: 'Thu Sep 15 15:36:54 2016'
author: '<NAME>'
}
{
commit: 'cd057d1'
date: 'Thu Sep 15 13:45:18 2016'
author: '<NAME>'
}
{
commit: 'aedf25d'
date: 'Thu Sep 15 13:26:52 2016'
author: '<NAME>'
}
{
commit: 'cd5cbfa'
date: 'Thu Sep 15 11:05:32 2016'
author: '<NAME>'
}
{
commit: '0970fe9'
date: 'Thu Sep 15 11:03:46 2016'
author: '<NAME>'
}
{
commit: '0b599de'
date: 'Thu Sep 15 11:03:21 2016'
author: '<NAME>'
}
{
commit: '15f8ce3'
date: 'Thu Sep 15 11:01:38 2016'
author: '<NAME>'
}
{
commit: '7e30a0f'
date: 'Thu Sep 15 10:28:33 2016'
author: '<NAME>'
}
{
commit: '6c644c5'
date: 'Thu Sep 15 10:28:05 2016'
author: '<NAME>'
}
{
commit: 'eac0aa8'
date: 'Wed Sep 14 17:25:19 2016'
author: '<NAME>'
}
{
commit: '32ac81b'
date: 'Wed Sep 14 17:25:10 2016'
author: '<NAME>'
}
{
commit: '8d510b0'
date: 'Wed Sep 14 17:24:55 2016'
author: '<NAME>'
}
{
commit: 'cc93537'
date: 'Wed Sep 14 17:24:40 2016'
author: '<NAME>'
}
{
commit: 'ec88f94'
date: 'Wed Sep 14 17:23:21 2016'
author: '<NAME>'
}
{
commit: '8c0992e'
date: 'Wed Sep 14 14:32:08 2016'
author: '<NAME>'
}
{
commit: '07040f2'
date: 'Wed Sep 14 14:17:50 2016'
author: '<NAME>'
}
{
commit: '40b20be'
date: 'Wed Sep 14 14:17:26 2016'
author: '<NAME>'
}
{
commit: '6c3e0ca'
date: 'Wed Sep 14 14:17:11 2016'
author: '<NAME>'
}
{
commit: 'd80347c'
date: 'Wed Sep 14 13:46:14 2016'
author: '<NAME>'
}
{
commit: 'cbd8339'
date: 'Wed Sep 14 13:25:14 2016'
author: '<NAME>'
}
{
commit: '38a56ff'
date: 'Wed Sep 14 10:05:42 2016'
author: '<NAME>'
}
{
commit: 'fab4a97'
date: 'Wed Sep 14 08:58:59 2016'
author: '<NAME>'
}
{
commit: '4500db2'
date: 'Tue Sep 13 21:08:17 2016'
author: '<NAME>'
}
{
commit: 'f79eafd'
date: 'Tue Sep 13 21:08:01 2016'
author: '<NAME>'
}
{
commit: '86c4fa9'
date: 'Tue Sep 13 18:18:10 2016'
author: '<NAME>'
}
{
commit: '209fec6'
date: 'Tue Sep 13 18:15:52 2016'
author: '<NAME>'
}
{
commit: '44253ef'
date: 'Tue Sep 13 18:15:20 2016'
author: '<NAME>'
}
{
commit: 'b94edd2'
date: 'Mon Sep 12 12:30:43 2016'
author: '<NAME>'
}
{
commit: '81120f0'
date: 'Mon Sep 12 12:30:02 2016'
author: '<NAME>'
}
{
commit: '5f9fd64'
date: 'Mon Sep 12 12:29:49 2016'
author: '<NAME>'
}
{
commit: '9e65920'
date: 'Mon Sep 12 12:29:05 2016'
author: '<NAME>'
}
{
commit: 'f777e28'
date: 'Mon Sep 12 11:43:41 2016'
author: '<NAME>'
}
{
commit: 'cb4555c'
date: 'Wed Aug 17 14:50:02 2016'
author: '<NAME>'
}
{
commit: 'afbff06'
date: 'Wed Aug 17 14:49:40 2016'
author: '<NAME>'
}
{
commit: 'f8e4c6b'
date: 'Tue Aug 16 11:15:20 2016'
author: '<NAME>'
}
{
commit: '080d69b'
date: 'Tue Aug 16 10:59:53 2016'
author: '<NAME>'
}
{
commit: '24a3a06'
date: 'Tue Aug 16 10:59:35 2016'
author: '<NAME>'
}
{
commit: 'ed0057e'
date: 'Tue Aug 16 10:53:08 2016'
author: '<NAME>'
}
{
commit: 'd5a82c2'
date: 'Tue Aug 16 10:52:50 2016'
author: '<NAME>'
}
{
commit: 'f9d6ef3'
date: 'Mon Aug 1 21:31:27 2016'
author: '<NAME>'
}
{
commit: 'e6df5eb'
date: 'Mon Aug 1 21:30:55 2016'
author: '<NAME>'
}
{
commit: '4936620'
date: 'Sun Jul 31 17:55:49 2016'
author: '<NAME>'
}
{
commit: 'c8c0361'
date: 'Sun Jul 31 17:55:17 2016'
author: '<NAME>'
}
{
commit: 'be9f7c9'
date: 'Sun Jul 31 17:54:25 2016'
author: '<NAME>'
}
{
commit: '5c3b369'
date: 'Sun Jul 31 17:54:01 2016'
author: '<NAME>'
}
{
commit: '7e36874'
date: 'Sun Jul 31 17:53:32 2016'
author: '<NAME>'
}
{
commit: '4511a7a'
date: 'Tue Jul 26 17:25:45 2016'
author: '<NAME>'
}
{
commit: '68fc9ba'
date: 'Tue Jul 26 17:14:40 2016'
author: '<NAME>'
}
{
commit: '7179178'
date: 'Tue Jul 26 17:14:23 2016'
author: '<NAME>'
}
{
commit: '3d868d0'
date: 'Tue Jul 26 16:55:03 2016'
author: '<NAME>'
}
{
commit: '7b96385'
date: 'Tue Jul 26 16:22:22 2016'
author: '<NAME>'
}
{
commit: 'dd49a2e'
date: 'Tue Jul 26 16:22:05 2016'
author: '<NAME>'
}
{
commit: '16a3107'
date: 'Tue Jul 26 15:58:39 2016'
author: '<NAME>'
}
{
commit: 'd74291b'
date: 'Tue Jul 26 15:20:10 2016'
author: '<NAME>'
}
{
commit: '5ee2864'
date: 'Tue Jul 26 15:20:02 2016'
author: '<NAME>'
}
{
commit: '862b33b'
date: 'Tue Jul 26 15:19:47 2016'
author: '<NAME>'
}
{
commit: 'b29eddc'
date: 'Tue Jul 26 15:19:34 2016'
author: '<NAME>'
}
{
commit: '9cf7b44'
date: 'Tue Jul 26 15:18:42 2016'
author: '<NAME>'
}
{
commit: '1cb9455'
date: 'Tue Jul 26 15:18:15 2016'
author: '<NAME>'
}
{
commit: '2922fd8'
date: 'Tue Jul 26 15:18:09 2016'
author: '<NAME>'
}
{
commit: '358a3f5'
date: 'Tue Jul 26 15:17:59 2016'
author: '<NAME>'
}
{
commit: '1fc72b8'
date: 'Tue Jul 26 15:17:40 2016'
author: '<NAME>'
}
{
commit: 'b0b408e'
date: 'Tue Jul 26 15:17:25 2016'
author: '<NAME>'
}
{
commit: '5b8ca53'
date: 'Tue Jul 26 15:16:45 2016'
author: '<NAME>'
}
{
commit: '6c25ab8'
date: 'Tue Jul 26 15:16:05 2016'
author: '<NAME>'
}
{
commit: '2d8e0d3'
date: 'Tue Jul 26 15:15:36 2016'
author: '<NAME>'
}
{
commit: 'a96fa22'
date: 'Fri Jul 22 16:44:29 2016'
author: '<NAME>'
}
{
commit: '44b09ae'
date: 'Fri Jul 22 16:44:21 2016'
author: '<NAME>'
}
{
commit: '66801fc'
date: 'Fri Jul 22 16:40:53 2016'
author: '<NAME>'
}
{
commit: '1047b37'
date: 'Fri Jul 22 16:36:41 2016'
author: '<NAME>'
}
{
commit: '3249596'
date: 'Fri Jul 22 16:36:28 2016'
author: '<NAME>'
}
{
commit: '16ed3bd'
date: 'Fri Jul 22 16:35:31 2016'
author: '<NAME>'
}
{
commit: '7835f82'
date: 'Fri Jul 22 16:27:50 2016'
author: '<NAME>'
}
{
commit: '6d8b172'
date: 'Fri Jul 22 16:26:06 2016'
author: '<NAME>'
}
{
commit: 'e9a76d2'
date: 'Fri Jul 22 16:25:23 2016'
author: '<NAME>'
}
{
commit: '4864241'
date: 'Fri Jul 22 16:25:00 2016'
author: '<NAME>'
}
{
commit: 'a7367a7'
date: 'Wed Sep 14 14:53:12 2016'
author: '<NAME>'
}
{
commit: 'dc920bd'
date: 'Wed Sep 14 09:05:59 2016'
author: '<NAME>'
}
{
commit: 'cd149e4'
date: 'Fri Sep 9 14:04:20 2016'
author: '<NAME>'
}
{
commit: 'e96125a'
date: 'Fri Sep 9 11:16:33 2016'
author: '<NAME>'
}
{
commit: 'cfa63d5'
date: 'Fri Sep 9 11:08:56 2016'
author: '<NAME>'
}
{
commit: 'ab13543'
date: 'Wed Sep 7 13:48:43 2016'
author: '<NAME>'
}
{
commit: '2de07a1'
date: 'Wed Sep 7 13:46:37 2016'
author: '<NAME>'
}
{
commit: 'a5e37a1'
date: 'Tue Sep 6 11:41:24 2016'
author: '<NAME>'
}
{
commit: 'ef8aefc'
date: 'Mon Sep 5 18:36:03 2016'
author: '<NAME>'
}
{
commit: 'ebf58d2'
date: 'Mon Sep 5 18:31:11 2016'
author: '<NAME>'
}
{
commit: '11296da'
date: 'Mon Sep 5 12:06:10 2016'
author: '<NAME>'
}
{
commit: '94ccc04'
date: 'Sat Sep 3 12:53:19 2016'
author: '<NAME>'
}
{
commit: 'f183c8c'
date: 'Fri Sep 2 15:22:31 2016'
author: '<NAME>'
}
{
commit: '6f3fa61'
date: 'Fri Sep 2 15:17:28 2016'
author: '<NAME>'
}
{
commit: '0d422ff'
date: 'Fri Sep 2 10:41:30 2016'
author: '<NAME>'
}
{
commit: 'd5c63b3'
date: 'Tue Aug 30 16:58:42 2016'
author: '<NAME>'
}
{
commit: 'dca9008'
date: 'Tue Aug 30 07:43:42 2016'
author: '<NAME>'
}
{
commit: 'bfb8543'
date: 'Mon Aug 29 10:50:39 2016'
author: '<NAME>'
}
{
commit: '5093eed'
date: 'Mon Aug 29 10:20:13 2016'
author: '<NAME>'
}
{
commit: 'f87b3ab'
date: 'Thu Aug 25 15:20:04 2016'
author: '<NAME>'
}
{
commit: '7687398'
date: 'Wed Aug 24 16:19:47 2016'
author: '<NAME>'
}
{
commit: '8488d33'
date: 'Tue Aug 23 10:46:07 2016'
author: '<NAME>'
}
{
commit: 'a33d1be'
date: 'Tue Aug 23 10:45:28 2016'
author: '<NAME> <NAME>'
}
{
commit: '852cfe5'
date: 'Mon Aug 15 09:29:56 2016'
author: '<NAME>'
}
{
commit: '694b407'
date: 'Fri Aug 12 15:07:39 2016'
author: '<NAME>'
}
{
commit: '83a0477'
date: 'Fri Aug 12 14:30:06 2016'
author: '<NAME>'
}
{
commit: 'b5b4642'
date: 'Fri Aug 12 14:29:31 2016'
author: '<NAME>'
}
{
commit: '7d48128'
date: 'Thu Aug 4 17:38:06 2016'
author: '<NAME>'
}
{
commit: '7f842d9'
date: 'Thu Aug 4 16:25:17 2016'
author: '<NAME>'
}
{
commit: '2e96fb6'
date: 'Thu Aug 4 15:02:54 2016'
author: '<NAME>'
}
{
commit: '778a01b'
date: 'Thu Aug 4 14:39:51 2016'
author: '<NAME>'
}
{
commit: 'c8f1ade'
date: 'Thu Aug 4 14:09:40 2016'
author: '<NAME>'
}
{
commit: 'fc80069'
date: 'Thu Aug 4 10:25:18 2016'
author: '<NAME>'
}
{
commit: '31a7bf9'
date: 'Wed Aug 3 23:21:34 2016'
author: '<NAME>'
}
{
commit: 'b69ac08'
date: 'Wed Aug 3 22:00:09 2016'
author: '<NAME>'
}
{
commit: 'b2763d2'
date: 'Wed Aug 3 20:05:51 2016'
author: '<NAME>'
}
{
commit: 'ca3afd5'
date: 'Wed Aug 3 17:55:27 2016'
author: '<NAME>'
}
{
commit: '248f2f8'
date: 'Wed Aug 3 16:12:16 2016'
author: '<NAME>'
}
{
commit: '293816d'
date: 'Wed Aug 3 15:30:56 2016'
author: '<NAME>'
}
{
commit: '6870ecb'
date: 'Wed Aug 3 15:20:55 2016'
author: '<NAME>'
}
{
commit: '030be1f'
date: 'Wed Aug 3 15:05:55 2016'
author: '<NAME>'
}
{
commit: 'db95db3'
date: 'Wed Aug 3 14:49:03 2016'
author: '<NAME>'
}
{
commit: '2cb1e4a'
date: 'Wed Aug 3 14:45:13 2016'
author: '<NAME>'
}
{
commit: 'c29cbc1'
date: 'Wed Aug 3 12:16:08 2016'
author: '<NAME>'
}
{
commit: 'eb31c27'
date: 'Wed Aug 3 12:12:21 2016'
author: '<NAME>'
}
{
commit: '781cc50'
date: 'Wed Aug 3 12:01:15 2016'
author: '<NAME>'
}
{
commit: 'd34658d'
date: 'Wed Aug 3 11:54:56 2016'
author: '<NAME>'
}
{
commit: 'daa910b'
date: 'Wed Aug 3 11:54:45 2016'
author: '<NAME>'
}
{
commit: '440e75b'
date: 'Wed Aug 3 11:41:01 2016'
author: '<NAME>'
}
{
commit: 'f11ff0e'
date: 'Wed Aug 3 10:46:07 2016'
author: '<NAME>'
}
{
commit: 'dd76064'
date: 'Tue Aug 2 17:20:08 2016'
author: '<NAME>'
}
{
commit: '4a17b7c'
date: 'Tue Aug 2 17:15:26 2016'
author: '<NAME>'
}
{
commit: '297edea'
date: 'Tue Aug 2 17:08:40 2016'
author: '<NAME>'
}
{
commit: '4b6d92a'
date: 'Tue Aug 2 16:36:05 2016'
author: '<NAME>'
}
{
commit: 'acaa7d4'
date: 'Tue Aug 2 16:27:38 2016'
author: '<NAME>'
}
{
commit: 'dc847a4'
date: 'Tue Aug 2 16:09:30 2016'
author: '<NAME>'
}
{
commit: '2a84e68'
date: 'Tue Aug 2 15:42:59 2016'
author: '<NAME>'
}
{
commit: 'f75ec6a'
date: 'Tue Aug 2 14:57:55 2016'
author: '<NAME>'
}
{
commit: '4773f8f'
date: 'Tue Aug 2 14:38:38 2016'
author: '<NAME>'
}
{
commit: '52ee4d8'
date: 'Tue Aug 2 14:35:20 2016'
author: '<NAME>'
}
{
commit: '5297f1f'
date: 'Tue Aug 2 14:32:52 2016'
author: '<NAME>'
}
{
commit: '5d1fcb4'
date: 'Tue Aug 2 14:32:37 2016'
author: '<NAME>'
}
{
commit: 'fd32c4c'
date: 'Tue Aug 2 14:21:09 2016'
author: '<NAME>'
}
{
commit: 'bb2ad29'
date: 'Tue Aug 2 14:15:44 2016'
author: '<NAME>'
}
{
commit: '1280168'
date: 'Tue Aug 2 14:14:18 2016'
author: '<NAME>'
}
{
commit: '37893f6'
date: 'Mon Aug 1 16:52:23 2016'
author: '<NAME>'
}
{
commit: '2b75dd9'
date: 'Mon Aug 1 16:49:10 2016'
author: '<NAME>'
}
{
commit: '15027e5'
date: 'Mon Aug 1 23:38:58 2016'
author: '<NAME>'
}
{
commit: 'ca22880'
date: 'Mon Aug 1 21:59:41 2016'
author: '<NAME>'
}
{
commit: '6e9d16c'
date: 'Mon Aug 1 21:53:13 2016'
author: '<NAME>'
}
{
commit: '5ee40e2'
date: 'Mon Aug 1 20:50:32 2016'
author: '<NAME>'
}
{
commit: '9cf0155'
date: 'Mon Aug 1 20:49:32 2016'
author: '<NAME>'
}
{
commit: '582615b'
date: 'Mon Aug 1 18:16:52 2016'
author: '<NAME>'
}
{
commit: '364e1a2'
date: 'Mon Aug 1 16:40:35 2016'
author: '<NAME>'
}
{
commit: '61d6312'
date: 'Mon Aug 1 16:21:17 2016'
author: '<NAME>'
}
{
commit: 'd6b4b03'
date: 'Mon Aug 1 15:57:22 2016'
author: '<NAME>u'
}
{
commit: 'd1b04b4'
date: 'Mon Aug 1 15:30:22 2016'
author: '<NAME>'
}
{
commit: '36e778e'
date: 'Mon Aug 1 13:24:57 2016'
author: '<NAME>'
}
{
commit: '0598f10'
date: 'Sat Jul 30 12:22:32 2016'
author: '<NAME>'
}
{
commit: 'd346edc'
date: 'Fri Jul 29 17:51:41 2016'
author: '<NAME>'
}
{
commit: 'bc1237c'
date: 'Fri Jul 29 17:31:54 2016'
author: '<NAME>'
}
{
commit: 'f7736fe'
date: 'Fri Jul 29 17:18:09 2016'
author: '<NAME>'
}
{
commit: 'ef12b68'
date: 'Fri Jul 29 15:12:50 2016'
author: '<NAME>'
}
{
commit: '0398085'
date: 'Fri Jul 29 14:56:50 2016'
author: '<NAME>'
}
{
commit: 'a44cf99'
date: 'Fri Jul 29 13:49:33 2016'
author: '<NAME>'
}
{
commit: 'd86e1ec'
date: 'Fri Jul 29 11:01:12 2016'
author: '<NAME>'
}
{
commit: '39bf120'
date: 'Thu Jul 28 23:27:54 2016'
author: '<NAME>'
}
{
commit: '9ff1da3'
date: 'Thu Jul 28 23:07:37 2016'
author: '<NAME>'
}
{
commit: 'a75ebf1'
date: 'Thu Jul 28 23:03:38 2016'
author: '<NAME>'
}
{
commit: 'd5604c3'
date: 'Thu Jul 28 16:57:42 2016'
author: '<NAME>'
}
{
commit: '4daa11b'
date: 'Thu Jul 28 16:53:56 2016'
author: '<NAME>'
}
{
commit: '04a5d8c'
date: 'Thu Jul 28 16:49:00 2016'
author: '<NAME>'
}
{
commit: '0d9f714'
date: 'Thu Jul 28 16:39:41 2016'
author: '<NAME>'
}
{
commit: '77fee3e'
date: 'Thu Jul 28 16:21:35 2016'
author: '<NAME>'
}
{
commit: '616019b'
date: 'Thu Jul 28 16:11:53 2016'
author: '<NAME>'
}
{
commit: '59d1622'
date: 'Thu Jul 28 16:08:52 2016'
author: '<NAME>'
}
{
commit: 'f3cfea6'
date: 'Thu Jul 28 15:08:37 2016'
author: '<NAME>'
}
{
commit: 'b9e8d14'
date: 'Thu Jul 28 14:36:54 2016'
author: '<NAME>'
}
{
commit: '046b50e'
date: 'Thu Jul 28 14:04:03 2016'
author: '<NAME>'
}
{
commit: '1bb9b56'
date: 'Thu Jul 28 12:33:37 2016'
author: '<NAME>'
}
{
commit: 'cc13ef9'
date: 'Thu Jul 28 11:42:22 2016'
author: '<NAME>'
}
{
commit: 'd071ba4'
date: 'Thu Jul 28 11:39:07 2016'
author: '<NAME>'
}
{
commit: '27d8ed3'
date: 'Thu Jul 28 11:23:27 2016'
author: '<NAME>'
}
{
commit: '418b162'
date: 'Thu Jul 28 11:17:34 2016'
author: '<NAME>'
}
{
commit: 'ada2943'
date: 'Thu Jul 28 09:48:40 2016'
author: '<NAME>'
}
{
commit: '8463a44'
date: 'Wed Jul 27 20:57:30 2016'
author: '<NAME>'
}
{
commit: '27d2d35'
date: 'Wed Jul 27 17:42:22 2016'
author: '<NAME>'
}
{
commit: 'f74b3cb'
date: 'Wed Jul 27 13:35:41 2016'
author: '<NAME>'
}
{
commit: '85b6df4'
date: 'Wed Jul 27 12:23:47 2016'
author: '<NAME>'
}
{
commit: '58aeab5'
date: 'Wed Jul 27 12:23:33 2016'
author: '<NAME>'
}
{
commit: '0b12542'
date: 'Wed Jul 27 09:19:08 2016'
author: '<NAME>'
}
{
commit: 'a56c591'
date: 'Mon Jul 25 16:40:54 2016'
author: '<NAME>'
}
{
commit: 'c9a02f5'
date: 'Mon Jul 25 16:12:03 2016'
author: '<NAME>'
}
{
commit: 'c997102'
date: 'Mon Jul 25 16:07:33 2016'
author: '<NAME>'
}
{
commit: 'f0f7e2d'
date: 'Mon Jul 25 15:21:56 2016'
author: '<NAME>'
}
{
commit: 'a545058'
date: 'Mon Jul 25 15:21:23 2016'
author: '<NAME>'
}
{
commit: 'cf7b2b7'
date: 'Mon Jul 25 15:21:00 2016'
author: '<NAME>'
}
{
commit: '955f744'
date: 'Mon Jul 25 15:12:17 2016'
author: '<NAME>'
}
{
commit: '193f74c'
date: 'Mon Jul 25 14:19:34 2016'
author: '<NAME>'
}
{
commit: 'ecb9956'
date: 'Mon Jul 25 14:19:20 2016'
author: '<NAME>'
}
{
commit: 'b5515a3'
date: 'Mon Jul 25 14:18:23 2016'
author: '<NAME>'
}
{
commit: '5199601'
date: 'Mon Jul 25 13:27:33 2016'
author: '<NAME>'
}
{
commit: '5c3994f'
date: 'Fri Jul 22 16:45:39 2016'
author: '<NAME>'
}
{
commit: '67ae9bd'
date: 'Thu Jul 21 19:22:52 2016'
author: '<NAME>'
}
{
commit: '2416d2f'
date: 'Wed Jul 20 17:25:37 2016'
author: '<NAME>'
}
{
commit: 'e9e49d6'
date: 'Wed Jul 20 15:19:49 2016'
author: '<NAME>'
}
{
commit: '066e6d8'
date: 'Wed Jul 20 15:00:27 2016'
author: '<NAME>'
}
{
commit: 'fe056b0'
date: 'Wed Jul 20 14:50:26 2016'
author: '<NAME>'
}
{
commit: 'de39148'
date: 'Wed Jul 20 14:41:11 2016'
author: '<NAME>'
}
{
commit: 'a252925'
date: 'Wed Jul 20 14:30:22 2016'
author: '<NAME>'
}
{
commit: '7143e64'
date: 'Tue Jul 19 10:35:01 2016'
author: '<NAME>'
}
{
commit: '997b336'
date: 'Mon Jul 18 09:23:12 2016'
author: '<NAME>'
}
{
commit: 'a303c80'
date: 'Fri Jul 15 14:41:07 2016'
author: '<NAME>'
}
{
commit: '5714727'
date: 'Fri Jul 15 11:57:59 2016'
author: '<NAME>'
}
{
commit: '1141772'
date: 'Fri Jul 15 11:56:13 2016'
author: '<NAME>'
}
{
commit: 'c13d10a'
date: 'Fri Jul 15 11:47:03 2016'
author: '<NAME>'
}
{
commit: '621479a'
date: 'Tue Jul 5 16:53:52 2016'
author: '<NAME>'
}
{
commit: 'db93886'
date: 'Mon Jul 4 17:23:45 2016'
author: '<NAME>'
}
{
commit: '6af1b8d'
date: 'Mon Jul 4 15:20:08 2016'
author: '<NAME>'
}
{
commit: 'ff8a671'
date: 'Thu Jun 30 18:14:41 2016'
author: '<NAME>'
}
{
commit: '6d9790d'
date: 'Thu Jun 30 18:14:16 2016'
author: '<NAME>'
}
{
commit: 'b032241'
date: 'Thu Jun 30 13:14:17 2016'
author: '<NAME>'
}
{
commit: '340612e'
date: 'Thu Jun 30 14:04:42 2016'
author: '<NAME>'
}
{
commit: 'adf5b33'
date: 'Thu Jun 30 08:35:47 2016'
author: '<NAME>'
}
{
commit: 'd6e849a'
date: 'Tue Jun 28 15:12:10 2016'
author: '<NAME>'
}
{
commit: 'f93ae7f'
date: 'Tue Jun 28 14:25:05 2016'
author: '<NAME>'
}
{
commit: '0dff457'
date: 'Mon Jun 27 10:25:20 2016'
author: '<NAME>'
}
{
commit: '4fb83cf'
date: 'Mon Jun 27 10:16:31 2016'
author: '<NAME>'
}
{
commit: 'a81f32f'
date: 'Mon Jun 27 10:09:10 2016'
author: '<NAME>'
}
{
commit: 'a7a6de0'
date: 'Mon Jun 27 09:53:20 2016'
author: '<NAME>'
}
{
commit: '99980ec'
date: 'Mon Jun 27 09:27:07 2016'
author: '<NAME>'
}
{
commit: 'f254db6'
date: 'Thu Jun 23 13:39:55 2016'
author: '<NAME>'
}
{
commit: 'ab0f21b'
date: 'Fri Jun 10 14:08:47 2016'
author: '<NAME>'
}
{
commit: '72fb2a8'
date: 'Fri Jun 10 01:17:24 2016'
author: '<NAME>'
}
{
commit: '926ac01'
date: 'Fri Jun 10 01:13:09 2016'
author: '<NAME>'
}
{
commit: 'b36dbd6'
date: 'Fri Jun 10 00:21:35 2016'
author: '<NAME>'
}
{
commit: '2cc6935'
date: 'Thu Jun 9 10:43:42 2016'
author: '<NAME>'
}
{
commit: 'c48360d'
date: 'Wed Jun 1 14:40:00 2016'
author: '<NAME>'
}
{
commit: '5cc7331'
date: 'Wed Jun 1 14:27:58 2016'
author: '<NAME>'
}
{
commit: 'f508b18'
date: 'Tue May 31 23:01:52 2016'
author: '<NAME>'
}
{
commit: 'b9c33da'
date: 'Sun May 29 00:48:08 2016'
author: '<NAME>'
}
{
commit: 'a52a360'
date: 'Sun May 29 00:30:46 2016'
author: '<NAME>'
}
{
commit: 'c28c450'
date: 'Sun May 29 00:17:06 2016'
author: '<NAME>'
}
{
commit: '3fba050'
date: 'Sun May 29 00:16:58 2016'
author: '<NAME>'
}
{
commit: '08297a9'
date: 'Sun May 29 00:11:00 2016'
author: '<NAME>'
}
{
commit: '1608171'
date: 'Sun May 29 00:05:46 2016'
author: '<NAME>'
}
{
commit: 'b7021df'
date: 'Sun May 1 13:21:29 2016'
author: '<NAME>'
}
{
commit: '7c9e47e'
date: 'Sat Apr 30 23:43:29 2016'
author: '<NAME>'
}
{
commit: 'fffa5bc'
date: 'Sat Apr 30 23:43:07 2016'
author: '<NAME>'
}
{
commit: 'd6cb31e'
date: 'Tue May 17 17:09:13 2016'
author: '<NAME>'
}
{
commit: 'd88e2f3'
date: 'Tue May 17 13:36:51 2016'
author: '<NAME>'
}
{
commit: 'edf7c5c'
date: 'Mon May 16 15:05:49 2016'
author: '<NAME>'
}
{
commit: 'e24a5bc'
date: 'Mon May 16 15:02:14 2016'
author: '<NAME>'
}
{
commit: 'b33e4ec'
date: 'Mon May 16 14:57:53 2016'
author: '<NAME>'
}
{
commit: '417ae11'
date: 'Sat May 14 13:32:48 2016'
author: '<NAME>'
}
{
commit: '4adc7fa'
date: 'Sat May 14 13:32:42 2016'
author: '<NAME>'
}
{
commit: '3aa1421'
date: 'Sat May 14 13:32:32 2016'
author: '<NAME>'
}
{
commit: 'e0ce1dc'
date: 'Fri May 13 23:54:36 2016'
author: '<NAME>'
}
{
commit: 'b4c7079'
date: 'Fri May 13 23:40:24 2016'
author: '<NAME>'
}
{
commit: 'd903cd5'
date: 'Fri May 13 23:39:50 2016'
author: '<NAME>'
}
{
commit: '64b09bb'
date: 'Thu May 12 10:28:11 2016'
author: '<NAME>'
}
{
commit: '2848823'
date: 'Wed May 11 15:22:12 2016'
author: '<NAME>'
}
{
commit: '3d7e40f'
date: 'Wed May 11 15:19:51 2016'
author: '<NAME>'
}
{
commit: 'decc0c0'
date: 'Wed May 11 13:32:35 2016'
author: '<NAME>'
}
{
commit: '6e06e20'
date: 'Wed May 11 13:15:04 2016'
author: '<NAME>'
}
{
commit: '0430a7a'
date: 'Wed May 11 13:14:56 2016'
author: '<NAME>'
}
{
commit: '9c7c945'
date: 'Wed May 11 13:14:46 2016'
author: '<NAME>'
}
{
commit: 'b5013f8'
date: 'Wed May 11 13:14:17 2016'
author: '<NAME>'
}
{
commit: '358bf74'
date: 'Wed May 11 13:13:50 2016'
author: '<NAME>'
}
{
commit: '51ebe97'
date: 'Wed May 11 13:13:33 2016'
author: '<NAME>'
}
{
commit: '9e60469'
date: 'Wed May 11 13:13:12 2016'
author: '<NAME>'
}
{
commit: '937b088'
date: 'Tue May 10 20:59:50 2016'
author: '<NAME>'
}
{
commit: 'd85c0fa'
date: 'Tue May 10 20:50:49 2016'
author: '<NAME>'
}
{
commit: '7d32c8d'
date: 'Tue May 10 17:38:37 2016'
author: '<NAME>'
}
{
commit: 'ea52737'
date: 'Tue May 10 17:25:43 2016'
author: '<NAME>'
}
{
commit: '9bc5c0e'
date: 'Tue May 10 17:23:14 2016'
author: '<NAME>'
}
{
commit: '02643eb'
date: 'Tue May 10 12:06:15 2016'
author: '<NAME>'
}
{
commit: '9070903'
date: 'Tue May 10 12:06:08 2016'
author: '<NAME>'
}
{
commit: '528a1cc'
date: 'Tue May 10 11:16:41 2016'
author: '<NAME>'
}
{
commit: '8ce9687'
date: 'Sat May 7 16:57:40 2016'
author: '<NAME>'
}
{
commit: 'd80a019'
date: 'Fri May 6 11:50:18 2016'
author: '<NAME>'
}
{
commit: '0e4aac0'
date: 'Fri May 6 10:11:08 2016'
author: '<NAME>'
}
{
commit: '94e779d'
date: 'Tue May 3 15:22:30 2016'
author: '<NAME>'
}
{
commit: '908bfd0'
date: 'Tue May 3 15:21:13 2016'
author: '<NAME>'
}
{
commit: '03ec3a8'
date: 'Mon May 2 00:30:30 2016'
author: '<NAME>'
}
{
commit: 'a699ef8'
date: 'Mon May 2 00:15:05 2016'
author: '<NAME>'
}
{
commit: '9091c1a'
date: 'Mon May 2 00:07:18 2016'
author: '<NAME>'
}
{
commit: '9fabdcd'
date: 'Sun May 1 23:47:25 2016'
author: '<NAME>'
}
{
commit: 'ed45094'
date: 'Sun May 1 23:47:08 2016'
author: '<NAME>'
}
{
commit: '4854bad'
date: 'Sun May 1 23:45:27 2016'
author: '<NAME>'
}
{
commit: '9900f3d'
date: 'Sun May 1 23:45:07 2016'
author: '<NAME>'
}
{
commit: '9862387'
date: 'Sun May 1 23:42:56 2016'
author: '<NAME>'
}
{
commit: 'ebafc7e'
date: 'Sun May 1 23:42:33 2016'
author: '<NAME>'
}
{
commit: '29857bb'
date: 'Sun May 1 23:41:48 2016'
author: '<NAME>'
}
{
commit: '1ec226d'
date: 'Sun May 1 23:40:17 2016'
author: '<NAME>'
}
{
commit: '289740d'
date: 'Sun May 1 18:05:08 2016'
author: '<NAME>'
}
{
commit: 'e0f6509'
date: 'Sun May 1 13:22:06 2016'
author: '<NAME>'
}
{
commit: '1dbda74'
date: 'Sun May 1 13:21:29 2016'
author: '<NAME>'
}
{
commit: '5b548b4'
date: 'Sun May 1 17:27:42 2016'
author: '<NAME>'
}
{
commit: '445b074'
date: 'Sun May 1 17:17:56 2016'
author: '<NAME>'
}
{
commit: 'b85d82c'
date: 'Sat Apr 30 23:43:07 2016'
author: '<NAME>'
}
{
commit: 'd59ba69'
date: 'Sat Apr 30 23:43:29 2016'
author: '<NAME>'
}
{
commit: 'ae6df89'
date: 'Sat Apr 30 23:08:57 2016'
author: '<NAME>'
}
{
commit: 'ff8eb4d'
date: 'Sat Apr 30 23:08:40 2016'
author: '<NAME>'
}
{
commit: '645653b'
date: 'Sat Apr 30 22:07:59 2016'
author: '<NAME>'
}
{
commit: '4cf5611'
date: 'Fri Apr 29 15:57:39 2016'
author: '<NAME>'
}
{
commit: '1a6bb08'
date: 'Fri Apr 29 15:57:29 2016'
author: '<NAME>'
}
{
commit: '9a116e2'
date: 'Fri Apr 29 15:55:18 2016'
author: '<NAME>'
}
{
commit: '519bbfa'
date: 'Wed Apr 27 22:25:23 2016'
author: '<NAME>'
}
{
commit: '996220d'
date: 'Tue Apr 26 11:11:33 2016'
author: '<NAME>'
}
{
commit: 'b12a41f'
date: 'Tue Apr 26 11:11:15 2016'
author: '<NAME>'
}
{
commit: '7ab7e4e'
date: 'Sat Apr 23 20:30:55 2016'
author: '<NAME>'
}
{
commit: 'b0ed473'
date: 'Wed Apr 20 14:06:29 2016'
author: '<NAME>empsink'
}
{
commit: 'c1d403f'
date: 'Tue Apr 19 14:07:29 2016'
author: '<NAME>'
}
{
commit: 'dc4d622'
date: 'Thu Apr 14 13:45:10 2016'
author: '<NAME>'
}
{
commit: 'bd79c54'
date: 'Tue Apr 12 17:03:55 2016'
author: '<NAME>'
}
{
commit: 'e32a426'
date: 'Tue Apr 12 13:20:07 2016'
author: '<NAME>'
}
{
commit: '04ee164'
date: 'Tue Apr 12 11:45:41 2016'
author: '<NAME>'
}
{
commit: '7aefc85'
date: 'Mon Apr 11 21:23:18 2016'
author: '<NAME>'
}
{
commit: '01e2794'
date: 'Mon Apr 11 20:50:20 2016'
author: '<NAME>'
}
{
commit: 'c526d80'
date: 'Mon Apr 11 20:49:52 2016'
author: '<NAME>'
}
{
commit: '51ede8f'
date: 'Sat Apr 9 14:49:09 2016'
author: '<NAME>'
}
{
commit: 'ca477ed'
date: 'Sat Apr 9 14:02:04 2016'
author: '<NAME>'
}
{
commit: 'a9f4263'
date: 'Sat Apr 9 13:41:26 2016'
author: '<NAME>'
}
{
commit: '0e405b4'
date: 'Sat Apr 9 13:35:53 2016'
author: '<NAME>'
}
{
commit: '320b6dc'
date: 'Sat Apr 9 13:30:25 2016'
author: '<NAME>'
}
{
commit: 'c849544'
date: 'Sat Apr 9 13:08:03 2016'
author: '<NAME>'
}
{
commit: '925b0c4'
date: 'Sat Apr 9 01:24:23 2016'
author: '<NAME>'
}
{
commit: '56aef8f'
date: 'Fri Apr 8 15:10:29 2016'
author: '<NAME>'
}
{
commit: '10d6ed2'
date: 'Fri Apr 8 15:06:50 2016'
author: '<NAME>'
}
{
commit: '24fa7ed'
date: 'Tue Apr 5 22:01:26 2016'
author: '<NAME>'
}
{
commit: '6e5fe80'
date: 'Mon Apr 4 15:56:52 2016'
author: '<NAME>'
}
{
commit: '5c1930f'
date: 'Thu Mar 31 22:31:32 2016'
author: '<NAME>'
}
{
commit: '0c1d135'
date: 'Thu Mar 24 23:15:50 2016'
author: '<NAME>'
}
{
commit: '4e762bf'
date: 'Thu Mar 24 23:07:14 2016'
author: '<NAME>'
}
{
commit: 'f4c7b16'
date: 'Thu Mar 24 22:50:22 2016'
author: '<NAME>'
}
{
commit: '084b0bc'
date: 'Thu Mar 24 16:12:48 2016'
author: '<NAME>'
}
{
commit: 'e7246ec'
date: 'Thu Mar 24 11:58:20 2016'
author: '<NAME>'
}
{
commit: '25f28a7'
date: 'Tue Mar 22 15:18:16 2016'
author: '<NAME>'
}
{
commit: '3722567'
date: 'Mon Mar 21 15:56:10 2016'
author: '<NAME>'
}
{
commit: 'a85d1df'
date: 'Sat Mar 19 01:42:20 2016'
author: '<NAME>'
}
{
commit: '489695d'
date: 'Sat Mar 19 01:15:25 2016'
author: '<NAME>'
}
{
commit: '42e6aa4'
date: 'Sat Mar 19 01:05:00 2016'
author: '<NAME>'
}
{
commit: '9ea07a4'
date: 'Thu Mar 17 14:17:51 2016'
author: '<NAME>'
}
{
commit: 'b1064fc'
date: 'Thu Mar 17 14:14:43 2016'
author: '<NAME>'
}
{
commit: '636c0e5'
date: 'Tue Mar 15 15:05:18 2016'
author: '<NAME>'
}
{
commit: '0f82601'
date: 'Tue Mar 15 14:57:50 2016'
author: '<NAME>'
}
{
commit: '75c0dc2'
date: 'Tue Mar 15 10:43:02 2016'
author: '<NAME>'
}
{
commit: 'b961fbf'
date: 'Mon Mar 14 16:50:01 2016'
author: '<NAME>'
}
{
commit: '949e6b3'
date: 'Mon Mar 14 16:10:15 2016'
author: '<NAME>'
}
{
commit: 'cd8bd60'
date: 'Thu Mar 10 17:39:02 2016'
author: '<NAME>'
}
{
commit: '35a1e52'
date: 'Tue Mar 1 17:59:20 2016'
author: '<NAME>'
}
{
commit: 'f1cc5ca'
date: 'Tue Mar 1 15:17:44 2016'
author: '<NAME>'
}
{
commit: 'dd2301c'
date: 'Tue Mar 1 13:21:51 2016'
author: '<NAME>'
}
{
commit: '460cf79'
date: 'Mon Feb 29 16:04:42 2016'
author: '<NAME>'
}
{
commit: '32b26ed'
date: 'Thu Feb 25 13:13:03 2016'
author: '<NAME>'
}
{
commit: '04f59b5'
date: 'Thu Feb 25 13:02:23 2016'
author: '<NAME>'
}
{
commit: '233ad6d'
date: 'Wed Feb 24 00:49:46 2016'
author: '<NAME>'
}
{
commit: '4f9f713'
date: 'Tue Feb 23 13:28:51 2016'
author: '<NAME>'
}
{
commit: '13042bc'
date: 'Tue Feb 23 10:50:45 2016'
author: '<NAME>'
}
{
commit: 'b28335d'
date: 'Thu Feb 18 16:51:53 2016'
author: '<NAME>'
}
{
commit: '9f41c24'
date: 'Wed Feb 17 16:58:15 2016'
author: '<NAME>'
}
{
commit: 'b7039f5'
date: 'Wed Feb 17 16:43:09 2016'
author: '<NAME>'
}
{
commit: '195bc4d'
date: 'Wed Feb 17 16:30:32 2016'
author: '<NAME>'
}
{
commit: '5c72995'
date: 'Wed Feb 17 14:28:03 2016'
author: '<NAME>loop'
}
{
commit: 'e2e7852'
date: 'Wed Feb 17 12:18:35 2016'
author: '<NAME>'
}
{
commit: '700cb8e'
date: 'Wed Feb 17 12:17:58 2016'
author: '<NAME>'
}
{
commit: 'a3d0603'
date: 'Tue Feb 16 11:35:45 2016'
author: '<NAME>'
}
{
commit: 'c6b4f3f'
date: 'Tue Feb 16 11:31:26 2016'
author: '<NAME>'
}
{
commit: 'df4f12a'
date: 'Tue Feb 9 22:40:51 2016'
author: '<NAME>'
}
{
commit: '4792322'
date: 'Tue Feb 9 15:16:31 2016'
author: '<NAME>'
}
{
commit: '0563b21'
date: 'Tue Feb 9 12:20:22 2016'
author: '<NAME>'
}
{
commit: '9453b24'
date: 'Tue Feb 9 12:12:44 2016'
author: '<NAME>'
}
{
commit: '40a0c99'
date: 'Tue Feb 9 12:12:03 2016'
author: '<NAME>'
}
{
commit: '780e9b3'
date: 'Mon Feb 8 22:11:46 2016'
author: '<NAME>'
}
{
commit: '3648250'
date: 'Mon Feb 8 16:31:51 2016'
author: '<NAME>'
}
{
commit: '0564c0e'
date: 'Mon Feb 8 16:15:06 2016'
author: '<NAME>'
}
{
commit: '094286c'
date: 'Mon Feb 8 15:52:13 2016'
author: '<NAME>'
}
{
commit: '8688f84'
date: 'Mon Feb 8 15:39:22 2016'
author: '<NAME>'
}
{
commit: '0af5d00'
date: 'Mon Feb 8 14:21:55 2016'
author: '<NAME>'
}
{
commit: '54f231d'
date: 'Mon Feb 8 14:14:12 2016'
author: '<NAME>'
}
{
commit: '4cae87f'
date: 'Mon Feb 8 13:42:49 2016'
author: '<NAME>'
}
{
commit: 'c32e406'
date: 'Mon Feb 8 13:09:42 2016'
author: '<NAME>'
}
{
commit: '8435391'
date: 'Sun Feb 7 21:17:53 2016'
author: '<NAME>'
}
{
commit: '71d886e'
date: 'Fri Feb 5 10:54:13 2016'
author: '<NAME>'
}
{
commit: '1bc5caa'
date: 'Thu Feb 4 17:55:59 2016'
author: '<NAME>'
}
{
commit: 'e57ec36'
date: 'Thu Feb 4 13:18:59 2016'
author: '<NAME>'
}
{
commit: '9876ddd'
date: 'Thu Feb 4 00:01:35 2016'
author: '<NAME>'
}
{
commit: 'a23f2cd'
date: 'Thu Feb 4 00:00:44 2016'
author: '<NAME>'
}
{
commit: '74b8b3b'
date: 'Wed Feb 3 23:04:11 2016'
author: '<NAME>'
}
{
commit: 'dbec9ae'
date: 'Wed Feb 3 22:26:49 2016'
author: '<NAME>'
}
{
commit: '63e19c9'
date: 'Wed Feb 3 17:13:58 2016'
author: '<NAME>'
}
{
commit: '7f04ad8'
date: 'Wed Feb 3 16:21:24 2016'
author: '<NAME>'
}
{
commit: '4795a1b'
date: 'Wed Feb 3 16:13:19 2016'
author: '<NAME>'
}
{
commit: '267cd7e'
date: 'Wed Feb 3 16:11:34 2016'
author: '<NAME>'
}
{
commit: '68b52f4'
date: 'Wed Feb 3 13:56:16 2016'
author: '<NAME>'
}
{
commit: 'c182568'
date: 'Wed Feb 3 13:55:11 2016'
author: '<NAME>'
}
{
commit: 'b1ec9bc'
date: 'Tue Feb 2 12:13:24 2016'
author: '<NAME>'
}
{
commit: '941bbdc'
date: 'Mon Feb 1 16:54:19 2016'
author: '<NAME>'
}
{
commit: '50f0eed'
date: 'Mon Feb 1 16:51:17 2016'
author: '<NAME>'
}
{
commit: '026e006'
date: 'Mon Feb 1 16:44:29 2016'
author: '<NAME>'
}
{
commit: '89aad75'
date: 'Mon Feb 1 16:33:08 2016'
author: '<NAME>'
}
{
commit: '11abb58'
date: 'Mon Feb 1 14:29:12 2016'
author: '<NAME>'
}
{
commit: '08fd95c'
date: 'Mon Feb 1 14:25:09 2016'
author: '<NAME>'
}
{
commit: '9d7c8ee'
date: 'Fri Jan 29 00:29:35 2016'
author: '<NAME>'
}
{
commit: 'b2b197d'
date: 'Fri Jan 29 00:12:21 2016'
author: '<NAME>'
}
{
commit: '6c1eee0'
date: 'Fri Jan 29 00:05:22 2016'
author: '<NAME>'
}
{
commit: '17eeb59'
date: 'Fri Jan 29 00:03:03 2016'
author: '<NAME>'
}
{
commit: '3315377'
date: 'Thu Jan 28 23:47:10 2016'
author: '<NAME>'
}
{
commit: '4581bf2'
date: 'Thu Jan 28 23:09:14 2016'
author: '<NAME>'
}
{
commit: '719b8d5'
date: 'Thu Jan 28 23:09:00 2016'
author: '<NAME>'
}
{
commit: 'f201e2f'
date: 'Thu Jan 28 18:08:14 2016'
author: '<NAME>'
}
{
commit: 'c82d1b2'
date: 'Thu Jan 28 10:52:31 2016'
author: '<NAME>'
}
{
commit: 'f06b12d'
date: 'Wed Jan 27 23:13:06 2016'
author: '<NAME>'
}
{
commit: '0660faa'
date: 'Wed Jan 27 23:06:22 2016'
author: '<NAME>'
}
{
commit: 'eb1315d'
date: 'Wed Jan 27 23:05:55 2016'
author: '<NAME>'
}
{
commit: 'dd9c30e'
date: 'Wed Jan 27 22:41:29 2016'
author: '<NAME>'
}
{
commit: 'c420611'
date: 'Wed Jan 27 18:45:42 2016'
author: '<NAME>'
}
{
commit: '75a62be'
date: 'Wed Jan 27 17:52:20 2016'
author: '<NAME>'
}
{
commit: '575fadc'
date: 'Wed Jan 27 17:02:31 2016'
author: '<NAME>'
}
{
commit: '111572b'
date: 'Wed Jan 27 17:02:03 2016'
author: '<NAME>'
}
{
commit: 'ba0f3ad'
date: 'Wed Jan 27 14:22:51 2016'
author: '<NAME>'
}
{
commit: '1650f6b'
date: 'Wed Jan 27 14:20:48 2016'
author: '<NAME>'
}
{
commit: '475604f'
date: 'Wed Jan 27 11:36:24 2016'
author: '<NAME>'
}
{
commit: '15cffc4'
date: 'Wed Jan 27 11:35:10 2016'
author: '<NAME>'
}
{
commit: '3d977bc'
date: 'Tue Jan 26 18:11:45 2016'
author: '<NAME>'
}
{
commit: '5c031e0'
date: 'Tue Jan 26 17:11:51 2016'
author: '<NAME>'
}
{
commit: '790d6ce'
date: 'Sat Jan 23 16:48:48 2016'
author: '<NAME>'
}
{
commit: 'ac27782'
date: 'Thu Jan 21 18:37:49 2016'
author: '<NAME>'
}
{
commit: 'f985fbb'
date: 'Thu Jan 21 18:35:43 2016'
author: '<NAME>'
}
{
commit: '56b653a'
date: 'Thu Jan 21 18:35:21 2016'
author: '<NAME>'
}
{
commit: '92d6d90'
date: 'Thu Jan 21 17:37:03 2016'
author: '<NAME>'
}
{
commit: '21f6504'
date: 'Thu Jan 21 15:41:21 2016'
author: '<NAME>'
}
{
commit: '6be8955'
date: 'Thu Jan 21 15:23:48 2016'
author: '<NAME>'
}
{
commit: '2dd0385'
date: 'Thu Jan 21 14:11:22 2016'
author: '<NAME>'
}
{
commit: 'c7c99d1'
date: 'Thu Jan 21 13:48:27 2016'
author: '<NAME>'
}
{
commit: 'fde7c54'
date: 'Wed Jan 20 23:44:16 2016'
author: '<NAME>'
}
{
commit: 'b5a7beb'
date: 'Tue Jan 19 22:34:54 2016'
author: '<NAME>'
}
{
commit: '82ccc3e'
date: 'Tue Jan 19 22:33:57 2016'
author: '<NAME>'
}
{
commit: 'ba7cf3c'
date: 'Mon Jan 18 17:23:20 2016'
author: '<NAME>'
}
{
commit: '4d0ce3e'
date: 'Mon Jan 18 15:28:42 2016'
author: '<NAME>'
}
{
commit: '0fb0831'
date: 'Mon Jan 18 13:56:41 2016'
author: '<NAME>'
}
{
commit: '325775b'
date: 'Mon Jan 18 13:08:17 2016'
author: '<NAME>'
}
{
commit: '8e8fda5'
date: 'Mon Jan 18 12:24:18 2016'
author: '<NAME>'
}
{
commit: '45633ea'
date: 'Mon Jan 18 11:46:29 2016'
author: '<NAME>'
}
{
commit: '78724ef'
date: 'Mon Jan 18 11:35:45 2016'
author: '<NAME>'
}
{
commit: 'fb78409'
date: 'Mon Jan 18 11:20:43 2016'
author: '<NAME>'
}
{
commit: '757977c'
date: 'Sun Jan 17 14:17:55 2016'
author: '<NAME>'
}
{
commit: '6020e74'
date: 'Sun Jan 17 14:11:12 2016'
author: '<NAME>'
}
{
commit: '42de49f'
date: 'Sun Jan 17 12:40:36 2016'
author: '<NAME>'
}
{
commit: '27b7647'
date: 'Sat Jan 16 11:21:00 2016'
author: '<NAME>'
}
{
commit: '5e126c4'
date: 'Fri Jan 15 16:38:29 2016'
author: '<NAME>'
}
{
commit: '7aca265'
date: 'Thu Jan 14 13:16:49 2016'
author: '<NAME>'
}
{
commit: 'c65dff5'
date: 'Tue Jan 12 18:02:51 2016'
author: '<NAME>'
}
{
commit: 'd4c15a4'
date: 'Tue Jan 12 17:00:35 2016'
author: '<NAME>'
}
{
commit: '631b47b'
date: 'Tue Jan 12 14:24:49 2016'
author: '<NAME>'
}
{
commit: 'dea63db'
date: 'Tue Jan 12 14:12:40 2016'
author: '<NAME>'
}
{
commit: '7399c60'
date: 'Tue Jan 12 14:10:52 2016'
author: '<NAME>'
}
{
commit: '1ab06fa'
date: 'Tue Jan 12 14:10:27 2016'
author: '<NAME>'
}
{
commit: '7296de7'
date: 'Mon Jan 11 23:59:05 2016'
author: '<NAME>'
}
{
commit: '83b7c3d'
date: 'Sun Jan 10 21:37:53 2016'
author: '<NAME>'
}
{
commit: 'd7571f8'
date: 'Fri Jan 8 16:20:26 2016'
author: '<NAME>'
}
{
commit: '36193ac'
date: 'Thu Jan 7 17:10:37 2016'
author: '<NAME>'
}
{
commit: '3ee6d07'
date: 'Thu Jan 7 14:08:59 2016'
author: '<NAME>'
}
{
commit: 'f797a36'
date: 'Mon Jan 4 14:39:21 2016'
author: '<NAME>'
}
{
commit: '209a50d'
date: 'Tue Dec 29 23:44:13 2015'
author: '<NAME>'
}
{
commit: 'd8ac785'
date: 'Tue Dec 29 14:01:18 2015'
author: '<NAME>'
}
{
commit: '06b2cbb'
date: 'Mon Dec 28 21:57:32 2015'
author: '<NAME>'
}
{
commit: '643b636'
date: 'Mon Dec 28 21:35:07 2015'
author: '<NAME>'
}
{
commit: '59923c4'
date: 'Mon Dec 28 18:19:29 2015'
author: '<NAME>'
}
{
commit: 'b71627c'
date: 'Mon Dec 28 17:48:43 2015'
author: '<NAME>'
}
{
commit: '57a342d'
date: 'Sun Dec 27 15:21:12 2015'
author: '<NAME>'
}
{
commit: '6a39d85'
date: 'Sun Dec 27 15:09:02 2015'
author: '<NAME>'
}
{
commit: '1f0def7'
date: 'Sun Dec 27 14:54:36 2015'
author: '<NAME>'
}
{
commit: '31baf97'
date: 'Sun Dec 27 14:46:26 2015'
author: '<NAME>'
}
{
commit: '91e8b76'
date: 'Sun Dec 27 14:46:06 2015'
author: '<NAME>'
}
{
commit: 'cc17f24'
date: 'Sun Dec 27 12:36:03 2015'
author: '<NAME>'
}
{
commit: 'aa5f73b'
date: 'Tue Dec 22 21:41:00 2015'
author: '<NAME>'
}
{
commit: '44ccc0b'
date: 'Tue Dec 22 21:25:34 2015'
author: '<NAME>'
}
{
commit: '7ed24b6'
date: 'Tue Dec 22 16:22:35 2015'
author: '<NAME>'
}
{
commit: '9d5a69c'
date: 'Mon Dec 21 14:53:24 2015'
author: '<NAME>'
}
{
commit: '10f79a7'
date: 'Sat Dec 19 14:05:47 2015'
author: '<NAME>'
}
{
commit: '6b2f999'
date: 'Fri Dec 18 17:08:39 2015'
author: '<NAME>'
}
{
commit: 'd8cabe8'
date: 'Fri Dec 18 15:52:46 2015'
author: '<NAME>'
}
{
commit: '3f2c0e2'
date: 'Fri Dec 18 15:09:57 2015'
author: '<NAME>'
}
{
commit: '58ad551'
date: 'Thu Dec 17 22:25:31 2015'
author: '<NAME>'
}
{
commit: 'aa6f4fb'
date: 'Thu Dec 17 14:24:14 2015'
author: '<NAME>'
}
{
commit: 'a7df8f4'
date: 'Thu Dec 17 14:03:24 2015'
author: '<NAME>'
}
{
commit: 'aefa54a'
date: 'Thu Dec 17 12:30:12 2015'
author: '<NAME>'
}
{
commit: '920106e'
date: 'Thu Dec 17 10:44:06 2015'
author: '<NAME>'
}
{
commit: 'b4583d4'
date: 'Wed Dec 16 17:17:44 2015'
author: '<NAME>'
}
{
commit: '864d7d5'
date: 'Wed Dec 16 17:05:37 2015'
author: '<NAME>'
}
{
commit: '893cb5d'
date: 'Wed Dec 16 15:58:21 2015'
author: '<NAME>'
}
{
commit: '80a0cba'
date: 'Wed Dec 16 15:44:29 2015'
author: '<NAME>'
}
{
commit: '4e8be92'
date: 'Wed Dec 16 15:40:16 2015'
author: '<NAME>'
}
{
commit: '0ac463d'
date: 'Wed Dec 16 13:57:18 2015'
author: '<NAME>'
}
{
commit: '9bfae6d'
date: 'Wed Dec 16 13:33:37 2015'
author: '<NAME>'
}
{
commit: 'f434b99'
date: 'Tue Dec 15 23:24:03 2015'
author: '<NAME>'
}
{
commit: '7089129'
date: 'Tue Dec 15 23:14:41 2015'
author: '<NAME>'
}
{
commit: '81cf4a1'
date: 'Tue Dec 15 23:04:11 2015'
author: '<NAME>'
}
{
commit: '0f9bd46'
date: 'Tue Dec 15 22:52:34 2015'
author: '<NAME>'
}
{
commit: 'abcfbcf'
date: 'Tue Dec 15 22:44:47 2015'
author: '<NAME>'
}
{
commit: 'aeb2d6c'
date: 'Tue Dec 15 22:40:34 2015'
author: '<NAME>'
}
{
commit: '025ea9b'
date: 'Tue Dec 15 15:01:43 2015'
author: '<NAME>'
}
{
commit: '32cc787'
date: 'Tue Dec 15 14:57:23 2015'
author: '<NAME>'
}
{
commit: '2ce0737'
date: 'Tue Dec 15 14:53:58 2015'
author: '<NAME>'
}
{
commit: '9cca44c'
date: 'Tue Dec 15 14:46:24 2015'
author: '<NAME>'
}
{
commit: '601e1e5'
date: 'Tue Dec 15 14:43:15 2015'
author: '<NAME>'
}
{
commit: 'ca8aa71'
date: 'Tue Dec 15 14:34:44 2015'
author: '<NAME>'
}
{
commit: '2c078d2'
date: 'Tue Dec 15 14:13:03 2015'
author: '<NAME>'
}
{
commit: '00df8b6'
date: 'Tue Dec 15 13:54:03 2015'
author: '<NAME>'
}
{
commit: '8ad78d8'
date: 'Tue Dec 15 13:44:29 2015'
author: '<NAME>'
}
{
commit: '89bfd2e'
date: 'Tue Dec 15 13:38:09 2015'
author: '<NAME>'
}
{
commit: '4cb63a5'
date: 'Tue Dec 15 13:25:26 2015'
author: '<NAME>'
}
{
commit: 'fb55e60'
date: 'Tue Dec 15 13:24:54 2015'
author: '<NAME>'
}
{
commit: '0f3b196'
date: 'Tue Dec 15 11:47:08 2015'
author: '<NAME>'
}
{
commit: '5a95739'
date: 'Tue Dec 15 11:40:52 2015'
author: '<NAME>'
}
{
commit: '674c81c'
date: 'Tue Dec 15 11:25:33 2015'
author: '<NAME>'
}
{
commit: 'd38fcf7'
date: 'Tue Dec 15 11:25:18 2015'
author: '<NAME>'
}
{
commit: '6cf8a83'
date: 'Tue Dec 15 10:37:01 2015'
author: '<NAME>'
}
{
commit: '672b250'
date: 'Mon Dec 14 17:10:57 2015'
author: '<NAME>'
}
{
commit: '982a065'
date: 'Mon Dec 14 14:50:23 2015'
author: '<NAME>'
}
{
commit: '84dc662'
date: 'Mon Dec 14 14:49:42 2015'
author: '<NAME>'
}
{
commit: '304f85a'
date: 'Mon Dec 14 14:46:26 2015'
author: '<NAME>'
}
{
commit: '605eb08'
date: 'Mon Dec 14 14:03:25 2015'
author: '<NAME>'
}
{
commit: '4a3b67f'
date: 'Mon Dec 14 13:54:58 2015'
author: '<NAME>'
}
{
commit: 'cfb7d76'
date: 'Thu Dec 10 16:18:04 2015'
author: '<NAME>'
}
{
commit: 'd443397'
date: 'Tue Dec 8 18:42:55 2015'
author: '<NAME>'
}
{
commit: 'ff8ab5f'
date: 'Tue Dec 8 15:32:47 2015'
author: '<NAME>'
}
{
commit: 'f947fee'
date: 'Fri Dec 4 16:49:34 2015'
author: '<NAME>'
}
{
commit: 'c5e779e'
date: 'Mon Nov 30 12:00:03 2015'
author: '<NAME>'
}
{
commit: 'f3c4b01'
date: 'Fri Nov 27 13:52:22 2015'
author: '<NAME>'
}
{
commit: '0b7d7ef'
date: 'Tue Nov 17 14:06:27 2015'
author: '<NAME>'
}
{
commit: '9ca69d1'
date: 'Tue Nov 17 13:48:38 2015'
author: '<NAME>'
}
{
commit: '0d84b03'
date: 'Mon Nov 16 14:36:35 2015'
author: '<NAME>'
}
{
commit: 'a22ecf0'
date: 'Fri Nov 6 14:30:25 2015'
author: '<NAME>'
}
{
commit: '858494c'
date: 'Fri Nov 6 14:24:17 2015'
author: '<NAME>'
}
{
commit: '0e4d9bb'
date: 'Mon Nov 2 13:41:00 2015'
author: '<NAME>'
}
{
commit: '3b32854'
date: 'Mon Nov 2 12:02:59 2015'
author: '<NAME>'
}
{
commit: 'b1b3b23'
date: 'Tue Oct 27 14:51:07 2015'
author: '<NAME>'
}
{
commit: 'f749b23'
date: 'Tue Oct 27 14:47:01 2015'
author: '<NAME>'
}
{
commit: '8c8bac4'
date: 'Mon Oct 26 16:26:15 2015'
author: '<NAME>'
}
{
commit: '42b7a4b'
date: 'Mon Oct 26 16:25:42 2015'
author: '<NAME>'
}
{
commit: '6c1c974'
date: 'Wed Oct 7 08:34:50 2015'
author: '<NAME>'
}
{
commit: 'b22d277'
date: 'Sun Sep 13 12:52:34 2015'
author: '<NAME>'
}
{
commit: 'ef74161'
date: 'Tue Sep 8 17:16:56 2015'
author: '<NAME>'
}
{
commit: 'a6eb697'
date: 'Mon Sep 7 16:11:58 2015'
author: '<NAME>'
}
{
commit: 'd017d6e'
date: 'Fri Sep 4 12:02:32 2015'
author: '<NAME>'
}
{
commit: '0db522a'
date: 'Fri Sep 4 11:30:03 2015'
author: '<NAME>'
}
{
commit: 'e45456b'
date: 'Fri Sep 4 11:26:52 2015'
author: '<NAME>'
}
{
commit: 'ecf3c54'
date: 'Wed Aug 19 16:21:22 2015'
author: '<NAME>'
}
{
commit: 'c966d49'
date: 'Fri Aug 14 14:34:58 2015'
author: '<NAME>'
}
{
commit: '6360b78'
date: 'Wed Aug 12 19:40:51 2015'
author: '<NAME>'
}
{
commit: 'daa25a5'
date: 'Wed Aug 12 19:40:06 2015'
author: '<NAME>'
}
{
commit: 'd129b20'
date: 'Thu Aug 6 12:11:29 2015'
author: '<NAME>'
}
{
commit: '62ac19e'
date: 'Wed Jul 22 11:44:04 2015'
author: '<NAME>'
}
{
commit: 'effb399'
date: 'Tue Jul 21 16:23:39 2015'
author: '<NAME>'
}
{
commit: 'c181296'
date: 'Tue Jul 21 15:17:23 2015'
author: '<NAME>'
}
{
commit: 'a80a61c'
date: 'Tue Jul 21 14:02:31 2015'
author: '<NAME>'
}
{
commit: 'f69ced3'
date: 'Tue Jul 21 13:46:40 2015'
author: '<NAME>'
}
{
commit: '8972740'
date: 'Tue Jul 21 13:31:56 2015'
author: '<NAME>'
}
{
commit: '9e98ef0'
date: 'Tue Jul 21 10:56:56 2015'
author: '<NAME>'
}
{
commit: 'b34eca1'
date: 'Fri Jul 17 23:41:32 2015'
author: '<NAME>'
}
{
commit: '5c47bd5'
date: 'Fri Jul 17 23:40:25 2015'
author: '<NAME>'
}
{
commit: 'db96a5e'
date: 'Wed Jul 15 17:04:42 2015'
author: '<NAME>'
}
{
commit: 'c2f0066'
date: 'Wed Jul 15 16:03:24 2015'
author: '<NAME>'
}
{
commit: '6438f75'
date: 'Wed Jul 15 16:02:40 2015'
author: '<NAME>'
}
{
commit: '8097266'
date: 'Thu Jul 9 13:38:00 2015'
author: '<NAME>'
}
{
commit: '1c5bb79'
date: 'Thu Jul 9 13:37:48 2015'
author: '<NAME>'
}
{
commit: '7c13491'
date: 'Tue Jun 30 16:16:53 2015'
author: '<NAME>'
}
{
commit: '194c6c1'
date: 'Sun Jun 28 00:05:21 2015'
author: '<NAME>'
}
{
commit: 'd2152f3'
date: 'Sun Jun 28 00:03:46 2015'
author: '<NAME>'
}
{
commit: 'c4371ee'
date: 'Wed Jun 24 16:58:38 2015'
author: '<NAME>'
}
{
commit: '2c7d9b6'
date: 'Wed Jun 24 16:58:28 2015'
author: '<NAME>'
}
{
commit: '7cb7f1f'
date: 'Tue Jun 23 16:07:56 2015'
author: '<NAME>'
}
{
commit: '11139a7'
date: 'Wed Jun 10 14:47:44 2015'
author: '<NAME>'
}
{
commit: '21b9efa'
date: 'Mon Jun 8 16:16:45 2015'
author: '<NAME>'
}
{
commit: 'f0e040f'
date: 'Mon Jun 8 16:07:54 2015'
author: '<NAME>'
}
{
commit: '067cc97'
date: 'Mon Jun 8 15:56:42 2015'
author: '<NAME>'
}
{
commit: 'f08d7f6'
date: 'Mon Jun 8 15:49:33 2015'
author: '<NAME>'
}
{
commit: '6bb2038'
date: 'Thu May 21 12:13:40 2015'
author: '<NAME>'
}
{
commit: '9f69cd4'
date: 'Thu May 21 12:12:53 2015'
author: '<NAME>'
}
{
commit: '9ee696d'
date: 'Tue May 12 17:04:55 2015'
author: '<NAME>'
}
{
commit: '6707a25'
date: 'Tue May 12 17:04:51 2015'
author: '<NAME>'
}
{
commit: '53b28cf'
date: 'Tue May 12 17:02:48 2015'
author: '<NAME>'
}
{
commit: '3fc11e7'
date: 'Thu May 7 15:34:24 2015'
author: '<NAME>'
}
{
commit: '27c6950'
date: 'Thu May 7 14:38:33 2015'
author: '<NAME>'
}
{
commit: '997378f'
date: 'Tue May 5 16:13:05 2015'
author: '<NAME>'
}
{
commit: '5797cc7'
date: 'Fri May 1 20:35:00 2015'
author: '<NAME>'
}
{
commit: 'd85846c'
date: 'Fri May 1 12:16:52 2015'
author: '<NAME>'
}
{
commit: 'ab89ee4'
date: 'Fri May 1 12:13:51 2015'
author: '<NAME>'
}
{
commit: '0e755dc'
date: 'Fri May 1 12:12:54 2015'
author: '<NAME>'
}
{
commit: '7504bd8'
date: 'Fri May 1 11:55:55 2015'
author: '<NAME>'
}
{
commit: '81a3f2f'
date: 'Fri May 1 10:28:54 2015'
author: '<NAME>'
}
{
commit: '238f18e'
date: 'Thu Apr 30 14:17:19 2015'
author: '<NAME>'
}
{
commit: 'f755586'
date: 'Thu Apr 30 12:41:32 2015'
author: '<NAME>'
}
{
commit: 'ce79b87'
date: 'Thu Apr 30 12:41:20 2015'
author: '<NAME>'
}
{
commit: '7f8a9de'
date: 'Thu Apr 23 12:02:27 2015'
author: '<NAME>'
}
{
commit: 'a063536'
date: 'Thu Apr 23 11:37:49 2015'
author: '<NAME>'
}
{
commit: 'ced648f'
date: 'Wed Apr 22 17:12:01 2015'
author: '<NAME>'
}
{
commit: '66eea95'
date: 'Wed Apr 22 17:00:46 2015'
author: '<NAME>'
}
{
commit: '4e182d7'
date: 'Mon Apr 20 16:21:58 2015'
author: '<NAME>'
}
{
commit: 'e65ea33'
date: 'Mon Apr 20 16:21:39 2015'
author: '<NAME>'
}
{
commit: '6e913b4'
date: 'Tue Apr 14 17:33:29 2015'
author: '<NAME>'
}
{
commit: '9c12003'
date: 'Thu Apr 9 11:02:27 2015'
author: '<NAME>'
}
{
commit: '7ff0515'
date: 'Thu Apr 9 11:01:30 2015'
author: '<NAME>'
}
{
commit: 'bf78d4f'
date: 'Tue Apr 7 22:43:26 2015'
author: '<NAME>'
}
{
commit: '16520a3'
date: 'Tue Apr 7 21:58:55 2015'
author: '<NAME>'
}
{
commit: '2dd9a9c'
date: 'Tue Apr 7 21:37:03 2015'
author: '<NAME>'
}
{
commit: 'e6d5cda'
date: 'Tue Apr 7 21:12:34 2015'
author: '<NAME>'
}
{
commit: '64896a8'
date: 'Tue Apr 7 20:55:25 2015'
author: '<NAME>'
}
{
commit: '0e65db3'
date: 'Tue Apr 7 20:55:09 2015'
author: '<NAME>'
}
{
commit: '1b293bf'
date: 'Tue Apr 7 17:44:36 2015'
author: '<NAME>'
}
{
commit: 'bab3232'
date: 'Fri Apr 3 15:02:35 2015'
author: '<NAME>'
}
{
commit: '58ff015'
date: 'Fri Apr 3 13:23:20 2015'
author: '<NAME>'
}
{
commit: 'c9b3cb0'
date: 'Fri Apr 3 11:13:18 2015'
author: '<NAME>'
}
{
commit: '1e8c7c1'
date: 'Fri Apr 3 10:28:21 2015'
author: '<NAME>'
}
{
commit: 'c518763'
date: 'Fri Apr 3 10:19:14 2015'
author: '<NAME>'
}
{
commit: '2b7855d'
date: 'Fri Apr 3 10:10:56 2015'
author: '<NAME>'
}
{
commit: '6dda6c6'
date: 'Fri Apr 3 10:04:46 2015'
author: '<NAME>'
}
{
commit: 'a5e1a1a'
date: 'Thu Apr 2 17:41:25 2015'
author: '<NAME>'
}
{
commit: '3098d23'
date: 'Thu Apr 2 17:35:41 2015'
author: '<NAME>'
}
{
commit: '42b6bf1'
date: 'Thu Apr 2 17:24:28 2015'
author: '<NAME>'
}
{
commit: '658933b'
date: 'Thu Apr 2 16:02:18 2015'
author: '<NAME>'
}
{
commit: '29a8452'
date: 'Thu Apr 2 15:12:12 2015'
author: '<NAME>'
}
{
commit: '705a6d1'
date: 'Thu Apr 2 14:10:59 2015'
author: '<NAME>'
}
{
commit: '9da6ec3'
date: 'Thu Apr 2 13:52:13 2015'
author: '<NAME>'
}
{
commit: '7e4f83b'
date: 'Thu Apr 2 12:58:55 2015'
author: '<NAME>'
}
{
commit: '1febf8d'
date: 'Thu Apr 2 12:25:41 2015'
author: '<NAME>'
}
{
commit: 'e4c08c6'
date: 'Thu Apr 2 12:22:38 2015'
author: '<NAME>'
}
{
commit: '654779d'
date: 'Thu Apr 2 12:15:36 2015'
author: '<NAME>'
}
{
commit: '4cb562b'
date: 'Thu Apr 2 12:02:34 2015'
author: '<NAME>'
}
{
commit: '86b7691'
date: 'Fri Mar 27 11:00:36 2015'
author: '<NAME>'
}
{
commit: 'e798718'
date: 'Thu Mar 26 11:34:38 2015'
author: '<NAME>'
}
{
commit: 'e6cd862'
date: 'Tue Mar 10 12:32:34 2015'
author: '<NAME>'
}
{
commit: '3b25bba'
date: 'Tue Mar 10 11:22:23 2015'
author: '<NAME>'
}
{
commit: 'd6a3894'
date: 'Tue Mar 10 11:05:11 2015'
author: '<NAME>'
}
{
commit: 'bd14b72'
date: 'Tue Mar 10 11:03:59 2015'
author: '<NAME>'
}
{
commit: '5caa36d'
date: 'Mon Mar 9 12:27:38 2015'
author: '<NAME>'
}
{
commit: '42dfba9'
date: 'Mon Mar 9 12:27:26 2015'
author: '<NAME>'
}
{
commit: '7356bf5'
date: 'Fri Feb 27 16:28:48 2015'
author: '<NAME>'
}
{
commit: '22c2e4e'
date: 'Tue Feb 24 17:26:11 2015'
author: '<NAME>'
}
{
commit: 'efc5aa4'
date: 'Sun Feb 15 23:18:42 2015'
author: '<NAME>'
}
{
commit: 'd69bbfe'
date: 'Sun Feb 15 23:11:07 2015'
author: '<NAME>'
}
{
commit: 'ffcdbec'
date: 'Tue Feb 10 16:36:34 2015'
author: '<NAME>'
}
{
commit: 'adcca17'
date: 'Tue Feb 10 16:16:47 2015'
author: '<NAME>'
}
{
commit: 'c659f32'
date: 'Tue Feb 10 14:36:06 2015'
author: '<NAME>'
}
{
commit: '5f36189'
date: 'Mon Feb 2 11:29:38 2015'
author: '<NAME>'
}
{
commit: 'f6abac2'
date: 'Fri Jan 30 17:28:40 2015'
author: '<NAME>'
}
{
commit: '2aafdc0'
date: 'Fri Jan 30 16:46:50 2015'
author: '<NAME>'
}
{
commit: '2224d2c'
date: 'Fri Jan 30 16:37:12 2015'
author: '<NAME>'
}
{
commit: '3edbb0f'
date: 'Mon Jan 26 23:04:47 2015'
author: '<NAME>'
}
{
commit: '3d49915'
date: 'Sun Jan 18 23:04:40 2015'
author: '<NAME>'
}
{
commit: '7f60bc9'
date: 'Sun Jan 18 23:02:16 2015'
author: '<NAME>'
}
{
commit: '52df7dc'
date: 'Sun Jan 18 23:01:46 2015'
author: '<NAME>'
}
{
commit: '45d792d'
date: 'Wed Jan 14 11:41:50 2015'
author: '<NAME>'
}
{
commit: 'f932e94'
date: 'Mon Jan 5 18:07:01 2015'
author: '<NAME>'
}
{
commit: '75e5990'
date: 'Mon Jan 5 18:06:48 2015'
author: '<NAME>'
}
{
commit: '0d76609'
date: 'Sat Dec 13 21:14:41 2014'
author: '<NAME>'
}
{
commit: '8e59102'
date: 'Fri Dec 12 14:52:38 2014'
author: '<NAME>'
}
{
commit: 'd400c9c'
date: 'Fri Dec 12 14:48:17 2014'
author: '<NAME>'
}
{
commit: '47f8be6'
date: 'Fri Dec 12 12:54:11 2014'
author: '<NAME>'
}
{
commit: '3f9411e'
date: 'Fri Dec 12 11:49:49 2014'
author: '<NAME>'
}
{
commit: '6cc6def'
date: 'Fri Dec 12 11:48:00 2014'
author: '<NAME>'
}
{
commit: '0d59690'
date: 'Sat Dec 6 11:21:49 2014'
author: '<NAME>'
}
{
commit: '135a729'
date: 'Sat Dec 6 11:19:23 2014'
author: '<NAME>'
}
{
commit: '24de214'
date: 'Wed Nov 26 19:15:48 2014'
author: '<NAME>'
}
{
commit: 'e33a47b'
date: 'Wed Nov 26 19:02:02 2014'
author: '<NAME>'
}
{
commit: '4b1f2d2'
date: 'Wed Nov 26 14:50:35 2014'
author: '<NAME>'
}
{
commit: '92b4ee1'
date: 'Tue Nov 11 12:14:06 2014'
author: '<NAME>'
}
{
commit: 'af57e3d'
date: 'Sun Nov 2 11:11:58 2014'
author: '<NAME>'
}
{
commit: 'd147a23'
date: 'Mon Oct 27 22:25:03 2014'
author: '<NAME>'
}
{
commit: '62e18b8'
date: 'Mon Oct 27 22:14:38 2014'
author: '<NAME>'
}
{
commit: '0f8c3bb'
date: 'Mon Oct 27 22:01:12 2014'
author: '<NAME>'
}
{
commit: 'ef112cd'
date: 'Mon Oct 27 21:36:35 2014'
author: '<NAME>'
}
{
commit: '6c287fe'
date: 'Mon Oct 27 21:18:39 2014'
author: '<NAME>'
}
{
commit: 'ecfa572'
date: 'Mon Oct 27 20:59:38 2014'
author: '<NAME>'
}
{
commit: '8d4b1bd'
date: 'Mon Oct 27 20:48:41 2014'
author: '<NAME>'
}
{
commit: 'a82c4fb'
date: 'Mon Oct 27 19:18:18 2014'
author: '<NAME>'
}
{
commit: '2e07617'
date: 'Mon Oct 27 17:08:19 2014'
author: '<NAME>'
}
{
commit: 'f7271db'
date: 'Mon Oct 27 17:06:39 2014'
author: '<NAME>'
}
{
commit: 'f86b69a'
date: 'Mon Oct 27 17:06:23 2014'
author: '<NAME>'
}
{
commit: 'f6816e1'
date: 'Fri Oct 24 12:09:44 2014'
author: '<NAME>'
}
{
commit: '093bdb7'
date: 'Fri Oct 24 12:03:30 2014'
author: '<NAME>'
}
{
commit: '5121cf5'
date: 'Fri Oct 24 11:57:45 2014'
author: '<NAME>'
}
{
commit: '1b13e7f'
date: 'Mon Oct 20 23:06:22 2014'
author: '<NAME>'
}
{
commit: '5b6aed1'
date: 'Sat Oct 18 14:40:46 2014'
author: '<NAME>'
}
{
commit: 'db9ffd1'
date: 'Sat Oct 18 12:08:21 2014'
author: '<NAME>'
}
{
commit: 'd89500c'
date: 'Sat Oct 11 16:15:04 2014'
author: '<NAME>'
}
{
commit: 'ef2f738'
date: 'Fri Oct 10 14:26:21 2014'
author: '<NAME>'
}
{
commit: '9b7b806'
date: 'Fri Oct 10 11:14:05 2014'
author: '<NAME>'
}
{
commit: '0f0a7fe'
date: 'Wed Oct 8 22:59:05 2014'
author: '<NAME>'
}
{
commit: '5afa23f'
date: 'Mon Oct 6 14:23:09 2014'
author: '<NAME>'
}
{
commit: '00ab257'
date: 'Mon Oct 6 11:50:28 2014'
author: '<NAME>'
}
{
commit: '2db13aa'
date: 'Mon Oct 6 10:49:23 2014'
author: '<NAME>'
}
{
commit: '0a61fc4'
date: 'Sun Oct 5 16:35:21 2014'
author: '<NAME>'
}
{
commit: 'fbde7a8'
date: 'Sun Oct 5 16:24:15 2014'
author: '<NAME>'
}
{
commit: '2a0439a'
date: 'Sun Oct 5 16:23:05 2014'
author: '<NAME>'
}
{
commit: '4eb86e9'
date: 'Sun Oct 5 16:22:47 2014'
author: '<NAME>'
}
{
commit: '63841e7'
date: 'Sun Oct 5 16:22:41 2014'
author: '<NAME>'
}
{
commit: 'a04b76d'
date: 'Tue Sep 30 16:21:33 2014'
author: '<NAME>'
}
{
commit: 'c548c39'
date: 'Tue Sep 30 16:17:00 2014'
author: '<NAME>'
}
{
commit: 'e9c3c4a'
date: 'Tue Sep 30 16:13:51 2014'
author: '<NAME>'
}
{
commit: 'de60b1b'
date: 'Tue Sep 30 14:34:39 2014'
author: '<NAME>'
}
{
commit: 'e81fc33'
date: 'Tue Sep 30 14:25:45 2014'
author: '<NAME>'
}
{
commit: 'ae4fc9b'
date: 'Tue Sep 30 14:03:35 2014'
author: '<NAME>'
}
{
commit: '1d521bd'
date: 'Tue Sep 30 12:49:24 2014'
author: '<NAME>'
}
{
commit: 'a2613e7'
date: 'Tue Sep 30 12:38:32 2014'
author: '<NAME>'
}
{
commit: 'ebcb76b'
date: 'Tue Sep 30 12:38:18 2014'
author: '<NAME>'
}
{
commit: '410b06f'
date: 'Tue Sep 30 11:36:05 2014'
author: '<NAME>'
}
{
commit: 'dc36471'
date: 'Fri Sep 26 14:55:30 2014'
author: '<NAME>'
}
{
commit: 'bd01fd8'
date: 'Fri Sep 26 14:44:28 2014'
author: '<NAME>'
}
{
commit: '9a04efe'
date: 'Fri Sep 26 14:11:58 2014'
author: '<NAME>'
}
{
commit: 'e13feb4'
date: 'Fri Sep 26 14:11:36 2014'
author: '<NAME>'
}
{
commit: '570630a'
date: 'Fri Sep 26 14:11:21 2014'
author: '<NAME>'
}
{
commit: '82d933f'
date: 'Fri Sep 26 13:59:26 2014'
author: '<NAME>'
}
{
commit: '1e64425'
date: 'Thu Sep 25 19:01:01 2014'
author: '<NAME>'
}
{
commit: 'bf0e721'
date: 'Thu Sep 25 18:34:14 2014'
author: '<NAME>'
}
{
commit: 'd8c6123'
date: 'Thu Sep 25 18:16:04 2014'
author: '<NAME>'
}
{
commit: '9e116b5'
date: 'Thu Sep 25 16:59:41 2014'
author: '<NAME>'
}
{
commit: 'c1ad81e'
date: 'Thu Sep 25 16:48:56 2014'
author: '<NAME>'
}
{
commit: 'ffd106e'
date: 'Thu Sep 25 15:56:49 2014'
author: '<NAME>'
}
{
commit: 'fe9fa63'
date: 'Thu Sep 25 15:39:48 2014'
author: '<NAME>'
}
{
commit: '48028d3'
date: 'Thu Sep 25 15:38:25 2014'
author: '<NAME>'
}
{
commit: '4686f4f'
date: 'Thu Sep 25 15:31:20 2014'
author: '<NAME>'
}
{
commit: '9552f44'
date: 'Thu Sep 25 15:31:06 2014'
author: '<NAME>'
}
{
commit: '962f45d'
date: 'Thu Sep 25 15:30:51 2014'
author: '<NAME>'
}
{
commit: '2f8594f'
date: 'Thu Sep 25 15:30:33 2014'
author: '<NAME>'
}
{
commit: '40f024f'
date: 'Thu Sep 25 15:04:35 2014'
author: '<NAME>'
}
{
commit: 'e5ce332'
date: 'Thu Sep 25 11:14:52 2014'
author: '<NAME>'
}
{
commit: 'fce1baa'
date: 'Wed Sep 24 22:34:07 2014'
author: '<NAME>'
}
{
commit: 'b9ae0ad'
date: 'Tue Sep 23 14:55:19 2014'
author: '<NAME>'
}
{
commit: '814d193'
date: 'Mon Sep 22 21:23:04 2014'
author: '<NAME>'
}
{
commit: '065fa18'
date: 'Mon Sep 22 21:14:32 2014'
author: '<NAME>'
}
{
commit: '4a29f4e'
date: 'Thu Sep 18 15:02:01 2014'
author: '<NAME>'
}
{
commit: '1deb452'
date: 'Thu Sep 18 14:58:35 2014'
author: '<NAME>'
}
{
commit: 'b8abf40'
date: 'Thu Sep 18 14:58:02 2014'
author: '<NAME>'
}
{
commit: '66e8ba2'
date: 'Thu Sep 18 14:56:21 2014'
author: '<NAME>'
}
{
commit: 'd7c40ce'
date: 'Thu Sep 18 14:55:36 2014'
author: '<NAME>'
}
{
commit: 'bf11c56'
date: 'Thu Sep 18 14:49:14 2014'
author: '<NAME>'
}
{
commit: '5b9777d'
date: 'Thu Sep 18 14:47:43 2014'
author: '<NAME>'
}
{
commit: 'cc66c7b'
date: 'Thu Sep 18 14:43:08 2014'
author: '<NAME>'
}
{
commit: '06a4df9'
date: 'Sun Aug 24 14:54:37 2014'
author: '<NAME>'
}
{
commit: 'e61b373'
date: 'Wed Aug 20 15:35:10 2014'
author: '<NAME>'
}
{
commit: '6059202'
date: 'Tue Aug 19 14:14:45 2014'
author: '<NAME>'
}
{
commit: 'bccd4e3'
date: 'Tue Aug 19 14:05:24 2014'
author: '<NAME>'
}
{
commit: 'f13bd7f'
date: 'Tue Aug 19 14:04:45 2014'
author: '<NAME>'
}
{
commit: '689cbb4'
date: 'Sun Aug 17 14:39:32 2014'
author: '<NAME>'
}
{
commit: 'daa11db'
date: 'Sun Aug 17 11:54:23 2014'
author: '<NAME>'
}
{
commit: 'c73c31f'
date: 'Thu Aug 14 06:44:14 2014'
author: '<NAME>'
}
{
commit: '51041c6'
date: 'Tue Jul 22 14:17:04 2014'
author: '<NAME>'
}
{
commit: '20a26c1'
date: 'Tue Jul 22 14:08:44 2014'
author: '<NAME>'
}
{
commit: 'f65e89d'
date: 'Tue Jul 22 14:05:47 2014'
author: '<NAME>'
}
{
commit: 'd14e883'
date: 'Thu Jul 10 12:38:40 2014'
author: '<NAME>'
}
{
commit: 'd907147'
date: 'Tue Jul 1 15:06:41 2014'
author: '<NAME>'
}
{
commit: '6ae8769'
date: 'Tue Jul 1 14:36:00 2014'
author: '<NAME>'
}
{
commit: 'e3be363'
date: 'Fri Jun 27 11:23:23 2014'
author: '<NAME>'
}
{
commit: '0839b4b'
date: 'Fri Jun 27 10:42:30 2014'
author: '<NAME>'
}
{
commit: 'f1ab831'
date: 'Wed Jun 25 00:21:40 2014'
author: '<NAME>'
}
{
commit: 'cef8706'
date: 'Thu Jun 19 16:21:10 2014'
author: '<NAME>'
}
{
commit: '4235f0c'
date: 'Thu Jun 19 16:20:55 2014'
author: '<NAME>'
}
{
commit: 'e3e38a1'
date: 'Thu Jun 19 15:49:02 2014'
author: '<NAME>'
}
{
commit: '9a0f05e'
date: 'Tue Jun 17 11:15:25 2014'
author: '<NAME>'
}
{
commit: '536cf90'
date: 'Tue Jun 17 11:13:49 2014'
author: '<NAME>'
}
{
commit: '1fb5c9e'
date: 'Tue Jun 17 01:02:39 2014'
author: '<NAME>'
}
{
commit: 'fafb016'
date: 'Tue Jun 17 01:02:22 2014'
author: '<NAME>'
}
{
commit: 'f34db1c'
date: 'Tue Jun 17 01:01:51 2014'
author: '<NAME>'
}
{
commit: '2ed6bab'
date: 'Wed Jun 4 12:40:06 2014'
author: '<NAME>'
}
{
commit: '5476611'
date: 'Fri May 30 14:03:40 2014'
author: '<NAME>'
}
{
commit: '53391c3'
date: 'Fri May 30 14:02:27 2014'
author: '<NAME>'
}
{
commit: 'ccc99d4'
date: 'Fri May 30 12:18:04 2014'
author: '<NAME>'
}
{
commit: 'd16326a'
date: 'Fri May 30 12:14:21 2014'
author: '<NAME>'
}
{
commit: 'bb58df1'
date: 'Mon May 26 16:09:09 2014'
author: '<NAME>'
}
{
commit: '975b2b1'
date: 'Wed May 21 16:54:11 2014'
author: '<NAME>'
}
{
commit: '21a49cc'
date: 'Wed May 21 16:52:35 2014'
author: '<NAME>'
}
{
commit: 'fe67bbc'
date: 'Wed May 21 16:39:48 2014'
author: '<NAME>'
}
{
commit: '0a8a768'
date: 'Wed May 21 16:20:50 2014'
author: '<NAME>'
}
{
commit: '20decd2'
date: 'Sun May 18 16:18:30 2014'
author: '<NAME>'
}
{
commit: 'd7c2135'
date: 'Sun May 18 16:18:12 2014'
author: '<NAME>'
}
{
commit: '69c4d63'
date: 'Sat May 17 21:26:31 2014'
author: '<NAME>'
}
{
commit: 'e86abfa'
date: 'Thu May 15 23:57:06 2014'
author: '<NAME>'
}
{
commit: '216a271'
date: 'Thu May 15 23:33:22 2014'
author: '<NAME>'
}
{
commit: 'a05732a'
date: 'Thu May 15 23:20:17 2014'
author: '<NAME>'
}
{
commit: '7f31ec8'
date: 'Thu May 15 23:03:48 2014'
author: '<NAME>'
}
{
commit: '3d42703'
date: 'Thu May 15 16:16:05 2014'
author: '<NAME>'
}
{
commit: 'c46ce9e'
date: 'Thu May 15 15:33:04 2014'
author: '<NAME>'
}
{
commit: '80ea2f8'
date: 'Thu May 15 15:32:43 2014'
author: '<NAME>'
}
{
commit: '943df9a'
date: 'Thu May 15 13:39:22 2014'
author: '<NAME>'
}
{
commit: 'a9fd591'
date: 'Thu May 15 11:31:36 2014'
author: '<NAME>'
}
{
commit: 'b31938a'
date: 'Thu May 15 10:32:30 2014'
author: '<NAME>'
}
{
commit: '4630242'
date: 'Thu May 15 10:24:30 2014'
author: '<NAME>'
}
{
commit: '30e688e'
date: 'Thu May 15 10:22:15 2014'
author: '<NAME>'
}
{
commit: 'bb15649'
date: 'Thu May 15 10:03:24 2014'
author: '<NAME>'
}
{
commit: '293c7a7'
date: 'Tue May 13 19:16:59 2014'
author: '<NAME>'
}
{
commit: '2bbd10a'
date: 'Tue May 13 19:15:51 2014'
author: '<NAME>'
}
{
commit: 'c16f69f'
date: 'Tue May 13 19:13:41 2014'
author: '<NAME>'
}
{
commit: '2205ba0'
date: 'Sun Apr 27 00:10:24 2014'
author: '<NAME>'
}
{
commit: 'e175596'
date: 'Sun Apr 27 00:10:15 2014'
author: '<NAME>'
}
{
commit: '68e7cd4'
date: 'Sun Apr 20 11:19:07 2014'
author: '<NAME>'
}
{
commit: 'e95bf6b'
date: 'Sun Apr 20 11:18:52 2014'
author: '<NAME>'
}
{
commit: 'fad01d1'
date: 'Sat Apr 19 13:20:25 2014'
author: '<NAME>'
}
{
commit: '0999d52'
date: 'Sat Apr 19 13:14:47 2014'
author: '<NAME>'
}
{
commit: '449b155'
date: 'Sat Apr 19 12:46:04 2014'
author: '<NAME>'
}
{
commit: '958bf94'
date: 'Sat Apr 19 12:21:20 2014'
author: '<NAME>'
}
{
commit: '2c1fd6d'
date: 'Sat Apr 19 12:16:49 2014'
author: '<NAME>'
}
{
commit: 'b78ddb1'
date: 'Sat Apr 19 12:11:55 2014'
author: '<NAME>'
}
{
commit: '4b76906'
date: 'Sat Apr 19 11:45:36 2014'
author: '<NAME>'
}
{
commit: '19d4ede'
date: 'Sat Apr 19 11:45:12 2014'
author: '<NAME>'
}
{
commit: '1f3e010'
date: 'Fri Nov 22 13:52:01 2013'
author: '<NAME>'
}
{
commit: 'ae7c991'
date: 'Thu Nov 14 11:38:59 2013'
author: '<NAME>'
}
{
commit: '5fce264'
date: 'Thu Nov 14 11:38:48 2013'
author: '<NAME>'
}
{
commit: 'e4c5467'
date: 'Tue Nov 5 18:41:28 2013'
author: '<NAME>'
}
{
commit: '01172c3'
date: 'Tue Nov 5 18:38:20 2013'
author: '<NAME>'
}
{
commit: '7942d46'
date: 'Tue Oct 29 01:06:30 2013'
author: '<NAME>'
}
{
commit: '352e090'
date: 'Tue Oct 29 01:04:13 2013'
author: '<NAME>'
}
{
commit: '5d463ce'
date: 'Thu Oct 17 14:04:55 2013'
author: '<NAME>'
}
{
commit: '25a47eb'
date: 'Wed Oct 16 20:47:21 2013'
author: '<NAME>'
}
{
commit: '1d8cf24'
date: 'Tue Oct 15 15:07:33 2013'
author: '<NAME>'
}
{
commit: '71678bf'
date: 'Tue Oct 15 14:57:11 2013'
author: '<NAME>'
}
{
commit: '4a289c5'
date: 'Tue Oct 15 12:54:50 2013'
author: '<NAME>'
}
{
commit: '6c102c2'
date: 'Tue Oct 15 12:42:45 2013'
author: '<NAME>'
}
{
commit: 'b629549'
date: 'Tue Oct 15 12:05:06 2013'
author: '<NAME>'
}
{
commit: '78f3d91'
date: 'Tue Oct 15 03:02:06 2013'
author: '<NAME>'
}
{
commit: 'bad5a94'
date: 'Sun Oct 13 20:49:24 2013'
author: '<NAME>'
}
{
commit: '3e4051b'
date: 'Sun Oct 13 14:53:43 2013'
author: '<NAME>'
}
{
commit: '6eefeb0'
date: 'Sun Oct 13 14:38:53 2013'
author: '<NAME>'
}
{
commit: 'f367c76'
date: 'Sat Oct 12 17:53:50 2013'
author: '<NAME>'
}
{
commit: 'aa847ac'
date: 'Fri Oct 11 10:21:30 2013'
author: '<NAME>'
}
{
commit: 'd21e7d0'
date: 'Tue Oct 1 07:42:38 2013'
author: '<NAME>'
}
{
commit: '0cbe0a6'
date: 'Fri Sep 13 17:15:00 2013'
author: '<NAME>'
}
{
commit: 'e4ed1d8'
date: 'Fri Sep 13 08:03:53 2013'
author: '<NAME>'
}
{
commit: '3f01dfe'
date: 'Sun Sep 8 02:54:43 2013'
author: '<NAME>'
}
{
commit: '9bd31bf'
date: 'Sat Jul 13 11:37:26 2013'
author: '<NAME>'
}
{
commit: 'ae64324'
date: 'Thu May 30 16:16:25 2013'
author: '<NAME>'
}
{
commit: '4b8be87'
date: 'Thu May 30 16:06:44 2013'
author: '<NAME>'
}
{
commit: '538806f'
date: 'Wed May 29 15:58:48 2013'
author: '<NAME>'
}
{
commit: '2d59000'
date: 'Wed May 29 15:45:28 2013'
author: '<NAME>'
}
{
commit: 'dcd4faa'
date: 'Fri May 24 14:41:27 2013'
author: '<NAME>'
}
{
commit: '150f969'
date: 'Fri May 24 14:09:31 2013'
author: '<NAME>'
}
{
commit: '2322236'
date: 'Fri May 24 13:00:41 2013'
author: '<NAME>'
}
{
commit: '3d6b2d5'
date: 'Mon May 20 23:53:41 2013'
author: '<NAME>'
}
{
commit: 'ee67a19'
date: 'Mon May 20 23:53:35 2013'
author: '<NAME>'
}
{
commit: '0b97774'
date: 'Thu May 16 13:24:58 2013'
author: '<NAME>'
}
{
commit: '6bfb912'
date: 'Thu May 16 13:24:20 2013'
author: '<NAME>'
}
{
commit: '6fd264c'
date: 'Wed May 15 16:22:27 2013'
author: '<NAME>'
}
{
commit: 'c45dbb7'
date: 'Mon May 13 20:55:56 2013'
author: '<NAME>'
}
{
commit: 'a0fd659'
date: 'Mon May 13 20:55:50 2013'
author: '<NAME>'
}
{
commit: '6ce75ff'
date: 'Sun May 12 00:24:11 2013'
author: '<NAME>'
}
{
commit: '3e381c6'
date: 'Sun May 12 00:23:55 2013'
author: '<NAME>'
}
{
commit: '8a8f4ae'
date: 'Sun May 12 00:23:40 2013'
author: '<NAME>'
}
{
commit: '167722f'
date: 'Sun May 12 00:23:13 2013'
author: '<NAME>'
}
{
commit: 'ec11e8b'
date: 'Sun May 12 00:23:06 2013'
author: '<NAME>'
}
{
commit: '5728949'
date: 'Sun May 12 00:22:53 2013'
author: '<NAME>'
}
{
commit: '318b135'
date: 'Wed May 8 20:13:38 2013'
author: '<NAME>'
}
{
commit: '1296d57'
date: 'Tue May 7 22:45:07 2013'
author: '<NAME>'
}
{
commit: '56dc7c1'
date: 'Tue May 7 22:44:42 2013'
author: '<NAME>'
}
{
commit: '8b12dcb'
date: 'Sun May 5 11:45:10 2013'
author: '<NAME>'
}
{
commit: 'ed0c03b'
date: 'Fri May 3 00:07:53 2013'
author: '<NAME>'
}
{
commit: '2ebf5b3'
date: 'Fri May 3 00:02:23 2013'
author: '<NAME>'
}
{
commit: '3ad0dc0'
date: 'Thu May 2 22:53:36 2013'
author: '<NAME>'
}
{
commit: '5b23d4e'
date: 'Thu May 2 22:50:16 2013'
author: '<NAME>'
}
{
commit: '8001e3a'
date: 'Thu May 2 22:50:11 2013'
author: '<NAME>'
}
{
commit: 'c02eef3'
date: 'Thu May 2 22:48:52 2013'
author: '<NAME>'
}
{
commit: 'acb0e29'
date: 'Thu May 2 22:48:35 2013'
author: '<NAME>'
}
{
commit: 'c3de315'
date: 'Thu May 2 22:48:25 2013'
author: '<NAME>'
}
{
commit: 'ff7e8c0'
date: 'Thu May 2 22:48:15 2013'
author: '<NAME>'
}
{
commit: '4017411'
date: 'Thu May 2 22:48:05 2013'
author: '<NAME>'
}
{
commit: 'e494459'
date: 'Thu May 2 22:44:56 2013'
author: '<NAME>'
}
{
commit: '2b3accc'
date: 'Thu May 2 22:44:33 2013'
author: '<NAME>'
}
{
commit: '1ec6a88'
date: 'Thu May 2 22:44:15 2013'
author: '<NAME>'
}
{
commit: '3e2c579'
date: 'Thu May 2 22:44:01 2013'
author: '<NAME>'
}
{
commit: '9b9dadd'
date: 'Thu May 2 22:43:41 2013'
author: '<NAME>'
}
{
commit: '5da20c9'
date: 'Wed May 1 20:43:29 2013'
author: '<NAME>'
}
{
commit: '078e2d3'
date: 'Tue Apr 30 11:29:15 2013'
author: '<NAME>'
}
{
commit: '578a6c5'
date: 'Tue Apr 30 11:29:08 2013'
author: '<NAME>'
}
{
commit: '518df87'
date: 'Tue Apr 30 11:28:28 2013'
author: '<NAME>'
}
{
commit: '62dfbf3'
date: 'Tue Apr 30 11:28:13 2013'
author: '<NAME>'
}
{
commit: '5e5b065'
date: 'Tue Apr 30 11:27:54 2013'
author: '<NAME>'
}
{
commit: '09f407a'
date: 'Tue Apr 30 11:25:30 2013'
author: '<NAME>'
}
{
commit: 'a85039b'
date: 'Tue Apr 30 11:24:53 2013'
author: '<NAME>'
}
{
commit: '524606a'
date: 'Tue Apr 30 11:23:59 2013'
author: '<NAME>'
}
{
commit: '2870f60'
date: 'Sat Apr 27 12:51:52 2013'
author: '<NAME>'
}
{
commit: 'a258ebf'
date: 'Sat Apr 27 12:00:17 2013'
author: '<NAME>'
}
{
commit: '342a117'
date: 'Sat Apr 27 11:59:44 2013'
author: '<NAME>'
}
{
commit: '66501bb'
date: 'Sat Apr 27 11:04:06 2013'
author: '<NAME>'
}
{
commit: '23f141a'
date: 'Sat Apr 27 10:46:39 2013'
author: '<NAME>'
}
{
commit: 'b480b5f'
date: 'Sat Apr 27 10:45:45 2013'
author: '<NAME>'
}
{
commit: '18773c5'
date: 'Sat Apr 27 10:45:33 2013'
author: '<NAME>'
}
{
commit: '0acf5ea'
date: 'Thu Apr 25 17:39:23 2013'
author: '<NAME>'
}
{
commit: '2bf250b'
date: 'Thu Apr 25 17:27:34 2013'
author: '<NAME>'
}
{
commit: '949744f'
date: 'Thu Apr 25 17:27:17 2013'
author: '<NAME>'
}
{
commit: '987d99d'
date: 'Thu Apr 25 17:26:29 2013'
author: '<NAME>'
}
{
commit: 'a9a6a92'
date: 'Thu Apr 25 17:11:23 2013'
author: '<NAME>'
}
{
commit: '7469d1f'
date: 'Thu Apr 25 15:02:35 2013'
author: '<NAME>'
}
{
commit: '872f5c2'
date: 'Thu Apr 25 14:59:08 2013'
author: '<NAME>'
}
{
commit: 'ab6a379'
date: 'Thu Apr 25 14:57:13 2013'
author: '<NAME>'
}
{
commit: '482d86a'
date: 'Thu Apr 25 14:38:41 2013'
author: '<NAME>'
}
{
commit: 'a240ff4'
date: 'Thu Apr 25 14:38:24 2013'
author: '<NAME>'
}
{
commit: '378e7ff'
date: 'Thu Apr 25 14:37:55 2013'
author: '<NAME>'
}
{
commit: '8f4385d'
date: 'Thu Apr 25 14:36:54 2013'
author: '<NAME>'
}
{
commit: 'efaef61'
date: 'Thu Apr 25 14:36:10 2013'
author: '<NAME>'
}
{
commit: '48beba7'
date: 'Thu Apr 25 14:35:57 2013'
author: '<NAME>'
}
{
commit: '8f0ef3f'
date: 'Thu Apr 25 14:35:42 2013'
author: '<NAME>'
}
{
commit: '5b1e121'
date: 'Thu Apr 25 14:35:03 2013'
author: '<NAME>'
}
{
commit: 'c05bdd1'
date: 'Sun Apr 21 19:01:19 2013'
author: '<NAME>'
}
{
commit: 'bb67d68'
date: 'Sun Apr 21 19:01:03 2013'
author: '<NAME>'
}
{
commit: 'b00dc5a'
date: 'Sun Apr 21 18:53:03 2013'
author: '<NAME>'
}
{
commit: '80bd8e7'
date: 'Sun Apr 21 18:52:42 2013'
author: '<NAME>'
}
{
commit: 'd9fff17'
date: 'Sun Apr 21 18:52:22 2013'
author: '<NAME>'
}
{
commit: 'b22ccc9'
date: 'Sun Apr 21 18:51:58 2013'
author: '<NAME>'
}
{
commit: 'b1fbf81'
date: 'Sun Apr 21 18:51:39 2013'
author: '<NAME>'
}
{
commit: '88c3ccb'
date: 'Sun Apr 21 18:51:04 2013'
author: '<NAME>'
}
{
commit: '816cfa7'
date: 'Sat Apr 20 11:28:37 2013'
author: '<NAME>'
}
{
commit: '33d86fb'
date: 'Sat Apr 20 11:28:05 2013'
author: '<NAME>'
}
{
commit: '4e8f467'
date: 'Sat Apr 20 11:26:26 2013'
author: '<NAME>'
}
{
commit: 'c0207c0'
date: 'Sat Apr 20 11:25:44 2013'
author: '<NAME>'
}
{
commit: '20c81a1'
date: 'Fri Apr 12 23:02:20 2013'
author: '<NAME>'
}
{
commit: '6da090f'
date: 'Fri Apr 12 23:02:07 2013'
author: '<NAME>'
}
{
commit: 'd840b70'
date: 'Wed Apr 10 00:48:14 2013'
author: '<NAME>'
}
{
commit: '2aaea25'
date: 'Wed Apr 10 00:38:01 2013'
author: '<NAME>'
}
{
commit: '3fea5e0'
date: 'Thu Apr 4 00:39:13 2013'
author: '<NAME>'
}
{
commit: '8fe4409'
date: 'Thu Apr 4 00:39:06 2013'
author: '<NAME>'
}
{
commit: 'f149a9a'
date: 'Thu Apr 4 00:38:45 2013'
author: '<NAME>'
}
{
commit: '64dee09'
date: 'Thu Apr 4 00:38:33 2013'
author: '<NAME>'
}
{
commit: '1c91f4a'
date: 'Sat Mar 23 16:02:21 2013'
author: '<NAME>'
}
{
commit: '685804a'
date: 'Sat Mar 23 15:59:21 2013'
author: '<NAME>'
}
{
commit: 'cda5559'
date: 'Fri Mar 22 13:50:20 2013'
author: '<NAME>'
}
{
commit: '1018b69'
date: 'Fri Mar 22 13:50:04 2013'
author: '<NAME>'
}
{
commit: '37e04d5'
date: 'Tue Mar 19 18:32:22 2013'
author: '<NAME>'
}
{
commit: '110b2b5'
date: 'Tue Mar 19 18:31:24 2013'
author: '<NAME>'
}
{
commit: '78c16ef'
date: 'Tue Mar 19 18:30:43 2013'
author: '<NAME>'
}
{
commit: '2f345ec'
date: 'Mon Mar 18 21:35:08 2013'
author: '<NAME>'
}
{
commit: 'cd3a9e5'
date: 'Mon Mar 18 21:34:59 2013'
author: '<NAME>'
}
{
commit: 'c099ddd'
date: 'Mon Mar 18 21:34:29 2013'
author: '<NAME>'
}
{
commit: '5192cec'
date: 'Mon Mar 18 16:23:45 2013'
author: '<NAME>'
}
{
commit: 'd6348db'
date: 'Sun Mar 17 19:28:41 2013'
author: '<NAME>'
}
{
commit: 'e73e07a'
date: 'Sun Mar 17 19:16:27 2013'
author: '<NAME>'
}
{
commit: '98ca654'
date: 'Sun Mar 17 17:20:52 2013'
author: '<NAME>'
}
{
commit: '37cd9ec'
date: 'Sun Mar 17 16:45:26 2013'
author: '<NAME>'
}
{
commit: 'f74c9eb'
date: 'Sun Mar 17 15:54:12 2013'
author: '<NAME>'
}
{
commit: '5f9b84f'
date: 'Thu Mar 14 09:16:37 2013'
author: '<NAME>'
}
{
commit: '6e29b46'
date: 'Wed Mar 13 17:05:41 2013'
author: '<NAME>'
}
{
commit: '0dc7c7f'
date: 'Wed Mar 13 17:05:06 2013'
author: '<NAME>'
}
{
commit: 'cf5d564'
date: 'Wed Mar 13 16:53:07 2013'
author: '<NAME>'
}
{
commit: 'bde291d'
date: 'Wed Mar 13 12:13:38 2013'
author: '<NAME>'
}
{
commit: 'e2e5546'
date: 'Wed Mar 13 12:13:14 2013'
author: '<NAME>'
}
{
commit: '67f1220'
date: 'Wed Mar 13 12:12:36 2013'
author: '<NAME>'
}
{
commit: 'ec69c6d'
date: 'Tue Mar 12 16:23:01 2013'
author: '<NAME>'
}
{
commit: '08504e6'
date: 'Tue Mar 12 15:58:56 2013'
author: '<NAME>'
}
{
commit: '3a44655'
date: 'Tue Mar 12 15:54:51 2013'
author: '<NAME>'
}
{
commit: '2556b6f'
date: 'Tue Mar 12 15:02:47 2013'
author: '<NAME>'
}
{
commit: '250cb04'
date: 'Tue Mar 12 15:02:01 2013'
author: '<NAME>'
}
{
commit: 'a379acc'
date: 'Mon Mar 11 15:13:03 2013'
author: '<NAME>'
}
{
commit: 'b1d8b15'
date: 'Mon Mar 11 00:41:22 2013'
author: '<NAME>'
}
{
commit: '589a47e'
date: 'Sun Mar 10 23:39:28 2013'
author: '<NAME>'
}
{
commit: '7d65cab'
date: 'Fri Mar 1 15:29:59 2013'
author: '<NAME>'
}
{
commit: 'd0106fb'
date: 'Fri Mar 1 15:27:37 2013'
author: '<NAME>'
}
{
commit: '6d0c7c9'
date: 'Fri Mar 1 05:00:43 2013'
author: '<NAME>'
}
{
commit: '913c290'
date: 'Tue Feb 26 14:30:41 2013'
author: '<NAME>'
}
{
commit: '27de5fe'
date: 'Sun Feb 17 21:45:19 2013'
author: '<NAME>'
}
{
commit: 'eaa81c4'
date: 'Sat Feb 16 15:19:53 2013'
author: '<NAME>'
}
{
commit: '27f5914'
date: 'Sat Feb 16 15:11:28 2013'
author: '<NAME>'
}
{
commit: '57f3631'
date: 'Fri Jan 25 10:26:54 2013'
author: '<NAME>'
}
{
commit: '5a947f5'
date: 'Mon Jan 14 18:51:10 2013'
author: '<NAME>'
}
{
commit: 'b28681d'
date: 'Mon Jan 14 14:20:33 2013'
author: '<NAME>'
}
{
commit: 'c1ab12d'
date: 'Mon Jan 14 14:20:26 2013'
author: '<NAME>'
}
{
commit: '53fd22e'
date: 'Mon Jan 14 14:19:30 2013'
author: '<NAME>'
}
{
commit: '54e88cc'
date: 'Mon Jan 14 14:18:50 2013'
author: '<NAME>'
}
{
commit: 'f975305'
date: 'Wed Jan 2 16:20:57 2013'
author: '<NAME>'
}
{
commit: 'bb1d239'
date: 'Mon Dec 31 11:54:33 2012'
author: '<NAME>'
}
{
commit: '1677a43'
date: 'Wed Dec 26 02:15:58 2012'
author: '<NAME>'
}
{
commit: '818a4a2'
date: 'Tue Dec 25 17:33:03 2012'
author: '<NAME>'
}
{
commit: '0c64794'
date: 'Tue Dec 25 17:32:37 2012'
author: '<NAME>'
}
{
commit: '3c2d257'
date: 'Tue Dec 25 17:32:01 2012'
author: '<NAME>'
}
{
commit: '075b827'
date: 'Tue Dec 25 17:31:21 2012'
author: '<NAME>'
}
{
commit: 'd82fc4e'
date: 'Tue Dec 25 17:30:56 2012'
author: '<NAME>'
}
{
commit: '1358741'
date: 'Tue Dec 25 17:30:37 2012'
author: '<NAME>'
}
{
commit: 'bfdceaf'
date: 'Fri Dec 21 14:05:21 2012'
author: '<NAME>'
}
{
commit: '5bdeb37'
date: 'Thu Dec 20 16:08:05 2012'
author: '<NAME>'
}
{
commit: '35ad7df'
date: 'Thu Dec 20 16:06:57 2012'
author: '<NAME>'
}
{
commit: 'bcd1a1f'
date: 'Thu Dec 20 15:59:48 2012'
author: '<NAME>'
}
] | true | exports.builds = [
{
commit: 'c042de3'
date: 'Fri Mar 9 16:22:13 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '08e7197'
date: 'Thu Mar 8 16:19:36 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a426bbe'
date: 'Wed Mar 7 15:43:02 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7cd32b4'
date: 'Fri Mar 2 13:25:08 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3788e18'
date: 'Fri Mar 2 12:18:38 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1206293'
date: 'Fri Feb 23 11:47:58 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f32dd1f'
date: 'Tue Feb 20 13:03:00 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5cf9935'
date: 'Tue Feb 20 13:01:11 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '03ae78d'
date: 'Tue Feb 20 09:41:19 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8616043'
date: 'Tue Feb 20 09:32:19 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c110c3b'
date: 'Mon Feb 19 14:14:33 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aaf80b0'
date: 'Fri Feb 16 15:38:13 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '64a7037'
date: 'Fri Feb 16 15:33:36 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '454aef4'
date: 'Fri Feb 16 10:20:27 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5ce33ab'
date: 'Mon Feb 12 13:20:46 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '94daa97'
date: 'Mon Feb 12 13:05:12 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a32809d'
date: 'Mon Feb 12 12:24:54 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0590c8c'
date: 'Thu Feb 1 23:24:17 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e210fb1'
date: 'Thu Feb 1 23:05:19 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '90d9896'
date: 'Thu Feb 1 18:47:35 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e249eb0'
date: 'Thu Feb 1 15:34:44 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '08c3304'
date: 'Thu Feb 1 11:55:58 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2d58487'
date: 'Thu Feb 1 11:55:42 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9f07664'
date: 'Thu Feb 1 09:47:28 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '35dd174'
date: 'Thu Feb 1 09:47:15 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8cc82e7'
date: 'Thu Feb 1 09:18:03 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e1711bd'
date: 'Thu Feb 1 00:08:08 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '29e1659'
date: 'Wed Jan 31 23:29:27 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0ff94ee'
date: 'Wed Jan 31 23:19:56 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8b1c6f3'
date: 'Wed Jan 31 22:46:38 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '77dbf87'
date: 'Wed Jan 31 22:41:34 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '23da2bf'
date: 'Sun Jan 28 01:11:53 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0b25219'
date: 'Fri Jan 26 17:35:28 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd9d5ef5'
date: 'Fri Jan 26 16:34:42 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e8c41ca'
date: 'Fri Jan 26 16:34:28 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5d18f8a'
date: 'Fri Jan 26 16:20:39 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8bb17ec'
date: 'Fri Jan 26 16:19:31 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8744c3e'
date: 'Wed Jan 24 12:12:02 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a2342fc'
date: 'Wed Jan 24 12:11:50 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '370f5c8'
date: 'Wed Jan 24 12:11:18 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '044f470'
date: 'Wed Jan 24 12:07:05 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8df9369'
date: 'Tue Jan 23 13:39:41 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '73e3afb'
date: 'Tue Jan 23 13:26:17 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b6af600'
date: 'Tue Jan 23 13:23:57 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f6146fa'
date: 'Mon Jan 22 16:57:11 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c3e2c64'
date: 'Mon Jan 22 16:56:59 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '97d82c5'
date: 'Mon Jan 22 16:16:30 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7803f03'
date: 'Mon Jan 22 13:57:39 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a15f981'
date: 'Fri Jan 19 17:38:50 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3118f37'
date: 'Fri Jan 19 16:39:35 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5ae3756'
date: 'Fri Jan 19 16:39:24 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3b115bf'
date: 'Fri Jan 19 13:51:22 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd35606e'
date: 'Fri Jan 19 13:45:08 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ee99adf'
date: 'Fri Jan 19 13:02:30 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '55a881b'
date: 'Fri Jan 19 12:29:14 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '875e2ba'
date: 'Fri Jan 19 12:15:26 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c404ded'
date: 'Fri Jan 19 12:07:22 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '37e97cb'
date: 'Thu Jan 18 15:19:33 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c40be67'
date: 'Thu Jan 18 12:51:14 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6cdc580'
date: 'Thu Jan 18 12:06:37 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '60e5d26'
date: 'Thu Jan 18 11:40:23 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d165dd'
date: 'Thu Jan 18 09:58:52 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fed1f13'
date: 'Wed Jan 17 16:56:16 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f89a6b9'
date: 'Wed Jan 17 13:23:55 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd19ad1e'
date: 'Wed Jan 17 12:27:32 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cea1e51'
date: 'Wed Jan 17 10:59:18 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ad45c89'
date: 'Wed Jan 17 10:58:52 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6ffdfe9'
date: 'Wed Jan 17 09:53:15 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '10af20c'
date: 'Mon Jan 15 15:16:26 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd047b30'
date: 'Wed Jan 17 09:21:32 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '263476e'
date: 'Wed Jan 17 09:21:13 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '75be5e7'
date: 'Tue Jan 16 23:13:22 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '43fd2a4'
date: 'Tue Jan 16 23:12:40 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'efbc233'
date: 'Tue Jan 16 17:28:16 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e80c465'
date: 'Tue Jan 16 14:16:21 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1aec23d'
date: 'Tue Jan 16 14:15:50 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e3a10c8'
date: 'Tue Jan 16 12:39:11 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ca62c90'
date: 'Tue Jan 16 11:01:50 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '42cc2a3'
date: 'Mon Jan 15 18:26:31 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a0030c0'
date: 'Mon Jan 15 16:22:43 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a909132'
date: 'Mon Jan 15 14:27:18 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '07ac667'
date: 'Mon Jan 15 14:26:59 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3413e12'
date: 'Mon Jan 15 14:22:27 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '00df8de'
date: 'Mon Jan 15 14:22:14 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd90e148'
date: 'Mon Jan 15 12:39:26 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6fd6c54'
date: 'Mon Jan 15 12:33:39 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '939b4df'
date: 'Mon Jan 15 12:21:22 2018'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '206fb0e'
date: 'Thu Dec 21 14:46:04 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d6094a'
date: 'Tue Dec 19 23:07:35 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7173b2d'
date: 'Tue Dec 19 23:07:17 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '549b1d0'
date: 'Tue Dec 19 22:22:35 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3575b95'
date: 'Sun Dec 17 21:37:31 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2aa68af'
date: 'Sun Dec 17 21:37:11 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0821535'
date: 'Wed Dec 13 14:10:56 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a557f66'
date: 'Wed Dec 13 14:10:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a740c2d'
date: 'Tue Dec 12 18:12:26 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd6fd324'
date: 'Tue Dec 12 17:02:02 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6f33b51'
date: 'Mon Dec 11 21:12:55 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c93bf72'
date: 'Mon Dec 11 14:18:31 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '651439b'
date: 'Fri Dec 8 12:26:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5fbb70e'
date: 'Thu Dec 7 15:16:08 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e31668'
date: 'Thu Dec 7 15:17:53 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '58d7faf'
date: 'Fri Nov 17 16:20:40 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '021b060'
date: 'Wed Nov 15 23:51:41 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a206425'
date: 'Wed Nov 15 17:38:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd3ed9ec'
date: 'Wed Nov 15 17:13:43 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4939336'
date: 'Wed Nov 15 13:48:23 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '863dc71'
date: 'Wed Nov 15 12:07:10 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f5a45ee'
date: 'Wed Nov 15 11:34:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8d833db'
date: 'Wed Nov 15 10:28:16 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '613c07b'
date: 'Wed Nov 15 10:27:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd1ffabc'
date: 'Wed Nov 8 12:40:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9dd8b13'
date: 'Wed Nov 8 11:36:59 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '76769d6'
date: 'Wed Nov 8 11:11:04 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b0ada03'
date: 'Wed Nov 8 10:16:47 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '87660fa'
date: 'Tue Nov 7 17:13:49 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '56878c5'
date: 'Tue Nov 7 14:03:36 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a4be0ab'
date: 'Tue Nov 7 13:59:00 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7252e66'
date: 'Tue Nov 7 12:28:52 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a03d1f7'
date: 'Mon Nov 6 16:30:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f622504'
date: 'Sat Nov 4 22:07:07 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7a25c71'
date: 'Sat Nov 4 21:16:05 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2e42846'
date: 'Sat Nov 4 21:03:13 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '088eddd'
date: 'Sat Nov 4 20:11:11 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aa5e733'
date: 'Sat Nov 4 20:02:05 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4a61eae'
date: 'Sat Nov 4 18:13:40 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '44d206b'
date: 'Sat Nov 4 17:26:20 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '04e95d1'
date: 'Sat Nov 4 15:01:26 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '975adc6'
date: 'Sat Nov 4 14:14:30 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0f34f5c'
date: 'Fri Oct 27 15:58:47 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '54e9bcb'
date: 'Fri Oct 20 13:51:48 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4af09bc'
date: 'Thu Oct 19 14:19:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '91579c4'
date: 'Thu Oct 19 14:10:17 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bac4ce9'
date: 'Mon Oct 9 10:44:22 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0524216'
date: 'Mon Oct 9 09:00:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b274bf2'
date: 'Tue Oct 3 15:07:46 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e5c0723'
date: 'Mon Oct 2 13:32:43 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2a91ec1'
date: 'Mon Oct 2 11:43:47 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f057af6'
date: 'Thu Sep 28 15:59:32 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e1d21b'
date: 'Thu Sep 28 12:48:05 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '86e248f'
date: 'Thu Sep 28 12:45:09 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'df8942c'
date: 'Thu Sep 28 12:44:30 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '790e32e'
date: 'Thu Sep 28 11:33:03 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a61783d'
date: 'Tue Sep 19 14:11:22 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '266f591'
date: 'Mon Sep 18 17:53:43 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f14b32f'
date: 'Mon Sep 18 16:28:25 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f67e44'
date: 'Mon Sep 18 10:59:14 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a3b0c61'
date: 'Mon Sep 18 10:34:53 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a32d208'
date: 'Mon Sep 18 10:04:56 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '70e3606'
date: 'Wed Sep 13 16:56:00 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'adda6dd'
date: 'Wed Sep 13 16:13:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b6867b'
date: 'Wed Sep 13 11:52:14 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '670822c'
date: 'Tue Sep 12 11:47:22 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '16ca97e'
date: 'Mon Sep 11 23:25:30 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5745d60'
date: 'Mon Sep 11 12:12:47 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9f0dc99'
date: 'Fri Sep 8 09:17:43 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e399eba'
date: 'Fri Sep 8 09:17:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '98a128c'
date: 'Fri Sep 8 09:03:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '53fd1ee'
date: 'Fri Sep 8 09:02:18 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7845534'
date: 'Fri Sep 8 09:01:52 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9d9f59d'
date: 'Fri Sep 8 08:24:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '48e7b78'
date: 'Thu Sep 7 18:42:20 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '865771b'
date: 'Thu Sep 7 18:28:29 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3ec195f'
date: 'Thu Sep 7 18:27:27 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd0a2b52'
date: 'Tue Sep 5 14:33:56 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ac2d0b2'
date: 'Tue Sep 5 14:18:53 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7994d36'
date: 'Tue Sep 5 12:28:17 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '242f61d'
date: 'Tue Sep 5 11:50:03 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fa5ddfa'
date: 'Fri Sep 1 11:21:38 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e982caf'
date: 'Fri Sep 1 10:21:58 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4fdc139'
date: 'Tue Aug 29 16:07:45 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0c8e7c2'
date: 'Tue Aug 29 15:18:55 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e0abfdf'
date: 'Tue Aug 29 13:52:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '76592c1'
date: 'Tue Aug 29 13:49:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1d564f1'
date: 'Thu Aug 24 14:12:16 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd526e51'
date: 'Thu Aug 24 13:02:48 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a9c2efe'
date: 'Wed Aug 23 13:11:23 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ee120e5'
date: 'Thu Jul 27 12:16:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b6b792c'
date: 'Thu Jul 27 11:51:19 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '25a34cf'
date: 'Thu Jul 27 08:46:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6bc3024'
date: 'Thu Jul 27 08:25:49 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f75a057'
date: 'Mon Jul 24 17:02:14 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fcf05a7'
date: 'Mon Jul 24 17:01:49 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '76ca229'
date: 'Mon Jul 24 17:01:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd231231'
date: 'Mon Jul 24 11:29:59 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bb2ef14'
date: 'Mon Jul 24 11:13:46 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '369b1c8'
date: 'Sat Jul 22 01:12:11 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '32df3a8'
date: 'Fri Jul 21 16:11:21 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '212874c'
date: 'Fri Jul 21 16:11:08 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '72ed711'
date: 'Fri Jul 21 13:33:04 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b39ecb6'
date: 'Fri Jul 21 13:32:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '364ccbf'
date: 'Fri Jul 21 12:06:19 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9ab9796'
date: 'Fri Jul 21 10:09:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4059d42'
date: 'Fri Jul 21 11:18:30 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '60c2b63'
date: 'Fri Jul 21 10:47:00 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '37d64cf'
date: 'Fri Jul 21 09:22:45 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f1e64b9'
date: 'Thu Jul 20 19:59:08 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c82841'
date: 'Thu Jul 20 19:54:44 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4198d50'
date: 'Thu Jul 20 17:09:14 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a30cfcb'
date: 'Wed Jul 19 16:32:27 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eac82f2'
date: 'Wed Jul 19 15:11:59 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9f17f49'
date: 'Wed Jul 19 10:08:58 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0343ada'
date: 'Wed Jul 19 10:08:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eb512c2'
date: 'Mon Jul 17 18:31:05 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bac9d60'
date: 'Mon Jul 17 18:15:07 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '962b105'
date: 'Mon Jul 17 15:18:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9fe65a5'
date: 'Fri Jul 14 15:42:40 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '32da80b'
date: 'Fri Jul 14 15:42:31 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '60674ee'
date: 'Fri Jul 14 13:21:32 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '026c816'
date: 'Thu Jul 13 19:30:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'da39dc3'
date: 'Thu Jul 13 16:13:36 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fc6bd36'
date: 'Thu Jul 13 16:03:37 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3c5bbb6'
date: 'Thu Jul 13 15:54:33 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c7d4cda'
date: 'Thu Jul 13 14:43:16 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6229106'
date: 'Thu Jul 13 14:39:49 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f807e6'
date: 'Wed Jul 12 14:12:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9e9ce72'
date: 'Wed Jul 12 11:43:39 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b0ae258'
date: 'Wed Jul 12 11:42:46 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0697f93'
date: 'Tue Jul 11 22:22:37 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4ce455d'
date: 'Tue Jul 11 17:41:08 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b1cb2ca'
date: 'Tue Jul 11 15:57:46 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '687d7c2'
date: 'Fri Jul 7 17:05:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1cb646c'
date: 'Fri Jul 7 15:38:14 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5317cbc'
date: 'Thu Jul 6 14:30:48 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e1e8316'
date: 'Thu Jul 6 13:29:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e48e05'
date: 'Thu Jul 6 11:29:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b3764ff'
date: 'Wed Jul 5 10:23:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3430da7'
date: 'Fri Jun 30 11:48:52 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '635adc9'
date: 'Thu Jun 29 16:25:35 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dee606e'
date: 'Thu Jun 29 16:21:26 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '338f840'
date: 'Thu Jun 29 16:20:31 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cfa6db3'
date: 'Wed Jun 28 18:18:17 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '272bb60'
date: 'Wed Jun 28 12:35:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae0b628'
date: 'Wed Jun 28 05:26:16 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0d40d08'
date: 'Tue Jun 27 14:06:46 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b4c3bc'
date: 'Tue Jun 27 13:50:53 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8e73cb1'
date: 'Tue Jun 27 13:46:57 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '16e23b2'
date: 'Tue Jun 27 11:29:44 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a23f50d'
date: 'Tue Jun 20 16:40:23 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7aacaa8'
date: 'Tue Jun 20 16:38:59 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d4efa6'
date: 'Tue Jun 20 14:37:03 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f32e84e'
date: 'Tue Jun 13 16:47:36 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9e3aeda'
date: 'Tue Jun 13 16:39:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8df882d'
date: 'Tue Jun 13 13:20:08 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '34d048e'
date: 'Mon Jun 12 22:36:48 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e65f583'
date: 'Mon Jun 12 16:57:22 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '714bc24'
date: 'Mon Jun 12 16:54:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bfaf154'
date: 'Mon Jun 12 16:33:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7cea533'
date: 'Mon Jun 12 16:06:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc7d5b1'
date: 'Mon Jun 12 13:59:33 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '289e98b'
date: 'Mon Jun 12 13:38:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3fb7b5d'
date: 'Mon Jun 12 11:56:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '266546d'
date: 'Mon Jun 12 11:56:19 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e42c966'
date: 'Wed Jun 28 17:24:09 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eaca3b9'
date: 'Wed Jun 28 17:23:23 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8c43ee2'
date: 'Wed Jun 28 17:22:09 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4db0275'
date: 'Wed Jun 28 17:21:30 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '950b059'
date: 'Sat Jun 24 17:22:04 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fd7851c'
date: 'Thu Jun 22 13:45:44 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ec9817d'
date: 'Fri Jun 16 12:22:03 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '280d7df'
date: 'Thu Jun 15 11:49:07 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8ca7f3c'
date: 'Thu Jun 15 11:49:00 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '76f4f25'
date: 'Thu Apr 27 14:05:59 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dc0728f'
date: 'Thu Jun 15 10:46:05 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '86844fd'
date: 'Thu Jun 15 10:41:46 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0c93b70'
date: 'Thu Jun 15 10:07:14 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '582d861'
date: 'Wed Jun 14 17:00:22 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '67e7f9e'
date: 'Mon Jun 12 18:10:38 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e5f8f39'
date: 'Mon Jun 12 17:58:51 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '580846c'
date: 'Fri Jun 9 16:58:36 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c9450ed'
date: 'Fri Jun 9 14:27:35 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd511624'
date: 'Wed Jun 7 14:25:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7b8793e'
date: 'Fri Jun 2 10:59:56 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f52c855'
date: 'Wed May 31 17:08:25 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0ee68bd'
date: 'Wed May 31 15:27:31 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2c2404c'
date: 'Wed Apr 26 16:55:20 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '812dda3'
date: 'Wed Apr 26 14:29:54 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc24de1'
date: 'Tue Apr 25 19:10:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd083f4e'
date: 'Tue Apr 25 15:25:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ce38df8'
date: 'Tue Apr 25 15:20:58 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2aedf22'
date: 'Tue Apr 25 15:13:29 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bce05cc'
date: 'Mon Apr 24 11:36:11 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '63333cf'
date: 'Fri Apr 21 11:21:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fc5f1f0'
date: 'Thu Apr 20 17:20:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2979f9b'
date: 'Tue Apr 18 13:49:54 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '376bc04'
date: 'Sun Apr 16 13:45:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b9956e9'
date: 'Sun Apr 16 13:07:02 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1e0c2ce'
date: 'Wed Apr 12 18:40:52 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '17d75f7'
date: 'Thu Mar 30 17:19:19 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c80ec3b'
date: 'Thu Mar 30 13:07:58 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a4c59b4'
date: 'Wed Mar 29 22:05:33 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f506fc0'
date: 'Tue Mar 28 11:30:57 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '413c18d'
date: 'Fri Mar 24 14:51:23 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4428af1'
date: 'Fri Mar 24 10:54:57 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '095e0ba'
date: 'Fri Mar 24 08:50:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7ab0a55'
date: 'Fri Mar 24 08:50:04 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f71bb1c'
date: 'Thu Mar 23 17:22:20 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f989af9'
date: 'Thu Mar 23 14:40:07 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e5a244c'
date: 'Thu Mar 23 14:23:34 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ee976ab'
date: 'Thu Mar 23 14:23:12 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cd53f5d'
date: 'Thu Mar 23 14:22:47 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ed66ccb'
date: 'Thu Mar 23 14:18:57 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1f2524b'
date: 'Wed Mar 22 20:25:18 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6ddff8f'
date: 'Wed Mar 22 20:25:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '570aa20'
date: 'Wed Mar 22 17:11:12 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1c7d70d'
date: 'Wed Mar 22 17:10:41 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'af1246a'
date: 'Wed Mar 22 15:45:49 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cdfc21c'
date: 'Mon Mar 20 15:45:47 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e1ae6a'
date: 'Mon Mar 20 15:45:33 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8b3d89a'
date: 'Mon Mar 20 11:48:40 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd778b25'
date: 'Sat Mar 11 11:34:09 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4c68120'
date: 'Sat Mar 11 11:26:52 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8774421'
date: 'Fri Mar 10 17:06:32 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9bc528c'
date: 'Fri Mar 10 16:36:18 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '475253a'
date: 'Fri Mar 10 15:59:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fc55495'
date: 'Thu Mar 9 18:11:30 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '31c72c4'
date: 'Thu Mar 9 17:22:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4c3321f'
date: 'Thu Mar 9 17:06:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3ee7a06'
date: 'Thu Mar 9 17:05:09 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6281e9b'
date: 'Wed Mar 8 14:22:13 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2cad79b'
date: 'Wed Mar 8 10:34:50 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '44cef0d'
date: 'Tue Mar 7 16:33:52 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd247967'
date: 'Tue Mar 7 16:29:17 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e6fb645'
date: 'Tue Mar 7 16:25:03 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b9be679'
date: 'Tue Mar 7 16:17:33 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3778d6e'
date: 'Tue Mar 7 15:57:12 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4408f89'
date: 'Tue Mar 7 15:45:10 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'db7aea1'
date: 'Tue Mar 7 00:06:02 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9223f5c'
date: 'Fri Mar 3 15:50:44 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d64ea9'
date: 'Thu Mar 2 13:55:25 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '82f1159'
date: 'Thu Mar 2 11:05:00 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b9bddcb'
date: 'Tue Feb 28 12:29:54 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d899a4'
date: 'Tue Feb 28 12:29:28 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '73f35e5'
date: 'Tue Feb 28 12:28:48 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '66eb6d4'
date: 'Wed Feb 22 12:08:13 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8db1dc8'
date: 'Fri Feb 17 11:51:38 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9d08997'
date: 'Fri Feb 17 11:51:24 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d8bc7e'
date: 'Wed Feb 15 16:49:27 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eb38604'
date: 'Wed Feb 15 16:34:07 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e1ddd6f'
date: 'Wed Feb 15 16:15:06 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '31c303b'
date: 'Wed Feb 15 16:13:03 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2f2e5d3'
date: 'Wed Feb 15 16:11:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0b17a69'
date: 'Mon Feb 13 16:49:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5814544'
date: 'Sat Feb 11 00:20:04 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd50c3f8'
date: 'Fri Feb 10 23:41:42 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ad9fa7e'
date: 'Tue Jan 24 10:03:01 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f4db660'
date: 'Thu Jan 19 12:02:23 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f3a1c68'
date: 'Mon Jan 2 10:58:53 2017'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3821f65'
date: 'Fri Dec 23 14:21:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '386a97a'
date: 'Fri Dec 23 13:31:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '97a28ce'
date: 'Thu Dec 15 16:16:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '876ed67'
date: 'Thu Dec 15 16:10:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ecb4b1c'
date: 'Thu Dec 15 16:09:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc7f6c8'
date: 'Thu Dec 15 16:08:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd8c976d'
date: 'Thu Dec 15 13:51:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8efe84b'
date: 'Thu Dec 8 13:14:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '957efc8'
date: 'Thu Dec 8 12:19:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e9d7ba1'
date: 'Thu Dec 1 17:06:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '239131b'
date: 'Thu Dec 1 17:04:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e10cdbb'
date: 'Thu Dec 1 15:01:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f1ee215'
date: 'Thu Dec 1 14:18:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '73363ec'
date: 'Thu Dec 1 14:15:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '30bfb04'
date: 'Thu Dec 1 14:08:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '49f4ce1'
date: 'Thu Dec 1 12:30:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c97d630'
date: 'Thu Dec 1 11:54:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '367de0b'
date: 'Thu Dec 1 11:16:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a65ffb2'
date: 'Wed Nov 30 23:44:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '13e72ba'
date: 'Wed Nov 30 23:29:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0894861'
date: 'Wed Nov 30 17:26:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '93705be'
date: 'Wed Nov 30 16:02:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8276a2f'
date: 'Wed Nov 30 15:27:04 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0de5c81'
date: 'Wed Nov 30 15:26:28 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f7330d4'
date: 'Wed Nov 30 15:21:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b5fa2f'
date: 'Wed Nov 30 14:30:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '64e9597'
date: 'Mon Nov 28 15:32:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9eeeb09'
date: 'Fri Nov 25 10:53:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '13d5193'
date: 'Thu Nov 24 12:40:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1b11b42'
date: 'Thu Nov 24 17:56:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2ddfe04'
date: 'Thu Nov 24 17:56:44 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '31b9b31'
date: 'Wed Nov 23 18:17:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b6fbac8'
date: 'Wed Nov 23 17:27:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '50b9e61'
date: 'Wed Nov 23 17:26:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f2d6d77'
date: 'Tue Nov 22 20:46:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '26d9511'
date: 'Tue Nov 22 17:59:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9d8b135'
date: 'Tue Nov 22 09:50:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f8391b4'
date: 'Tue Nov 22 09:49:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c4df14'
date: 'Tue Nov 22 09:47:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1cb21a3'
date: 'Wed Nov 16 11:33:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9d86e5d'
date: 'Tue Nov 8 16:09:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '416640c'
date: 'Tue Nov 8 16:03:28 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5901216'
date: 'Tue Nov 8 15:21:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '56a6d73'
date: 'Tue Nov 8 15:21:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7de77b6'
date: 'Tue Nov 8 15:21:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ebdd729'
date: 'Mon Nov 7 17:25:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8284723'
date: 'Mon Nov 7 11:59:38 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '95260da'
date: 'Mon Nov 7 09:37:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cfadfd6'
date: 'Fri Nov 4 14:43:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a7c60c6'
date: 'Fri Nov 4 14:42:04 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8477984'
date: 'Thu Nov 3 15:12:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a4e959e'
date: 'Wed Nov 2 11:00:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '92c9d75'
date: 'Wed Nov 2 10:44:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '64cf695'
date: 'Wed Nov 2 10:41:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7c96f4e'
date: 'Wed Nov 2 10:31:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '14420bf'
date: 'Wed Nov 2 09:26:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '03eaa69'
date: 'Wed Nov 2 09:23:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3fcd7c7'
date: 'Wed Nov 2 09:19:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '248073d'
date: 'Thu Oct 27 18:13:57 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ff34779'
date: 'Thu Oct 27 18:13:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bdd0656'
date: 'Thu Oct 27 18:13:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8b67479'
date: 'Wed Oct 19 12:57:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fb240f0'
date: 'Mon Oct 17 15:22:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '578bb81'
date: 'Mon Oct 17 15:22:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7638378'
date: 'Mon Oct 17 14:38:04 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4cef118'
date: 'Mon Oct 17 14:27:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae20dc7'
date: 'Wed Oct 12 18:49:38 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '13eae19'
date: 'Wed Oct 12 00:27:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8753f2a'
date: 'Tue Oct 11 15:44:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a8d00eb'
date: 'Tue Oct 11 15:39:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '61c5401'
date: 'Mon Oct 10 13:25:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '81ca90f'
date: 'Mon Oct 10 12:15:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27d0b00'
date: 'Mon Oct 10 12:12:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '641be2d'
date: 'Mon Oct 10 11:05:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8027175'
date: 'Mon Oct 10 11:02:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6384ea9'
date: 'Sat Oct 8 14:57:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4af7e9a'
date: 'Fri Oct 7 17:40:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '35513f1'
date: 'Fri Oct 7 17:40:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a9e75d4'
date: 'Fri Oct 7 17:39:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '705546c'
date: 'Fri Oct 7 17:38:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '24d1b38'
date: 'Fri Oct 7 17:37:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9f8be26'
date: 'Fri Oct 7 16:15:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bed29aa'
date: 'Thu Oct 6 22:10:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f2f00ea'
date: 'Thu Oct 6 22:07:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '137767c'
date: 'Thu Oct 6 19:07:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '16a1777'
date: 'Thu Oct 6 18:35:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e5584ac'
date: 'Thu Oct 6 15:10:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '554908c'
date: 'Thu Oct 6 14:24:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e5b932'
date: 'Thu Oct 6 08:24:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1f18a6c'
date: 'Wed Oct 5 08:54:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2a7ad6c'
date: 'Tue Oct 4 14:26:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ea73dcd'
date: 'Tue Oct 4 00:37:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a9c5455'
date: 'Tue Oct 4 00:34:19 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '32000eb'
date: 'Mon Oct 3 17:01:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c8f66f3'
date: 'Mon Oct 3 14:29:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8695f71'
date: 'Mon Oct 3 13:46:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '69cc7a6'
date: 'Fri Sep 30 10:49:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '26585ca'
date: 'Fri Sep 30 10:42:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5e2efb4'
date: 'Fri Sep 30 10:01:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '47bbd9c'
date: 'Fri Sep 30 09:03:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'db56e33'
date: 'Fri Sep 30 09:02:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7e06984'
date: 'Fri Sep 30 09:02:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f027d89'
date: 'Fri Sep 30 09:02:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '28ebf17'
date: 'Fri Sep 30 09:01:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2f6e520'
date: 'Thu Sep 29 22:23:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e339750'
date: 'Thu Sep 29 16:27:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9b99341'
date: 'Thu Sep 29 14:31:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae6d992'
date: 'Thu Sep 29 13:55:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b8fbf27'
date: 'Wed Sep 28 23:06:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aefab7c'
date: 'Thu Sep 22 09:41:28 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3bf7734'
date: 'Thu Sep 22 09:40:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2b07047'
date: 'Thu Sep 22 09:40:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '348fe2a'
date: 'Wed Sep 21 17:37:57 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c8224f'
date: 'Wed Sep 28 22:38:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '37245c3'
date: 'Wed Sep 28 11:11:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c5600f'
date: 'Wed Sep 28 00:19:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '038d633'
date: 'Wed Sep 28 00:19:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '062f912'
date: 'Tue Sep 27 16:06:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd254681'
date: 'Tue Sep 27 13:47:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3220f92'
date: 'Tue Sep 27 10:38:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ec784c9'
date: 'Mon Sep 26 21:53:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4b9442d'
date: 'Mon Sep 26 15:47:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1955a99'
date: 'Mon Sep 26 15:44:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '20ae075'
date: 'Mon Sep 26 15:00:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '48cfcef'
date: 'Mon Sep 26 14:39:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9b6ae19'
date: 'Mon Sep 26 14:36:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '03675e7'
date: 'Mon Sep 26 14:31:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3a113df'
date: 'Mon Sep 26 09:57:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '99f0882'
date: 'Wed Sep 21 13:49:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8d9f4ea'
date: 'Wed Sep 21 13:27:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b3372d'
date: 'Fri Sep 16 17:46:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '23a313a'
date: 'Thu Sep 15 15:36:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cd057d1'
date: 'Thu Sep 15 13:45:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aedf25d'
date: 'Thu Sep 15 13:26:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cd5cbfa'
date: 'Thu Sep 15 11:05:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0970fe9'
date: 'Thu Sep 15 11:03:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0b599de'
date: 'Thu Sep 15 11:03:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '15f8ce3'
date: 'Thu Sep 15 11:01:38 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7e30a0f'
date: 'Thu Sep 15 10:28:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c644c5'
date: 'Thu Sep 15 10:28:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eac0aa8'
date: 'Wed Sep 14 17:25:19 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '32ac81b'
date: 'Wed Sep 14 17:25:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8d510b0'
date: 'Wed Sep 14 17:24:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc93537'
date: 'Wed Sep 14 17:24:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ec88f94'
date: 'Wed Sep 14 17:23:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8c0992e'
date: 'Wed Sep 14 14:32:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '07040f2'
date: 'Wed Sep 14 14:17:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '40b20be'
date: 'Wed Sep 14 14:17:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c3e0ca'
date: 'Wed Sep 14 14:17:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd80347c'
date: 'Wed Sep 14 13:46:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cbd8339'
date: 'Wed Sep 14 13:25:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '38a56ff'
date: 'Wed Sep 14 10:05:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fab4a97'
date: 'Wed Sep 14 08:58:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4500db2'
date: 'Tue Sep 13 21:08:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f79eafd'
date: 'Tue Sep 13 21:08:01 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '86c4fa9'
date: 'Tue Sep 13 18:18:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '209fec6'
date: 'Tue Sep 13 18:15:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '44253ef'
date: 'Tue Sep 13 18:15:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b94edd2'
date: 'Mon Sep 12 12:30:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '81120f0'
date: 'Mon Sep 12 12:30:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5f9fd64'
date: 'Mon Sep 12 12:29:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9e65920'
date: 'Mon Sep 12 12:29:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f777e28'
date: 'Mon Sep 12 11:43:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cb4555c'
date: 'Wed Aug 17 14:50:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'afbff06'
date: 'Wed Aug 17 14:49:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f8e4c6b'
date: 'Tue Aug 16 11:15:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '080d69b'
date: 'Tue Aug 16 10:59:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '24a3a06'
date: 'Tue Aug 16 10:59:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ed0057e'
date: 'Tue Aug 16 10:53:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd5a82c2'
date: 'Tue Aug 16 10:52:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f9d6ef3'
date: 'Mon Aug 1 21:31:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e6df5eb'
date: 'Mon Aug 1 21:30:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4936620'
date: 'Sun Jul 31 17:55:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c8c0361'
date: 'Sun Jul 31 17:55:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'be9f7c9'
date: 'Sun Jul 31 17:54:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5c3b369'
date: 'Sun Jul 31 17:54:01 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7e36874'
date: 'Sun Jul 31 17:53:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4511a7a'
date: 'Tue Jul 26 17:25:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '68fc9ba'
date: 'Tue Jul 26 17:14:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7179178'
date: 'Tue Jul 26 17:14:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d868d0'
date: 'Tue Jul 26 16:55:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7b96385'
date: 'Tue Jul 26 16:22:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dd49a2e'
date: 'Tue Jul 26 16:22:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '16a3107'
date: 'Tue Jul 26 15:58:39 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd74291b'
date: 'Tue Jul 26 15:20:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5ee2864'
date: 'Tue Jul 26 15:20:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '862b33b'
date: 'Tue Jul 26 15:19:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b29eddc'
date: 'Tue Jul 26 15:19:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9cf7b44'
date: 'Tue Jul 26 15:18:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1cb9455'
date: 'Tue Jul 26 15:18:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2922fd8'
date: 'Tue Jul 26 15:18:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '358a3f5'
date: 'Tue Jul 26 15:17:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1fc72b8'
date: 'Tue Jul 26 15:17:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b0b408e'
date: 'Tue Jul 26 15:17:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b8ca53'
date: 'Tue Jul 26 15:16:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c25ab8'
date: 'Tue Jul 26 15:16:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2d8e0d3'
date: 'Tue Jul 26 15:15:36 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a96fa22'
date: 'Fri Jul 22 16:44:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '44b09ae'
date: 'Fri Jul 22 16:44:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '66801fc'
date: 'Fri Jul 22 16:40:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1047b37'
date: 'Fri Jul 22 16:36:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3249596'
date: 'Fri Jul 22 16:36:28 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '16ed3bd'
date: 'Fri Jul 22 16:35:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7835f82'
date: 'Fri Jul 22 16:27:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d8b172'
date: 'Fri Jul 22 16:26:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e9a76d2'
date: 'Fri Jul 22 16:25:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4864241'
date: 'Fri Jul 22 16:25:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a7367a7'
date: 'Wed Sep 14 14:53:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dc920bd'
date: 'Wed Sep 14 09:05:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cd149e4'
date: 'Fri Sep 9 14:04:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e96125a'
date: 'Fri Sep 9 11:16:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cfa63d5'
date: 'Fri Sep 9 11:08:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ab13543'
date: 'Wed Sep 7 13:48:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2de07a1'
date: 'Wed Sep 7 13:46:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a5e37a1'
date: 'Tue Sep 6 11:41:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ef8aefc'
date: 'Mon Sep 5 18:36:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ebf58d2'
date: 'Mon Sep 5 18:31:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '11296da'
date: 'Mon Sep 5 12:06:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '94ccc04'
date: 'Sat Sep 3 12:53:19 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f183c8c'
date: 'Fri Sep 2 15:22:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6f3fa61'
date: 'Fri Sep 2 15:17:28 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0d422ff'
date: 'Fri Sep 2 10:41:30 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd5c63b3'
date: 'Tue Aug 30 16:58:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dca9008'
date: 'Tue Aug 30 07:43:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bfb8543'
date: 'Mon Aug 29 10:50:39 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5093eed'
date: 'Mon Aug 29 10:20:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f87b3ab'
date: 'Thu Aug 25 15:20:04 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7687398'
date: 'Wed Aug 24 16:19:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8488d33'
date: 'Tue Aug 23 10:46:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a33d1be'
date: 'Tue Aug 23 10:45:28 2016'
author: 'PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI'
}
{
commit: '852cfe5'
date: 'Mon Aug 15 09:29:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '694b407'
date: 'Fri Aug 12 15:07:39 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '83a0477'
date: 'Fri Aug 12 14:30:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b5b4642'
date: 'Fri Aug 12 14:29:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7d48128'
date: 'Thu Aug 4 17:38:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f842d9'
date: 'Thu Aug 4 16:25:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2e96fb6'
date: 'Thu Aug 4 15:02:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '778a01b'
date: 'Thu Aug 4 14:39:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c8f1ade'
date: 'Thu Aug 4 14:09:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fc80069'
date: 'Thu Aug 4 10:25:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '31a7bf9'
date: 'Wed Aug 3 23:21:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b69ac08'
date: 'Wed Aug 3 22:00:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b2763d2'
date: 'Wed Aug 3 20:05:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ca3afd5'
date: 'Wed Aug 3 17:55:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '248f2f8'
date: 'Wed Aug 3 16:12:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '293816d'
date: 'Wed Aug 3 15:30:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6870ecb'
date: 'Wed Aug 3 15:20:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '030be1f'
date: 'Wed Aug 3 15:05:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'db95db3'
date: 'Wed Aug 3 14:49:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2cb1e4a'
date: 'Wed Aug 3 14:45:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c29cbc1'
date: 'Wed Aug 3 12:16:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eb31c27'
date: 'Wed Aug 3 12:12:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '781cc50'
date: 'Wed Aug 3 12:01:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd34658d'
date: 'Wed Aug 3 11:54:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'daa910b'
date: 'Wed Aug 3 11:54:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '440e75b'
date: 'Wed Aug 3 11:41:01 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f11ff0e'
date: 'Wed Aug 3 10:46:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dd76064'
date: 'Tue Aug 2 17:20:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4a17b7c'
date: 'Tue Aug 2 17:15:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '297edea'
date: 'Tue Aug 2 17:08:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4b6d92a'
date: 'Tue Aug 2 16:36:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'acaa7d4'
date: 'Tue Aug 2 16:27:38 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dc847a4'
date: 'Tue Aug 2 16:09:30 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2a84e68'
date: 'Tue Aug 2 15:42:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f75ec6a'
date: 'Tue Aug 2 14:57:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4773f8f'
date: 'Tue Aug 2 14:38:38 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '52ee4d8'
date: 'Tue Aug 2 14:35:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5297f1f'
date: 'Tue Aug 2 14:32:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5d1fcb4'
date: 'Tue Aug 2 14:32:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fd32c4c'
date: 'Tue Aug 2 14:21:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bb2ad29'
date: 'Tue Aug 2 14:15:44 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1280168'
date: 'Tue Aug 2 14:14:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '37893f6'
date: 'Mon Aug 1 16:52:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2b75dd9'
date: 'Mon Aug 1 16:49:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '15027e5'
date: 'Mon Aug 1 23:38:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ca22880'
date: 'Mon Aug 1 21:59:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e9d16c'
date: 'Mon Aug 1 21:53:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5ee40e2'
date: 'Mon Aug 1 20:50:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9cf0155'
date: 'Mon Aug 1 20:49:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '582615b'
date: 'Mon Aug 1 18:16:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '364e1a2'
date: 'Mon Aug 1 16:40:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '61d6312'
date: 'Mon Aug 1 16:21:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd6b4b03'
date: 'Mon Aug 1 15:57:22 2016'
author: 'PI:NAME:<NAME>END_PIu'
}
{
commit: 'd1b04b4'
date: 'Mon Aug 1 15:30:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '36e778e'
date: 'Mon Aug 1 13:24:57 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0598f10'
date: 'Sat Jul 30 12:22:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd346edc'
date: 'Fri Jul 29 17:51:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bc1237c'
date: 'Fri Jul 29 17:31:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f7736fe'
date: 'Fri Jul 29 17:18:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ef12b68'
date: 'Fri Jul 29 15:12:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0398085'
date: 'Fri Jul 29 14:56:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a44cf99'
date: 'Fri Jul 29 13:49:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd86e1ec'
date: 'Fri Jul 29 11:01:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '39bf120'
date: 'Thu Jul 28 23:27:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9ff1da3'
date: 'Thu Jul 28 23:07:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a75ebf1'
date: 'Thu Jul 28 23:03:38 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd5604c3'
date: 'Thu Jul 28 16:57:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4daa11b'
date: 'Thu Jul 28 16:53:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '04a5d8c'
date: 'Thu Jul 28 16:49:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0d9f714'
date: 'Thu Jul 28 16:39:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '77fee3e'
date: 'Thu Jul 28 16:21:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '616019b'
date: 'Thu Jul 28 16:11:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '59d1622'
date: 'Thu Jul 28 16:08:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f3cfea6'
date: 'Thu Jul 28 15:08:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b9e8d14'
date: 'Thu Jul 28 14:36:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '046b50e'
date: 'Thu Jul 28 14:04:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1bb9b56'
date: 'Thu Jul 28 12:33:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc13ef9'
date: 'Thu Jul 28 11:42:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd071ba4'
date: 'Thu Jul 28 11:39:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27d8ed3'
date: 'Thu Jul 28 11:23:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '418b162'
date: 'Thu Jul 28 11:17:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ada2943'
date: 'Thu Jul 28 09:48:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8463a44'
date: 'Wed Jul 27 20:57:30 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27d2d35'
date: 'Wed Jul 27 17:42:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f74b3cb'
date: 'Wed Jul 27 13:35:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '85b6df4'
date: 'Wed Jul 27 12:23:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '58aeab5'
date: 'Wed Jul 27 12:23:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0b12542'
date: 'Wed Jul 27 09:19:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a56c591'
date: 'Mon Jul 25 16:40:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c9a02f5'
date: 'Mon Jul 25 16:12:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c997102'
date: 'Mon Jul 25 16:07:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f0f7e2d'
date: 'Mon Jul 25 15:21:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a545058'
date: 'Mon Jul 25 15:21:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cf7b2b7'
date: 'Mon Jul 25 15:21:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '955f744'
date: 'Mon Jul 25 15:12:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '193f74c'
date: 'Mon Jul 25 14:19:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ecb9956'
date: 'Mon Jul 25 14:19:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b5515a3'
date: 'Mon Jul 25 14:18:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5199601'
date: 'Mon Jul 25 13:27:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5c3994f'
date: 'Fri Jul 22 16:45:39 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '67ae9bd'
date: 'Thu Jul 21 19:22:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2416d2f'
date: 'Wed Jul 20 17:25:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e9e49d6'
date: 'Wed Jul 20 15:19:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '066e6d8'
date: 'Wed Jul 20 15:00:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fe056b0'
date: 'Wed Jul 20 14:50:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'de39148'
date: 'Wed Jul 20 14:41:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a252925'
date: 'Wed Jul 20 14:30:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7143e64'
date: 'Tue Jul 19 10:35:01 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '997b336'
date: 'Mon Jul 18 09:23:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a303c80'
date: 'Fri Jul 15 14:41:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5714727'
date: 'Fri Jul 15 11:57:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1141772'
date: 'Fri Jul 15 11:56:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c13d10a'
date: 'Fri Jul 15 11:47:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '621479a'
date: 'Tue Jul 5 16:53:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'db93886'
date: 'Mon Jul 4 17:23:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6af1b8d'
date: 'Mon Jul 4 15:20:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ff8a671'
date: 'Thu Jun 30 18:14:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d9790d'
date: 'Thu Jun 30 18:14:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b032241'
date: 'Thu Jun 30 13:14:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '340612e'
date: 'Thu Jun 30 14:04:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'adf5b33'
date: 'Thu Jun 30 08:35:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd6e849a'
date: 'Tue Jun 28 15:12:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f93ae7f'
date: 'Tue Jun 28 14:25:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0dff457'
date: 'Mon Jun 27 10:25:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4fb83cf'
date: 'Mon Jun 27 10:16:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a81f32f'
date: 'Mon Jun 27 10:09:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a7a6de0'
date: 'Mon Jun 27 09:53:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '99980ec'
date: 'Mon Jun 27 09:27:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f254db6'
date: 'Thu Jun 23 13:39:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ab0f21b'
date: 'Fri Jun 10 14:08:47 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '72fb2a8'
date: 'Fri Jun 10 01:17:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '926ac01'
date: 'Fri Jun 10 01:13:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b36dbd6'
date: 'Fri Jun 10 00:21:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2cc6935'
date: 'Thu Jun 9 10:43:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c48360d'
date: 'Wed Jun 1 14:40:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5cc7331'
date: 'Wed Jun 1 14:27:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f508b18'
date: 'Tue May 31 23:01:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b9c33da'
date: 'Sun May 29 00:48:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a52a360'
date: 'Sun May 29 00:30:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c28c450'
date: 'Sun May 29 00:17:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3fba050'
date: 'Sun May 29 00:16:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '08297a9'
date: 'Sun May 29 00:11:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1608171'
date: 'Sun May 29 00:05:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b7021df'
date: 'Sun May 1 13:21:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7c9e47e'
date: 'Sat Apr 30 23:43:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fffa5bc'
date: 'Sat Apr 30 23:43:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd6cb31e'
date: 'Tue May 17 17:09:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd88e2f3'
date: 'Tue May 17 13:36:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'edf7c5c'
date: 'Mon May 16 15:05:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e24a5bc'
date: 'Mon May 16 15:02:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b33e4ec'
date: 'Mon May 16 14:57:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '417ae11'
date: 'Sat May 14 13:32:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4adc7fa'
date: 'Sat May 14 13:32:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3aa1421'
date: 'Sat May 14 13:32:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e0ce1dc'
date: 'Fri May 13 23:54:36 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b4c7079'
date: 'Fri May 13 23:40:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd903cd5'
date: 'Fri May 13 23:39:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '64b09bb'
date: 'Thu May 12 10:28:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2848823'
date: 'Wed May 11 15:22:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d7e40f'
date: 'Wed May 11 15:19:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'decc0c0'
date: 'Wed May 11 13:32:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e06e20'
date: 'Wed May 11 13:15:04 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0430a7a'
date: 'Wed May 11 13:14:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9c7c945'
date: 'Wed May 11 13:14:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b5013f8'
date: 'Wed May 11 13:14:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '358bf74'
date: 'Wed May 11 13:13:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '51ebe97'
date: 'Wed May 11 13:13:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9e60469'
date: 'Wed May 11 13:13:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '937b088'
date: 'Tue May 10 20:59:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd85c0fa'
date: 'Tue May 10 20:50:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7d32c8d'
date: 'Tue May 10 17:38:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ea52737'
date: 'Tue May 10 17:25:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9bc5c0e'
date: 'Tue May 10 17:23:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '02643eb'
date: 'Tue May 10 12:06:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9070903'
date: 'Tue May 10 12:06:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '528a1cc'
date: 'Tue May 10 11:16:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8ce9687'
date: 'Sat May 7 16:57:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd80a019'
date: 'Fri May 6 11:50:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0e4aac0'
date: 'Fri May 6 10:11:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '94e779d'
date: 'Tue May 3 15:22:30 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '908bfd0'
date: 'Tue May 3 15:21:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '03ec3a8'
date: 'Mon May 2 00:30:30 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a699ef8'
date: 'Mon May 2 00:15:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9091c1a'
date: 'Mon May 2 00:07:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9fabdcd'
date: 'Sun May 1 23:47:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ed45094'
date: 'Sun May 1 23:47:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4854bad'
date: 'Sun May 1 23:45:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9900f3d'
date: 'Sun May 1 23:45:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9862387'
date: 'Sun May 1 23:42:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ebafc7e'
date: 'Sun May 1 23:42:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '29857bb'
date: 'Sun May 1 23:41:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1ec226d'
date: 'Sun May 1 23:40:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '289740d'
date: 'Sun May 1 18:05:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e0f6509'
date: 'Sun May 1 13:22:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1dbda74'
date: 'Sun May 1 13:21:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b548b4'
date: 'Sun May 1 17:27:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '445b074'
date: 'Sun May 1 17:17:56 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b85d82c'
date: 'Sat Apr 30 23:43:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd59ba69'
date: 'Sat Apr 30 23:43:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae6df89'
date: 'Sat Apr 30 23:08:57 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ff8eb4d'
date: 'Sat Apr 30 23:08:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '645653b'
date: 'Sat Apr 30 22:07:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4cf5611'
date: 'Fri Apr 29 15:57:39 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1a6bb08'
date: 'Fri Apr 29 15:57:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9a116e2'
date: 'Fri Apr 29 15:55:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '519bbfa'
date: 'Wed Apr 27 22:25:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '996220d'
date: 'Tue Apr 26 11:11:33 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b12a41f'
date: 'Tue Apr 26 11:11:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7ab7e4e'
date: 'Sat Apr 23 20:30:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b0ed473'
date: 'Wed Apr 20 14:06:29 2016'
author: 'PI:NAME:<NAME>END_PIempsink'
}
{
commit: 'c1d403f'
date: 'Tue Apr 19 14:07:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dc4d622'
date: 'Thu Apr 14 13:45:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bd79c54'
date: 'Tue Apr 12 17:03:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e32a426'
date: 'Tue Apr 12 13:20:07 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '04ee164'
date: 'Tue Apr 12 11:45:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7aefc85'
date: 'Mon Apr 11 21:23:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '01e2794'
date: 'Mon Apr 11 20:50:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c526d80'
date: 'Mon Apr 11 20:49:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '51ede8f'
date: 'Sat Apr 9 14:49:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ca477ed'
date: 'Sat Apr 9 14:02:04 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a9f4263'
date: 'Sat Apr 9 13:41:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0e405b4'
date: 'Sat Apr 9 13:35:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '320b6dc'
date: 'Sat Apr 9 13:30:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c849544'
date: 'Sat Apr 9 13:08:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '925b0c4'
date: 'Sat Apr 9 01:24:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '56aef8f'
date: 'Fri Apr 8 15:10:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '10d6ed2'
date: 'Fri Apr 8 15:06:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '24fa7ed'
date: 'Tue Apr 5 22:01:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e5fe80'
date: 'Mon Apr 4 15:56:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5c1930f'
date: 'Thu Mar 31 22:31:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0c1d135'
date: 'Thu Mar 24 23:15:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4e762bf'
date: 'Thu Mar 24 23:07:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f4c7b16'
date: 'Thu Mar 24 22:50:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '084b0bc'
date: 'Thu Mar 24 16:12:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e7246ec'
date: 'Thu Mar 24 11:58:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '25f28a7'
date: 'Tue Mar 22 15:18:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3722567'
date: 'Mon Mar 21 15:56:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a85d1df'
date: 'Sat Mar 19 01:42:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '489695d'
date: 'Sat Mar 19 01:15:25 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '42e6aa4'
date: 'Sat Mar 19 01:05:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9ea07a4'
date: 'Thu Mar 17 14:17:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b1064fc'
date: 'Thu Mar 17 14:14:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '636c0e5'
date: 'Tue Mar 15 15:05:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0f82601'
date: 'Tue Mar 15 14:57:50 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '75c0dc2'
date: 'Tue Mar 15 10:43:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b961fbf'
date: 'Mon Mar 14 16:50:01 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '949e6b3'
date: 'Mon Mar 14 16:10:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cd8bd60'
date: 'Thu Mar 10 17:39:02 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '35a1e52'
date: 'Tue Mar 1 17:59:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f1cc5ca'
date: 'Tue Mar 1 15:17:44 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dd2301c'
date: 'Tue Mar 1 13:21:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '460cf79'
date: 'Mon Feb 29 16:04:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '32b26ed'
date: 'Thu Feb 25 13:13:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '04f59b5'
date: 'Thu Feb 25 13:02:23 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '233ad6d'
date: 'Wed Feb 24 00:49:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4f9f713'
date: 'Tue Feb 23 13:28:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '13042bc'
date: 'Tue Feb 23 10:50:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b28335d'
date: 'Thu Feb 18 16:51:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9f41c24'
date: 'Wed Feb 17 16:58:15 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b7039f5'
date: 'Wed Feb 17 16:43:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '195bc4d'
date: 'Wed Feb 17 16:30:32 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5c72995'
date: 'Wed Feb 17 14:28:03 2016'
author: 'PI:NAME:<NAME>END_PIloop'
}
{
commit: 'e2e7852'
date: 'Wed Feb 17 12:18:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '700cb8e'
date: 'Wed Feb 17 12:17:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a3d0603'
date: 'Tue Feb 16 11:35:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c6b4f3f'
date: 'Tue Feb 16 11:31:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'df4f12a'
date: 'Tue Feb 9 22:40:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4792322'
date: 'Tue Feb 9 15:16:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0563b21'
date: 'Tue Feb 9 12:20:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9453b24'
date: 'Tue Feb 9 12:12:44 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '40a0c99'
date: 'Tue Feb 9 12:12:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '780e9b3'
date: 'Mon Feb 8 22:11:46 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3648250'
date: 'Mon Feb 8 16:31:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0564c0e'
date: 'Mon Feb 8 16:15:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '094286c'
date: 'Mon Feb 8 15:52:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8688f84'
date: 'Mon Feb 8 15:39:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0af5d00'
date: 'Mon Feb 8 14:21:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '54f231d'
date: 'Mon Feb 8 14:14:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4cae87f'
date: 'Mon Feb 8 13:42:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c32e406'
date: 'Mon Feb 8 13:09:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8435391'
date: 'Sun Feb 7 21:17:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '71d886e'
date: 'Fri Feb 5 10:54:13 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1bc5caa'
date: 'Thu Feb 4 17:55:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e57ec36'
date: 'Thu Feb 4 13:18:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9876ddd'
date: 'Thu Feb 4 00:01:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a23f2cd'
date: 'Thu Feb 4 00:00:44 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '74b8b3b'
date: 'Wed Feb 3 23:04:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dbec9ae'
date: 'Wed Feb 3 22:26:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '63e19c9'
date: 'Wed Feb 3 17:13:58 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f04ad8'
date: 'Wed Feb 3 16:21:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4795a1b'
date: 'Wed Feb 3 16:13:19 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '267cd7e'
date: 'Wed Feb 3 16:11:34 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '68b52f4'
date: 'Wed Feb 3 13:56:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c182568'
date: 'Wed Feb 3 13:55:11 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b1ec9bc'
date: 'Tue Feb 2 12:13:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '941bbdc'
date: 'Mon Feb 1 16:54:19 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '50f0eed'
date: 'Mon Feb 1 16:51:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '026e006'
date: 'Mon Feb 1 16:44:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '89aad75'
date: 'Mon Feb 1 16:33:08 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '11abb58'
date: 'Mon Feb 1 14:29:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '08fd95c'
date: 'Mon Feb 1 14:25:09 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9d7c8ee'
date: 'Fri Jan 29 00:29:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b2b197d'
date: 'Fri Jan 29 00:12:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c1eee0'
date: 'Fri Jan 29 00:05:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '17eeb59'
date: 'Fri Jan 29 00:03:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3315377'
date: 'Thu Jan 28 23:47:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4581bf2'
date: 'Thu Jan 28 23:09:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '719b8d5'
date: 'Thu Jan 28 23:09:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f201e2f'
date: 'Thu Jan 28 18:08:14 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c82d1b2'
date: 'Thu Jan 28 10:52:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f06b12d'
date: 'Wed Jan 27 23:13:06 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0660faa'
date: 'Wed Jan 27 23:06:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eb1315d'
date: 'Wed Jan 27 23:05:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dd9c30e'
date: 'Wed Jan 27 22:41:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c420611'
date: 'Wed Jan 27 18:45:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '75a62be'
date: 'Wed Jan 27 17:52:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '575fadc'
date: 'Wed Jan 27 17:02:31 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '111572b'
date: 'Wed Jan 27 17:02:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ba0f3ad'
date: 'Wed Jan 27 14:22:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1650f6b'
date: 'Wed Jan 27 14:20:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '475604f'
date: 'Wed Jan 27 11:36:24 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '15cffc4'
date: 'Wed Jan 27 11:35:10 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d977bc'
date: 'Tue Jan 26 18:11:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5c031e0'
date: 'Tue Jan 26 17:11:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '790d6ce'
date: 'Sat Jan 23 16:48:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ac27782'
date: 'Thu Jan 21 18:37:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f985fbb'
date: 'Thu Jan 21 18:35:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '56b653a'
date: 'Thu Jan 21 18:35:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '92d6d90'
date: 'Thu Jan 21 17:37:03 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '21f6504'
date: 'Thu Jan 21 15:41:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6be8955'
date: 'Thu Jan 21 15:23:48 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2dd0385'
date: 'Thu Jan 21 14:11:22 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c7c99d1'
date: 'Thu Jan 21 13:48:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fde7c54'
date: 'Wed Jan 20 23:44:16 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b5a7beb'
date: 'Tue Jan 19 22:34:54 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '82ccc3e'
date: 'Tue Jan 19 22:33:57 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ba7cf3c'
date: 'Mon Jan 18 17:23:20 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4d0ce3e'
date: 'Mon Jan 18 15:28:42 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0fb0831'
date: 'Mon Jan 18 13:56:41 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '325775b'
date: 'Mon Jan 18 13:08:17 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8e8fda5'
date: 'Mon Jan 18 12:24:18 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '45633ea'
date: 'Mon Jan 18 11:46:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '78724ef'
date: 'Mon Jan 18 11:35:45 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fb78409'
date: 'Mon Jan 18 11:20:43 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '757977c'
date: 'Sun Jan 17 14:17:55 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6020e74'
date: 'Sun Jan 17 14:11:12 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '42de49f'
date: 'Sun Jan 17 12:40:36 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27b7647'
date: 'Sat Jan 16 11:21:00 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5e126c4'
date: 'Fri Jan 15 16:38:29 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7aca265'
date: 'Thu Jan 14 13:16:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c65dff5'
date: 'Tue Jan 12 18:02:51 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd4c15a4'
date: 'Tue Jan 12 17:00:35 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '631b47b'
date: 'Tue Jan 12 14:24:49 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dea63db'
date: 'Tue Jan 12 14:12:40 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7399c60'
date: 'Tue Jan 12 14:10:52 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1ab06fa'
date: 'Tue Jan 12 14:10:27 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7296de7'
date: 'Mon Jan 11 23:59:05 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '83b7c3d'
date: 'Sun Jan 10 21:37:53 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd7571f8'
date: 'Fri Jan 8 16:20:26 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '36193ac'
date: 'Thu Jan 7 17:10:37 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3ee6d07'
date: 'Thu Jan 7 14:08:59 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f797a36'
date: 'Mon Jan 4 14:39:21 2016'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '209a50d'
date: 'Tue Dec 29 23:44:13 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd8ac785'
date: 'Tue Dec 29 14:01:18 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '06b2cbb'
date: 'Mon Dec 28 21:57:32 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '643b636'
date: 'Mon Dec 28 21:35:07 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '59923c4'
date: 'Mon Dec 28 18:19:29 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b71627c'
date: 'Mon Dec 28 17:48:43 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '57a342d'
date: 'Sun Dec 27 15:21:12 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6a39d85'
date: 'Sun Dec 27 15:09:02 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1f0def7'
date: 'Sun Dec 27 14:54:36 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '31baf97'
date: 'Sun Dec 27 14:46:26 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '91e8b76'
date: 'Sun Dec 27 14:46:06 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc17f24'
date: 'Sun Dec 27 12:36:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aa5f73b'
date: 'Tue Dec 22 21:41:00 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '44ccc0b'
date: 'Tue Dec 22 21:25:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7ed24b6'
date: 'Tue Dec 22 16:22:35 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9d5a69c'
date: 'Mon Dec 21 14:53:24 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '10f79a7'
date: 'Sat Dec 19 14:05:47 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6b2f999'
date: 'Fri Dec 18 17:08:39 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd8cabe8'
date: 'Fri Dec 18 15:52:46 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3f2c0e2'
date: 'Fri Dec 18 15:09:57 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '58ad551'
date: 'Thu Dec 17 22:25:31 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aa6f4fb'
date: 'Thu Dec 17 14:24:14 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a7df8f4'
date: 'Thu Dec 17 14:03:24 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aefa54a'
date: 'Thu Dec 17 12:30:12 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '920106e'
date: 'Thu Dec 17 10:44:06 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b4583d4'
date: 'Wed Dec 16 17:17:44 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '864d7d5'
date: 'Wed Dec 16 17:05:37 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '893cb5d'
date: 'Wed Dec 16 15:58:21 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '80a0cba'
date: 'Wed Dec 16 15:44:29 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4e8be92'
date: 'Wed Dec 16 15:40:16 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0ac463d'
date: 'Wed Dec 16 13:57:18 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9bfae6d'
date: 'Wed Dec 16 13:33:37 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f434b99'
date: 'Tue Dec 15 23:24:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7089129'
date: 'Tue Dec 15 23:14:41 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '81cf4a1'
date: 'Tue Dec 15 23:04:11 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0f9bd46'
date: 'Tue Dec 15 22:52:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'abcfbcf'
date: 'Tue Dec 15 22:44:47 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aeb2d6c'
date: 'Tue Dec 15 22:40:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '025ea9b'
date: 'Tue Dec 15 15:01:43 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '32cc787'
date: 'Tue Dec 15 14:57:23 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2ce0737'
date: 'Tue Dec 15 14:53:58 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9cca44c'
date: 'Tue Dec 15 14:46:24 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '601e1e5'
date: 'Tue Dec 15 14:43:15 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ca8aa71'
date: 'Tue Dec 15 14:34:44 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2c078d2'
date: 'Tue Dec 15 14:13:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '00df8b6'
date: 'Tue Dec 15 13:54:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8ad78d8'
date: 'Tue Dec 15 13:44:29 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '89bfd2e'
date: 'Tue Dec 15 13:38:09 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4cb63a5'
date: 'Tue Dec 15 13:25:26 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fb55e60'
date: 'Tue Dec 15 13:24:54 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0f3b196'
date: 'Tue Dec 15 11:47:08 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5a95739'
date: 'Tue Dec 15 11:40:52 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '674c81c'
date: 'Tue Dec 15 11:25:33 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd38fcf7'
date: 'Tue Dec 15 11:25:18 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6cf8a83'
date: 'Tue Dec 15 10:37:01 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '672b250'
date: 'Mon Dec 14 17:10:57 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '982a065'
date: 'Mon Dec 14 14:50:23 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '84dc662'
date: 'Mon Dec 14 14:49:42 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '304f85a'
date: 'Mon Dec 14 14:46:26 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '605eb08'
date: 'Mon Dec 14 14:03:25 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4a3b67f'
date: 'Mon Dec 14 13:54:58 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cfb7d76'
date: 'Thu Dec 10 16:18:04 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd443397'
date: 'Tue Dec 8 18:42:55 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ff8ab5f'
date: 'Tue Dec 8 15:32:47 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f947fee'
date: 'Fri Dec 4 16:49:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c5e779e'
date: 'Mon Nov 30 12:00:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f3c4b01'
date: 'Fri Nov 27 13:52:22 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0b7d7ef'
date: 'Tue Nov 17 14:06:27 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9ca69d1'
date: 'Tue Nov 17 13:48:38 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0d84b03'
date: 'Mon Nov 16 14:36:35 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a22ecf0'
date: 'Fri Nov 6 14:30:25 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '858494c'
date: 'Fri Nov 6 14:24:17 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0e4d9bb'
date: 'Mon Nov 2 13:41:00 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3b32854'
date: 'Mon Nov 2 12:02:59 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b1b3b23'
date: 'Tue Oct 27 14:51:07 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f749b23'
date: 'Tue Oct 27 14:47:01 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8c8bac4'
date: 'Mon Oct 26 16:26:15 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '42b7a4b'
date: 'Mon Oct 26 16:25:42 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c1c974'
date: 'Wed Oct 7 08:34:50 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b22d277'
date: 'Sun Sep 13 12:52:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ef74161'
date: 'Tue Sep 8 17:16:56 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a6eb697'
date: 'Mon Sep 7 16:11:58 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd017d6e'
date: 'Fri Sep 4 12:02:32 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0db522a'
date: 'Fri Sep 4 11:30:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e45456b'
date: 'Fri Sep 4 11:26:52 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ecf3c54'
date: 'Wed Aug 19 16:21:22 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c966d49'
date: 'Fri Aug 14 14:34:58 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6360b78'
date: 'Wed Aug 12 19:40:51 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'daa25a5'
date: 'Wed Aug 12 19:40:06 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd129b20'
date: 'Thu Aug 6 12:11:29 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '62ac19e'
date: 'Wed Jul 22 11:44:04 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'effb399'
date: 'Tue Jul 21 16:23:39 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c181296'
date: 'Tue Jul 21 15:17:23 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a80a61c'
date: 'Tue Jul 21 14:02:31 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f69ced3'
date: 'Tue Jul 21 13:46:40 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8972740'
date: 'Tue Jul 21 13:31:56 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9e98ef0'
date: 'Tue Jul 21 10:56:56 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b34eca1'
date: 'Fri Jul 17 23:41:32 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5c47bd5'
date: 'Fri Jul 17 23:40:25 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'db96a5e'
date: 'Wed Jul 15 17:04:42 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c2f0066'
date: 'Wed Jul 15 16:03:24 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6438f75'
date: 'Wed Jul 15 16:02:40 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8097266'
date: 'Thu Jul 9 13:38:00 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1c5bb79'
date: 'Thu Jul 9 13:37:48 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7c13491'
date: 'Tue Jun 30 16:16:53 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '194c6c1'
date: 'Sun Jun 28 00:05:21 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd2152f3'
date: 'Sun Jun 28 00:03:46 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c4371ee'
date: 'Wed Jun 24 16:58:38 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2c7d9b6'
date: 'Wed Jun 24 16:58:28 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7cb7f1f'
date: 'Tue Jun 23 16:07:56 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '11139a7'
date: 'Wed Jun 10 14:47:44 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '21b9efa'
date: 'Mon Jun 8 16:16:45 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f0e040f'
date: 'Mon Jun 8 16:07:54 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '067cc97'
date: 'Mon Jun 8 15:56:42 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f08d7f6'
date: 'Mon Jun 8 15:49:33 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6bb2038'
date: 'Thu May 21 12:13:40 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9f69cd4'
date: 'Thu May 21 12:12:53 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9ee696d'
date: 'Tue May 12 17:04:55 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6707a25'
date: 'Tue May 12 17:04:51 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '53b28cf'
date: 'Tue May 12 17:02:48 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3fc11e7'
date: 'Thu May 7 15:34:24 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27c6950'
date: 'Thu May 7 14:38:33 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '997378f'
date: 'Tue May 5 16:13:05 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5797cc7'
date: 'Fri May 1 20:35:00 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd85846c'
date: 'Fri May 1 12:16:52 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ab89ee4'
date: 'Fri May 1 12:13:51 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0e755dc'
date: 'Fri May 1 12:12:54 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7504bd8'
date: 'Fri May 1 11:55:55 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '81a3f2f'
date: 'Fri May 1 10:28:54 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '238f18e'
date: 'Thu Apr 30 14:17:19 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f755586'
date: 'Thu Apr 30 12:41:32 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ce79b87'
date: 'Thu Apr 30 12:41:20 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f8a9de'
date: 'Thu Apr 23 12:02:27 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a063536'
date: 'Thu Apr 23 11:37:49 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ced648f'
date: 'Wed Apr 22 17:12:01 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '66eea95'
date: 'Wed Apr 22 17:00:46 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4e182d7'
date: 'Mon Apr 20 16:21:58 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e65ea33'
date: 'Mon Apr 20 16:21:39 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e913b4'
date: 'Tue Apr 14 17:33:29 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9c12003'
date: 'Thu Apr 9 11:02:27 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7ff0515'
date: 'Thu Apr 9 11:01:30 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bf78d4f'
date: 'Tue Apr 7 22:43:26 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '16520a3'
date: 'Tue Apr 7 21:58:55 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2dd9a9c'
date: 'Tue Apr 7 21:37:03 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e6d5cda'
date: 'Tue Apr 7 21:12:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '64896a8'
date: 'Tue Apr 7 20:55:25 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0e65db3'
date: 'Tue Apr 7 20:55:09 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1b293bf'
date: 'Tue Apr 7 17:44:36 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bab3232'
date: 'Fri Apr 3 15:02:35 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '58ff015'
date: 'Fri Apr 3 13:23:20 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c9b3cb0'
date: 'Fri Apr 3 11:13:18 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1e8c7c1'
date: 'Fri Apr 3 10:28:21 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c518763'
date: 'Fri Apr 3 10:19:14 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2b7855d'
date: 'Fri Apr 3 10:10:56 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6dda6c6'
date: 'Fri Apr 3 10:04:46 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a5e1a1a'
date: 'Thu Apr 2 17:41:25 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3098d23'
date: 'Thu Apr 2 17:35:41 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '42b6bf1'
date: 'Thu Apr 2 17:24:28 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '658933b'
date: 'Thu Apr 2 16:02:18 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '29a8452'
date: 'Thu Apr 2 15:12:12 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '705a6d1'
date: 'Thu Apr 2 14:10:59 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9da6ec3'
date: 'Thu Apr 2 13:52:13 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7e4f83b'
date: 'Thu Apr 2 12:58:55 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1febf8d'
date: 'Thu Apr 2 12:25:41 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e4c08c6'
date: 'Thu Apr 2 12:22:38 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '654779d'
date: 'Thu Apr 2 12:15:36 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4cb562b'
date: 'Thu Apr 2 12:02:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '86b7691'
date: 'Fri Mar 27 11:00:36 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e798718'
date: 'Thu Mar 26 11:34:38 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e6cd862'
date: 'Tue Mar 10 12:32:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3b25bba'
date: 'Tue Mar 10 11:22:23 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd6a3894'
date: 'Tue Mar 10 11:05:11 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bd14b72'
date: 'Tue Mar 10 11:03:59 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5caa36d'
date: 'Mon Mar 9 12:27:38 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '42dfba9'
date: 'Mon Mar 9 12:27:26 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7356bf5'
date: 'Fri Feb 27 16:28:48 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '22c2e4e'
date: 'Tue Feb 24 17:26:11 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'efc5aa4'
date: 'Sun Feb 15 23:18:42 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd69bbfe'
date: 'Sun Feb 15 23:11:07 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ffcdbec'
date: 'Tue Feb 10 16:36:34 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'adcca17'
date: 'Tue Feb 10 16:16:47 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c659f32'
date: 'Tue Feb 10 14:36:06 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5f36189'
date: 'Mon Feb 2 11:29:38 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f6abac2'
date: 'Fri Jan 30 17:28:40 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2aafdc0'
date: 'Fri Jan 30 16:46:50 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2224d2c'
date: 'Fri Jan 30 16:37:12 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3edbb0f'
date: 'Mon Jan 26 23:04:47 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d49915'
date: 'Sun Jan 18 23:04:40 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f60bc9'
date: 'Sun Jan 18 23:02:16 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '52df7dc'
date: 'Sun Jan 18 23:01:46 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '45d792d'
date: 'Wed Jan 14 11:41:50 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f932e94'
date: 'Mon Jan 5 18:07:01 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '75e5990'
date: 'Mon Jan 5 18:06:48 2015'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0d76609'
date: 'Sat Dec 13 21:14:41 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8e59102'
date: 'Fri Dec 12 14:52:38 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd400c9c'
date: 'Fri Dec 12 14:48:17 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '47f8be6'
date: 'Fri Dec 12 12:54:11 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3f9411e'
date: 'Fri Dec 12 11:49:49 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6cc6def'
date: 'Fri Dec 12 11:48:00 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0d59690'
date: 'Sat Dec 6 11:21:49 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '135a729'
date: 'Sat Dec 6 11:19:23 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '24de214'
date: 'Wed Nov 26 19:15:48 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e33a47b'
date: 'Wed Nov 26 19:02:02 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4b1f2d2'
date: 'Wed Nov 26 14:50:35 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '92b4ee1'
date: 'Tue Nov 11 12:14:06 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'af57e3d'
date: 'Sun Nov 2 11:11:58 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd147a23'
date: 'Mon Oct 27 22:25:03 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '62e18b8'
date: 'Mon Oct 27 22:14:38 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0f8c3bb'
date: 'Mon Oct 27 22:01:12 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ef112cd'
date: 'Mon Oct 27 21:36:35 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c287fe'
date: 'Mon Oct 27 21:18:39 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ecfa572'
date: 'Mon Oct 27 20:59:38 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8d4b1bd'
date: 'Mon Oct 27 20:48:41 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a82c4fb'
date: 'Mon Oct 27 19:18:18 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2e07617'
date: 'Mon Oct 27 17:08:19 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f7271db'
date: 'Mon Oct 27 17:06:39 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f86b69a'
date: 'Mon Oct 27 17:06:23 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f6816e1'
date: 'Fri Oct 24 12:09:44 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '093bdb7'
date: 'Fri Oct 24 12:03:30 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5121cf5'
date: 'Fri Oct 24 11:57:45 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1b13e7f'
date: 'Mon Oct 20 23:06:22 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b6aed1'
date: 'Sat Oct 18 14:40:46 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'db9ffd1'
date: 'Sat Oct 18 12:08:21 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd89500c'
date: 'Sat Oct 11 16:15:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ef2f738'
date: 'Fri Oct 10 14:26:21 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9b7b806'
date: 'Fri Oct 10 11:14:05 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0f0a7fe'
date: 'Wed Oct 8 22:59:05 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5afa23f'
date: 'Mon Oct 6 14:23:09 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '00ab257'
date: 'Mon Oct 6 11:50:28 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2db13aa'
date: 'Mon Oct 6 10:49:23 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0a61fc4'
date: 'Sun Oct 5 16:35:21 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fbde7a8'
date: 'Sun Oct 5 16:24:15 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2a0439a'
date: 'Sun Oct 5 16:23:05 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4eb86e9'
date: 'Sun Oct 5 16:22:47 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '63841e7'
date: 'Sun Oct 5 16:22:41 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a04b76d'
date: 'Tue Sep 30 16:21:33 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c548c39'
date: 'Tue Sep 30 16:17:00 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e9c3c4a'
date: 'Tue Sep 30 16:13:51 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'de60b1b'
date: 'Tue Sep 30 14:34:39 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e81fc33'
date: 'Tue Sep 30 14:25:45 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae4fc9b'
date: 'Tue Sep 30 14:03:35 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1d521bd'
date: 'Tue Sep 30 12:49:24 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a2613e7'
date: 'Tue Sep 30 12:38:32 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ebcb76b'
date: 'Tue Sep 30 12:38:18 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '410b06f'
date: 'Tue Sep 30 11:36:05 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dc36471'
date: 'Fri Sep 26 14:55:30 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bd01fd8'
date: 'Fri Sep 26 14:44:28 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9a04efe'
date: 'Fri Sep 26 14:11:58 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e13feb4'
date: 'Fri Sep 26 14:11:36 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '570630a'
date: 'Fri Sep 26 14:11:21 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '82d933f'
date: 'Fri Sep 26 13:59:26 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1e64425'
date: 'Thu Sep 25 19:01:01 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bf0e721'
date: 'Thu Sep 25 18:34:14 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd8c6123'
date: 'Thu Sep 25 18:16:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9e116b5'
date: 'Thu Sep 25 16:59:41 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c1ad81e'
date: 'Thu Sep 25 16:48:56 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ffd106e'
date: 'Thu Sep 25 15:56:49 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fe9fa63'
date: 'Thu Sep 25 15:39:48 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '48028d3'
date: 'Thu Sep 25 15:38:25 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4686f4f'
date: 'Thu Sep 25 15:31:20 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9552f44'
date: 'Thu Sep 25 15:31:06 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '962f45d'
date: 'Thu Sep 25 15:30:51 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2f8594f'
date: 'Thu Sep 25 15:30:33 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '40f024f'
date: 'Thu Sep 25 15:04:35 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e5ce332'
date: 'Thu Sep 25 11:14:52 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fce1baa'
date: 'Wed Sep 24 22:34:07 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b9ae0ad'
date: 'Tue Sep 23 14:55:19 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '814d193'
date: 'Mon Sep 22 21:23:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '065fa18'
date: 'Mon Sep 22 21:14:32 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4a29f4e'
date: 'Thu Sep 18 15:02:01 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1deb452'
date: 'Thu Sep 18 14:58:35 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b8abf40'
date: 'Thu Sep 18 14:58:02 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '66e8ba2'
date: 'Thu Sep 18 14:56:21 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd7c40ce'
date: 'Thu Sep 18 14:55:36 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bf11c56'
date: 'Thu Sep 18 14:49:14 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b9777d'
date: 'Thu Sep 18 14:47:43 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cc66c7b'
date: 'Thu Sep 18 14:43:08 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '06a4df9'
date: 'Sun Aug 24 14:54:37 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e61b373'
date: 'Wed Aug 20 15:35:10 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6059202'
date: 'Tue Aug 19 14:14:45 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bccd4e3'
date: 'Tue Aug 19 14:05:24 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f13bd7f'
date: 'Tue Aug 19 14:04:45 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '689cbb4'
date: 'Sun Aug 17 14:39:32 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'daa11db'
date: 'Sun Aug 17 11:54:23 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c73c31f'
date: 'Thu Aug 14 06:44:14 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '51041c6'
date: 'Tue Jul 22 14:17:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '20a26c1'
date: 'Tue Jul 22 14:08:44 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f65e89d'
date: 'Tue Jul 22 14:05:47 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd14e883'
date: 'Thu Jul 10 12:38:40 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd907147'
date: 'Tue Jul 1 15:06:41 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6ae8769'
date: 'Tue Jul 1 14:36:00 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e3be363'
date: 'Fri Jun 27 11:23:23 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0839b4b'
date: 'Fri Jun 27 10:42:30 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f1ab831'
date: 'Wed Jun 25 00:21:40 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cef8706'
date: 'Thu Jun 19 16:21:10 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4235f0c'
date: 'Thu Jun 19 16:20:55 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e3e38a1'
date: 'Thu Jun 19 15:49:02 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9a0f05e'
date: 'Tue Jun 17 11:15:25 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '536cf90'
date: 'Tue Jun 17 11:13:49 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1fb5c9e'
date: 'Tue Jun 17 01:02:39 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fafb016'
date: 'Tue Jun 17 01:02:22 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f34db1c'
date: 'Tue Jun 17 01:01:51 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2ed6bab'
date: 'Wed Jun 4 12:40:06 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5476611'
date: 'Fri May 30 14:03:40 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '53391c3'
date: 'Fri May 30 14:02:27 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ccc99d4'
date: 'Fri May 30 12:18:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd16326a'
date: 'Fri May 30 12:14:21 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bb58df1'
date: 'Mon May 26 16:09:09 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '975b2b1'
date: 'Wed May 21 16:54:11 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '21a49cc'
date: 'Wed May 21 16:52:35 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fe67bbc'
date: 'Wed May 21 16:39:48 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0a8a768'
date: 'Wed May 21 16:20:50 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '20decd2'
date: 'Sun May 18 16:18:30 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd7c2135'
date: 'Sun May 18 16:18:12 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '69c4d63'
date: 'Sat May 17 21:26:31 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e86abfa'
date: 'Thu May 15 23:57:06 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '216a271'
date: 'Thu May 15 23:33:22 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a05732a'
date: 'Thu May 15 23:20:17 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7f31ec8'
date: 'Thu May 15 23:03:48 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d42703'
date: 'Thu May 15 16:16:05 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c46ce9e'
date: 'Thu May 15 15:33:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '80ea2f8'
date: 'Thu May 15 15:32:43 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '943df9a'
date: 'Thu May 15 13:39:22 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a9fd591'
date: 'Thu May 15 11:31:36 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b31938a'
date: 'Thu May 15 10:32:30 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4630242'
date: 'Thu May 15 10:24:30 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '30e688e'
date: 'Thu May 15 10:22:15 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bb15649'
date: 'Thu May 15 10:03:24 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '293c7a7'
date: 'Tue May 13 19:16:59 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2bbd10a'
date: 'Tue May 13 19:15:51 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c16f69f'
date: 'Tue May 13 19:13:41 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2205ba0'
date: 'Sun Apr 27 00:10:24 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e175596'
date: 'Sun Apr 27 00:10:15 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '68e7cd4'
date: 'Sun Apr 20 11:19:07 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e95bf6b'
date: 'Sun Apr 20 11:18:52 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'fad01d1'
date: 'Sat Apr 19 13:20:25 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0999d52'
date: 'Sat Apr 19 13:14:47 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '449b155'
date: 'Sat Apr 19 12:46:04 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '958bf94'
date: 'Sat Apr 19 12:21:20 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2c1fd6d'
date: 'Sat Apr 19 12:16:49 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b78ddb1'
date: 'Sat Apr 19 12:11:55 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4b76906'
date: 'Sat Apr 19 11:45:36 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '19d4ede'
date: 'Sat Apr 19 11:45:12 2014'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1f3e010'
date: 'Fri Nov 22 13:52:01 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae7c991'
date: 'Thu Nov 14 11:38:59 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5fce264'
date: 'Thu Nov 14 11:38:48 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e4c5467'
date: 'Tue Nov 5 18:41:28 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '01172c3'
date: 'Tue Nov 5 18:38:20 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7942d46'
date: 'Tue Oct 29 01:06:30 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '352e090'
date: 'Tue Oct 29 01:04:13 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5d463ce'
date: 'Thu Oct 17 14:04:55 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '25a47eb'
date: 'Wed Oct 16 20:47:21 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1d8cf24'
date: 'Tue Oct 15 15:07:33 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '71678bf'
date: 'Tue Oct 15 14:57:11 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4a289c5'
date: 'Tue Oct 15 12:54:50 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6c102c2'
date: 'Tue Oct 15 12:42:45 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b629549'
date: 'Tue Oct 15 12:05:06 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '78f3d91'
date: 'Tue Oct 15 03:02:06 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bad5a94'
date: 'Sun Oct 13 20:49:24 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3e4051b'
date: 'Sun Oct 13 14:53:43 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6eefeb0'
date: 'Sun Oct 13 14:38:53 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f367c76'
date: 'Sat Oct 12 17:53:50 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'aa847ac'
date: 'Fri Oct 11 10:21:30 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd21e7d0'
date: 'Tue Oct 1 07:42:38 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0cbe0a6'
date: 'Fri Sep 13 17:15:00 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e4ed1d8'
date: 'Fri Sep 13 08:03:53 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3f01dfe'
date: 'Sun Sep 8 02:54:43 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9bd31bf'
date: 'Sat Jul 13 11:37:26 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ae64324'
date: 'Thu May 30 16:16:25 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4b8be87'
date: 'Thu May 30 16:06:44 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '538806f'
date: 'Wed May 29 15:58:48 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2d59000'
date: 'Wed May 29 15:45:28 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'dcd4faa'
date: 'Fri May 24 14:41:27 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '150f969'
date: 'Fri May 24 14:09:31 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2322236'
date: 'Fri May 24 13:00:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3d6b2d5'
date: 'Mon May 20 23:53:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ee67a19'
date: 'Mon May 20 23:53:35 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0b97774'
date: 'Thu May 16 13:24:58 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6bfb912'
date: 'Thu May 16 13:24:20 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6fd264c'
date: 'Wed May 15 16:22:27 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c45dbb7'
date: 'Mon May 13 20:55:56 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a0fd659'
date: 'Mon May 13 20:55:50 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6ce75ff'
date: 'Sun May 12 00:24:11 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3e381c6'
date: 'Sun May 12 00:23:55 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8a8f4ae'
date: 'Sun May 12 00:23:40 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '167722f'
date: 'Sun May 12 00:23:13 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ec11e8b'
date: 'Sun May 12 00:23:06 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5728949'
date: 'Sun May 12 00:22:53 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '318b135'
date: 'Wed May 8 20:13:38 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1296d57'
date: 'Tue May 7 22:45:07 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '56dc7c1'
date: 'Tue May 7 22:44:42 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8b12dcb'
date: 'Sun May 5 11:45:10 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ed0c03b'
date: 'Fri May 3 00:07:53 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2ebf5b3'
date: 'Fri May 3 00:02:23 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3ad0dc0'
date: 'Thu May 2 22:53:36 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b23d4e'
date: 'Thu May 2 22:50:16 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8001e3a'
date: 'Thu May 2 22:50:11 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c02eef3'
date: 'Thu May 2 22:48:52 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'acb0e29'
date: 'Thu May 2 22:48:35 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c3de315'
date: 'Thu May 2 22:48:25 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ff7e8c0'
date: 'Thu May 2 22:48:15 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4017411'
date: 'Thu May 2 22:48:05 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e494459'
date: 'Thu May 2 22:44:56 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2b3accc'
date: 'Thu May 2 22:44:33 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1ec6a88'
date: 'Thu May 2 22:44:15 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3e2c579'
date: 'Thu May 2 22:44:01 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '9b9dadd'
date: 'Thu May 2 22:43:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5da20c9'
date: 'Wed May 1 20:43:29 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '078e2d3'
date: 'Tue Apr 30 11:29:15 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '578a6c5'
date: 'Tue Apr 30 11:29:08 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '518df87'
date: 'Tue Apr 30 11:28:28 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '62dfbf3'
date: 'Tue Apr 30 11:28:13 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5e5b065'
date: 'Tue Apr 30 11:27:54 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '09f407a'
date: 'Tue Apr 30 11:25:30 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a85039b'
date: 'Tue Apr 30 11:24:53 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '524606a'
date: 'Tue Apr 30 11:23:59 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2870f60'
date: 'Sat Apr 27 12:51:52 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a258ebf'
date: 'Sat Apr 27 12:00:17 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '342a117'
date: 'Sat Apr 27 11:59:44 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '66501bb'
date: 'Sat Apr 27 11:04:06 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '23f141a'
date: 'Sat Apr 27 10:46:39 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b480b5f'
date: 'Sat Apr 27 10:45:45 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '18773c5'
date: 'Sat Apr 27 10:45:33 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0acf5ea'
date: 'Thu Apr 25 17:39:23 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2bf250b'
date: 'Thu Apr 25 17:27:34 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '949744f'
date: 'Thu Apr 25 17:27:17 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '987d99d'
date: 'Thu Apr 25 17:26:29 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a9a6a92'
date: 'Thu Apr 25 17:11:23 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7469d1f'
date: 'Thu Apr 25 15:02:35 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '872f5c2'
date: 'Thu Apr 25 14:59:08 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ab6a379'
date: 'Thu Apr 25 14:57:13 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '482d86a'
date: 'Thu Apr 25 14:38:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a240ff4'
date: 'Thu Apr 25 14:38:24 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '378e7ff'
date: 'Thu Apr 25 14:37:55 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8f4385d'
date: 'Thu Apr 25 14:36:54 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'efaef61'
date: 'Thu Apr 25 14:36:10 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '48beba7'
date: 'Thu Apr 25 14:35:57 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8f0ef3f'
date: 'Thu Apr 25 14:35:42 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5b1e121'
date: 'Thu Apr 25 14:35:03 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c05bdd1'
date: 'Sun Apr 21 19:01:19 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bb67d68'
date: 'Sun Apr 21 19:01:03 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b00dc5a'
date: 'Sun Apr 21 18:53:03 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '80bd8e7'
date: 'Sun Apr 21 18:52:42 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd9fff17'
date: 'Sun Apr 21 18:52:22 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b22ccc9'
date: 'Sun Apr 21 18:51:58 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b1fbf81'
date: 'Sun Apr 21 18:51:39 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '88c3ccb'
date: 'Sun Apr 21 18:51:04 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '816cfa7'
date: 'Sat Apr 20 11:28:37 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '33d86fb'
date: 'Sat Apr 20 11:28:05 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '4e8f467'
date: 'Sat Apr 20 11:26:26 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c0207c0'
date: 'Sat Apr 20 11:25:44 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '20c81a1'
date: 'Fri Apr 12 23:02:20 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6da090f'
date: 'Fri Apr 12 23:02:07 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd840b70'
date: 'Wed Apr 10 00:48:14 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2aaea25'
date: 'Wed Apr 10 00:38:01 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3fea5e0'
date: 'Thu Apr 4 00:39:13 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '8fe4409'
date: 'Thu Apr 4 00:39:06 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f149a9a'
date: 'Thu Apr 4 00:38:45 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '64dee09'
date: 'Thu Apr 4 00:38:33 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1c91f4a'
date: 'Sat Mar 23 16:02:21 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '685804a'
date: 'Sat Mar 23 15:59:21 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cda5559'
date: 'Fri Mar 22 13:50:20 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1018b69'
date: 'Fri Mar 22 13:50:04 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '37e04d5'
date: 'Tue Mar 19 18:32:22 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '110b2b5'
date: 'Tue Mar 19 18:31:24 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '78c16ef'
date: 'Tue Mar 19 18:30:43 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2f345ec'
date: 'Mon Mar 18 21:35:08 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cd3a9e5'
date: 'Mon Mar 18 21:34:59 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c099ddd'
date: 'Mon Mar 18 21:34:29 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5192cec'
date: 'Mon Mar 18 16:23:45 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd6348db'
date: 'Sun Mar 17 19:28:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e73e07a'
date: 'Sun Mar 17 19:16:27 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '98ca654'
date: 'Sun Mar 17 17:20:52 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '37cd9ec'
date: 'Sun Mar 17 16:45:26 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f74c9eb'
date: 'Sun Mar 17 15:54:12 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5f9b84f'
date: 'Thu Mar 14 09:16:37 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6e29b46'
date: 'Wed Mar 13 17:05:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0dc7c7f'
date: 'Wed Mar 13 17:05:06 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'cf5d564'
date: 'Wed Mar 13 16:53:07 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bde291d'
date: 'Wed Mar 13 12:13:38 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'e2e5546'
date: 'Wed Mar 13 12:13:14 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '67f1220'
date: 'Wed Mar 13 12:12:36 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'ec69c6d'
date: 'Tue Mar 12 16:23:01 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '08504e6'
date: 'Tue Mar 12 15:58:56 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3a44655'
date: 'Tue Mar 12 15:54:51 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '2556b6f'
date: 'Tue Mar 12 15:02:47 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '250cb04'
date: 'Tue Mar 12 15:02:01 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'a379acc'
date: 'Mon Mar 11 15:13:03 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b1d8b15'
date: 'Mon Mar 11 00:41:22 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '589a47e'
date: 'Sun Mar 10 23:39:28 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '7d65cab'
date: 'Fri Mar 1 15:29:59 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd0106fb'
date: 'Fri Mar 1 15:27:37 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '6d0c7c9'
date: 'Fri Mar 1 05:00:43 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '913c290'
date: 'Tue Feb 26 14:30:41 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27de5fe'
date: 'Sun Feb 17 21:45:19 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'eaa81c4'
date: 'Sat Feb 16 15:19:53 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '27f5914'
date: 'Sat Feb 16 15:11:28 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '57f3631'
date: 'Fri Jan 25 10:26:54 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5a947f5'
date: 'Mon Jan 14 18:51:10 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'b28681d'
date: 'Mon Jan 14 14:20:33 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'c1ab12d'
date: 'Mon Jan 14 14:20:26 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '53fd22e'
date: 'Mon Jan 14 14:19:30 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '54e88cc'
date: 'Mon Jan 14 14:18:50 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'f975305'
date: 'Wed Jan 2 16:20:57 2013'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bb1d239'
date: 'Mon Dec 31 11:54:33 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1677a43'
date: 'Wed Dec 26 02:15:58 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '818a4a2'
date: 'Tue Dec 25 17:33:03 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '0c64794'
date: 'Tue Dec 25 17:32:37 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '3c2d257'
date: 'Tue Dec 25 17:32:01 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '075b827'
date: 'Tue Dec 25 17:31:21 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'd82fc4e'
date: 'Tue Dec 25 17:30:56 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '1358741'
date: 'Tue Dec 25 17:30:37 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bfdceaf'
date: 'Fri Dec 21 14:05:21 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '5bdeb37'
date: 'Thu Dec 20 16:08:05 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: '35ad7df'
date: 'Thu Dec 20 16:06:57 2012'
author: 'PI:NAME:<NAME>END_PI'
}
{
commit: 'bcd1a1f'
date: 'Thu Dec 20 15:59:48 2012'
author: 'PI:NAME:<NAME>END_PI'
}
] |
[
{
"context": "n>\"\n callbacks: DEFAULT_CALLBACKS\n search_key: \"name\"\n start_with_space: yes\n highlight_first: yes\n ",
"end": 1847,
"score": 0.9594163298606873,
"start": 1843,
"tag": "KEY",
"value": "name"
}
] | src/api.coffee | GerHobbelt/At.js | 0 | Api =
# load a flag's data
#
# @params at[String] the flag
# @params data [Array] data to storage.
load: (at, data) -> c.model.load data if c = this.controller(at)
getInsertedItemsWithIDs: (at) ->
return [null, null] unless c = this.controller at
at = "-#{c.get_opt('alias') || c.at}" if at
ids = []
items = $.map @$inputor.find("span.atwho-view-flag#{at || ""}"), (item) ->
data = $(item).data('atwho-data-item')
return if ids.indexOf(data.id) > -1
ids.push = data.id if data.id
data
[ids, items]
getInsertedItems: (at) -> Api.getInsertedItemsWithIDs.apply(this, [at])[1]
getInsertedIDs: (at) -> Api.getInsertedItemsWithIDs.apply(this, [at])[0]
setIframe: (iframe) -> this.setIframe(iframe)
run: -> this.dispatch()
destroy: ->
this.shutdown()
@$inputor.data('atwho', null)
Atwho =
# init or update an inputor with a special flag
#
# @params options [Object] settings of At.js
init: (options) ->
app = ($this = $(this)).data "atwho"
$this.data 'atwho', (app = new App(this)) if not app
app.reg options.at, options
this
$CONTAINER = $("<div id='atwho-container'></div>")
$.fn.atwho = (method) ->
_args = arguments
$('body').append($CONTAINER)
result = null
this.filter('textarea, input, [contenteditable=true]').each ->
if typeof method is 'object' || !method
Atwho.init.apply this, _args
else if Api[method]
result = Api[method].apply app, Array::slice.call(_args, 1) if app = $(this).data('atwho')
else
$.error "Method #{method} does not exist on jQuery.caret"
result || this
$.fn.atwho.default =
at: undefined
alias: undefined
data: null
tpl: "<li data-value='${atwho-at}${name}'>${name}</li>"
insert_tpl: "<span>${atwho-data-value}</span>"
callbacks: DEFAULT_CALLBACKS
search_key: "name"
start_with_space: yes
highlight_first: yes
limit: 5
max_len: 20
display_timeout: 300
delay: null
| 162161 | Api =
# load a flag's data
#
# @params at[String] the flag
# @params data [Array] data to storage.
load: (at, data) -> c.model.load data if c = this.controller(at)
getInsertedItemsWithIDs: (at) ->
return [null, null] unless c = this.controller at
at = "-#{c.get_opt('alias') || c.at}" if at
ids = []
items = $.map @$inputor.find("span.atwho-view-flag#{at || ""}"), (item) ->
data = $(item).data('atwho-data-item')
return if ids.indexOf(data.id) > -1
ids.push = data.id if data.id
data
[ids, items]
getInsertedItems: (at) -> Api.getInsertedItemsWithIDs.apply(this, [at])[1]
getInsertedIDs: (at) -> Api.getInsertedItemsWithIDs.apply(this, [at])[0]
setIframe: (iframe) -> this.setIframe(iframe)
run: -> this.dispatch()
destroy: ->
this.shutdown()
@$inputor.data('atwho', null)
Atwho =
# init or update an inputor with a special flag
#
# @params options [Object] settings of At.js
init: (options) ->
app = ($this = $(this)).data "atwho"
$this.data 'atwho', (app = new App(this)) if not app
app.reg options.at, options
this
$CONTAINER = $("<div id='atwho-container'></div>")
$.fn.atwho = (method) ->
_args = arguments
$('body').append($CONTAINER)
result = null
this.filter('textarea, input, [contenteditable=true]').each ->
if typeof method is 'object' || !method
Atwho.init.apply this, _args
else if Api[method]
result = Api[method].apply app, Array::slice.call(_args, 1) if app = $(this).data('atwho')
else
$.error "Method #{method} does not exist on jQuery.caret"
result || this
$.fn.atwho.default =
at: undefined
alias: undefined
data: null
tpl: "<li data-value='${atwho-at}${name}'>${name}</li>"
insert_tpl: "<span>${atwho-data-value}</span>"
callbacks: DEFAULT_CALLBACKS
search_key: "<KEY>"
start_with_space: yes
highlight_first: yes
limit: 5
max_len: 20
display_timeout: 300
delay: null
| true | Api =
# load a flag's data
#
# @params at[String] the flag
# @params data [Array] data to storage.
load: (at, data) -> c.model.load data if c = this.controller(at)
getInsertedItemsWithIDs: (at) ->
return [null, null] unless c = this.controller at
at = "-#{c.get_opt('alias') || c.at}" if at
ids = []
items = $.map @$inputor.find("span.atwho-view-flag#{at || ""}"), (item) ->
data = $(item).data('atwho-data-item')
return if ids.indexOf(data.id) > -1
ids.push = data.id if data.id
data
[ids, items]
getInsertedItems: (at) -> Api.getInsertedItemsWithIDs.apply(this, [at])[1]
getInsertedIDs: (at) -> Api.getInsertedItemsWithIDs.apply(this, [at])[0]
setIframe: (iframe) -> this.setIframe(iframe)
run: -> this.dispatch()
destroy: ->
this.shutdown()
@$inputor.data('atwho', null)
Atwho =
# init or update an inputor with a special flag
#
# @params options [Object] settings of At.js
init: (options) ->
app = ($this = $(this)).data "atwho"
$this.data 'atwho', (app = new App(this)) if not app
app.reg options.at, options
this
$CONTAINER = $("<div id='atwho-container'></div>")
$.fn.atwho = (method) ->
_args = arguments
$('body').append($CONTAINER)
result = null
this.filter('textarea, input, [contenteditable=true]').each ->
if typeof method is 'object' || !method
Atwho.init.apply this, _args
else if Api[method]
result = Api[method].apply app, Array::slice.call(_args, 1) if app = $(this).data('atwho')
else
$.error "Method #{method} does not exist on jQuery.caret"
result || this
$.fn.atwho.default =
at: undefined
alias: undefined
data: null
tpl: "<li data-value='${atwho-at}${name}'>${name}</li>"
insert_tpl: "<span>${atwho-data-value}</span>"
callbacks: DEFAULT_CALLBACKS
search_key: "PI:KEY:<KEY>END_PI"
start_with_space: yes
highlight_first: yes
limit: 5
max_len: 20
display_timeout: 300
delay: null
|
[
{
"context": " name: 'Vox Media'\n data:\n shortName: 'Vox'\n description: 'Smart Media Brands for a New",
"end": 80,
"score": 0.8142744898796082,
"start": 77,
"tag": "NAME",
"value": "Vox"
}
] | test/support/theVergeJSON.coffee | penne12/WhoseNews | 3 | module.exports =
parent:
name: 'Vox Media'
data:
shortName: 'Vox'
description: 'Smart Media Brands for a New Generation\nVox Media builds smart brands that people love in big categories they’re passionate about. We create products to empower the most talented voices and engage hundreds of millions of people with high quality content and experiences.\n'
type: 'Private'
url: 'http://www.voxmedia.com/'
wikipedia: 'https://en.wikipedia.org/wiki/Vox_Media'
headquarters: 'New York City, NY'
commercial: true
investors:
[ 'NBCUniversal'
'Comcast Ventures'
'Accel Partners'
'khosla ventures'
'General Atlantic' ]
brands:
Vox:
description: 'Vox is a general interest news site for the 21st century. Its mission is simple: Explain the News. Vox is where you go to understand the news and the world around you.\n',
url: 'http://www.vox.com/'
wikipedia: 'https://en.wikipedia.org/wiki/Vox_(website)'
domains: [ 'vox.com' ]
'The Verge':
description: 'The Verge is the global authority on the most innovative parts of our modern culture, covering technology, science, entertainment, transportation—and the experiences of the future.\n'
url: 'http://www.theverge.com/'
domains: [ 'theverge.com' ]
wikipedia: 'https://en.wikipedia.org/wiki/The_Verge'
name: 'The Verge'
data:
description: 'The Verge is the global authority on the most innovative parts of our modern culture, covering technology, science, entertainment, transportation—and the experiences of the future.\n'
url: 'http://www.theverge.com/'
domains: [ 'theverge.com' ]
wikipedia: 'https://en.wikipedia.org/wiki/The_Verge'
| 114546 | module.exports =
parent:
name: 'Vox Media'
data:
shortName: '<NAME>'
description: 'Smart Media Brands for a New Generation\nVox Media builds smart brands that people love in big categories they’re passionate about. We create products to empower the most talented voices and engage hundreds of millions of people with high quality content and experiences.\n'
type: 'Private'
url: 'http://www.voxmedia.com/'
wikipedia: 'https://en.wikipedia.org/wiki/Vox_Media'
headquarters: 'New York City, NY'
commercial: true
investors:
[ 'NBCUniversal'
'Comcast Ventures'
'Accel Partners'
'khosla ventures'
'General Atlantic' ]
brands:
Vox:
description: 'Vox is a general interest news site for the 21st century. Its mission is simple: Explain the News. Vox is where you go to understand the news and the world around you.\n',
url: 'http://www.vox.com/'
wikipedia: 'https://en.wikipedia.org/wiki/Vox_(website)'
domains: [ 'vox.com' ]
'The Verge':
description: 'The Verge is the global authority on the most innovative parts of our modern culture, covering technology, science, entertainment, transportation—and the experiences of the future.\n'
url: 'http://www.theverge.com/'
domains: [ 'theverge.com' ]
wikipedia: 'https://en.wikipedia.org/wiki/The_Verge'
name: 'The Verge'
data:
description: 'The Verge is the global authority on the most innovative parts of our modern culture, covering technology, science, entertainment, transportation—and the experiences of the future.\n'
url: 'http://www.theverge.com/'
domains: [ 'theverge.com' ]
wikipedia: 'https://en.wikipedia.org/wiki/The_Verge'
| true | module.exports =
parent:
name: 'Vox Media'
data:
shortName: 'PI:NAME:<NAME>END_PI'
description: 'Smart Media Brands for a New Generation\nVox Media builds smart brands that people love in big categories they’re passionate about. We create products to empower the most talented voices and engage hundreds of millions of people with high quality content and experiences.\n'
type: 'Private'
url: 'http://www.voxmedia.com/'
wikipedia: 'https://en.wikipedia.org/wiki/Vox_Media'
headquarters: 'New York City, NY'
commercial: true
investors:
[ 'NBCUniversal'
'Comcast Ventures'
'Accel Partners'
'khosla ventures'
'General Atlantic' ]
brands:
Vox:
description: 'Vox is a general interest news site for the 21st century. Its mission is simple: Explain the News. Vox is where you go to understand the news and the world around you.\n',
url: 'http://www.vox.com/'
wikipedia: 'https://en.wikipedia.org/wiki/Vox_(website)'
domains: [ 'vox.com' ]
'The Verge':
description: 'The Verge is the global authority on the most innovative parts of our modern culture, covering technology, science, entertainment, transportation—and the experiences of the future.\n'
url: 'http://www.theverge.com/'
domains: [ 'theverge.com' ]
wikipedia: 'https://en.wikipedia.org/wiki/The_Verge'
name: 'The Verge'
data:
description: 'The Verge is the global authority on the most innovative parts of our modern culture, covering technology, science, entertainment, transportation—and the experiences of the future.\n'
url: 'http://www.theverge.com/'
domains: [ 'theverge.com' ]
wikipedia: 'https://en.wikipedia.org/wiki/The_Verge'
|
[
{
"context": "getPropertyKey: (keyInput) ->\n key = keyInput.val()\n if @workingSchema.properties\n for ",
"end": 12687,
"score": 0.6525132656097412,
"start": 12684,
"tag": "KEY",
"value": "val"
}
] | src/core.coffee | lgr7/codecombattreema | 66 | do __init = ->
TreemaNode.setNodeSubclass 'string', class StringNode extends TreemaNode
valueClass: 'treema-string'
@inputTypes = ['color', 'date', 'datetime', 'datetime-local',
'email', 'month', 'range', 'search',
'tel', 'text', 'time', 'url', 'week']
buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, "\"#{data}\"")
buildValueForEditing: (valEl, data) ->
input = @buildValueForEditingSimply(valEl, data)
input.attr('maxlength', @workingSchema.maxLength) if @workingSchema.maxLength
input.attr('type', @workingSchema.format) if @workingSchema.format in StringNode.inputTypes
saveChanges: (valEl) ->
oldData = @data
@data = $('input', valEl).val()
super(oldData)
TreemaNode.setNodeSubclass 'number', class NumberNode extends TreemaNode
valueClass: 'treema-number'
buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, JSON.stringify(data))
buildValueForEditing: (valEl, data) ->
input = @buildValueForEditingSimply(valEl, JSON.stringify(data), 'number')
input.attr('max', @workingSchema.maximum) if @workingSchema.maximum
input.attr('min', @workingSchema.minimum) if @workingSchema.minimum
saveChanges: (valEl) ->
oldData = @data
@data = parseFloat($('input', valEl).val())
super(oldData)
TreemaNode.setNodeSubclass 'integer', class IntegerNode extends TreemaNode
valueClass: 'treema-integer'
buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, JSON.stringify(data))
buildValueForEditing: (valEl, data) ->
input = @buildValueForEditingSimply(valEl, JSON.stringify(data), 'number')
input.attr('max', @workingSchema.maximum) if @workingSchema.maximum
input.attr('min', @workingSchema.minimum) if @workingSchema.minimum
saveChanges: (valEl) ->
oldData = @data
@data = parseInt($('input', valEl).val())
super(oldData)
TreemaNode.setNodeSubclass 'null', NullNode = class NullNode extends TreemaNode
valueClass: 'treema-null'
editable: false
buildValueForDisplay: (valEl) -> @buildValueForDisplaySimply(valEl, 'null')
TreemaNode.setNodeSubclass 'boolean', class BooleanNode extends TreemaNode
  # Node for JSON booleans. The value is flipped in place (click, space,
  # F/T keys) rather than typed into an input.
  valueClass: 'treema-boolean'
  buildValueForDisplay: (valEl, data) ->
    @buildValueForDisplaySimply(valEl, JSON.stringify(data))
    @keepFocus()
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data))
    # Show the current value as plain text next to the input.
    $('<span></span>').text(JSON.stringify(@data)).insertBefore(input)
    input.focus()
  # Flip the value, or force it to newValue when one is given; re-renders
  # the value element, records an 'edit' action, and flushes immediately.
  toggleValue: (newValue=null) ->
    oldData = @getData()
    @data = not @data
    @data = newValue if newValue?
    valEl = @getValEl().empty()
    if @isDisplaying() then @buildValueForDisplay(valEl, @getData()) else @buildValueForEditing(valEl, @getData())
    @addTrackedAction {'oldData':oldData, 'newData':@data, 'path':@getPath(), 'action':'edit'}
    @keepFocus()
    @flushChanges()
  onSpacePressed: -> @toggleValue()
  onFPressed: -> @toggleValue(false)
  onTPressed: -> @toggleValue(true)
  saveChanges: ->  # intentionally empty: toggleValue writes @data and flushes itself
  onClick: (e) ->
    value = $(e.target).closest('.treema-value')
    return super(e) unless value.length
    @toggleValue() if @canEdit()
TreemaNode.setNodeSubclass 'array', class ArrayNode extends TreemaNode
  # Ordered collection node for JSON arrays.
  valueClass: 'treema-array'
  collection: true
  ordered: true
  directlyEditable: false
  sort: false  # when true, @data is re-sorted with sortFunction on open
  getChildren: ->
    ({
      key: key
      value: value
      schema: @getChildSchema(key)
    } for value, key in @getData())
  # Summarize up to the first three elements, joined with ' | '.
  buildValueForDisplay: (valEl, data) ->
    text = []
    return unless data
    for child, index in data[..2]
      # Use a throwaway helper treema to render each element's display text.
      helperTreema = TreemaNode.make(null, {schema: TreemaNode.utils.getChildSchema(index, @workingSchema), data:child}, @)
      val = $('<div></div>')
      helperTreema.buildValueForDisplay(val, helperTreema.getData())
      text.push(val.text())
    text.push('...') if data.length > 3
    empty = if @workingSchema.title? then "(empty #{@workingSchema.title})" else '(empty)'
    text = if text.length then text.join(' | ') else empty
    @buildValueForDisplaySimply(valEl, text)
  buildValueForEditing: (valEl, data) -> @buildValueForEditingSimply(valEl, JSON.stringify(data))
  # Additions are blocked by read-only flags, a full tuple (additionalItems
  # is false), or the maxItems bound.
  canAddChild: ->
    return false if @settings.readOnly or @workingSchema.readOnly
    return false if @workingSchema.additionalItems is false and @getData().length >= @workingSchema.items.length
    return false if @workingSchema.maxItems? and @getData().length >= @workingSchema.maxItems
    return true
  # Append a new element treema at the end and start editing (or select) it.
  addNewChild: ->
    return unless @canAddChild()
    @open() if @isClosed()
    new_index = Object.keys(@childrenTreemas).length
    schema = TreemaNode.utils.getChildSchema(new_index, @workingSchema)
    newTreema = TreemaNode.make(undefined, {schema: schema}, @, new_index)
    newTreema.tv4 = @tv4
    childNode = @createChildNode(newTreema)
    @addTrackedAction {'data':newTreema.data, 'path':newTreema.getPath(), 'parentPath':@getPath(), 'action':'insert'}
    @getAddButtonEl().before(childNode)
    if newTreema.canEdit()
      newTreema.edit()
    else
      newTreema.select()
    @integrateChildTreema(newTreema)
    newTreema.flushChanges()
    newTreema
  open: ->
    @data.sort(@sortFunction) if @data and @sort
    super(arguments...)
  close: ->
    super(arguments...)
    valEl = @getValEl().empty()
    @buildValueForDisplay(valEl, @getData())
  # auto sorting methods
  sortFunction: (a, b) ->
    return 1 if a > b
    return -1 if a < b
    return 0
window.TreemaArrayNode = ArrayNode # TODO: how should we be making these available?
TreemaNode.setNodeSubclass 'object', class ObjectNode extends TreemaNode
  # Keyed collection node for JSON objects. Children are ordered to follow
  # schema.properties first, then remaining data keys, then keys that only
  # exist in default data.
  valueClass: 'treema-object'
  collection: true
  keyed: true
  directlyEditable: false
  # Build the child descriptor list: {key, value?, schema, defaultData?}.
  getChildren: ->
    # order based on properties object first
    children = []
    keysAccountedFor = []
    if @workingSchema.properties
      for key of @workingSchema.properties
        defaultData = @getDefaultDataForKey(key)
        # Key absent from data: include it only when a default exists.
        if $.type(@getData()[key]) is 'undefined'
          if defaultData?
            keysAccountedFor.push(key)
            children.push({
              key: key,
              schema: @getChildSchema(key)
              defaultData: defaultData
            })
          continue
        keysAccountedFor.push(key)
        schema = @getChildSchema(key)
        children.push({
          key: key
          value: @getData()[key]
          schema: schema
          defaultData: defaultData
        })
    # Data keys not covered by schema.properties.
    for key, value of @getData()
      continue if key in keysAccountedFor
      keysAccountedFor.push(key)
      children.push({
        key: key
        value: value
        schema: @getChildSchema(key)
        defaultData: @getDefaultDataForKey(key)
      })
    # Keys that only exist in this node's own default data.
    if $.isPlainObject(@defaultData)
      for key of @defaultData
        continue if key in keysAccountedFor
        keysAccountedFor.push(key)
        children.push({
          key: key
          schema: @getChildSchema(key)
          defaultData: @getDefaultDataForKey(key)
        })
    # Keys that only exist in the schema-level default object.
    if $.isPlainObject(@workingSchema.default)
      for key of @workingSchema.default
        continue if key in keysAccountedFor
        keysAccountedFor.push(key)
        children.push({
          key: key
          schema: @getChildSchema(key)
          defaultData: @getDefaultDataForKey(key)
        })
    children
  # Default for one child key, deep-copied so later edits cannot mutate the
  # shared default object/array.
  getDefaultDataForKey: (key) ->
    childDefaultData = @defaultData?[key] ? @workingSchema.default?[key]
    if $.isArray(childDefaultData) then childDefaultData = $.extend(true, [], childDefaultData)
    if $.isPlainObject(childDefaultData) then childDefaultData = $.extend(true, {}, childDefaultData)
    childDefaultData
  # One-line summary: the displayProperty value if configured, otherwise up
  # to three "name=value" pairs with values truncated to 20 characters.
  buildValueForDisplay: (valEl, data) ->
    text = []
    return unless data
    displayValue = data[@workingSchema.displayProperty]
    if displayValue
      text = displayValue
      return @buildValueForDisplaySimply(valEl, text)
    i = 0
    schema = @workingSchema or @schema
    for key, value of data
      continue if value is undefined
      if i is 3
        text.push('...')
        break
      i += 1
      childSchema = @getChildSchema(key)
      name = childSchema.title or key
      # Composite values are summarized by name only.
      if $.isPlainObject(value) or $.isArray(value)
        text.push "#{name}"
        continue
      valueString = value
      valueString = JSON.stringify(value) unless $.type(value) is 'string'
      valueString = 'undefined' if typeof value is 'undefined'
      valueString = valueString[..20] + ' ...' if valueString.length > 20
      text.push "#{name}=#{valueString}"
    empty = if @workingSchema.title? then "(empty #{@workingSchema.title})" else '(empty)'
    text = if text.length then text.join(', ') else empty
    @buildValueForDisplaySimply(valEl, text)
  populateData: ->
    super()
    # Also fill in any properties the schema marks as required.
    TreemaNode.utils.populateRequireds(@data, @workingSchema, @tv4)
  close: ->
    super(arguments...)
    @buildValueForDisplay(@getValEl().empty(), @getData())
  # adding children ---------------------------------------------------------
  # Show the new-property key input, wired up with autocomplete when the
  # jQuery UI autocomplete plugin is present.
  addNewChild: ->
    return unless @canAddChild()
    @open() unless @isRoot()
    @deselectAll()
    properties = @childPropertiesAvailable()
    keyInput = $(@newPropertyTemplate)
    keyInput.blur @cleanupAddNewChild
    keyInput.keydown (e) =>
      @originalTargetValue = $(e.target).val()
    keyInput.autocomplete?(source: properties, minLength: 0, delay: 0, autoFocus: true, select: @onAutocompleteSelect)
    @getAddButtonEl().before(keyInput).hide()
    keyInput.focus()
    keyInput.autocomplete('search')
    true
  onAutocompleteSelect: (e, ui) =>
    $(e.target).val(ui.item.value)
    @tryToAddNewChild(e, true)
  canAddChild: ->
    return false if @settings.readOnly or @workingSchema.readOnly
    return false if @workingSchema.maxProperties? and Object.keys(@getData()).length >= @workingSchema.maxProperties
    return true if @workingSchema.additionalProperties isnt false
    return true if @workingSchema.patternProperties?
    return true if @childPropertiesAvailable().length
    return false
  # Schema properties not yet present in the data (titles preferred), sorted.
  childPropertiesAvailable: ->
    schema = @workingSchema or @schema
    return [] unless schema.properties
    properties = []
    data = @getData()
    for property, childSchema of schema.properties
      continue if data?[property]?
      continue if childSchema.format is 'hidden'
      continue if childSchema.readOnly
      properties.push(childSchema.title or property)
    properties.sort()
  # event handling when adding a new property -------------------------------
  onDeletePressed: (e) ->
    return super(e) unless @addingNewProperty()
    # Backspace/delete on an empty key input cancels the add.
    if not $(e.target).val()
      @cleanupAddNewChild()
      e.preventDefault()
      @$el.find('.treema-add-child').focus()
  onEscapePressed: ->
    @cleanupAddNewChild()
  onTabPressed: (e) ->
    return super(e) unless @addingNewProperty()
    e.preventDefault()
    @tryToAddNewChild(e, false)
  onEnterPressed: (e) ->
    return super(e) unless @addingNewProperty()
    @tryToAddNewChild(e, true)
  # new property behavior ---------------------------------------------------
  # aggressive is true when the user explicitly confirmed (enter/autocomplete
  # select) rather than just tabbing away.
  tryToAddNewChild: (e, aggressive) ->
    # empty input keep on moving on
    if (not @originalTargetValue) and (not aggressive)
      offset = if e.shiftKey then -1 else 1
      @cleanupAddNewChild()
      @$el.find('.treema-add-child').focus()
      @traverseWhileEditing(offset)
      return
    keyInput = $(e.target)
    key = @getPropertyKey($(e.target))
    # invalid input, stay put and show an error
    if key.length and not @canAddProperty(key)
      @clearTemporaryErrors()
      @showBadPropertyError(keyInput)
      return
    # if this is a prop we already have, just edit that instead
    if @childrenTreemas[key]?
      @cleanupAddNewChild()
      treema = @childrenTreemas[key]
      return if treema.canEdit() then treema.toggleEdit() else treema.select()
    # otherwise add the new child
    @cleanupAddNewChild()
    @addNewChildForKey(key)
  # Map a typed schema title back to its real property key when possible.
  getPropertyKey: (keyInput) ->
    key = keyInput.val()
    if @workingSchema.properties
      for child_key, child_schema of @workingSchema.properties
        key = child_key if child_schema.title is key
    key
  canAddProperty: (key) ->
    return true unless @workingSchema.additionalProperties is false
    return true if @workingSchema.properties?[key]?
    if @workingSchema.patternProperties?
      for pattern of @workingSchema.patternProperties
        return true if RegExp(pattern).test(key)
    return false
  showBadPropertyError: (keyInput) ->
    keyInput.focus()
    tempError = @createTemporaryError('Invalid property name.')
    tempError.insertAfter(keyInput)
    return
  # Create the child treema for key, insert it in schema order, and start
  # editing it (or drill into its first child for collections).
  addNewChildForKey: (key) ->
    schema = @getChildSchema(key)
    newTreema = TreemaNode.make(null, {schema: schema}, @, key)
    childNode = @createChildNode(newTreema)
    @findObjectInsertionPoint(key).before(childNode)
    if newTreema.canEdit()
      newTreema.edit()
    else
      @integrateChildTreema(newTreema)
      # new treemas may already have children from default
      if newTreema.collection
        children = newTreema.getChildren()
        if children.length
          newTreema.open()
          child = newTreema.childrenTreemas[children[0]['key']]
          child?.select()
        else
          newTreema.addNewChild()
    @addTrackedAction {'data':newTreema.data, 'path':newTreema.getPath(), 'parentPath':@getPath(), action:'insert'}
    @updateMyAddButton()
  findObjectInsertionPoint: (key) ->
    # Object children should be in the order of the schema.properties objects as much as possible
    return @getAddButtonEl() unless @workingSchema.properties?[key]
    allProps = Object.keys(@workingSchema.properties)
    afterKeys = allProps.slice(allProps.indexOf(key)+1)
    allChildren = @$el.find('> .treema-children > .treema-node')
    for child in allChildren
      if $(child).data('instance').keyForParent in afterKeys
        return $(child)
    return @getAddButtonEl()
  # adding utilities --------------------------------------------------------
  cleanupAddNewChild: =>
    @$el.find('.treema-new-prop').remove()
    @getAddButtonEl().show()
    @clearTemporaryErrors()
  addingNewProperty: -> document.activeElement is @$el.find('.treema-new-prop')[0]
window.TreemaObjectNode = ObjectNode # TODO: how should we be making these available?
do __init = ->
TreemaNode.setNodeSubclass 'string', class StringNode extends TreemaNode
  # Node for JSON strings: displayed with surrounding quotes, edited through
  # an input whose type can follow the schema "format" value.
  valueClass: 'treema-string'
  @inputTypes = ['color', 'date', 'datetime', 'datetime-local',
                 'email', 'month', 'range', 'search',
                 'tel', 'text', 'time', 'url', 'week']
  buildValueForDisplay: (valEl, data) ->
    @buildValueForDisplaySimply(valEl, "\"#{data}\"")
  buildValueForEditing: (valEl, data) ->
    inputEl = @buildValueForEditingSimply(valEl, data)
    if @workingSchema.maxLength
      inputEl.attr('maxlength', @workingSchema.maxLength)
    if @workingSchema.format in StringNode.inputTypes
      inputEl.attr('type', @workingSchema.format)
  saveChanges: (valEl) ->
    previousData = @data
    @data = $('input', valEl).val()
    super(previousData)
TreemaNode.setNodeSubclass 'number', class NumberNode extends TreemaNode
  # Node for JSON Schema "number" values, edited via <input type="number">.
  valueClass: 'treema-number'
  buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, JSON.stringify(data))
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data), 'number')
    # Existence checks (?) instead of truthiness so a schema bound of 0
    # is still applied to the input element.
    input.attr('max', @workingSchema.maximum) if @workingSchema.maximum?
    input.attr('min', @workingSchema.minimum) if @workingSchema.minimum?
  saveChanges: (valEl) ->
    oldData = @data
    @data = parseFloat($('input', valEl).val())
    super(oldData)
TreemaNode.setNodeSubclass 'integer', class IntegerNode extends TreemaNode
  # Node for JSON Schema "integer" values. Displays the JSON form of the
  # value and edits it through an <input type="number"> element.
  valueClass: 'treema-integer'
  buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, JSON.stringify(data))
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data), 'number')
    # Existence checks (?) instead of truthiness so a schema bound of 0
    # is still applied to the input element.
    input.attr('max', @workingSchema.maximum) if @workingSchema.maximum?
    input.attr('min', @workingSchema.minimum) if @workingSchema.minimum?
  saveChanges: (valEl) ->
    oldData = @data
    # Explicit radix 10: never let leading zeros/0x trigger octal/hex parsing.
    @data = parseInt($('input', valEl).val(), 10)
    super(oldData)
TreemaNode.setNodeSubclass 'null', NullNode = class NullNode extends TreemaNode
  # Read-only node for JSON null values; there is nothing to edit.
  editable: false
  valueClass: 'treema-null'
  buildValueForDisplay: (valEl) ->
    @buildValueForDisplaySimply(valEl, 'null')
TreemaNode.setNodeSubclass 'boolean', class BooleanNode extends TreemaNode
  # Node for JSON booleans. The value is flipped in place (click, space,
  # F/T keys) rather than typed into an input.
  valueClass: 'treema-boolean'
  buildValueForDisplay: (valEl, data) ->
    @buildValueForDisplaySimply(valEl, JSON.stringify(data))
    @keepFocus()
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data))
    # Show the current value as plain text next to the input.
    $('<span></span>').text(JSON.stringify(@data)).insertBefore(input)
    input.focus()
  # Flip the value, or force it to newValue when one is given; re-renders
  # the value element, records an 'edit' action, and flushes immediately.
  toggleValue: (newValue=null) ->
    oldData = @getData()
    @data = not @data
    @data = newValue if newValue?
    valEl = @getValEl().empty()
    if @isDisplaying() then @buildValueForDisplay(valEl, @getData()) else @buildValueForEditing(valEl, @getData())
    @addTrackedAction {'oldData':oldData, 'newData':@data, 'path':@getPath(), 'action':'edit'}
    @keepFocus()
    @flushChanges()
  onSpacePressed: -> @toggleValue()
  onFPressed: -> @toggleValue(false)
  onTPressed: -> @toggleValue(true)
  saveChanges: ->  # intentionally empty: toggleValue writes @data and flushes itself
  onClick: (e) ->
    value = $(e.target).closest('.treema-value')
    return super(e) unless value.length
    @toggleValue() if @canEdit()
TreemaNode.setNodeSubclass 'array', class ArrayNode extends TreemaNode
  # Ordered collection node for JSON arrays.
  valueClass: 'treema-array'
  collection: true
  ordered: true
  directlyEditable: false
  sort: false  # when true, @data is re-sorted with sortFunction on open
  getChildren: ->
    ({
      key: key
      value: value
      schema: @getChildSchema(key)
    } for value, key in @getData())
  # Summarize up to the first three elements, joined with ' | '.
  buildValueForDisplay: (valEl, data) ->
    text = []
    return unless data
    for child, index in data[..2]
      # Use a throwaway helper treema to render each element's display text.
      helperTreema = TreemaNode.make(null, {schema: TreemaNode.utils.getChildSchema(index, @workingSchema), data:child}, @)
      val = $('<div></div>')
      helperTreema.buildValueForDisplay(val, helperTreema.getData())
      text.push(val.text())
    text.push('...') if data.length > 3
    empty = if @workingSchema.title? then "(empty #{@workingSchema.title})" else '(empty)'
    text = if text.length then text.join(' | ') else empty
    @buildValueForDisplaySimply(valEl, text)
  buildValueForEditing: (valEl, data) -> @buildValueForEditingSimply(valEl, JSON.stringify(data))
  # Additions are blocked by read-only flags, a full tuple (additionalItems
  # is false), or the maxItems bound.
  canAddChild: ->
    return false if @settings.readOnly or @workingSchema.readOnly
    return false if @workingSchema.additionalItems is false and @getData().length >= @workingSchema.items.length
    return false if @workingSchema.maxItems? and @getData().length >= @workingSchema.maxItems
    return true
  # Append a new element treema at the end and start editing (or select) it.
  addNewChild: ->
    return unless @canAddChild()
    @open() if @isClosed()
    new_index = Object.keys(@childrenTreemas).length
    schema = TreemaNode.utils.getChildSchema(new_index, @workingSchema)
    newTreema = TreemaNode.make(undefined, {schema: schema}, @, new_index)
    newTreema.tv4 = @tv4
    childNode = @createChildNode(newTreema)
    @addTrackedAction {'data':newTreema.data, 'path':newTreema.getPath(), 'parentPath':@getPath(), 'action':'insert'}
    @getAddButtonEl().before(childNode)
    if newTreema.canEdit()
      newTreema.edit()
    else
      newTreema.select()
    @integrateChildTreema(newTreema)
    newTreema.flushChanges()
    newTreema
  open: ->
    @data.sort(@sortFunction) if @data and @sort
    super(arguments...)
  close: ->
    super(arguments...)
    valEl = @getValEl().empty()
    @buildValueForDisplay(valEl, @getData())
  # auto sorting methods
  sortFunction: (a, b) ->
    return 1 if a > b
    return -1 if a < b
    return 0
window.TreemaArrayNode = ArrayNode # TODO: how should we be making these available?
TreemaNode.setNodeSubclass 'object', class ObjectNode extends TreemaNode
  # Keyed collection node for JSON objects. Children are ordered to follow
  # schema.properties first, then remaining data keys, then keys that only
  # exist in default data.
  valueClass: 'treema-object'
  collection: true
  keyed: true
  directlyEditable: false
  # Build the child descriptor list: {key, value?, schema, defaultData?}.
  getChildren: ->
    # order based on properties object first
    children = []
    keysAccountedFor = []
    if @workingSchema.properties
      for key of @workingSchema.properties
        defaultData = @getDefaultDataForKey(key)
        # Key absent from data: include it only when a default exists.
        if $.type(@getData()[key]) is 'undefined'
          if defaultData?
            keysAccountedFor.push(key)
            children.push({
              key: key,
              schema: @getChildSchema(key)
              defaultData: defaultData
            })
          continue
        keysAccountedFor.push(key)
        schema = @getChildSchema(key)
        children.push({
          key: key
          value: @getData()[key]
          schema: schema
          defaultData: defaultData
        })
    # Data keys not covered by schema.properties.
    for key, value of @getData()
      continue if key in keysAccountedFor
      keysAccountedFor.push(key)
      children.push({
        key: key
        value: value
        schema: @getChildSchema(key)
        defaultData: @getDefaultDataForKey(key)
      })
    # Keys that only exist in this node's own default data.
    if $.isPlainObject(@defaultData)
      for key of @defaultData
        continue if key in keysAccountedFor
        keysAccountedFor.push(key)
        children.push({
          key: key
        schema: @getChildSchema(key)
          defaultData: @getDefaultDataForKey(key)
        })
    # Keys that only exist in the schema-level default object.
    if $.isPlainObject(@workingSchema.default)
      for key of @workingSchema.default
        continue if key in keysAccountedFor
        keysAccountedFor.push(key)
        children.push({
          key: key
          schema: @getChildSchema(key)
          defaultData: @getDefaultDataForKey(key)
        })
    children
  # Default for one child key, deep-copied so later edits cannot mutate the
  # shared default object/array.
  getDefaultDataForKey: (key) ->
    childDefaultData = @defaultData?[key] ? @workingSchema.default?[key]
    if $.isArray(childDefaultData) then childDefaultData = $.extend(true, [], childDefaultData)
    if $.isPlainObject(childDefaultData) then childDefaultData = $.extend(true, {}, childDefaultData)
    childDefaultData
  # One-line summary: the displayProperty value if configured, otherwise up
  # to three "name=value" pairs with values truncated to 20 characters.
  buildValueForDisplay: (valEl, data) ->
    text = []
    return unless data
    displayValue = data[@workingSchema.displayProperty]
    if displayValue
      text = displayValue
      return @buildValueForDisplaySimply(valEl, text)
    i = 0
    schema = @workingSchema or @schema
    for key, value of data
      continue if value is undefined
      if i is 3
        text.push('...')
        break
      i += 1
      childSchema = @getChildSchema(key)
      name = childSchema.title or key
      # Composite values are summarized by name only.
      if $.isPlainObject(value) or $.isArray(value)
        text.push "#{name}"
        continue
      valueString = value
      valueString = JSON.stringify(value) unless $.type(value) is 'string'
      valueString = 'undefined' if typeof value is 'undefined'
      valueString = valueString[..20] + ' ...' if valueString.length > 20
      text.push "#{name}=#{valueString}"
    empty = if @workingSchema.title? then "(empty #{@workingSchema.title})" else '(empty)'
    text = if text.length then text.join(', ') else empty
    @buildValueForDisplaySimply(valEl, text)
  populateData: ->
    super()
    # Also fill in any properties the schema marks as required.
    TreemaNode.utils.populateRequireds(@data, @workingSchema, @tv4)
  close: ->
    super(arguments...)
    @buildValueForDisplay(@getValEl().empty(), @getData())
  # adding children ---------------------------------------------------------
  # Show the new-property key input, wired up with autocomplete when the
  # jQuery UI autocomplete plugin is present.
  addNewChild: ->
    return unless @canAddChild()
    @open() unless @isRoot()
    @deselectAll()
    properties = @childPropertiesAvailable()
    keyInput = $(@newPropertyTemplate)
    keyInput.blur @cleanupAddNewChild
    keyInput.keydown (e) =>
      @originalTargetValue = $(e.target).val()
    keyInput.autocomplete?(source: properties, minLength: 0, delay: 0, autoFocus: true, select: @onAutocompleteSelect)
    @getAddButtonEl().before(keyInput).hide()
    keyInput.focus()
    keyInput.autocomplete('search')
    true
  onAutocompleteSelect: (e, ui) =>
    $(e.target).val(ui.item.value)
    @tryToAddNewChild(e, true)
  canAddChild: ->
    return false if @settings.readOnly or @workingSchema.readOnly
    return false if @workingSchema.maxProperties? and Object.keys(@getData()).length >= @workingSchema.maxProperties
    return true if @workingSchema.additionalProperties isnt false
    return true if @workingSchema.patternProperties?
    return true if @childPropertiesAvailable().length
    return false
  # Schema properties not yet present in the data (titles preferred), sorted.
  childPropertiesAvailable: ->
    schema = @workingSchema or @schema
    return [] unless schema.properties
    properties = []
    data = @getData()
    for property, childSchema of schema.properties
      continue if data?[property]?
      continue if childSchema.format is 'hidden'
      continue if childSchema.readOnly
      properties.push(childSchema.title or property)
    properties.sort()
  # event handling when adding a new property -------------------------------
  onDeletePressed: (e) ->
    return super(e) unless @addingNewProperty()
    # Backspace/delete on an empty key input cancels the add.
    if not $(e.target).val()
      @cleanupAddNewChild()
      e.preventDefault()
      @$el.find('.treema-add-child').focus()
  onEscapePressed: ->
    @cleanupAddNewChild()
  onTabPressed: (e) ->
    return super(e) unless @addingNewProperty()
    e.preventDefault()
    @tryToAddNewChild(e, false)
  onEnterPressed: (e) ->
    return super(e) unless @addingNewProperty()
    @tryToAddNewChild(e, true)
  # new property behavior ---------------------------------------------------
  # aggressive is true when the user explicitly confirmed (enter/autocomplete
  # select) rather than just tabbing away.
  tryToAddNewChild: (e, aggressive) ->
    # empty input keep on moving on
    if (not @originalTargetValue) and (not aggressive)
      offset = if e.shiftKey then -1 else 1
      @cleanupAddNewChild()
      @$el.find('.treema-add-child').focus()
      @traverseWhileEditing(offset)
      return
    keyInput = $(e.target)
    key = @getPropertyKey($(e.target))
    # invalid input, stay put and show an error
    if key.length and not @canAddProperty(key)
      @clearTemporaryErrors()
      @showBadPropertyError(keyInput)
      return
    # if this is a prop we already have, just edit that instead
    if @childrenTreemas[key]?
      @cleanupAddNewChild()
      treema = @childrenTreemas[key]
      return if treema.canEdit() then treema.toggleEdit() else treema.select()
    # otherwise add the new child
    @cleanupAddNewChild()
    @addNewChildForKey(key)
  # Map a typed schema title back to its real property key when possible.
  getPropertyKey: (keyInput) ->
    # Fixed corrupted method call: read the typed property name with
    # jQuery's val(), matching the sibling implementations of this method.
    key = keyInput.val()
    if @workingSchema.properties
      for child_key, child_schema of @workingSchema.properties
        key = child_key if child_schema.title is key
    key
  canAddProperty: (key) ->
    return true unless @workingSchema.additionalProperties is false
    return true if @workingSchema.properties?[key]?
    if @workingSchema.patternProperties?
      for pattern of @workingSchema.patternProperties
        return true if RegExp(pattern).test(key)
    return false
  showBadPropertyError: (keyInput) ->
    keyInput.focus()
    tempError = @createTemporaryError('Invalid property name.')
    tempError.insertAfter(keyInput)
    return
  # Create the child treema for key, insert it in schema order, and start
  # editing it (or drill into its first child for collections).
  addNewChildForKey: (key) ->
    schema = @getChildSchema(key)
    newTreema = TreemaNode.make(null, {schema: schema}, @, key)
    childNode = @createChildNode(newTreema)
    @findObjectInsertionPoint(key).before(childNode)
    if newTreema.canEdit()
      newTreema.edit()
    else
      @integrateChildTreema(newTreema)
      # new treemas may already have children from default
      if newTreema.collection
        children = newTreema.getChildren()
        if children.length
          newTreema.open()
          child = newTreema.childrenTreemas[children[0]['key']]
          child?.select()
        else
          newTreema.addNewChild()
    @addTrackedAction {'data':newTreema.data, 'path':newTreema.getPath(), 'parentPath':@getPath(), action:'insert'}
    @updateMyAddButton()
  findObjectInsertionPoint: (key) ->
    # Object children should be in the order of the schema.properties objects as much as possible
    return @getAddButtonEl() unless @workingSchema.properties?[key]
    allProps = Object.keys(@workingSchema.properties)
    afterKeys = allProps.slice(allProps.indexOf(key)+1)
    allChildren = @$el.find('> .treema-children > .treema-node')
    for child in allChildren
      if $(child).data('instance').keyForParent in afterKeys
        return $(child)
    return @getAddButtonEl()
  # adding utilities --------------------------------------------------------
  cleanupAddNewChild: =>
    @$el.find('.treema-new-prop').remove()
    @getAddButtonEl().show()
    @clearTemporaryErrors()
  addingNewProperty: -> document.activeElement is @$el.find('.treema-new-prop')[0]
window.TreemaObjectNode = ObjectNode # TODO: how should we be making these available?
do __init = ->
TreemaNode.setNodeSubclass 'string', class StringNode extends TreemaNode
  # Node for JSON strings: displayed with surrounding quotes, edited through
  # an input whose type can follow the schema "format" value.
  valueClass: 'treema-string'
  @inputTypes = ['color', 'date', 'datetime', 'datetime-local',
                 'email', 'month', 'range', 'search',
                 'tel', 'text', 'time', 'url', 'week']
  buildValueForDisplay: (valEl, data) ->
    @buildValueForDisplaySimply(valEl, "\"#{data}\"")
  buildValueForEditing: (valEl, data) ->
    inputEl = @buildValueForEditingSimply(valEl, data)
    if @workingSchema.maxLength
      inputEl.attr('maxlength', @workingSchema.maxLength)
    if @workingSchema.format in StringNode.inputTypes
      inputEl.attr('type', @workingSchema.format)
  saveChanges: (valEl) ->
    previousData = @data
    @data = $('input', valEl).val()
    super(previousData)
TreemaNode.setNodeSubclass 'number', class NumberNode extends TreemaNode
  # Node for JSON Schema "number" values, edited via <input type="number">.
  valueClass: 'treema-number'
  buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, JSON.stringify(data))
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data), 'number')
    # Existence checks (?) instead of truthiness so a schema bound of 0
    # is still applied to the input element.
    input.attr('max', @workingSchema.maximum) if @workingSchema.maximum?
    input.attr('min', @workingSchema.minimum) if @workingSchema.minimum?
  saveChanges: (valEl) ->
    oldData = @data
    @data = parseFloat($('input', valEl).val())
    super(oldData)
TreemaNode.setNodeSubclass 'integer', class IntegerNode extends TreemaNode
  # Node for JSON Schema "integer" values. Displays the JSON form of the
  # value and edits it through an <input type="number"> element.
  valueClass: 'treema-integer'
  buildValueForDisplay: (valEl, data) -> @buildValueForDisplaySimply(valEl, JSON.stringify(data))
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data), 'number')
    # Existence checks (?) instead of truthiness so a schema bound of 0
    # is still applied to the input element.
    input.attr('max', @workingSchema.maximum) if @workingSchema.maximum?
    input.attr('min', @workingSchema.minimum) if @workingSchema.minimum?
  saveChanges: (valEl) ->
    oldData = @data
    # Explicit radix 10: never let leading zeros/0x trigger octal/hex parsing.
    @data = parseInt($('input', valEl).val(), 10)
    super(oldData)
TreemaNode.setNodeSubclass 'null', NullNode = class NullNode extends TreemaNode
  # Read-only node for JSON null values; there is nothing to edit.
  editable: false
  valueClass: 'treema-null'
  buildValueForDisplay: (valEl) ->
    @buildValueForDisplaySimply(valEl, 'null')
TreemaNode.setNodeSubclass 'boolean', class BooleanNode extends TreemaNode
  # Node for JSON booleans. The value is flipped in place (click, space,
  # F/T keys) rather than typed into an input.
  valueClass: 'treema-boolean'
  buildValueForDisplay: (valEl, data) ->
    @buildValueForDisplaySimply(valEl, JSON.stringify(data))
    @keepFocus()
  buildValueForEditing: (valEl, data) ->
    input = @buildValueForEditingSimply(valEl, JSON.stringify(data))
    # Show the current value as plain text next to the input.
    $('<span></span>').text(JSON.stringify(@data)).insertBefore(input)
    input.focus()
  # Flip the value, or force it to newValue when one is given; re-renders
  # the value element, records an 'edit' action, and flushes immediately.
  toggleValue: (newValue=null) ->
    oldData = @getData()
    @data = not @data
    @data = newValue if newValue?
    valEl = @getValEl().empty()
    if @isDisplaying() then @buildValueForDisplay(valEl, @getData()) else @buildValueForEditing(valEl, @getData())
    @addTrackedAction {'oldData':oldData, 'newData':@data, 'path':@getPath(), 'action':'edit'}
    @keepFocus()
    @flushChanges()
  onSpacePressed: -> @toggleValue()
  onFPressed: -> @toggleValue(false)
  onTPressed: -> @toggleValue(true)
  saveChanges: ->  # intentionally empty: toggleValue writes @data and flushes itself
  onClick: (e) ->
    value = $(e.target).closest('.treema-value')
    return super(e) unless value.length
    @toggleValue() if @canEdit()
TreemaNode.setNodeSubclass 'array', class ArrayNode extends TreemaNode
  # Ordered collection node for JSON arrays.
  valueClass: 'treema-array'
  collection: true
  ordered: true
  directlyEditable: false
  sort: false  # when true, @data is re-sorted with sortFunction on open
  getChildren: ->
    ({
      key: key
      value: value
      schema: @getChildSchema(key)
    } for value, key in @getData())
  # Summarize up to the first three elements, joined with ' | '.
  buildValueForDisplay: (valEl, data) ->
    text = []
    return unless data
    for child, index in data[..2]
      # Use a throwaway helper treema to render each element's display text.
      helperTreema = TreemaNode.make(null, {schema: TreemaNode.utils.getChildSchema(index, @workingSchema), data:child}, @)
      val = $('<div></div>')
      helperTreema.buildValueForDisplay(val, helperTreema.getData())
      text.push(val.text())
    text.push('...') if data.length > 3
    empty = if @workingSchema.title? then "(empty #{@workingSchema.title})" else '(empty)'
    text = if text.length then text.join(' | ') else empty
    @buildValueForDisplaySimply(valEl, text)
  buildValueForEditing: (valEl, data) -> @buildValueForEditingSimply(valEl, JSON.stringify(data))
  # Additions are blocked by read-only flags, a full tuple (additionalItems
  # is false), or the maxItems bound.
  canAddChild: ->
    return false if @settings.readOnly or @workingSchema.readOnly
    return false if @workingSchema.additionalItems is false and @getData().length >= @workingSchema.items.length
    return false if @workingSchema.maxItems? and @getData().length >= @workingSchema.maxItems
    return true
  # Append a new element treema at the end and start editing (or select) it.
  addNewChild: ->
    return unless @canAddChild()
    @open() if @isClosed()
    new_index = Object.keys(@childrenTreemas).length
    schema = TreemaNode.utils.getChildSchema(new_index, @workingSchema)
    newTreema = TreemaNode.make(undefined, {schema: schema}, @, new_index)
    newTreema.tv4 = @tv4
    childNode = @createChildNode(newTreema)
    @addTrackedAction {'data':newTreema.data, 'path':newTreema.getPath(), 'parentPath':@getPath(), 'action':'insert'}
    @getAddButtonEl().before(childNode)
    if newTreema.canEdit()
      newTreema.edit()
    else
      newTreema.select()
    @integrateChildTreema(newTreema)
    newTreema.flushChanges()
    newTreema
  open: ->
    @data.sort(@sortFunction) if @data and @sort
    super(arguments...)
  close: ->
    super(arguments...)
    valEl = @getValEl().empty()
    @buildValueForDisplay(valEl, @getData())
  # auto sorting methods
  sortFunction: (a, b) ->
    return 1 if a > b
    return -1 if a < b
    return 0
window.TreemaArrayNode = ArrayNode # TODO: how should we be making these available?
TreemaNode.setNodeSubclass 'object', class ObjectNode extends TreemaNode
  # Keyed collection node for JSON objects. Children are ordered to follow
  # schema.properties first, then remaining data keys, then keys that only
  # exist in default data.
  valueClass: 'treema-object'
  collection: true
  keyed: true
  directlyEditable: false
  # Build the child descriptor list: {key, value?, schema, defaultData?}.
  getChildren: ->
    # order based on properties object first
    children = []
    keysAccountedFor = []
    if @workingSchema.properties
      for key of @workingSchema.properties
        defaultData = @getDefaultDataForKey(key)
        # Key absent from data: include it only when a default exists.
        if $.type(@getData()[key]) is 'undefined'
          if defaultData?
            keysAccountedFor.push(key)
            children.push({
              key: key,
              schema: @getChildSchema(key)
              defaultData: defaultData
            })
          continue
        keysAccountedFor.push(key)
        schema = @getChildSchema(key)
        children.push({
          key: key
          value: @getData()[key]
          schema: schema
          defaultData: defaultData
        })
    # Data keys not covered by schema.properties.
    for key, value of @getData()
      continue if key in keysAccountedFor
      keysAccountedFor.push(key)
      children.push({
        key: key
        value: value
        schema: @getChildSchema(key)
        defaultData: @getDefaultDataForKey(key)
      })
    # Keys that only exist in this node's own default data.
    if $.isPlainObject(@defaultData)
      for key of @defaultData
        continue if key in keysAccountedFor
        keysAccountedFor.push(key)
        children.push({
          key: key
          schema: @getChildSchema(key)
          defaultData: @getDefaultDataForKey(key)
        })
    # Keys that only exist in the schema-level default object.
    if $.isPlainObject(@workingSchema.default)
      for key of @workingSchema.default
        continue if key in keysAccountedFor
        keysAccountedFor.push(key)
        children.push({
          key: key
          schema: @getChildSchema(key)
          defaultData: @getDefaultDataForKey(key)
        })
    children
  # Default for one child key, deep-copied so later edits cannot mutate the
  # shared default object/array.
  getDefaultDataForKey: (key) ->
    childDefaultData = @defaultData?[key] ? @workingSchema.default?[key]
    if $.isArray(childDefaultData) then childDefaultData = $.extend(true, [], childDefaultData)
    if $.isPlainObject(childDefaultData) then childDefaultData = $.extend(true, {}, childDefaultData)
    childDefaultData
  # One-line summary: the displayProperty value if configured, otherwise up
  # to three "name=value" pairs with values truncated to 20 characters.
  buildValueForDisplay: (valEl, data) ->
    text = []
    return unless data
    displayValue = data[@workingSchema.displayProperty]
    if displayValue
      text = displayValue
      return @buildValueForDisplaySimply(valEl, text)
    i = 0
    schema = @workingSchema or @schema
    for key, value of data
      continue if value is undefined
      if i is 3
        text.push('...')
        break
      i += 1
      childSchema = @getChildSchema(key)
      name = childSchema.title or key
      # Composite values are summarized by name only.
      if $.isPlainObject(value) or $.isArray(value)
        text.push "#{name}"
        continue
      valueString = value
      valueString = JSON.stringify(value) unless $.type(value) is 'string'
      valueString = 'undefined' if typeof value is 'undefined'
      valueString = valueString[..20] + ' ...' if valueString.length > 20
      text.push "#{name}=#{valueString}"
    empty = if @workingSchema.title? then "(empty #{@workingSchema.title})" else '(empty)'
    text = if text.length then text.join(', ') else empty
    @buildValueForDisplaySimply(valEl, text)
  populateData: ->
    super()
    # Also fill in any properties the schema marks as required.
    TreemaNode.utils.populateRequireds(@data, @workingSchema, @tv4)
  close: ->
    super(arguments...)
    @buildValueForDisplay(@getValEl().empty(), @getData())
  # adding children ---------------------------------------------------------
  # Show the new-property key input, wired up with autocomplete when the
  # jQuery UI autocomplete plugin is present.
  addNewChild: ->
    return unless @canAddChild()
    @open() unless @isRoot()
    @deselectAll()
    properties = @childPropertiesAvailable()
    keyInput = $(@newPropertyTemplate)
    keyInput.blur @cleanupAddNewChild
    keyInput.keydown (e) =>
      @originalTargetValue = $(e.target).val()
    keyInput.autocomplete?(source: properties, minLength: 0, delay: 0, autoFocus: true, select: @onAutocompleteSelect)
    @getAddButtonEl().before(keyInput).hide()
    keyInput.focus()
    keyInput.autocomplete('search')
    true
  onAutocompleteSelect: (e, ui) =>
    $(e.target).val(ui.item.value)
    @tryToAddNewChild(e, true)
  canAddChild: ->
    return false if @settings.readOnly or @workingSchema.readOnly
    return false if @workingSchema.maxProperties? and Object.keys(@getData()).length >= @workingSchema.maxProperties
    return true if @workingSchema.additionalProperties isnt false
    return true if @workingSchema.patternProperties?
    return true if @childPropertiesAvailable().length
    return false
  # Schema properties not yet present in the data (titles preferred), sorted.
  childPropertiesAvailable: ->
    schema = @workingSchema or @schema
    return [] unless schema.properties
    properties = []
    data = @getData()
    for property, childSchema of schema.properties
      continue if data?[property]?
      continue if childSchema.format is 'hidden'
      continue if childSchema.readOnly
      properties.push(childSchema.title or property)
    properties.sort()
# event handling when adding a new property -------------------------------
onDeletePressed: (e) ->
return super(e) unless @addingNewProperty()
if not $(e.target).val()
@cleanupAddNewChild()
e.preventDefault()
@$el.find('.treema-add-child').focus()
onEscapePressed: ->
@cleanupAddNewChild()
onTabPressed: (e) ->
return super(e) unless @addingNewProperty()
e.preventDefault()
@tryToAddNewChild(e, false)
  onEnterPressed: (e) ->
    # Enter aggressively commits the typed key as a new property.
    return super(e) unless @addingNewProperty()
    @tryToAddNewChild(e, true)
# new property behavior ---------------------------------------------------
  # Attempt to turn the key-input contents into a new child treema.
  # `aggressive` is true for enter/autocomplete-select, false for tab.
  tryToAddNewChild: (e, aggressive) ->
    # empty input keep on moving on
    if (not @originalTargetValue) and (not aggressive)
      # shift-tab walks backwards, tab forwards
      offset = if e.shiftKey then -1 else 1
      @cleanupAddNewChild()
      @$el.find('.treema-add-child').focus()
      @traverseWhileEditing(offset)
      return
    keyInput = $(e.target)
    key = @getPropertyKey($(e.target))
    # invalid input, stay put and show an error
    if key.length and not @canAddProperty(key)
      @clearTemporaryErrors()
      @showBadPropertyError(keyInput)
      return
    # if this is a prop we already have, just edit that instead
    if @childrenTreemas[key]?
      @cleanupAddNewChild()
      treema = @childrenTreemas[key]
      return if treema.canEdit() then treema.toggleEdit() else treema.select()
    # otherwise add the new child
    @cleanupAddNewChild()
    @addNewChildForKey(key)
getPropertyKey: (keyInput) ->
key = keyInput.PI:KEY:<KEY>END_PI()
if @workingSchema.properties
for child_key, child_schema of @workingSchema.properties
key = child_key if child_schema.title is key
key
canAddProperty: (key) ->
return true unless @workingSchema.additionalProperties is false
return true if @workingSchema.properties?[key]?
if @workingSchema.patternProperties?
for pattern of @workingSchema.patternProperties
return true if RegExp(pattern).test(key)
return false
  showBadPropertyError: (keyInput) ->
    # Keep focus on the offending input and show a transient error next to it.
    keyInput.focus()
    tempError = @createTemporaryError('Invalid property name.')
    tempError.insertAfter(keyInput)
    return
  # Create a child treema for `key`, insert it at its schema-ordered spot,
  # and either start editing it or (for collections) drill into its first
  # child / begin adding one.
  addNewChildForKey: (key) ->
    schema = @getChildSchema(key)
    newTreema = TreemaNode.make(null, {schema: schema}, @, key)
    childNode = @createChildNode(newTreema)
    @findObjectInsertionPoint(key).before(childNode)
    if newTreema.canEdit()
      newTreema.edit()
    else
      @integrateChildTreema(newTreema)
      # new treemas may already have children from default
      if newTreema.collection
        children = newTreema.getChildren()
        if children.length
          newTreema.open()
          child = newTreema.childrenTreemas[children[0]['key']]
          child?.select()
        else
          newTreema.addNewChild()
    # Record the insertion so it participates in action tracking.
    @addTrackedAction {'data':newTreema.data, 'path':newTreema.getPath(), 'parentPath':@getPath(), action:'insert'}
    @updateMyAddButton()
  findObjectInsertionPoint: (key) ->
    # Object children should be in the order of the schema.properties objects as much as possible
    # Keys not in the schema go at the end, just before the add button.
    return @getAddButtonEl() unless @workingSchema.properties?[key]
    allProps = Object.keys(@workingSchema.properties)
    # Keys that should come after `key` in schema order; insert before the
    # first rendered child whose key is one of them.
    afterKeys = allProps.slice(allProps.indexOf(key)+1)
    allChildren = @$el.find('> .treema-children > .treema-node')
    for child in allChildren
      if $(child).data('instance').keyForParent in afterKeys
        return $(child)
    return @getAddButtonEl()
# adding utilities --------------------------------------------------------
  cleanupAddNewChild: =>
    # Tear down the new-property input, restore the add button, and clear
    # any temporary error messages. Bound (=>) so it can be an event handler.
    @$el.find('.treema-new-prop').remove()
    @getAddButtonEl().show()
    @clearTemporaryErrors()
  # True when keyboard focus is inside the in-progress new-property input.
  addingNewProperty: -> document.activeElement is @$el.find('.treema-new-prop')[0]
# Expose the class globally for consumers that don't use the module system.
window.TreemaObjectNode = ObjectNode # TODO: how should we be making these available?
|
[
{
"context": "ValidationError\n@extends restify/RestError\n@author Isaac Johnston <isaac.johnston@joukou.com>\n@copyright (c) 2009-2",
"end": 650,
"score": 0.9998890161514282,
"start": 636,
"tag": "NAME",
"value": "Isaac Johnston"
},
{
"context": "extends restify/RestError\n@author I... | src/lib/errors/ValidationError.coffee | joukou/joukou-data | 0 | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
@class joukou-api/riak/ValidationError
@extends restify/RestError
@author Isaac Johnston <isaac.johnston@joukou.com>
@copyright (c) 2009-2014 Joukou Ltd. All rights reserved.
###
{ RestError } = require( 'restify' )
module.exports = self = class extends RestError
rawValues: {}
constructor: ( errors, rawValues ) ->
super(
restCode: 'ForbiddenError'
statusCode: 403
message: JSON.stringify( errors )
constructorOpt: self
)
this.rawValues = rawValues
return | 117440 | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
@class joukou-api/riak/ValidationError
@extends restify/RestError
@author <NAME> <<EMAIL>>
@copyright (c) 2009-2014 Joukou Ltd. All rights reserved.
###
{ RestError } = require( 'restify' )
module.exports = self = class extends RestError
rawValues: {}
constructor: ( errors, rawValues ) ->
super(
restCode: 'ForbiddenError'
statusCode: 403
message: JSON.stringify( errors )
constructorOpt: self
)
this.rawValues = rawValues
return | true | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
@class joukou-api/riak/ValidationError
@extends restify/RestError
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
@copyright (c) 2009-2014 Joukou Ltd. All rights reserved.
###
{ RestError } = require( 'restify' )
module.exports = self = class extends RestError
rawValues: {}
constructor: ( errors, rawValues ) ->
super(
restCode: 'ForbiddenError'
statusCode: 403
message: JSON.stringify( errors )
constructorOpt: self
)
this.rawValues = rawValues
return |
[
{
"context": "ER_OUTPUT\",\n job: 7,\n reducer: 1,\n key: \"Josh\",\n lines: [\"d\", \"e\", \"f\"]})\n fb.push({name: \"",
"end": 2023,
"score": 0.9986205101013184,
"start": 2019,
"tag": "NAME",
"value": "Josh"
},
{
"context": "ER_OUTPUT\",\n job: 7,\n reducer: 2,\... | src/ioserver/ioserver.coffee | dwetterau/countdera | 2 |
firebase_db = require("../serverjs/firebase_db")
constants = require("../constants")
fs = require("fs")
class ReducerSet
constructor: (reducerid) ->
@reducerid = reducerid
@output = {}
@finished = false
addLinesFromKey: (key, lines) ->
if (key not of @output)
@output[key] = lines
class JobSet
constructor: (jobid, totalReducers) ->
@job = jobid
@reducers = {}
@totalReducers = totalReducers
@numReducers = 0
addReducer: (reducerid) ->
@reducers[reducerid] = new ReducerSet(reducerid)
addOutput: (reducerid, key, lines) ->
@reducers[reducerid].addLinesFromKey(key, lines)
finishReducer: (reducerid) ->
@reducers[reducerid].finished = true
@numReducers++
console.log(@numReducers + " out of " + @totalReducers)
if (@numReducers == @totalReducers)
jobMap = @combineToJobMap()
str = @serializeJobMap(jobMap)
@saveToFile(str)
firebase_db.JOB_STATUS_REF.child(@job).child('output_url')
.set(constants.OUTPUT_DIR + @job)
return true
return false
combineToJobMap: () ->
jobMap = {}
for _,reducer of @reducers
for key, lines of reducer.output
jobMap[key] = lines
return jobMap
serializeJobMap: (jobMap) ->
str = ""
keylist = []
for key,_ of jobMap
keylist.push(key)
keylist.sort()
for key in keylist
str += @keyToString(jobMap, key)
return str
keyToString: (map, key) ->
str = ""
for line in map[key]
str += key + ":" + line + "\n"
return str
saveToFile: (str) ->
console.log("Finished Job " + @job)
fs.writeFileSync(constants.TOP_DIR + constants.OUTPUT_DIR + @job, str)
test = (fb) ->
fb.push({name: "START_JOB", job: 7, numReducers: 3})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 3})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 3})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 1})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 1,
key: "Josh",
lines: ["d", "e", "f"]})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 2,
key: "Dan",
lines: ["f", "g", "h"]})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 2,
key: "John",
lines: ["x", "y", "z"]})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 1,
key: "David",
lines: ["a", "b", "c"]})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 1})
main = () ->
fb = firebase_db.IO_SERVER_MESSAGE_REF
currentJobs = {}
#currentJobs[7] = new JobSet(7, 2)
fb.on("child_added", newMessage = (snapshot) ->
message = snapshot.val()
switch message.name
when "START_JOB" then (
if (not (currentJobs[message.job] == -1))
currentJobs[message.job] =
new JobSet(message.job, message.numReducers)
)
when "START_REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
currentJobs[message.job].addReducer(message.reducer)
)
when "REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
for m in message.message
key = m[0]
lines = m[1]
if typeof lines == 'number'
lines = [lines]
currentJobs[message.job].addOutput(
message.reducer, key, lines)
)
when "STOP_REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
if (currentJobs[message.job].finishReducer(message.reducer))
currentJobs[message.job] = -1
)
fb.child(snapshot.name()).remove()
)
#test(fb)
main()
| 70020 |
firebase_db = require("../serverjs/firebase_db")
constants = require("../constants")
fs = require("fs")
class ReducerSet
constructor: (reducerid) ->
@reducerid = reducerid
@output = {}
@finished = false
addLinesFromKey: (key, lines) ->
if (key not of @output)
@output[key] = lines
class JobSet
constructor: (jobid, totalReducers) ->
@job = jobid
@reducers = {}
@totalReducers = totalReducers
@numReducers = 0
addReducer: (reducerid) ->
@reducers[reducerid] = new ReducerSet(reducerid)
addOutput: (reducerid, key, lines) ->
@reducers[reducerid].addLinesFromKey(key, lines)
finishReducer: (reducerid) ->
@reducers[reducerid].finished = true
@numReducers++
console.log(@numReducers + " out of " + @totalReducers)
if (@numReducers == @totalReducers)
jobMap = @combineToJobMap()
str = @serializeJobMap(jobMap)
@saveToFile(str)
firebase_db.JOB_STATUS_REF.child(@job).child('output_url')
.set(constants.OUTPUT_DIR + @job)
return true
return false
combineToJobMap: () ->
jobMap = {}
for _,reducer of @reducers
for key, lines of reducer.output
jobMap[key] = lines
return jobMap
serializeJobMap: (jobMap) ->
str = ""
keylist = []
for key,_ of jobMap
keylist.push(key)
keylist.sort()
for key in keylist
str += @keyToString(jobMap, key)
return str
keyToString: (map, key) ->
str = ""
for line in map[key]
str += key + ":" + line + "\n"
return str
saveToFile: (str) ->
console.log("Finished Job " + @job)
fs.writeFileSync(constants.TOP_DIR + constants.OUTPUT_DIR + @job, str)
test = (fb) ->
fb.push({name: "START_JOB", job: 7, numReducers: 3})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 3})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 3})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 1})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 1,
key: "<NAME>",
lines: ["d", "e", "f"]})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 2,
key: "<NAME>",
lines: ["f", "g", "h"]})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 2,
key: "<NAME>",
lines: ["x", "y", "z"]})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 1,
key: "<NAME>",
lines: ["a", "b", "c"]})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 1})
main = () ->
fb = firebase_db.IO_SERVER_MESSAGE_REF
currentJobs = {}
#currentJobs[7] = new JobSet(7, 2)
fb.on("child_added", newMessage = (snapshot) ->
message = snapshot.val()
switch message.name
when "START_JOB" then (
if (not (currentJobs[message.job] == -1))
currentJobs[message.job] =
new JobSet(message.job, message.numReducers)
)
when "START_REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
currentJobs[message.job].addReducer(message.reducer)
)
when "REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
for m in message.message
key = m[0]
lines = m[1]
if typeof lines == 'number'
lines = [lines]
currentJobs[message.job].addOutput(
message.reducer, key, lines)
)
when "STOP_REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
if (currentJobs[message.job].finishReducer(message.reducer))
currentJobs[message.job] = -1
)
fb.child(snapshot.name()).remove()
)
#test(fb)
main()
| true |
firebase_db = require("../serverjs/firebase_db")
constants = require("../constants")
fs = require("fs")
class ReducerSet
constructor: (reducerid) ->
@reducerid = reducerid
@output = {}
@finished = false
addLinesFromKey: (key, lines) ->
if (key not of @output)
@output[key] = lines
class JobSet
constructor: (jobid, totalReducers) ->
@job = jobid
@reducers = {}
@totalReducers = totalReducers
@numReducers = 0
addReducer: (reducerid) ->
@reducers[reducerid] = new ReducerSet(reducerid)
addOutput: (reducerid, key, lines) ->
@reducers[reducerid].addLinesFromKey(key, lines)
finishReducer: (reducerid) ->
@reducers[reducerid].finished = true
@numReducers++
console.log(@numReducers + " out of " + @totalReducers)
if (@numReducers == @totalReducers)
jobMap = @combineToJobMap()
str = @serializeJobMap(jobMap)
@saveToFile(str)
firebase_db.JOB_STATUS_REF.child(@job).child('output_url')
.set(constants.OUTPUT_DIR + @job)
return true
return false
combineToJobMap: () ->
jobMap = {}
for _,reducer of @reducers
for key, lines of reducer.output
jobMap[key] = lines
return jobMap
serializeJobMap: (jobMap) ->
str = ""
keylist = []
for key,_ of jobMap
keylist.push(key)
keylist.sort()
for key in keylist
str += @keyToString(jobMap, key)
return str
keyToString: (map, key) ->
str = ""
for line in map[key]
str += key + ":" + line + "\n"
return str
saveToFile: (str) ->
console.log("Finished Job " + @job)
fs.writeFileSync(constants.TOP_DIR + constants.OUTPUT_DIR + @job, str)
test = (fb) ->
fb.push({name: "START_JOB", job: 7, numReducers: 3})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 3})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 3})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 1})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 1,
key: "PI:NAME:<NAME>END_PI",
lines: ["d", "e", "f"]})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 2,
key: "PI:NAME:<NAME>END_PI",
lines: ["f", "g", "h"]})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 2,
key: "PI:NAME:<NAME>END_PI",
lines: ["x", "y", "z"]})
fb.push({
name: "REDUCER_OUTPUT",
job: 7,
reducer: 1,
key: "PI:NAME:<NAME>END_PI",
lines: ["a", "b", "c"]})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({name: "START_REDUCER_OUTPUT", job: 7, reducer: 2})
fb.push({name: "STOP_REDUCER_OUTPUT", job: 7, reducer: 1})
main = () ->
fb = firebase_db.IO_SERVER_MESSAGE_REF
currentJobs = {}
#currentJobs[7] = new JobSet(7, 2)
fb.on("child_added", newMessage = (snapshot) ->
message = snapshot.val()
switch message.name
when "START_JOB" then (
if (not (currentJobs[message.job] == -1))
currentJobs[message.job] =
new JobSet(message.job, message.numReducers)
)
when "START_REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
currentJobs[message.job].addReducer(message.reducer)
)
when "REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
for m in message.message
key = m[0]
lines = m[1]
if typeof lines == 'number'
lines = [lines]
currentJobs[message.job].addOutput(
message.reducer, key, lines)
)
when "STOP_REDUCER_OUTPUT" then (
if (not((currentJobs[message.job] == null or currentJobs[message.job] == -1)))
if (currentJobs[message.job].finishReducer(message.reducer))
currentJobs[message.job] = -1
)
fb.child(snapshot.name()).remove()
)
#test(fb)
main()
|
[
{
"context": "forward-curving horns.\n '''\n\n scientificName: '(Redunca arundinum)'\n mainImage: 'assets/fieldguide-content/mammals",
"end": 619,
"score": 0.9998618364334106,
"start": 602,
"tag": "NAME",
"value": "Redunca arundinum"
}
] | app/lib/field-guide-content/reedbuck.coffee | zooniverse/snapshot-wisconsin | 0 | module.exports =
label: 'Common Reedbuck'
description: '''
The common reedbuck is the largest reedbuck. They can be identified by the distinctive dark lines that run down the front of each foreleg and the lower hind legs. Their color varies, but they are generally unpatterned, with their face, head, torso, and outer sides of the legs a shade of gray-brown. They do have white around their eyes, inside of their ears, on their chin, throat, belly, and the underside of their bushy tail. Males can easily be distinguished from females by their forward-curving horns.
'''
scientificName: '(Redunca arundinum)'
mainImage: 'assets/fieldguide-content/mammals/reedbuck/reedbuck-feature.jpg'
conservationStatus: 'Least Concern' # Options are Least Concern, Near Threatened, Vulnerable, and Endangered; all have their own little icons as well.
information: [{
label: 'Length'
value: '1.34-1.67 m'
}, {
label: 'Height'
value: '83-98 cm'
}, {
label: 'Weight'
value: '39-80 kg'
}, {
label: 'Lifespan'
value: 'Up to 16'
}, {
label: 'Gestation'
value: '7.5 months'
}, {
label: 'Avg. number of offspring'
value: '1'
}]
sections: [{
title: 'Habitat'
content: 'Common reedbucks prefer habitats with tall grasses near water.'
}, {
title: 'Diet'
content: 'Common reedbucks are predominantly grazers, but they will eat herbs and foliage of woody plants during the dry season. They get most of their water from foraging and dew.'
}, {
title: 'Predators'
content: 'Lions, leopards, spotted hyenas, wild dogs, crocodiles'
}, {
title: 'Behavior'
content: '''
<p>Common reedbucks form monogamous pairs. Males defend their territories by standing in an upright posture, running at conspecifics with high bounds (known as stotting), and producing whistling alarm calls. Alarm calls are also a response to predators. During the dry season, common reedbucks form temporary herds that can include up to seven females, their young, and one male.</p>
'''
}, {
title: 'Breeding'
content: '''
<p>The common reedbuck breeds year-round. Females give birth to a single offspring after a gestation period of about 7.5 months. Females reach sexual maturity in their second year of life, at which point they leave their parents’ territory.</p>
'''
}, {
title: 'Fun Facts'
style: 'focus-box'
content: '''
<ol>
<li>1. A young reedbuck will remain among the dense grass cover in which it was born for the first two months of its life. During this period, the mother will not stay around but instead visit the young for just 10 to 30 minutes each day.</li>
</ol>
'''
},{
title: 'Distribution'
content: '<img src="assets/fieldguide-content/mammals/reedbuck/reedbuck-map.jpg"/>'
}]
| 45759 | module.exports =
label: 'Common Reedbuck'
description: '''
The common reedbuck is the largest reedbuck. They can be identified by the distinctive dark lines that run down the front of each foreleg and the lower hind legs. Their color varies, but they are generally unpatterned, with their face, head, torso, and outer sides of the legs a shade of gray-brown. They do have white around their eyes, inside of their ears, on their chin, throat, belly, and the underside of their bushy tail. Males can easily be distinguished from females by their forward-curving horns.
'''
scientificName: '(<NAME>)'
mainImage: 'assets/fieldguide-content/mammals/reedbuck/reedbuck-feature.jpg'
conservationStatus: 'Least Concern' # Options are Least Concern, Near Threatened, Vulnerable, and Endangered; all have their own little icons as well.
information: [{
label: 'Length'
value: '1.34-1.67 m'
}, {
label: 'Height'
value: '83-98 cm'
}, {
label: 'Weight'
value: '39-80 kg'
}, {
label: 'Lifespan'
value: 'Up to 16'
}, {
label: 'Gestation'
value: '7.5 months'
}, {
label: 'Avg. number of offspring'
value: '1'
}]
sections: [{
title: 'Habitat'
content: 'Common reedbucks prefer habitats with tall grasses near water.'
}, {
title: 'Diet'
content: 'Common reedbucks are predominantly grazers, but they will eat herbs and foliage of woody plants during the dry season. They get most of their water from foraging and dew.'
}, {
title: 'Predators'
content: 'Lions, leopards, spotted hyenas, wild dogs, crocodiles'
}, {
title: 'Behavior'
content: '''
<p>Common reedbucks form monogamous pairs. Males defend their territories by standing in an upright posture, running at conspecifics with high bounds (known as stotting), and producing whistling alarm calls. Alarm calls are also a response to predators. During the dry season, common reedbucks form temporary herds that can include up to seven females, their young, and one male.</p>
'''
}, {
title: 'Breeding'
content: '''
<p>The common reedbuck breeds year-round. Females give birth to a single offspring after a gestation period of about 7.5 months. Females reach sexual maturity in their second year of life, at which point they leave their parents’ territory.</p>
'''
}, {
title: 'Fun Facts'
style: 'focus-box'
content: '''
<ol>
<li>1. A young reedbuck will remain among the dense grass cover in which it was born for the first two months of its life. During this period, the mother will not stay around but instead visit the young for just 10 to 30 minutes each day.</li>
</ol>
'''
},{
title: 'Distribution'
content: '<img src="assets/fieldguide-content/mammals/reedbuck/reedbuck-map.jpg"/>'
}]
| true | module.exports =
label: 'Common Reedbuck'
description: '''
The common reedbuck is the largest reedbuck. They can be identified by the distinctive dark lines that run down the front of each foreleg and the lower hind legs. Their color varies, but they are generally unpatterned, with their face, head, torso, and outer sides of the legs a shade of gray-brown. They do have white around their eyes, inside of their ears, on their chin, throat, belly, and the underside of their bushy tail. Males can easily be distinguished from females by their forward-curving horns.
'''
scientificName: '(PI:NAME:<NAME>END_PI)'
mainImage: 'assets/fieldguide-content/mammals/reedbuck/reedbuck-feature.jpg'
conservationStatus: 'Least Concern' # Options are Least Concern, Near Threatened, Vulnerable, and Endangered; all have their own little icons as well.
information: [{
label: 'Length'
value: '1.34-1.67 m'
}, {
label: 'Height'
value: '83-98 cm'
}, {
label: 'Weight'
value: '39-80 kg'
}, {
label: 'Lifespan'
value: 'Up to 16'
}, {
label: 'Gestation'
value: '7.5 months'
}, {
label: 'Avg. number of offspring'
value: '1'
}]
sections: [{
title: 'Habitat'
content: 'Common reedbucks prefer habitats with tall grasses near water.'
}, {
title: 'Diet'
content: 'Common reedbucks are predominantly grazers, but they will eat herbs and foliage of woody plants during the dry season. They get most of their water from foraging and dew.'
}, {
title: 'Predators'
content: 'Lions, leopards, spotted hyenas, wild dogs, crocodiles'
}, {
title: 'Behavior'
content: '''
<p>Common reedbucks form monogamous pairs. Males defend their territories by standing in an upright posture, running at conspecifics with high bounds (known as stotting), and producing whistling alarm calls. Alarm calls are also a response to predators. During the dry season, common reedbucks form temporary herds that can include up to seven females, their young, and one male.</p>
'''
}, {
title: 'Breeding'
content: '''
<p>The common reedbuck breeds year-round. Females give birth to a single offspring after a gestation period of about 7.5 months. Females reach sexual maturity in their second year of life, at which point they leave their parents’ territory.</p>
'''
}, {
title: 'Fun Facts'
style: 'focus-box'
content: '''
<ol>
<li>1. A young reedbuck will remain among the dense grass cover in which it was born for the first two months of its life. During this period, the mother will not stay around but instead visit the young for just 10 to 30 minutes each day.</li>
</ol>
'''
},{
title: 'Distribution'
content: '<img src="assets/fieldguide-content/mammals/reedbuck/reedbuck-map.jpg"/>'
}]
|
[
{
"context": ": [\n '(loading)', 'made with love for', 'Kyle Correia'\n ].map (s, i) ->\n div key: i, s\n",
"end": 492,
"score": 0.999871551990509,
"start": 480,
"tag": "NAME",
"value": "Kyle Correia"
}
] | src/Root.coffee | metakirby5/astral | 7 | # The base element of your app. Can be a router if you like.
{Component, DOM, createElement: ce} = require 'react'
{div} = DOM
Splash = require './components/Splash'
SolarSystem = require './components/SolarSystem'
module.exports = class extends Component
constructor: (props) ->
super props
@state =
loaded: false
render: ->
div className: 'fullscreen',
if not @state.loaded
ce Splash, msg: [
'(loading)', 'made with love for', 'Kyle Correia'
].map (s, i) ->
div key: i, s
ce SolarSystem,
onLoaded: => @setState loaded: true
| 198679 | # The base element of your app. Can be a router if you like.
{Component, DOM, createElement: ce} = require 'react'
{div} = DOM
Splash = require './components/Splash'
SolarSystem = require './components/SolarSystem'
module.exports = class extends Component
constructor: (props) ->
super props
@state =
loaded: false
render: ->
div className: 'fullscreen',
if not @state.loaded
ce Splash, msg: [
'(loading)', 'made with love for', '<NAME>'
].map (s, i) ->
div key: i, s
ce SolarSystem,
onLoaded: => @setState loaded: true
| true | # The base element of your app. Can be a router if you like.
{Component, DOM, createElement: ce} = require 'react'
{div} = DOM
Splash = require './components/Splash'
SolarSystem = require './components/SolarSystem'
module.exports = class extends Component
constructor: (props) ->
super props
@state =
loaded: false
render: ->
div className: 'fullscreen',
if not @state.loaded
ce Splash, msg: [
'(loading)', 'made with love for', 'PI:NAME:<NAME>END_PI'
].map (s, i) ->
div key: i, s
ce SolarSystem,
onLoaded: => @setState loaded: true
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.999912440776825,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/profile-page/main.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { AccountStanding } from './account-standing'
import { ExtraTab } from './extra-tab'
import { Beatmaps } from './beatmaps'
import { Header } from './header'
import { Historical } from './historical'
import { Kudosu } from './kudosu'
import { Medals } from './medals'
import { RecentActivity } from './recent-activity'
import { TopRanks } from './top-ranks'
import { UserPage } from './user-page'
import { BlockButton } from 'block-button'
import { NotificationBanner } from 'notification-banner'
import * as React from 'react'
import { a, button, div, i, li, span, ul } from 'react-dom-factories'
el = React.createElement
pages = document.getElementsByClassName("js-switchable-mode-page--scrollspy")
pagesOffset = document.getElementsByClassName("js-switchable-mode-page--scrollspy-offset")
currentLocation = ->
"#{document.location.pathname}#{document.location.search}"
export class Main extends React.PureComponent
constructor: (props) ->
super props
@tabs = React.createRef()
@pages = React.createRef()
@state = JSON.parse(props.container.dataset.profilePageState ? null)
@restoredState = @state?
if !@restoredState
page = location.hash.slice(1)
@initialPage = page if page?
@state =
currentMode: props.currentMode
user: props.user
userPage:
html: props.userPage.html
initialRaw: props.userPage.raw
raw: props.userPage.raw
editing: false
selection: [0, 0]
profileOrder: props.user.profile_order[..]
recentActivity: @props.extras.recentActivity
scoresBest: @props.extras.scoresBest
scoresFirsts: @props.extras.scoresFirsts
scoresRecent: @props.extras.scoresRecent
beatmapPlaycounts: @props.extras.beatmapPlaycounts
favouriteBeatmapsets: @props.extras.favouriteBeatmapsets
rankedAndApprovedBeatmapsets: @props.extras.rankedAndApprovedBeatmapsets
lovedBeatmapsets: @props.extras.lovedBeatmapsets
unrankedBeatmapsets: @props.extras.unrankedBeatmapsets
graveyardBeatmapsets: @props.extras.graveyardBeatmapsets
recentlyReceivedKudosu: @props.extras.recentlyReceivedKudosu
showMorePagination: {}
for own elem, perPage of @props.perPage
@state.showMorePagination[elem] ?= {}
@state.showMorePagination[elem].hasMore = @state[elem].length > perPage
if @state.showMorePagination[elem].hasMore
@state[elem].pop()
componentDidMount: =>
$.subscribe 'user:update.profilePage', @userUpdate
$.subscribe 'user:page:update.profilePage', @userPageUpdate
$.subscribe 'profile:showMore.profilePage', @showMore
$.subscribe 'profile:page:jump.profilePage', @pageJump
$(window).on 'throttled-scroll.profilePage', @pageScan
$(document).on 'turbolinks:before-cache.profilePage', @saveStateToContainer
$(@pages.current).sortable
cursor: 'move'
handle: '.js-profile-page-extra--sortable-handle'
items: '.js-sortable--page'
revert: 150
scrollSpeed: 10
update: @updateOrder
$(@tabs.current).sortable
containment: 'parent'
cursor: 'move'
disabled: !@props.withEdit
items: '.js-sortable--tab'
revert: 150
scrollSpeed: 0
update: @updateOrder
start: =>
# Somehow click event still goes through when dragging.
# This prevents triggering @tabClick.
Timeout.clear @draggingTabTimeout
@draggingTab = true
stop: =>
@draggingTabTimeout = Timeout.set 500, => @draggingTab = false
osu.pageChange()
@modeScrollUrl = currentLocation()
if !@restoredState
Timeout.set 0, => @pageJump null, @initialPage
componentWillUnmount: =>
$.unsubscribe '.profilePage'
$(window).off '.profilePage'
for sortable in [@pages, @tabs]
$(sortable.current).sortable 'destroy'
$(window).stop()
Timeout.clear @modeScrollTimeout
  render: =>
    # Compute which extra pages to show: bots only get 'me'; everyone else
    # gets their configured order, plus account_standing when there is
    # account history.
    if @props.user.is_bot
      profileOrder = ['me']
    else
      profileOrder = @state.profileOrder.slice()
      profileOrder.push 'account_standing' if !_.isEmpty @state.user.account_history
    # Hide the 'me' page when the user page text is empty and not editable.
    if @state.userPage.initialRaw.trim() == '' && !@props.withEdit
      _.pull profileOrder, 'me'
    # Blocked profiles render masked behind a warning banner until the
    # viewer opts in via forceShow.
    isBlocked = _.find(currentUser.blocks, target_id: @state.user.id)
    div
      className: 'osu-layout__no-scroll' if isBlocked && !@state.forceShow
      if isBlocked
        div className: 'osu-page',
          el NotificationBanner,
            type: 'warning'
            title: osu.trans('users.blocks.banner_text')
            message:
              div className: 'grid-items grid-items--notification-banner-buttons',
                div null,
                  el BlockButton, userId: @props.user.id
                div null,
                  button
                    type: 'button'
                    className: 'textual-button'
                    onClick: =>
                      @setState forceShow: !@state.forceShow
                    span {},
                      i className: 'textual-button__icon fas fa-low-vision'
                      " "
                      if @state.forceShow
                        osu.trans('users.blocks.hide_profile')
                      else
                        osu.trans('users.blocks.show_profile')
      div className: "osu-layout osu-layout--full#{if isBlocked && !@state.forceShow then ' osu-layout--masked' else ''}",
        el Header,
          user: @state.user
          stats: @state.user.statistics
          currentMode: @state.currentMode
          withEdit: @props.withEdit
          userAchievements: @props.userAchievements
        div
          className: 'hidden-xs page-extra-tabs page-extra-tabs--profile-page js-switchable-mode-page--scrollspy-offset'
          if profileOrder.length > 1
            div className: 'osu-page',
              div
                className: 'page-mode page-mode--profile-page-extra'
                ref: @tabs
                for m in profileOrder
                  a
                    className: "page-mode__item #{'js-sortable--tab' if @isSortablePage m}"
                    key: m
                    'data-page-id': m
                    onClick: @tabClick
                    href: "##{m}"
                    el ExtraTab,
                      page: m
                      currentPage: @state.currentPage
                      currentMode: @state.currentMode
        div
          className: 'osu-layout__section osu-layout__section--users-extra'
          div
            className: 'osu-layout__row'
            ref: @pages
            @extraPage name for name in profileOrder
  extraPage: (name) =>
    # Render one extra page section: wrap the page component in a scrollspy
    # container (sortable when the page is user-orderable) and record a DOM
    # ref in @extraPages so pageJump can scroll to it later.
    {extraClass, props, component} = @extraPageParams name
    topClassName = 'js-switchable-mode-page--scrollspy js-switchable-mode-page--page'
    topClassName += ' js-sortable--page' if @isSortablePage name
    props.withEdit = @props.withEdit
    props.name = name
    @extraPages ?= {}
    div
      key: name
      'data-page-id': name
      className: "#{topClassName} #{extraClass}"
      ref: (el) => @extraPages[name] = el
      el component, props
  extraPageParams: (name) =>
    # Map a page name to the component that renders it and the props it
    # pulls from state. Note no case currently sets extraClass, so the
    # destructured value in extraPage is undefined for every page.
    switch name
      when 'me'
        props:
          userPage: @state.userPage
          user: @state.user
        component: UserPage
      when 'recent_activity'
        props:
          pagination: @state.showMorePagination
          recentActivity: @state.recentActivity
          user: @state.user
        component: RecentActivity
      when 'kudosu'
        props:
          user: @state.user
          recentlyReceivedKudosu: @state.recentlyReceivedKudosu
          pagination: @state.showMorePagination
        component: Kudosu
      when 'top_ranks'
        props:
          user: @state.user
          scoresBest: @state.scoresBest
          scoresFirsts: @state.scoresFirsts
          currentMode: @state.currentMode
          pagination: @state.showMorePagination
        component: TopRanks
      when 'beatmaps'
        props:
          user: @state.user
          favouriteBeatmapsets: @state.favouriteBeatmapsets
          rankedAndApprovedBeatmapsets: @state.rankedAndApprovedBeatmapsets
          lovedBeatmapsets: @state.lovedBeatmapsets
          unrankedBeatmapsets: @state.unrankedBeatmapsets
          graveyardBeatmapsets: @state.graveyardBeatmapsets
          counts:
            favouriteBeatmapsets: @state.user.favourite_beatmapset_count
            rankedAndApprovedBeatmapsets: @state.user.ranked_and_approved_beatmapset_count
            lovedBeatmapsets: @state.user.loved_beatmapset_count
            unrankedBeatmapsets: @state.user.unranked_beatmapset_count
            graveyardBeatmapsets: @state.user.graveyard_beatmapset_count
          pagination: @state.showMorePagination
        component: Beatmaps
      when 'medals'
        props:
          achievements: @props.achievements
          userAchievements: @props.userAchievements
          currentMode: @state.currentMode
          user: @state.user
        component: Medals
      when 'historical'
        props:
          beatmapPlaycounts: @state.beatmapPlaycounts
          scoresRecent: @state.scoresRecent
          user: @state.user
          currentMode: @state.currentMode
          pagination: @state.showMorePagination
        component: Historical
      when 'account_standing'
        props:
          user: @state.user
        component: AccountStanding
  showMore: (e, {name, url, perPage = 50}) =>
    # Fetch the next chunk of the paginated list stored in @state[name].
    # Requests perPage + 1 entries and pops the extra one, so hasMore can
    # be derived without a separate count request. Loading/hasMore flags
    # live in @state.showMorePagination[name].
    offset = @state[name].length
    paginationState = _.cloneDeep @state.showMorePagination
    paginationState[name] ?= {}
    paginationState[name].loading = true
    @setState showMorePagination: paginationState, ->
      $.get osu.updateQueryString(url, offset: offset, limit: perPage + 1), (data) =>
        state = _.cloneDeep(@state[name]).concat(data)
        hasMore = data.length > perPage
        # Drop the probe entry; it belongs to the next chunk.
        state.pop() if hasMore
        paginationState = _.cloneDeep @state.showMorePagination
        paginationState[name].loading = false
        paginationState[name].hasMore = hasMore
        @setState
          "#{name}": state
          showMorePagination: paginationState
      .catch (error) =>
        # On failure only clear the loading flag; the list stays as-is.
        osu.ajaxError error
        paginationState = _.cloneDeep @state.showMorePagination
        paginationState[name].loading = false
        @setState
          showMorePagination: paginationState
  pageJump: (_e, page) =>
    # Smooth-scroll to the named extra page and mark it current. @scrolling
    # suppresses pageScan while the animation runs.
    if page == 'main'
      @setCurrentPage null, page
      return
    target = $(@extraPages[page])
    # if invalid page is specified, scan current position
    if target.length == 0
      @pageScan()
      return
    # Don't bother scanning the current position.
    # The result will be wrong when target page is too short anyway.
    @scrolling = true
    Timeout.clear @modeScrollTimeout
    # count for the tabs height; assume pageJump always causes the header to be pinned
    # otherwise the calculation needs another phase and gets a bit messy.
    offsetTop = target.offset().top - pagesOffset[0].getBoundingClientRect().height
    $(window).stop().scrollTo window.stickyHeader.scrollOffset(offsetTop), 500,
      onAfter: =>
        # Manually set the mode to avoid confusion (wrong highlight).
        # Scrolling will obviously break it but that's unfortunate result
        # from having the scrollspy marker at middle of page.
        @setCurrentPage null, page, =>
          # Doesn't work:
          # - part of state (callback, part of mode setting)
          # - simple variable in callback
          # Both still change the switch too soon.
          @modeScrollTimeout = Timeout.set 100, => @scrolling = false
  pageScan: =>
    # Scrollspy: determine which extra page currently sits under the pinned
    # tab bar and mark it as the current page. Skipped while pageJump is
    # animating or after navigating away from this URL.
    return if @modeScrollUrl != currentLocation()
    return if @scrolling
    return if pages.length == 0
    anchorHeight = pagesOffset[0].getBoundingClientRect().height
    # At the very bottom of the document the last page always wins,
    # regardless of how short it is.
    if osu.bottomPage()
      @setCurrentPage null, _.last(pages).dataset.pageId
      return
    for page in pages
      pageDims = page.getBoundingClientRect()
      # A page counts as current once 75% of it (capped at 200px) has
      # scrolled past the anchor line.
      pageBottom = pageDims.bottom - Math.min(pageDims.height * 0.75, 200)
      continue unless pageBottom > anchorHeight
      @setCurrentPage null, page.dataset.pageId
      return
    # No page matched: CoffeeScript leaks the loop variable, so `page`
    # here is the last entry — fall back to the final page.
    @setCurrentPage null, page.dataset.pageId
saveStateToContainer: =>
@props.container.dataset.profilePageState = JSON.stringify(@state)
setCurrentPage: (_e, page, extraCallback) =>
callback = =>
extraCallback?()
@setHash?()
if @state.currentPage == page
return callback()
@setState currentPage: page, callback
tabClick: (e) =>
e.preventDefault()
# See $(@tabs.current).sortable.
return if @draggingTab
@pageJump null, e.currentTarget.dataset.pageId
  updateOrder: (event) =>
    # Persist a new page/tab order produced by jQuery UI sortable. The DOM
    # move is cancelled and the order re-applied through React state, then
    # saved to the server; on failure the order from the server-side user
    # object is restored.
    $elems = $(event.target)
    newOrder = $elems.sortable('toArray', attribute: 'data-page-id')
    LoadingOverlay.show()
    # Let React own the DOM: revert sortable's move, render from state instead.
    $elems.sortable('cancel')
    @setState profileOrder: newOrder, =>
      $.ajax laroute.route('account.options'),
        method: 'PUT'
        dataType: 'JSON'
        data:
          user_profile_customization:
            extras_order: @state.profileOrder
      .done (userData) =>
        $.publish 'user:update', userData
      .fail (xhr) =>
        # emitAjaxError() returns a handler; invoke it with the failed xhr.
        osu.emitAjaxError() xhr
        @setState profileOrder: @state.user.profile_order
      .always LoadingOverlay.hide
userUpdate: (_e, user) =>
return @forceUpdate() if user?.id != @state.user.id
# this component needs full user object but sometimes this event only sends part of it
@setState user: _.assign({}, @state.user, user)
userPageUpdate: (_e, newUserPage) =>
currentUserPage = _.cloneDeep @state.userPage
@setState userPage: _.extend(currentUserPage, newUserPage)
validMode: (mode) =>
modes = BeatmapHelper.modes
if _.includes(modes, mode)
mode
else
modes[0]
isSortablePage: (page) ->
_.includes @state.profileOrder, page
| 138551 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { AccountStanding } from './account-standing'
import { ExtraTab } from './extra-tab'
import { Beatmaps } from './beatmaps'
import { Header } from './header'
import { Historical } from './historical'
import { Kudosu } from './kudosu'
import { Medals } from './medals'
import { RecentActivity } from './recent-activity'
import { TopRanks } from './top-ranks'
import { UserPage } from './user-page'
import { BlockButton } from 'block-button'
import { NotificationBanner } from 'notification-banner'
import * as React from 'react'
import { a, button, div, i, li, span, ul } from 'react-dom-factories'
el = React.createElement
pages = document.getElementsByClassName("js-switchable-mode-page--scrollspy")
pagesOffset = document.getElementsByClassName("js-switchable-mode-page--scrollspy-offset")
currentLocation = ->
"#{document.location.pathname}#{document.location.search}"
export class Main extends React.PureComponent
constructor: (props) ->
super props
@tabs = React.createRef()
@pages = React.createRef()
@state = JSON.parse(props.container.dataset.profilePageState ? null)
@restoredState = @state?
if !@restoredState
page = location.hash.slice(1)
@initialPage = page if page?
@state =
currentMode: props.currentMode
user: props.user
userPage:
html: props.userPage.html
initialRaw: props.userPage.raw
raw: props.userPage.raw
editing: false
selection: [0, 0]
profileOrder: props.user.profile_order[..]
recentActivity: @props.extras.recentActivity
scoresBest: @props.extras.scoresBest
scoresFirsts: @props.extras.scoresFirsts
scoresRecent: @props.extras.scoresRecent
beatmapPlaycounts: @props.extras.beatmapPlaycounts
favouriteBeatmapsets: @props.extras.favouriteBeatmapsets
rankedAndApprovedBeatmapsets: @props.extras.rankedAndApprovedBeatmapsets
lovedBeatmapsets: @props.extras.lovedBeatmapsets
unrankedBeatmapsets: @props.extras.unrankedBeatmapsets
graveyardBeatmapsets: @props.extras.graveyardBeatmapsets
recentlyReceivedKudosu: @props.extras.recentlyReceivedKudosu
showMorePagination: {}
for own elem, perPage of @props.perPage
@state.showMorePagination[elem] ?= {}
@state.showMorePagination[elem].hasMore = @state[elem].length > perPage
if @state.showMorePagination[elem].hasMore
@state[elem].pop()
componentDidMount: =>
$.subscribe 'user:update.profilePage', @userUpdate
$.subscribe 'user:page:update.profilePage', @userPageUpdate
$.subscribe 'profile:showMore.profilePage', @showMore
$.subscribe 'profile:page:jump.profilePage', @pageJump
$(window).on 'throttled-scroll.profilePage', @pageScan
$(document).on 'turbolinks:before-cache.profilePage', @saveStateToContainer
$(@pages.current).sortable
cursor: 'move'
handle: '.js-profile-page-extra--sortable-handle'
items: '.js-sortable--page'
revert: 150
scrollSpeed: 10
update: @updateOrder
$(@tabs.current).sortable
containment: 'parent'
cursor: 'move'
disabled: !@props.withEdit
items: '.js-sortable--tab'
revert: 150
scrollSpeed: 0
update: @updateOrder
start: =>
# Somehow click event still goes through when dragging.
# This prevents triggering @tabClick.
Timeout.clear @draggingTabTimeout
@draggingTab = true
stop: =>
@draggingTabTimeout = Timeout.set 500, => @draggingTab = false
osu.pageChange()
@modeScrollUrl = currentLocation()
if !@restoredState
Timeout.set 0, => @pageJump null, @initialPage
componentWillUnmount: =>
$.unsubscribe '.profilePage'
$(window).off '.profilePage'
for sortable in [@pages, @tabs]
$(sortable.current).sortable 'destroy'
$(window).stop()
Timeout.clear @modeScrollTimeout
render: =>
if @props.user.is_bot
profileOrder = ['me']
else
profileOrder = @state.profileOrder.slice()
profileOrder.push 'account_standing' if !_.isEmpty @state.user.account_history
if @state.userPage.initialRaw.trim() == '' && !@props.withEdit
_.pull profileOrder, 'me'
isBlocked = _.find(currentUser.blocks, target_id: @state.user.id)
div
className: 'osu-layout__no-scroll' if isBlocked && !@state.forceShow
if isBlocked
div className: 'osu-page',
el NotificationBanner,
type: 'warning'
title: osu.trans('users.blocks.banner_text')
message:
div className: 'grid-items grid-items--notification-banner-buttons',
div null,
el BlockButton, userId: @props.user.id
div null,
button
type: 'button'
className: 'textual-button'
onClick: =>
@setState forceShow: !@state.forceShow
span {},
i className: 'textual-button__icon fas fa-low-vision'
" "
if @state.forceShow
osu.trans('users.blocks.hide_profile')
else
osu.trans('users.blocks.show_profile')
div className: "osu-layout osu-layout--full#{if isBlocked && !@state.forceShow then ' osu-layout--masked' else ''}",
el Header,
user: @state.user
stats: @state.user.statistics
currentMode: @state.currentMode
withEdit: @props.withEdit
userAchievements: @props.userAchievements
div
className: 'hidden-xs page-extra-tabs page-extra-tabs--profile-page js-switchable-mode-page--scrollspy-offset'
if profileOrder.length > 1
div className: 'osu-page',
div
className: 'page-mode page-mode--profile-page-extra'
ref: @tabs
for m in profileOrder
a
className: "page-mode__item #{'js-sortable--tab' if @isSortablePage m}"
key: m
'data-page-id': m
onClick: @tabClick
href: "##{m}"
el ExtraTab,
page: m
currentPage: @state.currentPage
currentMode: @state.currentMode
div
className: 'osu-layout__section osu-layout__section--users-extra'
div
className: 'osu-layout__row'
ref: @pages
@extraPage name for name in profileOrder
extraPage: (name) =>
{extraClass, props, component} = @extraPageParams name
topClassName = 'js-switchable-mode-page--scrollspy js-switchable-mode-page--page'
topClassName += ' js-sortable--page' if @isSortablePage name
props.withEdit = @props.withEdit
props.name = name
@extraPages ?= {}
div
key: name
'data-page-id': name
className: "#{topClassName} #{extraClass}"
ref: (el) => @extraPages[name] = el
el component, props
extraPageParams: (name) =>
switch name
when 'me'
props:
userPage: @state.userPage
user: @state.user
component: UserPage
when 'recent_activity'
props:
pagination: @state.showMorePagination
recentActivity: @state.recentActivity
user: @state.user
component: RecentActivity
when 'kudosu'
props:
user: @state.user
recentlyReceivedKudosu: @state.recentlyReceivedKudosu
pagination: @state.showMorePagination
component: Kudosu
when 'top_ranks'
props:
user: @state.user
scoresBest: @state.scoresBest
scoresFirsts: @state.scoresFirsts
currentMode: @state.currentMode
pagination: @state.showMorePagination
component: TopRanks
when 'beatmaps'
props:
user: @state.user
favouriteBeatmapsets: @state.favouriteBeatmapsets
rankedAndApprovedBeatmapsets: @state.rankedAndApprovedBeatmapsets
lovedBeatmapsets: @state.lovedBeatmapsets
unrankedBeatmapsets: @state.unrankedBeatmapsets
graveyardBeatmapsets: @state.graveyardBeatmapsets
counts:
favouriteBeatmapsets: @state.user.favourite_beatmapset_count
rankedAndApprovedBeatmapsets: @state.user.ranked_and_approved_beatmapset_count
lovedBeatmapsets: @state.user.loved_beatmapset_count
unrankedBeatmapsets: @state.user.unranked_beatmapset_count
graveyardBeatmapsets: @state.user.graveyard_beatmapset_count
pagination: @state.showMorePagination
component: Beatmaps
when 'medals'
props:
achievements: @props.achievements
userAchievements: @props.userAchievements
currentMode: @state.currentMode
user: @state.user
component: Medals
when 'historical'
props:
beatmapPlaycounts: @state.beatmapPlaycounts
scoresRecent: @state.scoresRecent
user: @state.user
currentMode: @state.currentMode
pagination: @state.showMorePagination
component: Historical
when 'account_standing'
props:
user: @state.user
component: AccountStanding
showMore: (e, {name, url, perPage = 50}) =>
offset = @state[name].length
paginationState = _.cloneDeep @state.showMorePagination
paginationState[name] ?= {}
paginationState[name].loading = true
@setState showMorePagination: paginationState, ->
$.get osu.updateQueryString(url, offset: offset, limit: perPage + 1), (data) =>
state = _.cloneDeep(@state[name]).concat(data)
hasMore = data.length > perPage
state.pop() if hasMore
paginationState = _.cloneDeep @state.showMorePagination
paginationState[name].loading = false
paginationState[name].hasMore = hasMore
@setState
"#{name}": state
showMorePagination: paginationState
.catch (error) =>
osu.ajaxError error
paginationState = _.cloneDeep @state.showMorePagination
paginationState[name].loading = false
@setState
showMorePagination: paginationState
pageJump: (_e, page) =>
if page == 'main'
@setCurrentPage null, page
return
target = $(@extraPages[page])
# if invalid page is specified, scan current position
if target.length == 0
@pageScan()
return
# Don't bother scanning the current position.
# The result will be wrong when target page is too short anyway.
@scrolling = true
Timeout.clear @modeScrollTimeout
# count for the tabs height; assume pageJump always causes the header to be pinned
# otherwise the calculation needs another phase and gets a bit messy.
offsetTop = target.offset().top - pagesOffset[0].getBoundingClientRect().height
$(window).stop().scrollTo window.stickyHeader.scrollOffset(offsetTop), 500,
onAfter: =>
# Manually set the mode to avoid confusion (wrong highlight).
# Scrolling will obviously break it but that's unfortunate result
# from having the scrollspy marker at middle of page.
@setCurrentPage null, page, =>
# Doesn't work:
# - part of state (callback, part of mode setting)
# - simple variable in callback
# Both still change the switch too soon.
@modeScrollTimeout = Timeout.set 100, => @scrolling = false
pageScan: =>
return if @modeScrollUrl != currentLocation()
return if @scrolling
return if pages.length == 0
anchorHeight = pagesOffset[0].getBoundingClientRect().height
if osu.bottomPage()
@setCurrentPage null, _.last(pages).dataset.pageId
return
for page in pages
pageDims = page.getBoundingClientRect()
pageBottom = pageDims.bottom - Math.min(pageDims.height * 0.75, 200)
continue unless pageBottom > anchorHeight
@setCurrentPage null, page.dataset.pageId
return
@setCurrentPage null, page.dataset.pageId
saveStateToContainer: =>
@props.container.dataset.profilePageState = JSON.stringify(@state)
setCurrentPage: (_e, page, extraCallback) =>
callback = =>
extraCallback?()
@setHash?()
if @state.currentPage == page
return callback()
@setState currentPage: page, callback
tabClick: (e) =>
e.preventDefault()
# See $(@tabs.current).sortable.
return if @draggingTab
@pageJump null, e.currentTarget.dataset.pageId
updateOrder: (event) =>
$elems = $(event.target)
newOrder = $elems.sortable('toArray', attribute: 'data-page-id')
LoadingOverlay.show()
$elems.sortable('cancel')
@setState profileOrder: newOrder, =>
$.ajax laroute.route('account.options'),
method: 'PUT'
dataType: 'JSON'
data:
user_profile_customization:
extras_order: @state.profileOrder
.done (userData) =>
$.publish 'user:update', userData
.fail (xhr) =>
osu.emitAjaxError() xhr
@setState profileOrder: @state.user.profile_order
.always LoadingOverlay.hide
userUpdate: (_e, user) =>
return @forceUpdate() if user?.id != @state.user.id
# this component needs full user object but sometimes this event only sends part of it
@setState user: _.assign({}, @state.user, user)
userPageUpdate: (_e, newUserPage) =>
currentUserPage = _.cloneDeep @state.userPage
@setState userPage: _.extend(currentUserPage, newUserPage)
validMode: (mode) =>
modes = BeatmapHelper.modes
if _.includes(modes, mode)
mode
else
modes[0]
isSortablePage: (page) ->
_.includes @state.profileOrder, page
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { AccountStanding } from './account-standing'
import { ExtraTab } from './extra-tab'
import { Beatmaps } from './beatmaps'
import { Header } from './header'
import { Historical } from './historical'
import { Kudosu } from './kudosu'
import { Medals } from './medals'
import { RecentActivity } from './recent-activity'
import { TopRanks } from './top-ranks'
import { UserPage } from './user-page'
import { BlockButton } from 'block-button'
import { NotificationBanner } from 'notification-banner'
import * as React from 'react'
import { a, button, div, i, li, span, ul } from 'react-dom-factories'
el = React.createElement
pages = document.getElementsByClassName("js-switchable-mode-page--scrollspy")
pagesOffset = document.getElementsByClassName("js-switchable-mode-page--scrollspy-offset")
currentLocation = ->
"#{document.location.pathname}#{document.location.search}"
export class Main extends React.PureComponent
constructor: (props) ->
super props
@tabs = React.createRef()
@pages = React.createRef()
@state = JSON.parse(props.container.dataset.profilePageState ? null)
@restoredState = @state?
if !@restoredState
page = location.hash.slice(1)
@initialPage = page if page?
@state =
currentMode: props.currentMode
user: props.user
userPage:
html: props.userPage.html
initialRaw: props.userPage.raw
raw: props.userPage.raw
editing: false
selection: [0, 0]
profileOrder: props.user.profile_order[..]
recentActivity: @props.extras.recentActivity
scoresBest: @props.extras.scoresBest
scoresFirsts: @props.extras.scoresFirsts
scoresRecent: @props.extras.scoresRecent
beatmapPlaycounts: @props.extras.beatmapPlaycounts
favouriteBeatmapsets: @props.extras.favouriteBeatmapsets
rankedAndApprovedBeatmapsets: @props.extras.rankedAndApprovedBeatmapsets
lovedBeatmapsets: @props.extras.lovedBeatmapsets
unrankedBeatmapsets: @props.extras.unrankedBeatmapsets
graveyardBeatmapsets: @props.extras.graveyardBeatmapsets
recentlyReceivedKudosu: @props.extras.recentlyReceivedKudosu
showMorePagination: {}
for own elem, perPage of @props.perPage
@state.showMorePagination[elem] ?= {}
@state.showMorePagination[elem].hasMore = @state[elem].length > perPage
if @state.showMorePagination[elem].hasMore
@state[elem].pop()
componentDidMount: =>
$.subscribe 'user:update.profilePage', @userUpdate
$.subscribe 'user:page:update.profilePage', @userPageUpdate
$.subscribe 'profile:showMore.profilePage', @showMore
$.subscribe 'profile:page:jump.profilePage', @pageJump
$(window).on 'throttled-scroll.profilePage', @pageScan
$(document).on 'turbolinks:before-cache.profilePage', @saveStateToContainer
$(@pages.current).sortable
cursor: 'move'
handle: '.js-profile-page-extra--sortable-handle'
items: '.js-sortable--page'
revert: 150
scrollSpeed: 10
update: @updateOrder
$(@tabs.current).sortable
containment: 'parent'
cursor: 'move'
disabled: !@props.withEdit
items: '.js-sortable--tab'
revert: 150
scrollSpeed: 0
update: @updateOrder
start: =>
# Somehow click event still goes through when dragging.
# This prevents triggering @tabClick.
Timeout.clear @draggingTabTimeout
@draggingTab = true
stop: =>
@draggingTabTimeout = Timeout.set 500, => @draggingTab = false
osu.pageChange()
@modeScrollUrl = currentLocation()
if !@restoredState
Timeout.set 0, => @pageJump null, @initialPage
componentWillUnmount: =>
$.unsubscribe '.profilePage'
$(window).off '.profilePage'
for sortable in [@pages, @tabs]
$(sortable.current).sortable 'destroy'
$(window).stop()
Timeout.clear @modeScrollTimeout
render: =>
if @props.user.is_bot
profileOrder = ['me']
else
profileOrder = @state.profileOrder.slice()
profileOrder.push 'account_standing' if !_.isEmpty @state.user.account_history
if @state.userPage.initialRaw.trim() == '' && !@props.withEdit
_.pull profileOrder, 'me'
isBlocked = _.find(currentUser.blocks, target_id: @state.user.id)
div
className: 'osu-layout__no-scroll' if isBlocked && !@state.forceShow
if isBlocked
div className: 'osu-page',
el NotificationBanner,
type: 'warning'
title: osu.trans('users.blocks.banner_text')
message:
div className: 'grid-items grid-items--notification-banner-buttons',
div null,
el BlockButton, userId: @props.user.id
div null,
button
type: 'button'
className: 'textual-button'
onClick: =>
@setState forceShow: !@state.forceShow
span {},
i className: 'textual-button__icon fas fa-low-vision'
" "
if @state.forceShow
osu.trans('users.blocks.hide_profile')
else
osu.trans('users.blocks.show_profile')
div className: "osu-layout osu-layout--full#{if isBlocked && !@state.forceShow then ' osu-layout--masked' else ''}",
el Header,
user: @state.user
stats: @state.user.statistics
currentMode: @state.currentMode
withEdit: @props.withEdit
userAchievements: @props.userAchievements
div
className: 'hidden-xs page-extra-tabs page-extra-tabs--profile-page js-switchable-mode-page--scrollspy-offset'
if profileOrder.length > 1
div className: 'osu-page',
div
className: 'page-mode page-mode--profile-page-extra'
ref: @tabs
for m in profileOrder
a
className: "page-mode__item #{'js-sortable--tab' if @isSortablePage m}"
key: m
'data-page-id': m
onClick: @tabClick
href: "##{m}"
el ExtraTab,
page: m
currentPage: @state.currentPage
currentMode: @state.currentMode
div
className: 'osu-layout__section osu-layout__section--users-extra'
div
className: 'osu-layout__row'
ref: @pages
@extraPage name for name in profileOrder
extraPage: (name) =>
{extraClass, props, component} = @extraPageParams name
topClassName = 'js-switchable-mode-page--scrollspy js-switchable-mode-page--page'
topClassName += ' js-sortable--page' if @isSortablePage name
props.withEdit = @props.withEdit
props.name = name
@extraPages ?= {}
div
key: name
'data-page-id': name
className: "#{topClassName} #{extraClass}"
ref: (el) => @extraPages[name] = el
el component, props
extraPageParams: (name) =>
switch name
when 'me'
props:
userPage: @state.userPage
user: @state.user
component: UserPage
when 'recent_activity'
props:
pagination: @state.showMorePagination
recentActivity: @state.recentActivity
user: @state.user
component: RecentActivity
when 'kudosu'
props:
user: @state.user
recentlyReceivedKudosu: @state.recentlyReceivedKudosu
pagination: @state.showMorePagination
component: Kudosu
when 'top_ranks'
props:
user: @state.user
scoresBest: @state.scoresBest
scoresFirsts: @state.scoresFirsts
currentMode: @state.currentMode
pagination: @state.showMorePagination
component: TopRanks
when 'beatmaps'
props:
user: @state.user
favouriteBeatmapsets: @state.favouriteBeatmapsets
rankedAndApprovedBeatmapsets: @state.rankedAndApprovedBeatmapsets
lovedBeatmapsets: @state.lovedBeatmapsets
unrankedBeatmapsets: @state.unrankedBeatmapsets
graveyardBeatmapsets: @state.graveyardBeatmapsets
counts:
favouriteBeatmapsets: @state.user.favourite_beatmapset_count
rankedAndApprovedBeatmapsets: @state.user.ranked_and_approved_beatmapset_count
lovedBeatmapsets: @state.user.loved_beatmapset_count
unrankedBeatmapsets: @state.user.unranked_beatmapset_count
graveyardBeatmapsets: @state.user.graveyard_beatmapset_count
pagination: @state.showMorePagination
component: Beatmaps
when 'medals'
props:
achievements: @props.achievements
userAchievements: @props.userAchievements
currentMode: @state.currentMode
user: @state.user
component: Medals
when 'historical'
props:
beatmapPlaycounts: @state.beatmapPlaycounts
scoresRecent: @state.scoresRecent
user: @state.user
currentMode: @state.currentMode
pagination: @state.showMorePagination
component: Historical
when 'account_standing'
props:
user: @state.user
component: AccountStanding
showMore: (e, {name, url, perPage = 50}) =>
offset = @state[name].length
paginationState = _.cloneDeep @state.showMorePagination
paginationState[name] ?= {}
paginationState[name].loading = true
@setState showMorePagination: paginationState, ->
$.get osu.updateQueryString(url, offset: offset, limit: perPage + 1), (data) =>
state = _.cloneDeep(@state[name]).concat(data)
hasMore = data.length > perPage
state.pop() if hasMore
paginationState = _.cloneDeep @state.showMorePagination
paginationState[name].loading = false
paginationState[name].hasMore = hasMore
@setState
"#{name}": state
showMorePagination: paginationState
.catch (error) =>
osu.ajaxError error
paginationState = _.cloneDeep @state.showMorePagination
paginationState[name].loading = false
@setState
showMorePagination: paginationState
pageJump: (_e, page) =>
if page == 'main'
@setCurrentPage null, page
return
target = $(@extraPages[page])
# if invalid page is specified, scan current position
if target.length == 0
@pageScan()
return
# Don't bother scanning the current position.
# The result will be wrong when target page is too short anyway.
@scrolling = true
Timeout.clear @modeScrollTimeout
# count for the tabs height; assume pageJump always causes the header to be pinned
# otherwise the calculation needs another phase and gets a bit messy.
offsetTop = target.offset().top - pagesOffset[0].getBoundingClientRect().height
$(window).stop().scrollTo window.stickyHeader.scrollOffset(offsetTop), 500,
onAfter: =>
# Manually set the mode to avoid confusion (wrong highlight).
# Scrolling will obviously break it but that's unfortunate result
# from having the scrollspy marker at middle of page.
@setCurrentPage null, page, =>
# Doesn't work:
# - part of state (callback, part of mode setting)
# - simple variable in callback
# Both still change the switch too soon.
@modeScrollTimeout = Timeout.set 100, => @scrolling = false
pageScan: =>
return if @modeScrollUrl != currentLocation()
return if @scrolling
return if pages.length == 0
anchorHeight = pagesOffset[0].getBoundingClientRect().height
if osu.bottomPage()
@setCurrentPage null, _.last(pages).dataset.pageId
return
for page in pages
pageDims = page.getBoundingClientRect()
pageBottom = pageDims.bottom - Math.min(pageDims.height * 0.75, 200)
continue unless pageBottom > anchorHeight
@setCurrentPage null, page.dataset.pageId
return
@setCurrentPage null, page.dataset.pageId
saveStateToContainer: =>
@props.container.dataset.profilePageState = JSON.stringify(@state)
setCurrentPage: (_e, page, extraCallback) =>
callback = =>
extraCallback?()
@setHash?()
if @state.currentPage == page
return callback()
@setState currentPage: page, callback
tabClick: (e) =>
e.preventDefault()
# See $(@tabs.current).sortable.
return if @draggingTab
@pageJump null, e.currentTarget.dataset.pageId
updateOrder: (event) =>
$elems = $(event.target)
newOrder = $elems.sortable('toArray', attribute: 'data-page-id')
LoadingOverlay.show()
$elems.sortable('cancel')
@setState profileOrder: newOrder, =>
$.ajax laroute.route('account.options'),
method: 'PUT'
dataType: 'JSON'
data:
user_profile_customization:
extras_order: @state.profileOrder
.done (userData) =>
$.publish 'user:update', userData
.fail (xhr) =>
osu.emitAjaxError() xhr
@setState profileOrder: @state.user.profile_order
.always LoadingOverlay.hide
userUpdate: (_e, user) =>
return @forceUpdate() if user?.id != @state.user.id
# this component needs full user object but sometimes this event only sends part of it
@setState user: _.assign({}, @state.user, user)
userPageUpdate: (_e, newUserPage) =>
currentUserPage = _.cloneDeep @state.userPage
@setState userPage: _.extend(currentUserPage, newUserPage)
validMode: (mode) =>
modes = BeatmapHelper.modes
if _.includes(modes, mode)
mode
else
modes[0]
isSortablePage: (page) ->
_.includes @state.profileOrder, page
|
[
{
"context": "s > new Date\n user.resetPasswordToken = token\n user.resetPasswordExpires = new Date(",
"end": 976,
"score": 0.9894664287567139,
"start": 971,
"tag": "PASSWORD",
"value": "token"
},
{
"context": " user: config.mailgun.user\n pass:... | app/controllers/forgot.coffee | webzepter/cleveroad_test_task | 2 | async = require 'async'
nodemailer = require 'nodemailer'
crypto = require 'crypto'
config = require '../../config/config'
module.exports = (app) ->
showEmailForm: (req, res) ->
res.render 'forgot/show'
sendMail: (req, res, next) ->
req.assert('email', 'Please enter a valid email address.').isEmail()
errors = req.validationErrors()
if errors
req.flash 'errors', errors
return res.redirect '/forgot'
async.waterfall [
(done) ->
crypto.randomBytes 20, (err, buf) ->
token = buf.toString('hex')
done err, token
(token, done) ->
app.models.user.findOne
email: req.body.email.toLowerCase()
.exec (err, user) ->
if !user
req.flash 'error', 'No account with that email address exists.'
return req.redirect '/forgot'
if !user.resetPasswordExpires || user.resetPasswordExpires > new Date
user.resetPasswordToken = token
user.resetPasswordExpires = new Date((Date.now() + 60 * 60 * 1000))
else
req.flash 'error', 'Your reset token is valid. Try again later'
return res.redirect '/forgot'
user.save (err) ->
done err, token, user
(token, user, done) ->
smtpTransport = nodemailer.createTransport
service: 'MailGun'
auth:
user: config.mailgun.user
pass: config.mailgun.password
mailOptions =
to: user.email
from: 'passwordreset@node-tt.herokuapp.com'
subject: 'Node.js Password Reset'
text: 'You are receiving this because you (or someone else) have ' +
'requested the reset of the password for your account.\n\n' +
'Please click on the following link, or paste this into your '+
'browser to complete the process:\n\n' +
'http://' + req.headers.host + '/reset/' + token + '\n\n' +
'If you did not request this, please ignore this email and your ' +
'password will remain unchanged.\n'
smtpTransport.sendMail mailOptions, (err) ->
req.flash 'info', 'An e-mail has been sent to ' + user.email +
' with further instructions.'
done err
], (err) ->
if err
return next err
res.redirect '/forgot'
showNewPasswordForm: (req, res) ->
app.models.user.findOne
resetPasswordToken: req.params.token
resetPasswordExpires:
'>': new Date
.exec (err, user) ->
if !user
req.flash 'error', 'Password reset token is invalid or has expired'
return res.redirect '/forgot'
res.render 'forgot/reset'
saveNewPassword: (req, res, next) ->
req.assert('password', '6 to 100 characters required').len(6, 100)
errors = req.validationErrors()
if errors
req.flash 'errors', errors
return res.redirect 'back'
async.waterfall [
(done) ->
app.models.user.findOne
resetPasswordToken: req.params.token
resetPasswordExpires:
'>': new Date
.exec (err, user) ->
if !user
req.flash 'error', 'Password reset token is invalid or has expired'
return res.redirect 'back'
user.password = req.body.password
user.resetPasswordToken = undefined
user.resetPasswordExpires = undefined
user.save (err) ->
if err
return next(err)
req.logIn user, (err) ->
done err, user
(user, done) ->
smtpTransport = nodemailer.createTransport
service: 'MailGun'
auth:
user: config.mailgun.user
pass: config.mailgun.password
mailOptions =
to: user.email
from: 'node-tt@herokuapp.com'
subject: 'Your password has been changed'
text: 'Hello,\n\n' +
'This is a confirmation that the password for your account ' +
user.email + ' has just been changed.\n'
smtpTransport.sendMail mailOptions, (err) ->
req.flash 'success', 'Success! Your password has been changed.'
done err
], (err) ->
if err
return next err
res.redirect '/'
| 89044 | async = require 'async'
nodemailer = require 'nodemailer'
crypto = require 'crypto'
config = require '../../config/config'
module.exports = (app) ->
showEmailForm: (req, res) ->
res.render 'forgot/show'
sendMail: (req, res, next) ->
req.assert('email', 'Please enter a valid email address.').isEmail()
errors = req.validationErrors()
if errors
req.flash 'errors', errors
return res.redirect '/forgot'
async.waterfall [
(done) ->
crypto.randomBytes 20, (err, buf) ->
token = buf.toString('hex')
done err, token
(token, done) ->
app.models.user.findOne
email: req.body.email.toLowerCase()
.exec (err, user) ->
if !user
req.flash 'error', 'No account with that email address exists.'
return req.redirect '/forgot'
if !user.resetPasswordExpires || user.resetPasswordExpires > new Date
user.resetPasswordToken = <PASSWORD>
user.resetPasswordExpires = new Date((Date.now() + 60 * 60 * 1000))
else
req.flash 'error', 'Your reset token is valid. Try again later'
return res.redirect '/forgot'
user.save (err) ->
done err, token, user
(token, user, done) ->
smtpTransport = nodemailer.createTransport
service: 'MailGun'
auth:
user: config.mailgun.user
pass: <PASSWORD>.mailgun.<PASSWORD>
mailOptions =
to: user.email
from: '<EMAIL>'
subject: 'Node.js Password Reset'
text: 'You are receiving this because you (or someone else) have ' +
'requested the reset of the password for your account.\n\n' +
'Please click on the following link, or paste this into your '+
'browser to complete the process:\n\n' +
'http://' + req.headers.host + '/reset/' + token + '\n\n' +
'If you did not request this, please ignore this email and your ' +
'password will remain unchanged.\n'
smtpTransport.sendMail mailOptions, (err) ->
req.flash 'info', 'An e-mail has been sent to ' + user.email +
' with further instructions.'
done err
], (err) ->
if err
return next err
res.redirect '/forgot'
showNewPasswordForm: (req, res) ->
app.models.user.findOne
resetPasswordToken: req.params.token
resetPasswordExpires:
'>': new Date
.exec (err, user) ->
if !user
req.flash 'error', 'Password reset token is invalid or has expired'
return res.redirect '/forgot'
res.render 'forgot/reset'
saveNewPassword: (req, res, next) ->
req.assert('password', '<PASSWORD>').len(6, 100)
errors = req.validationErrors()
if errors
req.flash 'errors', errors
return res.redirect 'back'
async.waterfall [
(done) ->
app.models.user.findOne
resetPasswordToken: <PASSWORD>.params.token
resetPasswordExpires:
'>': new Date
.exec (err, user) ->
if !user
req.flash 'error', 'Password reset token is invalid or has expired'
return res.redirect 'back'
user.password = <PASSWORD>
user.resetPasswordToken = <PASSWORD>
user.resetPasswordExpires = undefined
user.save (err) ->
if err
return next(err)
req.logIn user, (err) ->
done err, user
(user, done) ->
smtpTransport = nodemailer.createTransport
service: 'MailGun'
auth:
user: config.mailgun.user
pass: <PASSWORD>
mailOptions =
to: user.email
from: '<EMAIL>'
subject: 'Your password has been changed'
text: 'Hello,\n\n' +
'This is a confirmation that the password for your account ' +
user.email + ' has just been changed.\n'
smtpTransport.sendMail mailOptions, (err) ->
req.flash 'success', 'Success! Your password has been changed.'
done err
], (err) ->
if err
return next err
res.redirect '/'
| true | async = require 'async'
nodemailer = require 'nodemailer'
crypto = require 'crypto'
config = require '../../config/config'
module.exports = (app) ->
showEmailForm: (req, res) ->
res.render 'forgot/show'
sendMail: (req, res, next) ->
req.assert('email', 'Please enter a valid email address.').isEmail()
errors = req.validationErrors()
if errors
req.flash 'errors', errors
return res.redirect '/forgot'
async.waterfall [
(done) ->
crypto.randomBytes 20, (err, buf) ->
token = buf.toString('hex')
done err, token
(token, done) ->
app.models.user.findOne
email: req.body.email.toLowerCase()
.exec (err, user) ->
if !user
req.flash 'error', 'No account with that email address exists.'
return req.redirect '/forgot'
if !user.resetPasswordExpires || user.resetPasswordExpires > new Date
user.resetPasswordToken = PI:PASSWORD:<PASSWORD>END_PI
user.resetPasswordExpires = new Date((Date.now() + 60 * 60 * 1000))
else
req.flash 'error', 'Your reset token is valid. Try again later'
return res.redirect '/forgot'
user.save (err) ->
done err, token, user
(token, user, done) ->
smtpTransport = nodemailer.createTransport
service: 'MailGun'
auth:
user: config.mailgun.user
pass: PI:PASSWORD:<PASSWORD>END_PI.mailgun.PI:PASSWORD:<PASSWORD>END_PI
mailOptions =
to: user.email
from: 'PI:EMAIL:<EMAIL>END_PI'
subject: 'Node.js Password Reset'
text: 'You are receiving this because you (or someone else) have ' +
'requested the reset of the password for your account.\n\n' +
'Please click on the following link, or paste this into your '+
'browser to complete the process:\n\n' +
'http://' + req.headers.host + '/reset/' + token + '\n\n' +
'If you did not request this, please ignore this email and your ' +
'password will remain unchanged.\n'
smtpTransport.sendMail mailOptions, (err) ->
req.flash 'info', 'An e-mail has been sent to ' + user.email +
' with further instructions.'
done err
], (err) ->
if err
return next err
res.redirect '/forgot'
showNewPasswordForm: (req, res) ->
app.models.user.findOne
resetPasswordToken: req.params.token
resetPasswordExpires:
'>': new Date
.exec (err, user) ->
if !user
req.flash 'error', 'Password reset token is invalid or has expired'
return res.redirect '/forgot'
res.render 'forgot/reset'
saveNewPassword: (req, res, next) ->
req.assert('password', 'PI:PASSWORD:<PASSWORD>END_PI').len(6, 100)
errors = req.validationErrors()
if errors
req.flash 'errors', errors
return res.redirect 'back'
async.waterfall [
(done) ->
app.models.user.findOne
resetPasswordToken: PI:PASSWORD:<PASSWORD>END_PI.params.token
resetPasswordExpires:
'>': new Date
.exec (err, user) ->
if !user
req.flash 'error', 'Password reset token is invalid or has expired'
return res.redirect 'back'
user.password = PI:PASSWORD:<PASSWORD>END_PI
user.resetPasswordToken = PI:PASSWORD:<PASSWORD>END_PI
user.resetPasswordExpires = undefined
user.save (err) ->
if err
return next(err)
req.logIn user, (err) ->
done err, user
(user, done) ->
smtpTransport = nodemailer.createTransport
service: 'MailGun'
auth:
user: config.mailgun.user
pass: PI:PASSWORD:<PASSWORD>END_PI
mailOptions =
to: user.email
from: 'PI:EMAIL:<EMAIL>END_PI'
subject: 'Your password has been changed'
text: 'Hello,\n\n' +
'This is a confirmation that the password for your account ' +
user.email + ' has just been changed.\n'
smtpTransport.sendMail mailOptions, (err) ->
req.flash 'success', 'Success! Your password has been changed.'
done err
], (err) ->
if err
return next err
res.redirect '/'
|
[
{
"context": "t '.input-small', type: 'password', placeholder: 'Password', name: 'password', ->\n te",
"end": 1715,
"score": 0.9984337687492371,
"start": 1707,
"tag": "PASSWORD",
"value": "Password"
}
] | src/components/layout/authenticate.coffee | TranscendOfSypherus/docpad-plugin-minicms | 1 |
module.exports = ->
doctype 5
html ->
head ->
# Standard
meta charset: 'utf-8'
meta 'http-equiv': 'content-type', content: 'text/html; charset=utf-8'
meta name: 'viewport', content: 'width=device-width, initial-scale=1.0'
# Document
title @title
# Styles
link rel: 'stylesheet', href: '/'+@prefix+'/css/bootstrap.css'
link rel: 'stylesheet', href: '/'+@prefix+'/css/minicms.css'
# Scripts
script src: '/'+@prefix+'/js/jquery.js'
script src: '/'+@prefix+'/js/bootstrap.js'
body '#minicms', 'data-prefix': @prefix, ->
div '#navbar.navbar.navbar-inverse.navbar-fixed-top', ->
div '.navbar-inner', ->
div '.container', ->
ul '.nav', ->
li -> a href: '/', ->
span '.icon-home.icon-white', ->
text ' '
span '.text', -> 'Site'
li '.active', -> a href: '/'+@prefix, ->
span '.icon-pencil.icon-white', ->
text ' '
span '.text', -> 'Admin'
div '#content.layout-'+@layout, ->
div '#authenticate-page', ->
form '.form-inline', action: h(@url), method: 'POST', ->
input '.input-small', type: 'text', placeholder: 'Login', name: 'login', ->
text ' '
input '.input-small', type: 'password', placeholder: 'Password', name: 'password', ->
text ' '
button '.btn', type: 'submit', -> 'Sign in'
| 72267 |
module.exports = ->
doctype 5
html ->
head ->
# Standard
meta charset: 'utf-8'
meta 'http-equiv': 'content-type', content: 'text/html; charset=utf-8'
meta name: 'viewport', content: 'width=device-width, initial-scale=1.0'
# Document
title @title
# Styles
link rel: 'stylesheet', href: '/'+@prefix+'/css/bootstrap.css'
link rel: 'stylesheet', href: '/'+@prefix+'/css/minicms.css'
# Scripts
script src: '/'+@prefix+'/js/jquery.js'
script src: '/'+@prefix+'/js/bootstrap.js'
body '#minicms', 'data-prefix': @prefix, ->
div '#navbar.navbar.navbar-inverse.navbar-fixed-top', ->
div '.navbar-inner', ->
div '.container', ->
ul '.nav', ->
li -> a href: '/', ->
span '.icon-home.icon-white', ->
text ' '
span '.text', -> 'Site'
li '.active', -> a href: '/'+@prefix, ->
span '.icon-pencil.icon-white', ->
text ' '
span '.text', -> 'Admin'
div '#content.layout-'+@layout, ->
div '#authenticate-page', ->
form '.form-inline', action: h(@url), method: 'POST', ->
input '.input-small', type: 'text', placeholder: 'Login', name: 'login', ->
text ' '
input '.input-small', type: 'password', placeholder: '<PASSWORD>', name: 'password', ->
text ' '
button '.btn', type: 'submit', -> 'Sign in'
| true |
module.exports = ->
doctype 5
html ->
head ->
# Standard
meta charset: 'utf-8'
meta 'http-equiv': 'content-type', content: 'text/html; charset=utf-8'
meta name: 'viewport', content: 'width=device-width, initial-scale=1.0'
# Document
title @title
# Styles
link rel: 'stylesheet', href: '/'+@prefix+'/css/bootstrap.css'
link rel: 'stylesheet', href: '/'+@prefix+'/css/minicms.css'
# Scripts
script src: '/'+@prefix+'/js/jquery.js'
script src: '/'+@prefix+'/js/bootstrap.js'
body '#minicms', 'data-prefix': @prefix, ->
div '#navbar.navbar.navbar-inverse.navbar-fixed-top', ->
div '.navbar-inner', ->
div '.container', ->
ul '.nav', ->
li -> a href: '/', ->
span '.icon-home.icon-white', ->
text ' '
span '.text', -> 'Site'
li '.active', -> a href: '/'+@prefix, ->
span '.icon-pencil.icon-white', ->
text ' '
span '.text', -> 'Admin'
div '#content.layout-'+@layout, ->
div '#authenticate-page', ->
form '.form-inline', action: h(@url), method: 'POST', ->
input '.input-small', type: 'text', placeholder: 'Login', name: 'login', ->
text ' '
input '.input-small', type: 'password', placeholder: 'PI:PASSWORD:<PASSWORD>END_PI', name: 'password', ->
text ' '
button '.btn', type: 'submit', -> 'Sign in'
|
[
{
"context": "12-05-17T22:28:07+09:00</updated>\n <author><name>おぽこ</name></author>\n <generator uri=\"http://www.nico",
"end": 782,
"score": 0.9729166030883789,
"start": 779,
"tag": "NAME",
"value": "おぽこ"
},
{
"context": "7T22:28:07+09:00'\n @xml.author.should.equal 'おぽこ'\n\n... | test/mylist_atom.coffee | hdemon/nicoscraper.js | 0 | _ = require 'underscore'
_.str = require 'underscore.string'
_.mixin _.str.exports()
_.str.include 'Underscore.string', 'string'
require 'should'
sinon = require 'sinon'
nock = require 'nock'
fs = require 'fs'
NicoScraper = require '../production/nicoquery.js'
xml = '''
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="ja">
<title>マイリスト 【Oblivion】おっさんの大冒険‐ニコニコ動画</title>
<subtitle>ふふ マイリスを開いてしまいましたか^^</subtitle>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/mylist/15196568"/>
<link rel="self" type="application/atom+xml" href="http://www.nicovideo.jp/mylist/15196568?rss=atom"/>
<id>tag:nicovideo.jp,2009-10-11:/mylist/15196568</id>
<updated>2012-05-17T22:28:07+09:00</updated>
<author><name>おぽこ</name></author>
<generator uri="http://www.nicovideo.jp/">ニコニコ動画</generator>
<rights>(c) niwango, inc. All rights reserved.</rights>
<entry>
<title>【Oblivion】おっさんの大冒険1(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8481759"/>
<id>tag:nicovideo.jp,2009-10-11:/watch/1255238132</id>
<published>2009-10-11T14:28:25+09:00</published>
<updated>2009-10-11T14:28:25+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険1(ゆっくり実況)" src="http://tn-skr4.smilevideo.jp/smile?i=8481759" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの生き様をとくとごらんあれ!このミリオンをいつまでも大切にしたい。そう、いつまでも・・・。おっさんがここまでこれたのも、みなさまのおかげですm(__)b次回:sm8506034 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">12:28</strong>|<strong class="nico-info-date">2009年10月11日 14:15:35</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険2(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8506034"/>
<id>tag:nicovideo.jp,2009-10-13:/watch/1255439788</id>
<published>2009-10-13T22:35:36+09:00</published>
<updated>2009-10-13T22:35:36+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険2(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=8506034" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの旅はまだまだ続きます。一応魔法&武器無し縛りでいこうと思います前回:sm8481759 次回:sm8539721 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">16:53</strong>|<strong class="nico-info-date">2009年10月13日 22:16:30</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険3(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8539721"/>
<id>tag:nicovideo.jp,2009-10-17:/watch/1255776463</id>
<published>2009-10-17T19:48:42+09:00</published>
<updated>2009-10-17T19:48:42+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険3(ゆっくり実況)" src="http://tn-skr2.smilevideo.jp/smile?i=8539721" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの運命やいかに!?前回:sm8506034 次回:sm8606274 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">16:28</strong>|<strong class="nico-info-date">2009年10月17日 19:47:46</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険4(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8606274"/>
<id>tag:nicovideo.jp,2009-10-24:/watch/1256391663</id>
<published>2009-10-24T22:41:16+09:00</published>
<updated>2009-10-24T22:41:16+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険4(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=8606274" width="94" height="70" border="0"/></p><p class="nico-description">全国のおっさんに夢と希望を100万回のストーカー行為を働いたおっさんに栄光あれ!続編はじわじわ作ってますので、気長にお待ちくだされ。前回:sm8539721 次回:sm8811116 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">16:13</strong>|<strong class="nico-info-date">2009年10月24日 22:41:06</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険5(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8811116"/>
<id>tag:nicovideo.jp,2009-11-15:/watch/1258225310</id>
<published>2009-11-15T04:03:13+09:00</published>
<updated>2009-11-15T04:03:13+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険5(ゆっくり実況)" src="http://tn-skr1.smilevideo.jp/smile?i=8811116" width="94" height="70" border="0"/></p><p class="nico-description">シャバに出たおっさんに活目せよ…!前回:sm8606274 次回:sm9221216 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">11:46</strong>|<strong class="nico-info-date">2009年11月15日 04:01:52</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険6(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm9221216"/>
<id>tag:nicovideo.jp,2009-12-28:/watch/1261952691</id>
<published>2009-12-28T07:25:45+09:00</published>
<updated>2009-12-28T07:25:45+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険6(ゆっくり実況)" src="http://tn-skr1.smilevideo.jp/smile?i=9221216" width="94" height="70" border="0"/></p><p class="nico-description">おっさんとゴブリンとの戦いの行方は!?あけおめぐっつすっす前回:sm8811116 次回:sm10024081 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">13:21</strong>|<strong class="nico-info-date">2009年12月28日 07:24:54</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険7(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm10024081"/>
<id>tag:nicovideo.jp,2010-03-14:/watch/1268546681</id>
<published>2010-03-14T19:04:41+09:00</published>
<updated>2010-03-14T19:04:41+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険7(ゆっくり実況)" src="http://tn-skr2.smilevideo.jp/smile?i=10024081" width="94" height="70" border="0"/></p><p class="nico-description">おっさん自身の幸せとは…!?前回:sm9221216 次回:sm10659358 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:29</strong>|<strong class="nico-info-date">2010年03月14日 15:04:43</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険8(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm10659358"/>
<id>tag:nicovideo.jp,2010-05-09:/watch/1273345724</id>
<published>2010-05-09T04:11:46+09:00</published>
<updated>2010-05-09T04:11:46+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険8(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=10659358" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの守るべきものとは?前回:sm10024081 次回:sm17842779 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:47</strong>|<strong class="nico-info-date">2010年05月09日 04:08:44</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Skyrim】おっさんの大冒険9(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm17842779"/>
<id>tag:nicovideo.jp,2012-05-16:/watch/1337180095</id>
<published>2012-05-17T01:18:09+09:00</published>
<updated>2012-05-17T01:18:09+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Skyrim】おっさんの大冒険9(ゆっくり実況)" src="http://tn-skr4.smilevideo.jp/smile?i=17842779" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの戦うわけとは?前回:sm10659358 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:45</strong>|<strong class="nico-info-date">2012年05月16日 23:54:55</strong> 投稿</small></p>]]></content>
</entry>
</feed>
'''
describe "About MylistAtom class", ->
describe "when create an instance", ->
before (done) ->
@xml = new NicoScraper.Source.MylistAtom xml
done()
it "has a mylist property", ->
@xml.title.should.equal '【Oblivion】おっさんの大冒険'
@xml.subtitle.should.equal 'ふふ マイリスを開いてしまいましたか^^'
@xml.mylistId.should.equal 15196568
@xml.updated.should.equal '2012-05-17T22:28:07+09:00'
@xml.author.should.equal 'おぽこ'
it "has movies property in this mylist", ->
@xml.entry['sm8481759'].title.should.equal '【Oblivion】おっさんの大冒険1(ゆっくり実況)'
@xml.entry['sm8481759'].videoId.should.equal 'sm8481759'
@xml.entry['sm8481759'].timelikeId.should.equal 1255238132
@xml.entry['sm8481759'].thumbnailUrl.should.equal 'http://tn-skr4.smilevideo.jp/smile?i=8481759'
@xml.entry['sm8481759'].description.should.equal 'おっさんの生き様をとくとごらんあれ!このミリオンをいつまでも大切にしたい。そう、いつまでも・・・。おっさんがここまでこれたのも、みなさまのおかげですm(__)b次回:sm8506034 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。'
@xml.entry['sm8481759'].length.should.equal 748
@xml.entry['sm8481759'].infoDate.should.equal 1255238135
| 162248 | _ = require 'underscore'
_.str = require 'underscore.string'
_.mixin _.str.exports()
_.str.include 'Underscore.string', 'string'
require 'should'
sinon = require 'sinon'
nock = require 'nock'
fs = require 'fs'
NicoScraper = require '../production/nicoquery.js'
xml = '''
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="ja">
<title>マイリスト 【Oblivion】おっさんの大冒険‐ニコニコ動画</title>
<subtitle>ふふ マイリスを開いてしまいましたか^^</subtitle>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/mylist/15196568"/>
<link rel="self" type="application/atom+xml" href="http://www.nicovideo.jp/mylist/15196568?rss=atom"/>
<id>tag:nicovideo.jp,2009-10-11:/mylist/15196568</id>
<updated>2012-05-17T22:28:07+09:00</updated>
<author><name><NAME></name></author>
<generator uri="http://www.nicovideo.jp/">ニコニコ動画</generator>
<rights>(c) niwango, inc. All rights reserved.</rights>
<entry>
<title>【Oblivion】おっさんの大冒険1(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8481759"/>
<id>tag:nicovideo.jp,2009-10-11:/watch/1255238132</id>
<published>2009-10-11T14:28:25+09:00</published>
<updated>2009-10-11T14:28:25+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険1(ゆっくり実況)" src="http://tn-skr4.smilevideo.jp/smile?i=8481759" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの生き様をとくとごらんあれ!このミリオンをいつまでも大切にしたい。そう、いつまでも・・・。おっさんがここまでこれたのも、みなさまのおかげですm(__)b次回:sm8506034 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">12:28</strong>|<strong class="nico-info-date">2009年10月11日 14:15:35</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険2(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8506034"/>
<id>tag:nicovideo.jp,2009-10-13:/watch/1255439788</id>
<published>2009-10-13T22:35:36+09:00</published>
<updated>2009-10-13T22:35:36+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険2(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=8506034" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの旅はまだまだ続きます。一応魔法&武器無し縛りでいこうと思います前回:sm8481759 次回:sm8539721 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">16:53</strong>|<strong class="nico-info-date">2009年10月13日 22:16:30</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険3(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8539721"/>
<id>tag:nicovideo.jp,2009-10-17:/watch/1255776463</id>
<published>2009-10-17T19:48:42+09:00</published>
<updated>2009-10-17T19:48:42+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険3(ゆっくり実況)" src="http://tn-skr2.smilevideo.jp/smile?i=8539721" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの運命やいかに!?前回:sm8506034 次回:sm8606274 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">16:28</strong>|<strong class="nico-info-date">2009年10月17日 19:47:46</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険4(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8606274"/>
<id>tag:nicovideo.jp,2009-10-24:/watch/1256391663</id>
<published>2009-10-24T22:41:16+09:00</published>
<updated>2009-10-24T22:41:16+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険4(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=8606274" width="94" height="70" border="0"/></p><p class="nico-description">全国のおっさんに夢と希望を100万回のストーカー行為を働いたおっさんに栄光あれ!続編はじわじわ作ってますので、気長にお待ちくだされ。前回:sm8539721 次回:sm8811116 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">16:13</strong>|<strong class="nico-info-date">2009年10月24日 22:41:06</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険5(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8811116"/>
<id>tag:nicovideo.jp,2009-11-15:/watch/1258225310</id>
<published>2009-11-15T04:03:13+09:00</published>
<updated>2009-11-15T04:03:13+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険5(ゆっくり実況)" src="http://tn-skr1.smilevideo.jp/smile?i=8811116" width="94" height="70" border="0"/></p><p class="nico-description">シャバに出たおっさんに活目せよ…!前回:sm8606274 次回:sm9221216 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">11:46</strong>|<strong class="nico-info-date">2009年11月15日 04:01:52</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険6(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm9221216"/>
<id>tag:nicovideo.jp,2009-12-28:/watch/1261952691</id>
<published>2009-12-28T07:25:45+09:00</published>
<updated>2009-12-28T07:25:45+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険6(ゆっくり実況)" src="http://tn-skr1.smilevideo.jp/smile?i=9221216" width="94" height="70" border="0"/></p><p class="nico-description">おっさんとゴブリンとの戦いの行方は!?あけおめぐっつすっす前回:sm8811116 次回:sm10024081 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">13:21</strong>|<strong class="nico-info-date">2009年12月28日 07:24:54</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険7(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm10024081"/>
<id>tag:nicovideo.jp,2010-03-14:/watch/1268546681</id>
<published>2010-03-14T19:04:41+09:00</published>
<updated>2010-03-14T19:04:41+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険7(ゆっくり実況)" src="http://tn-skr2.smilevideo.jp/smile?i=10024081" width="94" height="70" border="0"/></p><p class="nico-description">おっさん自身の幸せとは…!?前回:sm9221216 次回:sm10659358 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:29</strong>|<strong class="nico-info-date">2010年03月14日 15:04:43</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険8(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm10659358"/>
<id>tag:nicovideo.jp,2010-05-09:/watch/1273345724</id>
<published>2010-05-09T04:11:46+09:00</published>
<updated>2010-05-09T04:11:46+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険8(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=10659358" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの守るべきものとは?前回:sm10024081 次回:sm17842779 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:47</strong>|<strong class="nico-info-date">2010年05月09日 04:08:44</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Skyrim】おっさんの大冒険9(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm17842779"/>
<id>tag:nicovideo.jp,2012-05-16:/watch/1337180095</id>
<published>2012-05-17T01:18:09+09:00</published>
<updated>2012-05-17T01:18:09+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Skyrim】おっさんの大冒険9(ゆっくり実況)" src="http://tn-skr4.smilevideo.jp/smile?i=17842779" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの戦うわけとは?前回:sm10659358 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:45</strong>|<strong class="nico-info-date">2012年05月16日 23:54:55</strong> 投稿</small></p>]]></content>
</entry>
</feed>
'''
describe "About MylistAtom class", ->
describe "when create an instance", ->
before (done) ->
@xml = new NicoScraper.Source.MylistAtom xml
done()
it "has a mylist property", ->
@xml.title.should.equal '【Oblivion】おっさんの大冒険'
@xml.subtitle.should.equal 'ふふ マイリスを開いてしまいましたか^^'
@xml.mylistId.should.equal 15196568
@xml.updated.should.equal '2012-05-17T22:28:07+09:00'
@xml.author.should.equal '<NAME>'
it "has movies property in this mylist", ->
@xml.entry['sm8481759'].title.should.equal '【Oblivion】おっさんの大冒険1(ゆっくり実況)'
@xml.entry['sm8481759'].videoId.should.equal 'sm8481759'
@xml.entry['sm8481759'].timelikeId.should.equal 1255238132
@xml.entry['sm8481759'].thumbnailUrl.should.equal 'http://tn-skr4.smilevideo.jp/smile?i=8481759'
@xml.entry['sm8481759'].description.should.equal 'おっさんの生き様をとくとごらんあれ!このミリオンをいつまでも大切にしたい。そう、いつまでも・・・。おっさんがここまでこれたのも、みなさまのおかげですm(__)b次回:sm8506034 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。'
@xml.entry['sm8481759'].length.should.equal 748
@xml.entry['sm8481759'].infoDate.should.equal 1255238135
| true | _ = require 'underscore'
_.str = require 'underscore.string'
_.mixin _.str.exports()
_.str.include 'Underscore.string', 'string'
require 'should'
sinon = require 'sinon'
nock = require 'nock'
fs = require 'fs'
NicoScraper = require '../production/nicoquery.js'
xml = '''
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="ja">
<title>マイリスト 【Oblivion】おっさんの大冒険‐ニコニコ動画</title>
<subtitle>ふふ マイリスを開いてしまいましたか^^</subtitle>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/mylist/15196568"/>
<link rel="self" type="application/atom+xml" href="http://www.nicovideo.jp/mylist/15196568?rss=atom"/>
<id>tag:nicovideo.jp,2009-10-11:/mylist/15196568</id>
<updated>2012-05-17T22:28:07+09:00</updated>
<author><name>PI:NAME:<NAME>END_PI</name></author>
<generator uri="http://www.nicovideo.jp/">ニコニコ動画</generator>
<rights>(c) niwango, inc. All rights reserved.</rights>
<entry>
<title>【Oblivion】おっさんの大冒険1(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8481759"/>
<id>tag:nicovideo.jp,2009-10-11:/watch/1255238132</id>
<published>2009-10-11T14:28:25+09:00</published>
<updated>2009-10-11T14:28:25+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険1(ゆっくり実況)" src="http://tn-skr4.smilevideo.jp/smile?i=8481759" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの生き様をとくとごらんあれ!このミリオンをいつまでも大切にしたい。そう、いつまでも・・・。おっさんがここまでこれたのも、みなさまのおかげですm(__)b次回:sm8506034 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">12:28</strong>|<strong class="nico-info-date">2009年10月11日 14:15:35</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険2(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8506034"/>
<id>tag:nicovideo.jp,2009-10-13:/watch/1255439788</id>
<published>2009-10-13T22:35:36+09:00</published>
<updated>2009-10-13T22:35:36+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険2(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=8506034" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの旅はまだまだ続きます。一応魔法&武器無し縛りでいこうと思います前回:sm8481759 次回:sm8539721 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">16:53</strong>|<strong class="nico-info-date">2009年10月13日 22:16:30</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険3(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8539721"/>
<id>tag:nicovideo.jp,2009-10-17:/watch/1255776463</id>
<published>2009-10-17T19:48:42+09:00</published>
<updated>2009-10-17T19:48:42+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険3(ゆっくり実況)" src="http://tn-skr2.smilevideo.jp/smile?i=8539721" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの運命やいかに!?前回:sm8506034 次回:sm8606274 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。</p><p class="nico-info"><small><strong class="nico-info-length">16:28</strong>|<strong class="nico-info-date">2009年10月17日 19:47:46</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険4(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8606274"/>
<id>tag:nicovideo.jp,2009-10-24:/watch/1256391663</id>
<published>2009-10-24T22:41:16+09:00</published>
<updated>2009-10-24T22:41:16+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険4(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=8606274" width="94" height="70" border="0"/></p><p class="nico-description">全国のおっさんに夢と希望を100万回のストーカー行為を働いたおっさんに栄光あれ!続編はじわじわ作ってますので、気長にお待ちくだされ。前回:sm8539721 次回:sm8811116 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">16:13</strong>|<strong class="nico-info-date">2009年10月24日 22:41:06</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険5(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm8811116"/>
<id>tag:nicovideo.jp,2009-11-15:/watch/1258225310</id>
<published>2009-11-15T04:03:13+09:00</published>
<updated>2009-11-15T04:03:13+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険5(ゆっくり実況)" src="http://tn-skr1.smilevideo.jp/smile?i=8811116" width="94" height="70" border="0"/></p><p class="nico-description">シャバに出たおっさんに活目せよ…!前回:sm8606274 次回:sm9221216 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">11:46</strong>|<strong class="nico-info-date">2009年11月15日 04:01:52</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険6(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm9221216"/>
<id>tag:nicovideo.jp,2009-12-28:/watch/1261952691</id>
<published>2009-12-28T07:25:45+09:00</published>
<updated>2009-12-28T07:25:45+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険6(ゆっくり実況)" src="http://tn-skr1.smilevideo.jp/smile?i=9221216" width="94" height="70" border="0"/></p><p class="nico-description">おっさんとゴブリンとの戦いの行方は!?あけおめぐっつすっす前回:sm8811116 次回:sm10024081 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">13:21</strong>|<strong class="nico-info-date">2009年12月28日 07:24:54</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険7(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm10024081"/>
<id>tag:nicovideo.jp,2010-03-14:/watch/1268546681</id>
<published>2010-03-14T19:04:41+09:00</published>
<updated>2010-03-14T19:04:41+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険7(ゆっくり実況)" src="http://tn-skr2.smilevideo.jp/smile?i=10024081" width="94" height="70" border="0"/></p><p class="nico-description">おっさん自身の幸せとは…!?前回:sm9221216 次回:sm10659358 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:29</strong>|<strong class="nico-info-date">2010年03月14日 15:04:43</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Oblivion】おっさんの大冒険8(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm10659358"/>
<id>tag:nicovideo.jp,2010-05-09:/watch/1273345724</id>
<published>2010-05-09T04:11:46+09:00</published>
<updated>2010-05-09T04:11:46+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Oblivion】おっさんの大冒険8(ゆっくり実況)" src="http://tn-skr3.smilevideo.jp/smile?i=10659358" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの守るべきものとは?前回:sm10024081 次回:sm17842779 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:47</strong>|<strong class="nico-info-date">2010年05月09日 04:08:44</strong> 投稿</small></p>]]></content>
</entry>
<entry>
<title>【Skyrim】おっさんの大冒険9(ゆっくり実況)</title>
<link rel="alternate" type="text/html" href="http://www.nicovideo.jp/watch/sm17842779"/>
<id>tag:nicovideo.jp,2012-05-16:/watch/1337180095</id>
<published>2012-05-17T01:18:09+09:00</published>
<updated>2012-05-17T01:18:09+09:00</updated>
<content type="html"><![CDATA[<p class="nico-thumbnail"><img alt="【Skyrim】おっさんの大冒険9(ゆっくり実況)" src="http://tn-skr4.smilevideo.jp/smile?i=17842779" width="94" height="70" border="0"/></p><p class="nico-description">おっさんの戦うわけとは?前回:sm10659358 マイリス:mylist/15196568ブログ⇒http://teikee.blog128.fc2.com/</p><p class="nico-info"><small><strong class="nico-info-length">21:45</strong>|<strong class="nico-info-date">2012年05月16日 23:54:55</strong> 投稿</small></p>]]></content>
</entry>
</feed>
'''
describe "About MylistAtom class", ->
describe "when create an instance", ->
before (done) ->
@xml = new NicoScraper.Source.MylistAtom xml
done()
it "has a mylist property", ->
@xml.title.should.equal '【Oblivion】おっさんの大冒険'
@xml.subtitle.should.equal 'ふふ マイリスを開いてしまいましたか^^'
@xml.mylistId.should.equal 15196568
@xml.updated.should.equal '2012-05-17T22:28:07+09:00'
@xml.author.should.equal 'PI:NAME:<NAME>END_PI'
it "has movies property in this mylist", ->
@xml.entry['sm8481759'].title.should.equal '【Oblivion】おっさんの大冒険1(ゆっくり実況)'
@xml.entry['sm8481759'].videoId.should.equal 'sm8481759'
@xml.entry['sm8481759'].timelikeId.should.equal 1255238132
@xml.entry['sm8481759'].thumbnailUrl.should.equal 'http://tn-skr4.smilevideo.jp/smile?i=8481759'
@xml.entry['sm8481759'].description.should.equal 'おっさんの生き様をとくとごらんあれ!このミリオンをいつまでも大切にしたい。そう、いつまでも・・・。おっさんがここまでこれたのも、みなさまのおかげですm(__)b次回:sm8506034 マイリス:mylist/15196568【重要】この動画ではPC版Oblivionでしか表現できないシーンが含まれます。このゲームにご興味のある方はご注意ください。'
@xml.entry['sm8481759'].length.should.equal 748
@xml.entry['sm8481759'].infoDate.should.equal 1255238135
|
[
{
"context": " callback null, [] if not apps.length\n\t\tkeys = ('a:' + app + ':key' for app in apps)\n\t\tdb.redis.mget k",
"end": 952,
"score": 0.58436119556427,
"start": 952,
"tag": "KEY",
"value": ""
}
] | bower_components/oauthio/plugins/server.admin/server_admin.coffee | quentindelattre/nest.js | 0 | # OAuth daemon
# Copyright (C) 2013 Webshell SAS
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'use strict'
restify = require 'restify'
fs = require 'fs'
Url = require 'url'
{db} = shared = require '../shared'
db_getApps = (callback) ->
db.redis.smembers 'adm:apps', (err, apps) ->
return callback err if err
return callback null, [] if not apps.length
keys = ('a:' + app + ':key' for app in apps)
db.redis.mget keys, (err, appkeys) ->
return callback err if err
return callback null, appkeys
## Event: add app to user when created
shared.on 'app.create', (req, app) ->
db.redis.sadd 'adm:apps', app.id
## Event: remove app from user when deleted
shared.on 'app.remove', (req, app) ->
db.redis.srem 'adm:apps', app.id
exports.setup = (callback) ->
rmBasePath = (req, res, next) =>
if req.path().substr(0, @config.base.length) == @config.base
req._path = req._path.substr(@config.base.length)
next()
sendIndex = (req, res, next) =>
fs.readFile __dirname + '/app/index.html', 'utf8', (err, data) =>
res.setHeader 'Content-Type', 'text/html'
data = data.toString().replace /\{\{if admin\}\}([\s\S]*?)\{\{endif\}\}\n?/gm, if req.user then '$1' else ''
data = data.replace /\{\{jsconfig\}\}/g, "var oauthdconfig={host_url:\"#{@config.host_url}\",base:\"#{@config.base}\",base_api:\"#{@config.base_api}\"};"
data = data.replace /\{\{baseurl\}\}/g, "#{@config.base}"
res.end data
next()
@server.get @config.base + '/admin', @auth.optional, ((req, res, next) =>
if db.redis.last_error
res.setHeader 'Location', @config.host_url + @config.base + "/admin/error#err=" + encodeURIComponent(db.redis.last_error)
res.send 302
next false
next()
), sendIndex
@server.get new RegExp('^' + @config.base + '\/(lib|css|js|img|templates)\/.*'), rmBasePath, restify.serveStatic
directory: __dirname + '/app'
maxAge: 1
@server.get new RegExp('^' + @config.base + '\/admin\/(lib|css|js|img|templates)\/*'), rmBasePath, @auth.needed, restify.serveStatic
directory: __dirname + '/app'
maxAge: 1
# get my infos
@server.get @config.base_api + '/me', @auth.needed, (req, res, next) =>
db_getApps (e, appkeys) ->
return next(e) if e
res.send apps:appkeys
next()
@server.get new RegExp('^' + @config.base + '\/admin\/(.*)'), @auth.optional, ((req, res, next) =>
if req.params[0] == "logout"
res.setHeader 'Set-Cookie', 'accessToken=; Path=' + @config.base + '/admin; Expires=' + (new Date(0)).toUTCString()
delete req.user
if not req.user && req.params[0] != "error"
res.setHeader 'Location', @config.host_url + @config.base + "/admin"
res.send 302
next false
next()
), sendIndex
callback() | 179511 | # OAuth daemon
# Copyright (C) 2013 Webshell SAS
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'use strict'
restify = require 'restify'
fs = require 'fs'
Url = require 'url'
{db} = shared = require '../shared'
db_getApps = (callback) ->
db.redis.smembers 'adm:apps', (err, apps) ->
return callback err if err
return callback null, [] if not apps.length
keys = ('a<KEY>:' + app + ':key' for app in apps)
db.redis.mget keys, (err, appkeys) ->
return callback err if err
return callback null, appkeys
## Event: add app to user when created
shared.on 'app.create', (req, app) ->
db.redis.sadd 'adm:apps', app.id
## Event: remove app from user when deleted
shared.on 'app.remove', (req, app) ->
db.redis.srem 'adm:apps', app.id
exports.setup = (callback) ->
rmBasePath = (req, res, next) =>
if req.path().substr(0, @config.base.length) == @config.base
req._path = req._path.substr(@config.base.length)
next()
sendIndex = (req, res, next) =>
fs.readFile __dirname + '/app/index.html', 'utf8', (err, data) =>
res.setHeader 'Content-Type', 'text/html'
data = data.toString().replace /\{\{if admin\}\}([\s\S]*?)\{\{endif\}\}\n?/gm, if req.user then '$1' else ''
data = data.replace /\{\{jsconfig\}\}/g, "var oauthdconfig={host_url:\"#{@config.host_url}\",base:\"#{@config.base}\",base_api:\"#{@config.base_api}\"};"
data = data.replace /\{\{baseurl\}\}/g, "#{@config.base}"
res.end data
next()
@server.get @config.base + '/admin', @auth.optional, ((req, res, next) =>
if db.redis.last_error
res.setHeader 'Location', @config.host_url + @config.base + "/admin/error#err=" + encodeURIComponent(db.redis.last_error)
res.send 302
next false
next()
), sendIndex
@server.get new RegExp('^' + @config.base + '\/(lib|css|js|img|templates)\/.*'), rmBasePath, restify.serveStatic
directory: __dirname + '/app'
maxAge: 1
@server.get new RegExp('^' + @config.base + '\/admin\/(lib|css|js|img|templates)\/*'), rmBasePath, @auth.needed, restify.serveStatic
directory: __dirname + '/app'
maxAge: 1
# get my infos
@server.get @config.base_api + '/me', @auth.needed, (req, res, next) =>
db_getApps (e, appkeys) ->
return next(e) if e
res.send apps:appkeys
next()
@server.get new RegExp('^' + @config.base + '\/admin\/(.*)'), @auth.optional, ((req, res, next) =>
if req.params[0] == "logout"
res.setHeader 'Set-Cookie', 'accessToken=; Path=' + @config.base + '/admin; Expires=' + (new Date(0)).toUTCString()
delete req.user
if not req.user && req.params[0] != "error"
res.setHeader 'Location', @config.host_url + @config.base + "/admin"
res.send 302
next false
next()
), sendIndex
callback() | true | # OAuth daemon
# Copyright (C) 2013 Webshell SAS
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'use strict'
restify = require 'restify'
fs = require 'fs'
Url = require 'url'
{db} = shared = require '../shared'
db_getApps = (callback) ->
db.redis.smembers 'adm:apps', (err, apps) ->
return callback err if err
return callback null, [] if not apps.length
keys = ('aPI:KEY:<KEY>END_PI:' + app + ':key' for app in apps)
db.redis.mget keys, (err, appkeys) ->
return callback err if err
return callback null, appkeys
## Event: add app to user when created
shared.on 'app.create', (req, app) ->
db.redis.sadd 'adm:apps', app.id
## Event: remove app from user when deleted
shared.on 'app.remove', (req, app) ->
db.redis.srem 'adm:apps', app.id
exports.setup = (callback) ->
rmBasePath = (req, res, next) =>
if req.path().substr(0, @config.base.length) == @config.base
req._path = req._path.substr(@config.base.length)
next()
sendIndex = (req, res, next) =>
fs.readFile __dirname + '/app/index.html', 'utf8', (err, data) =>
res.setHeader 'Content-Type', 'text/html'
data = data.toString().replace /\{\{if admin\}\}([\s\S]*?)\{\{endif\}\}\n?/gm, if req.user then '$1' else ''
data = data.replace /\{\{jsconfig\}\}/g, "var oauthdconfig={host_url:\"#{@config.host_url}\",base:\"#{@config.base}\",base_api:\"#{@config.base_api}\"};"
data = data.replace /\{\{baseurl\}\}/g, "#{@config.base}"
res.end data
next()
@server.get @config.base + '/admin', @auth.optional, ((req, res, next) =>
if db.redis.last_error
res.setHeader 'Location', @config.host_url + @config.base + "/admin/error#err=" + encodeURIComponent(db.redis.last_error)
res.send 302
next false
next()
), sendIndex
@server.get new RegExp('^' + @config.base + '\/(lib|css|js|img|templates)\/.*'), rmBasePath, restify.serveStatic
directory: __dirname + '/app'
maxAge: 1
@server.get new RegExp('^' + @config.base + '\/admin\/(lib|css|js|img|templates)\/*'), rmBasePath, @auth.needed, restify.serveStatic
directory: __dirname + '/app'
maxAge: 1
# get my infos
@server.get @config.base_api + '/me', @auth.needed, (req, res, next) =>
db_getApps (e, appkeys) ->
return next(e) if e
res.send apps:appkeys
next()
@server.get new RegExp('^' + @config.base + '\/admin\/(.*)'), @auth.optional, ((req, res, next) =>
if req.params[0] == "logout"
res.setHeader 'Set-Cookie', 'accessToken=; Path=' + @config.base + '/admin; Expires=' + (new Date(0)).toUTCString()
delete req.user
if not req.user && req.params[0] != "error"
res.setHeader 'Location', @config.host_url + @config.base + "/admin"
res.send 302
next false
next()
), sendIndex
callback() |
[
{
"context": "e based on a Left Leaning Red-Black Tree\n @author Mads Hartmann Jensen (mads379@gmail.com)\n###\n\nmugs.provide(\"mugs.LLRBM",
"end": 144,
"score": 0.9998772740364075,
"start": 124,
"tag": "NAME",
"value": "Mads Hartmann Jensen"
},
{
"context": "ng Red-Black Tree\n @... | src/LLRBMap.coffee | mads-hartmann/mugs | 1 | ###*
@fileoverview Contains the implementation of the Map data structure based on a Left Leaning Red-Black Tree
@author Mads Hartmann Jensen (mads379@gmail.com)
###
mugs.provide("mugs.LLRBMap")
mugs.require("mugs.LLRBNode")
mugs.require("mugs.LLRBLeaf")
###*
mugs.LLRBMap provides the implementation of the abstract data type 'Map' based on a Left Leaning Red Black Tree. The
map contains the following operations
<pre>
insert(key,value) O(log n)
get(index) O(log n)
remove(index) O(log n)
containsKey(key) O(log n)
keys() O(n)
values() O(n)
isEmpty() O(1)
forEach(f) O(n*O(f))
</pre>
@public
@augments mugs.Collection
@class mugs.LLRBMap provides the implementation of the abstract data type 'Map' based on a Red Black Tree.
@example
var map = new mugs.LLRBMap([
{key: 1, value: "one"},
{key: 4, value: "four"},
{key: 3, value: "three"},
{key: 2, value: "two"}
]);
@param {Array} keyValuePairs An array containing objects with the properties key & value.
@param {Function=} comparator A comparator function that can compare the keys (optional). Will use a
default comparator if no comparator is given. The default one uses the
< and > operators.
###
mugs.LLRBMap = (keyValuePairs, comparator) ->
treeUnderConstruction = new mugs.LLRBLeaf(comparator)
if keyValuePairs instanceof Array and keyValuePairs.length > 0
for kv in keyValuePairs
treeUnderConstruction = treeUnderConstruction.insert(kv.key, kv.value)
this.tree = treeUnderConstruction
this
mugs.LLRBMap.prototype = new mugs.Collection()
###
---------------------------------------------------------------------------------------------
Methods related to the MAP ADT
---------------------------------------------------------------------------------------------
###
###*
Returns a new mugs.LLRBMap containing the given (key,value) pair.
@param {*} key The key to store the value by
@param {*} value The value to store in the map
@return {mugs.LLRBMap} A new mugs.LLRBMap that also contains the new key-value pair
###
mugs.LLRBMap.prototype.insert = (key, value) ->
this.buildFromTree(this.tree.insert(key,value))
###*
If a (key,value) pair exists return mugs.Some(value), otherwise mugs.None()
@param {*} key The key of the value you want to read.
@return {mugs.Some|mugs.None} mugs.Some(value) if it exists in the map. Otherwise mugs.None
###
mugs.LLRBMap.prototype.get = (key) ->
this.tree.get(key)
###*
Returns a new mugs.LLRBMap without the given key-value pair.
@param {*} key The key of the value you want to remove
@return {mugs.LLRBMap} A new mugs.LLRBMap that doesn't contain the key-value pair
###
mugs.LLRBMap.prototype.remove = (key) ->
this.buildFromTree(this.tree.remove(key))
###*
Returns a sorted list containing all of the keys in the mugs.LLRBMap
@return {List} A sorted list containing all of the keys in the mugs.LLRBMap
###
mugs.LLRBMap.prototype.keys = () ->
this.tree.keys()
###*
True if the given key is contained in the LLRBMap, otherwise false.
@param key The key to search for
@return True if the given key is contained in the LLRBMap, otherwise false.
###
mugs.LLRBMap.prototype.containsKey = (key) ->
this.tree.containsKey(key)
###*
Returns a sorted list containing all of the values in the mugs.LLRBMap
@return {List} sorted list containing all of the values in the mugs.LLRBMap
###
mugs.LLRBMap.prototype.values = () ->
this.tree.values()
###*
Return true if the collection is empty, otherwise false
@return True if the collection is empty, otherwise false
###
mugs.LLRBMap.prototype.isEmpty = () ->
this.tree.isEmpty()
###*
Used to construct a mugs.LLRBMap from mugs.RedBlackTree. This is intended
for internal use only. Would've marked it private if I could.
@private
###
mugs.LLRBMap.prototype.buildFromTree = (tree) ->
map = new mugs.LLRBMap(this.comparator)
map.tree = tree
map
###
---------------------------------------------------------------------------------------------
Methods related to Collection prototype
---------------------------------------------------------------------------------------------
###
###*
@private
###
mugs.LLRBMap.prototype.buildFromArray = (arr) ->
new mugs.LLRBMap(arr, this.comparator)
###*
Applies function 'f' on each value in the collection. This return nothing and is only invoked
for the side-effects of f.
@param f The unary function to apply on each element in the collection.
@see mugs.Collection
###
mugs.LLRBMap.prototype.forEach = ( f ) ->
this.tree.inorderTraversal( f )
| 217246 | ###*
@fileoverview Contains the implementation of the Map data structure based on a Left Leaning Red-Black Tree
@author <NAME> (<EMAIL>)
###
mugs.provide("mugs.LLRBMap")
mugs.require("mugs.LLRBNode")
mugs.require("mugs.LLRBLeaf")
###*
mugs.LLRBMap provides the implementation of the abstract data type 'Map' based on a Left Leaning Red Black Tree. The
map contains the following operations
<pre>
insert(key,value) O(log n)
get(index) O(log n)
remove(index) O(log n)
containsKey(key) O(log n)
keys() O(n)
values() O(n)
isEmpty() O(1)
forEach(f) O(n*O(f))
</pre>
@public
@augments mugs.Collection
@class mugs.LLRBMap provides the implementation of the abstract data type 'Map' based on a Red Black Tree.
@example
var map = new mugs.LLRBMap([
{key: 1, value: "one"},
{key: 4, value: "four"},
{key: 3, value: "three"},
{key: 2, value: "two"}
]);
@param {Array} keyValuePairs An array containing objects with the properties key & value.
@param {Function=} comparator A comparator function that can compare the keys (optional). Will use a
default comparator if no comparator is given. The default one uses the
< and > operators.
###
mugs.LLRBMap = (keyValuePairs, comparator) ->
treeUnderConstruction = new mugs.LLRBLeaf(comparator)
if keyValuePairs instanceof Array and keyValuePairs.length > 0
for kv in keyValuePairs
treeUnderConstruction = treeUnderConstruction.insert(kv.key, kv.value)
this.tree = treeUnderConstruction
this
mugs.LLRBMap.prototype = new mugs.Collection()
###
---------------------------------------------------------------------------------------------
Methods related to the MAP ADT
---------------------------------------------------------------------------------------------
###
###*
Returns a new mugs.LLRBMap containing the given (key,value) pair.
@param {*} key The key to store the value by
@param {*} value The value to store in the map
@return {mugs.LLRBMap} A new mugs.LLRBMap that also contains the new key-value pair
###
mugs.LLRBMap.prototype.insert = (key, value) ->
this.buildFromTree(this.tree.insert(key,value))
###*
If a (key,value) pair exists return mugs.Some(value), otherwise mugs.None()
@param {*} key The key of the value you want to read.
@return {mugs.Some|mugs.None} mugs.Some(value) if it exists in the map. Otherwise mugs.None
###
mugs.LLRBMap.prototype.get = (key) ->
this.tree.get(key)
###*
Returns a new mugs.LLRBMap without the given key-value pair.
@param {*} key The key of the value you want to remove
@return {mugs.LLRBMap} A new mugs.LLRBMap that doesn't contain the key-value pair
###
mugs.LLRBMap.prototype.remove = (key) ->
this.buildFromTree(this.tree.remove(key))
###*
Returns a sorted list containing all of the keys in the mugs.LLRBMap
@return {List} A sorted list containing all of the keys in the mugs.LLRBMap
###
mugs.LLRBMap.prototype.keys = () ->
this.tree.keys()
###*
True if the given key is contained in the LLRBMap, otherwise false.
@param key The key to search for
@return True if the given key is contained in the LLRBMap, otherwise false.
###
mugs.LLRBMap.prototype.containsKey = (key) ->
this.tree.containsKey(key)
###*
Returns a sorted list containing all of the values in the mugs.LLRBMap
@return {List} sorted list containing all of the values in the mugs.LLRBMap
###
mugs.LLRBMap.prototype.values = () ->
this.tree.values()
###*
Return true if the collection is empty, otherwise false
@return True if the collection is empty, otherwise false
###
mugs.LLRBMap.prototype.isEmpty = () ->
this.tree.isEmpty()
###*
Used to construct a mugs.LLRBMap from mugs.RedBlackTree. This is intended
for internal use only. Would've marked it private if I could.
@private
###
mugs.LLRBMap.prototype.buildFromTree = (tree) ->
map = new mugs.LLRBMap(this.comparator)
map.tree = tree
map
###
---------------------------------------------------------------------------------------------
Methods related to Collection prototype
---------------------------------------------------------------------------------------------
###
###*
@private
###
mugs.LLRBMap.prototype.buildFromArray = (arr) ->
new mugs.LLRBMap(arr, this.comparator)
###*
Applies function 'f' on each value in the collection. This return nothing and is only invoked
for the side-effects of f.
@param f The unary function to apply on each element in the collection.
@see mugs.Collection
###
mugs.LLRBMap.prototype.forEach = ( f ) ->
this.tree.inorderTraversal( f )
| true | ###*
@fileoverview Contains the implementation of the Map data structure based on a Left Leaning Red-Black Tree
@author PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
###
mugs.provide("mugs.LLRBMap")
mugs.require("mugs.LLRBNode")
mugs.require("mugs.LLRBLeaf")
###*
mugs.LLRBMap provides the implementation of the abstract data type 'Map' based on a Left Leaning Red Black Tree. The
map contains the following operations
<pre>
insert(key,value) O(log n)
get(index) O(log n)
remove(index) O(log n)
containsKey(key) O(log n)
keys() O(n)
values() O(n)
isEmpty() O(1)
forEach(f) O(n*O(f))
</pre>
@public
@augments mugs.Collection
@class mugs.LLRBMap provides the implementation of the abstract data type 'Map' based on a Red Black Tree.
@example
var map = new mugs.LLRBMap([
{key: 1, value: "one"},
{key: 4, value: "four"},
{key: 3, value: "three"},
{key: 2, value: "two"}
]);
@param {Array} keyValuePairs An array containing objects with the properties key & value.
@param {Function=} comparator A comparator function that can compare the keys (optional). Will use a
default comparator if no comparator is given. The default one uses the
< and > operators.
###
mugs.LLRBMap = (keyValuePairs, comparator) ->
treeUnderConstruction = new mugs.LLRBLeaf(comparator)
if keyValuePairs instanceof Array and keyValuePairs.length > 0
for kv in keyValuePairs
treeUnderConstruction = treeUnderConstruction.insert(kv.key, kv.value)
this.tree = treeUnderConstruction
this
mugs.LLRBMap.prototype = new mugs.Collection()
###
---------------------------------------------------------------------------------------------
Methods related to the MAP ADT
---------------------------------------------------------------------------------------------
###
###*
Returns a new mugs.LLRBMap containing the given (key,value) pair.
@param {*} key The key to store the value by
@param {*} value The value to store in the map
@return {mugs.LLRBMap} A new mugs.LLRBMap that also contains the new key-value pair
###
mugs.LLRBMap.prototype.insert = (key, value) ->
this.buildFromTree(this.tree.insert(key,value))
###*
If a (key,value) pair exists return mugs.Some(value), otherwise mugs.None()
@param {*} key The key of the value you want to read.
@return {mugs.Some|mugs.None} mugs.Some(value) if it exists in the map. Otherwise mugs.None
###
mugs.LLRBMap.prototype.get = (key) ->
this.tree.get(key)
###*
Returns a new mugs.LLRBMap without the given key-value pair.
@param {*} key The key of the value you want to remove
@return {mugs.LLRBMap} A new mugs.LLRBMap that doesn't contain the key-value pair
###
mugs.LLRBMap.prototype.remove = (key) ->
this.buildFromTree(this.tree.remove(key))
###*
Returns a sorted list containing all of the keys in the mugs.LLRBMap
@return {List} A sorted list containing all of the keys in the mugs.LLRBMap
###
mugs.LLRBMap.prototype.keys = () ->
this.tree.keys()
###*
True if the given key is contained in the LLRBMap, otherwise false.
@param key The key to search for
@return True if the given key is contained in the LLRBMap, otherwise false.
###
mugs.LLRBMap.prototype.containsKey = (key) ->
this.tree.containsKey(key)
###*
Returns a sorted list containing all of the values in the mugs.LLRBMap
@return {List} sorted list containing all of the values in the mugs.LLRBMap
###
mugs.LLRBMap.prototype.values = () ->
this.tree.values()
###*
Return true if the collection is empty, otherwise false
@return True if the collection is empty, otherwise false
###
mugs.LLRBMap.prototype.isEmpty = () ->
this.tree.isEmpty()
###*
Used to construct a mugs.LLRBMap from mugs.RedBlackTree. This is intended
for internal use only. Would've marked it private if I could.
@private
###
mugs.LLRBMap.prototype.buildFromTree = (tree) ->
map = new mugs.LLRBMap(this.comparator)
map.tree = tree
map
###
---------------------------------------------------------------------------------------------
Methods related to Collection prototype
---------------------------------------------------------------------------------------------
###
###*
@private
###
mugs.LLRBMap.prototype.buildFromArray = (arr) ->
new mugs.LLRBMap(arr, this.comparator)
###*
Applies function 'f' on each value in the collection. This return nothing and is only invoked
for the side-effects of f.
@param f The unary function to apply on each element in the collection.
@see mugs.Collection
###
mugs.LLRBMap.prototype.forEach = ( f ) ->
this.tree.inorderTraversal( f )
|
[
{
"context": "#Language: Turkish\n#Translators: serkandurusoy\n\ntr =\n\n add: \"ekle\"\n and: \"ve\"\n back: \"geri\"\n ",
"end": 46,
"score": 0.9947993159294128,
"start": 33,
"tag": "USERNAME",
"value": "serkandurusoy"
},
{
"context": "le\"\n and: \"ve\"\n back: \"geri\"\n ch... | t9n/tr.coffee | XavierSamuelHuppe/xav-accounts-t9n | 0 | #Language: Turkish
#Translators: serkandurusoy
tr =
add: "ekle"
and: "ve"
back: "geri"
changePassword: "Şifre Değiştir"
choosePassword: "Şifre Belirle"
clickAgree: "Kayıta tıklayarak kabul etmiş olacağınız"
configure: "Yapılandır"
createAccount: "Hesap Oluştur"
currentPassword: "Mevcut Şifre"
dontHaveAnAccount: "Hesabın yok mu?"
email: "Eposta"
emailAddress: "Eposta Adresi"
emailResetLink: "Email Reset Link"
forgotPassword: "Şifreni mi unuttun?"
ifYouAlreadyHaveAnAccount: "Zaten bir hesabın varsa"
newPassword: "Yeni Şifre"
newPasswordAgain: "Yeni Şifre (tekrar)"
optional: "İsteğe Bağlı"
OR: "VEYA"
password: "Şifre"
passwordAgain: "Şifre (tekrar)"
privacyPolicy: "Gizlilik Politikası"
remove: "kaldır"
resetYourPassword: "Şifreni sıfırla"
setPassword: "Şifre Belirle"
sign: "Giriş"
signIn: "Giriş"
signin: "Giriş"
signOut: "Çıkış"
signUp: "Kayıt"
signupCode: "Kayıt Kodu"
signUpWithYourEmailAddress: "Eposta adresin ile kaydol"
terms: "Kullanım Şartları"
updateYourPassword: "Şifreni güncelle"
username: "Kullanıcı adı"
usernameOrEmail: "Kullanıcı adı veya şifre"
with: "için"
info:
emailSent: "Eposta iletildi"
emailVerified: "Eposta doğrulandı"
passwordChanged: "Şifre değişti"
passwordReset: "Şifre sıfırlandı"
error:
emailRequired: "Eposta gerekli."
minChar: "En az 7 karakterli şifre."
pwdsDontMatch: "Şifreler uyuşmuyor"
pwOneDigit: "Şifre en az bir rakam içermeli."
pwOneLetter: "Şifre bir harf gerektiriyor."
signInRequired: "Bunun için önce giriş yapmış olmalısın."
signupCodeIncorrect: "Kayıt kodu hatalı."
signupCodeRequired: "Kayıt kodu gerekli."
usernameIsEmail: "Kullanıcı adı bir eposta adresi olamaz."
usernameRequired: "Kullanıcı adı gerekli."
accounts:
#---- accounts-base
#"@" + domain + " eposta adresi gerekli"
#"Bir giriş işleyicisi ya bir sonuç ya da undefined döndürmelidir"
"Email already exists.": "Eposta zaten kayıtlı."
"Email doesn't match the criteria.": "Eposta kriterleri karşılamıyor."
"Invalid login token": "Geçersiz giriş işaretçisi"
"Login forbidden": "Girişe izin verilmiyor"
#"Servis " + options.service + " zaten yapılandırılmış"
"Service unknown": "Servis tanınmıyor"
"Unrecognized options for login request": "Giriş isteği için tanınmayan seçenekler"
"User validation failed": "Kullanıcı doğrulama başarısız"
"Username already exists.": "Kullanıcı adı zaten kayıtlı."
"You are not logged in.": "Kullanıcı girişi yapmadın."
"You've been logged out by the server. Please log in again.": "Sunucu tarafından çıkarıldın. Lütfen tekrar kullanıcı girişi yap."
"Your session has expired. Please log in again.": "Oturumun zaman aşımına uğradı. Lütfen tekrar kullanıcı girişi yap."
#---- accounts-oauth
"No matching login attempt found": "Eşleşen bir giriş teşebbüsü bulunamadı"
#---- accounts-password-client
"Password is old. Please reset your password.": "Şifre eski. Lütfen şifreni sıfırla."
#---- accounts-password
"Incorrect password": "Hatalı şifre"
"Invalid email": "Hatalı eposta"
"Must be logged in": "Giriş yapmış olmalısın"
"Need to set a username or email": "Kullanıcı adı veya eposta tanımlamalısın"
"old password format": "eski şifre biçimi"
"Password may not be empty": "Şifre boş bırakılamaz"
"Signups forbidden": "Kayıt yapmaya izin verilmiyor"
"Token expired": "İşaretçinin süresi geçti"
"Token has invalid email address": "İşaretçide geçersiz eposta adresi var"
"User has no password set": "Kullanıcının şifresi tanımlanmamış"
"User not found": "Kullanıcı bulunamadı"
"Verify email link expired": "Eposta doğrulama bağlantısı zaman aşımına uğradı"
"Verify email link is for unknown address": "Eposta doğrulama bağlantısı bilinmeyen bir adres içeriyor"
#---- match
"Match failed": "Eşleşme başarısız"
#---- Misc...
"Unknown error": "Bilinmeyen hata"
T9n.map "tr", tr
| 118490 | #Language: Turkish
#Translators: serkandurusoy
tr =
add: "ekle"
and: "ve"
back: "geri"
changePassword: "<PASSWORD>"
choosePassword: "<PASSWORD>"
clickAgree: "Kayıta tıklayarak kabul etmiş olacağınız"
configure: "Yapılandır"
createAccount: "Hesap Oluştur"
currentPassword: "<PASSWORD>"
dontHaveAnAccount: "Hesabın yok mu?"
email: "Eposta"
emailAddress: "Eposta Adresi"
emailResetLink: "Email Reset Link"
forgotPassword: "<PASSWORD>?"
ifYouAlreadyHaveAnAccount: "Zaten bir hesabın varsa"
newPassword: "<PASSWORD>"
newPasswordAgain: "<PASSWORD> (tekrar)"
optional: "İsteğe Bağlı"
OR: "VEYA"
password: "<PASSWORD>"
passwordAgain: "<PASSWORD> (te<PASSWORD>ar)"
privacyPolicy: "Gizlilik Politikası"
remove: "kaldır"
resetYourPassword: "<PASSWORD>"
setPassword: "<PASSWORD>"
sign: "Giriş"
signIn: "Giriş"
signin: "Giriş"
signOut: "Çıkış"
signUp: "Kayıt"
signupCode: "Kayıt Kodu"
signUpWithYourEmailAddress: "Eposta adresin ile kaydol"
terms: "Kullanım Şartları"
updateYourPassword: "<PASSWORD>"
username: "Kullanıcı adı"
usernameOrEmail: "Kullanıcı adı veya şifre"
with: "için"
info:
emailSent: "Eposta iletildi"
emailVerified: "Eposta doğrulandı"
passwordChanged: "<PASSWORD>"
passwordReset: "<PASSWORD>fırlandı"
error:
emailRequired: "Eposta gerekli."
minChar: "En az 7 karakterli şifre."
pwdsDontMatch: "Şifreler uyuşmuyor"
pwOneDigit: "Şifre en az bir rakam içermeli."
pwOneLetter: "Şifre bir harf gerektiriyor."
signInRequired: "Bunun için önce giriş yapmış olmalısın."
signupCodeIncorrect: "Kayıt kodu hatalı."
signupCodeRequired: "Kayıt kodu gerekli."
usernameIsEmail: "Kullanıcı adı bir eposta adresi olamaz."
usernameRequired: "Kullanıcı adı gerekli."
accounts:
#---- accounts-base
#"@" + domain + " eposta adresi gerekli"
#"Bir giriş işleyicisi ya bir sonuç ya da undefined döndürmelidir"
"Email already exists.": "Eposta zaten kayıtlı."
"Email doesn't match the criteria.": "Eposta kriterleri karşılamıyor."
"Invalid login token": "Geçersiz giriş işaretçisi"
"Login forbidden": "Girişe izin verilmiyor"
#"Servis " + options.service + " zaten yapılandırılmış"
"Service unknown": "Servis tanınmıyor"
"Unrecognized options for login request": "Giriş isteği için tanınmayan seçenekler"
"User validation failed": "Kullanıcı doğrulama başarısız"
"Username already exists.": "K<NAME> adı zaten kayıtlı."
"You are not logged in.": "Kullanıcı girişi yapmadın."
"You've been logged out by the server. Please log in again.": "Sunucu tarafından çıkarıldın. Lütfen tekrar kullanıcı girişi yap."
"Your session has expired. Please log in again.": "Oturumun zaman aşımına uğradı. Lütfen tekrar kullanıcı girişi yap."
#---- accounts-oauth
"No matching login attempt found": "Eşleşen bir giriş teşebbüsü bulunamadı"
#---- accounts-password-client
"Password is old. Please reset your password.": "Şifre eski. Lütfen şifreni sıfırla."
#---- accounts-password
"Incorrect password": "<PASSWORD>"
"Invalid email": "Hatalı eposta"
"Must be logged in": "Giriş yapmış olmalısın"
"Need to set a username or email": "Kullanıcı adı veya eposta tanımlamalısın"
"old password format": "es<PASSWORD> şifre biçimi"
"Password may not be empty": "Ş<PASSWORD>"
"Signups forbidden": "Kayıt yapmaya izin verilmiyor"
"Token expired": "İşaretçinin süresi geçti"
"Token has invalid email address": "İşaretçide geçersiz eposta adresi var"
"User has no password set": "Kullanıcının şifresi tanımlanmamış"
"User not found": "Kullanıcı bulunamadı"
"Verify email link expired": "Eposta doğrulama bağlantısı zaman aşımına uğradı"
"Verify email link is for unknown address": "Eposta doğrulama bağlantısı bilinmeyen bir adres içeriyor"
#---- match
"Match failed": "Eşleşme başarısız"
#---- Misc...
"Unknown error": "Bilinmeyen hata"
T9n.map "tr", tr
| true | #Language: Turkish
#Translators: serkandurusoy
tr =
add: "ekle"
and: "ve"
back: "geri"
changePassword: "PI:PASSWORD:<PASSWORD>END_PI"
choosePassword: "PI:PASSWORD:<PASSWORD>END_PI"
clickAgree: "Kayıta tıklayarak kabul etmiş olacağınız"
configure: "Yapılandır"
createAccount: "Hesap Oluştur"
currentPassword: "PI:PASSWORD:<PASSWORD>END_PI"
dontHaveAnAccount: "Hesabın yok mu?"
email: "Eposta"
emailAddress: "Eposta Adresi"
emailResetLink: "Email Reset Link"
forgotPassword: "PI:PASSWORD:<PASSWORD>END_PI?"
ifYouAlreadyHaveAnAccount: "Zaten bir hesabın varsa"
newPassword: "PI:PASSWORD:<PASSWORD>END_PI"
newPasswordAgain: "PI:PASSWORD:<PASSWORD>END_PI (tekrar)"
optional: "İsteğe Bağlı"
OR: "VEYA"
password: "PI:PASSWORD:<PASSWORD>END_PI"
passwordAgain: "PI:PASSWORD:<PASSWORD>END_PI (tePI:PASSWORD:<PASSWORD>END_PIar)"
privacyPolicy: "Gizlilik Politikası"
remove: "kaldır"
resetYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
setPassword: "PI:PASSWORD:<PASSWORD>END_PI"
sign: "Giriş"
signIn: "Giriş"
signin: "Giriş"
signOut: "Çıkış"
signUp: "Kayıt"
signupCode: "Kayıt Kodu"
signUpWithYourEmailAddress: "Eposta adresin ile kaydol"
terms: "Kullanım Şartları"
updateYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
username: "Kullanıcı adı"
usernameOrEmail: "Kullanıcı adı veya şifre"
with: "için"
info:
emailSent: "Eposta iletildi"
emailVerified: "Eposta doğrulandı"
passwordChanged: "PI:PASSWORD:<PASSWORD>END_PI"
passwordReset: "PI:PASSWORD:<PASSWORD>END_PIfırlandı"
error:
emailRequired: "Eposta gerekli."
minChar: "En az 7 karakterli şifre."
pwdsDontMatch: "Şifreler uyuşmuyor"
pwOneDigit: "Şifre en az bir rakam içermeli."
pwOneLetter: "Şifre bir harf gerektiriyor."
signInRequired: "Bunun için önce giriş yapmış olmalısın."
signupCodeIncorrect: "Kayıt kodu hatalı."
signupCodeRequired: "Kayıt kodu gerekli."
usernameIsEmail: "Kullanıcı adı bir eposta adresi olamaz."
usernameRequired: "Kullanıcı adı gerekli."
accounts:
#---- accounts-base
#"@" + domain + " eposta adresi gerekli"
#"Bir giriş işleyicisi ya bir sonuç ya da undefined döndürmelidir"
"Email already exists.": "Eposta zaten kayıtlı."
"Email doesn't match the criteria.": "Eposta kriterleri karşılamıyor."
"Invalid login token": "Geçersiz giriş işaretçisi"
"Login forbidden": "Girişe izin verilmiyor"
#"Servis " + options.service + " zaten yapılandırılmış"
"Service unknown": "Servis tanınmıyor"
"Unrecognized options for login request": "Giriş isteği için tanınmayan seçenekler"
"User validation failed": "Kullanıcı doğrulama başarısız"
"Username already exists.": "KPI:NAME:<NAME>END_PI adı zaten kayıtlı."
"You are not logged in.": "Kullanıcı girişi yapmadın."
"You've been logged out by the server. Please log in again.": "Sunucu tarafından çıkarıldın. Lütfen tekrar kullanıcı girişi yap."
"Your session has expired. Please log in again.": "Oturumun zaman aşımına uğradı. Lütfen tekrar kullanıcı girişi yap."
#---- accounts-oauth
"No matching login attempt found": "Eşleşen bir giriş teşebbüsü bulunamadı"
#---- accounts-password-client
"Password is old. Please reset your password.": "Şifre eski. Lütfen şifreni sıfırla."
#---- accounts-password
"Incorrect password": "PI:PASSWORD:<PASSWORD>END_PI"
"Invalid email": "Hatalı eposta"
"Must be logged in": "Giriş yapmış olmalısın"
"Need to set a username or email": "Kullanıcı adı veya eposta tanımlamalısın"
"old password format": "esPI:PASSWORD:<PASSWORD>END_PI şifre biçimi"
"Password may not be empty": "ŞPI:PASSWORD:<PASSWORD>END_PI"
"Signups forbidden": "Kayıt yapmaya izin verilmiyor"
"Token expired": "İşaretçinin süresi geçti"
"Token has invalid email address": "İşaretçide geçersiz eposta adresi var"
"User has no password set": "Kullanıcının şifresi tanımlanmamış"
"User not found": "Kullanıcı bulunamadı"
"Verify email link expired": "Eposta doğrulama bağlantısı zaman aşımına uğradı"
"Verify email link is for unknown address": "Eposta doğrulama bağlantısı bilinmeyen bir adres içeriyor"
#---- match
"Match failed": "Eşleşme başarısız"
#---- Misc...
"Unknown error": "Bilinmeyen hata"
T9n.map "tr", tr
|
[
{
"context": "entations/instructions.\n# Version: 0.0.1\n# Author: Vadim Goncharov (@owldesign)\n\n#\n# UNDERSCORE USAGE\n# <% %> - to ",
"end": 144,
"score": 0.9998655319213867,
"start": 129,
"tag": "NAME",
"value": "Vadim Goncharov"
},
{
"context": "ctions.\n# Version: 0.0.1\n# Au... | dev/coffee/application.coffee | roundhouse/Web-Documentation | 1 | # Title: Web Documentation
# Description: HTML Framework for writing web documentations/instructions.
# Version: 0.0.1
# Author: Vadim Goncharov (@owldesign)
#
# UNDERSCORE USAGE
# <% %> - to execute some code
# <%= %> - to print some value in template
# <%- %> - to print some values with HTML escaped
#
class App
init: =>
styles = ["display: block","background: #f7cd81","color: white","padding: 20px 20px 20px 20px","text-align: center","font-weight: normal","font-size: 20px","line-height: 60px"].join(';')
console.log '%c Web Documentation!', styles, 'Has loaded.'
# FastClick for mobile
FastClick.attach(document.body)
# Navigation Trigger
$('#menu-trigger').on 'click', (e) ->
e.preventDefault()
$(@).toggleClass 'active'
$('body').toggleClass 'active'
$('#content').toggleClass 'active'
$('#sidebar').toggleClass 'active'
# Setup Underscore Templates
_.templateSettings.variable = "docs";
pageTemplate = _.template($('#page-template').html())
configTemplate = _.template($('#config-template').html())
footerTemplate = _.template($('#footer-template').html())
# Load Content & Config
contentData = YAML.load '/content.yml'
configData = YAML.load '/config.yml'
$('#page-wrapper').html(pageTemplate(contentData)) # Load Content
$('#header').html(configTemplate(configData)) # Load Page Info
$('#footer').html(footerTemplate(configData)) # Load Footer Info
# Navigation Accordion
$('.subnav').accordion # Navigation Accordion
speed: 'fast'
# Scroll To Section
$('.anchor').on 'click', (e) -> # Navigate to section id
e.preventDefault()
target = $(@).attr 'href'
$('html, body').animate { scrollTop: $(target).offset().top - 25 }, 500
# Fixed navigation
nav = $('#sidebar nav').offset()
console.log nav
$(window).scroll ->
if $(window).scrollTop() > nav.top + 35
$('#sidebar nav').addClass 'fixed'
else
$('#sidebar nav').removeClass 'fixed'
$ ->
Application = new App()
Application.init() | 25598 | # Title: Web Documentation
# Description: HTML Framework for writing web documentations/instructions.
# Version: 0.0.1
# Author: <NAME> (@owldesign)
#
# UNDERSCORE USAGE
# <% %> - to execute some code
# <%= %> - to print some value in template
# <%- %> - to print some values with HTML escaped
#
class App
init: =>
styles = ["display: block","background: #f7cd81","color: white","padding: 20px 20px 20px 20px","text-align: center","font-weight: normal","font-size: 20px","line-height: 60px"].join(';')
console.log '%c Web Documentation!', styles, 'Has loaded.'
# FastClick for mobile
FastClick.attach(document.body)
# Navigation Trigger
$('#menu-trigger').on 'click', (e) ->
e.preventDefault()
$(@).toggleClass 'active'
$('body').toggleClass 'active'
$('#content').toggleClass 'active'
$('#sidebar').toggleClass 'active'
# Setup Underscore Templates
_.templateSettings.variable = "docs";
pageTemplate = _.template($('#page-template').html())
configTemplate = _.template($('#config-template').html())
footerTemplate = _.template($('#footer-template').html())
# Load Content & Config
contentData = YAML.load '/content.yml'
configData = YAML.load '/config.yml'
$('#page-wrapper').html(pageTemplate(contentData)) # Load Content
$('#header').html(configTemplate(configData)) # Load Page Info
$('#footer').html(footerTemplate(configData)) # Load Footer Info
# Navigation Accordion
$('.subnav').accordion # Navigation Accordion
speed: 'fast'
# Scroll To Section
$('.anchor').on 'click', (e) -> # Navigate to section id
e.preventDefault()
target = $(@).attr 'href'
$('html, body').animate { scrollTop: $(target).offset().top - 25 }, 500
# Fixed navigation
nav = $('#sidebar nav').offset()
console.log nav
$(window).scroll ->
if $(window).scrollTop() > nav.top + 35
$('#sidebar nav').addClass 'fixed'
else
$('#sidebar nav').removeClass 'fixed'
$ ->
Application = new App()
Application.init() | true | # Title: Web Documentation
# Description: HTML Framework for writing web documentations/instructions.
# Version: 0.0.1
# Author: PI:NAME:<NAME>END_PI (@owldesign)
#
# UNDERSCORE USAGE
# <% %> - to execute some code
# <%= %> - to print some value in template
# <%- %> - to print some values with HTML escaped
#
class App
init: =>
styles = ["display: block","background: #f7cd81","color: white","padding: 20px 20px 20px 20px","text-align: center","font-weight: normal","font-size: 20px","line-height: 60px"].join(';')
console.log '%c Web Documentation!', styles, 'Has loaded.'
# FastClick for mobile
FastClick.attach(document.body)
# Navigation Trigger
$('#menu-trigger').on 'click', (e) ->
e.preventDefault()
$(@).toggleClass 'active'
$('body').toggleClass 'active'
$('#content').toggleClass 'active'
$('#sidebar').toggleClass 'active'
# Setup Underscore Templates
_.templateSettings.variable = "docs";
pageTemplate = _.template($('#page-template').html())
configTemplate = _.template($('#config-template').html())
footerTemplate = _.template($('#footer-template').html())
# Load Content & Config
contentData = YAML.load '/content.yml'
configData = YAML.load '/config.yml'
$('#page-wrapper').html(pageTemplate(contentData)) # Load Content
$('#header').html(configTemplate(configData)) # Load Page Info
$('#footer').html(footerTemplate(configData)) # Load Footer Info
# Navigation Accordion
$('.subnav').accordion # Navigation Accordion
speed: 'fast'
# Scroll To Section
$('.anchor').on 'click', (e) -> # Navigate to section id
e.preventDefault()
target = $(@).attr 'href'
$('html, body').animate { scrollTop: $(target).offset().top - 25 }, 500
# Fixed navigation
nav = $('#sidebar nav').offset()
console.log nav
$(window).scroll ->
if $(window).scrollTop() > nav.top + 35
$('#sidebar nav').addClass 'fixed'
else
$('#sidebar nav').removeClass 'fixed'
$ ->
Application = new App()
Application.init() |
[
{
"context": "uest'\nmoment = require 'moment'\napiPath = 'http://104.236.41.161/'\nDEFAULT_TOKEN = 'YreIoA-nX26yqbOrAz45CA'\n\n\n# A ",
"end": 86,
"score": 0.8377216458320618,
"start": 72,
"tag": "IP_ADDRESS",
"value": "104.236.41.161"
},
{
"context": "iPath = 'http://104.236.41.161... | private/coffee/api.coffee | grant/snappo | 0 | request = require 'request'
moment = require 'moment'
apiPath = 'http://104.236.41.161/'
DEFAULT_TOKEN = 'YreIoA-nX26yqbOrAz45CA'
# A wrapper for `https://github.com/rcchen/sh-server`
module.exports =
get:
photos: (sort="recency", cb) ->
url = apiPath + 'api/photos'
params =
token: DEFAULT_TOKEN
longitude: 75
latitude: 39
radius: 100
sort: sort
console.log("sorting by "+sort)
request {url: url, qs: params}, (err, res, body) ->
json = JSON.parse body
for photo in json
photo.created_at_text = moment(photo.created_at).fromNow()
cb json
post:
users: (email, cb) ->
url = apiPath + 'api/users'
console.log email
console.log url
request.post
url: url
qs:
email: email
, (err, res, body) ->
# Return user object
cb JSON.parse body
photos: (photo, token, lat, lng, cb) ->
# TODO
cb()
heart: (photoId, token, cb) ->
console.log "server!"
console.log photoId
console.log token
url = apiPath + 'api/photos/' + photoId + '/heart'
request.post
url: url
xhrFields:
withCredentials: true
form:
token: token
, (err, res, body) ->
# Return the current state of the heart
console.log body
cb JSON.parse body | 69332 | request = require 'request'
moment = require 'moment'
apiPath = 'http://192.168.127.12/'
DEFAULT_TOKEN = '<KEY>'
# A wrapper for `https://github.com/rcchen/sh-server`
module.exports =
get:
photos: (sort="recency", cb) ->
url = apiPath + 'api/photos'
params =
token: DEFAULT_TOKEN
longitude: 75
latitude: 39
radius: 100
sort: sort
console.log("sorting by "+sort)
request {url: url, qs: params}, (err, res, body) ->
json = JSON.parse body
for photo in json
photo.created_at_text = moment(photo.created_at).fromNow()
cb json
post:
users: (email, cb) ->
url = apiPath + 'api/users'
console.log email
console.log url
request.post
url: url
qs:
email: email
, (err, res, body) ->
# Return user object
cb JSON.parse body
photos: (photo, token, lat, lng, cb) ->
# TODO
cb()
heart: (photoId, token, cb) ->
console.log "server!"
console.log photoId
console.log token
url = apiPath + 'api/photos/' + photoId + '/heart'
request.post
url: url
xhrFields:
withCredentials: true
form:
token: token
, (err, res, body) ->
# Return the current state of the heart
console.log body
cb JSON.parse body | true | request = require 'request'
moment = require 'moment'
apiPath = 'http://PI:IP_ADDRESS:192.168.127.12END_PI/'
DEFAULT_TOKEN = 'PI:KEY:<KEY>END_PI'
# A wrapper for `https://github.com/rcchen/sh-server`
module.exports =
get:
photos: (sort="recency", cb) ->
url = apiPath + 'api/photos'
params =
token: DEFAULT_TOKEN
longitude: 75
latitude: 39
radius: 100
sort: sort
console.log("sorting by "+sort)
request {url: url, qs: params}, (err, res, body) ->
json = JSON.parse body
for photo in json
photo.created_at_text = moment(photo.created_at).fromNow()
cb json
post:
users: (email, cb) ->
url = apiPath + 'api/users'
console.log email
console.log url
request.post
url: url
qs:
email: email
, (err, res, body) ->
# Return user object
cb JSON.parse body
photos: (photo, token, lat, lng, cb) ->
# TODO
cb()
heart: (photoId, token, cb) ->
console.log "server!"
console.log photoId
console.log token
url = apiPath + 'api/photos/' + photoId + '/heart'
request.post
url: url
xhrFields:
withCredentials: true
form:
token: token
, (err, res, body) ->
# Return the current state of the heart
console.log body
cb JSON.parse body |
[
{
"context": "opic: (browser)->\n browser.fill \"Name\", \"ArmBiter\"\n \"should set text field\": (browser)-> ass",
"end": 2488,
"score": 0.9927756190299988,
"start": 2480,
"tag": "NAME",
"value": "ArmBiter"
},
{
"context": "qual browser.querySelector(\"#field-name\"... | spec/forms-spec.coffee | crskbel-ca/zombie | 1 | require("./helpers")
{ vows: vows, assert: assert, zombie: zombie, brains: brains } = require("vows")
brains.get "/form", (req, res)-> res.send """
<html>
<body>
<form action="/submit" method="post">
<label>Name <input type="text" name="name" id="field-name"></label>
<label for="field-email">Email</label>
<input type="text" name="email" id="field-email"></label>
<textarea name="likes" id="field-likes">Warm brains</textarea>
<input type="password" name="password" id="field-password">
<label>Hungry <input type="checkbox" name="hungry" value="you bet" id="field-hungry"></label>
<label for="field-brains">Brains?</label>
<input type="checkbox" name="brains" id="field-brains">
<input type="checkbox" name="green" id="field-green" checked>
<label>Looks
<select name="looks" id="field-looks">
<option value="blood" label="Bloody"></option>
<option value="clean" label="Clean"></option>
</select>
</label>
<label>Scary <input name="scary" type="radio" value="yes" id="field-scary"></label>
<label>Not scary <input name="scary" type="radio" value="no" id="field-notscary" checked="checked"></label>
<select name="state" id="field-state">
<option>alive</option>
<option>dead</option>
</select>
<input type="reset" value="Reset">
<input type="submit" name="button" value="Submit">
<button name="button" value="hit-me">Hit Me</button>
</form>
</body>
</html>
"""
brains.post "/submit", (req, res)-> res.send """
<html>
<body>
<div id="name">#{req.body.name}</div>
<div id="likes">#{req.body.likes}</div>
<div id="hungry">#{req.body.hungry}</div>
<div id="state">#{req.body.state}</div>
<div id="scary">#{req.body.scary}</div>
<div id="state">#{req.body.state}</div>
<div id="clicked">#{req.body.button}</div>
</body>
</html>
"""
vows.describe("Forms").addBatch(
"fill field":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["email", "likes", "name", "password"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"text input enclosed in label":
topic: (browser)->
browser.fill "Name", "ArmBiter"
"should set text field": (browser)-> assert.equal browser.querySelector("#field-name").value, "ArmBiter"
"should fire change event": (browser)-> assert.ok browser.nameChanged
"email input referenced from label":
topic: (browser)->
browser.fill "Email", "armbiter@example.com"
"should set email field": (browser)-> assert.equal browser.querySelector("#field-email").value, "armbiter@example.com"
"should fire change event": (browser)-> assert.ok browser.emailChanged
"textarea by field name":
topic: (browser)->
browser.fill "likes", "Arm Biting"
"should set textarea": (browser)-> assert.equal browser.querySelector("#field-likes").value, "Arm Biting"
"should fire change event": (browser)-> assert.ok browser.likesChanged
"password input by selector":
topic: (browser)->
browser.fill ":password[name=password]", "b100d"
"should set password": (browser)-> assert.equal browser.querySelector("#field-password").value, "b100d"
"should fire change event": (browser)-> assert.ok browser.passwordChanged
"check box":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["hungry", "brains", "green"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "click", -> browser["#{field}Clicked"] = true
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"checkbox enclosed in label":
topic: (browser)->
browser.check "Hungry"
browser.wait @callback
"should check checkbox": (browser)-> assert.ok browser.querySelector("#field-hungry").checked
"should fire change event": (browser)-> assert.ok browser.hungryChanged
"checkbox referenced from label":
topic: (browser)->
browser.check "Brains?"
browser.wait @callback
"should check checkbox": (browser)-> assert.ok browser.querySelector("#field-brains").checked
"should fire change event": (browser)-> assert.ok browser.brainsChanged
"checkbox by name":
topic: (browser)->
browser.uncheck "green"
browser.wait @callback
"should uncheck checkbox": (browser)-> assert.ok !browser.querySelector("#field-green").checked
"should fire change event": (browser)-> assert.ok browser.greenChanged
"radio buttons":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["scary", "notscary"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "click", -> browser["#{field}Clicked"] = true
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"radio button enclosed in label":
topic: (browser)->
browser.choose "Scary"
"should check radio": (browser)-> assert.ok browser.querySelector("#field-scary").checked
"should fire click event": (browser)-> assert.ok browser.scaryClicked
"should fire change event": (browser)-> assert.ok browser.scaryChanged
###
"radio button by value":
topic: (browser)->
browser.choose "no"
"should check radio": (browser)-> assert.ok browser.querySelector("#field-notscary").checked
"should uncheck other radio": (browser)-> assert.ok !browser.querySelector("#field-scary").checked
"should fire click event": (browser)-> assert.ok browser.notscaryClicked
"should fire change event": (browser)-> assert.ok browser.notscaryChanged
###
"select option":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["looks", "state"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"enclosed in label using option label":
topic: (browser)->
browser.select "Looks", "Bloody"
"should set value": (browser)-> assert.equal browser.querySelector("#field-looks").value, "blood"
"should select first option": (browser)->
selected = (option.selected for option in browser.querySelector("#field-looks").options)
assert.deepEqual selected, [true, false]
"should fire change event": (browser)-> assert.ok browser.looksChanged
"select name using option value":
topic: (browser)->
browser.select "state", "dead"
"should set value": (browser)-> assert.equal browser.querySelector("#field-state").value, "dead"
"should select second option": (browser)->
selected = (option.selected for option in browser.querySelector("#field-state").options)
assert.deepEqual selected, [false, true]
"should fire change event": (browser)-> assert.ok browser.stateChanged
"reset form":
"by calling reset":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "ArmBiter").fill("likes", "Arm Biting").
check("Hungry").choose("Scary").select("state", "dead")
browser.querySelector("form").reset()
@callback null, browser
"should reset input field to original value": (browser)-> assert.equal browser.querySelector("#field-name").value, ""
"should reset textarea to original value": (browser)-> assert.equal browser.querySelector("#field-likes").value, "Warm brains"
"should reset checkbox to original value": (browser)-> assert.ok !browser.querySelector("#field-hungry").value
"should reset radio to original value": (browser)->
assert.ok !browser.querySelector("#field-scary").checked
assert.ok browser.querySelector("#field-notscary").checked
"should reset select to original option": (browser)-> assert.equal browser.querySelector("#field-state").value, "alive"
"with event handler":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.querySelector("form :reset").addEventListener "click", (event)=> @callback null, event
browser.querySelector("form :reset").click()
"should fire click event": (event)-> assert.equal event.type, "click"
"with preventDefault":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "ArmBiter")
browser.querySelector("form :reset").addEventListener "click", (event)-> event.preventDefault()
browser.querySelector("form :reset").click()
@callback null, browser
"should not reset input field": (browser)-> assert.equal browser.querySelector("#field-name").value, "ArmBiter"
"by clicking reset input":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "ArmBiter")
browser.querySelector("form :reset").click()
@callback null, browser
"should reset input field to original value": (browser)-> assert.equal browser.querySelector("#field-name").value, ""
"submit form":
"by calling submit":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "ArmBiter").fill("likes", "Arm Biting").
check("Hungry").choose("Scary").select("state", "dead")
browser.querySelector("form").submit()
browser.wait @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send text input values to server": (browser)-> assert.equal browser.text("#name"), "ArmBiter"
"should send textarea values to server": (browser)-> assert.equal browser.text("#likes"), "Arm Biting"
"should send checkbox values to server": (browser)-> assert.equal browser.text("#hungry"), "you bet"
"should send radio button to server": (browser)-> assert.equal browser.text("#scary"), "yes"
"should send selected option to server": (browser)-> assert.equal browser.text("#state"), "dead"
"by clicking button":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "ArmBiter").fill("likes", "Arm Biting").
pressButton "Hit Me", @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send button value to server": (browser)-> assert.equal browser.text("#clicked"), "hit-me"
"should send input values to server": (browser)->
assert.equal browser.text("#name"), "ArmBiter"
assert.equal browser.text("#likes"), "Arm Biting"
"by clicking input":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "ArmBiter").fill("likes", "Arm Biting").
pressButton "Submit", @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send submit value to server": (browser)-> assert.equal browser.text("#clicked"), "Submit"
"should send input values to server": (browser)->
assert.equal browser.text("#name"), "ArmBiter"
assert.equal browser.text("#likes"), "Arm Biting"
).export(module)
| 123115 | require("./helpers")
{ vows: vows, assert: assert, zombie: zombie, brains: brains } = require("vows")
brains.get "/form", (req, res)-> res.send """
<html>
<body>
<form action="/submit" method="post">
<label>Name <input type="text" name="name" id="field-name"></label>
<label for="field-email">Email</label>
<input type="text" name="email" id="field-email"></label>
<textarea name="likes" id="field-likes">Warm brains</textarea>
<input type="password" name="password" id="field-password">
<label>Hungry <input type="checkbox" name="hungry" value="you bet" id="field-hungry"></label>
<label for="field-brains">Brains?</label>
<input type="checkbox" name="brains" id="field-brains">
<input type="checkbox" name="green" id="field-green" checked>
<label>Looks
<select name="looks" id="field-looks">
<option value="blood" label="Bloody"></option>
<option value="clean" label="Clean"></option>
</select>
</label>
<label>Scary <input name="scary" type="radio" value="yes" id="field-scary"></label>
<label>Not scary <input name="scary" type="radio" value="no" id="field-notscary" checked="checked"></label>
<select name="state" id="field-state">
<option>alive</option>
<option>dead</option>
</select>
<input type="reset" value="Reset">
<input type="submit" name="button" value="Submit">
<button name="button" value="hit-me">Hit Me</button>
</form>
</body>
</html>
"""
brains.post "/submit", (req, res)-> res.send """
<html>
<body>
<div id="name">#{req.body.name}</div>
<div id="likes">#{req.body.likes}</div>
<div id="hungry">#{req.body.hungry}</div>
<div id="state">#{req.body.state}</div>
<div id="scary">#{req.body.scary}</div>
<div id="state">#{req.body.state}</div>
<div id="clicked">#{req.body.button}</div>
</body>
</html>
"""
vows.describe("Forms").addBatch(
"fill field":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["email", "likes", "name", "password"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"text input enclosed in label":
topic: (browser)->
browser.fill "Name", "<NAME>"
"should set text field": (browser)-> assert.equal browser.querySelector("#field-name").value, "<NAME>"
"should fire change event": (browser)-> assert.ok browser.nameChanged
"email input referenced from label":
topic: (browser)->
browser.fill "Email", "<EMAIL>"
"should set email field": (browser)-> assert.equal browser.querySelector("#field-email").value, "<EMAIL>"
"should fire change event": (browser)-> assert.ok browser.emailChanged
"textarea by field name":
topic: (browser)->
browser.fill "likes", "Arm <NAME>"
"should set textarea": (browser)-> assert.equal browser.querySelector("#field-likes").value, "Arm <NAME>ing"
"should fire change event": (browser)-> assert.ok browser.likesChanged
"password input by selector":
topic: (browser)->
browser.fill ":password[name=password]", "<PASSWORD>"
"should set password": (browser)-> assert.equal browser.querySelector("#field-password").value, "<PASSWORD>"
"should fire change event": (browser)-> assert.ok browser.passwordChanged
"check box":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["hungry", "brains", "green"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "click", -> browser["#{field}Clicked"] = true
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"checkbox enclosed in label":
topic: (browser)->
browser.check "Hungry"
browser.wait @callback
"should check checkbox": (browser)-> assert.ok browser.querySelector("#field-hungry").checked
"should fire change event": (browser)-> assert.ok browser.hungryChanged
"checkbox referenced from label":
topic: (browser)->
browser.check "Brains?"
browser.wait @callback
"should check checkbox": (browser)-> assert.ok browser.querySelector("#field-brains").checked
"should fire change event": (browser)-> assert.ok browser.brainsChanged
"checkbox by name":
topic: (browser)->
browser.uncheck "green"
browser.wait @callback
"should uncheck checkbox": (browser)-> assert.ok !browser.querySelector("#field-green").checked
"should fire change event": (browser)-> assert.ok browser.greenChanged
"radio buttons":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["scary", "notscary"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "click", -> browser["#{field}Clicked"] = true
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"radio button enclosed in label":
topic: (browser)->
browser.choose "Scary"
"should check radio": (browser)-> assert.ok browser.querySelector("#field-scary").checked
"should fire click event": (browser)-> assert.ok browser.scaryClicked
"should fire change event": (browser)-> assert.ok browser.scaryChanged
###
"radio button by value":
topic: (browser)->
browser.choose "no"
"should check radio": (browser)-> assert.ok browser.querySelector("#field-notscary").checked
"should uncheck other radio": (browser)-> assert.ok !browser.querySelector("#field-scary").checked
"should fire click event": (browser)-> assert.ok browser.notscaryClicked
"should fire change event": (browser)-> assert.ok browser.notscaryChanged
###
"select option":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["looks", "state"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"enclosed in label using option label":
topic: (browser)->
browser.select "Looks", "Bloody"
"should set value": (browser)-> assert.equal browser.querySelector("#field-looks").value, "blood"
"should select first option": (browser)->
selected = (option.selected for option in browser.querySelector("#field-looks").options)
assert.deepEqual selected, [true, false]
"should fire change event": (browser)-> assert.ok browser.looksChanged
"select name using option value":
topic: (browser)->
browser.select "state", "dead"
"should set value": (browser)-> assert.equal browser.querySelector("#field-state").value, "dead"
"should select second option": (browser)->
selected = (option.selected for option in browser.querySelector("#field-state").options)
assert.deepEqual selected, [false, true]
"should fire change event": (browser)-> assert.ok browser.stateChanged
"reset form":
"by calling reset":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "<NAME>").fill("likes", "Arm Biting").
check("Hungry").choose("Scary").select("state", "dead")
browser.querySelector("form").reset()
@callback null, browser
"should reset input field to original value": (browser)-> assert.equal browser.querySelector("#field-name").value, ""
"should reset textarea to original value": (browser)-> assert.equal browser.querySelector("#field-likes").value, "Warm brains"
"should reset checkbox to original value": (browser)-> assert.ok !browser.querySelector("#field-hungry").value
"should reset radio to original value": (browser)->
assert.ok !browser.querySelector("#field-scary").checked
assert.ok browser.querySelector("#field-notscary").checked
"should reset select to original option": (browser)-> assert.equal browser.querySelector("#field-state").value, "alive"
"with event handler":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.querySelector("form :reset").addEventListener "click", (event)=> @callback null, event
browser.querySelector("form :reset").click()
"should fire click event": (event)-> assert.equal event.type, "click"
"with preventDefault":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "<NAME>")
browser.querySelector("form :reset").addEventListener "click", (event)-> event.preventDefault()
browser.querySelector("form :reset").click()
@callback null, browser
"should not reset input field": (browser)-> assert.equal browser.querySelector("#field-name").value, "<NAME>"
"by clicking reset input":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "<NAME>")
browser.querySelector("form :reset").click()
@callback null, browser
"should reset input field to original value": (browser)-> assert.equal browser.querySelector("#field-name").value, ""
"submit form":
"by calling submit":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "<NAME>").fill("likes", "<NAME>").
check("Hungry").choose("Scary").select("state", "dead")
browser.querySelector("form").submit()
browser.wait @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send text input values to server": (browser)-> assert.equal browser.text("#name"), "<NAME>"
"should send textarea values to server": (browser)-> assert.equal browser.text("#likes"), "Arm Biting"
"should send checkbox values to server": (browser)-> assert.equal browser.text("#hungry"), "you bet"
"should send radio button to server": (browser)-> assert.equal browser.text("#scary"), "yes"
"should send selected option to server": (browser)-> assert.equal browser.text("#state"), "dead"
"by clicking button":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "<NAME>").fill("likes", "<NAME>ing").
pressButton "Hit Me", @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send button value to server": (browser)-> assert.equal browser.text("#clicked"), "hit-me"
"should send input values to server": (browser)->
assert.equal browser.text("#name"), "<NAME>"
assert.equal browser.text("#likes"), "Arm Biting"
"by clicking input":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "<NAME>").fill("likes", "<NAME>").
pressButton "Submit", @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send submit value to server": (browser)-> assert.equal browser.text("#clicked"), "Submit"
"should send input values to server": (browser)->
assert.equal browser.text("#name"), "<NAME>"
assert.equal browser.text("#likes"), "Arm Biting"
).export(module)
| true | require("./helpers")
{ vows: vows, assert: assert, zombie: zombie, brains: brains } = require("vows")
brains.get "/form", (req, res)-> res.send """
<html>
<body>
<form action="/submit" method="post">
<label>Name <input type="text" name="name" id="field-name"></label>
<label for="field-email">Email</label>
<input type="text" name="email" id="field-email"></label>
<textarea name="likes" id="field-likes">Warm brains</textarea>
<input type="password" name="password" id="field-password">
<label>Hungry <input type="checkbox" name="hungry" value="you bet" id="field-hungry"></label>
<label for="field-brains">Brains?</label>
<input type="checkbox" name="brains" id="field-brains">
<input type="checkbox" name="green" id="field-green" checked>
<label>Looks
<select name="looks" id="field-looks">
<option value="blood" label="Bloody"></option>
<option value="clean" label="Clean"></option>
</select>
</label>
<label>Scary <input name="scary" type="radio" value="yes" id="field-scary"></label>
<label>Not scary <input name="scary" type="radio" value="no" id="field-notscary" checked="checked"></label>
<select name="state" id="field-state">
<option>alive</option>
<option>dead</option>
</select>
<input type="reset" value="Reset">
<input type="submit" name="button" value="Submit">
<button name="button" value="hit-me">Hit Me</button>
</form>
</body>
</html>
"""
brains.post "/submit", (req, res)-> res.send """
<html>
<body>
<div id="name">#{req.body.name}</div>
<div id="likes">#{req.body.likes}</div>
<div id="hungry">#{req.body.hungry}</div>
<div id="state">#{req.body.state}</div>
<div id="scary">#{req.body.scary}</div>
<div id="state">#{req.body.state}</div>
<div id="clicked">#{req.body.button}</div>
</body>
</html>
"""
vows.describe("Forms").addBatch(
"fill field":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["email", "likes", "name", "password"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"text input enclosed in label":
topic: (browser)->
browser.fill "Name", "PI:NAME:<NAME>END_PI"
"should set text field": (browser)-> assert.equal browser.querySelector("#field-name").value, "PI:NAME:<NAME>END_PI"
"should fire change event": (browser)-> assert.ok browser.nameChanged
"email input referenced from label":
topic: (browser)->
browser.fill "Email", "PI:EMAIL:<EMAIL>END_PI"
"should set email field": (browser)-> assert.equal browser.querySelector("#field-email").value, "PI:EMAIL:<EMAIL>END_PI"
"should fire change event": (browser)-> assert.ok browser.emailChanged
"textarea by field name":
topic: (browser)->
browser.fill "likes", "Arm PI:NAME:<NAME>END_PI"
"should set textarea": (browser)-> assert.equal browser.querySelector("#field-likes").value, "Arm PI:NAME:<NAME>END_PIing"
"should fire change event": (browser)-> assert.ok browser.likesChanged
"password input by selector":
topic: (browser)->
browser.fill ":password[name=password]", "PI:PASSWORD:<PASSWORD>END_PI"
"should set password": (browser)-> assert.equal browser.querySelector("#field-password").value, "PI:PASSWORD:<PASSWORD>END_PI"
"should fire change event": (browser)-> assert.ok browser.passwordChanged
"check box":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["hungry", "brains", "green"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "click", -> browser["#{field}Clicked"] = true
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"checkbox enclosed in label":
topic: (browser)->
browser.check "Hungry"
browser.wait @callback
"should check checkbox": (browser)-> assert.ok browser.querySelector("#field-hungry").checked
"should fire change event": (browser)-> assert.ok browser.hungryChanged
"checkbox referenced from label":
topic: (browser)->
browser.check "Brains?"
browser.wait @callback
"should check checkbox": (browser)-> assert.ok browser.querySelector("#field-brains").checked
"should fire change event": (browser)-> assert.ok browser.brainsChanged
"checkbox by name":
topic: (browser)->
browser.uncheck "green"
browser.wait @callback
"should uncheck checkbox": (browser)-> assert.ok !browser.querySelector("#field-green").checked
"should fire change event": (browser)-> assert.ok browser.greenChanged
"radio buttons":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["scary", "notscary"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "click", -> browser["#{field}Clicked"] = true
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"radio button enclosed in label":
topic: (browser)->
browser.choose "Scary"
"should check radio": (browser)-> assert.ok browser.querySelector("#field-scary").checked
"should fire click event": (browser)-> assert.ok browser.scaryClicked
"should fire change event": (browser)-> assert.ok browser.scaryChanged
###
"radio button by value":
topic: (browser)->
browser.choose "no"
"should check radio": (browser)-> assert.ok browser.querySelector("#field-notscary").checked
"should uncheck other radio": (browser)-> assert.ok !browser.querySelector("#field-scary").checked
"should fire click event": (browser)-> assert.ok browser.notscaryClicked
"should fire change event": (browser)-> assert.ok browser.notscaryChanged
###
"select option":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
for field in ["looks", "state"]
do (field)->
browser.querySelector("#field-#{field}").addEventListener "change", -> browser["#{field}Changed"] = true
@callback null, browser
"enclosed in label using option label":
topic: (browser)->
browser.select "Looks", "Bloody"
"should set value": (browser)-> assert.equal browser.querySelector("#field-looks").value, "blood"
"should select first option": (browser)->
selected = (option.selected for option in browser.querySelector("#field-looks").options)
assert.deepEqual selected, [true, false]
"should fire change event": (browser)-> assert.ok browser.looksChanged
"select name using option value":
topic: (browser)->
browser.select "state", "dead"
"should set value": (browser)-> assert.equal browser.querySelector("#field-state").value, "dead"
"should select second option": (browser)->
selected = (option.selected for option in browser.querySelector("#field-state").options)
assert.deepEqual selected, [false, true]
"should fire change event": (browser)-> assert.ok browser.stateChanged
"reset form":
"by calling reset":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "PI:NAME:<NAME>END_PI").fill("likes", "Arm Biting").
check("Hungry").choose("Scary").select("state", "dead")
browser.querySelector("form").reset()
@callback null, browser
"should reset input field to original value": (browser)-> assert.equal browser.querySelector("#field-name").value, ""
"should reset textarea to original value": (browser)-> assert.equal browser.querySelector("#field-likes").value, "Warm brains"
"should reset checkbox to original value": (browser)-> assert.ok !browser.querySelector("#field-hungry").value
"should reset radio to original value": (browser)->
assert.ok !browser.querySelector("#field-scary").checked
assert.ok browser.querySelector("#field-notscary").checked
"should reset select to original option": (browser)-> assert.equal browser.querySelector("#field-state").value, "alive"
"with event handler":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.querySelector("form :reset").addEventListener "click", (event)=> @callback null, event
browser.querySelector("form :reset").click()
"should fire click event": (event)-> assert.equal event.type, "click"
"with preventDefault":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "PI:NAME:<NAME>END_PI")
browser.querySelector("form :reset").addEventListener "click", (event)-> event.preventDefault()
browser.querySelector("form :reset").click()
@callback null, browser
"should not reset input field": (browser)-> assert.equal browser.querySelector("#field-name").value, "PI:NAME:<NAME>END_PI"
"by clicking reset input":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "PI:NAME:<NAME>END_PI")
browser.querySelector("form :reset").click()
@callback null, browser
"should reset input field to original value": (browser)-> assert.equal browser.querySelector("#field-name").value, ""
"submit form":
"by calling submit":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "PI:NAME:<NAME>END_PI").fill("likes", "PI:NAME:<NAME>END_PI").
check("Hungry").choose("Scary").select("state", "dead")
browser.querySelector("form").submit()
browser.wait @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send text input values to server": (browser)-> assert.equal browser.text("#name"), "PI:NAME:<NAME>END_PI"
"should send textarea values to server": (browser)-> assert.equal browser.text("#likes"), "Arm Biting"
"should send checkbox values to server": (browser)-> assert.equal browser.text("#hungry"), "you bet"
"should send radio button to server": (browser)-> assert.equal browser.text("#scary"), "yes"
"should send selected option to server": (browser)-> assert.equal browser.text("#state"), "dead"
"by clicking button":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "PI:NAME:<NAME>END_PI").fill("likes", "PI:NAME:<NAME>END_PIing").
pressButton "Hit Me", @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send button value to server": (browser)-> assert.equal browser.text("#clicked"), "hit-me"
"should send input values to server": (browser)->
assert.equal browser.text("#name"), "PI:NAME:<NAME>END_PI"
assert.equal browser.text("#likes"), "Arm Biting"
"by clicking input":
zombie.wants "http://localhost:3003/form"
topic: (browser)->
browser.fill("Name", "PI:NAME:<NAME>END_PI").fill("likes", "PI:NAME:<NAME>END_PI").
pressButton "Submit", @callback
"should open new page": (browser)-> assert.equal browser.location, "http://localhost:3003/submit"
"should add location to history": (browser)-> assert.length browser.window.history, 2
"should send submit value to server": (browser)-> assert.equal browser.text("#clicked"), "Submit"
"should send input values to server": (browser)->
assert.equal browser.text("#name"), "PI:NAME:<NAME>END_PI"
assert.equal browser.text("#likes"), "Arm Biting"
).export(module)
|
[
{
"context": "#Based on Extjs-OpenLayers by William Fisher - @teknofire on Github\n\n$ = jQuery\n\n$.fn.extend\n",
"end": 44,
"score": 0.999866247177124,
"start": 30,
"tag": "NAME",
"value": "William Fisher"
},
{
"context": "#Based on Extjs-OpenLayers by William Fisher - @teknofire... | app/assets/javascripts/jq-openlayers.js.coffee | gina-alaska/makemap | 0 | #Based on Extjs-OpenLayers by William Fisher - @teknofire on Github
$ = jQuery
$.fn.extend
OpenLayers: (options) ->
config =
map: null,
layers: null
mapConfig = {}
siteConfigs =
# Alaska centric polar projection
'Alaska - EPSG:3572':
defaultLayers: ['TILE.EPSG:3572.*'],
minZoomLevel: 2,
defaultBounds: new OpenLayers.Bounds(-2106121.205656,-4037734.1903821,2003133.434954,-1806995.9569081),
maxExtent: new OpenLayers.Bounds(-12742200.0, -7295308.34278405, 7295308.34278405, 12742200.0),
maxResolution: (20037508.34278405 / 256.0),
units: 'm',
projection: "EPSG:3572",
displayProjection: new OpenLayers.Projection("EPSG:4326")
# Alaskan Albers Equal Area
'Alaska - EPSG:3338':
defaultLayers: ['TILE.EPSG:3338.*'],
defaultBounds: new OpenLayers.Bounds(-2802734.375,-176025.390625,2939453.125,2941162.109375),
maxExtent: new OpenLayers.Bounds(-3500000, -3500000, 3500000, 3500000),
maxResolution: (3500000 * 2.0 / 256.0),
minZoomLevel: 2,
units: 'm',
projection: "EPSG:3338",
displayProjection: new OpenLayers.Projection("EPSG:4326")
# Web Mercator
'USA - EPSG:3857':
defaultLayers: ['TILE.EPSG:3857.AV_BLUEMARBLE'],
projection: "EPSG:3857",
defaultBounds: new OpenLayers.Bounds(-15130862.621001,2553608.2405956,-6912353.3409229,7015084.7069238);
units: 'm',
maxResolution: 156543.0339,
maxExtent: new OpenLayers.Bounds(-20037508, -20037508, 20037508, 20037508),
displayProjection: new OpenLayers.Projection("EPSG:4326")
setupMap = (options)->
for el in this
unless $.data(el, "map")
$.extend config, options
mapConfig = $.extend {}, siteConfigs[config.site], config
mapConfig.defaultLayers = config.layers if config.layers
map = new OpenLayers.Map(el, mapConfig);
Gina.Layers.inject map, mapConfig.defaultLayers
bounds = mapConfig.defaultBounds
# bounds.transform map.displayProjection, map.getProjectionObject();
map.zoomToExtent bounds
map.addControl(new OpenLayers.Control.Attribution);
#TODO: Handle map resizes automatically
$.data(el, "map", map)
#setupLayers = (map) ->
#Gina.Layers.inject map, mapConfig.defaultLayers
methods =
getmap: ->
for el in this
$.data(el, "map")
method = Array.prototype.slice.call(arguments, 0)[0];
if ( methods[method] )
return methods[method].apply( this, Array.prototype.slice.call( arguments, 1 ));
else if ( typeof method is 'object' or ! method )
return setupMap.apply( this, arguments );
else
$.error( 'Method ' + method + ' does not exist on jQuery.OpenLayers' );
| 105787 | #Based on Extjs-OpenLayers by <NAME> - @teknofire on Github
$ = jQuery
$.fn.extend
OpenLayers: (options) ->
config =
map: null,
layers: null
mapConfig = {}
siteConfigs =
# Alaska centric polar projection
'Alaska - EPSG:3572':
defaultLayers: ['TILE.EPSG:3572.*'],
minZoomLevel: 2,
defaultBounds: new OpenLayers.Bounds(-2106121.205656,-4037734.1903821,2003133.434954,-1806995.9569081),
maxExtent: new OpenLayers.Bounds(-12742200.0, -7295308.34278405, 7295308.34278405, 12742200.0),
maxResolution: (20037508.34278405 / 256.0),
units: 'm',
projection: "EPSG:3572",
displayProjection: new OpenLayers.Projection("EPSG:4326")
# Alaskan Albers Equal Area
'Alaska - EPSG:3338':
defaultLayers: ['TILE.EPSG:3338.*'],
defaultBounds: new OpenLayers.Bounds(-2802734.375,-176025.390625,2939453.125,2941162.109375),
maxExtent: new OpenLayers.Bounds(-3500000, -3500000, 3500000, 3500000),
maxResolution: (3500000 * 2.0 / 256.0),
minZoomLevel: 2,
units: 'm',
projection: "EPSG:3338",
displayProjection: new OpenLayers.Projection("EPSG:4326")
# Web Mercator
'USA - EPSG:3857':
defaultLayers: ['TILE.EPSG:3857.AV_BLUEMARBLE'],
projection: "EPSG:3857",
defaultBounds: new OpenLayers.Bounds(-15130862.621001,2553608.2405956,-6912353.3409229,7015084.7069238);
units: 'm',
maxResolution: 156543.0339,
maxExtent: new OpenLayers.Bounds(-20037508, -20037508, 20037508, 20037508),
displayProjection: new OpenLayers.Projection("EPSG:4326")
setupMap = (options)->
for el in this
unless $.data(el, "map")
$.extend config, options
mapConfig = $.extend {}, siteConfigs[config.site], config
mapConfig.defaultLayers = config.layers if config.layers
map = new OpenLayers.Map(el, mapConfig);
Gina.Layers.inject map, mapConfig.defaultLayers
bounds = mapConfig.defaultBounds
# bounds.transform map.displayProjection, map.getProjectionObject();
map.zoomToExtent bounds
map.addControl(new OpenLayers.Control.Attribution);
#TODO: Handle map resizes automatically
$.data(el, "map", map)
#setupLayers = (map) ->
#Gina.Layers.inject map, mapConfig.defaultLayers
methods =
getmap: ->
for el in this
$.data(el, "map")
method = Array.prototype.slice.call(arguments, 0)[0];
if ( methods[method] )
return methods[method].apply( this, Array.prototype.slice.call( arguments, 1 ));
else if ( typeof method is 'object' or ! method )
return setupMap.apply( this, arguments );
else
$.error( 'Method ' + method + ' does not exist on jQuery.OpenLayers' );
| true | #Based on Extjs-OpenLayers by PI:NAME:<NAME>END_PI - @teknofire on Github
$ = jQuery
$.fn.extend
OpenLayers: (options) ->
config =
map: null,
layers: null
mapConfig = {}
siteConfigs =
# Alaska centric polar projection
'Alaska - EPSG:3572':
defaultLayers: ['TILE.EPSG:3572.*'],
minZoomLevel: 2,
defaultBounds: new OpenLayers.Bounds(-2106121.205656,-4037734.1903821,2003133.434954,-1806995.9569081),
maxExtent: new OpenLayers.Bounds(-12742200.0, -7295308.34278405, 7295308.34278405, 12742200.0),
maxResolution: (20037508.34278405 / 256.0),
units: 'm',
projection: "EPSG:3572",
displayProjection: new OpenLayers.Projection("EPSG:4326")
# Alaskan Albers Equal Area
'Alaska - EPSG:3338':
defaultLayers: ['TILE.EPSG:3338.*'],
defaultBounds: new OpenLayers.Bounds(-2802734.375,-176025.390625,2939453.125,2941162.109375),
maxExtent: new OpenLayers.Bounds(-3500000, -3500000, 3500000, 3500000),
maxResolution: (3500000 * 2.0 / 256.0),
minZoomLevel: 2,
units: 'm',
projection: "EPSG:3338",
displayProjection: new OpenLayers.Projection("EPSG:4326")
# Web Mercator
'USA - EPSG:3857':
defaultLayers: ['TILE.EPSG:3857.AV_BLUEMARBLE'],
projection: "EPSG:3857",
defaultBounds: new OpenLayers.Bounds(-15130862.621001,2553608.2405956,-6912353.3409229,7015084.7069238);
units: 'm',
maxResolution: 156543.0339,
maxExtent: new OpenLayers.Bounds(-20037508, -20037508, 20037508, 20037508),
displayProjection: new OpenLayers.Projection("EPSG:4326")
setupMap = (options)->
for el in this
unless $.data(el, "map")
$.extend config, options
mapConfig = $.extend {}, siteConfigs[config.site], config
mapConfig.defaultLayers = config.layers if config.layers
map = new OpenLayers.Map(el, mapConfig);
Gina.Layers.inject map, mapConfig.defaultLayers
bounds = mapConfig.defaultBounds
# bounds.transform map.displayProjection, map.getProjectionObject();
map.zoomToExtent bounds
map.addControl(new OpenLayers.Control.Attribution);
#TODO: Handle map resizes automatically
$.data(el, "map", map)
#setupLayers = (map) ->
#Gina.Layers.inject map, mapConfig.defaultLayers
methods =
getmap: ->
for el in this
$.data(el, "map")
method = Array.prototype.slice.call(arguments, 0)[0];
if ( methods[method] )
return methods[method].apply( this, Array.prototype.slice.call( arguments, 1 ));
else if ( typeof method is 'object' or ! method )
return setupMap.apply( this, arguments );
else
$.error( 'Method ' + method + ' does not exist on jQuery.OpenLayers' );
|
[
{
"context": " of authors', ->\n @rendered.should.containEql 'Halley Johnson'\n\n it 'renders more authors on infinite scroll',",
"end": 1464,
"score": 0.9997333288192749,
"start": 1450,
"tag": "NAME",
"value": "Halley Johnson"
},
{
"context": "nfinite scroll', ->\n authors = ... | src/client/apps/settings/test/client/authors/index.test.coffee | craigspaeth/positron | 76 | _ = require 'underscore'
benv = require 'benv'
sinon = require 'sinon'
Backbone = require 'backbone'
{ resolve } = require 'path'
fixtures = require '../../../../../../test/helpers/fixtures'
React = require 'react'
ReactDOM = require 'react-dom'
ReactTestUtils = require 'react-dom/test-utils'
ReactDOMServer = require 'react-dom/server'
r =
find: ReactTestUtils.findRenderedDOMComponentWithClass
simulate: ReactTestUtils.Simulate
describe 'AuthorsView', ->
beforeEach (done) ->
benv.setup =>
benv.expose $: benv.require 'jquery'
sinon.stub Backbone, 'sync'
{ AuthorsView } = mod = benv.requireWithJadeify(
resolve(__dirname, '../../../client/authors/index')
[]
)
mod.__set__ 'AuthorModal', @AuthorModal = sinon.stub()
mod.__set__ 'request', {
post: sinon.stub().returns({
set: sinon.stub().returns({
send: sinon.stub().returns({
end: @request = sinon.stub()
})
})
})
}
props = authors: [fixtures().authors]
@rendered = ReactDOMServer.renderToString(
React.createElement(AuthorsView, props)
)
@component = ReactDOM.render(
React.createElement(AuthorsView, props),
(@$el = $ "<div></div>")[0],
=>
)
done()
afterEach ->
Backbone.sync.restore()
benv.teardown()
it 'renders list of authors', ->
@rendered.should.containEql 'Halley Johnson'
it 'renders more authors on infinite scroll', ->
authors = [{
name: 'Kana',
id: '123'
}]
@request.yields(null, body: data: authors: authors)
@component.fetchFeed()
@component.state.authors.length.should.equal 2
it 'returns early and does not add authors if there are no more to show', ->
@request.yields(new Error())
@component.fetchFeed()
@component.state.authors.length.should.equal 1
it 'opens the modal with an empty author on Add Author', ->
r.simulate.click r.find @component, 'authors-header__add-author'
@AuthorModal.args[2][0].isOpen.should.be.true()
(@AuthorModal.args[2][0].author is null).should.be.true()
it 'opens the modal with an author on Edit Author', ->
r.simulate.click r.find @component, 'authors-list__item-edit'
@AuthorModal.args[2][0].isOpen.should.be.true()
@AuthorModal.args[2][0].author.id.should.equal '55356a9deca560a0137bb4a7'
@AuthorModal.args[2][0].author.name.should.equal 'Halley Johnson'
@AuthorModal.args[2][0].author.bio.should.equal 'Writer based in NYC'
@AuthorModal.args[2][0].author.twitter_handle.should.equal 'kanaabe'
@AuthorModal.args[2][0].author.image_url.should.equal 'https://artsy-media.net/halley.jpg'
it 'closes the modal', ->
@component.closeModal()
@component.state.isModalOpen.should.be.false()
it 'saves an author', ->
@component.saveAuthor(id: '123456', name: 'Kana Abe')
Backbone.sync.args[0][2].success()
@component.state.authors.length.should.equal 2
@component.state.authors[0].name.should.equal 'Kana Abe'
it 'displays a save error', ->
@component.saveAuthor(id: '123456', name: 123)
Backbone.sync.args[0][2].error()
@component.state.errorMessage.should.equal 'There has been an error. Please contact support.'
| 103127 | _ = require 'underscore'
benv = require 'benv'
sinon = require 'sinon'
Backbone = require 'backbone'
{ resolve } = require 'path'
fixtures = require '../../../../../../test/helpers/fixtures'
React = require 'react'
ReactDOM = require 'react-dom'
ReactTestUtils = require 'react-dom/test-utils'
ReactDOMServer = require 'react-dom/server'
r =
find: ReactTestUtils.findRenderedDOMComponentWithClass
simulate: ReactTestUtils.Simulate
describe 'AuthorsView', ->
beforeEach (done) ->
benv.setup =>
benv.expose $: benv.require 'jquery'
sinon.stub Backbone, 'sync'
{ AuthorsView } = mod = benv.requireWithJadeify(
resolve(__dirname, '../../../client/authors/index')
[]
)
mod.__set__ 'AuthorModal', @AuthorModal = sinon.stub()
mod.__set__ 'request', {
post: sinon.stub().returns({
set: sinon.stub().returns({
send: sinon.stub().returns({
end: @request = sinon.stub()
})
})
})
}
props = authors: [fixtures().authors]
@rendered = ReactDOMServer.renderToString(
React.createElement(AuthorsView, props)
)
@component = ReactDOM.render(
React.createElement(AuthorsView, props),
(@$el = $ "<div></div>")[0],
=>
)
done()
afterEach ->
Backbone.sync.restore()
benv.teardown()
it 'renders list of authors', ->
@rendered.should.containEql '<NAME>'
it 'renders more authors on infinite scroll', ->
authors = [{
name: '<NAME>',
id: '123'
}]
@request.yields(null, body: data: authors: authors)
@component.fetchFeed()
@component.state.authors.length.should.equal 2
it 'returns early and does not add authors if there are no more to show', ->
@request.yields(new Error())
@component.fetchFeed()
@component.state.authors.length.should.equal 1
it 'opens the modal with an empty author on Add Author', ->
r.simulate.click r.find @component, 'authors-header__add-author'
@AuthorModal.args[2][0].isOpen.should.be.true()
(@AuthorModal.args[2][0].author is null).should.be.true()
it 'opens the modal with an author on Edit Author', ->
r.simulate.click r.find @component, 'authors-list__item-edit'
@AuthorModal.args[2][0].isOpen.should.be.true()
@AuthorModal.args[2][0].author.id.should.equal '55356a9deca560a0137bb4a7'
@AuthorModal.args[2][0].author.name.should.equal '<NAME>'
@AuthorModal.args[2][0].author.bio.should.equal 'Writer based in NYC'
@AuthorModal.args[2][0].author.twitter_handle.should.equal 'kanaabe'
@AuthorModal.args[2][0].author.image_url.should.equal 'https://artsy-media.net/halley.jpg'
it 'closes the modal', ->
@component.closeModal()
@component.state.isModalOpen.should.be.false()
it 'saves an author', ->
@component.saveAuthor(id: '123456', name: '<NAME>')
Backbone.sync.args[0][2].success()
@component.state.authors.length.should.equal 2
@component.state.authors[0].name.should.equal '<NAME>'
it 'displays a save error', ->
@component.saveAuthor(id: '123456', name: 123)
Backbone.sync.args[0][2].error()
@component.state.errorMessage.should.equal 'There has been an error. Please contact support.'
| true | _ = require 'underscore'
benv = require 'benv'
sinon = require 'sinon'
Backbone = require 'backbone'
{ resolve } = require 'path'
fixtures = require '../../../../../../test/helpers/fixtures'
React = require 'react'
ReactDOM = require 'react-dom'
ReactTestUtils = require 'react-dom/test-utils'
ReactDOMServer = require 'react-dom/server'
r =
find: ReactTestUtils.findRenderedDOMComponentWithClass
simulate: ReactTestUtils.Simulate
describe 'AuthorsView', ->
beforeEach (done) ->
benv.setup =>
benv.expose $: benv.require 'jquery'
sinon.stub Backbone, 'sync'
{ AuthorsView } = mod = benv.requireWithJadeify(
resolve(__dirname, '../../../client/authors/index')
[]
)
mod.__set__ 'AuthorModal', @AuthorModal = sinon.stub()
mod.__set__ 'request', {
post: sinon.stub().returns({
set: sinon.stub().returns({
send: sinon.stub().returns({
end: @request = sinon.stub()
})
})
})
}
props = authors: [fixtures().authors]
@rendered = ReactDOMServer.renderToString(
React.createElement(AuthorsView, props)
)
@component = ReactDOM.render(
React.createElement(AuthorsView, props),
(@$el = $ "<div></div>")[0],
=>
)
done()
afterEach ->
Backbone.sync.restore()
benv.teardown()
it 'renders list of authors', ->
@rendered.should.containEql 'PI:NAME:<NAME>END_PI'
it 'renders more authors on infinite scroll', ->
authors = [{
name: 'PI:NAME:<NAME>END_PI',
id: '123'
}]
@request.yields(null, body: data: authors: authors)
@component.fetchFeed()
@component.state.authors.length.should.equal 2
it 'returns early and does not add authors if there are no more to show', ->
@request.yields(new Error())
@component.fetchFeed()
@component.state.authors.length.should.equal 1
it 'opens the modal with an empty author on Add Author', ->
r.simulate.click r.find @component, 'authors-header__add-author'
@AuthorModal.args[2][0].isOpen.should.be.true()
(@AuthorModal.args[2][0].author is null).should.be.true()
it 'opens the modal with an author on Edit Author', ->
r.simulate.click r.find @component, 'authors-list__item-edit'
@AuthorModal.args[2][0].isOpen.should.be.true()
@AuthorModal.args[2][0].author.id.should.equal '55356a9deca560a0137bb4a7'
@AuthorModal.args[2][0].author.name.should.equal 'PI:NAME:<NAME>END_PI'
@AuthorModal.args[2][0].author.bio.should.equal 'Writer based in NYC'
@AuthorModal.args[2][0].author.twitter_handle.should.equal 'kanaabe'
@AuthorModal.args[2][0].author.image_url.should.equal 'https://artsy-media.net/halley.jpg'
it 'closes the modal', ->
@component.closeModal()
@component.state.isModalOpen.should.be.false()
it 'saves an author', ->
@component.saveAuthor(id: '123456', name: 'PI:NAME:<NAME>END_PI')
Backbone.sync.args[0][2].success()
@component.state.authors.length.should.equal 2
@component.state.authors[0].name.should.equal 'PI:NAME:<NAME>END_PI'
it 'displays a save error', ->
@component.saveAuthor(id: '123456', name: 123)
Backbone.sync.args[0][2].error()
@component.state.errorMessage.should.equal 'There has been an error. Please contact support.'
|
[
{
"context": "Meteor.startup ->\n\n\tLogo = [\n\t\t\t\t{name: \"Staples\", img_name: \"logo_en.png\", fr_img_name: \"logo_fr.",
"end": 48,
"score": 0.9290788173675537,
"start": 41,
"tag": "NAME",
"value": "Staples"
},
{
"context": "\"copy_print_fr.png\"}\n\t\t\t]\n\n\tManuf = [\n\t\t... | server/loaders/startup.coffee | boogieprod/beg224-shopping-bot | 0 | Meteor.startup ->
Logo = [
{name: "Staples", img_name: "logo_en.png", fr_img_name: "logo_fr.png"}
]
Serv_logo = [
{name: "Make More Happen", img: "make_more_happen.png", fr_img: "make_more_happen_fr.png"}
]
Serv_banners = [
{name: "Easycare service plan", img: "esp.png", desc: "Plan cost calculator and service details", fr_name: "Plan de service fiable", fr_img: "esp_fr.png", fr_desc: "Calculateur et d\u00E9tails du plan"},
{name: "Easytech", img: "easytech.png", desc: "Tech services made simple", fr_name: "\u00C9quipe technique fiable", fr_img: "easytech_fr.png", fr_desc: "Services techniques, la fa\u00E7on simple"},
{name: "Copy&Print center", img: "copycenter.png", desc: "...", fr_name: "Centre de copies et impressions", fr_img: "copycenter_fr.png", fr_desc: "..."}
]
Carousel = [
{name: "Ink-helper", img_name: "copy_print_en.jpg", id: 1, fr_name: "Chercheur d'encre", fr_img_name: "copy_print_fr.png"},
{name: "Virtual Store", img_name: "copy_print_en.jpg", id: 2, fr_name: "Magasin Virtuel", fr_img_name: "copy_print_fr.png"},
{name: "Copy Center", img_name: "copy_print_en.jpg", id: 3, fr_name: "Centre de Copies", fr_img_name: "copy_print_fr.png"},
{name: "Tech Center", img_name: "copy_print_en.jpg", id: 4, fr_name: "Centre technique", fr_img_name: "copy_print_fr.png"},
{name: "Rate your experience", img_name: "copy_print_en.jpg", id: 5, fr_name: "Évaluez votre expérience", fr_img_name: "copy_print_fr.png"}
]
Manuf = [
{name: "Brother", logo: "brother.png", systemId: "BR"},
{name: "Canon", logo: "canon.png", systemId: "CA"},
{name: "Dell", logo: "dell.png", systemId: "DE"},
{name: "Epson", logo: "epson.png", systemId: "EP"},
{name: "HP", logo: "hp.png", systemId: "HP"},
{name: "Lexmark", logo: "lexmark.png", systemId: "LE"},
{name: "Panasonic", logo: "panasonic.png", systemId: "PA"},
{name: "Samsung", logo: "samsung.png", systemId: "SA"},
{name: "Staples", logo: "staples.png", systemId: "OB"},
{name: "Sharp", logo: "sharp.png", systemId: "SH"},
{name: "Xerox", logo: "xerox.png", systemId: "XE"}
]
Model = [
{name: "Artisan", manufacturer: "EP", systemId: "ART"},
{name: "Expression", manufacturer: "EP", systemId: "EXP"},
{name: "Stylus", manufacturer: "EP", systemId: "STY"},
{name: "WorkForce", manufacturer: "EP", systemId: "WKF"},
{name: "Deskjet", manufacturer: "HP", systemId: "DSK"},
{name: "ENVY", manufacturer: "HP", systemId: "ENV"},
{name: "OfficeJet", manufacturer: "HP", systemId: "OFJ"},
{name: "PageWide", manufacturer: "HP", systemId: "PAW"},
{name: "Photosmart", manufacturer: "HP", systemId: "PHS"},
{name: "LaserJet", manufacturer: "HP", systemId: "LAJ"},
{name: "Color LaserJet", manufacturer: "HP", systemId: "CLJ"},
{name: "TopShot LaserJet", manufacturer: "HP", systemId: "TOS"},
{name: "DCP", manufacturer: "BR", systemId: "DCP"},
{name: "HL", manufacturer: "BR", systemId: "BHL"},
{name: "MFC", manufacturer: "BR", systemId: "MFC"}
]
products_cat = [
{name: "Laptops", systemId: "LAP", fr_name: "Ordinateurs portatifs"},
{name: "Desktops", systemId: "DSK", fr_name: "Ordinateurs de bureau"},
{name: "Tablets", systemId: "TAB", fr_name: "Tablettes \u00E9lectroniques"},
{name: "Printers", systemId: "PRI", fr_name: "Imprimantes"},
{name: "Cell phones", systemId: "CEL", fr_name: "T\u00E9l\u00E9phones cellulaires"},
{name: "Chairs", systemId: "CHA", fr_name: "Chaises"},
{name: "Furnitures", systemId: "FUR", fr_name: "Meubles"},
{name: "eReaders", systemId: "REA", fr_name: "Lecteurs \u00E9lectroniques"},
{name: "Shredders", systemId: "SHR", fr_name: "D\u00E9chiqueteurs"},
{name: "Cameras", systemId: "CAM", fr_name: "Appareils photo"}
]
if Manufacturers.find().count() is 0
Manuf.forEach (manuf) ->
Manufacturers.insert {name: manuf.name, logo: manuf.logo, systemId: manuf.systemId}
if PrinterModels.find().count() is 0
Model.forEach (model) ->
PrinterModels.insert {name: model.name, manufacturer: model.manufacturer, systemId: model.systemId}
if HomeCarousel.find().count() is 0
Carousel.forEach (carousel) ->
HomeCarousel.insertTranslations {name: carousel.name, img_name: carousel.img_name, id: carousel.id}, fr: {name: carousel.fr_name, img_name: carousel.fr_img_name}
if Logos.find().count() is 0
Logo.forEach (logo) ->
Logos.insertTranslations {name: logo.name, img_name: logo.img_name}, fr: {img_name: logo.fr_img_name}
if ServicesLogo.find().count() is 0
Serv_logo.forEach (logo) ->
ServicesLogo.insertTranslations {name: logo.name, img: logo.img}, fr: {img: logo.fr_img}
if ServicesBanner.find().count() is 0
Serv_banners.forEach (banner) ->
ServicesBanner.insertTranslations {name: banner.name, description: banner.desc, img: banner.img}, fr: {name: banner.fr_name, description: banner.fr_desc, img: banner.fr_img}
if ProductCategories.find().count() is 0
products_cat.forEach (cat) ->
ProductCategories.insertTranslations {name: cat.name, systemId: cat.systemId}, fr: {name: cat.fr_name}
| 131244 | Meteor.startup ->
Logo = [
{name: "<NAME>", img_name: "logo_en.png", fr_img_name: "logo_fr.png"}
]
Serv_logo = [
{name: "Make More Happen", img: "make_more_happen.png", fr_img: "make_more_happen_fr.png"}
]
Serv_banners = [
{name: "Easycare service plan", img: "esp.png", desc: "Plan cost calculator and service details", fr_name: "Plan de service fiable", fr_img: "esp_fr.png", fr_desc: "Calculateur et d\u00E9tails du plan"},
{name: "Easytech", img: "easytech.png", desc: "Tech services made simple", fr_name: "\u00C9quipe technique fiable", fr_img: "easytech_fr.png", fr_desc: "Services techniques, la fa\u00E7on simple"},
{name: "Copy&Print center", img: "copycenter.png", desc: "...", fr_name: "Centre de copies et impressions", fr_img: "copycenter_fr.png", fr_desc: "..."}
]
Carousel = [
{name: "Ink-helper", img_name: "copy_print_en.jpg", id: 1, fr_name: "Chercheur d'encre", fr_img_name: "copy_print_fr.png"},
{name: "Virtual Store", img_name: "copy_print_en.jpg", id: 2, fr_name: "Magasin Virtuel", fr_img_name: "copy_print_fr.png"},
{name: "Copy Center", img_name: "copy_print_en.jpg", id: 3, fr_name: "Centre de Copies", fr_img_name: "copy_print_fr.png"},
{name: "Tech Center", img_name: "copy_print_en.jpg", id: 4, fr_name: "Centre technique", fr_img_name: "copy_print_fr.png"},
{name: "Rate your experience", img_name: "copy_print_en.jpg", id: 5, fr_name: "Évaluez votre expérience", fr_img_name: "copy_print_fr.png"}
]
Manuf = [
{name: "<NAME>", logo: "brother.png", systemId: "BR"},
{name: "<NAME>", logo: "canon.png", systemId: "CA"},
{name: "<NAME>", logo: "dell.png", systemId: "DE"},
{name: "<NAME>", logo: "epson.png", systemId: "EP"},
{name: "HP", logo: "hp.png", systemId: "HP"},
{name: "<NAME>", logo: "lexmark.png", systemId: "LE"},
{name: "Panasonic", logo: "panasonic.png", systemId: "PA"},
{name: "Samsung", logo: "samsung.png", systemId: "SA"},
{name: "Staples", logo: "staples.png", systemId: "OB"},
{name: "Sharp", logo: "sharp.png", systemId: "SH"},
{name: "Xerox", logo: "xerox.png", systemId: "XE"}
]
Model = [
{name: "Artisan", manufacturer: "EP", systemId: "ART"},
{name: "Expression", manufacturer: "EP", systemId: "EXP"},
{name: "Stylus", manufacturer: "EP", systemId: "STY"},
{name: "WorkForce", manufacturer: "EP", systemId: "WKF"},
{name: "Deskjet", manufacturer: "HP", systemId: "DSK"},
{name: "ENVY", manufacturer: "HP", systemId: "ENV"},
{name: "OfficeJet", manufacturer: "HP", systemId: "OFJ"},
{name: "PageWide", manufacturer: "HP", systemId: "PAW"},
{name: "Photosmart", manufacturer: "HP", systemId: "PHS"},
{name: "LaserJet", manufacturer: "HP", systemId: "LAJ"},
{name: "Color LaserJet", manufacturer: "HP", systemId: "CLJ"},
{name: "TopShot LaserJet", manufacturer: "HP", systemId: "TOS"},
{name: "DCP", manufacturer: "BR", systemId: "DCP"},
{name: "HL", manufacturer: "BR", systemId: "BHL"},
{name: "MFC", manufacturer: "BR", systemId: "MFC"}
]
products_cat = [
{name: "Laptops", systemId: "LAP", fr_name: "Ordinateurs portatifs"},
{name: "Desktops", systemId: "DSK", fr_name: "Ordinateurs de bureau"},
{name: "Tablets", systemId: "TAB", fr_name: "Tablettes \u00E9lectroniques"},
{name: "Printers", systemId: "PRI", fr_name: "Imprimantes"},
{name: "Cell phones", systemId: "CEL", fr_name: "T\u00E9l\u00E9phones cellulaires"},
{name: "Chairs", systemId: "CHA", fr_name: "Chaises"},
{name: "Furnitures", systemId: "FUR", fr_name: "Meubles"},
{name: "eReaders", systemId: "REA", fr_name: "Lecteurs \u00E9lectroniques"},
{name: "Shredders", systemId: "SHR", fr_name: "D\u00E9chiqueteurs"},
{name: "Cameras", systemId: "CAM", fr_name: "Appareils photo"}
]
if Manufacturers.find().count() is 0
Manuf.forEach (manuf) ->
Manufacturers.insert {name: manuf.name, logo: manuf.logo, systemId: manuf.systemId}
if PrinterModels.find().count() is 0
Model.forEach (model) ->
PrinterModels.insert {name: model.name, manufacturer: model.manufacturer, systemId: model.systemId}
if HomeCarousel.find().count() is 0
Carousel.forEach (carousel) ->
HomeCarousel.insertTranslations {name: carousel.name, img_name: carousel.img_name, id: carousel.id}, fr: {name: carousel.fr_name, img_name: carousel.fr_img_name}
if Logos.find().count() is 0
Logo.forEach (logo) ->
Logos.insertTranslations {name: logo.name, img_name: logo.img_name}, fr: {img_name: logo.fr_img_name}
if ServicesLogo.find().count() is 0
Serv_logo.forEach (logo) ->
ServicesLogo.insertTranslations {name: logo.name, img: logo.img}, fr: {img: logo.fr_img}
if ServicesBanner.find().count() is 0
Serv_banners.forEach (banner) ->
ServicesBanner.insertTranslations {name: banner.name, description: banner.desc, img: banner.img}, fr: {name: banner.fr_name, description: banner.fr_desc, img: banner.fr_img}
if ProductCategories.find().count() is 0
products_cat.forEach (cat) ->
ProductCategories.insertTranslations {name: cat.name, systemId: cat.systemId}, fr: {name: cat.fr_name}
| true | Meteor.startup ->
Logo = [
{name: "PI:NAME:<NAME>END_PI", img_name: "logo_en.png", fr_img_name: "logo_fr.png"}
]
Serv_logo = [
{name: "Make More Happen", img: "make_more_happen.png", fr_img: "make_more_happen_fr.png"}
]
Serv_banners = [
{name: "Easycare service plan", img: "esp.png", desc: "Plan cost calculator and service details", fr_name: "Plan de service fiable", fr_img: "esp_fr.png", fr_desc: "Calculateur et d\u00E9tails du plan"},
{name: "Easytech", img: "easytech.png", desc: "Tech services made simple", fr_name: "\u00C9quipe technique fiable", fr_img: "easytech_fr.png", fr_desc: "Services techniques, la fa\u00E7on simple"},
{name: "Copy&Print center", img: "copycenter.png", desc: "...", fr_name: "Centre de copies et impressions", fr_img: "copycenter_fr.png", fr_desc: "..."}
]
Carousel = [
{name: "Ink-helper", img_name: "copy_print_en.jpg", id: 1, fr_name: "Chercheur d'encre", fr_img_name: "copy_print_fr.png"},
{name: "Virtual Store", img_name: "copy_print_en.jpg", id: 2, fr_name: "Magasin Virtuel", fr_img_name: "copy_print_fr.png"},
{name: "Copy Center", img_name: "copy_print_en.jpg", id: 3, fr_name: "Centre de Copies", fr_img_name: "copy_print_fr.png"},
{name: "Tech Center", img_name: "copy_print_en.jpg", id: 4, fr_name: "Centre technique", fr_img_name: "copy_print_fr.png"},
{name: "Rate your experience", img_name: "copy_print_en.jpg", id: 5, fr_name: "Évaluez votre expérience", fr_img_name: "copy_print_fr.png"}
]
Manuf = [
{name: "PI:NAME:<NAME>END_PI", logo: "brother.png", systemId: "BR"},
{name: "PI:NAME:<NAME>END_PI", logo: "canon.png", systemId: "CA"},
{name: "PI:NAME:<NAME>END_PI", logo: "dell.png", systemId: "DE"},
{name: "PI:NAME:<NAME>END_PI", logo: "epson.png", systemId: "EP"},
{name: "HP", logo: "hp.png", systemId: "HP"},
{name: "PI:NAME:<NAME>END_PI", logo: "lexmark.png", systemId: "LE"},
{name: "Panasonic", logo: "panasonic.png", systemId: "PA"},
{name: "Samsung", logo: "samsung.png", systemId: "SA"},
{name: "Staples", logo: "staples.png", systemId: "OB"},
{name: "Sharp", logo: "sharp.png", systemId: "SH"},
{name: "Xerox", logo: "xerox.png", systemId: "XE"}
]
Model = [
{name: "Artisan", manufacturer: "EP", systemId: "ART"},
{name: "Expression", manufacturer: "EP", systemId: "EXP"},
{name: "Stylus", manufacturer: "EP", systemId: "STY"},
{name: "WorkForce", manufacturer: "EP", systemId: "WKF"},
{name: "Deskjet", manufacturer: "HP", systemId: "DSK"},
{name: "ENVY", manufacturer: "HP", systemId: "ENV"},
{name: "OfficeJet", manufacturer: "HP", systemId: "OFJ"},
{name: "PageWide", manufacturer: "HP", systemId: "PAW"},
{name: "Photosmart", manufacturer: "HP", systemId: "PHS"},
{name: "LaserJet", manufacturer: "HP", systemId: "LAJ"},
{name: "Color LaserJet", manufacturer: "HP", systemId: "CLJ"},
{name: "TopShot LaserJet", manufacturer: "HP", systemId: "TOS"},
{name: "DCP", manufacturer: "BR", systemId: "DCP"},
{name: "HL", manufacturer: "BR", systemId: "BHL"},
{name: "MFC", manufacturer: "BR", systemId: "MFC"}
]
products_cat = [
{name: "Laptops", systemId: "LAP", fr_name: "Ordinateurs portatifs"},
{name: "Desktops", systemId: "DSK", fr_name: "Ordinateurs de bureau"},
{name: "Tablets", systemId: "TAB", fr_name: "Tablettes \u00E9lectroniques"},
{name: "Printers", systemId: "PRI", fr_name: "Imprimantes"},
{name: "Cell phones", systemId: "CEL", fr_name: "T\u00E9l\u00E9phones cellulaires"},
{name: "Chairs", systemId: "CHA", fr_name: "Chaises"},
{name: "Furnitures", systemId: "FUR", fr_name: "Meubles"},
{name: "eReaders", systemId: "REA", fr_name: "Lecteurs \u00E9lectroniques"},
{name: "Shredders", systemId: "SHR", fr_name: "D\u00E9chiqueteurs"},
{name: "Cameras", systemId: "CAM", fr_name: "Appareils photo"}
]
if Manufacturers.find().count() is 0
Manuf.forEach (manuf) ->
Manufacturers.insert {name: manuf.name, logo: manuf.logo, systemId: manuf.systemId}
if PrinterModels.find().count() is 0
Model.forEach (model) ->
PrinterModels.insert {name: model.name, manufacturer: model.manufacturer, systemId: model.systemId}
if HomeCarousel.find().count() is 0
Carousel.forEach (carousel) ->
HomeCarousel.insertTranslations {name: carousel.name, img_name: carousel.img_name, id: carousel.id}, fr: {name: carousel.fr_name, img_name: carousel.fr_img_name}
if Logos.find().count() is 0
Logo.forEach (logo) ->
Logos.insertTranslations {name: logo.name, img_name: logo.img_name}, fr: {img_name: logo.fr_img_name}
if ServicesLogo.find().count() is 0
Serv_logo.forEach (logo) ->
ServicesLogo.insertTranslations {name: logo.name, img: logo.img}, fr: {img: logo.fr_img}
if ServicesBanner.find().count() is 0
Serv_banners.forEach (banner) ->
ServicesBanner.insertTranslations {name: banner.name, description: banner.desc, img: banner.img}, fr: {name: banner.fr_name, description: banner.fr_desc, img: banner.fr_img}
if ProductCategories.find().count() is 0
products_cat.forEach (cat) ->
ProductCategories.insertTranslations {name: cat.name, systemId: cat.systemId}, fr: {name: cat.fr_name}
|
[
{
"context": "ponents to be written as a pure function\n# @author Yannick Croissant\n# @author Alberto Rodríguez\n# @copyright 2015 Alb",
"end": 110,
"score": 0.9998692274093628,
"start": 93,
"tag": "NAME",
"value": "Yannick Croissant"
},
{
"context": "ure function\n# @author Yannick... | src/rules/prefer-stateless-function.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Enforce stateless components to be written as a pure function
# @author Yannick Croissant
# @author Alberto Rodríguez
# @copyright 2015 Alberto Rodríguez. All rights reserved.
###
'use strict'
Components = require '../util/react/Components'
versionUtil = require 'eslint-plugin-react/lib/util/version'
astUtil = require '../util/react/ast'
docsUrl = require 'eslint-plugin-react/lib/util/docsUrl'
{isDeclarationAssignment} = require '../util/ast-utils'
# ------------------------------------------------------------------------------
# Rule Definition
# ------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'Enforce stateless components to be written as a pure function'
category: 'Stylistic Issues'
recommended: no
url: docsUrl 'prefer-stateless-function'
schema: [
type: 'object'
properties:
ignorePureComponents:
default: no
type: 'boolean'
additionalProperties: no
]
create: Components.detect (context, components, utils) ->
configuration = context.options[0] or {}
ignorePureComponents = configuration.ignorePureComponents or no
sourceCode = context.getSourceCode()
# --------------------------------------------------------------------------
# Public
# --------------------------------------------------------------------------
###*
# Checks whether a given array of statements is a single call of `super`.
# @see ESLint no-useless-constructor rule
# @param {ASTNode[]} body - An array of statements to check.
# @returns {boolean} `true` if the body is a single call of `super`.
###
isSingleSuperCall = (body) ->
body.length is 1 and
body[0].type is 'ExpressionStatement' and
body[0].expression.type is 'CallExpression' and
body[0].expression.callee.type is 'Super'
###*
# Checks whether a given node is a pattern which doesn't have any side effects.
# Default parameters and Destructuring parameters can have side effects.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} node - A pattern node.
# @returns {boolean} `true` if the node doesn't have any side effects.
###
isSimple = (node) -> node.type in ['Identifier', 'RestElement']
###*
# Checks whether a given array of expressions is `...arguments` or not.
# `super(...arguments)` passes all arguments through.
# @see ESLint no-useless-constructor rule
# @param {ASTNode[]} superArgs - An array of expressions to check.
# @returns {boolean} `true` if the superArgs is `...arguments`.
###
isSpreadArguments = (superArgs) ->
superArgs.length is 1 and
superArgs[0].type is 'SpreadElement' and
superArgs[0].argument.type is 'Identifier' and
superArgs[0].argument.name is 'arguments'
###*
# Checks whether given 2 nodes are identifiers which have the same name or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes are identifiers which have the same
# name.
###
isValidIdentifierPair = (ctorParam, superArg) ->
ctorParam.type is 'Identifier' and
superArg.type is 'Identifier' and
ctorParam.name is superArg.name
###*
# Checks whether given 2 nodes are a rest/spread pair which has the same values.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes are a rest/spread pair which has the
# same values.
###
isValidRestSpreadPair = (ctorParam, superArg) ->
ctorParam.type is 'RestElement' and
superArg.type is 'SpreadElement' and
isValidIdentifierPair ctorParam.argument, superArg.argument
###*
# Checks whether given 2 nodes have the same value or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes have the same value or not.
###
isValidPair = (ctorParam, superArg) ->
isValidIdentifierPair(ctorParam, superArg) or
isValidRestSpreadPair ctorParam, superArg
###*
# Checks whether the parameters of a constructor and the arguments of `super()`
# have the same values or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParams - The parameters of a constructor to check.
# @param {ASTNode} superArgs - The arguments of `super()` to check.
# @returns {boolean} `true` if those have the same values.
###
isPassingThrough = (ctorParams, superArgs) ->
return no unless ctorParams.length is superArgs.length
i = 0
while i < ctorParams.length
return no unless isValidPair ctorParams[i], superArgs[i]
++i
yes
###*
# Checks whether the constructor body is a redundant super call.
# @see ESLint no-useless-constructor rule
# @param {Array} body - constructor body content.
# @param {Array} ctorParams - The params to check against super call.
# @returns {boolean} true if the construtor body is redundant
###
isRedundantSuperCall = (body, ctorParams) ->
isSingleSuperCall(body) and
ctorParams.every(isSimple) and
(isSpreadArguments(body[0].expression.arguments) or
isPassingThrough ctorParams, body[0].expression.arguments)
###*
# Check if a given AST node have any other properties the ones available in stateless components
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node has at least one other property, false if not.
###
hasOtherProperties = (node) ->
properties = astUtil.getComponentProperties node
properties.some (property) ->
name = astUtil.getPropertyName property
isDisplayName = name is 'displayName'
isPropTypes =
name is 'propTypes' or (name is 'props' and property.typeAnnotation)
contextTypes = name is 'contextTypes'
defaultProps = name is 'defaultProps'
isUselessConstructor =
property.kind is 'constructor' and
isRedundantSuperCall property.value.body.body, property.value.params
isRender = name is 'render'
not isDisplayName and
not isPropTypes and
not contextTypes and
not defaultProps and
not isUselessConstructor and
not isRender
###*
# Mark component as pure as declared
# @param {ASTNode} node The AST node being checked.
###
markSCUAsDeclared = (node) ->
components.set node, hasSCU: yes
###*
# Mark childContextTypes as declared
# @param {ASTNode} node The AST node being checked.
###
markChildContextTypesAsDeclared = (node) ->
components.set node, hasChildContextTypes: yes
###*
# Mark a setState as used
# @param {ASTNode} node The AST node being checked.
###
markThisAsUsed = (node) ->
components.set node, useThis: yes
###*
# Mark a props or context as used
# @param {ASTNode} node The AST node being checked.
###
markPropsOrContextAsUsed = (node) ->
components.set node, usePropsOrContext: yes
###*
# Mark a ref as used
# @param {ASTNode} node The AST node being checked.
###
markRefAsUsed = (node) ->
components.set node, useRef: yes
###*
# Mark return as invalid
# @param {ASTNode} node The AST node being checked.
###
markReturnAsInvalid = (node) ->
components.set node, invalidReturn: yes
###*
# Mark a ClassDeclaration as having used decorators
# @param {ASTNode} node The AST node being checked.
###
markDecoratorsAsUsed = (node) ->
components.set node, useDecorators: yes
visitClass = (node) ->
if ignorePureComponents and utils.isPureComponent node
markSCUAsDeclared node
if node.decorators?.length then markDecoratorsAsUsed node
ClassDeclaration: visitClass
ClassExpression: visitClass
# Mark `this` destructuring as a usage of `this`
VariableDeclarator: (node) ->
# Ignore destructuring on other than `this`
return unless (
node.id?.type is 'ObjectPattern' and node.init?.type is 'ThisExpression'
)
# Ignore `props` and `context`
useThis = node.id.properties.some (property) ->
name = astUtil.getPropertyName property
name isnt 'props' and name isnt 'context'
unless useThis
markPropsOrContextAsUsed node
return
markThisAsUsed node
AssignmentExpression: (node) ->
return unless isDeclarationAssignment node
{left, right} = node
# Ignore destructuring on other than `this`
return unless (
left.type is 'ObjectPattern' and right.type is 'ThisExpression'
)
# Ignore `props` and `context`
useThis = left.properties.some (property) ->
name = astUtil.getPropertyName property
name isnt 'props' and name isnt 'context'
unless useThis
markPropsOrContextAsUsed node
return
markThisAsUsed node
# Mark `this` usage
MemberExpression: (node) ->
unless node.object.type is 'ThisExpression'
if node.property and node.property.name is 'childContextTypes'
component = utils.getRelatedComponent node
return unless component
markChildContextTypesAsDeclared component.node
return
return
# Ignore calls to `this.props` and `this.context`
else if (
(node.property.name or node.property.value) is 'props' or
(node.property.name or node.property.value) is 'context'
)
markPropsOrContextAsUsed node
return
markThisAsUsed node
# Mark `ref` usage
JSXAttribute: (node) ->
name = sourceCode.getText node.name
return unless name is 'ref'
markRefAsUsed node
# Mark `render` that do not return some JSX
ReturnStatement: (node) ->
scope = context.getScope()
while scope
blockNode = scope.block?.parent
if blockNode and blockNode.type in ['MethodDefinition', 'Property']
break
scope = scope.upper
isRender = blockNode?.key and blockNode.key.name is 'render'
allowNull = versionUtil.testReactVersion context, '15.0.0' # Stateless components can return null since React 15
isReturningJSX = utils.isReturningJSX node, not allowNull
isReturningNull = node.argument and node.argument.value in [null, no]
return if (
not isRender or
(allowNull and (isReturningJSX or isReturningNull)) or
(not allowNull and isReturningJSX)
)
markReturnAsInvalid node
'Program:exit': ->
list = components.list()
for own _, component of list
continue if (
hasOtherProperties(component.node) or
component.useThis or
component.useRef or
component.invalidReturn or
component.hasChildContextTypes or
component.useDecorators or
(not utils.isES5Component(component.node) and
not utils.isES6Component component.node)
)
continue if component.hasSCU and component.usePropsOrContext
context.report
node: component.node
message: 'Component should be written as a pure function'
| 68684 | ###*
# @fileoverview Enforce stateless components to be written as a pure function
# @author <NAME>
# @author <NAME>
# @copyright 2015 <NAME>. All rights reserved.
###
'use strict'
Components = require '../util/react/Components'
versionUtil = require 'eslint-plugin-react/lib/util/version'
astUtil = require '../util/react/ast'
docsUrl = require 'eslint-plugin-react/lib/util/docsUrl'
{isDeclarationAssignment} = require '../util/ast-utils'
# ------------------------------------------------------------------------------
# Rule Definition
# ------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'Enforce stateless components to be written as a pure function'
category: 'Stylistic Issues'
recommended: no
url: docsUrl 'prefer-stateless-function'
schema: [
type: 'object'
properties:
ignorePureComponents:
default: no
type: 'boolean'
additionalProperties: no
]
create: Components.detect (context, components, utils) ->
configuration = context.options[0] or {}
ignorePureComponents = configuration.ignorePureComponents or no
sourceCode = context.getSourceCode()
# --------------------------------------------------------------------------
# Public
# --------------------------------------------------------------------------
###*
# Checks whether a given array of statements is a single call of `super`.
# @see ESLint no-useless-constructor rule
# @param {ASTNode[]} body - An array of statements to check.
# @returns {boolean} `true` if the body is a single call of `super`.
###
isSingleSuperCall = (body) ->
body.length is 1 and
body[0].type is 'ExpressionStatement' and
body[0].expression.type is 'CallExpression' and
body[0].expression.callee.type is 'Super'
###*
# Checks whether a given node is a pattern which doesn't have any side effects.
# Default parameters and Destructuring parameters can have side effects.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} node - A pattern node.
# @returns {boolean} `true` if the node doesn't have any side effects.
###
isSimple = (node) -> node.type in ['Identifier', 'RestElement']
###*
# Checks whether a given array of expressions is `...arguments` or not.
# `super(...arguments)` passes all arguments through.
# @see ESLint no-useless-constructor rule
# @param {ASTNode[]} superArgs - An array of expressions to check.
# @returns {boolean} `true` if the superArgs is `...arguments`.
###
isSpreadArguments = (superArgs) ->
superArgs.length is 1 and
superArgs[0].type is 'SpreadElement' and
superArgs[0].argument.type is 'Identifier' and
superArgs[0].argument.name is 'arguments'
###*
# Checks whether given 2 nodes are identifiers which have the same name or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes are identifiers which have the same
# name.
###
isValidIdentifierPair = (ctorParam, superArg) ->
ctorParam.type is 'Identifier' and
superArg.type is 'Identifier' and
ctorParam.name is superArg.name
###*
# Checks whether given 2 nodes are a rest/spread pair which has the same values.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes are a rest/spread pair which has the
# same values.
###
isValidRestSpreadPair = (ctorParam, superArg) ->
ctorParam.type is 'RestElement' and
superArg.type is 'SpreadElement' and
isValidIdentifierPair ctorParam.argument, superArg.argument
###*
# Checks whether given 2 nodes have the same value or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes have the same value or not.
###
isValidPair = (ctorParam, superArg) ->
isValidIdentifierPair(ctorParam, superArg) or
isValidRestSpreadPair ctorParam, superArg
###*
# Checks whether the parameters of a constructor and the arguments of `super()`
# have the same values or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParams - The parameters of a constructor to check.
# @param {ASTNode} superArgs - The arguments of `super()` to check.
# @returns {boolean} `true` if those have the same values.
###
isPassingThrough = (ctorParams, superArgs) ->
return no unless ctorParams.length is superArgs.length
i = 0
while i < ctorParams.length
return no unless isValidPair ctorParams[i], superArgs[i]
++i
yes
###*
# Checks whether the constructor body is a redundant super call.
# @see ESLint no-useless-constructor rule
# @param {Array} body - constructor body content.
# @param {Array} ctorParams - The params to check against super call.
# @returns {boolean} true if the construtor body is redundant
###
isRedundantSuperCall = (body, ctorParams) ->
isSingleSuperCall(body) and
ctorParams.every(isSimple) and
(isSpreadArguments(body[0].expression.arguments) or
isPassingThrough ctorParams, body[0].expression.arguments)
###*
# Check if a given AST node have any other properties the ones available in stateless components
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node has at least one other property, false if not.
###
hasOtherProperties = (node) ->
properties = astUtil.getComponentProperties node
properties.some (property) ->
name = astUtil.getPropertyName property
isDisplayName = name is 'displayName'
isPropTypes =
name is 'propTypes' or (name is 'props' and property.typeAnnotation)
contextTypes = name is 'contextTypes'
defaultProps = name is 'defaultProps'
isUselessConstructor =
property.kind is 'constructor' and
isRedundantSuperCall property.value.body.body, property.value.params
isRender = name is 'render'
not isDisplayName and
not isPropTypes and
not contextTypes and
not defaultProps and
not isUselessConstructor and
not isRender
###*
# Mark component as pure as declared
# @param {ASTNode} node The AST node being checked.
###
markSCUAsDeclared = (node) ->
components.set node, hasSCU: yes
###*
# Mark childContextTypes as declared
# @param {ASTNode} node The AST node being checked.
###
markChildContextTypesAsDeclared = (node) ->
components.set node, hasChildContextTypes: yes
###*
# Mark a setState as used
# @param {ASTNode} node The AST node being checked.
###
markThisAsUsed = (node) ->
components.set node, useThis: yes
###*
# Mark a props or context as used
# @param {ASTNode} node The AST node being checked.
###
markPropsOrContextAsUsed = (node) ->
components.set node, usePropsOrContext: yes
###*
# Mark a ref as used
# @param {ASTNode} node The AST node being checked.
###
markRefAsUsed = (node) ->
components.set node, useRef: yes
###*
# Mark return as invalid
# @param {ASTNode} node The AST node being checked.
###
markReturnAsInvalid = (node) ->
components.set node, invalidReturn: yes
###*
# Mark a ClassDeclaration as having used decorators
# @param {ASTNode} node The AST node being checked.
###
markDecoratorsAsUsed = (node) ->
components.set node, useDecorators: yes
visitClass = (node) ->
if ignorePureComponents and utils.isPureComponent node
markSCUAsDeclared node
if node.decorators?.length then markDecoratorsAsUsed node
ClassDeclaration: visitClass
ClassExpression: visitClass
# Mark `this` destructuring as a usage of `this`
VariableDeclarator: (node) ->
# Ignore destructuring on other than `this`
return unless (
node.id?.type is 'ObjectPattern' and node.init?.type is 'ThisExpression'
)
# Ignore `props` and `context`
useThis = node.id.properties.some (property) ->
name = astUtil.getPropertyName property
name isnt 'props' and name isnt 'context'
unless useThis
markPropsOrContextAsUsed node
return
markThisAsUsed node
AssignmentExpression: (node) ->
return unless isDeclarationAssignment node
{left, right} = node
# Ignore destructuring on other than `this`
return unless (
left.type is 'ObjectPattern' and right.type is 'ThisExpression'
)
# Ignore `props` and `context`
useThis = left.properties.some (property) ->
name = astUtil.getPropertyName property
name isnt 'props' and name isnt 'context'
unless useThis
markPropsOrContextAsUsed node
return
markThisAsUsed node
# Mark `this` usage
MemberExpression: (node) ->
unless node.object.type is 'ThisExpression'
if node.property and node.property.name is 'childContextTypes'
component = utils.getRelatedComponent node
return unless component
markChildContextTypesAsDeclared component.node
return
return
# Ignore calls to `this.props` and `this.context`
else if (
(node.property.name or node.property.value) is 'props' or
(node.property.name or node.property.value) is 'context'
)
markPropsOrContextAsUsed node
return
markThisAsUsed node
# Mark `ref` usage
JSXAttribute: (node) ->
name = sourceCode.getText node.name
return unless name is 'ref'
markRefAsUsed node
# Mark `render` that do not return some JSX
ReturnStatement: (node) ->
scope = context.getScope()
while scope
blockNode = scope.block?.parent
if blockNode and blockNode.type in ['MethodDefinition', 'Property']
break
scope = scope.upper
isRender = blockNode?.key and blockNode.key.name is 'render'
allowNull = versionUtil.testReactVersion context, '15.0.0' # Stateless components can return null since React 15
isReturningJSX = utils.isReturningJSX node, not allowNull
isReturningNull = node.argument and node.argument.value in [null, no]
return if (
not isRender or
(allowNull and (isReturningJSX or isReturningNull)) or
(not allowNull and isReturningJSX)
)
markReturnAsInvalid node
'Program:exit': ->
list = components.list()
for own _, component of list
continue if (
hasOtherProperties(component.node) or
component.useThis or
component.useRef or
component.invalidReturn or
component.hasChildContextTypes or
component.useDecorators or
(not utils.isES5Component(component.node) and
not utils.isES6Component component.node)
)
continue if component.hasSCU and component.usePropsOrContext
context.report
node: component.node
message: 'Component should be written as a pure function'
| true | ###*
# @fileoverview Enforce stateless components to be written as a pure function
# @author PI:NAME:<NAME>END_PI
# @author PI:NAME:<NAME>END_PI
# @copyright 2015 PI:NAME:<NAME>END_PI. All rights reserved.
###
'use strict'
Components = require '../util/react/Components'
versionUtil = require 'eslint-plugin-react/lib/util/version'
astUtil = require '../util/react/ast'
docsUrl = require 'eslint-plugin-react/lib/util/docsUrl'
{isDeclarationAssignment} = require '../util/ast-utils'
# ------------------------------------------------------------------------------
# Rule Definition
# ------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'Enforce stateless components to be written as a pure function'
category: 'Stylistic Issues'
recommended: no
url: docsUrl 'prefer-stateless-function'
schema: [
type: 'object'
properties:
ignorePureComponents:
default: no
type: 'boolean'
additionalProperties: no
]
create: Components.detect (context, components, utils) ->
configuration = context.options[0] or {}
ignorePureComponents = configuration.ignorePureComponents or no
sourceCode = context.getSourceCode()
# --------------------------------------------------------------------------
# Public
# --------------------------------------------------------------------------
###*
# Checks whether a given array of statements is a single call of `super`.
# @see ESLint no-useless-constructor rule
# @param {ASTNode[]} body - An array of statements to check.
# @returns {boolean} `true` if the body is a single call of `super`.
###
isSingleSuperCall = (body) ->
body.length is 1 and
body[0].type is 'ExpressionStatement' and
body[0].expression.type is 'CallExpression' and
body[0].expression.callee.type is 'Super'
###*
# Checks whether a given node is a pattern which doesn't have any side effects.
# Default parameters and Destructuring parameters can have side effects.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} node - A pattern node.
# @returns {boolean} `true` if the node doesn't have any side effects.
###
isSimple = (node) -> node.type in ['Identifier', 'RestElement']
###*
# Checks whether a given array of expressions is `...arguments` or not.
# `super(...arguments)` passes all arguments through.
# @see ESLint no-useless-constructor rule
# @param {ASTNode[]} superArgs - An array of expressions to check.
# @returns {boolean} `true` if the superArgs is `...arguments`.
###
isSpreadArguments = (superArgs) ->
superArgs.length is 1 and
superArgs[0].type is 'SpreadElement' and
superArgs[0].argument.type is 'Identifier' and
superArgs[0].argument.name is 'arguments'
###*
# Checks whether given 2 nodes are identifiers which have the same name or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes are identifiers which have the same
# name.
###
isValidIdentifierPair = (ctorParam, superArg) ->
ctorParam.type is 'Identifier' and
superArg.type is 'Identifier' and
ctorParam.name is superArg.name
###*
# Checks whether given 2 nodes are a rest/spread pair which has the same values.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes are a rest/spread pair which has the
# same values.
###
isValidRestSpreadPair = (ctorParam, superArg) ->
ctorParam.type is 'RestElement' and
superArg.type is 'SpreadElement' and
isValidIdentifierPair ctorParam.argument, superArg.argument
###*
# Checks whether given 2 nodes have the same value or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParam - A node to check.
# @param {ASTNode} superArg - A node to check.
# @returns {boolean} `true` if the nodes have the same value or not.
###
isValidPair = (ctorParam, superArg) ->
isValidIdentifierPair(ctorParam, superArg) or
isValidRestSpreadPair ctorParam, superArg
###*
# Checks whether the parameters of a constructor and the arguments of `super()`
# have the same values or not.
# @see ESLint no-useless-constructor rule
# @param {ASTNode} ctorParams - The parameters of a constructor to check.
# @param {ASTNode} superArgs - The arguments of `super()` to check.
# @returns {boolean} `true` if those have the same values.
###
isPassingThrough = (ctorParams, superArgs) ->
return no unless ctorParams.length is superArgs.length
i = 0
while i < ctorParams.length
return no unless isValidPair ctorParams[i], superArgs[i]
++i
yes
###*
# Checks whether the constructor body is a redundant super call.
# @see ESLint no-useless-constructor rule
# @param {Array} body - constructor body content.
# @param {Array} ctorParams - The params to check against super call.
# @returns {boolean} true if the construtor body is redundant
###
isRedundantSuperCall = (body, ctorParams) ->
isSingleSuperCall(body) and
ctorParams.every(isSimple) and
(isSpreadArguments(body[0].expression.arguments) or
isPassingThrough ctorParams, body[0].expression.arguments)
###*
# Check if a given AST node have any other properties the ones available in stateless components
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node has at least one other property, false if not.
###
hasOtherProperties = (node) ->
properties = astUtil.getComponentProperties node
properties.some (property) ->
name = astUtil.getPropertyName property
isDisplayName = name is 'displayName'
isPropTypes =
name is 'propTypes' or (name is 'props' and property.typeAnnotation)
contextTypes = name is 'contextTypes'
defaultProps = name is 'defaultProps'
isUselessConstructor =
property.kind is 'constructor' and
isRedundantSuperCall property.value.body.body, property.value.params
isRender = name is 'render'
not isDisplayName and
not isPropTypes and
not contextTypes and
not defaultProps and
not isUselessConstructor and
not isRender
###*
# Mark component as pure as declared
# @param {ASTNode} node The AST node being checked.
###
markSCUAsDeclared = (node) ->
components.set node, hasSCU: yes
###*
# Mark childContextTypes as declared
# @param {ASTNode} node The AST node being checked.
###
markChildContextTypesAsDeclared = (node) ->
components.set node, hasChildContextTypes: yes
###*
# Mark a setState as used
# @param {ASTNode} node The AST node being checked.
###
markThisAsUsed = (node) ->
components.set node, useThis: yes
###*
# Mark a props or context as used
# @param {ASTNode} node The AST node being checked.
###
markPropsOrContextAsUsed = (node) ->
components.set node, usePropsOrContext: yes
###*
# Mark a ref as used
# @param {ASTNode} node The AST node being checked.
###
markRefAsUsed = (node) ->
components.set node, useRef: yes
###*
# Mark return as invalid
# @param {ASTNode} node The AST node being checked.
###
markReturnAsInvalid = (node) ->
components.set node, invalidReturn: yes
###*
# Mark a ClassDeclaration as having used decorators
# @param {ASTNode} node The AST node being checked.
###
markDecoratorsAsUsed = (node) ->
components.set node, useDecorators: yes
visitClass = (node) ->
if ignorePureComponents and utils.isPureComponent node
markSCUAsDeclared node
if node.decorators?.length then markDecoratorsAsUsed node
ClassDeclaration: visitClass
ClassExpression: visitClass
# Mark `this` destructuring as a usage of `this`
VariableDeclarator: (node) ->
# Ignore destructuring on other than `this`
return unless (
node.id?.type is 'ObjectPattern' and node.init?.type is 'ThisExpression'
)
# Ignore `props` and `context`
useThis = node.id.properties.some (property) ->
name = astUtil.getPropertyName property
name isnt 'props' and name isnt 'context'
unless useThis
markPropsOrContextAsUsed node
return
markThisAsUsed node
AssignmentExpression: (node) ->
return unless isDeclarationAssignment node
{left, right} = node
# Ignore destructuring on other than `this`
return unless (
left.type is 'ObjectPattern' and right.type is 'ThisExpression'
)
# Ignore `props` and `context`
useThis = left.properties.some (property) ->
name = astUtil.getPropertyName property
name isnt 'props' and name isnt 'context'
unless useThis
markPropsOrContextAsUsed node
return
markThisAsUsed node
# Mark `this` usage
MemberExpression: (node) ->
unless node.object.type is 'ThisExpression'
if node.property and node.property.name is 'childContextTypes'
component = utils.getRelatedComponent node
return unless component
markChildContextTypesAsDeclared component.node
return
return
# Ignore calls to `this.props` and `this.context`
else if (
(node.property.name or node.property.value) is 'props' or
(node.property.name or node.property.value) is 'context'
)
markPropsOrContextAsUsed node
return
markThisAsUsed node
# Mark `ref` usage
JSXAttribute: (node) ->
name = sourceCode.getText node.name
return unless name is 'ref'
markRefAsUsed node
# Mark `render` that do not return some JSX
ReturnStatement: (node) ->
scope = context.getScope()
while scope
blockNode = scope.block?.parent
if blockNode and blockNode.type in ['MethodDefinition', 'Property']
break
scope = scope.upper
isRender = blockNode?.key and blockNode.key.name is 'render'
allowNull = versionUtil.testReactVersion context, '15.0.0' # Stateless components can return null since React 15
isReturningJSX = utils.isReturningJSX node, not allowNull
isReturningNull = node.argument and node.argument.value in [null, no]
return if (
not isRender or
(allowNull and (isReturningJSX or isReturningNull)) or
(not allowNull and isReturningJSX)
)
markReturnAsInvalid node
'Program:exit': ->
list = components.list()
for own _, component of list
continue if (
hasOtherProperties(component.node) or
component.useThis or
component.useRef or
component.invalidReturn or
component.hasChildContextTypes or
component.useDecorators or
(not utils.isES5Component(component.node) and
not utils.isES6Component component.node)
)
continue if component.hasSCU and component.usePropsOrContext
context.report
node: component.node
message: 'Component should be written as a pure function'
|
[
{
"context": "calStorage', (localStorage) ->\n TAGS_LIST_KEY = 'hypothesis.user.tags.list'\n TAGS_MAP_KEY = 'hypothesis.user.tags.map'\n\n f",
"end": 96,
"score": 0.9609803557395935,
"start": 71,
"tag": "KEY",
"value": "hypothesis.user.tags.list"
},
{
"context": "Y = 'hypothesis.u... | h/static/scripts/tags.coffee | noscripter/h | 0 | module.exports = ['localStorage', (localStorage) ->
TAGS_LIST_KEY = 'hypothesis.user.tags.list'
TAGS_MAP_KEY = 'hypothesis.user.tags.map'
filter: (query) ->
savedTags = localStorage.getObject TAGS_LIST_KEY
savedTags ?= []
# Only show tags having query as a substring
filterFn = (e) ->
e.toLowerCase().indexOf(query.toLowerCase()) > -1
savedTags.filter(filterFn)
# Add newly added tags from an annotation to the stored ones and refresh
# timestamp for every tags used.
store: (tags) ->
savedTags = localStorage.getObject TAGS_MAP_KEY
savedTags ?= {}
for tag in tags
if savedTags[tag.text]?
# Update counter and timestamp
savedTags[tag.text].count += 1
savedTags[tag.text].updated = Date.now()
else
# Brand new tag, create an entry for it
savedTags[tag.text] = {
text: tag.text
count: 1
updated: Date.now()
}
localStorage.setObject TAGS_MAP_KEY, savedTags
tagsList = []
for tag of savedTags
tagsList[tagsList.length] = tag
# Now produce TAGS_LIST, ordered by (count desc, lexical asc)
compareFn = (t1, t2) ->
if savedTags[t1].count != savedTags[t2].count
return savedTags[t2].count - savedTags[t1].count
else
return -1 if t1 < t2
return 1 if t1 > t2
return 0
tagsList = tagsList.sort(compareFn)
localStorage.setObject TAGS_LIST_KEY, tagsList
]
| 122178 | module.exports = ['localStorage', (localStorage) ->
TAGS_LIST_KEY = '<KEY>'
TAGS_MAP_KEY = '<KEY>'
filter: (query) ->
savedTags = localStorage.getObject TAGS_LIST_KEY
savedTags ?= []
# Only show tags having query as a substring
filterFn = (e) ->
e.toLowerCase().indexOf(query.toLowerCase()) > -1
savedTags.filter(filterFn)
# Add newly added tags from an annotation to the stored ones and refresh
# timestamp for every tags used.
store: (tags) ->
savedTags = localStorage.getObject TAGS_MAP_KEY
savedTags ?= {}
for tag in tags
if savedTags[tag.text]?
# Update counter and timestamp
savedTags[tag.text].count += 1
savedTags[tag.text].updated = Date.now()
else
# Brand new tag, create an entry for it
savedTags[tag.text] = {
text: tag.text
count: 1
updated: Date.now()
}
localStorage.setObject TAGS_MAP_KEY, savedTags
tagsList = []
for tag of savedTags
tagsList[tagsList.length] = tag
# Now produce TAGS_LIST, ordered by (count desc, lexical asc)
compareFn = (t1, t2) ->
if savedTags[t1].count != savedTags[t2].count
return savedTags[t2].count - savedTags[t1].count
else
return -1 if t1 < t2
return 1 if t1 > t2
return 0
tagsList = tagsList.sort(compareFn)
localStorage.setObject TAGS_LIST_KEY, tagsList
]
| true | module.exports = ['localStorage', (localStorage) ->
TAGS_LIST_KEY = 'PI:KEY:<KEY>END_PI'
TAGS_MAP_KEY = 'PI:KEY:<KEY>END_PI'
filter: (query) ->
savedTags = localStorage.getObject TAGS_LIST_KEY
savedTags ?= []
# Only show tags having query as a substring
filterFn = (e) ->
e.toLowerCase().indexOf(query.toLowerCase()) > -1
savedTags.filter(filterFn)
# Add newly added tags from an annotation to the stored ones and refresh
# timestamp for every tags used.
store: (tags) ->
savedTags = localStorage.getObject TAGS_MAP_KEY
savedTags ?= {}
for tag in tags
if savedTags[tag.text]?
# Update counter and timestamp
savedTags[tag.text].count += 1
savedTags[tag.text].updated = Date.now()
else
# Brand new tag, create an entry for it
savedTags[tag.text] = {
text: tag.text
count: 1
updated: Date.now()
}
localStorage.setObject TAGS_MAP_KEY, savedTags
tagsList = []
for tag of savedTags
tagsList[tagsList.length] = tag
# Now produce TAGS_LIST, ordered by (count desc, lexical asc)
compareFn = (t1, t2) ->
if savedTags[t1].count != savedTags[t2].count
return savedTags[t2].count - savedTags[t1].count
else
return -1 if t1 < t2
return 1 if t1 > t2
return 0
tagsList = tagsList.sort(compareFn)
localStorage.setObject TAGS_LIST_KEY, tagsList
]
|
[
{
"context": "90\n 12.45\n ]\n name: 'Vatican City'\n }\n {\n latLng: [\n ",
"end": 4988,
"score": 0.9976409077644348,
"start": 4976,
"tag": "NAME",
"value": "Vatican City"
},
{
"context": ".73\n 7.41\n ]\... | resources/assets/coffee/modules/dashboard.coffee | nilsenj/storecamp | 0 | $ ->
'use strict'
### ChartJS
# -------
# Here we will create a few charts using ChartJS
###
#-----------------------
#- MONTHLY SALES CHART -
#-----------------------
# Get context with jQuery - using jQuery's .get() method.
if($('#salesChart').length > 0)
salesChartCanvas = $('#salesChart').get(0).getContext('2d')
salesChart = new Chart(salesChartCanvas)
# This will get the first returned node in the jQuery collection.
salesChartData =
labels: [
'January'
'February'
'March'
'April'
'May'
'June'
'July'
]
datasets: [
{
label: 'Electronics'
fillColor: 'rgb(210, 214, 222)'
strokeColor: 'rgb(210, 214, 222)'
pointColor: 'rgb(210, 214, 222)'
pointStrokeColor: '#c1c7d1'
pointHighlightFill: '#fff'
pointHighlightStroke: 'rgb(220,220,220)'
data: [
65
59
80
81
56
55
40
]
}
{
label: 'Digital Goods'
fillColor: 'rgba(60,141,188,0.9)'
strokeColor: 'rgba(60,141,188,0.8)'
pointColor: '#3b8bba'
pointStrokeColor: 'rgba(60,141,188,1)'
pointHighlightFill: '#fff'
pointHighlightStroke: 'rgba(60,141,188,1)'
data: [
28
48
40
19
86
27
90
]
}
]
salesChartOptions =
showScale: true
scaleShowGridLines: false
scaleGridLineColor: 'rgba(0,0,0,.05)'
scaleGridLineWidth: 1
scaleShowHorizontalLines: true
scaleShowVerticalLines: true
bezierCurve: true
bezierCurveTension: 0.3
pointDot: false
pointDotRadius: 4
pointDotStrokeWidth: 1
pointHitDetectionRadius: 20
datasetStroke: true
datasetStrokeWidth: 2
datasetFill: true
legendTemplate: '<ul class="<%=name.toLowerCase()%>-legend"><% for (var i=0; i<datasets.length; i++){%><li><span style="background-color:<%=datasets[i].lineColor%>"></span><%=datasets[i].label%></li><%}%></ul>'
maintainAspectRatio: true
responsive: true
#Create the line chart
salesChart.Line salesChartData, salesChartOptions
#---------------------------
#- END MONTHLY SALES CHART -
#---------------------------
#-------------
#- PIE CHART -
#-------------
# Get context with jQuery - using jQuery's .get() method.
pieChartCanvas = $('#pieChart').get(0).getContext('2d')
pieChart = new Chart(pieChartCanvas)
PieData = [
{
value: 700
color: '#f56954'
highlight: '#f56954'
label: 'Chrome'
}
{
value: 500
color: '#00a65a'
highlight: '#00a65a'
label: 'IE'
}
{
value: 400
color: '#f39c12'
highlight: '#f39c12'
label: 'FireFox'
}
{
value: 600
color: '#00c0ef'
highlight: '#00c0ef'
label: 'Safari'
}
{
value: 300
color: '#3c8dbc'
highlight: '#3c8dbc'
label: 'Opera'
}
{
value: 100
color: '#d2d6de'
highlight: '#d2d6de'
label: 'Navigator'
}
]
pieOptions =
segmentShowStroke: true
segmentStrokeColor: '#fff'
segmentStrokeWidth: 1
percentageInnerCutout: 50
animationSteps: 100
animationEasing: 'easeOutBounce'
animateRotate: true
animateScale: false
responsive: true
maintainAspectRatio: false
legendTemplate: '<ul class="<%=name.toLowerCase()%>-legend"><% for (var i=0; i<segments.length; i++){%><li><span style="background-color:<%=segments[i].fillColor%>"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul>'
tooltipTemplate: '<%=value %> <%=label%> users'
#Create pie or douhnut chart
# You can switch between pie and douhnut using the method below.
pieChart.Doughnut PieData, pieOptions
#-----------------
#- END PIE CHART -
#-----------------
### jVector Maps
# ------------
# Create a world map with markers
###
$('#world-map-markers').vectorMap
map: 'world_mill_en'
normalizeFunction: 'polynomial'
hoverOpacity: 0.7
hoverColor: false
backgroundColor: 'transparent'
regionStyle:
initial:
fill: 'rgba(210, 214, 222, 1)'
'fill-opacity': 1
stroke: 'none'
'stroke-width': 0
'stroke-opacity': 1
hover:
'fill-opacity': 0.7
cursor: 'pointer'
selected: fill: 'yellow'
selectedHover: {}
markerStyle: initial:
fill: '#00a65a'
stroke: '#111'
markers: [
{
latLng: [
41.90
12.45
]
name: 'Vatican City'
}
{
latLng: [
43.73
7.41
]
name: 'Monaco'
}
{
latLng: [
-0.52
166.93
]
name: 'Nauru'
}
{
latLng: [
-8.51
179.21
]
name: 'Tuvalu'
}
{
latLng: [
43.93
12.46
]
name: 'San Marino'
}
{
latLng: [
47.14
9.52
]
name: 'Liechtenstein'
}
{
latLng: [
7.11
171.06
]
name: 'Marshall Islands'
}
{
latLng: [
17.3
-62.73
]
name: 'Saint Kitts and Nevis'
}
{
latLng: [
3.2
73.22
]
name: 'Maldives'
}
{
latLng: [
35.88
14.5
]
name: 'Malta'
}
{
latLng: [
12.05
-61.75
]
name: 'Grenada'
}
{
latLng: [
13.16
-61.23
]
name: 'Saint Vincent and the Grenadines'
}
{
latLng: [
13.16
-59.55
]
name: 'Barbados'
}
{
latLng: [
17.11
-61.85
]
name: 'Antigua and Barbuda'
}
{
latLng: [
-4.61
55.45
]
name: 'Seychelles'
}
{
latLng: [
7.35
134.46
]
name: 'Palau'
}
{
latLng: [
42.5
1.51
]
name: 'Andorra'
}
{
latLng: [
14.01
-60.98
]
name: 'Saint Lucia'
}
{
latLng: [
6.91
158.18
]
name: 'Federated States of Micronesia'
}
{
latLng: [
1.3
103.8
]
name: 'Singapore'
}
{
latLng: [
1.46
173.03
]
name: 'Kiribati'
}
{
latLng: [
-21.13
-175.2
]
name: 'Tonga'
}
{
latLng: [
15.3
-61.38
]
name: 'Dominica'
}
{
latLng: [
-20.2
57.5
]
name: 'Mauritius'
}
{
latLng: [
26.02
50.55
]
name: 'Bahrain'
}
{
latLng: [
0.33
6.73
]
name: 'São Tomé and Príncipe'
}
]
### SPARKLINE CHARTS
# ----------------
# Create a inline charts with spark line
###
#-----------------
#- SPARKLINE BAR -
#-----------------
$('.sparkbar').each ->
$this = $(this)
$this.sparkline 'html',
type: 'bar'
height: if $this.data('height') then $this.data('height') else '30'
barColor: $this.data('color')
return
#-----------------
#- SPARKLINE PIE -
#-----------------
$('.sparkpie').each ->
$this = $(this)
$this.sparkline 'html',
type: 'pie'
height: if $this.data('height') then $this.data('height') else '90'
sliceColors: $this.data('color')
return
#------------------
#- SPARKLINE LINE -
#------------------
$('.sparkline').each ->
$this = $(this)
$this.sparkline 'html',
type: 'line'
height: if $this.data('height') then $this.data('height') else '90'
width: '100%'
lineColor: $this.data('linecolor')
fillColor: $this.data('fillcolor')
spotColor: $this.data('spotcolor')
return
return
# ---
else
salesChartCanvas = null
salesChart = null
| 188709 | $ ->
'use strict'
### ChartJS
# -------
# Here we will create a few charts using ChartJS
###
#-----------------------
#- MONTHLY SALES CHART -
#-----------------------
# Get context with jQuery - using jQuery's .get() method.
if($('#salesChart').length > 0)
salesChartCanvas = $('#salesChart').get(0).getContext('2d')
salesChart = new Chart(salesChartCanvas)
# This will get the first returned node in the jQuery collection.
salesChartData =
labels: [
'January'
'February'
'March'
'April'
'May'
'June'
'July'
]
datasets: [
{
label: 'Electronics'
fillColor: 'rgb(210, 214, 222)'
strokeColor: 'rgb(210, 214, 222)'
pointColor: 'rgb(210, 214, 222)'
pointStrokeColor: '#c1c7d1'
pointHighlightFill: '#fff'
pointHighlightStroke: 'rgb(220,220,220)'
data: [
65
59
80
81
56
55
40
]
}
{
label: 'Digital Goods'
fillColor: 'rgba(60,141,188,0.9)'
strokeColor: 'rgba(60,141,188,0.8)'
pointColor: '#3b8bba'
pointStrokeColor: 'rgba(60,141,188,1)'
pointHighlightFill: '#fff'
pointHighlightStroke: 'rgba(60,141,188,1)'
data: [
28
48
40
19
86
27
90
]
}
]
salesChartOptions =
showScale: true
scaleShowGridLines: false
scaleGridLineColor: 'rgba(0,0,0,.05)'
scaleGridLineWidth: 1
scaleShowHorizontalLines: true
scaleShowVerticalLines: true
bezierCurve: true
bezierCurveTension: 0.3
pointDot: false
pointDotRadius: 4
pointDotStrokeWidth: 1
pointHitDetectionRadius: 20
datasetStroke: true
datasetStrokeWidth: 2
datasetFill: true
legendTemplate: '<ul class="<%=name.toLowerCase()%>-legend"><% for (var i=0; i<datasets.length; i++){%><li><span style="background-color:<%=datasets[i].lineColor%>"></span><%=datasets[i].label%></li><%}%></ul>'
maintainAspectRatio: true
responsive: true
#Create the line chart
salesChart.Line salesChartData, salesChartOptions
#---------------------------
#- END MONTHLY SALES CHART -
#---------------------------
#-------------
#- PIE CHART -
#-------------
# Get context with jQuery - using jQuery's .get() method.
pieChartCanvas = $('#pieChart').get(0).getContext('2d')
pieChart = new Chart(pieChartCanvas)
PieData = [
{
value: 700
color: '#f56954'
highlight: '#f56954'
label: 'Chrome'
}
{
value: 500
color: '#00a65a'
highlight: '#00a65a'
label: 'IE'
}
{
value: 400
color: '#f39c12'
highlight: '#f39c12'
label: 'FireFox'
}
{
value: 600
color: '#00c0ef'
highlight: '#00c0ef'
label: 'Safari'
}
{
value: 300
color: '#3c8dbc'
highlight: '#3c8dbc'
label: 'Opera'
}
{
value: 100
color: '#d2d6de'
highlight: '#d2d6de'
label: 'Navigator'
}
]
pieOptions =
segmentShowStroke: true
segmentStrokeColor: '#fff'
segmentStrokeWidth: 1
percentageInnerCutout: 50
animationSteps: 100
animationEasing: 'easeOutBounce'
animateRotate: true
animateScale: false
responsive: true
maintainAspectRatio: false
legendTemplate: '<ul class="<%=name.toLowerCase()%>-legend"><% for (var i=0; i<segments.length; i++){%><li><span style="background-color:<%=segments[i].fillColor%>"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul>'
tooltipTemplate: '<%=value %> <%=label%> users'
#Create pie or douhnut chart
# You can switch between pie and douhnut using the method below.
pieChart.Doughnut PieData, pieOptions
#-----------------
#- END PIE CHART -
#-----------------
### jVector Maps
# ------------
# Create a world map with markers
###
$('#world-map-markers').vectorMap
map: 'world_mill_en'
normalizeFunction: 'polynomial'
hoverOpacity: 0.7
hoverColor: false
backgroundColor: 'transparent'
regionStyle:
initial:
fill: 'rgba(210, 214, 222, 1)'
'fill-opacity': 1
stroke: 'none'
'stroke-width': 0
'stroke-opacity': 1
hover:
'fill-opacity': 0.7
cursor: 'pointer'
selected: fill: 'yellow'
selectedHover: {}
markerStyle: initial:
fill: '#00a65a'
stroke: '#111'
markers: [
{
latLng: [
41.90
12.45
]
name: '<NAME>'
}
{
latLng: [
43.73
7.41
]
name: '<NAME>'
}
{
latLng: [
-0.52
166.93
]
name: '<NAME>'
}
{
latLng: [
-8.51
179.21
]
name: '<NAME>'
}
{
latLng: [
43.93
12.46
]
name: '<NAME>'
}
{
latLng: [
47.14
9.52
]
name: '<NAME>'
}
{
latLng: [
7.11
171.06
]
name: '<NAME>'
}
{
latLng: [
17.3
-62.73
]
name: 'Saint Kitts and Ne<NAME>'
}
{
latLng: [
3.2
73.22
]
name: '<NAME>'
}
{
latLng: [
35.88
14.5
]
name: '<NAME>'
}
{
latLng: [
12.05
-61.75
]
name: '<NAME>'
}
{
latLng: [
13.16
-61.23
]
name: '<NAME> and the <NAME>'
}
{
latLng: [
13.16
-59.55
]
name: '<NAME>'
}
{
latLng: [
17.11
-61.85
]
name: '<NAME>'
}
{
latLng: [
-4.61
55.45
]
name: '<NAME>'
}
{
latLng: [
7.35
134.46
]
name: '<NAME>'
}
{
latLng: [
42.5
1.51
]
name: '<NAME>'
}
{
latLng: [
14.01
-60.98
]
name: '<NAME>'
}
{
latLng: [
6.91
158.18
]
name: 'Federated States of Micronesia'
}
{
latLng: [
1.3
103.8
]
name: 'Singapore'
}
{
latLng: [
1.46
173.03
]
name: '<NAME>'
}
{
latLng: [
-21.13
-175.2
]
name: '<NAME>'
}
{
latLng: [
15.3
-61.38
]
name: '<NAME>'
}
{
latLng: [
-20.2
57.5
]
name: '<NAME>'
}
{
latLng: [
26.02
50.55
]
name: '<NAME>'
}
{
latLng: [
0.33
6.73
]
name: '<NAME> and <NAME>'
}
]
### SPARKLINE CHARTS
# ----------------
# Create a inline charts with spark line
###
#-----------------
#- SPARKLINE BAR -
#-----------------
$('.sparkbar').each ->
$this = $(this)
$this.sparkline 'html',
type: 'bar'
height: if $this.data('height') then $this.data('height') else '30'
barColor: $this.data('color')
return
#-----------------
#- SPARKLINE PIE -
#-----------------
$('.sparkpie').each ->
$this = $(this)
$this.sparkline 'html',
type: 'pie'
height: if $this.data('height') then $this.data('height') else '90'
sliceColors: $this.data('color')
return
#------------------
#- SPARKLINE LINE -
#------------------
$('.sparkline').each ->
$this = $(this)
$this.sparkline 'html',
type: 'line'
height: if $this.data('height') then $this.data('height') else '90'
width: '100%'
lineColor: $this.data('linecolor')
fillColor: $this.data('fillcolor')
spotColor: $this.data('spotcolor')
return
return
# ---
else
salesChartCanvas = null
salesChart = null
| true | $ ->
'use strict'
### ChartJS
# -------
# Here we will create a few charts using ChartJS
###
#-----------------------
#- MONTHLY SALES CHART -
#-----------------------
# Get context with jQuery - using jQuery's .get() method.
if($('#salesChart').length > 0)
salesChartCanvas = $('#salesChart').get(0).getContext('2d')
salesChart = new Chart(salesChartCanvas)
# This will get the first returned node in the jQuery collection.
salesChartData =
labels: [
'January'
'February'
'March'
'April'
'May'
'June'
'July'
]
datasets: [
{
label: 'Electronics'
fillColor: 'rgb(210, 214, 222)'
strokeColor: 'rgb(210, 214, 222)'
pointColor: 'rgb(210, 214, 222)'
pointStrokeColor: '#c1c7d1'
pointHighlightFill: '#fff'
pointHighlightStroke: 'rgb(220,220,220)'
data: [
65
59
80
81
56
55
40
]
}
{
label: 'Digital Goods'
fillColor: 'rgba(60,141,188,0.9)'
strokeColor: 'rgba(60,141,188,0.8)'
pointColor: '#3b8bba'
pointStrokeColor: 'rgba(60,141,188,1)'
pointHighlightFill: '#fff'
pointHighlightStroke: 'rgba(60,141,188,1)'
data: [
28
48
40
19
86
27
90
]
}
]
salesChartOptions =
showScale: true
scaleShowGridLines: false
scaleGridLineColor: 'rgba(0,0,0,.05)'
scaleGridLineWidth: 1
scaleShowHorizontalLines: true
scaleShowVerticalLines: true
bezierCurve: true
bezierCurveTension: 0.3
pointDot: false
pointDotRadius: 4
pointDotStrokeWidth: 1
pointHitDetectionRadius: 20
datasetStroke: true
datasetStrokeWidth: 2
datasetFill: true
legendTemplate: '<ul class="<%=name.toLowerCase()%>-legend"><% for (var i=0; i<datasets.length; i++){%><li><span style="background-color:<%=datasets[i].lineColor%>"></span><%=datasets[i].label%></li><%}%></ul>'
maintainAspectRatio: true
responsive: true
#Create the line chart
salesChart.Line salesChartData, salesChartOptions
#---------------------------
#- END MONTHLY SALES CHART -
#---------------------------
#-------------
#- PIE CHART -
#-------------
# Get context with jQuery - using jQuery's .get() method.
pieChartCanvas = $('#pieChart').get(0).getContext('2d')
pieChart = new Chart(pieChartCanvas)
PieData = [
{
value: 700
color: '#f56954'
highlight: '#f56954'
label: 'Chrome'
}
{
value: 500
color: '#00a65a'
highlight: '#00a65a'
label: 'IE'
}
{
value: 400
color: '#f39c12'
highlight: '#f39c12'
label: 'FireFox'
}
{
value: 600
color: '#00c0ef'
highlight: '#00c0ef'
label: 'Safari'
}
{
value: 300
color: '#3c8dbc'
highlight: '#3c8dbc'
label: 'Opera'
}
{
value: 100
color: '#d2d6de'
highlight: '#d2d6de'
label: 'Navigator'
}
]
pieOptions =
segmentShowStroke: true
segmentStrokeColor: '#fff'
segmentStrokeWidth: 1
percentageInnerCutout: 50
animationSteps: 100
animationEasing: 'easeOutBounce'
animateRotate: true
animateScale: false
responsive: true
maintainAspectRatio: false
legendTemplate: '<ul class="<%=name.toLowerCase()%>-legend"><% for (var i=0; i<segments.length; i++){%><li><span style="background-color:<%=segments[i].fillColor%>"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul>'
tooltipTemplate: '<%=value %> <%=label%> users'
#Create pie or douhnut chart
# You can switch between pie and douhnut using the method below.
pieChart.Doughnut PieData, pieOptions
#-----------------
#- END PIE CHART -
#-----------------
### jVector Maps
# ------------
# Create a world map with markers
###
$('#world-map-markers').vectorMap
map: 'world_mill_en'
normalizeFunction: 'polynomial'
hoverOpacity: 0.7
hoverColor: false
backgroundColor: 'transparent'
regionStyle:
initial:
fill: 'rgba(210, 214, 222, 1)'
'fill-opacity': 1
stroke: 'none'
'stroke-width': 0
'stroke-opacity': 1
hover:
'fill-opacity': 0.7
cursor: 'pointer'
selected: fill: 'yellow'
selectedHover: {}
markerStyle: initial:
fill: '#00a65a'
stroke: '#111'
markers: [
{
latLng: [
41.90
12.45
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
43.73
7.41
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
-0.52
166.93
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
-8.51
179.21
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
43.93
12.46
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
47.14
9.52
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
7.11
171.06
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
17.3
-62.73
]
name: 'Saint Kitts and NePI:NAME:<NAME>END_PI'
}
{
latLng: [
3.2
73.22
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
35.88
14.5
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
12.05
-61.75
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
13.16
-61.23
]
name: 'PI:NAME:<NAME>END_PI and the PI:NAME:<NAME>END_PI'
}
{
latLng: [
13.16
-59.55
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
17.11
-61.85
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
-4.61
55.45
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
7.35
134.46
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
42.5
1.51
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
14.01
-60.98
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
6.91
158.18
]
name: 'Federated States of Micronesia'
}
{
latLng: [
1.3
103.8
]
name: 'Singapore'
}
{
latLng: [
1.46
173.03
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
-21.13
-175.2
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
15.3
-61.38
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
-20.2
57.5
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
26.02
50.55
]
name: 'PI:NAME:<NAME>END_PI'
}
{
latLng: [
0.33
6.73
]
name: 'PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI'
}
]
### SPARKLINE CHARTS
# ----------------
# Create a inline charts with spark line
###
#-----------------
#- SPARKLINE BAR -
#-----------------
$('.sparkbar').each ->
$this = $(this)
$this.sparkline 'html',
type: 'bar'
height: if $this.data('height') then $this.data('height') else '30'
barColor: $this.data('color')
return
#-----------------
#- SPARKLINE PIE -
#-----------------
$('.sparkpie').each ->
$this = $(this)
$this.sparkline 'html',
type: 'pie'
height: if $this.data('height') then $this.data('height') else '90'
sliceColors: $this.data('color')
return
#------------------
#- SPARKLINE LINE -
#------------------
$('.sparkline').each ->
$this = $(this)
$this.sparkline 'html',
type: 'line'
height: if $this.data('height') then $this.data('height') else '90'
width: '100%'
lineColor: $this.data('linecolor')
fillColor: $this.data('fillcolor')
spotColor: $this.data('spotcolor')
return
return
# ---
else
salesChartCanvas = null
salesChart = null
|
[
{
"context": "er: module.exports.connection.userName\n\t\tpassword: module.exports.connection.password\n\t\tserver: module.exports.connection.server\n\t\topti",
"end": 36541,
"score": 0.9788959622383118,
"start": 36507,
"tag": "PASSWORD",
"value": "module.exports.connection.password"
}
] | node_modules/mssql/src/main.coffee | AydaAz/umcosole | 0 | {EventEmitter} = require 'events'
util = require 'util'
fs = require 'fs'
{TYPES, declare} = require('./datatypes')
ISOLATION_LEVEL = require('./isolationlevel')
DRIVERS = ['msnodesql', 'tedious', 'tds']
Table = require('./table')
global_connection = null
map = []
###
Register you own type map.
**Example:**
```
sql.map.register(MyClass, sql.Text);
```
You can also overwrite default type map.
```
sql.map.register(Number, sql.BigInt);
```
@path module.exports.map
@param {*} jstype JS data type.
@param {*} sqltype SQL data type.
###
map.register = (jstype, sqltype) ->
for item, index in @ when item.js is jstype
@splice index, 1
break
@push
js: jstype
sql: sqltype
null
map.register String, TYPES.NVarChar
map.register Number, TYPES.Int
map.register Boolean, TYPES.Bit
map.register Date, TYPES.DateTime
map.register Buffer, TYPES.VarBinary
map.register Table, TYPES.TVP
###
@ignore
###
getTypeByValue = (value) ->
if value is null or value is undefined then return TYPES.NVarChar
switch typeof value
when 'string'
for item in map when item.js is String
return item.sql
return TYPES.NVarChar
when 'number'
for item in map when item.js is Number
return item.sql
return TYPES.Int
when 'boolean'
for item in map when item.js is Boolean
return item.sql
return TYPES.Bit
when 'object'
for item in map when value instanceof item.js
return item.sql
return TYPES.NVarChar
else
return TYPES.NVarChar
###
Class Connection.
Internally, each `Connection` instance is a separate pool of TDS connections. Once you create a new `Request`/`Transaction`/`Prepared Statement`, a new TDS connection is acquired from the pool and reserved for desired action. Once the action is complete, connection is released back to the pool.
@property {Boolean} connected If true, connection is established.
@property {Boolean} connecting If true, connection is being established.
@property {*} driver Reference to configured Driver.
@event connect Dispatched after connection has established.
@event close Dispatched after connection has closed a pool (by calling close).
###
class Connection extends EventEmitter
connected: false
connecting: false
driver: null
###
Create new Connection.
@param {Object} config Connection configuration.
@callback [callback] A callback which is called after connection has established, or an error has occurred.
@param {Error} err Error on error, otherwise null.
###
constructor: (@config, callback) ->
# set defaults
@config.driver ?= 'tedious'
@config.port ?= 1433
@config.options ?= {}
@config.stream ?= false
@config.parseJSON ?= false
if /^(.*)\\(.*)$/.exec @config.server
@config.server = RegExp.$1
@config.options.instanceName = RegExp.$2
if @config.driver in DRIVERS
@driver = @initializeDriver require("./#{@config.driver}")
# fix the driver by default
if module.exports.fix then @driver.fix()
else
err = new ConnectionError "Unknown driver #{@config.driver}!", 'EDRIVER'
if callback
return callback err
else
throw err
if callback then @connect callback
###
Write message to debug stream.
###
_debug: (msg) ->
@_debugStream?.write "#{String(msg).replace(/\x1B\[[0-9;]*m/g, '')}\n"
###
Initializes driver for this connection. Separated from constructor and used by co-mssql.
@private
@param {Function} driver Loaded driver.
@returns {Connection}
###
initializeDriver: (driver) ->
driver Connection, Transaction, Request, ConnectionError, TransactionError, RequestError
###
Creates a new connection pool with one active connection. This one initial connection serves as a probe to find out whether the configuration is valid.
@callback [callback] A callback which is called after connection has established, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Connection|Promise}
###
connect: (callback) ->
if callback?
return @_connect callback
new module.exports.Promise (resolve, reject) =>
@_connect (err) ->
if err then return reject err
resolve()
_connect: (callback) ->
if not @driver
return callback new ConnectionError "Connection was closed. Create a new instance."
if @connected
return callback new ConnectionError "Database is already connected! Call close before connecting to different database.", 'EALREADYCONNECTED'
if @connecting
return callback new ConnectionError "Already connecting to database! Call close before connecting to different database.", 'EALREADYCONNECTING'
go = =>
@connecting = true
@driver.Connection::connect.call @, @config, (err) =>
unless @connecting then return
@connecting = false
if err
if @_debugStream
@_debugStream.removeAllListeners()
@_debugStream.end()
@_debugStream = null
else
@connected = true
@emit 'connect'
callback err
if @config.debug
@_debugStream = fs.createWriteStream "./mssql_debug_#{Date.now()}.log"
@_debugStream.once 'open', go
@_debugStream.on 'error', (err) ->
if @connecting or @connected
# error after successful open
console.error err.stack
else
@_debugStream.removeListener 'open', go
callback new ConnectionError("Failed to open debug stream. #{err.message}", 'EDEBUG')
else
go()
@
###
Close all active connections in the pool.
@callback [callback] A callback which is called after connection has closed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Connection|Promise}
###
close: (callback) ->
if callback?
return @_close callback
new module.exports.Promise (resolve, reject) =>
@_close (err) ->
if err then return reject err
resolve()
_close: (callback) ->
if @_debugStream
@_debugStream.removeAllListeners()
@_debugStream.end()
@_debugStream = null
if @connecting
@connecting = false
@driver.Connection::close.call @, (err) =>
callback err
@driver = null
else if @connected
@connected = false
@driver.Connection::close.call @, (err) =>
unless err
@connected = false
@emit 'close'
callback err
@driver = null
@
###
Returns new request using this connection.
@returns {Request}
###
request: ->
new Request @
###
Returns new transaction using this connection.
@returns {Transaction}
###
transaction: ->
new Transaction @
###
Class PreparedStatement.
IMPORTANT: Rememeber that each prepared statement means one reserved connection from the pool. Don't forget to unprepare a prepared statement!
@property {Connection} connection Reference to used connection.
@property {Boolean} multiple If `true`, `execute` will handle multiple recordsets.
@property {String} statement Prepared SQL statement.
###
class PreparedStatement extends EventEmitter
_pooledConnection: null
_queue: null
_working: false # if true, there is a request running at the moment
_handle: 0 # sql prepared statement handle
connection: null # sql.Connection
transaction: null # !null in case we're in transaction
prepared: false
statement: null
parameters: null
multiple: false
stream: null
###
Create new Prepared Statement.
@param {String} statement SQL statement.
@param {Connection} [connection] If ommited, global connection is used instead.
###
constructor: (connection) ->
if connection instanceof Transaction
@transaction = connection
@connection = connection.connection
else if connection instanceof Connection
@connection = connection
else
@connection = global_connection
@_queue = []
@parameters = {}
###
Add an input parameter to the prepared statement.
**Example:**
```
statement.input('input_parameter', sql.Int);
statement.input('input_parameter', sql.VarChar(50));
```
@param {String} name Name of the input parameter without @ char.
@param {*} type SQL data type of input parameter.
@returns {PreparedStatement}
###
input: (name, type) ->
if (/(--| |\/\*|\*\/|')/).test name
throw new PreparedStatementError "SQL injection warning for param '#{name}'", 'EINJECT'
if arguments.length < 2
throw new PreparedStatementError "Invalid number of arguments. 2 arguments expected.", 'EARGS'
if type instanceof Function
type = type()
@parameters[name] =
name: name
type: type.type
io: 1
length: type.length
scale: type.scale
precision: type.precision
tvpType: type.tvpType
@
###
Add an output parameter to the prepared statement.
**Example:**
```
statement.output('output_parameter', sql.Int);
statement.output('output_parameter', sql.VarChar(50));
```
@param {String} name Name of the output parameter without @ char.
@param {*} type SQL data type of output parameter.
@returns {PreparedStatement}
###
output: (name, type) ->
if (/(--| |\/\*|\*\/|')/).test name
throw new PreparedStatementError "SQL injection warning for param '#{name}'", 'EINJECT'
if arguments.length < 2
throw new PreparedStatementError "Invalid number of arguments. 2 arguments expected.", 'EARGS'
if type instanceof Function
type = type()
@parameters[name] =
name: name
type: type.type
io: 2
length: type.length
scale: type.scale
precision: type.precision
@
###
Prepare a statement.
@property {String} [statement] SQL statement to prepare.
@callback [callback] A callback which is called after preparation has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {PreparedStatement|Promise}
###
prepare: (statement, callback) ->
if callback?
return @_prepare statement, callback
new module.exports.Promise (resolve, reject) =>
@_prepare statement, (err) ->
if err then return reject err
resolve()
_prepare: (statement, callback) ->
if @_pooledConnection
callback new PreparedStatementError "Statement is already prepared.", 'EALREADYPREPARED'
return @
if typeof statement is 'function'
callback = statement
statement = undefined
@statement = statement if statement?
done = (err, connection) =>
if err then return callback err
@_pooledConnection = connection
req = new Request @
req.stream = false
req.output 'handle', TYPES.Int
req.input 'params', TYPES.NVarChar, ("@#{name} #{declare(param.type, param)}#{if param.io is 2 then " output" else ""}" for name, param of @parameters).join(',')
req.input 'stmt', TYPES.NVarChar, @statement
req.execute 'sp_prepare', (err) =>
if err
if @transaction
@transaction.next()
else
@connection.pool.release @_pooledConnection
@_pooledConnection = null
return callback err
@_handle = req.parameters.handle.value
callback null
if @transaction
unless @transaction._pooledConnection
callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
return @
@transaction.queue done
else
@connection.pool.acquire done
@
###
Execute next request in queue.
@private
@returns {PreparedStatement}
###
next: ->
if @_queue.length
# defer processing of next request
process.nextTick =>
@_queue.shift() null, @_pooledConnection
else
@_working = false
@
###
Add request to queue for connection. If queue is empty, execute the request immediately.
@private
@callback callback A callback to call when connection in ready to execute request.
@param {Error} err Error on error, otherwise null.
@param {*} conn Internal driver's connection.
@returns {PreparedStatement}
###
queue: (callback) ->
unless @_pooledConnection
callback new PreparedStatementError "Statement is not prepared. Call prepare() first.", 'ENOTPREPARED'
return @
if @_working
@_queue.push callback
else
@_working = true
callback null, @_pooledConnection
@
###
Execute a prepared statement.
@property {String} values An object whose names correspond to the names of parameters that were added to the prepared statement before it was prepared.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Request|Promise}
###
execute: (values, callback) ->
if callback?
return @_execute values, callback
new module.exports.Promise (resolve, reject) =>
@_execute values, (err, recordset) ->
if err then return reject err
resolve recordset
_execute: (values, callback) ->
req = new Request @
req.stream = @stream if @stream?
req.input 'handle', TYPES.Int, @_handle
# copy parameters with new values
for name, param of @parameters
req.parameters[name] =
name: name
type: param.type
io: param.io
value: values[name]
length: param.length
scale: param.scale
precision: param.precision
req.execute 'sp_execute', (err, recordsets, returnValue) =>
if err then return callback err
callback null, (if @multiple then recordsets else recordsets[0])
req
###
Unprepare a prepared statement.
@callback [callback] A callback which is called after unpreparation has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {PreparedStatement|Promise}
###
unprepare: (callback) ->
if callback?
return @_unprepare callback
new module.exports.Promise (resolve, reject) =>
@_unprepare (err) ->
if err then return reject err
resolve()
_unprepare: (callback) ->
unless @_pooledConnection
callback new PreparedStatementError "Statement is not prepared. Call prepare() first.", 'ENOTPREPARED'
return @
done = (err) =>
if err then return callback err
if @transaction
@transaction.next()
else
@connection.pool.release @_pooledConnection
@_pooledConnection = null
@_handle = 0
callback null
req = new Request @
req.stream = false
req.input 'handle', TYPES.Int, @_handle
req.execute 'sp_unprepare', done
@
###
Class Transaction.
@property {Connection} connection Reference to used connection.
@property {Number} isolationLevel Controls the locking and row versioning behavior of TSQL statements issued by a connection. READ_COMMITTED by default.
@property {String} name Transaction name. Empty string by default.
@event begin Dispatched when transaction begin.
@event commit Dispatched on successful commit.
@event rollback Dispatched on successful rollback.
###
class Transaction extends EventEmitter
_pooledConnection: null
_queue: null
_aborted: false
_working: false # if true, there is a request running at the moment
name: ""
connection: null # sql.Connection
isolationLevel: ISOLATION_LEVEL.READ_COMMITTED
###
Create new Transaction.
@param {Connection} [connection] If ommited, global connection is used instead.
###
constructor: (connection) ->
@connection = connection ? global_connection
@_queue = []
###
@private
###
_abort: =>
@connection.driver.Transaction::_abort.call @
###
Begin a transaction.
@param {Number} [isolationLevel] Controls the locking and row versioning behavior of TSQL statements issued by a connection.
@callback [callback] A callback which is called after transaction has began, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Transaction|Promise}
###
begin: (isolationLevel, callback) ->
if isolationLevel instanceof Function
callback = isolationLevel
isolationLevel = undefined
if callback?
return @_begin isolationLevel, callback
new module.exports.Promise (resolve, reject) =>
@_begin isolationLevel, (err) ->
if err then return reject err
resolve()
_begin: (isolationLevel, callback) ->
@isolationLevel = isolationLevel if isolationLevel?
if @_pooledConnection
callback new TransactionError "Transaction has already begun.", 'EALREADYBEGUN'
return @
@connection.driver.Transaction::begin.call @, (err) =>
unless err then @emit 'begin'
callback err
@
###
Commit a transaction.
@callback [callback] A callback which is called after transaction has commited, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Transaction|Promise}
###
commit: (callback) ->
if callback?
return @_commit callback
new module.exports.Promise (resolve, reject) =>
@_commit (err) ->
if err then return reject err
resolve()
_commit: (callback) ->
unless @_pooledConnection
callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
return @
if @_working
callback new TransactionError "Can't commit transaction. There is a request in progress.", 'EREQINPROG'
return @
if @_queue.length
callback new TransactionError "Can't commit transaction. There are request in queue.", 'EREQINPROG'
return @
@connection.driver.Transaction::commit.call @, (err) =>
unless err then @emit 'commit'
callback err
@
###
Execute next request in queue.
@private
@returns {Transaction}
###
next: ->
if @_aborted
toAbort = @_queue
@_queue = []
# this must be async to ensure it is not processed earlier than the request that caused abortion of this transaction
process.nextTick =>
while toAbort.length
toAbort.shift() new TransactionError "Transaction aborted.", "EABORT"
# this must be synchronous so we can rollback a transaction or commit transaction in last request's callback
@_working = false
if @_queue.length
process.nextTick =>
if @_aborted then return @next() # transaction aborted manually
@_working = true
@_queue.shift() null, @_pooledConnection
@
###
Add request to queue for connection. If queue is empty, execute the request immediately.
@private
@callback callback A callback to call when connection in ready to execute request.
@param {Error} err Error on error, otherwise null.
@param {*} conn Internal driver's connection.
@returns {Transaction}
###
queue: (callback) ->
if @_dedicatedConnection
callback null, @_dedicatedConnection
return @
unless @_pooledConnection
callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
return @
if @_working or @_queue.length
@_queue.push callback
else
@_working = true
callback null, @_pooledConnection
@
###
Returns new request using this transaction.
@returns {Request}
###
request: ->
new Request @
###
Rollback a transaction.
@callback [callback] A callback which is called after transaction has rolled back, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Transaction|Promise}
###
rollback: (callback) ->
if callback?
return @_rollback callback
new module.exports.Promise (resolve, reject) =>
@_rollback (err) ->
if err then return reject err
resolve()
_rollback: (callback) ->
if @_aborted
callback new TransactionError "Transaction has been aborted.", 'EABORT'
return @
unless @_pooledConnection
callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
return @
if @_working
callback new TransactionError "Can't rollback transaction. There is a request in progress.", 'EREQINPROG'
return @
if @_queue.length
@_aborted = true
@connection.driver.Transaction::rollback.call @, (err) =>
unless err then @emit 'rollback', @_aborted
callback err
@
###
Class Request.
@property {Connection} connection Reference to used connection.
@property {Transaction} transaction Reference to transaction when request was created in transaction.
@property {*} parameters Collection of input and output parameters.
@property {Boolean} verbose If `true`, debug messages are printed to message log.
@property {Boolean} multiple If `true`, `query` will handle multiple recordsets (`execute` always expect multiple recordsets).
@property {Boolean} canceled `true` if request was canceled.
@event recordset Dispatched when metadata for new recordset are parsed.
@event row Dispatched when new row is parsed.
@event done Dispatched when request is complete.
@event error Dispatched on error.
###
class Request extends EventEmitter
connection: null
transaction: null
pstatement: null
parameters: null
verbose: false
multiple: false
canceled: false
stream: null
###
Create new Request.
@param {Connection|Transaction} connection If ommited, global connection is used instead.
###
constructor: (connection) ->
if connection instanceof Transaction
@transaction = connection
@connection = connection.connection
else if connection instanceof PreparedStatement
@pstatement = connection
@connection = connection.connection
else if connection instanceof Connection
@connection = connection
else
@connection = global_connection
@parameters = {}
###
Log to a function if assigned. Else, use console.log.
###
_log: (out) ->
if typeof @logger is "function" then @logger out else console.log out
###
Acquire connection for this request from connection.
###
_acquire: (callback) ->
if @transaction
@transaction.queue callback
else if @pstatement
@pstatement.queue callback
else
unless @connection.pool
return callback new ConnectionError "Connection not yet open.", 'ENOTOPEN'
@connection.pool.acquire callback
###
Release connection used by this request.
###
_release: (connection) ->
if @transaction
@transaction.next()
else if @pstatement
@pstatement.next()
else
@connection.pool.release connection
###
Add an input parameter to the request.
**Example:**
```
request.input('input_parameter', value);
request.input('input_parameter', sql.Int, value);
```
@param {String} name Name of the input parameter without @ char.
@param {*} [type] SQL data type of input parameter. If you omit type, module automaticaly decide which SQL data type should be used based on JS data type.
@param {*} value Input parameter value. `undefined` and `NaN` values are automatically converted to `null` values.
@returns {Request}
###
input: (name, type, value) ->
  # Reject parameter names that could smuggle SQL fragments.
  if (/(--| |\/\*|\*\/|')/).test name
    throw new RequestError "SQL injection warning for param '#{name}'", 'EINJECT'
  if arguments.length is 1
    throw new RequestError "Invalid number of arguments. At least 2 arguments expected.", 'EARGS'
  else if arguments.length is 2
    # Two-argument form (name, value): infer the SQL type from the JS value.
    value = type
    type = getTypeByValue(value)

  # support for custom data types
  if value?.valueOf and value not instanceof Date then value = value.valueOf()

  # undefined to null
  if value is undefined then value = null

  # NaN to null (NaN is the only value not equal to itself)
  if value isnt value then value = null

  # Allow passing the bare type factory (e.g. sql.VarChar) instead of
  # an instantiated type (sql.VarChar(50)).
  if type instanceof Function
    type = type()

  @parameters[name] =
    name: name
    type: type.type
    io: 1  # 1 = input parameter
    value: value
    length: type.length
    scale: type.scale
    precision: type.precision
    tvpType: type.tvpType

  @
###
Add an output parameter to the request.
**Example:**
```
request.output('output_parameter', sql.Int);
request.output('output_parameter', sql.VarChar(50), 'abc');
```
@param {String} name Name of the output parameter without @ char.
@param {*} type SQL data type of output parameter.
@param {*} [value] Output parameter value initial value. `undefined` and `NaN` values are automatically converted to `null` values. Optional.
@returns {Request}
###
output: (name, type, value) ->
  # Default output type when none is given.
  unless type then type = TYPES.NVarChar
  if (/(--| |\/\*|\*\/|')/).test name
    throw new RequestError "SQL injection warning for param '#{name}'", 'EINJECT'
  # Deprecated LOB types cannot be declared as OUTPUT parameters by SQL Server.
  if type is TYPES.Text or type is TYPES.NText or type is TYPES.Image
    throw new RequestError "Deprecated types (Text, NText, Image) are not supported as OUTPUT parameters.", 'EDEPRECATED'

  # support for custom data types
  if value?.valueOf and value not instanceof Date then value = value.valueOf()

  # undefined to null
  if value is undefined then value = null

  # NaN to null
  if value isnt value then value = null

  # Allow the bare type factory in place of an instantiated type.
  if type instanceof Function
    type = type()

  @parameters[name] =
    name: name
    type: type.type
    io: 2  # 2 = output parameter
    value: value
    length: type.length
    scale: type.scale
    precision: type.precision

  @
###
Execute the SQL batch.
@param {String} batch T-SQL batch to be executed.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@param {*} recordset Recordset.
@returns {Request|Promise}
###
batch: (batch, callback) ->
  # Inherit stream mode from the connection config unless set explicitly.
  @stream ?= @connection?.config.stream

  # Streaming or callback style: execute now and return this Request.
  if @stream or callback?
    return @_batch batch, callback

  # Promise style: resolve with the recordset.
  new module.exports.Promise (resolve, reject) =>
    @_batch batch, (err, recordset) ->
      if err then return reject err
      resolve recordset
_batch: (batch, callback) ->
  # Guard: no connection object at all (not even a global one).
  unless @connection
    # Defer so callers always observe asynchronous behavior.
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  # Guard: connection exists but is not open.
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  @canceled = false
  # Delegate the actual batch execution to the configured driver.
  @connection.driver.Request::batch.call @, batch, (err, recordset) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done'
    else
      callback err, recordset

  @
###
Bulk load.
@param {Table} table SQL table.
@callback [callback] A callback which is called after bulk load has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Request|Promise}
###
bulk: (table, callback) ->
  # Inherit stream mode from the connection config unless set explicitly.
  @stream ?= @connection?.config.stream

  # Streaming or callback style: execute now and return this Request.
  if @stream or callback?
    return @_bulk table, callback

  # Promise style: resolve with the affected row count.
  new module.exports.Promise (resolve, reject) =>
    @_bulk table, (err, rowCount) ->
      if err then return reject err
      resolve rowCount
_bulk: (table, callback) ->
  # Guard: no connection object at all.
  unless @connection
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  # Guard: connection exists but is not open.
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  @canceled = false
  # Delegate the bulk load to the configured driver.
  @connection.driver.Request::bulk.call @, table, (err, rowCount) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done'
    else
      callback err, rowCount

  @
###
Sets request to `stream` mode and pulls all rows from all recordsets to a given stream.
@param {Stream} stream Stream to pipe data into.
@returns {Stream}
###
pipe: (stream) ->
  # Switch this request into stream mode and forward its row/error/done
  # events into the target stream. Returns the target stream for chaining.
  @stream = true
  @on 'row', (args...) -> stream.write args...
  @on 'error', (args...) -> stream.emit 'error', args...
  @on 'done', ->
    # End on the next turn so any pending writes flush first.
    setImmediate -> stream.end()
  stream.emit 'pipe', @
  stream
###
Execute the SQL command.
**Example:**
```
var request = new sql.Request();
request.query('select 1 as number', function(err, recordset) {
console.log(recordset[0].number); // return 1
// ...
});
```
You can enable multiple recordsets in querries by `request.multiple = true` command.
```
var request = new sql.Request();
request.multiple = true;
request.query('select 1 as number; select 2 as number', function(err, recordsets) {
console.log(recordsets[0][0].number); // return 1
console.log(recordsets[1][0].number); // return 2
// ...
});
```
@param {String} command T-SQL command to be executed.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@param {*} recordset Recordset.
@returns {Request|Promise}
###
query: (command, callback) ->
  # Inherit stream mode from the connection config unless set explicitly.
  @stream ?= @connection?.config.stream

  # Streaming or callback style: execute now and return this Request.
  if @stream or callback?
    return @_query command, callback

  # Promise style: resolve with the recordset.
  new module.exports.Promise (resolve, reject) =>
    @_query command, (err, recordset) ->
      if err then return reject err
      resolve recordset
_query: (command, callback) ->
  # Guard: no connection object at all.
  unless @connection
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  # Guard: connection exists but is not open.
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  @canceled = false
  # Delegate query execution to the configured driver.
  @connection.driver.Request::query.call @, command, (err, recordset) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done'
    else
      callback err, recordset

  @
###
Call a stored procedure.
**Example:**
```
var request = new sql.Request();
request.input('input_parameter', sql.Int, value);
request.output('output_parameter', sql.Int);
request.execute('procedure_name', function(err, recordsets, returnValue) {
console.log(recordsets.length); // count of recordsets returned by procedure
console.log(recordset[0].length); // count of rows contained in first recordset
console.log(returnValue); // procedure return value
console.log(recordsets.returnValue); // procedure return value
console.log(request.parameters.output_parameter.value); // output value
// ...
});
```
@param {String} procedure Name of the stored procedure to be executed.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@param {Array} recordsets Recordsets.
@param {Number} returnValue Procedure return value.
@returns {Request|Promise}
###
execute: (command, callback) ->
  # Inherit stream mode from the connection config unless set explicitly.
  @stream ?= @connection?.config.stream

  # Streaming or callback style: execute now and return this Request.
  if @stream or callback?
    return @_execute command, callback

  # Promise style. Note: only the recordsets reach the promise; the
  # procedure return value is available via callback/stream style only.
  new module.exports.Promise (resolve, reject) =>
    @_execute command, (err, recordset) ->
      if err then return reject err
      resolve recordset
_execute: (procedure, callback) ->
  # Guard: no connection object at all.
  # BUGFIX: this deferred handler was a thin arrow (->), so `@stream` and
  # `@emit` were evaluated with the wrong `this` inside process.nextTick;
  # use => like the sibling _query/_batch/_bulk methods so the error is
  # reported on this Request.
  unless @connection
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  # Guard: connection exists but is not open.
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e

  @canceled = false
  # Delegate stored-procedure execution to the configured driver.
  @connection.driver.Request::execute.call @, procedure, (err, recordsets, returnValue) =>
    if @stream
      if err then @emit 'error', err
      # In stream mode 'done' carries the procedure return value.
      @emit 'done', returnValue
    else
      callback err, recordsets, returnValue

  @
###
Cancel currently executed request.
@returns {Request}
###
cancel: ->
  # Flag the request as canceled and ask the driver to abort it.
  @canceled = true
  @connection.driver.Request::cancel.call @
  @
class ConnectionError extends Error
  # Error for connection-level failures (open/close, pool, unknown driver).
  # Callable without `new`; can also wrap an existing Error, exposing it
  # through a non-enumerable `originalError` property.
  constructor: (message, code) ->
    unless @ instanceof ConnectionError
      if message instanceof Error
        err = new ConnectionError message.message, message.code
        Object.defineProperty err, 'originalError', value: message
        # NOTE(review): arguments.callee is rejected in strict mode — this
        # relies on the module not being strict.
        Error.captureStackTrace err, arguments.callee
        return err
      else
        err = new ConnectionError message
        Error.captureStackTrace err, arguments.callee
        return err

    @name = @constructor.name
    @message = message
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
class TransactionError extends Error
  # Error for transaction misuse (not begun, already begun, aborted,
  # commit/rollback while requests are pending). Callable without `new`;
  # can wrap an existing Error via a non-enumerable `originalError`.
  constructor: (message, code) ->
    unless @ instanceof TransactionError
      if message instanceof Error
        err = new TransactionError message.message, message.code
        Object.defineProperty err, 'originalError', value: message
        Error.captureStackTrace err, arguments.callee
        return err
      else
        err = new TransactionError message
        Error.captureStackTrace err, arguments.callee
        return err

    @name = @constructor.name
    @message = message
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
class RequestError extends Error
  # Error for request execution failures. When wrapping a driver error it
  # copies SQL Server diagnostics (number, lineNumber, state, class,
  # serverName, procName) from the driver-specific `info`/fallback fields.
  constructor: (message, code) ->
    unless @ instanceof RequestError
      if message instanceof Error
        err = new RequestError message.message, message.code ? code
        err.number = message.info?.number ? message.code # err.code is returned by msnodesql driver
        err.lineNumber = message.info?.lineNumber
        err.state = message.info?.state ? message.sqlstate # err.sqlstate is returned by msnodesql driver
        err.class = message.info?.class ? message.info?.severity # err.severity is returned by tds
        err.serverName = message.info?.serverName
        err.procName = message.info?.procName
        Object.defineProperty err, 'originalError', value: message
        Error.captureStackTrace err, arguments.callee
        return err
      else
        err = new RequestError message
        Error.captureStackTrace err, arguments.callee
        return err

    @name = @constructor.name
    @message = message
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
class PreparedStatementError extends Error
  # Error for prepared-statement misuse (not prepared, already prepared,
  # bad arguments). Mirrors the other error classes in this module:
  # callable without `new` and able to wrap an existing Error.
  constructor: (message, code) ->
    unless @ instanceof PreparedStatementError
      if message instanceof Error
        err = new PreparedStatementError message.message, message.code
        # CONSISTENCY FIX: expose the wrapped error the same way
        # ConnectionError/TransactionError/RequestError do — as a
        # non-enumerable `originalError` property — instead of a plain
        # (enumerable) assignment.
        Object.defineProperty err, 'originalError', value: message
        Error.captureStackTrace err, arguments.callee
        return err
      else
        err = new PreparedStatementError message
        Error.captureStackTrace err, arguments.callee
        return err

    @name = @constructor.name
    @message = message
    # CONSISTENCY FIX: only set `code` when one was provided, matching the
    # sibling error classes.
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
###
Open global connection.
@param {Object} config Connection configuration.
@callback callback A callback which is called after connection has established, or an error has occurred.
@param {Error} err Error on error, otherwise null.
@returns {Connection}
###
module.exports.connect = (config, callback) ->
  # Replace the module-level global connection reference and open it.
  # NOTE(review): a previously open global connection is not closed here —
  # callers should close it first.
  global_connection = new Connection config
  global_connection.connect callback
###
Close global connection.
@returns {Connection}
###
module.exports.close = (callback) ->
  # Close the global connection; no-op when none was ever opened.
  global_connection?.close callback
module.exports.on = (event, handler) ->
  # Subscribe to events on the global connection; no-op when none exists.
  global_connection?.on event, handler
# Public API surface of the module.
module.exports.Connection = Connection
module.exports.Transaction = Transaction
module.exports.Request = Request
module.exports.Table = Table
module.exports.PreparedStatement = PreparedStatement
module.exports.ConnectionError = ConnectionError
module.exports.TransactionError = TransactionError
module.exports.RequestError = RequestError
module.exports.PreparedStatementError = PreparedStatementError
module.exports.ISOLATION_LEVEL = ISOLATION_LEVEL
module.exports.DRIVERS = DRIVERS
module.exports.TYPES = TYPES
module.exports.MAX = 65535 # (1 << 16) - 1
module.exports.map = map
# When true (default), the selected driver's fix() is applied on load.
module.exports.fix = true
# Native Promise when available, otherwise the `promise` package polyfill.
module.exports.Promise = global.Promise ? require('promise')

# append datatypes to this modules export
for key, value of TYPES
  module.exports[key] = value
  module.exports[key.toUpperCase()] = value

# --- DEPRECATED IN 0.3.0 ------------------------------------------

# Legacy module-level pool/connection settings consumed by init().
module.exports.pool =
  max: 10
  min: 0
  idleTimeoutMillis: 30000

module.exports.connection =
  userName: ''
  password: ''
  server: ''
###
Initialize Tedious connection pool.
@deprecated
###
module.exports.init = ->
  # Deprecated: builds a config from the legacy module-level
  # `connection`/`pool` settings and opens the global connection with
  # the tedious driver.
  module.exports.connect
    user: module.exports.connection.userName
    password: module.exports.connection.password
    server: module.exports.connection.server
    options: module.exports.connection.options
    driver: 'tedious'
    pool: module.exports.pool
util = require 'util'
fs = require 'fs'
{TYPES, declare} = require('./datatypes')
ISOLATION_LEVEL = require('./isolationlevel')
DRIVERS = ['msnodesql', 'tedious', 'tds']
Table = require('./table')
global_connection = null
map = []
###
Register you own type map.
**Example:**
```
sql.map.register(MyClass, sql.Text);
```
You can also overwrite default type map.
```
sql.map.register(Number, sql.BigInt);
```
@path module.exports.map
@param {*} jstype JS data type.
@param {*} sqltype SQL data type.
###
map.register = (jstype, sqltype) ->
  # Drop any previous mapping registered for this JS type, then append
  # the new JS -> SQL type pair. Returns null like the original.
  previous = -1
  for item, index in @
    if item.js is jstype
      previous = index
      break
  @splice previous, 1 unless previous is -1
  @push {js: jstype, sql: sqltype}
  null
# Default JS -> SQL type mappings; user code can override any of these
# via sql.map.register(jstype, sqltype).
map.register String, TYPES.NVarChar
map.register Number, TYPES.Int
map.register Boolean, TYPES.Bit
map.register Date, TYPES.DateTime
map.register Buffer, TYPES.VarBinary
map.register Table, TYPES.TVP
###
@ignore
###
getTypeByValue = (value) ->
  # Resolve the SQL type for a JS value through the user-extensible `map`,
  # with sensible defaults per JS primitive category.
  return TYPES.NVarChar unless value?

  # Find the registered SQL type for an exact JS constructor, or fall back.
  lookup = (jstype, fallback) ->
    for item in map when item.js is jstype
      return item.sql
    fallback

  switch typeof value
    when 'string' then lookup String, TYPES.NVarChar
    when 'number' then lookup Number, TYPES.Int
    when 'boolean' then lookup Boolean, TYPES.Bit
    when 'object'
      # Objects match by instanceof so subclasses hit their base mapping.
      for item in map when value instanceof item.js
        return item.sql
      TYPES.NVarChar
    else
      TYPES.NVarChar
###
Class Connection.
Internally, each `Connection` instance is a separate pool of TDS connections. Once you create a new `Request`/`Transaction`/`Prepared Statement`, a new TDS connection is acquired from the pool and reserved for desired action. Once the action is complete, connection is released back to the pool.
@property {Boolean} connected If true, connection is established.
@property {Boolean} connecting If true, connection is being established.
@property {*} driver Reference to configured Driver.
@event connect Dispatched after connection has established.
@event close Dispatched after connection has closed a pool (by calling close).
###
class Connection extends EventEmitter
  connected: false
  connecting: false
  driver: null

  ###
  Create new Connection.
  @param {Object} config Connection configuration.
  @callback [callback] A callback which is called after connection has established, or an error has occurred.
  @param {Error} err Error on error, otherwise null.
  ###
  constructor: (@config, callback) ->
    # set defaults
    @config.driver ?= 'tedious'
    @config.port ?= 1433
    @config.options ?= {}
    @config.stream ?= false
    @config.parseJSON ?= false

    # "server\instance" notation: split into server + options.instanceName.
    if /^(.*)\\(.*)$/.exec @config.server
      @config.server = RegExp.$1
      @config.options.instanceName = RegExp.$2

    if @config.driver in DRIVERS
      @driver = @initializeDriver require("./#{@config.driver}")

      # fix the driver by default
      if module.exports.fix then @driver.fix()
    else
      err = new ConnectionError "Unknown driver #{@config.driver}!", 'EDRIVER'

      if callback
        return callback err
      else
        throw err

    if callback then @connect callback

  ###
  Write message to debug stream.
  ###
  _debug: (msg) ->
    # Strip ANSI escape sequences before persisting to the log file.
    @_debugStream?.write "#{String(msg).replace(/\x1B\[[0-9;]*m/g, '')}\n"

  ###
  Initializes driver for this connection. Separated from constructor and used by co-mssql.
  @private
  @param {Function} driver Loaded driver.
  @returns {Connection}
  ###
  initializeDriver: (driver) ->
    driver Connection, Transaction, Request, ConnectionError, TransactionError, RequestError

  ###
  Creates a new connection pool with one active connection. This one initial connection serves as a probe to find out whether the configuration is valid.
  @callback [callback] A callback which is called after connection has established, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Connection|Promise}
  ###
  connect: (callback) ->
    if callback?
      return @_connect callback

    new module.exports.Promise (resolve, reject) =>
      @_connect (err) ->
        if err then return reject err
        resolve()

  _connect: (callback) ->
    if not @driver
      return callback new ConnectionError "Connection was closed. Create a new instance."
    if @connected
      return callback new ConnectionError "Database is already connected! Call close before connecting to different database.", 'EALREADYCONNECTED'
    if @connecting
      return callback new ConnectionError "Already connecting to database! Call close before connecting to different database.", 'EALREADYCONNECTING'

    go = =>
      @connecting = true
      @driver.Connection::connect.call @, @config, (err) =>
        # A concurrent close() resets @connecting; ignore the stale result.
        unless @connecting then return
        @connecting = false
        if err
          # Connect failed: tear down the debug stream if one was opened.
          if @_debugStream
            @_debugStream.removeAllListeners()
            @_debugStream.end()
            @_debugStream = null
        else
          @connected = true
          @emit 'connect'
        callback err

    if @config.debug
      @_debugStream = fs.createWriteStream "./mssql_debug_#{Date.now()}.log"
      @_debugStream.once 'open', go

      # BUGFIX: this handler was a thin arrow (->), so `@connecting`,
      # `@connected` and `@_debugStream` were looked up on the stream
      # (the emitter's `this`) instead of the Connection, breaking the
      # error path. A fat arrow (=>) binds the Connection, like `go`.
      @_debugStream.on 'error', (err) =>
        if @connecting or @connected
          # error after successful open
          console.error err.stack
        else
          @_debugStream.removeListener 'open', go
          callback new ConnectionError("Failed to open debug stream. #{err.message}", 'EDEBUG')
    else
      go()

    @

  ###
  Close all active connections in the pool.
  @callback [callback] A callback which is called after connection has closed, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Connection|Promise}
  ###
  close: (callback) ->
    if callback?
      return @_close callback

    new module.exports.Promise (resolve, reject) =>
      @_close (err) ->
        if err then return reject err
        resolve()

  _close: (callback) ->
    if @_debugStream
      @_debugStream.removeAllListeners()
      @_debugStream.end()
      @_debugStream = null

    if @connecting
      @connecting = false
      @driver.Connection::close.call @, (err) =>
        callback err
      @driver = null
    else if @connected
      @connected = false
      @driver.Connection::close.call @, (err) =>
        unless err
          @connected = false
          @emit 'close'
        callback err
      @driver = null
    # NOTE(review): when neither connecting nor connected, the callback is
    # never invoked — verify whether close() on an unopened connection
    # should call back immediately.
    @

  ###
  Returns new request using this connection.
  @returns {Request}
  ###
  request: ->
    new Request @

  ###
  Returns new transaction using this connection.
  @returns {Transaction}
  ###
  transaction: ->
    new Transaction @
###
Class PreparedStatement.
IMPORTANT: Rememeber that each prepared statement means one reserved connection from the pool. Don't forget to unprepare a prepared statement!
@property {Connection} connection Reference to used connection.
@property {Boolean} multiple If `true`, `execute` will handle multiple recordsets.
@property {String} statement Prepared SQL statement.
###
class PreparedStatement extends EventEmitter
  _pooledConnection: null
  _queue: null
  _working: false # if true, there is a request running at the moment
  _handle: 0 # sql prepared statement handle
  connection: null # sql.Connection
  transaction: null # !null in case we're in transaction
  prepared: false
  statement: null
  parameters: null
  multiple: false
  stream: null

  ###
  Create new Prepared Statement.
  @param {String} statement SQL statement.
  @param {Connection} [connection] If ommited, global connection is used instead.
  ###
  constructor: (connection) ->
    if connection instanceof Transaction
      @transaction = connection
      @connection = connection.connection
    else if connection instanceof Connection
      @connection = connection
    else
      @connection = global_connection

    @_queue = []
    @parameters = {}

  ###
  Add an input parameter to the prepared statement.
  **Example:**
  ```
  statement.input('input_parameter', sql.Int);
  statement.input('input_parameter', sql.VarChar(50));
  ```
  @param {String} name Name of the input parameter without @ char.
  @param {*} type SQL data type of input parameter.
  @returns {PreparedStatement}
  ###
  input: (name, type) ->
    # Reject names that could smuggle SQL fragments.
    if (/(--| |\/\*|\*\/|')/).test name
      throw new PreparedStatementError "SQL injection warning for param '#{name}'", 'EINJECT'
    if arguments.length < 2
      throw new PreparedStatementError "Invalid number of arguments. 2 arguments expected.", 'EARGS'

    # Allow the bare type factory (e.g. sql.VarChar) as well as an
    # instantiated type (sql.VarChar(50)).
    if type instanceof Function
      type = type()

    @parameters[name] =
      name: name
      type: type.type
      io: 1
      length: type.length
      scale: type.scale
      precision: type.precision
      tvpType: type.tvpType

    @

  ###
  Add an output parameter to the prepared statement.
  **Example:**
  ```
  statement.output('output_parameter', sql.Int);
  statement.output('output_parameter', sql.VarChar(50));
  ```
  @param {String} name Name of the output parameter without @ char.
  @param {*} type SQL data type of output parameter.
  @returns {PreparedStatement}
  ###
  output: (name, type) ->
    if (/(--| |\/\*|\*\/|')/).test name
      throw new PreparedStatementError "SQL injection warning for param '#{name}'", 'EINJECT'
    if arguments.length < 2
      throw new PreparedStatementError "Invalid number of arguments. 2 arguments expected.", 'EARGS'

    if type instanceof Function
      type = type()

    @parameters[name] =
      name: name
      type: type.type
      io: 2
      length: type.length
      scale: type.scale
      precision: type.precision

    @

  ###
  Prepare a statement.
  @property {String} [statement] SQL statement to prepare.
  @callback [callback] A callback which is called after preparation has completed, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {PreparedStatement|Promise}
  ###
  prepare: (statement, callback) ->
    if callback?
      return @_prepare statement, callback

    new module.exports.Promise (resolve, reject) =>
      @_prepare statement, (err) ->
        if err then return reject err
        resolve()

  _prepare: (statement, callback) ->
    if @_pooledConnection
      callback new PreparedStatementError "Statement is already prepared.", 'EALREADYPREPARED'
      return @

    # Support prepare(callback) with the statement set beforehand.
    if typeof statement is 'function'
      callback = statement
      statement = undefined

    @statement = statement if statement?

    # Runs once a dedicated connection is available; calls sp_prepare and
    # keeps the returned statement handle for later sp_execute calls.
    done = (err, connection) =>
      if err then return callback err

      @_pooledConnection = connection

      req = new Request @
      req.stream = false
      req.output 'handle', TYPES.Int
      # Build the T-SQL parameter declaration list, e.g. "@a int,@b nvarchar(50) output".
      req.input 'params', TYPES.NVarChar, ("@#{name} #{declare(param.type, param)}#{if param.io is 2 then " output" else ""}" for name, param of @parameters).join(',')
      req.input 'stmt', TYPES.NVarChar, @statement
      req.execute 'sp_prepare', (err) =>
        if err
          # Preparation failed: give the connection back.
          if @transaction
            @transaction.next()
          else
            @connection.pool.release @_pooledConnection
          @_pooledConnection = null
          return callback err

        @_handle = req.parameters.handle.value
        callback null

    if @transaction
      unless @transaction._pooledConnection
        callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
        return @

      @transaction.queue done
    else
      @connection.pool.acquire done

    @

  ###
  Execute next request in queue.
  @private
  @returns {PreparedStatement}
  ###
  next: ->
    if @_queue.length
      # defer processing of next request
      process.nextTick =>
        @_queue.shift() null, @_pooledConnection
    else
      @_working = false

    @

  ###
  Add request to queue for connection. If queue is empty, execute the request immediately.
  @private
  @callback callback A callback to call when connection in ready to execute request.
  @param {Error} err Error on error, otherwise null.
  @param {*} conn Internal driver's connection.
  @returns {PreparedStatement}
  ###
  queue: (callback) ->
    unless @_pooledConnection
      callback new PreparedStatementError "Statement is not prepared. Call prepare() first.", 'ENOTPREPARED'
      return @

    if @_working
      @_queue.push callback
    else
      @_working = true
      callback null, @_pooledConnection

    @

  ###
  Execute a prepared statement.
  @property {String} values An object whose names correspond to the names of parameters that were added to the prepared statement before it was prepared.
  @callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Request|Promise}
  ###
  execute: (values, callback) ->
    if callback?
      return @_execute values, callback

    new module.exports.Promise (resolve, reject) =>
      @_execute values, (err, recordset) ->
        if err then return reject err
        resolve recordset

  _execute: (values, callback) ->
    req = new Request @
    req.stream = @stream if @stream?
    req.input 'handle', TYPES.Int, @_handle

    # copy parameters with new values
    # NOTE(review): the `tvpType` recorded by input() is not copied here —
    # verify table-valued parameters work through prepared statements.
    for name, param of @parameters
      req.parameters[name] =
        name: name
        type: param.type
        io: param.io
        value: values[name]
        length: param.length
        scale: param.scale
        precision: param.precision

    req.execute 'sp_execute', (err, recordsets, returnValue) =>
      if err then return callback err

      # Without `multiple`, only the first recordset is surfaced.
      callback null, (if @multiple then recordsets else recordsets[0])

    req

  ###
  Unprepare a prepared statement.
  @callback [callback] A callback which is called after unpreparation has completed, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {PreparedStatement|Promise}
  ###
  unprepare: (callback) ->
    if callback?
      return @_unprepare callback

    new module.exports.Promise (resolve, reject) =>
      @_unprepare (err) ->
        if err then return reject err
        resolve()

  _unprepare: (callback) ->
    unless @_pooledConnection
      callback new PreparedStatementError "Statement is not prepared. Call prepare() first.", 'ENOTPREPARED'
      return @

    # After sp_unprepare succeeds, return the connection and reset state.
    done = (err) =>
      if err then return callback err

      if @transaction
        @transaction.next()
      else
        @connection.pool.release @_pooledConnection
      @_pooledConnection = null
      @_handle = 0

      callback null

    req = new Request @
    req.stream = false
    req.input 'handle', TYPES.Int, @_handle
    req.execute 'sp_unprepare', done

    @
###
Class Transaction.
@property {Connection} connection Reference to used connection.
@property {Number} isolationLevel Controls the locking and row versioning behavior of TSQL statements issued by a connection. READ_COMMITTED by default.
@property {String} name Transaction name. Empty string by default.
@event begin Dispatched when transaction begin.
@event commit Dispatched on successful commit.
@event rollback Dispatched on successful rollback.
###
class Transaction extends EventEmitter
  _pooledConnection: null
  _queue: null
  _aborted: false
  _working: false # if true, there is a request running at the moment
  name: ""
  connection: null # sql.Connection
  isolationLevel: ISOLATION_LEVEL.READ_COMMITTED

  ###
  Create new Transaction.
  @param {Connection} [connection] If ommited, global connection is used instead.
  ###
  constructor: (connection) ->
    @connection = connection ? global_connection
    @_queue = []

  ###
  @private
  ###
  _abort: =>
    # Driver signals the transaction was aborted server-side.
    @connection.driver.Transaction::_abort.call @

  ###
  Begin a transaction.
  @param {Number} [isolationLevel] Controls the locking and row versioning behavior of TSQL statements issued by a connection.
  @callback [callback] A callback which is called after transaction has began, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Transaction|Promise}
  ###
  begin: (isolationLevel, callback) ->
    # Support begin(callback) with the default isolation level.
    if isolationLevel instanceof Function
      callback = isolationLevel
      isolationLevel = undefined

    if callback?
      return @_begin isolationLevel, callback

    new module.exports.Promise (resolve, reject) =>
      @_begin isolationLevel, (err) ->
        if err then return reject err
        resolve()

  _begin: (isolationLevel, callback) ->
    @isolationLevel = isolationLevel if isolationLevel?

    if @_pooledConnection
      callback new TransactionError "Transaction has already begun.", 'EALREADYBEGUN'
      return @

    @connection.driver.Transaction::begin.call @, (err) =>
      unless err then @emit 'begin'
      callback err

    @

  ###
  Commit a transaction.
  @callback [callback] A callback which is called after transaction has commited, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Transaction|Promise}
  ###
  commit: (callback) ->
    if callback?
      return @_commit callback

    new module.exports.Promise (resolve, reject) =>
      @_commit (err) ->
        if err then return reject err
        resolve()

  _commit: (callback) ->
    unless @_pooledConnection
      callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
      return @

    if @_working
      callback new TransactionError "Can't commit transaction. There is a request in progress.", 'EREQINPROG'
      return @

    if @_queue.length
      callback new TransactionError "Can't commit transaction. There are request in queue.", 'EREQINPROG'
      return @

    @connection.driver.Transaction::commit.call @, (err) =>
      unless err then @emit 'commit'
      callback err

    @

  ###
  Execute next request in queue.
  @private
  @returns {Transaction}
  ###
  next: ->
    if @_aborted
      # Fail everything still queued with an abort error.
      toAbort = @_queue
      @_queue = []

      # this must be async to ensure it is not processed earlier than the request that caused abortion of this transaction
      process.nextTick =>
        while toAbort.length
          toAbort.shift() new TransactionError "Transaction aborted.", "EABORT"

    # this must be synchronous so we can rollback a transaction or commit transaction in last request's callback
    @_working = false

    if @_queue.length
      process.nextTick =>
        if @_aborted then return @next() # transaction aborted manually

        @_working = true
        @_queue.shift() null, @_pooledConnection

    @

  ###
  Add request to queue for connection. If queue is empty, execute the request immediately.
  @private
  @callback callback A callback to call when connection in ready to execute request.
  @param {Error} err Error on error, otherwise null.
  @param {*} conn Internal driver's connection.
  @returns {Transaction}
  ###
  queue: (callback) ->
    if @_dedicatedConnection
      callback null, @_dedicatedConnection
      return @

    unless @_pooledConnection
      callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
      return @

    if @_working or @_queue.length
      @_queue.push callback
    else
      @_working = true
      callback null, @_pooledConnection

    @

  ###
  Returns new request using this transaction.
  @returns {Request}
  ###
  request: ->
    new Request @

  ###
  Rollback a transaction.
  @callback [callback] A callback which is called after transaction has rolled back, or an error has occurred. If omited, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Transaction|Promise}
  ###
  rollback: (callback) ->
    if callback?
      return @_rollback callback

    new module.exports.Promise (resolve, reject) =>
      @_rollback (err) ->
        if err then return reject err
        resolve()

  _rollback: (callback) ->
    if @_aborted
      callback new TransactionError "Transaction has been aborted.", 'EABORT'
      return @

    unless @_pooledConnection
      callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
      return @

    if @_working
      callback new TransactionError "Can't rollback transaction. There is a request in progress.", 'EREQINPROG'
      return @

    # Mark aborted so queued requests are failed by next().
    if @_queue.length
      @_aborted = true

    @connection.driver.Transaction::rollback.call @, (err) =>
      unless err then @emit 'rollback', @_aborted
      callback err

    @
###
Class Request.
@property {Connection} connection Reference to used connection.
@property {Transaction} transaction Reference to transaction when request was created in transaction.
@property {*} parameters Collection of input and output parameters.
@property {Boolean} verbose If `true`, debug messages are printed to message log.
@property {Boolean} multiple If `true`, `query` will handle multiple recordsets (`execute` always expect multiple recordsets).
@property {Boolean} canceled `true` if request was canceled.
@event recordset Dispatched when metadata for new recordset are parsed.
@event row Dispatched when new row is parsed.
@event done Dispatched when request is complete.
@event error Dispatched on error.
###
class Request extends EventEmitter
connection: null
transaction: null
pstatement: null
parameters: null
verbose: false
multiple: false
canceled: false
stream: null
###
Create new Request.
@param {Connection|Transaction|PreparedStatement} [connection] Context the
request runs in. If omitted, the module-wide global connection is used.
###
constructor: (connection) ->
  # Resolve the owning connection from whatever context was supplied.
  if connection instanceof Transaction
    @transaction = connection
    @connection = connection.connection
  else if connection instanceof PreparedStatement
    @pstatement = connection
    @connection = connection.connection
  else if connection instanceof Connection
    @connection = connection
  else
    # No explicit context - fall back to the global connection.
    @connection = global_connection
  # Input/output parameters keyed by name (see input()/output()).
  @parameters = {}
###
Log to a function if assigned. Else, use console.log.
###
_log: (out) ->
  # Prefer a user-supplied @logger; otherwise fall back to the console.
  if typeof @logger is "function" then @logger out else console.log out
###
Acquire a driver connection for this request.
Transactions and prepared statements own a dedicated connection, so the
request is queued on them; otherwise a connection is checked out of the pool.
@private
###
_acquire: (callback) ->
  if @transaction
    @transaction.queue callback
  else if @pstatement
    @pstatement.queue callback
  else
    unless @connection.pool
      return callback new ConnectionError "Connection not yet open.", 'ENOTOPEN'
    @connection.pool.acquire callback

###
Release the connection used by this request.
For transactions/prepared statements this advances their internal queue;
pooled connections are returned to the pool.
@private
###
_release: (connection) ->
  if @transaction
    @transaction.next()
  else if @pstatement
    @pstatement.next()
  else
    @connection.pool.release connection
###
Add an input parameter to the request.
**Example:**
```
request.input('input_parameter', value);
request.input('input_parameter', sql.Int, value);
```
@param {String} name Name of the input parameter without @ char.
@param {*} [type] SQL data type of input parameter. If you omit type, module automaticaly decide which SQL data type should be used based on JS data type.
@param {*} value Input parameter value. `undefined` and `NaN` values are automatically converted to `null` values.
@returns {Request}
###
input: (name, type, value) ->
  # Defensive check against SQL injection via parameter names.
  if (/(--| |\/\*|\*\/|')/).test name
    throw new RequestError "SQL injection warning for param '#{name}'", 'EINJECT'
  if arguments.length is 1
    throw new RequestError "Invalid number of arguments. At least 2 arguments expected.", 'EARGS'
  else if arguments.length is 2
    # Two-argument form: infer the SQL type from the JS value.
    value = type
    type = getTypeByValue(value)
  # support for custom data types
  if value?.valueOf and value not instanceof Date then value = value.valueOf()
  # undefined to null
  if value is undefined then value = null
  # NaN to null (NaN is the only value not equal to itself)
  if value isnt value then value = null
  # Allow passing the type constructor itself (e.g. sql.Int) instead of an instance.
  if type instanceof Function
    type = type()
  @parameters[name] =
    name: name
    type: type.type
    io: 1  # 1 = input parameter
    value: value
    length: type.length
    scale: type.scale
    precision: type.precision
    tvpType: type.tvpType
  @
###
Add an output parameter to the request.
**Example:**
```
request.output('output_parameter', sql.Int);
request.output('output_parameter', sql.VarChar(50), 'abc');
```
@param {String} name Name of the output parameter without @ char.
@param {*} type SQL data type of output parameter.
@param {*} [value] Output parameter value initial value. `undefined` and `NaN` values are automatically converted to `null` values. Optional.
@returns {Request}
###
output: (name, type, value) ->
  # Default output type is NVarChar.
  unless type then type = TYPES.NVarChar
  # Defensive check against SQL injection via parameter names.
  if (/(--| |\/\*|\*\/|')/).test name
    throw new RequestError "SQL injection warning for param '#{name}'", 'EINJECT'
  # Deprecated LOB types cannot be used as OUTPUT parameters.
  if type is TYPES.Text or type is TYPES.NText or type is TYPES.Image
    throw new RequestError "Deprecated types (Text, NText, Image) are not supported as OUTPUT parameters.", 'EDEPRECATED'
  # support for custom data types
  if value?.valueOf and value not instanceof Date then value = value.valueOf()
  # undefined to null
  if value is undefined then value = null
  # NaN to null (NaN is the only value not equal to itself)
  if value isnt value then value = null
  # Allow passing the type constructor itself (e.g. sql.Int) instead of an instance.
  if type instanceof Function
    type = type()
  @parameters[name] =
    name: name
    type: type.type
    io: 2  # 2 = output parameter
    value: value
    length: type.length
    scale: type.scale
    precision: type.precision
  @
###
Execute the SQL batch.
@param {String} batch T-SQL batch to be executed.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@param {*} recordset Recordset.
@returns {Request|Promise}
###
batch: (batch, callback) ->
  # Inherit streaming mode from connection config unless set explicitly.
  @stream ?= @connection?.config.stream
  # Callback / streaming style.
  if @stream or callback?
    return @_batch batch, callback
  # No callback and not streaming: return a Promise.
  new module.exports.Promise (resolve, reject) =>
    @_batch batch, (err, recordset) ->
      if err then return reject err
      resolve recordset

###
Execute the batch on the driver level.
@private
###
_batch: (batch, callback) ->
  unless @connection
    # Always fail asynchronously so callers see consistent async behavior.
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  @canceled = false
  # Delegate to the configured driver's implementation.
  @connection.driver.Request::batch.call @, batch, (err, recordset) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done'
    else
      callback err, recordset
  @
###
Bulk load.
@param {Table} table SQL table.
@callback [callback] A callback which is called after bulk load has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Request|Promise}
###
bulk: (table, callback) ->
  # Inherit streaming mode from connection config unless set explicitly.
  @stream ?= @connection?.config.stream
  if @stream or callback?
    return @_bulk table, callback
  # No callback and not streaming: return a Promise.
  new module.exports.Promise (resolve, reject) =>
    @_bulk table, (err, rowCount) ->
      if err then return reject err
      resolve rowCount

###
Perform the bulk load on the driver level.
@private
###
_bulk: (table, callback) ->
  unless @connection
    # Always fail asynchronously so callers see consistent async behavior.
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  @canceled = false
  # Delegate to the configured driver's implementation.
  @connection.driver.Request::bulk.call @, table, (err, rowCount) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done'
    else
      callback err, rowCount
  @
###
Sets request to `stream` mode and pulls all rows from all recordsets to a given stream.
@param {Stream} stream Stream to pipe data into.
@returns {Stream}
###
pipe: (stream) ->
  @stream = true
  # Forward parsed rows and errors straight into the target stream.
  @on 'row', stream.write.bind stream
  @on 'error', stream.emit.bind stream, 'error'
  # End the target stream on the next turn after the request completes.
  @on 'done', -> setImmediate -> stream.end()
  stream.emit 'pipe', @
  stream
###
Execute the SQL command.
**Example:**
```
var request = new sql.Request();
request.query('select 1 as number', function(err, recordset) {
console.log(recordset[0].number); // return 1
// ...
});
```
You can enable multiple recordsets in querries by `request.multiple = true` command.
```
var request = new sql.Request();
request.multiple = true;
request.query('select 1 as number; select 2 as number', function(err, recordsets) {
console.log(recordsets[0][0].number); // return 1
console.log(recordsets[1][0].number); // return 2
// ...
});
```
@param {String} command T-SQL command to be executed.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@param {*} recordset Recordset.
@returns {Request|Promise}
###
query: (command, callback) ->
  # Inherit streaming mode from connection config unless set explicitly.
  @stream ?= @connection?.config.stream
  if @stream or callback?
    return @_query command, callback
  # No callback and not streaming: return a Promise.
  new module.exports.Promise (resolve, reject) =>
    @_query command, (err, recordset) ->
      if err then return reject err
      resolve recordset

###
Execute the command on the driver level.
@private
###
_query: (command, callback) ->
  unless @connection
    # Always fail asynchronously so callers see consistent async behavior.
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  @canceled = false
  # Delegate to the configured driver's implementation.
  @connection.driver.Request::query.call @, command, (err, recordset) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done'
    else
      callback err, recordset
  @
###
Call a stored procedure.
**Example:**
```
var request = new sql.Request();
request.input('input_parameter', sql.Int, value);
request.output('output_parameter', sql.Int);
request.execute('procedure_name', function(err, recordsets, returnValue) {
console.log(recordsets.length); // count of recordsets returned by procedure
console.log(recordset[0].length); // count of rows contained in first recordset
console.log(returnValue); // procedure return value
console.log(recordsets.returnValue); // procedure return value
console.log(request.parameters.output_parameter.value); // output value
// ...
});
```
@param {String} procedure Name of the stored procedure to be executed.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@param {Array} recordsets Recordsets.
@param {Number} returnValue Procedure return value.
@returns {Request|Promise}
###
execute: (command, callback) ->
  # Inherit streaming mode from connection config unless set explicitly.
  @stream ?= @connection?.config.stream
  if @stream or callback?
    return @_execute command, callback
  # No callback and not streaming: return a Promise.
  new module.exports.Promise (resolve, reject) =>
    @_execute command, (err, recordset) ->
      if err then return reject err
      resolve recordset
###
Execute the stored procedure on the driver level.
Bug fix: the no-connection guard used a thin arrow (->), so `@stream` and
`@emit` were evaluated with the wrong `this` inside the process.nextTick
callback. It now uses a fat arrow (=>), consistent with _query/_batch/_bulk.
@private
###
_execute: (procedure, callback) ->
  unless @connection
    # Always fail asynchronously so callers see consistent async behavior.
    return process.nextTick =>
      e = new RequestError "No connection is specified for that request.", 'ENOCONN'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  unless @connection.connected
    return process.nextTick =>
      e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'
      if @stream
        @emit 'error', e
        @emit 'done'
      else
        callback e
  @canceled = false
  # Delegate to the configured driver's implementation.
  @connection.driver.Request::execute.call @, procedure, (err, recordsets, returnValue) =>
    if @stream
      if err then @emit 'error', err
      @emit 'done', returnValue
    else
      callback err, recordsets, returnValue
  @
###
Cancel currently executed request.
@returns {Request}
###
cancel: ->
  @canceled = true
  # Delegate cancellation to the driver implementation.
  @connection.driver.Request::cancel.call @
  @
###
Error raised for connection-level failures.
May be invoked without `new`; when given an existing Error it wraps it and
exposes the wrapped error as a non-enumerable `originalError` property.
###
class ConnectionError extends Error
  constructor: (message, code) ->
    unless @ instanceof ConnectionError
      if message instanceof Error
        err = new ConnectionError message.message, message.code
        Object.defineProperty err, 'originalError', value: message
        # Use the named constructor instead of deprecated arguments.callee
        # (arguments.callee throws in strict mode).
        Error.captureStackTrace err, ConnectionError
        return err
      else
        err = new ConnectionError message
        Error.captureStackTrace err, ConnectionError
        return err
    @name = @constructor.name
    @message = message
    @code = code if code?
    super()
    Error.captureStackTrace @, @constructor
###
Error raised for transaction-level failures.
May be invoked without `new`; when given an existing Error it wraps it and
exposes the wrapped error as a non-enumerable `originalError` property.
###
class TransactionError extends Error
  constructor: (message, code) ->
    unless @ instanceof TransactionError
      if message instanceof Error
        err = new TransactionError message.message, message.code
        Object.defineProperty err, 'originalError', value: message
        # Use the named constructor instead of deprecated arguments.callee
        # (arguments.callee throws in strict mode).
        Error.captureStackTrace err, TransactionError
        return err
      else
        err = new TransactionError message
        Error.captureStackTrace err, TransactionError
        return err
    @name = @constructor.name
    @message = message
    @code = code if code?
    super()
    Error.captureStackTrace @, @constructor
###
Error raised for request-level failures.
When wrapping a driver error, SQL diagnostic fields (number, lineNumber,
state, class, serverName, procName) are copied from the driver's info
object when available.
###
class RequestError extends Error
  constructor: (message, code) ->
    unless @ instanceof RequestError
      if message instanceof Error
        err = new RequestError message.message, message.code ? code
        err.number = message.info?.number ? message.code # err.code is returned by msnodesql driver
        err.lineNumber = message.info?.lineNumber
        err.state = message.info?.state ? message.sqlstate # err.sqlstate is returned by msnodesql driver
        err.class = message.info?.class ? message.info?.severity # err.severity is returned by tds
        err.serverName = message.info?.serverName
        err.procName = message.info?.procName
        Object.defineProperty err, 'originalError', value: message
        # Use the named constructor instead of deprecated arguments.callee
        # (arguments.callee throws in strict mode).
        Error.captureStackTrace err, RequestError
        return err
      else
        err = new RequestError message
        Error.captureStackTrace err, RequestError
        return err
    @name = @constructor.name
    @message = message
    @code = code if code?
    super()
    Error.captureStackTrace @, @constructor
###
Error raised for prepared-statement failures.
Made consistent with the other error classes: `originalError` is exposed as
a non-enumerable property, `code` is only assigned when provided, and the
deprecated arguments.callee (illegal in strict mode) is replaced with the
named constructor.
###
class PreparedStatementError extends Error
  constructor: (message, code) ->
    unless @ instanceof PreparedStatementError
      if message instanceof Error
        err = new PreparedStatementError message.message, message.code
        # Consistency: siblings hide originalError from enumeration.
        Object.defineProperty err, 'originalError', value: message
        Error.captureStackTrace err, PreparedStatementError
        return err
      else
        err = new PreparedStatementError message
        Error.captureStackTrace err, PreparedStatementError
        return err
    @name = @constructor.name
    @message = message
    # Consistency: only set code when one was actually provided.
    @code = code if code?
    super()
    Error.captureStackTrace @, @constructor
###
Open global connection.
@param {Object} config Connection configuration.
@callback callback A callback which is called after connection has established, or an error has occurred.
@param {Error} err Error on error, otherwise null.
@returns {Connection}
###
module.exports.connect = (config, callback) ->
global_connection = new Connection config
global_connection.connect callback
###
Close global connection.
@returns {Connection}
###
module.exports.close = (callback) ->
global_connection?.close callback
module.exports.on = (event, handler) ->
global_connection?.on event, handler
module.exports.Connection = Connection
module.exports.Transaction = Transaction
module.exports.Request = Request
module.exports.Table = Table
module.exports.PreparedStatement = PreparedStatement
module.exports.ConnectionError = ConnectionError
module.exports.TransactionError = TransactionError
module.exports.RequestError = RequestError
module.exports.PreparedStatementError = PreparedStatementError
module.exports.ISOLATION_LEVEL = ISOLATION_LEVEL
module.exports.DRIVERS = DRIVERS
module.exports.TYPES = TYPES
module.exports.MAX = 65535 # (1 << 16) - 1
module.exports.map = map
module.exports.fix = true
module.exports.Promise = global.Promise ? require('promise')
# append datatypes to this modules export
for key, value of TYPES
module.exports[key] = value
module.exports[key.toUpperCase()] = value
# --- DEPRECATED IN 0.3.0 ------------------------------------------
module.exports.pool =
max: 10
min: 0
idleTimeoutMillis: 30000
module.exports.connection =
userName: ''
password: ''
server: ''
###
Initialize Tedious connection pool from the deprecated module-level
`connection` and `pool` settings.
Bug fix: the password field contained a literal "<PASSWORD>" placeholder
(redaction artifact, not valid code); it is restored to read the deprecated
connection settings like the sibling fields.
@deprecated Use `connect` with a full config object instead.
###
module.exports.init = ->
  module.exports.connect
    user: module.exports.connection.userName
    password: module.exports.connection.password
    server: module.exports.connection.server
    options: module.exports.connection.options
    driver: 'tedious'
    pool: module.exports.pool
util = require 'util'
fs = require 'fs'
{TYPES, declare} = require('./datatypes')
ISOLATION_LEVEL = require('./isolationlevel')
DRIVERS = ['msnodesql', 'tedious', 'tds']
Table = require('./table')
global_connection = null
map = []
###
Register you own type map.
**Example:**
```
sql.map.register(MyClass, sql.Text);
```
You can also overwrite default type map.
```
sql.map.register(Number, sql.BigInt);
```
@path module.exports.map
@param {*} jstype JS data type.
@param {*} sqltype SQL data type.
###
# Register (or replace) a JS-to-SQL type mapping. Any previous mapping for
# the same JS type is removed first so each JS type maps to a single SQL type.
map.register = (jstype, sqltype) ->
  for item, index in @ when item.js is jstype
    @splice index, 1
    break
  @push
    js: jstype
    sql: sqltype
  # Return null so the registration has no accidental return value.
  null
map.register String, TYPES.NVarChar
map.register Number, TYPES.Int
map.register Boolean, TYPES.Bit
map.register Date, TYPES.DateTime
map.register Buffer, TYPES.VarBinary
map.register Table, TYPES.TVP
###
Infer a SQL data type from a JS value, consulting the user type map first.
@ignore
###
getTypeByValue = (value) ->
  # null/undefined carry no type information - default to NVarChar.
  if value is null or value is undefined then return TYPES.NVarChar
  switch typeof value
    when 'string'
      for item in map when item.js is String
        return item.sql
      return TYPES.NVarChar
    when 'number'
      for item in map when item.js is Number
        return item.sql
      return TYPES.Int
    when 'boolean'
      for item in map when item.js is Boolean
        return item.sql
      return TYPES.Bit
    when 'object'
      # First registered class the value is an instance of wins.
      for item in map when value instanceof item.js
        return item.sql
      return TYPES.NVarChar
    else
      return TYPES.NVarChar
###
Class Connection.
Internally, each `Connection` instance is a separate pool of TDS connections. Once you create a new `Request`/`Transaction`/`Prepared Statement`, a new TDS connection is acquired from the pool and reserved for desired action. Once the action is complete, connection is released back to the pool.
@property {Boolean} connected If true, connection is established.
@property {Boolean} connecting If true, connection is being established.
@property {*} driver Reference to configured Driver.
@event connect Dispatched after connection has established.
@event close Dispatched after connection has closed a pool (by calling close).
###
class Connection extends EventEmitter
connected: false
connecting: false
driver: null
###
Create new Connection.
@param {Object} config Connection configuration.
@callback [callback] A callback which is called after connection has established, or an error has occurred.
@param {Error} err Error on error, otherwise null.
###
constructor: (@config, callback) ->
# set defaults
@config.driver ?= 'tedious'
@config.port ?= 1433
@config.options ?= {}
@config.stream ?= false
@config.parseJSON ?= false
if /^(.*)\\(.*)$/.exec @config.server
@config.server = RegExp.$1
@config.options.instanceName = RegExp.$2
if @config.driver in DRIVERS
@driver = @initializeDriver require("./#{@config.driver}")
# fix the driver by default
if module.exports.fix then @driver.fix()
else
err = new ConnectionError "Unknown driver #{@config.driver}!", 'EDRIVER'
if callback
return callback err
else
throw err
if callback then @connect callback
###
Write message to debug stream.
###
_debug: (msg) ->
@_debugStream?.write "#{String(msg).replace(/\x1B\[[0-9;]*m/g, '')}\n"
###
Initializes driver for this connection. Separated from constructor and used by co-mssql.
@private
@param {Function} driver Loaded driver.
@returns {Connection}
###
initializeDriver: (driver) ->
driver Connection, Transaction, Request, ConnectionError, TransactionError, RequestError
###
Creates a new connection pool with one active connection. This one initial connection serves as a probe to find out whether the configuration is valid.
@callback [callback] A callback which is called after connection has established, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Connection|Promise}
###
connect: (callback) ->
if callback?
return @_connect callback
new module.exports.Promise (resolve, reject) =>
@_connect (err) ->
if err then return reject err
resolve()
###
Open the connection pool via the driver; optionally tees protocol debug
output to a log file when config.debug is set.
Bug fix: the debug stream's 'error' handler was a thin arrow (->), so
`@connecting`/`@connected`/`@_debugStream` resolved against the stream,
crashing instead of invoking the callback on open failure. It is now a fat
arrow (=>) like the surrounding closures.
@private
###
_connect: (callback) ->
  if not @driver
    return callback new ConnectionError "Connection was closed. Create a new instance."
  if @connected
    return callback new ConnectionError "Database is already connected! Call close before connecting to different database.", 'EALREADYCONNECTED'
  if @connecting
    return callback new ConnectionError "Already connecting to database! Call close before connecting to different database.", 'EALREADYCONNECTING'
  go = =>
    @connecting = true
    @driver.Connection::connect.call @, @config, (err) =>
      # Bail if close() was called while the driver was connecting.
      unless @connecting then return
      @connecting = false
      if err
        # Connection failed - tear down the debug stream if it was opened.
        if @_debugStream
          @_debugStream.removeAllListeners()
          @_debugStream.end()
          @_debugStream = null
      else
        @connected = true
        @emit 'connect'
      callback err
  if @config.debug
    @_debugStream = fs.createWriteStream "./mssql_debug_#{Date.now()}.log"
    @_debugStream.once 'open', go
    @_debugStream.on 'error', (err) =>
      if @connecting or @connected
        # error after successful open
        console.error err.stack
      else
        @_debugStream.removeListener 'open', go
        callback new ConnectionError("Failed to open debug stream. #{err.message}", 'EDEBUG')
  else
    go()
  @
###
Close all active connections in the pool.
@callback [callback] A callback which is called after connection has closed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Connection|Promise}
###
close: (callback) ->
if callback?
return @_close callback
new module.exports.Promise (resolve, reject) =>
@_close (err) ->
if err then return reject err
resolve()
_close: (callback) ->
if @_debugStream
@_debugStream.removeAllListeners()
@_debugStream.end()
@_debugStream = null
if @connecting
@connecting = false
@driver.Connection::close.call @, (err) =>
callback err
@driver = null
else if @connected
@connected = false
@driver.Connection::close.call @, (err) =>
unless err
@connected = false
@emit 'close'
callback err
@driver = null
@
###
Returns new request using this connection.
@returns {Request}
###
request: ->
new Request @
###
Returns new transaction using this connection.
@returns {Transaction}
###
transaction: ->
new Transaction @
###
Class PreparedStatement.
IMPORTANT: Rememeber that each prepared statement means one reserved connection from the pool. Don't forget to unprepare a prepared statement!
@property {Connection} connection Reference to used connection.
@property {Boolean} multiple If `true`, `execute` will handle multiple recordsets.
@property {String} statement Prepared SQL statement.
###
class PreparedStatement extends EventEmitter
_pooledConnection: null
_queue: null
_working: false # if true, there is a request running at the moment
_handle: 0 # sql prepared statement handle
connection: null # sql.Connection
transaction: null # !null in case we're in transaction
prepared: false
statement: null
parameters: null
multiple: false
stream: null
###
Create new Prepared Statement.
@param {String} statement SQL statement.
@param {Connection} [connection] If ommited, global connection is used instead.
###
constructor: (connection) ->
if connection instanceof Transaction
@transaction = connection
@connection = connection.connection
else if connection instanceof Connection
@connection = connection
else
@connection = global_connection
@_queue = []
@parameters = {}
###
Add an input parameter to the prepared statement.
**Example:**
```
statement.input('input_parameter', sql.Int);
statement.input('input_parameter', sql.VarChar(50));
```
@param {String} name Name of the input parameter without @ char.
@param {*} type SQL data type of input parameter.
@returns {PreparedStatement}
###
input: (name, type) ->
if (/(--| |\/\*|\*\/|')/).test name
throw new PreparedStatementError "SQL injection warning for param '#{name}'", 'EINJECT'
if arguments.length < 2
throw new PreparedStatementError "Invalid number of arguments. 2 arguments expected.", 'EARGS'
if type instanceof Function
type = type()
@parameters[name] =
name: name
type: type.type
io: 1
length: type.length
scale: type.scale
precision: type.precision
tvpType: type.tvpType
@
###
Add an output parameter to the prepared statement.
**Example:**
```
statement.output('output_parameter', sql.Int);
statement.output('output_parameter', sql.VarChar(50));
```
@param {String} name Name of the output parameter without @ char.
@param {*} type SQL data type of output parameter.
@returns {PreparedStatement}
###
output: (name, type) ->
if (/(--| |\/\*|\*\/|')/).test name
throw new PreparedStatementError "SQL injection warning for param '#{name}'", 'EINJECT'
if arguments.length < 2
throw new PreparedStatementError "Invalid number of arguments. 2 arguments expected.", 'EARGS'
if type instanceof Function
type = type()
@parameters[name] =
name: name
type: type.type
io: 2
length: type.length
scale: type.scale
precision: type.precision
@
###
Prepare a statement.
@property {String} [statement] SQL statement to prepare.
@callback [callback] A callback which is called after preparation has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {PreparedStatement|Promise}
###
prepare: (statement, callback) ->
if callback?
return @_prepare statement, callback
new module.exports.Promise (resolve, reject) =>
@_prepare statement, (err) ->
if err then return reject err
resolve()
_prepare: (statement, callback) ->
if @_pooledConnection
callback new PreparedStatementError "Statement is already prepared.", 'EALREADYPREPARED'
return @
if typeof statement is 'function'
callback = statement
statement = undefined
@statement = statement if statement?
done = (err, connection) =>
if err then return callback err
@_pooledConnection = connection
req = new Request @
req.stream = false
req.output 'handle', TYPES.Int
req.input 'params', TYPES.NVarChar, ("@#{name} #{declare(param.type, param)}#{if param.io is 2 then " output" else ""}" for name, param of @parameters).join(',')
req.input 'stmt', TYPES.NVarChar, @statement
req.execute 'sp_prepare', (err) =>
if err
if @transaction
@transaction.next()
else
@connection.pool.release @_pooledConnection
@_pooledConnection = null
return callback err
@_handle = req.parameters.handle.value
callback null
if @transaction
unless @transaction._pooledConnection
callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
return @
@transaction.queue done
else
@connection.pool.acquire done
@
###
Execute next request in queue.
@private
@returns {PreparedStatement}
###
next: ->
if @_queue.length
# defer processing of next request
process.nextTick =>
@_queue.shift() null, @_pooledConnection
else
@_working = false
@
###
Add request to queue for connection. If queue is empty, execute the request immediately.
@private
@callback callback A callback to call when connection in ready to execute request.
@param {Error} err Error on error, otherwise null.
@param {*} conn Internal driver's connection.
@returns {PreparedStatement}
###
queue: (callback) ->
unless @_pooledConnection
callback new PreparedStatementError "Statement is not prepared. Call prepare() first.", 'ENOTPREPARED'
return @
if @_working
@_queue.push callback
else
@_working = true
callback null, @_pooledConnection
@
###
Execute a prepared statement.
@property {String} values An object whose names correspond to the names of parameters that were added to the prepared statement before it was prepared.
@callback [callback] A callback which is called after execution has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Request|Promise}
###
execute: (values, callback) ->
if callback?
return @_execute values, callback
new module.exports.Promise (resolve, reject) =>
@_execute values, (err, recordset) ->
if err then return reject err
resolve recordset
_execute: (values, callback) ->
req = new Request @
req.stream = @stream if @stream?
req.input 'handle', TYPES.Int, @_handle
# copy parameters with new values
for name, param of @parameters
req.parameters[name] =
name: name
type: param.type
io: param.io
value: values[name]
length: param.length
scale: param.scale
precision: param.precision
req.execute 'sp_execute', (err, recordsets, returnValue) =>
if err then return callback err
callback null, (if @multiple then recordsets else recordsets[0])
req
###
Unprepare a prepared statement.
@callback [callback] A callback which is called after unpreparation has completed, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {PreparedStatement|Promise}
###
unprepare: (callback) ->
if callback?
return @_unprepare callback
new module.exports.Promise (resolve, reject) =>
@_unprepare (err) ->
if err then return reject err
resolve()
_unprepare: (callback) ->
unless @_pooledConnection
callback new PreparedStatementError "Statement is not prepared. Call prepare() first.", 'ENOTPREPARED'
return @
done = (err) =>
if err then return callback err
if @transaction
@transaction.next()
else
@connection.pool.release @_pooledConnection
@_pooledConnection = null
@_handle = 0
callback null
req = new Request @
req.stream = false
req.input 'handle', TYPES.Int, @_handle
req.execute 'sp_unprepare', done
@
###
Class Transaction.
@property {Connection} connection Reference to used connection.
@property {Number} isolationLevel Controls the locking and row versioning behavior of TSQL statements issued by a connection. READ_COMMITTED by default.
@property {String} name Transaction name. Empty string by default.
@event begin Dispatched when transaction begin.
@event commit Dispatched on successful commit.
@event rollback Dispatched on successful rollback.
###
class Transaction extends EventEmitter
_pooledConnection: null
_queue: null
_aborted: false
_working: false # if true, there is a request running at the moment
name: ""
connection: null # sql.Connection
isolationLevel: ISOLATION_LEVEL.READ_COMMITTED
###
Create new Transaction.
@param {Connection} [connection] If ommited, global connection is used instead.
###
constructor: (connection) ->
@connection = connection ? global_connection
@_queue = []
###
@private
###
_abort: =>
@connection.driver.Transaction::_abort.call @
###
Begin a transaction.
@param {Number} [isolationLevel] Controls the locking and row versioning behavior of TSQL statements issued by a connection.
@callback [callback] A callback which is called after transaction has began, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Transaction|Promise}
###
begin: (isolationLevel, callback) ->
if isolationLevel instanceof Function
callback = isolationLevel
isolationLevel = undefined
if callback?
return @_begin isolationLevel, callback
new module.exports.Promise (resolve, reject) =>
@_begin isolationLevel, (err) ->
if err then return reject err
resolve()
_begin: (isolationLevel, callback) ->
@isolationLevel = isolationLevel if isolationLevel?
if @_pooledConnection
callback new TransactionError "Transaction has already begun.", 'EALREADYBEGUN'
return @
@connection.driver.Transaction::begin.call @, (err) =>
unless err then @emit 'begin'
callback err
@
###
Commit a transaction.
@callback [callback] A callback which is called after transaction has commited, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Transaction|Promise}
###
commit: (callback) ->
if callback?
return @_commit callback
new module.exports.Promise (resolve, reject) =>
@_commit (err) ->
if err then return reject err
resolve()
_commit: (callback) ->
unless @_pooledConnection
callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
return @
if @_working
callback new TransactionError "Can't commit transaction. There is a request in progress.", 'EREQINPROG'
return @
if @_queue.length
callback new TransactionError "Can't commit transaction. There are request in queue.", 'EREQINPROG'
return @
@connection.driver.Transaction::commit.call @, (err) =>
unless err then @emit 'commit'
callback err
@
###
Execute next request in queue.
@private
@returns {Transaction}
###
next: ->
  if @_aborted
    # Drain the queue and fail every waiting request.
    toAbort = @_queue
    @_queue = []
    # this must be async to ensure it is not processed earlier than the request that caused abortion of this transaction
    process.nextTick =>
      while toAbort.length
        toAbort.shift() new TransactionError "Transaction aborted.", "EABORT"
  # this must be synchronous so we can rollback a transaction or commit transaction in last request's callback
  @_working = false
  if @_queue.length
    process.nextTick =>
      if @_aborted then return @next() # transaction aborted manually
      @_working = true
      @_queue.shift() null, @_pooledConnection
  @
###
Add request to queue for connection. If queue is empty, execute the request immediately.
@private
@callback callback A callback to call when connection in ready to execute request.
@param {Error} err Error on error, otherwise null.
@param {*} conn Internal driver's connection.
@returns {Transaction}
###
queue: (callback) ->
  # A dedicated (driver-specific) connection bypasses the queue entirely.
  if @_dedicatedConnection
    callback null, @_dedicatedConnection
    return @
  unless @_pooledConnection
    callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
    return @
  if @_working or @_queue.length
    # Serialize: only one request may use the transaction's connection at a time.
    @_queue.push callback
  else
    @_working = true
    callback null, @_pooledConnection
  @
###
Returns new request using this transaction.
@returns {Request}
###
request: ->
new Request @
###
Rollback a transaction.
@callback [callback] A callback which is called after transaction has rolled back, or an error has occurred. If omited, method returns Promise.
@param {Error} err Error on error, otherwise null.
@returns {Transaction|Promise}
###
rollback: (callback) ->
if callback?
return @_rollback callback
new module.exports.Promise (resolve, reject) =>
@_rollback (err) ->
if err then return reject err
resolve()
# Internal rollback implementation shared by the callback and Promise paths.
_rollback: (callback) ->
  # Guard clauses: refuse to roll back in invalid states.
  if @_aborted
    callback new TransactionError "Transaction has been aborted.", 'EABORT'
    return @

  unless @_pooledConnection
    callback new TransactionError "Transaction has not begun. Call begin() first.", 'ENOTBEGUN'
    return @

  if @_working
    callback new TransactionError "Can't rollback transaction. There is a request in progress.", 'EREQINPROG'
    return @

  # Pending queued requests are abandoned; mark the transaction aborted
  # so next() can flush them with an EABORT error.
  @_aborted = true if @_queue.length

  @connection.driver.Transaction::rollback.call @, (err) =>
    @emit 'rollback', @_aborted unless err
    callback err

  @
###
Class Request.
@property {Connection} connection Reference to used connection.
@property {Transaction} transaction Reference to transaction when request was created in transaction.
@property {*} parameters Collection of input and output parameters.
@property {Boolean} verbose If `true`, debug messages are printed to message log.
@property {Boolean} multiple If `true`, `query` will handle multiple recordsets (`execute` always expect multiple recordsets).
@property {Boolean} canceled `true` if request was canceled.
@event recordset Dispatched when metadata for new recordset are parsed.
@event row Dispatched when new row is parsed.
@event done Dispatched when request is complete.
@event error Dispatched on error.
###
class Request extends EventEmitter
  connection: null
  transaction: null
  pstatement: null
  parameters: null
  verbose: false
  multiple: false
  canceled: false
  stream: null

  ###
  Create new Request.
  @param {Connection|Transaction} connection If omitted, global connection is used instead.
  ###
  constructor: (connection) ->
    if connection instanceof Transaction
      @transaction = connection
      @connection = connection.connection
    else if connection instanceof PreparedStatement
      @pstatement = connection
      @connection = connection.connection
    else if connection instanceof Connection
      @connection = connection
    else
      @connection = global_connection

    @parameters = {}

  ###
  Log to a function if assigned. Else, use console.log.
  ###
  _log: (out) ->
    if typeof @logger is "function" then @logger out else console.log out

  ###
  Acquire connection for this request from connection.
  ###
  _acquire: (callback) ->
    if @transaction
      @transaction.queue callback
    else if @pstatement
      @pstatement.queue callback
    else
      unless @connection.pool
        return callback new ConnectionError "Connection not yet open.", 'ENOTOPEN'
      @connection.pool.acquire callback

  ###
  Release connection used by this request.
  ###
  _release: (connection) ->
    if @transaction
      @transaction.next()
    else if @pstatement
      @pstatement.next()
    else
      @connection.pool.release connection

  ###
  Add an input parameter to the request.
  **Example:**
  ```
  request.input('input_parameter', value);
  request.input('input_parameter', sql.Int, value);
  ```
  @param {String} name Name of the input parameter without @ char.
  @param {*} [type] SQL data type of input parameter. If you omit type, module automaticaly decide which SQL data type should be used based on JS data type.
  @param {*} value Input parameter value. `undefined` and `NaN` values are automatically converted to `null` values.
  @returns {Request}
  ###
  input: (name, type, value) ->
    if (/(--| |\/\*|\*\/|')/).test name
      throw new RequestError "SQL injection warning for param '#{name}'", 'EINJECT'

    if arguments.length is 1
      throw new RequestError "Invalid number of arguments. At least 2 arguments expected.", 'EARGS'
    else if arguments.length is 2
      # Two-argument form: second argument is the value; infer the SQL type.
      value = type
      type = getTypeByValue(value)

    # support for custom data types
    if value?.valueOf and value not instanceof Date then value = value.valueOf()
    # undefined to null
    if value is undefined then value = null
    # NaN to null (NaN is the only value not equal to itself)
    if value isnt value then value = null

    if type instanceof Function
      type = type()

    @parameters[name] =
      name: name
      type: type.type
      io: 1
      value: value
      length: type.length
      scale: type.scale
      precision: type.precision
      tvpType: type.tvpType

    @

  ###
  Add an output parameter to the request.
  **Example:**
  ```
  request.output('output_parameter', sql.Int);
  request.output('output_parameter', sql.VarChar(50), 'abc');
  ```
  @param {String} name Name of the output parameter without @ char.
  @param {*} type SQL data type of output parameter.
  @param {*} [value] Output parameter value initial value. `undefined` and `NaN` values are automatically converted to `null` values. Optional.
  @returns {Request}
  ###
  output: (name, type, value) ->
    unless type then type = TYPES.NVarChar
    if (/(--| |\/\*|\*\/|')/).test name
      throw new RequestError "SQL injection warning for param '#{name}'", 'EINJECT'

    if type is TYPES.Text or type is TYPES.NText or type is TYPES.Image
      throw new RequestError "Deprecated types (Text, NText, Image) are not supported as OUTPUT parameters.", 'EDEPRECATED'

    # support for custom data types
    if value?.valueOf and value not instanceof Date then value = value.valueOf()
    # undefined to null
    if value is undefined then value = null
    # NaN to null
    if value isnt value then value = null

    if type instanceof Function
      type = type()

    @parameters[name] =
      name: name
      type: type.type
      io: 2
      value: value
      length: type.length
      scale: type.scale
      precision: type.precision

    @

  ###
  Execute the SQL batch.
  @param {String} batch T-SQL batch to be executed.
  @callback [callback] A callback which is called after execution has completed, or an error has occurred. If omitted, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @param {*} recordset Recordset.
  @returns {Request|Promise}
  ###
  batch: (batch, callback) ->
    # Inherit stream mode from the connection config unless set explicitly.
    @stream ?= @connection?.config.stream

    if @stream or callback?
      return @_batch batch, callback

    # No callback and not streaming: expose the result as a Promise.
    new module.exports.Promise (resolve, reject) =>
      @_batch batch, (err, recordset) ->
        if err then return reject err
        resolve recordset

  _batch: (batch, callback) ->
    unless @connection
      # Report asynchronously so callers can attach listeners first.
      return process.nextTick =>
        e = new RequestError "No connection is specified for that request.", 'ENOCONN'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    unless @connection.connected
      return process.nextTick =>
        e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    @canceled = false
    @connection.driver.Request::batch.call @, batch, (err, recordset) =>
      if @stream
        if err then @emit 'error', err
        @emit 'done'
      else
        callback err, recordset

    @

  ###
  Bulk load.
  @param {Table} table SQL table.
  @callback [callback] A callback which is called after bulk load has completed, or an error has occurred. If omitted, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @returns {Request|Promise}
  ###
  bulk: (table, callback) ->
    @stream ?= @connection?.config.stream

    if @stream or callback?
      return @_bulk table, callback

    new module.exports.Promise (resolve, reject) =>
      @_bulk table, (err, rowCount) ->
        if err then return reject err
        resolve rowCount

  _bulk: (table, callback) ->
    unless @connection
      return process.nextTick =>
        e = new RequestError "No connection is specified for that request.", 'ENOCONN'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    unless @connection.connected
      return process.nextTick =>
        e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    @canceled = false
    @connection.driver.Request::bulk.call @, table, (err, rowCount) =>
      if @stream
        if err then @emit 'error', err
        @emit 'done'
      else
        callback err, rowCount

    @

  ###
  Sets request to `stream` mode and pulls all rows from all recordsets to a given stream.
  @param {Stream} stream Stream to pipe data into.
  @returns {Stream}
  ###
  pipe: (stream) ->
    @stream = true
    @on 'row', stream.write.bind stream
    @on 'error', stream.emit.bind stream, 'error'
    @on 'done', -> setImmediate -> stream.end()
    stream.emit 'pipe', @
    stream

  ###
  Execute the SQL command.
  **Example:**
  ```
  var request = new sql.Request();
  request.query('select 1 as number', function(err, recordset) {
  console.log(recordset[0].number); // return 1
  // ...
  });
  ```
  You can enable multiple recordsets in querries by `request.multiple = true` command.
  ```
  var request = new sql.Request();
  request.multiple = true;
  request.query('select 1 as number; select 2 as number', function(err, recordsets) {
  console.log(recordsets[0][0].number); // return 1
  console.log(recordsets[1][0].number); // return 2
  // ...
  });
  ```
  @param {String} command T-SQL command to be executed.
  @callback [callback] A callback which is called after execution has completed, or an error has occurred. If omitted, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @param {*} recordset Recordset.
  @returns {Request|Promise}
  ###
  query: (command, callback) ->
    @stream ?= @connection?.config.stream

    if @stream or callback?
      return @_query command, callback

    new module.exports.Promise (resolve, reject) =>
      @_query command, (err, recordset) ->
        if err then return reject err
        resolve recordset

  _query: (command, callback) ->
    unless @connection
      return process.nextTick =>
        e = new RequestError "No connection is specified for that request.", 'ENOCONN'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    unless @connection.connected
      return process.nextTick =>
        e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    @canceled = false
    @connection.driver.Request::query.call @, command, (err, recordset) =>
      if @stream
        if err then @emit 'error', err
        @emit 'done'
      else
        callback err, recordset

    @

  ###
  Call a stored procedure.
  **Example:**
  ```
  var request = new sql.Request();
  request.input('input_parameter', sql.Int, value);
  request.output('output_parameter', sql.Int);
  request.execute('procedure_name', function(err, recordsets, returnValue) {
  console.log(recordsets.length); // count of recordsets returned by procedure
  console.log(recordset[0].length); // count of rows contained in first recordset
  console.log(returnValue); // procedure return value
  console.log(recordsets.returnValue); // procedure return value
  console.log(request.parameters.output_parameter.value); // output value
  // ...
  });
  ```
  @param {String} procedure Name of the stored procedure to be executed.
  @callback [callback] A callback which is called after execution has completed, or an error has occurred. If omitted, method returns Promise.
  @param {Error} err Error on error, otherwise null.
  @param {Array} recordsets Recordsets.
  @param {Number} returnValue Procedure return value.
  @returns {Request|Promise}
  ###
  execute: (command, callback) ->
    @stream ?= @connection?.config.stream

    if @stream or callback?
      return @_execute command, callback

    new module.exports.Promise (resolve, reject) =>
      @_execute command, (err, recordset) ->
        if err then return reject err
        resolve recordset

  _execute: (procedure, callback) ->
    unless @connection
      # FIX: must be a fat arrow (=>) like _batch/_bulk/_query; a thin arrow
      # loses `this`, breaking @stream/@emit on this error path.
      return process.nextTick =>
        e = new RequestError "No connection is specified for that request.", 'ENOCONN'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    unless @connection.connected
      return process.nextTick =>
        e = new ConnectionError "Connection is closed.", 'ECONNCLOSED'

        if @stream
          @emit 'error', e
          @emit 'done'
        else
          callback e

    @canceled = false
    @connection.driver.Request::execute.call @, procedure, (err, recordsets, returnValue) =>
      if @stream
        if err then @emit 'error', err
        @emit 'done', returnValue
      else
        callback err, recordsets, returnValue

    @

  ###
  Cancel currently executed request.
  @returns {Request}
  ###
  cancel: ->
    @canceled = true
    @connection.driver.Request::cancel.call @
    @
# Error raised for connection-level failures. Callable without `new`,
# and can wrap an existing Error while preserving its message/code.
class ConnectionError extends Error
  constructor: (message, code) ->
    unless @ instanceof ConnectionError
      if message instanceof Error
        # Wrap a foreign error; keep the original non-enumerable.
        wrapped = new ConnectionError message.message, message.code
        Object.defineProperty wrapped, 'originalError', value: message
        Error.captureStackTrace wrapped, arguments.callee
        return wrapped

      wrapped = new ConnectionError message
      Error.captureStackTrace wrapped, arguments.callee
      return wrapped

    @name = @constructor.name
    @message = message
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
# Error raised for transaction-level failures. Callable without `new`,
# and can wrap an existing Error while preserving its message/code.
class TransactionError extends Error
  constructor: (message, code) ->
    unless @ instanceof TransactionError
      if message instanceof Error
        # Wrap a foreign error; keep the original non-enumerable.
        wrapped = new TransactionError message.message, message.code
        Object.defineProperty wrapped, 'originalError', value: message
        Error.captureStackTrace wrapped, arguments.callee
        return wrapped

      wrapped = new TransactionError message
      Error.captureStackTrace wrapped, arguments.callee
      return wrapped

    @name = @constructor.name
    @message = message
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
# Error raised for request-level failures. Callable without `new`; when
# wrapping a driver error it normalizes the driver-specific fields.
class RequestError extends Error
  constructor: (message, code) ->
    unless @ instanceof RequestError
      if message instanceof Error
        wrapped = new RequestError message.message, message.code ? code
        wrapped.number = message.info?.number ? message.code # err.code is returned by msnodesql driver
        wrapped.lineNumber = message.info?.lineNumber
        wrapped.state = message.info?.state ? message.sqlstate # err.sqlstate is returned by msnodesql driver
        wrapped.class = message.info?.class ? message.info?.severity # err.severity is returned by tds
        wrapped.serverName = message.info?.serverName
        wrapped.procName = message.info?.procName
        Object.defineProperty wrapped, 'originalError', value: message
        Error.captureStackTrace wrapped, arguments.callee
        return wrapped

      wrapped = new RequestError message
      Error.captureStackTrace wrapped, arguments.callee
      return wrapped

    @name = @constructor.name
    @message = message
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
# Error raised for prepared-statement failures. Callable without `new`,
# and can wrap an existing Error while preserving its message/code.
class PreparedStatementError extends Error
  constructor: (message, code) ->
    unless @ instanceof PreparedStatementError
      if message instanceof Error
        err = new PreparedStatementError message.message, message.code
        # Use a non-enumerable property for the wrapped error, consistent
        # with ConnectionError/TransactionError/RequestError above
        # (was a plain enumerable assignment here).
        Object.defineProperty err, 'originalError', value: message
        Error.captureStackTrace err, arguments.callee
        return err
      else
        err = new PreparedStatementError message
        Error.captureStackTrace err, arguments.callee
        return err

    @name = @constructor.name
    @message = message
    # Only set `code` when one was provided, matching the sibling error
    # classes (was an unconditional assignment that could set undefined).
    @code = code if code?

    super()
    Error.captureStackTrace @, @constructor
###
Open global connection.
@param {Object} config Connection configuration.
@callback callback A callback which is called after connection has established, or an error has occurred.
@param {Error} err Error on error, otherwise null.
@returns {Connection}
###
module.exports.connect = (config, callback) ->
  # Replace the module-wide shared connection, then open it.
  global_connection = new Connection config
  global_connection.connect callback
###
Close global connection.
@returns {Connection}
###
module.exports.close = (callback) ->
  # No-op when the global connection was never opened.
  global_connection?.close callback
# Forward event subscription to the global connection, if one exists.
module.exports.on = (event, handler) ->
  global_connection?.on event, handler
# Public classes
module.exports.Connection = Connection
module.exports.Transaction = Transaction
module.exports.Request = Request
module.exports.Table = Table
module.exports.PreparedStatement = PreparedStatement

# Error classes
module.exports.ConnectionError = ConnectionError
module.exports.TransactionError = TransactionError
module.exports.RequestError = RequestError
module.exports.PreparedStatementError = PreparedStatementError

# Constants and helpers
module.exports.ISOLATION_LEVEL = ISOLATION_LEVEL
module.exports.DRIVERS = DRIVERS
module.exports.TYPES = TYPES
module.exports.MAX = 65535 # (1 << 16) - 1
module.exports.map = map
module.exports.fix = true
# Prefer the native Promise; fall back to the `promise` polyfill.
module.exports.Promise = global.Promise ? require('promise')

# Append every datatype to this module's exports under both its original
# and its upper-case name (e.g. `sql.NVarChar` and `sql.NVARCHAR`).
for key, value of TYPES
  module.exports[key] = value
  module.exports[key.toUpperCase()] = value
# --- DEPRECATED IN 0.3.0 ------------------------------------------

# Default pool settings consumed by the deprecated init() helper below.
module.exports.pool =
  max: 10
  min: 0
  idleTimeoutMillis: 30000

# Default credential holder consumed by the deprecated init() helper below.
module.exports.connection =
  userName: ''
  password: ''
  server: ''
###
Initialize Tedious connection pool.
@deprecated
###
module.exports.init = ->
  # Translate the legacy `module.exports.connection`/`pool` settings into
  # a modern connect() call pinned to the tedious driver.
  # FIX: the password line contained a scrubbing placeholder
  # (PI:PASSWORD:...); restored to read from the deprecated config object
  # declared above, matching the userName/server lines.
  module.exports.connect
    user: module.exports.connection.userName
    password: module.exports.connection.password
    server: module.exports.connection.server
    options: module.exports.connection.options
    driver: 'tedious'
    pool: module.exports.pool
[
{
"context": "on,development,web standards\n\t\t\t\"\"\"\n\t\tauthors:\n\t\t\tbrianleroux:\n\t\t\t\tname: \"Brian LeRoux\"\n\t\t\t\ttwitter: \"brianlero",
"end": 409,
"score": 0.8377548456192017,
"start": 398,
"tag": "USERNAME",
"value": "brianleroux"
},
{
"context": "ards\n\t\t\t\"\... | docpad.coffee | topcoat/topcoat.github.com | 31 | # DocPad Configuration File
# http://docpad.org/docs/config
markdown = require( "markdown" ).markdown;
# Define the DocPad Configuration
docpadConfig = {
templateData: #default data
site:
url: "http://topcoat.io"
title: "Topcoat"
description: """
CSS for clean and fast web apps.
"""
keywords: """
css,opensource,application,development,web standards
"""
authors:
brianleroux:
name: "Brian LeRoux"
twitter: "brianleroux"
url: "http://brian.io/"
dam:
name: "Kristofer Joseph"
twitter: "dam"
url: "http://kristoferjoseph.com/"
garthdb:
name: "Garth Braithwaite"
twitter: "garthdb"
url: "http://www.garthdb.com/"
nnnnic:
name: "Nic Couillard"
twitter: "nnnnic"
url: "http://nnnnic.com/"
g1sh:
name: "Guillermo Torres"
twitter: "g1sh"
url: "http://www.g1sh.com/"
ndreio:
name: "Andrei Oprea"
twitter: "ndreio"
url: "http://github.com/piatra"
nodoherty:
name: "Niall O'Doherty"
twitter: "nodoherty"
url: "http://github.com/nodoherty"
formatDate: (postdate) ->
monthsArray = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
curr_date = postdate.getDate()
curr_month = monthsArray[postdate.getMonth()]
curr_year = postdate.getFullYear()
return (curr_date + " " + curr_month + " " + curr_year)
formatAuthor: (author) ->
authorName =''
authorUrl = ''
if (typeof author == 'object')
authorName = author.name
authorUrl = author.url
else
authorName = @authors[author].name
authorUrl = @authors[author].url
return "<a href='#{authorUrl}'>#{authorName}</a>"
markdowned: (copy) ->
return markdown.toHTML(copy)
collections:
pages: ->
@getCollection("html").findAll({isPage:true})
posts: ->
@getCollection('documents').findAllLive({isPost:true},[date:-1])
plugins:
cleanurls:
trailingSlashes: true
environments:
static:
outPath: ''
}
# Export the DocPad Configuration
module.exports = docpadConfig | 166069 | # DocPad Configuration File
# http://docpad.org/docs/config
markdown = require( "markdown" ).markdown;
# Define the DocPad Configuration
docpadConfig = {
templateData: #default data
site:
url: "http://topcoat.io"
title: "Topcoat"
description: """
CSS for clean and fast web apps.
"""
keywords: """
css,opensource,application,development,web standards
"""
authors:
brianleroux:
name: "<NAME>"
twitter: "brianleroux"
url: "http://brian.io/"
dam:
name: "<NAME>"
twitter: "dam"
url: "http://kristoferjoseph.com/"
garthdb:
name: "<NAME>"
twitter: "garthdb"
url: "http://www.garthdb.com/"
nnnnic:
name: "<NAME>"
twitter: "nnnnic"
url: "http://nnnnic.com/"
g1sh:
name: "<NAME>"
twitter: "g1sh"
url: "http://www.g1sh.com/"
ndreio:
name: "<NAME>"
twitter: "ndreio"
url: "http://github.com/piatra"
nodoherty:
name: "<NAME>"
twitter: "nodoherty"
url: "http://github.com/nodoherty"
formatDate: (postdate) ->
monthsArray = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
curr_date = postdate.getDate()
curr_month = monthsArray[postdate.getMonth()]
curr_year = postdate.getFullYear()
return (curr_date + " " + curr_month + " " + curr_year)
formatAuthor: (author) ->
authorName =''
authorUrl = ''
if (typeof author == 'object')
authorName = author.name
authorUrl = author.url
else
authorName = @authors[author].name
authorUrl = @authors[author].url
return "<a href='#{authorUrl}'>#{authorName}</a>"
markdowned: (copy) ->
return markdown.toHTML(copy)
collections:
pages: ->
@getCollection("html").findAll({isPage:true})
posts: ->
@getCollection('documents').findAllLive({isPost:true},[date:-1])
plugins:
cleanurls:
trailingSlashes: true
environments:
static:
outPath: ''
}
# Export the DocPad Configuration
module.exports = docpadConfig | true | # DocPad Configuration File
# http://docpad.org/docs/config
markdown = require( "markdown" ).markdown;
# Define the DocPad Configuration
docpadConfig = {
templateData: #default data
site:
url: "http://topcoat.io"
title: "Topcoat"
description: """
CSS for clean and fast web apps.
"""
keywords: """
css,opensource,application,development,web standards
"""
authors:
brianleroux:
name: "PI:NAME:<NAME>END_PI"
twitter: "brianleroux"
url: "http://brian.io/"
dam:
name: "PI:NAME:<NAME>END_PI"
twitter: "dam"
url: "http://kristoferjoseph.com/"
garthdb:
name: "PI:NAME:<NAME>END_PI"
twitter: "garthdb"
url: "http://www.garthdb.com/"
nnnnic:
name: "PI:NAME:<NAME>END_PI"
twitter: "nnnnic"
url: "http://nnnnic.com/"
g1sh:
name: "PI:NAME:<NAME>END_PI"
twitter: "g1sh"
url: "http://www.g1sh.com/"
ndreio:
name: "PI:NAME:<NAME>END_PI"
twitter: "ndreio"
url: "http://github.com/piatra"
nodoherty:
name: "PI:NAME:<NAME>END_PI"
twitter: "nodoherty"
url: "http://github.com/nodoherty"
formatDate: (postdate) ->
monthsArray = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
curr_date = postdate.getDate()
curr_month = monthsArray[postdate.getMonth()]
curr_year = postdate.getFullYear()
return (curr_date + " " + curr_month + " " + curr_year)
formatAuthor: (author) ->
authorName =''
authorUrl = ''
if (typeof author == 'object')
authorName = author.name
authorUrl = author.url
else
authorName = @authors[author].name
authorUrl = @authors[author].url
return "<a href='#{authorUrl}'>#{authorName}</a>"
markdowned: (copy) ->
return markdown.toHTML(copy)
collections:
pages: ->
@getCollection("html").findAll({isPage:true})
posts: ->
@getCollection('documents').findAllLive({isPost:true},[date:-1])
plugins:
cleanurls:
trailingSlashes: true
environments:
static:
outPath: ''
}
# Export the DocPad Configuration
module.exports = docpadConfig |
[
{
"context": "n Example:\n# data = [\n# {\n# \"name\" : \"Juan\"\n# },\n# {\n# \"name\" : \"Diego\"\n# ",
"end": 385,
"score": 0.9994654655456543,
"start": 381,
"tag": "NAME",
"value": "Juan"
},
{
"context": "me\" : \"Juan\"\n# },\n# {\n# ... | assets/coffee/appHome/directives/_directive_boxSearch.coffee | ajenjo/backend-definition | 0 | # appHome :: directives/_directive_boxSearch.coffee
#
# Permite obtener valores que requieren de multiples valores.
#
# Example:
# <div form-repeat-element="data">
# <div form-repeat-element-singular>
# Nombre
# <input form-repeat-element-value name="name" type="text" value="">
# </div>
# </div>
#
# Return Example:
# data = [
# {
# "name" : "Juan"
# },
# {
# "name" : "Diego"
# }
# ]
#
# Esta diractiva es capas de manejar los datos con los que se alimanta
directive 'formRepeatElement', () ->
restrict: 'EA'
# replace: false
scope:
fnNew: "="
___min: "=minInit"
___data: "=ngModel"
require: []
link: ($scope, $element, $attrs) ->
controller: [
'$scope'
'$attrs'
($scope, $attrs) ->
___addToData = -> $scope.___data.push {}
$scope.fnNew = ->
do ___addToData
# console.log
if $scope.___data is undefined
$scope.___data = []
# Si el numero de reviciones es mayor a la cantidad de datos, este crese.
$scope.$watch '___min', (newData, oldData) ->
nRepeatAddToData = (($scope.___min || 1) + 1) - ($scope.___data.length)
if nRepeatAddToData > 1
while nRepeatAddToData -= 1
do ___addToData
]
template: (element, attr) ->
return "<span class=\"form-single-element\" ng-repeat=\"#{attr.nameData||'$$data'} in ___data\">#{element.html()}</span>"
directive 'formSingleElement', ->
restrict: 'C'
scope: false
controller: [
'$scope'
($scope) ->
$$isMidlle = ->
if not do $$isFotter and not do $$isHeader
true
else
false
$$isFotter = ->
($scope.$parent.___data.length - 1) is $scope.$index
$$isHeader = ->
$scope.$index == 0
$scope.$$isFotter = $$isFotter
$scope.$$isHeader = $$isHeader
$scope.$$isMidlle = $$isMidlle
]
directive 'formRemoveElement', ->
restrict: 'EA'
scope: false
controller: [
"$scope"
($scope) ->
$scope.$$remove = ->
$scope.$parent.$parent.___data.splice($scope.$index, 1)
# console.log "Remove element #{}",
]
| 18551 | # appHome :: directives/_directive_boxSearch.coffee
#
# Permite obtener valores que requieren de multiples valores.
#
# Example:
# <div form-repeat-element="data">
# <div form-repeat-element-singular>
# Nombre
# <input form-repeat-element-value name="name" type="text" value="">
# </div>
# </div>
#
# Return Example:
# data = [
# {
# "name" : "<NAME>"
# },
# {
# "name" : "<NAME>"
# }
# ]
#
# Esta diractiva es capas de manejar los datos con los que se alimanta
directive 'formRepeatElement', () ->
restrict: 'EA'
# replace: false
scope:
fnNew: "="
___min: "=minInit"
___data: "=ngModel"
require: []
link: ($scope, $element, $attrs) ->
controller: [
'$scope'
'$attrs'
($scope, $attrs) ->
___addToData = -> $scope.___data.push {}
$scope.fnNew = ->
do ___addToData
# console.log
if $scope.___data is undefined
$scope.___data = []
# Si el numero de reviciones es mayor a la cantidad de datos, este crese.
$scope.$watch '___min', (newData, oldData) ->
nRepeatAddToData = (($scope.___min || 1) + 1) - ($scope.___data.length)
if nRepeatAddToData > 1
while nRepeatAddToData -= 1
do ___addToData
]
template: (element, attr) ->
return "<span class=\"form-single-element\" ng-repeat=\"#{attr.nameData||'$$data'} in ___data\">#{element.html()}</span>"
directive 'formSingleElement', ->
restrict: 'C'
scope: false
controller: [
'$scope'
($scope) ->
$$isMidlle = ->
if not do $$isFotter and not do $$isHeader
true
else
false
$$isFotter = ->
($scope.$parent.___data.length - 1) is $scope.$index
$$isHeader = ->
$scope.$index == 0
$scope.$$isFotter = $$isFotter
$scope.$$isHeader = $$isHeader
$scope.$$isMidlle = $$isMidlle
]
directive 'formRemoveElement', ->
restrict: 'EA'
scope: false
controller: [
"$scope"
($scope) ->
$scope.$$remove = ->
$scope.$parent.$parent.___data.splice($scope.$index, 1)
# console.log "Remove element #{}",
]
| true | # appHome :: directives/_directive_boxSearch.coffee
#
# Permite obtener valores que requieren de multiples valores.
#
# Example:
# <div form-repeat-element="data">
# <div form-repeat-element-singular>
# Nombre
# <input form-repeat-element-value name="name" type="text" value="">
# </div>
# </div>
#
# Return Example:
# data = [
# {
# "name" : "PI:NAME:<NAME>END_PI"
# },
# {
# "name" : "PI:NAME:<NAME>END_PI"
# }
# ]
#
# Esta diractiva es capas de manejar los datos con los que se alimanta
directive 'formRepeatElement', () ->
restrict: 'EA'
# replace: false
scope:
fnNew: "="
___min: "=minInit"
___data: "=ngModel"
require: []
link: ($scope, $element, $attrs) ->
controller: [
'$scope'
'$attrs'
($scope, $attrs) ->
___addToData = -> $scope.___data.push {}
$scope.fnNew = ->
do ___addToData
# console.log
if $scope.___data is undefined
$scope.___data = []
# Si el numero de reviciones es mayor a la cantidad de datos, este crese.
$scope.$watch '___min', (newData, oldData) ->
nRepeatAddToData = (($scope.___min || 1) + 1) - ($scope.___data.length)
if nRepeatAddToData > 1
while nRepeatAddToData -= 1
do ___addToData
]
template: (element, attr) ->
return "<span class=\"form-single-element\" ng-repeat=\"#{attr.nameData||'$$data'} in ___data\">#{element.html()}</span>"
directive 'formSingleElement', ->
restrict: 'C'
scope: false
controller: [
'$scope'
($scope) ->
$$isMidlle = ->
if not do $$isFotter and not do $$isHeader
true
else
false
$$isFotter = ->
($scope.$parent.___data.length - 1) is $scope.$index
$$isHeader = ->
$scope.$index == 0
$scope.$$isFotter = $$isFotter
$scope.$$isHeader = $$isHeader
$scope.$$isMidlle = $$isMidlle
]
directive 'formRemoveElement', ->
restrict: 'EA'
scope: false
controller: [
"$scope"
($scope) ->
$scope.$$remove = ->
$scope.$parent.$parent.___data.splice($scope.$index, 1)
# console.log "Remove element #{}",
]
|
[
{
"context": "xtends LightningElement {\\n @track greeting = 'World';\\n}\"]\n params.metaLines = [\n \"<apiVe",
"end": 2888,
"score": 0.8645389080047607,
"start": 2883,
"tag": "NAME",
"value": "World"
}
] | lib/utils.coffee | jonathanrico/forcedotcom-builder | 4 | child_process = require 'child_process'
fs = require 'fs'
module.exports =
isWin: () ->
/^win/.test process.platform
getPlatformPath: (str) ->
if @isWin() then str.replace /\//g, '\\' else str
getSrcPath: (root) ->
root + @getPlatformPath('/src/')
#-----------
getSfCreatingItemParams: (sfCreatingDialog, root) ->
params = {}
params.srcPath = @getSrcPath(root)
if sfCreatingDialog.itemType == "Class"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "cls"
params.fldPath = params.srcPath + @getPlatformPath('classes/')
params.srcLines = ["public with sharing class " + sfCreatingDialog.apiName + " {\n \n}"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<status>Active</status>"
]
else if sfCreatingDialog.itemType == "Trigger"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "trigger"
params.fldPath = params.srcPath + @getPlatformPath('triggers/')
params.srcLines = [
"trigger " + sfCreatingDialog.apiName + " on SObject_Api_Name (before update) {"
" if (Trigger.isBefore) {"
" if (Trigger.isUpdate) {"
" "
" }"
" }"
"}"
]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<status>Active</status>"
]
else if sfCreatingDialog.itemType == "Page"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "page"
params.fldPath = params.srcPath + @getPlatformPath('pages/')
params.srcLines = ["<apex:page>\n <h1>This is new page!</h1>\n</apex:page>"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<availableInTouch>false</availableInTouch>"
"<confirmationTokenRequired>false</confirmationTokenRequired>"
"<label>" + sfCreatingDialog.label + "</label>"
]
else if sfCreatingDialog.itemType == "Component"
params.extension = "component"
params.fldPath = params.srcPath + @getPlatformPath('components/')
params.srcLines = ["<apex:component>\n <h1>This is new component!</h1>\n</apex:component>"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<label>" + sfCreatingDialog.label + "</label>"
]
else if sfCreatingDialog.itemType == "LightningComponentBundle"
params.metaDataType = sfCreatingDialog.itemType
params.extension = "js"
params.fldPath = params.srcPath + @getPlatformPath('lwc/' + sfCreatingDialog.apiName + '/')
params.srcLines = ["import { LightningElement, track } from 'lwc';\n\nexport default class Hello extends LightningElement {\n @track greeting = 'World';\n}"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<isExposed>true</isExposed>"
"<targets>"
" <target>lightning__AppPage</target>"
" <target>lightning__RecordPage</target>"
" <target>lightning__HomePage</target>"
"</targets>"
]
params.extraFiles = [
{
srcFilePath: params.srcPath + @getPlatformPath('lwc/' + sfCreatingDialog.apiName + '/') + sfCreatingDialog.apiName + ".html"
srcLines: ["<template>\n <lightning-card title=\"Hello\" icon-name=\"custom:custom14\">\n <div class=\"slds-m-around_medium\">Hello, {greeting}!</div>\n </lightning-card>\n</template>"]
}
]
params.srcFilePath = params.fldPath + sfCreatingDialog.apiName + "." + params.extension
params.metaFilePath = params.srcFilePath + "-meta.xml"
params
#-----------
writeFile: (filePath, lines, dirName) ->
if dirName && !fs.existsSync(dirName)
fs.mkdirSync(dirName)
fh = fs.createWriteStream filePath
for line in lines
fh.write(line + "\n")
fh.end("")
writeMeta: (itemParams) ->
if itemParams.metaLines
lines = []
lines.push "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<" + itemParams.metaDataType + " xmlns=\"http://soap.sforce.com/2006/04/metadata\">"
for metaLine in itemParams.metaLines
lines.push " " + metaLine
lines.push "</" + itemParams.metaDataType + ">"
@writeFile itemParams.metaFilePath, lines, itemParams.fldPath
itemParams.metaFilePath
writeSrc: (itemParams) ->
@writeFile itemParams.srcFilePath, itemParams.srcLines, itemParams.fldPath
if itemParams.extraFiles
for extraItemParams in itemParams.extraFiles
@writeFile extraItemParams.srcFilePath, extraItemParams.srcLines, extraItemParams.fldPath
itemParams.srcFilePath
#-----------
writeBeforeLastOccurance: () ->
[].push.call(arguments, "before")
@writePerformLastOccurence.apply null, arguments
writeAfterLastOccurance: () ->
[].push.call(arguments, "after")
@writePerformLastOccurence.apply null, arguments
writePerformLastOccurence: (path, findText, newText, successCallback, errorCallback, place) ->
if fs.existsSync path
fileData = fs.readFileSync path, 'utf8'
indexOcc = fileData.lastIndexOf findText
if indexOcc != -1
if place == "after"
indexOcc += findText.length
fileData = fileData.substr(0, indexOcc) + newText + fileData.substr(indexOcc)
fs.writeFileSync path, fileData, 'utf8'
if successCallback
successCallback()
else
if errorCallback
errorCallback()
#-----------
createSfItem: (sfCreatingDialog, root) ->
itemParams = @getSfCreatingItemParams(sfCreatingDialog, root)
@writeMeta itemParams
@writeSrc itemParams
#-----------
deleteFolderRecursive: (path) ->
if fs.existsSync(path)
fs.readdirSync(path).forEach (file, index) =>
curPath = path + "/" + file
if fs.lstatSync(curPath).isDirectory()
@deleteFolderRecursive(curPath);
else
fs.unlinkSync(curPath);
fs.rmdirSync(path);
#-----------
runProcess: (child, view, command, args, beforeCommand, afterCommand, onclose) ->
if beforeCommand
beforeCommand()
child = child_process.exec(command, args)
child.stdout.on 'data', view.append
child.stderr.on 'data', view.append
child.on "close", onclose
if afterCommand
afterCommand()
#-----------
getMetaDataFromFolderName: (folderName) ->
folderMapping = {
'classes' : 'ApexClass'
,'triggers' : 'ApexTrigger'
,'pages' : 'ApexPage'
,'components' : 'ApexComponent'
,'staticresources' : 'StaticResource'
,'applications' : 'CustomApplication'
,'objects' : 'CustomObject'
,'tabs' : 'CustomTab'
,'layouts' : 'Layout'
,'quickActions' : 'QuickAction'
,'profiles' : 'Profile'
,'labels' : 'CustomLabels'
,'workflows' : 'Workflow'
,'remoteSiteSettings' : 'RemoteSiteSetting'
,'permissionsets' : 'PermissionSet'
,'letterhead' : 'Letterhead'
,'translations' : 'Translations'
,'groups' : 'Group'
,'objectTranslations' : 'CustomObjectTranslation'
,'communities' : 'Network'
,'reportTypes' : 'ReportType'
,'settings' : 'Settings'
,'assignmentRules' : 'AssignmentRule'
,'approvalProcesses' : 'ApprovalProcess'
,'escalationRules' : 'EscalationRule'
,'flows' : 'Flow'
,'aura' : 'AuraDefinitionBundle'
,'lwc' : 'LightningComponentBundle'
,'documents' : 'Document'
,'email' : 'EmailTemplate'
,'contentassets' : 'ContentAsset'
,'globalValueSets' : 'GlobalValueSet'
,'mlDomains' : 'MlDomain'
,'bots' : 'Bot'
,'messageChannels' : 'LightningMessageChannel'
}
result = null
if folderMapping.hasOwnProperty folderName
result = folderMapping[folderName]
result
#-------------
getLabelMeta: (cl) ->
[
' <labels>'
' <fullName>' + cl.apiName + '</fullName>'
' <categories>' + cl.categories + '</categories>'
' <language>' + cl.language + '</language>'
' <protected>true</protected>'
' <shortDescription>' + cl.shortDesc + '</shortDescription>'
' <value>' + cl.label + '</value>'
' </labels>\n'
].join('\n')
getLabelTranslationMeta: (cl) ->
[
' <customLabels>'
' <label><!-- ' + cl.label + ' --></label>'
' <name>' + cl.apiName + '</name>'
' </customLabels>\n'
].join('\n')
insertLabelSelection: (cl, editor) ->
if editor
newText = null;
grammarName = editor.getGrammar().name
if grammarName == "Apex"
newText = 'Label.' + cl.apiName
else if grammarName == "Visualforce"
newText = '{!$Label.' + cl.apiName + '}'
if newText != null
editor.getLastSelection().insertText(newText, {"select" : true})
insertCustomLabel: (cl, root, editor) ->
labelsPath = @getPlatformPath root + '/src/labels/CustomLabels.labels'
utils = this
if fs.existsSync labelsPath
@writeBeforeLastOccurance(labelsPath, '</CustomLabels>', @getLabelMeta(cl), () =>
utils.insertLabelSelection cl, editor
,null)
#Translations
translationsPath = @getPlatformPath root + '/src/translations/';
if fs.existsSync translationsPath
fs.readdir translationsPath, (err, items) ->
for i in items
if /^.+\.translation$/.test(i)
tPath = utils.getPlatformPath translationsPath + i
utils.writeAfterLastOccurance(tPath, '</customLabels>\n', utils.getLabelTranslationMeta(cl), null, () =>
utils.writeBeforeLastOccurance(tPath, '</Translations>', utils.getLabelTranslationMeta(cl), null, null)
)
fs.existsSync labelsPath
| 127803 | child_process = require 'child_process'
fs = require 'fs'
module.exports =
isWin: () ->
/^win/.test process.platform
getPlatformPath: (str) ->
if @isWin() then str.replace /\//g, '\\' else str
getSrcPath: (root) ->
root + @getPlatformPath('/src/')
#-----------
getSfCreatingItemParams: (sfCreatingDialog, root) ->
params = {}
params.srcPath = @getSrcPath(root)
if sfCreatingDialog.itemType == "Class"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "cls"
params.fldPath = params.srcPath + @getPlatformPath('classes/')
params.srcLines = ["public with sharing class " + sfCreatingDialog.apiName + " {\n \n}"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<status>Active</status>"
]
else if sfCreatingDialog.itemType == "Trigger"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "trigger"
params.fldPath = params.srcPath + @getPlatformPath('triggers/')
params.srcLines = [
"trigger " + sfCreatingDialog.apiName + " on SObject_Api_Name (before update) {"
" if (Trigger.isBefore) {"
" if (Trigger.isUpdate) {"
" "
" }"
" }"
"}"
]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<status>Active</status>"
]
else if sfCreatingDialog.itemType == "Page"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "page"
params.fldPath = params.srcPath + @getPlatformPath('pages/')
params.srcLines = ["<apex:page>\n <h1>This is new page!</h1>\n</apex:page>"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<availableInTouch>false</availableInTouch>"
"<confirmationTokenRequired>false</confirmationTokenRequired>"
"<label>" + sfCreatingDialog.label + "</label>"
]
else if sfCreatingDialog.itemType == "Component"
params.extension = "component"
params.fldPath = params.srcPath + @getPlatformPath('components/')
params.srcLines = ["<apex:component>\n <h1>This is new component!</h1>\n</apex:component>"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<label>" + sfCreatingDialog.label + "</label>"
]
else if sfCreatingDialog.itemType == "LightningComponentBundle"
params.metaDataType = sfCreatingDialog.itemType
params.extension = "js"
params.fldPath = params.srcPath + @getPlatformPath('lwc/' + sfCreatingDialog.apiName + '/')
params.srcLines = ["import { LightningElement, track } from 'lwc';\n\nexport default class Hello extends LightningElement {\n @track greeting = '<NAME>';\n}"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<isExposed>true</isExposed>"
"<targets>"
" <target>lightning__AppPage</target>"
" <target>lightning__RecordPage</target>"
" <target>lightning__HomePage</target>"
"</targets>"
]
params.extraFiles = [
{
srcFilePath: params.srcPath + @getPlatformPath('lwc/' + sfCreatingDialog.apiName + '/') + sfCreatingDialog.apiName + ".html"
srcLines: ["<template>\n <lightning-card title=\"Hello\" icon-name=\"custom:custom14\">\n <div class=\"slds-m-around_medium\">Hello, {greeting}!</div>\n </lightning-card>\n</template>"]
}
]
params.srcFilePath = params.fldPath + sfCreatingDialog.apiName + "." + params.extension
params.metaFilePath = params.srcFilePath + "-meta.xml"
params
#-----------
writeFile: (filePath, lines, dirName) ->
if dirName && !fs.existsSync(dirName)
fs.mkdirSync(dirName)
fh = fs.createWriteStream filePath
for line in lines
fh.write(line + "\n")
fh.end("")
writeMeta: (itemParams) ->
if itemParams.metaLines
lines = []
lines.push "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<" + itemParams.metaDataType + " xmlns=\"http://soap.sforce.com/2006/04/metadata\">"
for metaLine in itemParams.metaLines
lines.push " " + metaLine
lines.push "</" + itemParams.metaDataType + ">"
@writeFile itemParams.metaFilePath, lines, itemParams.fldPath
itemParams.metaFilePath
writeSrc: (itemParams) ->
@writeFile itemParams.srcFilePath, itemParams.srcLines, itemParams.fldPath
if itemParams.extraFiles
for extraItemParams in itemParams.extraFiles
@writeFile extraItemParams.srcFilePath, extraItemParams.srcLines, extraItemParams.fldPath
itemParams.srcFilePath
#-----------
writeBeforeLastOccurance: () ->
[].push.call(arguments, "before")
@writePerformLastOccurence.apply null, arguments
writeAfterLastOccurance: () ->
[].push.call(arguments, "after")
@writePerformLastOccurence.apply null, arguments
writePerformLastOccurence: (path, findText, newText, successCallback, errorCallback, place) ->
if fs.existsSync path
fileData = fs.readFileSync path, 'utf8'
indexOcc = fileData.lastIndexOf findText
if indexOcc != -1
if place == "after"
indexOcc += findText.length
fileData = fileData.substr(0, indexOcc) + newText + fileData.substr(indexOcc)
fs.writeFileSync path, fileData, 'utf8'
if successCallback
successCallback()
else
if errorCallback
errorCallback()
#-----------
createSfItem: (sfCreatingDialog, root) ->
itemParams = @getSfCreatingItemParams(sfCreatingDialog, root)
@writeMeta itemParams
@writeSrc itemParams
#-----------
deleteFolderRecursive: (path) ->
if fs.existsSync(path)
fs.readdirSync(path).forEach (file, index) =>
curPath = path + "/" + file
if fs.lstatSync(curPath).isDirectory()
@deleteFolderRecursive(curPath);
else
fs.unlinkSync(curPath);
fs.rmdirSync(path);
#-----------
runProcess: (child, view, command, args, beforeCommand, afterCommand, onclose) ->
if beforeCommand
beforeCommand()
child = child_process.exec(command, args)
child.stdout.on 'data', view.append
child.stderr.on 'data', view.append
child.on "close", onclose
if afterCommand
afterCommand()
#-----------
getMetaDataFromFolderName: (folderName) ->
folderMapping = {
'classes' : 'ApexClass'
,'triggers' : 'ApexTrigger'
,'pages' : 'ApexPage'
,'components' : 'ApexComponent'
,'staticresources' : 'StaticResource'
,'applications' : 'CustomApplication'
,'objects' : 'CustomObject'
,'tabs' : 'CustomTab'
,'layouts' : 'Layout'
,'quickActions' : 'QuickAction'
,'profiles' : 'Profile'
,'labels' : 'CustomLabels'
,'workflows' : 'Workflow'
,'remoteSiteSettings' : 'RemoteSiteSetting'
,'permissionsets' : 'PermissionSet'
,'letterhead' : 'Letterhead'
,'translations' : 'Translations'
,'groups' : 'Group'
,'objectTranslations' : 'CustomObjectTranslation'
,'communities' : 'Network'
,'reportTypes' : 'ReportType'
,'settings' : 'Settings'
,'assignmentRules' : 'AssignmentRule'
,'approvalProcesses' : 'ApprovalProcess'
,'escalationRules' : 'EscalationRule'
,'flows' : 'Flow'
,'aura' : 'AuraDefinitionBundle'
,'lwc' : 'LightningComponentBundle'
,'documents' : 'Document'
,'email' : 'EmailTemplate'
,'contentassets' : 'ContentAsset'
,'globalValueSets' : 'GlobalValueSet'
,'mlDomains' : 'MlDomain'
,'bots' : 'Bot'
,'messageChannels' : 'LightningMessageChannel'
}
result = null
if folderMapping.hasOwnProperty folderName
result = folderMapping[folderName]
result
#-------------
getLabelMeta: (cl) ->
[
' <labels>'
' <fullName>' + cl.apiName + '</fullName>'
' <categories>' + cl.categories + '</categories>'
' <language>' + cl.language + '</language>'
' <protected>true</protected>'
' <shortDescription>' + cl.shortDesc + '</shortDescription>'
' <value>' + cl.label + '</value>'
' </labels>\n'
].join('\n')
getLabelTranslationMeta: (cl) ->
[
' <customLabels>'
' <label><!-- ' + cl.label + ' --></label>'
' <name>' + cl.apiName + '</name>'
' </customLabels>\n'
].join('\n')
insertLabelSelection: (cl, editor) ->
if editor
newText = null;
grammarName = editor.getGrammar().name
if grammarName == "Apex"
newText = 'Label.' + cl.apiName
else if grammarName == "Visualforce"
newText = '{!$Label.' + cl.apiName + '}'
if newText != null
editor.getLastSelection().insertText(newText, {"select" : true})
insertCustomLabel: (cl, root, editor) ->
labelsPath = @getPlatformPath root + '/src/labels/CustomLabels.labels'
utils = this
if fs.existsSync labelsPath
@writeBeforeLastOccurance(labelsPath, '</CustomLabels>', @getLabelMeta(cl), () =>
utils.insertLabelSelection cl, editor
,null)
#Translations
translationsPath = @getPlatformPath root + '/src/translations/';
if fs.existsSync translationsPath
fs.readdir translationsPath, (err, items) ->
for i in items
if /^.+\.translation$/.test(i)
tPath = utils.getPlatformPath translationsPath + i
utils.writeAfterLastOccurance(tPath, '</customLabels>\n', utils.getLabelTranslationMeta(cl), null, () =>
utils.writeBeforeLastOccurance(tPath, '</Translations>', utils.getLabelTranslationMeta(cl), null, null)
)
fs.existsSync labelsPath
| true | child_process = require 'child_process'
fs = require 'fs'
module.exports =
isWin: () ->
/^win/.test process.platform
getPlatformPath: (str) ->
if @isWin() then str.replace /\//g, '\\' else str
getSrcPath: (root) ->
root + @getPlatformPath('/src/')
#-----------
getSfCreatingItemParams: (sfCreatingDialog, root) ->
params = {}
params.srcPath = @getSrcPath(root)
if sfCreatingDialog.itemType == "Class"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "cls"
params.fldPath = params.srcPath + @getPlatformPath('classes/')
params.srcLines = ["public with sharing class " + sfCreatingDialog.apiName + " {\n \n}"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<status>Active</status>"
]
else if sfCreatingDialog.itemType == "Trigger"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "trigger"
params.fldPath = params.srcPath + @getPlatformPath('triggers/')
params.srcLines = [
"trigger " + sfCreatingDialog.apiName + " on SObject_Api_Name (before update) {"
" if (Trigger.isBefore) {"
" if (Trigger.isUpdate) {"
" "
" }"
" }"
"}"
]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<status>Active</status>"
]
else if sfCreatingDialog.itemType == "Page"
params.metaDataType = "Apex" + sfCreatingDialog.itemType
params.extension = "page"
params.fldPath = params.srcPath + @getPlatformPath('pages/')
params.srcLines = ["<apex:page>\n <h1>This is new page!</h1>\n</apex:page>"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<availableInTouch>false</availableInTouch>"
"<confirmationTokenRequired>false</confirmationTokenRequired>"
"<label>" + sfCreatingDialog.label + "</label>"
]
else if sfCreatingDialog.itemType == "Component"
params.extension = "component"
params.fldPath = params.srcPath + @getPlatformPath('components/')
params.srcLines = ["<apex:component>\n <h1>This is new component!</h1>\n</apex:component>"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<label>" + sfCreatingDialog.label + "</label>"
]
else if sfCreatingDialog.itemType == "LightningComponentBundle"
params.metaDataType = sfCreatingDialog.itemType
params.extension = "js"
params.fldPath = params.srcPath + @getPlatformPath('lwc/' + sfCreatingDialog.apiName + '/')
params.srcLines = ["import { LightningElement, track } from 'lwc';\n\nexport default class Hello extends LightningElement {\n @track greeting = 'PI:NAME:<NAME>END_PI';\n}"]
params.metaLines = [
"<apiVersion>" + sfCreatingDialog.apiVersion + "</apiVersion>"
"<isExposed>true</isExposed>"
"<targets>"
" <target>lightning__AppPage</target>"
" <target>lightning__RecordPage</target>"
" <target>lightning__HomePage</target>"
"</targets>"
]
params.extraFiles = [
{
srcFilePath: params.srcPath + @getPlatformPath('lwc/' + sfCreatingDialog.apiName + '/') + sfCreatingDialog.apiName + ".html"
srcLines: ["<template>\n <lightning-card title=\"Hello\" icon-name=\"custom:custom14\">\n <div class=\"slds-m-around_medium\">Hello, {greeting}!</div>\n </lightning-card>\n</template>"]
}
]
params.srcFilePath = params.fldPath + sfCreatingDialog.apiName + "." + params.extension
params.metaFilePath = params.srcFilePath + "-meta.xml"
params
#-----------
writeFile: (filePath, lines, dirName) ->
if dirName && !fs.existsSync(dirName)
fs.mkdirSync(dirName)
fh = fs.createWriteStream filePath
for line in lines
fh.write(line + "\n")
fh.end("")
writeMeta: (itemParams) ->
if itemParams.metaLines
lines = []
lines.push "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<" + itemParams.metaDataType + " xmlns=\"http://soap.sforce.com/2006/04/metadata\">"
for metaLine in itemParams.metaLines
lines.push " " + metaLine
lines.push "</" + itemParams.metaDataType + ">"
@writeFile itemParams.metaFilePath, lines, itemParams.fldPath
itemParams.metaFilePath
writeSrc: (itemParams) ->
@writeFile itemParams.srcFilePath, itemParams.srcLines, itemParams.fldPath
if itemParams.extraFiles
for extraItemParams in itemParams.extraFiles
@writeFile extraItemParams.srcFilePath, extraItemParams.srcLines, extraItemParams.fldPath
itemParams.srcFilePath
#-----------
writeBeforeLastOccurance: () ->
[].push.call(arguments, "before")
@writePerformLastOccurence.apply null, arguments
writeAfterLastOccurance: () ->
[].push.call(arguments, "after")
@writePerformLastOccurence.apply null, arguments
writePerformLastOccurence: (path, findText, newText, successCallback, errorCallback, place) ->
if fs.existsSync path
fileData = fs.readFileSync path, 'utf8'
indexOcc = fileData.lastIndexOf findText
if indexOcc != -1
if place == "after"
indexOcc += findText.length
fileData = fileData.substr(0, indexOcc) + newText + fileData.substr(indexOcc)
fs.writeFileSync path, fileData, 'utf8'
if successCallback
successCallback()
else
if errorCallback
errorCallback()
#-----------
createSfItem: (sfCreatingDialog, root) ->
itemParams = @getSfCreatingItemParams(sfCreatingDialog, root)
@writeMeta itemParams
@writeSrc itemParams
#-----------
deleteFolderRecursive: (path) ->
if fs.existsSync(path)
fs.readdirSync(path).forEach (file, index) =>
curPath = path + "/" + file
if fs.lstatSync(curPath).isDirectory()
@deleteFolderRecursive(curPath);
else
fs.unlinkSync(curPath);
fs.rmdirSync(path);
#-----------
runProcess: (child, view, command, args, beforeCommand, afterCommand, onclose) ->
if beforeCommand
beforeCommand()
child = child_process.exec(command, args)
child.stdout.on 'data', view.append
child.stderr.on 'data', view.append
child.on "close", onclose
if afterCommand
afterCommand()
#-----------
getMetaDataFromFolderName: (folderName) ->
folderMapping = {
'classes' : 'ApexClass'
,'triggers' : 'ApexTrigger'
,'pages' : 'ApexPage'
,'components' : 'ApexComponent'
,'staticresources' : 'StaticResource'
,'applications' : 'CustomApplication'
,'objects' : 'CustomObject'
,'tabs' : 'CustomTab'
,'layouts' : 'Layout'
,'quickActions' : 'QuickAction'
,'profiles' : 'Profile'
,'labels' : 'CustomLabels'
,'workflows' : 'Workflow'
,'remoteSiteSettings' : 'RemoteSiteSetting'
,'permissionsets' : 'PermissionSet'
,'letterhead' : 'Letterhead'
,'translations' : 'Translations'
,'groups' : 'Group'
,'objectTranslations' : 'CustomObjectTranslation'
,'communities' : 'Network'
,'reportTypes' : 'ReportType'
,'settings' : 'Settings'
,'assignmentRules' : 'AssignmentRule'
,'approvalProcesses' : 'ApprovalProcess'
,'escalationRules' : 'EscalationRule'
,'flows' : 'Flow'
,'aura' : 'AuraDefinitionBundle'
,'lwc' : 'LightningComponentBundle'
,'documents' : 'Document'
,'email' : 'EmailTemplate'
,'contentassets' : 'ContentAsset'
,'globalValueSets' : 'GlobalValueSet'
,'mlDomains' : 'MlDomain'
,'bots' : 'Bot'
,'messageChannels' : 'LightningMessageChannel'
}
result = null
if folderMapping.hasOwnProperty folderName
result = folderMapping[folderName]
result
#-------------
getLabelMeta: (cl) ->
[
' <labels>'
' <fullName>' + cl.apiName + '</fullName>'
' <categories>' + cl.categories + '</categories>'
' <language>' + cl.language + '</language>'
' <protected>true</protected>'
' <shortDescription>' + cl.shortDesc + '</shortDescription>'
' <value>' + cl.label + '</value>'
' </labels>\n'
].join('\n')
getLabelTranslationMeta: (cl) ->
[
' <customLabels>'
' <label><!-- ' + cl.label + ' --></label>'
' <name>' + cl.apiName + '</name>'
' </customLabels>\n'
].join('\n')
insertLabelSelection: (cl, editor) ->
if editor
newText = null;
grammarName = editor.getGrammar().name
if grammarName == "Apex"
newText = 'Label.' + cl.apiName
else if grammarName == "Visualforce"
newText = '{!$Label.' + cl.apiName + '}'
if newText != null
editor.getLastSelection().insertText(newText, {"select" : true})
insertCustomLabel: (cl, root, editor) ->
labelsPath = @getPlatformPath root + '/src/labels/CustomLabels.labels'
utils = this
if fs.existsSync labelsPath
@writeBeforeLastOccurance(labelsPath, '</CustomLabels>', @getLabelMeta(cl), () =>
utils.insertLabelSelection cl, editor
,null)
#Translations
translationsPath = @getPlatformPath root + '/src/translations/';
if fs.existsSync translationsPath
fs.readdir translationsPath, (err, items) ->
for i in items
if /^.+\.translation$/.test(i)
tPath = utils.getPlatformPath translationsPath + i
utils.writeAfterLastOccurance(tPath, '</customLabels>\n', utils.getLabelTranslationMeta(cl), null, () =>
utils.writeBeforeLastOccurance(tPath, '</Translations>', utils.getLabelTranslationMeta(cl), null, null)
)
fs.existsSync labelsPath
|
[
{
"context": "w ProjectsList()\n\n initSidebarTab: ->\n key = \"dashboard_sidebar_filter\"\n\n # store selection in cookie\n $('.dash-si",
"end": 137,
"score": 0.9735764265060425,
"start": 113,
"tag": "KEY",
"value": "dashboard_sidebar_filter"
}
] | app/assets/javascripts/dashboard.js.coffee | sPooKee/gitlabhq | 0 | class @Dashboard
constructor: ->
@initSidebarTab()
new ProjectsList()
initSidebarTab: ->
key = "dashboard_sidebar_filter"
# store selection in cookie
$('.dash-sidebar-tabs a').on 'click', (e) ->
$.cookie(key, $(e.target).attr('id'))
# show tab from cookie
sidebar_filter = $.cookie(key)
$("#" + sidebar_filter).tab('show') if sidebar_filter
| 173434 | class @Dashboard
constructor: ->
@initSidebarTab()
new ProjectsList()
initSidebarTab: ->
key = "<KEY>"
# store selection in cookie
$('.dash-sidebar-tabs a').on 'click', (e) ->
$.cookie(key, $(e.target).attr('id'))
# show tab from cookie
sidebar_filter = $.cookie(key)
$("#" + sidebar_filter).tab('show') if sidebar_filter
| true | class @Dashboard
constructor: ->
@initSidebarTab()
new ProjectsList()
initSidebarTab: ->
key = "PI:KEY:<KEY>END_PI"
# store selection in cookie
$('.dash-sidebar-tabs a').on 'click', (e) ->
$.cookie(key, $(e.target).attr('id'))
# show tab from cookie
sidebar_filter = $.cookie(key)
$("#" + sidebar_filter).tab('show') if sidebar_filter
|
[
{
"context": "tions are only at the top of a function.\n# @author Danny Fritz\n# @author Gyandeep Singh\n###\n'use strict'\n\n{isDec",
"end": 110,
"score": 0.9998470544815063,
"start": 99,
"tag": "NAME",
"value": "Danny Fritz"
},
{
"context": "top of a function.\n# @author Danny Frit... | src/rules/vars-on-top.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Rule to enforce var declarations are only at the top of a function.
# @author Danny Fritz
# @author Gyandeep Singh
###
'use strict'
{isDeclarationAssignment} = require '../util/ast-utils'
boundaryNodeRegex = /Function/
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'require `var` declarations be placed at the top of their containing scope'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/vars-on-top'
schema: []
create: (context) ->
errorMessage = 'All declarations must be at the top of the function scope.'
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
###*
# @param {ASTNode} node - any node
# @returns {boolean} whether the given node structurally represents a directive
###
looksLikeDirective = (node) ->
node.type is 'ExpressionStatement' and
node.expression.type is 'Literal' and
typeof node.expression.value is 'string'
###*
# Check to see if its a ES6 import declaration
# @param {ASTNode} node - any node
# @returns {boolean} whether the given node represents a import declaration
###
looksLikeImport = (node) ->
node.type in [
'ImportDeclaration'
'ImportSpecifier'
'ImportDefaultSpecifier'
'ImportNamespaceSpecifier'
]
###*
# Checks whether a given node is a variable declaration or not.
#
# @param {ASTNode} node - any node
# @returns {boolean} `true` if the node is a variable declaration.
###
isVariableDeclaration = (node) ->
(node.type is 'ExpressionStatement' and
isDeclarationAssignment(node.expression)) or
(node.type is 'ExportNamedDeclaration' and
isDeclarationAssignment node.declaration)
###*
# Checks whether this variable is on top of the block body
# @param {ASTNode} node - The node to check
# @param {ASTNode[]} statements - collection of ASTNodes for the parent node block
# @returns {boolean} True if var is on top otherwise false
###
isVarOnTop = (node, statements) ->
l = statements.length
i = 0
# skip over directives
while i < l
if (
not looksLikeDirective(statements[i]) and
not looksLikeImport statements[i]
)
break
++i
while i < l
return no unless isVariableDeclaration statements[i]
return yes if statements[i] is node
++i
no
###*
# Checks whether variable is on top at the global level
# @param {ASTNode} node - The node to check
# @param {ASTNode} parent - Parent of the node
# @returns {void}
###
globalVarCheck = (node, assignment, parent) ->
unless isVarOnTop assignment, parent.body
context.report {node, message: errorMessage}
###*
# Checks whether variable is on top at functional block scope level
# @param {ASTNode} node - The node to check
# @param {ASTNode} parent - Parent of the node
# @param {ASTNode} grandParent - Parent of the node's parent
# @returns {void}
###
blockScopeVarCheck = (node, assignment, parent, grandParent) ->
unless (
assignment? and
/Function/.test(grandParent.type) and
parent.type is 'BlockStatement' and
isVarOnTop assignment, parent.body
)
context.report {node, message: errorMessage}
findEnclosingAssignment = (node) ->
currentNode = node
prevNode = null
while currentNode
return if boundaryNodeRegex.test node.type
return if (
currentNode.type is 'Property' and
prevNode is currentNode.key and
prevNode isnt currentNode.value
)
if currentNode.type is 'AssignmentExpression'
if prevNode is currentNode.left
return currentNode
return
prevNode = currentNode
currentNode = currentNode.parent
findEnclosingExpressionStatement = (assignmentNode) ->
currentNode = assignmentNode
currentNode = currentNode.parent while (
currentNode.type is 'AssignmentExpression'
)
return currentNode if currentNode.type is 'ExpressionStatement'
#--------------------------------------------------------------------------
# Public API
#--------------------------------------------------------------------------
'Identifier[declaration=true]': (node) ->
enclosingAssignment = findEnclosingAssignment node
return unless enclosingAssignment
enclosingExpressionStatement = findEnclosingExpressionStatement(
enclosingAssignment
)
if enclosingAssignment.parent.type is 'ExportNamedDeclaration'
globalVarCheck(
node
enclosingAssignment.parent
enclosingAssignment.parent.parent
)
else if enclosingExpressionStatement?.parent.type is 'Program'
globalVarCheck(
node
enclosingExpressionStatement
enclosingExpressionStatement.parent
)
else
blockScopeVarCheck(
node
enclosingExpressionStatement
enclosingExpressionStatement?.parent
enclosingExpressionStatement?.parent.parent
)
| 130519 | ###*
# @fileoverview Rule to enforce var declarations are only at the top of a function.
# @author <NAME>
# @author <NAME>
###
'use strict'
{isDeclarationAssignment} = require '../util/ast-utils'
boundaryNodeRegex = /Function/
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'require `var` declarations be placed at the top of their containing scope'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/vars-on-top'
schema: []
create: (context) ->
errorMessage = 'All declarations must be at the top of the function scope.'
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
###*
# @param {ASTNode} node - any node
# @returns {boolean} whether the given node structurally represents a directive
###
looksLikeDirective = (node) ->
node.type is 'ExpressionStatement' and
node.expression.type is 'Literal' and
typeof node.expression.value is 'string'
###*
# Check to see if its a ES6 import declaration
# @param {ASTNode} node - any node
# @returns {boolean} whether the given node represents a import declaration
###
looksLikeImport = (node) ->
node.type in [
'ImportDeclaration'
'ImportSpecifier'
'ImportDefaultSpecifier'
'ImportNamespaceSpecifier'
]
###*
# Checks whether a given node is a variable declaration or not.
#
# @param {ASTNode} node - any node
# @returns {boolean} `true` if the node is a variable declaration.
###
isVariableDeclaration = (node) ->
(node.type is 'ExpressionStatement' and
isDeclarationAssignment(node.expression)) or
(node.type is 'ExportNamedDeclaration' and
isDeclarationAssignment node.declaration)
###*
# Checks whether this variable is on top of the block body
# @param {ASTNode} node - The node to check
# @param {ASTNode[]} statements - collection of ASTNodes for the parent node block
# @returns {boolean} True if var is on top otherwise false
###
isVarOnTop = (node, statements) ->
l = statements.length
i = 0
# skip over directives
while i < l
if (
not looksLikeDirective(statements[i]) and
not looksLikeImport statements[i]
)
break
++i
while i < l
return no unless isVariableDeclaration statements[i]
return yes if statements[i] is node
++i
no
###*
# Checks whether variable is on top at the global level
# @param {ASTNode} node - The node to check
# @param {ASTNode} parent - Parent of the node
# @returns {void}
###
globalVarCheck = (node, assignment, parent) ->
unless isVarOnTop assignment, parent.body
context.report {node, message: errorMessage}
###*
# Checks whether variable is on top at functional block scope level
# @param {ASTNode} node - The node to check
# @param {ASTNode} parent - Parent of the node
# @param {ASTNode} grandParent - Parent of the node's parent
# @returns {void}
###
blockScopeVarCheck = (node, assignment, parent, grandParent) ->
unless (
assignment? and
/Function/.test(grandParent.type) and
parent.type is 'BlockStatement' and
isVarOnTop assignment, parent.body
)
context.report {node, message: errorMessage}
findEnclosingAssignment = (node) ->
currentNode = node
prevNode = null
while currentNode
return if boundaryNodeRegex.test node.type
return if (
currentNode.type is 'Property' and
prevNode is currentNode.key and
prevNode isnt currentNode.value
)
if currentNode.type is 'AssignmentExpression'
if prevNode is currentNode.left
return currentNode
return
prevNode = currentNode
currentNode = currentNode.parent
findEnclosingExpressionStatement = (assignmentNode) ->
currentNode = assignmentNode
currentNode = currentNode.parent while (
currentNode.type is 'AssignmentExpression'
)
return currentNode if currentNode.type is 'ExpressionStatement'
#--------------------------------------------------------------------------
# Public API
#--------------------------------------------------------------------------
'Identifier[declaration=true]': (node) ->
enclosingAssignment = findEnclosingAssignment node
return unless enclosingAssignment
enclosingExpressionStatement = findEnclosingExpressionStatement(
enclosingAssignment
)
if enclosingAssignment.parent.type is 'ExportNamedDeclaration'
globalVarCheck(
node
enclosingAssignment.parent
enclosingAssignment.parent.parent
)
else if enclosingExpressionStatement?.parent.type is 'Program'
globalVarCheck(
node
enclosingExpressionStatement
enclosingExpressionStatement.parent
)
else
blockScopeVarCheck(
node
enclosingExpressionStatement
enclosingExpressionStatement?.parent
enclosingExpressionStatement?.parent.parent
)
| true | ###*
# @fileoverview Rule to enforce var declarations are only at the top of a function.
# @author PI:NAME:<NAME>END_PI
# @author PI:NAME:<NAME>END_PI
###
'use strict'
{isDeclarationAssignment} = require '../util/ast-utils'
boundaryNodeRegex = /Function/
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'require `var` declarations be placed at the top of their containing scope'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/vars-on-top'
schema: []
create: (context) ->
errorMessage = 'All declarations must be at the top of the function scope.'
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
###*
# @param {ASTNode} node - any node
# @returns {boolean} whether the given node structurally represents a directive
###
looksLikeDirective = (node) ->
node.type is 'ExpressionStatement' and
node.expression.type is 'Literal' and
typeof node.expression.value is 'string'
###*
# Check to see if its a ES6 import declaration
# @param {ASTNode} node - any node
# @returns {boolean} whether the given node represents a import declaration
###
looksLikeImport = (node) ->
node.type in [
'ImportDeclaration'
'ImportSpecifier'
'ImportDefaultSpecifier'
'ImportNamespaceSpecifier'
]
###*
# Checks whether a given node is a variable declaration or not.
#
# @param {ASTNode} node - any node
# @returns {boolean} `true` if the node is a variable declaration.
###
isVariableDeclaration = (node) ->
(node.type is 'ExpressionStatement' and
isDeclarationAssignment(node.expression)) or
(node.type is 'ExportNamedDeclaration' and
isDeclarationAssignment node.declaration)
###*
# Checks whether this variable is on top of the block body
# @param {ASTNode} node - The node to check
# @param {ASTNode[]} statements - collection of ASTNodes for the parent node block
# @returns {boolean} True if var is on top otherwise false
###
    isVarOnTop = (node, statements) ->
      l = statements.length
      i = 0
      # skip over directives
      # (and ES6 imports - both may legally precede the declaration group)
      while i < l
        if (
          not looksLikeDirective(statements[i]) and
          not looksLikeImport statements[i]
        )
          break
        ++i
      # after the directive/import prefix, only an unbroken run of
      # declaration statements may appear before `node`; any other
      # statement type means `node` is not "on top"
      while i < l
        return no unless isVariableDeclaration statements[i]
        return yes if statements[i] is node
        ++i
      # `node` was not found inside the leading declaration group
      no
###*
# Checks whether variable is on top at the global level
# @param {ASTNode} node - The node to check
# @param {ASTNode} parent - Parent of the node
# @returns {void}
###
    globalVarCheck = (node, assignment, parent) ->
      # `assignment` must sit inside the leading declaration group of
      # `parent.body` (Program or export body); otherwise report on `node`.
      unless isVarOnTop assignment, parent.body
        context.report {node, message: errorMessage}
###*
# Checks whether variable is on top at functional block scope level
# @param {ASTNode} node - The node to check
# @param {ASTNode} parent - Parent of the node
# @param {ASTNode} grandParent - Parent of the node's parent
# @returns {void}
###
    blockScopeVarCheck = (node, assignment, parent, grandParent) ->
      # Valid only when the assignment statement exists, lives directly in
      # the BlockStatement of a function, and is part of that block's
      # leading declaration group; anything else is reported.
      unless (
        assignment? and
        /Function/.test(grandParent.type) and
        parent.type is 'BlockStatement' and
        isVarOnTop assignment, parent.body
      )
        context.report {node, message: errorMessage}
findEnclosingAssignment = (node) ->
currentNode = node
prevNode = null
while currentNode
return if boundaryNodeRegex.test node.type
return if (
currentNode.type is 'Property' and
prevNode is currentNode.key and
prevNode isnt currentNode.value
)
if currentNode.type is 'AssignmentExpression'
if prevNode is currentNode.left
return currentNode
return
prevNode = currentNode
currentNode = currentNode.parent
findEnclosingExpressionStatement = (assignmentNode) ->
currentNode = assignmentNode
currentNode = currentNode.parent while (
currentNode.type is 'AssignmentExpression'
)
return currentNode if currentNode.type is 'ExpressionStatement'
#--------------------------------------------------------------------------
# Public API
#--------------------------------------------------------------------------
    ###*
    # Entry point: fires for every identifier the parser flagged as a
    # declaration. Locates the enclosing assignment and its statement,
    # then dispatches to the global or block-scope top-of-scope check.
    ###
    'Identifier[declaration=true]': (node) ->
      enclosingAssignment = findEnclosingAssignment node
      # identifier is not the target of any assignment - nothing to check
      return unless enclosingAssignment
      enclosingExpressionStatement = findEnclosingExpressionStatement(
        enclosingAssignment
      )
      if enclosingAssignment.parent.type is 'ExportNamedDeclaration'
        # `export x = ...`: check position within the export's parent body
        globalVarCheck(
          node
          enclosingAssignment.parent
          enclosingAssignment.parent.parent
        )
      else if enclosingExpressionStatement?.parent.type is 'Program'
        # top-level `x = ...`: check position within the Program body
        globalVarCheck(
          node
          enclosingExpressionStatement
          enclosingExpressionStatement.parent
        )
      else
        # anything else must be at the top of a function's block
        blockScopeVarCheck(
          node
          enclosingExpressionStatement
          enclosingExpressionStatement?.parent
          enclosingExpressionStatement?.parent.parent
        )
|
[
{
"context": " Teleport To Me\"\n\t@description:\"Minions damaged by Syvrel are pulled in front of him\"\n\n\tmaxStacks: 1\n\n\t",
"end": 553,
"score": 0.5143386721611023,
"start": 551,
"tag": "NAME",
"value": "Sy"
}
] | app/sdk/modifiers/modifierDealDamageWatchTeleportToMe.coffee | willroberts/duelyst | 5 | Modifier = require './modifier'
AttackAction = require 'app/sdk/actions/attackAction'
CardType = require 'app/sdk/cards/cardType'
ModifierDealDamageWatch = require './modifierDealDamageWatch'
TeleportInFrontOfUnitAction = require 'app/sdk/actions/teleportInFrontOfUnitAction'
_ = require 'underscore'
# Modifier: when the card carrying it deals damage to a non-General
# minion, that minion is teleported to the space in front of the carrier.
class ModifierDealDamageWatchTeleportToMe extends ModifierDealDamageWatch
  type:"ModifierDealDamageWatchTeleportToMe"
  @type:"ModifierDealDamageWatchTeleportToMe"
  @modifierName:"Deal Damage Watch Teleport To Me"
  @description:"Minions damaged by Syvrel are pulled in front of him"
  # maxStacks: 1 - presumably limits this modifier to a single stack per
  # card (semantics defined in the Modifier base class - confirm there)
  maxStacks: 1
  # Damage-watch hook (see ModifierDealDamageWatch); `action` is the
  # damage action that just resolved.
  onDealDamage: (action) ->
    if @getGameSession().getIsRunningAsAuthoritative()
      # calculate results of teleport only on server, since results may change at execution time
      target = action.getTarget()
      # Generals are never pulled, only minions
      if target and !target.getIsGeneral()
        # move target in front of this minion
        teleAction = new TeleportInFrontOfUnitAction(@getGameSession(), @getCard(), target)
        teleAction.setFXResource(_.union(teleAction.getFXResource(), @getFXResource()))
        @getGameSession().executeAction(teleAction)
module.exports = ModifierDealDamageWatchTeleportToMe
| 218054 | Modifier = require './modifier'
AttackAction = require 'app/sdk/actions/attackAction'
CardType = require 'app/sdk/cards/cardType'
ModifierDealDamageWatch = require './modifierDealDamageWatch'
TeleportInFrontOfUnitAction = require 'app/sdk/actions/teleportInFrontOfUnitAction'
_ = require 'underscore'
class ModifierDealDamageWatchTeleportToMe extends ModifierDealDamageWatch
type:"ModifierDealDamageWatchTeleportToMe"
@type:"ModifierDealDamageWatchTeleportToMe"
@modifierName:"Deal Damage Watch Teleport To Me"
@description:"Minions damaged by <NAME>vrel are pulled in front of him"
maxStacks: 1
onDealDamage: (action) ->
if @getGameSession().getIsRunningAsAuthoritative()
# calculate results of teleport only on server, since results may change at execution time
target = action.getTarget()
if target and !target.getIsGeneral()
# move target in front of this minion
teleAction = new TeleportInFrontOfUnitAction(@getGameSession(), @getCard(), target)
teleAction.setFXResource(_.union(teleAction.getFXResource(), @getFXResource()))
@getGameSession().executeAction(teleAction)
module.exports = ModifierDealDamageWatchTeleportToMe
| true | Modifier = require './modifier'
AttackAction = require 'app/sdk/actions/attackAction'
CardType = require 'app/sdk/cards/cardType'
ModifierDealDamageWatch = require './modifierDealDamageWatch'
TeleportInFrontOfUnitAction = require 'app/sdk/actions/teleportInFrontOfUnitAction'
_ = require 'underscore'
class ModifierDealDamageWatchTeleportToMe extends ModifierDealDamageWatch
type:"ModifierDealDamageWatchTeleportToMe"
@type:"ModifierDealDamageWatchTeleportToMe"
@modifierName:"Deal Damage Watch Teleport To Me"
@description:"Minions damaged by PI:NAME:<NAME>END_PIvrel are pulled in front of him"
maxStacks: 1
onDealDamage: (action) ->
if @getGameSession().getIsRunningAsAuthoritative()
# calculate results of teleport only on server, since results may change at execution time
target = action.getTarget()
if target and !target.getIsGeneral()
# move target in front of this minion
teleAction = new TeleportInFrontOfUnitAction(@getGameSession(), @getCard(), target)
teleAction.setFXResource(_.union(teleAction.getFXResource(), @getFXResource()))
@getGameSession().executeAction(teleAction)
module.exports = ModifierDealDamageWatchTeleportToMe
|
[
{
"context": "api/authz\n@requires joukou-api/agent/Model\n@author Isaac Johnston <isaac.johnston@joukou.com>\n###\n\n_ = ",
"end": 840,
"score": 0.9998639822006226,
"start": 826,
"tag": "NAME",
"value": "Isaac Johnston"
},
{
"context": "es joukou-api/agent/Model\n@author... | src/agent/routes.coffee | joukou/joukou-api | 0 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/agent/Model|Agent} routes.
@module joukou-api/agent/routes
@requires lodash
@requires jsonwebtoken
@requires joukou-api/config
@requires joukou-api/authn
@requires joukou-api/authz
@requires joukou-api/agent/Model
@author Isaac Johnston <isaac.johnston@joukou.com>
###
_ = require( 'lodash' )
jwt = require( 'jsonwebtoken' )
authn = require( '../authn' )
authz = require( '../authz' )
config = require( '../config' )
AgentModel = require( './model' )
{ UnauthorizedError, NotFoundError } = require( 'restify' )
env = require( '../env' )
passport = require( 'passport' )
githubEnv = env.getGithubAuth()
graph_routes = require( './graph/routes' )
module.exports = self =
###*
Register the `/agent` routes with the `server`.
@param {joukou-api/server} server
###
registerRoutes: ( server ) ->
server.del( '/agent', authn.authenticate, self.delete )
server.get( '/agent', authn.authenticate, self.index )
server.post( '/agent', self.create )
# Post should be handled a different way
# It should really only be a get
server.get( '/agent/authenticate/github', authn.Github.authenticate, self.authenticate )
server.get( '/agent/authenticate', authn.Github.authenticate, self.authenticate )
# server.post( '/agent/authenticate', authn.authenticateOAuth, self.authenticate )
server.get( '/agent/authenticate/callback', authn.Github.authenticate, self.callback )
server.get( '/agent/authenticate/failed', self.failed )
server.get( '/agent/:agentKey', authn.authenticate, self.retrieve )
graph_routes.registerRoutes( server )
  ###*
  Handles a request to delete the authenticated agent.
  Responds 503 when no authenticated user is present, 204 on success.
  @param {http.IncomingMessage} req
  @param {http.ServerResponse} res
  @param {Function} next
  ###
  delete: ( req, res, next ) ->
    if not req.user
      res.send(503)
      return
    req.user.delete().then(->
      res.send(204)
    ).fail( next )
  ###*
  OAuth failure endpoint; currently always responds 503
  (the redirect to githubEnv.failedUrl is commented out).
  @param {http.IncomingMessage} req
  @param {http.ServerResponse} res
  ###
  failed: ( req, res ) ->
    # res.header("Location", githubEnv.failedUrl )
    res.send(503)
  ###*
  OAuth callback: issues a bearer token for the authenticated user and
  302-redirects to the configured success URL with the token appended,
  or to the failure URL when no user is present on the request.
  @param {http.IncomingMessage} req
  @param {http.ServerResponse} res
  ###
  callback: (req, res, val ) ->
    token = null
    if req and req.user
      token = authn.Bearer.generate(req.user)
    if token
      res.header("Location", githubEnv.successUrl + "/" + token)
    else
      res.header("Location", githubEnv.failedUrl )
    res.send(302)
  ###*
  Responds with the authenticated agent's value object.
  @param {http.IncomingMessage} req
  @param {http.ServerResponse} res
  @param {Function} next
  ###
  index: ( req, res, next ) ->
    res.send( 200, req.user.getValue() )
###*
Handles a request to create an agent.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
create: ( req, res, next ) ->
AgentModel.create( req.body ).then( ( agent ) ->
agent.save()
)
.then( ( agent ) ->
self = "/agent/#{agent.getKey()}"
res.header( 'Location', self )
res.link( self, 'joukou:agent' )
res.send( 201, {} )
)
.fail( ( err ) -> res.send( err ) )
###*
Handles a request to authenticate an agent, and respond with a JSON Web Token
if successful.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
authenticate: ( req, res, next ) ->
# TODO config.jwt.secret
token = authn.Bearer.generate(req.user)
res.link( "/agent/#{req.user.getKey()}", 'joukou:agent' )
res.link( '/persona', 'joukou:personas', title: 'List of Personas' )
res.send( 200, token: token )
###*
Handles a request to retrieve details about an agent.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
  retrieve: ( req, res, next ) ->
    AgentModel.retrieve( req.params.agentKey ).then( ( agent ) ->
      # agents may only view themselves unless they hold the operator role
      unless agent.getEmail() is req.user.getEmail() or
          req.user.hasRole( 'operator' )
        next( new UnauthorizedError() )
        return
      res.link( '/persona', 'joukou:personas', title: 'List of Personas' )
      res.send( 200, agent.getRepresentation() )
    ).fail( ( err ) ->
      if err instanceof NotFoundError
        # Technically this should be a 404 NotFound, but that could be abused by
        # an attacker to discover valid user keys.
        res.send( 401 )
      else
        next( err )
    )
| 36674 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/agent/Model|Agent} routes.
@module joukou-api/agent/routes
@requires lodash
@requires jsonwebtoken
@requires joukou-api/config
@requires joukou-api/authn
@requires joukou-api/authz
@requires joukou-api/agent/Model
@author <NAME> <<EMAIL>>
###
_ = require( 'lodash' )
jwt = require( 'jsonwebtoken' )
authn = require( '../authn' )
authz = require( '../authz' )
config = require( '../config' )
AgentModel = require( './model' )
{ UnauthorizedError, NotFoundError } = require( 'restify' )
env = require( '../env' )
passport = require( 'passport' )
githubEnv = env.getGithubAuth()
graph_routes = require( './graph/routes' )
module.exports = self =
###*
Register the `/agent` routes with the `server`.
@param {joukou-api/server} server
###
registerRoutes: ( server ) ->
server.del( '/agent', authn.authenticate, self.delete )
server.get( '/agent', authn.authenticate, self.index )
server.post( '/agent', self.create )
# Post should be handled a different way
# It should really only be a get
server.get( '/agent/authenticate/github', authn.Github.authenticate, self.authenticate )
server.get( '/agent/authenticate', authn.Github.authenticate, self.authenticate )
# server.post( '/agent/authenticate', authn.authenticateOAuth, self.authenticate )
server.get( '/agent/authenticate/callback', authn.Github.authenticate, self.callback )
server.get( '/agent/authenticate/failed', self.failed )
server.get( '/agent/:agentKey', authn.authenticate, self.retrieve )
graph_routes.registerRoutes( server )
delete: ( req, res, next ) ->
if not req.user
res.send(503)
return
req.user.delete().then(->
res.send(204)
).fail( next )
failed: ( req, res ) ->
# res.header("Location", githubEnv.failedUrl )
res.send(503)
callback: (req, res, val ) ->
token = null
if req and req.user
token = authn.<KEY>(req.user)
if token
res.header("Location", githubEnv.successUrl + "/" + token)
else
res.header("Location", githubEnv.failedUrl )
res.send(302)
index: ( req, res, next ) ->
res.send( 200, req.user.getValue() )
###*
Handles a request to create an agent.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
create: ( req, res, next ) ->
AgentModel.create( req.body ).then( ( agent ) ->
agent.save()
)
.then( ( agent ) ->
self = "/agent/#{agent.getKey()}"
res.header( 'Location', self )
res.link( self, 'joukou:agent' )
res.send( 201, {} )
)
.fail( ( err ) -> res.send( err ) )
###*
Handles a request to authenticate an agent, and respond with a JSON Web Token
if successful.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
authenticate: ( req, res, next ) ->
# TODO config.jwt.secret
token = authn.Bearer.generate(req.user)
res.link( "/agent/#{req.user.getKey()}", 'joukou:agent' )
res.link( '/persona', 'joukou:personas', title: 'List of Personas' )
res.send( 200, token: token )
###*
Handles a request to retrieve details about an agent.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
retrieve: ( req, res, next ) ->
AgentModel.retrieve( req.params.agentKey ).then( ( agent ) ->
unless agent.getEmail() is req.user.getEmail() or
req.user.hasRole( 'operator' )
next( new UnauthorizedError() )
return
res.link( '/persona', 'joukou:personas', title: 'List of Personas' )
res.send( 200, agent.getRepresentation() )
).fail( ( err ) ->
if err instanceof NotFoundError
# Technically this should be a 404 NotFound, but that could be abused by
# an attacker to discover valid user keys.
res.send( 401 )
else
next( err )
)
| true | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/agent/Model|Agent} routes.
@module joukou-api/agent/routes
@requires lodash
@requires jsonwebtoken
@requires joukou-api/config
@requires joukou-api/authn
@requires joukou-api/authz
@requires joukou-api/agent/Model
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
_ = require( 'lodash' )
jwt = require( 'jsonwebtoken' )
authn = require( '../authn' )
authz = require( '../authz' )
config = require( '../config' )
AgentModel = require( './model' )
{ UnauthorizedError, NotFoundError } = require( 'restify' )
env = require( '../env' )
passport = require( 'passport' )
githubEnv = env.getGithubAuth()
graph_routes = require( './graph/routes' )
module.exports = self =
###*
Register the `/agent` routes with the `server`.
@param {joukou-api/server} server
###
registerRoutes: ( server ) ->
server.del( '/agent', authn.authenticate, self.delete )
server.get( '/agent', authn.authenticate, self.index )
server.post( '/agent', self.create )
# Post should be handled a different way
# It should really only be a get
server.get( '/agent/authenticate/github', authn.Github.authenticate, self.authenticate )
server.get( '/agent/authenticate', authn.Github.authenticate, self.authenticate )
# server.post( '/agent/authenticate', authn.authenticateOAuth, self.authenticate )
server.get( '/agent/authenticate/callback', authn.Github.authenticate, self.callback )
server.get( '/agent/authenticate/failed', self.failed )
server.get( '/agent/:agentKey', authn.authenticate, self.retrieve )
graph_routes.registerRoutes( server )
delete: ( req, res, next ) ->
if not req.user
res.send(503)
return
req.user.delete().then(->
res.send(204)
).fail( next )
failed: ( req, res ) ->
# res.header("Location", githubEnv.failedUrl )
res.send(503)
callback: (req, res, val ) ->
token = null
if req and req.user
token = authn.PI:KEY:<KEY>END_PI(req.user)
if token
res.header("Location", githubEnv.successUrl + "/" + token)
else
res.header("Location", githubEnv.failedUrl )
res.send(302)
index: ( req, res, next ) ->
res.send( 200, req.user.getValue() )
###*
Handles a request to create an agent.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
create: ( req, res, next ) ->
AgentModel.create( req.body ).then( ( agent ) ->
agent.save()
)
.then( ( agent ) ->
self = "/agent/#{agent.getKey()}"
res.header( 'Location', self )
res.link( self, 'joukou:agent' )
res.send( 201, {} )
)
.fail( ( err ) -> res.send( err ) )
###*
Handles a request to authenticate an agent, and respond with a JSON Web Token
if successful.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
authenticate: ( req, res, next ) ->
# TODO config.jwt.secret
token = authn.Bearer.generate(req.user)
res.link( "/agent/#{req.user.getKey()}", 'joukou:agent' )
res.link( '/persona', 'joukou:personas', title: 'List of Personas' )
res.send( 200, token: token )
###*
Handles a request to retrieve details about an agent.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {Function} next
###
retrieve: ( req, res, next ) ->
AgentModel.retrieve( req.params.agentKey ).then( ( agent ) ->
unless agent.getEmail() is req.user.getEmail() or
req.user.hasRole( 'operator' )
next( new UnauthorizedError() )
return
res.link( '/persona', 'joukou:personas', title: 'List of Personas' )
res.send( 200, agent.getRepresentation() )
).fail( ( err ) ->
if err instanceof NotFoundError
# Technically this should be a 404 NotFound, but that could be abused by
# an attacker to discover valid user keys.
res.send( 401 )
else
next( err )
)
|
[
{
"context": "awRequest 'byMailboxRequest',\n startkey: ['uid', boxID, 0]\n endkey: ['uid', boxID, 'a'] #",
"end": 1643,
"score": 0.7424681782722473,
"start": 1640,
"tag": "KEY",
"value": "uid"
},
{
"context": " startkey: ['uid', boxID, 0]\n endkey: ['uid', bo... | server/patchs/ignored.coffee | cozy-labs/emails | 58 | Message = require '../models/message'
safeLoop = require '../utils/safeloop'
async = require 'async'
ramStore = require '../models/store_account_and_boxes'
log = require('../utils/logging')(prefix: 'patch:ignored')
# Public: apply the "ignored" patch to every account in the RAM store.
#
# callback - invoked once all accounts have been processed
exports.patchAllAccounts = (callback) ->
    accounts = ramStore.getAllAccounts()
    # eachSeries processes the accounts one at a time, in order
    async.eachSeries accounts, patchOneAccount, callback
# Public: patch this account to mark its junk & spam message as ignored
#
# Returns (callback) at completion
patchOneAccount = (account, callback) ->
    log.debug "applyPatchIgnored, already = ", account.patchIgnored
    # already patched on a previous run - nothing to do
    return callback null if account.patchIgnored
    # only the trash and junk mailboxes are affected (when present)
    boxes = []
    boxes.push account.trashMailbox if account.trashMailbox
    boxes.push account.junkMailbox if account.junkMailbox
    log.debug "applyPatchIgnored", boxes
    safeLoop boxes, markAllMessagesAsIgnored, (errors) ->
        if errors.length
            log.debug "applyPatchIgnored:fail", account.id
            # errors are swallowed here: patchIgnored stays false, so the
            # patch will be attempted again on a later run
            callback null
        else
            log.debug "applyPatchIgnored:success", account.id
            # if there was no error, the account is patched
            # note it so we dont apply patch again
            account.updateAttributes patchIgnored: true, callback
# Public: mark all messages in a box as ignoreInCount
# keep looping but throw an error if one fail
#
# boxID - {String} the box id
#
# Returns (callback) at completion
markAllMessagesAsIgnored = (boxID, callback) ->
    # NOTE(review): this unconditional early return makes everything below
    # unreachable, i.e. the patch body is effectively disabled - confirm
    # whether this is intentional before removing it
    return callback()
    changes = ignoreInCount: true
    markIgnored = (id, next) ->
        Message.updateAttributes id, changes, next
    # fetch every message id in the mailbox via the byMailboxRequest view
    Message.rawRequest 'byMailboxRequest',
        startkey: ['uid', boxID, 0]
        endkey: ['uid', boxID, 'a'] # = Infinity in couchdb collation
        reduce: false
    , (err, rows) ->
        return callback err if err
        ids = rows?.map (row) -> row.id
        # keep looping over all ids even if some updates fail; report the
        # first failure (if any) to the caller
        safeLoop ids, markIgnored, (errors) ->
            log.warn "error marking msg ignored", err for err in errors
            callback errors[0]
| 71385 | Message = require '../models/message'
safeLoop = require '../utils/safeloop'
async = require 'async'
ramStore = require '../models/store_account_and_boxes'
log = require('../utils/logging')(prefix: 'patch:ignored')
exports.patchAllAccounts = (callback) ->
accounts = ramStore.getAllAccounts()
async.eachSeries accounts, patchOneAccount, callback
# Public: patch this account to mark its junk & spam message as ignored
#
# Returns (callback) at completion
patchOneAccount = (account, callback) ->
log.debug "applyPatchIgnored, already = ", account.patchIgnored
return callback null if account.patchIgnored
boxes = []
boxes.push account.trashMailbox if account.trashMailbox
boxes.push account.junkMailbox if account.junkMailbox
log.debug "applyPatchIgnored", boxes
safeLoop boxes, markAllMessagesAsIgnored, (errors) ->
if errors.length
log.debug "applyPatchIgnored:fail", account.id
callback null
else
log.debug "applyPatchIgnored:success", account.id
# if there was no error, the account is patched
# note it so we dont apply patch again
account.updateAttributes patchIgnored: true, callback
# Public: mark all messages in a box as ignoreInCount
# keep looping but throw an error if one fail
#
# boxID - {String} the box id
#
# Returns (callback) at completion
markAllMessagesAsIgnored = (boxID, callback) ->
return callback()
changes = ignoreInCount: true
markIgnored = (id, next) ->
Message.updateAttributes id, changes, next
Message.rawRequest 'byMailboxRequest',
startkey: ['<KEY>', boxID, 0]
endkey: ['<KEY>', boxID, 'a'] # = Infinity in couchdb collation
reduce: false
, (err, rows) ->
return callback err if err
ids = rows?.map (row) -> row.id
safeLoop ids, markIgnored, (errors) ->
log.warn "error marking msg ignored", err for err in errors
callback errors[0]
| true | Message = require '../models/message'
safeLoop = require '../utils/safeloop'
async = require 'async'
ramStore = require '../models/store_account_and_boxes'
log = require('../utils/logging')(prefix: 'patch:ignored')
exports.patchAllAccounts = (callback) ->
accounts = ramStore.getAllAccounts()
async.eachSeries accounts, patchOneAccount, callback
# Public: patch this account to mark its junk & spam message as ignored
#
# Returns (callback) at completion
patchOneAccount = (account, callback) ->
log.debug "applyPatchIgnored, already = ", account.patchIgnored
return callback null if account.patchIgnored
boxes = []
boxes.push account.trashMailbox if account.trashMailbox
boxes.push account.junkMailbox if account.junkMailbox
log.debug "applyPatchIgnored", boxes
safeLoop boxes, markAllMessagesAsIgnored, (errors) ->
if errors.length
log.debug "applyPatchIgnored:fail", account.id
callback null
else
log.debug "applyPatchIgnored:success", account.id
# if there was no error, the account is patched
# note it so we dont apply patch again
account.updateAttributes patchIgnored: true, callback
# Public: mark all messages in a box as ignoreInCount
# keep looping but throw an error if one fail
#
# boxID - {String} the box id
#
# Returns (callback) at completion
markAllMessagesAsIgnored = (boxID, callback) ->
return callback()
changes = ignoreInCount: true
markIgnored = (id, next) ->
Message.updateAttributes id, changes, next
Message.rawRequest 'byMailboxRequest',
startkey: ['PI:KEY:<KEY>END_PI', boxID, 0]
endkey: ['PI:KEY:<KEY>END_PI', boxID, 'a'] # = Infinity in couchdb collation
reduce: false
, (err, rows) ->
return callback err if err
ids = rows?.map (row) -> row.id
safeLoop ids, markIgnored, (errors) ->
log.warn "error marking msg ignored", err for err in errors
callback errors[0]
|
[
{
"context": "that it\n # can also be used in \"AdvancedCtrl\" (Jyrki Niemi 2015-09-24)\n $scope.getWithins = () ->\n ",
"end": 1335,
"score": 0.9871250987052917,
"start": 1324,
"tag": "NAME",
"value": "Jyrki Niemi"
},
{
"context": "related_words is undefined or\n ... | app/scripts/search_controllers.coffee | CSCfi/Kielipankki-korp-frontend | 0 | korpApp = angular.module("korpApp")
# Controller for the search pane: keeps the URL query string and the
# scope's search-related flags (word picture, map, name classification,
# statistics attributes) in sync, in both directions.
window.SearchCtrl = ["$scope", "$location", "utils", "searches", ( ($scope, $location, utils, searches) ->
    $scope.visibleTabs = [true, true, true, true]
    $scope.extendedTmpl = "views/extended_tmpl.html"
    # for parallel mode
    searches.langDef.resolve()
    $scope.isCompareSelected = false
    # URL -> scope: compare tab is tab index 3
    # NOTE(review): `==` compiles to `===`; $location.search() values are
    # typically strings, so a string "3" would not match - confirm
    $scope.$watch( (() -> $location.search().search_tab),
        (val) ->
            $scope.isCompareSelected = val == 3
    )
    # two-way sync of boolean flags with the URL (null removes the param)
    $scope.$watch (() -> $location.search().word_pic), (val) ->
        $scope.word_pic = Boolean(val)
    $scope.$watch "word_pic", (val) ->
        $location.search("word_pic", Boolean(val) or null)
    $scope.$watch (() -> $location.search().show_map), (val) ->
        $scope.show_map = Boolean(val)
    $scope.$watch "show_map", (val) -> $location.search("show_map", Boolean(val) or null)
    $scope.$watch (() -> $location.search().show_name_classif), (val) ->
        $scope.show_name_classif = Boolean(val)
    $scope.$watch "show_name_classif", (val) ->
        $location.search("show_name_classif", Boolean(val) or null)
    $scope.settings = settings
    $scope.showStats = () ->
        return settings.statistics != false
    # $scope.getWithins was copied from "ExtendedSearch", so that it
    # can also be used in "AdvancedCtrl" (Jyrki Niemi 2015-09-24)
    $scope.getWithins = () ->
        union = settings.corpusListing.getWithinKeys()
        output = _.map union, (item) -> {value : item}
        return output
    # default statistics grouping attribute when none is in the URL
    unless $location.search().stats_reduce
        $location.search 'stats_reduce', ("word")
    # refresh the statistics-attribute choices whenever the corpus
    # selection changes
    $scope.corpusChangeListener = $scope.$on "corpuschooserchange", (event, selected) ->
        c.log "SearchCtrl corpuschooserchange"
        $scope.noCorporaSelected = not selected.length
        allAttrs = settings.corpusListing.getStatsAttributeGroups()
        $scope.statCurrentAttrs = _.filter allAttrs, (item) -> not item.hideStatistics
        $scope.statSelectedAttrs = $location.search().stats_reduce.split ','
        insensitiveAttrs = $location.search().stats_reduce_insensitive
        if insensitiveAttrs
            $scope.statInsensitiveAttrs = insensitiveAttrs.split ','
    # scope -> URL: persist the selected statistics attributes
    # (third argument `true` requests a deep watch)
    $scope.$watch 'statSelectedAttrs', ((selected) ->
        if selected and selected.length > 0
            $location.search 'stats_reduce', ($scope.statSelectedAttrs.join ',')
    ), true
    $scope.$watch 'statInsensitiveAttrs', ((insensitive) ->
        if insensitive and insensitive.length > 0
            $location.search 'stats_reduce_insensitive', ($scope.statInsensitiveAttrs.join ',')
        else if insensitive
            # an (empty but defined) list clears the URL parameter
            $location.search 'stats_reduce_insensitive', null
    ), true
)]
korpApp.controller "SearchCtrl", window.SearchCtrl
# Controller for the simple-search tab.
# Builds CQP queries from a word form or a lemgram, optionally restricted by a
# site-configurable "prequery", and dispatches KWIC / word-picture /
# related-word requests whenever `searches.activeSearch` changes.
korpApp.controller "SimpleCtrl", ($scope, utils, $location, backend, $rootScope, searches, compareSearches, $uibModal) ->
    s = $scope
    # Prequery support is enabled per installation via settings.
    prequeries_enabled = settings.simple_search_restrict_context
    if prequeries_enabled
        # Simple prequery, prequery within and prequery attribute
        s.simple_prequery = ""
        s.prequery_within_opts = [
            "sentence"
            "paragraph"
            "text"
        ]
        s.prequery_within_default = s.prequery_within_opts[0]
        s.prequery_within = s.prequery_within_opts[0]
        s.prequery_attr_opts = [
            # Word attribute name, localization key
            ["lemma", "baseforms"]
            ["word", "wordforms"]
        ]
        # s.prequery_attr = s.prequery_attr_opts[0][0]
        s.prequery_attr = "lemma|word"
        # Set the value of simple_prequery based on the URL parameter
        # simple_prequery
        s.$watch( (() -> $location.search().simple_prequery),
            (val) -> s.simple_prequery = val
        )
    # Save the current query under a user-chosen label for search comparison.
    s.$on "popover_submit", (event, name) ->
        cqp = s.instance.getCQP()
        compareSearches.saveSearch {
            label : name or cqp
            cqp : cqp
            corpora : settings.corpusListing.getSelectedCorpora()
        }
    # Display helpers for the related-words (SWE-FN) popup.
    s.stringifyRelatedHeader = (wd) ->
        wd.replace(/_/g, " ")
    s.stringifyRelated = (wd) ->
        util.saldoToString(wd)
    modalInstance = null
    # Clicking a related word closes the popup, switches to the extended-search
    # tab and runs a saldo search for that word.
    s.clickRelated = (wd) ->
        modalInstance?.close()
        c.log "modalInstance", modalInstance
        $scope.$root.searchtabs()[1].select()
        s.$root.$broadcast "extended_set", "[saldo contains '#{wd}']"
        $location.search("search", "cqp|" + "[saldo contains '#{wd}']")
    s.relatedDefault = 3
    s.clickX = () ->
        modalInstance.dismiss()
    # Open the modal listing all related words grouped by SWE-FN frame.
    s.showAllRelated = () ->
        modalInstance = $uibModal.open(
            template: """
            <div class="modal-header">
                <h3 class="modal-title">{{'similar_header' | loc:lang}} (SWE-FN)</h3>
                <span ng-click="clickX()" class="close-x">×</span>
            </div>
            <div class="modal-body">
                <div ng-repeat="obj in relatedObj" class="col"><a target="_blank" ng-href="http://spraakbanken.gu.se/karp/#?lexicon=swefn&search=extended||and|sense|equals|swefn--{{obj.label}}" class="header">{{stringifyRelatedHeader(obj.label)}}</a>
                    <div class="list_wrapper">
                        <ul>
                            <li ng-repeat="wd in obj.words"> <a ng-click="clickRelated(wd)" class="link">{{stringifyRelated(wd) + " "}}</a></li>
                        </ul>
                    </div>
                </div>
            </div>
            """
            scope : s
            size : 'lg'
            windowClass : "related"
        )
    s.searches = searches
    # Main dispatcher: reacts to a new active search (word or lemgram) and
    # fires the appropriate backend requests.
    s.$watch "searches.activeSearch", (search) =>
        c.log "search", search
        unless search then return
        page = Number($location.search().page) or 0
        s.relatedObj = null
        if prequeries_enabled
            # Set URL parameters based on simple prequery variables
            if s.simple_prequery
                $location.search("simple_prequery", s.simple_prequery)
            if s.prequery_within != s.prequery_within_default
                $location.search("prequery_within", s.prequery_within)
            # $location.search("prequery_attr", s.prequery_attr)
        if search.type == "word"
            $("#simple_text input").val(search.val) # Necessary for displaying the wordform if it came from the URL
            s.simple_text = search.val
            cqp = simpleSearch.getCQP(search.val)
            c.log "simple search cqp", cqp
            if search.pageOnly
                # Only a page change: fetch that page and skip everything else.
                searches.kwicRequest(cqp, true)
                return
            else
                searches.kwicSearch(cqp)
            # Word picture only for single-word searches (no spaces).
            if settings.wordpicture != false and s.word_pic and " " not in search.val
                lemgramResults.makeRequest(search.val, "word")
            else
                lemgramResults?.resetView()
        else if search.type == "lemgram"
            s.placeholder = search.val
            s.simple_text = ""
            s.model = search.val
            cqp = simpleSearch.getCQP()
            # Show related words if show_related_words is undefined or
            # true
            if settings.show_related_words != false
                backend.relatedWordSearch(search.val).then (data) ->
                    s.relatedObj = data
            if s.word_pic
                searches.lemgramSearch(search.val, s.prefix, s.suffix, search.pageOnly)
            else
                # Add possible prequery CQPs
                # TODO: Check if the prequeries are always added
                # before coming here, in which case this code would
                # not be needed.
                # (The "||" check presumably detects an already-combined
                # prequery expression — TODO confirm.)
                if prequeries_enabled and s.simple_prequery and
                        cqp.indexOf("||") < 0
                    # c.log("lemgram simple_prequery", cqp, s.simple_prequery)
                    cqps = simpleSearch.makePrequeryCQPs(s.simple_prequery)
                    cqps.push(cqp)
                    # c.log("cqps", cqps)
                    cqp = util.combineCQPs(cqps)
                    # c.log("searches.activeSearch prequeries cqp", cqp)
                searches.kwicSearch(cqp, search.pageOnly)
        else
            # Unknown search type: clear the simple-search UI state.
            s.placeholder = null
            s.simple_text = ""
            lemgramResults?.resetView()
    # Strip HTML from the human-readable lemgram rendering.
    s.lemgramToString = (lemgram) ->
        unless lemgram then return
        util.lemgramToString(lemgram).replace(/<.*?>/g, "")
    # Keep these scope variables in sync with the URL hash.
    utils.setupHash s, [
        key : "prefix"
    ,
        key : "suffix"
    ,
        key : "isCaseInsensitive"
    ]
    if prequeries_enabled
        utils.setupHash s, [
            key : "simple_prequery"
            default : ""
        ,
            key : "prequery_within"
            default : s.prequery_within_default
        # ,
        #     key : "prequery_attr"
        ]
    # Simple search never sets "within"; remove any leftover URL parameter.
    $scope.$on "btn_submit", () ->
        $location.search "within", null
# Controller for the extended-search tab.
# Mirrors the token-builder CQP into $rootScope.extendedCQP (with ignorable
# tokens added) and keeps the "cqp" and "within" URL parameters in sync.
korpApp.controller "ExtendedSearch", ($scope, utils, $location, backend, $rootScope, searches, compareSearches, $timeout) ->
    s = $scope
    # Save the current extended query for search comparison.
    s.$on "popover_submit", (event, name) ->
        compareSearches.saveSearch {
            label : name or $rootScope.extendedCQP
            cqp : $rootScope.extendedCQP
            corpora : settings.corpusListing.getSelectedCorpora()
        }
    s.searches = searches
    s.$on "btn_submit", () ->
        c.log "extended submit"
        # Clear first, then set inside $timeout so the location change is
        # registered as a new search even when the parameters are unchanged.
        $location.search("search", null)
        $location.search("page", null)
        $timeout( () ->
            $location.search("search", "cqp")
            # Only put "within" in the URL when it differs from the defaults.
            # NOTE(review): when it does not differ, `within` stays undefined
            # here — presumably that removes the parameter; verify.
            within = s.within if s.within not in _.keys settings.defaultWithin
            $location.search "within", within
        , 0)
    # Broadcast from e.g. SimpleCtrl.clickRelated to preset the CQP.
    s.$on "extended_set", ($event, val) ->
        c.log "extended_set", val
        s.cqp = val
    if $location.search().cqp
        s.cqp = $location.search().cqp
    # Expand operators and ignorable-token markup, storing the result on
    # $rootScope so the advanced tab can display it.
    s.setExtendedCQP = (val) ->
        # c.log "setExtendedCQP", val
        try
            $rootScope.extendedCQP = CQP.expandOperators(val)
            # c.log "cqp expanded ops", $rootScope.extendedCQP
            # Add the possible ignorable tokens between tokens
            # (regardless of the current search tab). This makes the
            # modified version to be shown in the advanced search as
            # the extended search expression.
            $rootScope.extendedCQP =
                settings.corpusListing.addIgnoreBetweenTokensCQP(
                    $rootScope.extendedCQP, true)
            # c.log "cqp added ignore", $rootScope.extendedCQP
        catch e
            # Parse errors are expected while the user is editing; just log.
            c.log "cqp parse error:", e
    s.$watch "cqp", (val) ->
        c.log "cqp change", val
        unless val then return
        s.setExtendedCQP val
        $location.search("cqp", val)
    s.withins = []
    # "within" choices are the union of the selected corpora's within keys.
    s.getWithins = () ->
        union = settings.corpusListing.getWithinKeys()
        output = _.map union, (item) -> {value : item}
        return output
    s.$on "corpuschooserchange", () ->
        s.withins = s.getWithins()
        s.within = s.withins[0]?.value
        # Update the ignorable tokens between tokens and set the CQP
        # expression shown in the advanced search for the extended
        # search.
        settings.corpusListing.updateIgnoreBetweenTokensCQP()
        s.setExtendedCQP $location.search().cqp
# Controller for a single token box in the extended-search token builder.
# Manages the attribute/operator/value triples, token boundaries (sentence
# start/end) and repetition markers.
korpApp.controller "ExtendedToken", ($scope, utils, $location) ->
    s = $scope
    cqp = '[]'
    s.valfilter = utils.valfilter
    s.setDefault = (or_obj) ->
        # assign the first value from the opts
        opts = s.getOpts(or_obj.type)
        unless opts
            or_obj.op = "is"
        else
            or_obj.op = _.values(opts)[0][1]
        or_obj.val = ""
    # returning new array each time kills angular, hence the memoizing
    # NOTE(review): the memoize cache is keyed by attribute type but
    # s.typeMapping is rebuilt on every corpus change — stale entries may be
    # served after the corpus selection changes; verify.
    s.getOpts = _.memoize (type) ->
        # `of` compiles to JS `in`: bail out unless `type` is a known key.
        unless type of (s.typeMapping or {}) then return
        confObj = s.typeMapping?[type]
        unless confObj
            c.log "confObj missing", type, s.typeMapping
            return
        confObj = _.extend {}, (confObj?.opts or settings.defaultOptions)
        if confObj.type == "set"
            confObj.is = "contains"
        return _.pairs confObj
    # Rebuild the selectable attribute types for the current corpus selection.
    onCorpusChange = (event, selected) ->
        # TODO: respect the setting 'word_attribute_selector' and similar
        unless selected?.length then return
        lang = s.$parent.$parent?.l?.lang
        allAttrs = settings.corpusListing.getAttributeGroups(lang)
        s.types = _.filter allAttrs, (item) -> not item.hideExtended
        # Structural attributes are namespaced with a "_." prefix.
        s.typeMapping = _.object _.map s.types, (item) ->
            if item.isStructAttr
                ["_." + item.value, item]
            else
                [item.value, item]
    s.$on "corpuschooserchange", onCorpusChange
    onCorpusChange(null, settings.corpusListing.selected)
    # Remove one OR alternative; drop the whole AND row when it was the last.
    s.removeOr = (token, and_array, i) ->
        if and_array.length > 1
            and_array.splice(i, 1)
        else if token.and_block.length > 1
            token.and_block.splice (_.indexOf token.and_block, and_array), 1
    s.addAnd = (token) ->
        token.and_block.push s.addOr([])
    # Toggle a boundary flag ("lbound"/"rbound") on the token.
    toggleBound = (token, bnd) ->
        unless token.bound?[bnd]
            boundObj = {}
            boundObj[bnd] = true
            token.bound = _.extend (token.bound or {}), boundObj
        else
            delete token.bound?[bnd]
    s.toggleStart = (token) ->
        toggleBound(token, "lbound")
    s.toggleEnd = (token) ->
        toggleBound(token, "rbound")
    s.toggleRepeat = (token) ->
        unless token.repeat
            token.repeat = [1,1]
        else
            delete token.repeat
    # The token's CQP without the surrounding brackets.
    s.getTokenCqp = ->
        if not s.token.cqp
            return ""
        s.token.cqp.match(/\[(.*)]/)[1]
    s.onInsertMousedown = (event) ->
        event.stopPropagation()
# Directive controller for the advanced (raw CQP) search tab.
# Restores the CQP expression from the URL, mirrors the simple-search CQP for
# display, and submits the raw expression as "search=cqp|<expr>".
korpApp.directive "advancedSearch", () ->
    # FIX: the "popover_submit" handler reads $rootScope.extendedCQP, but
    # $rootScope was never injected here, so saving a search from this tab
    # threw a ReferenceError. $rootScope is now added to the (implicitly
    # annotated) injection list, matching the "ExtendedSearch" controller.
    controller : ($scope, compareSearches, $location, $timeout, $rootScope) ->
        s = $scope
        expr = ""
        # Restore the expression from the "search" URL parameter, whose format
        # is "cqp|<expression>" (the expression itself may contain "|").
        if $location.search().search
            [type, expr...] = $location.search().search?.split("|")
            expr = expr.join("|")
        if type == "cqp"
            $scope.cqp = expr or "[]"
        else
            $scope.cqp = "[]"
        # Show the within selection list unless settings.advanced_search_within
        # is false.
        s.showWithin = if settings.advanced_search_within?
            settings.advanced_search_within
        else
            true
        s.within = if s.showWithin
            $location.search().within or "sentence"
        else
            "sentence"
        # Mirror the simple search's CQP for display on this tab.
        $scope.$watch () ->
            simpleSearch?.getCQP()
        , (val) ->
            $scope.simpleCQP = val
        # Save the current advanced query for search comparison.
        $scope.$on "popover_submit", (event, name) ->
            compareSearches.saveSearch {
                label : name or $rootScope.extendedCQP
                cqp : $scope.cqp
                corpora : settings.corpusListing.getSelectedCorpora()
            }
        $scope.$on "btn_submit", () ->
            c.log "advanced cqp", $scope.cqp
            # Clear first, then set inside $timeout so the change always
            # registers as a new search.
            $location.search "search", null
            $location.search "page", null
            $location.search "within", null
            $timeout( () ->
                # Only put "within" in the URL when it differs from the defaults.
                within = s.within unless s.within in _.keys settings.defaultWithin
                $location.search("within", within or null)
                $location.search "search", "cqp|" + $scope.cqp
            , 0)
        if s.showWithin
            # getWithins is inherited from the parent SearchCtrl scope.
            s.withins = []
            s.$on "corpuschooserchange", () ->
                s.withins = s.getWithins()
# Angular filter "mapper": apply the given function to the piped value,
# e.g. {{value | mapper:someFn}}.
korpApp.filter "mapper", ->
    (value, fn) -> fn(value)
# Directive providing the controller for the "compare searches" tab:
# tracks the two saved searches being compared and issues compare requests.
korpApp.directive "compareSearchCtrl", () ->
    controller: ($scope, utils, $location, backend, $rootScope, compareSearches) ->
        scope = $scope
        scope.valfilter = utils.valfilter
        scope.savedSearches = compareSearches.savedSearches
        # Re-pick the two searches to compare whenever the saved list changes,
        # and recompute the attributes available for reduction.
        scope.$watch "savedSearches.length", () ->
            scope.cmp1 = compareSearches.savedSearches[0]
            scope.cmp2 = compareSearches.savedSearches[1]
            return unless scope.cmp1 and scope.cmp2
            corpora = [].concat scope.cmp1.corpora, scope.cmp2.corpora
            listing = settings.corpusListing.subsetFactory(_.uniq corpora)
            groups = listing.getAttributeGroups()
            scope.currentAttrs = (attr for attr in groups when not attr.hideCompare)
        scope.reduce = 'word'
        # Kick off a compare request and register its tab.
        scope.sendCompare = () ->
            request = backend.requestCompare(scope.cmp1, scope.cmp2, [scope.reduce])
            $rootScope.compareTabs.push request
        scope.deleteCompares = () ->
            compareSearches.flush()
# Angular filter "loc": look up the localized string for a translation key
# in the given language. ($rootScope is injected but unused in the body —
# presumably kept intentionally; TODO confirm before removing.)
korpApp.filter "loc", ($rootScope) ->
    (translationKey, lang) ->
        util.getLocaleString(translationKey, lang)
| 189864 | korpApp = angular.module("korpApp")
# Top-level controller for the search area: manages tab visibility, the
# word-picture / map / name-classification toggles (synced with URL
# parameters) and the statistics "reduce" attribute selection.
# Uses explicit DI annotations so it survives minification.
window.SearchCtrl = ["$scope", "$location", "utils", "searches", ( ($scope, $location, utils, searches) ->
    $scope.visibleTabs = [true, true, true, true]
    $scope.extendedTmpl = "views/extended_tmpl.html"
    # for parallel mode
    searches.langDef.resolve()
    $scope.isCompareSelected = false
    $scope.$watch( (() -> $location.search().search_tab),
        (val) ->
            # NOTE(review): CoffeeScript `==` compiles to `===`; if search_tab
            # arrives from the URL as the string "3" this never matches — verify.
            $scope.isCompareSelected = val == 3
    )
    # Each toggle below is watched in both directions: URL -> scope and
    # scope -> URL (falsy values remove the parameter).
    $scope.$watch (() -> $location.search().word_pic), (val) ->
        $scope.word_pic = Boolean(val)
    $scope.$watch "word_pic", (val) ->
        $location.search("word_pic", Boolean(val) or null)
    $scope.$watch (() -> $location.search().show_map), (val) ->
        $scope.show_map = Boolean(val)
    $scope.$watch "show_map", (val) -> $location.search("show_map", Boolean(val) or null)
    $scope.$watch (() -> $location.search().show_name_classif), (val) ->
        $scope.show_name_classif = Boolean(val)
    $scope.$watch "show_name_classif", (val) ->
        $location.search("show_name_classif", Boolean(val) or null)
    $scope.settings = settings
    $scope.showStats = () ->
        return settings.statistics != false
    # $scope.getWithins was copied from "ExtendedSearch", so that it
    # can also be used in "AdvancedCtrl"
    $scope.getWithins = () ->
        union = settings.corpusListing.getWithinKeys()
        output = _.map union, (item) -> {value : item}
        return output
    # Default the statistics reduce attribute to "word".
    unless $location.search().stats_reduce
        $location.search 'stats_reduce', ("word")
    # Recompute the selectable statistics attributes on corpus change.
    $scope.corpusChangeListener = $scope.$on "corpuschooserchange", (event, selected) ->
        c.log "SearchCtrl corpuschooserchange"
        $scope.noCorporaSelected = not selected.length
        allAttrs = settings.corpusListing.getStatsAttributeGroups()
        $scope.statCurrentAttrs = _.filter allAttrs, (item) -> not item.hideStatistics
        $scope.statSelectedAttrs = $location.search().stats_reduce.split ','
        insensitiveAttrs = $location.search().stats_reduce_insensitive
        if insensitiveAttrs
            $scope.statInsensitiveAttrs = insensitiveAttrs.split ','
    # Deep-watch (third arg true) the attribute selections back into the URL.
    $scope.$watch 'statSelectedAttrs', ((selected) ->
        if selected and selected.length > 0
            $location.search 'stats_reduce', ($scope.statSelectedAttrs.join ',')
    ), true
    $scope.$watch 'statInsensitiveAttrs', ((insensitive) ->
        if insensitive and insensitive.length > 0
            $location.search 'stats_reduce_insensitive', ($scope.statInsensitiveAttrs.join ',')
        else if insensitive
            # Empty (but defined) selection: drop the URL parameter.
            $location.search 'stats_reduce_insensitive', null
    ), true
)]
korpApp.controller "SearchCtrl", window.SearchCtrl
# Controller for the simple-search tab (word / lemgram searches, optional
# prequery restriction, related-word lookup and word-picture dispatch).
korpApp.controller "SimpleCtrl", ($scope, utils, $location, backend, $rootScope, searches, compareSearches, $uibModal) ->
    s = $scope
    # Prequery support is enabled per installation via settings.
    prequeries_enabled = settings.simple_search_restrict_context
    if prequeries_enabled
        # Simple prequery, prequery within and prequery attribute
        s.simple_prequery = ""
        s.prequery_within_opts = [
            "sentence"
            "paragraph"
            "text"
        ]
        s.prequery_within_default = s.prequery_within_opts[0]
        s.prequery_within = s.prequery_within_opts[0]
        s.prequery_attr_opts = [
            # Word attribute name, localization key
            ["lemma", "baseforms"]
            ["word", "wordforms"]
        ]
        # s.prequery_attr = s.prequery_attr_opts[0][0]
        s.prequery_attr = "lemma|word"
        # Set the value of simple_prequery based on the URL parameter
        # simple_prequery
        s.$watch( (() -> $location.search().simple_prequery),
            (val) -> s.simple_prequery = val
        )
    # Save the current query under a user-chosen label for comparison.
    s.$on "popover_submit", (event, name) ->
        cqp = s.instance.getCQP()
        compareSearches.saveSearch {
            label : name or cqp
            cqp : cqp
            corpora : settings.corpusListing.getSelectedCorpora()
        }
    # Rendering helpers for the related-words (SWE-FN) popup.
    s.stringifyRelatedHeader = (wd) ->
        wd.replace(/_/g, " ")
    s.stringifyRelated = (wd) ->
        util.saldoToString(wd)
    modalInstance = null
    # Clicking a related word: close the popup, switch to the extended tab
    # and run a saldo search for the clicked word.
    s.clickRelated = (wd) ->
        modalInstance?.close()
        c.log "modalInstance", modalInstance
        $scope.$root.searchtabs()[1].select()
        s.$root.$broadcast "extended_set", "[saldo contains '#{wd}']"
        $location.search("search", "cqp|" + "[saldo contains '#{wd}']")
    s.relatedDefault = 3
    s.clickX = () ->
        modalInstance.dismiss()
    # Open the modal listing all related words grouped by SWE-FN frame.
    s.showAllRelated = () ->
        modalInstance = $uibModal.open(
            template: """
            <div class="modal-header">
                <h3 class="modal-title">{{'similar_header' | loc:lang}} (SWE-FN)</h3>
                <span ng-click="clickX()" class="close-x">×</span>
            </div>
            <div class="modal-body">
                <div ng-repeat="obj in relatedObj" class="col"><a target="_blank" ng-href="http://spraakbanken.gu.se/karp/#?lexicon=swefn&search=extended||and|sense|equals|swefn--{{obj.label}}" class="header">{{stringifyRelatedHeader(obj.label)}}</a>
                    <div class="list_wrapper">
                        <ul>
                            <li ng-repeat="wd in obj.words"> <a ng-click="clickRelated(wd)" class="link">{{stringifyRelated(wd) + " "}}</a></li>
                        </ul>
                    </div>
                </div>
            </div>
            """
            scope : s
            size : 'lg'
            windowClass : "related"
        )
    s.searches = searches
    # Main dispatcher: reacts to the active search and fires KWIC,
    # word-picture and related-word requests as appropriate.
    s.$watch "searches.activeSearch", (search) =>
        c.log "search", search
        unless search then return
        page = Number($location.search().page) or 0
        s.relatedObj = null
        if prequeries_enabled
            # Set URL parameters based on simple prequery variables
            if s.simple_prequery
                $location.search("simple_prequery", s.simple_prequery)
            if s.prequery_within != s.prequery_within_default
                $location.search("prequery_within", s.prequery_within)
            # $location.search("prequery_attr", s.prequery_attr)
        if search.type == "word"
            $("#simple_text input").val(search.val) # Necessary for displaying the wordform if it came from the URL
            s.simple_text = search.val
            cqp = simpleSearch.getCQP(search.val)
            c.log "simple search cqp", cqp
            if search.pageOnly
                # Page change only: fetch that page and stop here.
                searches.kwicRequest(cqp, true)
                return
            else
                searches.kwicSearch(cqp)
            # Word picture only for single-word searches (no spaces).
            if settings.wordpicture != false and s.word_pic and " " not in search.val
                lemgramResults.makeRequest(search.val, "word")
            else
                lemgramResults?.resetView()
        else if search.type == "lemgram"
            s.placeholder = search.val
            s.simple_text = ""
            s.model = search.val
            cqp = simpleSearch.getCQP()
            # Show related words if show_related_words is undefined or
            # true
            if settings.show_related_words != false
                backend.relatedWordSearch(search.val).then (data) ->
                    s.relatedObj = data
            if s.word_pic
                searches.lemgramSearch(search.val, s.prefix, s.suffix, search.pageOnly)
            else
                # Add possible prequery CQPs
                # TODO: Check if the prequeries are always added
                # before coming here, in which case this code would
                # not be needed.
                if prequeries_enabled and s.simple_prequery and
                        cqp.indexOf("||") < 0
                    # c.log("lemgram simple_prequery", cqp, s.simple_prequery)
                    cqps = simpleSearch.makePrequeryCQPs(s.simple_prequery)
                    cqps.push(cqp)
                    # c.log("cqps", cqps)
                    cqp = util.combineCQPs(cqps)
                    # c.log("searches.activeSearch prequeries cqp", cqp)
                searches.kwicSearch(cqp, search.pageOnly)
        else
            # Unknown search type: reset the simple-search UI state.
            s.placeholder = null
            s.simple_text = ""
            lemgramResults?.resetView()
    # Human-readable lemgram text with any HTML markup stripped.
    s.lemgramToString = (lemgram) ->
        unless lemgram then return
        util.lemgramToString(lemgram).replace(/<.*?>/g, "")
    # Keep these scope variables in sync with the URL hash.
    utils.setupHash s, [
        key : "prefix"
    ,
        key : "suffix"
    ,
        key : "isCaseInsensitive"
    ]
    if prequeries_enabled
        utils.setupHash s, [
            key : "simple_prequery"
            default : ""
        ,
            key : "prequery_within"
            default : s.prequery_within_default
        # ,
        #     key : "prequery_attr"
        ]
    # Simple search never sets "within"; remove any leftover URL parameter.
    $scope.$on "btn_submit", () ->
        $location.search "within", null
# Controller for the extended-search tab: maintains the builder CQP on
# $rootScope.extendedCQP and syncs "cqp"/"within" URL parameters.
korpApp.controller "ExtendedSearch", ($scope, utils, $location, backend, $rootScope, searches, compareSearches, $timeout) ->
    s = $scope
    # Save the current extended query for search comparison.
    s.$on "popover_submit", (event, name) ->
        compareSearches.saveSearch {
            label : name or $rootScope.extendedCQP
            cqp : $rootScope.extendedCQP
            corpora : settings.corpusListing.getSelectedCorpora()
        }
    s.searches = searches
    s.$on "btn_submit", () ->
        c.log "extended submit"
        # Clear first, then set inside $timeout so the location change is
        # registered as a new search even with unchanged parameters.
        $location.search("search", null)
        $location.search("page", null)
        $timeout( () ->
            $location.search("search", "cqp")
            # Only write "within" when it differs from the defaults;
            # otherwise `within` stays undefined here.
            within = s.within if s.within not in _.keys settings.defaultWithin
            $location.search "within", within
        , 0)
    # Broadcast (e.g. from SimpleCtrl.clickRelated) to preset the CQP.
    s.$on "extended_set", ($event, val) ->
        c.log "extended_set", val
        s.cqp = val
    if $location.search().cqp
        s.cqp = $location.search().cqp
    # Expand operators plus ignorable-token markup and publish the result
    # on $rootScope for display in the advanced tab.
    s.setExtendedCQP = (val) ->
        # c.log "setExtendedCQP", val
        try
            $rootScope.extendedCQP = CQP.expandOperators(val)
            # c.log "cqp expanded ops", $rootScope.extendedCQP
            # Add the possible ignorable tokens between tokens
            # (regardless of the current search tab). This makes the
            # modified version to be shown in the advanced search as
            # the extended search expression.
            $rootScope.extendedCQP =
                settings.corpusListing.addIgnoreBetweenTokensCQP(
                    $rootScope.extendedCQP, true)
            # c.log "cqp added ignore", $rootScope.extendedCQP
        catch e
            # Parse errors are expected mid-edit; just log them.
            c.log "cqp parse error:", e
    s.$watch "cqp", (val) ->
        c.log "cqp change", val
        unless val then return
        s.setExtendedCQP val
        $location.search("cqp", val)
    s.withins = []
    # "within" choices: union of the selected corpora's within keys.
    s.getWithins = () ->
        union = settings.corpusListing.getWithinKeys()
        output = _.map union, (item) -> {value : item}
        return output
    s.$on "corpuschooserchange", () ->
        s.withins = s.getWithins()
        s.within = s.withins[0]?.value
        # Update the ignorable tokens between tokens and set the CQP
        # expression shown in the advanced search for the extended
        # search.
        settings.corpusListing.updateIgnoreBetweenTokensCQP()
        s.setExtendedCQP $location.search().cqp
# Controller for one token box of the extended-search builder: attribute
# options, boundary flags and repetition handling.
korpApp.controller "ExtendedToken", ($scope, utils, $location) ->
    s = $scope
    cqp = '[]'
    s.valfilter = utils.valfilter
    s.setDefault = (or_obj) ->
        # assign the first value from the opts
        opts = s.getOpts(or_obj.type)
        unless opts
            or_obj.op = "is"
        else
            or_obj.op = _.values(opts)[0][1]
        or_obj.val = ""
    # returning new array each time kills angular, hence the memoizing
    # NOTE(review): memoized by attribute type while s.typeMapping is rebuilt
    # on corpus change — stale cache entries possible; verify.
    s.getOpts = _.memoize (type) ->
        # `of` compiles to JS `in`: bail unless `type` is a known key.
        unless type of (s.typeMapping or {}) then return
        confObj = s.typeMapping?[type]
        unless confObj
            c.log "confObj missing", type, s.typeMapping
            return
        confObj = _.extend {}, (confObj?.opts or settings.defaultOptions)
        if confObj.type == "set"
            confObj.is = "contains"
        return _.pairs confObj
    # Rebuild the selectable attribute types for the current corpus selection.
    onCorpusChange = (event, selected) ->
        # TODO: respect the setting 'word_attribute_selector' and similar
        unless selected?.length then return
        lang = s.$parent.$parent?.l?.lang
        allAttrs = settings.corpusListing.getAttributeGroups(lang)
        s.types = _.filter allAttrs, (item) -> not item.hideExtended
        # Structural attributes are namespaced with a "_." prefix.
        s.typeMapping = _.object _.map s.types, (item) ->
            if item.isStructAttr
                ["_." + item.value, item]
            else
                [item.value, item]
    s.$on "corpuschooserchange", onCorpusChange
    onCorpusChange(null, settings.corpusListing.selected)
    # Remove an OR alternative; drop the AND row when it was the last one.
    s.removeOr = (token, and_array, i) ->
        if and_array.length > 1
            and_array.splice(i, 1)
        else if token.and_block.length > 1
            token.and_block.splice (_.indexOf token.and_block, and_array), 1
    s.addAnd = (token) ->
        token.and_block.push s.addOr([])
    # Toggle a boundary flag ("lbound"/"rbound") on the token.
    toggleBound = (token, bnd) ->
        unless token.bound?[bnd]
            boundObj = {}
            boundObj[bnd] = true
            token.bound = _.extend (token.bound or {}), boundObj
        else
            delete token.bound?[bnd]
    s.toggleStart = (token) ->
        toggleBound(token, "lbound")
    s.toggleEnd = (token) ->
        toggleBound(token, "rbound")
    s.toggleRepeat = (token) ->
        unless token.repeat
            token.repeat = [1,1]
        else
            delete token.repeat
    # The token's CQP without the surrounding brackets.
    s.getTokenCqp = ->
        if not s.token.cqp
            return ""
        s.token.cqp.match(/\[(.*)]/)[1]
    s.onInsertMousedown = (event) ->
        event.stopPropagation()
# Directive controller for the advanced (raw CQP) search tab.
# Restores the CQP expression from the URL, mirrors the simple-search CQP for
# display, and submits the raw expression as "search=cqp|<expr>".
korpApp.directive "advancedSearch", () ->
    # FIX: the "popover_submit" handler reads $rootScope.extendedCQP, but
    # $rootScope was never injected here, so saving a search from this tab
    # threw a ReferenceError. $rootScope is now added to the (implicitly
    # annotated) injection list, matching the "ExtendedSearch" controller.
    controller : ($scope, compareSearches, $location, $timeout, $rootScope) ->
        s = $scope
        expr = ""
        # Restore the expression from "search=cqp|<expr>" (the expression
        # itself may contain "|", hence the re-join).
        if $location.search().search
            [type, expr...] = $location.search().search?.split("|")
            expr = expr.join("|")
        if type == "cqp"
            $scope.cqp = expr or "[]"
        else
            $scope.cqp = "[]"
        # Show the within selection list unless settings.advanced_search_within
        # is false.
        s.showWithin = if settings.advanced_search_within?
            settings.advanced_search_within
        else
            true
        s.within = if s.showWithin
            $location.search().within or "sentence"
        else
            "sentence"
        # Mirror the simple search's CQP for display on this tab.
        $scope.$watch () ->
            simpleSearch?.getCQP()
        , (val) ->
            $scope.simpleCQP = val
        # Save the current advanced query for search comparison.
        $scope.$on "popover_submit", (event, name) ->
            compareSearches.saveSearch {
                label : name or $rootScope.extendedCQP
                cqp : $scope.cqp
                corpora : settings.corpusListing.getSelectedCorpora()
            }
        $scope.$on "btn_submit", () ->
            c.log "advanced cqp", $scope.cqp
            # Clear first, then set inside $timeout so the change always
            # registers as a new search.
            $location.search "search", null
            $location.search "page", null
            $location.search "within", null
            $timeout( () ->
                # Only write "within" when it differs from the defaults.
                within = s.within unless s.within in _.keys settings.defaultWithin
                $location.search("within", within or null)
                $location.search "search", "cqp|" + $scope.cqp
            , 0)
        if s.showWithin
            # getWithins is inherited from the parent SearchCtrl scope.
            s.withins = []
            s.$on "corpuschooserchange", () ->
                s.withins = s.getWithins()
# Angular filter "mapper": pipe a value through a caller-supplied function,
# e.g. {{value | mapper:someFn}}.
korpApp.filter "mapper", ->
    (value, fn) -> fn(value)
# Directive providing the controller for the "compare searches" tab:
# tracks the two saved searches being compared and issues compare requests.
korpApp.directive "compareSearchCtrl", () ->
    controller: ($scope, utils, $location, backend, $rootScope, compareSearches) ->
        scope = $scope
        scope.valfilter = utils.valfilter
        scope.savedSearches = compareSearches.savedSearches
        # Re-pick the two searches whenever the saved list changes and
        # recompute the attributes available for reduction.
        scope.$watch "savedSearches.length", () ->
            scope.cmp1 = compareSearches.savedSearches[0]
            scope.cmp2 = compareSearches.savedSearches[1]
            return unless scope.cmp1 and scope.cmp2
            corpora = [].concat scope.cmp1.corpora, scope.cmp2.corpora
            listing = settings.corpusListing.subsetFactory(_.uniq corpora)
            groups = listing.getAttributeGroups()
            scope.currentAttrs = (attr for attr in groups when not attr.hideCompare)
        scope.reduce = 'word'
        # Kick off a compare request and register its result tab.
        scope.sendCompare = () ->
            request = backend.requestCompare(scope.cmp1, scope.cmp2, [scope.reduce])
            $rootScope.compareTabs.push request
        scope.deleteCompares = () ->
            compareSearches.flush()
# Angular filter "loc": localized string for a translation key in the given
# language. ($rootScope is injected but unused in the body — presumably kept
# intentionally; TODO confirm before removing.)
korpApp.filter "loc", ($rootScope) ->
    (translationKey, lang) ->
        util.getLocaleString(translationKey, lang)
| true | korpApp = angular.module("korpApp")
# Top-level search-area controller: tab visibility, word-picture / map /
# name-classification toggles (two-way synced with URL parameters), and the
# statistics reduce-attribute selection. Uses explicit DI annotations.
window.SearchCtrl = ["$scope", "$location", "utils", "searches", ( ($scope, $location, utils, searches) ->
    $scope.visibleTabs = [true, true, true, true]
    $scope.extendedTmpl = "views/extended_tmpl.html"
    # for parallel mode
    searches.langDef.resolve()
    $scope.isCompareSelected = false
    $scope.$watch( (() -> $location.search().search_tab),
        (val) ->
            # NOTE(review): CoffeeScript `==` compiles to `===`; if search_tab
            # is the string "3" from the URL this never matches — verify.
            $scope.isCompareSelected = val == 3
    )
    # Each toggle is watched both ways: URL -> scope and scope -> URL
    # (falsy values remove the parameter).
    $scope.$watch (() -> $location.search().word_pic), (val) ->
        $scope.word_pic = Boolean(val)
    $scope.$watch "word_pic", (val) ->
        $location.search("word_pic", Boolean(val) or null)
    $scope.$watch (() -> $location.search().show_map), (val) ->
        $scope.show_map = Boolean(val)
    $scope.$watch "show_map", (val) -> $location.search("show_map", Boolean(val) or null)
    $scope.$watch (() -> $location.search().show_name_classif), (val) ->
        $scope.show_name_classif = Boolean(val)
    $scope.$watch "show_name_classif", (val) ->
        $location.search("show_name_classif", Boolean(val) or null)
    $scope.settings = settings
    $scope.showStats = () ->
        return settings.statistics != false
    # $scope.getWithins was copied from "ExtendedSearch", so that it
    # can also be used in "AdvancedCtrl"
    $scope.getWithins = () ->
        union = settings.corpusListing.getWithinKeys()
        output = _.map union, (item) -> {value : item}
        return output
    # Default the statistics reduce attribute to "word".
    unless $location.search().stats_reduce
        $location.search 'stats_reduce', ("word")
    # Recompute the selectable statistics attributes on corpus change.
    $scope.corpusChangeListener = $scope.$on "corpuschooserchange", (event, selected) ->
        c.log "SearchCtrl corpuschooserchange"
        $scope.noCorporaSelected = not selected.length
        allAttrs = settings.corpusListing.getStatsAttributeGroups()
        $scope.statCurrentAttrs = _.filter allAttrs, (item) -> not item.hideStatistics
        $scope.statSelectedAttrs = $location.search().stats_reduce.split ','
        insensitiveAttrs = $location.search().stats_reduce_insensitive
        if insensitiveAttrs
            $scope.statInsensitiveAttrs = insensitiveAttrs.split ','
    # Deep-watch (third arg true) the attribute selections back into the URL.
    $scope.$watch 'statSelectedAttrs', ((selected) ->
        if selected and selected.length > 0
            $location.search 'stats_reduce', ($scope.statSelectedAttrs.join ',')
    ), true
    $scope.$watch 'statInsensitiveAttrs', ((insensitive) ->
        if insensitive and insensitive.length > 0
            $location.search 'stats_reduce_insensitive', ($scope.statInsensitiveAttrs.join ',')
        else if insensitive
            # Empty (but defined) selection: drop the URL parameter.
            $location.search 'stats_reduce_insensitive', null
    ), true
)]
korpApp.controller "SearchCtrl", window.SearchCtrl
korpApp.controller "SimpleCtrl", ($scope, utils, $location, backend, $rootScope, searches, compareSearches, $uibModal) ->
s = $scope
prequeries_enabled = settings.simple_search_restrict_context
if prequeries_enabled
# Simple prequery, prequery within and prequery attribute
s.simple_prequery = ""
s.prequery_within_opts = [
"sentence"
"paragraph"
"text"
]
s.prequery_within_default = s.prequery_within_opts[0]
s.prequery_within = s.prequery_within_opts[0]
s.prequery_attr_opts = [
# Word attribute name, localization key
["lemma", "baseforms"]
["word", "wordforms"]
]
# s.prequery_attr = s.prequery_attr_opts[0][0]
s.prequery_attr = "lemma|word"
# Set the value of simple_prequery based on the URL parameter
# simple_prequery
s.$watch( (() -> $location.search().simple_prequery),
(val) -> s.simple_prequery = val
)
s.$on "popover_submit", (event, name) ->
cqp = s.instance.getCQP()
compareSearches.saveSearch {
label : name or cqp
cqp : cqp
corpora : settings.corpusListing.getSelectedCorpora()
}
s.stringifyRelatedHeader = (wd) ->
wd.replace(/_/g, " ")
s.stringifyRelated = (wd) ->
util.saldoToString(wd)
modalInstance = null
s.clickRelated = (wd) ->
modalInstance?.close()
c.log "modalInstance", modalInstance
$scope.$root.searchtabs()[1].select()
s.$root.$broadcast "extended_set", "[saldo contains '#{wd}']"
$location.search("search", "cqp|" + "[saldo contains '#{wd}']")
s.relatedDefault = 3
s.clickX = () ->
modalInstance.dismiss()
s.showAllRelated = () ->
modalInstance = $uibModal.open(
template: """
<div class="modal-header">
<h3 class="modal-title">{{'similar_header' | loc:lang}} (SWE-FN)</h3>
<span ng-click="clickX()" class="close-x">×</span>
</div>
<div class="modal-body">
<div ng-repeat="obj in relatedObj" class="col"><a target="_blank" ng-href="http://spraakbanken.gu.se/karp/#?lexicon=swefn&search=extended||and|sense|equals|swefn--{{obj.label}}" class="header">{{stringifyRelatedHeader(obj.label)}}</a>
<div class="list_wrapper">
<ul>
<li ng-repeat="wd in obj.words"> <a ng-click="clickRelated(wd)" class="link">{{stringifyRelated(wd) + " "}}</a></li>
</ul>
</div>
</div>
</div>
"""
scope : s
size : 'lg'
windowClass : "related"
)
s.searches = searches
s.$watch "searches.activeSearch", (search) =>
c.log "search", search
unless search then return
page = Number($location.search().page) or 0
s.relatedObj = null
if prequeries_enabled
# Set URL parameters based on simple prequery variables
if s.simple_prequery
$location.search("simple_prequery", s.simple_prequery)
if s.prequery_within != s.prequery_within_default
$location.search("prequery_within", s.prequery_within)
# $location.search("prequery_attr", s.prequery_attr)
if search.type == "word"
$("#simple_text input").val(search.val) # Necessary for displaying the wordform if it came from the URL
s.simple_text = search.val
cqp = simpleSearch.getCQP(search.val)
c.log "simple search cqp", cqp
if search.pageOnly
searches.kwicRequest(cqp, true)
return
else
searches.kwicSearch(cqp)
if settings.wordpicture != false and s.word_pic and " " not in search.val
lemgramResults.makeRequest(search.val, "word")
else
lemgramResults?.resetView()
else if search.type == "lemgram"
s.placeholder = search.val
s.simple_text = ""
s.model = search.val
cqp = simpleSearch.getCQP()
# Show related words if show_related_words is undefined or
# true (PI:NAME:<NAME>END_PI 2016-01-15)
if settings.show_related_words != false
backend.relatedWordSearch(search.val).then (data) ->
s.relatedObj = data
if s.word_pic
searches.lemgramSearch(search.val, s.prefix, s.suffix, search.pageOnly)
else
# Add possible prequery CQPs
# TODO: Check if the prequeries are always added
# before coming here, in which case this code would
# not be needed.
if prequeries_enabled and s.simple_prequery and
cqp.indexOf("||") < 0
# c.log("lemgram simple_prequery", cqp, s.simple_prequery)
cqps = simpleSearch.makePrequeryCQPs(s.simple_prequery)
cqps.push(cqp)
# c.log("cqps", cqps)
cqp = util.combineCQPs(cqps)
# c.log("searches.activeSearch prequeries cqp", cqp)
searches.kwicSearch(cqp, search.pageOnly)
else
s.placeholder = null
s.simple_text = ""
lemgramResults?.resetView()
s.lemgramToString = (lemgram) ->
unless lemgram then return
util.lemgramToString(lemgram).replace(/<.*?>/g, "")
utils.setupHash s, [
key : "prefix"
,
key : "suffix"
,
key : "isCaseInsensitive"
]
if prequeries_enabled
utils.setupHash s, [
key : "simple_prequery"
default : ""
,
key : "prequery_within"
default : s.prequery_within_default
# ,
# key : "prequery_attr"
]
$scope.$on "btn_submit", () ->
$location.search "within", null
korpApp.controller "ExtendedSearch", ($scope, utils, $location, backend, $rootScope, searches, compareSearches, $timeout) ->
    # Controller for the "extended" search tab: keeps the token-built CQP
    # expression in sync with the URL, saves searches for comparison, and
    # republishes the expanded CQP whenever the query or the corpus
    # selection changes.
    s = $scope
    # Save the current extended search under the given label, falling back
    # to the expanded CQP string itself when no label was supplied.
    s.$on "popover_submit", (event, name) ->
        compareSearches.saveSearch {
            label : name or $rootScope.extendedCQP
            cqp : $rootScope.extendedCQP
            corpora : settings.corpusListing.getSelectedCorpora()
        }
    s.searches = searches
    # Submit: clear the search/page URL params first, then (in the next
    # digest cycle) set them again to trigger a fresh search.
    s.$on "btn_submit", () ->
        c.log "extended submit"
        $location.search("search", null)
        $location.search("page", null)
        $timeout( () ->
            $location.search("search", "cqp")
            # `within` stays undefined when it is one of the default units,
            # which removes the parameter from the URL.
            within = s.within if s.within not in _.keys settings.defaultWithin
            $location.search "within", within
        , 0)
    # Other controllers (e.g. the simple search) can push a CQP expression
    # into this tab via the "extended_set" event.
    s.$on "extended_set", ($event, val) ->
        c.log "extended_set", val
        s.cqp = val
    # Restore the CQP expression from the URL on load.
    if $location.search().cqp
        s.cqp = $location.search().cqp
    # Expand the operators in `val` and publish the result on the root
    # scope (shown in the advanced search tab); parse errors are only
    # logged, leaving the previous value in place.
    s.setExtendedCQP = (val) ->
        # c.log "setExtendedCQP", val
        try
            $rootScope.extendedCQP = CQP.expandOperators(val)
            # c.log "cqp expanded ops", $rootScope.extendedCQP
            # Add the possible ignorable tokens between tokens
            # (regardless of the current search tab). This makes the
            # modified version to be shown in the advanced search as
            # the extended search expression.
            # (PI:NAME:<NAME>END_PI 2015-09-25, 2016-03-18)
            $rootScope.extendedCQP =
                settings.corpusListing.addIgnoreBetweenTokensCQP(
                    $rootScope.extendedCQP, true)
            # c.log "cqp added ignore", $rootScope.extendedCQP
        catch e
            c.log "cqp parse error:", e
    # Keep the URL and the published expanded CQP in sync with edits.
    s.$watch "cqp", (val) ->
        c.log "cqp change", val
        unless val then return
        s.setExtendedCQP val
        $location.search("cqp", val)
    s.withins = []
    # Collect the "within" units available in the selected corpora.
    s.getWithins = () ->
        union = settings.corpusListing.getWithinKeys()
        output = _.map union, (item) -> {value : item}
        return output
    s.$on "corpuschooserchange", () ->
        s.withins = s.getWithins()
        s.within = s.withins[0]?.value
        # Update the ignorable tokens between tokens and set the CQP
        # expression shown in the advanced search for the extended
        # search.
        settings.corpusListing.updateIgnoreBetweenTokensCQP()
        s.setExtendedCQP $location.search().cqp
korpApp.controller "ExtendedToken", ($scope, utils, $location) ->
    # Controller for a single token box in the extended search: manages the
    # available attribute types, the operator options per type and the
    # per-token flags (sentence boundaries and repetition).
    s = $scope
    cqp = '[]'
    s.valfilter = utils.valfilter
    # Reset an or-row to the first operator of its type and an empty value.
    s.setDefault = (or_obj) ->
        # assign the first value from the opts
        opts = s.getOpts(or_obj.type)
        unless opts
            or_obj.op = "is"
        else
            or_obj.op = _.values(opts)[0][1]
        or_obj.val = ""
    # returning new array each time kills angular, hence the memoizing
    s.getOpts = _.memoize (type) ->
        unless type of (s.typeMapping or {}) then return
        confObj = s.typeMapping?[type]
        unless confObj
            c.log "confObj missing", type, s.typeMapping
            return
        confObj = _.extend {}, (confObj?.opts or settings.defaultOptions)
        if confObj.type == "set"
            confObj.is = "contains"
        return _.pairs confObj
    # Rebuild the type list and the value -> config mapping whenever the
    # corpus selection changes; struct attributes get a "_." prefix.
    onCorpusChange = (event, selected) ->
        # TODO: respect the setting 'word_attribute_selector' and similar
        unless selected?.length then return
        lang = s.$parent.$parent?.l?.lang
        allAttrs = settings.corpusListing.getAttributeGroups(lang)
        s.types = _.filter allAttrs, (item) -> not item.hideExtended
        s.typeMapping = _.object _.map s.types, (item) ->
            if item.isStructAttr
                ["_." + item.value, item]
            else
                [item.value, item]
    s.$on "corpuschooserchange", onCorpusChange
    onCorpusChange(null, settings.corpusListing.selected)
    # Remove an or-row; when it is the last row of its and-group, remove
    # the whole group instead (never the last remaining group).
    s.removeOr = (token, and_array, i) ->
        if and_array.length > 1
            and_array.splice(i, 1)
        else if token.and_block.length > 1
            token.and_block.splice (_.indexOf token.and_block, and_array), 1
    s.addAnd = (token) ->
        token.and_block.push s.addOr([])
    # Toggle a boundary flag ("lbound"/"rbound") on the token.
    toggleBound = (token, bnd) ->
        unless token.bound?[bnd]
            boundObj = {}
            boundObj[bnd] = true
            token.bound = _.extend (token.bound or {}), boundObj
        else
            delete token.bound?[bnd]
    s.toggleStart = (token) ->
        toggleBound(token, "lbound")
    s.toggleEnd = (token) ->
        toggleBound(token, "rbound")
    s.toggleRepeat = (token) ->
        unless token.repeat
            token.repeat = [1,1]
        else
            delete token.repeat
    # Return the token's CQP expression without the surrounding brackets.
    # BUG FIX: a CQP string without "[...]" made .match() return null and
    # the old `[1]` access threw a TypeError; soak the access and fall
    # back to "" instead.
    s.getTokenCqp = ->
        if not s.token.cqp
            return ""
        s.token.cqp.match(/\[(.*)]/)?[1] or ""
    s.onInsertMousedown = (event) ->
        event.stopPropagation()
korpApp.directive "advancedSearch", () ->
    controller : ($scope, compareSearches, $location, $timeout) ->
        # Controller for the "advanced" (raw CQP) search tab.
        s = $scope
        expr = ""
        # Restore the CQP expression from the "search" URL parameter,
        # which has the form "cqp|<expression>".
        if $location.search().search
            [type, expr...] = $location.search().search?.split("|")
            expr = expr.join("|")
        if type == "cqp"
            $scope.cqp = expr or "[]"
        else
            $scope.cqp = "[]"
        # Show the within selection list unless settings.advanced_search_within
        # is false. (JPI:NAME:<NAME>END_PI 2015-09-24)
        s.showWithin = if settings.advanced_search_within?
            settings.advanced_search_within
        else
            true
        s.within = if s.showWithin
            $location.search().within or "sentence"
        else
            "sentence"
        # Mirror the CQP produced by the simple search so it can be shown
        # next to the manually entered expression.
        $scope.$watch () ->
            simpleSearch?.getCQP()
        , (val) ->
            $scope.simpleCQP = val
        $scope.$on "popover_submit", (event, name) ->
            # BUG FIX: $rootScope is not injected into this controller, so
            # the old fallback `$rootScope.extendedCQP` raised a
            # ReferenceError when no name was given. Fall back to the CQP
            # being saved instead, matching the `cqp` field below.
            compareSearches.saveSearch {
                label : name or $scope.cqp
                cqp : $scope.cqp
                corpora : settings.corpusListing.getSelectedCorpora()
            }
        # Submit: clear the URL params, then (next digest) re-set them to
        # trigger a fresh CQP search.
        $scope.$on "btn_submit", () ->
            c.log "advanced cqp", $scope.cqp
            $location.search "search", null
            $location.search "page", null
            $location.search "within", null
            $timeout( () ->
                # Copied from "ExtendedSearch" (JPI:NAME:<NAME>END_PI Niemi 2015-09-24)
                within = s.within unless s.within in _.keys settings.defaultWithin
                $location.search("within", within or null)
                $location.search "search", "cqp|" + $scope.cqp
            , 0)
        if s.showWithin
            # Copied from "ExtendedSearch" (JPI:NAME:<NAME>END_PIki Niemi 2015-09-24)
            s.withins = []
            s.$on "corpuschooserchange", () ->
                s.withins = s.getWithins()
# Filter that applies an arbitrary function to its input:
# usage in templates: {{value | mapper:someFn}}.
korpApp.filter "mapper", -> (item, f) -> f(item)
korpApp.directive "compareSearchCtrl", () ->
    controller: ($scope, utils, $location, backend, $rootScope, compareSearches) ->
        # Controller for the search-comparison view: lets the user pick two
        # saved searches and request a comparison reduced over one attribute.
        s = $scope
        s.valfilter = utils.valfilter
        s.savedSearches = compareSearches.savedSearches
        # Preselect the first two saved searches and recompute the
        # attribute groups shared by the corpora of both searches.
        s.$watch "savedSearches.length", () ->
            s.cmp1 = compareSearches.savedSearches[0]
            s.cmp2 = compareSearches.savedSearches[1]
            unless s.cmp1 and s.cmp2 then return
            listing = settings.corpusListing.subsetFactory(_.uniq ([].concat s.cmp1.corpora, s.cmp2.corpora))
            allAttrs = listing.getAttributeGroups()
            s.currentAttrs = _.filter allAttrs, (item) -> not item.hideCompare
        # Attribute to reduce the comparison over; "word" by default.
        s.reduce = 'word'
        # Fire the comparison request and open a result tab for it.
        s.sendCompare = () ->
            $rootScope.compareTabs.push backend.requestCompare(s.cmp1, s.cmp2, [s.reduce])
        s.deleteCompares = () ->
            compareSearches.flush()
# Localization filter: {{key | loc:lang}} resolves the translation key
# against the locale table for the given language.
korpApp.filter "loc", ($rootScope) ->
    (translationKey, lang) -> util.getLocaleString(translationKey, lang)
|
[
{
"context": " use when updating a channel header\n#\n# Author\n# Christopher De Cairos\n\nAPI_KEY = process.env.WHOS_ON_CALL_API_KEY\nWEBHO",
"end": 642,
"score": 0.9984822273254395,
"start": 621,
"tag": "NAME",
"value": "Christopher De Cairos"
}
] | src/whos-on-call.coffee | cadecairos/hubot-whos-on-call-mattermost | 6 | # Description:
# Change Channel Header based on who is on-call on pagerduty
#
# Configuration
# WHOS_ON_CALL_API_KEY
# WHOS_ON_CALL_WEBHOOK_SECRET
# WHOS_ON_CALL_SCHEDULE_ID
# WHOS_ON_CALL_HEADER_TEMPLATE
#
# Notes:
# WHOS_ON_CALL_API_KEY: Pagerduty API key
# WHOS_ON_CALL_WEBHOOK_SECRET: Random string for validating webhook request authenticity
# WHOS_ON_CALL_SCHEDULE_ID: Pagerduty Schedule ID string - fetch this using the Pagerduty API, or try scraping it off the site using developer tools
# WHOS_ON_CALL_HEADER_TEMPLATE: Default template text to use when updating a channel header
#
# Author
# Christopher De Cairos
API_KEY = process.env.WHOS_ON_CALL_API_KEY
WEBHOOK_SECRET = process.env.WHOS_ON_CALL_WEBHOOK_SECRET
SCHEDULE_ID = process.env.WHOS_ON_CALL_SCHEDULE_ID
DEFAULT_HEADER_TEMPLATE = process.env.WHOS_ON_CALL_HEADER_TEMPLATE
PAGERDUTY_SCHEDULE_REQUEST_URI = "https://api.pagerduty.com/oncalls?schedule_ids[]=#{SCHEDULE_ID}"
module.exports = (robot) ->
  # Webhook endpoint: POST /webhook/on-call/<channel> with a JSON body
  # { secret, header? }. Looks up the engineer currently on call on the
  # configured Pagerduty schedule and updates the channel header when
  # that engineer has changed since the last check.
  robot.router.post '/webhook/on-call/:channel', (req, res) ->
    # Respond immediately; everything below is fire-and-forget.
    res.end ""
    unless req.body.secret == WEBHOOK_SECRET
      return robot.logger.error "Invalid hook received"
    # channel name -> last known on-call engineer, persisted in the brain.
    onCallMap = robot.brain.get 'whos-on-call'
    unless onCallMap? then onCallMap = {}
    channel = req.params.channel
    # Per-request header template overrides the configured default.
    headerTemplate = if req.body.header? then req.body.header else DEFAULT_HEADER_TEMPLATE
    robot.http(PAGERDUTY_SCHEDULE_REQUEST_URI)
      .header("Authorization", "Token token=#{API_KEY}")
      .get() (err, res, body) ->
        if err?
          return robot.logger.error err
        try
          json = JSON.parse body
        catch error
          return robot.logger.error error
        onCallEngineer = json.oncalls[0].user.summary
        # Nothing to do when the on-call engineer has not changed.
        return unless onCallMap[channel] != onCallEngineer
        onCallMap[channel] = onCallEngineer
        robot.brain.set "whos-on-call", onCallMap
        robot.logger.info "Updating on call engineer to: #{onCallEngineer}"
        # "$ENGINEER" in the template is replaced with the engineer's name.
        header = headerTemplate.replace("$ENGINEER", onCallEngineer)
        robot.adapter.changeHeader(channel, header)
| 131093 | # Description:
# Change Channel Header based on who is on-call on pagerduty
#
# Configuration
# WHOS_ON_CALL_API_KEY
# WHOS_ON_CALL_WEBHOOK_SECRET
# WHOS_ON_CALL_SCHEDULE_ID
# WHOS_ON_CALL_HEADER_TEMPLATE
#
# Notes:
# WHOS_ON_CALL_API_KEY: Pagerduty API key
# WHOS_ON_CALL_WEBHOOK_SECRET: Random string for validating webhook request authenticity
# WHOS_ON_CALL_SCHEDULE_ID: Pagerduty Schedule ID string - fetch this using the Pagerduty API, or try scraping it off the site using developer tools
# WHOS_ON_CALL_HEADER_TEMPLATE: Default template text to use when updating a channel header
#
# Author
# <NAME>
API_KEY = process.env.WHOS_ON_CALL_API_KEY
WEBHOOK_SECRET = process.env.WHOS_ON_CALL_WEBHOOK_SECRET
SCHEDULE_ID = process.env.WHOS_ON_CALL_SCHEDULE_ID
DEFAULT_HEADER_TEMPLATE = process.env.WHOS_ON_CALL_HEADER_TEMPLATE
PAGERDUTY_SCHEDULE_REQUEST_URI = "https://api.pagerduty.com/oncalls?schedule_ids[]=#{SCHEDULE_ID}"
module.exports = (robot) ->
robot.router.post '/webhook/on-call/:channel', (req, res) ->
res.end ""
unless req.body.secret == WEBHOOK_SECRET
return robot.logger.error "Invalid hook received"
onCallMap = robot.brain.get 'whos-on-call'
unless onCallMap? then onCallMap = {}
channel = req.params.channel
headerTemplate = if req.body.header? then req.body.header else DEFAULT_HEADER_TEMPLATE
robot.http(PAGERDUTY_SCHEDULE_REQUEST_URI)
.header("Authorization", "Token token=#{API_KEY}")
.get() (err, res, body) ->
if err?
return robot.logger.error err
try
json = JSON.parse body
catch error
return robot.logger.error error
onCallEngineer = json.oncalls[0].user.summary
return unless onCallMap[channel] != onCallEngineer
onCallMap[channel] = onCallEngineer
robot.brain.set "whos-on-call", onCallMap
robot.logger.info "Updating on call engineer to: #{onCallEngineer}"
header = headerTemplate.replace("$ENGINEER", onCallEngineer)
robot.adapter.changeHeader(channel, header)
| true | # Description:
# Change Channel Header based on who is on-call on pagerduty
#
# Configuration
# WHOS_ON_CALL_API_KEY
# WHOS_ON_CALL_WEBHOOK_SECRET
# WHOS_ON_CALL_SCHEDULE_ID
# WHOS_ON_CALL_HEADER_TEMPLATE
#
# Notes:
# WHOS_ON_CALL_API_KEY: Pagerduty API key
# WHOS_ON_CALL_WEBHOOK_SECRET: Random string for validating webhook request authenticity
# WHOS_ON_CALL_SCHEDULE_ID: Pagerduty Schedule ID string - fetch this using the Pagerduty API, or try scraping it off the site using developer tools
# WHOS_ON_CALL_HEADER_TEMPLATE: Default template text to use when updating a channel header
#
# Author
# PI:NAME:<NAME>END_PI
API_KEY = process.env.WHOS_ON_CALL_API_KEY
WEBHOOK_SECRET = process.env.WHOS_ON_CALL_WEBHOOK_SECRET
SCHEDULE_ID = process.env.WHOS_ON_CALL_SCHEDULE_ID
DEFAULT_HEADER_TEMPLATE = process.env.WHOS_ON_CALL_HEADER_TEMPLATE
PAGERDUTY_SCHEDULE_REQUEST_URI = "https://api.pagerduty.com/oncalls?schedule_ids[]=#{SCHEDULE_ID}"
module.exports = (robot) ->
robot.router.post '/webhook/on-call/:channel', (req, res) ->
res.end ""
unless req.body.secret == WEBHOOK_SECRET
return robot.logger.error "Invalid hook received"
onCallMap = robot.brain.get 'whos-on-call'
unless onCallMap? then onCallMap = {}
channel = req.params.channel
headerTemplate = if req.body.header? then req.body.header else DEFAULT_HEADER_TEMPLATE
robot.http(PAGERDUTY_SCHEDULE_REQUEST_URI)
.header("Authorization", "Token token=#{API_KEY}")
.get() (err, res, body) ->
if err?
return robot.logger.error err
try
json = JSON.parse body
catch error
return robot.logger.error error
onCallEngineer = json.oncalls[0].user.summary
return unless onCallMap[channel] != onCallEngineer
onCallMap[channel] = onCallEngineer
robot.brain.set "whos-on-call", onCallMap
robot.logger.info "Updating on call engineer to: #{onCallEngineer}"
header = headerTemplate.replace("$ENGINEER", onCallEngineer)
robot.adapter.changeHeader(channel, header)
|
[
{
"context": "# **Author:** Peter Urbak<br/>\n# **Version:** 2013-03-10\n\nroot = exports ? ",
"end": 25,
"score": 0.9998564720153809,
"start": 14,
"tag": "NAME",
"value": "Peter Urbak"
}
] | client_src/tableModel.coffee | dragonwasrobot/gesture-recognition | 2 | # **Author:** Peter Urbak<br/>
# **Version:** 2013-03-10
root = exports ? window
# The `TableModel` encapsulates the state of the multi-touch table and the
# objects on it.
class App.TableModel

  # ### Constructors

  # Constructs a `TableModel`.
  #
  # - **surface:** The <div> tag on which to append new objects.
  # - **stylesheet:** A JSON object containing the styling properties of
  #   the model.
  constructor: (@surface, @stylesheet) ->
    @models = {} # maps sid -> ObjectModel

  # ### Methods

  # #### Model Manipulation - Position

  # Adds a new object to the screen unless one with the same sid exists.
  #
  # - **object:** The JSON object to be added to the model and screen.
  addObjectModel: (object) ->
    return if @models[object.sid]?
    @models[object.sid] = new App.ObjectModel(object, @surface, 90, 90)

  # Applies the position and rotation in `object` to its `ObjectModel`.
  #
  # - **object:** The JSON object containing the updates.
  updateObjectModel: (object) ->
    model = @models[object.sid]
    model.rotate(App.radiansToDegrees(object.angle))
    model.moveToPosition(object.x, object.y)

  # Removes an `ObjectModel` from both the model and the screen.
  #
  # - **object:** The object to be removed.
  removeObjectModel: (object) ->
    @models[object.sid].remove()
    delete @models[object.sid]

  # #### Model Manipulation - Color

  # Flips the selection state of an object.
  #
  # - **object:**
  selectDeselectObjectModel: (object) ->
    if @models[object.sid].isSelected()
      @deselectObjectModel(object)
    else
      @selectObjectModel(object)

  # Marks an object as selected and recolors it accordingly.
  #
  # - **object:**
  selectObjectModel: (object) ->
    model = @models[object.sid]
    model.setSelected true
    model.changeColor @stylesheet['objectSelectedColor']

  # Marks an object as deselected and restores its folded/unfolded color.
  #
  # - **object:**
  deselectObjectModel: (object) ->
    model = @models[object.sid]
    model.setSelected false
    color = if model.isUnfolded() then @stylesheet.objectUnfoldedColor else @stylesheet.objectFoldedColor
    model.changeColor color

  # Flips the folded state of an object.
  #
  # - **object:**
  foldUnfoldObjectModel: (object) ->
    if @models[object.sid].isUnfolded()
      @foldObjectModel(object)
    else
      @unfoldObjectModel(object)

  # Marks an object as unfolded and recolors it accordingly.
  #
  # - **object:**
  unfoldObjectModel: (object) ->
    model = @models[object.sid]
    model.setUnfolded true
    model.changeColor @stylesheet.objectUnfoldedColor

  # Marks an object as folded and recolors it accordingly.
  #
  # - **object:**
  foldObjectModel: (object) ->
    model = @models[object.sid]
    model.setUnfolded false
    model.changeColor @stylesheet.objectFoldedColor
| 195578 | # **Author:** <NAME><br/>
# **Version:** 2013-03-10
root = exports ? window
# The `TableModel` encapsulates the state of the multi-touch table and the
# objects on it.
class App.TableModel
# ### Constructors
# Constructs a `TableModel`.
#
# - **surface:** The <div> tag on which to append new object.
# - **stylesheet:** A JSON object containing the styling properties of the
# model.
constructor: (@surface, @stylesheet) ->
@models = {} # { sid , ObjectModel }
# ### Methods
# #### Model Manipulation - Position
# Adds a new object on the screen.
#
# - **object:** The JSON object to be added to the model and screen.
addObjectModel: (object) ->
if not @models[object.sid]?
objectModel = new App.ObjectModel(object, @surface, 90, 90)
@models[object.sid] = objectModel
# Updates an `ObjectModel` according to the changes in `object`.
#
# - **object:** The JSON object containing the updates of the `ObjectModel`.
updateObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.rotate(App.radiansToDegrees(object.angle))
objectModel.moveToPosition(object.x, object.y)
# Removes an `ObjectModel` from the `TableModel`.
#
# - **object:** The object to be removed from the model and screen.
removeObjectModel: (object) ->
@models[object.sid].remove()
delete @models[object.sid]
# #### Model Manipulation - Color
# Selects/Deselect an object.
#
# - **object:**
selectDeselectObjectModel: (object) ->
objectModel = @models[object.sid]
if objectModel.isSelected()
@deselectObjectModel(object)
else
@selectObjectModel(object)
# Selects an object.
#
# - **object:**
selectObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setSelected true
objectModel.changeColor @stylesheet['objectSelectedColor']
# Deselects an object.
#
# - **object:**
deselectObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setSelected false
if objectModel.isUnfolded()
objectModel.changeColor @stylesheet.objectUnfoldedColor
else
objectModel.changeColor @stylesheet.objectFoldedColor
# Fold/Unfold an object.
#
# - **object:**
foldUnfoldObjectModel: (object) ->
objectModel = @models[object.sid]
if objectModel.isUnfolded()
@foldObjectModel(object)
else
@unfoldObjectModel(object)
# Unfolds an object.
#
# - **object:**
unfoldObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setUnfolded true
objectModel.changeColor @stylesheet.objectUnfoldedColor
# Folds/Deselect an object.
#
# - **object:**
foldObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setUnfolded false
objectModel.changeColor @stylesheet.objectFoldedColor
| true | # **Author:** PI:NAME:<NAME>END_PI<br/>
# **Version:** 2013-03-10
root = exports ? window
# The `TableModel` encapsulates the state of the multi-touch table and the
# objects on it.
class App.TableModel
# ### Constructors
# Constructs a `TableModel`.
#
# - **surface:** The <div> tag on which to append new object.
# - **stylesheet:** A JSON object containing the styling properties of the
# model.
constructor: (@surface, @stylesheet) ->
@models = {} # { sid , ObjectModel }
# ### Methods
# #### Model Manipulation - Position
# Adds a new object on the screen.
#
# - **object:** The JSON object to be added to the model and screen.
addObjectModel: (object) ->
if not @models[object.sid]?
objectModel = new App.ObjectModel(object, @surface, 90, 90)
@models[object.sid] = objectModel
# Updates an `ObjectModel` according to the changes in `object`.
#
# - **object:** The JSON object containing the updates of the `ObjectModel`.
updateObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.rotate(App.radiansToDegrees(object.angle))
objectModel.moveToPosition(object.x, object.y)
# Removes an `ObjectModel` from the `TableModel`.
#
# - **object:** The object to be removed from the model and screen.
removeObjectModel: (object) ->
@models[object.sid].remove()
delete @models[object.sid]
# #### Model Manipulation - Color
# Selects/Deselect an object.
#
# - **object:**
selectDeselectObjectModel: (object) ->
objectModel = @models[object.sid]
if objectModel.isSelected()
@deselectObjectModel(object)
else
@selectObjectModel(object)
# Selects an object.
#
# - **object:**
selectObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setSelected true
objectModel.changeColor @stylesheet['objectSelectedColor']
# Deselects an object.
#
# - **object:**
deselectObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setSelected false
if objectModel.isUnfolded()
objectModel.changeColor @stylesheet.objectUnfoldedColor
else
objectModel.changeColor @stylesheet.objectFoldedColor
# Fold/Unfold an object.
#
# - **object:**
foldUnfoldObjectModel: (object) ->
objectModel = @models[object.sid]
if objectModel.isUnfolded()
@foldObjectModel(object)
else
@unfoldObjectModel(object)
# Unfolds an object.
#
# - **object:**
unfoldObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setUnfolded true
objectModel.changeColor @stylesheet.objectUnfoldedColor
# Folds/Deselect an object.
#
# - **object:**
foldObjectModel: (object) ->
objectModel = @models[object.sid]
objectModel.setUnfolded false
objectModel.changeColor @stylesheet.objectFoldedColor
|
[
{
"context": "# \n# Ported to Coffeescript by Donnie Flood (donnie@floodfx.com)\n#\n# A JavaScript implementat",
"end": 43,
"score": 0.9998889565467834,
"start": 31,
"tag": "NAME",
"value": "Donnie Flood"
},
{
"context": "# \n# Ported to Coffeescript by Donnie Flood (donnie@floodfx.c... | src/coffeescript/src/common/sha1_utils.coffee | floodfx/chrome-sdb | 0 | #
# Ported to Coffeescript by Donnie Flood (donnie@floodfx.com)
#
# A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined
# in FIPS 180-1
# Version 2.2-alpha Copyright Paul Johnston 2000 - 2002.
# Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
# Distributed under the BSD License
# See http://pajhome.org.uk/crypt/md5 for details.
#
#
# These are the functions you'll usually want to call
# They take string arguments and return either hex or base-64 encoded strings
#
SHA1 = (()->
pub = {}
#
# Calculate the SHA1 of a raw string
#
rstr_sha1 = (s)->
return binb2rstr(binb_sha1(rstr2binb(s), s.length * 8))
#
# Calculate the HMAC-SHA1 of a key and some data (raw strings)
#
rstr_hmac_sha1 = (key, data)->
bkey = rstr2binb(key)
if(bkey.length > 16)
bkey = binb_sha1(bkey, key.length * 8)
ipad = Array(16)
opad = Array(16)
for i in [0...16]
ipad[i] = bkey[i] ^ 0x36363636
opad[i] = bkey[i] ^ 0x5C5C5C5C
hash = binb_sha1(ipad.concat(rstr2binb(data)), 512 + data.length * 8)
return binb2rstr(binb_sha1(opad.concat(hash), 512 + 160))
#
# Convert a raw string to a hex string
#
rstr2hex = (input, lower_hexcase=true)->
hex_tab = if(lower_hexcase) then "0123456789abcdef" else "0123456789ABCDEF"
output = ""
for i in [0...input.length]
x = input.charCodeAt(i)
output += hex_tab.charAt((x >>> 4) & 0x0F) +
hex_tab.charAt( x & 0x0F)
return output
#
# Convert a raw string to a base-64 string
#
rstr2b64 = (input, b64pad="=")->
tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
output = ""
len = input.length;
for i in [0...len] by 3
triplet = (input.charCodeAt(i) << 16) |
(if(i + 1 < len) then input.charCodeAt(i+1) << 8 else 0) |
(if(i + 2 < len) then input.charCodeAt(i+2) else 0)
for j in [0...4]
if(i * 8 + j * 6 > input.length * 8)
output += b64pad
else
output += tab.charAt((triplet >>> 6*(3-j)) & 0x3F)
return output
#
# Convert a raw string to an arbitrary string encoding
#
rstr2any = (input, encoding)->
divisor = encoding.length
remainders = Array()
# Convert to an array of 16-bit big-endian values, forming the dividend
dividend = Array(Math.ceil(input.length / 2));
for i in [0...dividend.length]
dividend[i] = (input.charCodeAt(i * 2) << 8) | input.charCodeAt(i * 2 + 1)
#
# Repeatedly perform a long division. The binary array forms the dividend,
# the length of the encoding is the divisor. Once computed, the quotient
# forms the dividend for the next step. We stop when the dividend is zero.
# All remainders are stored for later use.
#
while(dividend.length > 0)
quotient = Array()
x = 0
for i in [0...dividend.length]
x = (x << 16) + dividend[i]
q = Math.floor(x / divisor)
x -= q * divisor
if(quotient.length > 0 || q > 0)
quotient[quotient.length] = q
remainders[remainders.length] = x
dividend = quotient
# Convert the remainders to the output string
output = ""
for i in [remainders.length - 1..0]
output += encoding.charAt(remainders[i])
# Append leading zero equivalents
full_length = Math.ceil(input.length * 8 / (Math.log(encoding.length) / Math.log(2)))
for i in [output.length..full_length]
output = encoding[0] + output
return output
#
# Encode a string as utf-8.
# For efficiency, this assumes the input is valid utf-16.
#
str2rstr_utf8 = (input)->
output = ""
for i in [0...input.length]
# Decode utf-16 surrogate pairs
x = input.charCodeAt(i)
y = if( i + 1 < input.length) then input.charCodeAt(i + 1) else 0
if(0xD800 <= x && x <= 0xDBFF && 0xDC00 <= y && y <= 0xDFFF)
x = 0x10000 + ((x & 0x03FF) << 10) + (y & 0x03FF)
i++
# Encode output as utf-8
if(x <= 0x7F)
output += String.fromCharCode(x)
else if(x <= 0x7FF)
output += String.fromCharCode(0xC0 | ((x >>> 6 ) & 0x1F),
0x80 | ( x & 0x3F))
else if(x <= 0xFFFF)
output += String.fromCharCode(0xE0 | ((x >>> 12) & 0x0F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F))
else if(x <= 0x1FFFFF)
output += String.fromCharCode(0xF0 | ((x >>> 18) & 0x07),
0x80 | ((x >>> 12) & 0x3F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F))
return output
###
Encode a string as utf-16
###
str2rstr_utf16le = (input)->
output = ""
for i in [0...input.length]
output += String.fromCharCode( input.charCodeAt(i) & 0xFF,
(input.charCodeAt(i) >>> 8) & 0xFF)
return output
str2rstr_utf16be = (input)->
output = ""
for i in [0...input.length]
output += String.fromCharCode((input.charCodeAt(i) >>> 8) & 0xFF,
input.charCodeAt(i) & 0xFF)
return output
###
Convert a raw string to an array of big-endian words
Characters >255 have their high-byte silently ignored.
###
rstr2binb = (input)->
output = Array(input.length >> 2)
for i in [0...output.length]
output[i] = 0;
for i in [0...input.length*8] by 8
output[i>>5] |= (input.charCodeAt(i / 8) & 0xFF) << (24 - i % 32)
return output
###
Convert an array of little-endian words to a string
###
binb2rstr = (input)->
output = ""
for i in [0...input.length*32] by 8
output += String.fromCharCode((input[i>>5] >>> (24 - i % 32)) & 0xFF)
return output
###
Calculate the SHA-1 of an array of big-endian words, and a bit length
###
binb_sha1 = (x, len)->
# append padding
x[len >> 5] |= 0x80 << (24 - len % 32)
x[((len + 64 >> 9) << 4) + 15] = len
w = Array(80)
a = 1732584193
b = -271733879
c = -1732584194
d = 271733878
e = -1009589776
for i in [0...x.length] by 16
olda = a;
oldb = b;
oldc = c;
oldd = d;
olde = e;
for j in [0...80]
if(j < 16)
w[j] = x[i + j]
else
w[j] = bit_rol(w[j-3] ^ w[j-8] ^ w[j-14] ^ w[j-16], 1)
t = safe_add(safe_add(bit_rol(a, 5), sha1_ft(j, b, c, d)),
safe_add(safe_add(e, w[j]), sha1_kt(j)))
e = d
d = c
c = bit_rol(b, 30)
b = a
a = t
a = safe_add(a, olda)
b = safe_add(b, oldb)
c = safe_add(c, oldc)
d = safe_add(d, oldd)
e = safe_add(e, olde)
return Array(a, b, c, d, e)
###
Perform the appropriate triplet combination function for the current iteration
###
sha1_ft = (t, b, c, d)->
if(t < 20)
(b & c) | ((~b) & d)
else if(t < 40)
b ^ c ^ d
else if(t < 60)
(b & c) | (b & d) | (c & d)
else
b ^ c ^ d
###
Determine the appropriate additive constant for the current iteration
###
sha1_kt = (t)->
if(t < 20)
1518500249
else if(t < 40)
1859775393
else if(t < 60)
-1894007588
else
-899497514
###
Add integers, wrapping at 2^32. This uses 16-bit operations internally
to work around bugs in some JS interpreters.
###
safe_add = (x, y)->
lsw = (x & 0xFFFF) + (y & 0xFFFF)
msw = (x >> 16) + (y >> 16) + (lsw >> 16)
return (msw << 16) | (lsw & 0xFFFF)
###
Bitwise rotate a 32-bit number to the left.
###
bit_rol = (num, cnt)->
return (num << cnt) | (num >>> (32 - cnt))
pub.hex_sha1 = (s)->
rstr2hex(rstr_sha1(str2rstr_utf8(s)))
pub.b64_sha1 = (s)->
return rstr2b64(rstr_sha1(str2rstr_utf8(s)))
pub.any_sha1 = (s, e)->
return rstr2any(rstr_sha1(str2rstr_utf8(s)), e)
pub.hex_hmac_sha1 = (k, d)->
return rstr2hex(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)))
pub.b64_hmac_sha1 = (k, d)->
return rstr2b64(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)))
pub.any_hmac_sha1 = (k, d, e)->
return rstr2any(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)), e)
return pub
)()
| 179385 | #
# Ported to Coffeescript by <NAME> (<EMAIL>)
#
# A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined
# in FIPS 180-1
# Version 2.2-alpha Copyright <NAME> 2000 - 2002.
# Other contributors: <NAME>, <NAME>, <NAME>, <NAME>
# Distributed under the BSD License
# See http://pajhome.org.uk/crypt/md5 for details.
#
#
# These are the functions you'll usually want to call
# They take string arguments and return either hex or base-64 encoded strings
#
SHA1 = (()->
pub = {}
#
# Calculate the SHA1 of a raw string
#
rstr_sha1 = (s)->
return binb2rstr(binb_sha1(rstr2binb(s), s.length * 8))
#
# Calculate the HMAC-SHA1 of a key and some data (raw strings)
#
rstr_hmac_sha1 = (key, data)->
bkey = rstr2binb(key)
if(bkey.length > 16)
bkey = binb_sha1(bkey, key.length * 8)
ipad = Array(16)
opad = Array(16)
for i in [0...16]
ipad[i] = bkey[i] ^ 0x36363636
opad[i] = bkey[i] ^ 0x5C5C5C5C
hash = binb_sha1(ipad.concat(rstr2binb(data)), 512 + data.length * 8)
return binb2rstr(binb_sha1(opad.concat(hash), 512 + 160))
#
# Convert a raw string to a hex string
#
rstr2hex = (input, lower_hexcase=true)->
hex_tab = if(lower_hexcase) then "0123456789abcdef" else "0123<KEY>56789ABCDEF"
output = ""
for i in [0...input.length]
x = input.charCodeAt(i)
output += hex_tab.charAt((x >>> 4) & 0x0F) +
hex_tab.charAt( x & 0x0F)
return output
#
# Convert a raw string to a base-64 string
#
rstr2b64 = (input, b64pad="=")->
tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
output = ""
len = input.length;
for i in [0...len] by 3
triplet = (input.charCodeAt(i) << 16) |
(if(i + 1 < len) then input.charCodeAt(i+1) << 8 else 0) |
(if(i + 2 < len) then input.charCodeAt(i+2) else 0)
for j in [0...4]
if(i * 8 + j * 6 > input.length * 8)
output += b64pad
else
output += tab.charAt((triplet >>> 6*(3-j)) & 0x3F)
return output
#
# Convert a raw string to an arbitrary string encoding
#
rstr2any = (input, encoding)->
divisor = encoding.length
remainders = Array()
# Convert to an array of 16-bit big-endian values, forming the dividend
dividend = Array(Math.ceil(input.length / 2));
for i in [0...dividend.length]
dividend[i] = (input.charCodeAt(i * 2) << 8) | input.charCodeAt(i * 2 + 1)
#
# Repeatedly perform a long division. The binary array forms the dividend,
# the length of the encoding is the divisor. Once computed, the quotient
# forms the dividend for the next step. We stop when the dividend is zero.
# All remainders are stored for later use.
#
while(dividend.length > 0)
quotient = Array()
x = 0
for i in [0...dividend.length]
x = (x << 16) + dividend[i]
q = Math.floor(x / divisor)
x -= q * divisor
if(quotient.length > 0 || q > 0)
quotient[quotient.length] = q
remainders[remainders.length] = x
dividend = quotient
# Convert the remainders to the output string
output = ""
for i in [remainders.length - 1..0]
output += encoding.charAt(remainders[i])
# Append leading zero equivalents
full_length = Math.ceil(input.length * 8 / (Math.log(encoding.length) / Math.log(2)))
for i in [output.length..full_length]
output = encoding[0] + output
return output
#
# Encode a string as utf-8.
# For efficiency, this assumes the input is valid utf-16.
#
str2rstr_utf8 = (input)->
output = ""
for i in [0...input.length]
# Decode utf-16 surrogate pairs
x = input.charCodeAt(i)
y = if( i + 1 < input.length) then input.charCodeAt(i + 1) else 0
if(0xD800 <= x && x <= 0xDBFF && 0xDC00 <= y && y <= 0xDFFF)
x = 0x10000 + ((x & 0x03FF) << 10) + (y & 0x03FF)
i++
# Encode output as utf-8
if(x <= 0x7F)
output += String.fromCharCode(x)
else if(x <= 0x7FF)
output += String.fromCharCode(0xC0 | ((x >>> 6 ) & 0x1F),
0x80 | ( x & 0x3F))
else if(x <= 0xFFFF)
output += String.fromCharCode(0xE0 | ((x >>> 12) & 0x0F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F))
else if(x <= 0x1FFFFF)
output += String.fromCharCode(0xF0 | ((x >>> 18) & 0x07),
0x80 | ((x >>> 12) & 0x3F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F))
return output
###
Encode a string as utf-16
###
str2rstr_utf16le = (input)->
output = ""
for i in [0...input.length]
output += String.fromCharCode( input.charCodeAt(i) & 0xFF,
(input.charCodeAt(i) >>> 8) & 0xFF)
return output
str2rstr_utf16be = (input)->
output = ""
for i in [0...input.length]
output += String.fromCharCode((input.charCodeAt(i) >>> 8) & 0xFF,
input.charCodeAt(i) & 0xFF)
return output
###
Convert a raw string to an array of big-endian words
Characters >255 have their high-byte silently ignored.
###
rstr2binb = (input)->
output = Array(input.length >> 2)
for i in [0...output.length]
output[i] = 0;
for i in [0...input.length*8] by 8
output[i>>5] |= (input.charCodeAt(i / 8) & 0xFF) << (24 - i % 32)
return output
###
Convert an array of little-endian words to a string
###
binb2rstr = (input)->
output = ""
for i in [0...input.length*32] by 8
output += String.fromCharCode((input[i>>5] >>> (24 - i % 32)) & 0xFF)
return output
###
Calculate the SHA-1 of an array of big-endian words, and a bit length
###
binb_sha1 = (x, len)->
# append padding
x[len >> 5] |= 0x80 << (24 - len % 32)
x[((len + 64 >> 9) << 4) + 15] = len
w = Array(80)
a = 1732584193
b = -271733879
c = -1732584194
d = 271733878
e = -1009589776
for i in [0...x.length] by 16
olda = a;
oldb = b;
oldc = c;
oldd = d;
olde = e;
for j in [0...80]
if(j < 16)
w[j] = x[i + j]
else
w[j] = bit_rol(w[j-3] ^ w[j-8] ^ w[j-14] ^ w[j-16], 1)
t = safe_add(safe_add(bit_rol(a, 5), sha1_ft(j, b, c, d)),
safe_add(safe_add(e, w[j]), sha1_kt(j)))
e = d
d = c
c = bit_rol(b, 30)
b = a
a = t
a = safe_add(a, olda)
b = safe_add(b, oldb)
c = safe_add(c, oldc)
d = safe_add(d, oldd)
e = safe_add(e, olde)
return Array(a, b, c, d, e)
###
Perform the appropriate triplet combination function for the current iteration
###
sha1_ft = (t, b, c, d)->
if(t < 20)
(b & c) | ((~b) & d)
else if(t < 40)
b ^ c ^ d
else if(t < 60)
(b & c) | (b & d) | (c & d)
else
b ^ c ^ d
###
Determine the appropriate additive constant for the current iteration
###
sha1_kt = (t)->
if(t < 20)
1518500249
else if(t < 40)
1859775393
else if(t < 60)
-1894007588
else
-899497514
###
Add integers, wrapping at 2^32. This uses 16-bit operations internally
to work around bugs in some JS interpreters.
###
safe_add = (x, y)->
lsw = (x & 0xFFFF) + (y & 0xFFFF)
msw = (x >> 16) + (y >> 16) + (lsw >> 16)
return (msw << 16) | (lsw & 0xFFFF)
###
Bitwise rotate a 32-bit number to the left.
###
bit_rol = (num, cnt)->
return (num << cnt) | (num >>> (32 - cnt))
pub.hex_sha1 = (s)->
rstr2hex(rstr_sha1(str2rstr_utf8(s)))
pub.b64_sha1 = (s)->
return rstr2b64(rstr_sha1(str2rstr_utf8(s)))
pub.any_sha1 = (s, e)->
return rstr2any(rstr_sha1(str2rstr_utf8(s)), e)
pub.hex_hmac_sha1 = (k, d)->
return rstr2hex(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)))
pub.b64_hmac_sha1 = (k, d)->
return rstr2b64(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)))
pub.any_hmac_sha1 = (k, d, e)->
return rstr2any(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)), e)
return pub
)()
| true | #
# Ported to Coffeescript by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
#
# A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined
# in FIPS 180-1
# Version 2.2-alpha Copyright PI:NAME:<NAME>END_PI 2000 - 2002.
# Other contributors: PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
# Distributed under the BSD License
# See http://pajhome.org.uk/crypt/md5 for details.
#
#
# These are the functions you'll usually want to call
# They take string arguments and return either hex or base-64 encoded strings
#
SHA1 = (()->
pub = {}
#
# Calculate the SHA1 of a raw string
#
rstr_sha1 = (s)->
return binb2rstr(binb_sha1(rstr2binb(s), s.length * 8))
#
# Calculate the HMAC-SHA1 of a key and some data (raw strings)
#
rstr_hmac_sha1 = (key, data)->
bkey = rstr2binb(key)
if(bkey.length > 16)
bkey = binb_sha1(bkey, key.length * 8)
ipad = Array(16)
opad = Array(16)
for i in [0...16]
ipad[i] = bkey[i] ^ 0x36363636
opad[i] = bkey[i] ^ 0x5C5C5C5C
hash = binb_sha1(ipad.concat(rstr2binb(data)), 512 + data.length * 8)
return binb2rstr(binb_sha1(opad.concat(hash), 512 + 160))
#
# Convert a raw string to a hex string
#
rstr2hex = (input, lower_hexcase=true)->
hex_tab = if(lower_hexcase) then "0123456789abcdef" else "0123PI:KEY:<KEY>END_PI56789ABCDEF"
output = ""
for i in [0...input.length]
x = input.charCodeAt(i)
output += hex_tab.charAt((x >>> 4) & 0x0F) +
hex_tab.charAt( x & 0x0F)
return output
#
# Convert a raw string to a base-64 string
#
rstr2b64 = (input, b64pad="=")->
tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
output = ""
len = input.length;
for i in [0...len] by 3
triplet = (input.charCodeAt(i) << 16) |
(if(i + 1 < len) then input.charCodeAt(i+1) << 8 else 0) |
(if(i + 2 < len) then input.charCodeAt(i+2) else 0)
for j in [0...4]
if(i * 8 + j * 6 > input.length * 8)
output += b64pad
else
output += tab.charAt((triplet >>> 6*(3-j)) & 0x3F)
return output
#
# Convert a raw string to an arbitrary string encoding
#
rstr2any = (input, encoding)->
divisor = encoding.length
remainders = Array()
# Convert to an array of 16-bit big-endian values, forming the dividend
dividend = Array(Math.ceil(input.length / 2));
for i in [0...dividend.length]
dividend[i] = (input.charCodeAt(i * 2) << 8) | input.charCodeAt(i * 2 + 1)
#
# Repeatedly perform a long division. The binary array forms the dividend,
# the length of the encoding is the divisor. Once computed, the quotient
# forms the dividend for the next step. We stop when the dividend is zero.
# All remainders are stored for later use.
#
while(dividend.length > 0)
quotient = Array()
x = 0
for i in [0...dividend.length]
x = (x << 16) + dividend[i]
q = Math.floor(x / divisor)
x -= q * divisor
if(quotient.length > 0 || q > 0)
quotient[quotient.length] = q
remainders[remainders.length] = x
dividend = quotient
# Convert the remainders to the output string
output = ""
for i in [remainders.length - 1..0]
output += encoding.charAt(remainders[i])
# Append leading zero equivalents
full_length = Math.ceil(input.length * 8 / (Math.log(encoding.length) / Math.log(2)))
for i in [output.length..full_length]
output = encoding[0] + output
return output
#
# Encode a string as utf-8.
# For efficiency, this assumes the input is valid utf-16.
#
str2rstr_utf8 = (input)->
output = ""
for i in [0...input.length]
# Decode utf-16 surrogate pairs
x = input.charCodeAt(i)
y = if( i + 1 < input.length) then input.charCodeAt(i + 1) else 0
if(0xD800 <= x && x <= 0xDBFF && 0xDC00 <= y && y <= 0xDFFF)
x = 0x10000 + ((x & 0x03FF) << 10) + (y & 0x03FF)
i++
# Encode output as utf-8
if(x <= 0x7F)
output += String.fromCharCode(x)
else if(x <= 0x7FF)
output += String.fromCharCode(0xC0 | ((x >>> 6 ) & 0x1F),
0x80 | ( x & 0x3F))
else if(x <= 0xFFFF)
output += String.fromCharCode(0xE0 | ((x >>> 12) & 0x0F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F))
else if(x <= 0x1FFFFF)
output += String.fromCharCode(0xF0 | ((x >>> 18) & 0x07),
0x80 | ((x >>> 12) & 0x3F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F))
return output
###
Encode a string as utf-16
###
str2rstr_utf16le = (input)->
output = ""
for i in [0...input.length]
output += String.fromCharCode( input.charCodeAt(i) & 0xFF,
(input.charCodeAt(i) >>> 8) & 0xFF)
return output
str2rstr_utf16be = (input)->
output = ""
for i in [0...input.length]
output += String.fromCharCode((input.charCodeAt(i) >>> 8) & 0xFF,
input.charCodeAt(i) & 0xFF)
return output
###
Convert a raw string to an array of big-endian words
Characters >255 have their high-byte silently ignored.
###
rstr2binb = (input)->
output = Array(input.length >> 2)
for i in [0...output.length]
output[i] = 0;
for i in [0...input.length*8] by 8
output[i>>5] |= (input.charCodeAt(i / 8) & 0xFF) << (24 - i % 32)
return output
###
Convert an array of little-endian words to a string
###
binb2rstr = (input)->
output = ""
for i in [0...input.length*32] by 8
output += String.fromCharCode((input[i>>5] >>> (24 - i % 32)) & 0xFF)
return output
###
Calculate the SHA-1 of an array of big-endian words, and a bit length
###
binb_sha1 = (x, len)->
# append padding
x[len >> 5] |= 0x80 << (24 - len % 32)
x[((len + 64 >> 9) << 4) + 15] = len
w = Array(80)
a = 1732584193
b = -271733879
c = -1732584194
d = 271733878
e = -1009589776
for i in [0...x.length] by 16
olda = a;
oldb = b;
oldc = c;
oldd = d;
olde = e;
for j in [0...80]
if(j < 16)
w[j] = x[i + j]
else
w[j] = bit_rol(w[j-3] ^ w[j-8] ^ w[j-14] ^ w[j-16], 1)
t = safe_add(safe_add(bit_rol(a, 5), sha1_ft(j, b, c, d)),
safe_add(safe_add(e, w[j]), sha1_kt(j)))
e = d
d = c
c = bit_rol(b, 30)
b = a
a = t
a = safe_add(a, olda)
b = safe_add(b, oldb)
c = safe_add(c, oldc)
d = safe_add(d, oldd)
e = safe_add(e, olde)
return Array(a, b, c, d, e)
###
Perform the appropriate triplet combination function for the current iteration
###
sha1_ft = (t, b, c, d)->
if(t < 20)
(b & c) | ((~b) & d)
else if(t < 40)
b ^ c ^ d
else if(t < 60)
(b & c) | (b & d) | (c & d)
else
b ^ c ^ d
###
Determine the appropriate additive constant for the current iteration
###
sha1_kt = (t)->
if(t < 20)
1518500249
else if(t < 40)
1859775393
else if(t < 60)
-1894007588
else
-899497514
###
Add integers, wrapping at 2^32. This uses 16-bit operations internally
to work around bugs in some JS interpreters.
###
safe_add = (x, y)->
lsw = (x & 0xFFFF) + (y & 0xFFFF)
msw = (x >> 16) + (y >> 16) + (lsw >> 16)
return (msw << 16) | (lsw & 0xFFFF)
###
Bitwise rotate a 32-bit number to the left.
###
bit_rol = (num, cnt)->
return (num << cnt) | (num >>> (32 - cnt))
pub.hex_sha1 = (s)->
rstr2hex(rstr_sha1(str2rstr_utf8(s)))
pub.b64_sha1 = (s)->
return rstr2b64(rstr_sha1(str2rstr_utf8(s)))
pub.any_sha1 = (s, e)->
return rstr2any(rstr_sha1(str2rstr_utf8(s)), e)
pub.hex_hmac_sha1 = (k, d)->
return rstr2hex(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)))
pub.b64_hmac_sha1 = (k, d)->
return rstr2b64(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)))
pub.any_hmac_sha1 = (k, d, e)->
return rstr2any(rstr_hmac_sha1(str2rstr_utf8(k), str2rstr_utf8(d)), e)
return pub
)()
|
[
{
"context": " 'force new connection': true\n query: 'token=supdoge'\n\n client1.on 'error', (err) ->\n err.shou",
"end": 1121,
"score": 0.8809185028076172,
"start": 1114,
"tag": "PASSWORD",
"value": "supdoge"
}
] | server/test/sockets/auth.coffee | stevelacy/portal | 0 | should = require 'should'
client = require 'socket.io-client'
socketIo = require 'socket.io'
config = require '../../config'
io = require '../../http/sockets'
app = require '../../'
setup = require '../setup'
db = require '../../db'
{User} = db.models
user = setup.user
describe 'socket auth', ->
beforeEach db.wipe
beforeEach (cb) ->
User.create user, cb
it 'should not accept an unauthorized connection without a token', (done) ->
client1 = client.connect config.url,
'force new connection': true
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should not accept an unauthorized connection with a null token', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: 'token='
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should not accept an unauthorized connection without a valid token', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: 'token=supdoge'
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should accept a connection from an authorized user', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: "token=#{user.token}"
client1.on 'connect', ->
client1.disconnect()
done()
| 171831 | should = require 'should'
client = require 'socket.io-client'
socketIo = require 'socket.io'
config = require '../../config'
io = require '../../http/sockets'
app = require '../../'
setup = require '../setup'
db = require '../../db'
{User} = db.models
user = setup.user
describe 'socket auth', ->
beforeEach db.wipe
beforeEach (cb) ->
User.create user, cb
it 'should not accept an unauthorized connection without a token', (done) ->
client1 = client.connect config.url,
'force new connection': true
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should not accept an unauthorized connection with a null token', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: 'token='
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should not accept an unauthorized connection without a valid token', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: 'token=<PASSWORD>'
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should accept a connection from an authorized user', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: "token=#{user.token}"
client1.on 'connect', ->
client1.disconnect()
done()
| true | should = require 'should'
client = require 'socket.io-client'
socketIo = require 'socket.io'
config = require '../../config'
io = require '../../http/sockets'
app = require '../../'
setup = require '../setup'
db = require '../../db'
{User} = db.models
user = setup.user
describe 'socket auth', ->
beforeEach db.wipe
beforeEach (cb) ->
User.create user, cb
it 'should not accept an unauthorized connection without a token', (done) ->
client1 = client.connect config.url,
'force new connection': true
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should not accept an unauthorized connection with a null token', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: 'token='
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should not accept an unauthorized connection without a valid token', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: 'token=PI:PASSWORD:<PASSWORD>END_PI'
client1.on 'error', (err) ->
err.should.equal 'Not authorized'
client1.disconnect()
done()
it 'should accept a connection from an authorized user', (done) ->
client1 = client.connect config.url,
'force new connection': true
query: "token=#{user.token}"
client1.on 'connect', ->
client1.disconnect()
done()
|
[
{
"context": "p and change for added security)'\n default: 'd41a439c438a100756f5'\n pusher_app_secret:\n type: 'string'\n ",
"end": 689,
"score": 0.993878960609436,
"start": 669,
"tag": "KEY",
"value": "d41a439c438a100756f5"
},
{
"context": " description: 'Pusher App ... | lib/supercopair.coffee | mingsai/atom-supercopair | 7 | #atom-supercollider
Controller = require './controller'
module.exports = SuperCopair =
controller: null
config:
growlOnError:
type: 'boolean'
default: false
debug:
type: 'boolean'
default: false
#atom-pair
config:
hipchat_token:
type: 'string'
description: 'HipChat admin token (optional)'
default: 'null'
hipchat_room_name:
type: 'string'
description: 'HipChat room name for sending invitations (optional)'
default: 'null'
pusher_app_key:
type: 'string'
description: 'Pusher App Key (sign up at http://pusher.com/signup and change for added security)'
default: 'd41a439c438a100756f5'
pusher_app_secret:
type: 'string'
description: 'Pusher App Secret'
default: '4bf35003e819bb138249'
broadcast_bypass:
type: 'boolean'
description: 'Select if you want to be asked before evaluating external code'
default: false
disable_broadcast:
type: 'boolean'
description: 'Select if you do not want to receive any external evaluation'
default: false
#atom-supercollider
activate: (state) ->
if @controller
return
@controller = new Controller(atom.project.getDirectories()[0])
@controller.start()
deactivate: ->
@controller.stop()
@controller = null
serialize: ->
{}
| 138853 | #atom-supercollider
Controller = require './controller'
module.exports = SuperCopair =
controller: null
config:
growlOnError:
type: 'boolean'
default: false
debug:
type: 'boolean'
default: false
#atom-pair
config:
hipchat_token:
type: 'string'
description: 'HipChat admin token (optional)'
default: 'null'
hipchat_room_name:
type: 'string'
description: 'HipChat room name for sending invitations (optional)'
default: 'null'
pusher_app_key:
type: 'string'
description: 'Pusher App Key (sign up at http://pusher.com/signup and change for added security)'
default: '<KEY>'
pusher_app_secret:
type: 'string'
description: 'Pusher App Secret'
default: '<KEY>'
broadcast_bypass:
type: 'boolean'
description: 'Select if you want to be asked before evaluating external code'
default: false
disable_broadcast:
type: 'boolean'
description: 'Select if you do not want to receive any external evaluation'
default: false
#atom-supercollider
activate: (state) ->
if @controller
return
@controller = new Controller(atom.project.getDirectories()[0])
@controller.start()
deactivate: ->
@controller.stop()
@controller = null
serialize: ->
{}
| true | #atom-supercollider
Controller = require './controller'
module.exports = SuperCopair =
controller: null
config:
growlOnError:
type: 'boolean'
default: false
debug:
type: 'boolean'
default: false
#atom-pair
config:
hipchat_token:
type: 'string'
description: 'HipChat admin token (optional)'
default: 'null'
hipchat_room_name:
type: 'string'
description: 'HipChat room name for sending invitations (optional)'
default: 'null'
pusher_app_key:
type: 'string'
description: 'Pusher App Key (sign up at http://pusher.com/signup and change for added security)'
default: 'PI:KEY:<KEY>END_PI'
pusher_app_secret:
type: 'string'
description: 'Pusher App Secret'
default: 'PI:KEY:<KEY>END_PI'
broadcast_bypass:
type: 'boolean'
description: 'Select if you want to be asked before evaluating external code'
default: false
disable_broadcast:
type: 'boolean'
description: 'Select if you do not want to receive any external evaluation'
default: false
#atom-supercollider
activate: (state) ->
if @controller
return
@controller = new Controller(atom.project.getDirectories()[0])
@controller.start()
deactivate: ->
@controller.stop()
@controller = null
serialize: ->
{}
|
[
{
"context": " controller: [ \"$scope\", (scope) ->\n apikey = 'kevin3dr.hokdl9ko' # FIXME - move this someplace better\n\n longit",
"end": 359,
"score": 0.9996601343154907,
"start": 342,
"tag": "KEY",
"value": "kevin3dr.hokdl9ko"
}
] | src/scripts/directives/mapboxStaticMap.coffee | SecureCloud-biz/droneshare | 41 | angular.module('app').directive 'mapboxStaticMap', () ->
restrict: 'A'
template: '<img ng-src="{{url}}"></img>'
scope:
latitude: '='
longitude: '='
width: '='
height: '='
zoom: '=?'
icon: '=?' # A mapbox icon name string
color: '=?' # html hex color string
controller: [ "$scope", (scope) ->
apikey = 'kevin3dr.hokdl9ko' # FIXME - move this someplace better
longitude = scope.longitude
latitude = scope.latitude
zoom = scope.zoom ? "8"
latlonstr = "#{longitude},#{latitude},#{zoom}"
markerstr = if scope.icon?
color = scope.color ? "f44" # default to redish
"pin-s-#{scope.icon}+#{color}(#{latlonstr})/"
else
""
scope.url = "http://api.tiles.mapbox.com/v3/#{apikey}/#{markerstr}#{latlonstr}/#{scope.width}x#{scope.height}.png"
]
| 132044 | angular.module('app').directive 'mapboxStaticMap', () ->
restrict: 'A'
template: '<img ng-src="{{url}}"></img>'
scope:
latitude: '='
longitude: '='
width: '='
height: '='
zoom: '=?'
icon: '=?' # A mapbox icon name string
color: '=?' # html hex color string
controller: [ "$scope", (scope) ->
apikey = '<KEY>' # FIXME - move this someplace better
longitude = scope.longitude
latitude = scope.latitude
zoom = scope.zoom ? "8"
latlonstr = "#{longitude},#{latitude},#{zoom}"
markerstr = if scope.icon?
color = scope.color ? "f44" # default to redish
"pin-s-#{scope.icon}+#{color}(#{latlonstr})/"
else
""
scope.url = "http://api.tiles.mapbox.com/v3/#{apikey}/#{markerstr}#{latlonstr}/#{scope.width}x#{scope.height}.png"
]
| true | angular.module('app').directive 'mapboxStaticMap', () ->
restrict: 'A'
template: '<img ng-src="{{url}}"></img>'
scope:
latitude: '='
longitude: '='
width: '='
height: '='
zoom: '=?'
icon: '=?' # A mapbox icon name string
color: '=?' # html hex color string
controller: [ "$scope", (scope) ->
apikey = 'PI:KEY:<KEY>END_PI' # FIXME - move this someplace better
longitude = scope.longitude
latitude = scope.latitude
zoom = scope.zoom ? "8"
latlonstr = "#{longitude},#{latitude},#{zoom}"
markerstr = if scope.icon?
color = scope.color ? "f44" # default to redish
"pin-s-#{scope.icon}+#{color}(#{latlonstr})/"
else
""
scope.url = "http://api.tiles.mapbox.com/v3/#{apikey}/#{markerstr}#{latlonstr}/#{scope.width}x#{scope.height}.png"
]
|
[
{
"context": "ast(json).proud, 1000)\n\n data = {\n name: 'proudness',\n values:\n json.map (proud)->\n",
"end": 171,
"score": 0.6635373830795288,
"start": 168,
"tag": "NAME",
"value": "pro"
}
] | src/drawer.coffee | KarlGl/Proudometer | 0 | _ = require('lodash')
module.exports = (speedometer)->
index: (er, res, json)->
speedometer.animatedUpdate( _.last(json).proud, 1000)
data = {
name: 'proudness',
values:
json.map (proud)->
x: new Date(proud.time)
y: proud.proud
};
new LineChart(
x_scale: d3.time.scale()
y_scale: d3.scale.linear().domain([0,100])
y_axis_text: 'Proudness'
).for([data]).plot();
update: (er, res, json)->
proud = json[0].proud
speedometer.animatedUpdate(proud, 3000)
| 212621 | _ = require('lodash')
module.exports = (speedometer)->
index: (er, res, json)->
speedometer.animatedUpdate( _.last(json).proud, 1000)
data = {
name: '<NAME>udness',
values:
json.map (proud)->
x: new Date(proud.time)
y: proud.proud
};
new LineChart(
x_scale: d3.time.scale()
y_scale: d3.scale.linear().domain([0,100])
y_axis_text: 'Proudness'
).for([data]).plot();
update: (er, res, json)->
proud = json[0].proud
speedometer.animatedUpdate(proud, 3000)
| true | _ = require('lodash')
module.exports = (speedometer)->
index: (er, res, json)->
speedometer.animatedUpdate( _.last(json).proud, 1000)
data = {
name: 'PI:NAME:<NAME>END_PIudness',
values:
json.map (proud)->
x: new Date(proud.time)
y: proud.proud
};
new LineChart(
x_scale: d3.time.scale()
y_scale: d3.scale.linear().domain([0,100])
y_axis_text: 'Proudness'
).for([data]).plot();
update: (er, res, json)->
proud = json[0].proud
speedometer.animatedUpdate(proud, 3000)
|
[
{
"context": " what we need and do not need (pain points)\n# With Joe, Han, Phil\n#\n# Prioritize work with time.\n#\n\n\n\n\t\n",
"end": 837,
"score": 0.9980170726776123,
"start": 834,
"tag": "NAME",
"value": "Joe"
},
{
"context": " we need and do not need (pain points)\n# With Joe,... | src/public/framer02.framer/app.coffee | jmanhart/personal-portfolio-17 | 0 | {LottieLayer} = require 'LottieLayer'
gradient = new Gradient
start: "#05F"
end: "#0DF"
Framer.Device.background.gradient = gradient
# Variables
loadTime = 10
delayTime = 2
pageCount = 6
gutter = 10
hideBelow = Screen.height
spacer = 20
indicators = []
# Projects Data
animationData = [
{
name: "anim01",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
{
name: "anim02",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
{
name: "anim03",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
]
# Interal Audit of what we need and do not need (pain points)
# With Joe, Han, Phil
#
# Prioritize work with time.
#
# Set up FlowComponent
flow = new FlowComponent
flow.showNext(progFlow00)
# Setting Up Toast Banners
progressToast.parent = progFlow02
progressToast.y = hideBelow
progressToastAlt.parent = progFlow02
progressToastAlt.y = hideBelow
sucessToast.parent = progFlow02
sucessToast.y = hideBelow
failToast.parent = progFlow02
failToast.y = hideBelow
# bottomBars = [ bottomBarContSucess, bottomBarContProgress]
# Animations ---------------------------------
progBarFill = new Animation progFill,
width: progBar.width
options:
time: loadTime
spin = new Animation progressSpinner,
rotation: 1080
options:
time: loadTime
# States -------------------------------------
progressToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
expanded:
height: 400
y: 400
progressToastAlt.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
expanded:
height: 400
y: 400
sucessToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
failToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
# 1. Start
btnPair.onClick ->
flow.showNext(progFlow01)
progBarFill.start()
# Animation Fill
progBarFill.onAnimationStart ->
Utils.delay 0.5, ->
flow.showOverlayBottom(progFlow02)
progressToastAlt.stateCycle("active")
spin.start()
# Positioning the progBar
progBar.parent = progressToast
progBar.y = Align.center(3)
progBar.x = Align.center()
progBarFill.onAnimationEnd ->
rando = Utils.randomChoice(["fail", "fail"])
if rando == "fail"
failToast.bringToFront()
failToast.stateCycle("active")
if rando == "pass"
sucessToast.bringToFront()
sucessToast.stateCycle("active")
# print rando
# Animation Fill Up
pageScroller = new PageComponent
parent: progFlow02
y: Align.top()
x: Align.center()
width: Screen.width
height: Screen.height - sucessToast.height
scrollVertical: false
backgroundColor: null
clip: false
# Card Pagination Indicator
indicatorCont = new Layer
width: pageScroller.width / 4.5
height: 20
x: Align.center(9)
y: Align.bottom(-(failToast.height + (spacer*2)))
backgroundColor: null
parent: progFlow02
# Loop to create pages
for i in [0...animationData.length]
page = new Layer
height: 250
width: Screen.width
x: (Screen.width + gutter) * i
y: Align.center()
backgroundColor: null
opacity: 1
parent: pageScroller.content
name: "page " + (i + 1)
animationCont = new Layer
parent: page
height: page.height
width: page.width
x: Align.center
y: Align.center
backgroundColor: null
dummyAnimation = new Layer
parent: page
size: 150
borderRadius: 10
backgroundColor: "#CCCCCC"
x: Align.center()
y: Align.top()
bar01 = new Layer
parent: dummyAnimation
height: 2
width: dummyAnimation.width * 0.9
x: Align.center()
y: Align.center()
rotation: 45
bar02 = new Layer
parent: dummyAnimation
height: 2
width: dummyAnimation.width * 0.9
x: Align.center()
y: Align.center()
rotation: -45
# Animation Block
# customAnim = new LottieLayer
# name: animationData[i].name
# path: animationData[i].path
# speed: 1
# opacity: 0.5
# autoplay: true
# backgroundColor:null
# loop: true
# x: Align.center()
# y: Align.top()
# height: 150
# width: 150
# parent: animationCont
subText = new TextLayer
parent: page
text: animationData[i].subText
width: page.width * 0.4
fontSize: 15
lineHeight: 1.20
# fontFamily: 'SF Pro Text'
fontWeight: 'regular'
letterSpacing: -.24
textAlign: 'center'
y: Align.bottom()
x: Align.center()
color: '#000'
inactiveColor = "rgba(0,0,0,0.2)"
activeColor = "rgba(0,0,0,1)"
# creating the indicator
indicator = new Layer
parent: indicatorCont
size: 8
borderRadius: indicatorCont.height
x: (indicatorCont.width / animationData.length) * i
y: Align.center()
name: i
backgroundColor: inactiveColor
# creating states for the indicator
indicator.states =
active:
backgroundColor: activeColor
inactive:
backgroundColor: inactiveColor
#pushing indicators into array
indicators.push(indicator)
# Making the first indicator active
pageScroller.snapToPage(pageScroller.content.children[0])
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Changing indicator state on page change
pageScroller.on "change:currentPage", ->
indicator.states.switch("default") for indicator in indicators
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Interactions ---------------------------------
# Fail Interactions
failToast.onTap ->
flow.showOverlayBottom(failScreen)
btnRetry.onTap ->
flow.showNext(progFlow01)
failScreen.onTap ->
flow.showPrevious()
# Sucess Interactions
sucessToast.onTap ->
flow.showNext(profileScreen)
sucessScreen.onTap ->
flow.showPrevious()
# Progress Interactions
progressToast.onTap ->
flow.showOverlayBottom(inProgressScreen)
# Positioning the progBar
progBar.parent = inProgressScreen
progBar.y = Align.center()
progBar.x = Align.center()
inProgressScreen.onTap ->
flow.showPrevious()
# Positioning the progBar
progBar.parent = progressToast
progBar.y = Align.center(3)
progBar.x = Align.center()
# Canceling Pairing
# expandBtn.onTap ->
# progressToast.stateCycle("expanded", "active")
# Utils.delay (delayTime/2), ->
# flow.showNext(progFlow00)
| 158907 | {LottieLayer} = require 'LottieLayer'
gradient = new Gradient
start: "#05F"
end: "#0DF"
Framer.Device.background.gradient = gradient
# Variables
loadTime = 10
delayTime = 2
pageCount = 6
gutter = 10
hideBelow = Screen.height
spacer = 20
indicators = []
# Projects Data
animationData = [
{
name: "anim01",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
{
name: "anim02",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
{
name: "anim03",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
]
# Interal Audit of what we need and do not need (pain points)
# With <NAME>, <NAME>, <NAME>
#
# Prioritize work with time.
#
# Set up FlowComponent
flow = new FlowComponent
flow.showNext(progFlow00)
# Setting Up Toast Banners
progressToast.parent = progFlow02
progressToast.y = hideBelow
progressToastAlt.parent = progFlow02
progressToastAlt.y = hideBelow
sucessToast.parent = progFlow02
sucessToast.y = hideBelow
failToast.parent = progFlow02
failToast.y = hideBelow
# bottomBars = [ bottomBarContSucess, bottomBarContProgress]
# Animations ---------------------------------
progBarFill = new Animation progFill,
width: progBar.width
options:
time: loadTime
spin = new Animation progressSpinner,
rotation: 1080
options:
time: loadTime
# States -------------------------------------
progressToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
expanded:
height: 400
y: 400
progressToastAlt.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
expanded:
height: 400
y: 400
sucessToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
failToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
# 1. Start
btnPair.onClick ->
flow.showNext(progFlow01)
progBarFill.start()
# Animation Fill
progBarFill.onAnimationStart ->
Utils.delay 0.5, ->
flow.showOverlayBottom(progFlow02)
progressToastAlt.stateCycle("active")
spin.start()
# Positioning the progBar
progBar.parent = progressToast
progBar.y = Align.center(3)
progBar.x = Align.center()
progBarFill.onAnimationEnd ->
rando = Utils.randomChoice(["fail", "fail"])
if rando == "fail"
failToast.bringToFront()
failToast.stateCycle("active")
if rando == "pass"
sucessToast.bringToFront()
sucessToast.stateCycle("active")
# print rando
# Animation Fill Up
pageScroller = new PageComponent
parent: progFlow02
y: Align.top()
x: Align.center()
width: Screen.width
height: Screen.height - sucessToast.height
scrollVertical: false
backgroundColor: null
clip: false
# Card Pagination Indicator
indicatorCont = new Layer
width: pageScroller.width / 4.5
height: 20
x: Align.center(9)
y: Align.bottom(-(failToast.height + (spacer*2)))
backgroundColor: null
parent: progFlow02
# Loop to create pages
for i in [0...animationData.length]
page = new Layer
height: 250
width: Screen.width
x: (Screen.width + gutter) * i
y: Align.center()
backgroundColor: null
opacity: 1
parent: pageScroller.content
name: "page " + (i + 1)
animationCont = new Layer
parent: page
height: page.height
width: page.width
x: Align.center
y: Align.center
backgroundColor: null
dummyAnimation = new Layer
parent: page
size: 150
borderRadius: 10
backgroundColor: "#CCCCCC"
x: Align.center()
y: Align.top()
bar01 = new Layer
parent: dummyAnimation
height: 2
width: dummyAnimation.width * 0.9
x: Align.center()
y: Align.center()
rotation: 45
bar02 = new Layer
parent: dummyAnimation
height: 2
width: dummyAnimation.width * 0.9
x: Align.center()
y: Align.center()
rotation: -45
# Animation Block
# customAnim = new LottieLayer
# name: animationData[i].name
# path: animationData[i].path
# speed: 1
# opacity: 0.5
# autoplay: true
# backgroundColor:null
# loop: true
# x: Align.center()
# y: Align.top()
# height: 150
# width: 150
# parent: animationCont
subText = new TextLayer
parent: page
text: animationData[i].subText
width: page.width * 0.4
fontSize: 15
lineHeight: 1.20
# fontFamily: 'SF Pro Text'
fontWeight: 'regular'
letterSpacing: -.24
textAlign: 'center'
y: Align.bottom()
x: Align.center()
color: '#000'
inactiveColor = "rgba(0,0,0,0.2)"
activeColor = "rgba(0,0,0,1)"
# creating the indicator
indicator = new Layer
parent: indicatorCont
size: 8
borderRadius: indicatorCont.height
x: (indicatorCont.width / animationData.length) * i
y: Align.center()
name: i
backgroundColor: inactiveColor
# creating states for the indicator
indicator.states =
active:
backgroundColor: activeColor
inactive:
backgroundColor: inactiveColor
#pushing indicators into array
indicators.push(indicator)
# Making the first indicator active
pageScroller.snapToPage(pageScroller.content.children[0])
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Changing indicator state on page change
pageScroller.on "change:currentPage", ->
indicator.states.switch("default") for indicator in indicators
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Interactions ---------------------------------
# Fail Interactions
failToast.onTap ->
flow.showOverlayBottom(failScreen)
btnRetry.onTap ->
flow.showNext(progFlow01)
failScreen.onTap ->
flow.showPrevious()
# Sucess Interactions
sucessToast.onTap ->
flow.showNext(profileScreen)
sucessScreen.onTap ->
flow.showPrevious()
# Progress Interactions
progressToast.onTap ->
flow.showOverlayBottom(inProgressScreen)
# Positioning the progBar
progBar.parent = inProgressScreen
progBar.y = Align.center()
progBar.x = Align.center()
inProgressScreen.onTap ->
flow.showPrevious()
# Positioning the progBar
progBar.parent = progressToast
progBar.y = Align.center(3)
progBar.x = Align.center()
# Canceling Pairing
# expandBtn.onTap ->
# progressToast.stateCycle("expanded", "active")
# Utils.delay (delayTime/2), ->
# flow.showNext(progFlow00)
| true | {LottieLayer} = require 'LottieLayer'
gradient = new Gradient
start: "#05F"
end: "#0DF"
Framer.Device.background.gradient = gradient
# Variables
loadTime = 10
delayTime = 2
pageCount = 6
gutter = 10
hideBelow = Screen.height
spacer = 20
indicators = []
# Projects Data
animationData = [
{
name: "anim01",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
{
name: "anim02",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
{
name: "anim03",
path: "images/project-lottie/va3mLTE-01/va3mLTE-01.json",
subText: "Lorem ipsum dolor amet direct trade cred yuccie kinfolk."
},
]
# Interal Audit of what we need and do not need (pain points)
# With PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
#
# Prioritize work with time.
#
# Set up FlowComponent
flow = new FlowComponent
flow.showNext(progFlow00)
# Setting Up Toast Banners
progressToast.parent = progFlow02
progressToast.y = hideBelow
progressToastAlt.parent = progFlow02
progressToastAlt.y = hideBelow
sucessToast.parent = progFlow02
sucessToast.y = hideBelow
failToast.parent = progFlow02
failToast.y = hideBelow
# bottomBars = [ bottomBarContSucess, bottomBarContProgress]
# Animations ---------------------------------
progBarFill = new Animation progFill,
width: progBar.width
options:
time: loadTime
spin = new Animation progressSpinner,
rotation: 1080
options:
time: loadTime
# States -------------------------------------
progressToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
expanded:
height: 400
y: 400
progressToastAlt.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
expanded:
height: 400
y: 400
sucessToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
failToast.states =
active:
y: Align.bottom()
animationOptions:
curve: Spring(damping: 0.5)
time: 0.5
# 1. Start
btnPair.onClick ->
flow.showNext(progFlow01)
progBarFill.start()
# Animation Fill
progBarFill.onAnimationStart ->
Utils.delay 0.5, ->
flow.showOverlayBottom(progFlow02)
progressToastAlt.stateCycle("active")
spin.start()
# Positioning the progBar
progBar.parent = progressToast
progBar.y = Align.center(3)
progBar.x = Align.center()
progBarFill.onAnimationEnd ->
rando = Utils.randomChoice(["fail", "fail"])
if rando == "fail"
failToast.bringToFront()
failToast.stateCycle("active")
if rando == "pass"
sucessToast.bringToFront()
sucessToast.stateCycle("active")
# print rando
# Animation Fill Up
pageScroller = new PageComponent
parent: progFlow02
y: Align.top()
x: Align.center()
width: Screen.width
height: Screen.height - sucessToast.height
scrollVertical: false
backgroundColor: null
clip: false
# Card Pagination Indicator
indicatorCont = new Layer
width: pageScroller.width / 4.5
height: 20
x: Align.center(9)
y: Align.bottom(-(failToast.height + (spacer*2)))
backgroundColor: null
parent: progFlow02
# Loop to create pages
for i in [0...animationData.length]
page = new Layer
height: 250
width: Screen.width
x: (Screen.width + gutter) * i
y: Align.center()
backgroundColor: null
opacity: 1
parent: pageScroller.content
name: "page " + (i + 1)
animationCont = new Layer
parent: page
height: page.height
width: page.width
x: Align.center
y: Align.center
backgroundColor: null
dummyAnimation = new Layer
parent: page
size: 150
borderRadius: 10
backgroundColor: "#CCCCCC"
x: Align.center()
y: Align.top()
bar01 = new Layer
parent: dummyAnimation
height: 2
width: dummyAnimation.width * 0.9
x: Align.center()
y: Align.center()
rotation: 45
bar02 = new Layer
parent: dummyAnimation
height: 2
width: dummyAnimation.width * 0.9
x: Align.center()
y: Align.center()
rotation: -45
# Animation Block
# customAnim = new LottieLayer
# name: animationData[i].name
# path: animationData[i].path
# speed: 1
# opacity: 0.5
# autoplay: true
# backgroundColor:null
# loop: true
# x: Align.center()
# y: Align.top()
# height: 150
# width: 150
# parent: animationCont
subText = new TextLayer
parent: page
text: animationData[i].subText
width: page.width * 0.4
fontSize: 15
lineHeight: 1.20
# fontFamily: 'SF Pro Text'
fontWeight: 'regular'
letterSpacing: -.24
textAlign: 'center'
y: Align.bottom()
x: Align.center()
color: '#000'
inactiveColor = "rgba(0,0,0,0.2)"
activeColor = "rgba(0,0,0,1)"
# creating the indicator
indicator = new Layer
parent: indicatorCont
size: 8
borderRadius: indicatorCont.height
x: (indicatorCont.width / animationData.length) * i
y: Align.center()
name: i
backgroundColor: inactiveColor
# creating states for the indicator
indicator.states =
active:
backgroundColor: activeColor
inactive:
backgroundColor: inactiveColor
#pushing indicators into array
indicators.push(indicator)
# Making the first indicator active
pageScroller.snapToPage(pageScroller.content.children[0])
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Changing indicator state on page change
pageScroller.on "change:currentPage", ->
indicator.states.switch("default") for indicator in indicators
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Interactions ---------------------------------
# Fail Interactions
failToast.onTap ->
flow.showOverlayBottom(failScreen)
btnRetry.onTap ->
flow.showNext(progFlow01)
failScreen.onTap ->
flow.showPrevious()
# Sucess Interactions
sucessToast.onTap ->
flow.showNext(profileScreen)
sucessScreen.onTap ->
flow.showPrevious()
# Progress Interactions
progressToast.onTap ->
flow.showOverlayBottom(inProgressScreen)
# Positioning the progBar
progBar.parent = inProgressScreen
progBar.y = Align.center()
progBar.x = Align.center()
inProgressScreen.onTap ->
flow.showPrevious()
# Positioning the progBar
progBar.parent = progressToast
progBar.y = Align.center(3)
progBar.x = Align.center()
# Canceling Pairing
# expandBtn.onTap ->
# progressToast.stateCycle("expanded", "active")
# Utils.delay (delayTime/2), ->
# flow.showNext(progFlow00)
|
[
{
"context": "tate.moreSubject\n classification.task_key = 'completion_assessment_task'\n classification.annotation['value'] = 'inco",
"end": 3863,
"score": 0.8286312222480774,
"start": 3837,
"tag": "KEY",
"value": "completion_assessment_task"
},
{
"context": "state.badSubj... | app/assets/javascripts/lib/workflow-methods-mixin.cjsx | johnscancella/scribeAPI | 0 | Classification = require 'models/classification.coffee'
coreTools = require 'components/core-tools'
markTools = require 'components/mark/tools'
transcribeTools = require 'components/transcribe/tools'
verifyTools = require 'components/verify/tools'
module.exports =
# Convenience method for selecting currently active workflow based on active controller
getActiveWorkflow: ->
return null if ! @props.project
k = (k for w,k in @props.project.workflows when w.name == @props.workflowName)
return null if k?.length != 1
@props.project.workflows[k[0]]
getWorkflowByName: (name) ->
k = (k for w,k in @props.project.workflows when w.name is name)
return null if k?.length != 1
@props.project.workflows[k[0]]
# Start a new classification (optionally initialized with given annotation hash):
beginClassification: (annotation = {}, callback) ->
classifications = @state.classifications
classification = new Classification()
if annotation?
classification.annotation[k] = v for k, v of annotation
classifications.push classification
@setState
classifications: classifications
classificationIndex: classifications.length-1
, =>
@forceUpdate()
window.classifications = @state.classifications # make accessible to console
callback() if callback?
commitClassification: (classification) ->
return unless classification?
# Create visual interim mark just in case POST takes a while
interim_mark = @addInterimMark classification
# Commit classification to backend
classification.commit (classification) =>
# Did this generate a child_subject? Update local copy:
if classification.child_subject
@appendChildSubject classification.subject_id, classification.child_subject
# Now that we have the real mark, hide the interim mark:
@hideInterimMark(interim_mark) if interim_mark?
if @state.badSubject
@toggleBadSubject =>
@advanceToNextSubject()
if @state.illegibleSubject
@toggleIllegibleSubject =>
@advanceToNextSubject()
# Called immediately before saving a classification, adds a fake mark in lieu
# of the real generated mark:
addInterimMark: (classification) ->
# Uniquely identify local interim marks:
@interim_mark_id ||= 0
# Interim mark is the region (the mark classification's annotation hash) with extras:
interim_mark = $.extend({
show: true # Default to show. We'll disable this when classification saved
interim_id: (@interim_mark_id += 1) # Unique id
subject_id : classification.subject_id # Keep subject_id so we know which subject to show it over
}, classification.annotation)
# Add interim mark to array in @state
interimMarks = @state.interimMarks ? []
interimMarks.push interim_mark
@setState interimMarks: interimMarks
interim_mark
# Counterpart to addInterimMark, hides the given interim mark
hideInterimMark: (interim_mark) ->
interimMarks = @state.interimMarks
for m, i in interimMarks
# If this is the interim mark to hide, hide it:
if m.interim_id == interim_mark.interim_id
m.show = false
@setState interimMarks: interimMarks
# We found it, move on:
break
commitCurrentClassification: () ->
classification = @getCurrentClassification()
classification.subject_id = @getCurrentSubject()?.id
classification.subject_set_id = @getCurrentSubjectSet().id if @getCurrentSubjectSet()?
classification.workflow_id = @getActiveWorkflow().id
# If user activated 'Bad Subject' button, override task:
if @state.moreSubject
classification.task_key = 'completion_assessment_task'
classification.annotation['value'] = 'incomplete_subject'
else if @state.badSubject
classification.task_key = 'flag_bad_subject_task'
else if @state.illegibleSubject
classification.task_key = 'flag_illegible_subject_task'
# Otherwise, classification is for active task:
else
classification.task_key = @state.taskKey
if Object.keys(classification.annotation).length == 0
if @getActiveWorkflow().name is "mark" and not @getActiveWorkflow()?.show_completion_assessment_task
# user clicking "Done", which implies complete_subject for completion_assessment_task
classification.task_key = 'completion_assessment_task'
classification.annotation['value'] = 'complete_subject'
return if Object.keys(classification.annotation).length == 0
@commitClassification(classification)
@beginClassification()
toggleBadSubject: (e, callback) ->
@setState badSubject: not @state.badSubject, =>
callback?()
setMoreSubject: ->
@state.moreSubject = true
resetMoreSubject: ->
@state.moreSubject = false
toggleIllegibleSubject: (e, callback) ->
@setState illegibleSubject: not @state.illegibleSubject, =>
callback?()
flagSubjectAsUserDeleted: (subject_id) ->
classification = @getCurrentClassification()
classification.subject_id = subject_id # @getCurrentSubject()?.id
classification.workflow_id = @getActiveWorkflow().id
classification.task_key = 'flag_bad_subject_task'
classification.commit (classification) =>
@updateChildSubject @getCurrentSubject().id, classification.subject_id, user_has_deleted: true
@beginClassification()
# Update specified child_subject with given properties (e.g. after submitting a delete flag)
updateChildSubject: (parent_subject_id, child_subject_id, props) ->
if (s = @getSubjectById(parent_subject_id))
for c, i in s.child_subjects
if c.id == child_subject_id
c[k] = v for k,v of props
# Add newly acquired child_subject to child_subjects array of relevant subject (i.e. after submitting a subject-generating classification)
appendChildSubject: (subject_id, child_subject) ->
if (s = @getSubjectById(subject_id))
s.child_subjects.push $.extend({userCreated: true}, child_subject)
# We've updated an internal object in @state.subjectSets, but framework doesn't notice, so tell it to update:
@forceUpdate()
# Get a reference to the local copy of a subject by id regardless of whether viewing subject-sets or just subjects
getSubjectById: (id) ->
if @state.subjectSets?
# If current subject set has no subjects, we're likely in between one subject set
# and the next (for which we're currently fetching subjects), so return null:
return null if ! @getCurrentSubjectSet().subjects?
for s in @getCurrentSubjectSet().subjects
return s if s.id == id
else
for s in @state.subjects
return s if s.id == id
# Get current classification:
getCurrentClassification: ->
@state.classifications[@state.classificationIndex]
# Get current task:
getCurrentTask: ->
return null if ! @state.taskKey?
console.warn "TaskKey invalid: #{@state.taskKey}. Should be: #{(k for k,v of @getTasks())}" if ! @getTasks()[@state.taskKey]?
@getTasks()[@state.taskKey]
getTasks: ->
# Add completion_assessment_task to list of tasks dynamically:
tasks = @getActiveWorkflow().tasks
completion_assessment_task = @getCompletionAssessmentTask()
# Merge keys recursively if it exists in config
completion_assessment_task = $.extend true, tasks['completion_assessment_task'], completion_assessment_task if tasks['completion_assessment_task']?
$.extend tasks, completion_assessment_task: completion_assessment_task
# Get instance of current tool:
getCurrentTool: ->
toolKey = @getCurrentTask()?.tool
tool = @toolByKey toolKey
toolByKey: (toolKey) ->
( ( coreTools[toolKey] ? markTools[toolKey] ) ? transcribeTools[toolKey] ) ? verifyTools[toolKey]
# Load next logical task
advanceToNextTask: () ->
nextTaskKey = @getNextTask()?.key
if nextTaskKey is null
return
# Commit whatever current classification is:
@commitCurrentClassification()
# start a new one:
# @beginClassification {} # this keps adding empty (uncommitted) classifications to @state.classifications --STI
# After classification ready with empty annotation, proceed to next task:
@advanceToTask nextTaskKey
# Get next logical task
getNextTask: ->
task = @getTasks()[@state.taskKey]
# PB: Moving from hash of options to an array of options
if (options = (c for c in task.tool_config?.options when c.value is @getCurrentClassification()?.annotation?.value)) && options.length > 0 && (opt = options[0])? && opt.next_task?
nextKey = opt.next_task
else
nextKey = @getTasks()[@state.taskKey].next_task
@getTasks()[nextKey]
# Advance to a named task:
advanceToTask: (key) ->
task = @getTasks()[ key ]
tool = @toolByKey task?.tool
if ! task?
console.warn "WARN: Invalid task key: ", key
else if ! tool?
console.warn "WARN: Invalid tool specified in #{key}: #{task.tool}"
else
@setState
taskKey: key
# Get currently viewed subject set
getCurrentSubjectSet: ->
if @state.subjectSets?[@state.subject_set_index]
@state.subjectSets?[@state.subject_set_index]
# else @state.subjectSets #having a hard time accounting for one subject_set
# Get currently viewed subject
getCurrentSubject: ->
# If we've viewing a subject-set (i.e. Mark) let's use that subject-set's subjects
if @getCurrentSubjectSet()?
subjects = @getCurrentSubjectSet().subjects
# Otherwise, since we're not viewing subject-sets, we must have an array of indiv subjects:
else
subjects = @state.subjects
# It's possible we have no subjects at all, in which case fail with null:
return null unless subjects?
subjects[@state.subject_index] # otherwise, return subject
getCompletionAssessmentTask: ->
generates_subject_type: null
instruction: "Thanks for all your work! Is there anything left to #{@props.workflowName}?"
key: "completion_assessment_task"
next_task: null
tool: "pickOne"
help: {
title: "Completion Assessment",
body: "<p>Have all requested fields on this page been marked with a rectangle?</p><p>You do not have to mark every field on the page, however, it helps us to know if you think there is more to mark. Thank you!</p>"
},
tool_config: {
"options": [
{
"label": "Nothing left to #{@props.workflowName}",
"next_task": null,
"value": "complete_subject"
},
{
"label": "There's more to #{@props.workflowName}",
"next_task": null,
"value": "incomplete_subject"
}
]
}
subToolIndex: 0
# Regardless of what workflow we're in, call this to display next subject (if any avail)
advanceToNextSubject: ->
if @state.subjects?
@_advanceToNextSubjectInSubjects()
else
@_advanceToNextSubjectInSubjectSets()
# This is the version of advanceToNextSubject for workflows that consume subjects (transcribe,verify)
_advanceToNextSubjectInSubjects: ->
if @state.subject_index + 1 < @state.subjects.length
next_index = @state.subject_index + 1
next_subject = @state.subjects[next_index]
@setState
taskKey: next_subject.type
subject_index: next_index, =>
key = @getCurrentSubject().type
@advanceToTask key
# Haz more pages of subjects?
else if @state.subjects_next_page?
@fetchSubjects page: @state.subjects_next_page
else
@setState
subject_index: null
noMoreSubjects: true
userClassifiedAll: @state.subjects.length > 0
# This is the version of advanceToNextSubject for workflows that consume subject sets (mark)
_advanceToNextSubjectInSubjectSets: ->
new_subject_set_index = @state.subject_set_index
new_subject_index = @state.subject_index + 1
# If we've exhausted pages in this subject set, move to next one:
if new_subject_index >= @getCurrentSubjectSet().subjects.length
new_subject_set_index += 1
new_subject_index = 0
# If we've exhausted all subject sets, collapse in shame
if new_subject_set_index >= @state.subjectSets.length
if @state.subject_sets_current_page < @state.subject_sets_total_pages
# Gather filters by which to query subject-sets
params =
group_id: @props.query.group_id ? null
page: @state.subject_sets_current_page + 1
@fetchSubjectSets params
else
@setState
taskKey: null
notice:
header: "All Done!"
message: "There's nothing more for you to #{@props.workflowName} here."
onClick: () =>
@transitionTo? 'mark' # "/#/mark"
@setState
notice: null
taskKey: @getActiveWorkflow().first_task
console.warn "NO MORE SUBJECT SETS"
return
# console.log "Mark#index Advancing to subject_set_index #{new_subject_set_index} (of #{@state.subjectSets.length}), subject_index #{new_subject_index} (of #{@state.subjectSets[new_subject_set_index].subjects.length})"
@setState
subject_set_index: new_subject_set_index
subject_index: new_subject_index
taskKey: @getActiveWorkflow().first_task
currentSubToolIndex: 0, () =>
@fetchSubjectsForCurrentSubjectSet(1, 100)
commitClassificationAndContinue: (d) ->
@commitCurrentClassification()
@beginClassification {}, () =>
if @getCurrentTask()?.next_task?
@advanceToTask @getCurrentTask().next_task
else
@advanceToNextSubject()
# Clear annotation. Used by skip to next subject action
clearCurrentAnnotation: ->
currentClassification = @getCurrentClassification()
currentClassification.annotation = {}
# Handle user selecting a pick/drawing tool:
handleDataFromTool: (d) ->
# Kind of a hack: We receive annotation data from two places:
# 1. tool selection widget in right-col
# 2. the actual draggable marking tools
# We want to remember the subToolIndex so that the right-col menu highlights
# the correct tool after committing a mark. If incoming data has subToolIndex
# but no mark location information, we know this callback was called by the
# right-col. So only in that case, record currentSubToolIndex, which we use
# to initialize marks going forward
if d.subToolIndex? && ! d.x? && ! d.y?
@setState currentSubToolIndex: d.subToolIndex
@setState currentSubtool: d.tool if d.tool?
else
classifications = @state.classifications
classifications[@state.classificationIndex].annotation[k] = v for k, v of d
# PB: Saving STI's notes here in case we decide tools should fully
# replace annotation hash rather than selectively update by key as above:
# not clear whether we should replace annotations, or append to it --STI
# classifications[@state.classificationIndex].annotation = d #[k] = v for k, v of d
@setState
classifications: classifications
, =>
@forceUpdate()
cleanAnnotation: (annotation) ->
# remove extra white spaces in strings as they are not important
# and only cause confusion during verifying
for k,v of annotation
if typeof v is 'string'
v = v.trim()
# replace multiple spaces and tabs with single space
v = v.replace(/[ \t]+/g, " ")
# replace multiple new lines with a single new line
v = v.replace(/\n+/g, "\n")
# remove spaces around new line
v = v.replace(/ ?\n ?/g, "\n")
annotation[k] = v
handleTaskComplete: (d) ->
@handleDataFromTool(d)
# clean the classification's annotation before submit
@cleanAnnotation @getCurrentClassification().annotation
@commitClassificationAndContinue d
| 184541 | Classification = require 'models/classification.coffee'
coreTools = require 'components/core-tools'
markTools = require 'components/mark/tools'
transcribeTools = require 'components/transcribe/tools'
verifyTools = require 'components/verify/tools'
module.exports =
# Convenience method for selecting currently active workflow based on active controller
getActiveWorkflow: ->
return null if ! @props.project
k = (k for w,k in @props.project.workflows when w.name == @props.workflowName)
return null if k?.length != 1
@props.project.workflows[k[0]]
getWorkflowByName: (name) ->
k = (k for w,k in @props.project.workflows when w.name is name)
return null if k?.length != 1
@props.project.workflows[k[0]]
# Start a new classification (optionally initialized with given annotation hash):
beginClassification: (annotation = {}, callback) ->
classifications = @state.classifications
classification = new Classification()
if annotation?
classification.annotation[k] = v for k, v of annotation
classifications.push classification
@setState
classifications: classifications
classificationIndex: classifications.length-1
, =>
@forceUpdate()
window.classifications = @state.classifications # make accessible to console
callback() if callback?
commitClassification: (classification) ->
return unless classification?
# Create visual interim mark just in case POST takes a while
interim_mark = @addInterimMark classification
# Commit classification to backend
classification.commit (classification) =>
# Did this generate a child_subject? Update local copy:
if classification.child_subject
@appendChildSubject classification.subject_id, classification.child_subject
# Now that we have the real mark, hide the interim mark:
@hideInterimMark(interim_mark) if interim_mark?
if @state.badSubject
@toggleBadSubject =>
@advanceToNextSubject()
if @state.illegibleSubject
@toggleIllegibleSubject =>
@advanceToNextSubject()
# Called immediately before saving a classification, adds a fake mark in lieu
# of the real generated mark:
addInterimMark: (classification) ->
# Uniquely identify local interim marks:
@interim_mark_id ||= 0
# Interim mark is the region (the mark classification's annotation hash) with extras:
interim_mark = $.extend({
show: true # Default to show. We'll disable this when classification saved
interim_id: (@interim_mark_id += 1) # Unique id
subject_id : classification.subject_id # Keep subject_id so we know which subject to show it over
}, classification.annotation)
# Add interim mark to array in @state
interimMarks = @state.interimMarks ? []
interimMarks.push interim_mark
@setState interimMarks: interimMarks
interim_mark
# Counterpart to addInterimMark, hides the given interim mark
hideInterimMark: (interim_mark) ->
interimMarks = @state.interimMarks
for m, i in interimMarks
# If this is the interim mark to hide, hide it:
if m.interim_id == interim_mark.interim_id
m.show = false
@setState interimMarks: interimMarks
# We found it, move on:
break
commitCurrentClassification: () ->
classification = @getCurrentClassification()
classification.subject_id = @getCurrentSubject()?.id
classification.subject_set_id = @getCurrentSubjectSet().id if @getCurrentSubjectSet()?
classification.workflow_id = @getActiveWorkflow().id
# If user activated 'Bad Subject' button, override task:
if @state.moreSubject
classification.task_key = '<KEY>'
classification.annotation['value'] = 'incomplete_subject'
else if @state.badSubject
classification.task_key = '<KEY>'
else if @state.illegibleSubject
classification.task_key = '<KEY>'
# Otherwise, classification is for active task:
else
classification.task_key = @state.taskKey
if Object.keys(classification.annotation).length == 0
if @getActiveWorkflow().name is "mark" and not @getActiveWorkflow()?.show_completion_assessment_task
# user clicking "Done", which implies complete_subject for completion_assessment_task
classification.task_key = '<KEY>'
classification.annotation['value'] = 'complete_subject'
return if Object.keys(classification.annotation).length == 0
@commitClassification(classification)
@beginClassification()
toggleBadSubject: (e, callback) ->
@setState badSubject: not @state.badSubject, =>
callback?()
setMoreSubject: ->
@state.moreSubject = true
resetMoreSubject: ->
@state.moreSubject = false
toggleIllegibleSubject: (e, callback) ->
@setState illegibleSubject: not @state.illegibleSubject, =>
callback?()
flagSubjectAsUserDeleted: (subject_id) ->
classification = @getCurrentClassification()
classification.subject_id = subject_id # @getCurrentSubject()?.id
classification.workflow_id = @getActiveWorkflow().id
classification.task_key = '<KEY>'
classification.commit (classification) =>
@updateChildSubject @getCurrentSubject().id, classification.subject_id, user_has_deleted: true
@beginClassification()
# Update specified child_subject with given properties (e.g. after submitting a delete flag)
updateChildSubject: (parent_subject_id, child_subject_id, props) ->
if (s = @getSubjectById(parent_subject_id))
for c, i in s.child_subjects
if c.id == child_subject_id
c[k] = v for k,v of props
# Add newly acquired child_subject to child_subjects array of relevant subject (i.e. after submitting a subject-generating classification)
appendChildSubject: (subject_id, child_subject) ->
if (s = @getSubjectById(subject_id))
s.child_subjects.push $.extend({userCreated: true}, child_subject)
# We've updated an internal object in @state.subjectSets, but framework doesn't notice, so tell it to update:
@forceUpdate()
# Get a reference to the local copy of a subject by id regardless of whether viewing subject-sets or just subjects
getSubjectById: (id) ->
if @state.subjectSets?
# If current subject set has no subjects, we're likely in between one subject set
# and the next (for which we're currently fetching subjects), so return null:
return null if ! @getCurrentSubjectSet().subjects?
for s in @getCurrentSubjectSet().subjects
return s if s.id == id
else
for s in @state.subjects
return s if s.id == id
# Get current classification:
getCurrentClassification: ->
@state.classifications[@state.classificationIndex]
# Get current task:
getCurrentTask: ->
return null if ! @state.taskKey?
console.warn "TaskKey invalid: #{@state.taskKey}. Should be: #{(k for k,v of @getTasks())}" if ! @getTasks()[@state.taskKey]?
@getTasks()[@state.taskKey]
getTasks: ->
# Add completion_assessment_task to list of tasks dynamically:
tasks = @getActiveWorkflow().tasks
completion_assessment_task = @getCompletionAssessmentTask()
# Merge keys recursively if it exists in config
completion_assessment_task = $.extend true, tasks['completion_assessment_task'], completion_assessment_task if tasks['completion_assessment_task']?
$.extend tasks, completion_assessment_task: completion_assessment_task
# Get instance of current tool:
getCurrentTool: ->
toolKey = @getCurrentTask()?.tool
tool = @toolByKey toolKey
toolByKey: (toolKey) ->
( ( coreTools[toolKey] ? markTools[toolKey] ) ? transcribeTools[toolKey] ) ? verifyTools[toolKey]
# Load next logical task
advanceToNextTask: () ->
nextTaskKey = @getNextTask()?.key
if nextTaskKey is null
return
# Commit whatever current classification is:
@commitCurrentClassification()
# start a new one:
# @beginClassification {} # this keps adding empty (uncommitted) classifications to @state.classifications --STI
# After classification ready with empty annotation, proceed to next task:
@advanceToTask nextTaskKey
# Get next logical task
getNextTask: ->
task = @getTasks()[@state.taskKey]
# PB: Moving from hash of options to an array of options
if (options = (c for c in task.tool_config?.options when c.value is @getCurrentClassification()?.annotation?.value)) && options.length > 0 && (opt = options[0])? && opt.next_task?
nextKey = opt.next_task
else
nextKey = @getTasks()[@state.taskKey].next_task
@getTasks()[nextKey]
# Advance to a named task:
advanceToTask: (key) ->
task = @getTasks()[ key ]
tool = @toolByKey task?.tool
if ! task?
console.warn "WARN: Invalid task key: ", key
else if ! tool?
console.warn "WARN: Invalid tool specified in #{key}: #{task.tool}"
else
@setState
taskKey: key
# Get currently viewed subject set
getCurrentSubjectSet: ->
if @state.subjectSets?[@state.subject_set_index]
@state.subjectSets?[@state.subject_set_index]
# else @state.subjectSets #having a hard time accounting for one subject_set
# Get currently viewed subject
getCurrentSubject: ->
# If we've viewing a subject-set (i.e. Mark) let's use that subject-set's subjects
if @getCurrentSubjectSet()?
subjects = @getCurrentSubjectSet().subjects
# Otherwise, since we're not viewing subject-sets, we must have an array of indiv subjects:
else
subjects = @state.subjects
# It's possible we have no subjects at all, in which case fail with null:
return null unless subjects?
subjects[@state.subject_index] # otherwise, return subject
getCompletionAssessmentTask: ->
generates_subject_type: null
instruction: "Thanks for all your work! Is there anything left to #{@props.workflowName}?"
key: "completion_<KEY>"
next_task: null
tool: "pickOne"
help: {
title: "Completion Assessment",
body: "<p>Have all requested fields on this page been marked with a rectangle?</p><p>You do not have to mark every field on the page, however, it helps us to know if you think there is more to mark. Thank you!</p>"
},
tool_config: {
"options": [
{
"label": "Nothing left to #{@props.workflowName}",
"next_task": null,
"value": "complete_subject"
},
{
"label": "There's more to #{@props.workflowName}",
"next_task": null,
"value": "incomplete_subject"
}
]
}
subToolIndex: 0
# Regardless of what workflow we're in, call this to display next subject (if any avail)
advanceToNextSubject: ->
if @state.subjects?
@_advanceToNextSubjectInSubjects()
else
@_advanceToNextSubjectInSubjectSets()
# This is the version of advanceToNextSubject for workflows that consume subjects (transcribe,verify)
_advanceToNextSubjectInSubjects: ->
if @state.subject_index + 1 < @state.subjects.length
next_index = @state.subject_index + 1
next_subject = @state.subjects[next_index]
@setState
taskKey: next_subject.<KEY>
subject_index: next_index, =>
key = @getCurrentSubject().type
@advanceToTask key
# Haz more pages of subjects?
else if @state.subjects_next_page?
@fetchSubjects page: @state.subjects_next_page
else
@setState
subject_index: null
noMoreSubjects: true
userClassifiedAll: @state.subjects.length > 0
# This is the version of advanceToNextSubject for workflows that consume subject sets (mark)
_advanceToNextSubjectInSubjectSets: ->
new_subject_set_index = @state.subject_set_index
new_subject_index = @state.subject_index + 1
# If we've exhausted pages in this subject set, move to next one:
if new_subject_index >= @getCurrentSubjectSet().subjects.length
new_subject_set_index += 1
new_subject_index = 0
# If we've exhausted all subject sets, collapse in shame
if new_subject_set_index >= @state.subjectSets.length
if @state.subject_sets_current_page < @state.subject_sets_total_pages
# Gather filters by which to query subject-sets
params =
group_id: @props.query.group_id ? null
page: @state.subject_sets_current_page + 1
@fetchSubjectSets params
else
@setState
taskKey: null
notice:
header: "All Done!"
message: "There's nothing more for you to #{@props.workflowName} here."
onClick: () =>
@transitionTo? 'mark' # "/#/mark"
@setState
notice: null
taskKey: @getActiveWorkflow().first_task
console.warn "NO MORE SUBJECT SETS"
return
# console.log "Mark#index Advancing to subject_set_index #{new_subject_set_index} (of #{@state.subjectSets.length}), subject_index #{new_subject_index} (of #{@state.subjectSets[new_subject_set_index].subjects.length})"
@setState
subject_set_index: new_subject_set_index
subject_index: new_subject_index
taskKey: @getActiveWorkflow().first_task
currentSubToolIndex: 0, () =>
@fetchSubjectsForCurrentSubjectSet(1, 100)
commitClassificationAndContinue: (d) ->
@commitCurrentClassification()
@beginClassification {}, () =>
if @getCurrentTask()?.next_task?
@advanceToTask @getCurrentTask().next_task
else
@advanceToNextSubject()
# Clear annotation. Used by skip to next subject action
clearCurrentAnnotation: ->
currentClassification = @getCurrentClassification()
currentClassification.annotation = {}
# Handle user selecting a pick/drawing tool:
handleDataFromTool: (d) ->
# Kind of a hack: We receive annotation data from two places:
# 1. tool selection widget in right-col
# 2. the actual draggable marking tools
# We want to remember the subToolIndex so that the right-col menu highlights
# the correct tool after committing a mark. If incoming data has subToolIndex
# but no mark location information, we know this callback was called by the
# right-col. So only in that case, record currentSubToolIndex, which we use
# to initialize marks going forward
if d.subToolIndex? && ! d.x? && ! d.y?
@setState currentSubToolIndex: d.subToolIndex
@setState currentSubtool: d.tool if d.tool?
else
classifications = @state.classifications
classifications[@state.classificationIndex].annotation[k] = v for k, v of d
# PB: Saving STI's notes here in case we decide tools should fully
# replace annotation hash rather than selectively update by key as above:
# not clear whether we should replace annotations, or append to it --STI
# classifications[@state.classificationIndex].annotation = d #[k] = v for k, v of d
@setState
classifications: classifications
, =>
@forceUpdate()
cleanAnnotation: (annotation) ->
# remove extra white spaces in strings as they are not important
# and only cause confusion during verifying
for k,v of annotation
if typeof v is 'string'
v = v.trim()
# replace multiple spaces and tabs with single space
v = v.replace(/[ \t]+/g, " ")
# replace multiple new lines with a single new line
v = v.replace(/\n+/g, "\n")
# remove spaces around new line
v = v.replace(/ ?\n ?/g, "\n")
annotation[k] = v
handleTaskComplete: (d) ->
@handleDataFromTool(d)
# clean the classification's annotation before submit
@cleanAnnotation @getCurrentClassification().annotation
@commitClassificationAndContinue d
| true | Classification = require 'models/classification.coffee'
coreTools = require 'components/core-tools'
markTools = require 'components/mark/tools'
transcribeTools = require 'components/transcribe/tools'
verifyTools = require 'components/verify/tools'
module.exports =
# Convenience method for selecting currently active workflow based on active controller
getActiveWorkflow: ->
return null if ! @props.project
k = (k for w,k in @props.project.workflows when w.name == @props.workflowName)
return null if k?.length != 1
@props.project.workflows[k[0]]
getWorkflowByName: (name) ->
k = (k for w,k in @props.project.workflows when w.name is name)
return null if k?.length != 1
@props.project.workflows[k[0]]
# Start a new classification (optionally initialized with given annotation hash):
beginClassification: (annotation = {}, callback) ->
classifications = @state.classifications
classification = new Classification()
if annotation?
classification.annotation[k] = v for k, v of annotation
classifications.push classification
@setState
classifications: classifications
classificationIndex: classifications.length-1
, =>
@forceUpdate()
window.classifications = @state.classifications # make accessible to console
callback() if callback?
commitClassification: (classification) ->
return unless classification?
# Create visual interim mark just in case POST takes a while
interim_mark = @addInterimMark classification
# Commit classification to backend
classification.commit (classification) =>
# Did this generate a child_subject? Update local copy:
if classification.child_subject
@appendChildSubject classification.subject_id, classification.child_subject
# Now that we have the real mark, hide the interim mark:
@hideInterimMark(interim_mark) if interim_mark?
if @state.badSubject
@toggleBadSubject =>
@advanceToNextSubject()
if @state.illegibleSubject
@toggleIllegibleSubject =>
@advanceToNextSubject()
# Called immediately before saving a classification, adds a fake mark in lieu
# of the real generated mark:
addInterimMark: (classification) ->
# Uniquely identify local interim marks:
@interim_mark_id ||= 0
# Interim mark is the region (the mark classification's annotation hash) with extras:
interim_mark = $.extend({
show: true # Default to show. We'll disable this when classification saved
interim_id: (@interim_mark_id += 1) # Unique id
subject_id : classification.subject_id # Keep subject_id so we know which subject to show it over
}, classification.annotation)
# Add interim mark to array in @state
interimMarks = @state.interimMarks ? []
interimMarks.push interim_mark
@setState interimMarks: interimMarks
interim_mark
# Counterpart to addInterimMark, hides the given interim mark
hideInterimMark: (interim_mark) ->
interimMarks = @state.interimMarks
for m, i in interimMarks
# If this is the interim mark to hide, hide it:
if m.interim_id == interim_mark.interim_id
m.show = false
@setState interimMarks: interimMarks
# We found it, move on:
break
commitCurrentClassification: () ->
classification = @getCurrentClassification()
classification.subject_id = @getCurrentSubject()?.id
classification.subject_set_id = @getCurrentSubjectSet().id if @getCurrentSubjectSet()?
classification.workflow_id = @getActiveWorkflow().id
# If user activated 'Bad Subject' button, override task:
if @state.moreSubject
classification.task_key = 'PI:KEY:<KEY>END_PI'
classification.annotation['value'] = 'incomplete_subject'
else if @state.badSubject
classification.task_key = 'PI:KEY:<KEY>END_PI'
else if @state.illegibleSubject
classification.task_key = 'PI:KEY:<KEY>END_PI'
# Otherwise, classification is for active task:
else
classification.task_key = @state.taskKey
if Object.keys(classification.annotation).length == 0
if @getActiveWorkflow().name is "mark" and not @getActiveWorkflow()?.show_completion_assessment_task
# user clicking "Done", which implies complete_subject for completion_assessment_task
classification.task_key = 'PI:KEY:<KEY>END_PI'
classification.annotation['value'] = 'complete_subject'
return if Object.keys(classification.annotation).length == 0
@commitClassification(classification)
@beginClassification()
toggleBadSubject: (e, callback) ->
@setState badSubject: not @state.badSubject, =>
callback?()
setMoreSubject: ->
@state.moreSubject = true
resetMoreSubject: ->
@state.moreSubject = false
toggleIllegibleSubject: (e, callback) ->
@setState illegibleSubject: not @state.illegibleSubject, =>
callback?()
flagSubjectAsUserDeleted: (subject_id) ->
classification = @getCurrentClassification()
classification.subject_id = subject_id # @getCurrentSubject()?.id
classification.workflow_id = @getActiveWorkflow().id
classification.task_key = 'PI:KEY:<KEY>END_PI'
classification.commit (classification) =>
@updateChildSubject @getCurrentSubject().id, classification.subject_id, user_has_deleted: true
@beginClassification()
# Update specified child_subject with given properties (e.g. after submitting a delete flag)
updateChildSubject: (parent_subject_id, child_subject_id, props) ->
if (s = @getSubjectById(parent_subject_id))
for c, i in s.child_subjects
if c.id == child_subject_id
c[k] = v for k,v of props
# Add newly acquired child_subject to child_subjects array of relevant subject (i.e. after submitting a subject-generating classification)
appendChildSubject: (subject_id, child_subject) ->
if (s = @getSubjectById(subject_id))
s.child_subjects.push $.extend({userCreated: true}, child_subject)
# We've updated an internal object in @state.subjectSets, but framework doesn't notice, so tell it to update:
@forceUpdate()
# Get a reference to the local copy of a subject by id regardless of whether viewing subject-sets or just subjects
getSubjectById: (id) ->
if @state.subjectSets?
# If current subject set has no subjects, we're likely in between one subject set
# and the next (for which we're currently fetching subjects), so return null:
return null if ! @getCurrentSubjectSet().subjects?
for s in @getCurrentSubjectSet().subjects
return s if s.id == id
else
for s in @state.subjects
return s if s.id == id
# Get current classification:
getCurrentClassification: ->
@state.classifications[@state.classificationIndex]
# Get current task:
getCurrentTask: ->
return null if ! @state.taskKey?
console.warn "TaskKey invalid: #{@state.taskKey}. Should be: #{(k for k,v of @getTasks())}" if ! @getTasks()[@state.taskKey]?
@getTasks()[@state.taskKey]
getTasks: ->
# Add completion_assessment_task to list of tasks dynamically:
tasks = @getActiveWorkflow().tasks
completion_assessment_task = @getCompletionAssessmentTask()
# Merge keys recursively if it exists in config
completion_assessment_task = $.extend true, tasks['completion_assessment_task'], completion_assessment_task if tasks['completion_assessment_task']?
$.extend tasks, completion_assessment_task: completion_assessment_task
# Get instance of current tool:
getCurrentTool: ->
toolKey = @getCurrentTask()?.tool
tool = @toolByKey toolKey
toolByKey: (toolKey) ->
( ( coreTools[toolKey] ? markTools[toolKey] ) ? transcribeTools[toolKey] ) ? verifyTools[toolKey]
# Load next logical task
advanceToNextTask: () ->
nextTaskKey = @getNextTask()?.key
if nextTaskKey is null
return
# Commit whatever current classification is:
@commitCurrentClassification()
# start a new one:
# @beginClassification {} # this keps adding empty (uncommitted) classifications to @state.classifications --STI
# After classification ready with empty annotation, proceed to next task:
@advanceToTask nextTaskKey
# Get next logical task
getNextTask: ->
task = @getTasks()[@state.taskKey]
# PB: Moving from hash of options to an array of options
if (options = (c for c in task.tool_config?.options when c.value is @getCurrentClassification()?.annotation?.value)) && options.length > 0 && (opt = options[0])? && opt.next_task?
nextKey = opt.next_task
else
nextKey = @getTasks()[@state.taskKey].next_task
@getTasks()[nextKey]
# Advance to a named task:
advanceToTask: (key) ->
task = @getTasks()[ key ]
tool = @toolByKey task?.tool
if ! task?
console.warn "WARN: Invalid task key: ", key
else if ! tool?
console.warn "WARN: Invalid tool specified in #{key}: #{task.tool}"
else
@setState
taskKey: key
# Get currently viewed subject set
getCurrentSubjectSet: ->
if @state.subjectSets?[@state.subject_set_index]
@state.subjectSets?[@state.subject_set_index]
# else @state.subjectSets #having a hard time accounting for one subject_set
# Get currently viewed subject
getCurrentSubject: ->
# If we've viewing a subject-set (i.e. Mark) let's use that subject-set's subjects
if @getCurrentSubjectSet()?
subjects = @getCurrentSubjectSet().subjects
# Otherwise, since we're not viewing subject-sets, we must have an array of indiv subjects:
else
subjects = @state.subjects
# It's possible we have no subjects at all, in which case fail with null:
return null unless subjects?
subjects[@state.subject_index] # otherwise, return subject
getCompletionAssessmentTask: ->
generates_subject_type: null
instruction: "Thanks for all your work! Is there anything left to #{@props.workflowName}?"
key: "completion_PI:KEY:<KEY>END_PI"
next_task: null
tool: "pickOne"
help: {
title: "Completion Assessment",
body: "<p>Have all requested fields on this page been marked with a rectangle?</p><p>You do not have to mark every field on the page, however, it helps us to know if you think there is more to mark. Thank you!</p>"
},
tool_config: {
"options": [
{
"label": "Nothing left to #{@props.workflowName}",
"next_task": null,
"value": "complete_subject"
},
{
"label": "There's more to #{@props.workflowName}",
"next_task": null,
"value": "incomplete_subject"
}
]
}
subToolIndex: 0
# Regardless of what workflow we're in, call this to display next subject (if any avail)
advanceToNextSubject: ->
if @state.subjects?
@_advanceToNextSubjectInSubjects()
else
@_advanceToNextSubjectInSubjectSets()
# This is the version of advanceToNextSubject for workflows that consume subjects (transcribe,verify)
_advanceToNextSubjectInSubjects: ->
if @state.subject_index + 1 < @state.subjects.length
next_index = @state.subject_index + 1
next_subject = @state.subjects[next_index]
@setState
taskKey: next_subject.PI:KEY:<KEY>END_PI
subject_index: next_index, =>
key = @getCurrentSubject().type
@advanceToTask key
# Haz more pages of subjects?
else if @state.subjects_next_page?
@fetchSubjects page: @state.subjects_next_page
else
@setState
subject_index: null
noMoreSubjects: true
userClassifiedAll: @state.subjects.length > 0
# This is the version of advanceToNextSubject for workflows that consume subject sets (mark)
_advanceToNextSubjectInSubjectSets: ->
new_subject_set_index = @state.subject_set_index
new_subject_index = @state.subject_index + 1
# If we've exhausted pages in this subject set, move to next one:
if new_subject_index >= @getCurrentSubjectSet().subjects.length
new_subject_set_index += 1
new_subject_index = 0
# If we've exhausted all subject sets, collapse in shame
if new_subject_set_index >= @state.subjectSets.length
if @state.subject_sets_current_page < @state.subject_sets_total_pages
# Gather filters by which to query subject-sets
params =
group_id: @props.query.group_id ? null
page: @state.subject_sets_current_page + 1
@fetchSubjectSets params
else
@setState
taskKey: null
notice:
header: "All Done!"
message: "There's nothing more for you to #{@props.workflowName} here."
onClick: () =>
@transitionTo? 'mark' # "/#/mark"
@setState
notice: null
taskKey: @getActiveWorkflow().first_task
console.warn "NO MORE SUBJECT SETS"
return
# console.log "Mark#index Advancing to subject_set_index #{new_subject_set_index} (of #{@state.subjectSets.length}), subject_index #{new_subject_index} (of #{@state.subjectSets[new_subject_set_index].subjects.length})"
@setState
subject_set_index: new_subject_set_index
subject_index: new_subject_index
taskKey: @getActiveWorkflow().first_task
currentSubToolIndex: 0, () =>
@fetchSubjectsForCurrentSubjectSet(1, 100)
commitClassificationAndContinue: (d) ->
@commitCurrentClassification()
@beginClassification {}, () =>
if @getCurrentTask()?.next_task?
@advanceToTask @getCurrentTask().next_task
else
@advanceToNextSubject()
# Clear annotation. Used by skip to next subject action
clearCurrentAnnotation: ->
currentClassification = @getCurrentClassification()
currentClassification.annotation = {}
# Handle user selecting a pick/drawing tool:
handleDataFromTool: (d) ->
# Kind of a hack: We receive annotation data from two places:
# 1. tool selection widget in right-col
# 2. the actual draggable marking tools
# We want to remember the subToolIndex so that the right-col menu highlights
# the correct tool after committing a mark. If incoming data has subToolIndex
# but no mark location information, we know this callback was called by the
# right-col. So only in that case, record currentSubToolIndex, which we use
# to initialize marks going forward
if d.subToolIndex? && ! d.x? && ! d.y?
@setState currentSubToolIndex: d.subToolIndex
@setState currentSubtool: d.tool if d.tool?
else
classifications = @state.classifications
classifications[@state.classificationIndex].annotation[k] = v for k, v of d
# PB: Saving STI's notes here in case we decide tools should fully
# replace annotation hash rather than selectively update by key as above:
# not clear whether we should replace annotations, or append to it --STI
# classifications[@state.classificationIndex].annotation = d #[k] = v for k, v of d
@setState
classifications: classifications
, =>
@forceUpdate()
cleanAnnotation: (annotation) ->
# remove extra white spaces in strings as they are not important
# and only cause confusion during verifying
for k,v of annotation
if typeof v is 'string'
v = v.trim()
# replace multiple spaces and tabs with single space
v = v.replace(/[ \t]+/g, " ")
# replace multiple new lines with a single new line
v = v.replace(/\n+/g, "\n")
# remove spaces around new line
v = v.replace(/ ?\n ?/g, "\n")
annotation[k] = v
handleTaskComplete: (d) ->
@handleDataFromTool(d)
# clean the classification's annotation before submit
@cleanAnnotation @getCurrentClassification().annotation
@commitClassificationAndContinue d
|
[
{
"context": "module.exports = [\n (\n ) ->\n\n password: false\n\n]\n",
"end": 50,
"score": 0.9992626905441284,
"start": 45,
"tag": "PASSWORD",
"value": "false"
}
] | client/app/src/services/admin_service.coffee | codyseibert/webdevpro | 1 | module.exports = [
(
) ->
password: false
]
| 46958 | module.exports = [
(
) ->
password: <PASSWORD>
]
| true | module.exports = [
(
) ->
password: PI:PASSWORD:<PASSWORD>END_PI
]
|
[
{
"context": "config.e2ePort %>'\n hostname: '0.0.0.0'\n middleware: (connect) ->\n ",
"end": 1863,
"score": 0.9293089509010315,
"start": 1856,
"tag": "IP_ADDRESS",
"value": "0.0.0.0"
},
{
"context": "rocess.env.SAUCE_USERNAME\n ... | Gruntfile.coffee | rubenv/angular-import-scope | 1 | module.exports = (grunt) ->
@loadNpmTasks('grunt-bump')
@loadNpmTasks('grunt-contrib-clean')
@loadNpmTasks('grunt-contrib-concat')
@loadNpmTasks('grunt-contrib-connect')
@loadNpmTasks('grunt-contrib-jshint')
@loadNpmTasks('grunt-contrib-uglify')
@loadNpmTasks('grunt-contrib-watch')
@loadNpmTasks('grunt-jscs')
@loadNpmTasks('grunt-ng-annotate')
@loadNpmTasks('grunt-protractor-runner')
@loadNpmTasks('grunt-sauce-tunnel')
@loadNpmTasks('grunt-shell')
@initConfig
config:
name: 'angular-import-scope'
e2ePort: 9000
jshint:
lib:
options:
jshintrc: '.jshintrc'
files:
src: ['src/**.js']
test:
options:
jshintrc: '.jshintrc-test'
files:
src: ['test/*{,/*}.js']
jscs:
lib:
options:
config: '.jscs.json'
files:
src: ['src/**.js']
concat:
dist:
files:
'dist/<%= config.name %>.js': ['src/*.js']
uglify:
dist:
files:
'dist/<%= config.name %>.min.js': 'dist/<%= config.name %>.js'
clean:
all: ['dist']
watch:
options:
livereload: true
all:
files: ['src/**.js', 'test/*{,/*}']
tasks: ['build', 'protractor:dev']
ngAnnotate:
dist:
files:
'dist/<%= config.name %>.js': 'dist/<%= config.name %>.js'
connect:
e2e:
options:
port: '<%= config.e2ePort %>'
hostname: '0.0.0.0'
middleware: (connect) ->
return [
connect.static(__dirname)
]
protractor:
options:
noColor: false
dev:
configFile: 'test-config.js'
options:
keepAlive: true
args:
chromeOnly: true
ci:
configFile: 'test-config.js'
options:
args:
browser: 'firefox'
bump:
options:
files: ['package.json', 'bower.json']
commitFiles: ['-a']
pushTo: 'origin'
shell:
protractor_update:
command: './node_modules/.bin/webdriver-manager update'
options:
stdout: true
sauce_tunnel:
ci:
options:
username: process.env.SAUCE_USERNAME
key: process.env.SAUCE_ACCESS_KEY
identifier: process.env.TRAVIS_JOB_NUMBER || 'test'
browsers = require('open-sauce-browsers')('angular-import-scope')
protractorConfig = @config('protractor')
browserTasks = []
for browser, index in browsers
protractorConfig['ci_' + index] = {
configFile: 'test-config.js'
options:
keepAlive: false
args:
sauceUser: process.env.SAUCE_USERNAME
sauceKey: process.env.SAUCE_ACCESS_KEY
capabilities: browser
}
browserTasks.push('protractor:ci_' + index)
@config('protractor', protractorConfig)
@registerTask 'ci_saucelabs', browserTasks
@registerTask 'default', ['test']
@registerTask 'build', ['clean', 'jshint', 'jscs', 'concat', 'ngAnnotate', 'uglify']
@registerTask 'test', ['build', 'shell:protractor_update', 'connect:e2e', 'protractor:dev', 'watch:all']
@registerTask 'ci', ['build', 'shell:protractor_update', 'connect:e2e', 'protractor:ci']
@registerTask 'saucelabs', ['build', 'shell:protractor_update', 'sauce_tunnel', 'connect:e2e', 'ci_saucelabs']
| 23387 | module.exports = (grunt) ->
@loadNpmTasks('grunt-bump')
@loadNpmTasks('grunt-contrib-clean')
@loadNpmTasks('grunt-contrib-concat')
@loadNpmTasks('grunt-contrib-connect')
@loadNpmTasks('grunt-contrib-jshint')
@loadNpmTasks('grunt-contrib-uglify')
@loadNpmTasks('grunt-contrib-watch')
@loadNpmTasks('grunt-jscs')
@loadNpmTasks('grunt-ng-annotate')
@loadNpmTasks('grunt-protractor-runner')
@loadNpmTasks('grunt-sauce-tunnel')
@loadNpmTasks('grunt-shell')
@initConfig
config:
name: 'angular-import-scope'
e2ePort: 9000
jshint:
lib:
options:
jshintrc: '.jshintrc'
files:
src: ['src/**.js']
test:
options:
jshintrc: '.jshintrc-test'
files:
src: ['test/*{,/*}.js']
jscs:
lib:
options:
config: '.jscs.json'
files:
src: ['src/**.js']
concat:
dist:
files:
'dist/<%= config.name %>.js': ['src/*.js']
uglify:
dist:
files:
'dist/<%= config.name %>.min.js': 'dist/<%= config.name %>.js'
clean:
all: ['dist']
watch:
options:
livereload: true
all:
files: ['src/**.js', 'test/*{,/*}']
tasks: ['build', 'protractor:dev']
ngAnnotate:
dist:
files:
'dist/<%= config.name %>.js': 'dist/<%= config.name %>.js'
connect:
e2e:
options:
port: '<%= config.e2ePort %>'
hostname: '0.0.0.0'
middleware: (connect) ->
return [
connect.static(__dirname)
]
protractor:
options:
noColor: false
dev:
configFile: 'test-config.js'
options:
keepAlive: true
args:
chromeOnly: true
ci:
configFile: 'test-config.js'
options:
args:
browser: 'firefox'
bump:
options:
files: ['package.json', 'bower.json']
commitFiles: ['-a']
pushTo: 'origin'
shell:
protractor_update:
command: './node_modules/.bin/webdriver-manager update'
options:
stdout: true
sauce_tunnel:
ci:
options:
username: process.env.SAUCE_USERNAME
key: <KEY>SA<KEY>
identifier: process.env.TRAVIS_JOB_NUMBER || 'test'
browsers = require('open-sauce-browsers')('angular-import-scope')
protractorConfig = @config('protractor')
browserTasks = []
for browser, index in browsers
protractorConfig['ci_' + index] = {
configFile: 'test-config.js'
options:
keepAlive: false
args:
sauceUser: process.env.SAUCE_USERNAME
sauceKey: process.env.SAUCE_ACCESS_KEY
capabilities: browser
}
browserTasks.push('protractor:ci_' + index)
@config('protractor', protractorConfig)
@registerTask 'ci_saucelabs', browserTasks
@registerTask 'default', ['test']
@registerTask 'build', ['clean', 'jshint', 'jscs', 'concat', 'ngAnnotate', 'uglify']
@registerTask 'test', ['build', 'shell:protractor_update', 'connect:e2e', 'protractor:dev', 'watch:all']
@registerTask 'ci', ['build', 'shell:protractor_update', 'connect:e2e', 'protractor:ci']
@registerTask 'saucelabs', ['build', 'shell:protractor_update', 'sauce_tunnel', 'connect:e2e', 'ci_saucelabs']
| true | module.exports = (grunt) ->
@loadNpmTasks('grunt-bump')
@loadNpmTasks('grunt-contrib-clean')
@loadNpmTasks('grunt-contrib-concat')
@loadNpmTasks('grunt-contrib-connect')
@loadNpmTasks('grunt-contrib-jshint')
@loadNpmTasks('grunt-contrib-uglify')
@loadNpmTasks('grunt-contrib-watch')
@loadNpmTasks('grunt-jscs')
@loadNpmTasks('grunt-ng-annotate')
@loadNpmTasks('grunt-protractor-runner')
@loadNpmTasks('grunt-sauce-tunnel')
@loadNpmTasks('grunt-shell')
@initConfig
config:
name: 'angular-import-scope'
e2ePort: 9000
jshint:
lib:
options:
jshintrc: '.jshintrc'
files:
src: ['src/**.js']
test:
options:
jshintrc: '.jshintrc-test'
files:
src: ['test/*{,/*}.js']
jscs:
lib:
options:
config: '.jscs.json'
files:
src: ['src/**.js']
concat:
dist:
files:
'dist/<%= config.name %>.js': ['src/*.js']
uglify:
dist:
files:
'dist/<%= config.name %>.min.js': 'dist/<%= config.name %>.js'
clean:
all: ['dist']
watch:
options:
livereload: true
all:
files: ['src/**.js', 'test/*{,/*}']
tasks: ['build', 'protractor:dev']
ngAnnotate:
dist:
files:
'dist/<%= config.name %>.js': 'dist/<%= config.name %>.js'
connect:
e2e:
options:
port: '<%= config.e2ePort %>'
hostname: '0.0.0.0'
middleware: (connect) ->
return [
connect.static(__dirname)
]
protractor:
options:
noColor: false
dev:
configFile: 'test-config.js'
options:
keepAlive: true
args:
chromeOnly: true
ci:
configFile: 'test-config.js'
options:
args:
browser: 'firefox'
bump:
options:
files: ['package.json', 'bower.json']
commitFiles: ['-a']
pushTo: 'origin'
shell:
protractor_update:
command: './node_modules/.bin/webdriver-manager update'
options:
stdout: true
sauce_tunnel:
ci:
options:
username: process.env.SAUCE_USERNAME
key: PI:KEY:<KEY>END_PISAPI:KEY:<KEY>END_PI
identifier: process.env.TRAVIS_JOB_NUMBER || 'test'
browsers = require('open-sauce-browsers')('angular-import-scope')
protractorConfig = @config('protractor')
browserTasks = []
for browser, index in browsers
protractorConfig['ci_' + index] = {
configFile: 'test-config.js'
options:
keepAlive: false
args:
sauceUser: process.env.SAUCE_USERNAME
sauceKey: process.env.SAUCE_ACCESS_KEY
capabilities: browser
}
browserTasks.push('protractor:ci_' + index)
@config('protractor', protractorConfig)
@registerTask 'ci_saucelabs', browserTasks
@registerTask 'default', ['test']
@registerTask 'build', ['clean', 'jshint', 'jscs', 'concat', 'ngAnnotate', 'uglify']
@registerTask 'test', ['build', 'shell:protractor_update', 'connect:e2e', 'protractor:dev', 'watch:all']
@registerTask 'ci', ['build', 'shell:protractor_update', 'connect:e2e', 'protractor:ci']
@registerTask 'saucelabs', ['build', 'shell:protractor_update', 'sauce_tunnel', 'connect:e2e', 'ci_saucelabs']
|
[
{
"context": "gs.get('servers')\n .findWhere(url: \"http://127.0.0.1:5000\")\n applicationSettings.set 'activeServe",
"end": 1626,
"score": 0.9995460510253906,
"start": 1617,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "nitsspiyi\",\n \"enterer\": {\n ... | test/views/form.spec.coffee | OpenSourceFieldlinguistics/dative | 7 | # Tests for `FormView`
#
# Right now this just tests that interlinearize works with search patterns.
# There was a bug where doing a regex search for a space in an IGT value would
# cause single-character words to fail to be displayed.
define (require) ->
# Note: If you don't load `UserView` before `FormView`, you'll get the
# following error (which seems like a circular dependency thing ...)::
#
# Uncaught TypeError: Cannot read property 'prototype' of undefined
# (enterer-field-display.coffee:1)
globals = require '../../../scripts/utils/globals'
ApplicationSettingsModel = require '../../../scripts/models/application-settings'
FormModel = require '../../../scripts/models/form'
UserView = require '../../../scripts/views/user-old'
FormView = require '../../../scripts/views/form'
describe '`FormView`', ->
before ->
@spied = [
'interlinearize'
'_interlinearize'
'hideIGTFields'
'toggleHistory'
'disableHistoryButton'
]
for method in @spied
sinon.spy FormView::, method
spinStub = sinon.stub FormView::, 'spin', -> console.log 'spinning'
stopSpinStub = sinon.stub FormView::, 'stopSpin', -> console.log 'stopping spinning'
@$fixture = $ '<div id="view-fixture"></div>'
beforeEach ->
# Reset spies
for method in @spied
FormView::[method].reset()
# Global app settings needs to be (the default) OLD one.
applicationSettings = new ApplicationSettingsModel()
oldLocalServer = applicationSettings.get('servers')
.findWhere(url: "http://127.0.0.1:5000")
applicationSettings.set 'activeServer', oldLocalServer
globals.applicationSettings = applicationSettings
@$fixture.empty().appendTo $('#fixtures')
@$fixture.prepend '<div id="form"></div>'
afterEach -> $('#fixtures').empty()
# Return a FormView whose $el is in our fixture.
getForm = (populate=true) ->
if populate
formModel = new FormModel formObject
else
formModel = new FormModel()
formView = new FormView model: formModel
formView.setElement $('#form')
formView
# Interlinearize
# TODO: this should be in the test spec for `FormBaseView`
describe '`@interlinearize`', ->
it 'highlights a single-char regex that matches one morpheme break word',
(done) ->
# Simulate a search for /k/ in morpheme_break
formView = getForm()
formView.searchPatternsObject = morpheme_break: /((?:k))/g
formView.render()
x = ->
$morphemeBreakIGTCells =
$ '.igt-tables-container .igt-word-cell.morpheme-break-value'
$morphemeBreakIGTCellsWithHighlight =
$morphemeBreakIGTCells.find 'span.dative-state-highlight'
# Three columns for a 3-word form
expect($morphemeBreakIGTCells.length).to.equal 3
# One column has a search match highlight in it: /k/ 'COMP' matches /k/
expect($morphemeBreakIGTCellsWithHighlight.length).to.equal 1
expect(formView.interlinearize).to.have.been.calledOnce
expect(formView._interlinearize).to.have.been.calledOnce
expect(formView.hideIGTFields).to.have.been.calledOnce
done()
# We need `setTimeout` because `interlinearize` uses a 1-millisecond
# delay.
setTimeout x, 3
it 'highlights nothing on regex search for space character', (done) ->
# Simulate a search for /( )/ in morpheme_break
formView = getForm()
formView.searchPatternsObject = morpheme_break: /((?:( )))/g
formView.render()
x = ->
$morphemeBreakIGTCells =
$ '.igt-tables-container .igt-word-cell.morpheme-break-value'
$morphemeBreakIGTCellsWithHighlight =
$morphemeBreakIGTCells.find 'span.dative-state-highlight'
expect($morphemeBreakIGTCells.length).to.equal 3
# No columns have search match highlights in them (because spaces
# aren't represented overtly; they are the spaces between columns).
expect($morphemeBreakIGTCellsWithHighlight.length).to.equal 0
expect(formView.interlinearize).to.have.been.calledOnce
expect(formView._interlinearize).to.have.been.calledOnce
expect(formView.hideIGTFields).to.have.been.calledOnce
done()
setTimeout x, 3
# HTML
describe 'its HTML', ->
it 'has a header which is hidden by default', ->
formView = getForm false
formView.render()
expect(formView.$('div.dative-widget-header').length).to.equal 1
expect(formView.$('div.dative-widget-header').first().is(':visible'))
.to.be.false
it 'has no header title text', ->
$headerTitleDiv = $('div.dative-widget-header').first()
.find 'div.dative-widget-header-title'
expect($headerTitleDiv.text()).to.equal ''
describe 'with an empty model, it ...', ->
it 'has update, export, and settings buttons', ->
formView = getForm false
formView.render()
expect(formView.$('button.update-resource').length).to.equal 1
expect(formView.$('button.export-resource').length).to.equal 1
expect(formView.$('button.settings').length).to.equal 1
it 'does NOT have delete, duplicate, history, controls, or data buttons',
->
formView = getForm false
formView.render()
expect(formView.$('button.delete-resource').length).to.equal 0
expect(formView.$('button.duplicate-resource').length).to.equal 0
expect(formView.$('button.resource-history').length).to.equal 0
expect(formView.$('button.controls').length).to.equal 0
expect(formView.$('button.file-data').length).to.equal 0
describe 'with a non-empty model, it ...', ->
it 'has update, export, delete, duplicate, history, and settings
buttons', ->
formView = getForm()
formView.render()
expect(formView.$('button.update-resource').length).to.equal 1
expect(formView.$('button.delete-resource').length).to.equal 1
expect(formView.$('button.duplicate-resource').length).to.equal 1
expect(formView.$('button.export-resource').length).to.equal 1
expect(formView.$('button.resource-history').length).to.equal 1
expect(formView.$('button.settings').length).to.equal 1
it 'does NOT have controls or data buttons', ->
formView = getForm()
formView.render()
expect(formView.$('button.controls').length).to.equal 0
expect(formView.$('button.file-data').length).to.equal 0
describe 'History functionality', ->
describe 'its init state', ->
it 'starts off with no previous versions', ->
formView = getForm()
formView.render()
previousVersionsDivIsEmpty =
formView.$('div.resource-previous-versions').first().is ':empty'
expect(formView.previousVersionModels).to.be.empty
expect(formView.previousVersionView).to.be.empty
expect(previousVersionsDivIsEmpty).to.be.true
it 'involves no history event responders having been called', ->
sinon.spy FormView::, 'fetchHistoryFormStart'
sinon.spy FormView::, 'fetchHistoryFormEnd'
sinon.spy FormView::, 'fetchHistoryFormSuccess'
sinon.spy FormView::, 'fetchHistoryFormFail'
formView = getForm()
expect(formView.fetchHistoryFormStart).not.to.have.been.called
expect(formView.fetchHistoryFormEnd).not.to.have.been.called
expect(formView.fetchHistoryFormFail).not.to.have.been.called
expect(formView.fetchHistoryFormSuccess).not.to.have.been.called
FormView::fetchHistoryFormStart.restore()
FormView::fetchHistoryFormEnd.restore()
FormView::fetchHistoryFormSuccess.restore()
FormView::fetchHistoryFormFail.restore()
describe 'its “history” button', ->
it 'triggers `@toggleHistory` when clicked', ->
formView = getForm()
formView.render()
$historyButton = formView.$('button.resource-history').first()
expect(formView.toggleHistory).not.to.have.been.called
expect(formView.disableHistoryButton).not.to.have.been.called
expect($historyButton.button 'option', 'disabled').to.be.false
$historyButton.click()
expect(formView.toggleHistory).to.have.been.calledOnce
expect(formView.disableHistoryButton).to.have.been.calledOnce
# Unsure why the following is failing. I must not be understanding
# the jQuery button API ...
#expect($historyButton.button 'option', 'disabled').to.be.true
# An object for creating an OLD-style `FormModel` instance. Core values:
#
# nitsspiyi k nitsspiyi
# /nit-ihpiyi k nit-ihpiyi/
# 1-dance COMP 1-dance
formObject = {
"files": [],
"syntax": "",
"morpheme_break_ids": [
[
[
[
14639,
"1",
"agra"
]
],
[
[
2394,
"dance",
"vai"
]
]
],
[
[
[
14957,
"2",
"agra"
],
[
17363,
"IMP.PL",
"agrb"
]
]
],
[
[
[
14639,
"1",
"agra"
]
],
[
[
2394,
"dance",
"vai"
]
]
]
],
"grammaticality": "",
"datetime_modified": "2015-10-03T18:13:13",
"morpheme_gloss_ids": [
[
[
[
14639,
"nit",
"agra"
]
],
[
[
2394,
"ihpiyi",
"vai"
]
]
],
[
[]
],
[
[
[
14639,
"nit",
"agra"
]
],
[
[
2394,
"ihpiyi",
"vai"
]
]
]
],
"date_elicited": null,
"morpheme_gloss": "1-dance COMP 1-dance",
"id": 25111,
"datetime_entered": "2015-09-11T14:17:29",
"transcription": "nitsspiyi k nitsspiyi",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"speaker_comments": "",
"status": "tested",
"elicitor": null,
"break_gloss_category": "nit|1|agra-ihpiyi|dance|vai k|COMP|agra nit|1|agra-ihpiyi|dance|vai",
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "I danced that I danced",
"grammaticality": "",
"id": 25225
}
],
"syntactic_category": null,
"phonetic_transcription": "",
"semantics": "",
"UUID": "5a4ec347-2b03-4146-9f4d-9736fc03620f",
"narrow_phonetic_transcription": "",
"syntactic_category_string": "agra-vai agra agra-vai",
"morpheme_break": "nit-ihpiyi k nit-ihpiyi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
}
}
# An object containing both an OLD-style form and its previous versions. See
# attributes `form` and `previous_versions`. Core values:
#
# Áístotoinoyiiawa anni Piitaakiiyi.
# aist-oto-ino-yii-wa ann-yi piitaakii-yi
# to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG
# ‘He came to see Piitaakii.’
formObjectWithHistory = {
"previous_versions": [
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[
[
25107,
"eagle.woman",
"PN"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"datetime_modified": "2015-09-01T20:41:10",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34873,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi piitaakii-yi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"datetime_modified": "2015-09-01T20:40:36",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34615,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-09-01T18:44:31",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34614,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-09-01T17:53:35",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34611,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yxi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yxi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-08-31T20:35:11",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34610,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
}
],
"form": {
"files": [],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[
[
25107,
"eagle.woman",
"PN"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"grammaticality": "",
"datetime_modified": "2015-09-04T01:01:13",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 25105,
"datetime_entered": "2015-08-31T20:35:11",
"transcription": "Áístotoinoyiiawa anni Piitaakiiyi.",
"enterer": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"speaker_comments": "",
"status": "tested",
"elicitor": null,
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntactic_category": null,
"phonetic_transcription": "",
"semantics": "",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"morpheme_break": "aist-oto-ino-yii-wa ann-yi piitaakii-yi",
"modifier": {
"first_name": "Joel",
"last_name": "Dunham",
"role": "administrator",
"id": 1
}
}
}
| 161851 | # Tests for `FormView`
#
# Right now this just tests that interlinearize works with search patterns.
# There was a bug where doing a regex search for a space in an IGT value would
# cause single-character words to fail to be displayed.
define (require) ->
# Note: If you don't load `UserView` before `FormView`, you'll get the
# following error (which seems like a circular dependency thing ...)::
#
# Uncaught TypeError: Cannot read property 'prototype' of undefined
# (enterer-field-display.coffee:1)
globals = require '../../../scripts/utils/globals'
ApplicationSettingsModel = require '../../../scripts/models/application-settings'
FormModel = require '../../../scripts/models/form'
UserView = require '../../../scripts/views/user-old'
FormView = require '../../../scripts/views/form'
describe '`FormView`', ->
before ->
@spied = [
'interlinearize'
'_interlinearize'
'hideIGTFields'
'toggleHistory'
'disableHistoryButton'
]
for method in @spied
sinon.spy FormView::, method
spinStub = sinon.stub FormView::, 'spin', -> console.log 'spinning'
stopSpinStub = sinon.stub FormView::, 'stopSpin', -> console.log 'stopping spinning'
@$fixture = $ '<div id="view-fixture"></div>'
beforeEach ->
# Reset spies
for method in @spied
FormView::[method].reset()
# Global app settings needs to be (the default) OLD one.
applicationSettings = new ApplicationSettingsModel()
oldLocalServer = applicationSettings.get('servers')
.findWhere(url: "http://127.0.0.1:5000")
applicationSettings.set 'activeServer', oldLocalServer
globals.applicationSettings = applicationSettings
@$fixture.empty().appendTo $('#fixtures')
@$fixture.prepend '<div id="form"></div>'
afterEach -> $('#fixtures').empty()
# Return a FormView whose $el is in our fixture.
getForm = (populate=true) ->
if populate
formModel = new FormModel formObject
else
formModel = new FormModel()
formView = new FormView model: formModel
formView.setElement $('#form')
formView
# Interlinearize
# TODO: this should be in the test spec for `FormBaseView`
describe '`@interlinearize`', ->
it 'highlights a single-char regex that matches one morpheme break word',
(done) ->
# Simulate a search for /k/ in morpheme_break
formView = getForm()
formView.searchPatternsObject = morpheme_break: /((?:k))/g
formView.render()
x = ->
$morphemeBreakIGTCells =
$ '.igt-tables-container .igt-word-cell.morpheme-break-value'
$morphemeBreakIGTCellsWithHighlight =
$morphemeBreakIGTCells.find 'span.dative-state-highlight'
# Three columns for a 3-word form
expect($morphemeBreakIGTCells.length).to.equal 3
# One column has a search match highlight in it: /k/ 'COMP' matches /k/
expect($morphemeBreakIGTCellsWithHighlight.length).to.equal 1
expect(formView.interlinearize).to.have.been.calledOnce
expect(formView._interlinearize).to.have.been.calledOnce
expect(formView.hideIGTFields).to.have.been.calledOnce
done()
# We need `setTimeout` because `interlinearize` uses a 1-millisecond
# delay.
setTimeout x, 3
it 'highlights nothing on regex search for space character', (done) ->
# Simulate a search for /( )/ in morpheme_break
formView = getForm()
formView.searchPatternsObject = morpheme_break: /((?:( )))/g
formView.render()
x = ->
$morphemeBreakIGTCells =
$ '.igt-tables-container .igt-word-cell.morpheme-break-value'
$morphemeBreakIGTCellsWithHighlight =
$morphemeBreakIGTCells.find 'span.dative-state-highlight'
expect($morphemeBreakIGTCells.length).to.equal 3
# No columns have search match highlights in them (because spaces
# aren't represented overtly; they are the spaces between columns).
expect($morphemeBreakIGTCellsWithHighlight.length).to.equal 0
expect(formView.interlinearize).to.have.been.calledOnce
expect(formView._interlinearize).to.have.been.calledOnce
expect(formView.hideIGTFields).to.have.been.calledOnce
done()
setTimeout x, 3
# HTML
describe 'its HTML', ->
it 'has a header which is hidden by default', ->
formView = getForm false
formView.render()
expect(formView.$('div.dative-widget-header').length).to.equal 1
expect(formView.$('div.dative-widget-header').first().is(':visible'))
.to.be.false
it 'has no header title text', ->
$headerTitleDiv = $('div.dative-widget-header').first()
.find 'div.dative-widget-header-title'
expect($headerTitleDiv.text()).to.equal ''
describe 'with an empty model, it ...', ->
it 'has update, export, and settings buttons', ->
formView = getForm false
formView.render()
expect(formView.$('button.update-resource').length).to.equal 1
expect(formView.$('button.export-resource').length).to.equal 1
expect(formView.$('button.settings').length).to.equal 1
it 'does NOT have delete, duplicate, history, controls, or data buttons',
->
formView = getForm false
formView.render()
expect(formView.$('button.delete-resource').length).to.equal 0
expect(formView.$('button.duplicate-resource').length).to.equal 0
expect(formView.$('button.resource-history').length).to.equal 0
expect(formView.$('button.controls').length).to.equal 0
expect(formView.$('button.file-data').length).to.equal 0
describe 'with a non-empty model, it ...', ->
it 'has update, export, delete, duplicate, history, and settings
buttons', ->
formView = getForm()
formView.render()
expect(formView.$('button.update-resource').length).to.equal 1
expect(formView.$('button.delete-resource').length).to.equal 1
expect(formView.$('button.duplicate-resource').length).to.equal 1
expect(formView.$('button.export-resource').length).to.equal 1
expect(formView.$('button.resource-history').length).to.equal 1
expect(formView.$('button.settings').length).to.equal 1
it 'does NOT have controls or data buttons', ->
formView = getForm()
formView.render()
expect(formView.$('button.controls').length).to.equal 0
expect(formView.$('button.file-data').length).to.equal 0
describe 'History functionality', ->
describe 'its init state', ->
it 'starts off with no previous versions', ->
formView = getForm()
formView.render()
previousVersionsDivIsEmpty =
formView.$('div.resource-previous-versions').first().is ':empty'
expect(formView.previousVersionModels).to.be.empty
expect(formView.previousVersionView).to.be.empty
expect(previousVersionsDivIsEmpty).to.be.true
it 'involves no history event responders having been called', ->
sinon.spy FormView::, 'fetchHistoryFormStart'
sinon.spy FormView::, 'fetchHistoryFormEnd'
sinon.spy FormView::, 'fetchHistoryFormSuccess'
sinon.spy FormView::, 'fetchHistoryFormFail'
formView = getForm()
expect(formView.fetchHistoryFormStart).not.to.have.been.called
expect(formView.fetchHistoryFormEnd).not.to.have.been.called
expect(formView.fetchHistoryFormFail).not.to.have.been.called
expect(formView.fetchHistoryFormSuccess).not.to.have.been.called
FormView::fetchHistoryFormStart.restore()
FormView::fetchHistoryFormEnd.restore()
FormView::fetchHistoryFormSuccess.restore()
FormView::fetchHistoryFormFail.restore()
describe 'its “history” button', ->
it 'triggers `@toggleHistory` when clicked', ->
formView = getForm()
formView.render()
$historyButton = formView.$('button.resource-history').first()
expect(formView.toggleHistory).not.to.have.been.called
expect(formView.disableHistoryButton).not.to.have.been.called
expect($historyButton.button 'option', 'disabled').to.be.false
$historyButton.click()
expect(formView.toggleHistory).to.have.been.calledOnce
expect(formView.disableHistoryButton).to.have.been.calledOnce
# Unsure why the following is failing. I must not be understanding
# the jQuery button API ...
#expect($historyButton.button 'option', 'disabled').to.be.true
# An object for creating an OLD-style `FormModel` instance. Core values:
#
# nitsspiyi k nitsspiyi
# /nit-ihpiyi k nit-ihpiyi/
# 1-dance COMP 1-dance
formObject = {
"files": [],
"syntax": "",
"morpheme_break_ids": [
[
[
[
14639,
"1",
"agra"
]
],
[
[
2394,
"dance",
"vai"
]
]
],
[
[
[
14957,
"2",
"agra"
],
[
17363,
"IMP.PL",
"agrb"
]
]
],
[
[
[
14639,
"1",
"agra"
]
],
[
[
2394,
"dance",
"vai"
]
]
]
],
"grammaticality": "",
"datetime_modified": "2015-10-03T18:13:13",
"morpheme_gloss_ids": [
[
[
[
14639,
"nit",
"agra"
]
],
[
[
2394,
"ihpiyi",
"vai"
]
]
],
[
[]
],
[
[
[
14639,
"nit",
"agra"
]
],
[
[
2394,
"ihpiyi",
"vai"
]
]
]
],
"date_elicited": null,
"morpheme_gloss": "1-dance COMP 1-dance",
"id": 25111,
"datetime_entered": "2015-09-11T14:17:29",
"transcription": "nitsspiyi k nitsspiyi",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"speaker_comments": "",
"status": "tested",
"elicitor": null,
"break_gloss_category": "nit|1|agra-ihpiyi|dance|vai k|COMP|agra nit|1|agra-ihpiyi|dance|vai",
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "I danced that I danced",
"grammaticality": "",
"id": 25225
}
],
"syntactic_category": null,
"phonetic_transcription": "",
"semantics": "",
"UUID": "5a4ec347-2b03-4146-9f4d-9736fc03620f",
"narrow_phonetic_transcription": "",
"syntactic_category_string": "agra-vai agra agra-vai",
"morpheme_break": "nit-ihpiyi k nit-ihpiyi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
}
}
# An object containing both an OLD-style form and its previous versions. See
# attributes `form` and `previous_versions`. Core values:
#
# <NAME>́ístotoinoyiiawa <NAME> <NAME>.
# aist-oto-ino-yii-wa ann-yi <NAME>itaakii-yi
# to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG
# ‘He came to see <NAME>.’
formObjectWithHistory = {
"previous_versions": [
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see <NAME>.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[
[
25107,
"eagle.woman",
"PN"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"datetime_modified": "2015-09-01T20:41:10",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34873,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi piitaakii-yi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"datetime_modified": "2015-09-01T20:40:36",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34615,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-09-01T18:44:31",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34614,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-09-01T17:53:35",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34611,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yxi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni <NAME>yi.",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yxi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-08-31T20:35:11",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34610,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
}
],
"form": {
"files": [],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[
[
25107,
"eagle.woman",
"PN"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"grammaticality": "",
"datetime_modified": "2015-09-04T01:01:13",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 25105,
"datetime_entered": "2015-08-31T20:35:11",
"transcription": "Áístotoinoyiiawa anni Piitaakiiyi.",
"enterer": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"speaker_comments": "",
"status": "tested",
"elicitor": null,
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntactic_category": null,
"phonetic_transcription": "",
"semantics": "",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"morpheme_break": "aist-oto-ino-yii-wa ann-yi piitaakii-yi",
"modifier": {
"first_name": "<NAME>",
"last_name": "<NAME>",
"role": "administrator",
"id": 1
}
}
}
| true | # Tests for `FormView`
#
# Right now this just tests that interlinearize works with search patterns.
# There was a bug where doing a regex search for a space in an IGT value would
# cause single-character words to fail to be displayed.
define (require) ->
# Note: If you don't load `UserView` before `FormView`, you'll get the
# following error (which seems like a circular dependency thing ...)::
#
# Uncaught TypeError: Cannot read property 'prototype' of undefined
# (enterer-field-display.coffee:1)
globals = require '../../../scripts/utils/globals'
ApplicationSettingsModel = require '../../../scripts/models/application-settings'
FormModel = require '../../../scripts/models/form'
UserView = require '../../../scripts/views/user-old'
FormView = require '../../../scripts/views/form'
describe '`FormView`', ->
before ->
@spied = [
'interlinearize'
'_interlinearize'
'hideIGTFields'
'toggleHistory'
'disableHistoryButton'
]
for method in @spied
sinon.spy FormView::, method
spinStub = sinon.stub FormView::, 'spin', -> console.log 'spinning'
stopSpinStub = sinon.stub FormView::, 'stopSpin', -> console.log 'stopping spinning'
@$fixture = $ '<div id="view-fixture"></div>'
beforeEach ->
# Reset spies
for method in @spied
FormView::[method].reset()
# Global app settings needs to be (the default) OLD one.
applicationSettings = new ApplicationSettingsModel()
oldLocalServer = applicationSettings.get('servers')
.findWhere(url: "http://127.0.0.1:5000")
applicationSettings.set 'activeServer', oldLocalServer
globals.applicationSettings = applicationSettings
@$fixture.empty().appendTo $('#fixtures')
@$fixture.prepend '<div id="form"></div>'
afterEach -> $('#fixtures').empty()
# Return a FormView whose $el is in our fixture.
getForm = (populate=true) ->
if populate
formModel = new FormModel formObject
else
formModel = new FormModel()
formView = new FormView model: formModel
formView.setElement $('#form')
formView
# Interlinearize
# TODO: this should be in the test spec for `FormBaseView`
describe '`@interlinearize`', ->
it 'highlights a single-char regex that matches one morpheme break word',
(done) ->
# Simulate a search for /k/ in morpheme_break
formView = getForm()
formView.searchPatternsObject = morpheme_break: /((?:k))/g
formView.render()
x = ->
$morphemeBreakIGTCells =
$ '.igt-tables-container .igt-word-cell.morpheme-break-value'
$morphemeBreakIGTCellsWithHighlight =
$morphemeBreakIGTCells.find 'span.dative-state-highlight'
# Three columns for a 3-word form
expect($morphemeBreakIGTCells.length).to.equal 3
# One column has a search match highlight in it: /k/ 'COMP' matches /k/
expect($morphemeBreakIGTCellsWithHighlight.length).to.equal 1
expect(formView.interlinearize).to.have.been.calledOnce
expect(formView._interlinearize).to.have.been.calledOnce
expect(formView.hideIGTFields).to.have.been.calledOnce
done()
# We need `setTimeout` because `interlinearize` uses a 1-millisecond
# delay.
setTimeout x, 3
it 'highlights nothing on regex search for space character', (done) ->
# Simulate a search for /( )/ in morpheme_break
formView = getForm()
formView.searchPatternsObject = morpheme_break: /((?:( )))/g
formView.render()
x = ->
$morphemeBreakIGTCells =
$ '.igt-tables-container .igt-word-cell.morpheme-break-value'
$morphemeBreakIGTCellsWithHighlight =
$morphemeBreakIGTCells.find 'span.dative-state-highlight'
expect($morphemeBreakIGTCells.length).to.equal 3
# No columns have search match highlights in them (because spaces
# aren't represented overtly; they are the spaces between columns).
expect($morphemeBreakIGTCellsWithHighlight.length).to.equal 0
expect(formView.interlinearize).to.have.been.calledOnce
expect(formView._interlinearize).to.have.been.calledOnce
expect(formView.hideIGTFields).to.have.been.calledOnce
done()
setTimeout x, 3
# HTML
describe 'its HTML', ->
it 'has a header which is hidden by default', ->
formView = getForm false
formView.render()
expect(formView.$('div.dative-widget-header').length).to.equal 1
expect(formView.$('div.dative-widget-header').first().is(':visible'))
.to.be.false
it 'has no header title text', ->
$headerTitleDiv = $('div.dative-widget-header').first()
.find 'div.dative-widget-header-title'
expect($headerTitleDiv.text()).to.equal ''
describe 'with an empty model, it ...', ->
it 'has update, export, and settings buttons', ->
formView = getForm false
formView.render()
expect(formView.$('button.update-resource').length).to.equal 1
expect(formView.$('button.export-resource').length).to.equal 1
expect(formView.$('button.settings').length).to.equal 1
it 'does NOT have delete, duplicate, history, controls, or data buttons',
->
formView = getForm false
formView.render()
expect(formView.$('button.delete-resource').length).to.equal 0
expect(formView.$('button.duplicate-resource').length).to.equal 0
expect(formView.$('button.resource-history').length).to.equal 0
expect(formView.$('button.controls').length).to.equal 0
expect(formView.$('button.file-data').length).to.equal 0
describe 'with a non-empty model, it ...', ->
it 'has update, export, delete, duplicate, history, and settings
buttons', ->
formView = getForm()
formView.render()
expect(formView.$('button.update-resource').length).to.equal 1
expect(formView.$('button.delete-resource').length).to.equal 1
expect(formView.$('button.duplicate-resource').length).to.equal 1
expect(formView.$('button.export-resource').length).to.equal 1
expect(formView.$('button.resource-history').length).to.equal 1
expect(formView.$('button.settings').length).to.equal 1
it 'does NOT have controls or data buttons', ->
formView = getForm()
formView.render()
expect(formView.$('button.controls').length).to.equal 0
expect(formView.$('button.file-data').length).to.equal 0
describe 'History functionality', ->
describe 'its init state', ->
it 'starts off with no previous versions', ->
formView = getForm()
formView.render()
previousVersionsDivIsEmpty =
formView.$('div.resource-previous-versions').first().is ':empty'
expect(formView.previousVersionModels).to.be.empty
expect(formView.previousVersionView).to.be.empty
expect(previousVersionsDivIsEmpty).to.be.true
it 'involves no history event responders having been called', ->
sinon.spy FormView::, 'fetchHistoryFormStart'
sinon.spy FormView::, 'fetchHistoryFormEnd'
sinon.spy FormView::, 'fetchHistoryFormSuccess'
sinon.spy FormView::, 'fetchHistoryFormFail'
formView = getForm()
expect(formView.fetchHistoryFormStart).not.to.have.been.called
expect(formView.fetchHistoryFormEnd).not.to.have.been.called
expect(formView.fetchHistoryFormFail).not.to.have.been.called
expect(formView.fetchHistoryFormSuccess).not.to.have.been.called
FormView::fetchHistoryFormStart.restore()
FormView::fetchHistoryFormEnd.restore()
FormView::fetchHistoryFormSuccess.restore()
FormView::fetchHistoryFormFail.restore()
describe 'its “history” button', ->
it 'triggers `@toggleHistory` when clicked', ->
formView = getForm()
formView.render()
$historyButton = formView.$('button.resource-history').first()
expect(formView.toggleHistory).not.to.have.been.called
expect(formView.disableHistoryButton).not.to.have.been.called
expect($historyButton.button 'option', 'disabled').to.be.false
$historyButton.click()
expect(formView.toggleHistory).to.have.been.calledOnce
expect(formView.disableHistoryButton).to.have.been.calledOnce
# Unsure why the following is failing. I must not be understanding
# the jQuery button API ...
#expect($historyButton.button 'option', 'disabled').to.be.true
# An object for creating an OLD-style `FormModel` instance. Core values:
#
# nitsspiyi k nitsspiyi
# /nit-ihpiyi k nit-ihpiyi/
# 1-dance COMP 1-dance
formObject = {
"files": [],
"syntax": "",
"morpheme_break_ids": [
[
[
[
14639,
"1",
"agra"
]
],
[
[
2394,
"dance",
"vai"
]
]
],
[
[
[
14957,
"2",
"agra"
],
[
17363,
"IMP.PL",
"agrb"
]
]
],
[
[
[
14639,
"1",
"agra"
]
],
[
[
2394,
"dance",
"vai"
]
]
]
],
"grammaticality": "",
"datetime_modified": "2015-10-03T18:13:13",
"morpheme_gloss_ids": [
[
[
[
14639,
"nit",
"agra"
]
],
[
[
2394,
"ihpiyi",
"vai"
]
]
],
[
[]
],
[
[
[
14639,
"nit",
"agra"
]
],
[
[
2394,
"ihpiyi",
"vai"
]
]
]
],
"date_elicited": null,
"morpheme_gloss": "1-dance COMP 1-dance",
"id": 25111,
"datetime_entered": "2015-09-11T14:17:29",
"transcription": "nitsspiyi k nitsspiyi",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"speaker_comments": "",
"status": "tested",
"elicitor": null,
"break_gloss_category": "nit|1|agra-ihpiyi|dance|vai k|COMP|agra nit|1|agra-ihpiyi|dance|vai",
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "I danced that I danced",
"grammaticality": "",
"id": 25225
}
],
"syntactic_category": null,
"phonetic_transcription": "",
"semantics": "",
"UUID": "5a4ec347-2b03-4146-9f4d-9736fc03620f",
"narrow_phonetic_transcription": "",
"syntactic_category_string": "agra-vai agra agra-vai",
"morpheme_break": "nit-ihpiyi k nit-ihpiyi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
}
}
# An object containing both an OLD-style form and its previous versions. See
# attributes `form` and `previous_versions`. Core values:
#
# Áístotoinoyiiawa anni Piitaakiiyi.
# aist-oto-ino-yii-wa ann-yi piitaakii-yi
# to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG
# ‘He came to see Piitaakii.’
formObjectWithHistory = {
"previous_versions": [
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see PI:NAME:<NAME>END_PI.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[
[
25107,
"eagle.woman",
"PN"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"datetime_modified": "2015-09-01T20:41:10",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34873,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi piitaakii-yi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"datetime_modified": "2015-09-01T20:40:36",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34615,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-09-01T18:44:31",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34614,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-09-01T17:53:35",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34611,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yxi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni PI:NAME:<NAME>END_PIyi.",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yxi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
},
{
"status": "tested",
"files": [],
"elicitor": null,
"form_id": 25105,
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"syntactic_category": null,
"grammaticality": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num ?-num",
"datetime_modified": "2015-08-31T20:35:11",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"phonetic_transcription": "",
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 34610,
"semantics": "",
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num Piitaakii|eagle.woman|?-yi|OBV.SG|num",
"datetime_entered": "2015-08-31T20:35:11",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"transcription": "Áístotoinoyiiwa anni Piitaakiiyi.",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"morpheme_break": "aist-oto-ino-yii-wa ann-yi Piitaakii-yi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"speaker_comments": ""
}
],
"form": {
"files": [],
"syntax": "",
"morpheme_break_ids": [
[
[
[
353,
"to.speaker",
"adt"
]
],
[
[
148,
"go.to.do",
"adt"
]
],
[
[
3597,
"see",
"avta"
]
],
[
[
14666,
"DIR",
"thm"
]
],
[
[
14624,
"PROX.SG",
"num"
]
]
],
[
[
[
402,
"DEM",
"drt"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
],
[
[
[
25107,
"eagle.woman",
"PN"
]
],
[
[
14634,
"OBV.SG",
"num"
]
]
]
],
"grammaticality": "",
"datetime_modified": "2015-09-04T01:01:13",
"morpheme_gloss_ids": [
[
[
[
353,
"aist",
"adt"
]
],
[
[
148,
"oto",
"adt"
]
],
[
[
3597,
"ino",
"avta"
]
],
[
[
14666,
"yii",
"thm"
]
],
[
[
14624,
"wa",
"num"
]
]
],
[
[
[
402,
"ann",
"drt"
]
],
[
[
14634,
"yi",
"num"
]
]
],
[
[
[
25107,
"piitaakii",
"PN"
]
],
[
[
14634,
"yi",
"num"
]
]
]
],
"date_elicited": null,
"morpheme_gloss": "to.speaker-go.to.do-see-DIR-PROX.SG DEM-OBV.SG eagle.woman-OBV.SG",
"id": 25105,
"datetime_entered": "2015-08-31T20:35:11",
"transcription": "Áístotoinoyiiawa anni Piitaakiiyi.",
"enterer": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
},
"comments": "",
"source": null,
"verifier": null,
"speaker": null,
"speaker_comments": "",
"status": "tested",
"elicitor": null,
"break_gloss_category": "aist|to.speaker|adt-oto|go.to.do|adt-ino|see|avta-yii|DIR|thm-wa|PROX.SG|num ann|DEM|drt-yi|OBV.SG|num piitaakii|eagle.woman|PN-yi|OBV.SG|num",
"tags": [],
"elicitation_method": null,
"translations": [
{
"transcription": "He came to see Piitaakii.",
"grammaticality": "",
"id": 25214
}
],
"syntactic_category": null,
"phonetic_transcription": "",
"semantics": "",
"UUID": "3b484bd6-86b0-49b0-a587-ba2d32c800c7",
"narrow_phonetic_transcription": "",
"syntactic_category_string": "adt-adt-avta-thm-num drt-num PN-num",
"morpheme_break": "aist-oto-ino-yii-wa ann-yi piitaakii-yi",
"modifier": {
"first_name": "PI:NAME:<NAME>END_PI",
"last_name": "PI:NAME:<NAME>END_PI",
"role": "administrator",
"id": 1
}
}
}
|
[
{
"context": "mes: 値の名字と実際のアレの対応\n #@names={ Human:{name:\"村人\",color:\"#FF0000\"}...}\n chk=(d,vals)-> # 合",
"end": 568,
"score": 0.9881263375282288,
"start": 566,
"tag": "NAME",
"value": "村人"
}
] | client/code/pages/user/graph.coffee | Yukimir/jinrou | 0 | # graph module
class Graph
margin:10
constructor:(@size)->
@canvas=document.createElement "canvas"
@canvas.height=@size+@margin*2
@canvas.width=@size+@margin*2
@ctx=@canvas.getContext '2d'
@canvas.style.float="left"
@canvas.style.clear="both"
@data=null
setData:(@data)->
class CircleGraph extends Graph
constructor:->
super
@circ=1 #0~1で円の完成度
@table=null
hide:->@circ=0
setData:(@data,@names)-> #names: 値の名字と実際のアレの対応
#@names={ Human:{name:"村人",color:"#FF0000"}...}
chk=(d,vals)-> # 合計算出 valsも一緒に作る
unless typeof d=="object"
return d
su=0
for name,value of d
if typeof value=="object"
# 入れ子
arr=[]
arr.name=name
vals?.push arr
su+=chk value,arr
else
vals?.push name
su+=value
su
@vals=[]
@sum=chk @data,@vals
#大きい順にsort
@depth=1 # 深度 最高いくつの深さがあるか
sortv=(vals,data,dp=1)=> #dp: 现在の深度
@depth=Math.max @depth,dp
vals.forEach (x)->
if x instanceof Array
sortv x,data[x.name],dp+1
vals.sort (a,b)->(chk data[if b instanceof Array then b.name else b])-(chk data[if a instanceof Array then a.name else a])
sortv @vals,@data
#table作成
if @table?.parentNode
@table.parentNode.removeChild @table
@table=document.createElement "table"
datatable= (data,vals,names,dp=0)=>
for name in vals
_name=name
if typeof name=="object" then _name=name.name
thissum=chk data[_name]
continue unless thissum
tr=@table.insertRow -1
td=tr.insertCell -1
td.style.color=names[_name].color ? "#cccccc"
i=0
spaces= (" " while i++<dp).join ""
td.textContent="#{spaces}■"
td=tr.insertCell -1
if typeof data[_name]=="object"
# 子がある
td.textContent="#{names[_name].name} #{thissum}(#{(thissum/@sum*100).toPrecision(2)}%)"
datatable data[_name],name,names[_name],dp+1
else
td.textContent="#{names[_name].name} #{data[_name]}(#{(data[_name]/@sum*100).toPrecision(2)}%)"
datatable @data,@vals,@names
if @canvas.parentNode
@canvas.parentNode.insertBefore @table,@canvas.nextSibling
@draw()
openAnimate:(sec,step=0.02)->
# sec[s]かけてオープン
step=Math.max step,sec/60 #60fps以上は出したくない
@circ=0
ss= =>
@circ+=step
if @circ>1 then @circ=1
@draw()
if @circ<1
setTimeout ss,sec/step
ss()
draw:->
ctx=@ctx
ctx.save()
ctx.translate @margin,@margin
tx=ty=r=@size/2 # グラフ中心,半径
dx=@size+@margin*2 # 説明部分左端
sum=0 #ここまでの角度合計
startangle=-Math.PI/2 #始点は上
onepart=(data,vals,names,start,dp=1)=>
#start: 始点の角度
for name in vals
_name=name
# 順番に描画
if typeof name=="object"
_name=name.name #valsのオブジェクトにはname
rad=Math.PI*2*@getsum(data[_name])/@sum*@circ
ctx.beginPath()
# 外側の弧
ctx.arc tx,ty,r*dp/@depth,start+startangle,start+rad+startangle,false
# 内側の弧
ctx.arc tx,ty,r*(dp-1)/@depth,start+rad+startangle,start+startangle,true
ctx.closePath()
ctx.fillStyle=names[_name].color ? "#cccccc"
ctx.fill()
if typeof name=="object"
# 子供たち
onepart data[_name],name,names[_name],start,dp+1
start+=rad #描画した
onepart @data,@vals,@names,0
ctx.restore()
getsum:(data)->
unless typeof data=="object"
return data
sum=0
for name,value of data
sum+=@getsum value
sum
exports.circleGraph=(size)->new CircleGraph size
| 29109 | # graph module
class Graph
margin:10
constructor:(@size)->
@canvas=document.createElement "canvas"
@canvas.height=@size+@margin*2
@canvas.width=@size+@margin*2
@ctx=@canvas.getContext '2d'
@canvas.style.float="left"
@canvas.style.clear="both"
@data=null
setData:(@data)->
class CircleGraph extends Graph
constructor:->
super
@circ=1 #0~1で円の完成度
@table=null
hide:->@circ=0
setData:(@data,@names)-> #names: 値の名字と実際のアレの対応
#@names={ Human:{name:"<NAME>",color:"#FF0000"}...}
chk=(d,vals)-> # 合計算出 valsも一緒に作る
unless typeof d=="object"
return d
su=0
for name,value of d
if typeof value=="object"
# 入れ子
arr=[]
arr.name=name
vals?.push arr
su+=chk value,arr
else
vals?.push name
su+=value
su
@vals=[]
@sum=chk @data,@vals
#大きい順にsort
@depth=1 # 深度 最高いくつの深さがあるか
sortv=(vals,data,dp=1)=> #dp: 现在の深度
@depth=Math.max @depth,dp
vals.forEach (x)->
if x instanceof Array
sortv x,data[x.name],dp+1
vals.sort (a,b)->(chk data[if b instanceof Array then b.name else b])-(chk data[if a instanceof Array then a.name else a])
sortv @vals,@data
#table作成
if @table?.parentNode
@table.parentNode.removeChild @table
@table=document.createElement "table"
datatable= (data,vals,names,dp=0)=>
for name in vals
_name=name
if typeof name=="object" then _name=name.name
thissum=chk data[_name]
continue unless thissum
tr=@table.insertRow -1
td=tr.insertCell -1
td.style.color=names[_name].color ? "#cccccc"
i=0
spaces= (" " while i++<dp).join ""
td.textContent="#{spaces}■"
td=tr.insertCell -1
if typeof data[_name]=="object"
# 子がある
td.textContent="#{names[_name].name} #{thissum}(#{(thissum/@sum*100).toPrecision(2)}%)"
datatable data[_name],name,names[_name],dp+1
else
td.textContent="#{names[_name].name} #{data[_name]}(#{(data[_name]/@sum*100).toPrecision(2)}%)"
datatable @data,@vals,@names
if @canvas.parentNode
@canvas.parentNode.insertBefore @table,@canvas.nextSibling
@draw()
openAnimate:(sec,step=0.02)->
# sec[s]かけてオープン
step=Math.max step,sec/60 #60fps以上は出したくない
@circ=0
ss= =>
@circ+=step
if @circ>1 then @circ=1
@draw()
if @circ<1
setTimeout ss,sec/step
ss()
draw:->
ctx=@ctx
ctx.save()
ctx.translate @margin,@margin
tx=ty=r=@size/2 # グラフ中心,半径
dx=@size+@margin*2 # 説明部分左端
sum=0 #ここまでの角度合計
startangle=-Math.PI/2 #始点は上
onepart=(data,vals,names,start,dp=1)=>
#start: 始点の角度
for name in vals
_name=name
# 順番に描画
if typeof name=="object"
_name=name.name #valsのオブジェクトにはname
rad=Math.PI*2*@getsum(data[_name])/@sum*@circ
ctx.beginPath()
# 外側の弧
ctx.arc tx,ty,r*dp/@depth,start+startangle,start+rad+startangle,false
# 内側の弧
ctx.arc tx,ty,r*(dp-1)/@depth,start+rad+startangle,start+startangle,true
ctx.closePath()
ctx.fillStyle=names[_name].color ? "#cccccc"
ctx.fill()
if typeof name=="object"
# 子供たち
onepart data[_name],name,names[_name],start,dp+1
start+=rad #描画した
onepart @data,@vals,@names,0
ctx.restore()
getsum:(data)->
unless typeof data=="object"
return data
sum=0
for name,value of data
sum+=@getsum value
sum
exports.circleGraph=(size)->new CircleGraph size
| true | # graph module
class Graph
margin:10
constructor:(@size)->
@canvas=document.createElement "canvas"
@canvas.height=@size+@margin*2
@canvas.width=@size+@margin*2
@ctx=@canvas.getContext '2d'
@canvas.style.float="left"
@canvas.style.clear="both"
@data=null
setData:(@data)->
class CircleGraph extends Graph
constructor:->
super
@circ=1 #0~1で円の完成度
@table=null
hide:->@circ=0
setData:(@data,@names)-> #names: 値の名字と実際のアレの対応
#@names={ Human:{name:"PI:NAME:<NAME>END_PI",color:"#FF0000"}...}
chk=(d,vals)-> # 合計算出 valsも一緒に作る
unless typeof d=="object"
return d
su=0
for name,value of d
if typeof value=="object"
# 入れ子
arr=[]
arr.name=name
vals?.push arr
su+=chk value,arr
else
vals?.push name
su+=value
su
@vals=[]
@sum=chk @data,@vals
#大きい順にsort
@depth=1 # 深度 最高いくつの深さがあるか
sortv=(vals,data,dp=1)=> #dp: 现在の深度
@depth=Math.max @depth,dp
vals.forEach (x)->
if x instanceof Array
sortv x,data[x.name],dp+1
vals.sort (a,b)->(chk data[if b instanceof Array then b.name else b])-(chk data[if a instanceof Array then a.name else a])
sortv @vals,@data
#table作成
if @table?.parentNode
@table.parentNode.removeChild @table
@table=document.createElement "table"
datatable= (data,vals,names,dp=0)=>
for name in vals
_name=name
if typeof name=="object" then _name=name.name
thissum=chk data[_name]
continue unless thissum
tr=@table.insertRow -1
td=tr.insertCell -1
td.style.color=names[_name].color ? "#cccccc"
i=0
spaces= (" " while i++<dp).join ""
td.textContent="#{spaces}■"
td=tr.insertCell -1
if typeof data[_name]=="object"
# 子がある
td.textContent="#{names[_name].name} #{thissum}(#{(thissum/@sum*100).toPrecision(2)}%)"
datatable data[_name],name,names[_name],dp+1
else
td.textContent="#{names[_name].name} #{data[_name]}(#{(data[_name]/@sum*100).toPrecision(2)}%)"
datatable @data,@vals,@names
if @canvas.parentNode
@canvas.parentNode.insertBefore @table,@canvas.nextSibling
@draw()
openAnimate:(sec,step=0.02)->
# sec[s]かけてオープン
step=Math.max step,sec/60 #60fps以上は出したくない
@circ=0
ss= =>
@circ+=step
if @circ>1 then @circ=1
@draw()
if @circ<1
setTimeout ss,sec/step
ss()
draw:->
ctx=@ctx
ctx.save()
ctx.translate @margin,@margin
tx=ty=r=@size/2 # グラフ中心,半径
dx=@size+@margin*2 # 説明部分左端
sum=0 #ここまでの角度合計
startangle=-Math.PI/2 #始点は上
onepart=(data,vals,names,start,dp=1)=>
#start: 始点の角度
for name in vals
_name=name
# 順番に描画
if typeof name=="object"
_name=name.name #valsのオブジェクトにはname
rad=Math.PI*2*@getsum(data[_name])/@sum*@circ
ctx.beginPath()
# 外側の弧
ctx.arc tx,ty,r*dp/@depth,start+startangle,start+rad+startangle,false
# 内側の弧
ctx.arc tx,ty,r*(dp-1)/@depth,start+rad+startangle,start+startangle,true
ctx.closePath()
ctx.fillStyle=names[_name].color ? "#cccccc"
ctx.fill()
if typeof name=="object"
# 子供たち
onepart data[_name],name,names[_name],start,dp+1
start+=rad #描画した
onepart @data,@vals,@names,0
ctx.restore()
getsum:(data)->
unless typeof data=="object"
return data
sum=0
for name,value of data
sum+=@getsum value
sum
exports.circleGraph=(size)->new CircleGraph size
|
[
{
"context": "es you more likely to leave parties.\n *\n * @name Impartial\n * @prerequisite Leave 250 parties\n * @effect +",
"end": 135,
"score": 0.9863455891609192,
"start": 126,
"tag": "NAME",
"value": "Impartial"
}
] | src/character/personalities/Impartial.coffee | sadbear-/IdleLands | 3 |
Personality = require "../base/Personality"
`/**
* This personality makes you more likely to leave parties.
*
* @name Impartial
* @prerequisite Leave 250 parties
* @effect +50 partyLeavePercent
* @category Personalities
* @package Player
*/`
class Impartial extends Personality
constructor: ->
partyLeavePercent: -> 50
@canUse = (player) ->
player.statistics["player party leave"] >= 250
@desc = "Leave 250 parties"
module.exports = exports = Impartial | 60193 |
Personality = require "../base/Personality"
`/**
* This personality makes you more likely to leave parties.
*
* @name <NAME>
* @prerequisite Leave 250 parties
* @effect +50 partyLeavePercent
* @category Personalities
* @package Player
*/`
class Impartial extends Personality
constructor: ->
partyLeavePercent: -> 50
@canUse = (player) ->
player.statistics["player party leave"] >= 250
@desc = "Leave 250 parties"
module.exports = exports = Impartial | true |
Personality = require "../base/Personality"
`/**
* This personality makes you more likely to leave parties.
*
* @name PI:NAME:<NAME>END_PI
* @prerequisite Leave 250 parties
* @effect +50 partyLeavePercent
* @category Personalities
* @package Player
*/`
class Impartial extends Personality
constructor: ->
partyLeavePercent: -> 50
@canUse = (player) ->
player.statistics["player party leave"] >= 250
@desc = "Leave 250 parties"
module.exports = exports = Impartial |
[
{
"context": "ual status, 200\n assert.equal body.fornavn, 'Hans Kristian'\n done()\n\n it 'should get member for member",
"end": 1608,
"score": 0.9997113347053528,
"start": 1595,
"tag": "NAME",
"value": "Hans Kristian"
},
{
"context": "ual status, 200\n assert.equal ... | test/suite.coffee | Turistforeningen/node-dnt-api | 0 | DNT = require '../src/dnt-api.coffee'
assert = require 'assert'
dnt = null
client = 'NodeJS Test Client/1.0.0'
beforeEach -> dnt = new DNT client, process.env.DNT_CONNECT_KEY
describe 'new DNT()', ->
it 'should throw error for missing client name param', ->
assert.throws(
-> new DNT()
, /Client name is not defined/)
it 'should throw error for missing API key param', ->
assert.throws(
-> new DNT('myKey')
, /API key is not defined/)
it 'should make new API object instance', ->
assert dnt instanceof DNT
assert.equal dnt.client, client
assert.equal dnt.key, process.env.DNT_CONNECT_KEY
assert.equal dnt.version, ''
it 'should be able to set API version', ->
dnt = new DNT client, process.env.DNT_CONNECT_KEY, version: '99'
assert dnt instanceof DNT
assert.equal dnt.client, client
assert.equal dnt.key, process.env.DNT_CONNECT_KEY
assert.equal dnt.version, '.v99+json'
describe '#getMemberFor()', ->
it 'should return error message for invalid API key', (done) ->
@timeout 20000
dnt = new DNT(client, 'IjA1ChOTDZjWxRwU/DBZTw==')
dnt.getMemberFor sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 403
assert.deepEqual body,
errors: [
message: "Invalid authentication"
code: 1
]
done()
it 'should get member for sherpa id', (done) ->
@timeout 20000
dnt.getMemberFor sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert.equal body.fornavn, 'Hans Kristian'
done()
it 'should get member for membership number', (done) ->
@timeout 20000
dnt.getMemberFor medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert.equal body.fornavn, 'Hans Kristian'
done()
it 'should handle missing member gracefully', (done) ->
@timeout 20000
dnt.getMemberFor medlemsnummer: 1337, (err, status, body) ->
assert.ifError err
assert.equal status, 404
assert.deepEqual body,
errors: [
message: "A member matching that 'sherpa_id', 'medlemsnummer', or " +
"both if both were provided, does not exist."
code: 4
]
done()
it 'should handle invalid API version gracefully', (done) ->
@timeout 20000
dnt = new DNT client, process.env.DNT_CONNECT_KEY, version: '99'
dnt.getMemberFor medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 400
assert.deepEqual body,
errors: [
message: "You need to accept one of the following API versions in " +
"your media type: v0, v1"
code: 2
]
done()
describe '#getAssociationsFor()', ->
it 'should get associations for sherpa id', (done) ->
@timeout 20000
dnt.getAssociationsFor bruker_sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert body instanceof Array
assert body.length > 0
assert.equal typeof body[1].sherpa_id, 'number'
assert.equal typeof body[1].gruppetype, 'string'
assert.equal typeof body[1].type, 'string'
assert.equal typeof body[1].object_id, 'string'
assert.equal typeof body[1].navn, 'string'
done()
it 'should get associations for membership number', (done) ->
@timeout 20000
dnt.getAssociationsFor bruker_medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert body instanceof Array
assert body.length > 0
assert.equal typeof body[1].sherpa_id, 'number'
assert.equal typeof body[1].gruppetype, 'string'
assert.equal typeof body[1].type, 'string'
assert.equal typeof body[1].object_id, 'string'
assert.equal typeof body[1].navn, 'string'
done()
| 82266 | DNT = require '../src/dnt-api.coffee'
assert = require 'assert'
dnt = null
client = 'NodeJS Test Client/1.0.0'
beforeEach -> dnt = new DNT client, process.env.DNT_CONNECT_KEY
describe 'new DNT()', ->
it 'should throw error for missing client name param', ->
assert.throws(
-> new DNT()
, /Client name is not defined/)
it 'should throw error for missing API key param', ->
assert.throws(
-> new DNT('myKey')
, /API key is not defined/)
it 'should make new API object instance', ->
assert dnt instanceof DNT
assert.equal dnt.client, client
assert.equal dnt.key, process.env.DNT_CONNECT_KEY
assert.equal dnt.version, ''
it 'should be able to set API version', ->
dnt = new DNT client, process.env.DNT_CONNECT_KEY, version: '99'
assert dnt instanceof DNT
assert.equal dnt.client, client
assert.equal dnt.key, process.env.DNT_CONNECT_KEY
assert.equal dnt.version, '.v99+json'
describe '#getMemberFor()', ->
it 'should return error message for invalid API key', (done) ->
@timeout 20000
dnt = new DNT(client, 'IjA1ChOTDZjWxRwU/DBZTw==')
dnt.getMemberFor sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 403
assert.deepEqual body,
errors: [
message: "Invalid authentication"
code: 1
]
done()
it 'should get member for sherpa id', (done) ->
@timeout 20000
dnt.getMemberFor sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert.equal body.fornavn, '<NAME>'
done()
it 'should get member for membership number', (done) ->
@timeout 20000
dnt.getMemberFor medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert.equal body.fornavn, '<NAME>'
done()
it 'should handle missing member gracefully', (done) ->
@timeout 20000
dnt.getMemberFor medlemsnummer: 1337, (err, status, body) ->
assert.ifError err
assert.equal status, 404
assert.deepEqual body,
errors: [
message: "A member matching that 'sherpa_id', 'medlemsnummer', or " +
"both if both were provided, does not exist."
code: 4
]
done()
it 'should handle invalid API version gracefully', (done) ->
@timeout 20000
dnt = new DNT client, process.env.DNT_CONNECT_KEY, version: '99'
dnt.getMemberFor medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 400
assert.deepEqual body,
errors: [
message: "You need to accept one of the following API versions in " +
"your media type: v0, v1"
code: 2
]
done()
describe '#getAssociationsFor()', ->
it 'should get associations for sherpa id', (done) ->
@timeout 20000
dnt.getAssociationsFor bruker_sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert body instanceof Array
assert body.length > 0
assert.equal typeof body[1].sherpa_id, 'number'
assert.equal typeof body[1].gruppetype, 'string'
assert.equal typeof body[1].type, 'string'
assert.equal typeof body[1].object_id, 'string'
assert.equal typeof body[1].navn, 'string'
done()
it 'should get associations for membership number', (done) ->
@timeout 20000
dnt.getAssociationsFor bruker_medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert body instanceof Array
assert body.length > 0
assert.equal typeof body[1].sherpa_id, 'number'
assert.equal typeof body[1].gruppetype, 'string'
assert.equal typeof body[1].type, 'string'
assert.equal typeof body[1].object_id, 'string'
assert.equal typeof body[1].navn, 'string'
done()
| true | DNT = require '../src/dnt-api.coffee'
assert = require 'assert'
dnt = null
client = 'NodeJS Test Client/1.0.0'
beforeEach -> dnt = new DNT client, process.env.DNT_CONNECT_KEY
describe 'new DNT()', ->
it 'should throw error for missing client name param', ->
assert.throws(
-> new DNT()
, /Client name is not defined/)
it 'should throw error for missing API key param', ->
assert.throws(
-> new DNT('myKey')
, /API key is not defined/)
it 'should make new API object instance', ->
assert dnt instanceof DNT
assert.equal dnt.client, client
assert.equal dnt.key, process.env.DNT_CONNECT_KEY
assert.equal dnt.version, ''
it 'should be able to set API version', ->
dnt = new DNT client, process.env.DNT_CONNECT_KEY, version: '99'
assert dnt instanceof DNT
assert.equal dnt.client, client
assert.equal dnt.key, process.env.DNT_CONNECT_KEY
assert.equal dnt.version, '.v99+json'
describe '#getMemberFor()', ->
it 'should return error message for invalid API key', (done) ->
@timeout 20000
dnt = new DNT(client, 'IjA1ChOTDZjWxRwU/DBZTw==')
dnt.getMemberFor sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 403
assert.deepEqual body,
errors: [
message: "Invalid authentication"
code: 1
]
done()
it 'should get member for sherpa id', (done) ->
@timeout 20000
dnt.getMemberFor sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert.equal body.fornavn, 'PI:NAME:<NAME>END_PI'
done()
it 'should get member for membership number', (done) ->
@timeout 20000
dnt.getMemberFor medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert.equal body.fornavn, 'PI:NAME:<NAME>END_PI'
done()
it 'should handle missing member gracefully', (done) ->
@timeout 20000
dnt.getMemberFor medlemsnummer: 1337, (err, status, body) ->
assert.ifError err
assert.equal status, 404
assert.deepEqual body,
errors: [
message: "A member matching that 'sherpa_id', 'medlemsnummer', or " +
"both if both were provided, does not exist."
code: 4
]
done()
it 'should handle invalid API version gracefully', (done) ->
@timeout 20000
dnt = new DNT client, process.env.DNT_CONNECT_KEY, version: '99'
dnt.getMemberFor medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 400
assert.deepEqual body,
errors: [
message: "You need to accept one of the following API versions in " +
"your media type: v0, v1"
code: 2
]
done()
describe '#getAssociationsFor()', ->
it 'should get associations for sherpa id', (done) ->
@timeout 20000
dnt.getAssociationsFor bruker_sherpa_id: 10142, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert body instanceof Array
assert body.length > 0
assert.equal typeof body[1].sherpa_id, 'number'
assert.equal typeof body[1].gruppetype, 'string'
assert.equal typeof body[1].type, 'string'
assert.equal typeof body[1].object_id, 'string'
assert.equal typeof body[1].navn, 'string'
done()
it 'should get associations for membership number', (done) ->
@timeout 20000
dnt.getAssociationsFor bruker_medlemsnummer: 1692762, (err, status, body) ->
assert.ifError err
assert.equal status, 200
assert body instanceof Array
assert body.length > 0
assert.equal typeof body[1].sherpa_id, 'number'
assert.equal typeof body[1].gruppetype, 'string'
assert.equal typeof body[1].type, 'string'
assert.equal typeof body[1].object_id, 'string'
assert.equal typeof body[1].navn, 'string'
done()
|
[
{
"context": " game.playState.round.onGameCommand({playerName: 'ghost'})\n\n game.deck = new Core.Deck()\n game.hole",
"end": 768,
"score": 0.9942653179168701,
"start": 763,
"tag": "USERNAME",
"value": "ghost"
},
{
"context": "_.flatten(game.deck.cards)\n\n game.sendCommand(... | spec/card-poker/texas-hold-em-spec.coffee | robksawyer/hubot-poker | 1 | Core = require('../../js/card-poker/core')
TexasHoldEm = require('../../js/card-poker/texas-hold-em')
Fakes = require('../poker/fake-time')
Rounds = require('../../js/poker/round')
Player = require('../../js/poker/player')
_ = require('underscore')
describe 'TexasHoldEm', ->
store = game = null
builder = time = null
beforeEach ->
store = {}
builder = new Fakes.TimeBuilder().withHour(1).withMinute(0).withSecond(0)
time = new Fakes.FakeTimeProvider(builder.build())
listener = new FakeListener()
game = new TexasHoldEm(store, time)
game.playerStartingPoints = 25
game.addListener(listener)
it 'basic gameplay with play, bet and settle rounds', ->
game.startRound()
game.playState.round.onGameCommand({playerName: 'ghost'})
game.deck = new Core.Deck()
game.holeCards = game.deck.findAll(['5C', '8C', '8D', '9H', 'JC'])
playerCards = game.deck.findAll(['2S','4S','5S','5H','QC','3D','10H','KH','AS','7S']).cards
game.deck.cards.unshift playerCards
game.deck.cards = _.flatten(game.deck.cards)
game.sendCommand('chrismo', 'deal')
game.sendCommand('romer', 'deal')
game.sendCommand('sara', 'deal')
game.sendCommand('glv', 'deal')
game.sendCommand('bogdan', 'deal')
game.startBetting()
expect(-> game.bet('woodall', '10')).toThrow "Can't bet if you haven't played."
game.sendCommand('bogdan', 'bet 3')
game.sendCommand('chrismo', 'bet 10')
game.sendCommand('romer', 'bet 5')
game.sendCommand('romer', 'bet 7')
game.sendCommand('sara', 'fold')
# player can call at this point now. It's potentially confusing if the player
# has a strict expectation that call either means no higher bets can be made
# or that the call command will be sticky, meaning it auto-adjusts to higher
# bets. Play testing so far has shown players to be more confused over not
# being able to issue this command at this point, presuming it would be
# simply synonymous with "bet #{highest}".
game.sendCommand('glv', 'call')
game.sendCommand('chrismo', 'bet 3')
game.settleUp()
expect(-> game.bet('romer', '5')).toThrow "No new bets."
game.sendCommand('glv', 'call')
game.sendCommand('bogdan', 'fold')
# romer does nothing and auto-calls the remaining 1 point owed
game.finishRound()
expect(game.winningHandResult.playerName).toBe 'romer'
expect(game.winningHandResult.matchedHand.hand.name).toBe 'Full House'
expect(game.playerStore[0].name).toBe 'chrismo'
expect(game.playerStore[0].points).toBe 25 - 1 - 13
expect(game.playerStore[1].name).toBe 'romer'
expect(game.playerStore[1].points).toBe 25 - 1 - 13 + (1 + 1 + 1 + 1 + 1) + (13 + 13 + 13 + 3)
expect(game.playerStore[2].name).toBe 'sara'
expect(game.playerStore[2].points).toBe 25 - 1
expect(game.playerStore[3].name).toBe 'glv'
expect(game.playerStore[3].points).toBe 25 - 1 - 13
expect(game.playerStore[4].name).toBe 'bogdan'
expect(game.playerStore[4].points).toBe 25 - 1 - 3
# folded players should be marked
expect(game.boardStore.sara.folded).toBe true
expect(game.boardStore.bogdan.folded).toBe true
expect(game.playState.name).toBe 'end'
game.startNewRound()
expect(game.playState.name).toBe 'play'
it 'bet round bug', ->
# this simple test drove out a funky playState bug.
# see the big comment in BetPlayState.nextRound()
game.sendCommand('chrismo', 'deal')
game.sendCommand('sara', 'deal')
time.now = builder.withMinute(1).build()
time.execCallback()
expect(game.playState.name).toBe 'bet'
class FakeListener
constructor: ->
@msgs = []
onStartRound: ->
onStatus: (msg) ->
@msgs.push msg
canPushToPlayer: (playerName) ->
true
| 111616 | Core = require('../../js/card-poker/core')
TexasHoldEm = require('../../js/card-poker/texas-hold-em')
Fakes = require('../poker/fake-time')
Rounds = require('../../js/poker/round')
Player = require('../../js/poker/player')
_ = require('underscore')
describe 'TexasHoldEm', ->
store = game = null
builder = time = null
beforeEach ->
store = {}
builder = new Fakes.TimeBuilder().withHour(1).withMinute(0).withSecond(0)
time = new Fakes.FakeTimeProvider(builder.build())
listener = new FakeListener()
game = new TexasHoldEm(store, time)
game.playerStartingPoints = 25
game.addListener(listener)
it 'basic gameplay with play, bet and settle rounds', ->
game.startRound()
game.playState.round.onGameCommand({playerName: 'ghost'})
game.deck = new Core.Deck()
game.holeCards = game.deck.findAll(['5C', '8C', '8D', '9H', 'JC'])
playerCards = game.deck.findAll(['2S','4S','5S','5H','QC','3D','10H','KH','AS','7S']).cards
game.deck.cards.unshift playerCards
game.deck.cards = _.flatten(game.deck.cards)
game.sendCommand('chrismo', 'deal')
game.sendCommand('romer', 'deal')
game.sendCommand('sara', 'deal')
game.sendCommand('glv', 'deal')
game.sendCommand('bogdan', 'deal')
game.startBetting()
expect(-> game.bet('woodall', '10')).toThrow "Can't bet if you haven't played."
game.sendCommand('bogdan', 'bet 3')
game.sendCommand('chrismo', 'bet 10')
game.sendCommand('romer', 'bet 5')
game.sendCommand('romer', 'bet 7')
game.sendCommand('sara', 'fold')
# player can call at this point now. It's potentially confusing if the player
# has a strict expectation that call either means no higher bets can be made
# or that the call command will be sticky, meaning it auto-adjusts to higher
# bets. Play testing so far has shown players to be more confused over not
# being able to issue this command at this point, presuming it would be
# simply synonymous with "bet #{highest}".
game.sendCommand('glv', 'call')
game.sendCommand('chrismo', 'bet 3')
game.settleUp()
expect(-> game.bet('<NAME>', '5')).toThrow "No new bets."
game.sendCommand('glv', 'call')
game.sendCommand('<NAME>', 'fold')
# <NAME> does nothing and auto-calls the remaining 1 point owed
game.finishRound()
expect(game.winningHandResult.playerName).toBe '<NAME>'
expect(game.winningHandResult.matchedHand.hand.name).toBe 'Full House'
expect(game.playerStore[0].name).toBe '<NAME>'
expect(game.playerStore[0].points).toBe 25 - 1 - 13
expect(game.playerStore[1].name).toBe '<NAME>'
expect(game.playerStore[1].points).toBe 25 - 1 - 13 + (1 + 1 + 1 + 1 + 1) + (13 + 13 + 13 + 3)
expect(game.playerStore[2].name).toBe '<NAME>'
expect(game.playerStore[2].points).toBe 25 - 1
expect(game.playerStore[3].name).toBe 'glv'
expect(game.playerStore[3].points).toBe 25 - 1 - 13
expect(game.playerStore[4].name).toBe '<NAME>'
expect(game.playerStore[4].points).toBe 25 - 1 - 3
# folded players should be marked
expect(game.boardStore.sara.folded).toBe true
expect(game.boardStore.bogdan.folded).toBe true
expect(game.playState.name).toBe 'end'
game.startNewRound()
expect(game.playState.name).toBe 'play'
it 'bet round bug', ->
# this simple test drove out a funky playState bug.
# see the big comment in BetPlayState.nextRound()
game.sendCommand('<NAME>', 'deal')
game.sendCommand('<NAME>', 'deal')
time.now = builder.withMinute(1).build()
time.execCallback()
expect(game.playState.name).toBe 'bet'
class FakeListener
constructor: ->
@msgs = []
onStartRound: ->
onStatus: (msg) ->
@msgs.push msg
canPushToPlayer: (playerName) ->
true
| true | Core = require('../../js/card-poker/core')
TexasHoldEm = require('../../js/card-poker/texas-hold-em')
Fakes = require('../poker/fake-time')
Rounds = require('../../js/poker/round')
Player = require('../../js/poker/player')
_ = require('underscore')
describe 'TexasHoldEm', ->
store = game = null
builder = time = null
beforeEach ->
store = {}
builder = new Fakes.TimeBuilder().withHour(1).withMinute(0).withSecond(0)
time = new Fakes.FakeTimeProvider(builder.build())
listener = new FakeListener()
game = new TexasHoldEm(store, time)
game.playerStartingPoints = 25
game.addListener(listener)
it 'basic gameplay with play, bet and settle rounds', ->
game.startRound()
game.playState.round.onGameCommand({playerName: 'ghost'})
game.deck = new Core.Deck()
game.holeCards = game.deck.findAll(['5C', '8C', '8D', '9H', 'JC'])
playerCards = game.deck.findAll(['2S','4S','5S','5H','QC','3D','10H','KH','AS','7S']).cards
game.deck.cards.unshift playerCards
game.deck.cards = _.flatten(game.deck.cards)
game.sendCommand('chrismo', 'deal')
game.sendCommand('romer', 'deal')
game.sendCommand('sara', 'deal')
game.sendCommand('glv', 'deal')
game.sendCommand('bogdan', 'deal')
game.startBetting()
expect(-> game.bet('woodall', '10')).toThrow "Can't bet if you haven't played."
game.sendCommand('bogdan', 'bet 3')
game.sendCommand('chrismo', 'bet 10')
game.sendCommand('romer', 'bet 5')
game.sendCommand('romer', 'bet 7')
game.sendCommand('sara', 'fold')
# player can call at this point now. It's potentially confusing if the player
# has a strict expectation that call either means no higher bets can be made
# or that the call command will be sticky, meaning it auto-adjusts to higher
# bets. Play testing so far has shown players to be more confused over not
# being able to issue this command at this point, presuming it would be
# simply synonymous with "bet #{highest}".
game.sendCommand('glv', 'call')
game.sendCommand('chrismo', 'bet 3')
game.settleUp()
expect(-> game.bet('PI:NAME:<NAME>END_PI', '5')).toThrow "No new bets."
game.sendCommand('glv', 'call')
game.sendCommand('PI:NAME:<NAME>END_PI', 'fold')
# PI:NAME:<NAME>END_PI does nothing and auto-calls the remaining 1 point owed
game.finishRound()
expect(game.winningHandResult.playerName).toBe 'PI:NAME:<NAME>END_PI'
expect(game.winningHandResult.matchedHand.hand.name).toBe 'Full House'
expect(game.playerStore[0].name).toBe 'PI:NAME:<NAME>END_PI'
expect(game.playerStore[0].points).toBe 25 - 1 - 13
expect(game.playerStore[1].name).toBe 'PI:NAME:<NAME>END_PI'
expect(game.playerStore[1].points).toBe 25 - 1 - 13 + (1 + 1 + 1 + 1 + 1) + (13 + 13 + 13 + 3)
expect(game.playerStore[2].name).toBe 'PI:NAME:<NAME>END_PI'
expect(game.playerStore[2].points).toBe 25 - 1
expect(game.playerStore[3].name).toBe 'glv'
expect(game.playerStore[3].points).toBe 25 - 1 - 13
expect(game.playerStore[4].name).toBe 'PI:NAME:<NAME>END_PI'
expect(game.playerStore[4].points).toBe 25 - 1 - 3
# folded players should be marked
expect(game.boardStore.sara.folded).toBe true
expect(game.boardStore.bogdan.folded).toBe true
expect(game.playState.name).toBe 'end'
game.startNewRound()
expect(game.playState.name).toBe 'play'
it 'bet round bug', ->
# this simple test drove out a funky playState bug.
# see the big comment in BetPlayState.nextRound()
game.sendCommand('PI:NAME:<NAME>END_PI', 'deal')
game.sendCommand('PI:NAME:<NAME>END_PI', 'deal')
time.now = builder.withMinute(1).build()
time.execCallback()
expect(game.playState.name).toBe 'bet'
class FakeListener
constructor: ->
@msgs = []
onStartRound: ->
onStatus: (msg) ->
@msgs.push msg
canPushToPlayer: (playerName) ->
true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.