entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "_kv', root\n for kv in kv_list\n key = hash_key_to_value kv.value_array[0].value\n valu",
"end": 13366,
"score": 0.71524578332901,
"start": 13362,
"tag": "KEY",
"value": "hash"
},
{
"context": " key = hash_key_to_value kv.value_array[0].value... | src/node2ast.coffee | hu2prod/elayer | 0 | module = @
Type = require 'type'
ast = require './ast'
{Node} = require 'gram3/lib/node'
pre_op_map =
'++' : 'INC_RET'
'--' : 'DEC_RET'
'!' : 'BOOL_NOT'
'~' : 'BIT_NOT'
'not' : 'BOOL_NOT' # пока так. На самом деле ti
'+' : 'PLUS'
'-' : 'MINUS'
post_op_map =
'++' : 'RET_INC'
'--' : 'RET_DEC'
'?' : 'IS_NOT_NULL'
bin_op_map =
'+' : 'ADD'
'-' : 'SUB'
'*' : 'MUL'
'/' : 'DIV'
'%' : 'MOD'
'**' : 'POW'
'&' : 'BIT_AND'
'|' : 'BIT_OR'
'^' : 'BIT_XOR'
'&&' : 'BOOL_AND'
'||' : 'BOOL_OR'
'^^' : 'BOOL_XOR'
'and' : 'BOOL_AND'
'or' : 'BOOL_OR'
'xor' : 'BOOL_XOR'
'>>' : 'SHR'
'<<' : 'SHL'
'>>>' : 'LSR'
'=' : 'ASSIGN'
'+=' : 'ASS_ADD'
'-=' : 'ASS_SUB'
'*=' : 'ASS_MUL'
'/=' : 'ASS_DIV'
'%=' : 'ASS_MOD'
'**=' : 'ASS_POW'
'>>=' : 'ASS_SHR'
'<<=' : 'ASS_SHL'
'>>>=' : 'ASS_LSR'
'&=' : 'ASS_BIT_AND'
'|=' : 'ASS_BIT_OR'
'^=' : 'ASS_BIT_XOR'
'&&=' : 'ASS_BOOL_AND'
'||=' : 'ASS_BOOL_OR'
'^^=' : 'ASS_BOOL_XOR'
'==' : 'EQ'
'!=' : 'NE'
'<>' : 'NE'
'>' : 'GT'
'<' : 'LT'
'>=' : 'GTE'
'<=' : 'LTE'
# INDEX_ACCESS : true # a[b] как бинарный оператор
seek_token = (name, t)->
for v in t.value_array
return v if v.mx_hash.hash_key == name
null
seek_token_list = (name, t)->
list = []
for v in t.value_array
list.push v if v.mx_hash.hash_key == name
list
seek_token_list_deep = (name, t)->
list = []
for v in t.value_array
if v.mx_hash.hash_key == name
list.push v
else
list.append seek_token_list_deep name, v
list
seek_and_set_line_pos = (ret, root)->
walk = (root)->
if root.line != -1
ret.line = root.line
ret.pos = root.pos
return true
for v in root.value_array
return true if walk(v)
return false
walk root
fix_iterator = (t)->
# hack. В идеале должен быть lvalue
t.mx_hash.hacked = 'true'
t.mx_hash.ult = 'id'
t.value_view = t.value
t
wrap_scope = (stmt)->
ret = new ast.Scope
ret.list.push stmt
ret
hash_key_to_value = (key)->
if key[0] in ["'", '"']
eval key
else
key
# ###################################################################################################
# macro+
# ###################################################################################################
gen = null
@macro_fn_map = macro_fn_map =
'loop' : (condition, block)->
if condition
throw new Error "macro loop should not have condition"
ret = new ast.Loop
seek_and_set_line_pos ret, block
ret.scope= gen block
ret
'while' : (condition, block)->
if !condition
throw new Error "macro while should have condition"
ret = new ast.While
seek_and_set_line_pos ret, block
ret.cond= gen condition
ret.scope= gen block
ret
ast_call = (target, arg_list, scope)->
target_ast = gen target
if target_ast.constructor.name == 'Var'
if !arg_list or arg_list.length == 1
if fn = macro_fn_map[target_ast.name]
return fn(arg_list?[0], scope)
#
ret = new ast.Ast_call
ret.target = target_ast
if arg_list
ret.call = true
for v in arg_list
ret.arg_list.push gen v
ret.scope = gen scope
# pp ret
ret
@gen = gen = (root, opt={})->
switch root.mx_hash.ult
when "deep_scope"
ret = new ast.Scope
seek_and_set_line_pos ret, root
for v in root.value_array
continue if v.mx_hash.hash_key == 'eol'
loc = gen v, opt
continue if !loc
if loc instanceof ast.Scope
ret.list.append loc.list
else
ret.list.push loc
ret
when "deep"
gen root.value_array[0], opt
# ###################################################################################################
# misc
# ###################################################################################################
when "block"
gen root.value_array[1], opt
when "comment"
null
# ###################################################################################################
# single position
# ###################################################################################################
when "id"
if root.value_view in ["true", "false"]
ret = new ast.Const
seek_and_set_line_pos ret, root
ret.val = root.value_view
ret.type = new Type "bool"
ret
else if root.value_view == "continue"
ret = new ast.Continue
seek_and_set_line_pos ret, root
ret
else if root.value_view == "break"
ret = new ast.Break
seek_and_set_line_pos ret, root
ret
else
ret = new ast.Var
seek_and_set_line_pos ret, root
ret.name = root.value_view
ret
when "const"
ret = new ast.Const
seek_and_set_line_pos ret, root
ret.val = root.value_view
ret.type = new Type root.mx_hash.type
ret
# ###################################################################################################
# Operators
# ###################################################################################################
when "bin_op"
ret = new ast.Bin_op
seek_and_set_line_pos ret, root
ret.op = bin_op_map[op = root.value_array[1].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown bin_op=#{op}"
ret.a = gen root.value_array[0], opt
ret.b = gen root.value_array[2], opt
ret
when "pre_op"
ret = new ast.Un_op
seek_and_set_line_pos ret, root
ret.op = pre_op_map[op = root.value_array[0].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown pre_op=#{op}"
ret.a = gen root.value_array[1], opt
ret
when "post_op"
ret = new ast.Un_op
seek_and_set_line_pos ret, root
ret.op = post_op_map[op = root.value_array[1].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown post_op=#{op}"
ret.a = gen root.value_array[0], opt
ret
when "field_access"
ret = new ast.Field_access
seek_and_set_line_pos ret, root
ret.t = gen root.value_array[0], opt
ret.name = root.value_array[2].value
ret
when "index_access"
ret = new ast.Bin_op
seek_and_set_line_pos ret, root
ret.op = 'INDEX_ACCESS'
ret.a = gen root.value_array[0], opt
ret.b = gen root.value_array[2], opt
ret
when "bracket"
gen root.value_array[1], opt
# ###################################################################################################
# control flow
# ###################################################################################################
when "if_postfix"
condition = seek_token 'rvalue', root
block = seek_token 'stmt', root
ret = new ast.If
seek_and_set_line_pos ret, block
ret.cond= gen condition, opt
ret.t = wrap_scope gen block, opt
ret
when "if"
if_walk = (condition, block, if_tail_stmt)->
_ret = new ast.If
seek_and_set_line_pos _ret, block
_ret.cond= gen condition, opt
_ret.t = gen block, opt
if if_tail_stmt
value0 = if_tail_stmt.value_array[0].value
value1 = if_tail_stmt.value_array[1].value
is_else_if = false
if value0 in ['elseif', 'elsif', 'elif']
is_else_if = true
if value1 == 'if'
is_else_if = true
if is_else_if
condition = seek_token 'rvalue', if_tail_stmt
block = seek_token 'block', if_tail_stmt
new_if_tail_stmt = seek_token 'if_tail_stmt', if_tail_stmt
_ret.f.list.push if_walk condition, block, new_if_tail_stmt
else
_ret.f = gen seek_token('block', if_tail_stmt), opt
_ret
condition = seek_token 'rvalue', root
block = seek_token 'block', root
if_tail_stmt = seek_token 'if_tail_stmt', root
if_walk condition, block, if_tail_stmt
when "switch"
condition = seek_token 'rvalue', root
switch_tail_stmt = seek_token 'switch_tail_stmt', root
ret = new ast.Switch
seek_and_set_line_pos ret, root
ret.cond= gen condition, opt
while switch_tail_stmt
switch switch_tail_stmt.mx_hash.ult
when 'switch_when'
condition = gen seek_token('rvalue', switch_tail_stmt), opt
v = switch_tail_stmt
unless condition instanceof ast.Const
perr condition
throw new Error "when cond should be const"
ret.hash[condition.val] = gen seek_token('block', switch_tail_stmt), opt
when 'switch_else'
ret.f = gen seek_token('block', switch_tail_stmt), opt
else
### !pragma coverage-skip-block ###
perr root
throw new Error "unknown ult=#{root.mx_hash.ult} in switch"
switch_tail_stmt = seek_token 'switch_tail_stmt', switch_tail_stmt
ret
when "for_range"
ret = new ast.For_range
seek_and_set_line_pos ret, root
ret.exclusive = seek_token('ranger', root).value_view == '...'
[_for_skip, i] = seek_token_list 'tok_identifier', root
ret.i = gen fix_iterator i, opt
[a, b, by_node] = seek_token_list 'rvalue', root
ret.a = gen a, opt
ret.b = gen b, opt
ret.step = gen by_node, opt if by_node
ret.scope = gen seek_token('block', root), opt
ret
when "for_col"
ret = new ast.For_col
seek_and_set_line_pos ret, root
[_for_skip, k, v] = seek_token_list 'tok_identifier', root
if !v
v = k
k = null
ret.k = gen fix_iterator k, opt if k
ret.v = gen fix_iterator v, opt
ret.t = gen seek_token('rvalue', root), opt
ret.scope = gen seek_token('block', root), opt
ret
# ###################################################################################################
# Functions
# ###################################################################################################
when "fn_decl", "cl_decl"
ret = new ast.Fn_decl
seek_and_set_line_pos ret, root
if name = seek_token 'tok_identifier', root
ret.name = name.value
if root.mx_hash.ult == "cl_decl"
ret.is_closure = true
ret.type = new Type "function"
arg_list = []
if fn_decl_arg_list = seek_token 'fn_decl_arg_list', root
walk = (t)->
arg = t.value_array[0]
if arg.value_array[2]
type = new Type arg.value_array[2].value_view.replace(/\s+/g, '')
else
type = new Type "void"
arg_list.push {
name : arg.value_array[0].value
type
}
if t.value_array.length == 3
walk t.value_array[2]
return
walk fn_decl_arg_list
if type = seek_token('type', root)
ret.type.nest_list.push new Type type.value_view.replace(/\s+/g, '')
else
ret.type.nest_list.push new Type "void"
for arg in arg_list
ret.type.nest_list.push arg.type
ret.arg_name_list.push arg.name
scope = null
scope ?= seek_token 'block', root
scope ?= seek_token 'rvalue', root
if scope
ret.scope = gen scope, opt
ret
when "fn_call"
ret = new ast.Fn_call
seek_and_set_line_pos ret, root
ret.fn = gen root.value_array[0], opt
arg_list = []
if fn_decl_arg_list = seek_token 'fn_call_arg_list', root
walk = (t)->
arg_list.push gen t.value_array[0], opt
if t.value_array.length == 3
walk t.value_array[2]
return
walk fn_decl_arg_list
ret.arg_list = arg_list
ret
when "return"
ret = new ast.Ret
seek_and_set_line_pos ret, root
if root.value_array[1]
ret.t = gen root.value_array[1], opt
ret
# ###################################################################################################
# class
# ###################################################################################################
when "var_decl"
ret = new ast.Var_decl
seek_and_set_line_pos ret, root
ret.name = root.value_array[1].value
ret.type = new Type root.value_array[3].value_view.replace(/\s+/g, '')
ret
when "class_decl"
ret = new ast.Class_decl
seek_and_set_line_pos ret, root
ret.name = root.value_array[1].value
if scope = seek_token 'block', root
ret.scope = gen scope, opt
ret
when "struct_init"
ret = new ast.Struct_init
seek_and_set_line_pos ret, root
kv_list = seek_token_list_deep 'struct_init_kv', root
for kv in kv_list
key = hash_key_to_value kv.value_array[0].value
value = gen kv.value_array[2], opt
ret.hash[key] = value
ret
when "array_init"
ret = new ast.Array_init
seek_and_set_line_pos ret, root
value_list = seek_token_list_deep 'rvalue', root
for value in value_list
ret.list.push gen value, opt
ret
# ###################################################################################################
# mbg special
# ###################################################################################################
# TODO func call with block
# <some>.ast_call(serialized/generated ast)
# can use iffy
when "directive_fn_call"
arg_list = null
fn_call_arg_list = seek_token 'fn_call_arg_list', root
if root.value_array[1].value == "("
arg_list = []
if fn_call_arg_list
arg_list = seek_token_list_deep 'rvalue', fn_call_arg_list
# macro = seek_token 'rvalue', root
macro = root.value_array[0]
scope = seek_token 'block', root
ast_call macro, arg_list, scope
when "at"
ret = new ast.Var
seek_and_set_line_pos ret, root
ret.name = "this"
# ret.type = new Type "" # LATER
ret
when "at_field_access"
ret = new ast.Field_access
a_this = new ast.Var
a_this.name = "this"
seek_and_set_line_pos ret, root
ret.t = a_this
ret.name = root.value_array[1].value
ret
else
if opt.next_gen?
return opt.next_gen root
### !pragma coverage-skip-block ###
perr root
throw new Error "unknown ult=#{root.mx_hash.ult}"
#
| 203079 | module = @
Type = require 'type'
ast = require './ast'
{Node} = require 'gram3/lib/node'
pre_op_map =
'++' : 'INC_RET'
'--' : 'DEC_RET'
'!' : 'BOOL_NOT'
'~' : 'BIT_NOT'
'not' : 'BOOL_NOT' # пока так. На самом деле ti
'+' : 'PLUS'
'-' : 'MINUS'
post_op_map =
'++' : 'RET_INC'
'--' : 'RET_DEC'
'?' : 'IS_NOT_NULL'
bin_op_map =
'+' : 'ADD'
'-' : 'SUB'
'*' : 'MUL'
'/' : 'DIV'
'%' : 'MOD'
'**' : 'POW'
'&' : 'BIT_AND'
'|' : 'BIT_OR'
'^' : 'BIT_XOR'
'&&' : 'BOOL_AND'
'||' : 'BOOL_OR'
'^^' : 'BOOL_XOR'
'and' : 'BOOL_AND'
'or' : 'BOOL_OR'
'xor' : 'BOOL_XOR'
'>>' : 'SHR'
'<<' : 'SHL'
'>>>' : 'LSR'
'=' : 'ASSIGN'
'+=' : 'ASS_ADD'
'-=' : 'ASS_SUB'
'*=' : 'ASS_MUL'
'/=' : 'ASS_DIV'
'%=' : 'ASS_MOD'
'**=' : 'ASS_POW'
'>>=' : 'ASS_SHR'
'<<=' : 'ASS_SHL'
'>>>=' : 'ASS_LSR'
'&=' : 'ASS_BIT_AND'
'|=' : 'ASS_BIT_OR'
'^=' : 'ASS_BIT_XOR'
'&&=' : 'ASS_BOOL_AND'
'||=' : 'ASS_BOOL_OR'
'^^=' : 'ASS_BOOL_XOR'
'==' : 'EQ'
'!=' : 'NE'
'<>' : 'NE'
'>' : 'GT'
'<' : 'LT'
'>=' : 'GTE'
'<=' : 'LTE'
# INDEX_ACCESS : true # a[b] как бинарный оператор
seek_token = (name, t)->
for v in t.value_array
return v if v.mx_hash.hash_key == name
null
seek_token_list = (name, t)->
list = []
for v in t.value_array
list.push v if v.mx_hash.hash_key == name
list
seek_token_list_deep = (name, t)->
list = []
for v in t.value_array
if v.mx_hash.hash_key == name
list.push v
else
list.append seek_token_list_deep name, v
list
seek_and_set_line_pos = (ret, root)->
walk = (root)->
if root.line != -1
ret.line = root.line
ret.pos = root.pos
return true
for v in root.value_array
return true if walk(v)
return false
walk root
fix_iterator = (t)->
# hack. В идеале должен быть lvalue
t.mx_hash.hacked = 'true'
t.mx_hash.ult = 'id'
t.value_view = t.value
t
wrap_scope = (stmt)->
ret = new ast.Scope
ret.list.push stmt
ret
hash_key_to_value = (key)->
if key[0] in ["'", '"']
eval key
else
key
# ###################################################################################################
# macro+
# ###################################################################################################
gen = null
@macro_fn_map = macro_fn_map =
'loop' : (condition, block)->
if condition
throw new Error "macro loop should not have condition"
ret = new ast.Loop
seek_and_set_line_pos ret, block
ret.scope= gen block
ret
'while' : (condition, block)->
if !condition
throw new Error "macro while should have condition"
ret = new ast.While
seek_and_set_line_pos ret, block
ret.cond= gen condition
ret.scope= gen block
ret
ast_call = (target, arg_list, scope)->
target_ast = gen target
if target_ast.constructor.name == 'Var'
if !arg_list or arg_list.length == 1
if fn = macro_fn_map[target_ast.name]
return fn(arg_list?[0], scope)
#
ret = new ast.Ast_call
ret.target = target_ast
if arg_list
ret.call = true
for v in arg_list
ret.arg_list.push gen v
ret.scope = gen scope
# pp ret
ret
@gen = gen = (root, opt={})->
switch root.mx_hash.ult
when "deep_scope"
ret = new ast.Scope
seek_and_set_line_pos ret, root
for v in root.value_array
continue if v.mx_hash.hash_key == 'eol'
loc = gen v, opt
continue if !loc
if loc instanceof ast.Scope
ret.list.append loc.list
else
ret.list.push loc
ret
when "deep"
gen root.value_array[0], opt
# ###################################################################################################
# misc
# ###################################################################################################
when "block"
gen root.value_array[1], opt
when "comment"
null
# ###################################################################################################
# single position
# ###################################################################################################
when "id"
if root.value_view in ["true", "false"]
ret = new ast.Const
seek_and_set_line_pos ret, root
ret.val = root.value_view
ret.type = new Type "bool"
ret
else if root.value_view == "continue"
ret = new ast.Continue
seek_and_set_line_pos ret, root
ret
else if root.value_view == "break"
ret = new ast.Break
seek_and_set_line_pos ret, root
ret
else
ret = new ast.Var
seek_and_set_line_pos ret, root
ret.name = root.value_view
ret
when "const"
ret = new ast.Const
seek_and_set_line_pos ret, root
ret.val = root.value_view
ret.type = new Type root.mx_hash.type
ret
# ###################################################################################################
# Operators
# ###################################################################################################
when "bin_op"
ret = new ast.Bin_op
seek_and_set_line_pos ret, root
ret.op = bin_op_map[op = root.value_array[1].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown bin_op=#{op}"
ret.a = gen root.value_array[0], opt
ret.b = gen root.value_array[2], opt
ret
when "pre_op"
ret = new ast.Un_op
seek_and_set_line_pos ret, root
ret.op = pre_op_map[op = root.value_array[0].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown pre_op=#{op}"
ret.a = gen root.value_array[1], opt
ret
when "post_op"
ret = new ast.Un_op
seek_and_set_line_pos ret, root
ret.op = post_op_map[op = root.value_array[1].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown post_op=#{op}"
ret.a = gen root.value_array[0], opt
ret
when "field_access"
ret = new ast.Field_access
seek_and_set_line_pos ret, root
ret.t = gen root.value_array[0], opt
ret.name = root.value_array[2].value
ret
when "index_access"
ret = new ast.Bin_op
seek_and_set_line_pos ret, root
ret.op = 'INDEX_ACCESS'
ret.a = gen root.value_array[0], opt
ret.b = gen root.value_array[2], opt
ret
when "bracket"
gen root.value_array[1], opt
# ###################################################################################################
# control flow
# ###################################################################################################
when "if_postfix"
condition = seek_token 'rvalue', root
block = seek_token 'stmt', root
ret = new ast.If
seek_and_set_line_pos ret, block
ret.cond= gen condition, opt
ret.t = wrap_scope gen block, opt
ret
when "if"
if_walk = (condition, block, if_tail_stmt)->
_ret = new ast.If
seek_and_set_line_pos _ret, block
_ret.cond= gen condition, opt
_ret.t = gen block, opt
if if_tail_stmt
value0 = if_tail_stmt.value_array[0].value
value1 = if_tail_stmt.value_array[1].value
is_else_if = false
if value0 in ['elseif', 'elsif', 'elif']
is_else_if = true
if value1 == 'if'
is_else_if = true
if is_else_if
condition = seek_token 'rvalue', if_tail_stmt
block = seek_token 'block', if_tail_stmt
new_if_tail_stmt = seek_token 'if_tail_stmt', if_tail_stmt
_ret.f.list.push if_walk condition, block, new_if_tail_stmt
else
_ret.f = gen seek_token('block', if_tail_stmt), opt
_ret
condition = seek_token 'rvalue', root
block = seek_token 'block', root
if_tail_stmt = seek_token 'if_tail_stmt', root
if_walk condition, block, if_tail_stmt
when "switch"
condition = seek_token 'rvalue', root
switch_tail_stmt = seek_token 'switch_tail_stmt', root
ret = new ast.Switch
seek_and_set_line_pos ret, root
ret.cond= gen condition, opt
while switch_tail_stmt
switch switch_tail_stmt.mx_hash.ult
when 'switch_when'
condition = gen seek_token('rvalue', switch_tail_stmt), opt
v = switch_tail_stmt
unless condition instanceof ast.Const
perr condition
throw new Error "when cond should be const"
ret.hash[condition.val] = gen seek_token('block', switch_tail_stmt), opt
when 'switch_else'
ret.f = gen seek_token('block', switch_tail_stmt), opt
else
### !pragma coverage-skip-block ###
perr root
throw new Error "unknown ult=#{root.mx_hash.ult} in switch"
switch_tail_stmt = seek_token 'switch_tail_stmt', switch_tail_stmt
ret
when "for_range"
ret = new ast.For_range
seek_and_set_line_pos ret, root
ret.exclusive = seek_token('ranger', root).value_view == '...'
[_for_skip, i] = seek_token_list 'tok_identifier', root
ret.i = gen fix_iterator i, opt
[a, b, by_node] = seek_token_list 'rvalue', root
ret.a = gen a, opt
ret.b = gen b, opt
ret.step = gen by_node, opt if by_node
ret.scope = gen seek_token('block', root), opt
ret
when "for_col"
ret = new ast.For_col
seek_and_set_line_pos ret, root
[_for_skip, k, v] = seek_token_list 'tok_identifier', root
if !v
v = k
k = null
ret.k = gen fix_iterator k, opt if k
ret.v = gen fix_iterator v, opt
ret.t = gen seek_token('rvalue', root), opt
ret.scope = gen seek_token('block', root), opt
ret
# ###################################################################################################
# Functions
# ###################################################################################################
when "fn_decl", "cl_decl"
ret = new ast.Fn_decl
seek_and_set_line_pos ret, root
if name = seek_token 'tok_identifier', root
ret.name = name.value
if root.mx_hash.ult == "cl_decl"
ret.is_closure = true
ret.type = new Type "function"
arg_list = []
if fn_decl_arg_list = seek_token 'fn_decl_arg_list', root
walk = (t)->
arg = t.value_array[0]
if arg.value_array[2]
type = new Type arg.value_array[2].value_view.replace(/\s+/g, '')
else
type = new Type "void"
arg_list.push {
name : arg.value_array[0].value
type
}
if t.value_array.length == 3
walk t.value_array[2]
return
walk fn_decl_arg_list
if type = seek_token('type', root)
ret.type.nest_list.push new Type type.value_view.replace(/\s+/g, '')
else
ret.type.nest_list.push new Type "void"
for arg in arg_list
ret.type.nest_list.push arg.type
ret.arg_name_list.push arg.name
scope = null
scope ?= seek_token 'block', root
scope ?= seek_token 'rvalue', root
if scope
ret.scope = gen scope, opt
ret
when "fn_call"
ret = new ast.Fn_call
seek_and_set_line_pos ret, root
ret.fn = gen root.value_array[0], opt
arg_list = []
if fn_decl_arg_list = seek_token 'fn_call_arg_list', root
walk = (t)->
arg_list.push gen t.value_array[0], opt
if t.value_array.length == 3
walk t.value_array[2]
return
walk fn_decl_arg_list
ret.arg_list = arg_list
ret
when "return"
ret = new ast.Ret
seek_and_set_line_pos ret, root
if root.value_array[1]
ret.t = gen root.value_array[1], opt
ret
# ###################################################################################################
# class
# ###################################################################################################
when "var_decl"
ret = new ast.Var_decl
seek_and_set_line_pos ret, root
ret.name = root.value_array[1].value
ret.type = new Type root.value_array[3].value_view.replace(/\s+/g, '')
ret
when "class_decl"
ret = new ast.Class_decl
seek_and_set_line_pos ret, root
ret.name = root.value_array[1].value
if scope = seek_token 'block', root
ret.scope = gen scope, opt
ret
when "struct_init"
ret = new ast.Struct_init
seek_and_set_line_pos ret, root
kv_list = seek_token_list_deep 'struct_init_kv', root
for kv in kv_list
key = <KEY>_key_to_value kv.value_array[0].<KEY>
value = gen kv.value_array[2], opt
ret.hash[key] = value
ret
when "array_init"
ret = new ast.Array_init
seek_and_set_line_pos ret, root
value_list = seek_token_list_deep 'rvalue', root
for value in value_list
ret.list.push gen value, opt
ret
# ###################################################################################################
# mbg special
# ###################################################################################################
# TODO func call with block
# <some>.ast_call(serialized/generated ast)
# can use iffy
when "directive_fn_call"
arg_list = null
fn_call_arg_list = seek_token 'fn_call_arg_list', root
if root.value_array[1].value == "("
arg_list = []
if fn_call_arg_list
arg_list = seek_token_list_deep 'rvalue', fn_call_arg_list
# macro = seek_token 'rvalue', root
macro = root.value_array[0]
scope = seek_token 'block', root
ast_call macro, arg_list, scope
when "at"
ret = new ast.Var
seek_and_set_line_pos ret, root
ret.name = "this"
# ret.type = new Type "" # LATER
ret
when "at_field_access"
ret = new ast.Field_access
a_this = new ast.Var
a_this.name = "this"
seek_and_set_line_pos ret, root
ret.t = a_this
ret.name = root.value_array[1].value
ret
else
if opt.next_gen?
return opt.next_gen root
### !pragma coverage-skip-block ###
perr root
throw new Error "unknown ult=#{root.mx_hash.ult}"
#
| true | module = @
Type = require 'type'
ast = require './ast'
{Node} = require 'gram3/lib/node'
pre_op_map =
'++' : 'INC_RET'
'--' : 'DEC_RET'
'!' : 'BOOL_NOT'
'~' : 'BIT_NOT'
'not' : 'BOOL_NOT' # пока так. На самом деле ti
'+' : 'PLUS'
'-' : 'MINUS'
post_op_map =
'++' : 'RET_INC'
'--' : 'RET_DEC'
'?' : 'IS_NOT_NULL'
bin_op_map =
'+' : 'ADD'
'-' : 'SUB'
'*' : 'MUL'
'/' : 'DIV'
'%' : 'MOD'
'**' : 'POW'
'&' : 'BIT_AND'
'|' : 'BIT_OR'
'^' : 'BIT_XOR'
'&&' : 'BOOL_AND'
'||' : 'BOOL_OR'
'^^' : 'BOOL_XOR'
'and' : 'BOOL_AND'
'or' : 'BOOL_OR'
'xor' : 'BOOL_XOR'
'>>' : 'SHR'
'<<' : 'SHL'
'>>>' : 'LSR'
'=' : 'ASSIGN'
'+=' : 'ASS_ADD'
'-=' : 'ASS_SUB'
'*=' : 'ASS_MUL'
'/=' : 'ASS_DIV'
'%=' : 'ASS_MOD'
'**=' : 'ASS_POW'
'>>=' : 'ASS_SHR'
'<<=' : 'ASS_SHL'
'>>>=' : 'ASS_LSR'
'&=' : 'ASS_BIT_AND'
'|=' : 'ASS_BIT_OR'
'^=' : 'ASS_BIT_XOR'
'&&=' : 'ASS_BOOL_AND'
'||=' : 'ASS_BOOL_OR'
'^^=' : 'ASS_BOOL_XOR'
'==' : 'EQ'
'!=' : 'NE'
'<>' : 'NE'
'>' : 'GT'
'<' : 'LT'
'>=' : 'GTE'
'<=' : 'LTE'
# INDEX_ACCESS : true # a[b] как бинарный оператор
seek_token = (name, t)->
for v in t.value_array
return v if v.mx_hash.hash_key == name
null
seek_token_list = (name, t)->
list = []
for v in t.value_array
list.push v if v.mx_hash.hash_key == name
list
seek_token_list_deep = (name, t)->
list = []
for v in t.value_array
if v.mx_hash.hash_key == name
list.push v
else
list.append seek_token_list_deep name, v
list
seek_and_set_line_pos = (ret, root)->
walk = (root)->
if root.line != -1
ret.line = root.line
ret.pos = root.pos
return true
for v in root.value_array
return true if walk(v)
return false
walk root
fix_iterator = (t)->
# hack. В идеале должен быть lvalue
t.mx_hash.hacked = 'true'
t.mx_hash.ult = 'id'
t.value_view = t.value
t
wrap_scope = (stmt)->
ret = new ast.Scope
ret.list.push stmt
ret
hash_key_to_value = (key)->
if key[0] in ["'", '"']
eval key
else
key
# ###################################################################################################
# macro+
# ###################################################################################################
gen = null
@macro_fn_map = macro_fn_map =
'loop' : (condition, block)->
if condition
throw new Error "macro loop should not have condition"
ret = new ast.Loop
seek_and_set_line_pos ret, block
ret.scope= gen block
ret
'while' : (condition, block)->
if !condition
throw new Error "macro while should have condition"
ret = new ast.While
seek_and_set_line_pos ret, block
ret.cond= gen condition
ret.scope= gen block
ret
ast_call = (target, arg_list, scope)->
target_ast = gen target
if target_ast.constructor.name == 'Var'
if !arg_list or arg_list.length == 1
if fn = macro_fn_map[target_ast.name]
return fn(arg_list?[0], scope)
#
ret = new ast.Ast_call
ret.target = target_ast
if arg_list
ret.call = true
for v in arg_list
ret.arg_list.push gen v
ret.scope = gen scope
# pp ret
ret
@gen = gen = (root, opt={})->
switch root.mx_hash.ult
when "deep_scope"
ret = new ast.Scope
seek_and_set_line_pos ret, root
for v in root.value_array
continue if v.mx_hash.hash_key == 'eol'
loc = gen v, opt
continue if !loc
if loc instanceof ast.Scope
ret.list.append loc.list
else
ret.list.push loc
ret
when "deep"
gen root.value_array[0], opt
# ###################################################################################################
# misc
# ###################################################################################################
when "block"
gen root.value_array[1], opt
when "comment"
null
# ###################################################################################################
# single position
# ###################################################################################################
when "id"
if root.value_view in ["true", "false"]
ret = new ast.Const
seek_and_set_line_pos ret, root
ret.val = root.value_view
ret.type = new Type "bool"
ret
else if root.value_view == "continue"
ret = new ast.Continue
seek_and_set_line_pos ret, root
ret
else if root.value_view == "break"
ret = new ast.Break
seek_and_set_line_pos ret, root
ret
else
ret = new ast.Var
seek_and_set_line_pos ret, root
ret.name = root.value_view
ret
when "const"
ret = new ast.Const
seek_and_set_line_pos ret, root
ret.val = root.value_view
ret.type = new Type root.mx_hash.type
ret
# ###################################################################################################
# Operators
# ###################################################################################################
when "bin_op"
ret = new ast.Bin_op
seek_and_set_line_pos ret, root
ret.op = bin_op_map[op = root.value_array[1].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown bin_op=#{op}"
ret.a = gen root.value_array[0], opt
ret.b = gen root.value_array[2], opt
ret
when "pre_op"
ret = new ast.Un_op
seek_and_set_line_pos ret, root
ret.op = pre_op_map[op = root.value_array[0].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown pre_op=#{op}"
ret.a = gen root.value_array[1], opt
ret
when "post_op"
ret = new ast.Un_op
seek_and_set_line_pos ret, root
ret.op = post_op_map[op = root.value_array[1].value_view]
if !ret.op
### !pragma coverage-skip-block ###
throw new Error "unknown post_op=#{op}"
ret.a = gen root.value_array[0], opt
ret
when "field_access"
ret = new ast.Field_access
seek_and_set_line_pos ret, root
ret.t = gen root.value_array[0], opt
ret.name = root.value_array[2].value
ret
when "index_access"
ret = new ast.Bin_op
seek_and_set_line_pos ret, root
ret.op = 'INDEX_ACCESS'
ret.a = gen root.value_array[0], opt
ret.b = gen root.value_array[2], opt
ret
when "bracket"
gen root.value_array[1], opt
# ###################################################################################################
# control flow
# ###################################################################################################
when "if_postfix"
condition = seek_token 'rvalue', root
block = seek_token 'stmt', root
ret = new ast.If
seek_and_set_line_pos ret, block
ret.cond= gen condition, opt
ret.t = wrap_scope gen block, opt
ret
when "if"
if_walk = (condition, block, if_tail_stmt)->
_ret = new ast.If
seek_and_set_line_pos _ret, block
_ret.cond= gen condition, opt
_ret.t = gen block, opt
if if_tail_stmt
value0 = if_tail_stmt.value_array[0].value
value1 = if_tail_stmt.value_array[1].value
is_else_if = false
if value0 in ['elseif', 'elsif', 'elif']
is_else_if = true
if value1 == 'if'
is_else_if = true
if is_else_if
condition = seek_token 'rvalue', if_tail_stmt
block = seek_token 'block', if_tail_stmt
new_if_tail_stmt = seek_token 'if_tail_stmt', if_tail_stmt
_ret.f.list.push if_walk condition, block, new_if_tail_stmt
else
_ret.f = gen seek_token('block', if_tail_stmt), opt
_ret
condition = seek_token 'rvalue', root
block = seek_token 'block', root
if_tail_stmt = seek_token 'if_tail_stmt', root
if_walk condition, block, if_tail_stmt
when "switch"
condition = seek_token 'rvalue', root
switch_tail_stmt = seek_token 'switch_tail_stmt', root
ret = new ast.Switch
seek_and_set_line_pos ret, root
ret.cond= gen condition, opt
while switch_tail_stmt
switch switch_tail_stmt.mx_hash.ult
when 'switch_when'
condition = gen seek_token('rvalue', switch_tail_stmt), opt
v = switch_tail_stmt
unless condition instanceof ast.Const
perr condition
throw new Error "when cond should be const"
ret.hash[condition.val] = gen seek_token('block', switch_tail_stmt), opt
when 'switch_else'
ret.f = gen seek_token('block', switch_tail_stmt), opt
else
### !pragma coverage-skip-block ###
perr root
throw new Error "unknown ult=#{root.mx_hash.ult} in switch"
switch_tail_stmt = seek_token 'switch_tail_stmt', switch_tail_stmt
ret
when "for_range"
ret = new ast.For_range
seek_and_set_line_pos ret, root
ret.exclusive = seek_token('ranger', root).value_view == '...'
[_for_skip, i] = seek_token_list 'tok_identifier', root
ret.i = gen fix_iterator i, opt
[a, b, by_node] = seek_token_list 'rvalue', root
ret.a = gen a, opt
ret.b = gen b, opt
ret.step = gen by_node, opt if by_node
ret.scope = gen seek_token('block', root), opt
ret
when "for_col"
ret = new ast.For_col
seek_and_set_line_pos ret, root
[_for_skip, k, v] = seek_token_list 'tok_identifier', root
if !v
v = k
k = null
ret.k = gen fix_iterator k, opt if k
ret.v = gen fix_iterator v, opt
ret.t = gen seek_token('rvalue', root), opt
ret.scope = gen seek_token('block', root), opt
ret
# ###################################################################################################
# Functions
# ###################################################################################################
when "fn_decl", "cl_decl"
ret = new ast.Fn_decl
seek_and_set_line_pos ret, root
if name = seek_token 'tok_identifier', root
ret.name = name.value
if root.mx_hash.ult == "cl_decl"
ret.is_closure = true
ret.type = new Type "function"
arg_list = []
if fn_decl_arg_list = seek_token 'fn_decl_arg_list', root
walk = (t)->
arg = t.value_array[0]
if arg.value_array[2]
type = new Type arg.value_array[2].value_view.replace(/\s+/g, '')
else
type = new Type "void"
arg_list.push {
name : arg.value_array[0].value
type
}
if t.value_array.length == 3
walk t.value_array[2]
return
walk fn_decl_arg_list
if type = seek_token('type', root)
ret.type.nest_list.push new Type type.value_view.replace(/\s+/g, '')
else
ret.type.nest_list.push new Type "void"
for arg in arg_list
ret.type.nest_list.push arg.type
ret.arg_name_list.push arg.name
scope = null
scope ?= seek_token 'block', root
scope ?= seek_token 'rvalue', root
if scope
ret.scope = gen scope, opt
ret
when "fn_call"
ret = new ast.Fn_call
seek_and_set_line_pos ret, root
ret.fn = gen root.value_array[0], opt
arg_list = []
if fn_decl_arg_list = seek_token 'fn_call_arg_list', root
walk = (t)->
arg_list.push gen t.value_array[0], opt
if t.value_array.length == 3
walk t.value_array[2]
return
walk fn_decl_arg_list
ret.arg_list = arg_list
ret
when "return"
ret = new ast.Ret
seek_and_set_line_pos ret, root
if root.value_array[1]
ret.t = gen root.value_array[1], opt
ret
# ###################################################################################################
# class
# ###################################################################################################
when "var_decl"
ret = new ast.Var_decl
seek_and_set_line_pos ret, root
ret.name = root.value_array[1].value
ret.type = new Type root.value_array[3].value_view.replace(/\s+/g, '')
ret
when "class_decl"
ret = new ast.Class_decl
seek_and_set_line_pos ret, root
ret.name = root.value_array[1].value
if scope = seek_token 'block', root
ret.scope = gen scope, opt
ret
when "struct_init"
ret = new ast.Struct_init
seek_and_set_line_pos ret, root
kv_list = seek_token_list_deep 'struct_init_kv', root
for kv in kv_list
key = PI:KEY:<KEY>END_PI_key_to_value kv.value_array[0].PI:KEY:<KEY>END_PI
value = gen kv.value_array[2], opt
ret.hash[key] = value
ret
when "array_init"
ret = new ast.Array_init
seek_and_set_line_pos ret, root
value_list = seek_token_list_deep 'rvalue', root
for value in value_list
ret.list.push gen value, opt
ret
# ###################################################################################################
# mbg special
# ###################################################################################################
# TODO func call with block
# <some>.ast_call(serialized/generated ast)
# can use iffy
when "directive_fn_call"
arg_list = null
fn_call_arg_list = seek_token 'fn_call_arg_list', root
if root.value_array[1].value == "("
arg_list = []
if fn_call_arg_list
arg_list = seek_token_list_deep 'rvalue', fn_call_arg_list
# macro = seek_token 'rvalue', root
macro = root.value_array[0]
scope = seek_token 'block', root
ast_call macro, arg_list, scope
when "at"
ret = new ast.Var
seek_and_set_line_pos ret, root
ret.name = "this"
# ret.type = new Type "" # LATER
ret
when "at_field_access"
ret = new ast.Field_access
a_this = new ast.Var
a_this.name = "this"
seek_and_set_line_pos ret, root
ret.t = a_this
ret.name = root.value_array[1].value
ret
else
if opt.next_gen?
return opt.next_gen root
### !pragma coverage-skip-block ###
perr root
throw new Error "unknown ult=#{root.mx_hash.ult}"
#
|
[
{
"context": "\n\ndb = mongoose.createConnection('mongodb://david:1qazxsw2@alex.mongohq.com:10041/calendarsync')\ndb.on \"error\", console.error",
"end": 865,
"score": 0.9986139535903931,
"start": 840,
"tag": "EMAIL",
"value": "1qazxsw2@alex.mongohq.com"
},
{
"context": " bcrypt.genS... | data/coffeescript/dc6e2709c6bf74ef2ae0cc1f03907584_app.coffee | maxim5/code-inspector | 5 |
###
Module dependencies.
###
express = require("express")
http = require("http")
https = require("https")
path = require("path")
passport = require('passport')
querystring = require('querystring')
GoogleStrategy = require('passport-google').Strategy;
GoogleOAuth2Strategy = require('passport-google-oauth').OAuth2Strategy;
mongoose = require('mongoose')
bcrypt = require('bcrypt')
util = require("util")
exec = require("child_process").exec
child = undefined
Encryption = require('./encryption').Encryption
SALT_WORK_FACTOR = 10;
GoogleCalendar = require('google-calendar')
calendar = new GoogleCalendar.GoogleCalendar(
'314528618328-v4k0lve6dl83dboanp04d85n7u9mldd6.apps.googleusercontent.com',
'zH8LPkdnVmawAePGthgJ_cRo',
'http://calendarsync.dadams.c9.io/auth/google/callback')
db = mongoose.createConnection('mongodb://david:1qazxsw2@alex.mongohq.com:10041/calendarsync')
db.on "error", console.error.bind(console, "connection error:")
#child = exec 'python calendarsync.py', (error, stdout, stderr) ->
#console.log "stdout: " + stdout
#console.log "stderr: " + stderr
#console.log "exec error: " + error if error isnt null
#
encryption = new Encryption()
#console.log 'salting'
#console.log bcrypt.genSaltSync SALT_WORK_FACTOR
#password = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
#input = "hello world"
#encryption.encrypt input, password, (encoded) ->
#console.log encoded
#encryption.decrypt encoded, password, (output) ->
#console.log output
userSchema = new mongoose.Schema(
# _id:mongoose.Types.ObjectId
displayName:String
email:
type: String, unique:true
firstname: String
lastname:String
sonausername: String
sonapassword: String
sonavalid: Boolean
experiments: Array
calendarnames: Array
calendarids: Array
salt: String
gid:
type: String, unique:true
accessToken:
type: String, unique:true)
User = db.model 'users', userSchema
experimentSchema = new mongoose.Schema(
_id:String
Date:String
Date2:String
starttime:String
endtime:String
ParticipantSigned:String
StudentTimeSlot:String
sonanumber:Number
experiment:String
calendarid:String)
Experiment = db.model 'experiments', experimentSchema
#david = new User
#name: 'David'
#email: 'blah@blah.com'
#
#david.save (err)->
#console.log("error saving") if err
#
#Experiment.find (err,result)->
#console.log 'finding things'
#if(err)
#console.log(err)
#else
#console.log(result)
passport.serializeUser (user, done) ->
if user.identifier
done null, user.identifier
if user.id
done null, user.id
#console.log 'name ' + user.name
#console.log ' email ' + user.emails
#User.findOne 'gid':user.identifier, (err,u)->
#console.log 'serializeUser ' + user.identifier
#if u
#console.log 'found old user' + u
#done(null,u)
#else
#console.log 'serializeUser no user found'
#u['gid'] = user.identifier
#u.save (err)->
#console.log("error saving") if err
#done null, u
passport.deserializeUser (id, done) ->
#console.log 'deserializeUser ' + id
User.findOne 'gid':id, (err, user)->
if err
console.log 'user not found ' + id
done err, user
###
passport.use new GoogleStrategy(
returnURL: "http://calendarsync.dadams.c9.io/auth/google/return"
realm: "http://calendarsync.dadams.c9.io/",
(identifier, profile, done) ->
# asynchronous verification, for effect...
process.nextTick ->
#To keep the example simple, the user's Google profile is returned to
#represent the logged-in user. In a typical application, you would want
#to associate the Google account with a user record in your database,
#and return that user instead.
profile.identifier = identifier
User.findOne 'gid':identifier, (err, user)->
if !user
console.log 'creating new user'
u = new User()
u.emails = profile.emails.value
u.firstname = profile.name.givenName
u.lastname = profile.name.familyName
u.displayName = profile.displayName
u.gid = identifier
u.save()
#console.log(profile)
#console.log(identifier)
done null, profile
)
###
passport.use new GoogleOAuth2Strategy(
clientID: '314528618328-v4k0lve6dl83dboanp04d85n7u9mldd6.apps.googleusercontent.com'
clientSecret: 'zH8LPkdnVmawAePGthgJ_cRo'
callbackURL: "http://calendarsync.dadams.c9.io/auth/google/callback",
(accessToken, refreshToken, profile, done) ->
#console.log accessToken
#console.log refreshToken
#console.log profile._json
json = JSON.parse(JSON.stringify(profile._json))
console.log json
User.findOne 'gid':json.id, (err, user)->
if !user
console.log 'creating new user'
u = new User()
u.email = json.email
u.firstname = json.given_name
u.lastname = json.family_name
u.displayName = json.name
u.gid = json.id
u.sonavalid = false
u.sonapassword='default'
u.accessToken = accessToken
u.save (err)->
console.log(err) if err
else
user.accessToken = accessToken
user.save (err)->
console.log(err) if err
done err, profile
#User.findOrCreate
#googleId: profile.id,
#(err, user) ->
#done err, user
)
app = express()
app.configure ->
app.set "port", process.env.PORT or 80
app.set "views", __dirname + "/views"
app.set "view engine", "jade"
app.use express.favicon()
app.use express.logger("dev")
app.use express.cookieParser()
app.use express.bodyParser()
app.use express.methodOverride()
app.use express.session { secret: 'keyboard cat' }
#app.use express.compiler(src: __dirname + '/public/lib', enable: ['less'] )
#app.use express.compiler(
# src: __dirname + "/public"
# enable: ["less"])
app.use passport.initialize()
app.use passport.session()
app.use app.router
app.use express.static(path.join(__dirname, "public"))
app.configure "development", ->
app.use express.errorHandler()
app.get "/", (req, res) ->
res.render "index",
user: req.user
title: 'Calendarsync'
app.get "/account", ensureAuthenticated, (req, res) ->
res.render "account",
user: req.user
app.get "/auth/google",
passport.authenticate("google",
scope: ['https://www.googleapis.com/auth/calendar',
'https://www.googleapis.com/auth/userinfo.profile',
'https://www.googleapis.com/auth/userinfo.email']
failureRedirect: "/"),
(req, res) ->
res.redirect "/"
app.get "/auth/google/return", passport.authenticate("google", failureRedirect: "/"),
(req, res) ->
res.redirect "/manage"
app.get "/auth/google/callback", passport.authenticate("google", failureRedirect: "/"),
(req, res) ->
# Successful authentication, redirect home.
res.redirect "/"
app.get "/logout", (req, res) ->
req.logout()
res.redirect "/"
app.get "/manage", (req, res) ->
res.render "manage",
user: req.user
app.post "/manage", (req, res) ->
sonalogin = req.param 'sona_login', null
sonapass = req.param 'sona_password', null
#console.log sonalogin
#console.log sonapass
if sonapass
req.user.sonapassword=sonapass
if sonalogin
req.user.sonausername=sonalogin
req.user.save (err) ->
console.log("error saving sona data " + err) if err
res.redirect '/update'
app.post "/update/:index?", (req, res) ->
calendarname = req.param 'calendar_name', null
index = req.params.index
calendar.listCalendarList req.user.accessToken, (err, calendarList) ->
if err
console.log err
else
calendarList.items.forEach (cal) ->
console.log "Calendar : " + cal.id
#console.log "index : " + index
#console.log calendarname
if calendarname == cal.summary
Experiment.find
'experiment': req.user.experiments[index]
(err,result)->
#console.log result
for exp in result
console.log exp
if exp.ParticipantSigned == '1'
event =
"status": 'confirmed'
"summary": exp.StudentTimeSlot
"description": exp.experiment
"start":
"dateTime": exp.starttime
"end":
"dateTime": exp.endtime
console.log event
if exp.calendarid
calendar.updateEvent req.user.accessToken, cal.id, exp.calendarid,(err, event) ->
if err
console.log err
else
console.log 'updated existing event ' + event.id
else
calendar.insertEvent req.user.accessToken, cal.id,event,(err, event) ->
if err
console.log err
else
console.log 'added new event ' + event.id
exp.calendarid = event.id
#exp.save (err)->
#console.log err if err
#e = new Experiment(exp)
#e.save (err)->
#console.log err if err
Experiment.findByIdAndUpdate exp._id,
'calendarid': event.id, (err)->
console.log err if err
else if exp.ParticipantSigned == '0' and exp.calendarid
calendar.deleteEvent req.user.accessToken, cal.id, exp.calendarid,(err, event) ->
if err
console.log err
else
console.log 'deleted existing event ' + event.id
event.id = null
exp.save (err)->
console.log err if err
#Events.list
res.redirect '/'
###
app.post "/update", (req, res) ->
options = querystring.stringify
'host': 'www.googleapis.com'
'path': '/calendar/v3/users/me/calendarList?key=zH8LPkdnVmawAePGthgJ_cRo'
'method': 'GET'
'authorization': 'Bearer ' + req.user.accessToken
'referer': 'http://calendarsync.dadams.c9.io'
console.log options
request = https.request(options, (response) ->
console.log "statusCode: ", response.statusCode
console.log "headers: ", JSON.stringify(response.headers)
response.on "data", (d) ->
console.log d
)
request.end()
request.on "error", (e) ->
console.error e
res.redirect '/'
###
app.get "/update", (req, res) ->
child = exec 'python calendarsync.py', (error, stdout, stderr) ->
console.log "stdout: " + stdout
console.log "stderr: " + stderr
console.log "exec error: " + error if error isnt null
User.findOne 'gid':req.user.gid, (err, user)->
req.user = user
if req.user.sonavalid
res.redirect "/experiments"
else
res.redirect '/manage'
app.get "/experiments/:experiment", (req, res) ->
console.log "experiment number " + req.params.experiment
console.log req.user.experiments[parseInt req.params.experiment]
Experiment.find
'experiment': req.user.experiments[parseInt req.params.experiment]
'ParticipantSigned': '1',
(err,result)->
console.log 'finding experiments'
if(err)
console.log(err)
res.redirect '/experiments'
else
res.render "calendar"
user: req.user
experiments: result
title: req.user.experiments[parseInt req.params.experiment]
index: parseInt req.params.experiment
#app.get '/calendar', (req, res) ->
#res.render "calendar"
app.get "/experiments", (req, res) ->
if req.user.sonavalid
res.render "experiments",
title: 'Experiments'
user: req.user
else
res.redirect '/manage'
ensureAuthenticated = (req, res, next) ->
return next() if req.isAuthenticated()
res.redirect "/"
userSchema.pre "save", (next) ->
user = this
# only hash the password if it has been modified (or is new)
return next() unless user.isModified("sonapassword")
# generate a salt
salt = bcrypt.genSaltSync SALT_WORK_FACTOR
console.log user
encryption.encrypt user.sonapassword, salt, (encoded) ->
#console.log 'encrypted ' + encoded
user.sonapassword = encoded
#console.log encoded
user.salt = salt
next()
#encryption.decrypt encoded, salt, (output) ->
#next()
# override the cleartext password with the hashed one
http.createServer(app).listen app.get("port"), ->
console.log "Express server listening on port " + app.get("port")
| 200972 |
###
Module dependencies.
###
express = require("express")
http = require("http")
https = require("https")
path = require("path")
passport = require('passport')
querystring = require('querystring')
GoogleStrategy = require('passport-google').Strategy;
GoogleOAuth2Strategy = require('passport-google-oauth').OAuth2Strategy;
mongoose = require('mongoose')
bcrypt = require('bcrypt')
util = require("util")
exec = require("child_process").exec
child = undefined
Encryption = require('./encryption').Encryption
SALT_WORK_FACTOR = 10;
GoogleCalendar = require('google-calendar')
calendar = new GoogleCalendar.GoogleCalendar(
'314528618328-v4k0lve6dl83dboanp04d85n7u9mldd6.apps.googleusercontent.com',
'zH8LPkdnVmawAePGthgJ_cRo',
'http://calendarsync.dadams.c9.io/auth/google/callback')
db = mongoose.createConnection('mongodb://david:<EMAIL>:10041/calendarsync')
db.on "error", console.error.bind(console, "connection error:")
#child = exec 'python calendarsync.py', (error, stdout, stderr) ->
#console.log "stdout: " + stdout
#console.log "stderr: " + stderr
#console.log "exec error: " + error if error isnt null
#
encryption = new Encryption()
#console.log 'salting'
#console.log bcrypt.genSaltSync SALT_WORK_FACTOR
#password = "<PASSWORD>"
#input = "hello world"
#encryption.encrypt input, password, (encoded) ->
#console.log encoded
#encryption.decrypt encoded, password, (output) ->
#console.log output
userSchema = new mongoose.Schema(
# _id:mongoose.Types.ObjectId
displayName:String
email:
type: String, unique:true
firstname: String
lastname:String
sonausername: String
sonapassword: String
sonavalid: Boolean
experiments: Array
calendarnames: Array
calendarids: Array
salt: String
gid:
type: String, unique:true
accessToken:
type: String, unique:true)
User = db.model 'users', userSchema
experimentSchema = new mongoose.Schema(
_id:String
Date:String
Date2:String
starttime:String
endtime:String
ParticipantSigned:String
StudentTimeSlot:String
sonanumber:Number
experiment:String
calendarid:String)
Experiment = db.model 'experiments', experimentSchema
#david = new User
#name: '<NAME>'
#email: '<EMAIL>'
#
#david.save (err)->
#console.log("error saving") if err
#
#Experiment.find (err,result)->
#console.log 'finding things'
#if(err)
#console.log(err)
#else
#console.log(result)
passport.serializeUser (user, done) ->
if user.identifier
done null, user.identifier
if user.id
done null, user.id
#console.log 'name ' + user.name
#console.log ' email ' + user.emails
#User.findOne 'gid':user.identifier, (err,u)->
#console.log 'serializeUser ' + user.identifier
#if u
#console.log 'found old user' + u
#done(null,u)
#else
#console.log 'serializeUser no user found'
#u['gid'] = user.identifier
#u.save (err)->
#console.log("error saving") if err
#done null, u
passport.deserializeUser (id, done) ->
#console.log 'deserializeUser ' + id
User.findOne 'gid':id, (err, user)->
if err
console.log 'user not found ' + id
done err, user
###
passport.use new GoogleStrategy(
returnURL: "http://calendarsync.dadams.c9.io/auth/google/return"
realm: "http://calendarsync.dadams.c9.io/",
(identifier, profile, done) ->
# asynchronous verification, for effect...
process.nextTick ->
#To keep the example simple, the user's Google profile is returned to
#represent the logged-in user. In a typical application, you would want
#to associate the Google account with a user record in your database,
#and return that user instead.
profile.identifier = identifier
User.findOne 'gid':identifier, (err, user)->
if !user
console.log 'creating new user'
u = new User()
u.emails = profile.emails.value
u.firstname = profile.name.givenName
u.lastname = profile.name.familyName
u.displayName = profile.displayName
u.gid = identifier
u.save()
#console.log(profile)
#console.log(identifier)
done null, profile
)
###
passport.use new GoogleOAuth2Strategy(
clientID: '314528618328-v4k0lve6dl83dboanp04d85n7u9mldd6.apps.googleusercontent.com'
clientSecret: '<KEY>'
callbackURL: "http://calendarsync.dadams.c9.io/auth/google/callback",
(accessToken, refreshToken, profile, done) ->
#console.log accessToken
#console.log refreshToken
#console.log profile._json
json = JSON.parse(JSON.stringify(profile._json))
console.log json
User.findOne 'gid':json.id, (err, user)->
if !user
console.log 'creating new user'
u = new User()
u.email = json.email
u.firstname = json.given_name
u.lastname = json.family_name
u.displayName = json.name
u.gid = json.id
u.sonavalid = false
u.sonapassword='<PASSWORD>'
u.accessToken = accessToken
u.save (err)->
console.log(err) if err
else
user.accessToken = accessToken
user.save (err)->
console.log(err) if err
done err, profile
#User.findOrCreate
#googleId: profile.id,
#(err, user) ->
#done err, user
)
app = express()
app.configure ->
app.set "port", process.env.PORT or 80
app.set "views", __dirname + "/views"
app.set "view engine", "jade"
app.use express.favicon()
app.use express.logger("dev")
app.use express.cookieParser()
app.use express.bodyParser()
app.use express.methodOverride()
app.use express.session { secret: 'keyboard cat' }
#app.use express.compiler(src: __dirname + '/public/lib', enable: ['less'] )
#app.use express.compiler(
# src: __dirname + "/public"
# enable: ["less"])
app.use passport.initialize()
app.use passport.session()
app.use app.router
app.use express.static(path.join(__dirname, "public"))
app.configure "development", ->
app.use express.errorHandler()
app.get "/", (req, res) ->
res.render "index",
user: req.user
title: 'Calendarsync'
app.get "/account", ensureAuthenticated, (req, res) ->
res.render "account",
user: req.user
app.get "/auth/google",
passport.authenticate("google",
scope: ['https://www.googleapis.com/auth/calendar',
'https://www.googleapis.com/auth/userinfo.profile',
'https://www.googleapis.com/auth/userinfo.email']
failureRedirect: "/"),
(req, res) ->
res.redirect "/"
app.get "/auth/google/return", passport.authenticate("google", failureRedirect: "/"),
(req, res) ->
res.redirect "/manage"
app.get "/auth/google/callback", passport.authenticate("google", failureRedirect: "/"),
(req, res) ->
# Successful authentication, redirect home.
res.redirect "/"
app.get "/logout", (req, res) ->
req.logout()
res.redirect "/"
app.get "/manage", (req, res) ->
res.render "manage",
user: req.user
app.post "/manage", (req, res) ->
sonalogin = req.param 'sona_login', null
sonapass = req.param 'sona_password', null
#console.log sonalogin
#console.log sonapass
if sonapass
req.user.sonapassword=<PASSWORD>
if sonalogin
req.user.sonausername=sonalogin
req.user.save (err) ->
console.log("error saving sona data " + err) if err
res.redirect '/update'
app.post "/update/:index?", (req, res) ->
calendarname = req.param 'calendar_name', null
index = req.params.index
calendar.listCalendarList req.user.accessToken, (err, calendarList) ->
if err
console.log err
else
calendarList.items.forEach (cal) ->
console.log "Calendar : " + cal.id
#console.log "index : " + index
#console.log calendarname
if calendarname == cal.summary
Experiment.find
'experiment': req.user.experiments[index]
(err,result)->
#console.log result
for exp in result
console.log exp
if exp.ParticipantSigned == '1'
event =
"status": 'confirmed'
"summary": exp.StudentTimeSlot
"description": exp.experiment
"start":
"dateTime": exp.starttime
"end":
"dateTime": exp.endtime
console.log event
if exp.calendarid
calendar.updateEvent req.user.accessToken, cal.id, exp.calendarid,(err, event) ->
if err
console.log err
else
console.log 'updated existing event ' + event.id
else
calendar.insertEvent req.user.accessToken, cal.id,event,(err, event) ->
if err
console.log err
else
console.log 'added new event ' + event.id
exp.calendarid = event.id
#exp.save (err)->
#console.log err if err
#e = new Experiment(exp)
#e.save (err)->
#console.log err if err
Experiment.findByIdAndUpdate exp._id,
'calendarid': event.id, (err)->
console.log err if err
else if exp.ParticipantSigned == '0' and exp.calendarid
calendar.deleteEvent req.user.accessToken, cal.id, exp.calendarid,(err, event) ->
if err
console.log err
else
console.log 'deleted existing event ' + event.id
event.id = null
exp.save (err)->
console.log err if err
#Events.list
res.redirect '/'
###
app.post "/update", (req, res) ->
options = querystring.stringify
'host': 'www.googleapis.com'
'path': '/calendar/v3/users/me/calendarList?key=z<KEY>'
'method': 'GET'
'authorization': 'Bearer ' + req.user.accessToken
'referer': 'http://calendarsync.dadams.c9.io'
console.log options
request = https.request(options, (response) ->
console.log "statusCode: ", response.statusCode
console.log "headers: ", JSON.stringify(response.headers)
response.on "data", (d) ->
console.log d
)
request.end()
request.on "error", (e) ->
console.error e
res.redirect '/'
###
app.get "/update", (req, res) ->
child = exec 'python calendarsync.py', (error, stdout, stderr) ->
console.log "stdout: " + stdout
console.log "stderr: " + stderr
console.log "exec error: " + error if error isnt null
User.findOne 'gid':req.user.gid, (err, user)->
req.user = user
if req.user.sonavalid
res.redirect "/experiments"
else
res.redirect '/manage'
app.get "/experiments/:experiment", (req, res) ->
console.log "experiment number " + req.params.experiment
console.log req.user.experiments[parseInt req.params.experiment]
Experiment.find
'experiment': req.user.experiments[parseInt req.params.experiment]
'ParticipantSigned': '1',
(err,result)->
console.log 'finding experiments'
if(err)
console.log(err)
res.redirect '/experiments'
else
res.render "calendar"
user: req.user
experiments: result
title: req.user.experiments[parseInt req.params.experiment]
index: parseInt req.params.experiment
#app.get '/calendar', (req, res) ->
#res.render "calendar"
app.get "/experiments", (req, res) ->
if req.user.sonavalid
res.render "experiments",
title: 'Experiments'
user: req.user
else
res.redirect '/manage'
ensureAuthenticated = (req, res, next) ->
return next() if req.isAuthenticated()
res.redirect "/"
userSchema.pre "save", (next) ->
user = this
# only hash the password if it has been modified (or is new)
return next() unless user.isModified("sonapassword")
# generate a salt
salt = bcrypt.genSaltSync SALT_WORK_FACTOR
console.log user
encryption.encrypt user.sonapassword, salt, (encoded) ->
#console.log 'encrypted ' + encoded
user.sonapassword = encoded
#console.log encoded
user.salt = salt
next()
#encryption.decrypt encoded, salt, (output) ->
#next()
# override the cleartext password with the hashed one
http.createServer(app).listen app.get("port"), ->
console.log "Express server listening on port " + app.get("port")
| true |
###
Module dependencies.
###
express = require("express")
http = require("http")
https = require("https")
path = require("path")
passport = require('passport')
querystring = require('querystring')
GoogleStrategy = require('passport-google').Strategy;
GoogleOAuth2Strategy = require('passport-google-oauth').OAuth2Strategy;
mongoose = require('mongoose')
bcrypt = require('bcrypt')
util = require("util")
exec = require("child_process").exec
child = undefined
Encryption = require('./encryption').Encryption
SALT_WORK_FACTOR = 10;
GoogleCalendar = require('google-calendar')
calendar = new GoogleCalendar.GoogleCalendar(
'314528618328-v4k0lve6dl83dboanp04d85n7u9mldd6.apps.googleusercontent.com',
'zH8LPkdnVmawAePGthgJ_cRo',
'http://calendarsync.dadams.c9.io/auth/google/callback')
db = mongoose.createConnection('mongodb://david:PI:EMAIL:<EMAIL>END_PI:10041/calendarsync')
db.on "error", console.error.bind(console, "connection error:")
#child = exec 'python calendarsync.py', (error, stdout, stderr) ->
#console.log "stdout: " + stdout
#console.log "stderr: " + stderr
#console.log "exec error: " + error if error isnt null
#
encryption = new Encryption()
#console.log 'salting'
#console.log bcrypt.genSaltSync SALT_WORK_FACTOR
#password = "PI:PASSWORD:<PASSWORD>END_PI"
#input = "hello world"
#encryption.encrypt input, password, (encoded) ->
#console.log encoded
#encryption.decrypt encoded, password, (output) ->
#console.log output
userSchema = new mongoose.Schema(
# _id:mongoose.Types.ObjectId
displayName:String
email:
type: String, unique:true
firstname: String
lastname:String
sonausername: String
sonapassword: String
sonavalid: Boolean
experiments: Array
calendarnames: Array
calendarids: Array
salt: String
gid:
type: String, unique:true
accessToken:
type: String, unique:true)
User = db.model 'users', userSchema
experimentSchema = new mongoose.Schema(
_id:String
Date:String
Date2:String
starttime:String
endtime:String
ParticipantSigned:String
StudentTimeSlot:String
sonanumber:Number
experiment:String
calendarid:String)
Experiment = db.model 'experiments', experimentSchema
#david = new User
#name: 'PI:NAME:<NAME>END_PI'
#email: 'PI:EMAIL:<EMAIL>END_PI'
#
#david.save (err)->
#console.log("error saving") if err
#
#Experiment.find (err,result)->
#console.log 'finding things'
#if(err)
#console.log(err)
#else
#console.log(result)
passport.serializeUser (user, done) ->
if user.identifier
done null, user.identifier
if user.id
done null, user.id
#console.log 'name ' + user.name
#console.log ' email ' + user.emails
#User.findOne 'gid':user.identifier, (err,u)->
#console.log 'serializeUser ' + user.identifier
#if u
#console.log 'found old user' + u
#done(null,u)
#else
#console.log 'serializeUser no user found'
#u['gid'] = user.identifier
#u.save (err)->
#console.log("error saving") if err
#done null, u
passport.deserializeUser (id, done) ->
#console.log 'deserializeUser ' + id
User.findOne 'gid':id, (err, user)->
if err
console.log 'user not found ' + id
done err, user
###
passport.use new GoogleStrategy(
returnURL: "http://calendarsync.dadams.c9.io/auth/google/return"
realm: "http://calendarsync.dadams.c9.io/",
(identifier, profile, done) ->
# asynchronous verification, for effect...
process.nextTick ->
#To keep the example simple, the user's Google profile is returned to
#represent the logged-in user. In a typical application, you would want
#to associate the Google account with a user record in your database,
#and return that user instead.
profile.identifier = identifier
User.findOne 'gid':identifier, (err, user)->
if !user
console.log 'creating new user'
u = new User()
u.emails = profile.emails.value
u.firstname = profile.name.givenName
u.lastname = profile.name.familyName
u.displayName = profile.displayName
u.gid = identifier
u.save()
#console.log(profile)
#console.log(identifier)
done null, profile
)
###
passport.use new GoogleOAuth2Strategy(
clientID: '314528618328-v4k0lve6dl83dboanp04d85n7u9mldd6.apps.googleusercontent.com'
clientSecret: 'PI:KEY:<KEY>END_PI'
callbackURL: "http://calendarsync.dadams.c9.io/auth/google/callback",
(accessToken, refreshToken, profile, done) ->
#console.log accessToken
#console.log refreshToken
#console.log profile._json
json = JSON.parse(JSON.stringify(profile._json))
console.log json
User.findOne 'gid':json.id, (err, user)->
if !user
console.log 'creating new user'
u = new User()
u.email = json.email
u.firstname = json.given_name
u.lastname = json.family_name
u.displayName = json.name
u.gid = json.id
u.sonavalid = false
u.sonapassword='PI:PASSWORD:<PASSWORD>END_PI'
u.accessToken = accessToken
u.save (err)->
console.log(err) if err
else
user.accessToken = accessToken
user.save (err)->
console.log(err) if err
done err, profile
#User.findOrCreate
#googleId: profile.id,
#(err, user) ->
#done err, user
)
app = express()
app.configure ->
app.set "port", process.env.PORT or 80
app.set "views", __dirname + "/views"
app.set "view engine", "jade"
app.use express.favicon()
app.use express.logger("dev")
app.use express.cookieParser()
app.use express.bodyParser()
app.use express.methodOverride()
app.use express.session { secret: 'keyboard cat' }
#app.use express.compiler(src: __dirname + '/public/lib', enable: ['less'] )
#app.use express.compiler(
# src: __dirname + "/public"
# enable: ["less"])
app.use passport.initialize()
app.use passport.session()
app.use app.router
app.use express.static(path.join(__dirname, "public"))
app.configure "development", ->
app.use express.errorHandler()
app.get "/", (req, res) ->
res.render "index",
user: req.user
title: 'Calendarsync'
app.get "/account", ensureAuthenticated, (req, res) ->
res.render "account",
user: req.user
app.get "/auth/google",
passport.authenticate("google",
scope: ['https://www.googleapis.com/auth/calendar',
'https://www.googleapis.com/auth/userinfo.profile',
'https://www.googleapis.com/auth/userinfo.email']
failureRedirect: "/"),
(req, res) ->
res.redirect "/"
app.get "/auth/google/return", passport.authenticate("google", failureRedirect: "/"),
(req, res) ->
res.redirect "/manage"
app.get "/auth/google/callback", passport.authenticate("google", failureRedirect: "/"),
(req, res) ->
# Successful authentication, redirect home.
res.redirect "/"
app.get "/logout", (req, res) ->
req.logout()
res.redirect "/"
app.get "/manage", (req, res) ->
res.render "manage",
user: req.user
app.post "/manage", (req, res) ->
sonalogin = req.param 'sona_login', null
sonapass = req.param 'sona_password', null
#console.log sonalogin
#console.log sonapass
if sonapass
req.user.sonapassword=PI:PASSWORD:<PASSWORD>END_PI
if sonalogin
req.user.sonausername=sonalogin
req.user.save (err) ->
console.log("error saving sona data " + err) if err
res.redirect '/update'
app.post "/update/:index?", (req, res) ->
calendarname = req.param 'calendar_name', null
index = req.params.index
calendar.listCalendarList req.user.accessToken, (err, calendarList) ->
if err
console.log err
else
calendarList.items.forEach (cal) ->
console.log "Calendar : " + cal.id
#console.log "index : " + index
#console.log calendarname
if calendarname == cal.summary
Experiment.find
'experiment': req.user.experiments[index]
(err,result)->
#console.log result
for exp in result
console.log exp
if exp.ParticipantSigned == '1'
event =
"status": 'confirmed'
"summary": exp.StudentTimeSlot
"description": exp.experiment
"start":
"dateTime": exp.starttime
"end":
"dateTime": exp.endtime
console.log event
if exp.calendarid
calendar.updateEvent req.user.accessToken, cal.id, exp.calendarid,(err, event) ->
if err
console.log err
else
console.log 'updated existing event ' + event.id
else
calendar.insertEvent req.user.accessToken, cal.id,event,(err, event) ->
if err
console.log err
else
console.log 'added new event ' + event.id
exp.calendarid = event.id
#exp.save (err)->
#console.log err if err
#e = new Experiment(exp)
#e.save (err)->
#console.log err if err
Experiment.findByIdAndUpdate exp._id,
'calendarid': event.id, (err)->
console.log err if err
else if exp.ParticipantSigned == '0' and exp.calendarid
calendar.deleteEvent req.user.accessToken, cal.id, exp.calendarid,(err, event) ->
if err
console.log err
else
console.log 'deleted existing event ' + event.id
event.id = null
exp.save (err)->
console.log err if err
#Events.list
res.redirect '/'
###
app.post "/update", (req, res) ->
options = querystring.stringify
'host': 'www.googleapis.com'
'path': '/calendar/v3/users/me/calendarList?key=zPI:KEY:<KEY>END_PI'
'method': 'GET'
'authorization': 'Bearer ' + req.user.accessToken
'referer': 'http://calendarsync.dadams.c9.io'
console.log options
request = https.request(options, (response) ->
console.log "statusCode: ", response.statusCode
console.log "headers: ", JSON.stringify(response.headers)
response.on "data", (d) ->
console.log d
)
request.end()
request.on "error", (e) ->
console.error e
res.redirect '/'
###
app.get "/update", (req, res) ->
child = exec 'python calendarsync.py', (error, stdout, stderr) ->
console.log "stdout: " + stdout
console.log "stderr: " + stderr
console.log "exec error: " + error if error isnt null
User.findOne 'gid':req.user.gid, (err, user)->
req.user = user
if req.user.sonavalid
res.redirect "/experiments"
else
res.redirect '/manage'
app.get "/experiments/:experiment", (req, res) ->
console.log "experiment number " + req.params.experiment
console.log req.user.experiments[parseInt req.params.experiment]
Experiment.find
'experiment': req.user.experiments[parseInt req.params.experiment]
'ParticipantSigned': '1',
(err,result)->
console.log 'finding experiments'
if(err)
console.log(err)
res.redirect '/experiments'
else
res.render "calendar"
user: req.user
experiments: result
title: req.user.experiments[parseInt req.params.experiment]
index: parseInt req.params.experiment
#app.get '/calendar', (req, res) ->
#res.render "calendar"
app.get "/experiments", (req, res) ->
if req.user.sonavalid
res.render "experiments",
title: 'Experiments'
user: req.user
else
res.redirect '/manage'
ensureAuthenticated = (req, res, next) ->
return next() if req.isAuthenticated()
res.redirect "/"
userSchema.pre "save", (next) ->
user = this
# only hash the password if it has been modified (or is new)
return next() unless user.isModified("sonapassword")
# generate a salt
salt = bcrypt.genSaltSync SALT_WORK_FACTOR
console.log user
encryption.encrypt user.sonapassword, salt, (encoded) ->
#console.log 'encrypted ' + encoded
user.sonapassword = encoded
#console.log encoded
user.salt = salt
next()
#encryption.decrypt encoded, salt, (output) ->
#next()
# override the cleartext password with the hashed one
http.createServer(app).listen app.get("port"), ->
console.log "Express server listening on port " + app.get("port")
|
[
{
"context": "rce: {\n resourceType: 'Task', name: 'foo bar'\n })]\n )[0].fhir_create_resou",
"end": 4682,
"score": 0.9480881690979004,
"start": 4675,
"tag": "NAME",
"value": "foo bar"
},
{
"context": "ource\n )\n assert.equal(created.name... | test/integration_spec.coffee | micabe/fhirbase | 0 | plv8 = require('../plpl/src/plv8')
assert = require('assert')
copy = (x)-> JSON.parse(JSON.stringify(x))
describe 'Integration',->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
it 'conformance', ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
conformance = plv8.execute(
'SELECT fhir_conformance($1)',
[JSON.stringify({somekey: 'somevalue'})]
)
assert.equal(
JSON.parse(conformance[0].fhir_conformance)
.rest[0].resource.filter(
(resource)-> resource.type == 'Task'
).length,
1
)
it 'FHIR version', ->
version = plv8.execute("SELECT fhir_version()")[0].fhir_version
assert.equal(
!!version.match(/.*[0-9]*\.[0-9]*\.[0-9].*/),
true
)
it 'Fhirbase version', ->
version = plv8.execute("SELECT fhirbase_version()")[0].fhirbase_version
assert.equal(
!!version.match(/.*[0-9]*\.[0-9]*\.[0-9].*/),
true
)
it 'Fhirbase release date', ->
version = plv8.execute("SELECT fhirbase_release_date()")[0]
.fhirbase_release_date
assert.equal(
!!version.match(/-?[0-9]{4}(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\.[0-9]+)?(Z|(\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00)))?)?)?/),
true
)
describe 'Schema storage', ->
beforeEach ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'create', ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
1
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task_history' AND table_schema = current_schema()
''').length,
1
)
it 'create all', ->
this.timeout(15000) # creating all storage takes longer time than default 2000 milliseconds <https://mochajs.org/#timeouts>
plv8.execute('SELECT fhir_create_all_storages()')
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
1
)
it 'drop', ->
plv8.execute(
'SELECT fhir_drop_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
0
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task_history' AND table_schema = current_schema()
''').length,
0
)
it 'drop all', ->
plv8.execute('SELECT fhir_drop_all_storages()')
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
0
)
it 'truncate', ->
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)
truncateOutcome =
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
issue = JSON.parse(truncateOutcome[0].fhir_truncate_storage).issue[0]
assert.equal(issue.diagnostics, 'Resource type "Task" has been truncated')
it 'describe', ->
describe = plv8.execute(
'SELECT fhir_describe_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
JSON.parse(describe[0].fhir_describe_storage).name,
'task'
)
describe 'CRUD', ->
before ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'create', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'foo bar'
})]
)[0].fhir_create_resource
)
assert.equal(created.name, 'foo bar')
it 'read', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(readed.id, created.id)
it 'vread', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)[0].fhir_create_resource
)
created.versionId = created.meta.versionId
vreaded =
JSON.parse(
plv8.execute(
'SELECT fhir_vread_resource($1)',
[JSON.stringify(created)]
)[0].fhir_vread_resource
)
assert.equal(created.id, vreaded.id)
it 'update', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'foo'})]
)[0].fhir_create_resource
)
toUpdate = copy(created)
toUpdate.name = 'bar'
updated =
JSON.parse(
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)[0].fhir_update_resource
)
assert.equal(updated.name, toUpdate.name)
it 'delete', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(allowId: true, resource: {
id: 'toBeDeleted', resourceType: 'Task'
})]
)[0].fhir_create_resource
)
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)
readDeleted =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(readDeleted.resourceType, 'OperationOutcome')
issue = readDeleted.issue[0]
assert.equal(
issue.details.coding[0].display,
'The resource "toBeDeleted" has been deleted'
)
it 'terminate', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(allowId: true, resource: {
id: 'toBeTerminated', resourceType: 'Task'
})]
)[0].fhir_create_resource
)
plv8.execute(
'SELECT fhir_terminate_resource($1)',
[JSON.stringify(resourceType: 'Task', id: created.id)]
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(
readed.issue[0].diagnostics,
'Resource Id "toBeTerminated" does not exist'
)
it 'patch', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'foo'})]
)[0].fhir_create_resource
)
patched =
JSON.parse(
plv8.execute(
'SELECT fhir_patch_resource($1)', [JSON.stringify(
resource: {id: created.id, resourceType: 'Task'},
patch: [
{op: 'replace', path: '/name', value: 'bar1'},
{op: 'replace', path: '/name', value: 'bar2'}
]
)])[0].fhir_patch_resource
)
assert.deepEqual(patched.name, 'bar2')
assert.notEqual(patched.meta.versionId, false)
assert.notEqual(patched.meta.versionId, created.meta.versionId)
read_patched =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(patched)]
)[0].fhir_read_resource
)
assert.deepEqual(read_patched.name, 'bar2')
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_history($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_resource_history
)
assert.deepEqual(read_patched.name, 'bar2')
assert.equal(hx.total, 2)
assert.equal(hx.entry.length, 2)
describe 'History', ->
before ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'resource', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'foo'
})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
toUpdate = copy(readed)
toUpdate.name = 'bar'
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)
deleted =
JSON.parse(
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_delete_resource
)
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_history($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_resource_history
)
assert.equal(hx.total, 3)
assert.equal(hx.entry.length, 3)
assert.deepEqual(
hx.entry.map((entry) -> entry.request.method),
['DELETE', 'PUT', 'POST']
)
it 'resource type', ->
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'u1'})]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'u2'})]
)
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'foo'
})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
toUpdate = copy(readed)
toUpdate.name = 'bar'
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)
deleted =
JSON.parse(
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_delete_resource
)
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_type_history($1)',
[JSON.stringify(resourceType: 'Task')]
)[0].fhir_resource_type_history
)
assert.equal(hx.total, 5)
assert.deepEqual(
hx.entry.map((entry) -> entry.request.method),
['DELETE', 'PUT', 'POST', 'POST', 'POST']
)
describe 'Search API', ->
before ->
plv8.execute(
'SELECT fhir_drop_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task',
identifier: {
system: 'http://example.com/TaskIdentifier',
value: 'foo'
}
})]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task',
identifier: {
system: 'http://example.com/TaskIdentifier',
value: 'bar'
}
})]
)
it 'search by identifier', ->
searched =
JSON.parse(
plv8.execute(
'SELECT fhir_search($1)',
[JSON.stringify(
resourceType: 'Task',
queryString: 'identifier=foo'
)]
)[0].fhir_search
)
assert.equal(searched.total, 1)
it 'index', ->
indexed =
JSON.parse(
plv8.execute(
'SELECT fhir_index_parameter($1)',
[JSON.stringify(resourceType: 'Task', name: 'identifier')]
)[0].fhir_index_parameter
)
assert.equal(indexed[0].status, 'ok')
assert.equal(indexed[0].message, 'Index task_identifier_token was created')
it 'analyze', ->
analyzed =
JSON.parse(
plv8.execute(
'SELECT fhir_analyze_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)[0].fhir_analyze_storage
)
assert.equal(analyzed.message, 'analyzed')
it 'explain', ->
explained =
plv8.execute(
'SELECT fhir_explain_search($1)',
[JSON.stringify(
resourceType: 'Task',
queryString: 'identifier=foo'
)]
)[0].fhir_explain_search
assert.equal(explained, 1)
it 'pagination', ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
for _ in [1..11]
plv8.execute('''
SELECT fhir_create_resource(' {"resource": {"resourceType": "Patient"}} ');
''')
outcome1 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": ""}
');
''')[0].fhir_search
)
assert.equal(outcome1.entry.length, 10)
outcome2 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": "_count=3"}
');
''')[0].fhir_search
)
assert.equal(outcome2.entry.length, 3)
outcome3 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": "_count=999"}
');
''')[0].fhir_search
)
assert.equal(outcome3.entry.length, 11)
| 65412 | plv8 = require('../plpl/src/plv8')
assert = require('assert')
copy = (x)-> JSON.parse(JSON.stringify(x))
describe 'Integration',->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
it 'conformance', ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
conformance = plv8.execute(
'SELECT fhir_conformance($1)',
[JSON.stringify({somekey: 'somevalue'})]
)
assert.equal(
JSON.parse(conformance[0].fhir_conformance)
.rest[0].resource.filter(
(resource)-> resource.type == 'Task'
).length,
1
)
it 'FHIR version', ->
version = plv8.execute("SELECT fhir_version()")[0].fhir_version
assert.equal(
!!version.match(/.*[0-9]*\.[0-9]*\.[0-9].*/),
true
)
it 'Fhirbase version', ->
version = plv8.execute("SELECT fhirbase_version()")[0].fhirbase_version
assert.equal(
!!version.match(/.*[0-9]*\.[0-9]*\.[0-9].*/),
true
)
it 'Fhirbase release date', ->
version = plv8.execute("SELECT fhirbase_release_date()")[0]
.fhirbase_release_date
assert.equal(
!!version.match(/-?[0-9]{4}(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\.[0-9]+)?(Z|(\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00)))?)?)?/),
true
)
describe 'Schema storage', ->
beforeEach ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'create', ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
1
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task_history' AND table_schema = current_schema()
''').length,
1
)
it 'create all', ->
this.timeout(15000) # creating all storage takes longer time than default 2000 milliseconds <https://mochajs.org/#timeouts>
plv8.execute('SELECT fhir_create_all_storages()')
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
1
)
it 'drop', ->
plv8.execute(
'SELECT fhir_drop_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
0
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task_history' AND table_schema = current_schema()
''').length,
0
)
it 'drop all', ->
plv8.execute('SELECT fhir_drop_all_storages()')
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
0
)
it 'truncate', ->
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)
truncateOutcome =
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
issue = JSON.parse(truncateOutcome[0].fhir_truncate_storage).issue[0]
assert.equal(issue.diagnostics, 'Resource type "Task" has been truncated')
it 'describe', ->
describe = plv8.execute(
'SELECT fhir_describe_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
JSON.parse(describe[0].fhir_describe_storage).name,
'task'
)
describe 'CRUD', ->
before ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'create', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: '<NAME>'
})]
)[0].fhir_create_resource
)
assert.equal(created.name, '<NAME>')
it 'read', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(readed.id, created.id)
it 'vread', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)[0].fhir_create_resource
)
created.versionId = created.meta.versionId
vreaded =
JSON.parse(
plv8.execute(
'SELECT fhir_vread_resource($1)',
[JSON.stringify(created)]
)[0].fhir_vread_resource
)
assert.equal(created.id, vreaded.id)
it 'update', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'foo'})]
)[0].fhir_create_resource
)
toUpdate = copy(created)
toUpdate.name = 'bar'
updated =
JSON.parse(
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)[0].fhir_update_resource
)
assert.equal(updated.name, toUpdate.name)
it 'delete', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(allowId: true, resource: {
id: 'toBeDeleted', resourceType: 'Task'
})]
)[0].fhir_create_resource
)
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)
readDeleted =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(readDeleted.resourceType, 'OperationOutcome')
issue = readDeleted.issue[0]
assert.equal(
issue.details.coding[0].display,
'The resource "toBeDeleted" has been deleted'
)
it 'terminate', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(allowId: true, resource: {
id: 'toBeTerminated', resourceType: 'Task'
})]
)[0].fhir_create_resource
)
plv8.execute(
'SELECT fhir_terminate_resource($1)',
[JSON.stringify(resourceType: 'Task', id: created.id)]
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(
readed.issue[0].diagnostics,
'Resource Id "toBeTerminated" does not exist'
)
it 'patch', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'foo'})]
)[0].fhir_create_resource
)
patched =
JSON.parse(
plv8.execute(
'SELECT fhir_patch_resource($1)', [JSON.stringify(
resource: {id: created.id, resourceType: 'Task'},
patch: [
{op: 'replace', path: '/name', value: 'bar1'},
{op: 'replace', path: '/name', value: 'bar2'}
]
)])[0].fhir_patch_resource
)
assert.deepEqual(patched.name, 'bar2')
assert.notEqual(patched.meta.versionId, false)
assert.notEqual(patched.meta.versionId, created.meta.versionId)
read_patched =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(patched)]
)[0].fhir_read_resource
)
assert.deepEqual(read_patched.name, 'bar2')
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_history($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_resource_history
)
assert.deepEqual(read_patched.name, 'bar2')
assert.equal(hx.total, 2)
assert.equal(hx.entry.length, 2)
describe 'History', ->
before ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'resource', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'foo'
})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
toUpdate = copy(readed)
toUpdate.name = '<NAME>'
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)
deleted =
JSON.parse(
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_delete_resource
)
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_history($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_resource_history
)
assert.equal(hx.total, 3)
assert.equal(hx.entry.length, 3)
assert.deepEqual(
hx.entry.map((entry) -> entry.request.method),
['DELETE', 'PUT', 'POST']
)
it 'resource type', ->
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'u1'})]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'u2'})]
)
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: '<NAME>'
})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
toUpdate = copy(readed)
toUpdate.name = '<NAME>'
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)
deleted =
JSON.parse(
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_delete_resource
)
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_type_history($1)',
[JSON.stringify(resourceType: 'Task')]
)[0].fhir_resource_type_history
)
assert.equal(hx.total, 5)
assert.deepEqual(
hx.entry.map((entry) -> entry.request.method),
['DELETE', 'PUT', 'POST', 'POST', 'POST']
)
describe 'Search API', ->
before ->
plv8.execute(
'SELECT fhir_drop_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task',
identifier: {
system: 'http://example.com/TaskIdentifier',
value: 'foo'
}
})]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task',
identifier: {
system: 'http://example.com/TaskIdentifier',
value: 'bar'
}
})]
)
it 'search by identifier', ->
searched =
JSON.parse(
plv8.execute(
'SELECT fhir_search($1)',
[JSON.stringify(
resourceType: 'Task',
queryString: 'identifier=foo'
)]
)[0].fhir_search
)
assert.equal(searched.total, 1)
it 'index', ->
indexed =
JSON.parse(
plv8.execute(
'SELECT fhir_index_parameter($1)',
[JSON.stringify(resourceType: 'Task', name: 'identifier')]
)[0].fhir_index_parameter
)
assert.equal(indexed[0].status, 'ok')
assert.equal(indexed[0].message, 'Index task_identifier_token was created')
it 'analyze', ->
analyzed =
JSON.parse(
plv8.execute(
'SELECT fhir_analyze_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)[0].fhir_analyze_storage
)
assert.equal(analyzed.message, 'analyzed')
it 'explain', ->
explained =
plv8.execute(
'SELECT fhir_explain_search($1)',
[JSON.stringify(
resourceType: 'Task',
queryString: 'identifier=foo'
)]
)[0].fhir_explain_search
assert.equal(explained, 1)
it 'pagination', ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
for _ in [1..11]
plv8.execute('''
SELECT fhir_create_resource(' {"resource": {"resourceType": "Patient"}} ');
''')
outcome1 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": ""}
');
''')[0].fhir_search
)
assert.equal(outcome1.entry.length, 10)
outcome2 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": "_count=3"}
');
''')[0].fhir_search
)
assert.equal(outcome2.entry.length, 3)
outcome3 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": "_count=999"}
');
''')[0].fhir_search
)
assert.equal(outcome3.entry.length, 11)
| true | plv8 = require('../plpl/src/plv8')
assert = require('assert')
copy = (x)-> JSON.parse(JSON.stringify(x))
describe 'Integration',->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
it 'conformance', ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
conformance = plv8.execute(
'SELECT fhir_conformance($1)',
[JSON.stringify({somekey: 'somevalue'})]
)
assert.equal(
JSON.parse(conformance[0].fhir_conformance)
.rest[0].resource.filter(
(resource)-> resource.type == 'Task'
).length,
1
)
it 'FHIR version', ->
version = plv8.execute("SELECT fhir_version()")[0].fhir_version
assert.equal(
!!version.match(/.*[0-9]*\.[0-9]*\.[0-9].*/),
true
)
it 'Fhirbase version', ->
version = plv8.execute("SELECT fhirbase_version()")[0].fhirbase_version
assert.equal(
!!version.match(/.*[0-9]*\.[0-9]*\.[0-9].*/),
true
)
it 'Fhirbase release date', ->
version = plv8.execute("SELECT fhirbase_release_date()")[0]
.fhirbase_release_date
assert.equal(
!!version.match(/-?[0-9]{4}(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9](\.[0-9]+)?(Z|(\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00)))?)?)?/),
true
)
describe 'Schema storage', ->
beforeEach ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'create', ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
1
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task_history' AND table_schema = current_schema()
''').length,
1
)
it 'create all', ->
this.timeout(15000) # creating all storage takes longer time than default 2000 milliseconds <https://mochajs.org/#timeouts>
plv8.execute('SELECT fhir_create_all_storages()')
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
1
)
it 'drop', ->
plv8.execute(
'SELECT fhir_drop_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
0
)
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task_history' AND table_schema = current_schema()
''').length,
0
)
it 'drop all', ->
plv8.execute('SELECT fhir_drop_all_storages()')
assert.equal(
plv8.execute('''
SELECT * from information_schema.tables
WHERE table_name = 'task' AND table_schema = current_schema()
''').length,
0
)
it 'truncate', ->
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)
truncateOutcome =
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
issue = JSON.parse(truncateOutcome[0].fhir_truncate_storage).issue[0]
assert.equal(issue.diagnostics, 'Resource type "Task" has been truncated')
it 'describe', ->
describe = plv8.execute(
'SELECT fhir_describe_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
assert.equal(
JSON.parse(describe[0].fhir_describe_storage).name,
'task'
)
describe 'CRUD', ->
before ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'create', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'PI:NAME:<NAME>END_PI'
})]
)[0].fhir_create_resource
)
assert.equal(created.name, 'PI:NAME:<NAME>END_PI')
it 'read', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(readed.id, created.id)
it 'vread', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task'})]
)[0].fhir_create_resource
)
created.versionId = created.meta.versionId
vreaded =
JSON.parse(
plv8.execute(
'SELECT fhir_vread_resource($1)',
[JSON.stringify(created)]
)[0].fhir_vread_resource
)
assert.equal(created.id, vreaded.id)
it 'update', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'foo'})]
)[0].fhir_create_resource
)
toUpdate = copy(created)
toUpdate.name = 'bar'
updated =
JSON.parse(
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)[0].fhir_update_resource
)
assert.equal(updated.name, toUpdate.name)
it 'delete', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(allowId: true, resource: {
id: 'toBeDeleted', resourceType: 'Task'
})]
)[0].fhir_create_resource
)
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)
readDeleted =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(readDeleted.resourceType, 'OperationOutcome')
issue = readDeleted.issue[0]
assert.equal(
issue.details.coding[0].display,
'The resource "toBeDeleted" has been deleted'
)
it 'terminate', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(allowId: true, resource: {
id: 'toBeTerminated', resourceType: 'Task'
})]
)[0].fhir_create_resource
)
plv8.execute(
'SELECT fhir_terminate_resource($1)',
[JSON.stringify(resourceType: 'Task', id: created.id)]
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
assert.equal(
readed.issue[0].diagnostics,
'Resource Id "toBeTerminated" does not exist'
)
it 'patch', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'foo'})]
)[0].fhir_create_resource
)
patched =
JSON.parse(
plv8.execute(
'SELECT fhir_patch_resource($1)', [JSON.stringify(
resource: {id: created.id, resourceType: 'Task'},
patch: [
{op: 'replace', path: '/name', value: 'bar1'},
{op: 'replace', path: '/name', value: 'bar2'}
]
)])[0].fhir_patch_resource
)
assert.deepEqual(patched.name, 'bar2')
assert.notEqual(patched.meta.versionId, false)
assert.notEqual(patched.meta.versionId, created.meta.versionId)
read_patched =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(patched)]
)[0].fhir_read_resource
)
assert.deepEqual(read_patched.name, 'bar2')
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_history($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_resource_history
)
assert.deepEqual(read_patched.name, 'bar2')
assert.equal(hx.total, 2)
assert.equal(hx.entry.length, 2)
describe 'History', ->
before ->
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
it 'resource', ->
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'foo'
})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
toUpdate = copy(readed)
toUpdate.name = 'PI:NAME:<NAME>END_PI'
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)
deleted =
JSON.parse(
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_delete_resource
)
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_history($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_resource_history
)
assert.equal(hx.total, 3)
assert.equal(hx.entry.length, 3)
assert.deepEqual(
hx.entry.map((entry) -> entry.request.method),
['DELETE', 'PUT', 'POST']
)
it 'resource type', ->
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'u1'})]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {resourceType: 'Task', name: 'u2'})]
)
created =
JSON.parse(
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task', name: 'PI:NAME:<NAME>END_PI'
})]
)[0].fhir_create_resource
)
readed =
JSON.parse(
plv8.execute(
'SELECT fhir_read_resource($1)',
[JSON.stringify(id: created.id, resourceType: 'Task')]
)[0].fhir_read_resource
)
toUpdate = copy(readed)
toUpdate.name = 'PI:NAME:<NAME>END_PI'
plv8.execute(
'SELECT fhir_update_resource($1)',
[JSON.stringify(resource: toUpdate)]
)
deleted =
JSON.parse(
plv8.execute(
'SELECT fhir_delete_resource($1)',
[JSON.stringify(id: readed.id, resourceType: 'Task')]
)[0].fhir_delete_resource
)
hx =
JSON.parse(
plv8.execute(
'SELECT fhir_resource_type_history($1)',
[JSON.stringify(resourceType: 'Task')]
)[0].fhir_resource_type_history
)
assert.equal(hx.total, 5)
assert.deepEqual(
hx.entry.map((entry) -> entry.request.method),
['DELETE', 'PUT', 'POST', 'POST', 'POST']
)
describe 'Search API', ->
before ->
plv8.execute(
'SELECT fhir_drop_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_create_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
beforeEach ->
plv8.execute(
'SELECT fhir_truncate_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task',
identifier: {
system: 'http://example.com/TaskIdentifier',
value: 'foo'
}
})]
)
plv8.execute(
'SELECT fhir_create_resource($1)',
[JSON.stringify(resource: {
resourceType: 'Task',
identifier: {
system: 'http://example.com/TaskIdentifier',
value: 'bar'
}
})]
)
it 'search by identifier', ->
searched =
JSON.parse(
plv8.execute(
'SELECT fhir_search($1)',
[JSON.stringify(
resourceType: 'Task',
queryString: 'identifier=foo'
)]
)[0].fhir_search
)
assert.equal(searched.total, 1)
it 'index', ->
indexed =
JSON.parse(
plv8.execute(
'SELECT fhir_index_parameter($1)',
[JSON.stringify(resourceType: 'Task', name: 'identifier')]
)[0].fhir_index_parameter
)
assert.equal(indexed[0].status, 'ok')
assert.equal(indexed[0].message, 'Index task_identifier_token was created')
it 'analyze', ->
analyzed =
JSON.parse(
plv8.execute(
'SELECT fhir_analyze_storage($1)',
[JSON.stringify(resourceType: 'Task')]
)[0].fhir_analyze_storage
)
assert.equal(analyzed.message, 'analyzed')
it 'explain', ->
explained =
plv8.execute(
'SELECT fhir_explain_search($1)',
[JSON.stringify(
resourceType: 'Task',
queryString: 'identifier=foo'
)]
)[0].fhir_explain_search
assert.equal(explained, 1)
it 'pagination', ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
for _ in [1..11]
plv8.execute('''
SELECT fhir_create_resource(' {"resource": {"resourceType": "Patient"}} ');
''')
outcome1 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": ""}
');
''')[0].fhir_search
)
assert.equal(outcome1.entry.length, 10)
outcome2 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": "_count=3"}
');
''')[0].fhir_search
)
assert.equal(outcome2.entry.length, 3)
outcome3 =
JSON.parse(
plv8.execute('''
SELECT fhir_search('
{"resourceType": "Patient", "queryString": "_count=999"}
');
''')[0].fhir_search
)
assert.equal(outcome3.entry.length, 11)
|
[
{
"context": " and imbedded uppercased word\nMY_DOG = \" My dog, Tommy, is a really smart dog. \"\nSTRONG_HAS = MY_DO",
"end": 149,
"score": 0.9994627237319946,
"start": 144,
"tag": "NAME",
"value": "Tommy"
}
] | test/has.coffee | littlebee/bumble-strings | 0 |
BStr = require('../src/bumble-strings')
debugger
# test value with extra spaces everywhere and imbedded uppercased word
MY_DOG = " My dog, Tommy, is a really smart dog. "
STRONG_HAS = MY_DOG.slice(6, 15)
WEAK_HAS = BStr.trim(MY_DOG.slice(6, 20), all: true)
NON_MATCHING = ["smart cat", "smart unicorn.", "Somthing completely different"]
describe "has() & weaklyHas", ->
it "should has itself", -> # well, duh
assert BStr.has(MY_DOG, MY_DOG), "with itself"
assert BStr.has(MY_DOG, STRONG_HAS), "with a substring of itself"
it "should not has weaker version of itself", ->
assert !BStr.has(MY_DOG, WEAK_HAS)
it "should not has any non matching as array or individually", ->
testArray = NON_MATCHING.slice(0).concat([WEAK_HAS])
assert !BStr.has(MY_DOG, testArray), "array of nonmatching strings"
for nonMatch in NON_MATCHING
assert !BStr.has(MY_DOG, nonMatch), "like not #{nonMatch}"
it "should has one of an array", ->
# inject a stong match into the middle of non matching
testArray = NON_MATCHING.slice(0, 1).concat([STRONG_HAS]).concat(NON_MATCHING.slice(1))
assert BStr.has(MY_DOG, testArray)
it "should weaklyHas itself", -> # well, duh
assert BStr.weaklyHas(MY_DOG, STRONG_HAS)
it "should weaklyHas weaker versions of itself", ->
assert BStr.weaklyHas(MY_DOG, WEAK_HAS), "first 12 trimmed => #{WEAK_HAS}"
assert BStr.weaklyHas(MY_DOG, WEAK_HAS.toLowerCase()), "first 12 trimmed and lowercased"
assert BStr.weaklyHas(MY_DOG, " "), "single space at end"
it "should weaklyHas one of an array", ->
# inject a stong match into the middle of non matching
testArray = NON_MATCHING.slice(0, 1).concat([STRONG_HAS]).concat(NON_MATCHING.slice(1))
assert BStr.weaklyHas(MY_DOG, testArray), "should have found #{STRONG_HAS} in #{JSON.stringify(testArray)}"
testArray = NON_MATCHING.slice(0).concat([WEAK_HAS])
assert BStr.weaklyHas(MY_DOG, testArray), "should have found #{WEAK_HAS} in #{JSON.stringify(testArray)}"
| 135530 |
BStr = require('../src/bumble-strings')
debugger
# test value with extra spaces everywhere and imbedded uppercased word
MY_DOG = " My dog, <NAME>, is a really smart dog. "
STRONG_HAS = MY_DOG.slice(6, 15)
WEAK_HAS = BStr.trim(MY_DOG.slice(6, 20), all: true)
NON_MATCHING = ["smart cat", "smart unicorn.", "Somthing completely different"]
describe "has() & weaklyHas", ->
it "should has itself", -> # well, duh
assert BStr.has(MY_DOG, MY_DOG), "with itself"
assert BStr.has(MY_DOG, STRONG_HAS), "with a substring of itself"
it "should not has weaker version of itself", ->
assert !BStr.has(MY_DOG, WEAK_HAS)
it "should not has any non matching as array or individually", ->
testArray = NON_MATCHING.slice(0).concat([WEAK_HAS])
assert !BStr.has(MY_DOG, testArray), "array of nonmatching strings"
for nonMatch in NON_MATCHING
assert !BStr.has(MY_DOG, nonMatch), "like not #{nonMatch}"
it "should has one of an array", ->
# inject a stong match into the middle of non matching
testArray = NON_MATCHING.slice(0, 1).concat([STRONG_HAS]).concat(NON_MATCHING.slice(1))
assert BStr.has(MY_DOG, testArray)
it "should weaklyHas itself", -> # well, duh
assert BStr.weaklyHas(MY_DOG, STRONG_HAS)
it "should weaklyHas weaker versions of itself", ->
assert BStr.weaklyHas(MY_DOG, WEAK_HAS), "first 12 trimmed => #{WEAK_HAS}"
assert BStr.weaklyHas(MY_DOG, WEAK_HAS.toLowerCase()), "first 12 trimmed and lowercased"
assert BStr.weaklyHas(MY_DOG, " "), "single space at end"
it "should weaklyHas one of an array", ->
# inject a stong match into the middle of non matching
testArray = NON_MATCHING.slice(0, 1).concat([STRONG_HAS]).concat(NON_MATCHING.slice(1))
assert BStr.weaklyHas(MY_DOG, testArray), "should have found #{STRONG_HAS} in #{JSON.stringify(testArray)}"
testArray = NON_MATCHING.slice(0).concat([WEAK_HAS])
assert BStr.weaklyHas(MY_DOG, testArray), "should have found #{WEAK_HAS} in #{JSON.stringify(testArray)}"
| true |
BStr = require('../src/bumble-strings')
debugger
# test value with extra spaces everywhere and imbedded uppercased word
MY_DOG = " My dog, PI:NAME:<NAME>END_PI, is a really smart dog. "
STRONG_HAS = MY_DOG.slice(6, 15)
WEAK_HAS = BStr.trim(MY_DOG.slice(6, 20), all: true)
NON_MATCHING = ["smart cat", "smart unicorn.", "Somthing completely different"]
describe "has() & weaklyHas", ->
it "should has itself", -> # well, duh
assert BStr.has(MY_DOG, MY_DOG), "with itself"
assert BStr.has(MY_DOG, STRONG_HAS), "with a substring of itself"
it "should not has weaker version of itself", ->
assert !BStr.has(MY_DOG, WEAK_HAS)
it "should not has any non matching as array or individually", ->
testArray = NON_MATCHING.slice(0).concat([WEAK_HAS])
assert !BStr.has(MY_DOG, testArray), "array of nonmatching strings"
for nonMatch in NON_MATCHING
assert !BStr.has(MY_DOG, nonMatch), "like not #{nonMatch}"
it "should has one of an array", ->
# inject a stong match into the middle of non matching
testArray = NON_MATCHING.slice(0, 1).concat([STRONG_HAS]).concat(NON_MATCHING.slice(1))
assert BStr.has(MY_DOG, testArray)
it "should weaklyHas itself", -> # well, duh
assert BStr.weaklyHas(MY_DOG, STRONG_HAS)
it "should weaklyHas weaker versions of itself", ->
assert BStr.weaklyHas(MY_DOG, WEAK_HAS), "first 12 trimmed => #{WEAK_HAS}"
assert BStr.weaklyHas(MY_DOG, WEAK_HAS.toLowerCase()), "first 12 trimmed and lowercased"
assert BStr.weaklyHas(MY_DOG, " "), "single space at end"
it "should weaklyHas one of an array", ->
# inject a stong match into the middle of non matching
testArray = NON_MATCHING.slice(0, 1).concat([STRONG_HAS]).concat(NON_MATCHING.slice(1))
assert BStr.weaklyHas(MY_DOG, testArray), "should have found #{STRONG_HAS} in #{JSON.stringify(testArray)}"
testArray = NON_MATCHING.slice(0).concat([WEAK_HAS])
assert BStr.weaklyHas(MY_DOG, testArray), "should have found #{WEAK_HAS} in #{JSON.stringify(testArray)}"
|
[
{
"context": "GS IN THE SOFTWARE.\n\n# Taken from hubot at commit 71d1c686d9ffdfad54751080c699979fa17190a1 and modified to fit current use.\n\n{inspect} = req",
"end": 1166,
"score": 0.976119875907898,
"start": 1126,
"tag": "PASSWORD",
"value": "71d1c686d9ffdfad54751080c699979fa17190a1"
}
... | src/listener.coffee | kumpelblase2/modlab-chat | 0 | # Copyright (c) 2013 GitHub Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Taken from hubot at commit 71d1c686d9ffdfad54751080c699979fa17190a1 and modified to fit current use.
{inspect} = require 'util'
{TextMessage} = require './message'
class Listener
# Listeners receive every message from the chat source and decide if they
# want to act on it.
#
# bot - A Robot instance.
# matcher - A Function that determines if this listener should trigger the
# callback.
# callback - A Function that is triggered if the incoming message matches.
constructor: (@bot, @matcher, @callback) ->
# Public: Determines if the listener likes the content of the message. If
# so, a Response built from the given Message is passed to the Listener
# callback.
#
# message - A Message instance.
#
# Returns a boolean of whether the matcher matched.
call: (message) ->
if match = @matcher message
@bot.logger.debug "Message '#{message}' matched regex /#{inspect @regex}/" if @regex
@callback new @bot.Response(@bot, message, match)
true
else
false
class TextListener extends Listener
# TextListeners receive every message from the chat source and decide if they
# want to act on it.
#
# bot - A Robot instance.
# regex - A Regex that determines if this listener should trigger the
# callback.
# callback - A Function that is triggered if the incoming message matches.
constructor: (@bot, @regex, @callback) ->
@matcher = (message) =>
if message instanceof TextMessage
message.match @regex
module.exports = {
Listener
TextListener
}
| 180664 | # Copyright (c) 2013 GitHub Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Taken from hubot at commit <PASSWORD> and modified to fit current use.
{inspect} = require 'util'
{TextMessage} = require './message'
class Listener
# Listeners receive every message from the chat source and decide if they
# want to act on it.
#
# bot - A Robot instance.
# matcher - A Function that determines if this listener should trigger the
# callback.
# callback - A Function that is triggered if the incoming message matches.
constructor: (@bot, @matcher, @callback) ->
# Public: Determines if the listener likes the content of the message. If
# so, a Response built from the given Message is passed to the Listener
# callback.
#
# message - A Message instance.
#
# Returns a boolean of whether the matcher matched.
call: (message) ->
if match = @matcher message
@bot.logger.debug "Message '#{message}' matched regex /#{inspect @regex}/" if @regex
@callback new @bot.Response(@bot, message, match)
true
else
false
class TextListener extends Listener
# TextListeners receive every message from the chat source and decide if they
# want to act on it.
#
# bot - A Robot instance.
# regex - A Regex that determines if this listener should trigger the
# callback.
# callback - A Function that is triggered if the incoming message matches.
constructor: (@bot, @regex, @callback) ->
@matcher = (message) =>
if message instanceof TextMessage
message.match @regex
module.exports = {
Listener
TextListener
}
| true | # Copyright (c) 2013 GitHub Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Taken from hubot at commit PI:PASSWORD:<PASSWORD>END_PI and modified to fit current use.
{inspect} = require 'util'
{TextMessage} = require './message'
class Listener
# Listeners receive every message from the chat source and decide if they
# want to act on it.
#
# bot - A Robot instance.
# matcher - A Function that determines if this listener should trigger the
# callback.
# callback - A Function that is triggered if the incoming message matches.
constructor: (@bot, @matcher, @callback) ->
# Public: Determines if the listener likes the content of the message. If
# so, a Response built from the given Message is passed to the Listener
# callback.
#
# message - A Message instance.
#
# Returns a boolean of whether the matcher matched.
call: (message) ->
if match = @matcher message
@bot.logger.debug "Message '#{message}' matched regex /#{inspect @regex}/" if @regex
@callback new @bot.Response(@bot, message, match)
true
else
false
class TextListener extends Listener
# TextListeners receive every message from the chat source and decide if they
# want to act on it.
#
# bot - A Robot instance.
# regex - A Regex that determines if this listener should trigger the
# callback.
# callback - A Function that is triggered if the incoming message matches.
constructor: (@bot, @regex, @callback) ->
@matcher = (message) =>
if message instanceof TextMessage
message.match @regex
module.exports = {
Listener
TextListener
}
|
[
{
"context": " authentication\n # @param {?String} password the password credential to be used in HTTP basic\n # authent",
"end": 3310,
"score": 0.4628887176513672,
"start": 3302,
"tag": "PASSWORD",
"value": "password"
}
] | src/001-xml_http_request.coffee | hoodunit/node-xhr2 | 0 | # This file's name is set up in such a way that it will always show up second
# in the list of files given to coffee --join, so it can use the
# XMLHttpRequestEventTarget definition and so that the other files can assume
# that XMLHttpRequest was already defined.
http = require 'http'
https = require 'https'
os = require 'os'
url = require 'url'
# The ECMAScript HTTP API.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#introduction
class XMLHttpRequest extends XMLHttpRequestEventTarget
# Creates a new request.
#
# @param {Object} options one or more of the options below
# @option options {Boolean} anon if true, the request's anonymous flag
# will be set
# @see http://www.w3.org/TR/XMLHttpRequest/#constructors
# @see http://www.w3.org/TR/XMLHttpRequest/#anonymous-flag
constructor: (options) ->
super()
@onreadystatechange = null
@_anonymous = options and options.anon
@readyState = XMLHttpRequest.UNSENT
@response = null
@responseText = ''
@responseType = ''
@responseURL = ''
@status = 0
@statusText = ''
@timeout = 0
@upload = new XMLHttpRequestUpload @
@_method = null # String
@_url = null # Return value of url.parse()
@_sync = false
@_headers = null # Object<String, String>
@_loweredHeaders = null # Object<lowercase String, String>
@_mimeOverride = null
@_request = null # http.ClientRequest
@_response = null # http.ClientResponse
@_responseParts = null # Array<Buffer, String>
@_responseHeaders = null # Object<lowercase String, String>
@_aborting = null
@_error = null
@_loadedBytes = 0
@_totalBytes = 0
@_lengthComputable = false
# @property {function(ProgressEvent)} DOM level 0-style handler for the
# 'readystatechange' event
onreadystatechange: null
# @property {Number} the current state of the XHR object
# @see http://www.w3.org/TR/XMLHttpRequest/#states
readyState: null
# @property {String, ArrayBuffer, Buffer, Object} processed XHR response
# @see http://www.w3.org/TR/XMLHttpRequest/#the-response-attribute
response: null
# @property {String} response string, if responseType is '' or 'text'
# @see http://www.w3.org/TR/XMLHttpRequest/#the-responsetext-attribute
responseText: null
# @property {String} sets the parsing method for the XHR response
# @see http://www.w3.org/TR/XMLHttpRequest/#the-responsetype-attribute
responseType: null
# @property {Number} the HTTP
# @see http://www.w3.org/TR/XMLHttpRequest/#the-status-attribute
status: null
# @property {Number} milliseconds to wait for the request to complete
# @see http://www.w3.org/TR/XMLHttpRequest/#the-timeout-attribute
timeout: null
# @property {XMLHttpRequestUpload} the associated upload information
# @see http://www.w3.org/TR/XMLHttpRequest/#the-upload-attribute
upload: null
# Sets the XHR's method, URL, synchronous flag, and authentication params.
#
# @param {String} method the HTTP method to be used
# @param {String} url the URL that the request will be made to
# @param {?Boolean} async if false, the XHR should be processed
# synchronously; true by default
# @param {?String} user the user credential to be used in HTTP basic
# authentication
# @param {?String} password the password credential to be used in HTTP basic
# authentication
# @return {undefined} undefined
# @throw {SecurityError} method is not one of the allowed methods
# @throw {SyntaxError} urlString is not a valid URL
# @throw {Error} the URL contains an unsupported protocol; the supported
# protocols are file, http and https
# @see http://www.w3.org/TR/XMLHttpRequest/#the-open()-method
open: (method, url, async, user, password) ->
method = method.toUpperCase()
if method of @_restrictedMethods
throw new SecurityError "HTTP method #{method} is not allowed in XHR"
xhrUrl = @_parseUrl url
async = true if async is undefined
switch @readyState
when XMLHttpRequest.UNSENT, XMLHttpRequest.OPENED, XMLHttpRequest.DONE
# Nothing to do here.
null
when XMLHttpRequest.HEADERS_RECEIVED, XMLHttpRequest.LOADING
# TODO(pwnall): terminate abort(), terminate send()
null
@_method = method
@_url = xhrUrl
@_sync = !async
@_headers = {}
@_loweredHeaders = {}
@_mimeOverride = null
@_setReadyState XMLHttpRequest.OPENED
@_request = null
@_response = null
@status = 0
@statusText = ''
@_responseParts = []
@_responseHeaders = null
@_loadedBytes = 0
@_totalBytes = 0
@_lengthComputable = false
undefined
# Appends a header to the list of author request headers.
#
# @param {String} name the HTTP header name
# @param {String} value the HTTP header value
# @return {undefined} undefined
# @throw {InvalidStateError} readyState is not OPENED
# @throw {SyntaxError} name is not a valid HTTP header name or value is not
# a valid HTTP header value
# @see http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader()-method
setRequestHeader: (name, value) ->
unless @readyState is XMLHttpRequest.OPENED
throw new InvalidStateError "XHR readyState must be OPENED"
loweredName = name.toLowerCase()
if @_restrictedHeaders[loweredName] or /^sec\-/.test(loweredName) or
/^proxy-/.test(loweredName)
console.warn "Refused to set unsafe header \"#{name}\""
return undefined
value = value.toString()
if loweredName of @_loweredHeaders
# Combine value with the existing header value.
name = @_loweredHeaders[loweredName]
@_headers[name] = @_headers[name] + ', ' + value
else
# New header.
@_loweredHeaders[loweredName] = name
@_headers[name] = value
undefined
# Initiates the request.
#
# @param {?String, ?ArrayBufferView} data the data to be sent; ignored for
# GET and HEAD requests
# @return {undefined} undefined
# @throw {InvalidStateError} readyState is not OPENED
# @see http://www.w3.org/TR/XMLHttpRequest/#the-send()-method
send: (data) ->
unless @readyState is XMLHttpRequest.OPENED
throw new InvalidStateError "XHR readyState must be OPENED"
if @_request
throw new InvalidStateError "send() already called"
switch @_url.protocol
when 'file:'
@_sendFile data
when 'http:', 'https:'
@_sendHttp data
else
throw new NetworkError "Unsupported protocol #{@_url.protocol}"
undefined
# Cancels the network activity performed by this request.
#
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-abort()-method
abort: ->
return unless @_request
@_request.abort()
@_setError()
@_dispatchProgress 'abort'
@_dispatchProgress 'loadend'
undefined
# Returns a header value in the HTTP response for this XHR.
#
# @param {String} name case-insensitive HTTP header name
# @return {?String} value the value of the header whose name matches the
# given name, or null if there is no such header
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
getResponseHeader: (name) ->
return null unless @_responseHeaders
loweredName = name.toLowerCase()
if loweredName of @_responseHeaders
@_responseHeaders[loweredName]
else
null
# Returns all the HTTP headers in this XHR's response.
#
# @return {String} header lines separated by CR LF, where each header line
# has the name and value separated by a ": " (colon, space); the empty
# string is returned if the headers are not available
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getallresponseheaders()-method
getAllResponseHeaders: ->
return '' unless @_responseHeaders
lines = ("#{name}: #{value}" for name, value of @_responseHeaders)
lines.join "\r\n"
# Overrides the Content-Type
#
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-overridemimetype()-method
overrideMimeType: (newMimeType) ->
if @readyState is XMLHttpRequest.LOADING or
@readyState is XMLHttpRequest.DONE
throw new InvalidStateError(
"overrideMimeType() not allowed in LOADING or DONE")
@_mimeOverride = newMimeType.toLowerCase()
undefined
# Network configuration not exposed in the XHR API.
#
# Although the XMLHttpRequest specification calls itself "ECMAScript HTTP",
# it assumes that requests are always performed in the context of a browser
# application, where some network parameters are set by the browser user and
# should not be modified by Web applications. This API provides access to
# these network parameters.
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in
# browsers. It is a stable node-xhr2 API.
#
# @param {Object} options one or more of the options below
# @option options {?http.Agent} httpAgent the value for the nodejsHttpAgent
# property (the agent used for HTTP requests)
# @option options {?https.Agent} httpsAgent the value for the
# nodejsHttpsAgent property (the agent used for HTTPS requests)
# @return {undefined} undefined
nodejsSet: (options) ->
if 'httpAgent' of options
@nodejsHttpAgent = options.httpAgent
if 'httpsAgent' of options
@nodejsHttpsAgent = options.httpsAgent
if 'baseUrl' of options
baseUrl = options.baseUrl
unless baseUrl is null
parsedUrl = url.parse baseUrl, false, true
unless parsedUrl.protocol
throw new SyntaxError("baseUrl must be an absolute URL")
@nodejsBaseUrl = baseUrl
undefined
# Default settings for the network configuration not exposed in the XHR API.
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in
# browsers. It is a stable node-xhr2 API.
#
# @param {Object} options one or more of the options below
# @option options {?http.Agent} httpAgent the default value for the
# nodejsHttpAgent property (the agent used for HTTP requests)
# @option options {https.Agent} httpsAgent the default value for the
# nodejsHttpsAgent property (the agent used for HTTPS requests)
# @return {undefined} undefined
# @see XMLHttpRequest.nodejsSet
@nodejsSet: (options) ->
# "this" will be set to XMLHttpRequest.prototype, so the instance nodejsSet
# operates on default property values.
XMLHttpRequest::nodejsSet options
undefined
  # The readyState constants below are defined both on the prototype and on
  # the constructor, so they are reachable as instance properties and as
  # XMLHttpRequest.UNSENT etc., matching browser behavior.
  # readyState value before XMLHttpRequest#open() is called
  UNSENT: 0
  # readyState value before XMLHttpRequest#open() is called
  @UNSENT: 0
  # readyState value after XMLHttpRequest#open() is called, and before
  # XMLHttpRequest#send() is called; XMLHttpRequest#setRequestHeader() can be
  # called in this state
  OPENED: 1
  # readyState value after XMLHttpRequest#open() is called, and before
  # XMLHttpRequest#send() is called; XMLHttpRequest#setRequestHeader() can be
  # called in this state
  @OPENED: 1
  # readyState value after redirects have been followed and the HTTP headers of
  # the final response have been received
  HEADERS_RECEIVED: 2
  # readyState value after redirects have been followed and the HTTP headers of
  # the final response have been received
  @HEADERS_RECEIVED: 2
  # readyState value when the response entity body is being received
  LOADING: 3
  # readyState value when the response entity body is being received
  @LOADING: 3
  # readyState value after the request has been completely processed
  DONE: 4
  # readyState value after the request has been completely processed
  @DONE: 4
  # @property {http.Agent} the agent option passed to HTTP requests
  #
  # NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
  # It is a stable node-xhr2 API that is useful for testing & going through
  # web-proxies.
  nodejsHttpAgent: http.globalAgent
  # @property {https.Agent} the agent option passed to HTTPS requests
  #
  # NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
  # It is a stable node-xhr2 API that is useful for testing & going through
  # web-proxies.
  nodejsHttpsAgent: https.globalAgent
  # @property {String} the base URL that relative URLs get resolved to
  #
  # NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
  # Its browser equivalent is the base URL of the document associated with the
  # Window object. It is a stable node-xhr2 API provided for libraries such as
  # Angular Universal.
  nodejsBaseUrl: null
  # HTTP methods that are disallowed in the XHR spec.
  #
  # @private
  # @see Step 6 in http://www.w3.org/TR/XMLHttpRequest/#the-open()-method
  _restrictedMethods:
    CONNECT: true
    TRACE: true
    TRACK: true
  # HTTP request headers that are disallowed in the XHR spec.
  #
  # Keys are lowercased header names; setRequestHeader() refuses a header
  # when its entry here is truthy.
  #
  # @private
  # @see Step 5 in
  # http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader()-method
  _restrictedHeaders:
    'accept-charset': true
    'accept-encoding': true
    'access-control-request-headers': true
    'access-control-request-method': true
    connection: true
    'content-length': true
    # NOTE(review): every sibling entry is true and the XHR spec forbids the
    # Cookie header; cookie: false makes setRequestHeader() accept Cookie --
    # confirm this deviation from the spec is intentional.
    cookie: false
    cookie2: true
    date: true
    dnt: true
    expect: true
    host: true
    'keep-alive': true
    origin: true
    referer: true
    te: true
    trailer: true
    'transfer-encoding': true
    upgrade: true
    'user-agent': true
    via: true
  # HTTP response headers that should not be exposed according to the XHR spec.
  #
  # @private
  # @see Step 3 in
  # http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
  _privateHeaders:
    'set-cookie': true
    'set-cookie2': true
  # The value of the User-Agent header.
  _userAgent: "Mozilla/5.0 (#{os.type()} #{os.arch()}) " +
      "node.js/#{process.versions.node} v8/#{process.versions.v8}"
# Sets the readyState property and fires the readystatechange event.
#
# @private
# @param {Number} newReadyState the new value of readyState
# @return {undefined} undefined
_setReadyState: (newReadyState) ->
@readyState = newReadyState
event = new ProgressEvent 'readystatechange'
@dispatchEvent event
undefined
# XMLHttpRequest#send() implementation for the file: protocol.
#
# @private
_sendFile: ->
unless @_url.method is 'GET'
throw new NetworkError 'The file protocol only supports GET'
throw new Error "Protocol file: not implemented"
# XMLHttpRequest#send() implementation for the http: and https: protocols.
#
# @private
# This method sets the instance variables and calls _sendHxxpRequest(), which
# is responsible for building a node.js request and firing it off. The code
# in _sendHxxpRequest() is separated off so it can be reused when handling
# redirects.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#infrastructure-for-the-send()-method
_sendHttp: (data) ->
if @_sync
throw new Error "Synchronous XHR processing not implemented"
if data? and (@_method is 'GET' or @_method is 'HEAD')
console.warn "Discarding entity body for #{@_method} requests"
data = null
else
# Send Content-Length: 0
data or= ''
# NOTE: this is called before finalizeHeaders so that the uploader can
# figure out Content-Length and Content-Type.
@upload._setData data
@_finalizeHeaders()
@_sendHxxpRequest()
undefined
# Sets up and fires off a HTTP/HTTPS request using the node.js API.
#
# @private
# This method contains the bulk of the XMLHttpRequest#send() implementation,
# and is also used to issue new HTTP requests when handling HTTP redirects.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#infrastructure-for-the-send()-method
  _sendHxxpRequest: ->
    # Select the transport module and connection agent matching the scheme.
    if @_url.protocol is 'http:'
      hxxp = http
      agent = @nodejsHttpAgent
    else
      hxxp = https
      agent = @nodejsHttpsAgent
    request = hxxp.request
      hostname: @_url.hostname, port: @_url.port, path: @_url.path,
      auth: @_url.auth, method: @_method, headers: @_headers, agent: agent
    @_request = request
    # The socket-level timeout maps onto the XHR 'timeout' event.
    if @timeout
      request.setTimeout @timeout, => @_onHttpTimeout request
    request.on 'response', (response) => @_onHttpResponse request, response
    request.on 'error', (error) => @_onHttpRequestError request, error
    @upload._startUpload request
    # _onHttpRequestError may have fired synchronously and cleared @_request;
    # only announce loadstart if this request is still the active one.
    if @_request is request # An http error might have already fired.
      @_dispatchProgress 'loadstart'
    undefined
# Fills in the restricted HTTP headers with default values.
#
# This is called right before the HTTP request is sent off.
#
# @private
# @return {undefined} undefined
_finalizeHeaders: ->
@_headers['Connection'] = 'keep-alive'
@_headers['Host'] = @_url.host
if @_anonymous
@_headers['Referer'] = 'about:blank'
@_headers['User-Agent'] = @_userAgent
@upload._finalizeHeaders @_headers, @_loweredHeaders
undefined
# Called when the headers of an HTTP response have been received.
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# produced this response
# @param {http.ClientResponse} response the node.js ClientResponse instance
# passed to
  _onHttpResponse: (request, response) ->
    # Ignore responses for requests that were aborted or superseded.
    return unless @_request is request
    # Transparent redirection handling.
    switch response.statusCode
      when 301, 302, 303, 307, 308
        @_url = @_parseUrl response.headers['location']
        # NOTE(review): the method is unconditionally rewritten to GET for
        # every redirect status; per spec, 307/308 should preserve the
        # original method and body -- confirm this is intentional.
        @_method = 'GET'
        # Drop the old entity's Content-Type; the new GET has no body.
        if 'content-type' of @_loweredHeaders
          delete @_headers[@_loweredHeaders['content-type']]
          delete @_loweredHeaders['content-type']
        # XMLHttpRequestUpload#_finalizeHeaders() sets Content-Type directly.
        if 'Content-Type' of @_headers
          delete @_headers['Content-Type']
        # Restricted headers can't be set by the user, no need to check
        # loweredHeaders.
        delete @_headers['Content-Length']
        @upload._reset()
        @_finalizeHeaders()
        # Re-issue the request against the redirect target.
        @_sendHxxpRequest()
        return
    # Non-redirect: wire up the response stream and surface the headers.
    @_response = response
    @_response.on 'data', (data) => @_onHttpResponseData response, data
    @_response.on 'end', => @_onHttpResponseEnd response
    @_response.on 'close', => @_onHttpResponseClose response
    # The fragment is stripped from responseURL.
    @responseURL = @_url.href.split('#')[0]
    @status = @_response.statusCode
    @statusText = http.STATUS_CODES[@status]
    @_parseResponseHeaders response
    # Content-Length, when present, makes progress events lengthComputable.
    if lengthString = @_responseHeaders['content-length']
      @_totalBytes = parseInt(lengthString)
      @_lengthComputable = true
    else
      @_lengthComputable = false
    @_setReadyState XMLHttpRequest.HEADERS_RECEIVED
# Called when some data has been received on a HTTP connection.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
# @param {String, Buffer} data the data that has been received
_onHttpResponseData: (response, data) ->
return unless @_response is response
@_responseParts.push data
@_loadedBytes += data.length
if @readyState isnt XMLHttpRequest.LOADING
@_setReadyState XMLHttpRequest.LOADING
@_dispatchProgress 'progress'
# Called when the HTTP request finished processing.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
_onHttpResponseEnd: (response) ->
return unless @_response is response
@_parseResponse()
@_request = null
@_response = null
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'load'
@_dispatchProgress 'loadend'
# Called when the underlying HTTP connection was closed prematurely.
#
# If this method is called, it will be called after or instead of
# onHttpResponseEnd.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
_onHttpResponseClose: (response) ->
return unless @_response is response
request = @_request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'error'
@_dispatchProgress 'loadend'
# Called when the timeout set on the HTTP socket expires.
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# fired this event
_onHttpTimeout: (request) ->
return unless @_request is request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'timeout'
@_dispatchProgress 'loadend'
# Called when something wrong happens on the HTTP socket
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# fired this event
# @param {Error} error emitted exception
_onHttpRequestError: (request, error) ->
return unless @_request is request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'error'
@_dispatchProgress 'loadend'
# Fires an XHR progress event.
#
# @private
# @param {String} eventType one of the XHR progress event types, such as
# 'load' and 'progress'
_dispatchProgress: (eventType) ->
event = new ProgressEvent eventType
event.lengthComputable = @_lengthComputable
event.loaded = @_loadedBytes
event.total = @_totalBytes
@dispatchEvent event
undefined
# Sets up the XHR to reflect the fact that an error has occurred.
#
# The possible errors are a network error, a timeout, or an abort.
#
# @private
_setError: ->
@_request = null
@_response = null
@_responseHeaders = null
@_responseParts = null
undefined
# Parses a request URL string.
#
# @private
# This method is a thin wrapper around url.parse() that normalizes HTTP
# user/password credentials. It is used to parse the URL string passed to
# XMLHttpRequest#open() and the URLs in the Location headers of HTTP redirect
# responses.
#
# @param {String} urlString the URL to be parsed
# @return {Object} parsed URL
_parseUrl: (urlString) ->
if @nodejsBaseUrl is null
absoluteUrlString = urlString
else
absoluteUrlString = url.resolve @nodejsBaseUrl, urlString
xhrUrl = url.parse absoluteUrlString, false, true
xhrUrl.hash = null
if xhrUrl.auth and (user? or password?)
index = xhrUrl.auth.indexOf ':'
if index is -1
user = xhrUrl.auth unless user
else
user = xhrUrl.substring(0, index) unless user
password = xhrUrl.substring(index + 1) unless password
if user or password
xhrUrl.auth = "#{user}:#{password}"
xhrUrl
# Reads the headers from a node.js ClientResponse instance.
#
# @private
# @param {http.ClientResponse} response the response whose headers will be
# imported into this XMLHttpRequest's state
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getallresponseheaders()-method
_parseResponseHeaders: (response) ->
@_responseHeaders = {}
for name, value of response.headers
loweredName = name.toLowerCase()
continue if @_privateHeaders[loweredName]
if @_mimeOverride isnt null and loweredName is 'content-type'
value = @_mimeOverride
@_responseHeaders[loweredName] = value
if @_mimeOverride isnt null and !('content-type' of @_responseHeaders)
@_responseHeaders['content-type'] = @_mimeOverride
undefined
# Sets the response and responseText properties when an XHR completes.
#
# @private
# @return {undefined} undefined
  _parseResponse: ->
    # Merge the buffered chunks into a single Buffer.
    if Buffer.concat
      buffer = Buffer.concat @_responseParts
    else
      # node 0.6
      buffer = @_concatBuffers @_responseParts
    # The raw parts are no longer needed once concatenated.
    @_responseParts = null
    switch @responseType
      when 'text'
        @_parseTextResponse buffer
      when 'json'
        @responseText = null
        try
          @response = JSON.parse buffer.toString('utf-8')
        catch jsonError
          # Invalid JSON yields a null response instead of throwing.
          @response = null
      when 'buffer'
        @responseText = null
        @response = buffer
      when 'arraybuffer'
        @responseText = null
        # Copy the node Buffer byte-by-byte into a fresh ArrayBuffer.
        arrayBuffer = new ArrayBuffer buffer.length
        view = new Uint8Array arrayBuffer
        view[i] = buffer[i] for i in [0...buffer.length]
        @response = arrayBuffer
      else
        # TODO(pwnall): content-base detection
        @_parseTextResponse buffer
    undefined
# Sets response and responseText for a 'text' response type.
#
# @private
# @param {Buffer} buffer the node.js Buffer containing the binary response
# @return {undefined} undefined
_parseTextResponse: (buffer) ->
try
@responseText = buffer.toString @_parseResponseEncoding()
catch e
# Unknown encoding.
@responseText = buffer.toString 'binary'
@response = @responseText
undefined
# Figures out the string encoding of the XHR's response.
#
# This is called to determine the encoding when responseText is set.
#
# @private
# @return {String} a string encoding, e.g. 'utf-8'
_parseResponseEncoding: ->
encoding = null
if contentType = @_responseHeaders['content-type']
if match = /\;\s*charset\=(.*)$/.exec contentType
return match[1]
'utf-8'
# Buffer.concat implementation for node 0.6.
#
# @private
# @param {Array<Buffer>} buffers the buffers whose contents will be merged
# @return {Buffer} same as Buffer.concat(buffers) in node 0.8 and above
_concatBuffers: (buffers) ->
if buffers.length is 0
return Buffer.alloc 0
if buffers.length is 1
return buffers[0]
length = 0
length += buffer.length for buffer in buffers
target = Buffer.alloc length
length = 0
for buffer in buffers
buffer.copy target, length
length += buffer.length
target
# XMLHttpRequest is the result of require('node-xhr2').
module.exports = XMLHttpRequest
# Make node-xhr2 work as a drop-in replacement for libraries that promote the
# following usage pattern:
#   var XMLHttpRequest = require('xhr-library-name').XMLHttpRequest
XMLHttpRequest.XMLHttpRequest = XMLHttpRequest
# This file's name is set up in such a way that it will always show up second
# in the list of files given to coffee --join, so it can use the
# XMLHttpRequestEventTarget definition and so that the other files can assume
# that XMLHttpRequest was already defined.
http = require 'http'
https = require 'https'
os = require 'os'
url = require 'url'
# The ECMAScript HTTP API.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#introduction
class XMLHttpRequest extends XMLHttpRequestEventTarget
# Creates a new request.
#
# @param {Object} options one or more of the options below
# @option options {Boolean} anon if true, the request's anonymous flag
# will be set
# @see http://www.w3.org/TR/XMLHttpRequest/#constructors
# @see http://www.w3.org/TR/XMLHttpRequest/#anonymous-flag
  constructor: (options) ->
    super()
    # DOM level 0 event handler slot.
    @onreadystatechange = null
    # The anonymous flag suppresses the Referer header (see _finalizeHeaders).
    @_anonymous = options and options.anon
    # Public XHR state, reset again by open().
    @readyState = XMLHttpRequest.UNSENT
    @response = null
    @responseText = ''
    @responseType = ''
    @responseURL = ''
    @status = 0
    @statusText = ''
    @timeout = 0
    @upload = new XMLHttpRequestUpload @
    # Private per-request state.
    @_method = null  # String
    @_url = null  # Return value of url.parse()
    @_sync = false
    @_headers = null  # Object<String, String>
    @_loweredHeaders = null  # Object<lowercase String, String>
    @_mimeOverride = null
    @_request = null  # http.ClientRequest
    @_response = null  # http.ClientResponse
    @_responseParts = null  # Array<Buffer, String>
    @_responseHeaders = null  # Object<lowercase String, String>
    @_aborting = null
    @_error = null
    # Progress counters reported via ProgressEvent.
    @_loadedBytes = 0
    @_totalBytes = 0
    @_lengthComputable = false
  # @property {function(ProgressEvent)} DOM level 0-style handler for the
  #   'readystatechange' event
  onreadystatechange: null
  # @property {Number} the current state of the XHR object
  # @see http://www.w3.org/TR/XMLHttpRequest/#states
  readyState: null
  # @property {String, ArrayBuffer, Buffer, Object} processed XHR response
  # @see http://www.w3.org/TR/XMLHttpRequest/#the-response-attribute
  response: null
  # @property {String} response string, if responseType is '' or 'text'
  # @see http://www.w3.org/TR/XMLHttpRequest/#the-responsetext-attribute
  responseText: null
  # @property {String} sets the parsing method for the XHR response
  # @see http://www.w3.org/TR/XMLHttpRequest/#the-responsetype-attribute
  responseType: null
  # @property {Number} the HTTP status code of the response
  # @see http://www.w3.org/TR/XMLHttpRequest/#the-status-attribute
  status: null
  # @property {Number} milliseconds to wait for the request to complete
  # @see http://www.w3.org/TR/XMLHttpRequest/#the-timeout-attribute
  timeout: null
  # @property {XMLHttpRequestUpload} the associated upload information
  # @see http://www.w3.org/TR/XMLHttpRequest/#the-upload-attribute
  upload: null
# Sets the XHR's method, URL, synchronous flag, and authentication params.
#
# @param {String} method the HTTP method to be used
# @param {String} url the URL that the request will be made to
# @param {?Boolean} async if false, the XHR should be processed
# synchronously; true by default
# @param {?String} user the user credential to be used in HTTP basic
# authentication
# @param {?String} password the <PASSWORD> credential to be used in HTTP basic
# authentication
# @return {undefined} undefined
# @throw {SecurityError} method is not one of the allowed methods
# @throw {SyntaxError} urlString is not a valid URL
# @throw {Error} the URL contains an unsupported protocol; the supported
# protocols are file, http and https
# @see http://www.w3.org/TR/XMLHttpRequest/#the-open()-method
open: (method, url, async, user, password) ->
method = method.toUpperCase()
if method of @_restrictedMethods
throw new SecurityError "HTTP method #{method} is not allowed in XHR"
xhrUrl = @_parseUrl url
async = true if async is undefined
switch @readyState
when XMLHttpRequest.UNSENT, XMLHttpRequest.OPENED, XMLHttpRequest.DONE
# Nothing to do here.
null
when XMLHttpRequest.HEADERS_RECEIVED, XMLHttpRequest.LOADING
# TODO(pwnall): terminate abort(), terminate send()
null
@_method = method
@_url = xhrUrl
@_sync = !async
@_headers = {}
@_loweredHeaders = {}
@_mimeOverride = null
@_setReadyState XMLHttpRequest.OPENED
@_request = null
@_response = null
@status = 0
@statusText = ''
@_responseParts = []
@_responseHeaders = null
@_loadedBytes = 0
@_totalBytes = 0
@_lengthComputable = false
undefined
# Appends a header to the list of author request headers.
#
# @param {String} name the HTTP header name
# @param {String} value the HTTP header value
# @return {undefined} undefined
# @throw {InvalidStateError} readyState is not OPENED
# @throw {SyntaxError} name is not a valid HTTP header name or value is not
# a valid HTTP header value
# @see http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader()-method
  setRequestHeader: (name, value) ->
    unless @readyState is XMLHttpRequest.OPENED
      throw new InvalidStateError "XHR readyState must be OPENED"
    loweredName = name.toLowerCase()
    # Headers reserved for the user agent (restricted list, Sec-* and
    # Proxy-* prefixes) are refused with a console warning rather than an
    # exception, mirroring browser behavior.
    if @_restrictedHeaders[loweredName] or /^sec\-/.test(loweredName) or
        /^proxy-/.test(loweredName)
      console.warn "Refused to set unsafe header \"#{name}\""
      return undefined
    value = value.toString()
    if loweredName of @_loweredHeaders
      # Combine value with the existing header value.
      name = @_loweredHeaders[loweredName]
      @_headers[name] = @_headers[name] + ', ' + value
    else
      # New header.
      @_loweredHeaders[loweredName] = name
      @_headers[name] = value
    undefined
# Initiates the request.
#
# @param {?String, ?ArrayBufferView} data the data to be sent; ignored for
# GET and HEAD requests
# @return {undefined} undefined
# @throw {InvalidStateError} readyState is not OPENED
# @see http://www.w3.org/TR/XMLHttpRequest/#the-send()-method
send: (data) ->
unless @readyState is XMLHttpRequest.OPENED
throw new InvalidStateError "XHR readyState must be OPENED"
if @_request
throw new InvalidStateError "send() already called"
switch @_url.protocol
when 'file:'
@_sendFile data
when 'http:', 'https:'
@_sendHttp data
else
throw new NetworkError "Unsupported protocol #{@_url.protocol}"
undefined
# Cancels the network activity performed by this request.
#
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-abort()-method
abort: ->
return unless @_request
@_request.abort()
@_setError()
@_dispatchProgress 'abort'
@_dispatchProgress 'loadend'
undefined
# Returns a header value in the HTTP response for this XHR.
#
# @param {String} name case-insensitive HTTP header name
# @return {?String} value the value of the header whose name matches the
# given name, or null if there is no such header
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
getResponseHeader: (name) ->
return null unless @_responseHeaders
loweredName = name.toLowerCase()
if loweredName of @_responseHeaders
@_responseHeaders[loweredName]
else
null
# Returns all the HTTP headers in this XHR's response.
#
# @return {String} header lines separated by CR LF, where each header line
# has the name and value separated by a ": " (colon, space); the empty
# string is returned if the headers are not available
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getallresponseheaders()-method
getAllResponseHeaders: ->
return '' unless @_responseHeaders
lines = ("#{name}: #{value}" for name, value of @_responseHeaders)
lines.join "\r\n"
# Overrides the Content-Type
#
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-overridemimetype()-method
overrideMimeType: (newMimeType) ->
if @readyState is XMLHttpRequest.LOADING or
@readyState is XMLHttpRequest.DONE
throw new InvalidStateError(
"overrideMimeType() not allowed in LOADING or DONE")
@_mimeOverride = newMimeType.toLowerCase()
undefined
# Network configuration not exposed in the XHR API.
#
# Although the XMLHttpRequest specification calls itself "ECMAScript HTTP",
# it assumes that requests are always performed in the context of a browser
# application, where some network parameters are set by the browser user and
# should not be modified by Web applications. This API provides access to
# these network parameters.
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in
# browsers. It is a stable node-xhr2 API.
#
# @param {Object} options one or more of the options below
# @option options {?http.Agent} httpAgent the value for the nodejsHttpAgent
# property (the agent used for HTTP requests)
# @option options {?https.Agent} httpsAgent the value for the
# nodejsHttpsAgent property (the agent used for HTTPS requests)
# @return {undefined} undefined
nodejsSet: (options) ->
if 'httpAgent' of options
@nodejsHttpAgent = options.httpAgent
if 'httpsAgent' of options
@nodejsHttpsAgent = options.httpsAgent
if 'baseUrl' of options
baseUrl = options.baseUrl
unless baseUrl is null
parsedUrl = url.parse baseUrl, false, true
unless parsedUrl.protocol
throw new SyntaxError("baseUrl must be an absolute URL")
@nodejsBaseUrl = baseUrl
undefined
# Default settings for the network configuration not exposed in the XHR API.
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in
# browsers. It is a stable node-xhr2 API.
#
# @param {Object} options one or more of the options below
# @option options {?http.Agent} httpAgent the default value for the
# nodejsHttpAgent property (the agent used for HTTP requests)
# @option options {https.Agent} httpsAgent the default value for the
# nodejsHttpsAgent property (the agent used for HTTPS requests)
# @return {undefined} undefined
# @see XMLHttpRequest.nodejsSet
@nodejsSet: (options) ->
# "this" will be set to XMLHttpRequest.prototype, so the instance nodejsSet
# operates on default property values.
XMLHttpRequest::nodejsSet options
undefined
  # The readyState constants below are defined both on the prototype and on
  # the constructor, so they are reachable as instance properties and as
  # XMLHttpRequest.UNSENT etc., matching browser behavior.
  # readyState value before XMLHttpRequest#open() is called
  UNSENT: 0
  # readyState value before XMLHttpRequest#open() is called
  @UNSENT: 0
  # readyState value after XMLHttpRequest#open() is called, and before
  # XMLHttpRequest#send() is called; XMLHttpRequest#setRequestHeader() can be
  # called in this state
  OPENED: 1
  # readyState value after XMLHttpRequest#open() is called, and before
  # XMLHttpRequest#send() is called; XMLHttpRequest#setRequestHeader() can be
  # called in this state
  @OPENED: 1
  # readyState value after redirects have been followed and the HTTP headers of
  # the final response have been received
  HEADERS_RECEIVED: 2
  # readyState value after redirects have been followed and the HTTP headers of
  # the final response have been received
  @HEADERS_RECEIVED: 2
  # readyState value when the response entity body is being received
  LOADING: 3
  # readyState value when the response entity body is being received
  @LOADING: 3
  # readyState value after the request has been completely processed
  DONE: 4
  # readyState value after the request has been completely processed
  @DONE: 4
  # @property {http.Agent} the agent option passed to HTTP requests
  #
  # NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
  # It is a stable node-xhr2 API that is useful for testing & going through
  # web-proxies.
  nodejsHttpAgent: http.globalAgent
  # @property {https.Agent} the agent option passed to HTTPS requests
  #
  # NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
  # It is a stable node-xhr2 API that is useful for testing & going through
  # web-proxies.
  nodejsHttpsAgent: https.globalAgent
  # @property {String} the base URL that relative URLs get resolved to
  #
  # NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
  # Its browser equivalent is the base URL of the document associated with the
  # Window object. It is a stable node-xhr2 API provided for libraries such as
  # Angular Universal.
  nodejsBaseUrl: null
  # HTTP methods that are disallowed in the XHR spec.
  #
  # @private
  # @see Step 6 in http://www.w3.org/TR/XMLHttpRequest/#the-open()-method
  _restrictedMethods:
    CONNECT: true
    TRACE: true
    TRACK: true
  # HTTP request headers that are disallowed in the XHR spec.
  #
  # Keys are lowercased header names; setRequestHeader() refuses a header
  # when its entry here is truthy.
  #
  # @private
  # @see Step 5 in
  # http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader()-method
  _restrictedHeaders:
    'accept-charset': true
    'accept-encoding': true
    'access-control-request-headers': true
    'access-control-request-method': true
    connection: true
    'content-length': true
    # NOTE(review): every sibling entry is true and the XHR spec forbids the
    # Cookie header; cookie: false makes setRequestHeader() accept Cookie --
    # confirm this deviation from the spec is intentional.
    cookie: false
    cookie2: true
    date: true
    dnt: true
    expect: true
    host: true
    'keep-alive': true
    origin: true
    referer: true
    te: true
    trailer: true
    'transfer-encoding': true
    upgrade: true
    'user-agent': true
    via: true
  # HTTP response headers that should not be exposed according to the XHR spec.
  #
  # @private
  # @see Step 3 in
  # http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
  _privateHeaders:
    'set-cookie': true
    'set-cookie2': true
  # The value of the User-Agent header.
  _userAgent: "Mozilla/5.0 (#{os.type()} #{os.arch()}) " +
      "node.js/#{process.versions.node} v8/#{process.versions.v8}"
# Sets the readyState property and fires the readystatechange event.
#
# @private
# @param {Number} newReadyState the new value of readyState
# @return {undefined} undefined
_setReadyState: (newReadyState) ->
@readyState = newReadyState
event = new ProgressEvent 'readystatechange'
@dispatchEvent event
undefined
# XMLHttpRequest#send() implementation for the file: protocol.
#
# @private
_sendFile: ->
unless @_url.method is 'GET'
throw new NetworkError 'The file protocol only supports GET'
throw new Error "Protocol file: not implemented"
# XMLHttpRequest#send() implementation for the http: and https: protocols.
#
# @private
# This method sets the instance variables and calls _sendHxxpRequest(), which
# is responsible for building a node.js request and firing it off. The code
# in _sendHxxpRequest() is separated off so it can be reused when handling
# redirects.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#infrastructure-for-the-send()-method
_sendHttp: (data) ->
if @_sync
throw new Error "Synchronous XHR processing not implemented"
if data? and (@_method is 'GET' or @_method is 'HEAD')
console.warn "Discarding entity body for #{@_method} requests"
data = null
else
# Send Content-Length: 0
data or= ''
# NOTE: this is called before finalizeHeaders so that the uploader can
# figure out Content-Length and Content-Type.
@upload._setData data
@_finalizeHeaders()
@_sendHxxpRequest()
undefined
# Sets up and fires off a HTTP/HTTPS request using the node.js API.
#
# @private
# This method contains the bulk of the XMLHttpRequest#send() implementation,
# and is also used to issue new HTTP requests when handling HTTP redirects.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#infrastructure-for-the-send()-method
  _sendHxxpRequest: ->
    # Select the transport module and connection agent matching the scheme.
    if @_url.protocol is 'http:'
      hxxp = http
      agent = @nodejsHttpAgent
    else
      hxxp = https
      agent = @nodejsHttpsAgent
    request = hxxp.request
      hostname: @_url.hostname, port: @_url.port, path: @_url.path,
      auth: @_url.auth, method: @_method, headers: @_headers, agent: agent
    @_request = request
    # The socket-level timeout maps onto the XHR 'timeout' event.
    if @timeout
      request.setTimeout @timeout, => @_onHttpTimeout request
    request.on 'response', (response) => @_onHttpResponse request, response
    request.on 'error', (error) => @_onHttpRequestError request, error
    @upload._startUpload request
    # _onHttpRequestError may have fired synchronously and cleared @_request;
    # only announce loadstart if this request is still the active one.
    if @_request is request # An http error might have already fired.
      @_dispatchProgress 'loadstart'
    undefined
# Fills in the restricted HTTP headers with default values.
#
# This is called right before the HTTP request is sent off.
#
# @private
# @return {undefined} undefined
_finalizeHeaders: ->
@_headers['Connection'] = 'keep-alive'
@_headers['Host'] = @_url.host
if @_anonymous
@_headers['Referer'] = 'about:blank'
@_headers['User-Agent'] = @_userAgent
@upload._finalizeHeaders @_headers, @_loweredHeaders
undefined
# Called when the headers of an HTTP response have been received.
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# produced this response
# @param {http.ClientResponse} response the node.js ClientResponse instance
# passed to
  _onHttpResponse: (request, response) ->
    # Ignore responses for requests that were aborted or superseded.
    return unless @_request is request
    # Transparent redirection handling.
    switch response.statusCode
      when 301, 302, 303, 307, 308
        @_url = @_parseUrl response.headers['location']
        # NOTE(review): the method is unconditionally rewritten to GET for
        # every redirect status; per spec, 307/308 should preserve the
        # original method and body -- confirm this is intended.
        @_method = 'GET'
        # Drop the old entity's Content-Type; the new GET has no body.
        if 'content-type' of @_loweredHeaders
          delete @_headers[@_loweredHeaders['content-type']]
          delete @_loweredHeaders['content-type']
        # XMLHttpRequestUpload#_finalizeHeaders() sets Content-Type directly.
        if 'Content-Type' of @_headers
          delete @_headers['Content-Type']
        # Restricted headers can't be set by the user, no need to check
        # loweredHeaders.
        delete @_headers['Content-Length']
        @upload._reset()
        @_finalizeHeaders()
        # Re-issue the request against the redirect target.
        @_sendHxxpRequest()
        return
    # Non-redirect: wire up the response stream and surface the headers.
    @_response = response
    @_response.on 'data', (data) => @_onHttpResponseData response, data
    @_response.on 'end', => @_onHttpResponseEnd response
    @_response.on 'close', => @_onHttpResponseClose response
    # The fragment is stripped from responseURL.
    @responseURL = @_url.href.split('#')[0]
    @status = @_response.statusCode
    @statusText = http.STATUS_CODES[@status]
    @_parseResponseHeaders response
    # Content-Length, when present, makes progress events lengthComputable.
    if lengthString = @_responseHeaders['content-length']
      @_totalBytes = parseInt(lengthString)
      @_lengthComputable = true
    else
      @_lengthComputable = false
    @_setReadyState XMLHttpRequest.HEADERS_RECEIVED
# Called when some data has been received on a HTTP connection.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
# @param {String, Buffer} data the data that has been received
_onHttpResponseData: (response, data) ->
return unless @_response is response
@_responseParts.push data
@_loadedBytes += data.length
if @readyState isnt XMLHttpRequest.LOADING
@_setReadyState XMLHttpRequest.LOADING
@_dispatchProgress 'progress'
# Called when the HTTP request finished processing.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
_onHttpResponseEnd: (response) ->
return unless @_response is response
@_parseResponse()
@_request = null
@_response = null
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'load'
@_dispatchProgress 'loadend'
# Called when the underlying HTTP connection was closed prematurely.
#
# If this method is called, it will be called after or instead of
# onHttpResponseEnd.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
_onHttpResponseClose: (response) ->
return unless @_response is response
request = @_request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'error'
@_dispatchProgress 'loadend'
# Called when the timeout set on the HTTP socket expires.
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# fired this event
_onHttpTimeout: (request) ->
return unless @_request is request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'timeout'
@_dispatchProgress 'loadend'
# Called when something wrong happens on the HTTP socket
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# fired this event
# @param {Error} error emitted exception
_onHttpRequestError: (request, error) ->
return unless @_request is request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'error'
@_dispatchProgress 'loadend'
# Fires an XHR progress event.
#
# @private
# @param {String} eventType one of the XHR progress event types, such as
# 'load' and 'progress'
_dispatchProgress: (eventType) ->
event = new ProgressEvent eventType
event.lengthComputable = @_lengthComputable
event.loaded = @_loadedBytes
event.total = @_totalBytes
@dispatchEvent event
undefined
# Sets up the XHR to reflect the fact that an error has occurred.
#
# The possible errors are a network error, a timeout, or an abort.
#
# @private
_setError: ->
@_request = null
@_response = null
@_responseHeaders = null
@_responseParts = null
undefined
# Parses a request URL string.
#
# @private
# This method is a thin wrapper around url.parse() that normalizes HTTP
# user/password credentials. It is used to parse the URL string passed to
# XMLHttpRequest#open() and the URLs in the Location headers of HTTP redirect
# responses.
#
# @param {String} urlString the URL to be parsed
# @return {Object} parsed URL
_parseUrl: (urlString) ->
if @nodejsBaseUrl is null
absoluteUrlString = urlString
else
absoluteUrlString = url.resolve @nodejsBaseUrl, urlString
xhrUrl = url.parse absoluteUrlString, false, true
xhrUrl.hash = null
if xhrUrl.auth and (user? or password?)
index = xhrUrl.auth.indexOf ':'
if index is -1
user = xhrUrl.auth unless user
else
user = xhrUrl.substring(0, index) unless user
password = xhrUrl.substring(index + 1) unless password
if user or password
xhrUrl.auth = "#{user}:#{password}"
xhrUrl
# Reads the headers from a node.js ClientResponse instance.
#
# @private
# @param {http.ClientResponse} response the response whose headers will be
# imported into this XMLHttpRequest's state
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getallresponseheaders()-method
_parseResponseHeaders: (response) ->
@_responseHeaders = {}
for name, value of response.headers
loweredName = name.toLowerCase()
continue if @_privateHeaders[loweredName]
if @_mimeOverride isnt null and loweredName is 'content-type'
value = @_mimeOverride
@_responseHeaders[loweredName] = value
if @_mimeOverride isnt null and !('content-type' of @_responseHeaders)
@_responseHeaders['content-type'] = @_mimeOverride
undefined
# Sets the response and responseText properties when an XHR completes.
#
# @private
# @return {undefined} undefined
_parseResponse: ->
if Buffer.concat
buffer = Buffer.concat @_responseParts
else
# node 0.6
buffer = @_concatBuffers @_responseParts
@_responseParts = null
switch @responseType
when 'text'
@_parseTextResponse buffer
when 'json'
@responseText = null
try
@response = JSON.parse buffer.toString('utf-8')
catch jsonError
@response = null
when 'buffer'
@responseText = null
@response = buffer
when 'arraybuffer'
@responseText = null
arrayBuffer = new ArrayBuffer buffer.length
view = new Uint8Array arrayBuffer
view[i] = buffer[i] for i in [0...buffer.length]
@response = arrayBuffer
else
# TODO(pwnall): content-base detection
@_parseTextResponse buffer
undefined
# Sets response and responseText for a 'text' response type.
#
# @private
# @param {Buffer} buffer the node.js Buffer containing the binary response
# @return {undefined} undefined
_parseTextResponse: (buffer) ->
try
@responseText = buffer.toString @_parseResponseEncoding()
catch e
# Unknown encoding.
@responseText = buffer.toString 'binary'
@response = @responseText
undefined
# Figures out the string encoding of the XHR's response.
#
# This is called to determine the encoding when responseText is set.
#
# @private
# @return {String} a string encoding, e.g. 'utf-8'
_parseResponseEncoding: ->
encoding = null
if contentType = @_responseHeaders['content-type']
if match = /\;\s*charset\=(.*)$/.exec contentType
return match[1]
'utf-8'
# Buffer.concat implementation for node 0.6.
#
# @private
# @param {Array<Buffer>} buffers the buffers whose contents will be merged
# @return {Buffer} same as Buffer.concat(buffers) in node 0.8 and above
_concatBuffers: (buffers) ->
if buffers.length is 0
return Buffer.alloc 0
if buffers.length is 1
return buffers[0]
length = 0
length += buffer.length for buffer in buffers
target = Buffer.alloc length
length = 0
for buffer in buffers
buffer.copy target, length
length += buffer.length
target
# XMLHttpRequest is the result of require('node-xhr2').
module.exports = XMLHttpRequest
# Make node-xhr2 work as a drop-in replacement for libraries that promote the
# following usage pattern:
# var XMLHttpRequest = require('xhr-library-name').XMLHttpRequest
XMLHttpRequest.XMLHttpRequest = XMLHttpRequest
| true | # This file's name is set up in such a way that it will always show up second
# in the list of files given to coffee --join, so it can use the
# XMLHttpRequestEventTarget definition and so that the other files can assume
# that XMLHttpRequest was already defined.
http = require 'http'
https = require 'https'
os = require 'os'
url = require 'url'
# The ECMAScript HTTP API.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#introduction
class XMLHttpRequest extends XMLHttpRequestEventTarget
# Creates a new request.
#
# @param {Object} options one or more of the options below
# @option options {Boolean} anon if true, the request's anonymous flag
# will be set
# @see http://www.w3.org/TR/XMLHttpRequest/#constructors
# @see http://www.w3.org/TR/XMLHttpRequest/#anonymous-flag
constructor: (options) ->
super()
@onreadystatechange = null
@_anonymous = options and options.anon
@readyState = XMLHttpRequest.UNSENT
@response = null
@responseText = ''
@responseType = ''
@responseURL = ''
@status = 0
@statusText = ''
@timeout = 0
@upload = new XMLHttpRequestUpload @
@_method = null # String
@_url = null # Return value of url.parse()
@_sync = false
@_headers = null # Object<String, String>
@_loweredHeaders = null # Object<lowercase String, String>
@_mimeOverride = null
@_request = null # http.ClientRequest
@_response = null # http.ClientResponse
@_responseParts = null # Array<Buffer, String>
@_responseHeaders = null # Object<lowercase String, String>
@_aborting = null
@_error = null
@_loadedBytes = 0
@_totalBytes = 0
@_lengthComputable = false
# @property {function(ProgressEvent)} DOM level 0-style handler for the
# 'readystatechange' event
onreadystatechange: null
# @property {Number} the current state of the XHR object
# @see http://www.w3.org/TR/XMLHttpRequest/#states
readyState: null
# @property {String, ArrayBuffer, Buffer, Object} processed XHR response
# @see http://www.w3.org/TR/XMLHttpRequest/#the-response-attribute
response: null
# @property {String} response string, if responseType is '' or 'text'
# @see http://www.w3.org/TR/XMLHttpRequest/#the-responsetext-attribute
responseText: null
# @property {String} sets the parsing method for the XHR response
# @see http://www.w3.org/TR/XMLHttpRequest/#the-responsetype-attribute
responseType: null
# @property {Number} the HTTP
# @see http://www.w3.org/TR/XMLHttpRequest/#the-status-attribute
status: null
# @property {Number} milliseconds to wait for the request to complete
# @see http://www.w3.org/TR/XMLHttpRequest/#the-timeout-attribute
timeout: null
# @property {XMLHttpRequestUpload} the associated upload information
# @see http://www.w3.org/TR/XMLHttpRequest/#the-upload-attribute
upload: null
# Sets the XHR's method, URL, synchronous flag, and authentication params.
#
# @param {String} method the HTTP method to be used
# @param {String} url the URL that the request will be made to
# @param {?Boolean} async if false, the XHR should be processed
# synchronously; true by default
# @param {?String} user the user credential to be used in HTTP basic
# authentication
# @param {?String} password the PI:PASSWORD:<PASSWORD>END_PI credential to be used in HTTP basic
# authentication
# @return {undefined} undefined
# @throw {SecurityError} method is not one of the allowed methods
# @throw {SyntaxError} urlString is not a valid URL
# @throw {Error} the URL contains an unsupported protocol; the supported
# protocols are file, http and https
# @see http://www.w3.org/TR/XMLHttpRequest/#the-open()-method
open: (method, url, async, user, password) ->
method = method.toUpperCase()
if method of @_restrictedMethods
throw new SecurityError "HTTP method #{method} is not allowed in XHR"
xhrUrl = @_parseUrl url
async = true if async is undefined
switch @readyState
when XMLHttpRequest.UNSENT, XMLHttpRequest.OPENED, XMLHttpRequest.DONE
# Nothing to do here.
null
when XMLHttpRequest.HEADERS_RECEIVED, XMLHttpRequest.LOADING
# TODO(pwnall): terminate abort(), terminate send()
null
@_method = method
@_url = xhrUrl
@_sync = !async
@_headers = {}
@_loweredHeaders = {}
@_mimeOverride = null
@_setReadyState XMLHttpRequest.OPENED
@_request = null
@_response = null
@status = 0
@statusText = ''
@_responseParts = []
@_responseHeaders = null
@_loadedBytes = 0
@_totalBytes = 0
@_lengthComputable = false
undefined
# Appends a header to the list of author request headers.
#
# @param {String} name the HTTP header name
# @param {String} value the HTTP header value
# @return {undefined} undefined
# @throw {InvalidStateError} readyState is not OPENED
# @throw {SyntaxError} name is not a valid HTTP header name or value is not
# a valid HTTP header value
# @see http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader()-method
setRequestHeader: (name, value) ->
unless @readyState is XMLHttpRequest.OPENED
throw new InvalidStateError "XHR readyState must be OPENED"
loweredName = name.toLowerCase()
if @_restrictedHeaders[loweredName] or /^sec\-/.test(loweredName) or
/^proxy-/.test(loweredName)
console.warn "Refused to set unsafe header \"#{name}\""
return undefined
value = value.toString()
if loweredName of @_loweredHeaders
# Combine value with the existing header value.
name = @_loweredHeaders[loweredName]
@_headers[name] = @_headers[name] + ', ' + value
else
# New header.
@_loweredHeaders[loweredName] = name
@_headers[name] = value
undefined
# Initiates the request.
#
# @param {?String, ?ArrayBufferView} data the data to be sent; ignored for
# GET and HEAD requests
# @return {undefined} undefined
# @throw {InvalidStateError} readyState is not OPENED
# @see http://www.w3.org/TR/XMLHttpRequest/#the-send()-method
send: (data) ->
unless @readyState is XMLHttpRequest.OPENED
throw new InvalidStateError "XHR readyState must be OPENED"
if @_request
throw new InvalidStateError "send() already called"
switch @_url.protocol
when 'file:'
@_sendFile data
when 'http:', 'https:'
@_sendHttp data
else
throw new NetworkError "Unsupported protocol #{@_url.protocol}"
undefined
# Cancels the network activity performed by this request.
#
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-abort()-method
abort: ->
return unless @_request
@_request.abort()
@_setError()
@_dispatchProgress 'abort'
@_dispatchProgress 'loadend'
undefined
# Returns a header value in the HTTP response for this XHR.
#
# @param {String} name case-insensitive HTTP header name
# @return {?String} value the value of the header whose name matches the
# given name, or null if there is no such header
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
getResponseHeader: (name) ->
return null unless @_responseHeaders
loweredName = name.toLowerCase()
if loweredName of @_responseHeaders
@_responseHeaders[loweredName]
else
null
# Returns all the HTTP headers in this XHR's response.
#
# @return {String} header lines separated by CR LF, where each header line
# has the name and value separated by a ": " (colon, space); the empty
# string is returned if the headers are not available
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getallresponseheaders()-method
getAllResponseHeaders: ->
return '' unless @_responseHeaders
lines = ("#{name}: #{value}" for name, value of @_responseHeaders)
lines.join "\r\n"
# Overrides the Content-Type
#
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-overridemimetype()-method
overrideMimeType: (newMimeType) ->
if @readyState is XMLHttpRequest.LOADING or
@readyState is XMLHttpRequest.DONE
throw new InvalidStateError(
"overrideMimeType() not allowed in LOADING or DONE")
@_mimeOverride = newMimeType.toLowerCase()
undefined
# Network configuration not exposed in the XHR API.
#
# Although the XMLHttpRequest specification calls itself "ECMAScript HTTP",
# it assumes that requests are always performed in the context of a browser
# application, where some network parameters are set by the browser user and
# should not be modified by Web applications. This API provides access to
# these network parameters.
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in
# browsers. It is a stable node-xhr2 API.
#
# @param {Object} options one or more of the options below
# @option options {?http.Agent} httpAgent the value for the nodejsHttpAgent
# property (the agent used for HTTP requests)
# @option options {?https.Agent} httpsAgent the value for the
# nodejsHttpsAgent property (the agent used for HTTPS requests)
# @return {undefined} undefined
nodejsSet: (options) ->
if 'httpAgent' of options
@nodejsHttpAgent = options.httpAgent
if 'httpsAgent' of options
@nodejsHttpsAgent = options.httpsAgent
if 'baseUrl' of options
baseUrl = options.baseUrl
unless baseUrl is null
parsedUrl = url.parse baseUrl, false, true
unless parsedUrl.protocol
throw new SyntaxError("baseUrl must be an absolute URL")
@nodejsBaseUrl = baseUrl
undefined
# Default settings for the network configuration not exposed in the XHR API.
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in
# browsers. It is a stable node-xhr2 API.
#
# @param {Object} options one or more of the options below
# @option options {?http.Agent} httpAgent the default value for the
# nodejsHttpAgent property (the agent used for HTTP requests)
# @option options {https.Agent} httpsAgent the default value for the
# nodejsHttpsAgent property (the agent used for HTTPS requests)
# @return {undefined} undefined
# @see XMLHttpRequest.nodejsSet
@nodejsSet: (options) ->
# "this" will be set to XMLHttpRequest.prototype, so the instance nodejsSet
# operates on default property values.
XMLHttpRequest::nodejsSet options
undefined
# readyState value before XMLHttpRequest#open() is called
UNSENT: 0
# readyState value before XMLHttpRequest#open() is called
@UNSENT: 0
# readyState value after XMLHttpRequest#open() is called, and before
# XMLHttpRequest#send() is called; XMLHttpRequest#setRequestHeader() can be
# called in this state
OPENED: 1
# readyState value after XMLHttpRequest#open() is called, and before
# XMLHttpRequest#send() is called; XMLHttpRequest#setRequestHeader() can be
# called in this state
@OPENED: 1
# readyState value after redirects have been followed and the HTTP headers of
# the final response have been received
HEADERS_RECEIVED: 2
# readyState value after redirects have been followed and the HTTP headers of
# the final response have been received
@HEADERS_RECEIVED: 2
# readyState value when the response entity body is being received
LOADING: 3
# readyState value when the response entity body is being received
@LOADING: 3
# readyState value after the request has been completely processed
DONE: 4
# readyState value after the request has been completely processed
@DONE: 4
# @property {http.Agent} the agent option passed to HTTP requests
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
# It is a stable node-xhr2 API that is useful for testing & going through
# web-proxies.
nodejsHttpAgent: http.globalAgent
# @property {https.Agent} the agent option passed to HTTPS requests
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
# It is a stable node-xhr2 API that is useful for testing & going through
# web-proxies.
nodejsHttpsAgent: https.globalAgent
# @property {String} the base URL that relative URLs get resolved to
#
# NOTE: this is not in the XMLHttpRequest API, and will not work in browsers.
# Its browser equivalent is the base URL of the document associated with the
# Window object. It is a stable node-xhr2 API provided for libraries such as
# Angular Universal.
nodejsBaseUrl: null
# HTTP methods that are disallowed in the XHR spec.
#
# @private
# @see Step 6 in http://www.w3.org/TR/XMLHttpRequest/#the-open()-method
_restrictedMethods:
CONNECT: true
TRACE: true
TRACK: true
# HTTP request headers that are disallowed in the XHR spec.
#
# @private
# @see Step 5 in
# http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader()-method
_restrictedHeaders:
'accept-charset': true
'accept-encoding': true
'access-control-request-headers': true
'access-control-request-method': true
connection: true
'content-length': true
cookie: false
cookie2: true
date: true
dnt: true
expect: true
host: true
'keep-alive': true
origin: true
referer: true
te: true
trailer: true
'transfer-encoding': true
upgrade: true
'user-agent': true
via: true
# HTTP response headers that should not be exposed according to the XHR spec.
#
# @private
# @see Step 3 in
# http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
_privateHeaders:
'set-cookie': true
'set-cookie2': true
# The value of the User-Agent header.
_userAgent: "Mozilla/5.0 (#{os.type()} #{os.arch()}) " +
"node.js/#{process.versions.node} v8/#{process.versions.v8}"
# Sets the readyState property and fires the readystatechange event.
#
# @private
# @param {Number} newReadyState the new value of readyState
# @return {undefined} undefined
_setReadyState: (newReadyState) ->
@readyState = newReadyState
event = new ProgressEvent 'readystatechange'
@dispatchEvent event
undefined
# XMLHttpRequest#send() implementation for the file: protocol.
#
# @private
_sendFile: ->
unless @_url.method is 'GET'
throw new NetworkError 'The file protocol only supports GET'
throw new Error "Protocol file: not implemented"
# XMLHttpRequest#send() implementation for the http: and https: protocols.
#
# @private
# This method sets the instance variables and calls _sendHxxpRequest(), which
# is responsible for building a node.js request and firing it off. The code
# in _sendHxxpRequest() is separated off so it can be reused when handling
# redirects.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#infrastructure-for-the-send()-method
_sendHttp: (data) ->
if @_sync
throw new Error "Synchronous XHR processing not implemented"
if data? and (@_method is 'GET' or @_method is 'HEAD')
console.warn "Discarding entity body for #{@_method} requests"
data = null
else
# Send Content-Length: 0
data or= ''
# NOTE: this is called before finalizeHeaders so that the uploader can
# figure out Content-Length and Content-Type.
@upload._setData data
@_finalizeHeaders()
@_sendHxxpRequest()
undefined
# Sets up and fires off a HTTP/HTTPS request using the node.js API.
#
# @private
# This method contains the bulk of the XMLHttpRequest#send() implementation,
# and is also used to issue new HTTP requests when handling HTTP redirects.
#
# @see http://www.w3.org/TR/XMLHttpRequest/#infrastructure-for-the-send()-method
_sendHxxpRequest: ->
if @_url.protocol is 'http:'
hxxp = http
agent = @nodejsHttpAgent
else
hxxp = https
agent = @nodejsHttpsAgent
request = hxxp.request
hostname: @_url.hostname, port: @_url.port, path: @_url.path,
auth: @_url.auth, method: @_method, headers: @_headers, agent: agent
@_request = request
if @timeout
request.setTimeout @timeout, => @_onHttpTimeout request
request.on 'response', (response) => @_onHttpResponse request, response
request.on 'error', (error) => @_onHttpRequestError request, error
@upload._startUpload request
if @_request is request # An http error might have already fired.
@_dispatchProgress 'loadstart'
undefined
# Fills in the restricted HTTP headers with default values.
#
# This is called right before the HTTP request is sent off.
#
# @private
# @return {undefined} undefined
_finalizeHeaders: ->
@_headers['Connection'] = 'keep-alive'
@_headers['Host'] = @_url.host
if @_anonymous
@_headers['Referer'] = 'about:blank'
@_headers['User-Agent'] = @_userAgent
@upload._finalizeHeaders @_headers, @_loweredHeaders
undefined
# Called when the headers of an HTTP response have been received.
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# produced this response
# @param {http.ClientResponse} response the node.js ClientResponse instance
# passed to
_onHttpResponse: (request, response) ->
return unless @_request is request
# Transparent redirection handling.
switch response.statusCode
when 301, 302, 303, 307, 308
@_url = @_parseUrl response.headers['location']
@_method = 'GET'
if 'content-type' of @_loweredHeaders
delete @_headers[@_loweredHeaders['content-type']]
delete @_loweredHeaders['content-type']
# XMLHttpRequestUpload#_finalizeHeaders() sets Content-Type directly.
if 'Content-Type' of @_headers
delete @_headers['Content-Type']
# Restricted headers can't be set by the user, no need to check
# loweredHeaders.
delete @_headers['Content-Length']
@upload._reset()
@_finalizeHeaders()
@_sendHxxpRequest()
return
@_response = response
@_response.on 'data', (data) => @_onHttpResponseData response, data
@_response.on 'end', => @_onHttpResponseEnd response
@_response.on 'close', => @_onHttpResponseClose response
@responseURL = @_url.href.split('#')[0]
@status = @_response.statusCode
@statusText = http.STATUS_CODES[@status]
@_parseResponseHeaders response
if lengthString = @_responseHeaders['content-length']
@_totalBytes = parseInt(lengthString)
@_lengthComputable = true
else
@_lengthComputable = false
@_setReadyState XMLHttpRequest.HEADERS_RECEIVED
# Called when some data has been received on a HTTP connection.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
# @param {String, Buffer} data the data that has been received
_onHttpResponseData: (response, data) ->
return unless @_response is response
@_responseParts.push data
@_loadedBytes += data.length
if @readyState isnt XMLHttpRequest.LOADING
@_setReadyState XMLHttpRequest.LOADING
@_dispatchProgress 'progress'
# Called when the HTTP request finished processing.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
_onHttpResponseEnd: (response) ->
return unless @_response is response
@_parseResponse()
@_request = null
@_response = null
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'load'
@_dispatchProgress 'loadend'
# Called when the underlying HTTP connection was closed prematurely.
#
# If this method is called, it will be called after or instead of
# onHttpResponseEnd.
#
# @private
# @param {http.ClientResponse} response the node.js ClientResponse instance
# that fired this event
_onHttpResponseClose: (response) ->
return unless @_response is response
request = @_request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'error'
@_dispatchProgress 'loadend'
# Called when the timeout set on the HTTP socket expires.
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# fired this event
_onHttpTimeout: (request) ->
return unless @_request is request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'timeout'
@_dispatchProgress 'loadend'
# Called when something wrong happens on the HTTP socket
#
# @private
# @param {http.ClientRequest} request the node.js ClientRequest instance that
# fired this event
# @param {Error} error emitted exception
_onHttpRequestError: (request, error) ->
return unless @_request is request
@_setError()
request.abort()
@_setReadyState XMLHttpRequest.DONE
@_dispatchProgress 'error'
@_dispatchProgress 'loadend'
# Fires an XHR progress event.
#
# @private
# @param {String} eventType one of the XHR progress event types, such as
# 'load' and 'progress'
_dispatchProgress: (eventType) ->
event = new ProgressEvent eventType
event.lengthComputable = @_lengthComputable
event.loaded = @_loadedBytes
event.total = @_totalBytes
@dispatchEvent event
undefined
# Sets up the XHR to reflect the fact that an error has occurred.
#
# The possible errors are a network error, a timeout, or an abort.
#
# @private
_setError: ->
@_request = null
@_response = null
@_responseHeaders = null
@_responseParts = null
undefined
# Parses a request URL string.
#
# @private
# This method is a thin wrapper around url.parse() that normalizes HTTP
# user/password credentials. It is used to parse the URL string passed to
# XMLHttpRequest#open() and the URLs in the Location headers of HTTP redirect
# responses.
#
# @param {String} urlString the URL to be parsed
# @return {Object} parsed URL
_parseUrl: (urlString) ->
if @nodejsBaseUrl is null
absoluteUrlString = urlString
else
absoluteUrlString = url.resolve @nodejsBaseUrl, urlString
xhrUrl = url.parse absoluteUrlString, false, true
xhrUrl.hash = null
if xhrUrl.auth and (user? or password?)
index = xhrUrl.auth.indexOf ':'
if index is -1
user = xhrUrl.auth unless user
else
user = xhrUrl.substring(0, index) unless user
password = xhrUrl.substring(index + 1) unless password
if user or password
xhrUrl.auth = "#{user}:#{password}"
xhrUrl
# Reads the headers from a node.js ClientResponse instance.
#
# @private
# @param {http.ClientResponse} response the response whose headers will be
# imported into this XMLHttpRequest's state
# @return {undefined} undefined
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getresponseheader()-method
# @see http://www.w3.org/TR/XMLHttpRequest/#the-getallresponseheaders()-method
_parseResponseHeaders: (response) ->
@_responseHeaders = {}
for name, value of response.headers
loweredName = name.toLowerCase()
continue if @_privateHeaders[loweredName]
if @_mimeOverride isnt null and loweredName is 'content-type'
value = @_mimeOverride
@_responseHeaders[loweredName] = value
if @_mimeOverride isnt null and !('content-type' of @_responseHeaders)
@_responseHeaders['content-type'] = @_mimeOverride
undefined
# Sets the response and responseText properties when an XHR completes.
#
# @private
# @return {undefined} undefined
_parseResponse: ->
if Buffer.concat
buffer = Buffer.concat @_responseParts
else
# node 0.6
buffer = @_concatBuffers @_responseParts
@_responseParts = null
switch @responseType
when 'text'
@_parseTextResponse buffer
when 'json'
@responseText = null
try
@response = JSON.parse buffer.toString('utf-8')
catch jsonError
@response = null
when 'buffer'
@responseText = null
@response = buffer
when 'arraybuffer'
@responseText = null
arrayBuffer = new ArrayBuffer buffer.length
view = new Uint8Array arrayBuffer
view[i] = buffer[i] for i in [0...buffer.length]
@response = arrayBuffer
else
# TODO(pwnall): content-base detection
@_parseTextResponse buffer
undefined
# Sets response and responseText for a 'text' response type.
#
# @private
# @param {Buffer} buffer the node.js Buffer containing the binary response
# @return {undefined} undefined
_parseTextResponse: (buffer) ->
try
@responseText = buffer.toString @_parseResponseEncoding()
catch e
# Unknown encoding.
@responseText = buffer.toString 'binary'
@response = @responseText
undefined
# Figures out the string encoding of the XHR's response.
#
# This is called to determine the encoding when responseText is set.
#
# @private
# @return {String} a string encoding, e.g. 'utf-8'
_parseResponseEncoding: ->
encoding = null
if contentType = @_responseHeaders['content-type']
if match = /\;\s*charset\=(.*)$/.exec contentType
return match[1]
'utf-8'
# Buffer.concat implementation for node 0.6.
#
# @private
# @param {Array<Buffer>} buffers the buffers whose contents will be merged
# @return {Buffer} same as Buffer.concat(buffers) in node 0.8 and above
_concatBuffers: (buffers) ->
if buffers.length is 0
return Buffer.alloc 0
if buffers.length is 1
return buffers[0]
length = 0
length += buffer.length for buffer in buffers
target = Buffer.alloc length
length = 0
for buffer in buffers
buffer.copy target, length
length += buffer.length
target
# XMLHttpRequest is the result of require('node-xhr2').
module.exports = XMLHttpRequest
# Make node-xhr2 work as a drop-in replacement for libraries that promote the
# following usage pattern:
# var XMLHttpRequest = require('xhr-library-name').XMLHttpRequest
XMLHttpRequest.XMLHttpRequest = XMLHttpRequest
|
[
{
"context": " \"0.0.1\"')\n expect(path).toContain('author: \"John Doe\"')\n expect(path).toContain('description: \"a ",
"end": 1051,
"score": 0.9998074769973755,
"start": 1043,
"tag": "NAME",
"value": "John Doe"
}
] | test/functionals/generators/project.gen.spec.coffee | bcmw/neat | 1 | require '../../test_helper'
Neat = require '../../../lib/neat'
{run} = Neat.require 'utils/commands'
{print} = require 'util'
describe 'when outside a project', ->
beforeEach -> process.chdir TEST_TMP_DIR
describe 'running `neat generate project`', ->
it "should return a status of 1 and don't generate anything", (done) ->
# options =
# stderr: (data)-> print data
# stdout: (data)-> print data
run 'node', [NEAT_BIN, 'generate', 'project'], (status) ->
expect(status).toBe(1)
done()
withProject 'neat_project', 'when outside a project', ->
describe 'running `neat generate project foo`', ->
beforeEach -> process.chdir TEST_TMP_DIR
it 'should return a status code of 0', ->
expect(@status).toBe(0)
it 'should generates the neat manifest for the new project', ->
path = inProject ".neat"
expect(path).toExist()
expect(path).toContain('name: "neat_project"')
expect(path).toContain('version: "0.0.1"')
expect(path).toContain('author: "John Doe"')
expect(path).toContain('description: "a description"')
expect(path).toContain('keywords: ["foo", "bar", "baz"]')
it 'should generates a Nemfile depending on the current neat version', ->
path = inProject "Nemfile"
expect(path).toExist()
expect(path).toContain("npm 'neat', '#{Neat.meta.version}'")
it 'should generates a configuration file for default environment', ->
path = inProject "src/config/environments/default.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for test environment', ->
path = inProject "src/config/environments/test.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for development environment', ->
path = inProject "src/config/environments/development.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for production environment', ->
path = inProject "src/config/environments/production.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a project in the current directory', ->
expect(inProject ".gitignore").toExist()
expect(inProject ".npmignore").toExist()
expect(inProject "src/tasks/.gitkeep").toExist()
expect(inProject "src/commands/.gitkeep").toExist()
expect(inProject "src/generators/.gitkeep").toExist()
expect(inProject "src/config/initializers/.gitkeep").toExist()
expect(inProject "templates/.gitkeep").toExist()
expect(inProject "test/test_helper.coffee").toExist()
expect(inProject "test/units/.gitkeep").toExist()
expect(inProject "test/helpers/.gitkeep").toExist()
expect(inProject "test/functionals/.gitkeep").toExist()
expect(inProject "test/integrations/.gitkeep").toExist()
expect(inProject "test/fixtures/.gitkeep").toExist()
expect(inProject "config").not.toExist()
expect(inProject "config/packages").not.toExist()
expect(inProject "config/packages/compile.cup").not.toExist()
expect(inProject "Cakefile")
.toContain(loadFixture 'generators/project/Cakefile')
expect(inProject "Neatfile")
.toContain(loadFixture 'generators/project/Neatfile')
expect(inProject "Watchfile")
.toContain(loadFixture 'generators/project/Watchfile')
| 32913 | require '../../test_helper'
Neat = require '../../../lib/neat'
{run} = Neat.require 'utils/commands'
{print} = require 'util'
describe 'when outside a project', ->
beforeEach -> process.chdir TEST_TMP_DIR
describe 'running `neat generate project`', ->
it "should return a status of 1 and don't generate anything", (done) ->
# options =
# stderr: (data)-> print data
# stdout: (data)-> print data
run 'node', [NEAT_BIN, 'generate', 'project'], (status) ->
expect(status).toBe(1)
done()
withProject 'neat_project', 'when outside a project', ->
describe 'running `neat generate project foo`', ->
beforeEach -> process.chdir TEST_TMP_DIR
it 'should return a status code of 0', ->
expect(@status).toBe(0)
it 'should generates the neat manifest for the new project', ->
path = inProject ".neat"
expect(path).toExist()
expect(path).toContain('name: "neat_project"')
expect(path).toContain('version: "0.0.1"')
expect(path).toContain('author: "<NAME>"')
expect(path).toContain('description: "a description"')
expect(path).toContain('keywords: ["foo", "bar", "baz"]')
it 'should generates a Nemfile depending on the current neat version', ->
path = inProject "Nemfile"
expect(path).toExist()
expect(path).toContain("npm 'neat', '#{Neat.meta.version}'")
it 'should generates a configuration file for default environment', ->
path = inProject "src/config/environments/default.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for test environment', ->
path = inProject "src/config/environments/test.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for development environment', ->
path = inProject "src/config/environments/development.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for production environment', ->
path = inProject "src/config/environments/production.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a project in the current directory', ->
expect(inProject ".gitignore").toExist()
expect(inProject ".npmignore").toExist()
expect(inProject "src/tasks/.gitkeep").toExist()
expect(inProject "src/commands/.gitkeep").toExist()
expect(inProject "src/generators/.gitkeep").toExist()
expect(inProject "src/config/initializers/.gitkeep").toExist()
expect(inProject "templates/.gitkeep").toExist()
expect(inProject "test/test_helper.coffee").toExist()
expect(inProject "test/units/.gitkeep").toExist()
expect(inProject "test/helpers/.gitkeep").toExist()
expect(inProject "test/functionals/.gitkeep").toExist()
expect(inProject "test/integrations/.gitkeep").toExist()
expect(inProject "test/fixtures/.gitkeep").toExist()
expect(inProject "config").not.toExist()
expect(inProject "config/packages").not.toExist()
expect(inProject "config/packages/compile.cup").not.toExist()
expect(inProject "Cakefile")
.toContain(loadFixture 'generators/project/Cakefile')
expect(inProject "Neatfile")
.toContain(loadFixture 'generators/project/Neatfile')
expect(inProject "Watchfile")
.toContain(loadFixture 'generators/project/Watchfile')
| true | require '../../test_helper'
Neat = require '../../../lib/neat'
{run} = Neat.require 'utils/commands'
{print} = require 'util'
describe 'when outside a project', ->
beforeEach -> process.chdir TEST_TMP_DIR
describe 'running `neat generate project`', ->
it "should return a status of 1 and don't generate anything", (done) ->
# options =
# stderr: (data)-> print data
# stdout: (data)-> print data
run 'node', [NEAT_BIN, 'generate', 'project'], (status) ->
expect(status).toBe(1)
done()
withProject 'neat_project', 'when outside a project', ->
describe 'running `neat generate project foo`', ->
beforeEach -> process.chdir TEST_TMP_DIR
it 'should return a status code of 0', ->
expect(@status).toBe(0)
it 'should generates the neat manifest for the new project', ->
path = inProject ".neat"
expect(path).toExist()
expect(path).toContain('name: "neat_project"')
expect(path).toContain('version: "0.0.1"')
expect(path).toContain('author: "PI:NAME:<NAME>END_PI"')
expect(path).toContain('description: "a description"')
expect(path).toContain('keywords: ["foo", "bar", "baz"]')
it 'should generates a Nemfile depending on the current neat version', ->
path = inProject "Nemfile"
expect(path).toExist()
expect(path).toContain("npm 'neat', '#{Neat.meta.version}'")
it 'should generates a configuration file for default environment', ->
path = inProject "src/config/environments/default.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for test environment', ->
path = inProject "src/config/environments/test.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for development environment', ->
path = inProject "src/config/environments/development.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a configuration file for production environment', ->
path = inProject "src/config/environments/production.coffee"
expect(path).toExist()
expect(path).toContain('module.exports = (config) ->')
it 'should generates a project in the current directory', ->
expect(inProject ".gitignore").toExist()
expect(inProject ".npmignore").toExist()
expect(inProject "src/tasks/.gitkeep").toExist()
expect(inProject "src/commands/.gitkeep").toExist()
expect(inProject "src/generators/.gitkeep").toExist()
expect(inProject "src/config/initializers/.gitkeep").toExist()
expect(inProject "templates/.gitkeep").toExist()
expect(inProject "test/test_helper.coffee").toExist()
expect(inProject "test/units/.gitkeep").toExist()
expect(inProject "test/helpers/.gitkeep").toExist()
expect(inProject "test/functionals/.gitkeep").toExist()
expect(inProject "test/integrations/.gitkeep").toExist()
expect(inProject "test/fixtures/.gitkeep").toExist()
expect(inProject "config").not.toExist()
expect(inProject "config/packages").not.toExist()
expect(inProject "config/packages/compile.cup").not.toExist()
expect(inProject "Cakefile")
.toContain(loadFixture 'generators/project/Cakefile')
expect(inProject "Neatfile")
.toContain(loadFixture 'generators/project/Neatfile')
expect(inProject "Watchfile")
.toContain(loadFixture 'generators/project/Watchfile')
|
[
{
"context": "iption:\n# Jodo co, ¿qué pasa co?\n#\n# Author:\n# Francho\n\nmodule.exports = (robot) ->\n leaveReplies = [\n ",
"end": 65,
"score": 0.9993433952331543,
"start": 58,
"tag": "NAME",
"value": "Francho"
}
] | scripts/co.coffee | francho/agilico | 1 | # Description:
# Jodo co, ¿qué pasa co?
#
# Author:
# Francho
module.exports = (robot) ->
leaveReplies = [
'https://www.youtube.com/watch?v=zAo7fS_8a60',
'¿qué pasa co?',
'cooooooo!!!',
'hey co',
'jodo co',
'chicooooo',
'nada co',
'noooooojodas co',
'co, ¿y eso co?'
]
robot.hear /\bco\b/im, (res) ->
return if res.message.text.match(/^co \w/)
res.reply res.random leaveReplies
| 81176 | # Description:
# Jodo co, ¿qué pasa co?
#
# Author:
# <NAME>
module.exports = (robot) ->
leaveReplies = [
'https://www.youtube.com/watch?v=zAo7fS_8a60',
'¿qué pasa co?',
'cooooooo!!!',
'hey co',
'jodo co',
'chicooooo',
'nada co',
'noooooojodas co',
'co, ¿y eso co?'
]
robot.hear /\bco\b/im, (res) ->
return if res.message.text.match(/^co \w/)
res.reply res.random leaveReplies
| true | # Description:
# Jodo co, ¿qué pasa co?
#
# Author:
# PI:NAME:<NAME>END_PI
module.exports = (robot) ->
leaveReplies = [
'https://www.youtube.com/watch?v=zAo7fS_8a60',
'¿qué pasa co?',
'cooooooo!!!',
'hey co',
'jodo co',
'chicooooo',
'nada co',
'noooooojodas co',
'co, ¿y eso co?'
]
robot.hear /\bco\b/im, (res) ->
return if res.message.text.match(/^co \w/)
res.reply res.random leaveReplies
|
[
{
"context": "eate\n username: 'admin'\n password: 'admin'\n email: 'shaunfarrell@g.harvard.edu'\n ",
"end": 1589,
"score": 0.9995458126068115,
"start": 1584,
"tag": "PASSWORD",
"value": "admin"
},
{
"context": "'admin'\n password: 'admin'\n emai... | src/modules/data.coffee | Soundscape/sublime-oauth2 | 0 | Core = require 'sublime-core'
Data = require 'sublime-data'
mongo = require 'sails-mongo'
models = [
new Data.Model(
{ identity: 'token', connection: 'primary', autoPK: false },
{
userId: { type: 'string', required: true }
clientId: { type: 'string', required: true }
value: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'code', connection: 'primary' },
{
userId: { type: 'string', required: true }
clientId: { type: 'string', required: true }
redirectUri: { type: 'string', required: true }
value: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'client', connection: 'primary' },
{
name: { type: 'string', unique: true, required: true }
userId: { type: 'string', required: true }
secret: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'user', connection: 'primary' },
{
username: { type: 'string', unique: true, required: true }
password: { type: 'string', required: true }
email: { type: 'email', required: true }
thumbnail: { type: 'string', required: true }
gender: { type: 'string', required: true }
}
)
]
cfg =
adapters:
'default': mongo
mongo: mongo
connections:
primary:
adapter: 'mongo'
host: 'localhost'
port: 27017
database: 'sublime-db'
defaults:
migrate: 'drop'
ctx = new Data.DataContext cfg
ctx.init models
.then () ->
ctx.models.user
.create
username: 'admin'
password: 'admin'
email: 'shaunfarrell@g.harvard.edu'
thumbnail: 'https://lh3.googleusercontent.com/-i2djnmHtrWw/AAAAAAAAAAI/AAAAAAAAAF8/NGlm7wio9L4/photo.jpg?sz=50'
gender: 'male'
.exec (err, user) ->
ctx.models.client
.create
userId: user.id
name: 'sublime'
secret: 'keyboard cat'
.exec () -> return
module.exports = ctx
| 20645 | Core = require 'sublime-core'
Data = require 'sublime-data'
mongo = require 'sails-mongo'
models = [
new Data.Model(
{ identity: 'token', connection: 'primary', autoPK: false },
{
userId: { type: 'string', required: true }
clientId: { type: 'string', required: true }
value: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'code', connection: 'primary' },
{
userId: { type: 'string', required: true }
clientId: { type: 'string', required: true }
redirectUri: { type: 'string', required: true }
value: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'client', connection: 'primary' },
{
name: { type: 'string', unique: true, required: true }
userId: { type: 'string', required: true }
secret: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'user', connection: 'primary' },
{
username: { type: 'string', unique: true, required: true }
password: { type: 'string', required: true }
email: { type: 'email', required: true }
thumbnail: { type: 'string', required: true }
gender: { type: 'string', required: true }
}
)
]
cfg =
adapters:
'default': mongo
mongo: mongo
connections:
primary:
adapter: 'mongo'
host: 'localhost'
port: 27017
database: 'sublime-db'
defaults:
migrate: 'drop'
ctx = new Data.DataContext cfg
ctx.init models
.then () ->
ctx.models.user
.create
username: 'admin'
password: '<PASSWORD>'
email: '<EMAIL>'
thumbnail: 'https://lh3.googleusercontent.com/-i2djnmHtrWw/AAAAAAAAAAI/AAAAAAAAAF8/NGlm7wio9L4/photo.jpg?sz=50'
gender: 'male'
.exec (err, user) ->
ctx.models.client
.create
userId: user.id
name: 'sublime'
secret: 'keyboard cat'
.exec () -> return
module.exports = ctx
| true | Core = require 'sublime-core'
Data = require 'sublime-data'
mongo = require 'sails-mongo'
models = [
new Data.Model(
{ identity: 'token', connection: 'primary', autoPK: false },
{
userId: { type: 'string', required: true }
clientId: { type: 'string', required: true }
value: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'code', connection: 'primary' },
{
userId: { type: 'string', required: true }
clientId: { type: 'string', required: true }
redirectUri: { type: 'string', required: true }
value: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'client', connection: 'primary' },
{
name: { type: 'string', unique: true, required: true }
userId: { type: 'string', required: true }
secret: { type: 'string', required: true }
}
),
new Data.Model(
{ identity: 'user', connection: 'primary' },
{
username: { type: 'string', unique: true, required: true }
password: { type: 'string', required: true }
email: { type: 'email', required: true }
thumbnail: { type: 'string', required: true }
gender: { type: 'string', required: true }
}
)
]
cfg =
adapters:
'default': mongo
mongo: mongo
connections:
primary:
adapter: 'mongo'
host: 'localhost'
port: 27017
database: 'sublime-db'
defaults:
migrate: 'drop'
ctx = new Data.DataContext cfg
ctx.init models
.then () ->
ctx.models.user
.create
username: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
thumbnail: 'https://lh3.googleusercontent.com/-i2djnmHtrWw/AAAAAAAAAAI/AAAAAAAAAF8/NGlm7wio9L4/photo.jpg?sz=50'
gender: 'male'
.exec (err, user) ->
ctx.models.client
.create
userId: user.id
name: 'sublime'
secret: 'keyboard cat'
.exec () -> return
module.exports = ctx
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9991328120231628,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-http-response-close.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
http = require("http")
requestGotEnd = false
responseGotEnd = false
server = http.createServer((req, res) ->
res.writeHead 200
res.write "a"
req.on "close", ->
console.error "request aborted"
requestGotEnd = true
return
res.on "close", ->
console.error "response aborted"
responseGotEnd = true
return
return
)
server.listen common.PORT
server.on "listening", ->
console.error "make req"
http.get
port: common.PORT
, (res) ->
console.error "got res"
res.on "data", (data) ->
console.error "destroy res"
res.destroy()
server.close()
return
return
return
process.on "exit", ->
assert.ok requestGotEnd
assert.ok responseGotEnd
return
| 31122 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
http = require("http")
requestGotEnd = false
responseGotEnd = false
server = http.createServer((req, res) ->
res.writeHead 200
res.write "a"
req.on "close", ->
console.error "request aborted"
requestGotEnd = true
return
res.on "close", ->
console.error "response aborted"
responseGotEnd = true
return
return
)
server.listen common.PORT
server.on "listening", ->
console.error "make req"
http.get
port: common.PORT
, (res) ->
console.error "got res"
res.on "data", (data) ->
console.error "destroy res"
res.destroy()
server.close()
return
return
return
process.on "exit", ->
assert.ok requestGotEnd
assert.ok responseGotEnd
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
http = require("http")
requestGotEnd = false
responseGotEnd = false
server = http.createServer((req, res) ->
res.writeHead 200
res.write "a"
req.on "close", ->
console.error "request aborted"
requestGotEnd = true
return
res.on "close", ->
console.error "response aborted"
responseGotEnd = true
return
return
)
server.listen common.PORT
server.on "listening", ->
console.error "make req"
http.get
port: common.PORT
, (res) ->
console.error "got res"
res.on "data", (data) ->
console.error "destroy res"
res.destroy()
server.close()
return
return
return
process.on "exit", ->
assert.ok requestGotEnd
assert.ok responseGotEnd
return
|
[
{
"context": "###\nAuthors: Nick Giancola (@patbenatar), Brendan Loudermilk (@bloudermilk)\n",
"end": 26,
"score": 0.9998705983161926,
"start": 13,
"tag": "NAME",
"value": "Nick Giancola"
},
{
"context": "###\nAuthors: Nick Giancola (@patbenatar), Brendan Loudermilk (@bloudermilk)\nH... | vendor/assets/javascripts/jquery.nested_attributes.coffee | teadur/registry | 29 | ###
Authors: Nick Giancola (@patbenatar), Brendan Loudermilk (@bloudermilk)
Homepage: https://github.com/patbenatar/jquery-nested_attributes
###
$ = jQuery
methods =
init: (options) ->
$el = $(@)
throw "Can't initialize more than one item at a time" if $el.length > 1
if $el.data("nestedAttributes")
throw "Can't initialize on this element more than once"
instance = new NestedAttributes($el, options)
$el.data("nestedAttributes", instance)
return $el
add: ->
$el = $(@)
unless $el.data("nestedAttributes")?
throw "You are trying to call instance methods without initializing first"
$el.data("nestedAttributes").addItem()
return $el
$.fn.nestedAttributes = (method) ->
if methods[method]?
return methods[method].apply @, Array.prototype.slice.call(arguments, 1)
else if typeof method == 'object' || !method
return methods.init.apply(@, arguments)
else
$.error("Method #{method} does not exist on jQuery.nestedAttributes")
class NestedAttributes
RELEVANT_INPUTS_SELECTOR: ":input[name][name!=\"\"]"
settings:
collectionName: false # If not provided, we will autodetect
bindAddTo: false # Required
removeOnLoadIf: false
collectIdAttributes: true
beforeAdd: false
afterAdd: false
beforeMove: false
afterMove: false
beforeDestroy: false
afterDestroy: false
destroySelector: '.destroy'
deepClone: true
$clone: null
######################
## ##
## Initialization ##
## ##
######################
constructor: ($el, options) ->
# This plugin gets called on the container
@$container = $el
# Merge default options
@options = $.extend({}, @settings, options)
# If the user provided a jQuery object to bind the "Add"
# bind it now or forever hold your peace.
@options.bindAddTo.click(@addClick) if @options.bindAddTo
# Cache all the items
@$items = @$container.children()
# If the user didn't provide a collectionName, autodetect it
unless @options.collectionName
@autodetectCollectionName()
# Initialize existing items
@$items.each (i, el) =>
$item = $(el)
# If the user wants us to attempt to collect Rail's ID attributes, do it now
# Using the default rails helpers, ID attributes will wind up right after their
# propper containers in the form.
if @options.collectIdAttributes and $item.is('input')
# Move the _id field into its proper container
$item.appendTo($item.prev())
# Remove it from the $items collection
@$items = @$items.not($item)
else
# Try to find and bind the destroy link if the user wanted one
@bindDestroy($item)
# Now that we've collected ID attributes
@hideIfAlreadyDestroyed $(item) for item in @$items
# Remove any items on load if the client implements a check and the check passes
if @options.removeOnLoadIf
@$items.each (i, el) =>
$el = $(el)
if $el.call(true, @options.removeOnLoadIf, i)
$el.remove()
########################
## ##
## Instance Methods ##
## ##
########################
autodetectCollectionName: ->
pattern = /\[(.[^\]]*)_attributes\]/
try
match = pattern.exec(@$items.first().find("#{@RELEVANT_INPUTS_SELECTOR}:first").attr('name'))[1]
if match != null
@options.collectionName = match
else
throw "Regex error"
catch error
console.log "Error detecting collection name", error
addClick: (event) =>
@addItem()
# Don't let the link do anything
event.preventDefault()
addItem: ->
# Piece together an item
newIndex = @$items.length
$newClone = @applyIndexToItem(@extractClone(), newIndex)
# Give the user a chance to make their own changes before we insert
if (@options.beforeAdd)
# Stop the add process if the callback returns false
return false if !@options.beforeAdd.call(undefined, $newClone, newIndex)
# Insert the new item after the last item
@$container.append($newClone)
# Give the user a chance to make their own changes after insertion
@options.afterAdd.call(undefined, $newClone, newIndex) if (@options.afterAdd)
# Add this item to the items list
@refreshItems()
extractClone: ->
# Are we restoring from an already created clone?
if @$restorableClone
$record = @$restorableClone
@$restorableClone = null
else
$record = @options.$clone || @$items.first()
# Make a deep clone (bound events and data)
$record = $record.clone(@options.deepClone)
@bindDestroy($record) if @options.$clone or !@options.deepClone
# Empty out the values of text inputs and selects
$record.find(':text, textarea, select').val('')
# Reset checkboxes and radios
$record.find(':checkbox, :radio').attr("checked", false)
# Empty out any hidden [id] or [_destroy] fields
$record.find('input[name$="\\[id\\]"]').remove()
$record.find('input[name$="\\[_destroy\\]"]').remove()
# Make sure it's not hidden as we return.
# It would be hidden in the case where we're duplicating an
# already removed item for its template.
return $record.show()
applyIndexToItem: ($item, index) ->
collectionName = @options.collectionName
$item.find(@RELEVANT_INPUTS_SELECTOR).each (i, el) =>
$el = $(el)
idRegExp = new RegExp("_#{collectionName}_attributes_\\d+_")
idReplacement = "_#{collectionName}_attributes_#{index}_"
nameRegExp = new RegExp("\\[#{collectionName}_attributes\\]\\[\\d+\\]")
nameReplacement = "[#{collectionName}_attributes][#{index}]"
newID = $el.attr('id').replace(idRegExp, idReplacement) if $el.attr('id')
newName = $el.attr('name').replace(nameRegExp, nameReplacement)
$el.attr
id: newID
name: newName
$item.find('label[for]').each (i, el) =>
$el = $(el)
try
forRegExp = new RegExp("_#{collectionName}_attributes_\\d+_")
forReplacement = "_#{collectionName}_attributes_#{index}_"
newFor = $el.attr('for').replace(forRegExp, forReplacement)
$el.attr('for', newFor)
catch error
console.log "Error updating label", error
return $item
hideIfAlreadyDestroyed: ($item) ->
$destroyField = $item.find("[name$='[_destroy]']")
if $destroyField.length && $destroyField.val() == "true"
@destroy $item
# Hides a item from the user and marks it for deletion in the
# DOM by setting _destroy to true if the record already exists. If it
# is a new escalation, we simple delete the item
destroyClick: (event) =>
event.preventDefault()
@destroy $(event.target).parentsUntil(@$container).last()
destroy: ($item) ->
# If you're about to delete the last one,
# cache a clone of it first so we have something to show
# the next time user hits add
@$restorableClone = @extractClone() unless @$items.length-1
index = @indexForItem($item)
itemIsNew = $item.find('input[name$="\\[id\\]"]').length == 0
if (@options.beforeDestroy)
# Stop the destroy process if the callback returns false
return false if !@options.beforeDestroy.call(undefined, $item, index, itemIsNew)
# Add a blank item row if none are visible after this deletion
@addItem() unless @$items.filter(':visible').length-1
if itemIsNew
$item.remove()
else
# Hide the item
$item.hide()
# Add the _destroy field
otherFieldName = $item.find(':input[name]:first').attr('name')
attributePosition = otherFieldName.lastIndexOf('[')
destroyFieldName = "#{otherFieldName.substring(0, attributePosition)}[_destroy]"
# First look for an existing _destroy field
$destroyField = $item.find("input[name='#{destroyFieldName}']")
# If it doesn't exist, create it
if $destroyField.length == 0
$destroyField = $("<input type=\"hidden\" name=\"#{destroyFieldName}\" />")
$item.append($destroyField)
$destroyField.val(true).change()
@options.afterDestroy.call($item, index, itemIsNew) if (@options.afterDestroy)
# Remove this item from the items list
@refreshItems()
# Rename the remaining items
@resetIndexes()
indexForItem: ($item) ->
regExp = new RegExp("\\[#{@options.collectionName}_attributes\\]\\[\\d+\\]")
name = $item.find("#{@RELEVANT_INPUTS_SELECTOR}:first").attr('name')
return parseInt(name.match(regExp)[0].split('][')[1].slice(0, -1), 10)
refreshItems: ->
@$items = @$container.children()
# Sets the proper association indices and labels to all items
# Used when removing items
resetIndexes: ->
@$items.each (i, el) =>
$el = $(el)
# Make sure this is actually a new position
oldIndex = @indexForItem($el)
return true if (i == oldIndex)
@options.beforeMove.call($el, i, oldIndex) if (@options.beforeMove)
# Change the number to the new index
@applyIndexToItem($el, i)
@options.afterMove.call($el, i, oldIndex) if (@options.afterMove)
bindDestroy: ($item) ->
$item.find(@options.destroySelector).click(@destroyClick) if (@options.destroySelector)
| 181235 | ###
Authors: <NAME> (@patbenatar), <NAME> (@bloudermilk)
Homepage: https://github.com/patbenatar/jquery-nested_attributes
###
$ = jQuery
methods =
init: (options) ->
$el = $(@)
throw "Can't initialize more than one item at a time" if $el.length > 1
if $el.data("nestedAttributes")
throw "Can't initialize on this element more than once"
instance = new NestedAttributes($el, options)
$el.data("nestedAttributes", instance)
return $el
add: ->
$el = $(@)
unless $el.data("nestedAttributes")?
throw "You are trying to call instance methods without initializing first"
$el.data("nestedAttributes").addItem()
return $el
$.fn.nestedAttributes = (method) ->
if methods[method]?
return methods[method].apply @, Array.prototype.slice.call(arguments, 1)
else if typeof method == 'object' || !method
return methods.init.apply(@, arguments)
else
$.error("Method #{method} does not exist on jQuery.nestedAttributes")
class NestedAttributes
RELEVANT_INPUTS_SELECTOR: ":input[name][name!=\"\"]"
settings:
collectionName: false # If not provided, we will autodetect
bindAddTo: false # Required
removeOnLoadIf: false
collectIdAttributes: true
beforeAdd: false
afterAdd: false
beforeMove: false
afterMove: false
beforeDestroy: false
afterDestroy: false
destroySelector: '.destroy'
deepClone: true
$clone: null
######################
## ##
## Initialization ##
## ##
######################
constructor: ($el, options) ->
# This plugin gets called on the container
@$container = $el
# Merge default options
@options = $.extend({}, @settings, options)
# If the user provided a jQuery object to bind the "Add"
# bind it now or forever hold your peace.
@options.bindAddTo.click(@addClick) if @options.bindAddTo
# Cache all the items
@$items = @$container.children()
# If the user didn't provide a collectionName, autodetect it
unless @options.collectionName
@autodetectCollectionName()
# Initialize existing items
@$items.each (i, el) =>
$item = $(el)
# If the user wants us to attempt to collect Rail's ID attributes, do it now
# Using the default rails helpers, ID attributes will wind up right after their
# propper containers in the form.
if @options.collectIdAttributes and $item.is('input')
# Move the _id field into its proper container
$item.appendTo($item.prev())
# Remove it from the $items collection
@$items = @$items.not($item)
else
# Try to find and bind the destroy link if the user wanted one
@bindDestroy($item)
# Now that we've collected ID attributes
@hideIfAlreadyDestroyed $(item) for item in @$items
# Remove any items on load if the client implements a check and the check passes
if @options.removeOnLoadIf
@$items.each (i, el) =>
$el = $(el)
if $el.call(true, @options.removeOnLoadIf, i)
$el.remove()
########################
## ##
## Instance Methods ##
## ##
########################
autodetectCollectionName: ->
pattern = /\[(.[^\]]*)_attributes\]/
try
match = pattern.exec(@$items.first().find("#{@RELEVANT_INPUTS_SELECTOR}:first").attr('name'))[1]
if match != null
@options.collectionName = match
else
throw "Regex error"
catch error
console.log "Error detecting collection name", error
addClick: (event) =>
@addItem()
# Don't let the link do anything
event.preventDefault()
addItem: ->
# Piece together an item
newIndex = @$items.length
$newClone = @applyIndexToItem(@extractClone(), newIndex)
# Give the user a chance to make their own changes before we insert
if (@options.beforeAdd)
# Stop the add process if the callback returns false
return false if !@options.beforeAdd.call(undefined, $newClone, newIndex)
# Insert the new item after the last item
@$container.append($newClone)
# Give the user a chance to make their own changes after insertion
@options.afterAdd.call(undefined, $newClone, newIndex) if (@options.afterAdd)
# Add this item to the items list
@refreshItems()
extractClone: ->
# Are we restoring from an already created clone?
if @$restorableClone
$record = @$restorableClone
@$restorableClone = null
else
$record = @options.$clone || @$items.first()
# Make a deep clone (bound events and data)
$record = $record.clone(@options.deepClone)
@bindDestroy($record) if @options.$clone or !@options.deepClone
# Empty out the values of text inputs and selects
$record.find(':text, textarea, select').val('')
# Reset checkboxes and radios
$record.find(':checkbox, :radio').attr("checked", false)
# Empty out any hidden [id] or [_destroy] fields
$record.find('input[name$="\\[id\\]"]').remove()
$record.find('input[name$="\\[_destroy\\]"]').remove()
# Make sure it's not hidden as we return.
# It would be hidden in the case where we're duplicating an
# already removed item for its template.
return $record.show()
applyIndexToItem: ($item, index) ->
collectionName = @options.collectionName
$item.find(@RELEVANT_INPUTS_SELECTOR).each (i, el) =>
$el = $(el)
idRegExp = new RegExp("_#{collectionName}_attributes_\\d+_")
idReplacement = "_#{collectionName}_attributes_#{index}_"
nameRegExp = new RegExp("\\[#{collectionName}_attributes\\]\\[\\d+\\]")
nameReplacement = "[#{collectionName}_attributes][#{index}]"
newID = $el.attr('id').replace(idRegExp, idReplacement) if $el.attr('id')
newName = $el.attr('name').replace(nameRegExp, nameReplacement)
$el.attr
id: newID
name: newName
$item.find('label[for]').each (i, el) =>
$el = $(el)
try
forRegExp = new RegExp("_#{collectionName}_attributes_\\d+_")
forReplacement = "_#{collectionName}_attributes_#{index}_"
newFor = $el.attr('for').replace(forRegExp, forReplacement)
$el.attr('for', newFor)
catch error
console.log "Error updating label", error
return $item
hideIfAlreadyDestroyed: ($item) ->
$destroyField = $item.find("[name$='[_destroy]']")
if $destroyField.length && $destroyField.val() == "true"
@destroy $item
# Hides a item from the user and marks it for deletion in the
# DOM by setting _destroy to true if the record already exists. If it
# is a new escalation, we simple delete the item
destroyClick: (event) =>
event.preventDefault()
@destroy $(event.target).parentsUntil(@$container).last()
destroy: ($item) ->
# If you're about to delete the last one,
# cache a clone of it first so we have something to show
# the next time user hits add
@$restorableClone = @extractClone() unless @$items.length-1
index = @indexForItem($item)
itemIsNew = $item.find('input[name$="\\[id\\]"]').length == 0
if (@options.beforeDestroy)
# Stop the destroy process if the callback returns false
return false if !@options.beforeDestroy.call(undefined, $item, index, itemIsNew)
# Add a blank item row if none are visible after this deletion
@addItem() unless @$items.filter(':visible').length-1
if itemIsNew
$item.remove()
else
# Hide the item
$item.hide()
# Add the _destroy field
otherFieldName = $item.find(':input[name]:first').attr('name')
attributePosition = otherFieldName.lastIndexOf('[')
destroyFieldName = "#{otherFieldName.substring(0, attributePosition)}[_destroy]"
# First look for an existing _destroy field
$destroyField = $item.find("input[name='#{destroyFieldName}']")
# If it doesn't exist, create it
if $destroyField.length == 0
$destroyField = $("<input type=\"hidden\" name=\"#{destroyFieldName}\" />")
$item.append($destroyField)
$destroyField.val(true).change()
@options.afterDestroy.call($item, index, itemIsNew) if (@options.afterDestroy)
# Remove this item from the items list
@refreshItems()
# Rename the remaining items
@resetIndexes()
indexForItem: ($item) ->
regExp = new RegExp("\\[#{@options.collectionName}_attributes\\]\\[\\d+\\]")
name = $item.find("#{@RELEVANT_INPUTS_SELECTOR}:first").attr('name')
return parseInt(name.match(regExp)[0].split('][')[1].slice(0, -1), 10)
refreshItems: ->
@$items = @$container.children()
# Sets the proper association indices and labels to all items
# Used when removing items
resetIndexes: ->
@$items.each (i, el) =>
$el = $(el)
# Make sure this is actually a new position
oldIndex = @indexForItem($el)
return true if (i == oldIndex)
@options.beforeMove.call($el, i, oldIndex) if (@options.beforeMove)
# Change the number to the new index
@applyIndexToItem($el, i)
@options.afterMove.call($el, i, oldIndex) if (@options.afterMove)
bindDestroy: ($item) ->
$item.find(@options.destroySelector).click(@destroyClick) if (@options.destroySelector)
| true | ###
Authors: PI:NAME:<NAME>END_PI (@patbenatar), PI:NAME:<NAME>END_PI (@bloudermilk)
Homepage: https://github.com/patbenatar/jquery-nested_attributes
###
$ = jQuery
methods =
init: (options) ->
$el = $(@)
throw "Can't initialize more than one item at a time" if $el.length > 1
if $el.data("nestedAttributes")
throw "Can't initialize on this element more than once"
instance = new NestedAttributes($el, options)
$el.data("nestedAttributes", instance)
return $el
add: ->
$el = $(@)
unless $el.data("nestedAttributes")?
throw "You are trying to call instance methods without initializing first"
$el.data("nestedAttributes").addItem()
return $el
$.fn.nestedAttributes = (method) ->
if methods[method]?
return methods[method].apply @, Array.prototype.slice.call(arguments, 1)
else if typeof method == 'object' || !method
return methods.init.apply(@, arguments)
else
$.error("Method #{method} does not exist on jQuery.nestedAttributes")
class NestedAttributes
RELEVANT_INPUTS_SELECTOR: ":input[name][name!=\"\"]"
settings:
collectionName: false # If not provided, we will autodetect
bindAddTo: false # Required
removeOnLoadIf: false
collectIdAttributes: true
beforeAdd: false
afterAdd: false
beforeMove: false
afterMove: false
beforeDestroy: false
afterDestroy: false
destroySelector: '.destroy'
deepClone: true
$clone: null
######################
## ##
## Initialization ##
## ##
######################
constructor: ($el, options) ->
# This plugin gets called on the container
@$container = $el
# Merge default options
@options = $.extend({}, @settings, options)
# If the user provided a jQuery object to bind the "Add"
# bind it now or forever hold your peace.
@options.bindAddTo.click(@addClick) if @options.bindAddTo
# Cache all the items
@$items = @$container.children()
# If the user didn't provide a collectionName, autodetect it
unless @options.collectionName
@autodetectCollectionName()
# Initialize existing items
@$items.each (i, el) =>
$item = $(el)
# If the user wants us to attempt to collect Rail's ID attributes, do it now
# Using the default rails helpers, ID attributes will wind up right after their
# propper containers in the form.
if @options.collectIdAttributes and $item.is('input')
# Move the _id field into its proper container
$item.appendTo($item.prev())
# Remove it from the $items collection
@$items = @$items.not($item)
else
# Try to find and bind the destroy link if the user wanted one
@bindDestroy($item)
# Now that we've collected ID attributes
@hideIfAlreadyDestroyed $(item) for item in @$items
# Remove any items on load if the client implements a check and the check passes
if @options.removeOnLoadIf
@$items.each (i, el) =>
$el = $(el)
if $el.call(true, @options.removeOnLoadIf, i)
$el.remove()
########################
## ##
## Instance Methods ##
## ##
########################
autodetectCollectionName: ->
pattern = /\[(.[^\]]*)_attributes\]/
try
match = pattern.exec(@$items.first().find("#{@RELEVANT_INPUTS_SELECTOR}:first").attr('name'))[1]
if match != null
@options.collectionName = match
else
throw "Regex error"
catch error
console.log "Error detecting collection name", error
addClick: (event) =>
@addItem()
# Don't let the link do anything
event.preventDefault()
addItem: ->
# Piece together an item
newIndex = @$items.length
$newClone = @applyIndexToItem(@extractClone(), newIndex)
# Give the user a chance to make their own changes before we insert
if (@options.beforeAdd)
# Stop the add process if the callback returns false
return false if !@options.beforeAdd.call(undefined, $newClone, newIndex)
# Insert the new item after the last item
@$container.append($newClone)
# Give the user a chance to make their own changes after insertion
@options.afterAdd.call(undefined, $newClone, newIndex) if (@options.afterAdd)
# Add this item to the items list
@refreshItems()
extractClone: ->
# Are we restoring from an already created clone?
if @$restorableClone
$record = @$restorableClone
@$restorableClone = null
else
$record = @options.$clone || @$items.first()
# Make a deep clone (bound events and data)
$record = $record.clone(@options.deepClone)
@bindDestroy($record) if @options.$clone or !@options.deepClone
# Empty out the values of text inputs and selects
$record.find(':text, textarea, select').val('')
# Reset checkboxes and radios
$record.find(':checkbox, :radio').attr("checked", false)
# Empty out any hidden [id] or [_destroy] fields
$record.find('input[name$="\\[id\\]"]').remove()
$record.find('input[name$="\\[_destroy\\]"]').remove()
# Make sure it's not hidden as we return.
# It would be hidden in the case where we're duplicating an
# already removed item for its template.
return $record.show()
applyIndexToItem: ($item, index) ->
collectionName = @options.collectionName
$item.find(@RELEVANT_INPUTS_SELECTOR).each (i, el) =>
$el = $(el)
idRegExp = new RegExp("_#{collectionName}_attributes_\\d+_")
idReplacement = "_#{collectionName}_attributes_#{index}_"
nameRegExp = new RegExp("\\[#{collectionName}_attributes\\]\\[\\d+\\]")
nameReplacement = "[#{collectionName}_attributes][#{index}]"
newID = $el.attr('id').replace(idRegExp, idReplacement) if $el.attr('id')
newName = $el.attr('name').replace(nameRegExp, nameReplacement)
$el.attr
id: newID
name: newName
$item.find('label[for]').each (i, el) =>
$el = $(el)
try
forRegExp = new RegExp("_#{collectionName}_attributes_\\d+_")
forReplacement = "_#{collectionName}_attributes_#{index}_"
newFor = $el.attr('for').replace(forRegExp, forReplacement)
$el.attr('for', newFor)
catch error
console.log "Error updating label", error
return $item
hideIfAlreadyDestroyed: ($item) ->
$destroyField = $item.find("[name$='[_destroy]']")
if $destroyField.length && $destroyField.val() == "true"
@destroy $item
# Hides a item from the user and marks it for deletion in the
# DOM by setting _destroy to true if the record already exists. If it
# is a new escalation, we simple delete the item
destroyClick: (event) =>
event.preventDefault()
@destroy $(event.target).parentsUntil(@$container).last()
destroy: ($item) ->
# If you're about to delete the last one,
# cache a clone of it first so we have something to show
# the next time user hits add
@$restorableClone = @extractClone() unless @$items.length-1
index = @indexForItem($item)
itemIsNew = $item.find('input[name$="\\[id\\]"]').length == 0
if (@options.beforeDestroy)
# Stop the destroy process if the callback returns false
return false if !@options.beforeDestroy.call(undefined, $item, index, itemIsNew)
# Add a blank item row if none are visible after this deletion
@addItem() unless @$items.filter(':visible').length-1
if itemIsNew
$item.remove()
else
# Hide the item
$item.hide()
# Add the _destroy field
otherFieldName = $item.find(':input[name]:first').attr('name')
attributePosition = otherFieldName.lastIndexOf('[')
destroyFieldName = "#{otherFieldName.substring(0, attributePosition)}[_destroy]"
# First look for an existing _destroy field
$destroyField = $item.find("input[name='#{destroyFieldName}']")
# If it doesn't exist, create it
if $destroyField.length == 0
$destroyField = $("<input type=\"hidden\" name=\"#{destroyFieldName}\" />")
$item.append($destroyField)
$destroyField.val(true).change()
@options.afterDestroy.call($item, index, itemIsNew) if (@options.afterDestroy)
# Remove this item from the items list
@refreshItems()
# Rename the remaining items
@resetIndexes()
indexForItem: ($item) ->
regExp = new RegExp("\\[#{@options.collectionName}_attributes\\]\\[\\d+\\]")
name = $item.find("#{@RELEVANT_INPUTS_SELECTOR}:first").attr('name')
return parseInt(name.match(regExp)[0].split('][')[1].slice(0, -1), 10)
refreshItems: ->
@$items = @$container.children()
# Sets the proper association indices and labels to all items
# Used when removing items
resetIndexes: ->
@$items.each (i, el) =>
$el = $(el)
# Make sure this is actually a new position
oldIndex = @indexForItem($el)
return true if (i == oldIndex)
@options.beforeMove.call($el, i, oldIndex) if (@options.beforeMove)
# Change the number to the new index
@applyIndexToItem($el, i)
@options.afterMove.call($el, i, oldIndex) if (@options.afterMove)
bindDestroy: ($item) ->
$item.find(@options.destroySelector).click(@destroyClick) if (@options.destroySelector)
|
[
{
"context": "t: poem.tiger}\n annotation2 = {id: 2, user: poem.tiger}\n annotation3 = {id: 3, tags: ['Tiger']}\n\n",
"end": 4326,
"score": 0.9939451217651367,
"start": 4316,
"tag": "USERNAME",
"value": "poem.tiger"
},
{
"context": "oteSelector\",\n \"exact... | h/static/scripts/test/view-filter-test.coffee | noscripter/h | 2 | {module, inject} = angular.mock
poem =
tiger: 'Tiger! Tiger! burning bright
In the forest of the night
What immortal hand or eye
Could frame thy fearful symmetry?'
raven: 'Once upon a midnight dreary, while I pondered, weak and weary,
Over many a quaint and curious volume of forgotten lore—
While I nodded, nearly napping, suddenly there came a tapping,
As of some one gently rapping, rapping at my chamber door.
“’Tis some visitor,” I muttered, “tapping at my chamber door—
Only this and nothing more.”'
describe 'viewFilter', ->
sandbox = null
fakeUnicode = null
viewFilter = null
before ->
angular.module('h', [])
.service('viewFilter', require('../view-filter'))
beforeEach module('h')
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeUnicode = {
fold: sinon.stub().returnsArg(0)
normalize: sinon.stub().returnsArg(0)
}
$provide.value('unicode', fakeUnicode)
return
beforeEach inject (_viewFilter_) ->
viewFilter = _viewFilter_
afterEach ->
sandbox.restore()
describe 'filter', ->
it 'normalizes the filter terms', ->
filters =
text:
terms: ['Tiger']
operator: 'and'
viewFilter.filter [], filters
assert.calledWith fakeUnicode.fold, 'tiger'
describe 'filter operators', ->
annotations = null
beforeEach ->
annotations = [
{id: 1, text: poem.tiger},
{id: 2, text: poem.raven}
]
it 'all terms must match for "and" operator', ->
filters =
text:
terms: ['Tiger', 'burning', 'bright']
operator: 'and'
result = viewFilter.filter annotations, filters
assert.equal result.length, 1
assert.equal result[0], 1
it 'only one term must match for "or" operator', ->
filters =
text:
terms: ['Tiger', 'quaint']
operator: 'or'
result = viewFilter.filter annotations, filters
assert.equal result.length, 2
describe 'fields', ->
describe 'autofalse', ->
it 'consider auto false function', ->
viewFilter.fields =
test:
autofalse: sandbox.stub().returns(true)
value: (annotation) -> return annotation.test
match: (term, value) -> return value.indexOf(term) > -1
filters =
test:
terms: ['Tiger']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.autofalse
assert.equal result.length, 0
it 'uses the value function to extract data from the annotation', ->
viewFilter.fields =
test:
autofalse: (annotation) -> return false
value: sandbox.stub().returns('test')
match: (term, value) -> return value.indexOf(term) > -1
filters =
test:
terms: ['test']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.value
assert.equal result.length, 1
it 'the match function determines the matching', ->
viewFilter.fields =
test:
autofalse: (annotation) -> return false
value: (annotation) -> return annotation.test
match: sandbox.stub().returns(false)
filters =
test:
terms: ['Tiger']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.match
assert.equal result.length, 0
viewFilter.fields.test.match.returns(true)
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.match
assert.equal result.length, 1
describe 'any field', ->
it 'finds matches across many fields', ->
annotation1 = {id: 1, text: poem.tiger}
annotation2 = {id: 2, user: poem.tiger}
annotation3 = {id: 3, tags: ['Tiger']}
annotations = [annotation1, annotation2, annotation3]
filters =
any:
terms: ['Tiger']
operator: 'and'
result = viewFilter.filter annotations, filters
assert.equal result.length, 3
it 'can find terms across different fields', ->
annotation =
id:1
text: poem.tiger
target: [
selector: [{
"type": "TextQuoteSelector",
"exact": "The Tiger by William Blake",
}]
user: "acct:poe@edgar.com"
tags: ["poem", "Blake", "Tiger"]
]
filters =
any:
terms: ['burning', 'William', 'poem', 'bright']
operator: 'and'
result = viewFilter.filter [annotation], filters
assert.equal result.length, 1
assert.equal result[0], 1
| 104046 | {module, inject} = angular.mock
poem =
tiger: 'Tiger! Tiger! burning bright
In the forest of the night
What immortal hand or eye
Could frame thy fearful symmetry?'
raven: 'Once upon a midnight dreary, while I pondered, weak and weary,
Over many a quaint and curious volume of forgotten lore—
While I nodded, nearly napping, suddenly there came a tapping,
As of some one gently rapping, rapping at my chamber door.
“’Tis some visitor,” I muttered, “tapping at my chamber door—
Only this and nothing more.”'
describe 'viewFilter', ->
sandbox = null
fakeUnicode = null
viewFilter = null
before ->
angular.module('h', [])
.service('viewFilter', require('../view-filter'))
beforeEach module('h')
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeUnicode = {
fold: sinon.stub().returnsArg(0)
normalize: sinon.stub().returnsArg(0)
}
$provide.value('unicode', fakeUnicode)
return
beforeEach inject (_viewFilter_) ->
viewFilter = _viewFilter_
afterEach ->
sandbox.restore()
describe 'filter', ->
it 'normalizes the filter terms', ->
filters =
text:
terms: ['Tiger']
operator: 'and'
viewFilter.filter [], filters
assert.calledWith fakeUnicode.fold, 'tiger'
describe 'filter operators', ->
annotations = null
beforeEach ->
annotations = [
{id: 1, text: poem.tiger},
{id: 2, text: poem.raven}
]
it 'all terms must match for "and" operator', ->
filters =
text:
terms: ['Tiger', 'burning', 'bright']
operator: 'and'
result = viewFilter.filter annotations, filters
assert.equal result.length, 1
assert.equal result[0], 1
it 'only one term must match for "or" operator', ->
filters =
text:
terms: ['Tiger', 'quaint']
operator: 'or'
result = viewFilter.filter annotations, filters
assert.equal result.length, 2
describe 'fields', ->
describe 'autofalse', ->
it 'consider auto false function', ->
viewFilter.fields =
test:
autofalse: sandbox.stub().returns(true)
value: (annotation) -> return annotation.test
match: (term, value) -> return value.indexOf(term) > -1
filters =
test:
terms: ['Tiger']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.autofalse
assert.equal result.length, 0
it 'uses the value function to extract data from the annotation', ->
viewFilter.fields =
test:
autofalse: (annotation) -> return false
value: sandbox.stub().returns('test')
match: (term, value) -> return value.indexOf(term) > -1
filters =
test:
terms: ['test']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.value
assert.equal result.length, 1
it 'the match function determines the matching', ->
viewFilter.fields =
test:
autofalse: (annotation) -> return false
value: (annotation) -> return annotation.test
match: sandbox.stub().returns(false)
filters =
test:
terms: ['Tiger']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.match
assert.equal result.length, 0
viewFilter.fields.test.match.returns(true)
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.match
assert.equal result.length, 1
describe 'any field', ->
it 'finds matches across many fields', ->
annotation1 = {id: 1, text: poem.tiger}
annotation2 = {id: 2, user: poem.tiger}
annotation3 = {id: 3, tags: ['Tiger']}
annotations = [annotation1, annotation2, annotation3]
filters =
any:
terms: ['Tiger']
operator: 'and'
result = viewFilter.filter annotations, filters
assert.equal result.length, 3
it 'can find terms across different fields', ->
annotation =
id:1
text: poem.tiger
target: [
selector: [{
"type": "TextQuoteSelector",
"exact": "The Tiger by <NAME>",
}]
user: "acct:<EMAIL>"
tags: ["poem", "Blake", "Tiger"]
]
filters =
any:
terms: ['burning', '<NAME>', 'poem', 'bright']
operator: 'and'
result = viewFilter.filter [annotation], filters
assert.equal result.length, 1
assert.equal result[0], 1
| true | {module, inject} = angular.mock
poem =
tiger: 'Tiger! Tiger! burning bright
In the forest of the night
What immortal hand or eye
Could frame thy fearful symmetry?'
raven: 'Once upon a midnight dreary, while I pondered, weak and weary,
Over many a quaint and curious volume of forgotten lore—
While I nodded, nearly napping, suddenly there came a tapping,
As of some one gently rapping, rapping at my chamber door.
“’Tis some visitor,” I muttered, “tapping at my chamber door—
Only this and nothing more.”'
describe 'viewFilter', ->
sandbox = null
fakeUnicode = null
viewFilter = null
before ->
angular.module('h', [])
.service('viewFilter', require('../view-filter'))
beforeEach module('h')
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeUnicode = {
fold: sinon.stub().returnsArg(0)
normalize: sinon.stub().returnsArg(0)
}
$provide.value('unicode', fakeUnicode)
return
beforeEach inject (_viewFilter_) ->
viewFilter = _viewFilter_
afterEach ->
sandbox.restore()
describe 'filter', ->
it 'normalizes the filter terms', ->
filters =
text:
terms: ['Tiger']
operator: 'and'
viewFilter.filter [], filters
assert.calledWith fakeUnicode.fold, 'tiger'
describe 'filter operators', ->
annotations = null
beforeEach ->
annotations = [
{id: 1, text: poem.tiger},
{id: 2, text: poem.raven}
]
it 'all terms must match for "and" operator', ->
filters =
text:
terms: ['Tiger', 'burning', 'bright']
operator: 'and'
result = viewFilter.filter annotations, filters
assert.equal result.length, 1
assert.equal result[0], 1
it 'only one term must match for "or" operator', ->
filters =
text:
terms: ['Tiger', 'quaint']
operator: 'or'
result = viewFilter.filter annotations, filters
assert.equal result.length, 2
describe 'fields', ->
describe 'autofalse', ->
it 'consider auto false function', ->
viewFilter.fields =
test:
autofalse: sandbox.stub().returns(true)
value: (annotation) -> return annotation.test
match: (term, value) -> return value.indexOf(term) > -1
filters =
test:
terms: ['Tiger']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.autofalse
assert.equal result.length, 0
it 'uses the value function to extract data from the annotation', ->
viewFilter.fields =
test:
autofalse: (annotation) -> return false
value: sandbox.stub().returns('test')
match: (term, value) -> return value.indexOf(term) > -1
filters =
test:
terms: ['test']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.value
assert.equal result.length, 1
it 'the match function determines the matching', ->
viewFilter.fields =
test:
autofalse: (annotation) -> return false
value: (annotation) -> return annotation.test
match: sandbox.stub().returns(false)
filters =
test:
terms: ['Tiger']
operator: 'and'
annotations = [{id: 1, test: poem.tiger}]
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.match
assert.equal result.length, 0
viewFilter.fields.test.match.returns(true)
result = viewFilter.filter annotations, filters
assert.called viewFilter.fields.test.match
assert.equal result.length, 1
describe 'any field', ->
it 'finds matches across many fields', ->
annotation1 = {id: 1, text: poem.tiger}
annotation2 = {id: 2, user: poem.tiger}
annotation3 = {id: 3, tags: ['Tiger']}
annotations = [annotation1, annotation2, annotation3]
filters =
any:
terms: ['Tiger']
operator: 'and'
result = viewFilter.filter annotations, filters
assert.equal result.length, 3
it 'can find terms across different fields', ->
annotation =
id:1
text: poem.tiger
target: [
selector: [{
"type": "TextQuoteSelector",
"exact": "The Tiger by PI:NAME:<NAME>END_PI",
}]
user: "acct:PI:EMAIL:<EMAIL>END_PI"
tags: ["poem", "Blake", "Tiger"]
]
filters =
any:
terms: ['burning', 'PI:NAME:<NAME>END_PI', 'poem', 'bright']
operator: 'and'
result = viewFilter.filter [annotation], filters
assert.equal result.length, 1
assert.equal result[0], 1
|
[
{
"context": "\n url: '/:title'\n views:\n 'content@courses.show':\n controller: 'ContentCtrl'\n ",
"end": 766,
"score": 0.9971720576286316,
"start": 746,
"tag": "EMAIL",
"value": "content@courses.show"
}
] | client/app/src/routes.coffee | codyseibert/webdevpro | 1 | module.exports = [
'$stateProvider'
'$urlRouterProvider'
'$locationProvider'
(
$stateProvider
$urlRouterProvider
$locationProvider
) ->
$urlRouterProvider.otherwise '/courses'
$locationProvider.html5Mode enabled: true, requireBase: false
$locationProvider.hashPrefix '!'
$stateProvider
.state 'courses',
url: '/courses'
views:
'main':
controller: 'CoursesCtrl'
templateUrl: 'courses/courses.html'
.state 'courses.show',
url: '/:shortName'
views:
'main@':
controller: 'MainCtrl'
templateUrl: 'main/main.html'
.state 'courses.show.page',
url: '/:title'
views:
'content@courses.show':
controller: 'ContentCtrl'
templateUrl: 'content/content.html'
.state 'tos',
url: '/tos'
views:
'main':
controller: 'TOSCtrl'
templateUrl: 'tos/tos.html'
.state 'pp',
url: '/pp'
views:
'main':
controller: 'PPCtrl'
templateUrl: 'pp/pp.html'
return this
]
| 211808 | module.exports = [
'$stateProvider'
'$urlRouterProvider'
'$locationProvider'
(
$stateProvider
$urlRouterProvider
$locationProvider
) ->
$urlRouterProvider.otherwise '/courses'
$locationProvider.html5Mode enabled: true, requireBase: false
$locationProvider.hashPrefix '!'
$stateProvider
.state 'courses',
url: '/courses'
views:
'main':
controller: 'CoursesCtrl'
templateUrl: 'courses/courses.html'
.state 'courses.show',
url: '/:shortName'
views:
'main@':
controller: 'MainCtrl'
templateUrl: 'main/main.html'
.state 'courses.show.page',
url: '/:title'
views:
'<EMAIL>':
controller: 'ContentCtrl'
templateUrl: 'content/content.html'
.state 'tos',
url: '/tos'
views:
'main':
controller: 'TOSCtrl'
templateUrl: 'tos/tos.html'
.state 'pp',
url: '/pp'
views:
'main':
controller: 'PPCtrl'
templateUrl: 'pp/pp.html'
return this
]
| true | module.exports = [
'$stateProvider'
'$urlRouterProvider'
'$locationProvider'
(
$stateProvider
$urlRouterProvider
$locationProvider
) ->
$urlRouterProvider.otherwise '/courses'
$locationProvider.html5Mode enabled: true, requireBase: false
$locationProvider.hashPrefix '!'
$stateProvider
.state 'courses',
url: '/courses'
views:
'main':
controller: 'CoursesCtrl'
templateUrl: 'courses/courses.html'
.state 'courses.show',
url: '/:shortName'
views:
'main@':
controller: 'MainCtrl'
templateUrl: 'main/main.html'
.state 'courses.show.page',
url: '/:title'
views:
'PI:EMAIL:<EMAIL>END_PI':
controller: 'ContentCtrl'
templateUrl: 'content/content.html'
.state 'tos',
url: '/tos'
views:
'main':
controller: 'TOSCtrl'
templateUrl: 'tos/tos.html'
.state 'pp',
url: '/pp'
views:
'main':
controller: 'PPCtrl'
templateUrl: 'pp/pp.html'
return this
]
|
[
{
"context": "# Droplet Treewalker framework.\n#\n# Copyright (c) Anthony Bau (dab1998@gmail.com)\n# MIT License\nhelper = requir",
"end": 61,
"score": 0.9998647570610046,
"start": 50,
"tag": "NAME",
"value": "Anthony Bau"
},
{
"context": "ewalker framework.\n#\n# Copyright (c) Antho... | src/treewalk.coffee | sanyaade-teachings/spresensedroplet | 0 | # Droplet Treewalker framework.
#
# Copyright (c) Anthony Bau (dab1998@gmail.com)
# MIT License
helper = require './helper.coffee'
model = require './model.coffee'
parser = require './parser.coffee'
Graph = require 'node-dijkstra'
EMPTY_OBJECT = {}
exports.createTreewalkParser = (parse, config, root) ->
class TreewalkParser extends parser.Parser
constructor: (@text, @opts = {}) ->
super
@lines = @text.split '\n'
isComment: (text) ->
if config?.isComment?
return config.isComment(text)
else
return false
handleButton: ->
if config.handleButton?
config.handleButton.apply @, arguments
handleAcceptance: ->
if config.handleAcceptance?
config.handleAcceptance.apply @, arguments
else
null
parseComment: (text) ->
return config.parseComment text
preparse: (context = root) -> parse(context, @text)
# Parse text into tree if necessary, then apply markup starting at the root.
markRoot: (context = root, cachedParse = null) ->
if cachedParse?
parseTree = cachedParse
else
parseTree = parse(context, @text)
# console.log("UNMARKED DATA: " + helper.noCycleStringify(parseTree))
# Parse
@mark parseTree, '', 0
guessPrefix: (bounds) ->
if bounds.start.line is bounds.end.line
return ' '
else
line = @lines[bounds.start.line + 1]
return line[0...line.length - line.trimLeft().length]
# Format rule to {type: rule} format; includes calling customized functions in configurations.
applyRule: (node) ->
if node._cachedApplication?
return node._cachedApplication
rule = config.RULES[node.type]
if rule instanceof Function
rule = rule(node, @opts)
if 'string' is typeof rule
rule = {type: rule}
node._cachedApplication = rule
return rule
# Detect the type of a node and return it.
det: (node) ->
if node.type of config.RULES
return @applyRule(node).type
return 'block'
getButtons: (node) ->
if node.type of config.RULES
return @applyRule(node).buttons ? EMPTY_OBJECT
return EMPTY_OBJECT
# Detect type type of a node and return it, but override "blockified" nodes to type "block"
detNode: (node) -> if node.blockified then 'block' else @det(node)
getColor: (node) ->
color = config.COLOR_CALLBACK?(@opts, node)
if color?
color = color
else if node.type of config.COLOR_RULES
color = config.COLOR_RULES[node.type]
else
color = 'comment'
if @opts.categories? and color of @opts.categories
return @opts.categories[color]
else if color of config.COLOR_DEFAULTS
return config.COLOR_DEFAULTS[color]
else
return color
getShape: (node, rules) ->
shape = config.SHAPE_CALLBACK?(@opts, node)
if shape?
return shape
else if node.type of config.SHAPE_RULES
return config.SHAPE_RULES[node.type]
else
return helper.ANY_DROP
getNodeContext: (node, wrap) ->
if wrap?
new parser.PreNodeContext(node.type,
helper.clipLines(@lines, wrap.bounds.start, node.bounds.start).length,
helper.clipLines(@lines, node.bounds.end, wrap.bounds.end).length
)
else
return new parser.PreNodeContext node.type, 0, 0
# Mark-up a node and its children as appropriate.
mark: (node, prefix, depth, pass, rules, context, wrap) ->
unless pass
context = node.parent
while context? and @detNode(context) in ['skip', 'parens']
context = context.parent
rules ?= []
rules = rules.slice 0
unless wrap?
rules.push node.type
# Check to see if this AST type is part of the special empty strings map.
# If so, check to see if it is the special empty string for its type,
# and null it out if it is.
#
# TODO this may be a place where we need to optimize performance.
if context? and @detNode(context) is 'block' and config.EMPTY_STRINGS? and
node.type of config.EMPTY_STRINGS and helper.clipLines(@lines, node.bounds.start, node.bounds.end) is config.EMPTY_STRINGS[node.type]
@addSocket
empty: config.EMPTY_STRINGS[node.type]
bounds: node.bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@flagToRemove node.bounds, depth + 1
# Pass through to child if single-child
else if node.children.length is 1 and @detNode(node) not in ['indent', 'buttonContainer']
@mark node.children[0], prefix, depth, true, rules, context, wrap
else if node.children.length > 0
switch @detNode node
when 'block'
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
if context? and @detNode(context) in ['block', 'buttonContainer']
@addSocket
empty: config.EMPTY_STRINGS?[rules[0]] ? config.empty
bounds: bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@addBlock
bounds: bounds
depth: depth + 1
color: @getColor node
shape: @getShape node
buttons: @getButtons node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
when 'buttonContainer'
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
@addButtonContainer
bounds: bounds
depth: depth + 1
parseContext: rules[0]
buttons: @getButtons(node)
color: @getColor node
shape: @getShape node
parseContext: rules[rules.length - 1]
when 'parens'
# Parens are assumed to wrap the only child that has children
child = null; ok = true
for el, i in node.children
if el.children.length > 0
if child?
ok = false
break
else
child = el
if ok
@mark child, prefix, depth, true, rules, context, wrap ? node
return
else
node.blockified = true
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
if context? and @detNode(context) in ['block', 'buttonContainer']
@addSocket
bounds: bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@addBlock
bounds: bounds
depth: depth + 1
color: @getColor node
buttons: @getButtons node
shape: @getShape node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
when 'indent'
# A lone indent needs to be wrapped in a block.
if @det(context) isnt 'block'
@addBlock
bounds: node.bounds
depth: depth
color: @getColor node
shape: @getShape node
buttons: @getButtons node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
depth += 1
start = origin = node.children[0].bounds.start
for child, i in node.children
if child.children.length > 0
break
else unless helper.clipLines(@lines, origin, child.bounds.end).trim().length is 0 or i is node.children.length - 1
start = child.bounds.end
if @lines[start.line][...start.column].trim().length is 0
start = {
line: start.line - 1
column: @lines[start.line - 1].length
}
end = node.children[node.children.length - 1].bounds.end
for child, i in node.children by -1
if child.children.length > 0
end = child.bounds.end
break
else unless i is 0
end = child.bounds.start
if end.line < @lines.length and @lines[end.line][...end.column].trim().length is 0
end = {
line: end.line - 1
column: @lines[end.line - 1].length
}
bounds = {
start: start
end: end
}
oldPrefix = prefix
prefix = @guessPrefix bounds
@addIndent
bounds: bounds
depth: depth
prefix: prefix[oldPrefix.length...prefix.length]
indentContext: @applyRule(node).indentContext
when 'socket'
if context? and @detNode(context) in ['block', 'buttonContainer']
socketResult = config.SHOULD_SOCKET(@opts, node)
if ((not config.SHOULD_SOCKET?) or socketResult is true)
@addSocket
empty: config.EMPTY_STRINGS?[node.type] ? config.empty
bounds: node.bounds
depth: depth
parseContext: rules[0]
dropdown: config.DROPDOWNS?[rules[0]] ? null
if config.EMPTY_STRINGS? and not @opts.preserveEmpty and
helper.clipLines(@lines, node.bounds.start, node.bounds.end) is (config.EMPTY_STRINGS[node.type] ? config.empty)
@flagToRemove node.bounds, depth + 1
else if socketResult isnt false and socketResult.type is 'locked'
@addLockedSocket
bounds: node.bounds
depth: depth
dropdown: socketResult.dropdown
parseContext: rules[0]
for child in node.children
@mark child, prefix, depth + 2, false
else if context? and @detNode(context) in ['block', 'buttonContainer']
if @det(node) is 'socket'
socketResult = config.SHOULD_SOCKET(@opts, node)
if ((not config.SHOULD_SOCKET?) or socketResult is true)
@addSocket
empty: config.EMPTY_STRINGS?[node.type] ? config.empty
bounds: node.bounds
depth: depth
parseContext: rules[0]
dropdown: config.DROPDOWNS?[rules[0]] ? null
if config.EMPTY_STRINGS? and not @opts.preserveEmpty and
helper.clipLines(@lines, node.bounds.start, node.bounds.end) is (config.EMPTY_STRINGS[node.type] ? config.empty)
@flagToRemove node.bounds, depth + 1
else if socketResult isnt false and socketResult.type is 'locked'
@addLockedSocket
bounds: node.bounds
depth: depth
dropdown: socketResult.dropdown
parseContext: rules[0]
if config.droppabilityGraph?
# DroppabilityGraph contains all the rules for what things can be
# other things (strictly) in the grammar. For instance a * b is a multiplicativeExpression in C,
# but can also play the role of an additiveExpression, or an expression. In this case,
# the graph would contain edges pointing from expression to additiveExpression, and additiveExpression to multiplicativeExpression.
#
# This allows us to do a graph search to determine whether A can, transitively, be dropped in B.
droppabilityGraph = new Graph(config.droppabilityGraph)
# parenGraph is DroppabilityGraph with paren rules added.
#
# Paren edges point from things like primaryExpression -> expression, indicating that you can
# from a primaryExpression from an expression by adding parentheses. In this case, it would be adding '(' and ')'.
#
# When we do paren wrapping, we walk through the graph and add any necessary parentheses to get from our AST context
# to the desintation.
parenGraph = new Graph(config.parenGraph)
TreewalkParser.drop = (block, context, pred) ->
if block.parseContext is '__comment__' and context.type in ['indent', 'document']
return helper.ENCOURAGE
else if context.parseContext is '__comment__'
return helper.DISCOURAGE
parseContext = context.indentContext ? context.parseContext
if helper.dfs(parenGraph, parseContext, block.nodeContext.type)
return helper.ENCOURAGE
else
return helper.FORBID
TreewalkParser.parens = (leading, trailing, node, context) ->
# Comments never get paren-wrapped
if context is null or node.parseContext is '__comment__' or context.parseContext is '__comment__'
return node.parseContext
parseContext = context.indentContext ? context.parseContext
# Check to see if we can unwrap all our parentheses
if helper.dfs(droppabilityGraph, parseContext, node.nodeContext.type)
leading node.nodeContext.prefix
trailing node.nodeContext.suffix
return node.nodeContext.type
# Otherwise, for performance reasons,
# check to see if we can drop without modifying our parentheses
if node.parseContext isnt node.nodeContext and helper.dfs(droppabilityGraph, parseContext, node.parseContext)
return node.parseContext
# Otherwise, do a full paren-wrap traversal. We find the shortest rule-inheritance path
# from the bottom-most type of the block to the top-most type of the socket, applying
# any paren rules we encounter along the way.
else
path = parenGraph.shortestPath(parseContext, node.nodeContext.type, {reverse: true})
leading node.nodeContext.prefix
trailing node.nodeContext.suffix
for element, i in path when i > 0
if config.PAREN_RULES[path[i]]?[path[i - 1]]?
config.PAREN_RULES[path[i]][path[i - 1]](leading, trailing, node, context)
node.parseContext = path[i]
return node.parseContext
TreewalkParser.getParenCandidates = (context) ->
result = []
for dest, sources of config.PAREN_RULES
if helper.dfs(parenGraph, context, dest)
for source of sources when source not in result
result.push source
return result
else if config.drop?
TreewalkParser.drop = config.drop
TreewalkParser.parens = config.parens ? ->
TreewalkParser.stringFixer = config.stringFixer
TreewalkParser.rootContext = config.rootContext
TreewalkParser.getDefaultSelectionRange = config.getDefaultSelectionRange
TreewalkParser.empty = config.empty
TreewalkParser.lockedSocketCallback = config.lockedSocketCallback
return TreewalkParser
| 202545 | # Droplet Treewalker framework.
#
# Copyright (c) <NAME> (<EMAIL>)
# MIT License
helper = require './helper.coffee'
model = require './model.coffee'
parser = require './parser.coffee'
Graph = require 'node-dijkstra'
EMPTY_OBJECT = {}
exports.createTreewalkParser = (parse, config, root) ->
class TreewalkParser extends parser.Parser
constructor: (@text, @opts = {}) ->
super
@lines = @text.split '\n'
isComment: (text) ->
if config?.isComment?
return config.isComment(text)
else
return false
handleButton: ->
if config.handleButton?
config.handleButton.apply @, arguments
handleAcceptance: ->
if config.handleAcceptance?
config.handleAcceptance.apply @, arguments
else
null
parseComment: (text) ->
return config.parseComment text
preparse: (context = root) -> parse(context, @text)
# Parse text into tree if necessary, then apply markup starting at the root.
markRoot: (context = root, cachedParse = null) ->
if cachedParse?
parseTree = cachedParse
else
parseTree = parse(context, @text)
# console.log("UNMARKED DATA: " + helper.noCycleStringify(parseTree))
# Parse
@mark parseTree, '', 0
guessPrefix: (bounds) ->
if bounds.start.line is bounds.end.line
return ' '
else
line = @lines[bounds.start.line + 1]
return line[0...line.length - line.trimLeft().length]
# Format rule to {type: rule} format; includes calling customized functions in configurations.
applyRule: (node) ->
if node._cachedApplication?
return node._cachedApplication
rule = config.RULES[node.type]
if rule instanceof Function
rule = rule(node, @opts)
if 'string' is typeof rule
rule = {type: rule}
node._cachedApplication = rule
return rule
# Detect the type of a node and return it.
det: (node) ->
if node.type of config.RULES
return @applyRule(node).type
return 'block'
getButtons: (node) ->
if node.type of config.RULES
return @applyRule(node).buttons ? EMPTY_OBJECT
return EMPTY_OBJECT
# Detect type type of a node and return it, but override "blockified" nodes to type "block"
detNode: (node) -> if node.blockified then 'block' else @det(node)
getColor: (node) ->
color = config.COLOR_CALLBACK?(@opts, node)
if color?
color = color
else if node.type of config.COLOR_RULES
color = config.COLOR_RULES[node.type]
else
color = 'comment'
if @opts.categories? and color of @opts.categories
return @opts.categories[color]
else if color of config.COLOR_DEFAULTS
return config.COLOR_DEFAULTS[color]
else
return color
getShape: (node, rules) ->
shape = config.SHAPE_CALLBACK?(@opts, node)
if shape?
return shape
else if node.type of config.SHAPE_RULES
return config.SHAPE_RULES[node.type]
else
return helper.ANY_DROP
getNodeContext: (node, wrap) ->
if wrap?
new parser.PreNodeContext(node.type,
helper.clipLines(@lines, wrap.bounds.start, node.bounds.start).length,
helper.clipLines(@lines, node.bounds.end, wrap.bounds.end).length
)
else
return new parser.PreNodeContext node.type, 0, 0
# Mark-up a node and its children as appropriate.
mark: (node, prefix, depth, pass, rules, context, wrap) ->
unless pass
context = node.parent
while context? and @detNode(context) in ['skip', 'parens']
context = context.parent
rules ?= []
rules = rules.slice 0
unless wrap?
rules.push node.type
# Check to see if this AST type is part of the special empty strings map.
# If so, check to see if it is the special empty string for its type,
# and null it out if it is.
#
# TODO this may be a place where we need to optimize performance.
if context? and @detNode(context) is 'block' and config.EMPTY_STRINGS? and
node.type of config.EMPTY_STRINGS and helper.clipLines(@lines, node.bounds.start, node.bounds.end) is config.EMPTY_STRINGS[node.type]
@addSocket
empty: config.EMPTY_STRINGS[node.type]
bounds: node.bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@flagToRemove node.bounds, depth + 1
# Pass through to child if single-child
else if node.children.length is 1 and @detNode(node) not in ['indent', 'buttonContainer']
@mark node.children[0], prefix, depth, true, rules, context, wrap
else if node.children.length > 0
switch @detNode node
when 'block'
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
if context? and @detNode(context) in ['block', 'buttonContainer']
@addSocket
empty: config.EMPTY_STRINGS?[rules[0]] ? config.empty
bounds: bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@addBlock
bounds: bounds
depth: depth + 1
color: @getColor node
shape: @getShape node
buttons: @getButtons node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
when 'buttonContainer'
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
@addButtonContainer
bounds: bounds
depth: depth + 1
parseContext: rules[0]
buttons: @getButtons(node)
color: @getColor node
shape: @getShape node
parseContext: rules[rules.length - 1]
when 'parens'
# Parens are assumed to wrap the only child that has children
child = null; ok = true
for el, i in node.children
if el.children.length > 0
if child?
ok = false
break
else
child = el
if ok
@mark child, prefix, depth, true, rules, context, wrap ? node
return
else
node.blockified = true
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
if context? and @detNode(context) in ['block', 'buttonContainer']
@addSocket
bounds: bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@addBlock
bounds: bounds
depth: depth + 1
color: @getColor node
buttons: @getButtons node
shape: @getShape node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
when 'indent'
# A lone indent needs to be wrapped in a block.
if @det(context) isnt 'block'
@addBlock
bounds: node.bounds
depth: depth
color: @getColor node
shape: @getShape node
buttons: @getButtons node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
depth += 1
start = origin = node.children[0].bounds.start
for child, i in node.children
if child.children.length > 0
break
else unless helper.clipLines(@lines, origin, child.bounds.end).trim().length is 0 or i is node.children.length - 1
start = child.bounds.end
if @lines[start.line][...start.column].trim().length is 0
start = {
line: start.line - 1
column: @lines[start.line - 1].length
}
end = node.children[node.children.length - 1].bounds.end
for child, i in node.children by -1
if child.children.length > 0
end = child.bounds.end
break
else unless i is 0
end = child.bounds.start
if end.line < @lines.length and @lines[end.line][...end.column].trim().length is 0
end = {
line: end.line - 1
column: @lines[end.line - 1].length
}
bounds = {
start: start
end: end
}
oldPrefix = prefix
prefix = @guessPrefix bounds
@addIndent
bounds: bounds
depth: depth
prefix: prefix[oldPrefix.length...prefix.length]
indentContext: @applyRule(node).indentContext
when 'socket'
if context? and @detNode(context) in ['block', 'buttonContainer']
socketResult = config.SHOULD_SOCKET(@opts, node)
if ((not config.SHOULD_SOCKET?) or socketResult is true)
@addSocket
empty: config.EMPTY_STRINGS?[node.type] ? config.empty
bounds: node.bounds
depth: depth
parseContext: rules[0]
dropdown: config.DROPDOWNS?[rules[0]] ? null
if config.EMPTY_STRINGS? and not @opts.preserveEmpty and
helper.clipLines(@lines, node.bounds.start, node.bounds.end) is (config.EMPTY_STRINGS[node.type] ? config.empty)
@flagToRemove node.bounds, depth + 1
else if socketResult isnt false and socketResult.type is 'locked'
@addLockedSocket
bounds: node.bounds
depth: depth
dropdown: socketResult.dropdown
parseContext: rules[0]
for child in node.children
@mark child, prefix, depth + 2, false
else if context? and @detNode(context) in ['block', 'buttonContainer']
if @det(node) is 'socket'
socketResult = config.SHOULD_SOCKET(@opts, node)
if ((not config.SHOULD_SOCKET?) or socketResult is true)
@addSocket
empty: config.EMPTY_STRINGS?[node.type] ? config.empty
bounds: node.bounds
depth: depth
parseContext: rules[0]
dropdown: config.DROPDOWNS?[rules[0]] ? null
if config.EMPTY_STRINGS? and not @opts.preserveEmpty and
helper.clipLines(@lines, node.bounds.start, node.bounds.end) is (config.EMPTY_STRINGS[node.type] ? config.empty)
@flagToRemove node.bounds, depth + 1
else if socketResult isnt false and socketResult.type is 'locked'
@addLockedSocket
bounds: node.bounds
depth: depth
dropdown: socketResult.dropdown
parseContext: rules[0]
if config.droppabilityGraph?
# DroppabilityGraph contains all the rules for what things can be
# other things (strictly) in the grammar. For instance a * b is a multiplicativeExpression in C,
# but can also play the role of an additiveExpression, or an expression. In this case,
# the graph would contain edges pointing from expression to additiveExpression, and additiveExpression to multiplicativeExpression.
#
# This allows us to do a graph search to determine whether A can, transitively, be dropped in B.
droppabilityGraph = new Graph(config.droppabilityGraph)
# parenGraph is DroppabilityGraph with paren rules added.
#
# Paren edges point from things like primaryExpression -> expression, indicating that you can
# from a primaryExpression from an expression by adding parentheses. In this case, it would be adding '(' and ')'.
#
# When we do paren wrapping, we walk through the graph and add any necessary parentheses to get from our AST context
# to the desintation.
parenGraph = new Graph(config.parenGraph)
TreewalkParser.drop = (block, context, pred) ->
if block.parseContext is '__comment__' and context.type in ['indent', 'document']
return helper.ENCOURAGE
else if context.parseContext is '__comment__'
return helper.DISCOURAGE
parseContext = context.indentContext ? context.parseContext
if helper.dfs(parenGraph, parseContext, block.nodeContext.type)
return helper.ENCOURAGE
else
return helper.FORBID
TreewalkParser.parens = (leading, trailing, node, context) ->
# Comments never get paren-wrapped
if context is null or node.parseContext is '__comment__' or context.parseContext is '__comment__'
return node.parseContext
parseContext = context.indentContext ? context.parseContext
# Check to see if we can unwrap all our parentheses
if helper.dfs(droppabilityGraph, parseContext, node.nodeContext.type)
leading node.nodeContext.prefix
trailing node.nodeContext.suffix
return node.nodeContext.type
# Otherwise, for performance reasons,
# check to see if we can drop without modifying our parentheses
if node.parseContext isnt node.nodeContext and helper.dfs(droppabilityGraph, parseContext, node.parseContext)
return node.parseContext
# Otherwise, do a full paren-wrap traversal. We find the shortest rule-inheritance path
# from the bottom-most type of the block to the top-most type of the socket, applying
# any paren rules we encounter along the way.
else
path = parenGraph.shortestPath(parseContext, node.nodeContext.type, {reverse: true})
leading node.nodeContext.prefix
trailing node.nodeContext.suffix
for element, i in path when i > 0
if config.PAREN_RULES[path[i]]?[path[i - 1]]?
config.PAREN_RULES[path[i]][path[i - 1]](leading, trailing, node, context)
node.parseContext = path[i]
return node.parseContext
TreewalkParser.getParenCandidates = (context) ->
result = []
for dest, sources of config.PAREN_RULES
if helper.dfs(parenGraph, context, dest)
for source of sources when source not in result
result.push source
return result
else if config.drop?
TreewalkParser.drop = config.drop
TreewalkParser.parens = config.parens ? ->
TreewalkParser.stringFixer = config.stringFixer
TreewalkParser.rootContext = config.rootContext
TreewalkParser.getDefaultSelectionRange = config.getDefaultSelectionRange
TreewalkParser.empty = config.empty
TreewalkParser.lockedSocketCallback = config.lockedSocketCallback
return TreewalkParser
| true | # Droplet Treewalker framework.
#
# Copyright (c) PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# MIT License
helper = require './helper.coffee'
model = require './model.coffee'
parser = require './parser.coffee'
Graph = require 'node-dijkstra'
EMPTY_OBJECT = {}
exports.createTreewalkParser = (parse, config, root) ->
class TreewalkParser extends parser.Parser
constructor: (@text, @opts = {}) ->
super
@lines = @text.split '\n'
isComment: (text) ->
if config?.isComment?
return config.isComment(text)
else
return false
handleButton: ->
if config.handleButton?
config.handleButton.apply @, arguments
handleAcceptance: ->
if config.handleAcceptance?
config.handleAcceptance.apply @, arguments
else
null
parseComment: (text) ->
return config.parseComment text
preparse: (context = root) -> parse(context, @text)
# Parse text into tree if necessary, then apply markup starting at the root.
markRoot: (context = root, cachedParse = null) ->
if cachedParse?
parseTree = cachedParse
else
parseTree = parse(context, @text)
# console.log("UNMARKED DATA: " + helper.noCycleStringify(parseTree))
# Parse
@mark parseTree, '', 0
guessPrefix: (bounds) ->
if bounds.start.line is bounds.end.line
return ' '
else
line = @lines[bounds.start.line + 1]
return line[0...line.length - line.trimLeft().length]
# Format rule to {type: rule} format; includes calling customized functions in configurations.
applyRule: (node) ->
if node._cachedApplication?
return node._cachedApplication
rule = config.RULES[node.type]
if rule instanceof Function
rule = rule(node, @opts)
if 'string' is typeof rule
rule = {type: rule}
node._cachedApplication = rule
return rule
# Detect the type of a node and return it.
det: (node) ->
if node.type of config.RULES
return @applyRule(node).type
return 'block'
getButtons: (node) ->
if node.type of config.RULES
return @applyRule(node).buttons ? EMPTY_OBJECT
return EMPTY_OBJECT
# Detect type type of a node and return it, but override "blockified" nodes to type "block"
detNode: (node) -> if node.blockified then 'block' else @det(node)
getColor: (node) ->
color = config.COLOR_CALLBACK?(@opts, node)
if color?
color = color
else if node.type of config.COLOR_RULES
color = config.COLOR_RULES[node.type]
else
color = 'comment'
if @opts.categories? and color of @opts.categories
return @opts.categories[color]
else if color of config.COLOR_DEFAULTS
return config.COLOR_DEFAULTS[color]
else
return color
getShape: (node, rules) ->
shape = config.SHAPE_CALLBACK?(@opts, node)
if shape?
return shape
else if node.type of config.SHAPE_RULES
return config.SHAPE_RULES[node.type]
else
return helper.ANY_DROP
getNodeContext: (node, wrap) ->
if wrap?
new parser.PreNodeContext(node.type,
helper.clipLines(@lines, wrap.bounds.start, node.bounds.start).length,
helper.clipLines(@lines, node.bounds.end, wrap.bounds.end).length
)
else
return new parser.PreNodeContext node.type, 0, 0
# Mark-up a node and its children as appropriate.
mark: (node, prefix, depth, pass, rules, context, wrap) ->
unless pass
context = node.parent
while context? and @detNode(context) in ['skip', 'parens']
context = context.parent
rules ?= []
rules = rules.slice 0
unless wrap?
rules.push node.type
# Check to see if this AST type is part of the special empty strings map.
# If so, check to see if it is the special empty string for its type,
# and null it out if it is.
#
# TODO this may be a place where we need to optimize performance.
if context? and @detNode(context) is 'block' and config.EMPTY_STRINGS? and
node.type of config.EMPTY_STRINGS and helper.clipLines(@lines, node.bounds.start, node.bounds.end) is config.EMPTY_STRINGS[node.type]
@addSocket
empty: config.EMPTY_STRINGS[node.type]
bounds: node.bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@flagToRemove node.bounds, depth + 1
# Pass through to child if single-child
else if node.children.length is 1 and @detNode(node) not in ['indent', 'buttonContainer']
@mark node.children[0], prefix, depth, true, rules, context, wrap
else if node.children.length > 0
switch @detNode node
when 'block'
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
if context? and @detNode(context) in ['block', 'buttonContainer']
@addSocket
empty: config.EMPTY_STRINGS?[rules[0]] ? config.empty
bounds: bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@addBlock
bounds: bounds
depth: depth + 1
color: @getColor node
shape: @getShape node
buttons: @getButtons node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
when 'buttonContainer'
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
@addButtonContainer
bounds: bounds
depth: depth + 1
parseContext: rules[0]
buttons: @getButtons(node)
color: @getColor node
shape: @getShape node
parseContext: rules[rules.length - 1]
when 'parens'
# Parens are assumed to wrap the only child that has children
child = null; ok = true
for el, i in node.children
if el.children.length > 0
if child?
ok = false
break
else
child = el
if ok
@mark child, prefix, depth, true, rules, context, wrap ? node
return
else
node.blockified = true
if wrap?
bounds = wrap.bounds
else
bounds = node.bounds
if context? and @detNode(context) in ['block', 'buttonContainer']
@addSocket
bounds: bounds
depth: depth
dropdown: config.DROPDOWNS?[rules[0]] ? null
parseContext: rules[0]
@addBlock
bounds: bounds
depth: depth + 1
color: @getColor node
buttons: @getButtons node
shape: @getShape node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
when 'indent'
# A lone indent needs to be wrapped in a block.
if @det(context) isnt 'block'
@addBlock
bounds: node.bounds
depth: depth
color: @getColor node
shape: @getShape node
buttons: @getButtons node
nodeContext: @getNodeContext node, wrap
parseContext: rules[rules.length - 1]
depth += 1
start = origin = node.children[0].bounds.start
for child, i in node.children
if child.children.length > 0
break
else unless helper.clipLines(@lines, origin, child.bounds.end).trim().length is 0 or i is node.children.length - 1
start = child.bounds.end
if @lines[start.line][...start.column].trim().length is 0
start = {
line: start.line - 1
column: @lines[start.line - 1].length
}
end = node.children[node.children.length - 1].bounds.end
for child, i in node.children by -1
if child.children.length > 0
end = child.bounds.end
break
else unless i is 0
end = child.bounds.start
if end.line < @lines.length and @lines[end.line][...end.column].trim().length is 0
end = {
line: end.line - 1
column: @lines[end.line - 1].length
}
bounds = {
start: start
end: end
}
oldPrefix = prefix
prefix = @guessPrefix bounds
@addIndent
bounds: bounds
depth: depth
prefix: prefix[oldPrefix.length...prefix.length]
indentContext: @applyRule(node).indentContext
when 'socket'
if context? and @detNode(context) in ['block', 'buttonContainer']
socketResult = config.SHOULD_SOCKET(@opts, node)
if ((not config.SHOULD_SOCKET?) or socketResult is true)
@addSocket
empty: config.EMPTY_STRINGS?[node.type] ? config.empty
bounds: node.bounds
depth: depth
parseContext: rules[0]
dropdown: config.DROPDOWNS?[rules[0]] ? null
if config.EMPTY_STRINGS? and not @opts.preserveEmpty and
helper.clipLines(@lines, node.bounds.start, node.bounds.end) is (config.EMPTY_STRINGS[node.type] ? config.empty)
@flagToRemove node.bounds, depth + 1
else if socketResult isnt false and socketResult.type is 'locked'
@addLockedSocket
bounds: node.bounds
depth: depth
dropdown: socketResult.dropdown
parseContext: rules[0]
for child in node.children
@mark child, prefix, depth + 2, false
else if context? and @detNode(context) in ['block', 'buttonContainer']
if @det(node) is 'socket'
socketResult = config.SHOULD_SOCKET(@opts, node)
if ((not config.SHOULD_SOCKET?) or socketResult is true)
@addSocket
empty: config.EMPTY_STRINGS?[node.type] ? config.empty
bounds: node.bounds
depth: depth
parseContext: rules[0]
dropdown: config.DROPDOWNS?[rules[0]] ? null
if config.EMPTY_STRINGS? and not @opts.preserveEmpty and
helper.clipLines(@lines, node.bounds.start, node.bounds.end) is (config.EMPTY_STRINGS[node.type] ? config.empty)
@flagToRemove node.bounds, depth + 1
else if socketResult isnt false and socketResult.type is 'locked'
@addLockedSocket
bounds: node.bounds
depth: depth
dropdown: socketResult.dropdown
parseContext: rules[0]
if config.droppabilityGraph?
# DroppabilityGraph contains all the rules for what things can be
# other things (strictly) in the grammar. For instance a * b is a multiplicativeExpression in C,
# but can also play the role of an additiveExpression, or an expression. In this case,
# the graph would contain edges pointing from expression to additiveExpression, and additiveExpression to multiplicativeExpression.
#
# This allows us to do a graph search to determine whether A can, transitively, be dropped in B.
droppabilityGraph = new Graph(config.droppabilityGraph)
# parenGraph is DroppabilityGraph with paren rules added.
#
# Paren edges point from things like primaryExpression -> expression, indicating that you can
# from a primaryExpression from an expression by adding parentheses. In this case, it would be adding '(' and ')'.
#
# When we do paren wrapping, we walk through the graph and add any necessary parentheses to get from our AST context
# to the desintation.
parenGraph = new Graph(config.parenGraph)
TreewalkParser.drop = (block, context, pred) ->
if block.parseContext is '__comment__' and context.type in ['indent', 'document']
return helper.ENCOURAGE
else if context.parseContext is '__comment__'
return helper.DISCOURAGE
parseContext = context.indentContext ? context.parseContext
if helper.dfs(parenGraph, parseContext, block.nodeContext.type)
return helper.ENCOURAGE
else
return helper.FORBID
TreewalkParser.parens = (leading, trailing, node, context) ->
# Comments never get paren-wrapped
if context is null or node.parseContext is '__comment__' or context.parseContext is '__comment__'
return node.parseContext
parseContext = context.indentContext ? context.parseContext
# Check to see if we can unwrap all our parentheses
if helper.dfs(droppabilityGraph, parseContext, node.nodeContext.type)
leading node.nodeContext.prefix
trailing node.nodeContext.suffix
return node.nodeContext.type
# Otherwise, for performance reasons,
# check to see if we can drop without modifying our parentheses
if node.parseContext isnt node.nodeContext and helper.dfs(droppabilityGraph, parseContext, node.parseContext)
return node.parseContext
# Otherwise, do a full paren-wrap traversal. We find the shortest rule-inheritance path
# from the bottom-most type of the block to the top-most type of the socket, applying
# any paren rules we encounter along the way.
else
path = parenGraph.shortestPath(parseContext, node.nodeContext.type, {reverse: true})
leading node.nodeContext.prefix
trailing node.nodeContext.suffix
for element, i in path when i > 0
if config.PAREN_RULES[path[i]]?[path[i - 1]]?
config.PAREN_RULES[path[i]][path[i - 1]](leading, trailing, node, context)
node.parseContext = path[i]
return node.parseContext
TreewalkParser.getParenCandidates = (context) ->
result = []
for dest, sources of config.PAREN_RULES
if helper.dfs(parenGraph, context, dest)
for source of sources when source not in result
result.push source
return result
else if config.drop?
TreewalkParser.drop = config.drop
TreewalkParser.parens = config.parens ? ->
TreewalkParser.stringFixer = config.stringFixer
TreewalkParser.rootContext = config.rootContext
TreewalkParser.getDefaultSelectionRange = config.getDefaultSelectionRange
TreewalkParser.empty = config.empty
TreewalkParser.lockedSocketCallback = config.lockedSocketCallback
return TreewalkParser
|
[
{
"context": "addData( globe_data, {format: 'magnitude', name: \"hello\"} )\n\t\t\t\t\t\t@globe.createPoints()\n\t\t\t\t\t\t@globe.anim",
"end": 12772,
"score": 0.9904717206954956,
"start": 12767,
"tag": "NAME",
"value": "hello"
}
] | bin/core/plugins/Sidebar/media/Sidebar.coffee | mkg20001/Fuzium | 0 | class Sidebar extends Class
constructor: ->
@tag = null
@container = null
@opened = false
@width = 410
@fixbutton = $(".fixbutton")
@fixbutton_addx = 0
@fixbutton_initx = 0
@fixbutton_targetx = 0
@page_width = $(window).width()
@frame = $("#webview")
@initFixbutton()
@dragStarted = 0
@globe = null
@preload_html = null
@original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original
# Start in opened state for debugging
if false
@startDrag()
@moved()
@fixbutton_targetx = @fixbutton_initx - @width
@stopDrag()
initFixbutton: ->
###
@fixbutton.on "mousedown touchstart", (e) =>
if not @opened
@logStart("Preloading")
wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) =>
@logEnd("Preloading")
@preload_html = res
###
# Detect dragging
@fixbutton.on "mousedown touchstart", (e) =>
if e.button > 0 # Right or middle click
return
e.preventDefault()
# Disable previous listeners
@fixbutton.off "click touchstop touchcancel"
@fixbutton.off "mousemove touchmove"
# Make sure its not a click
@dragStarted = (+ new Date)
@fixbutton.one "mousemove touchmove", (e) =>
mousex = e.pageX
if not mousex
mousex = e.originalEvent.touches[0].pageX
@fixbutton_addx = @fixbutton.offset().left-mousex
@startDrag()
@fixbutton.parent().on "click touchstop touchcancel", (e) =>
@stopDrag()
@resized()
$(window).on "resize", @resized
resized: =>
@page_width = $(window).width()
@fixbutton_initx = @page_width - 75 # Initial x position
if @opened
@fixbutton.css
left: @fixbutton_initx - @width
else
@fixbutton.css
left: @fixbutton_initx
# Start dragging the fixbutton
startDrag: ->
@log "startDrag"
@fixbutton_targetx = @fixbutton_initx # Fallback x position
@fixbutton.addClass("dragging")
# Fullscreen drag bg to capture mouse events over iframe
$("<div class='drag-bg'></div>").appendTo(document.body)
# IE position wrap fix
if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0
@fixbutton.css("pointer-events", "none")
# Don't go to homepage
@fixbutton.one "click", (e) =>
@stopDrag()
@fixbutton.removeClass("dragging")
if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5
# If moved more than some pixel the button then don't go to homepage
e.preventDefault()
# Animate drag
@fixbutton.parents().on "mousemove touchmove", @animDrag
@fixbutton.parents().on "mousemove touchmove" ,@waitMove
# Stop dragging listener
@fixbutton.parents().on "mouseup touchstop touchend touchcancel", (e) =>
e.preventDefault()
@stopDrag()
# Wait for moving the fixbutton
waitMove: (e) =>
if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100
@moved()
@fixbutton.parents().off "mousemove touchmove" ,@waitMove
moved: ->
@log "Moved"
@createHtmltag()
$(document.body).css("perspective", "1000px").addClass("body-sidebar")
$(window).off "resize"
$(window).on "resize", =>
$(document.body).css "height", $(window).height()
@scrollable()
@resized()
$(window).trigger "resize"
# Override setsiteinfo to catch changes
wrapper.setSiteInfo = (site_info) =>
@setSiteInfo(site_info)
@original_set_site_info.apply(wrapper, arguments)
# Preload world.jpg
img = new Image();
img.src = "/uimedia/globe/world.jpg";
setSiteInfo: (site_info) ->
RateLimit 1500, =>
@updateHtmlTag()
RateLimit 30000, =>
@displayGlobe()
# Create the sidebar html tag
createHtmltag: ->
@when_loaded = $.Deferred()
if not @container
@container = $("""
<div class="sidebar-container"><div class="sidebar scrollable"><div class="content-wrapper"><div class="content">
</div></div></div></div>
""")
@container.appendTo(document.body)
@tag = @container.find(".sidebar")
@updateHtmlTag()
@scrollable = window.initScrollable()
updateHtmlTag: ->
if @preload_html
@setHtmlTag(@preload_html)
@preload_html = null
else
wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag
setHtmlTag: (res) =>
if @tag.find(".content").children().length == 0 # First update
@log "Creating content"
@container.addClass("loaded")
morphdom(@tag.find(".content")[0], '<div class="content">'+res+'</div>')
# @scrollable()
@when_loaded.resolve()
else # Not first update, patch the html to keep unchanged dom elements
@log "Patching content"
morphdom @tag.find(".content")[0], '<div class="content">'+res+'</div>', {
onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state
if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0
return false
else
return true
}
animDrag: (e) =>
mousex = e.pageX
if not mousex
mousex = e.originalEvent.touches[0].pageX
overdrag = @fixbutton_initx-@width-mousex
if overdrag > 0 # Overdragged
overdrag_percent = 1+overdrag/300
mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent)
targetx = @fixbutton_initx-mousex-@fixbutton_addx
@fixbutton[0].style.left = (mousex+@fixbutton_addx)+"px"
if @tag
@tag[0].style.transform = "translateX(#{0-targetx}px)"
# Check if opened
if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9)
@fixbutton_targetx = @fixbutton_initx - @width # Make it opened
else
@fixbutton_targetx = @fixbutton_initx
# Stop dragging the fixbutton
stopDrag: ->
@fixbutton.parents().off "mousemove touchmove"
@fixbutton.off "mousemove touchmove"
@fixbutton.css("pointer-events", "")
$(".drag-bg").remove()
if not @fixbutton.hasClass("dragging")
return
@fixbutton.removeClass("dragging")
# Move back to initial position
if @fixbutton_targetx != @fixbutton.offset().left
# Animate fixbutton
@fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", =>
# Switch back to auto align
if @fixbutton_targetx == @fixbutton_initx # Closed
@fixbutton.css("left", "auto")
else # Opened
@fixbutton.css("left", @fixbutton_targetx)
$(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status
# Animate sidebar and iframe
if @fixbutton_targetx == @fixbutton_initx
# Closed
targetx = 0
@opened = false
else
# Opened
targetx = @width
if not @opened
@when_loaded.done =>
@onOpened()
@opened = true
# Revent sidebar transitions
if @tag
@tag.css("transition", "0.4s ease-out")
@tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, =>
@tag.css("transition", "")
if not @opened
@container.remove()
@container = null
@tag.remove()
@tag = null
# Revert body transformations
@log "stopdrag", "opened:", @opened
if not @opened
@onClosed()
onOpened: ->
@log "Opened"
@scrollable()
# Re-calculate height when site admin opened or closed
@tag.find("#checkbox-owned").off("click").on "click", =>
setTimeout (=>
@scrollable()
), 300
# Site limit button
@tag.find("#button-sitelimit").off("click").on "click", =>
wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), =>
wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000
@updateHtmlTag()
return false
# Database reload
@tag.find("#button-dbreload").off("click").on "click", =>
wrapper.ws.cmd "dbReload", [], =>
wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000
@updateHtmlTag()
return false
# Database rebuild
@tag.find("#button-dbrebuild").off("click").on "click", =>
wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...."
wrapper.ws.cmd "dbRebuild", [], =>
wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000
@updateHtmlTag()
return false
# Update site
@tag.find("#button-update").off("click").on "click", =>
@tag.find("#button-update").addClass("loading")
wrapper.ws.cmd "siteUpdate", wrapper.site_info.address, =>
wrapper.notifications.add "done-updated", "done", "Site updated!", 5000
@tag.find("#button-update").removeClass("loading")
return false
# Pause site
@tag.find("#button-pause").off("click").on "click", =>
@tag.find("#button-pause").addClass("hidden")
wrapper.ws.cmd "sitePause", wrapper.site_info.address
return false
# Resume site
@tag.find("#button-resume").off("click").on "click", =>
@tag.find("#button-resume").addClass("hidden")
wrapper.ws.cmd "siteResume", wrapper.site_info.address
return false
# Delete site
@tag.find("#button-delete").off("click").on "click", =>
wrapper.displayConfirm "Are you sure?", "Delete this site", =>
@tag.find("#button-delete").addClass("loading")
wrapper.ws.cmd "siteDelete", wrapper.site_info.address, ->
document.location = $(".fixbutton-bg").attr("href")
return false
# Owned checkbox
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Owned checkbox
@tag.find("#checkbox-autodownloadoptional").off("click").on "click", =>
wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")]
# Change identity button
@tag.find("#button-identity").off("click").on "click", =>
wrapper.ws.cmd "certSelect"
return false
# Owned checkbox
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Save settings
@tag.find("#button-settings").off("click").on "click", =>
wrapper.ws.cmd "fileGet", "content.json", (res) =>
data = JSON.parse(res)
data["title"] = $("#settings-title").val()
data["description"] = $("#settings-description").val()
json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t')))
wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) =>
if res != "ok" # fileWrite failed
wrapper.notifications.add "file-write", "error", "File write error: #{res}"
else
wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000
@updateHtmlTag()
return false
# Sign content.json
@tag.find("#button-sign").off("click").on "click", =>
inner_path = @tag.find("#input-contents").val()
if wrapper.site_info.privatekey
# Privatekey stored in users.json
wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) =>
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
else
# Ask the user for privatekey
wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key
wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) =>
if res == "ok"
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
return false
# Publish content.json
@tag.find("#button-publish").off("click").on "click", =>
inner_path = @tag.find("#input-contents").val()
@tag.find("#button-publish").addClass "loading"
wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, =>
@tag.find("#button-publish").removeClass "loading"
@loadGlobe()
onClosed: ->
$(window).off "resize"
$(window).on "resize", @resized
$(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) =>
if e.target == document.body
$(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd
@unloadGlobe()
# We dont need site info anymore
wrapper.setSiteInfo = @original_set_site_info
loadGlobe: =>
console.log "loadGlobe", @tag.find(".globe").hasClass("loading")
if @tag.find(".globe").hasClass("loading")
setTimeout (=>
if typeof(DAT) == "undefined" # Globe script not loaded, do it first
$.getScript("/uimedia/globe/all.js", @displayGlobe)
else
@displayGlobe()
), 600
displayGlobe: =>
img = new Image();
img.src = "/uimedia/globe/world.jpg";
img.onload = =>
wrapper.ws.cmd "sidebarGetPeers", [], (globe_data) =>
if @globe
@globe.scene.remove(@globe.points)
@globe.addData( globe_data, {format: 'magnitude', name: "hello", animated: false} )
@globe.createPoints()
else if typeof(DAT) != "undefined"
try
@globe = new DAT.Globe( @tag.find(".globe")[0], {"imgDir": "/uimedia/globe/"} )
@globe.addData( globe_data, {format: 'magnitude', name: "hello"} )
@globe.createPoints()
@globe.animate()
catch e
console.log "WebGL error", e
@tag?.find(".globe").addClass("error").text("WebGL not supported")
@tag?.find(".globe").removeClass("loading")
unloadGlobe: =>
if not @globe
return false
@globe.unload()
@globe = null
setTimeout ( ->
window.sidebar = new Sidebar()
), 500
window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend'
| 15068 | class Sidebar extends Class
constructor: ->
@tag = null
@container = null
@opened = false
@width = 410
@fixbutton = $(".fixbutton")
@fixbutton_addx = 0
@fixbutton_initx = 0
@fixbutton_targetx = 0
@page_width = $(window).width()
@frame = $("#webview")
@initFixbutton()
@dragStarted = 0
@globe = null
@preload_html = null
@original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original
# Start in opened state for debugging
if false
@startDrag()
@moved()
@fixbutton_targetx = @fixbutton_initx - @width
@stopDrag()
initFixbutton: ->
###
@fixbutton.on "mousedown touchstart", (e) =>
if not @opened
@logStart("Preloading")
wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) =>
@logEnd("Preloading")
@preload_html = res
###
# Detect dragging
@fixbutton.on "mousedown touchstart", (e) =>
if e.button > 0 # Right or middle click
return
e.preventDefault()
# Disable previous listeners
@fixbutton.off "click touchstop touchcancel"
@fixbutton.off "mousemove touchmove"
# Make sure its not a click
@dragStarted = (+ new Date)
@fixbutton.one "mousemove touchmove", (e) =>
mousex = e.pageX
if not mousex
mousex = e.originalEvent.touches[0].pageX
@fixbutton_addx = @fixbutton.offset().left-mousex
@startDrag()
@fixbutton.parent().on "click touchstop touchcancel", (e) =>
@stopDrag()
@resized()
$(window).on "resize", @resized
resized: =>
@page_width = $(window).width()
@fixbutton_initx = @page_width - 75 # Initial x position
if @opened
@fixbutton.css
left: @fixbutton_initx - @width
else
@fixbutton.css
left: @fixbutton_initx
# Start dragging the fixbutton
startDrag: ->
@log "startDrag"
@fixbutton_targetx = @fixbutton_initx # Fallback x position
@fixbutton.addClass("dragging")
# Fullscreen drag bg to capture mouse events over iframe
$("<div class='drag-bg'></div>").appendTo(document.body)
# IE position wrap fix
if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0
@fixbutton.css("pointer-events", "none")
# Don't go to homepage
@fixbutton.one "click", (e) =>
@stopDrag()
@fixbutton.removeClass("dragging")
if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5
# If moved more than some pixel the button then don't go to homepage
e.preventDefault()
# Animate drag
@fixbutton.parents().on "mousemove touchmove", @animDrag
@fixbutton.parents().on "mousemove touchmove" ,@waitMove
# Stop dragging listener
@fixbutton.parents().on "mouseup touchstop touchend touchcancel", (e) =>
e.preventDefault()
@stopDrag()
# Wait for moving the fixbutton
waitMove: (e) =>
if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100
@moved()
@fixbutton.parents().off "mousemove touchmove" ,@waitMove
moved: ->
@log "Moved"
@createHtmltag()
$(document.body).css("perspective", "1000px").addClass("body-sidebar")
$(window).off "resize"
$(window).on "resize", =>
$(document.body).css "height", $(window).height()
@scrollable()
@resized()
$(window).trigger "resize"
# Override setsiteinfo to catch changes
wrapper.setSiteInfo = (site_info) =>
@setSiteInfo(site_info)
@original_set_site_info.apply(wrapper, arguments)
# Preload world.jpg
img = new Image();
img.src = "/uimedia/globe/world.jpg";
setSiteInfo: (site_info) ->
RateLimit 1500, =>
@updateHtmlTag()
RateLimit 30000, =>
@displayGlobe()
# Create the sidebar html tag
createHtmltag: ->
@when_loaded = $.Deferred()
if not @container
@container = $("""
<div class="sidebar-container"><div class="sidebar scrollable"><div class="content-wrapper"><div class="content">
</div></div></div></div>
""")
@container.appendTo(document.body)
@tag = @container.find(".sidebar")
@updateHtmlTag()
@scrollable = window.initScrollable()
updateHtmlTag: ->
if @preload_html
@setHtmlTag(@preload_html)
@preload_html = null
else
wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag
setHtmlTag: (res) =>
if @tag.find(".content").children().length == 0 # First update
@log "Creating content"
@container.addClass("loaded")
morphdom(@tag.find(".content")[0], '<div class="content">'+res+'</div>')
# @scrollable()
@when_loaded.resolve()
else # Not first update, patch the html to keep unchanged dom elements
@log "Patching content"
morphdom @tag.find(".content")[0], '<div class="content">'+res+'</div>', {
onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state
if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0
return false
else
return true
}
animDrag: (e) =>
mousex = e.pageX
if not mousex
mousex = e.originalEvent.touches[0].pageX
overdrag = @fixbutton_initx-@width-mousex
if overdrag > 0 # Overdragged
overdrag_percent = 1+overdrag/300
mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent)
targetx = @fixbutton_initx-mousex-@fixbutton_addx
@fixbutton[0].style.left = (mousex+@fixbutton_addx)+"px"
if @tag
@tag[0].style.transform = "translateX(#{0-targetx}px)"
# Check if opened
if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9)
@fixbutton_targetx = @fixbutton_initx - @width # Make it opened
else
@fixbutton_targetx = @fixbutton_initx
# Stop dragging the fixbutton
stopDrag: ->
@fixbutton.parents().off "mousemove touchmove"
@fixbutton.off "mousemove touchmove"
@fixbutton.css("pointer-events", "")
$(".drag-bg").remove()
if not @fixbutton.hasClass("dragging")
return
@fixbutton.removeClass("dragging")
# Move back to initial position
if @fixbutton_targetx != @fixbutton.offset().left
# Animate fixbutton
@fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", =>
# Switch back to auto align
if @fixbutton_targetx == @fixbutton_initx # Closed
@fixbutton.css("left", "auto")
else # Opened
@fixbutton.css("left", @fixbutton_targetx)
$(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status
# Animate sidebar and iframe
if @fixbutton_targetx == @fixbutton_initx
# Closed
targetx = 0
@opened = false
else
# Opened
targetx = @width
if not @opened
@when_loaded.done =>
@onOpened()
@opened = true
# Revent sidebar transitions
if @tag
@tag.css("transition", "0.4s ease-out")
@tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, =>
@tag.css("transition", "")
if not @opened
@container.remove()
@container = null
@tag.remove()
@tag = null
# Revert body transformations
@log "stopdrag", "opened:", @opened
if not @opened
@onClosed()
onOpened: ->
@log "Opened"
@scrollable()
# Re-calculate height when site admin opened or closed
@tag.find("#checkbox-owned").off("click").on "click", =>
setTimeout (=>
@scrollable()
), 300
# Site limit button
@tag.find("#button-sitelimit").off("click").on "click", =>
wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), =>
wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000
@updateHtmlTag()
return false
# Database reload
@tag.find("#button-dbreload").off("click").on "click", =>
wrapper.ws.cmd "dbReload", [], =>
wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000
@updateHtmlTag()
return false
# Database rebuild
@tag.find("#button-dbrebuild").off("click").on "click", =>
wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...."
wrapper.ws.cmd "dbRebuild", [], =>
wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000
@updateHtmlTag()
return false
# Update site
@tag.find("#button-update").off("click").on "click", =>
@tag.find("#button-update").addClass("loading")
wrapper.ws.cmd "siteUpdate", wrapper.site_info.address, =>
wrapper.notifications.add "done-updated", "done", "Site updated!", 5000
@tag.find("#button-update").removeClass("loading")
return false
# Pause site
@tag.find("#button-pause").off("click").on "click", =>
@tag.find("#button-pause").addClass("hidden")
wrapper.ws.cmd "sitePause", wrapper.site_info.address
return false
# Resume site
@tag.find("#button-resume").off("click").on "click", =>
@tag.find("#button-resume").addClass("hidden")
wrapper.ws.cmd "siteResume", wrapper.site_info.address
return false
# Delete site
@tag.find("#button-delete").off("click").on "click", =>
wrapper.displayConfirm "Are you sure?", "Delete this site", =>
@tag.find("#button-delete").addClass("loading")
wrapper.ws.cmd "siteDelete", wrapper.site_info.address, ->
document.location = $(".fixbutton-bg").attr("href")
return false
# Owned checkbox
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Owned checkbox
@tag.find("#checkbox-autodownloadoptional").off("click").on "click", =>
wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")]
# Change identity button
@tag.find("#button-identity").off("click").on "click", =>
wrapper.ws.cmd "certSelect"
return false
# Owned checkbox
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Save settings
@tag.find("#button-settings").off("click").on "click", =>
wrapper.ws.cmd "fileGet", "content.json", (res) =>
data = JSON.parse(res)
data["title"] = $("#settings-title").val()
data["description"] = $("#settings-description").val()
json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t')))
wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) =>
if res != "ok" # fileWrite failed
wrapper.notifications.add "file-write", "error", "File write error: #{res}"
else
wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000
@updateHtmlTag()
return false
# Sign content.json
@tag.find("#button-sign").off("click").on "click", =>
inner_path = @tag.find("#input-contents").val()
if wrapper.site_info.privatekey
# Privatekey stored in users.json
wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) =>
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
else
# Ask the user for privatekey
wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key
wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) =>
if res == "ok"
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
return false
# Publish content.json
@tag.find("#button-publish").off("click").on "click", =>
inner_path = @tag.find("#input-contents").val()
@tag.find("#button-publish").addClass "loading"
wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, =>
@tag.find("#button-publish").removeClass "loading"
@loadGlobe()
onClosed: ->
$(window).off "resize"
$(window).on "resize", @resized
$(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) =>
if e.target == document.body
$(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd
@unloadGlobe()
# We dont need site info anymore
wrapper.setSiteInfo = @original_set_site_info
loadGlobe: =>
console.log "loadGlobe", @tag.find(".globe").hasClass("loading")
if @tag.find(".globe").hasClass("loading")
setTimeout (=>
if typeof(DAT) == "undefined" # Globe script not loaded, do it first
$.getScript("/uimedia/globe/all.js", @displayGlobe)
else
@displayGlobe()
), 600
displayGlobe: =>
img = new Image();
img.src = "/uimedia/globe/world.jpg";
img.onload = =>
wrapper.ws.cmd "sidebarGetPeers", [], (globe_data) =>
if @globe
@globe.scene.remove(@globe.points)
@globe.addData( globe_data, {format: 'magnitude', name: "hello", animated: false} )
@globe.createPoints()
else if typeof(DAT) != "undefined"
try
@globe = new DAT.Globe( @tag.find(".globe")[0], {"imgDir": "/uimedia/globe/"} )
@globe.addData( globe_data, {format: 'magnitude', name: "<NAME>"} )
@globe.createPoints()
@globe.animate()
catch e
console.log "WebGL error", e
@tag?.find(".globe").addClass("error").text("WebGL not supported")
@tag?.find(".globe").removeClass("loading")
unloadGlobe: =>
if not @globe
return false
@globe.unload()
@globe = null
setTimeout ( ->
window.sidebar = new Sidebar()
), 500
window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend'
| true | class Sidebar extends Class
constructor: ->
@tag = null
@container = null
@opened = false
@width = 410
@fixbutton = $(".fixbutton")
@fixbutton_addx = 0
@fixbutton_initx = 0
@fixbutton_targetx = 0
@page_width = $(window).width()
@frame = $("#webview")
@initFixbutton()
@dragStarted = 0
@globe = null
@preload_html = null
@original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original
# Start in opened state for debugging
if false
@startDrag()
@moved()
@fixbutton_targetx = @fixbutton_initx - @width
@stopDrag()
initFixbutton: ->
###
@fixbutton.on "mousedown touchstart", (e) =>
if not @opened
@logStart("Preloading")
wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) =>
@logEnd("Preloading")
@preload_html = res
###
# Detect dragging
@fixbutton.on "mousedown touchstart", (e) =>
if e.button > 0 # Right or middle click
return
e.preventDefault()
# Disable previous listeners
@fixbutton.off "click touchstop touchcancel"
@fixbutton.off "mousemove touchmove"
# Make sure its not a click
@dragStarted = (+ new Date)
@fixbutton.one "mousemove touchmove", (e) =>
mousex = e.pageX
if not mousex
mousex = e.originalEvent.touches[0].pageX
@fixbutton_addx = @fixbutton.offset().left-mousex
@startDrag()
@fixbutton.parent().on "click touchstop touchcancel", (e) =>
@stopDrag()
@resized()
$(window).on "resize", @resized
resized: =>
@page_width = $(window).width()
@fixbutton_initx = @page_width - 75 # Initial x position
if @opened
@fixbutton.css
left: @fixbutton_initx - @width
else
@fixbutton.css
left: @fixbutton_initx
# Start dragging the fixbutton
startDrag: ->
@log "startDrag"
@fixbutton_targetx = @fixbutton_initx # Fallback x position
@fixbutton.addClass("dragging")
# Fullscreen drag bg to capture mouse events over iframe
$("<div class='drag-bg'></div>").appendTo(document.body)
# IE position wrap fix
if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0
@fixbutton.css("pointer-events", "none")
# Don't go to homepage
@fixbutton.one "click", (e) =>
@stopDrag()
@fixbutton.removeClass("dragging")
if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5
# If moved more than some pixel the button then don't go to homepage
e.preventDefault()
# Animate drag
@fixbutton.parents().on "mousemove touchmove", @animDrag
@fixbutton.parents().on "mousemove touchmove" ,@waitMove
# Stop dragging listener
@fixbutton.parents().on "mouseup touchstop touchend touchcancel", (e) =>
e.preventDefault()
@stopDrag()
# Wait for moving the fixbutton
waitMove: (e) =>
if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100
@moved()
@fixbutton.parents().off "mousemove touchmove" ,@waitMove
moved: ->
@log "Moved"
@createHtmltag()
$(document.body).css("perspective", "1000px").addClass("body-sidebar")
$(window).off "resize"
$(window).on "resize", =>
$(document.body).css "height", $(window).height()
@scrollable()
@resized()
$(window).trigger "resize"
# Override setsiteinfo to catch changes
wrapper.setSiteInfo = (site_info) =>
@setSiteInfo(site_info)
@original_set_site_info.apply(wrapper, arguments)
# Preload world.jpg
img = new Image();
img.src = "/uimedia/globe/world.jpg";
setSiteInfo: (site_info) ->
RateLimit 1500, =>
@updateHtmlTag()
RateLimit 30000, =>
@displayGlobe()
# Create the sidebar html tag
createHtmltag: ->
@when_loaded = $.Deferred()
if not @container
@container = $("""
<div class="sidebar-container"><div class="sidebar scrollable"><div class="content-wrapper"><div class="content">
</div></div></div></div>
""")
@container.appendTo(document.body)
@tag = @container.find(".sidebar")
@updateHtmlTag()
@scrollable = window.initScrollable()
updateHtmlTag: ->
if @preload_html
@setHtmlTag(@preload_html)
@preload_html = null
else
wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag
setHtmlTag: (res) =>
if @tag.find(".content").children().length == 0 # First update
@log "Creating content"
@container.addClass("loaded")
morphdom(@tag.find(".content")[0], '<div class="content">'+res+'</div>')
# @scrollable()
@when_loaded.resolve()
else # Not first update, patch the html to keep unchanged dom elements
@log "Patching content"
morphdom @tag.find(".content")[0], '<div class="content">'+res+'</div>', {
onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state
if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0
return false
else
return true
}
animDrag: (e) =>
mousex = e.pageX
if not mousex
mousex = e.originalEvent.touches[0].pageX
overdrag = @fixbutton_initx-@width-mousex
if overdrag > 0 # Overdragged
overdrag_percent = 1+overdrag/300
mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent)
targetx = @fixbutton_initx-mousex-@fixbutton_addx
@fixbutton[0].style.left = (mousex+@fixbutton_addx)+"px"
if @tag
@tag[0].style.transform = "translateX(#{0-targetx}px)"
# Check if opened
if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9)
@fixbutton_targetx = @fixbutton_initx - @width # Make it opened
else
@fixbutton_targetx = @fixbutton_initx
# Stop dragging the fixbutton
stopDrag: ->
@fixbutton.parents().off "mousemove touchmove"
@fixbutton.off "mousemove touchmove"
@fixbutton.css("pointer-events", "")
$(".drag-bg").remove()
if not @fixbutton.hasClass("dragging")
return
@fixbutton.removeClass("dragging")
# Move back to initial position
if @fixbutton_targetx != @fixbutton.offset().left
# Animate fixbutton
@fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", =>
# Switch back to auto align
if @fixbutton_targetx == @fixbutton_initx # Closed
@fixbutton.css("left", "auto")
else # Opened
@fixbutton.css("left", @fixbutton_targetx)
$(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status
# Animate sidebar and iframe
if @fixbutton_targetx == @fixbutton_initx
# Closed
targetx = 0
@opened = false
else
# Opened
targetx = @width
if not @opened
@when_loaded.done =>
@onOpened()
@opened = true
# Revent sidebar transitions
if @tag
@tag.css("transition", "0.4s ease-out")
@tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, =>
@tag.css("transition", "")
if not @opened
@container.remove()
@container = null
@tag.remove()
@tag = null
# Revert body transformations
@log "stopdrag", "opened:", @opened
if not @opened
@onClosed()
onOpened: ->
@log "Opened"
@scrollable()
# Re-calculate height when site admin opened or closed
@tag.find("#checkbox-owned").off("click").on "click", =>
setTimeout (=>
@scrollable()
), 300
# Site limit button
@tag.find("#button-sitelimit").off("click").on "click", =>
wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), =>
wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000
@updateHtmlTag()
return false
# Database reload
@tag.find("#button-dbreload").off("click").on "click", =>
wrapper.ws.cmd "dbReload", [], =>
wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000
@updateHtmlTag()
return false
# Database rebuild
@tag.find("#button-dbrebuild").off("click").on "click", =>
wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...."
wrapper.ws.cmd "dbRebuild", [], =>
wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000
@updateHtmlTag()
return false
# Update site
@tag.find("#button-update").off("click").on "click", =>
@tag.find("#button-update").addClass("loading")
wrapper.ws.cmd "siteUpdate", wrapper.site_info.address, =>
wrapper.notifications.add "done-updated", "done", "Site updated!", 5000
@tag.find("#button-update").removeClass("loading")
return false
# Pause site
@tag.find("#button-pause").off("click").on "click", =>
@tag.find("#button-pause").addClass("hidden")
wrapper.ws.cmd "sitePause", wrapper.site_info.address
return false
# Resume site
@tag.find("#button-resume").off("click").on "click", =>
@tag.find("#button-resume").addClass("hidden")
wrapper.ws.cmd "siteResume", wrapper.site_info.address
return false
# Delete site
@tag.find("#button-delete").off("click").on "click", =>
wrapper.displayConfirm "Are you sure?", "Delete this site", =>
@tag.find("#button-delete").addClass("loading")
wrapper.ws.cmd "siteDelete", wrapper.site_info.address, ->
document.location = $(".fixbutton-bg").attr("href")
return false
# Owned checkbox
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Owned checkbox
@tag.find("#checkbox-autodownloadoptional").off("click").on "click", =>
wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")]
# Change identity button
@tag.find("#button-identity").off("click").on "click", =>
wrapper.ws.cmd "certSelect"
return false
# Owned checkbox
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Save settings
@tag.find("#button-settings").off("click").on "click", =>
wrapper.ws.cmd "fileGet", "content.json", (res) =>
data = JSON.parse(res)
data["title"] = $("#settings-title").val()
data["description"] = $("#settings-description").val()
json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t')))
wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) =>
if res != "ok" # fileWrite failed
wrapper.notifications.add "file-write", "error", "File write error: #{res}"
else
wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000
@updateHtmlTag()
return false
# Sign content.json
@tag.find("#button-sign").off("click").on "click", =>
inner_path = @tag.find("#input-contents").val()
if wrapper.site_info.privatekey
# Privatekey stored in users.json
wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) =>
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
else
# Ask the user for privatekey
wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key
wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) =>
if res == "ok"
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
return false
# Publish content.json
@tag.find("#button-publish").off("click").on "click", =>
inner_path = @tag.find("#input-contents").val()
@tag.find("#button-publish").addClass "loading"
wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, =>
@tag.find("#button-publish").removeClass "loading"
@loadGlobe()
onClosed: ->
$(window).off "resize"
$(window).on "resize", @resized
$(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) =>
if e.target == document.body
$(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd
@unloadGlobe()
# We dont need site info anymore
wrapper.setSiteInfo = @original_set_site_info
loadGlobe: =>
console.log "loadGlobe", @tag.find(".globe").hasClass("loading")
if @tag.find(".globe").hasClass("loading")
setTimeout (=>
if typeof(DAT) == "undefined" # Globe script not loaded, do it first
$.getScript("/uimedia/globe/all.js", @displayGlobe)
else
@displayGlobe()
), 600
displayGlobe: =>
img = new Image();
img.src = "/uimedia/globe/world.jpg";
img.onload = =>
wrapper.ws.cmd "sidebarGetPeers", [], (globe_data) =>
if @globe
@globe.scene.remove(@globe.points)
@globe.addData( globe_data, {format: 'magnitude', name: "hello", animated: false} )
@globe.createPoints()
else if typeof(DAT) != "undefined"
try
@globe = new DAT.Globe( @tag.find(".globe")[0], {"imgDir": "/uimedia/globe/"} )
@globe.addData( globe_data, {format: 'magnitude', name: "PI:NAME:<NAME>END_PI"} )
@globe.createPoints()
@globe.animate()
catch e
console.log "WebGL error", e
@tag?.find(".globe").addClass("error").text("WebGL not supported")
@tag?.find(".globe").removeClass("loading")
unloadGlobe: =>
if not @globe
return false
@globe.unload()
@globe = null
setTimeout ( ->
window.sidebar = new Sidebar()
), 500
window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend'
|
[
{
"context": "\"title\": \"Artusi\"\n\n\"api\":\n \"baseUrl\": \"http://diegopinna.com/co",
"end": 16,
"score": 0.9982131719589233,
"start": 10,
"tag": "NAME",
"value": "Artusi"
}
] | config/config.cson | robertDpi/mangiabene | 0 | "title": "Artusi"
"api":
"baseUrl": "http://diegopinna.com/cooking/wp-json"
"timeout": 10000
"maxAttempt": 3
"translation":
"displayed" : ["en", "fr"]
"prefered": "en"
# BOOKMARK PAGE
"bookmark":
"cache":
#increased this from 10
"capacity": 20
# CACHE
# Todo: tweak the values for a better offline experience
"cache":
"img":
"localCacheFolder": "imgcache"
"useDataURI": false
"chromeQuota": 10485760
"usePersistentCache": true
"cacheClearSize": 0
"headers": {}
"skipURIencoding": false
"data":
"capacity": 100
"maxAge": 10800000
"deleteOnExpire": "aggressive"
"recycleFreq": 1000
"cacheFlushInterval": null
"storageMode": "localStorage"
"cordova":
"admob":
"enabled":false
"android":
"bannerID": null
"interstitialID": null
"ios":
"bannerID": null
"interstitialID": null
"pushNotifications":
"enabled": false
"baseUrl": "http://yourDomain.com/pnfw"
"android":
"senderID": ""
| 51023 | "title": "<NAME>"
"api":
"baseUrl": "http://diegopinna.com/cooking/wp-json"
"timeout": 10000
"maxAttempt": 3
"translation":
"displayed" : ["en", "fr"]
"prefered": "en"
# BOOKMARK PAGE
"bookmark":
"cache":
#increased this from 10
"capacity": 20
# CACHE
# Todo: tweak the values for a better offline experience
"cache":
"img":
"localCacheFolder": "imgcache"
"useDataURI": false
"chromeQuota": 10485760
"usePersistentCache": true
"cacheClearSize": 0
"headers": {}
"skipURIencoding": false
"data":
"capacity": 100
"maxAge": 10800000
"deleteOnExpire": "aggressive"
"recycleFreq": 1000
"cacheFlushInterval": null
"storageMode": "localStorage"
"cordova":
"admob":
"enabled":false
"android":
"bannerID": null
"interstitialID": null
"ios":
"bannerID": null
"interstitialID": null
"pushNotifications":
"enabled": false
"baseUrl": "http://yourDomain.com/pnfw"
"android":
"senderID": ""
| true | "title": "PI:NAME:<NAME>END_PI"
"api":
"baseUrl": "http://diegopinna.com/cooking/wp-json"
"timeout": 10000
"maxAttempt": 3
"translation":
"displayed" : ["en", "fr"]
"prefered": "en"
# BOOKMARK PAGE
"bookmark":
"cache":
#increased this from 10
"capacity": 20
# CACHE
# Todo: tweak the values for a better offline experience
"cache":
"img":
"localCacheFolder": "imgcache"
"useDataURI": false
"chromeQuota": 10485760
"usePersistentCache": true
"cacheClearSize": 0
"headers": {}
"skipURIencoding": false
"data":
"capacity": 100
"maxAge": 10800000
"deleteOnExpire": "aggressive"
"recycleFreq": 1000
"cacheFlushInterval": null
"storageMode": "localStorage"
"cordova":
"admob":
"enabled":false
"android":
"bannerID": null
"interstitialID": null
"ios":
"bannerID": null
"interstitialID": null
"pushNotifications":
"enabled": false
"baseUrl": "http://yourDomain.com/pnfw"
"android":
"senderID": ""
|
[
{
"context": "d for pixi.js with every basics you need\n# @author David Ronai / Makiopolis.com / @Makio64 \n# \nclass Stage2d\n\n\t@",
"end": 73,
"score": 0.999854564666748,
"start": 62,
"tag": "NAME",
"value": "David Ronai"
},
{
"context": "th every basics you need\n# @author David ... | src/coffee/core/2d/Stage2d.coffee | Makio64/Kyari | 0 | #
# Stageed for pixi.js with every basics you need
# @author David Ronai / Makiopolis.com / @Makio64
#
class Stage2d
@stage : null
@renderer : null
@init:(options)->
view = options.view||null
transparent = options.transparent||false
antialias = options.antialias||false
preserveDrawingBuffer = options.antialias||false
@renderer = new PIXI.autoDetectRenderer(window.innerWidth, window.innerHeight,view,antialias,transparent,preserveDrawingBuffer)
@stage = new PIXI.Stage()
document.body.appendChild( @renderer.view )
return
@addChild:(o)->
@stage.addChild(o)
return
@render:()->
@renderer.render ( @stage )
return
@resize:()->
if @renderer
@renderer.resize( window.innerWidth, window.innerHeight )
return | 39949 | #
# Stageed for pixi.js with every basics you need
# @author <NAME> / M<EMAIL> / @Makio64
#
class Stage2d
@stage : null
@renderer : null
@init:(options)->
view = options.view||null
transparent = options.transparent||false
antialias = options.antialias||false
preserveDrawingBuffer = options.antialias||false
@renderer = new PIXI.autoDetectRenderer(window.innerWidth, window.innerHeight,view,antialias,transparent,preserveDrawingBuffer)
@stage = new PIXI.Stage()
document.body.appendChild( @renderer.view )
return
@addChild:(o)->
@stage.addChild(o)
return
@render:()->
@renderer.render ( @stage )
return
@resize:()->
if @renderer
@renderer.resize( window.innerWidth, window.innerHeight )
return | true | #
# Stageed for pixi.js with every basics you need
# @author PI:NAME:<NAME>END_PI / MPI:EMAIL:<EMAIL>END_PI / @Makio64
#
class Stage2d
@stage : null
@renderer : null
@init:(options)->
view = options.view||null
transparent = options.transparent||false
antialias = options.antialias||false
preserveDrawingBuffer = options.antialias||false
@renderer = new PIXI.autoDetectRenderer(window.innerWidth, window.innerHeight,view,antialias,transparent,preserveDrawingBuffer)
@stage = new PIXI.Stage()
document.body.appendChild( @renderer.view )
return
@addChild:(o)->
@stage.addChild(o)
return
@render:()->
@renderer.render ( @stage )
return
@resize:()->
if @renderer
@renderer.resize( window.innerWidth, window.innerHeight )
return |
[
{
"context": "et \"/app/html\", (req, res) ->\n res.send(\"<html>Herman Melville</html>\")\n\ndescribe \"e2e baseUrl\", ->\n context \"h",
"end": 133,
"score": 0.999855637550354,
"start": 118,
"tag": "NAME",
"value": "Herman Melville"
}
] | packages/server/test/e2e/1_base_url_spec.coffee | nongmanh/cypress | 2 | e2e = require("../support/helpers/e2e")
onServer = (app) ->
app.get "/app/html", (req, res) ->
res.send("<html>Herman Melville</html>")
describe "e2e baseUrl", ->
context "https", ->
e2e.setup({
settings: {
baseUrl: "https://httpbin.org"
}
})
e2e.it "passes", {
spec: "base_url_spec.coffee"
snapshot: true
expectedExitCode: 0
}
context "http", ->
e2e.setup({
servers: {
port: 9999
onServer: onServer
}
settings: {
baseUrl: "http://localhost:9999/app"
}
})
e2e.it "passes", {
spec: "base_url_spec.coffee"
snapshot: true
expectedExitCode: 0
}
| 67152 | e2e = require("../support/helpers/e2e")
onServer = (app) ->
app.get "/app/html", (req, res) ->
res.send("<html><NAME></html>")
describe "e2e baseUrl", ->
context "https", ->
e2e.setup({
settings: {
baseUrl: "https://httpbin.org"
}
})
e2e.it "passes", {
spec: "base_url_spec.coffee"
snapshot: true
expectedExitCode: 0
}
context "http", ->
e2e.setup({
servers: {
port: 9999
onServer: onServer
}
settings: {
baseUrl: "http://localhost:9999/app"
}
})
e2e.it "passes", {
spec: "base_url_spec.coffee"
snapshot: true
expectedExitCode: 0
}
| true | e2e = require("../support/helpers/e2e")
onServer = (app) ->
app.get "/app/html", (req, res) ->
res.send("<html>PI:NAME:<NAME>END_PI</html>")
describe "e2e baseUrl", ->
context "https", ->
e2e.setup({
settings: {
baseUrl: "https://httpbin.org"
}
})
e2e.it "passes", {
spec: "base_url_spec.coffee"
snapshot: true
expectedExitCode: 0
}
context "http", ->
e2e.setup({
servers: {
port: 9999
onServer: onServer
}
settings: {
baseUrl: "http://localhost:9999/app"
}
})
e2e.it "passes", {
spec: "base_url_spec.coffee"
snapshot: true
expectedExitCode: 0
}
|
[
{
"context": " @body.calendar.should.have.property 'name', 'random.test@gmail.com'\n\n describe \"POST /import/ical the calenda",
"end": 4007,
"score": 0.9999217391014099,
"start": 3986,
"tag": "EMAIL",
"value": "random.test@gmail.com"
}
] | test/ical_test.coffee | aenario/cozy-agenda | 2 | should = require 'should'
async = require 'async'
moment = require 'moment-timezone'
Client = require('request-json').JsonClient
client = new Client "http://localhost:8888/"
clientDS = new Client 'http://localhost:9101'
helpers = require './helpers'
{ICalParser, VCalendar, VAlarm, VTodo, VEvent} = require 'cozy-ical'
# THIS TEST DUPPLICATE cozy-ical's
#@TODO : improve test there, remove here
expectedContent = """
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Cozy//NONSGML Cozy Calendar//EN
X-WR-CALNAME:my calendar
BEGIN:VEVENT
UID:[id-4]
DTSTAMP:20141110T090600Z
DTSTART:20130609T150000Z
DTEND:20130610T150000Z
LOCATION:my place
SUMMARY:my description
END:VEVENT
END:VCALENDAR
""".replace(/\n/g, '\r\n')
describe "Calendar export/import", ->
before helpers.before
after helpers.after
describe 'Resources', ->
describe "GET /export/my calendar.ics", ->
ids = null
before helpers.cleanDb
before (done) ->
async.series [
helpers.createEvent("2013-06-09T15:00:00.000Z",
"2013-06-10T15:00:00.000Z",
"my place", "my description",
"Indian/Cocos")
], (err, results) ->
ids = results.map (doc) -> doc.id
done()
it "When I request for iCal export file", (done) ->
client.get "export/my%20calendar.ics", (error, response, body) =>
@body = body
done()
, false
it "Then it should contains my event", ->
dtstamp = moment.tz moment(), 'UTC'
dtstampValue = "DTSTAMP:#{dtstamp.format 'YYYYMMDDTHHmmss[Z]'}"
expectedBody = expectedContent
.replace '[id-4]', ids[0]
# DTSTAMP is new all the time in our implementation
.replace /DTSTAMP:.*/g, dtstampValue
@body.should.equal expectedBody
describe "POST /import/ical the calendar name is found in iCal", ->
it "When I send an iCal file to import", (done) ->
client.sendFile "import/ical", "./test/calendar.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 3
it "It should have the default calendar name", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'my calendar'
it "When I send an iCal file from Apple to import", (done) ->
client.sendFile "import/ical", "./test/apple.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 2
it "It should have the default calendar name", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'my calendar'
it "When I send an iCal file from Google to import", (done) ->
client.sendFile "import/ical", "./test/google.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 2
it "It should have the calendar name from the file", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'random.test@gmail.com'
describe "POST /import/ical the calendar name is not found, with existing events", ->
before helpers.cleanDb
after helpers.cleanDb
it "Given there are existing tags", (done) ->
rawEvent =
description: 'Something to do'
start: "2013-04-25T15:30:00.000Z"
end: "2013-04-25T18:30:00.000Z"
place: "place"
tags: ['zsomething']
rawEvent2 =
description: 'Something to do'
start: "2013-04-25T18:30:00.000Z"
end: "2013-04-25T19:30:00.000Z"
place: "place"
tags: ['asomething']
helpers.createEventFromObject rawEvent, ->
helpers.createEventFromObject rawEvent2, ->
done()
it "When I send an iCal file to import", (done) ->
client.sendFile "import/ical", "./test/calendar.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 3
it "It should have the first calendar name by alphabetical order", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'asomething'
| 35486 | should = require 'should'
async = require 'async'
moment = require 'moment-timezone'
Client = require('request-json').JsonClient
client = new Client "http://localhost:8888/"
clientDS = new Client 'http://localhost:9101'
helpers = require './helpers'
{ICalParser, VCalendar, VAlarm, VTodo, VEvent} = require 'cozy-ical'
# THIS TEST DUPPLICATE cozy-ical's
#@TODO : improve test there, remove here
expectedContent = """
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Cozy//NONSGML Cozy Calendar//EN
X-WR-CALNAME:my calendar
BEGIN:VEVENT
UID:[id-4]
DTSTAMP:20141110T090600Z
DTSTART:20130609T150000Z
DTEND:20130610T150000Z
LOCATION:my place
SUMMARY:my description
END:VEVENT
END:VCALENDAR
""".replace(/\n/g, '\r\n')
describe "Calendar export/import", ->
before helpers.before
after helpers.after
describe 'Resources', ->
describe "GET /export/my calendar.ics", ->
ids = null
before helpers.cleanDb
before (done) ->
async.series [
helpers.createEvent("2013-06-09T15:00:00.000Z",
"2013-06-10T15:00:00.000Z",
"my place", "my description",
"Indian/Cocos")
], (err, results) ->
ids = results.map (doc) -> doc.id
done()
it "When I request for iCal export file", (done) ->
client.get "export/my%20calendar.ics", (error, response, body) =>
@body = body
done()
, false
it "Then it should contains my event", ->
dtstamp = moment.tz moment(), 'UTC'
dtstampValue = "DTSTAMP:#{dtstamp.format 'YYYYMMDDTHHmmss[Z]'}"
expectedBody = expectedContent
.replace '[id-4]', ids[0]
# DTSTAMP is new all the time in our implementation
.replace /DTSTAMP:.*/g, dtstampValue
@body.should.equal expectedBody
describe "POST /import/ical the calendar name is found in iCal", ->
it "When I send an iCal file to import", (done) ->
client.sendFile "import/ical", "./test/calendar.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 3
it "It should have the default calendar name", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'my calendar'
it "When I send an iCal file from Apple to import", (done) ->
client.sendFile "import/ical", "./test/apple.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 2
it "It should have the default calendar name", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'my calendar'
it "When I send an iCal file from Google to import", (done) ->
client.sendFile "import/ical", "./test/google.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 2
it "It should have the calendar name from the file", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', '<EMAIL>'
describe "POST /import/ical the calendar name is not found, with existing events", ->
before helpers.cleanDb
after helpers.cleanDb
it "Given there are existing tags", (done) ->
rawEvent =
description: 'Something to do'
start: "2013-04-25T15:30:00.000Z"
end: "2013-04-25T18:30:00.000Z"
place: "place"
tags: ['zsomething']
rawEvent2 =
description: 'Something to do'
start: "2013-04-25T18:30:00.000Z"
end: "2013-04-25T19:30:00.000Z"
place: "place"
tags: ['asomething']
helpers.createEventFromObject rawEvent, ->
helpers.createEventFromObject rawEvent2, ->
done()
it "When I send an iCal file to import", (done) ->
client.sendFile "import/ical", "./test/calendar.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 3
it "It should have the first calendar name by alphabetical order", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'asomething'
| true | should = require 'should'
async = require 'async'
moment = require 'moment-timezone'
Client = require('request-json').JsonClient
client = new Client "http://localhost:8888/"
clientDS = new Client 'http://localhost:9101'
helpers = require './helpers'
{ICalParser, VCalendar, VAlarm, VTodo, VEvent} = require 'cozy-ical'
# THIS TEST DUPPLICATE cozy-ical's
#@TODO : improve test there, remove here
expectedContent = """
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Cozy//NONSGML Cozy Calendar//EN
X-WR-CALNAME:my calendar
BEGIN:VEVENT
UID:[id-4]
DTSTAMP:20141110T090600Z
DTSTART:20130609T150000Z
DTEND:20130610T150000Z
LOCATION:my place
SUMMARY:my description
END:VEVENT
END:VCALENDAR
""".replace(/\n/g, '\r\n')
describe "Calendar export/import", ->
before helpers.before
after helpers.after
describe 'Resources', ->
describe "GET /export/my calendar.ics", ->
ids = null
before helpers.cleanDb
before (done) ->
async.series [
helpers.createEvent("2013-06-09T15:00:00.000Z",
"2013-06-10T15:00:00.000Z",
"my place", "my description",
"Indian/Cocos")
], (err, results) ->
ids = results.map (doc) -> doc.id
done()
it "When I request for iCal export file", (done) ->
client.get "export/my%20calendar.ics", (error, response, body) =>
@body = body
done()
, false
it "Then it should contains my event", ->
dtstamp = moment.tz moment(), 'UTC'
dtstampValue = "DTSTAMP:#{dtstamp.format 'YYYYMMDDTHHmmss[Z]'}"
expectedBody = expectedContent
.replace '[id-4]', ids[0]
# DTSTAMP is new all the time in our implementation
.replace /DTSTAMP:.*/g, dtstampValue
@body.should.equal expectedBody
describe "POST /import/ical the calendar name is found in iCal", ->
it "When I send an iCal file to import", (done) ->
client.sendFile "import/ical", "./test/calendar.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 3
it "It should have the default calendar name", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'my calendar'
it "When I send an iCal file from Apple to import", (done) ->
client.sendFile "import/ical", "./test/apple.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 2
it "It should have the default calendar name", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'my calendar'
it "When I send an iCal file from Google to import", (done) ->
client.sendFile "import/ical", "./test/google.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 2
it "It should have the calendar name from the file", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'PI:EMAIL:<EMAIL>END_PI'
describe "POST /import/ical the calendar name is not found, with existing events", ->
before helpers.cleanDb
after helpers.cleanDb
it "Given there are existing tags", (done) ->
rawEvent =
description: 'Something to do'
start: "2013-04-25T15:30:00.000Z"
end: "2013-04-25T18:30:00.000Z"
place: "place"
tags: ['zsomething']
rawEvent2 =
description: 'Something to do'
start: "2013-04-25T18:30:00.000Z"
end: "2013-04-25T19:30:00.000Z"
place: "place"
tags: ['asomething']
helpers.createEventFromObject rawEvent, ->
helpers.createEventFromObject rawEvent2, ->
done()
it "When I send an iCal file to import", (done) ->
client.sendFile "import/ical", "./test/calendar.ics", (err, res, body) =>
should.not.exist err
res.statusCode.should.equal 200
@body = JSON.parse body
done()
it "Then it sends to me the parsing result", ->
@body.events.length.should.equal 3
it "It should have the first calendar name by alphabetical order", ->
should.exist @body.calendar
@body.calendar.should.have.property 'name', 'asomething'
|
[
{
"context": "= undefined\n $timeout = undefined\n validName = 'Paul'\n invalidName = 'Pa'\n\n beforeEach module('res.s",
"end": 112,
"score": 0.9996418952941895,
"start": 108,
"tag": "NAME",
"value": "Paul"
},
{
"context": "= undefined\n validName = 'Paul'\n invalidName = 'Pa'... | test/showErrors.spec.coffee | Cadre5/res-show-errors | 0 | describe 'showErrors', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = 'Paul'
invalidName = 'Pa'
beforeEach module('res.showErrors')
beforeEach inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
compileEl = ->
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="firstName" ng-model="firstName" ng-minlength="3" class="form-control" />
</div>
<div id="last-name-group" class="form-group" res-show-errors="{ showSuccess: true }">
<input type="text" name="lastName" ng-model="lastName" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
angular.element(document.body).append el
$scope.$digest()
el
describe 'directive does not contain an input element with a form-control class and name attribute', ->
it 'throws an exception', ->
expect( ->
$compile('<form name="userFor"><div class="form-group" res-show-errors><input type="text" name="firstName"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'form-control' class"
it 'directive can find \'form-control\' in nested divs', ->
expect( ->
$compile('<form name="userFor"><div class="form-group" res-show-errors><div class="wrapper-container"><input type="text" class="form-control" name="firstName"></input></div></div></form>')($scope)
).not.toThrow
it "throws an exception if the element doesn't have the form-group or input-group class", ->
expect( ->
$compile('<div res-show-errors></div>')($scope)
).toThrow "show-errors element does not have the 'form-group' or 'input-group' class"
it "doesn't throw an exception if the element has the input-group class", ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="form-control" type="text" name="firstName"></input></div></form>')($scope)
).not.toThrow()
it "doesn't throw an exception if the element doesn't have the form-group class but uses the skipFormGroupCheck option", ->
expect( ->
$compile('<form name="userForm"><div res-show-errors="{ skipFormGroupCheck: true }"><input class="form-control" type="text" name="firstName"></input></div></form>')($scope)
).not.toThrow()
it "throws an exception if the element isn't in a form tag", ->
expect( ->
$compile('<div class="form-group" res-show-errors><input type="text" name="firstName"></input></div>')($scope)
).toThrow()
describe '$pristine && $invalid', ->
it 'has-error is absent', ->
el = compileEl()
expectFormGroupHasErrorClass(el).toBe false
describe '$dirty && $invalid && blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '$dirty && $invalid && not blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'keydown'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred then becomes $invalid before blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
expectFormGroupHasErrorClass(el).toBe true
describe '$valid && blurred then becomes $valid before blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
$scope.$apply ->
$scope.userForm.firstName.$setViewValue validName
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred then becomes $invalid after blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '$valid && blurred then $invalid after blurred then $valid after blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && other input is $invalid && blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
$scope.userForm.lastName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$invalid && showErrorsCheckValidity is set before blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
expectFormGroupHasErrorClass(el).toBe true
describe 'showErrorsCheckValidity is called twice', ->
it 'correctly applies the has-error class', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
$scope.$apply ->
$scope.showErrorsCheckValidity = true
expectFormGroupHasErrorClass(el).toBe true
describe 'showErrorsCheckValidity with form name', ->
it 'correctly applies when form name matches', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue(invalidName)
$scope.$broadcast('show-errors-check-validity', 'userForm')
expectFormGroupHasErrorClass(el).toBe true
it 'correctly skips when form name differs', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue(invalidName)
$scope.$broadcast('show-errors-check-validity', 'differentForm')
expectFormGroupHasErrorClass(el).toBe false
describe 'showErrorsReset', ->
it 'removes has-error', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectFormGroupHasErrorClass(el).toBe false
describe 'showErrorsReset then invalid without blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
expectFormGroupHasErrorClass(el).toBe false
describe 'call showErrorsReset multiple times', ->
it 'removes has-error', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectFormGroupHasErrorClass(el).toBe false
describe 'form input with dynamic name', ->
it 'should get name correctly', ->
$scope.uniqueId = 0
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="firstName" ng-model="firstName" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
$scope.uniqueId = 5
angular.element(find(el, '[name=firstName]')).triggerHandler 'blur'
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass('show-errors')
it 'should show errors when broadcasting check validity', ->
$scope.uniqueId = 0
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="firstName" ng-model="firstName" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
$scope.uniqueId = 0
$scope.$digest()
$scope.userForm['firstName'].$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
angular.element(find(el, '[name=firstName]')).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '{showSuccess: true} option', ->
describe '$pristine && $valid', ->
it 'has-success is absent', ->
el = compileEl()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe '$dirty && $valid && blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe '$dirty && $invalid && blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe '$invalid && blurred then becomes $valid before blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.lastName.$setViewValue invalidName
$scope.$apply ->
$scope.userForm.lastName.$setViewValue validName
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe '$valid && showErrorsCheckValidity is set before blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
$scope.$broadcast 'show-errors-check-validity'
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe 'showErrorsReset', ->
it 'removes has-success', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe 'showErrorsConfig with alternate form control class', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = 'Paul'
invalidName = 'Pa'
beforeEach ->
testModule = angular.module 'testModule', []
testModule.config (resShowErrorsConfigProvider) ->
resShowErrorsConfigProvider.formControlClass 'prj-form-control'
resShowErrorsConfigProvider.skipFormGroupCheck true
module 'res.showErrors', 'testModule'
inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
describe 'when resShowErrorsConfig.formControlClass is set', ->
describe 'and no options are given', ->
it 'should not throw error', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="prj-form-control" type="text" name="firstName"></input></div></form>')($scope)
).not.toThrow()
it 'should throw error if class is not found', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="form-control" type="text" name="firstName"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'prj-form-control' class"
describe 'and options are given', ->
it 'should throw exceptions if override dosent match class names', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors="{formControlClass: \'blah-blah\'}"><input class="form-control" type="text" name="firstName"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'blah-blah' class"
it 'should find the name if given override', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors="{formControlClass: \'blah-blah\'}"><input class="blah-blah" type="text" name="firstName"></input></div></form>')($scope)
).not.toThrow()
describe 'when resShowErrorsConfig.skipFormGroupCheck is set', ->
describe 'and no options are given', ->
it 'should not throw an error', ->
expect( ->
$compile('<form name="userForm"><div res-show-errors><input class="prj-form-control" type="text" name="firstName"></input></div></form>')($scope)
).not.toThrow()
describe 'and options are given', ->
# TODO: local options don't override the skip check to false because the compile time check of this property only checks that it exists, not weither it's true or false.
xit 'should throw an error', ->
expect( ->
$compile('<form name="userForm"><div res-show-errors="{skipFormGroupCheck: \'false\'}"><input class="prj-form-control" type="text" name="firstName"></input></div></form>')($scope)
).toThrow()
describe 'showErrorsConfig', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = 'Paul'
invalidName = 'Pa'
beforeEach ->
testModule = angular.module 'testModule', []
testModule.config (resShowErrorsConfigProvider) ->
resShowErrorsConfigProvider.showSuccess true
resShowErrorsConfigProvider.trigger 'keypress'
resShowErrorsConfigProvider.errorClass 'res-val-error'
module 'res.showErrors', 'testModule'
inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
compileEl = ->
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors="{showSuccess: false, trigger: \'blur\'}">
<input type="text" name="firstName" ng-model="firstName" ng-minlength="3" class="form-control" />
</div>
<div id="last-name-group" class="form-group" res-show-errors>
<input type="text" name="lastName" ng-model="lastName" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
angular.element(document.body).append el
$scope.$digest()
el
describe 'when showErrorsConfig.showSuccess is true', ->
describe 'and no options given', ->
it 'show-success class is applied', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe 'when showErrorsConfig.errorClass is "res-val-error"', ->
describe 'and no options given', ->
it '"res-val-error" class is applied', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasErrorClass(el, 'res-val-error').toBe true
describe 'when showErrorsConfig.showSuccess is true', ->
describe 'but options.showSuccess is false', ->
it 'show-success class is not applied', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectFirstNameFormGroupHasSuccessClass(el).toBe false
describe 'when showErrorsConfig.trigger is "keypress"', ->
describe 'and no options given', ->
it 'validates the value on the first keypress', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasErrorClass(el, 'res-val-error').toBe true
describe 'but options.trigger is "blur"', ->
it 'does not validate the value on keypress', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectFirstNameFormGroupHasErrorClass(el, 'res-val-error').toBe false
find = (el, selector) ->
el[0].querySelector selector
firstNameEl = (el) ->
find el, '[name=firstName]'
lastNameEl = (el) ->
find el, '[name=lastName]'
expectFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
expectFirstNameFormGroupHasSuccessClass = (el) ->
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass('has-success')
expectLastNameFormGroupHasSuccessClass = (el) ->
formGroup = el[0].querySelector '[id=last-name-group]'
expect angular.element(formGroup).hasClass('has-success')
expectFirstNameFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
expectLastNameFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=last-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
| 152594 | describe 'showErrors', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = '<NAME>'
invalidName = '<NAME>'
beforeEach module('res.showErrors')
beforeEach inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
compileEl = ->
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="<NAME>" ng-model="<NAME>" ng-minlength="3" class="form-control" />
</div>
<div id="last-name-group" class="form-group" res-show-errors="{ showSuccess: true }">
<input type="text" name="<NAME>" ng-model="<NAME>" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
angular.element(document.body).append el
$scope.$digest()
el
describe 'directive does not contain an input element with a form-control class and name attribute', ->
it 'throws an exception', ->
expect( ->
$compile('<form name="userFor"><div class="form-group" res-show-errors><input type="text" name="<NAME>"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'form-control' class"
it 'directive can find \'form-control\' in nested divs', ->
expect( ->
$compile('<form name="userFor"><div class="form-group" res-show-errors><div class="wrapper-container"><input type="text" class="form-control" name="<NAME>"></input></div></div></form>')($scope)
).not.toThrow
it "throws an exception if the element doesn't have the form-group or input-group class", ->
expect( ->
$compile('<div res-show-errors></div>')($scope)
).toThrow "show-errors element does not have the 'form-group' or 'input-group' class"
it "doesn't throw an exception if the element has the input-group class", ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="form-control" type="text" name="<NAME>"></input></div></form>')($scope)
).not.toThrow()
it "doesn't throw an exception if the element doesn't have the form-group class but uses the skipFormGroupCheck option", ->
expect( ->
$compile('<form name="userForm"><div res-show-errors="{ skipFormGroupCheck: true }"><input class="form-control" type="text" name="<NAME>"></input></div></form>')($scope)
).not.toThrow()
it "throws an exception if the element isn't in a form tag", ->
expect( ->
$compile('<div class="form-group" res-show-errors><input type="text" name="<NAME>"></input></div>')($scope)
).toThrow()
describe '$pristine && $invalid', ->
it 'has-error is absent', ->
el = compileEl()
expectFormGroupHasErrorClass(el).toBe false
describe '$dirty && $invalid && blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '$dirty && $invalid && not blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'keydown'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred then becomes $invalid before blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
expectFormGroupHasErrorClass(el).toBe true
describe '$valid && blurred then becomes $valid before blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
$scope.$apply ->
$scope.userForm.firstName.$setViewValue validName
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred then becomes $invalid after blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '$valid && blurred then $invalid after blurred then $valid after blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && other input is $invalid && blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
$scope.userForm.lastName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$invalid && showErrorsCheckValidity is set before blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
expectFormGroupHasErrorClass(el).toBe true
describe 'showErrorsCheckValidity is called twice', ->
it 'correctly applies the has-error class', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
$scope.$apply ->
$scope.showErrorsCheckValidity = true
expectFormGroupHasErrorClass(el).toBe true
describe 'showErrorsCheckValidity with form name', ->
it 'correctly applies when form name matches', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue(invalidName)
$scope.$broadcast('show-errors-check-validity', 'userForm')
expectFormGroupHasErrorClass(el).toBe true
it 'correctly skips when form name differs', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue(invalidName)
$scope.$broadcast('show-errors-check-validity', 'differentForm')
expectFormGroupHasErrorClass(el).toBe false
describe 'showErrorsReset', ->
it 'removes has-error', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectFormGroupHasErrorClass(el).toBe false
describe 'showErrorsReset then invalid without blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
expectFormGroupHasErrorClass(el).toBe false
describe 'call showErrorsReset multiple times', ->
it 'removes has-error', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectFormGroupHasErrorClass(el).toBe false
describe 'form input with dynamic name', ->
it 'should get name correctly', ->
$scope.uniqueId = 0
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="<NAME>" ng-model="<NAME>" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
$scope.uniqueId = 5
angular.element(find(el, '[name=firstName]')).triggerHandler 'blur'
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass('show-errors')
it 'should show errors when broadcasting check validity', ->
$scope.uniqueId = 0
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="<NAME>" ng-model="<NAME>" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
$scope.uniqueId = 0
$scope.$digest()
$scope.userForm['firstName'].$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
angular.element(find(el, '[name=firstName]')).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '{showSuccess: true} option', ->
describe '$pristine && $valid', ->
it 'has-success is absent', ->
el = compileEl()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe '$dirty && $valid && blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe '$dirty && $invalid && blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe '$invalid && blurred then becomes $valid before blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.lastName.$setViewValue invalidName
$scope.$apply ->
$scope.userForm.lastName.$setViewValue validName
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe '$valid && showErrorsCheckValidity is set before blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
$scope.$broadcast 'show-errors-check-validity'
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe 'showErrorsReset', ->
it 'removes has-success', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe 'showErrorsConfig with alternate form control class', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = '<NAME>'
invalidName = '<NAME>'
beforeEach ->
testModule = angular.module 'testModule', []
testModule.config (resShowErrorsConfigProvider) ->
resShowErrorsConfigProvider.formControlClass 'prj-form-control'
resShowErrorsConfigProvider.skipFormGroupCheck true
module 'res.showErrors', 'testModule'
inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
describe 'when resShowErrorsConfig.formControlClass is set', ->
describe 'and no options are given', ->
it 'should not throw error', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="prj-form-control" type="text" name="<NAME>"></input></div></form>')($scope)
).not.toThrow()
it 'should throw error if class is not found', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="form-control" type="text" name="<NAME>"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'prj-form-control' class"
describe 'and options are given', ->
it 'should throw exceptions if override dosent match class names', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors="{formControlClass: \'blah-blah\'}"><input class="form-control" type="text" name="<NAME>"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'blah-blah' class"
it 'should find the name if given override', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors="{formControlClass: \'blah-blah\'}"><input class="blah-blah" type="text" name="<NAME>"></input></div></form>')($scope)
).not.toThrow()
describe 'when resShowErrorsConfig.skipFormGroupCheck is set', ->
describe 'and no options are given', ->
it 'should not throw an error', ->
expect( ->
$compile('<form name="userForm"><div res-show-errors><input class="prj-form-control" type="text" name="<NAME>"></input></div></form>')($scope)
).not.toThrow()
describe 'and options are given', ->
# TODO: local options don't override the skip check to false because the compile time check of this property only checks that it exists, not weither it's true or false.
xit 'should throw an error', ->
expect( ->
$compile('<form name="userForm"><div res-show-errors="{skipFormGroupCheck: \'false\'}"><input class="prj-form-control" type="text" name="firstName"></input></div></form>')($scope)
).toThrow()
describe 'showErrorsConfig', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = '<NAME>'
invalidName = '<NAME>'
beforeEach ->
testModule = angular.module 'testModule', []
testModule.config (resShowErrorsConfigProvider) ->
resShowErrorsConfigProvider.showSuccess true
resShowErrorsConfigProvider.trigger 'keypress'
resShowErrorsConfigProvider.errorClass 'res-val-error'
module 'res.showErrors', 'testModule'
inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
compileEl = ->
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors="{showSuccess: false, trigger: \'blur\'}">
<input type="text" name="firstName" ng-model="firstName" ng-minlength="3" class="form-control" />
</div>
<div id="last-name-group" class="form-group" res-show-errors>
<input type="text" name="lastName" ng-model="lastName" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
angular.element(document.body).append el
$scope.$digest()
el
describe 'when showErrorsConfig.showSuccess is true', ->
describe 'and no options given', ->
it 'show-success class is applied', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe 'when showErrorsConfig.errorClass is "res-val-error"', ->
describe 'and no options given', ->
it '"res-val-error" class is applied', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasErrorClass(el, 'res-val-error').toBe true
describe 'when showErrorsConfig.showSuccess is true', ->
describe 'but options.showSuccess is false', ->
it 'show-success class is not applied', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectFirstNameFormGroupHasSuccessClass(el).toBe false
describe 'when showErrorsConfig.trigger is "keypress"', ->
describe 'and no options given', ->
it 'validates the value on the first keypress', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasErrorClass(el, 'res-val-error').toBe true
describe 'but options.trigger is "blur"', ->
it 'does not validate the value on keypress', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectFirstNameFormGroupHasErrorClass(el, 'res-val-error').toBe false
find = (el, selector) ->
el[0].querySelector selector
firstNameEl = (el) ->
find el, '[name=firstName]'
lastNameEl = (el) ->
find el, '[name=lastName]'
expectFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
expectFirstNameFormGroupHasSuccessClass = (el) ->
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass('has-success')
expectLastNameFormGroupHasSuccessClass = (el) ->
formGroup = el[0].querySelector '[id=last-name-group]'
expect angular.element(formGroup).hasClass('has-success')
expectFirstNameFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
expectLastNameFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=last-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
| true | describe 'showErrors', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = 'PI:NAME:<NAME>END_PI'
invalidName = 'PI:NAME:<NAME>END_PI'
beforeEach module('res.showErrors')
beforeEach inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
compileEl = ->
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="PI:NAME:<NAME>END_PI" ng-model="PI:NAME:<NAME>END_PI" ng-minlength="3" class="form-control" />
</div>
<div id="last-name-group" class="form-group" res-show-errors="{ showSuccess: true }">
<input type="text" name="PI:NAME:<NAME>END_PI" ng-model="PI:NAME:<NAME>END_PI" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
angular.element(document.body).append el
$scope.$digest()
el
describe 'directive does not contain an input element with a form-control class and name attribute', ->
it 'throws an exception', ->
expect( ->
$compile('<form name="userFor"><div class="form-group" res-show-errors><input type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'form-control' class"
it 'directive can find \'form-control\' in nested divs', ->
expect( ->
$compile('<form name="userFor"><div class="form-group" res-show-errors><div class="wrapper-container"><input type="text" class="form-control" name="PI:NAME:<NAME>END_PI"></input></div></div></form>')($scope)
).not.toThrow
it "throws an exception if the element doesn't have the form-group or input-group class", ->
expect( ->
$compile('<div res-show-errors></div>')($scope)
).toThrow "show-errors element does not have the 'form-group' or 'input-group' class"
it "doesn't throw an exception if the element has the input-group class", ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="form-control" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).not.toThrow()
it "doesn't throw an exception if the element doesn't have the form-group class but uses the skipFormGroupCheck option", ->
expect( ->
$compile('<form name="userForm"><div res-show-errors="{ skipFormGroupCheck: true }"><input class="form-control" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).not.toThrow()
it "throws an exception if the element isn't in a form tag", ->
expect( ->
$compile('<div class="form-group" res-show-errors><input type="text" name="PI:NAME:<NAME>END_PI"></input></div>')($scope)
).toThrow()
describe '$pristine && $invalid', ->
it 'has-error is absent', ->
el = compileEl()
expectFormGroupHasErrorClass(el).toBe false
describe '$dirty && $invalid && blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '$dirty && $invalid && not blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'keydown'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred then becomes $invalid before blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
expectFormGroupHasErrorClass(el).toBe true
describe '$valid && blurred then becomes $valid before blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
$scope.$apply ->
$scope.userForm.firstName.$setViewValue validName
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && blurred then becomes $invalid after blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '$valid && blurred then $invalid after blurred then $valid after blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$valid && other input is $invalid && blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
$scope.userForm.lastName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe false
describe '$invalid && showErrorsCheckValidity is set before blurred', ->
it 'has-error is present', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
expectFormGroupHasErrorClass(el).toBe true
describe 'showErrorsCheckValidity is called twice', ->
it 'correctly applies the has-error class', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.userForm.firstName.$setViewValue invalidName
$scope.$apply ->
$scope.showErrorsCheckValidity = true
expectFormGroupHasErrorClass(el).toBe true
describe 'showErrorsCheckValidity with form name', ->
it 'correctly applies when form name matches', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue(invalidName)
$scope.$broadcast('show-errors-check-validity', 'userForm')
expectFormGroupHasErrorClass(el).toBe true
it 'correctly skips when form name differs', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue(invalidName)
$scope.$broadcast('show-errors-check-validity', 'differentForm')
expectFormGroupHasErrorClass(el).toBe false
describe 'showErrorsReset', ->
it 'removes has-error', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectFormGroupHasErrorClass(el).toBe false
describe 'showErrorsReset then invalid without blurred', ->
it 'has-error is absent', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
$scope.$apply ->
$scope.userForm.firstName.$setViewValue invalidName
expectFormGroupHasErrorClass(el).toBe false
describe 'call showErrorsReset multiple times', ->
it 'removes has-error', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectFormGroupHasErrorClass(el).toBe false
describe 'form input with dynamic name', ->
it 'should get name correctly', ->
$scope.uniqueId = 0
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="PI:NAME:<NAME>END_PI" ng-model="PI:NAME:<NAME>END_PI" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
$scope.uniqueId = 5
angular.element(find(el, '[name=firstName]')).triggerHandler 'blur'
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass('show-errors')
it 'should show errors when broadcasting check validity', ->
$scope.uniqueId = 0
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors>
<input type="text" name="PI:NAME:<NAME>END_PI" ng-model="PI:NAME:<NAME>END_PI" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
$scope.uniqueId = 0
$scope.$digest()
$scope.userForm['firstName'].$setViewValue invalidName
$scope.$broadcast 'show-errors-check-validity'
angular.element(find(el, '[name=firstName]')).triggerHandler 'blur'
expectFormGroupHasErrorClass(el).toBe true
describe '{showSuccess: true} option', ->
describe '$pristine && $valid', ->
it 'has-success is absent', ->
el = compileEl()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe '$dirty && $valid && blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe '$dirty && $invalid && blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe '$invalid && blurred then becomes $valid before blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$apply ->
$scope.userForm.lastName.$setViewValue invalidName
$scope.$apply ->
$scope.userForm.lastName.$setViewValue validName
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe '$valid && showErrorsCheckValidity is set before blurred', ->
it 'has-success is present', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
$scope.$broadcast 'show-errors-check-validity'
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe 'showErrorsReset', ->
it 'removes has-success', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'blur'
$scope.$broadcast 'show-errors-reset'
$timeout.flush()
expectLastNameFormGroupHasSuccessClass(el).toBe false
describe 'showErrorsConfig with alternate form control class', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = 'PI:NAME:<NAME>END_PI'
invalidName = 'PI:NAME:<NAME>END_PI'
beforeEach ->
testModule = angular.module 'testModule', []
testModule.config (resShowErrorsConfigProvider) ->
resShowErrorsConfigProvider.formControlClass 'prj-form-control'
resShowErrorsConfigProvider.skipFormGroupCheck true
module 'res.showErrors', 'testModule'
inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
describe 'when resShowErrorsConfig.formControlClass is set', ->
describe 'and no options are given', ->
it 'should not throw error', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="prj-form-control" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).not.toThrow()
it 'should throw error if class is not found', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors><input class="form-control" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'prj-form-control' class"
describe 'and options are given', ->
it 'should throw exceptions if override dosent match class names', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors="{formControlClass: \'blah-blah\'}"><input class="form-control" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).toThrow "show-errors element has no child input elements with a 'name' attribute and a 'blah-blah' class"
it 'should find the name if given override', ->
expect( ->
$compile('<form name="userForm"><div class="input-group" res-show-errors="{formControlClass: \'blah-blah\'}"><input class="blah-blah" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).not.toThrow()
describe 'when resShowErrorsConfig.skipFormGroupCheck is set', ->
describe 'and no options are given', ->
it 'should not throw an error', ->
expect( ->
$compile('<form name="userForm"><div res-show-errors><input class="prj-form-control" type="text" name="PI:NAME:<NAME>END_PI"></input></div></form>')($scope)
).not.toThrow()
describe 'and options are given', ->
# TODO: local options don't override the skip check to false because the compile time check of this property only checks that it exists, not weither it's true or false.
xit 'should throw an error', ->
expect( ->
$compile('<form name="userForm"><div res-show-errors="{skipFormGroupCheck: \'false\'}"><input class="prj-form-control" type="text" name="firstName"></input></div></form>')($scope)
).toThrow()
describe 'showErrorsConfig', ->
$compile = undefined
$scope = undefined
$timeout = undefined
validName = 'PI:NAME:<NAME>END_PI'
invalidName = 'PI:NAME:<NAME>END_PI'
beforeEach ->
testModule = angular.module 'testModule', []
testModule.config (resShowErrorsConfigProvider) ->
resShowErrorsConfigProvider.showSuccess true
resShowErrorsConfigProvider.trigger 'keypress'
resShowErrorsConfigProvider.errorClass 'res-val-error'
module 'res.showErrors', 'testModule'
inject((_$compile_, _$rootScope_, _$timeout_) ->
$compile = _$compile_
$scope = _$rootScope_
$timeout = _$timeout_
)
compileEl = ->
el = $compile(
'<form name="userForm">
<div id="first-name-group" class="form-group" res-show-errors="{showSuccess: false, trigger: \'blur\'}">
<input type="text" name="firstName" ng-model="firstName" ng-minlength="3" class="form-control" />
</div>
<div id="last-name-group" class="form-group" res-show-errors>
<input type="text" name="lastName" ng-model="lastName" ng-minlength="3" class="form-control" />
</div>
</form>'
)($scope)
angular.element(document.body).append el
$scope.$digest()
el
describe 'when showErrorsConfig.showSuccess is true', ->
describe 'and no options given', ->
it 'show-success class is applied', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue validName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasSuccessClass(el).toBe true
describe 'when showErrorsConfig.errorClass is "res-val-error"', ->
describe 'and no options given', ->
it '"res-val-error" class is applied', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasErrorClass(el, 'res-val-error').toBe true
describe 'when showErrorsConfig.showSuccess is true', ->
describe 'but options.showSuccess is false', ->
it 'show-success class is not applied', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue validName
angular.element(firstNameEl(el)).triggerHandler 'blur'
$scope.$digest()
expectFirstNameFormGroupHasSuccessClass(el).toBe false
describe 'when showErrorsConfig.trigger is "keypress"', ->
describe 'and no options given', ->
it 'validates the value on the first keypress', ->
el = compileEl()
$scope.userForm.lastName.$setViewValue invalidName
angular.element(lastNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectLastNameFormGroupHasErrorClass(el, 'res-val-error').toBe true
describe 'but options.trigger is "blur"', ->
it 'does not validate the value on keypress', ->
el = compileEl()
$scope.userForm.firstName.$setViewValue invalidName
angular.element(firstNameEl(el)).triggerHandler 'keypress'
$scope.$digest()
expectFirstNameFormGroupHasErrorClass(el, 'res-val-error').toBe false
find = (el, selector) ->
el[0].querySelector selector
firstNameEl = (el) ->
find el, '[name=firstName]'
lastNameEl = (el) ->
find el, '[name=lastName]'
expectFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
expectFirstNameFormGroupHasSuccessClass = (el) ->
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass('has-success')
expectLastNameFormGroupHasSuccessClass = (el) ->
formGroup = el[0].querySelector '[id=last-name-group]'
expect angular.element(formGroup).hasClass('has-success')
expectFirstNameFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=first-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
expectLastNameFormGroupHasErrorClass = (el, errorClass) ->
_errorClass = 'has-error'
if errorClass?
_errorClass = errorClass
formGroup = el[0].querySelector '[id=last-name-group]'
expect angular.element(formGroup).hasClass(_errorClass)
|
[
{
"context": "ty: quantity,\n taxRate: {\n name: \"some_name\",\n amount: 0.19,\n includedInPri",
"end": 5668,
"score": 0.9954528212547302,
"start": 5659,
"tag": "NAME",
"value": "some_name"
},
{
"context": " }\n },\n taxRate: {\n ... | src/coffee/sphere_test_kit.coffee | sphereio/sphere-message-processing | 0 | Rx = require 'rx'
Q = require 'q'
{_} = require 'underscore'
{ErrorStatusCode} = require './sphere_service'
{LoggerFactory} = require '../lib/logger'
class SphereTestKit
stateDefs: [
{key: "A", transitions: ["B"]}
{key: "B", transitions: ["C", "D"]}
{key: "C", transitions: ["D"]}
{key: "D", transitions: ["E"]}
{key: "E", transitions: ["A"]}
{key: "ReadyForShipment", transitions: ["Pickup"]}
{key: "Pickup", transitions: ["Shipped"]}
{key: "Shipped", transitions: ["Finished"]}
{key: "Finished", transitions: ["ReadyForShipment"]}
{key: "canceled"}
{key: "returnNotApproved"}
{key: "closed"}
{key: "picking"}
{key: "backorder"}
{key: "readyToShip"}
{key: "shipped"}
{key: "returned"}
{key: "returnApproved"}
{key: "lost"}
{key: "lossApproved"}
{key: "lossNotApproved"}
]
channelDefs: [
{key: 'master', roles: ['OrderImport']}
]
taxCategoryDefs: [
{name: 'Test Category', rates: [{name: "Test Rate", amount: 0.19, includedInPrice: false, country: 'DE'}]}
]
constructor: (@sphere) ->
@logger = LoggerFactory.getLogger "test-kit.#{@sphere.getSourceInfo().prefix}"
setupProject: (onlyOneOrderWith3LineItems = false) ->
Q.all [
@configureStates()
@configureChannels()
@configureProduct()
@configureTaxCategory()
]
.then =>
if not onlyOneOrderWith3LineItems
orders = _.map _.range(1, 6), (idx) =>
@createTestOrder idx
Q.all orders
.then (orders) =>
@orders = _.map orders, (os) ->
[m, r] = os
{retailerOrder: r, masterOrder: m}
@logger.info "Orders"
_.each @orders, (o, i) =>
@logger.info "#{i} Retailer: #{o.retailerOrder.id}, Master: #{o.masterOrder.id}"
@logger.info _.map(@orders, (o)-> "\"#{o.retailerOrder.id}\"").join(',')
this
else
@sphere.importOrder @_orderJson(3)
.then (order) =>
@order = order
@logger.info "Order created: #{order.id}"
this
.then =>
if not onlyOneOrderWith3LineItems
@addStock(@orders[0].retailerOrder.lineItems[0].variant.sku, 1000000)
else
Q()
.then =>
@logger.info "Project setup finished"
this
ref: (type, obj) ->
{typeId: type, id: obj.id}
stateByKey: (key) ->
if key == 'Initial'
@initialState
else
_.find @states, (s) -> s.key == key
stateById: (id) ->
_.find @states, (s) -> s.id == id
abcStateSwitch: (currKey) ->
switch currKey
when'A' then 'B'
when'B' then 'D'
when'D' then 'E'
when'E' then 'A'
else throw new Error("Unsupported state #{currKey}")
shipmentStateSwitch: (currKey) ->
switch currKey
when'ReadyForShipment' then 'Pickup'
when'Pickup' then 'Shipped'
when'Shipped' then 'Finished'
when'Finished' then 'ReadyForShipment'
else throw new Error("Unsupported state #{currKey}")
transitionRetailerOrderStates: (first, newStateFn) ->
ps = _.map @orders, (os) =>
currStates = _.filter os.retailerOrder.lineItems[0].state, (s) => s.state.id != @initialState.id
p = if _.isEmpty(currStates)
@sphere.transitionLineItemState os.retailerOrder, os.retailerOrder.lineItems[0].id, 20, @ref('state', @initialState), @ref('state', @stateByKey(first))
else
currStateId = currStates[0].state.id
currStateQ = currStates[0].quantity
newState = @stateByKey newStateFn(@stateById(currStateId).key)
@sphere.transitionLineItemState os.retailerOrder, os.retailerOrder.lineItems[0].id, currStateQ, @ref('state', {id: currStateId}), @ref('state', newState)
p
.then (newOrder) ->
os.retailerOrder = newOrder
newOrder
Q.all ps
transitionStatePath: (lineItemIdx, quantity, path) ->
reduceFn = (acc, to) =>
acc.then ([from, order]) =>
@sphere.transitionLineItemState order, order.lineItems[lineItemIdx].id, quantity, @ref('state', @stateByKey(from)), @ref('state', @stateByKey(to))
.then (newOrder) ->
[to, newOrder]
_.reduce _.tail(path), reduceFn, Q([_.head(path), @order])
.then ([endKey, order]) =>
@order = order
order
scheduleStateTransitions: (first, stateSwitch) ->
Rx.Observable.interval 2000
.subscribe =>
@transitionRetailerOrderStates(first, stateSwitch)
.then =>
@logger.info "Transition finished"
.fail (error) =>
@logger.error "Error during state transition", error
.done()
configureStates: ->
Q.all [
@sphere.ensureStates [{key: "Initial"}]
@sphere.ensureStates @stateDefs
]
.then (states) =>
[[@initialState], @states] = states
@logger.info "States configured"
[@initialState, @states]
configureChannels: ->
@sphere.ensureChannels @channelDefs
.then (channels) =>
[@masterChannel] = channels
@logger.info "Channels configured"
@masterChannel
configureProduct: () ->
@sphere.getFirstProduct()
.then (product) =>
@product = product
@logger.info "Product found"
product
configureTaxCategory: () ->
@sphere.ensureTaxCategories @taxCategoryDefs
.then (tc) =>
[@taxCategory] = tc
@logger.info "Tax category configured"
tc
_orderJson: (lineItemCount = 1, quantity = 30) ->
lineItems = _.map _.range(0, lineItemCount), (idx) =>
{
variant: {
sku: @product.masterData.staged.masterVariant.sku
},
quantity: quantity,
taxRate: {
name: "some_name",
amount: 0.19,
includedInPrice: true,
country: "US",
id: @taxCategory.id
},
name: {
en: "Some Product #{idx}"
},
price: {
country: "US",
value: {
centAmount: 1190,
currencyCode: "USD"
}
}
}
{
lineItems: lineItems,
totalPrice: {
currencyCode: "USD",
centAmount: 1190
},
shippingAddress: {
country: "US"
},
shippingInfo: {
shippingMethodName: 'Normal',
price: {
centAmount: 1000,
currencyCode: "EUR"
},
shippingRate: {
price: {
centAmount: 1000,
currencyCode: "EUR"
}
},
taxRate: {
name: "some_name",
amount: 0.19,
includedInPrice: true,
country: "US",
id: @taxCategory.id
},
taxCategory: {"typeId": "tax-category", id: @taxCategory.id},
},
taxedPrice: {
taxPortions: [{
amount: {
centAmount: 190,
currencyCode: "USD"
},
rate: 0.19
}],
totalGross: {
centAmount: 1190,
currencyCode: "USD"
},
totalNet: {
centAmount: 1000,
currencyCode: "USD"
}
}
}
createTestOrder: (idx) ->
Q.all [
@sphere.importOrder @_orderJson()
@sphere.importOrder @_orderJson()
]
.then (orders) =>
[masterOrder, retailerOrder] = orders
@sphere.updateOrderSyncSuatus retailerOrder, @masterChannel, masterOrder.id
.then (newRetailerOrder) ->
[masterOrder, newRetailerOrder]
addSomeDeliveries: () ->
ps = _.map @orders, (o) =>
@sphere.addDelivery o.retailerOrder, [{id: o.retailerOrder.lineItems[0].id, quantity: 4}]
.then (o1) =>
@sphere.addParcel o1, o1.shippingInfo.deliveries[0].id, {heightInMillimeter: 11, lengthInMillimeter: 22, widthInMillimeter: 33, weightInGram: 44}, {trackingId: "ABCD123", carrier: "DHL"}
.then (o2) =>
@logger.info "Finished with deliveries: #{o2.id}"
o.retailerOrder = o2
Q.all ps
addStock: (sku, quantity) ->
@sphere.getInvetoryEntryBySkuAndChannel sku, null
.then (ie) =>
@sphere.addInventoryQuantity ie, quantity - ie.availableQuantity
.fail (e) =>
@sphere.createInventoryEntry sku, quantity
@setupProject: (sphereService, onlyOneOrderWith3LineItems = false) ->
sphereTestKit = new SphereTestKit sphereService
sphereTestKit.setupProject(onlyOneOrderWith3LineItems)
@cleanup = (done, subscription, processor) ->
if subscription?
subscription.dispose()
processor.stop()
.fail (error) =>
@logger.info "Error during processor cleanup", error
@reportSuccess: (done, subscription, processor) ->
@cleanup done, subscription, processor
.then ->
done()
.fail (error) ->
done(error)
.done()
@reportFailure: (done, error, subscription, processor) ->
@cleanup done, subscription, processor
.finally ->
done(error)
.done()
exports.SphereTestKit = SphereTestKit
| 12099 | Rx = require 'rx'
Q = require 'q'
{_} = require 'underscore'
{ErrorStatusCode} = require './sphere_service'
{LoggerFactory} = require '../lib/logger'
class SphereTestKit
stateDefs: [
{key: "A", transitions: ["B"]}
{key: "B", transitions: ["C", "D"]}
{key: "C", transitions: ["D"]}
{key: "D", transitions: ["E"]}
{key: "E", transitions: ["A"]}
{key: "ReadyForShipment", transitions: ["Pickup"]}
{key: "Pickup", transitions: ["Shipped"]}
{key: "Shipped", transitions: ["Finished"]}
{key: "Finished", transitions: ["ReadyForShipment"]}
{key: "canceled"}
{key: "returnNotApproved"}
{key: "closed"}
{key: "picking"}
{key: "backorder"}
{key: "readyToShip"}
{key: "shipped"}
{key: "returned"}
{key: "returnApproved"}
{key: "lost"}
{key: "lossApproved"}
{key: "lossNotApproved"}
]
channelDefs: [
{key: 'master', roles: ['OrderImport']}
]
taxCategoryDefs: [
{name: 'Test Category', rates: [{name: "Test Rate", amount: 0.19, includedInPrice: false, country: 'DE'}]}
]
constructor: (@sphere) ->
@logger = LoggerFactory.getLogger "test-kit.#{@sphere.getSourceInfo().prefix}"
setupProject: (onlyOneOrderWith3LineItems = false) ->
Q.all [
@configureStates()
@configureChannels()
@configureProduct()
@configureTaxCategory()
]
.then =>
if not onlyOneOrderWith3LineItems
orders = _.map _.range(1, 6), (idx) =>
@createTestOrder idx
Q.all orders
.then (orders) =>
@orders = _.map orders, (os) ->
[m, r] = os
{retailerOrder: r, masterOrder: m}
@logger.info "Orders"
_.each @orders, (o, i) =>
@logger.info "#{i} Retailer: #{o.retailerOrder.id}, Master: #{o.masterOrder.id}"
@logger.info _.map(@orders, (o)-> "\"#{o.retailerOrder.id}\"").join(',')
this
else
@sphere.importOrder @_orderJson(3)
.then (order) =>
@order = order
@logger.info "Order created: #{order.id}"
this
.then =>
if not onlyOneOrderWith3LineItems
@addStock(@orders[0].retailerOrder.lineItems[0].variant.sku, 1000000)
else
Q()
.then =>
@logger.info "Project setup finished"
this
ref: (type, obj) ->
{typeId: type, id: obj.id}
stateByKey: (key) ->
if key == 'Initial'
@initialState
else
_.find @states, (s) -> s.key == key
stateById: (id) ->
_.find @states, (s) -> s.id == id
abcStateSwitch: (currKey) ->
switch currKey
when'A' then 'B'
when'B' then 'D'
when'D' then 'E'
when'E' then 'A'
else throw new Error("Unsupported state #{currKey}")
shipmentStateSwitch: (currKey) ->
switch currKey
when'ReadyForShipment' then 'Pickup'
when'Pickup' then 'Shipped'
when'Shipped' then 'Finished'
when'Finished' then 'ReadyForShipment'
else throw new Error("Unsupported state #{currKey}")
transitionRetailerOrderStates: (first, newStateFn) ->
ps = _.map @orders, (os) =>
currStates = _.filter os.retailerOrder.lineItems[0].state, (s) => s.state.id != @initialState.id
p = if _.isEmpty(currStates)
@sphere.transitionLineItemState os.retailerOrder, os.retailerOrder.lineItems[0].id, 20, @ref('state', @initialState), @ref('state', @stateByKey(first))
else
currStateId = currStates[0].state.id
currStateQ = currStates[0].quantity
newState = @stateByKey newStateFn(@stateById(currStateId).key)
@sphere.transitionLineItemState os.retailerOrder, os.retailerOrder.lineItems[0].id, currStateQ, @ref('state', {id: currStateId}), @ref('state', newState)
p
.then (newOrder) ->
os.retailerOrder = newOrder
newOrder
Q.all ps
transitionStatePath: (lineItemIdx, quantity, path) ->
reduceFn = (acc, to) =>
acc.then ([from, order]) =>
@sphere.transitionLineItemState order, order.lineItems[lineItemIdx].id, quantity, @ref('state', @stateByKey(from)), @ref('state', @stateByKey(to))
.then (newOrder) ->
[to, newOrder]
_.reduce _.tail(path), reduceFn, Q([_.head(path), @order])
.then ([endKey, order]) =>
@order = order
order
scheduleStateTransitions: (first, stateSwitch) ->
Rx.Observable.interval 2000
.subscribe =>
@transitionRetailerOrderStates(first, stateSwitch)
.then =>
@logger.info "Transition finished"
.fail (error) =>
@logger.error "Error during state transition", error
.done()
configureStates: ->
Q.all [
@sphere.ensureStates [{key: "Initial"}]
@sphere.ensureStates @stateDefs
]
.then (states) =>
[[@initialState], @states] = states
@logger.info "States configured"
[@initialState, @states]
configureChannels: ->
@sphere.ensureChannels @channelDefs
.then (channels) =>
[@masterChannel] = channels
@logger.info "Channels configured"
@masterChannel
configureProduct: () ->
@sphere.getFirstProduct()
.then (product) =>
@product = product
@logger.info "Product found"
product
configureTaxCategory: () ->
@sphere.ensureTaxCategories @taxCategoryDefs
.then (tc) =>
[@taxCategory] = tc
@logger.info "Tax category configured"
tc
_orderJson: (lineItemCount = 1, quantity = 30) ->
lineItems = _.map _.range(0, lineItemCount), (idx) =>
{
variant: {
sku: @product.masterData.staged.masterVariant.sku
},
quantity: quantity,
taxRate: {
name: "<NAME>",
amount: 0.19,
includedInPrice: true,
country: "US",
id: @taxCategory.id
},
name: {
en: "Some Product #{idx}"
},
price: {
country: "US",
value: {
centAmount: 1190,
currencyCode: "USD"
}
}
}
{
lineItems: lineItems,
totalPrice: {
currencyCode: "USD",
centAmount: 1190
},
shippingAddress: {
country: "US"
},
shippingInfo: {
shippingMethodName: 'Normal',
price: {
centAmount: 1000,
currencyCode: "EUR"
},
shippingRate: {
price: {
centAmount: 1000,
currencyCode: "EUR"
}
},
taxRate: {
name: "<NAME>",
amount: 0.19,
includedInPrice: true,
country: "US",
id: @taxCategory.id
},
taxCategory: {"typeId": "tax-category", id: @taxCategory.id},
},
taxedPrice: {
taxPortions: [{
amount: {
centAmount: 190,
currencyCode: "USD"
},
rate: 0.19
}],
totalGross: {
centAmount: 1190,
currencyCode: "USD"
},
totalNet: {
centAmount: 1000,
currencyCode: "USD"
}
}
}
createTestOrder: (idx) ->
Q.all [
@sphere.importOrder @_orderJson()
@sphere.importOrder @_orderJson()
]
.then (orders) =>
[masterOrder, retailerOrder] = orders
@sphere.updateOrderSyncSuatus retailerOrder, @masterChannel, masterOrder.id
.then (newRetailerOrder) ->
[masterOrder, newRetailerOrder]
addSomeDeliveries: () ->
ps = _.map @orders, (o) =>
@sphere.addDelivery o.retailerOrder, [{id: o.retailerOrder.lineItems[0].id, quantity: 4}]
.then (o1) =>
@sphere.addParcel o1, o1.shippingInfo.deliveries[0].id, {heightInMillimeter: 11, lengthInMillimeter: 22, widthInMillimeter: 33, weightInGram: 44}, {trackingId: "ABCD123", carrier: "DHL"}
.then (o2) =>
@logger.info "Finished with deliveries: #{o2.id}"
o.retailerOrder = o2
Q.all ps
addStock: (sku, quantity) ->
@sphere.getInvetoryEntryBySkuAndChannel sku, null
.then (ie) =>
@sphere.addInventoryQuantity ie, quantity - ie.availableQuantity
.fail (e) =>
@sphere.createInventoryEntry sku, quantity
@setupProject: (sphereService, onlyOneOrderWith3LineItems = false) ->
sphereTestKit = new SphereTestKit sphereService
sphereTestKit.setupProject(onlyOneOrderWith3LineItems)
@cleanup = (done, subscription, processor) ->
if subscription?
subscription.dispose()
processor.stop()
.fail (error) =>
@logger.info "Error during processor cleanup", error
@reportSuccess: (done, subscription, processor) ->
@cleanup done, subscription, processor
.then ->
done()
.fail (error) ->
done(error)
.done()
@reportFailure: (done, error, subscription, processor) ->
@cleanup done, subscription, processor
.finally ->
done(error)
.done()
exports.SphereTestKit = SphereTestKit
| true | Rx = require 'rx'
Q = require 'q'
{_} = require 'underscore'
{ErrorStatusCode} = require './sphere_service'
{LoggerFactory} = require '../lib/logger'
class SphereTestKit
stateDefs: [
{key: "A", transitions: ["B"]}
{key: "B", transitions: ["C", "D"]}
{key: "C", transitions: ["D"]}
{key: "D", transitions: ["E"]}
{key: "E", transitions: ["A"]}
{key: "ReadyForShipment", transitions: ["Pickup"]}
{key: "Pickup", transitions: ["Shipped"]}
{key: "Shipped", transitions: ["Finished"]}
{key: "Finished", transitions: ["ReadyForShipment"]}
{key: "canceled"}
{key: "returnNotApproved"}
{key: "closed"}
{key: "picking"}
{key: "backorder"}
{key: "readyToShip"}
{key: "shipped"}
{key: "returned"}
{key: "returnApproved"}
{key: "lost"}
{key: "lossApproved"}
{key: "lossNotApproved"}
]
channelDefs: [
{key: 'master', roles: ['OrderImport']}
]
taxCategoryDefs: [
{name: 'Test Category', rates: [{name: "Test Rate", amount: 0.19, includedInPrice: false, country: 'DE'}]}
]
constructor: (@sphere) ->
@logger = LoggerFactory.getLogger "test-kit.#{@sphere.getSourceInfo().prefix}"
setupProject: (onlyOneOrderWith3LineItems = false) ->
Q.all [
@configureStates()
@configureChannels()
@configureProduct()
@configureTaxCategory()
]
.then =>
if not onlyOneOrderWith3LineItems
orders = _.map _.range(1, 6), (idx) =>
@createTestOrder idx
Q.all orders
.then (orders) =>
@orders = _.map orders, (os) ->
[m, r] = os
{retailerOrder: r, masterOrder: m}
@logger.info "Orders"
_.each @orders, (o, i) =>
@logger.info "#{i} Retailer: #{o.retailerOrder.id}, Master: #{o.masterOrder.id}"
@logger.info _.map(@orders, (o)-> "\"#{o.retailerOrder.id}\"").join(',')
this
else
@sphere.importOrder @_orderJson(3)
.then (order) =>
@order = order
@logger.info "Order created: #{order.id}"
this
.then =>
if not onlyOneOrderWith3LineItems
@addStock(@orders[0].retailerOrder.lineItems[0].variant.sku, 1000000)
else
Q()
.then =>
@logger.info "Project setup finished"
this
ref: (type, obj) ->
{typeId: type, id: obj.id}
stateByKey: (key) ->
if key == 'Initial'
@initialState
else
_.find @states, (s) -> s.key == key
stateById: (id) ->
_.find @states, (s) -> s.id == id
abcStateSwitch: (currKey) ->
switch currKey
when'A' then 'B'
when'B' then 'D'
when'D' then 'E'
when'E' then 'A'
else throw new Error("Unsupported state #{currKey}")
shipmentStateSwitch: (currKey) ->
switch currKey
when'ReadyForShipment' then 'Pickup'
when'Pickup' then 'Shipped'
when'Shipped' then 'Finished'
when'Finished' then 'ReadyForShipment'
else throw new Error("Unsupported state #{currKey}")
transitionRetailerOrderStates: (first, newStateFn) ->
ps = _.map @orders, (os) =>
currStates = _.filter os.retailerOrder.lineItems[0].state, (s) => s.state.id != @initialState.id
p = if _.isEmpty(currStates)
@sphere.transitionLineItemState os.retailerOrder, os.retailerOrder.lineItems[0].id, 20, @ref('state', @initialState), @ref('state', @stateByKey(first))
else
currStateId = currStates[0].state.id
currStateQ = currStates[0].quantity
newState = @stateByKey newStateFn(@stateById(currStateId).key)
@sphere.transitionLineItemState os.retailerOrder, os.retailerOrder.lineItems[0].id, currStateQ, @ref('state', {id: currStateId}), @ref('state', newState)
p
.then (newOrder) ->
os.retailerOrder = newOrder
newOrder
Q.all ps
transitionStatePath: (lineItemIdx, quantity, path) ->
reduceFn = (acc, to) =>
acc.then ([from, order]) =>
@sphere.transitionLineItemState order, order.lineItems[lineItemIdx].id, quantity, @ref('state', @stateByKey(from)), @ref('state', @stateByKey(to))
.then (newOrder) ->
[to, newOrder]
_.reduce _.tail(path), reduceFn, Q([_.head(path), @order])
.then ([endKey, order]) =>
@order = order
order
scheduleStateTransitions: (first, stateSwitch) ->
Rx.Observable.interval 2000
.subscribe =>
@transitionRetailerOrderStates(first, stateSwitch)
.then =>
@logger.info "Transition finished"
.fail (error) =>
@logger.error "Error during state transition", error
.done()
configureStates: ->
Q.all [
@sphere.ensureStates [{key: "Initial"}]
@sphere.ensureStates @stateDefs
]
.then (states) =>
[[@initialState], @states] = states
@logger.info "States configured"
[@initialState, @states]
configureChannels: ->
@sphere.ensureChannels @channelDefs
.then (channels) =>
[@masterChannel] = channels
@logger.info "Channels configured"
@masterChannel
configureProduct: () ->
@sphere.getFirstProduct()
.then (product) =>
@product = product
@logger.info "Product found"
product
configureTaxCategory: () ->
@sphere.ensureTaxCategories @taxCategoryDefs
.then (tc) =>
[@taxCategory] = tc
@logger.info "Tax category configured"
tc
_orderJson: (lineItemCount = 1, quantity = 30) ->
lineItems = _.map _.range(0, lineItemCount), (idx) =>
{
variant: {
sku: @product.masterData.staged.masterVariant.sku
},
quantity: quantity,
taxRate: {
name: "PI:NAME:<NAME>END_PI",
amount: 0.19,
includedInPrice: true,
country: "US",
id: @taxCategory.id
},
name: {
en: "Some Product #{idx}"
},
price: {
country: "US",
value: {
centAmount: 1190,
currencyCode: "USD"
}
}
}
{
lineItems: lineItems,
totalPrice: {
currencyCode: "USD",
centAmount: 1190
},
shippingAddress: {
country: "US"
},
shippingInfo: {
shippingMethodName: 'Normal',
price: {
centAmount: 1000,
currencyCode: "EUR"
},
shippingRate: {
price: {
centAmount: 1000,
currencyCode: "EUR"
}
},
taxRate: {
name: "PI:NAME:<NAME>END_PI",
amount: 0.19,
includedInPrice: true,
country: "US",
id: @taxCategory.id
},
taxCategory: {"typeId": "tax-category", id: @taxCategory.id},
},
taxedPrice: {
taxPortions: [{
amount: {
centAmount: 190,
currencyCode: "USD"
},
rate: 0.19
}],
totalGross: {
centAmount: 1190,
currencyCode: "USD"
},
totalNet: {
centAmount: 1000,
currencyCode: "USD"
}
}
}
createTestOrder: (idx) ->
Q.all [
@sphere.importOrder @_orderJson()
@sphere.importOrder @_orderJson()
]
.then (orders) =>
[masterOrder, retailerOrder] = orders
@sphere.updateOrderSyncSuatus retailerOrder, @masterChannel, masterOrder.id
.then (newRetailerOrder) ->
[masterOrder, newRetailerOrder]
addSomeDeliveries: () ->
ps = _.map @orders, (o) =>
@sphere.addDelivery o.retailerOrder, [{id: o.retailerOrder.lineItems[0].id, quantity: 4}]
.then (o1) =>
@sphere.addParcel o1, o1.shippingInfo.deliveries[0].id, {heightInMillimeter: 11, lengthInMillimeter: 22, widthInMillimeter: 33, weightInGram: 44}, {trackingId: "ABCD123", carrier: "DHL"}
.then (o2) =>
@logger.info "Finished with deliveries: #{o2.id}"
o.retailerOrder = o2
Q.all ps
addStock: (sku, quantity) ->
@sphere.getInvetoryEntryBySkuAndChannel sku, null
.then (ie) =>
@sphere.addInventoryQuantity ie, quantity - ie.availableQuantity
.fail (e) =>
@sphere.createInventoryEntry sku, quantity
@setupProject: (sphereService, onlyOneOrderWith3LineItems = false) ->
sphereTestKit = new SphereTestKit sphereService
sphereTestKit.setupProject(onlyOneOrderWith3LineItems)
@cleanup = (done, subscription, processor) ->
if subscription?
subscription.dispose()
processor.stop()
.fail (error) =>
@logger.info "Error during processor cleanup", error
@reportSuccess: (done, subscription, processor) ->
@cleanup done, subscription, processor
.then ->
done()
.fail (error) ->
done(error)
.done()
@reportFailure: (done, error, subscription, processor) ->
@cleanup done, subscription, processor
.finally ->
done(error)
.done()
exports.SphereTestKit = SphereTestKit
|
[
{
"context": " @interval = 1000\n @key = options['key'] || 'clock'\n @maxMilliseconds = options['maxMilliseconds'",
"end": 230,
"score": 0.9599863290786743,
"start": 225,
"tag": "KEY",
"value": "clock"
}
] | app/assets/javascripts/models/timer.js.coffee | stronglifters/surface | 2 | class Stronglifters.Timer
constructor: (options) ->
@databag = options['databag']
@format = options['format'] || (timer) ->
moment.utc(timer).format('mm:ss')
@interval = 1000
@key = options['key'] || 'clock'
@maxMilliseconds = options['maxMilliseconds'] || 600000
@success = options['success'] || -> { }
start: (options) ->
@stop()
@databag.set('timer', 0)
@intervalId = setInterval @refreshTimer, @interval
refreshTimer: =>
@databag.add('timer', @interval)
formattedValue = @format(@databag.get('timer'))
@databag.set(@key, formattedValue)
if @databag.get('timer') >= @maxMilliseconds
@stop()
@success()
stop: =>
if @running()
clearTimeout @intervalId
@intervalId = null
running: ->
@intervalId?
| 146854 | class Stronglifters.Timer
constructor: (options) ->
@databag = options['databag']
@format = options['format'] || (timer) ->
moment.utc(timer).format('mm:ss')
@interval = 1000
@key = options['key'] || '<KEY>'
@maxMilliseconds = options['maxMilliseconds'] || 600000
@success = options['success'] || -> { }
start: (options) ->
@stop()
@databag.set('timer', 0)
@intervalId = setInterval @refreshTimer, @interval
refreshTimer: =>
@databag.add('timer', @interval)
formattedValue = @format(@databag.get('timer'))
@databag.set(@key, formattedValue)
if @databag.get('timer') >= @maxMilliseconds
@stop()
@success()
stop: =>
if @running()
clearTimeout @intervalId
@intervalId = null
running: ->
@intervalId?
| true | class Stronglifters.Timer
constructor: (options) ->
@databag = options['databag']
@format = options['format'] || (timer) ->
moment.utc(timer).format('mm:ss')
@interval = 1000
@key = options['key'] || 'PI:KEY:<KEY>END_PI'
@maxMilliseconds = options['maxMilliseconds'] || 600000
@success = options['success'] || -> { }
start: (options) ->
@stop()
@databag.set('timer', 0)
@intervalId = setInterval @refreshTimer, @interval
refreshTimer: =>
@databag.add('timer', @interval)
formattedValue = @format(@databag.get('timer'))
@databag.set(@key, formattedValue)
if @databag.get('timer') >= @maxMilliseconds
@stop()
@success()
stop: =>
if @running()
clearTimeout @intervalId
@intervalId = null
running: ->
@intervalId?
|
[
{
"context": "orts = lemon.Component {\n package: 'wg'\n name: 'LoonCrest'\n class: 'webgradient'\n\n data: {\n pos",
"end": 175,
"score": 0.80583256483078,
"start": 173,
"tag": "NAME",
"value": "Lo"
},
{
"context": "ts = lemon.Component {\n package: 'wg'\n name: 'LoonCrest'... | LoonCrest.coffee | lemon/lemonjs-wg | 0 |
# dependencies
Path = require 'path'
# stylesheet
require Path.resolve(__dirname, 'gradient.css')
# component
module.exports = lemon.Component {
package: 'wg'
name: 'LoonCrest'
class: 'webgradient'
data: {
position: null
}
template: (data) ->
div class: data.position, style: "background: linear-gradient(to bottom, rgba(255,255,255,0.15) 0%, rgba(0,0,0,0.15) 100%), radial-gradient(at top center, rgba(255,255,255,0.40) 0%, rgba(0,0,0,0.40) 120%) #989898; background-blend-mode: multiply,multiply;"
} | 157966 |
# dependencies
Path = require 'path'
# stylesheet
require Path.resolve(__dirname, 'gradient.css')
# component
module.exports = lemon.Component {
package: 'wg'
name: '<NAME>onCrest'
class: 'webgradient'
data: {
position: null
}
template: (data) ->
div class: data.position, style: "background: linear-gradient(to bottom, rgba(255,255,255,0.15) 0%, rgba(0,0,0,0.15) 100%), radial-gradient(at top center, rgba(255,255,255,0.40) 0%, rgba(0,0,0,0.40) 120%) #989898; background-blend-mode: multiply,multiply;"
} | true |
# dependencies
Path = require 'path'
# stylesheet
require Path.resolve(__dirname, 'gradient.css')
# component
module.exports = lemon.Component {
package: 'wg'
name: 'PI:NAME:<NAME>END_PIonCrest'
class: 'webgradient'
data: {
position: null
}
template: (data) ->
div class: data.position, style: "background: linear-gradient(to bottom, rgba(255,255,255,0.15) 0%, rgba(0,0,0,0.15) 100%), radial-gradient(at top center, rgba(255,255,255,0.40) 0%, rgba(0,0,0,0.40) 120%) #989898; background-blend-mode: multiply,multiply;"
} |
[
{
"context": "data()\n .ondata((data) ->\n data.set([{id:'frasse', hej:\"då\"}, {id:'staffan', gurka:'groen'}])\n ",
"end": 370,
"score": 0.997245192527771,
"start": 364,
"tag": "NAME",
"value": "frasse"
},
{
"context": "data((data) ->\n data.set([{id:'frasse', hej:\"... | examples/index.coffee | KONDENSATOR/bucket | 1 | _ = require('underscore')
bucket = require("../index")
# Initialize test-data bucket and master branch
master_test_data = () ->
bucket("test-data")
.onerr((err) -> console.log("ERROR: #{err}"))
.use()
# Test insert, store, query, filter, map and store again
insert_and_query = () ->
master_test_data()
.ondata((data) ->
data.set([{id:'frasse', hej:"då"}, {id:'staffan', gurka:'groen'}])
.store()
.onstored((branch) ->
console.log("First store for branch #{branch.branch()}"))
.query()
.filter((i) -> i.id == "frasse")
.map((i) -> _(i).extend(name : "gurkan"))
.log()
.set()
.store()
.onstored((branch) ->
console.log("Second store for branch #{branch.branch()}")))
# Test remove
test_remove = () ->
master_test_data()
.ondata((data) ->
data.remove('frasse')
.store()
.onstored((branch) ->
console.log("Stored removing object for branch #{branch.branch()}")))
# Only run if run as entry point
if not module.parent?
insert_and_query()
test_remove()
| 28931 | _ = require('underscore')
bucket = require("../index")
# Initialize test-data bucket and master branch
master_test_data = () ->
bucket("test-data")
.onerr((err) -> console.log("ERROR: #{err}"))
.use()
# Test insert, store, query, filter, map and store again
insert_and_query = () ->
master_test_data()
.ondata((data) ->
data.set([{id:'<NAME>', hej:"<NAME>"}, {id:'<NAME>', gurka:'<NAME>'}])
.store()
.onstored((branch) ->
console.log("First store for branch #{branch.branch()}"))
.query()
.filter((i) -> i.id == "<NAME>")
.map((i) -> _(i).extend(name : "<NAME>"))
.log()
.set()
.store()
.onstored((branch) ->
console.log("Second store for branch #{branch.branch()}")))
# Test remove
test_remove = () ->
master_test_data()
.ondata((data) ->
data.remove('frasse')
.store()
.onstored((branch) ->
console.log("Stored removing object for branch #{branch.branch()}")))
# Only run if run as entry point
if not module.parent?
insert_and_query()
test_remove()
| true | _ = require('underscore')
bucket = require("../index")
# Initialize test-data bucket and master branch
master_test_data = () ->
bucket("test-data")
.onerr((err) -> console.log("ERROR: #{err}"))
.use()
# Test insert, store, query, filter, map and store again
insert_and_query = () ->
master_test_data()
.ondata((data) ->
data.set([{id:'PI:NAME:<NAME>END_PI', hej:"PI:NAME:<NAME>END_PI"}, {id:'PI:NAME:<NAME>END_PI', gurka:'PI:NAME:<NAME>END_PI'}])
.store()
.onstored((branch) ->
console.log("First store for branch #{branch.branch()}"))
.query()
.filter((i) -> i.id == "PI:NAME:<NAME>END_PI")
.map((i) -> _(i).extend(name : "PI:NAME:<NAME>END_PI"))
.log()
.set()
.store()
.onstored((branch) ->
console.log("Second store for branch #{branch.branch()}")))
# Test remove
test_remove = () ->
master_test_data()
.ondata((data) ->
data.remove('frasse')
.store()
.onstored((branch) ->
console.log("Stored removing object for branch #{branch.branch()}")))
# Only run if run as entry point
if not module.parent?
insert_and_query()
test_remove()
|
[
{
"context": "a näkyvät NPC-hahmot\n\nmodule.exports = [\n name: \"Fortunei\"\n admin: true\n,\n name: \"Joanna\"\n admin: true\n,",
"end": 67,
"score": 0.999743640422821,
"start": 59,
"tag": "NAME",
"value": "Fortunei"
},
{
"context": "s = [\n name: \"Fortunei\"\n admin: true\... | db/fixtures/npcs.coffee | woochi/cyberia | 0 | # Chatissa näkyvät NPC-hahmot
module.exports = [
name: "Fortunei"
admin: true
,
name: "Joanna"
admin: true
,
name: "Dick Dickinson"
admin: true
,
name: "Metatron"
admin: true
,
name: "Latisha"
admin: true
,
name: "Baer Chinwe"
admin: true
,
name: "Vivian Karlsen"
admin: true
,
name: "Charles Runkle"
admin: true
,
name: "Mystikko"
admin: true
]
| 192978 | # Chatissa näkyvät NPC-hahmot
module.exports = [
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
,
name: "<NAME>"
admin: true
]
| true | # Chatissa näkyvät NPC-hahmot
module.exports = [
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
,
name: "PI:NAME:<NAME>END_PI"
admin: true
]
|
[
{
"context": "'Django Unchained'\n\nconfig =\n author:\n name: 'Reuben Cummings'\n url: 'https://reubano.github.io'\n email: ",
"end": 77,
"score": 0.9998772740364075,
"start": 62,
"tag": "NAME",
"value": "Reuben Cummings"
},
{
"context": "\n url: 'https://reubano.github... | app/config.coffee | Luciekimotho/HDX-Hackathon | 0 | site_name = 'Django Unchained'
config =
author:
name: 'Reuben Cummings'
url: 'https://reubano.github.io'
email: 'reubano@gmail.com'
site:
title: site_name
description: 'Kenya Conflict'
url: 'http://nerevu.github.io/akili/'
data: 'https://data.hdx.rwlabs.org/dataset/wfp-food-prices'
source: 'https://github.com/nerevu/akili'
id: 'com.akili.vizapp'
type: 'webapp'
version: '0.1.0'
keywords: """
brunch, chaplin, nodejs, backbonejs, bower, html5, single page app
"""
default:
idAttr: 'id'
nameAttr: 'County'
metricAttr: 'bednet'
google:
analytics:
id: $PROCESS_ENV_GOOGLE_ANALYTICS_TRACKING_ID ? null
site_number: 3
adwords_id: $PROCESS_ENV_GOOGLE_ADWORDS_ID ? null
displayads_id: $PROCESS_ENV_GOOGLE_DISPLAYADS_ID ? null
app_name: site_name
app_id: ''
plus_id: $PROCESS_ENV_GOOGLE_PLUS_ID ? null
facebook:
app_id: ''
module.exports = config
| 175780 | site_name = 'Django Unchained'
config =
author:
name: '<NAME>'
url: 'https://reubano.github.io'
email: '<EMAIL>'
site:
title: site_name
description: 'Kenya Conflict'
url: 'http://nerevu.github.io/akili/'
data: 'https://data.hdx.rwlabs.org/dataset/wfp-food-prices'
source: 'https://github.com/nerevu/akili'
id: 'com.akili.vizapp'
type: 'webapp'
version: '0.1.0'
keywords: """
brunch, chaplin, nodejs, backbonejs, bower, html5, single page app
"""
default:
idAttr: 'id'
nameAttr: 'County'
metricAttr: 'bednet'
google:
analytics:
id: $PROCESS_ENV_GOOGLE_ANALYTICS_TRACKING_ID ? null
site_number: 3
adwords_id: $PROCESS_ENV_GOOGLE_ADWORDS_ID ? null
displayads_id: $PROCESS_ENV_GOOGLE_DISPLAYADS_ID ? null
app_name: site_name
app_id: ''
plus_id: $PROCESS_ENV_GOOGLE_PLUS_ID ? null
facebook:
app_id: ''
module.exports = config
| true | site_name = 'Django Unchained'
config =
author:
name: 'PI:NAME:<NAME>END_PI'
url: 'https://reubano.github.io'
email: 'PI:EMAIL:<EMAIL>END_PI'
site:
title: site_name
description: 'Kenya Conflict'
url: 'http://nerevu.github.io/akili/'
data: 'https://data.hdx.rwlabs.org/dataset/wfp-food-prices'
source: 'https://github.com/nerevu/akili'
id: 'com.akili.vizapp'
type: 'webapp'
version: '0.1.0'
keywords: """
brunch, chaplin, nodejs, backbonejs, bower, html5, single page app
"""
default:
idAttr: 'id'
nameAttr: 'County'
metricAttr: 'bednet'
google:
analytics:
id: $PROCESS_ENV_GOOGLE_ANALYTICS_TRACKING_ID ? null
site_number: 3
adwords_id: $PROCESS_ENV_GOOGLE_ADWORDS_ID ? null
displayads_id: $PROCESS_ENV_GOOGLE_DISPLAYADS_ID ? null
app_name: site_name
app_id: ''
plus_id: $PROCESS_ENV_GOOGLE_PLUS_ID ? null
facebook:
app_id: ''
module.exports = config
|
[
{
"context": " }\n cacheParams = {\n key: \"#{api.fullname}-games-#{region}-#{summonerId}\"\n ",
"end": 1624,
"score": 0.9533486366271973,
"start": 1621,
"tag": "KEY",
"value": "\"#{"
},
{
"context": "}\n cacheParams = {\n key: \"#{api.f... | src/api/game.coffee | jwalton/lol-js | 30 | assert = require 'assert'
ld = require 'lodash'
pb = require 'promise-breaker'
matchApi = require './match'
api = exports.api = {
fullname: "game-v1.3",
name: "game",
version: "v1.3"
}
exports.methods = {
# Gets recent games for this given summoner.
#
# Parameters:
# * `region` - Region where to retrieve the data.
# * `summonerId` - ID of the summoner for which to retrieve recent games.
# * `options.asMatches` - if specified, this will use the `match` api to fetch match objects for
# each game. These objects will automatically be populated with summoner identities, even
# if they are not ranked games. `asMatches` can either be `true`, or can be a hash of
# options which will be passed to `getMatch()` (e.g. `{includeTimeline: true}`)
# For some games, this will only populate players on the allied team. (For example, bot games.)
#
# Returns a `{games, summonerId}` object. If `options.asMatches` is specified, returns a
# `{games, matches, summonerId}` object.
#
getRecentGamesForSummoner: pb.break (region, summonerId, options={}) ->
# Since we're relying on other APIs, we assert here so that if those APIs change, we'll get
# unit test failures if we don't update this method.
assert.equal(matchApi.api.version, "v2.2", "match API version has changed.")
requestParams = {
caller: "getRecentGamesForSummoner",
region: region,
url: "#{@_makeUrl region, api}/by-summoner/#{summonerId}/recent"
}
cacheParams = {
key: "#{api.fullname}-games-#{region}-#{summonerId}"
region, api,
objectType: 'games'
params: {summonerId}
}
@_riotRequestWithCache(requestParams, cacheParams, {})
.then (games) =>
games ?= {games: [], summonerId}
games.games ?= []
if !options.asMatches
return games
else
# Fetch matches in parallel
return @Promise.all games.games.map (game) =>
@recentGameToMatch region, game, summonerId, {
matchOptions: if options.asMatches is true then null else options.asMatches
}
.then (matches) ->
games.matches = matches
games
# Converts a `game` from `getRecentGamesForSummoner()` into a match (as per `getMatch()`).
#
# This function may result in multiple calls to the Riot API, to load the match
# details and to load details of all the summoners in the game.
#
# For some games, this will only populate players on the allied team. (For example, bot games.)
#
# Parameters:
# * `region` - Region where to retrieve the data.
# * `game` - a game retrieved via `getRecentGamesForSummoner()`.
# * `summonerId` - summoner the game was fetched for.
# * `options.matchOptions` - options to pass to `getMatch()`.
#
recentGameToMatch: pb.break (region, game, summonerId, options={}) ->
matchOptions = if !options.matchOptions?
{region}
else
ld.extend {}, options.matchOptions, {region}
matchOptions.players = ld.clone game.fellowPlayers
matchOptions.players.push {
championId: game.championId,
teamId: game.teamId,
summonerId
}
return @getMatch region, game.gameId, matchOptions
}
| 149201 | assert = require 'assert'
ld = require 'lodash'
pb = require 'promise-breaker'
matchApi = require './match'
api = exports.api = {
fullname: "game-v1.3",
name: "game",
version: "v1.3"
}
exports.methods = {
# Gets recent games for this given summoner.
#
# Parameters:
# * `region` - Region where to retrieve the data.
# * `summonerId` - ID of the summoner for which to retrieve recent games.
# * `options.asMatches` - if specified, this will use the `match` api to fetch match objects for
# each game. These objects will automatically be populated with summoner identities, even
# if they are not ranked games. `asMatches` can either be `true`, or can be a hash of
# options which will be passed to `getMatch()` (e.g. `{includeTimeline: true}`)
# For some games, this will only populate players on the allied team. (For example, bot games.)
#
# Returns a `{games, summonerId}` object. If `options.asMatches` is specified, returns a
# `{games, matches, summonerId}` object.
#
getRecentGamesForSummoner: pb.break (region, summonerId, options={}) ->
# Since we're relying on other APIs, we assert here so that if those APIs change, we'll get
# unit test failures if we don't update this method.
assert.equal(matchApi.api.version, "v2.2", "match API version has changed.")
requestParams = {
caller: "getRecentGamesForSummoner",
region: region,
url: "#{@_makeUrl region, api}/by-summoner/#{summonerId}/recent"
}
cacheParams = {
key: <KEY>api.<KEY>
region, api,
objectType: 'games'
params: {summonerId}
}
@_riotRequestWithCache(requestParams, cacheParams, {})
.then (games) =>
games ?= {games: [], summonerId}
games.games ?= []
if !options.asMatches
return games
else
# Fetch matches in parallel
return @Promise.all games.games.map (game) =>
@recentGameToMatch region, game, summonerId, {
matchOptions: if options.asMatches is true then null else options.asMatches
}
.then (matches) ->
games.matches = matches
games
# Converts a `game` from `getRecentGamesForSummoner()` into a match (as per `getMatch()`).
#
# This function may result in multiple calls to the Riot API, to load the match
# details and to load details of all the summoners in the game.
#
# For some games, this will only populate players on the allied team. (For example, bot games.)
#
# Parameters:
# * `region` - Region where to retrieve the data.
# * `game` - a game retrieved via `getRecentGamesForSummoner()`.
# * `summonerId` - summoner the game was fetched for.
# * `options.matchOptions` - options to pass to `getMatch()`.
#
recentGameToMatch: pb.break (region, game, summonerId, options={}) ->
matchOptions = if !options.matchOptions?
{region}
else
ld.extend {}, options.matchOptions, {region}
matchOptions.players = ld.clone game.fellowPlayers
matchOptions.players.push {
championId: game.championId,
teamId: game.teamId,
summonerId
}
return @getMatch region, game.gameId, matchOptions
}
| true | assert = require 'assert'
ld = require 'lodash'
pb = require 'promise-breaker'
matchApi = require './match'
api = exports.api = {
fullname: "game-v1.3",
name: "game",
version: "v1.3"
}
exports.methods = {
# Gets recent games for this given summoner.
#
# Parameters:
# * `region` - Region where to retrieve the data.
# * `summonerId` - ID of the summoner for which to retrieve recent games.
# * `options.asMatches` - if specified, this will use the `match` api to fetch match objects for
# each game. These objects will automatically be populated with summoner identities, even
# if they are not ranked games. `asMatches` can either be `true`, or can be a hash of
# options which will be passed to `getMatch()` (e.g. `{includeTimeline: true}`)
# For some games, this will only populate players on the allied team. (For example, bot games.)
#
# Returns a `{games, summonerId}` object. If `options.asMatches` is specified, returns a
# `{games, matches, summonerId}` object.
#
getRecentGamesForSummoner: pb.break (region, summonerId, options={}) ->
# Since we're relying on other APIs, we assert here so that if those APIs change, we'll get
# unit test failures if we don't update this method.
assert.equal(matchApi.api.version, "v2.2", "match API version has changed.")
requestParams = {
caller: "getRecentGamesForSummoner",
region: region,
url: "#{@_makeUrl region, api}/by-summoner/#{summonerId}/recent"
}
cacheParams = {
key: PI:KEY:<KEY>END_PIapi.PI:KEY:<KEY>END_PI
region, api,
objectType: 'games'
params: {summonerId}
}
@_riotRequestWithCache(requestParams, cacheParams, {})
.then (games) =>
games ?= {games: [], summonerId}
games.games ?= []
if !options.asMatches
return games
else
# Fetch matches in parallel
return @Promise.all games.games.map (game) =>
@recentGameToMatch region, game, summonerId, {
matchOptions: if options.asMatches is true then null else options.asMatches
}
.then (matches) ->
games.matches = matches
games
# Converts a `game` from `getRecentGamesForSummoner()` into a match (as per `getMatch()`).
#
# This function may result in multiple calls to the Riot API, to load the match
# details and to load details of all the summoners in the game.
#
# For some games, this will only populate players on the allied team. (For example, bot games.)
#
# Parameters:
# * `region` - Region where to retrieve the data.
# * `game` - a game retrieved via `getRecentGamesForSummoner()`.
# * `summonerId` - summoner the game was fetched for.
# * `options.matchOptions` - options to pass to `getMatch()`.
#
recentGameToMatch: pb.break (region, game, summonerId, options={}) ->
matchOptions = if !options.matchOptions?
{region}
else
ld.extend {}, options.matchOptions, {region}
matchOptions.players = ld.clone game.fellowPlayers
matchOptions.players.push {
championId: game.championId,
teamId: game.teamId,
summonerId
}
return @getMatch region, game.gameId, matchOptions
}
|
[
{
"context": "next) ->\n email = req.body.email\n password = req.body.password\n req.login email, password, (err, user) ->\n ",
"end": 128,
"score": 0.9938876032829285,
"start": 111,
"tag": "PASSWORD",
"value": "req.body.password"
}
] | src/routes.coffee | reaktivo/lox | 0 | exports.login = (success = "/", fail = "/") ->
(req, res, next) ->
email = req.body.email
password = req.body.password
req.login email, password, (err, user) ->
res.redirect (if user then success else fail)
exports.logout = (success = "/", fail = "/") ->
(req, res, next) ->
req.logout (err) ->
res.redirect (if err then fail else success) | 154420 | exports.login = (success = "/", fail = "/") ->
(req, res, next) ->
email = req.body.email
password = <PASSWORD>
req.login email, password, (err, user) ->
res.redirect (if user then success else fail)
exports.logout = (success = "/", fail = "/") ->
(req, res, next) ->
req.logout (err) ->
res.redirect (if err then fail else success) | true | exports.login = (success = "/", fail = "/") ->
(req, res, next) ->
email = req.body.email
password = PI:PASSWORD:<PASSWORD>END_PI
req.login email, password, (err, user) ->
res.redirect (if user then success else fail)
exports.logout = (success = "/", fail = "/") ->
(req, res, next) ->
req.logout (err) ->
res.redirect (if err then fail else success) |
[
{
"context": "ar()\n\t\t\t\n\t\t\tcontext.translate -@camera.position.i, -@camera.position.j\n\n\t\t\tcontext.font = '10px \"Helvetica CY\"'\n",
"end": 1454,
"score": 0.9541225433349609,
"start": 1435,
"tag": "EMAIL",
"value": "-@camera.position.j"
}
] | src/unsorted/editor/client.coffee | sustained/MotionJS-pre-release | 1 | require [
'client/game'
'entity'
'camera'
'canvas'
'math/vector'
'geometry/polygon'
'geometry/circle'
'collision/SAT'
'dynamics/world'
'collision/aabb'
'screen'
#'http://localhost:8080/socket.io/socket.io.js'
], (Game, Entity, Camera, Canvas, Colour, Vector, Polygon, Circle, SAT, World, AABB, Screen) ->
game = new Game
rand = Math.rand
world = game.world
canvas = game.canvas
$hW = ($W = 1024) / 2
$hH = ($H = 768) / 2
world.gravity = new Vector 0, 0
###
socket = new io.Socket 'localhost', port: 8080
console.log socket
socket.on 'connect', ->
console.log 'connected'
socket.on 'message', (msg) ->
console.log "message #{msg}"
socket.on 'disconnect', ->
console.log 'disconnected'
socket.connect()
###
class EditorScreen extends Screen
constructor: ->
super
@run = true
@camera = new Camera [$W, $H], moveable: true
@xLines = $W / 4
@yLines = $H / 4
input: (Input) ->
if Input.isKeyDown 'left'
@camera.position.i -= 2
else if Input.isKeyDown 'right'
@camera.position.i += 2
if Input.isKeyDown 'up'
@camera.position.j -= 2
else if Input.isKeyDown 'down'
@camera.position.j += 2
update: (delta, tick) ->
@input game.Input
if @run then world.step delta
@camera.update delta
game.Input.update @camera
render: (context) ->
canvas.clear()
context.translate -@camera.position.i, -@camera.position.j
context.font = '10px "Helvetica CY"'
context.textAlign = 'center'
context.textBaseline = 'middle'
canvas.text new Vector(0, 0), '0', fill: 'lightgray'
x = 1; xl = 1024 / 32; while x <= xl
canvas.text new Vector(x * 32, 10), x * 32, fill: 'lightgray'
x++
y = 1; yl = 768 / 32; while y <= yl
canvas.text new Vector(10, y * 32), y * 32, fill: 'lightgray'
y++
###
x = 0; xl = 1024 / 32; while x <= xl
y = 0; yl = 768 / 32; while y <= yl
canvas.rectangle new Vector(x * 32, y * 32), [32, 32],
fill: if x & 1
if y & 1 then '#050505' else '#101010'
else
if y & 1 then '#101010' else '#050505'
canvas.text new Vector((x * 32) - 16, (y * 32) - 16), "#{x} , #{y}", fill: '#bdbdbd'
y++
x++
###
world.render context, @camera
context.translate @camera.position.i, @camera.position.j
game.Screen.add 'editor', EditorScreen, true
jQuery -> game.loop.start() | 102162 | require [
'client/game'
'entity'
'camera'
'canvas'
'math/vector'
'geometry/polygon'
'geometry/circle'
'collision/SAT'
'dynamics/world'
'collision/aabb'
'screen'
#'http://localhost:8080/socket.io/socket.io.js'
], (Game, Entity, Camera, Canvas, Colour, Vector, Polygon, Circle, SAT, World, AABB, Screen) ->
game = new Game
rand = Math.rand
world = game.world
canvas = game.canvas
$hW = ($W = 1024) / 2
$hH = ($H = 768) / 2
world.gravity = new Vector 0, 0
###
socket = new io.Socket 'localhost', port: 8080
console.log socket
socket.on 'connect', ->
console.log 'connected'
socket.on 'message', (msg) ->
console.log "message #{msg}"
socket.on 'disconnect', ->
console.log 'disconnected'
socket.connect()
###
class EditorScreen extends Screen
constructor: ->
super
@run = true
@camera = new Camera [$W, $H], moveable: true
@xLines = $W / 4
@yLines = $H / 4
input: (Input) ->
if Input.isKeyDown 'left'
@camera.position.i -= 2
else if Input.isKeyDown 'right'
@camera.position.i += 2
if Input.isKeyDown 'up'
@camera.position.j -= 2
else if Input.isKeyDown 'down'
@camera.position.j += 2
update: (delta, tick) ->
@input game.Input
if @run then world.step delta
@camera.update delta
game.Input.update @camera
render: (context) ->
canvas.clear()
context.translate -@camera.position.i, <EMAIL>
context.font = '10px "Helvetica CY"'
context.textAlign = 'center'
context.textBaseline = 'middle'
canvas.text new Vector(0, 0), '0', fill: 'lightgray'
x = 1; xl = 1024 / 32; while x <= xl
canvas.text new Vector(x * 32, 10), x * 32, fill: 'lightgray'
x++
y = 1; yl = 768 / 32; while y <= yl
canvas.text new Vector(10, y * 32), y * 32, fill: 'lightgray'
y++
###
x = 0; xl = 1024 / 32; while x <= xl
y = 0; yl = 768 / 32; while y <= yl
canvas.rectangle new Vector(x * 32, y * 32), [32, 32],
fill: if x & 1
if y & 1 then '#050505' else '#101010'
else
if y & 1 then '#101010' else '#050505'
canvas.text new Vector((x * 32) - 16, (y * 32) - 16), "#{x} , #{y}", fill: '#bdbdbd'
y++
x++
###
world.render context, @camera
context.translate @camera.position.i, @camera.position.j
game.Screen.add 'editor', EditorScreen, true
jQuery -> game.loop.start() | true | require [
'client/game'
'entity'
'camera'
'canvas'
'math/vector'
'geometry/polygon'
'geometry/circle'
'collision/SAT'
'dynamics/world'
'collision/aabb'
'screen'
#'http://localhost:8080/socket.io/socket.io.js'
], (Game, Entity, Camera, Canvas, Colour, Vector, Polygon, Circle, SAT, World, AABB, Screen) ->
game = new Game
rand = Math.rand
world = game.world
canvas = game.canvas
$hW = ($W = 1024) / 2
$hH = ($H = 768) / 2
world.gravity = new Vector 0, 0
###
socket = new io.Socket 'localhost', port: 8080
console.log socket
socket.on 'connect', ->
console.log 'connected'
socket.on 'message', (msg) ->
console.log "message #{msg}"
socket.on 'disconnect', ->
console.log 'disconnected'
socket.connect()
###
class EditorScreen extends Screen
constructor: ->
super
@run = true
@camera = new Camera [$W, $H], moveable: true
@xLines = $W / 4
@yLines = $H / 4
input: (Input) ->
if Input.isKeyDown 'left'
@camera.position.i -= 2
else if Input.isKeyDown 'right'
@camera.position.i += 2
if Input.isKeyDown 'up'
@camera.position.j -= 2
else if Input.isKeyDown 'down'
@camera.position.j += 2
update: (delta, tick) ->
@input game.Input
if @run then world.step delta
@camera.update delta
game.Input.update @camera
render: (context) ->
canvas.clear()
context.translate -@camera.position.i, PI:EMAIL:<EMAIL>END_PI
context.font = '10px "Helvetica CY"'
context.textAlign = 'center'
context.textBaseline = 'middle'
canvas.text new Vector(0, 0), '0', fill: 'lightgray'
x = 1; xl = 1024 / 32; while x <= xl
canvas.text new Vector(x * 32, 10), x * 32, fill: 'lightgray'
x++
y = 1; yl = 768 / 32; while y <= yl
canvas.text new Vector(10, y * 32), y * 32, fill: 'lightgray'
y++
###
x = 0; xl = 1024 / 32; while x <= xl
y = 0; yl = 768 / 32; while y <= yl
canvas.rectangle new Vector(x * 32, y * 32), [32, 32],
fill: if x & 1
if y & 1 then '#050505' else '#101010'
else
if y & 1 then '#101010' else '#050505'
canvas.text new Vector((x * 32) - 16, (y * 32) - 16), "#{x} , #{y}", fill: '#bdbdbd'
y++
x++
###
world.render context, @camera
context.translate @camera.position.i, @camera.position.j
game.Screen.add 'editor', EditorScreen, true
jQuery -> game.loop.start() |
[
{
"context": " @sut = new MeshbluXMPP uuid: 'uuid', token: 'token', hostname: 'localhost', port: 5222\n\n describe",
"end": 562,
"score": 0.42892736196517944,
"start": 557,
"tag": "KEY",
"value": "token"
}
] | test/status-spec.coffee | octoblu/node-meshblu-xmpp | 1 | _ = require 'lodash'
async = require 'async'
xmpp = require 'node-xmpp-server'
MeshbluXMPP = require '../'
describe 'Status', ->
beforeEach (done) ->
@server = new xmpp.C2S.TCPServer
port: 5222
domain: 'localhost'
@server.on 'connection', (@client) =>
@client.on 'authenticate', (opts, callback) =>
callback(null, opts)
@server.on 'listening', done
afterEach (done) ->
@server.end done
describe 'without an active connection', ->
beforeEach ->
@sut = new MeshbluXMPP uuid: 'uuid', token: 'token', hostname: 'localhost', port: 5222
describe 'when status is called', ->
beforeEach (done) ->
@sut.status (@error) => done()
it 'should yield an error', ->
expect(=> throw @error).to.throw 'MeshbluXMPP is not connected'
describe 'with an active connection', ->
beforeEach (done) ->
@sut = new MeshbluXMPP uuid: 'uuid', token: 'token', hostname: 'localhost', port: 5222
@sut.connect done
afterEach 'close client', ->
@sut.close()
describe 'when status is called', ->
beforeEach (done) ->
@client.on 'stanza', (@request) =>
@client.send new xmpp.Stanza('iq',
type: 'result'
to: @request.attrs.from
from: @request.attrs.to
id: @request.attrs.id
).c('response').c('rawData').t JSON.stringify({
meshblu: 'online'
})
@sut.status (error, @response) => done error
it 'should send a stanza to the server', ->
expect(@request).to.exist
expect(@request.toJSON()).to.containSubset
name: 'iq'
attrs:
to: 'localhost'
type: 'get'
children: [{
name: 'request'
children: [{
name: 'metadata'
children: [{
name: 'jobType'
children: ['GetStatus']
}]
}]
}]
it 'should return a status of online: true', ->
expect(@response).to.exist
expect(@response).to.deep.equal meshblu: 'online'
describe 'when status is called twice', ->
beforeEach (done) ->
wait = (delay, fn) -> setTimeout fn, delay
@client.on 'stanza', (@request) =>
@client.send new xmpp.Stanza('iq',
type: 'result'
to: @request.attrs.from
from: @request.attrs.to
id: @request.attrs.id
).c('response').c('rawData').t JSON.stringify({
meshblu: 'online'
})
async.times 2, ((i, callback) => @sut.status callback), done
it 'should return a status of online: true', ->
expect(true).to.be.true
| 218127 | _ = require 'lodash'
async = require 'async'
xmpp = require 'node-xmpp-server'
MeshbluXMPP = require '../'
describe 'Status', ->
beforeEach (done) ->
@server = new xmpp.C2S.TCPServer
port: 5222
domain: 'localhost'
@server.on 'connection', (@client) =>
@client.on 'authenticate', (opts, callback) =>
callback(null, opts)
@server.on 'listening', done
afterEach (done) ->
@server.end done
describe 'without an active connection', ->
beforeEach ->
@sut = new MeshbluXMPP uuid: 'uuid', token: '<KEY>', hostname: 'localhost', port: 5222
describe 'when status is called', ->
beforeEach (done) ->
@sut.status (@error) => done()
it 'should yield an error', ->
expect(=> throw @error).to.throw 'MeshbluXMPP is not connected'
describe 'with an active connection', ->
beforeEach (done) ->
@sut = new MeshbluXMPP uuid: 'uuid', token: 'token', hostname: 'localhost', port: 5222
@sut.connect done
afterEach 'close client', ->
@sut.close()
describe 'when status is called', ->
beforeEach (done) ->
@client.on 'stanza', (@request) =>
@client.send new xmpp.Stanza('iq',
type: 'result'
to: @request.attrs.from
from: @request.attrs.to
id: @request.attrs.id
).c('response').c('rawData').t JSON.stringify({
meshblu: 'online'
})
@sut.status (error, @response) => done error
it 'should send a stanza to the server', ->
expect(@request).to.exist
expect(@request.toJSON()).to.containSubset
name: 'iq'
attrs:
to: 'localhost'
type: 'get'
children: [{
name: 'request'
children: [{
name: 'metadata'
children: [{
name: 'jobType'
children: ['GetStatus']
}]
}]
}]
it 'should return a status of online: true', ->
expect(@response).to.exist
expect(@response).to.deep.equal meshblu: 'online'
describe 'when status is called twice', ->
beforeEach (done) ->
wait = (delay, fn) -> setTimeout fn, delay
@client.on 'stanza', (@request) =>
@client.send new xmpp.Stanza('iq',
type: 'result'
to: @request.attrs.from
from: @request.attrs.to
id: @request.attrs.id
).c('response').c('rawData').t JSON.stringify({
meshblu: 'online'
})
async.times 2, ((i, callback) => @sut.status callback), done
it 'should return a status of online: true', ->
expect(true).to.be.true
| true | _ = require 'lodash'
async = require 'async'
xmpp = require 'node-xmpp-server'
MeshbluXMPP = require '../'
describe 'Status', ->
beforeEach (done) ->
@server = new xmpp.C2S.TCPServer
port: 5222
domain: 'localhost'
@server.on 'connection', (@client) =>
@client.on 'authenticate', (opts, callback) =>
callback(null, opts)
@server.on 'listening', done
afterEach (done) ->
@server.end done
describe 'without an active connection', ->
beforeEach ->
@sut = new MeshbluXMPP uuid: 'uuid', token: 'PI:KEY:<KEY>END_PI', hostname: 'localhost', port: 5222
describe 'when status is called', ->
beforeEach (done) ->
@sut.status (@error) => done()
it 'should yield an error', ->
expect(=> throw @error).to.throw 'MeshbluXMPP is not connected'
describe 'with an active connection', ->
beforeEach (done) ->
@sut = new MeshbluXMPP uuid: 'uuid', token: 'token', hostname: 'localhost', port: 5222
@sut.connect done
afterEach 'close client', ->
@sut.close()
describe 'when status is called', ->
beforeEach (done) ->
@client.on 'stanza', (@request) =>
@client.send new xmpp.Stanza('iq',
type: 'result'
to: @request.attrs.from
from: @request.attrs.to
id: @request.attrs.id
).c('response').c('rawData').t JSON.stringify({
meshblu: 'online'
})
@sut.status (error, @response) => done error
it 'should send a stanza to the server', ->
expect(@request).to.exist
expect(@request.toJSON()).to.containSubset
name: 'iq'
attrs:
to: 'localhost'
type: 'get'
children: [{
name: 'request'
children: [{
name: 'metadata'
children: [{
name: 'jobType'
children: ['GetStatus']
}]
}]
}]
it 'should return a status of online: true', ->
expect(@response).to.exist
expect(@response).to.deep.equal meshblu: 'online'
describe 'when status is called twice', ->
beforeEach (done) ->
wait = (delay, fn) -> setTimeout fn, delay
@client.on 'stanza', (@request) =>
@client.send new xmpp.Stanza('iq',
type: 'result'
to: @request.attrs.from
from: @request.attrs.to
id: @request.attrs.id
).c('response').c('rawData').t JSON.stringify({
meshblu: 'online'
})
async.times 2, ((i, callback) => @sut.status callback), done
it 'should return a status of online: true', ->
expect(true).to.be.true
|
[
{
"context": ">\n\t\t\tcfg =\n\t\t\t\tuserName: config.user\n\t\t\t\tpassword: config.password\n\t\t\t\thost: config.server\n\t\t\t\tport: config.port\n\t\t\t",
"end": 4613,
"score": 0.9993476271629333,
"start": 4598,
"tag": "PASSWORD",
"value": "config.password"
}
] | src/tds.coffee | Earny/node-mssql | 0 | {Pool} = require 'generic-pool'
tds = require 'tds'
util = require 'util'
FIXED = false
{TYPES, declare} = require('./datatypes')
ISOLATION_LEVEL = require('./isolationlevel')
###
@ignore
###
castParameter = (value, type) ->
unless value? then return null
switch type
when TYPES.VarChar, TYPES.NVarChar, TYPES.Char, TYPES.NChar, TYPES.Xml, TYPES.Text, TYPES.NText
if typeof value isnt 'string' and value not instanceof String
value = value.toString()
when TYPES.Int, TYPES.TinyInt, TYPES.BigInt, TYPES.SmallInt
if typeof value isnt 'number' and value not instanceof Number
value = parseInt(value)
if isNaN(value) then value = null
when TYPES.Float, TYPES.Real, TYPES.Decimal, TYPES.Numeric, TYPES.SmallMoney, TYPES.Money
if typeof value isnt 'number' and value not instanceof Number
value = parseFloat(value)
if isNaN(value) then value = null
when TYPES.Bit
if typeof value isnt 'boolean' and value not instanceof Boolean
value = Boolean(value)
when TYPES.DateTime, TYPES.SmallDateTime, TYPES.DateTimeOffset, TYPES.Date
if value not instanceof Date
value = new Date(value)
when TYPES.Binary, TYPES.VarBinary, TYPES.Image
if value not instanceof Buffer
value = new Buffer(value.toString())
value
###
@ignore
###
createParameterHeader = (param) ->
header =
type: param.type.declaration
switch param.type
when TYPES.VarChar, TYPES.NVarChar, TYPES.VarBinary
header.size = "MAX"
when TYPES.Char, TYPES.NChar, TYPES.Binary
header.size = param.length ? param.value?.length ? 1
header
###
@ignore
###
createColumns = (metadata) ->
out = {}
for column, index in metadata
out[column.name] =
index: index
name: column.name
length: column.length
type: TYPES[column.type.sqlType]
out
###
@ignore
###
isolationLevelDeclaration = (type) ->
switch type
when ISOLATION_LEVEL.READ_UNCOMMITTED then return "READ UNCOMMITTED"
when ISOLATION_LEVEL.READ_COMMITTED then return "READ COMMITTED"
when ISOLATION_LEVEL.REPEATABLE_READ then return "REPEATABLE READ"
when ISOLATION_LEVEL.SERIALIZABLE then return "SERIALIZABLE"
when ISOLATION_LEVEL.SNAPSHOT then return "SNAPSHOT"
else throw new TransactionError "Invalid isolation level."
###
Taken from Tedious.
@private
###
HEXMAP = [
'00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '0A', '0B', '0C', '0D', '0E', '0F',
'10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '1A', '1B', '1C', '1D', '1E', '1F',
'20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '2A', '2B', '2C', '2D', '2E', '2F',
'30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '3A', '3B', '3C', '3D', '3E', '3F',
'40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '4A', '4B', '4C', '4D', '4E', '4F',
'50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '5A', '5B', '5C', '5D', '5E', '5F',
'60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '6A', '6B', '6C', '6D', '6E', '6F',
'70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '7A', '7B', '7C', '7D', '7E', '7F',
'80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '8A', '8B', '8C', '8D', '8E', '8F',
'90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '9A', '9B', '9C', '9D', '9E', '9F',
'A0', 'A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'A9', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF',
'B0', 'B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B9', 'BA', 'BB', 'BC', 'BD', 'BE', 'BF',
'C0', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'CA', 'CB', 'CC', 'CD', 'CE', 'CF',
'D0', 'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7', 'D8', 'D9', 'DA', 'DB', 'DC', 'DD', 'DE', 'DF',
'E0', 'E1', 'E2', 'E3', 'E4', 'E5', 'E6', 'E7', 'E8', 'E9', 'EA', 'EB', 'EC', 'ED', 'EE', 'EF',
'F0', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9', 'FA', 'FB', 'FC', 'FD', 'FE', 'FF'
]
###
Taken from Tedious.
@private
###
parseGuid = (buffer) ->
HEXMAP[buffer[3]] +
HEXMAP[buffer[2]] +
HEXMAP[buffer[1]] +
HEXMAP[buffer[0]] +
'-' +
HEXMAP[buffer[5]] +
HEXMAP[buffer[4]] +
'-' +
HEXMAP[buffer[7]] +
HEXMAP[buffer[6]] +
'-' +
HEXMAP[buffer[8]] +
HEXMAP[buffer[9]] +
'-' +
HEXMAP[buffer[10]] +
HEXMAP[buffer[11]] +
HEXMAP[buffer[12]] +
HEXMAP[buffer[13]] +
HEXMAP[buffer[14]] +
HEXMAP[buffer[15]]
###
@ignore
###
module.exports = (Connection, Transaction, Request, ConnectionError, TransactionError, RequestError) ->
class TDSConnection extends Connection
pool: null
connect: (config, callback) ->
cfg =
userName: config.user
password: config.password
host: config.server
port: config.port
database: config.database
cfg_pool =
name: 'mssql'
max: 10
min: 0
idleTimeoutMillis: 30000
create: (callback) =>
c = new tds.Connection cfg
c.on 'error', (err) =>
if err.code is 'ECONNRESET'
c.hasError = true
return
@emit 'error', err
timeouted = false
tmr = setTimeout ->
timeouted = true
c._client._socket.destroy()
callback new ConnectionError("Connection timeout.", 'ETIMEOUT'), null # there must be a second argument null
, config.timeout ? 15000
c.connect (err) =>
clearTimeout tmr
if timeouted then return
if err then err = ConnectionError err
if err then return callback err, null # there must be a second argument null
callback null, c
validate: (c) ->
c? and not c.hasError
destroy: (c) ->
c?.end()
if config.pool
for key, value of config.pool
cfg_pool[key] = value
@pool = Pool cfg_pool, cfg
#create one testing connection to check if everything is ok
@pool.acquire (err, connection) =>
if err and err not instanceof Error then err = new Error err
if err
@pool.drain => #prevent the pool from creating additional connections. we're done with it
@pool?.destroyAllNow()
@pool = null
else
# and release it immediately
@pool.release connection
callback err
close: (callback) ->
unless @pool then return callback null
@pool.drain =>
@pool?.destroyAllNow()
@pool = null
callback null
class TDSTransaction extends Transaction
begin: (callback) ->
@connection.pool.acquire (err, connection) =>
if err then return callback err
@_pooledConnection = connection
@request().query "set transaction isolation level #{isolationLevelDeclaration(@isolationLevel)}", (err) =>
if err then return TransactionError err
connection.setAutoCommit false, callback
commit: (callback) ->
@_pooledConnection.commit (err) =>
if err then err = TransactionError err
@connection.pool.release @_pooledConnection
@_pooledConnection = null
callback err
rollback: (callback) ->
@_pooledConnection.rollback (err) =>
if err then err = TransactionError err
@connection.pool.release @_pooledConnection
@_pooledConnection = null
callback err
class TDSRequest extends Request
batch: (batch, callback) ->
TDSRequest::query.call @, batch, callback
bulk: (table, callback) ->
process.nextTick -> callback RequestError("Bulk insert is not supported in 'msnodesql' driver.", 'ENOTSUPP')
query: (command, callback) ->
if @verbose and not @nested then @_log "---------- sql query ----------\n query: #{command}"
if command.length is 0
return process.nextTick ->
if @verbose and not @nested
@_log "---------- response -----------"
elapsed = Date.now() - started
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
callback? null, if @multiple or @nested then [] else null
recordset = null
recordsets = []
started = Date.now()
handleOutput = false
errors = []
lastrow = null
paramHeaders = {}
paramValues = {}
for name, param of @parameters when param.io is 1
paramHeaders[name] = createParameterHeader param
paramValues[name] = castParameter(param.value, param.type)
# nested = function is called by this.execute
unless @nested
input = ("@#{param.name} #{declare(param.type, param)}" for name, param of @parameters when param.io is 2)
output = ("@#{param.name} as '#{param.name}'" for name, param of @parameters when param.io is 2)
if input.length then command = "declare #{input.join ','};#{command};"
if output.length
command += "select #{output.join ','};"
handleOutput = true
@_acquire (err, connection) =>
unless err
if @canceled
if @verbose then @_log "---------- canceling ----------"
@_release connection
return callback? new RequestError "Canceled.", 'ECANCEL'
@_cancel = =>
if @verbose then @_log "---------- canceling ----------"
req.cancel()
req = connection.createStatement command, paramHeaders
req.on 'row', (tdsrow) =>
row = {}
for col in tdsrow.metadata.columns
value = tdsrow.getValue col.name
if value?
# convert uniqueidentifier to string
if col.type.name is 'GUIDTYPE'
value = parseGuid value
exi = row[col.name]
if exi?
if exi instanceof Array
exi.push col.value
else
row[col.name] = [exi, value]
else
row[col.name] = value
if @verbose
@_log util.inspect(row)
@_log "---------- --------------------"
unless row["___return___"]?
# row with ___return___ col is the last row
if @stream then @emit 'row', row
else
lastrow = row
unless @stream
recordset.push row
req.on 'metadata', (metadata) =>
recordset = []
Object.defineProperty recordset, 'columns',
enumerable: false
value: createColumns(metadata.columns)
@nested
if @stream
unless recordset.columns["___return___"]?
# row with ___return___ col is the last row
@emit 'recordset', recordset.columns
else
recordsets.push recordset
req.on 'done', (res) =>
if @canceled
e = new RequestError "Canceled.", 'ECANCEL'
if @stream
@emit 'error', e
else
errors.push e
unless @nested
# do we have output parameters to handle?
if handleOutput
last = recordsets.pop()?[0]
for name, param of @parameters when param.io is 2
param.value = last[param.name]
if @verbose
@_log " output: @#{param.name}, #{param.type.declaration}, #{param.value}"
if @verbose
if errors.length
@_log " error: #{error}" for error in errors
elapsed = Date.now() - started
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
if errors.length and not @stream
error = errors.pop()
error.precedingErrors = errors
@_release connection
if @stream
callback null, if @nested then lastrow else null
else
callback? error, if @multiple or @nested then recordsets else recordsets[0]
req.on 'message', (msg) =>
@emit 'info',
message: msg.text
number: msg.number
state: msg.state
class: msg.severity
lineNumber: msg.lineNumber
serverName: msg.serverName
procName: msg.procName
req.on 'error', (err) =>
e = RequestError err, 'EREQUEST'
if @stream
@emit 'error', e
else
errors.push e
req.execute paramValues
else
if connection then @_release connection
callback? err
execute: (procedure, callback) ->
if @verbose then @_log "---------- sql execute --------\n proc: #{procedure}"
started = Date.now()
cmd = "declare #{['@___return___ int'].concat("@#{param.name} #{declare(param.type, param)}" for name, param of @parameters when param.io is 2).join ', '};"
cmd += "exec @___return___ = #{procedure} "
spp = []
for name, param of @parameters
if @verbose
@_log " #{if param.io is 1 then " input" else "output"}: @#{param.name}, #{param.type.declaration}, #{param.value}"
if param.io is 2
# output parameter
spp.push "@#{param.name}=@#{param.name} output"
else
# input parameter
spp.push "@#{param.name}=@#{param.name}"
cmd += "#{spp.join ', '};"
cmd += "select #{['@___return___ as \'___return___\''].concat("@#{param.name} as '#{param.name}'" for name, param of @parameters when param.io is 2).join ', '};"
if @verbose then @_log "---------- response -----------"
@nested = true
# direct call to query, in case method on main request object is overriden (e.g. co-mssql)
TDSRequest::query.call @, cmd, (err, recordsets) =>
@nested = false
if err
if @verbose
elapsed = Date.now() - started
@_log " error: #{err}"
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
callback? err
else
if @stream
last = recordsets
else
last = recordsets.pop()?[0]
if last and last.___return___?
returnValue = last.___return___
for name, param of @parameters when param.io is 2
param.value = last[param.name]
if @verbose
@_log " output: @#{param.name}, #{param.type.declaration}, #{param.value}"
if @verbose
elapsed = Date.now() - started
@_log " return: #{returnValue}"
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
if @stream
callback null, null, returnValue
else
recordsets.returnValue = returnValue
callback? null, recordsets, returnValue
###
Cancel currently executed request.
###
cancel: ->
if @_cancel then return @_cancel()
true
return {
Connection: TDSConnection
Transaction: TDSTransaction
Request: TDSRequest
fix: ->
unless FIXED
require './tds-fix'
FIXED = true
}
| 20195 | {Pool} = require 'generic-pool'
tds = require 'tds'
util = require 'util'
FIXED = false
{TYPES, declare} = require('./datatypes')
ISOLATION_LEVEL = require('./isolationlevel')
###
@ignore
###
castParameter = (value, type) ->
unless value? then return null
switch type
when TYPES.VarChar, TYPES.NVarChar, TYPES.Char, TYPES.NChar, TYPES.Xml, TYPES.Text, TYPES.NText
if typeof value isnt 'string' and value not instanceof String
value = value.toString()
when TYPES.Int, TYPES.TinyInt, TYPES.BigInt, TYPES.SmallInt
if typeof value isnt 'number' and value not instanceof Number
value = parseInt(value)
if isNaN(value) then value = null
when TYPES.Float, TYPES.Real, TYPES.Decimal, TYPES.Numeric, TYPES.SmallMoney, TYPES.Money
if typeof value isnt 'number' and value not instanceof Number
value = parseFloat(value)
if isNaN(value) then value = null
when TYPES.Bit
if typeof value isnt 'boolean' and value not instanceof Boolean
value = Boolean(value)
when TYPES.DateTime, TYPES.SmallDateTime, TYPES.DateTimeOffset, TYPES.Date
if value not instanceof Date
value = new Date(value)
when TYPES.Binary, TYPES.VarBinary, TYPES.Image
if value not instanceof Buffer
value = new Buffer(value.toString())
value
###
@ignore
###
createParameterHeader = (param) ->
header =
type: param.type.declaration
switch param.type
when TYPES.VarChar, TYPES.NVarChar, TYPES.VarBinary
header.size = "MAX"
when TYPES.Char, TYPES.NChar, TYPES.Binary
header.size = param.length ? param.value?.length ? 1
header
###
@ignore
###
createColumns = (metadata) ->
out = {}
for column, index in metadata
out[column.name] =
index: index
name: column.name
length: column.length
type: TYPES[column.type.sqlType]
out
###
@ignore
###
isolationLevelDeclaration = (type) ->
switch type
when ISOLATION_LEVEL.READ_UNCOMMITTED then return "READ UNCOMMITTED"
when ISOLATION_LEVEL.READ_COMMITTED then return "READ COMMITTED"
when ISOLATION_LEVEL.REPEATABLE_READ then return "REPEATABLE READ"
when ISOLATION_LEVEL.SERIALIZABLE then return "SERIALIZABLE"
when ISOLATION_LEVEL.SNAPSHOT then return "SNAPSHOT"
else throw new TransactionError "Invalid isolation level."
###
Taken from Tedious.
@private
###
HEXMAP = [
'00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '0A', '0B', '0C', '0D', '0E', '0F',
'10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '1A', '1B', '1C', '1D', '1E', '1F',
'20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '2A', '2B', '2C', '2D', '2E', '2F',
'30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '3A', '3B', '3C', '3D', '3E', '3F',
'40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '4A', '4B', '4C', '4D', '4E', '4F',
'50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '5A', '5B', '5C', '5D', '5E', '5F',
'60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '6A', '6B', '6C', '6D', '6E', '6F',
'70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '7A', '7B', '7C', '7D', '7E', '7F',
'80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '8A', '8B', '8C', '8D', '8E', '8F',
'90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '9A', '9B', '9C', '9D', '9E', '9F',
'A0', 'A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'A9', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF',
'B0', 'B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B9', 'BA', 'BB', 'BC', 'BD', 'BE', 'BF',
'C0', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'CA', 'CB', 'CC', 'CD', 'CE', 'CF',
'D0', 'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7', 'D8', 'D9', 'DA', 'DB', 'DC', 'DD', 'DE', 'DF',
'E0', 'E1', 'E2', 'E3', 'E4', 'E5', 'E6', 'E7', 'E8', 'E9', 'EA', 'EB', 'EC', 'ED', 'EE', 'EF',
'F0', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9', 'FA', 'FB', 'FC', 'FD', 'FE', 'FF'
]
###
Taken from Tedious.
@private
###
parseGuid = (buffer) ->
HEXMAP[buffer[3]] +
HEXMAP[buffer[2]] +
HEXMAP[buffer[1]] +
HEXMAP[buffer[0]] +
'-' +
HEXMAP[buffer[5]] +
HEXMAP[buffer[4]] +
'-' +
HEXMAP[buffer[7]] +
HEXMAP[buffer[6]] +
'-' +
HEXMAP[buffer[8]] +
HEXMAP[buffer[9]] +
'-' +
HEXMAP[buffer[10]] +
HEXMAP[buffer[11]] +
HEXMAP[buffer[12]] +
HEXMAP[buffer[13]] +
HEXMAP[buffer[14]] +
HEXMAP[buffer[15]]
###
@ignore
###
module.exports = (Connection, Transaction, Request, ConnectionError, TransactionError, RequestError) ->
class TDSConnection extends Connection
pool: null
connect: (config, callback) ->
cfg =
userName: config.user
password: <PASSWORD>
host: config.server
port: config.port
database: config.database
cfg_pool =
name: 'mssql'
max: 10
min: 0
idleTimeoutMillis: 30000
create: (callback) =>
c = new tds.Connection cfg
c.on 'error', (err) =>
if err.code is 'ECONNRESET'
c.hasError = true
return
@emit 'error', err
timeouted = false
tmr = setTimeout ->
timeouted = true
c._client._socket.destroy()
callback new ConnectionError("Connection timeout.", 'ETIMEOUT'), null # there must be a second argument null
, config.timeout ? 15000
c.connect (err) =>
clearTimeout tmr
if timeouted then return
if err then err = ConnectionError err
if err then return callback err, null # there must be a second argument null
callback null, c
validate: (c) ->
c? and not c.hasError
destroy: (c) ->
c?.end()
if config.pool
for key, value of config.pool
cfg_pool[key] = value
@pool = Pool cfg_pool, cfg
#create one testing connection to check if everything is ok
@pool.acquire (err, connection) =>
if err and err not instanceof Error then err = new Error err
if err
@pool.drain => #prevent the pool from creating additional connections. we're done with it
@pool?.destroyAllNow()
@pool = null
else
# and release it immediately
@pool.release connection
callback err
close: (callback) ->
unless @pool then return callback null
@pool.drain =>
@pool?.destroyAllNow()
@pool = null
callback null
class TDSTransaction extends Transaction
begin: (callback) ->
@connection.pool.acquire (err, connection) =>
if err then return callback err
@_pooledConnection = connection
@request().query "set transaction isolation level #{isolationLevelDeclaration(@isolationLevel)}", (err) =>
if err then return TransactionError err
connection.setAutoCommit false, callback
commit: (callback) ->
@_pooledConnection.commit (err) =>
if err then err = TransactionError err
@connection.pool.release @_pooledConnection
@_pooledConnection = null
callback err
rollback: (callback) ->
@_pooledConnection.rollback (err) =>
if err then err = TransactionError err
@connection.pool.release @_pooledConnection
@_pooledConnection = null
callback err
class TDSRequest extends Request
batch: (batch, callback) ->
TDSRequest::query.call @, batch, callback
bulk: (table, callback) ->
process.nextTick -> callback RequestError("Bulk insert is not supported in 'msnodesql' driver.", 'ENOTSUPP')
query: (command, callback) ->
if @verbose and not @nested then @_log "---------- sql query ----------\n query: #{command}"
if command.length is 0
return process.nextTick ->
if @verbose and not @nested
@_log "---------- response -----------"
elapsed = Date.now() - started
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
callback? null, if @multiple or @nested then [] else null
recordset = null
recordsets = []
started = Date.now()
handleOutput = false
errors = []
lastrow = null
paramHeaders = {}
paramValues = {}
for name, param of @parameters when param.io is 1
paramHeaders[name] = createParameterHeader param
paramValues[name] = castParameter(param.value, param.type)
# nested = function is called by this.execute
unless @nested
input = ("@#{param.name} #{declare(param.type, param)}" for name, param of @parameters when param.io is 2)
output = ("@#{param.name} as '#{param.name}'" for name, param of @parameters when param.io is 2)
if input.length then command = "declare #{input.join ','};#{command};"
if output.length
command += "select #{output.join ','};"
handleOutput = true
@_acquire (err, connection) =>
unless err
if @canceled
if @verbose then @_log "---------- canceling ----------"
@_release connection
return callback? new RequestError "Canceled.", 'ECANCEL'
@_cancel = =>
if @verbose then @_log "---------- canceling ----------"
req.cancel()
req = connection.createStatement command, paramHeaders
req.on 'row', (tdsrow) =>
row = {}
for col in tdsrow.metadata.columns
value = tdsrow.getValue col.name
if value?
# convert uniqueidentifier to string
if col.type.name is 'GUIDTYPE'
value = parseGuid value
exi = row[col.name]
if exi?
if exi instanceof Array
exi.push col.value
else
row[col.name] = [exi, value]
else
row[col.name] = value
if @verbose
@_log util.inspect(row)
@_log "---------- --------------------"
unless row["___return___"]?
# row with ___return___ col is the last row
if @stream then @emit 'row', row
else
lastrow = row
unless @stream
recordset.push row
req.on 'metadata', (metadata) =>
recordset = []
Object.defineProperty recordset, 'columns',
enumerable: false
value: createColumns(metadata.columns)
@nested
if @stream
unless recordset.columns["___return___"]?
# row with ___return___ col is the last row
@emit 'recordset', recordset.columns
else
recordsets.push recordset
req.on 'done', (res) =>
if @canceled
e = new RequestError "Canceled.", 'ECANCEL'
if @stream
@emit 'error', e
else
errors.push e
unless @nested
# do we have output parameters to handle?
if handleOutput
last = recordsets.pop()?[0]
for name, param of @parameters when param.io is 2
param.value = last[param.name]
if @verbose
@_log " output: @#{param.name}, #{param.type.declaration}, #{param.value}"
if @verbose
if errors.length
@_log " error: #{error}" for error in errors
elapsed = Date.now() - started
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
if errors.length and not @stream
error = errors.pop()
error.precedingErrors = errors
@_release connection
if @stream
callback null, if @nested then lastrow else null
else
callback? error, if @multiple or @nested then recordsets else recordsets[0]
req.on 'message', (msg) =>
@emit 'info',
message: msg.text
number: msg.number
state: msg.state
class: msg.severity
lineNumber: msg.lineNumber
serverName: msg.serverName
procName: msg.procName
req.on 'error', (err) =>
e = RequestError err, 'EREQUEST'
if @stream
@emit 'error', e
else
errors.push e
req.execute paramValues
else
if connection then @_release connection
callback? err
execute: (procedure, callback) ->
if @verbose then @_log "---------- sql execute --------\n proc: #{procedure}"
started = Date.now()
cmd = "declare #{['@___return___ int'].concat("@#{param.name} #{declare(param.type, param)}" for name, param of @parameters when param.io is 2).join ', '};"
cmd += "exec @___return___ = #{procedure} "
spp = []
for name, param of @parameters
if @verbose
@_log " #{if param.io is 1 then " input" else "output"}: @#{param.name}, #{param.type.declaration}, #{param.value}"
if param.io is 2
# output parameter
spp.push "@#{param.name}=@#{param.name} output"
else
# input parameter
spp.push "@#{param.name}=@#{param.name}"
cmd += "#{spp.join ', '};"
cmd += "select #{['@___return___ as \'___return___\''].concat("@#{param.name} as '#{param.name}'" for name, param of @parameters when param.io is 2).join ', '};"
if @verbose then @_log "---------- response -----------"
@nested = true
# direct call to query, in case method on main request object is overriden (e.g. co-mssql)
TDSRequest::query.call @, cmd, (err, recordsets) =>
@nested = false
if err
if @verbose
elapsed = Date.now() - started
@_log " error: #{err}"
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
callback? err
else
if @stream
last = recordsets
else
last = recordsets.pop()?[0]
if last and last.___return___?
returnValue = last.___return___
for name, param of @parameters when param.io is 2
param.value = last[param.name]
if @verbose
@_log " output: @#{param.name}, #{param.type.declaration}, #{param.value}"
if @verbose
elapsed = Date.now() - started
@_log " return: #{returnValue}"
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
if @stream
callback null, null, returnValue
else
recordsets.returnValue = returnValue
callback? null, recordsets, returnValue
###
Cancel currently executed request.
###
cancel: ->
if @_cancel then return @_cancel()
true
return {
Connection: TDSConnection
Transaction: TDSTransaction
Request: TDSRequest
fix: ->
unless FIXED
require './tds-fix'
FIXED = true
}
| true | {Pool} = require 'generic-pool'
tds = require 'tds'
util = require 'util'
FIXED = false
{TYPES, declare} = require('./datatypes')
ISOLATION_LEVEL = require('./isolationlevel')
###
@ignore
###
castParameter = (value, type) ->
unless value? then return null
switch type
when TYPES.VarChar, TYPES.NVarChar, TYPES.Char, TYPES.NChar, TYPES.Xml, TYPES.Text, TYPES.NText
if typeof value isnt 'string' and value not instanceof String
value = value.toString()
when TYPES.Int, TYPES.TinyInt, TYPES.BigInt, TYPES.SmallInt
if typeof value isnt 'number' and value not instanceof Number
value = parseInt(value)
if isNaN(value) then value = null
when TYPES.Float, TYPES.Real, TYPES.Decimal, TYPES.Numeric, TYPES.SmallMoney, TYPES.Money
if typeof value isnt 'number' and value not instanceof Number
value = parseFloat(value)
if isNaN(value) then value = null
when TYPES.Bit
if typeof value isnt 'boolean' and value not instanceof Boolean
value = Boolean(value)
when TYPES.DateTime, TYPES.SmallDateTime, TYPES.DateTimeOffset, TYPES.Date
if value not instanceof Date
value = new Date(value)
when TYPES.Binary, TYPES.VarBinary, TYPES.Image
if value not instanceof Buffer
value = new Buffer(value.toString())
value
###
@ignore
###
createParameterHeader = (param) ->
header =
type: param.type.declaration
switch param.type
when TYPES.VarChar, TYPES.NVarChar, TYPES.VarBinary
header.size = "MAX"
when TYPES.Char, TYPES.NChar, TYPES.Binary
header.size = param.length ? param.value?.length ? 1
header
###
@ignore
###
createColumns = (metadata) ->
out = {}
for column, index in metadata
out[column.name] =
index: index
name: column.name
length: column.length
type: TYPES[column.type.sqlType]
out
###
@ignore
###
isolationLevelDeclaration = (type) ->
switch type
when ISOLATION_LEVEL.READ_UNCOMMITTED then return "READ UNCOMMITTED"
when ISOLATION_LEVEL.READ_COMMITTED then return "READ COMMITTED"
when ISOLATION_LEVEL.REPEATABLE_READ then return "REPEATABLE READ"
when ISOLATION_LEVEL.SERIALIZABLE then return "SERIALIZABLE"
when ISOLATION_LEVEL.SNAPSHOT then return "SNAPSHOT"
else throw new TransactionError "Invalid isolation level."
###
Taken from Tedious.
@private
###
HEXMAP = [
'00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '0A', '0B', '0C', '0D', '0E', '0F',
'10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '1A', '1B', '1C', '1D', '1E', '1F',
'20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '2A', '2B', '2C', '2D', '2E', '2F',
'30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '3A', '3B', '3C', '3D', '3E', '3F',
'40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '4A', '4B', '4C', '4D', '4E', '4F',
'50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '5A', '5B', '5C', '5D', '5E', '5F',
'60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '6A', '6B', '6C', '6D', '6E', '6F',
'70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '7A', '7B', '7C', '7D', '7E', '7F',
'80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '8A', '8B', '8C', '8D', '8E', '8F',
'90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '9A', '9B', '9C', '9D', '9E', '9F',
'A0', 'A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'A9', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF',
'B0', 'B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B9', 'BA', 'BB', 'BC', 'BD', 'BE', 'BF',
'C0', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'CA', 'CB', 'CC', 'CD', 'CE', 'CF',
'D0', 'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7', 'D8', 'D9', 'DA', 'DB', 'DC', 'DD', 'DE', 'DF',
'E0', 'E1', 'E2', 'E3', 'E4', 'E5', 'E6', 'E7', 'E8', 'E9', 'EA', 'EB', 'EC', 'ED', 'EE', 'EF',
'F0', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9', 'FA', 'FB', 'FC', 'FD', 'FE', 'FF'
]
###
Taken from Tedious.
@private
###
parseGuid = (buffer) ->
HEXMAP[buffer[3]] +
HEXMAP[buffer[2]] +
HEXMAP[buffer[1]] +
HEXMAP[buffer[0]] +
'-' +
HEXMAP[buffer[5]] +
HEXMAP[buffer[4]] +
'-' +
HEXMAP[buffer[7]] +
HEXMAP[buffer[6]] +
'-' +
HEXMAP[buffer[8]] +
HEXMAP[buffer[9]] +
'-' +
HEXMAP[buffer[10]] +
HEXMAP[buffer[11]] +
HEXMAP[buffer[12]] +
HEXMAP[buffer[13]] +
HEXMAP[buffer[14]] +
HEXMAP[buffer[15]]
###
@ignore
###
module.exports = (Connection, Transaction, Request, ConnectionError, TransactionError, RequestError) ->
class TDSConnection extends Connection
pool: null
connect: (config, callback) ->
cfg =
userName: config.user
password: PI:PASSWORD:<PASSWORD>END_PI
host: config.server
port: config.port
database: config.database
cfg_pool =
name: 'mssql'
max: 10
min: 0
idleTimeoutMillis: 30000
create: (callback) =>
c = new tds.Connection cfg
c.on 'error', (err) =>
if err.code is 'ECONNRESET'
c.hasError = true
return
@emit 'error', err
timeouted = false
tmr = setTimeout ->
timeouted = true
c._client._socket.destroy()
callback new ConnectionError("Connection timeout.", 'ETIMEOUT'), null # there must be a second argument null
, config.timeout ? 15000
c.connect (err) =>
clearTimeout tmr
if timeouted then return
if err then err = ConnectionError err
if err then return callback err, null # there must be a second argument null
callback null, c
validate: (c) ->
c? and not c.hasError
destroy: (c) ->
c?.end()
if config.pool
for key, value of config.pool
cfg_pool[key] = value
@pool = Pool cfg_pool, cfg
#create one testing connection to check if everything is ok
@pool.acquire (err, connection) =>
if err and err not instanceof Error then err = new Error err
if err
@pool.drain => #prevent the pool from creating additional connections. we're done with it
@pool?.destroyAllNow()
@pool = null
else
# and release it immediately
@pool.release connection
callback err
close: (callback) ->
unless @pool then return callback null
@pool.drain =>
@pool?.destroyAllNow()
@pool = null
callback null
class TDSTransaction extends Transaction
begin: (callback) ->
@connection.pool.acquire (err, connection) =>
if err then return callback err
@_pooledConnection = connection
@request().query "set transaction isolation level #{isolationLevelDeclaration(@isolationLevel)}", (err) =>
if err then return TransactionError err
connection.setAutoCommit false, callback
commit: (callback) ->
@_pooledConnection.commit (err) =>
if err then err = TransactionError err
@connection.pool.release @_pooledConnection
@_pooledConnection = null
callback err
rollback: (callback) ->
@_pooledConnection.rollback (err) =>
if err then err = TransactionError err
@connection.pool.release @_pooledConnection
@_pooledConnection = null
callback err
class TDSRequest extends Request
batch: (batch, callback) ->
TDSRequest::query.call @, batch, callback
bulk: (table, callback) ->
process.nextTick -> callback RequestError("Bulk insert is not supported in 'msnodesql' driver.", 'ENOTSUPP')
query: (command, callback) ->
if @verbose and not @nested then @_log "---------- sql query ----------\n query: #{command}"
if command.length is 0
return process.nextTick ->
if @verbose and not @nested
@_log "---------- response -----------"
elapsed = Date.now() - started
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
callback? null, if @multiple or @nested then [] else null
recordset = null
recordsets = []
started = Date.now()
handleOutput = false
errors = []
lastrow = null
paramHeaders = {}
paramValues = {}
for name, param of @parameters when param.io is 1
paramHeaders[name] = createParameterHeader param
paramValues[name] = castParameter(param.value, param.type)
# nested = function is called by this.execute
unless @nested
input = ("@#{param.name} #{declare(param.type, param)}" for name, param of @parameters when param.io is 2)
output = ("@#{param.name} as '#{param.name}'" for name, param of @parameters when param.io is 2)
if input.length then command = "declare #{input.join ','};#{command};"
if output.length
command += "select #{output.join ','};"
handleOutput = true
@_acquire (err, connection) =>
unless err
if @canceled
if @verbose then @_log "---------- canceling ----------"
@_release connection
return callback? new RequestError "Canceled.", 'ECANCEL'
@_cancel = =>
if @verbose then @_log "---------- canceling ----------"
req.cancel()
req = connection.createStatement command, paramHeaders
req.on 'row', (tdsrow) =>
row = {}
for col in tdsrow.metadata.columns
value = tdsrow.getValue col.name
if value?
# convert uniqueidentifier to string
if col.type.name is 'GUIDTYPE'
value = parseGuid value
exi = row[col.name]
if exi?
if exi instanceof Array
exi.push col.value
else
row[col.name] = [exi, value]
else
row[col.name] = value
if @verbose
@_log util.inspect(row)
@_log "---------- --------------------"
unless row["___return___"]?
# row with ___return___ col is the last row
if @stream then @emit 'row', row
else
lastrow = row
unless @stream
recordset.push row
req.on 'metadata', (metadata) =>
recordset = []
Object.defineProperty recordset, 'columns',
enumerable: false
value: createColumns(metadata.columns)
@nested
if @stream
unless recordset.columns["___return___"]?
# row with ___return___ col is the last row
@emit 'recordset', recordset.columns
else
recordsets.push recordset
req.on 'done', (res) =>
if @canceled
e = new RequestError "Canceled.", 'ECANCEL'
if @stream
@emit 'error', e
else
errors.push e
unless @nested
# do we have output parameters to handle?
if handleOutput
last = recordsets.pop()?[0]
for name, param of @parameters when param.io is 2
param.value = last[param.name]
if @verbose
@_log " output: @#{param.name}, #{param.type.declaration}, #{param.value}"
if @verbose
if errors.length
@_log " error: #{error}" for error in errors
elapsed = Date.now() - started
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
if errors.length and not @stream
error = errors.pop()
error.precedingErrors = errors
@_release connection
if @stream
callback null, if @nested then lastrow else null
else
callback? error, if @multiple or @nested then recordsets else recordsets[0]
req.on 'message', (msg) =>
@emit 'info',
message: msg.text
number: msg.number
state: msg.state
class: msg.severity
lineNumber: msg.lineNumber
serverName: msg.serverName
procName: msg.procName
req.on 'error', (err) =>
e = RequestError err, 'EREQUEST'
if @stream
@emit 'error', e
else
errors.push e
req.execute paramValues
else
if connection then @_release connection
callback? err
execute: (procedure, callback) ->
if @verbose then @_log "---------- sql execute --------\n proc: #{procedure}"
started = Date.now()
cmd = "declare #{['@___return___ int'].concat("@#{param.name} #{declare(param.type, param)}" for name, param of @parameters when param.io is 2).join ', '};"
cmd += "exec @___return___ = #{procedure} "
spp = []
for name, param of @parameters
if @verbose
@_log " #{if param.io is 1 then " input" else "output"}: @#{param.name}, #{param.type.declaration}, #{param.value}"
if param.io is 2
# output parameter
spp.push "@#{param.name}=@#{param.name} output"
else
# input parameter
spp.push "@#{param.name}=@#{param.name}"
cmd += "#{spp.join ', '};"
cmd += "select #{['@___return___ as \'___return___\''].concat("@#{param.name} as '#{param.name}'" for name, param of @parameters when param.io is 2).join ', '};"
if @verbose then @_log "---------- response -----------"
@nested = true
# direct call to query, in case method on main request object is overriden (e.g. co-mssql)
TDSRequest::query.call @, cmd, (err, recordsets) =>
@nested = false
if err
if @verbose
elapsed = Date.now() - started
@_log " error: #{err}"
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
callback? err
else
if @stream
last = recordsets
else
last = recordsets.pop()?[0]
if last and last.___return___?
returnValue = last.___return___
for name, param of @parameters when param.io is 2
param.value = last[param.name]
if @verbose
@_log " output: @#{param.name}, #{param.type.declaration}, #{param.value}"
if @verbose
elapsed = Date.now() - started
@_log " return: #{returnValue}"
@_log " duration: #{elapsed}ms"
@_log "---------- completed ----------"
if @stream
callback null, null, returnValue
else
recordsets.returnValue = returnValue
callback? null, recordsets, returnValue
###
Cancel currently executed request.
###
cancel: ->
if @_cancel then return @_cancel()
true
return {
Connection: TDSConnection
Transaction: TDSTransaction
Request: TDSRequest
fix: ->
unless FIXED
require './tds-fix'
FIXED = true
}
|
[
{
"context": "y (http://jquery.com)\n\n@source: http://github.com/zaniitiin/band-clock/\n@autor: Nitin Jha\n@version: 1.0\n###\n\n",
"end": 313,
"score": 0.9725514054298401,
"start": 304,
"tag": "USERNAME",
"value": "zaniitiin"
},
{
"context": "e: http://github.com/zaniitiin/band-clo... | bandClock.coffee | mohanhegde/fork_repo | 0 | ###
Band Clock is a jquery plugin to display a dynamic band clock.
Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
Built on top of the jQuery library (http://jquery.com)
@source: http://github.com/zaniitiin/band-clock/
@autor: Nitin Jha
@version: 1.0
###
'use strict'
(($) ->
$.bandClock = (el, options) ->
@el = el
@$el = $ el
@$el.data 'bandClock', @
@init = =>
@options = $.extend {}, $.bandClock.defaultOptions, options
#creating canvas element
@canvas = $("<canvas width='#{@options.size}' height='#{@options.size}'
></canvas>").get(0)
@$el.append @canvas
@ctx = @canvas.getContext('2d')
if window.devicePixelRatio > 1
scaleBy = window.devicePixelRatio
$(@canvas).css({
width: @options.size
height: @options.size
})
@canvas.width *= scaleBy
@canvas.height *= scaleBy
@ctx.scale scaleBy, scaleBy
@$el.addClass 'bandClock'
@$el.css({
width: @options.size
height: @options.size
lineHeight: "#{@options.size}px"
})
_x = @options.size/2
@ctx.translate _x, _x
@ctx.shadowBlur = 2;
@ctx.shadowColor = @options.color
@
degToRed = (degree)->
factor = Math.PI / 180
degree * factor
renderTime = =>
_x = @options.size/2
_g = @options.gap
_r = _x - (@options.lineWidth + 2)
_r1 = _r - (_g + @options.lineWidth)
_r2 = _r1 - (_g + @options.lineWidth)
@ctx.fillStyle = @options.bgColor
@ctx.fillRect -_x, -_x, @options.size, @options.size
@ctx.strokeStyle = @options.color
@ctx.lineWidth = @options.lineWidth
@ctx.lineCap = @options.lineCap
now = new Date()
hours = now.getHours();
minutes = now.getMinutes();
seconds = now.getSeconds();
milliseconds = now.getMilliseconds();
newSeconds = seconds + (milliseconds / 1000);
time = now.toLocaleTimeString();
#Hours
@ctx.beginPath()
@ctx.arc(0, 0, _r, degToRed(270), degToRed((hours * 30) - 90))
@ctx.stroke()
#Minutes
@ctx.beginPath()
@ctx.arc(0, 0, _r1, degToRed(270), degToRed((minutes * 6) - 90))
@ctx.stroke()
#Seconds
@ctx.beginPath()
@ctx.arc(0, 0, _r2, degToRed(270), degToRed((newSeconds * 6) - 90))
@ctx.stroke()
#Time
@ctx.font = _this.options.fontStyle
@ctx.fillStyle = _this.options.color
@ctx.textAlign = "center"
@ctx.fillText(time, 0, 0)
@
setInterval renderTime, 40
@init()
$.bandClock.defaultOptions =
size : 300
color: '#18FFFF'
bgColor: '#212121'
lineWidth: 10
lineCap: 'butt'
gap: 5
fontStyle: '20px Verdana'
$.fn.bandClock = (options) ->
$.each @, (i, el) ->
$el = ($ el)
unless $el.data 'bandClock'
instanceOptions = $.extend {}, options , $el.data()
$el.data 'bandClock', new $.bandClock el, instanceOptions
undefined
)(jQuery)
| 59490 | ###
Band Clock is a jquery plugin to display a dynamic band clock.
Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
Built on top of the jQuery library (http://jquery.com)
@source: http://github.com/zaniitiin/band-clock/
@autor: <NAME>
@version: 1.0
###
'use strict'
(($) ->
$.bandClock = (el, options) ->
@el = el
@$el = $ el
@$el.data 'bandClock', @
@init = =>
@options = $.extend {}, $.bandClock.defaultOptions, options
#creating canvas element
@canvas = $("<canvas width='#{@options.size}' height='#{@options.size}'
></canvas>").get(0)
@$el.append @canvas
@ctx = @canvas.getContext('2d')
if window.devicePixelRatio > 1
scaleBy = window.devicePixelRatio
$(@canvas).css({
width: @options.size
height: @options.size
})
@canvas.width *= scaleBy
@canvas.height *= scaleBy
@ctx.scale scaleBy, scaleBy
@$el.addClass 'bandClock'
@$el.css({
width: @options.size
height: @options.size
lineHeight: "#{@options.size}px"
})
_x = @options.size/2
@ctx.translate _x, _x
@ctx.shadowBlur = 2;
@ctx.shadowColor = @options.color
@
degToRed = (degree)->
factor = Math.PI / 180
degree * factor
renderTime = =>
_x = @options.size/2
_g = @options.gap
_r = _x - (@options.lineWidth + 2)
_r1 = _r - (_g + @options.lineWidth)
_r2 = _r1 - (_g + @options.lineWidth)
@ctx.fillStyle = @options.bgColor
@ctx.fillRect -_x, -_x, @options.size, @options.size
@ctx.strokeStyle = @options.color
@ctx.lineWidth = @options.lineWidth
@ctx.lineCap = @options.lineCap
now = new Date()
hours = now.getHours();
minutes = now.getMinutes();
seconds = now.getSeconds();
milliseconds = now.getMilliseconds();
newSeconds = seconds + (milliseconds / 1000);
time = now.toLocaleTimeString();
#Hours
@ctx.beginPath()
@ctx.arc(0, 0, _r, degToRed(270), degToRed((hours * 30) - 90))
@ctx.stroke()
#Minutes
@ctx.beginPath()
@ctx.arc(0, 0, _r1, degToRed(270), degToRed((minutes * 6) - 90))
@ctx.stroke()
#Seconds
@ctx.beginPath()
@ctx.arc(0, 0, _r2, degToRed(270), degToRed((newSeconds * 6) - 90))
@ctx.stroke()
#Time
@ctx.font = _this.options.fontStyle
@ctx.fillStyle = _this.options.color
@ctx.textAlign = "center"
@ctx.fillText(time, 0, 0)
@
setInterval renderTime, 40
@init()
$.bandClock.defaultOptions =
size : 300
color: '#18FFFF'
bgColor: '#212121'
lineWidth: 10
lineCap: 'butt'
gap: 5
fontStyle: '20px Verdana'
$.fn.bandClock = (options) ->
$.each @, (i, el) ->
$el = ($ el)
unless $el.data 'bandClock'
instanceOptions = $.extend {}, options , $el.data()
$el.data 'bandClock', new $.bandClock el, instanceOptions
undefined
)(jQuery)
| true | ###
Band Clock is a jquery plugin to display a dynamic band clock.
Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
Built on top of the jQuery library (http://jquery.com)
@source: http://github.com/zaniitiin/band-clock/
@autor: PI:NAME:<NAME>END_PI
@version: 1.0
###
'use strict'
(($) ->
$.bandClock = (el, options) ->
@el = el
@$el = $ el
@$el.data 'bandClock', @
@init = =>
@options = $.extend {}, $.bandClock.defaultOptions, options
#creating canvas element
@canvas = $("<canvas width='#{@options.size}' height='#{@options.size}'
></canvas>").get(0)
@$el.append @canvas
@ctx = @canvas.getContext('2d')
if window.devicePixelRatio > 1
scaleBy = window.devicePixelRatio
$(@canvas).css({
width: @options.size
height: @options.size
})
@canvas.width *= scaleBy
@canvas.height *= scaleBy
@ctx.scale scaleBy, scaleBy
@$el.addClass 'bandClock'
@$el.css({
width: @options.size
height: @options.size
lineHeight: "#{@options.size}px"
})
_x = @options.size/2
@ctx.translate _x, _x
@ctx.shadowBlur = 2;
@ctx.shadowColor = @options.color
@
degToRed = (degree)->
factor = Math.PI / 180
degree * factor
renderTime = =>
_x = @options.size/2
_g = @options.gap
_r = _x - (@options.lineWidth + 2)
_r1 = _r - (_g + @options.lineWidth)
_r2 = _r1 - (_g + @options.lineWidth)
@ctx.fillStyle = @options.bgColor
@ctx.fillRect -_x, -_x, @options.size, @options.size
@ctx.strokeStyle = @options.color
@ctx.lineWidth = @options.lineWidth
@ctx.lineCap = @options.lineCap
now = new Date()
hours = now.getHours();
minutes = now.getMinutes();
seconds = now.getSeconds();
milliseconds = now.getMilliseconds();
newSeconds = seconds + (milliseconds / 1000);
time = now.toLocaleTimeString();
#Hours
@ctx.beginPath()
@ctx.arc(0, 0, _r, degToRed(270), degToRed((hours * 30) - 90))
@ctx.stroke()
#Minutes
@ctx.beginPath()
@ctx.arc(0, 0, _r1, degToRed(270), degToRed((minutes * 6) - 90))
@ctx.stroke()
#Seconds
@ctx.beginPath()
@ctx.arc(0, 0, _r2, degToRed(270), degToRed((newSeconds * 6) - 90))
@ctx.stroke()
#Time
@ctx.font = _this.options.fontStyle
@ctx.fillStyle = _this.options.color
@ctx.textAlign = "center"
@ctx.fillText(time, 0, 0)
@
setInterval renderTime, 40
@init()
$.bandClock.defaultOptions =
size : 300
color: '#18FFFF'
bgColor: '#212121'
lineWidth: 10
lineCap: 'butt'
gap: 5
fontStyle: '20px Verdana'
$.fn.bandClock = (options) ->
$.each @, (i, el) ->
$el = ($ el)
unless $el.data 'bandClock'
instanceOptions = $.extend {}, options , $el.data()
$el.data 'bandClock', new $.bandClock el, instanceOptions
undefined
)(jQuery)
|
[
{
"context": "on-email\"]').val()\r\n password = $('input[name=\"houston-password\"]').val()\r\n\r\n afterLogin = (error) ->",
"end": 258,
"score": 0.9660490155220032,
"start": 251,
"tag": "PASSWORD",
"value": "houston"
},
{
"context": "ateUser {\r\n email: email\r\n ... | packages/houston/client/admin_login.coffee | reillyisawesome/Nectar | 3 | Template._houston_login.helpers(
logged_in: -> Meteor.user()
)
Template._houston_login.events(
'submit #houston-sign-in-form': (e) ->
e.preventDefault()
email = $('input[name="houston-email"]').val()
password = $('input[name="houston-password"]').val()
afterLogin = (error) ->
# TODO error case that properly displays
if error
alert error
else
Houston._go 'home'
if Houston._admin_user_exists()
Meteor.loginWithPassword email, password, afterLogin
else
Accounts.createUser {
email: email
password: password
}, (error) ->
return afterLogin(error) if error
Houston.becomeAdmin()
'click #houston-logout': (e) ->
e.preventDefault()
Meteor.logout()
# going 'home' clears the side nav
Houston._go 'home'
'click #become-houston-admin': (e) ->
e.preventDefault()
Houston.becomeAdmin()
)
Template._houston_login.rendered = ->
$(window).unbind('scroll')
| 134049 | Template._houston_login.helpers(
logged_in: -> Meteor.user()
)
Template._houston_login.events(
'submit #houston-sign-in-form': (e) ->
e.preventDefault()
email = $('input[name="houston-email"]').val()
password = $('input[name="<PASSWORD>-password"]').val()
afterLogin = (error) ->
# TODO error case that properly displays
if error
alert error
else
Houston._go 'home'
if Houston._admin_user_exists()
Meteor.loginWithPassword email, password, afterLogin
else
Accounts.createUser {
email: email
password: <PASSWORD>
}, (error) ->
return afterLogin(error) if error
Houston.becomeAdmin()
'click #houston-logout': (e) ->
e.preventDefault()
Meteor.logout()
# going 'home' clears the side nav
Houston._go 'home'
'click #become-houston-admin': (e) ->
e.preventDefault()
Houston.becomeAdmin()
)
Template._houston_login.rendered = ->
$(window).unbind('scroll')
| true | Template._houston_login.helpers(
logged_in: -> Meteor.user()
)
Template._houston_login.events(
'submit #houston-sign-in-form': (e) ->
e.preventDefault()
email = $('input[name="houston-email"]').val()
password = $('input[name="PI:PASSWORD:<PASSWORD>END_PI-password"]').val()
afterLogin = (error) ->
# TODO error case that properly displays
if error
alert error
else
Houston._go 'home'
if Houston._admin_user_exists()
Meteor.loginWithPassword email, password, afterLogin
else
Accounts.createUser {
email: email
password: PI:PASSWORD:<PASSWORD>END_PI
}, (error) ->
return afterLogin(error) if error
Houston.becomeAdmin()
'click #houston-logout': (e) ->
e.preventDefault()
Meteor.logout()
# going 'home' clears the side nav
Houston._go 'home'
'click #become-houston-admin': (e) ->
e.preventDefault()
Houston.becomeAdmin()
)
Template._houston_login.rendered = ->
$(window).unbind('scroll')
|
[
{
"context": ")\n data: -> {\n supportEmail: \"<a href='mailto:support@codecombat.com'>support@codecombat.com</a>\"\n saving: true\n ",
"end": 212,
"score": 0.9999250173568726,
"start": 190,
"tag": "EMAIL",
"value": "support@codecombat.com"
},
{
"context": "rtEmail: \"<a hre... | app/views/core/CreateAccountModal/teacher/SetupAccountPanel.coffee | johanvl/codecombat | 2 | SetupAccountPanel = Vue.extend
name: 'setup-account-panel'
template: require('templates/core/create-account-modal/setup-account-panel')()
data: -> {
supportEmail: "<a href='mailto:support@codecombat.com'>support@codecombat.com</a>"
saving: true
error: ''
}
mounted: ->
@$store.dispatch('modal/createAccount')
.catch (e) =>
if e.i18n
@error = @$t(e.i18n)
else
@error = e.message
if not @error
@error = @$t('loading_error.unknown')
.then =>
@saving = false
methods:
clickFinish: ->
# Save annoucements subscribe info
me.fetch(cache: false)
.then =>
emails = _.assign({}, me.get('emails'))
emails.generalNews ?= {}
emails.generalNews.enabled = $('#subscribe-input').is(':checked')
me.set('emails', emails)
jqxhr = me.save()
if not jqxhr
console.error(me.validationError)
throw new Error('Could not save user')
new Promise(jqxhr.then)
.then =>
# Make sure to add conditions if we change this to be used on non-teacher path
window.tracker?.trackEvent 'CreateAccountModal Teacher SetupAccountPanel Finish Clicked', category: 'Teachers'
application.router.navigate('teachers/classes', {trigger: true})
document.location.reload()
clickBack: ->
window.tracker?.trackEvent 'CreateAccountModal Teacher SetupAccountPanel Back Clicked', category: 'Teachers'
@$emit('back')
module.exports = SetupAccountPanel
| 198055 | SetupAccountPanel = Vue.extend
name: 'setup-account-panel'
template: require('templates/core/create-account-modal/setup-account-panel')()
data: -> {
supportEmail: "<a href='mailto:<EMAIL>'><EMAIL></a>"
saving: true
error: ''
}
mounted: ->
@$store.dispatch('modal/createAccount')
.catch (e) =>
if e.i18n
@error = @$t(e.i18n)
else
@error = e.message
if not @error
@error = @$t('loading_error.unknown')
.then =>
@saving = false
methods:
clickFinish: ->
# Save annoucements subscribe info
me.fetch(cache: false)
.then =>
emails = _.assign({}, me.get('emails'))
emails.generalNews ?= {}
emails.generalNews.enabled = $('#subscribe-input').is(':checked')
me.set('emails', emails)
jqxhr = me.save()
if not jqxhr
console.error(me.validationError)
throw new Error('Could not save user')
new Promise(jqxhr.then)
.then =>
# Make sure to add conditions if we change this to be used on non-teacher path
window.tracker?.trackEvent 'CreateAccountModal Teacher SetupAccountPanel Finish Clicked', category: 'Teachers'
application.router.navigate('teachers/classes', {trigger: true})
document.location.reload()
clickBack: ->
window.tracker?.trackEvent 'CreateAccountModal Teacher SetupAccountPanel Back Clicked', category: 'Teachers'
@$emit('back')
module.exports = SetupAccountPanel
| true | SetupAccountPanel = Vue.extend
name: 'setup-account-panel'
template: require('templates/core/create-account-modal/setup-account-panel')()
data: -> {
supportEmail: "<a href='mailto:PI:EMAIL:<EMAIL>END_PI'>PI:EMAIL:<EMAIL>END_PI</a>"
saving: true
error: ''
}
mounted: ->
@$store.dispatch('modal/createAccount')
.catch (e) =>
if e.i18n
@error = @$t(e.i18n)
else
@error = e.message
if not @error
@error = @$t('loading_error.unknown')
.then =>
@saving = false
methods:
clickFinish: ->
# Save annoucements subscribe info
me.fetch(cache: false)
.then =>
emails = _.assign({}, me.get('emails'))
emails.generalNews ?= {}
emails.generalNews.enabled = $('#subscribe-input').is(':checked')
me.set('emails', emails)
jqxhr = me.save()
if not jqxhr
console.error(me.validationError)
throw new Error('Could not save user')
new Promise(jqxhr.then)
.then =>
# Make sure to add conditions if we change this to be used on non-teacher path
window.tracker?.trackEvent 'CreateAccountModal Teacher SetupAccountPanel Finish Clicked', category: 'Teachers'
application.router.navigate('teachers/classes', {trigger: true})
document.location.reload()
clickBack: ->
window.tracker?.trackEvent 'CreateAccountModal Teacher SetupAccountPanel Back Clicked', category: 'Teachers'
@$emit('back')
module.exports = SetupAccountPanel
|
[
{
"context": " admin: krb5\n .krb5.delprinc\n principal: \"nikita@#{krb5.realm}\"\n .krb5.addprinc\n principal:",
"end": 884,
"score": 0.9496657848358154,
"start": 878,
"tag": "EMAIL",
"value": "nikita"
},
{
"context": "krb5.realm}\"\n .krb5.addprinc\n princip... | packages/krb5/test/addprinc.coffee | DanielJohnHarty/node-nikita | 1 |
nikita = require '@nikitajs/core'
{tags, ssh, scratch, krb5} = require './test'
they = require('ssh2-they').configure ssh...
return unless tags.krb5_addprinc
describe 'krb5.addprinc', ->
it 'validate schema', ->
nikita
.krb5.addprinc
relax: true
options: {}
, (err) ->
err.errors.map( (err) -> err.message).should.eql [
'data should have required property \'admin\''
'data should have required property \'principal\''
]
.krb5.addprinc
relax: true
options:
admin:
principal: null
principal: 'nikita@REALM'
randkey: true
, (err) ->
err.message.should.eql 'data.admin.principal should be string'
.promise()
they 'create a new principal without a randkey', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "nikita@#{krb5.realm}"
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
randkey: true
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
randkey: true
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'create a new principal with a password', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "nikita@#{krb5.realm}"
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
password: 'password1'
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
password: 'password2'
password_sync: true
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
password: 'password2'
password_sync: true
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'dont overwrite password', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "nikita@#{krb5.realm}"
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
password: 'password1'
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nikita@#{krb5.realm}"
password: 'password2'
password_sync: false # Default
, (err, {status}) ->
status.should.be.false() unless err
.system.execute
cmd: "echo password1 | kinit nikita@#{krb5.realm}"
.promise()
they 'call function with new style', ({ssh}) ->
user =
password: 'user123'
password_sync: true
principal: 'user2@NODE.DC1.CONSUL'
nikita
ssh: ssh
krb5: admin: krb5
.system.execute
cmd: 'rm -f /etc/security/keytabs/user1.service.keytab || true ; exit 0;'
.krb5.delprinc
principal: user.principal
.krb5.delprinc
principal: "user1/krb5@NODE.DC1.CONSUL"
.krb5.addprinc krb5,
principal: "user1/krb5@NODE.DC1.CONSUL"
randkey: true
keytab: '/etc/security/keytabs/user1.service.keytab'
.krb5.addprinc user, (err, {status}) ->
status.should.be.true() unless err
.system.execute
cmd: "echo #{user.password} | kinit #{user.principal}"
, (err, {status}) ->
status.should.be.true() unless err
.promise()
| 69560 |
nikita = require '@nikitajs/core'
{tags, ssh, scratch, krb5} = require './test'
they = require('ssh2-they').configure ssh...
return unless tags.krb5_addprinc
describe 'krb5.addprinc', ->
it 'validate schema', ->
nikita
.krb5.addprinc
relax: true
options: {}
, (err) ->
err.errors.map( (err) -> err.message).should.eql [
'data should have required property \'admin\''
'data should have required property \'principal\''
]
.krb5.addprinc
relax: true
options:
admin:
principal: null
principal: 'nikita@REALM'
randkey: true
, (err) ->
err.message.should.eql 'data.admin.principal should be string'
.promise()
they 'create a new principal without a randkey', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "<EMAIL>@#{krb5.realm}"
.krb5.addprinc
principal: "<EMAIL>@#{krb5.realm}"
randkey: true
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "<EMAIL>@#{krb5.realm}"
randkey: true
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'create a new principal with a password', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "<EMAIL>@#{krb5.realm}"
.krb5.addprinc
principal: "<EMAIL>@#{krb5.realm}"
password: '<PASSWORD>'
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "<EMAIL>@#{krb5.realm}"
password: '<PASSWORD>'
password_sync: true
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nik<EMAIL>@#{krb5.realm}"
password: '<PASSWORD>'
password_sync: true
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'dont overwrite password', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "nik<EMAIL>@#{krb5.realm}"
.krb5.addprinc
principal: "nik<EMAIL>@#{krb5.realm}"
password: '<PASSWORD>'
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nik<EMAIL>@#{krb5.realm}"
password: '<PASSWORD>'
password_sync: false # Default
, (err, {status}) ->
status.should.be.false() unless err
.system.execute
cmd: "echo password1 | kinit nikita@#{krb5.realm}"
.promise()
they 'call function with new style', ({ssh}) ->
user =
password: '<PASSWORD>'
password_sync: true
principal: 'user2@NODE.DC1.CONSUL'
nikita
ssh: ssh
krb5: admin: krb5
.system.execute
cmd: 'rm -f /etc/security/keytabs/user1.service.keytab || true ; exit 0;'
.krb5.delprinc
principal: user.principal
.krb5.delprinc
principal: "user1/krb5@NODE.DC1.CONSUL"
.krb5.addprinc krb5,
principal: "user1/krb5@NODE.DC1.CONSUL"
randkey: true
keytab: '/etc/security/keytabs/user1.service.keytab'
.krb5.addprinc user, (err, {status}) ->
status.should.be.true() unless err
.system.execute
cmd: "echo #{user.password} | kinit #{user.principal}"
, (err, {status}) ->
status.should.be.true() unless err
.promise()
| true |
nikita = require '@nikitajs/core'
{tags, ssh, scratch, krb5} = require './test'
they = require('ssh2-they').configure ssh...
return unless tags.krb5_addprinc
describe 'krb5.addprinc', ->
it 'validate schema', ->
nikita
.krb5.addprinc
relax: true
options: {}
, (err) ->
err.errors.map( (err) -> err.message).should.eql [
'data should have required property \'admin\''
'data should have required property \'principal\''
]
.krb5.addprinc
relax: true
options:
admin:
principal: null
principal: 'nikita@REALM'
randkey: true
, (err) ->
err.message.should.eql 'data.admin.principal should be string'
.promise()
they 'create a new principal without a randkey', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "PI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
.krb5.addprinc
principal: "PI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
randkey: true
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "PI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
randkey: true
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'create a new principal with a password', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "PI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
.krb5.addprinc
principal: "PI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "PI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
password_sync: true
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nikPI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
password_sync: true
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'dont overwrite password', ({ssh}) ->
nikita
ssh: ssh
krb5: admin: krb5
.krb5.delprinc
principal: "nikPI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
.krb5.addprinc
principal: "nikPI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
, (err, {status}) ->
status.should.be.true() unless err
.krb5.addprinc
principal: "nikPI:EMAIL:<EMAIL>END_PI@#{krb5.realm}"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
password_sync: false # Default
, (err, {status}) ->
status.should.be.false() unless err
.system.execute
cmd: "echo password1 | kinit nikita@#{krb5.realm}"
.promise()
they 'call function with new style', ({ssh}) ->
user =
password: 'PI:PASSWORD:<PASSWORD>END_PI'
password_sync: true
principal: 'user2@NODE.DC1.CONSUL'
nikita
ssh: ssh
krb5: admin: krb5
.system.execute
cmd: 'rm -f /etc/security/keytabs/user1.service.keytab || true ; exit 0;'
.krb5.delprinc
principal: user.principal
.krb5.delprinc
principal: "user1/krb5@NODE.DC1.CONSUL"
.krb5.addprinc krb5,
principal: "user1/krb5@NODE.DC1.CONSUL"
randkey: true
keytab: '/etc/security/keytabs/user1.service.keytab'
.krb5.addprinc user, (err, {status}) ->
status.should.be.true() unless err
.system.execute
cmd: "echo #{user.password} | kinit #{user.principal}"
, (err, {status}) ->
status.should.be.true() unless err
.promise()
|
[
{
"context": " participantidsstore', ->\n\n testAccountId = 'koding_123'\n\n @reactor.dispatch actions.ADD_PARTICIPANT",
"end": 532,
"score": 0.971649706363678,
"start": 522,
"tag": "KEY",
"value": "koding_123"
},
{
"context": "articipantidsstore', ->\n\n testAccountId... | client/activity/lib/flux/createchannel/tests/participantidsstore.coffee | ezgikaysi/koding | 1 | expect = require 'expect'
Reactor = require 'app/flux/base/reactor'
ParticipantIdsStore = require 'activity/flux/createchannel/stores/participantidsstore'
actions = require 'activity/flux/createchannel/actions/actiontypes'
describe 'CreateNewChannelParticipantIdsStore', ->
beforeEach ->
@reactor = new Reactor
@reactor.registerStores participantIds : ParticipantIdsStore
describe '#handleAddParticipantToNewChannel', ->
it 'sets given accountId to participantidsstore', ->
testAccountId = 'koding_123'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId }
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId).toEqual testAccountId
describe '#handleRemoveParticipantFromNewChannel', ->
it 'remove given accountId to participantidsstore', ->
testAccountId_1 = 'koding'
testAccountId_2 = 'koding_123'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_1 }
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_2 }
@reactor.dispatch actions.REMOVE_PARTICIPANT_FROM_NEW_CHANNEL, { accountId : testAccountId_1 }
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId_2).toEqual testAccountId_2
expect(participants.get testAccountId_1).toBe undefined
describe '#handleRemoveAllParticipantsFromNewChannel', ->
it 'removes all participant ids from participantidsstore', ->
testAccountId_1 = 'koding'
testAccountId_2 = 'koding_123'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_1 }
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_2 }
@reactor.dispatch actions.REMOVE_ALL_PARTICIPANTS_FROM_NEW_CHANNEL
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId_1).toBe undefined
expect(participants.get testAccountId_2).toBe undefined
| 218320 | expect = require 'expect'
Reactor = require 'app/flux/base/reactor'
ParticipantIdsStore = require 'activity/flux/createchannel/stores/participantidsstore'
actions = require 'activity/flux/createchannel/actions/actiontypes'
describe 'CreateNewChannelParticipantIdsStore', ->
beforeEach ->
@reactor = new Reactor
@reactor.registerStores participantIds : ParticipantIdsStore
describe '#handleAddParticipantToNewChannel', ->
it 'sets given accountId to participantidsstore', ->
testAccountId = '<KEY>'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId }
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId).toEqual testAccountId
describe '#handleRemoveParticipantFromNewChannel', ->
it 'remove given accountId to participantidsstore', ->
testAccountId_1 = '<KEY>'
testAccountId_2 = '<KEY>'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_1 }
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_2 }
@reactor.dispatch actions.REMOVE_PARTICIPANT_FROM_NEW_CHANNEL, { accountId : testAccountId_1 }
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId_2).toEqual testAccountId_2
expect(participants.get testAccountId_1).toBe undefined
describe '#handleRemoveAllParticipantsFromNewChannel', ->
it 'removes all participant ids from participantidsstore', ->
testAccountId_1 = '<KEY>'
testAccountId_2 = '<KEY>'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_1 }
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_2 }
@reactor.dispatch actions.REMOVE_ALL_PARTICIPANTS_FROM_NEW_CHANNEL
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId_1).toBe undefined
expect(participants.get testAccountId_2).toBe undefined
| true | expect = require 'expect'
Reactor = require 'app/flux/base/reactor'
ParticipantIdsStore = require 'activity/flux/createchannel/stores/participantidsstore'
actions = require 'activity/flux/createchannel/actions/actiontypes'
describe 'CreateNewChannelParticipantIdsStore', ->
beforeEach ->
@reactor = new Reactor
@reactor.registerStores participantIds : ParticipantIdsStore
describe '#handleAddParticipantToNewChannel', ->
it 'sets given accountId to participantidsstore', ->
testAccountId = 'PI:KEY:<KEY>END_PI'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId }
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId).toEqual testAccountId
describe '#handleRemoveParticipantFromNewChannel', ->
it 'remove given accountId to participantidsstore', ->
testAccountId_1 = 'PI:KEY:<KEY>END_PI'
testAccountId_2 = 'PI:KEY:<KEY>END_PI'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_1 }
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_2 }
@reactor.dispatch actions.REMOVE_PARTICIPANT_FROM_NEW_CHANNEL, { accountId : testAccountId_1 }
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId_2).toEqual testAccountId_2
expect(participants.get testAccountId_1).toBe undefined
describe '#handleRemoveAllParticipantsFromNewChannel', ->
it 'removes all participant ids from participantidsstore', ->
testAccountId_1 = 'PI:KEY:<KEY>END_PI'
testAccountId_2 = 'PI:KEY:<KEY>END_PI'
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_1 }
@reactor.dispatch actions.ADD_PARTICIPANT_TO_NEW_CHANNEL, { accountId : testAccountId_2 }
@reactor.dispatch actions.REMOVE_ALL_PARTICIPANTS_FROM_NEW_CHANNEL
participants = @reactor.evaluate(['participantIds'])
expect(participants.get testAccountId_1).toBe undefined
expect(participants.get testAccountId_2).toBe undefined
|
[
{
"context": "\n request.post('/api/hello')\n .send({name: 'david'})\n .set('Accept', 'application/json')\n .ex",
"end": 152,
"score": 0.9976933598518372,
"start": 147,
"tag": "NAME",
"value": "david"
},
{
"context": "\n request.post('/api/hello')\n .send({name: 'ru... | src/server/hello/test.coffee | dataonlinedolv3/rupert-demo-app | 1 | request = superRupert(require('./route'))
describe "API", ->
it "checks for a hello", (done)->
request.post('/api/hello')
.send({name: 'david'})
.set('Accept', 'application/json')
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
response.body.should.have.property('name')
response.body.should.not.have.property('alreadymet')
response.body.name.should.equal('david')
catch e
reurn done e
done()
it "checks for a repeat hello", (done)->
request.post('/api/hello')
.send({name: 'rupert'})
.set('Accept', 'application/json')
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
# The first request should not have already met Rupert.
response.body.should.have.property('name')
response.body.should.not.have.property('alreadymet')
response.body.name.should.equal('rupert')
catch e
return done e
request.post('/api/hello')
.send({name: 'rupert'})
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
# The second request, of course, should have already met Rupert.
response.body.should.have.property('name')
response.body.should.have.property('alreadymet')
response.body.name.should.equal('rupert')
response.body.alreadymet.should.equal(true)
catch e
return done e
done()
| 40640 | request = superRupert(require('./route'))
describe "API", ->
it "checks for a hello", (done)->
request.post('/api/hello')
.send({name: '<NAME>'})
.set('Accept', 'application/json')
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
response.body.should.have.property('name')
response.body.should.not.have.property('alreadymet')
response.body.name.should.equal('david')
catch e
reurn done e
done()
it "checks for a repeat hello", (done)->
request.post('/api/hello')
.send({name: '<NAME>'})
.set('Accept', 'application/json')
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
# The first request should not have already met Rupert.
response.body.should.have.property('name')
response.body.should.not.have.property('alreadymet')
response.body.name.should.equal('<NAME>')
catch e
return done e
request.post('/api/hello')
.send({name: '<NAME>'})
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
# The second request, of course, should have already met Rupert.
response.body.should.have.property('name')
response.body.should.have.property('alreadymet')
response.body.name.should.equal('<NAME>')
response.body.alreadymet.should.equal(true)
catch e
return done e
done()
| true | request = superRupert(require('./route'))
describe "API", ->
it "checks for a hello", (done)->
request.post('/api/hello')
.send({name: 'PI:NAME:<NAME>END_PI'})
.set('Accept', 'application/json')
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
response.body.should.have.property('name')
response.body.should.not.have.property('alreadymet')
response.body.name.should.equal('david')
catch e
reurn done e
done()
it "checks for a repeat hello", (done)->
request.post('/api/hello')
.send({name: 'PI:NAME:<NAME>END_PI'})
.set('Accept', 'application/json')
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
# The first request should not have already met Rupert.
response.body.should.have.property('name')
response.body.should.not.have.property('alreadymet')
response.body.name.should.equal('PI:NAME:<NAME>END_PI')
catch e
return done e
request.post('/api/hello')
.send({name: 'PI:NAME:<NAME>END_PI'})
.expect(200)
.expect('Content-Type', /application\/json/)
.end (err, response)->
return done(err) if err
try
# The second request, of course, should have already met Rupert.
response.body.should.have.property('name')
response.body.should.have.property('alreadymet')
response.body.name.should.equal('PI:NAME:<NAME>END_PI')
response.body.alreadymet.should.equal(true)
catch e
return done e
done()
|
[
{
"context": " for facet in results.facets when facet.key is 'accountID'\n for account in facet.value\n ",
"end": 10453,
"score": 0.8559215068817139,
"start": 10444,
"tag": "KEY",
"value": "accountID"
}
] | server/controllers/messages.coffee | gelnior/cozy-emails | 0 | Message = require '../models/message'
Account = require '../models/account'
Mailbox = require '../models/mailbox'
{NotFound, BadRequest, AccountConfigError} = require '../utils/errors'
{MSGBYPAGE} = require '../utils/constants'
_ = require 'lodash'
async = require 'async'
querystring = require 'querystring'
multiparty = require 'multiparty'
crlf = require 'crlf-helper'
minify = require('html-minifier').minify
stream_to_buffer = require '../utils/stream_to_array'
log = require('../utils/logging')(prefix: 'controllers:mesage')
{normalizeMessageID} = require('../utils/jwz_tools')
uuid = require 'uuid'
ramStore = require '../models/store_account_and_boxes'
SaveOrSendMessage = require '../processes/message_save_or_send'
MessageMove = require '../processes/message_move'
minifierOpts =
removeComments: true
removeCommentsFromCDATA: true
collapseWhitespace: true
collapseBooleanAttributes: true
removeRedundantAttributes: true
removeEmptyAttributes: true
removeScriptTypeAttributes: true
removeStyleLinkTypeAttributes: true
removeOptionalTags: true
removeEmptyElements: true
keepClosingSlash: true
minifyJS: true
minifyCSS: true
# get a message and attach it to req.message
module.exports.fetch = (req, res, next) ->
id = req.params.messageID or req.body.id
Message.find id, (err, found) ->
return next err if err
return next new NotFound "Message #{id}" unless found
req.message = found
next()
module.exports.fetchMaybe = (req, res, next) ->
id = req.body.id
if id then module.exports.fetch req, res, next
else next()
# return a message's details
module.exports.details = (req, res, next) ->
res.send req.message.toClientObject()
module.exports.attachment = (req, res, next) ->
stream = req.message.getBinary req.params.attachment, (err) ->
return next err if err
if req.query?.download
encodedFileName = encodeURIComponent req.params.attachment
res.setHeader 'Content-disposition', """
attachment; filename*=UTF8''#{encodedFileName}
"""
stream.pipe res
module.exports.listByMailboxOptions = (req, res, next) ->
sort = if req.query.sort then req.query.sort
else '-date'
descending = sort.substring(0, 1)
if descending is '+' then descending = false
else if descending is '-' then descending = true
else return next new BadRequest "Unsuported sort order #{descending}"
pageAfter = req.query.pageAfter
sortField = sort.substring(1)
before = req.query.before
after = req.query.after
if sortField is 'date'
before ?= new Date(0).toISOString()
after ?= new Date().toISOString()
if new Date(before).toISOString() isnt before or
new Date(after).toISOString() isnt after
return next new BadRequest "before & after should be a valid JS " +
"date.toISOString()"
else if sortField is 'from' or sortField is 'dest'
before = if before then decodeURIComponent(before) else ''
after = if after then decodeURIComponent(after) else {}
pageAfter = if pageAfter then decodeURIComponent pageAfter
else
return next new BadRequest "Unsuported sort field #{sortField}"
FLAGS_CONVERT =
'seen' : '\\Seen'
'unseen' : '!\\Seen'
'flagged' : '\\Flagged'
'unflagged' : '!\\Flagged'
'answered' : '\\Answered'
'unanswered' : '!\\Answered'
'attach' : '\\Attachments'
flagcode = req.query.flag
if flagcode
unless flag = FLAGS_CONVERT[flagcode]
return next new BadRequest "Unsuported flag filter"
else
flag = null
req.sortField = sortField
req.descending = descending
req.before = before
req.sort = sort
req.after = after
req.pageAfter = pageAfter
req.flag = flag
req.flagcode = flagcode
next()
# list messages from a mailbox
# req.query possible
# sort = [+/-][date]
# flag in [seen, unseen, flagged, unflagged, answerred, unanswered]
module.exports.listByMailbox = (req, res, next) ->
mailboxID = req.params.mailboxID
Message.getResultsAndCount mailboxID,
sortField : req.sortField
descending : req.descending
before : req.before
after : req.after
resultsAfter : req.pageAfter
flag : req.flag
, (err, result) ->
return next err if err
messages = result.messages
if messages.length is MSGBYPAGE
last = messages[messages.length - 1]
# for 'from' and 'dest', we use pageAfter as the number of records
# to skip
if req.sortField is 'from' or req.sortField is 'dest'
pageAfter = messages.length + (parseInt(req.pageAfter, 10) or 0)
else
lastDate = last.date or new Date()
pageAfter = lastDate.toISOString()
links = next: "mailbox/#{mailboxID}/?" + querystring.stringify
flag: req.flagcode
sort: req.sort
before: req.before
after: req.after
pageAfter: pageAfter
else
links = {}
result.messages ?= []
result.mailboxID = mailboxID
result.messages = result.messages.map (msg) -> msg.toClientObject()
result.links = links
res.send result
# Middleware - parse the request form and buffer all its files
module.exports.parseSendForm = (req, res, next) ->
form = new multiparty.Form(autoFields: true)
nextonce = _.once next #this may be parano
fields = {}
files = {}
form.on 'field', (name, value) ->
fields[name] = value
form.on 'part', (part) ->
stream_to_buffer part, (err, bufs) ->
return nextonce err if err
files[part.name] =
filename: part.filename
headers: part.headers
content: Buffer.concat bufs
part.resume()
form.on 'error', (err) ->
nextonce err
form.on 'close', ->
req.body = JSON.parse fields.body
req.files = files
nextonce()
form.parse req
# send a message
# at some point in the future, we might want to merge it with above
# to allow streaming of upload
module.exports.send = (req, res, next) ->
log.debug "send"
isDraft = req.body.isDraft
delete req.body.isDraft
message = req.body
if message.html
message.html = minify message.html, minifierOpts
if message.text
message.text = crlf.setLineEnding message.text.trim(), 'CRLF'
proc = new SaveOrSendMessage
account: ramStore.getAccount req.body.accountID
previousState: req.message # can be null
message: message
newAttachments: req.files
isDraft: isDraft
proc.run (err) ->
return next err if err
out = proc.cozyMessage.toClientObject()
out.isDraft = isDraft
res.send out
# fetch messages with various methods
# expect one of conversationIDs, conversationID, or messageIDs in body
# attach the messages to req.messages
module.exports.batchFetch = (req, res, next) ->
if Object.keys(req.body).length is 0
req.body = req.query
handleMessages = (err, messages) ->
return next err if err
req.messages = messages
next()
if req.body.messageID
Message.find req.body.messageID, (err, message) ->
handleMessages err, [message]
else if req.body.conversationID
Message.byConversationID req.body.conversationID, handleMessages
else if req.body.messageIDs
Message.findMultiple req.body.messageIDs, handleMessages
else if req.body.conversationIDs
Message.byConversationIDs req.body.conversationIDs, handleMessages
else
next new BadRequest """
No conversationIDs, conversationID, or messageIDs in body.
"""
module.exports.batchSend = (req, res, next) ->
messages = req.messages.filter (msg) -> return msg?
.map (msg) -> msg?.toClientObject()
return next new NotFound "No message found" if messages.length is 0
res.send messages
# move several message to trash with one request
# expect req.messages
module.exports.batchTrash = (req, res, next) ->
accountInstance = ramStore.getAccount(req.body.accountID)
# the client should prevent this, but let's be safe
unless accountInstance
return next new BadRequest 'accountInstance'
trashBoxId = accountInstance.trashMailbox
# the client should prevent this, but let's be safe
unless trashBoxId
return next new AccountConfigError 'trashMailbox'
process = new MessageMove
messages: req.messages
to: trashBoxId
process.run (err) ->
res.send process.updatedMessages
# add a flag to several messages
# expect req.body.flag
module.exports.batchAddFlag = (req, res, next) ->
Message.batchAddFlag req.messages, req.body.flag, (err, updated) ->
return next err if err
res.send updated
# remove a flag from several messages
# expect req.body.flag
module.exports.batchRemoveFlag = (req, res, next) ->
Message.batchRemoveFlag req.messages, req.body.flag, (err, updated) ->
return next err if err
res.send updated
# move several message with one request
# expect & req.messages
# aim :
# - the conversation should not appears in from
# - the conversation should appears in to
# - drafts should stay in drafts
# - messages in trash should stay in trash
module.exports.batchMove = (req, res, next) ->
process = new MessageMove
messages: req.messages
to: req.body.to
from: req.body.from
process.run (err) ->
res.send process.updatedMessages
module.exports.search = (req, res, next) ->
return next new Error('search is disabled')
params =
query: req.query.search
facets: accountID: {}
if req.query.accountID
params.filter =
accountID: [[req.query.accountID, req.query.accountID]]
params.numByPage = req.query.pageSize or 10
params.numPage = req.query.page or 0
Message.search params, (err, results) ->
return next err if err
accounts = {}
for facet in results.facets when facet.key is 'accountID'
for account in facet.value
accounts[account.key] = account.value
res.send
accounts: accounts
rows: results.map (msg) -> msg.toClientObject()
# fetch from IMAP and send the raw rfc822 message
module.exports.raw = (req, res, next) ->
boxID = Object.keys(req.message.mailboxIDs)[0]
uid = req.message.mailboxIDs[boxID]
Mailbox.find boxID, (err, mailbox) ->
return next err if err
mailbox.doASAPWithBox (imap, imapbox, cbRelease) ->
try imap.fetchOneMailRaw uid, cbRelease
catch err then cbRelease err
, (err, message) ->
return next err if err
# should be message/rfc822 but text/plain allow to read the
# raw message in the browser
res.type 'text/plain'
res.send message
| 214397 | Message = require '../models/message'
Account = require '../models/account'
Mailbox = require '../models/mailbox'
{NotFound, BadRequest, AccountConfigError} = require '../utils/errors'
{MSGBYPAGE} = require '../utils/constants'
_ = require 'lodash'
async = require 'async'
querystring = require 'querystring'
multiparty = require 'multiparty'
crlf = require 'crlf-helper'
minify = require('html-minifier').minify
stream_to_buffer = require '../utils/stream_to_array'
log = require('../utils/logging')(prefix: 'controllers:mesage')
{normalizeMessageID} = require('../utils/jwz_tools')
uuid = require 'uuid'
ramStore = require '../models/store_account_and_boxes'
SaveOrSendMessage = require '../processes/message_save_or_send'
MessageMove = require '../processes/message_move'
minifierOpts =
removeComments: true
removeCommentsFromCDATA: true
collapseWhitespace: true
collapseBooleanAttributes: true
removeRedundantAttributes: true
removeEmptyAttributes: true
removeScriptTypeAttributes: true
removeStyleLinkTypeAttributes: true
removeOptionalTags: true
removeEmptyElements: true
keepClosingSlash: true
minifyJS: true
minifyCSS: true
# get a message and attach it to req.message
module.exports.fetch = (req, res, next) ->
id = req.params.messageID or req.body.id
Message.find id, (err, found) ->
return next err if err
return next new NotFound "Message #{id}" unless found
req.message = found
next()
module.exports.fetchMaybe = (req, res, next) ->
id = req.body.id
if id then module.exports.fetch req, res, next
else next()
# return a message's details
module.exports.details = (req, res, next) ->
res.send req.message.toClientObject()
module.exports.attachment = (req, res, next) ->
stream = req.message.getBinary req.params.attachment, (err) ->
return next err if err
if req.query?.download
encodedFileName = encodeURIComponent req.params.attachment
res.setHeader 'Content-disposition', """
attachment; filename*=UTF8''#{encodedFileName}
"""
stream.pipe res
module.exports.listByMailboxOptions = (req, res, next) ->
sort = if req.query.sort then req.query.sort
else '-date'
descending = sort.substring(0, 1)
if descending is '+' then descending = false
else if descending is '-' then descending = true
else return next new BadRequest "Unsuported sort order #{descending}"
pageAfter = req.query.pageAfter
sortField = sort.substring(1)
before = req.query.before
after = req.query.after
if sortField is 'date'
before ?= new Date(0).toISOString()
after ?= new Date().toISOString()
if new Date(before).toISOString() isnt before or
new Date(after).toISOString() isnt after
return next new BadRequest "before & after should be a valid JS " +
"date.toISOString()"
else if sortField is 'from' or sortField is 'dest'
before = if before then decodeURIComponent(before) else ''
after = if after then decodeURIComponent(after) else {}
pageAfter = if pageAfter then decodeURIComponent pageAfter
else
return next new BadRequest "Unsuported sort field #{sortField}"
FLAGS_CONVERT =
'seen' : '\\Seen'
'unseen' : '!\\Seen'
'flagged' : '\\Flagged'
'unflagged' : '!\\Flagged'
'answered' : '\\Answered'
'unanswered' : '!\\Answered'
'attach' : '\\Attachments'
flagcode = req.query.flag
if flagcode
unless flag = FLAGS_CONVERT[flagcode]
return next new BadRequest "Unsuported flag filter"
else
flag = null
req.sortField = sortField
req.descending = descending
req.before = before
req.sort = sort
req.after = after
req.pageAfter = pageAfter
req.flag = flag
req.flagcode = flagcode
next()
# list messages from a mailbox
# req.query possible
# sort = [+/-][date]
# flag in [seen, unseen, flagged, unflagged, answerred, unanswered]
module.exports.listByMailbox = (req, res, next) ->
mailboxID = req.params.mailboxID
Message.getResultsAndCount mailboxID,
sortField : req.sortField
descending : req.descending
before : req.before
after : req.after
resultsAfter : req.pageAfter
flag : req.flag
, (err, result) ->
return next err if err
messages = result.messages
if messages.length is MSGBYPAGE
last = messages[messages.length - 1]
# for 'from' and 'dest', we use pageAfter as the number of records
# to skip
if req.sortField is 'from' or req.sortField is 'dest'
pageAfter = messages.length + (parseInt(req.pageAfter, 10) or 0)
else
lastDate = last.date or new Date()
pageAfter = lastDate.toISOString()
links = next: "mailbox/#{mailboxID}/?" + querystring.stringify
flag: req.flagcode
sort: req.sort
before: req.before
after: req.after
pageAfter: pageAfter
else
links = {}
result.messages ?= []
result.mailboxID = mailboxID
result.messages = result.messages.map (msg) -> msg.toClientObject()
result.links = links
res.send result
# Middleware - parse the request form and buffer all its files
module.exports.parseSendForm = (req, res, next) ->
form = new multiparty.Form(autoFields: true)
nextonce = _.once next #this may be parano
fields = {}
files = {}
form.on 'field', (name, value) ->
fields[name] = value
form.on 'part', (part) ->
stream_to_buffer part, (err, bufs) ->
return nextonce err if err
files[part.name] =
filename: part.filename
headers: part.headers
content: Buffer.concat bufs
part.resume()
form.on 'error', (err) ->
nextonce err
form.on 'close', ->
req.body = JSON.parse fields.body
req.files = files
nextonce()
form.parse req
# send a message
# at some point in the future, we might want to merge it with above
# to allow streaming of upload
module.exports.send = (req, res, next) ->
log.debug "send"
isDraft = req.body.isDraft
delete req.body.isDraft
message = req.body
if message.html
message.html = minify message.html, minifierOpts
if message.text
message.text = crlf.setLineEnding message.text.trim(), 'CRLF'
proc = new SaveOrSendMessage
account: ramStore.getAccount req.body.accountID
previousState: req.message # can be null
message: message
newAttachments: req.files
isDraft: isDraft
proc.run (err) ->
return next err if err
out = proc.cozyMessage.toClientObject()
out.isDraft = isDraft
res.send out
# fetch messages with various methods
# expect one of conversationIDs, conversationID, or messageIDs in body
# attach the messages to req.messages
module.exports.batchFetch = (req, res, next) ->
if Object.keys(req.body).length is 0
req.body = req.query
handleMessages = (err, messages) ->
return next err if err
req.messages = messages
next()
if req.body.messageID
Message.find req.body.messageID, (err, message) ->
handleMessages err, [message]
else if req.body.conversationID
Message.byConversationID req.body.conversationID, handleMessages
else if req.body.messageIDs
Message.findMultiple req.body.messageIDs, handleMessages
else if req.body.conversationIDs
Message.byConversationIDs req.body.conversationIDs, handleMessages
else
next new BadRequest """
No conversationIDs, conversationID, or messageIDs in body.
"""
module.exports.batchSend = (req, res, next) ->
messages = req.messages.filter (msg) -> return msg?
.map (msg) -> msg?.toClientObject()
return next new NotFound "No message found" if messages.length is 0
res.send messages
# move several message to trash with one request
# expect req.messages
module.exports.batchTrash = (req, res, next) ->
accountInstance = ramStore.getAccount(req.body.accountID)
# the client should prevent this, but let's be safe
unless accountInstance
return next new BadRequest 'accountInstance'
trashBoxId = accountInstance.trashMailbox
# the client should prevent this, but let's be safe
unless trashBoxId
return next new AccountConfigError 'trashMailbox'
process = new MessageMove
messages: req.messages
to: trashBoxId
process.run (err) ->
res.send process.updatedMessages
# add a flag to several messages
# expect req.body.flag
module.exports.batchAddFlag = (req, res, next) ->
Message.batchAddFlag req.messages, req.body.flag, (err, updated) ->
return next err if err
res.send updated
# remove a flag from several messages
# expect req.body.flag
module.exports.batchRemoveFlag = (req, res, next) ->
Message.batchRemoveFlag req.messages, req.body.flag, (err, updated) ->
return next err if err
res.send updated
# move several message with one request
# expect & req.messages
# aim :
# - the conversation should not appears in from
# - the conversation should appears in to
# - drafts should stay in drafts
# - messages in trash should stay in trash
module.exports.batchMove = (req, res, next) ->
process = new MessageMove
messages: req.messages
to: req.body.to
from: req.body.from
process.run (err) ->
res.send process.updatedMessages
module.exports.search = (req, res, next) ->
return next new Error('search is disabled')
params =
query: req.query.search
facets: accountID: {}
if req.query.accountID
params.filter =
accountID: [[req.query.accountID, req.query.accountID]]
params.numByPage = req.query.pageSize or 10
params.numPage = req.query.page or 0
Message.search params, (err, results) ->
return next err if err
accounts = {}
for facet in results.facets when facet.key is '<KEY>'
for account in facet.value
accounts[account.key] = account.value
res.send
accounts: accounts
rows: results.map (msg) -> msg.toClientObject()
# fetch from IMAP and send the raw rfc822 message
module.exports.raw = (req, res, next) ->
boxID = Object.keys(req.message.mailboxIDs)[0]
uid = req.message.mailboxIDs[boxID]
Mailbox.find boxID, (err, mailbox) ->
return next err if err
mailbox.doASAPWithBox (imap, imapbox, cbRelease) ->
try imap.fetchOneMailRaw uid, cbRelease
catch err then cbRelease err
, (err, message) ->
return next err if err
# should be message/rfc822 but text/plain allow to read the
# raw message in the browser
res.type 'text/plain'
res.send message
| true | Message = require '../models/message'
Account = require '../models/account'
Mailbox = require '../models/mailbox'
{NotFound, BadRequest, AccountConfigError} = require '../utils/errors'
{MSGBYPAGE} = require '../utils/constants'
_ = require 'lodash'
async = require 'async'
querystring = require 'querystring'
multiparty = require 'multiparty'
crlf = require 'crlf-helper'
minify = require('html-minifier').minify
stream_to_buffer = require '../utils/stream_to_array'
log = require('../utils/logging')(prefix: 'controllers:mesage')
{normalizeMessageID} = require('../utils/jwz_tools')
uuid = require 'uuid'
ramStore = require '../models/store_account_and_boxes'
SaveOrSendMessage = require '../processes/message_save_or_send'
MessageMove = require '../processes/message_move'
minifierOpts =
removeComments: true
removeCommentsFromCDATA: true
collapseWhitespace: true
collapseBooleanAttributes: true
removeRedundantAttributes: true
removeEmptyAttributes: true
removeScriptTypeAttributes: true
removeStyleLinkTypeAttributes: true
removeOptionalTags: true
removeEmptyElements: true
keepClosingSlash: true
minifyJS: true
minifyCSS: true
# get a message and attach it to req.message
module.exports.fetch = (req, res, next) ->
id = req.params.messageID or req.body.id
Message.find id, (err, found) ->
return next err if err
return next new NotFound "Message #{id}" unless found
req.message = found
next()
module.exports.fetchMaybe = (req, res, next) ->
id = req.body.id
if id then module.exports.fetch req, res, next
else next()
# return a message's details
module.exports.details = (req, res, next) ->
res.send req.message.toClientObject()
module.exports.attachment = (req, res, next) ->
stream = req.message.getBinary req.params.attachment, (err) ->
return next err if err
if req.query?.download
encodedFileName = encodeURIComponent req.params.attachment
res.setHeader 'Content-disposition', """
attachment; filename*=UTF8''#{encodedFileName}
"""
stream.pipe res
module.exports.listByMailboxOptions = (req, res, next) ->
sort = if req.query.sort then req.query.sort
else '-date'
descending = sort.substring(0, 1)
if descending is '+' then descending = false
else if descending is '-' then descending = true
else return next new BadRequest "Unsuported sort order #{descending}"
pageAfter = req.query.pageAfter
sortField = sort.substring(1)
before = req.query.before
after = req.query.after
if sortField is 'date'
before ?= new Date(0).toISOString()
after ?= new Date().toISOString()
if new Date(before).toISOString() isnt before or
new Date(after).toISOString() isnt after
return next new BadRequest "before & after should be a valid JS " +
"date.toISOString()"
else if sortField is 'from' or sortField is 'dest'
before = if before then decodeURIComponent(before) else ''
after = if after then decodeURIComponent(after) else {}
pageAfter = if pageAfter then decodeURIComponent pageAfter
else
return next new BadRequest "Unsuported sort field #{sortField}"
FLAGS_CONVERT =
'seen' : '\\Seen'
'unseen' : '!\\Seen'
'flagged' : '\\Flagged'
'unflagged' : '!\\Flagged'
'answered' : '\\Answered'
'unanswered' : '!\\Answered'
'attach' : '\\Attachments'
flagcode = req.query.flag
if flagcode
unless flag = FLAGS_CONVERT[flagcode]
return next new BadRequest "Unsuported flag filter"
else
flag = null
req.sortField = sortField
req.descending = descending
req.before = before
req.sort = sort
req.after = after
req.pageAfter = pageAfter
req.flag = flag
req.flagcode = flagcode
next()
# list messages from a mailbox
# req.query possible
# sort = [+/-][date]
# flag in [seen, unseen, flagged, unflagged, answerred, unanswered]
module.exports.listByMailbox = (req, res, next) ->
mailboxID = req.params.mailboxID
Message.getResultsAndCount mailboxID,
sortField : req.sortField
descending : req.descending
before : req.before
after : req.after
resultsAfter : req.pageAfter
flag : req.flag
, (err, result) ->
return next err if err
messages = result.messages
if messages.length is MSGBYPAGE
last = messages[messages.length - 1]
# for 'from' and 'dest', we use pageAfter as the number of records
# to skip
if req.sortField is 'from' or req.sortField is 'dest'
pageAfter = messages.length + (parseInt(req.pageAfter, 10) or 0)
else
lastDate = last.date or new Date()
pageAfter = lastDate.toISOString()
links = next: "mailbox/#{mailboxID}/?" + querystring.stringify
flag: req.flagcode
sort: req.sort
before: req.before
after: req.after
pageAfter: pageAfter
else
links = {}
result.messages ?= []
result.mailboxID = mailboxID
result.messages = result.messages.map (msg) -> msg.toClientObject()
result.links = links
res.send result
# Middleware - parse the request form and buffer all its files
module.exports.parseSendForm = (req, res, next) ->
form = new multiparty.Form(autoFields: true)
nextonce = _.once next #this may be parano
fields = {}
files = {}
form.on 'field', (name, value) ->
fields[name] = value
form.on 'part', (part) ->
stream_to_buffer part, (err, bufs) ->
return nextonce err if err
files[part.name] =
filename: part.filename
headers: part.headers
content: Buffer.concat bufs
part.resume()
form.on 'error', (err) ->
nextonce err
form.on 'close', ->
req.body = JSON.parse fields.body
req.files = files
nextonce()
form.parse req
# send a message
# at some point in the future, we might want to merge it with above
# to allow streaming of upload
module.exports.send = (req, res, next) ->
log.debug "send"
isDraft = req.body.isDraft
delete req.body.isDraft
message = req.body
if message.html
message.html = minify message.html, minifierOpts
if message.text
message.text = crlf.setLineEnding message.text.trim(), 'CRLF'
proc = new SaveOrSendMessage
account: ramStore.getAccount req.body.accountID
previousState: req.message # can be null
message: message
newAttachments: req.files
isDraft: isDraft
proc.run (err) ->
return next err if err
out = proc.cozyMessage.toClientObject()
out.isDraft = isDraft
res.send out
# fetch messages with various methods
# expect one of conversationIDs, conversationID, or messageIDs in body
# attach the messages to req.messages
module.exports.batchFetch = (req, res, next) ->
if Object.keys(req.body).length is 0
req.body = req.query
handleMessages = (err, messages) ->
return next err if err
req.messages = messages
next()
if req.body.messageID
Message.find req.body.messageID, (err, message) ->
handleMessages err, [message]
else if req.body.conversationID
Message.byConversationID req.body.conversationID, handleMessages
else if req.body.messageIDs
Message.findMultiple req.body.messageIDs, handleMessages
else if req.body.conversationIDs
Message.byConversationIDs req.body.conversationIDs, handleMessages
else
next new BadRequest """
No conversationIDs, conversationID, or messageIDs in body.
"""
module.exports.batchSend = (req, res, next) ->
messages = req.messages.filter (msg) -> return msg?
.map (msg) -> msg?.toClientObject()
return next new NotFound "No message found" if messages.length is 0
res.send messages
# move several message to trash with one request
# expect req.messages
module.exports.batchTrash = (req, res, next) ->
accountInstance = ramStore.getAccount(req.body.accountID)
# the client should prevent this, but let's be safe
unless accountInstance
return next new BadRequest 'accountInstance'
trashBoxId = accountInstance.trashMailbox
# the client should prevent this, but let's be safe
unless trashBoxId
return next new AccountConfigError 'trashMailbox'
process = new MessageMove
messages: req.messages
to: trashBoxId
process.run (err) ->
res.send process.updatedMessages
# add a flag to several messages
# expect req.body.flag
module.exports.batchAddFlag = (req, res, next) ->
Message.batchAddFlag req.messages, req.body.flag, (err, updated) ->
return next err if err
res.send updated
# remove a flag from several messages
# expect req.body.flag
module.exports.batchRemoveFlag = (req, res, next) ->
Message.batchRemoveFlag req.messages, req.body.flag, (err, updated) ->
return next err if err
res.send updated
# move several message with one request
# expect & req.messages
# aim :
# - the conversation should not appears in from
# - the conversation should appears in to
# - drafts should stay in drafts
# - messages in trash should stay in trash
module.exports.batchMove = (req, res, next) ->
process = new MessageMove
messages: req.messages
to: req.body.to
from: req.body.from
process.run (err) ->
res.send process.updatedMessages
module.exports.search = (req, res, next) ->
return next new Error('search is disabled')
params =
query: req.query.search
facets: accountID: {}
if req.query.accountID
params.filter =
accountID: [[req.query.accountID, req.query.accountID]]
params.numByPage = req.query.pageSize or 10
params.numPage = req.query.page or 0
Message.search params, (err, results) ->
return next err if err
accounts = {}
for facet in results.facets when facet.key is 'PI:KEY:<KEY>END_PI'
for account in facet.value
accounts[account.key] = account.value
res.send
accounts: accounts
rows: results.map (msg) -> msg.toClientObject()
# fetch from IMAP and send the raw rfc822 message
module.exports.raw = (req, res, next) ->
boxID = Object.keys(req.message.mailboxIDs)[0]
uid = req.message.mailboxIDs[boxID]
Mailbox.find boxID, (err, mailbox) ->
return next err if err
mailbox.doASAPWithBox (imap, imapbox, cbRelease) ->
try imap.fetchOneMailRaw uid, cbRelease
catch err then cbRelease err
, (err, message) ->
return next err if err
# should be message/rfc822 but text/plain allow to read the
# raw message in the browser
res.type 'text/plain'
res.send message
|
[
{
"context": "ry'\n\nclippy = null\n\nlocked = false\n\nquotes = [\n 'Clever girl'\n 'Shoot her! Shoot her!'\n 'Hold on to your but",
"end": 108,
"score": 0.8437401056289673,
"start": 97,
"tag": "NAME",
"value": "Clever girl"
}
] | index.coffee | sibartlett/atom-raptorize | 2 | {Disposable} = require 'atom'
$ = require 'jquery'
clippy = null
locked = false
quotes = [
'Clever girl'
'Shoot her! Shoot her!'
'Hold on to your butts'
'Spared no expense'
'Life finds a way'
'It\'s a unix system! I know this!'
'God help us, we\'re in the hands of engineers.'
]
playSound = ->
sound = require './assets/raptor-sound'
audio = new Audio(sound())
audio.play()
animateRaptor = ->
$(document.body).append '<img id="elRaptor" style="display: none" src="atom://raptorize/assets/raptor.png" />'
raptor = $(document.body).find '#elRaptor'
raptor.css
position: 'fixed'
bottom: '-700px'
right: '0'
display: 'block'
raptor.animate bottom: '0', ->
raptor.animate bottom: '-130px', 100, ->
offset = raptor.position().left + 400
raptor.delay 300
.animate right: offset, 2200, ->
raptor.css
bottom: '-700px'
right: '0'
raptor.remove()
locked = false
run = ->
if !locked
locked = true
playSound()
animateRaptor()
if clippy
quote = quotes[Math.floor(Math.random() * quotes.length)];
clippy.speak quote
module.exports =
consumeClippyService: (service) ->
clippy = service
new Disposable -> clippy = null
activate: ->
atom.commands.add 'atom-workspace', 'raptorize', run
| 165923 | {Disposable} = require 'atom'
$ = require 'jquery'
clippy = null
locked = false
quotes = [
'<NAME>'
'Shoot her! Shoot her!'
'Hold on to your butts'
'Spared no expense'
'Life finds a way'
'It\'s a unix system! I know this!'
'God help us, we\'re in the hands of engineers.'
]
playSound = ->
sound = require './assets/raptor-sound'
audio = new Audio(sound())
audio.play()
animateRaptor = ->
$(document.body).append '<img id="elRaptor" style="display: none" src="atom://raptorize/assets/raptor.png" />'
raptor = $(document.body).find '#elRaptor'
raptor.css
position: 'fixed'
bottom: '-700px'
right: '0'
display: 'block'
raptor.animate bottom: '0', ->
raptor.animate bottom: '-130px', 100, ->
offset = raptor.position().left + 400
raptor.delay 300
.animate right: offset, 2200, ->
raptor.css
bottom: '-700px'
right: '0'
raptor.remove()
locked = false
run = ->
if !locked
locked = true
playSound()
animateRaptor()
if clippy
quote = quotes[Math.floor(Math.random() * quotes.length)];
clippy.speak quote
module.exports =
consumeClippyService: (service) ->
clippy = service
new Disposable -> clippy = null
activate: ->
atom.commands.add 'atom-workspace', 'raptorize', run
| true | {Disposable} = require 'atom'
$ = require 'jquery'
clippy = null
locked = false
quotes = [
'PI:NAME:<NAME>END_PI'
'Shoot her! Shoot her!'
'Hold on to your butts'
'Spared no expense'
'Life finds a way'
'It\'s a unix system! I know this!'
'God help us, we\'re in the hands of engineers.'
]
playSound = ->
sound = require './assets/raptor-sound'
audio = new Audio(sound())
audio.play()
animateRaptor = ->
$(document.body).append '<img id="elRaptor" style="display: none" src="atom://raptorize/assets/raptor.png" />'
raptor = $(document.body).find '#elRaptor'
raptor.css
position: 'fixed'
bottom: '-700px'
right: '0'
display: 'block'
raptor.animate bottom: '0', ->
raptor.animate bottom: '-130px', 100, ->
offset = raptor.position().left + 400
raptor.delay 300
.animate right: offset, 2200, ->
raptor.css
bottom: '-700px'
right: '0'
raptor.remove()
locked = false
run = ->
if !locked
locked = true
playSound()
animateRaptor()
if clippy
quote = quotes[Math.floor(Math.random() * quotes.length)];
clippy.speak quote
module.exports =
consumeClippyService: (service) ->
clippy = service
new Disposable -> clippy = null
activate: ->
atom.commands.add 'atom-workspace', 'raptorize', run
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9668498039245605,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/clientFilePage/timeSpanToolbar.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Buttons for analysisTab which modify the time span in predefined increments
Moment = require 'moment'
Imm = require 'immutable'
load = (win) ->
$ = win.jQuery
React = win.React
{PropTypes} = React
R = React.DOM
{FaIcon} = require('../utils').load(win)
TimeSpanToolbar = React.createFactory React.createClass
displayName: 'TimeSpanToolbar'
mixins: [React.addons.PureRenderMixin]
propTypes: {
updateTimeSpan: PropTypes.func.isRequired
timeSpan: PropTypes.instanceOf(Imm.Map).isRequired
lastDay: PropTypes.instanceOf(Moment).isRequired
firstDay: PropTypes.instanceOf(Moment).isRequired
dayRange: PropTypes.number.isRequired
}
render: ->
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
return R.div({className: 'timeSpanToolbar'},
R.div({className: 'btn-group btn-group-sm'},
R.button({
className: 'btn arrow'
onClick: @_shiftTimeSpanRange.bind(null, @props.lastDay, @props.firstDay, 'past')
disabled: not @_rangeIsValid(null, null, 'past')
},
FaIcon('caret-left')
)
# ToDo: refactor these buttons into a component. They are all pretty similar.
R.div({
className: [
'btn'
'selected' if spanSize is 1
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'day')
disabled: not @_rangeIsValid(1, 'day')
},
R.span({className: 'buttonWord'},
"Day"
)
R.span({className: 'buttonLetter'},
"D"
)
)
R.div({
className: [
'btn'
'selected' if spanSize is 7
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'week')
disabled: not @_rangeIsValid(1, 'week')
},
R.span({className: 'buttonWord'},
"Week"
)
R.span({className: 'buttonLetter'},
"W"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is 30 or spanSize is 31
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'month')
disabled: not @_rangeIsValid(1, 'month')
},
R.span({className: 'buttonWord'},
"1 Month"
)
R.span({className: 'buttonLetter'},
"1M"
)
)
R.button({
className: [
'btn'
'selected' if spanSize > 88 && spanSize < 93
].join ' '
onClick: @_setTimeSpanRange.bind(null, 3, 'months')
disabled: not @_rangeIsValid(3, 'months')
},
R.span({className: 'buttonWord'},
"3 Months"
)
R.span({className: 'buttonLetter'},
"3M"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is 365 or spanSize is 366
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'year')
disabled: not @_rangeIsValid(1, 'year')
},
R.span({className: 'buttonWord'},
"Year"
)
R.span({className: 'buttonLetter'},
"Y"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is @props.dayRange
].join ' '
onClick: @_showAllData.bind(null, @props.lastDay, @props.firstDay)
},
"All"
)
R.button({
className: 'btn arrow'
onClick: @_shiftTimeSpanRange.bind(null, @props.lastDay, @props.firstDay, 'future')
disabled: not @_rangeIsValid(null, null, 'future')
},
FaIcon('caret-right')
)
)
)
_rangeIsValid: (value, unit, direction = null) ->
if direction is 'future'
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
start = @props.timeSpan.get('start').clone().add(spanSize, 'days')
end = @props.timeSpan.get('end').clone().add(spanSize, 'days')
else if direction is 'past'
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
start = @props.timeSpan.get('start').clone().subtract(spanSize, 'days')
end = @props.timeSpan.get('end').clone().subtract(spanSize, 'days')
else
end = @props.timeSpan.get('end').clone()
start = end.clone().subtract(value, unit)
if end.isAfter(@props.lastDay.clone().add(1, 'day')) or start.isBefore(@props.firstDay)
return false
return true
_showAllData: (lastDay, firstDay) ->
timeSpan = Imm.Map {
start: firstDay
end: lastDay.clone().add(1, 'day')
}
@props.updateTimeSpan(timeSpan)
_setTimeSpanRange: (value, unit) ->
unless @_rangeIsValid value, unit
return
end = @props.timeSpan.get('end').clone()
start = end.clone().subtract(value, unit)
timeSpan = Imm.Map {
start
end
}
@props.updateTimeSpan(timeSpan)
_shiftTimeSpanRange: (lastDay, firstDay, direction) ->
start = @props.timeSpan.get('start').clone().startOf('day');
end = @props.timeSpan.get('end').clone().endOf('day');
difference = end.diff(start, 'days') + 1
if direction is 'future'
start.add(difference, 'days')
end.add(difference, 'days')
else if direction is 'past'
start.subtract(difference, 'days')
end.subtract(difference, 'days')
else
console.warn "Unknown span shift direction"
return
# unless end date is after lastDay or start is before first day
if end.isAfter(lastDay.clone().add(1, 'day')) or start.clone().isBefore(firstDay)
console.warn "Attempting to shift spanRange outside of data limits."
return
timeSpan = Imm.Map {
start
end
}
@props.updateTimeSpan(timeSpan)
return TimeSpanToolbar
module.exports = {load} | 211022 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Buttons for analysisTab which modify the time span in predefined increments
Moment = require 'moment'
Imm = require 'immutable'
load = (win) ->
$ = win.jQuery
React = win.React
{PropTypes} = React
R = React.DOM
{FaIcon} = require('../utils').load(win)
TimeSpanToolbar = React.createFactory React.createClass
displayName: 'TimeSpanToolbar'
mixins: [React.addons.PureRenderMixin]
propTypes: {
updateTimeSpan: PropTypes.func.isRequired
timeSpan: PropTypes.instanceOf(Imm.Map).isRequired
lastDay: PropTypes.instanceOf(Moment).isRequired
firstDay: PropTypes.instanceOf(Moment).isRequired
dayRange: PropTypes.number.isRequired
}
render: ->
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
return R.div({className: 'timeSpanToolbar'},
R.div({className: 'btn-group btn-group-sm'},
R.button({
className: 'btn arrow'
onClick: @_shiftTimeSpanRange.bind(null, @props.lastDay, @props.firstDay, 'past')
disabled: not @_rangeIsValid(null, null, 'past')
},
FaIcon('caret-left')
)
# ToDo: refactor these buttons into a component. They are all pretty similar.
R.div({
className: [
'btn'
'selected' if spanSize is 1
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'day')
disabled: not @_rangeIsValid(1, 'day')
},
R.span({className: 'buttonWord'},
"Day"
)
R.span({className: 'buttonLetter'},
"D"
)
)
R.div({
className: [
'btn'
'selected' if spanSize is 7
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'week')
disabled: not @_rangeIsValid(1, 'week')
},
R.span({className: 'buttonWord'},
"Week"
)
R.span({className: 'buttonLetter'},
"W"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is 30 or spanSize is 31
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'month')
disabled: not @_rangeIsValid(1, 'month')
},
R.span({className: 'buttonWord'},
"1 Month"
)
R.span({className: 'buttonLetter'},
"1M"
)
)
R.button({
className: [
'btn'
'selected' if spanSize > 88 && spanSize < 93
].join ' '
onClick: @_setTimeSpanRange.bind(null, 3, 'months')
disabled: not @_rangeIsValid(3, 'months')
},
R.span({className: 'buttonWord'},
"3 Months"
)
R.span({className: 'buttonLetter'},
"3M"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is 365 or spanSize is 366
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'year')
disabled: not @_rangeIsValid(1, 'year')
},
R.span({className: 'buttonWord'},
"Year"
)
R.span({className: 'buttonLetter'},
"Y"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is @props.dayRange
].join ' '
onClick: @_showAllData.bind(null, @props.lastDay, @props.firstDay)
},
"All"
)
R.button({
className: 'btn arrow'
onClick: @_shiftTimeSpanRange.bind(null, @props.lastDay, @props.firstDay, 'future')
disabled: not @_rangeIsValid(null, null, 'future')
},
FaIcon('caret-right')
)
)
)
_rangeIsValid: (value, unit, direction = null) ->
if direction is 'future'
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
start = @props.timeSpan.get('start').clone().add(spanSize, 'days')
end = @props.timeSpan.get('end').clone().add(spanSize, 'days')
else if direction is 'past'
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
start = @props.timeSpan.get('start').clone().subtract(spanSize, 'days')
end = @props.timeSpan.get('end').clone().subtract(spanSize, 'days')
else
end = @props.timeSpan.get('end').clone()
start = end.clone().subtract(value, unit)
if end.isAfter(@props.lastDay.clone().add(1, 'day')) or start.isBefore(@props.firstDay)
return false
return true
_showAllData: (lastDay, firstDay) ->
timeSpan = Imm.Map {
start: firstDay
end: lastDay.clone().add(1, 'day')
}
@props.updateTimeSpan(timeSpan)
_setTimeSpanRange: (value, unit) ->
unless @_rangeIsValid value, unit
return
end = @props.timeSpan.get('end').clone()
start = end.clone().subtract(value, unit)
timeSpan = Imm.Map {
start
end
}
@props.updateTimeSpan(timeSpan)
_shiftTimeSpanRange: (lastDay, firstDay, direction) ->
start = @props.timeSpan.get('start').clone().startOf('day');
end = @props.timeSpan.get('end').clone().endOf('day');
difference = end.diff(start, 'days') + 1
if direction is 'future'
start.add(difference, 'days')
end.add(difference, 'days')
else if direction is 'past'
start.subtract(difference, 'days')
end.subtract(difference, 'days')
else
console.warn "Unknown span shift direction"
return
# unless end date is after lastDay or start is before first day
if end.isAfter(lastDay.clone().add(1, 'day')) or start.clone().isBefore(firstDay)
console.warn "Attempting to shift spanRange outside of data limits."
return
timeSpan = Imm.Map {
start
end
}
@props.updateTimeSpan(timeSpan)
return TimeSpanToolbar
module.exports = {load} | true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Buttons for analysisTab which modify the time span in predefined increments
Moment = require 'moment'
Imm = require 'immutable'
load = (win) ->
$ = win.jQuery
React = win.React
{PropTypes} = React
R = React.DOM
{FaIcon} = require('../utils').load(win)
TimeSpanToolbar = React.createFactory React.createClass
displayName: 'TimeSpanToolbar'
mixins: [React.addons.PureRenderMixin]
propTypes: {
updateTimeSpan: PropTypes.func.isRequired
timeSpan: PropTypes.instanceOf(Imm.Map).isRequired
lastDay: PropTypes.instanceOf(Moment).isRequired
firstDay: PropTypes.instanceOf(Moment).isRequired
dayRange: PropTypes.number.isRequired
}
render: ->
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
return R.div({className: 'timeSpanToolbar'},
R.div({className: 'btn-group btn-group-sm'},
R.button({
className: 'btn arrow'
onClick: @_shiftTimeSpanRange.bind(null, @props.lastDay, @props.firstDay, 'past')
disabled: not @_rangeIsValid(null, null, 'past')
},
FaIcon('caret-left')
)
# ToDo: refactor these buttons into a component. They are all pretty similar.
R.div({
className: [
'btn'
'selected' if spanSize is 1
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'day')
disabled: not @_rangeIsValid(1, 'day')
},
R.span({className: 'buttonWord'},
"Day"
)
R.span({className: 'buttonLetter'},
"D"
)
)
R.div({
className: [
'btn'
'selected' if spanSize is 7
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'week')
disabled: not @_rangeIsValid(1, 'week')
},
R.span({className: 'buttonWord'},
"Week"
)
R.span({className: 'buttonLetter'},
"W"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is 30 or spanSize is 31
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'month')
disabled: not @_rangeIsValid(1, 'month')
},
R.span({className: 'buttonWord'},
"1 Month"
)
R.span({className: 'buttonLetter'},
"1M"
)
)
R.button({
className: [
'btn'
'selected' if spanSize > 88 && spanSize < 93
].join ' '
onClick: @_setTimeSpanRange.bind(null, 3, 'months')
disabled: not @_rangeIsValid(3, 'months')
},
R.span({className: 'buttonWord'},
"3 Months"
)
R.span({className: 'buttonLetter'},
"3M"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is 365 or spanSize is 366
].join ' '
onClick: @_setTimeSpanRange.bind(null, 1, 'year')
disabled: not @_rangeIsValid(1, 'year')
},
R.span({className: 'buttonWord'},
"Year"
)
R.span({className: 'buttonLetter'},
"Y"
)
)
R.button({
className: [
'btn'
'selected' if spanSize is @props.dayRange
].join ' '
onClick: @_showAllData.bind(null, @props.lastDay, @props.firstDay)
},
"All"
)
R.button({
className: 'btn arrow'
onClick: @_shiftTimeSpanRange.bind(null, @props.lastDay, @props.firstDay, 'future')
disabled: not @_rangeIsValid(null, null, 'future')
},
FaIcon('caret-right')
)
)
)
_rangeIsValid: (value, unit, direction = null) ->
if direction is 'future'
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
start = @props.timeSpan.get('start').clone().add(spanSize, 'days')
end = @props.timeSpan.get('end').clone().add(spanSize, 'days')
else if direction is 'past'
spanSize = @props.timeSpan.get('end').diff(@props.timeSpan.get('start'), 'days')
start = @props.timeSpan.get('start').clone().subtract(spanSize, 'days')
end = @props.timeSpan.get('end').clone().subtract(spanSize, 'days')
else
end = @props.timeSpan.get('end').clone()
start = end.clone().subtract(value, unit)
if end.isAfter(@props.lastDay.clone().add(1, 'day')) or start.isBefore(@props.firstDay)
return false
return true
_showAllData: (lastDay, firstDay) ->
timeSpan = Imm.Map {
start: firstDay
end: lastDay.clone().add(1, 'day')
}
@props.updateTimeSpan(timeSpan)
_setTimeSpanRange: (value, unit) ->
unless @_rangeIsValid value, unit
return
end = @props.timeSpan.get('end').clone()
start = end.clone().subtract(value, unit)
timeSpan = Imm.Map {
start
end
}
@props.updateTimeSpan(timeSpan)
_shiftTimeSpanRange: (lastDay, firstDay, direction) ->
start = @props.timeSpan.get('start').clone().startOf('day');
end = @props.timeSpan.get('end').clone().endOf('day');
difference = end.diff(start, 'days') + 1
if direction is 'future'
start.add(difference, 'days')
end.add(difference, 'days')
else if direction is 'past'
start.subtract(difference, 'days')
end.subtract(difference, 'days')
else
console.warn "Unknown span shift direction"
return
# unless end date is after lastDay or start is before first day
if end.isAfter(lastDay.clone().add(1, 'day')) or start.clone().isBefore(firstDay)
console.warn "Attempting to shift spanRange outside of data limits."
return
timeSpan = Imm.Map {
start
end
}
@props.updateTimeSpan(timeSpan)
return TimeSpanToolbar
module.exports = {load} |
[
{
"context": "# Cooltip.js\n# 1.0.0\n# https://github.com/jaketlarson/cooltip\n#\n# Copyright(c) 2015 Jake Larson <codere",
"end": 53,
"score": 0.9235854744911194,
"start": 42,
"tag": "USERNAME",
"value": "jaketlarson"
},
{
"context": "thub.com/jaketlarson/cooltip\n#\n# Copyright(c) ... | src/coffee/cooltip.coffee | jaketlarson/cooltip | 0 | # Cooltip.js
# 1.0.0
# https://github.com/jaketlarson/cooltip
#
# Copyright(c) 2015 Jake Larson <codereloadrepeat@gmail.com>
# MIT Licensed. http://www.opensource.org/licenses/mit-license.php
#
# jQuery plugin boilerplate used in this script can be found at:
# https://github.com/jquery-boilerplate/jquery-boilerplate/tree/master/src
(($, window, document) ->
pluginName = 'cooltip'
defaults = {
direction: 'top',
trigger: 'hover'
align: 'middle'
attr: 'title'
class: ''
enabled: true
}
## Options:
# direction: which side of the element the tooltip appears
# options: 'top', 'right', 'bottom', 'right'
#
# trigger: event that triggers tooltip
# options: 'hover' (for now)
#
# align: direction the tooltip aligns from in respect of the arrow
# if any direction: 'middle'
# if direction is 'top' or 'bottom': 'left', 'right'
# [coming soon] if direction is 'left' or 'right': 'up', 'down'
#
# enabled: whether or not to show tooltip on trigger event
# options: true, false
Cooltip = (target, options) ->
@target = target
@$target = $(target)
@options = $.extend {}, defaults, options
@_defaults = defaults
@_name = pluginName
@init()
Cooltip.prototype =
init: ->
# Generate a random ID for this tooltip
@uniq_id = Math.random().toString(36).slice(2)
# jQuery can't select pseudo-elements, so the left css property
# cannot be accessed. The property below needs to be the same
# as the left/right (depending on the direction) and width
# CSS of the .cooltip:after element.
# A better way should be seeked out.
@_aligning_arrow_buffer = .3*16 # rem to px
@_aligning_arrow_width = .8*16 # rem to px
@_initTip()
@_bindTrigger()
@_addRemovalWatch()
return
_initTip: ->
@$tip = $("<div/>", {id: @uniq_id, class: 'cooltip'})
@update()
# Add classes if they were passed as an option
if @options.class.length > 0
@$tip.addClass @options.class
@_enabled = !!@options.enabled
# Ensure arrow color is correct
@_matchArrowColor()
# Set z-index
@_setZIndex()
return
_positionTip: ->
@_setClass()
position = @_getPosition()
@$tip.css(
left: position.left
top: position.top
)
return
_addRemovalWatch: ->
@$target.bind 'destroyed', () =>
@destroy()
_calcPositionLeft: ->
left = null
if @options.direction == 'top' || @options.direction == 'bottom'
if @options.align == 'right'
left = @$target.offset().left + @$target.outerWidth(true)/2 - @_aligning_arrow_width/2 - @_aligning_arrow_buffer
else if @options.align == 'left'
left = @$target.offset().left - @$tip.outerWidth(true) + @$target.outerWidth(true)/2 + @_aligning_arrow_width/2 + @_aligning_arrow_buffer
else # default, align in middle
left = @$target.offset().left + @$target.outerWidth(true)/2 - @$tip.outerWidth(true)/2
else if @options.direction == 'left'
left = @$target.offset().left - @$tip.outerWidth((true))
else if @options.direction == 'right'
left = @$target.offset().left + @$target.outerWidth(true)
return left
_calcPositionTop: ->
top = null
if @options.direction == 'top'
top = @$target.offset().top - @$tip.outerHeight(true)
else if @options.direction == 'bottom'
top = @$target.offset().top + @$target.outerHeight(true)
else if @options.direction == 'left' || @options.direction == 'right'
if @options.align == 'top'
top = @$target.offset().top - @$tip.outerHeight(true) + @$target.outerHeight(true)/2 + @_aligning_arrow_width/2 + @_aligning_arrow_buffer
else if @options.align == 'bottom'
top = @$target.offset().top + @$target.outerHeight(true)/2 - @_aligning_arrow_width/2 - @_aligning_arrow_buffer
else # default, align in middle
top = @$target.offset().top + @$target.outerHeight(true)/2 - @$tip.outerHeight(true)/2
_getPosition: ->
position =
left: @_calcPositionLeft()
top: @_calcPositionTop()
return position
_setClass: ->
switch @options.direction
when 'top'
@$tip.addClass 'direction-top'
when 'right'
@$tip.addClass 'direction-right'
when 'bottom'
@$tip.addClass 'direction-bottom'
when 'left'
@$tip.addClass 'direction-left'
if @options.direction == 'top' || @options.direction == 'bottom'
switch @options.align
when 'left'
@$tip.addClass 'align-left'
when 'right'
@$tip.addClass 'align-right'
if @options.direction == 'right' || @options.direction == 'left'
switch @options.align
when 'top'
@$tip.addClass 'align-top'
when 'bottom'
@$tip.addClass 'align-bottom'
return
_bindTrigger: ->
bindAsHover = =>
@$target.hover (e) =>
# mouseenter
@showTip()
@_maskTitle()
, (e) =>
# mouseleave
@hideTip()
@_unmaskTitle()
return
bindAsFocus = =>
@$target.focus (e) =>
@showTip()
@$target.blur (e) =>
@hideTip()
return
switch @options.trigger
when 'hover'
bindAsHover()
when 'focus'
bindAsFocus()
else
bindAsHover()
return
_appendTip: ->
@$tip.appendTo $('body')
showTip: ->
if @_enabled
@_appendTip()
@_positionTip()
return
hideTip: ->
if @_enabled
@$tip.remove()
return
# If the attribute being copied into the tooltip is the title attribute,
# change the title attribute name to data-title attribute to temporarily.
_maskTitle: ->
is_using_title_attr = if @options.attr == 'title' then true else false
title_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false && @$target.attr('title').length > 0) then true else false
if is_using_title_attr && title_exists
@$target.data 'title', @$target.attr('title')
@$target.attr 'title', ''
return
# Restore the data-title to the title attribute.
_unmaskTitle: ->
data_title_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false) then true else false
# If title_already_exists (below) then the maskTitle function did not actually run, so let's leave it alone.
title_already_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false && @$target.attr('title').length > 0) then true else false
if data_title_exists && !title_already_exists
@$target.attr 'title', @$target.data('title')
@$target.data 'title', ''
return
# matchArrowColor will indrectly update the arrow color via border-color.
# Since it's a pseudo-element, it cannot be set directly.
# This will make customizing easier, by simply setting the background-color
# of the custom class.
_matchArrowColor: ->
$('body').append(@$tip)
if parseInt(@$tip.css('border-width')) > 0
tip_bg = @$tip.css('border-color')
else
tip_bg = @$tip.css('background-color')
@$tip.remove()
if tip_bg.length > 0
@$tip.css('border-color', tip_bg)
return
_setZIndex: ->
if @options.zIndex
@$tip.css 'z-index', @options.zIndex
return
addClass: (class_name) ->
if !@$tip.hasClass class_name
@$tip.addClass class_name
removeClass: (class_name) ->
if @$tip.hasClass class_name
@$tip.removeClass class_name
disable: ->
@_enabled = false
enable: ->
@_enabled = true
destroy: ->
# Possibly add event unbinding
@_disabled
@$tip.remove()
update: ->
@$tip.html @$target.attr @options.attr
@_positionTip()
$.fn[pluginName] = (options, arg) ->
@each ->
# Check if we are instantiating the plugin
unless $.data this, 'plugin_' + pluginName
$.data this, 'plugin_' + pluginName, new Cooltip(this, options)
else
# Not instantiating? Check if it's an API call.
if typeof options == 'string'
instance = $.data this, 'plugin_' + pluginName
switch options
when 'addClass'
instance.addClass(arg)
when 'removeClass'
instance.removeClass(arg)
when 'disable'
instance.disable()
when 'enable'
instance.enable()
when 'destroy'
instance.destroy()
when 'update'
instance.update()
# special event 'destroyed' provided by mtkopone
# http://stackoverflow.com/a/10172676/3179806
$.event.special.destroyed = remove: (o) ->
if o.handler
o.handler()
return
) jQuery, window, document
| 187 | # Cooltip.js
# 1.0.0
# https://github.com/jaketlarson/cooltip
#
# Copyright(c) 2015 <NAME> <<EMAIL>>
# MIT Licensed. http://www.opensource.org/licenses/mit-license.php
#
# jQuery plugin boilerplate used in this script can be found at:
# https://github.com/jquery-boilerplate/jquery-boilerplate/tree/master/src
(($, window, document) ->
pluginName = 'cooltip'
defaults = {
direction: 'top',
trigger: 'hover'
align: 'middle'
attr: 'title'
class: ''
enabled: true
}
## Options:
# direction: which side of the element the tooltip appears
# options: 'top', 'right', 'bottom', 'right'
#
# trigger: event that triggers tooltip
# options: 'hover' (for now)
#
# align: direction the tooltip aligns from in respect of the arrow
# if any direction: 'middle'
# if direction is 'top' or 'bottom': 'left', 'right'
# [coming soon] if direction is 'left' or 'right': 'up', 'down'
#
# enabled: whether or not to show tooltip on trigger event
# options: true, false
Cooltip = (target, options) ->
@target = target
@$target = $(target)
@options = $.extend {}, defaults, options
@_defaults = defaults
@_name = pluginName
@init()
Cooltip.prototype =
init: ->
# Generate a random ID for this tooltip
@uniq_id = Math.random().toString(36).slice(2)
# jQuery can't select pseudo-elements, so the left css property
# cannot be accessed. The property below needs to be the same
# as the left/right (depending on the direction) and width
# CSS of the .cooltip:after element.
# A better way should be seeked out.
@_aligning_arrow_buffer = .3*16 # rem to px
@_aligning_arrow_width = .8*16 # rem to px
@_initTip()
@_bindTrigger()
@_addRemovalWatch()
return
_initTip: ->
@$tip = $("<div/>", {id: @uniq_id, class: 'cooltip'})
@update()
# Add classes if they were passed as an option
if @options.class.length > 0
@$tip.addClass @options.class
@_enabled = !!@options.enabled
# Ensure arrow color is correct
@_matchArrowColor()
# Set z-index
@_setZIndex()
return
_positionTip: ->
@_setClass()
position = @_getPosition()
@$tip.css(
left: position.left
top: position.top
)
return
_addRemovalWatch: ->
@$target.bind 'destroyed', () =>
@destroy()
_calcPositionLeft: ->
left = null
if @options.direction == 'top' || @options.direction == 'bottom'
if @options.align == 'right'
left = @$target.offset().left + @$target.outerWidth(true)/2 - @_aligning_arrow_width/2 - @_aligning_arrow_buffer
else if @options.align == 'left'
left = @$target.offset().left - @$tip.outerWidth(true) + @$target.outerWidth(true)/2 + @_aligning_arrow_width/2 + @_aligning_arrow_buffer
else # default, align in middle
left = @$target.offset().left + @$target.outerWidth(true)/2 - @$tip.outerWidth(true)/2
else if @options.direction == 'left'
left = @$target.offset().left - @$tip.outerWidth((true))
else if @options.direction == 'right'
left = @$target.offset().left + @$target.outerWidth(true)
return left
_calcPositionTop: ->
top = null
if @options.direction == 'top'
top = @$target.offset().top - @$tip.outerHeight(true)
else if @options.direction == 'bottom'
top = @$target.offset().top + @$target.outerHeight(true)
else if @options.direction == 'left' || @options.direction == 'right'
if @options.align == 'top'
top = @$target.offset().top - @$tip.outerHeight(true) + @$target.outerHeight(true)/2 + @_aligning_arrow_width/2 + @_aligning_arrow_buffer
else if @options.align == 'bottom'
top = @$target.offset().top + @$target.outerHeight(true)/2 - @_aligning_arrow_width/2 - @_aligning_arrow_buffer
else # default, align in middle
top = @$target.offset().top + @$target.outerHeight(true)/2 - @$tip.outerHeight(true)/2
_getPosition: ->
position =
left: @_calcPositionLeft()
top: @_calcPositionTop()
return position
_setClass: ->
switch @options.direction
when 'top'
@$tip.addClass 'direction-top'
when 'right'
@$tip.addClass 'direction-right'
when 'bottom'
@$tip.addClass 'direction-bottom'
when 'left'
@$tip.addClass 'direction-left'
if @options.direction == 'top' || @options.direction == 'bottom'
switch @options.align
when 'left'
@$tip.addClass 'align-left'
when 'right'
@$tip.addClass 'align-right'
if @options.direction == 'right' || @options.direction == 'left'
switch @options.align
when 'top'
@$tip.addClass 'align-top'
when 'bottom'
@$tip.addClass 'align-bottom'
return
_bindTrigger: ->
bindAsHover = =>
@$target.hover (e) =>
# mouseenter
@showTip()
@_maskTitle()
, (e) =>
# mouseleave
@hideTip()
@_unmaskTitle()
return
bindAsFocus = =>
@$target.focus (e) =>
@showTip()
@$target.blur (e) =>
@hideTip()
return
switch @options.trigger
when 'hover'
bindAsHover()
when 'focus'
bindAsFocus()
else
bindAsHover()
return
_appendTip: ->
@$tip.appendTo $('body')
showTip: ->
if @_enabled
@_appendTip()
@_positionTip()
return
hideTip: ->
if @_enabled
@$tip.remove()
return
# If the attribute being copied into the tooltip is the title attribute,
# change the title attribute name to data-title attribute to temporarily.
_maskTitle: ->
is_using_title_attr = if @options.attr == 'title' then true else false
title_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false && @$target.attr('title').length > 0) then true else false
if is_using_title_attr && title_exists
@$target.data 'title', @$target.attr('title')
@$target.attr 'title', ''
return
# Restore the data-title to the title attribute.
_unmaskTitle: ->
data_title_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false) then true else false
# If title_already_exists (below) then the maskTitle function did not actually run, so let's leave it alone.
title_already_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false && @$target.attr('title').length > 0) then true else false
if data_title_exists && !title_already_exists
@$target.attr 'title', @$target.data('title')
@$target.data 'title', ''
return
# matchArrowColor will indrectly update the arrow color via border-color.
# Since it's a pseudo-element, it cannot be set directly.
# This will make customizing easier, by simply setting the background-color
# of the custom class.
_matchArrowColor: ->
$('body').append(@$tip)
if parseInt(@$tip.css('border-width')) > 0
tip_bg = @$tip.css('border-color')
else
tip_bg = @$tip.css('background-color')
@$tip.remove()
if tip_bg.length > 0
@$tip.css('border-color', tip_bg)
return
_setZIndex: ->
if @options.zIndex
@$tip.css 'z-index', @options.zIndex
return
addClass: (class_name) ->
if !@$tip.hasClass class_name
@$tip.addClass class_name
removeClass: (class_name) ->
if @$tip.hasClass class_name
@$tip.removeClass class_name
disable: ->
@_enabled = false
enable: ->
@_enabled = true
destroy: ->
# Possibly add event unbinding
@_disabled
@$tip.remove()
update: ->
@$tip.html @$target.attr @options.attr
@_positionTip()
$.fn[pluginName] = (options, arg) ->
@each ->
# Check if we are instantiating the plugin
unless $.data this, 'plugin_' + pluginName
$.data this, 'plugin_' + pluginName, new Cooltip(this, options)
else
# Not instantiating? Check if it's an API call.
if typeof options == 'string'
instance = $.data this, 'plugin_' + pluginName
switch options
when 'addClass'
instance.addClass(arg)
when 'removeClass'
instance.removeClass(arg)
when 'disable'
instance.disable()
when 'enable'
instance.enable()
when 'destroy'
instance.destroy()
when 'update'
instance.update()
# special event 'destroyed' provided by mtkopone
# http://stackoverflow.com/a/10172676/3179806
$.event.special.destroyed = remove: (o) ->
if o.handler
o.handler()
return
) jQuery, window, document
| true | # Cooltip.js
# 1.0.0
# https://github.com/jaketlarson/cooltip
#
# Copyright(c) 2015 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# MIT Licensed. http://www.opensource.org/licenses/mit-license.php
#
# jQuery plugin boilerplate used in this script can be found at:
# https://github.com/jquery-boilerplate/jquery-boilerplate/tree/master/src
(($, window, document) ->
pluginName = 'cooltip'
defaults = {
direction: 'top',
trigger: 'hover'
align: 'middle'
attr: 'title'
class: ''
enabled: true
}
## Options:
# direction: which side of the element the tooltip appears
# options: 'top', 'right', 'bottom', 'right'
#
# trigger: event that triggers tooltip
# options: 'hover' (for now)
#
# align: direction the tooltip aligns from in respect of the arrow
# if any direction: 'middle'
# if direction is 'top' or 'bottom': 'left', 'right'
# [coming soon] if direction is 'left' or 'right': 'up', 'down'
#
# enabled: whether or not to show tooltip on trigger event
# options: true, false
Cooltip = (target, options) ->
@target = target
@$target = $(target)
@options = $.extend {}, defaults, options
@_defaults = defaults
@_name = pluginName
@init()
Cooltip.prototype =
init: ->
# Generate a random ID for this tooltip
@uniq_id = Math.random().toString(36).slice(2)
# jQuery can't select pseudo-elements, so the left css property
# cannot be accessed. The property below needs to be the same
# as the left/right (depending on the direction) and width
# CSS of the .cooltip:after element.
# A better way should be seeked out.
@_aligning_arrow_buffer = .3*16 # rem to px
@_aligning_arrow_width = .8*16 # rem to px
@_initTip()
@_bindTrigger()
@_addRemovalWatch()
return
_initTip: ->
@$tip = $("<div/>", {id: @uniq_id, class: 'cooltip'})
@update()
# Add classes if they were passed as an option
if @options.class.length > 0
@$tip.addClass @options.class
@_enabled = !!@options.enabled
# Ensure arrow color is correct
@_matchArrowColor()
# Set z-index
@_setZIndex()
return
_positionTip: ->
@_setClass()
position = @_getPosition()
@$tip.css(
left: position.left
top: position.top
)
return
_addRemovalWatch: ->
@$target.bind 'destroyed', () =>
@destroy()
_calcPositionLeft: ->
left = null
if @options.direction == 'top' || @options.direction == 'bottom'
if @options.align == 'right'
left = @$target.offset().left + @$target.outerWidth(true)/2 - @_aligning_arrow_width/2 - @_aligning_arrow_buffer
else if @options.align == 'left'
left = @$target.offset().left - @$tip.outerWidth(true) + @$target.outerWidth(true)/2 + @_aligning_arrow_width/2 + @_aligning_arrow_buffer
else # default, align in middle
left = @$target.offset().left + @$target.outerWidth(true)/2 - @$tip.outerWidth(true)/2
else if @options.direction == 'left'
left = @$target.offset().left - @$tip.outerWidth((true))
else if @options.direction == 'right'
left = @$target.offset().left + @$target.outerWidth(true)
return left
_calcPositionTop: ->
top = null
if @options.direction == 'top'
top = @$target.offset().top - @$tip.outerHeight(true)
else if @options.direction == 'bottom'
top = @$target.offset().top + @$target.outerHeight(true)
else if @options.direction == 'left' || @options.direction == 'right'
if @options.align == 'top'
top = @$target.offset().top - @$tip.outerHeight(true) + @$target.outerHeight(true)/2 + @_aligning_arrow_width/2 + @_aligning_arrow_buffer
else if @options.align == 'bottom'
top = @$target.offset().top + @$target.outerHeight(true)/2 - @_aligning_arrow_width/2 - @_aligning_arrow_buffer
else # default, align in middle
top = @$target.offset().top + @$target.outerHeight(true)/2 - @$tip.outerHeight(true)/2
_getPosition: ->
position =
left: @_calcPositionLeft()
top: @_calcPositionTop()
return position
_setClass: ->
switch @options.direction
when 'top'
@$tip.addClass 'direction-top'
when 'right'
@$tip.addClass 'direction-right'
when 'bottom'
@$tip.addClass 'direction-bottom'
when 'left'
@$tip.addClass 'direction-left'
if @options.direction == 'top' || @options.direction == 'bottom'
switch @options.align
when 'left'
@$tip.addClass 'align-left'
when 'right'
@$tip.addClass 'align-right'
if @options.direction == 'right' || @options.direction == 'left'
switch @options.align
when 'top'
@$tip.addClass 'align-top'
when 'bottom'
@$tip.addClass 'align-bottom'
return
_bindTrigger: ->
bindAsHover = =>
@$target.hover (e) =>
# mouseenter
@showTip()
@_maskTitle()
, (e) =>
# mouseleave
@hideTip()
@_unmaskTitle()
return
bindAsFocus = =>
@$target.focus (e) =>
@showTip()
@$target.blur (e) =>
@hideTip()
return
switch @options.trigger
when 'hover'
bindAsHover()
when 'focus'
bindAsFocus()
else
bindAsHover()
return
_appendTip: ->
@$tip.appendTo $('body')
showTip: ->
if @_enabled
@_appendTip()
@_positionTip()
return
hideTip: ->
if @_enabled
@$tip.remove()
return
# If the attribute being copied into the tooltip is the title attribute,
# change the title attribute name to data-title attribute to temporarily.
_maskTitle: ->
is_using_title_attr = if @options.attr == 'title' then true else false
title_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false && @$target.attr('title').length > 0) then true else false
if is_using_title_attr && title_exists
@$target.data 'title', @$target.attr('title')
@$target.attr 'title', ''
return
# Restore the data-title to the title attribute.
_unmaskTitle: ->
data_title_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false) then true else false
# If title_already_exists (below) then the maskTitle function did not actually run, so let's leave it alone.
title_already_exists = if (typeof @$target.attr('title') != typeof undefined && @$target.attr('title') != false && @$target.attr('title').length > 0) then true else false
if data_title_exists && !title_already_exists
@$target.attr 'title', @$target.data('title')
@$target.data 'title', ''
return
# matchArrowColor will indrectly update the arrow color via border-color.
# Since it's a pseudo-element, it cannot be set directly.
# This will make customizing easier, by simply setting the background-color
# of the custom class.
_matchArrowColor: ->
$('body').append(@$tip)
if parseInt(@$tip.css('border-width')) > 0
tip_bg = @$tip.css('border-color')
else
tip_bg = @$tip.css('background-color')
@$tip.remove()
if tip_bg.length > 0
@$tip.css('border-color', tip_bg)
return
_setZIndex: ->
if @options.zIndex
@$tip.css 'z-index', @options.zIndex
return
addClass: (class_name) ->
if !@$tip.hasClass class_name
@$tip.addClass class_name
removeClass: (class_name) ->
if @$tip.hasClass class_name
@$tip.removeClass class_name
disable: ->
@_enabled = false
enable: ->
@_enabled = true
destroy: ->
# Possibly add event unbinding
@_disabled
@$tip.remove()
update: ->
@$tip.html @$target.attr @options.attr
@_positionTip()
$.fn[pluginName] = (options, arg) ->
@each ->
# Check if we are instantiating the plugin
unless $.data this, 'plugin_' + pluginName
$.data this, 'plugin_' + pluginName, new Cooltip(this, options)
else
# Not instantiating? Check if it's an API call.
if typeof options == 'string'
instance = $.data this, 'plugin_' + pluginName
switch options
when 'addClass'
instance.addClass(arg)
when 'removeClass'
instance.removeClass(arg)
when 'disable'
instance.disable()
when 'enable'
instance.enable()
when 'destroy'
instance.destroy()
when 'update'
instance.update()
# special event 'destroyed' provided by mtkopone
# http://stackoverflow.com/a/10172676/3179806
$.event.special.destroyed = remove: (o) ->
if o.handler
o.handler()
return
) jQuery, window, document
|
[
{
"context": "= login_session_b64\n user = to_user { email_or_username, uid }\n await generate_pdpka { km : pdpka4_km,",
"end": 6141,
"score": 0.626986563205719,
"start": 6133,
"tag": "USERNAME",
"value": "username"
},
{
"context": "ake_esc (err) -> cb err, null\n passphrase... | src/hilevel/account.iced | AngelKey/Angelkey.libweb | 15 |
triplesec = require "triplesec"
kbpgp = require 'kbpgp'
WordArray = triplesec.WordArray
{KeyManager} = kbpgp
{make_esc} = require 'iced-error'
{xor_buffers} = require '../base/util'
{athrow} = require('iced-utils').util
proofs = require 'keybase-proofs'
#=======================================================================================
# There's historically been trouble with the slice() operator on Browserify's
# buffers, so just do the safe/slow/stupid thing.
myslice = (buf, s, e) ->
l = e - s
out = Buffer.alloc l
for i in [0...l]
out.writeUInt8(buf.readUInt8(i+s), i)
out
bufsplit = (buf, lens) ->
s = 0
ret = []
for l in lens
e = s+l
ret.push myslice(buf,s,e)
s = e
return ret
#=======================================================================================
#
# Class for Keybase account manipulation
#
exports.Account = class Account
#
# @param {Config} config A Config object that explains how we'll
# do things like contacting the keybase server.
#
constructor : ({@config}) ->
{C} = @config
@triplesec_version = @config.C.triplesec.version
@nacl = {}
@lks = {}
@extra_keymaterial = C.pwh.derived_key_bytes +
C.nacl.eddsa_secret_key_bytes +
C.nacl.dh_secret_key_bytes +
C.device.lks_client_half_bytes
@new_tsenc()
#---------------
new_tsenc : () ->
@enc = new triplesec.Encryptor { version : @triplesec_version }
#---------------
resalt : ({ salt, progress_hook }, cb) ->
await @enc.resalt { salt, @extra_keymaterial, progress_hook }, defer err, keys
throw err if err?
cb keys
#---------------
# Given a passphrase and salt, hash it using Scrypt with the
# standard V3 parameters. We're doig this as somewhat of a hack,
# allocating the keys needed for triplesec'ing, and then using the
# "extra" keys after that.
#
# @param {string} key A key as a utf8-string that's the passphrase
# @param {Buffer} salt The salt as a buffer of binary data
# @param {function} progress_hook A progress hook if we care....
# @param {callback} cb Call when completed with the passphrase hash. The PWH
# is presented as a hex-encoded string, suitable for sending over AJAX.
  scrypt_hash_passphrase : ({key, salt, progress_hook, encoding}, cb) ->
    key = new triplesec.Buffer key, 'utf8'
    @enc.set_key key
    {C} = @config
    await @resalt { salt, progress_hook }, defer keys
    # Carve the "extra" key material into pwh, the NaCl seeds, and the
    # LKS client half, in the fixed order used account-wide.
    km = keys.extra
    # NOTE(review): "clienf_half" looks like a typo for "client_half";
    # left as-is since code outside this view may read @lks.clienf_half
    # — confirm before renaming.
    [pwh, @nacl.eddsa, @nacl.dh, @lks.clienf_half ] = bufsplit km, [
      C.pwh.derived_key_bytes,
      C.nacl.eddsa_secret_key_bytes,
      C.nacl.dh_secret_key_bytes,
      C.device.lks_client_half_bytes
    ]
    if encoding? then pwh = pwh.toString encoding
    cb pwh, @nacl.eddsa
#---------------
  # Prepare a signup bundle for upload: generate a random salt, replace
  # the plaintext passphrase (bundle.pw) with its scrypt hash
  # (bundle.pwh), and derive the pdpka5 signing KID from the EdDSA seed.
  # Calls back with (err).
  fix_signup_bundle : (bundle, cb) ->
    err = null
    nb = triplesec.V[@triplesec_version].salt_size
    await kbpgp.rand.SRF().random_bytes nb, defer salt
    await @scrypt_hash_passphrase { key : bundle.pw, salt, encoding : 'hex' }, defer bundle.pwh, secret32_eddsa
    await secret32_to_signing_kid { secret32 : secret32_eddsa }, defer err, bundle.pdpka5_kid
    bundle.salt = salt.toString 'hex'
    bundle.pwh_version = @triplesec_version
    # Never keep (or upload) the raw passphrase.
    delete bundle.pw
    cb err
#---------------
# used during password change.
#
# @param {string} pw Passphrase as a utf8-encoded string
# @param {Buffer} salt The raw binary salt as a buffer, returned from
# pw_to_login below, most likely.
# @param {callback} cb called with err, pwh, pwh_version
  gen_new_pwh : ({pw, salt}, cb) ->
    # Reuse the standard stretch; pwh comes back hex-encoded.
    await @scrypt_hash_passphrase { key : pw, salt, encoding : 'hex' }, defer pwh
    pwh_version = @triplesec_version
    cb null, pwh, pwh_version
#---------------
# Convert a pw into a password hash.
#
# @param {String} pw the input passprhase
# @param {String} email_or_username the email or username to use in the salt lookup.
# @param {Callback} cb callback with a quad: <Error,Buffer,Int,Buffer>, containing
# an error (if one happened), a Buffer with the pwh, an int for what version,
# and a buffer with the salt.
  pw_to_pwh : ({pw, email_or_username, uid}, cb) ->
    esc = make_esc cb, "pw_to_pwh"
    err = pwh = pwh_version = salt = null
    params = { email_or_username, uid, pdpka_login : true }
    await @config.request { method : "GET", endpoint : 'getsalt', params }, esc defer res
    if err? then # noop
    else if not ((got = res?.body?.pwh_version) is @triplesec_version)
      err = new Error "Can only support PW hash version #{@triplesec_version}; got #{got} for #{@config.escape_user_content email_or_username}"
    else
      # Stretch the passphrase with the server-supplied salt, then
      # derive the pdpka v4 (from pwh) and v5 (from the EdDSA seed)
      # signing key managers.
      salt = new triplesec.Buffer res.body.salt, 'hex'
      await @scrypt_hash_passphrase { salt, key : pw, encoding : null }, defer pwh, secret32_eddsa
      await secret32_to_signing_km { secret32 : pwh }, esc defer pdpka4_km
      await secret32_to_signing_km { secret32 : secret32_eddsa }, esc defer pdpka5_km
      pwh_version = @triplesec_version
    cb err, pwh, pwh_version, salt, res?.body?.login_session, pdpka4_km, pdpka5_km
#---------------
  # Full login computation from a raw passphrase: fetch salt + login
  # session, stretch the passphrase, then produce the login parameters.
  # Calls back with (err, login_session, hmac_pwh, salt, pdpka4, pdpka5).
  pw_to_login : ({pw, email_or_username, hostname, uid}, cb) ->
    esc = make_esc cb, "pw_to_login"
    login_session = hmac_pwh = null
    await @pw_to_pwh { pw, email_or_username, uid }, esc defer pwh, pwh_version, salt, login_session_b64, pdpka4_km, pdpka5_km
    await @pwh_to_login_params { hostname, login_session_b64, pwh, email_or_username, uid, pdpka5_km, pdpka4_km }, esc defer { login_session, hmac_pwh, pdpka4, pdpka5 }
    cb null, login_session, hmac_pwh, salt, pdpka4, pdpka5
#---------------
  # Given the server's login session and our stretched pwh, compute the
  # parameters needed to complete a login: the HMAC session proof plus
  # the pdpka4/pdpka5 signatures.
  pwh_to_login_params : ({hostname, login_session_b64, pwh, email_or_username, uid, pdpka4_km, pdpka5_km}, cb) ->
    esc = make_esc cb, "pwh_to_login_params"
    login_session = new triplesec.Buffer login_session_b64, 'base64'
    # Make a new HMAC-SHA512'er keyed with the stretched passphrase
    # hash, and MAC the login session to prove knowledge of the
    # passphrase without sending it.
    hmac = new triplesec.HMAC(WordArray.from_buffer(pwh))
    hmac_pwh = hmac.update(WordArray.from_buffer(login_session)).finalize().to_hex()
    session = login_session = login_session_b64
    user = to_user { email_or_username, uid }
    await generate_pdpka { km : pdpka4_km, user, session, hostname }, esc defer pdpka4
    await generate_pdpka { km : pdpka5_km, user, session, hostname }, esc defer pdpka5
    cb null, { login_session, hmac_pwh, pdpka4, pdpka5 }
#---------------
  # During passphrase change: derive pdpka4/5 signing key managers from
  # the old pwh and EdDSA seed, then compute the login-proof parameters.
  # Calls back with (err, hmac_pwh, pdpka4, pdpka5).
  _change_passphrase_compute_login : ({hostname, uid, login_session_b64, pwh, eddsa}, cb) ->
    esc = make_esc cb, "_change_passphrase_compute_login"
    await secret32_to_signing_km { secret32 : pwh }, esc defer pdpka4_km
    await secret32_to_signing_km { secret32 : eddsa }, esc defer pdpka5_km
    await @pwh_to_login_params { hostname, login_session_b64, uid, pwh, pdpka4_km, pdpka5_km }, esc defer { hmac_pwh, pdpka4, pdpka5 }
    cb null, hmac_pwh, pdpka4, pdpka5
#---------------
get_public_pgp_key: (username, cb) ->
err = ret = null
fields = "public_keys"
await @config.request { endpoint : "user/lookup", params : {username, fields} }, defer err, res
unless err?
ret = res?.body?.them?.public_keys?.primary?.bundle
err = new Error "Cannot find a public key for '#{@config.escape_user_content username}'" unless ret?
cb err, ret
#---------------
get_devices : ({username}, cb) ->
err = ret = null
fields = "devices"
await @config.request { endpoint : "user/lookup", params : { username, fields } }, defer err, res
unless err?
ret = res?.body?.them?.devices
err = new Error "Cannot find devices for '#{@config.escape_user_content username}" unless ret?
cb err, ret
#---------------
get_public_pgp_keys : (username, cb) ->
err = ret = null
fields = "public_keys"
await @config.request { endpoint : "user/lookup", params : {username} }, defer err, res
unless err?
ret = res?.body?.them?.public_keys?.pgp_public_keys
err = new Error "Cannot find a public key for '#{@config.escape_user_content username}'" unless ret?.length
cb err, ret
#---------------
  # Convenience wrapper: unlock the primary private PGP key with `pw`.
  get_unlocked_private_primary_pgp_key : (pw, cb) ->
    @get_unlocked_private_pgp_key { pw }, cb
#---------------
  # Fetch "me" from the server and unlock one private key: the key
  # matching `kid` if given, otherwise the primary key. Calls back with
  # (err, KeyManager).
  get_unlocked_private_pgp_key : ({pw, kid, no_check_keys, time_travel}, cb) ->
    esc = make_esc (err) -> cb err, null
    passphrase = new triplesec.Buffer pw
    await @config.request { method : "GET", endpoint : "me" }, esc defer res
    bundle = null
    if kid?
      # Scan all private keys for the requested kid; stop at the first
      # one that carries a bundle.
      for sk in res?.body?.me?.private_keys?.all when sk.kid is kid
        break if (bundle = sk.bundle)?
    else
      bundle = res?.body?.me?.private_keys?.primary?.bundle
    sk = err = null
    if bundle?
      tsenc = @get_tsenc_for_decryption { passphrase }
      await KeyManager.import_from_p3skb { raw: bundle, no_check_keys, time_travel }, esc defer sk
      await sk.unlock_p3skb { tsenc, no_check_keys, time_travel }, esc defer()
      err = null
    unless sk?
      err = new Error "Failed to get and unlock your private key"
    cb err, sk
#---------------
get_unlocked_private_pgp_keys : (pw, cb) ->
esc = make_esc cb, "get_unlocked_private_pgp_keys"
sks = []
passphrase = new triplesec.Buffer pw
tsenc = @get_tsenc_for_decryption { passphrase }
await @config.request { method : "GET", endpoint : "me" }, esc defer res
for sk in res?.body?.me?.private_keys?.all when (sk.type is @config.C.key.key_type.P3KSB_PRIVATE)
await KeyManager.import_from_p3skb { raw: sk.bundle }, esc defer sk
await sk.unlock_p3skb { tsenc : tsenc.clone() }, esc defer()
sks.push sk
cb err, sks
#---------------
  # Export the private key identified by `kid` (or the primary key) as
  # an armored PGP private key, locked under `pw` for client-side use.
  # Calls back with (err, armored_private).
  export_my_private_key: ({kid, pw, no_check_keys, time_travel}, cb) ->
    esc = make_esc cb, "export_my_private_key"
    err = armored_private = null
    passphrase = new triplesec.Buffer pw
    await @get_unlocked_private_pgp_key { kid, pw, no_check_keys, time_travel }, esc defer sk
    # NOTE(review): sign() is called before export, presumably to
    # (re)generate self-signatures — confirm.
    await sk.sign {}, esc defer()
    await sk.export_pgp_private_to_client {passphrase}, esc defer armored_private
    cb null, armored_private
#---------------
# Run passphrase stretching on the given salt/passphrase
# combination, without side-effects.
  _change_passphrase_derive_passphrase_components : ( { tsenc, salt, passphrase}, cb) ->
    esc = make_esc cb, "_change_passphrase_derive_passphrase_components"
    key = Buffer.from passphrase, 'utf8'
    {C} = @config
    # Use the caller's Encryptor if provided, else a throwaway one.
    tsenc or= new triplesec.Encryptor { version : @triplesec_version }
    tsenc.set_key key
    await tsenc.resalt { @extra_keymaterial, salt }, esc defer keys
    km = keys.extra
    # Split out pwh / EdDSA seed / (discarded DH seed) / LKS client half.
    [pwh, eddsa, _, lks_client_half ] = bufsplit km, [
      C.pwh.derived_key_bytes,
      C.nacl.eddsa_secret_key_bytes,
      C.nacl.dh_secret_key_bytes,
      C.device.lks_client_half_bytes
    ]
    cb null, { tsenc, pwh, lks_client_half, eddsa }
#---------------
  # Encrypt the new LKS client half to every device encryption key so
  # each device can recover it after the passphrase change. Calls back
  # with (err, ret) where ret maps kid -> boxed client half.
  _change_passphrase_encrypt_lks_client_half : ( { me, client_half }, cb) ->
    ret = {}
    esc = make_esc cb, "_change_passphrase_encrypt_lks_client_half"
    for deviceid, {keys} of me.devices
      for {kid,key_role} in keys when (key_role is @config.C.key.key_role.ENCRYPTION)
        # NOTE(review): the kid string itself is imported as armored
        # public key material — presumably KIDs are self-describing
        # public keys; confirm.
        await kbpgp.ukm.import_armored_public { armored : kid }, esc defer km
        await kbpgp.kb.box { encrypt_for : km, msg : client_half }, esc defer ret[kid]
    cb null, ret
#---------------
  # Decrypt each server-held PGP private key with the old passphrase's
  # tsenc and re-encrypt it under the new one. KIDs in exclude_kids are
  # skipped. Calls back with (err, outputs) — re-encrypted bundles
  # ready for upload.
  _change_passphrase_reencrypt_pgp_private_keys : ( { me, old_ppc, new_ppc, exclude_kids }, cb ) ->
    outputs = []
    exclude_kids or= []
    esc = make_esc cb, "_change_passphrase_reencrypt_pgp_private_key"
    for {kid,bundle} in (me?.private_keys?.all or []) when not (kid in exclude_kids)
      await KeyManager.import_from_p3skb { armored : bundle }, esc defer km
      # Clone so unlock state doesn't leak between iterations.
      await km.unlock_p3skb { tsenc : old_ppc.tsenc.clone() }, esc defer()
      {tsenc,passphrase_generation} = new_ppc
      await km.export_private_to_server {tsenc, passphrase_generation}, esc defer output
      outputs.push output
    cb null, outputs
#---------------
  # The LKS mask is the XOR of the old and new client halves, hex
  # encoded — presumably applied server-side to roll the stored half
  # forward; confirm against the server implementation.
  _change_passphrase_compute_lks_mask : ( { old_ppc, new_ppc}, cb) ->
    lks_mask = xor_buffers(old_ppc.lks_client_half, new_ppc.lks_client_half).toString('hex')
    cb null, lks_mask
#---------------
# Reset the user's passphrase given then authentication link 'l'.
  reset_passphrase : ( {new_pp, uid, hostname, l}, cb) ->
    esc = make_esc cb, "reset_passphrase"
    params = { uid }
    await @config.request { method : "GET", endpoint : 'getsalt', params }, esc defer res
    salt = new triplesec.Buffer res.body.salt, 'hex'
    # Stretch the new passphrase and derive the v4/v5 signing KIDs that
    # will replace the old ones.
    await @scrypt_hash_passphrase { salt, key : new_pp, encoding : null }, defer pwh, secret32_eddsa
    await secret32_to_signing_kid { secret32 : pwh }, esc defer pdpka4_kid
    await secret32_to_signing_kid { secret32 : secret32_eddsa }, esc defer pdpka5_kid
    params = {
      pdpka4_kid : pdpka4_kid
      pdpka5_kid : pdpka5_kid
      l : l
    }
    await @config.request { method : "POST", endpoint : "passphrase/reset", params }, esc defer res
    cb null, res
#---------------
#
# Use v2 of the passphrase change system, which changes the LKS mask
# and also encrypts the LKS client half for all known encryption devices.
# .. In addition to reencrypting PGP private keys...
#
# @param {string} old_pp The old passphrase
# @param {string} new_pp The new passphrase
# @param {vec<string>} exclude_kids Don't reencrypt these KIDs or include
# them in the upload. Primarily useful for testing
# @param {callback<error>} cb Callback, will fire with an Error
# if the update didn't work.
#
  change_passphrase : ( {old_pp, new_pp, exclude_kids, hostname}, cb) ->
    old_ppc = new_ppc = null
    esc = make_esc cb, "change_passphrase"
    # Fetch 'me' plus a fresh login session in one round-trip.
    params = { make_login_session : 1 }
    await @config.request { method : "GET", endpoint : "me", params }, esc defer res
    unless (me = res?.body?.me)? and (login_session_b64 = res?.body?.login_session)?
      await athrow (new Error "Cannot load 'me' from server"), esc defer()
    salt = Buffer.from me.basics.salt, 'hex'
    # Stretch both old and new passphrases with the same account salt.
    await @_change_passphrase_derive_passphrase_components { tsenc : @enc, salt, passphrase : old_pp }, esc defer old_ppc
    await @_change_passphrase_derive_passphrase_components { salt, passphrase : new_pp }, esc defer new_ppc
    # Prove we know the old passphrase (HMAC + pdpka signatures).
    await @_change_passphrase_compute_login { uid : me.id, login_session_b64, pwh : old_ppc.pwh, eddsa : old_ppc.eddsa, hostname }, esc defer hmac_pwh, old_pdpka4, old_pdpka5
    old_ppc.passphrase_generation = me.basics.passphrase_generation
    new_ppc.passphrase_generation = old_ppc.passphrase_generation + 1
    # Re-protect per-device LKS halves and server-held private keys
    # under the new passphrase material.
    await @_change_passphrase_encrypt_lks_client_half { me, client_half : new_ppc.lks_client_half }, esc defer lksch
    await @_change_passphrase_reencrypt_pgp_private_keys { me, old_ppc, new_ppc, exclude_kids}, esc defer private_keys
    await @_change_passphrase_compute_lks_mask { old_ppc, new_ppc }, esc defer lks_mask
    await secret32_to_signing_kid { secret32 : new_ppc.eddsa }, esc defer pdpka5_kid
    params = {
      hmac_pwh : hmac_pwh
      old_pdpka4 : old_pdpka4
      old_pdpka5 : old_pdpka5
      pdpka5_kid : pdpka5_kid
      login_session : login_session_b64
      pwh : new_ppc.pwh.toString('hex'),
      pwh_version : @triplesec_version,
      ppgen : old_ppc.passphrase_generation,
      lks_mask,
      lks_client_halves : JSON.stringify(lksch),
      private_keys
    }
    await @config.request { method : "POST", endpoint : "passphrase/replace", params }, esc defer res
    # Now reset our internal triplesec to the new one.
    @enc = new_ppc.tsenc
    cb null, new_ppc
#---------------
# @param {Buffer} passphrase
  # Key @enc with the given passphrase buffer and return it for use in
  # decryption. Note: this mutates the shared @enc rather than cloning.
  get_tsenc_for_decryption : ({passphrase}) ->
    @enc.set_key passphrase
    @enc
#---------------
  # Generate a split NaCl EdDSA KeyManager seeded from @nacl.eddsa
  # (set by scrypt_hash_passphrase). `params` is currently unused.
  gen_nacl_eddsa_key : (params, cb) ->
    gen = kbpgp.kb.KeyManager.generate
    await gen { seed : @nacl.eddsa, split : true }, defer err, km
    cb err, km
#---------------
  # Generate a split NaCl DH EncKeyManager seeded from @nacl.dh
  # (set by scrypt_hash_passphrase). `params` is currently unused.
  gen_nacl_dh_key : (params, cb) ->
    gen = kbpgp.kb.EncKeyManager.generate
    await gen { seed : @nacl.dh, split : true }, defer err, km
    cb err, km
#=======================================================================================
# Deterministically generate a NaCl signing KeyManager from a 32-byte
# seed (split : false => one combined signing key). Calls back with
# (err, km). Cleanup: removed dead locals (`ret` was never used and
# `err` is assigned by the defer slot before cb fires).
exports.secret32_to_signing_km = secret32_to_signing_km = ({secret32}, cb) ->
  await kbpgp.kb.KeyManager.generate { seed : secret32, split : false }, defer err, km
  cb err, km
#=======================================================================================
# Derive the public KID for the signing key generated from `secret32`.
# Calls back with (err, kid, km); kid is null when generation failed.
# Cleanup: removed the unused `ret` local and initialize `kid`
# explicitly so the error path passes a well-defined null.
exports.secret32_to_signing_kid = secret32_to_signing_kid = ({secret32}, cb) ->
  err = kid = null
  await secret32_to_signing_km { secret32 }, defer err, km
  unless err?
    await km.export_public {}, defer err, kid
  cb err, kid, km
#=======================================================================================
# Generate a PDPKA (passphrase-derived public-key authentication)
# proof: a signed Auth statement over a random 16-byte nonce, the
# hostname, the session, and the user. Calls back with
# (err, armored_sig).
exports.generate_pdpka = generate_pdpka = ({km, session, user, hostname}, cb) ->
  await kbpgp.rand.SRF().random_bytes 16, defer nonce
  arg =
    sig_eng : km.make_sig_eng()
    host : hostname
    user : local : user
  # nonce/session are optional fields on the Auth statement.
  arg.nonce = nonce if nonce?
  arg.session = session if session?
  eng = new proofs.Auth arg
  await eng.generate defer(err, sig), { dohash : true }
  cb err, sig?.armored
#=======================================================================================
# Build a user-identification object for login from whichever of
# uid / email / username was supplied. A value containing '@' is
# treated as an email, anything else as a username.
to_user = ({email_or_username, uid}) ->
  user = {}
  user.uid = uid if uid?
  if email_or_username?
    if email_or_username.indexOf('@') isnt -1
      user.email = email_or_username
    else
      user.username = email_or_username
  user
#=======================================================================================
| 90925 |
triplesec = require "triplesec"
kbpgp = require 'kbpgp'
WordArray = triplesec.WordArray
{KeyManager} = kbpgp
{make_esc} = require 'iced-error'
{xor_buffers} = require '../base/util'
{athrow} = require('iced-utils').util
proofs = require 'keybase-proofs'
#=======================================================================================
# There's historically been trouble with the slice() operator on Browserify's
# buffers, so just do the safe/slow/stupid thing.
myslice = (buf, s, e) ->
l = e - s
out = Buffer.alloc l
for i in [0...l]
out.writeUInt8(buf.readUInt8(i+s), i)
out
bufsplit = (buf, lens) ->
s = 0
ret = []
for l in lens
e = s+l
ret.push myslice(buf,s,e)
s = e
return ret
#=======================================================================================
#
# Class for Keybase account manipulation
#
exports.Account = class Account
#
# @param {Config} config A Config object that explains how we'll
# do things like contacting the keybase server.
#
constructor : ({@config}) ->
{C} = @config
@triplesec_version = @config.C.triplesec.version
@nacl = {}
@lks = {}
@extra_keymaterial = C.pwh.derived_key_bytes +
C.nacl.eddsa_secret_key_bytes +
C.nacl.dh_secret_key_bytes +
C.device.lks_client_half_bytes
@new_tsenc()
#---------------
new_tsenc : () ->
@enc = new triplesec.Encryptor { version : @triplesec_version }
#---------------
resalt : ({ salt, progress_hook }, cb) ->
await @enc.resalt { salt, @extra_keymaterial, progress_hook }, defer err, keys
throw err if err?
cb keys
#---------------
# Given a passphrase and salt, hash it using Scrypt with the
# standard V3 parameters. We're doig this as somewhat of a hack,
# allocating the keys needed for triplesec'ing, and then using the
# "extra" keys after that.
#
# @param {string} key A key as a utf8-string that's the passphrase
# @param {Buffer} salt The salt as a buffer of binary data
# @param {function} progress_hook A progress hook if we care....
# @param {callback} cb Call when completed with the passphrase hash. The PWH
# is presented as a hex-encoded string, suitable for sending over AJAX.
scrypt_hash_passphrase : ({key, salt, progress_hook, encoding}, cb) ->
key = new triplesec.Buffer key, 'utf8'
@enc.set_key key
{C} = @config
await @resalt { salt, progress_hook }, defer keys
km = keys.extra
[pwh, @nacl.eddsa, @nacl.dh, @lks.clienf_half ] = bufsplit km, [
C.pwh.derived_key_bytes,
C.nacl.eddsa_secret_key_bytes,
C.nacl.dh_secret_key_bytes,
C.device.lks_client_half_bytes
]
if encoding? then pwh = pwh.toString encoding
cb pwh, @nacl.eddsa
#---------------
fix_signup_bundle : (bundle, cb) ->
err = null
nb = triplesec.V[@triplesec_version].salt_size
await kbpgp.rand.SRF().random_bytes nb, defer salt
await @scrypt_hash_passphrase { key : bundle.pw, salt, encoding : 'hex' }, defer bundle.pwh, secret32_eddsa
await secret32_to_signing_kid { secret32 : secret32_eddsa }, defer err, bundle.pdpka5_kid
bundle.salt = salt.toString 'hex'
bundle.pwh_version = @triplesec_version
delete bundle.pw
cb err
#---------------
# used during password change.
#
# @param {string} pw Passphrase as a utf8-encoded string
# @param {Buffer} salt The raw binary salt as a buffer, returned from
# pw_to_login below, most likely.
# @param {callback} cb called with err, pwh, pwh_version
gen_new_pwh : ({pw, salt}, cb) ->
await @scrypt_hash_passphrase { key : pw, salt, encoding : 'hex' }, defer pwh
pwh_version = @triplesec_version
cb null, pwh, pwh_version
#---------------
# Convert a pw into a password hash.
#
# @param {String} pw the input passprhase
# @param {String} email_or_username the email or username to use in the salt lookup.
# @param {Callback} cb callback with a quad: <Error,Buffer,Int,Buffer>, containing
# an error (if one happened), a Buffer with the pwh, an int for what version,
# and a buffer with the salt.
pw_to_pwh : ({pw, email_or_username, uid}, cb) ->
esc = make_esc cb, "pw_to_pwh"
err = pwh = pwh_version = salt = null
params = { email_or_username, uid, pdpka_login : true }
await @config.request { method : "GET", endpoint : 'getsalt', params }, esc defer res
if err? then # noop
else if not ((got = res?.body?.pwh_version) is @triplesec_version)
err = new Error "Can only support PW hash version #{@triplesec_version}; got #{got} for #{@config.escape_user_content email_or_username}"
else
salt = new triplesec.Buffer res.body.salt, 'hex'
await @scrypt_hash_passphrase { salt, key : pw, encoding : null }, defer pwh, secret32_eddsa
await secret32_to_signing_km { secret32 : pwh }, esc defer pdpka4_km
await secret32_to_signing_km { secret32 : secret32_eddsa }, esc defer pdpka5_km
pwh_version = @triplesec_version
cb err, pwh, pwh_version, salt, res?.body?.login_session, pdpka4_km, pdpka5_km
#---------------
pw_to_login : ({pw, email_or_username, hostname, uid}, cb) ->
esc = make_esc cb, "pw_to_login"
login_session = hmac_pwh = null
await @pw_to_pwh { pw, email_or_username, uid }, esc defer pwh, pwh_version, salt, login_session_b64, pdpka4_km, pdpka5_km
await @pwh_to_login_params { hostname, login_session_b64, pwh, email_or_username, uid, pdpka5_km, pdpka4_km }, esc defer { login_session, hmac_pwh, pdpka4, pdpka5 }
cb null, login_session, hmac_pwh, salt, pdpka4, pdpka5
#---------------
pwh_to_login_params : ({hostname, login_session_b64, pwh, email_or_username, uid, pdpka4_km, pdpka5_km}, cb) ->
esc = make_esc cb, "pwh_to_login_params"
login_session = new triplesec.Buffer login_session_b64, 'base64'
# Make a new HMAC-SHA512'er, and the key is the output of the
hmac = new triplesec.HMAC(WordArray.from_buffer(pwh))
hmac_pwh = hmac.update(WordArray.from_buffer(login_session)).finalize().to_hex()
session = login_session = login_session_b64
user = to_user { email_or_username, uid }
await generate_pdpka { km : pdpka4_km, user, session, hostname }, esc defer pdpka4
await generate_pdpka { km : pdpka5_km, user, session, hostname }, esc defer pdpka5
cb null, { login_session, hmac_pwh, pdpka4, pdpka5 }
#---------------
_change_passphrase_compute_login : ({hostname, uid, login_session_b64, pwh, eddsa}, cb) ->
esc = make_esc cb, "_change_passphrase_compute_login"
await secret32_to_signing_km { secret32 : pwh }, esc defer pdpka4_km
await secret32_to_signing_km { secret32 : eddsa }, esc defer pdpka5_km
await @pwh_to_login_params { hostname, login_session_b64, uid, pwh, pdpka4_km, pdpka5_km }, esc defer { hmac_pwh, pdpka4, pdpka5 }
cb null, hmac_pwh, pdpka4, pdpka5
#---------------
get_public_pgp_key: (username, cb) ->
err = ret = null
fields = "public_keys"
await @config.request { endpoint : "user/lookup", params : {username, fields} }, defer err, res
unless err?
ret = res?.body?.them?.public_keys?.primary?.bundle
err = new Error "Cannot find a public key for '#{@config.escape_user_content username}'" unless ret?
cb err, ret
#---------------
get_devices : ({username}, cb) ->
err = ret = null
fields = "devices"
await @config.request { endpoint : "user/lookup", params : { username, fields } }, defer err, res
unless err?
ret = res?.body?.them?.devices
err = new Error "Cannot find devices for '#{@config.escape_user_content username}" unless ret?
cb err, ret
#---------------
get_public_pgp_keys : (username, cb) ->
err = ret = null
fields = "public_keys"
await @config.request { endpoint : "user/lookup", params : {username} }, defer err, res
unless err?
ret = res?.body?.them?.public_keys?.pgp_public_keys
err = new Error "Cannot find a public key for '#{@config.escape_user_content username}'" unless ret?.length
cb err, ret
#---------------
get_unlocked_private_primary_pgp_key : (pw, cb) ->
@get_unlocked_private_pgp_key { pw }, cb
#---------------
get_unlocked_private_pgp_key : ({pw, kid, no_check_keys, time_travel}, cb) ->
esc = make_esc (err) -> cb err, null
passphrase = new <PASSWORD>.Buffer pw
await @config.request { method : "GET", endpoint : "me" }, esc defer res
bundle = null
if kid?
for sk in res?.body?.me?.private_keys?.all when sk.kid is kid
break if (bundle = sk.bundle)?
else
bundle = res?.body?.me?.private_keys?.primary?.bundle
sk = err = null
if bundle?
tsenc = @get_tsenc_for_decryption { passphrase }
await KeyManager.import_from_p3skb { raw: bundle, no_check_keys, time_travel }, esc defer sk
await sk.unlock_p3skb { tsenc, no_check_keys, time_travel }, esc defer()
err = null
unless sk?
err = new Error "Failed to get and unlock your private key"
cb err, sk
#---------------
get_unlocked_private_pgp_keys : (pw, cb) ->
esc = make_esc cb, "get_unlocked_private_pgp_keys"
sks = []
passphrase = <PASSWORD>
tsenc = @get_tsenc_for_decryption { passphrase }
await @config.request { method : "GET", endpoint : "me" }, esc defer res
for sk in res?.body?.me?.private_keys?.all when (sk.type is @config.C.key.key_type.P3KSB_PRIVATE)
await KeyManager.import_from_p3skb { raw: sk.bundle }, esc defer sk
await sk.unlock_p3skb { tsenc : tsenc.clone() }, esc defer()
sks.push sk
cb err, sks
#---------------
export_my_private_key: ({kid, pw, no_check_keys, time_travel}, cb) ->
esc = make_esc cb, "export_my_private_key"
err = armored_private = null
passphrase = new triplesec.Buffer pw
await @get_unlocked_private_pgp_key { kid, pw, no_check_keys, time_travel }, esc defer sk
await sk.sign {}, esc defer()
await sk.export_pgp_private_to_client {passphrase}, esc defer armored_private
cb null, armored_private
#---------------
# Run passphrase stretching on the given salt/passphrase
# combination, without side-effects.
_change_passphrase_derive_passphrase_components : ( { tsenc, salt, passphrase}, cb) ->
esc = make_esc cb, "_change_passphrase_derive_passphrase_components"
key = Buffer.from passphrase, 'utf8'
{C} = @config
tsenc or= new triplesec.Encryptor { version : @triplesec_version }
tsenc.set_key key
await tsenc.resalt { @extra_keymaterial, salt }, esc defer keys
km = keys.extra
[pwh, eddsa, _, lks_client_half ] = bufsplit km, [
C.pwh.derived_key_bytes,
C.nacl.eddsa_secret_key_bytes,
C.nacl.dh_secret_key_bytes,
C.device.lks_client_half_bytes
]
cb null, { tsenc, pwh, lks_client_half, eddsa }
#---------------
_change_passphrase_encrypt_lks_client_half : ( { me, client_half }, cb) ->
ret = {}
esc = make_esc cb, "_change_passphrase_encrypt_lks_client_half"
for deviceid, {keys} of me.devices
for {kid,key_role} in keys when (key_role is @config.C.key.key_role.ENCRYPTION)
await kbpgp.ukm.import_armored_public { armored : kid }, esc defer km
await kbpgp.kb.box { encrypt_for : km, msg : client_half }, esc defer ret[kid]
cb null, ret
#---------------
_change_passphrase_reencrypt_pgp_private_keys : ( { me, old_ppc, new_ppc, exclude_kids }, cb ) ->
outputs = []
exclude_kids or= []
esc = make_esc cb, "_change_passphrase_reencrypt_pgp_private_key"
for {kid,bundle} in (me?.private_keys?.all or []) when not (kid in exclude_kids)
await KeyManager.import_from_p3skb { armored : bundle }, esc defer km
await km.unlock_p3skb { tsenc : old_ppc.tsenc.clone() }, esc defer()
{tsenc,passphrase_generation} = new_ppc
await km.export_private_to_server {tsenc, passphrase_generation}, esc defer output
outputs.push output
cb null, outputs
#---------------
_change_passphrase_compute_lks_mask : ( { old_ppc, new_ppc}, cb) ->
lks_mask = xor_buffers(old_ppc.lks_client_half, new_ppc.lks_client_half).toString('hex')
cb null, lks_mask
#---------------
# Reset the user's passphrase given then authentication link 'l'.
reset_passphrase : ( {new_pp, uid, hostname, l}, cb) ->
esc = make_esc cb, "reset_passphrase"
params = { uid }
await @config.request { method : "GET", endpoint : 'getsalt', params }, esc defer res
salt = new triplesec.Buffer res.body.salt, 'hex'
await @scrypt_hash_passphrase { salt, key : new_pp, encoding : null }, defer pwh, secret32_eddsa
await secret32_to_signing_kid { secret32 : pwh }, esc defer pdpka4_kid
await secret32_to_signing_kid { secret32 : secret32_eddsa }, esc defer pdpka5_kid
params = {
pdpka4_kid : pdpka4_kid
pdpka5_kid : pdpka5_kid
l : l
}
await @config.request { method : "POST", endpoint : "passphrase/reset", params }, esc defer res
cb null, res
#---------------
#
# Use v2 of the passphrase change system, which changes the LKS mask
# and also encrypts the LKS client half for all known encryption devices.
# .. In addition to reencrypting PGP private keys...
#
# @param {string} old_pp The old passphrase
# @param {string} new_pp The new passphrase
# @param {vec<string>} exclude_kids Don't reencrypt these KIDs or include
# them in the upload. Primarily useful for testing
# @param {callback<error>} cb Callback, will fire with an Error
# if the update didn't work.
#
change_passphrase : ( {old_pp, new_pp, exclude_kids, hostname}, cb) ->
old_ppc = new_ppc = null
esc = make_esc cb, "change_passphrase"
params = { make_login_session : 1 }
await @config.request { method : "GET", endpoint : "me", params }, esc defer res
unless (me = res?.body?.me)? and (login_session_b64 = res?.body?.login_session)?
await athrow (new Error "Cannot load 'me' from server"), esc defer()
salt = Buffer.from me.basics.salt, 'hex'
await @_change_passphrase_derive_passphrase_components { tsenc : @enc, salt, passphrase : <PASSWORD> }, esc defer old_ppc
await @_change_passphrase_derive_passphrase_components { salt, passphrase : <PASSWORD> }, esc defer new_ppc
await @_change_passphrase_compute_login { uid : me.id, login_session_b64, pwh : old_ppc.pwh, eddsa : old_ppc.eddsa, hostname }, esc defer hmac_pwh, old_pdpka4, old_pdpka5
old_ppc.passphrase_generation = me.basics.passphrase_generation
new_ppc.passphrase_generation = old_ppc.passphrase_generation + 1
await @_change_passphrase_encrypt_lks_client_half { me, client_half : new_ppc.lks_client_half }, esc defer lksch
await @_change_passphrase_reencrypt_pgp_private_keys { me, old_ppc, new_ppc, exclude_kids}, esc defer private_keys
await @_change_passphrase_compute_lks_mask { old_ppc, new_ppc }, esc defer lks_mask
await secret32_to_signing_kid { secret32 : new_ppc.eddsa }, esc defer pdpka5_kid
params = {
hmac_pwh : hmac_pwh
old_pdpka4 : old_pdpka4
old_pdpka5 : old_pdpka5
pdpka5_kid : pdpka5_kid
login_session : login_session_b64
pwh : new_ppc.pwh.toString('hex'),
pwh_version : @triplesec_version,
ppgen : old_ppc.passphrase_generation,
lks_mask,
lks_client_halves : JSON.stringify(lksch),
private_keys
}
await @config.request { method : "POST", endpoint : "passphrase/replace", params }, esc defer res
# Now reset our internal triplesec to the new one.
@enc = new_ppc.tsenc
cb null, new_ppc
#---------------
# @param {Buffer} passphrase
get_tsenc_for_decryption : ({passphrase}) ->
@enc.set_key passphrase
@enc
#---------------
gen_nacl_eddsa_key : (params, cb) ->
gen = kbpgp.kb.KeyManager.generate
await gen { seed : @nacl.eddsa, split : true }, defer err, km
cb err, km
#---------------
gen_nacl_dh_key : (params, cb) ->
gen = kbpgp.kb.EncKeyManager.generate
await gen { seed : @nacl.dh, split : true }, defer err, km
cb err, km
#=======================================================================================
exports.secret32_to_signing_km = secret32_to_signing_km = ({secret32}, cb) ->
ret = err = null
await kbpgp.kb.KeyManager.generate { seed : secret32, split : false }, defer err, km
cb err, km
#=======================================================================================
exports.secret32_to_signing_kid = secret32_to_signing_kid = ({secret32}, cb) ->
ret = err = null
await secret32_to_signing_km { secret32 }, defer err, km
unless err?
await km.export_public {}, defer err, kid
cb err, kid, km
#=======================================================================================
exports.generate_pdpka = generate_pdpka = ({km, session, user, hostname}, cb) ->
await kbpgp.rand.SRF().random_bytes 16, defer nonce
arg =
sig_eng : km.make_sig_eng()
host : hostname
user : local : user
arg.nonce = nonce if nonce?
arg.session = session if session?
eng = new proofs.Auth arg
await eng.generate defer(err, sig), { dohash : true }
cb err, sig?.armored
#=======================================================================================
to_user = ({email_or_username, uid}) ->
user = {}
if uid? then user.uid = uid
if email_or_username?
if email_or_username.indexOf('@') >= 0 then user.email = email_or_username
else user.username = email_or_username
user
#=======================================================================================
| true |
triplesec = require "triplesec"
kbpgp = require 'kbpgp'
WordArray = triplesec.WordArray
{KeyManager} = kbpgp
{make_esc} = require 'iced-error'
{xor_buffers} = require '../base/util'
{athrow} = require('iced-utils').util
proofs = require 'keybase-proofs'
#=======================================================================================
# There's historically been trouble with the slice() operator on Browserify's
# buffers, so just do the safe/slow/stupid thing.
myslice = (buf, s, e) ->
l = e - s
out = Buffer.alloc l
for i in [0...l]
out.writeUInt8(buf.readUInt8(i+s), i)
out
bufsplit = (buf, lens) ->
s = 0
ret = []
for l in lens
e = s+l
ret.push myslice(buf,s,e)
s = e
return ret
#=======================================================================================
#
# Class for Keybase account manipulation
#
exports.Account = class Account
#
# @param {Config} config A Config object that explains how we'll
# do things like contacting the keybase server.
#
constructor : ({@config}) ->
{C} = @config
@triplesec_version = @config.C.triplesec.version
@nacl = {}
@lks = {}
@extra_keymaterial = C.pwh.derived_key_bytes +
C.nacl.eddsa_secret_key_bytes +
C.nacl.dh_secret_key_bytes +
C.device.lks_client_half_bytes
@new_tsenc()
#---------------
new_tsenc : () ->
@enc = new triplesec.Encryptor { version : @triplesec_version }
#---------------
resalt : ({ salt, progress_hook }, cb) ->
await @enc.resalt { salt, @extra_keymaterial, progress_hook }, defer err, keys
throw err if err?
cb keys
#---------------
# Given a passphrase and salt, hash it using Scrypt with the
# standard V3 parameters. We're doig this as somewhat of a hack,
# allocating the keys needed for triplesec'ing, and then using the
# "extra" keys after that.
#
# @param {string} key A key as a utf8-string that's the passphrase
# @param {Buffer} salt The salt as a buffer of binary data
# @param {function} progress_hook A progress hook if we care....
# @param {callback} cb Call when completed with the passphrase hash. The PWH
# is presented as a hex-encoded string, suitable for sending over AJAX.
scrypt_hash_passphrase : ({key, salt, progress_hook, encoding}, cb) ->
key = new triplesec.Buffer key, 'utf8'
@enc.set_key key
{C} = @config
await @resalt { salt, progress_hook }, defer keys
km = keys.extra
[pwh, @nacl.eddsa, @nacl.dh, @lks.clienf_half ] = bufsplit km, [
C.pwh.derived_key_bytes,
C.nacl.eddsa_secret_key_bytes,
C.nacl.dh_secret_key_bytes,
C.device.lks_client_half_bytes
]
if encoding? then pwh = pwh.toString encoding
cb pwh, @nacl.eddsa
#---------------
fix_signup_bundle : (bundle, cb) ->
err = null
nb = triplesec.V[@triplesec_version].salt_size
await kbpgp.rand.SRF().random_bytes nb, defer salt
await @scrypt_hash_passphrase { key : bundle.pw, salt, encoding : 'hex' }, defer bundle.pwh, secret32_eddsa
await secret32_to_signing_kid { secret32 : secret32_eddsa }, defer err, bundle.pdpka5_kid
bundle.salt = salt.toString 'hex'
bundle.pwh_version = @triplesec_version
delete bundle.pw
cb err
#---------------
# used during password change.
#
# @param {string} pw Passphrase as a utf8-encoded string
# @param {Buffer} salt The raw binary salt as a buffer, returned from
# pw_to_login below, most likely.
# @param {callback} cb called with err, pwh, pwh_version
gen_new_pwh : ({pw, salt}, cb) ->
await @scrypt_hash_passphrase { key : pw, salt, encoding : 'hex' }, defer pwh
pwh_version = @triplesec_version
cb null, pwh, pwh_version
#---------------
# Convert a pw into a password hash.
#
# @param {String} pw the input passprhase
# @param {String} email_or_username the email or username to use in the salt lookup.
# @param {Callback} cb callback with a quad: <Error,Buffer,Int,Buffer>, containing
# an error (if one happened), a Buffer with the pwh, an int for what version,
# and a buffer with the salt.
pw_to_pwh : ({pw, email_or_username, uid}, cb) ->
esc = make_esc cb, "pw_to_pwh"
err = pwh = pwh_version = salt = null
params = { email_or_username, uid, pdpka_login : true }
await @config.request { method : "GET", endpoint : 'getsalt', params }, esc defer res
if err? then # noop
else if not ((got = res?.body?.pwh_version) is @triplesec_version)
err = new Error "Can only support PW hash version #{@triplesec_version}; got #{got} for #{@config.escape_user_content email_or_username}"
else
salt = new triplesec.Buffer res.body.salt, 'hex'
await @scrypt_hash_passphrase { salt, key : pw, encoding : null }, defer pwh, secret32_eddsa
await secret32_to_signing_km { secret32 : pwh }, esc defer pdpka4_km
await secret32_to_signing_km { secret32 : secret32_eddsa }, esc defer pdpka5_km
pwh_version = @triplesec_version
cb err, pwh, pwh_version, salt, res?.body?.login_session, pdpka4_km, pdpka5_km
#---------------
pw_to_login : ({pw, email_or_username, hostname, uid}, cb) ->
esc = make_esc cb, "pw_to_login"
login_session = hmac_pwh = null
await @pw_to_pwh { pw, email_or_username, uid }, esc defer pwh, pwh_version, salt, login_session_b64, pdpka4_km, pdpka5_km
await @pwh_to_login_params { hostname, login_session_b64, pwh, email_or_username, uid, pdpka5_km, pdpka4_km }, esc defer { login_session, hmac_pwh, pdpka4, pdpka5 }
cb null, login_session, hmac_pwh, salt, pdpka4, pdpka5
#---------------
pwh_to_login_params : ({hostname, login_session_b64, pwh, email_or_username, uid, pdpka4_km, pdpka5_km}, cb) ->
esc = make_esc cb, "pwh_to_login_params"
login_session = new triplesec.Buffer login_session_b64, 'base64'
# Make a new HMAC-SHA512'er, and the key is the output of the
hmac = new triplesec.HMAC(WordArray.from_buffer(pwh))
hmac_pwh = hmac.update(WordArray.from_buffer(login_session)).finalize().to_hex()
session = login_session = login_session_b64
user = to_user { email_or_username, uid }
await generate_pdpka { km : pdpka4_km, user, session, hostname }, esc defer pdpka4
await generate_pdpka { km : pdpka5_km, user, session, hostname }, esc defer pdpka5
cb null, { login_session, hmac_pwh, pdpka4, pdpka5 }
#---------------
_change_passphrase_compute_login : ({hostname, uid, login_session_b64, pwh, eddsa}, cb) ->
esc = make_esc cb, "_change_passphrase_compute_login"
await secret32_to_signing_km { secret32 : pwh }, esc defer pdpka4_km
await secret32_to_signing_km { secret32 : eddsa }, esc defer pdpka5_km
await @pwh_to_login_params { hostname, login_session_b64, uid, pwh, pdpka4_km, pdpka5_km }, esc defer { hmac_pwh, pdpka4, pdpka5 }
cb null, hmac_pwh, pdpka4, pdpka5
#---------------
get_public_pgp_key: (username, cb) ->
err = ret = null
fields = "public_keys"
await @config.request { endpoint : "user/lookup", params : {username, fields} }, defer err, res
unless err?
ret = res?.body?.them?.public_keys?.primary?.bundle
err = new Error "Cannot find a public key for '#{@config.escape_user_content username}'" unless ret?
cb err, ret
#---------------
get_devices : ({username}, cb) ->
err = ret = null
fields = "devices"
await @config.request { endpoint : "user/lookup", params : { username, fields } }, defer err, res
unless err?
ret = res?.body?.them?.devices
err = new Error "Cannot find devices for '#{@config.escape_user_content username}" unless ret?
cb err, ret
#---------------
get_public_pgp_keys : (username, cb) ->
err = ret = null
fields = "public_keys"
await @config.request { endpoint : "user/lookup", params : {username} }, defer err, res
unless err?
ret = res?.body?.them?.public_keys?.pgp_public_keys
err = new Error "Cannot find a public key for '#{@config.escape_user_content username}'" unless ret?.length
cb err, ret
#---------------
get_unlocked_private_primary_pgp_key : (pw, cb) ->
@get_unlocked_private_pgp_key { pw }, cb
#---------------
get_unlocked_private_pgp_key : ({pw, kid, no_check_keys, time_travel}, cb) ->
esc = make_esc (err) -> cb err, null
passphrase = new PI:PASSWORD:<PASSWORD>END_PI.Buffer pw
await @config.request { method : "GET", endpoint : "me" }, esc defer res
bundle = null
if kid?
for sk in res?.body?.me?.private_keys?.all when sk.kid is kid
break if (bundle = sk.bundle)?
else
bundle = res?.body?.me?.private_keys?.primary?.bundle
sk = err = null
if bundle?
tsenc = @get_tsenc_for_decryption { passphrase }
await KeyManager.import_from_p3skb { raw: bundle, no_check_keys, time_travel }, esc defer sk
await sk.unlock_p3skb { tsenc, no_check_keys, time_travel }, esc defer()
err = null
unless sk?
err = new Error "Failed to get and unlock your private key"
cb err, sk
#---------------
get_unlocked_private_pgp_keys : (pw, cb) ->
esc = make_esc cb, "get_unlocked_private_pgp_keys"
sks = []
passphrase = PI:PASSWORD:<PASSWORD>END_PI
tsenc = @get_tsenc_for_decryption { passphrase }
await @config.request { method : "GET", endpoint : "me" }, esc defer res
for sk in res?.body?.me?.private_keys?.all when (sk.type is @config.C.key.key_type.P3KSB_PRIVATE)
await KeyManager.import_from_p3skb { raw: sk.bundle }, esc defer sk
await sk.unlock_p3skb { tsenc : tsenc.clone() }, esc defer()
sks.push sk
cb err, sks
#---------------
export_my_private_key: ({kid, pw, no_check_keys, time_travel}, cb) ->
esc = make_esc cb, "export_my_private_key"
err = armored_private = null
passphrase = new triplesec.Buffer pw
await @get_unlocked_private_pgp_key { kid, pw, no_check_keys, time_travel }, esc defer sk
await sk.sign {}, esc defer()
await sk.export_pgp_private_to_client {passphrase}, esc defer armored_private
cb null, armored_private
#---------------
# Run passphrase stretching on the given salt/passphrase
# combination, without side-effects.
_change_passphrase_derive_passphrase_components : ( { tsenc, salt, passphrase}, cb) ->
esc = make_esc cb, "_change_passphrase_derive_passphrase_components"
key = Buffer.from passphrase, 'utf8'
{C} = @config
tsenc or= new triplesec.Encryptor { version : @triplesec_version }
tsenc.set_key key
await tsenc.resalt { @extra_keymaterial, salt }, esc defer keys
km = keys.extra
[pwh, eddsa, _, lks_client_half ] = bufsplit km, [
C.pwh.derived_key_bytes,
C.nacl.eddsa_secret_key_bytes,
C.nacl.dh_secret_key_bytes,
C.device.lks_client_half_bytes
]
cb null, { tsenc, pwh, lks_client_half, eddsa }
#---------------
_change_passphrase_encrypt_lks_client_half : ( { me, client_half }, cb) ->
ret = {}
esc = make_esc cb, "_change_passphrase_encrypt_lks_client_half"
for deviceid, {keys} of me.devices
for {kid,key_role} in keys when (key_role is @config.C.key.key_role.ENCRYPTION)
await kbpgp.ukm.import_armored_public { armored : kid }, esc defer km
await kbpgp.kb.box { encrypt_for : km, msg : client_half }, esc defer ret[kid]
cb null, ret
#---------------
_change_passphrase_reencrypt_pgp_private_keys : ( { me, old_ppc, new_ppc, exclude_kids }, cb ) ->
outputs = []
exclude_kids or= []
esc = make_esc cb, "_change_passphrase_reencrypt_pgp_private_key"
for {kid,bundle} in (me?.private_keys?.all or []) when not (kid in exclude_kids)
await KeyManager.import_from_p3skb { armored : bundle }, esc defer km
await km.unlock_p3skb { tsenc : old_ppc.tsenc.clone() }, esc defer()
{tsenc,passphrase_generation} = new_ppc
await km.export_private_to_server {tsenc, passphrase_generation}, esc defer output
outputs.push output
cb null, outputs
#---------------
_change_passphrase_compute_lks_mask : ( { old_ppc, new_ppc}, cb) ->
lks_mask = xor_buffers(old_ppc.lks_client_half, new_ppc.lks_client_half).toString('hex')
cb null, lks_mask
#---------------
# Reset the user's passphrase given then authentication link 'l'.
reset_passphrase : ( {new_pp, uid, hostname, l}, cb) ->
esc = make_esc cb, "reset_passphrase"
params = { uid }
await @config.request { method : "GET", endpoint : 'getsalt', params }, esc defer res
salt = new triplesec.Buffer res.body.salt, 'hex'
await @scrypt_hash_passphrase { salt, key : new_pp, encoding : null }, defer pwh, secret32_eddsa
await secret32_to_signing_kid { secret32 : pwh }, esc defer pdpka4_kid
await secret32_to_signing_kid { secret32 : secret32_eddsa }, esc defer pdpka5_kid
params = {
pdpka4_kid : pdpka4_kid
pdpka5_kid : pdpka5_kid
l : l
}
await @config.request { method : "POST", endpoint : "passphrase/reset", params }, esc defer res
cb null, res
#---------------
#
# Use v2 of the passphrase change system, which changes the LKS mask
# and also encrypts the LKS client half for all known encryption devices.
# .. In addition to reencrypting PGP private keys...
#
# @param {string} old_pp The old passphrase
# @param {string} new_pp The new passphrase
# @param {vec<string>} exclude_kids Don't reencrypt these KIDs or include
# them in the upload. Primarily useful for testing
# @param {callback<error>} cb Callback, will fire with an Error
# if the update didn't work.
#
change_passphrase : ( {old_pp, new_pp, exclude_kids, hostname}, cb) ->
old_ppc = new_ppc = null
esc = make_esc cb, "change_passphrase"
params = { make_login_session : 1 }
await @config.request { method : "GET", endpoint : "me", params }, esc defer res
unless (me = res?.body?.me)? and (login_session_b64 = res?.body?.login_session)?
await athrow (new Error "Cannot load 'me' from server"), esc defer()
salt = Buffer.from me.basics.salt, 'hex'
await @_change_passphrase_derive_passphrase_components { tsenc : @enc, salt, passphrase : PI:PASSWORD:<PASSWORD>END_PI }, esc defer old_ppc
await @_change_passphrase_derive_passphrase_components { salt, passphrase : PI:PASSWORD:<PASSWORD>END_PI }, esc defer new_ppc
await @_change_passphrase_compute_login { uid : me.id, login_session_b64, pwh : old_ppc.pwh, eddsa : old_ppc.eddsa, hostname }, esc defer hmac_pwh, old_pdpka4, old_pdpka5
old_ppc.passphrase_generation = me.basics.passphrase_generation
new_ppc.passphrase_generation = old_ppc.passphrase_generation + 1
await @_change_passphrase_encrypt_lks_client_half { me, client_half : new_ppc.lks_client_half }, esc defer lksch
await @_change_passphrase_reencrypt_pgp_private_keys { me, old_ppc, new_ppc, exclude_kids}, esc defer private_keys
await @_change_passphrase_compute_lks_mask { old_ppc, new_ppc }, esc defer lks_mask
await secret32_to_signing_kid { secret32 : new_ppc.eddsa }, esc defer pdpka5_kid
params = {
hmac_pwh : hmac_pwh
old_pdpka4 : old_pdpka4
old_pdpka5 : old_pdpka5
pdpka5_kid : pdpka5_kid
login_session : login_session_b64
pwh : new_ppc.pwh.toString('hex'),
pwh_version : @triplesec_version,
ppgen : old_ppc.passphrase_generation,
lks_mask,
lks_client_halves : JSON.stringify(lksch),
private_keys
}
await @config.request { method : "POST", endpoint : "passphrase/replace", params }, esc defer res
# Now reset our internal triplesec to the new one.
@enc = new_ppc.tsenc
cb null, new_ppc
#---------------
# @param {Buffer} passphrase
get_tsenc_for_decryption : ({passphrase}) ->
@enc.set_key passphrase
@enc
#---------------
gen_nacl_eddsa_key : (params, cb) ->
gen = kbpgp.kb.KeyManager.generate
await gen { seed : @nacl.eddsa, split : true }, defer err, km
cb err, km
#---------------
gen_nacl_dh_key : (params, cb) ->
gen = kbpgp.kb.EncKeyManager.generate
await gen { seed : @nacl.dh, split : true }, defer err, km
cb err, km
#=======================================================================================
exports.secret32_to_signing_km = secret32_to_signing_km = ({secret32}, cb) ->
ret = err = null
await kbpgp.kb.KeyManager.generate { seed : secret32, split : false }, defer err, km
cb err, km
#=======================================================================================
exports.secret32_to_signing_kid = secret32_to_signing_kid = ({secret32}, cb) ->
ret = err = null
await secret32_to_signing_km { secret32 }, defer err, km
unless err?
await km.export_public {}, defer err, kid
cb err, kid, km
#=======================================================================================
exports.generate_pdpka = generate_pdpka = ({km, session, user, hostname}, cb) ->
await kbpgp.rand.SRF().random_bytes 16, defer nonce
arg =
sig_eng : km.make_sig_eng()
host : hostname
user : local : user
arg.nonce = nonce if nonce?
arg.session = session if session?
eng = new proofs.Auth arg
await eng.generate defer(err, sig), { dohash : true }
cb err, sig?.armored
#=======================================================================================
to_user = ({email_or_username, uid}) ->
user = {}
if uid? then user.uid = uid
if email_or_username?
if email_or_username.indexOf('@') >= 0 then user.email = email_or_username
else user.username = email_or_username
user
#=======================================================================================
|
[
{
"context": "###\n copyright (c) 2014 Jess Austin <jess.austin@gmail.com>, MIT license\n\n jade-var-m",
"end": 35,
"score": 0.9997972249984741,
"start": 24,
"tag": "NAME",
"value": "Jess Austin"
},
{
"context": "###\n copyright (c) 2014 Jess Austin <jess.austin@gmail.com>, MIT licen... | jade-var-matter.coffee | jessaustin/jade-var-matter | 1 | ###
copyright (c) 2014 Jess Austin <jess.austin@gmail.com>, MIT license
jade-var-matter reads a Jade file, and parses the "unbuffered" code that
defines javascript `var`s.
###
{runInNewContext} = require 'vm'
regex = /(?:^|[\n;]) *- *(var [^\n;]*)[;\n]/g
module.exports = (jadeString='') ->
matches = (while arr = regex.exec jadeString then arr[1]).join ';\n'
runInNewContext matches, sandbox = {}
sandbox
| 101006 | ###
copyright (c) 2014 <NAME> <<EMAIL>>, MIT license
jade-var-matter reads a Jade file, and parses the "unbuffered" code that
defines javascript `var`s.
###
{runInNewContext} = require 'vm'
regex = /(?:^|[\n;]) *- *(var [^\n;]*)[;\n]/g
module.exports = (jadeString='') ->
matches = (while arr = regex.exec jadeString then arr[1]).join ';\n'
runInNewContext matches, sandbox = {}
sandbox
| true | ###
copyright (c) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>, MIT license
jade-var-matter reads a Jade file, and parses the "unbuffered" code that
defines javascript `var`s.
###
{runInNewContext} = require 'vm'
regex = /(?:^|[\n;]) *- *(var [^\n;]*)[;\n]/g
module.exports = (jadeString='') ->
matches = (while arr = regex.exec jadeString then arr[1]).join ';\n'
runInNewContext matches, sandbox = {}
sandbox
|
[
{
"context": "IENT_ID: 'e750db60ac506978fc70'\n CLIENT_SECRET: '3a33d2085cbd1176153f99781bbce7c6'\n COOKIE_DOMAIN: null\n CRITEO_ARTWORKS_ACCOUNT_",
"end": 619,
"score": 0.9985117316246033,
"start": 587,
"tag": "KEY",
"value": "3a33d2085cbd1176153f99781bbce7c6"
},
{
"context": " ... | src/mobile/config.coffee | kanaabe/force | 0 | #
# Using ["The Twelve-Factor App"](http://12factor.net/) as a reference
# all environment configuration will live in environment variables.
# This file simply lays out all of those environment variables with
# sensible defaults for development.
#
module.exports =
API_REQUEST_TIMEOUT: 5000
API_URL: 'http://localhost:3000'
APPLICATION_NAME: 'microgravity-staging'
APP_URL: 'http://localhost:3003'
ARTSY_EDITORIAL_CHANNEL: '5759e3efb5989e6f98f77993'
CALENDAR_URL: 'http://localhost:3003' # For redirecting to .ics files
CLIENT_ID: 'e750db60ac506978fc70'
CLIENT_SECRET: '3a33d2085cbd1176153f99781bbce7c6'
COOKIE_DOMAIN: null
CRITEO_ARTWORKS_ACCOUNT_NUMBER: 35250
CRITEO_AUCTIONS_ACCOUNT_NUMBER: 28539
DEFAULT_CACHE_TIME: 3600
DISABLE_IMAGE_PROXY: false
EDITORIAL_ADMINS: 'craig,halley,marina,casey,molly,cab,charles'
EMAIL_SIGNUP_IMAGES_ID: '572a7996b5989e6f98f77992'
EMBEDLY_KEY: null
EUROPA_URL: 'http://europa-production.herokuapp.com'
FACEBOOK_ID: null
FACEBOOK_SECRET: null
FAIR_CHANNEL_ID: '5759e4f3b5989e6f98f77998'
GALLERY_INSIGHTS_LIST: '95ac2900c4'
GEMINI_CLOUDFRONT_URL: 'https://d7hftxdivxxvm.cloudfront.net'
GOOGLE_ANALYTICS_ID: 'UA-12450662-6'
GOOGLE_MAPS_API_KEY: null
IMAGE_PROXY: 'GEMINI'
MAILCHIMP_AUCTION_LIST_ID: 'b7b9959ee0'
MAILCHIMP_KEY: null
MOBILE_MARKETING_SIGNUP_MODALS: '[{"slug":"ca1","copy":"An art collection for every budget","image":"http://files.artsy.net/images/modal-collect-art.jpg"},{"slug":"ca2","copy":"Buy art from the best galleries and auction houses","image":"http://files.artsy.net/images/modal-collect-art.jpg"},{"slug":"ca3","copy":"Discover and Buy Works from Seattle Art Fair 2017","image":"http://files.artsy.net/images/seattle-art-fair-modal.jpg","photoCredit":"Sarah Cain, waves, 2016; Courtesy of the artist and Galerie Lelong & Co., New York"}]'
MAX_POLLS_FOR_MAX_BIDS: 20
MAX_SOCKETS: -1
METAPHYSICS_ENDPOINT: 'https://metaphysics-production.artsy.net'
MIXPANEL_ID: null
NODE_ENV: 'development'
OPENREDIS_URL: null
PORT: 3003
POSITRON_URL: 'http://writer.artsy.net'
PREDICTION_URL: 'https://live.artsy.net'
S3_BUCKET: null
S3_KEY: null
S3_SECRET: null
SAILTHRU_KEY: ''
SAILTHRU_MASTER_LIST: 'Master List'
SAILTHRU_SECRET: ''
SECURE_IMAGES_URL: 'https://d1ycxz9plii3tb.cloudfront.net'
SEGMENT_WRITE_KEY: null
SEGMENT_WRITE_KEY_SERVER: null
SENTRY_PRIVATE_DSN: null
SENTRY_PUBLIC_DSN: null
SESSION_COOKIE_KEY: 'microgravity-sess'
SESSION_COOKIE_MAX_AGE: 31536000000
SESSION_SECRET: 'artsyoss'
SHOW_ANALYTICS_CALLS: false
STRIPE_PUBLISHABLE_KEY: null
TARGET_CAMPAIGN_URL: '/seattle-art-fair-2017'
TRACK_PAGELOAD_PATHS: null
TWITTER_CONSUMER_KEY: null
TWITTER_CONSUMER_SECRET: null
VENICE_2015_SECTION: null
VOLLEY_ENDPOINT: null
# Override any values with env variables if they exist.
# You can set JSON-y values for env variables as well such as "true" or
# "['foo']" and config will attempt to JSON.parse them into non-string types.
for key, val of module.exports
val = (process.env[key] or val)
module.exports[key] = try JSON.parse(val) catch then val
# Warn if this file is included client-side
alert("WARNING: Do not require config.coffee, please require('sharify').data instead.") if window?
| 64366 | #
# Using ["The Twelve-Factor App"](http://12factor.net/) as a reference
# all environment configuration will live in environment variables.
# This file simply lays out all of those environment variables with
# sensible defaults for development.
#
module.exports =
API_REQUEST_TIMEOUT: 5000
API_URL: 'http://localhost:3000'
APPLICATION_NAME: 'microgravity-staging'
APP_URL: 'http://localhost:3003'
ARTSY_EDITORIAL_CHANNEL: '5759e3efb5989e6f98f77993'
CALENDAR_URL: 'http://localhost:3003' # For redirecting to .ics files
CLIENT_ID: 'e750db60ac506978fc70'
CLIENT_SECRET: '<KEY>'
COOKIE_DOMAIN: null
CRITEO_ARTWORKS_ACCOUNT_NUMBER: 35250
CRITEO_AUCTIONS_ACCOUNT_NUMBER: 28539
DEFAULT_CACHE_TIME: 3600
DISABLE_IMAGE_PROXY: false
EDITORIAL_ADMINS: '<NAME>,<NAME>,<NAME>,<NAME>,<NAME>,<NAME>,<NAME>'
EMAIL_SIGNUP_IMAGES_ID: '572a7996b5989e6f98f77992'
EMBEDLY_KEY: null
EUROPA_URL: 'http://europa-production.herokuapp.com'
FACEBOOK_ID: null
FACEBOOK_SECRET: null
FAIR_CHANNEL_ID: '5759e4f3b5989e6f98f77998'
GALLERY_INSIGHTS_LIST: '95ac2900c4'
GEMINI_CLOUDFRONT_URL: 'https://d7hftxdivxxvm.cloudfront.net'
GOOGLE_ANALYTICS_ID: 'UA-12450<KEY>'
GOOGLE_MAPS_API_KEY: null
IMAGE_PROXY: 'GEMINI'
MAILCHIMP_AUCTION_LIST_ID: 'b7b9959ee0'
MAILCHIMP_KEY: null
MOBILE_MARKETING_SIGNUP_MODALS: '[{"slug":"ca1","copy":"An art collection for every budget","image":"http://files.artsy.net/images/modal-collect-art.jpg"},{"slug":"ca2","copy":"Buy art from the best galleries and auction houses","image":"http://files.artsy.net/images/modal-collect-art.jpg"},{"slug":"ca3","copy":"Discover and Buy Works from Seattle Art Fair 2017","image":"http://files.artsy.net/images/seattle-art-fair-modal.jpg","photoCredit":"<NAME>, waves, 2016; Courtesy of the artist and Galerie Lelong & Co., New York"}]'
MAX_POLLS_FOR_MAX_BIDS: 20
MAX_SOCKETS: -1
METAPHYSICS_ENDPOINT: 'https://metaphysics-production.artsy.net'
MIXPANEL_ID: null
NODE_ENV: 'development'
OPENREDIS_URL: null
PORT: 3003
POSITRON_URL: 'http://writer.artsy.net'
PREDICTION_URL: 'https://live.artsy.net'
S3_BUCKET: null
S3_KEY: null
S3_SECRET: null
SAILTHRU_KEY: ''
SAILTHRU_MASTER_LIST: 'Master List'
SAILTHRU_SECRET: ''
SECURE_IMAGES_URL: 'https://d1ycxz9plii3tb.cloudfront.net'
SEGMENT_WRITE_KEY: null
SEGMENT_WRITE_KEY_SERVER: null
SENTRY_PRIVATE_DSN: null
SENTRY_PUBLIC_DSN: null
SESSION_COOKIE_KEY: '<KEY>'
SESSION_COOKIE_MAX_AGE: 31536000000
SESSION_SECRET: '<KEY>syoss'
SHOW_ANALYTICS_CALLS: false
STRIPE_PUBLISHABLE_KEY: null
TARGET_CAMPAIGN_URL: '/seattle-art-fair-2017'
TRACK_PAGELOAD_PATHS: null
TWITTER_CONSUMER_KEY: null
TWITTER_CONSUMER_SECRET: null
VENICE_2015_SECTION: null
VOLLEY_ENDPOINT: null
# Override any values with env variables if they exist.
# You can set JSON-y values for env variables as well such as "true" or
# "['foo']" and config will attempt to JSON.parse them into non-string types.
for key, val of module.exports
val = (process.env[key] or val)
module.exports[key] = try JSON.parse(val) catch then val
# Warn if this file is included client-side
alert("WARNING: Do not require config.coffee, please require('sharify').data instead.") if window?
| true | #
# Using ["The Twelve-Factor App"](http://12factor.net/) as a reference
# all environment configuration will live in environment variables.
# This file simply lays out all of those environment variables with
# sensible defaults for development.
#
module.exports =
API_REQUEST_TIMEOUT: 5000
API_URL: 'http://localhost:3000'
APPLICATION_NAME: 'microgravity-staging'
APP_URL: 'http://localhost:3003'
ARTSY_EDITORIAL_CHANNEL: '5759e3efb5989e6f98f77993'
CALENDAR_URL: 'http://localhost:3003' # For redirecting to .ics files
CLIENT_ID: 'e750db60ac506978fc70'
CLIENT_SECRET: 'PI:KEY:<KEY>END_PI'
COOKIE_DOMAIN: null
CRITEO_ARTWORKS_ACCOUNT_NUMBER: 35250
CRITEO_AUCTIONS_ACCOUNT_NUMBER: 28539
DEFAULT_CACHE_TIME: 3600
DISABLE_IMAGE_PROXY: false
EDITORIAL_ADMINS: 'PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI'
EMAIL_SIGNUP_IMAGES_ID: '572a7996b5989e6f98f77992'
EMBEDLY_KEY: null
EUROPA_URL: 'http://europa-production.herokuapp.com'
FACEBOOK_ID: null
FACEBOOK_SECRET: null
FAIR_CHANNEL_ID: '5759e4f3b5989e6f98f77998'
GALLERY_INSIGHTS_LIST: '95ac2900c4'
GEMINI_CLOUDFRONT_URL: 'https://d7hftxdivxxvm.cloudfront.net'
GOOGLE_ANALYTICS_ID: 'UA-12450PI:KEY:<KEY>END_PI'
GOOGLE_MAPS_API_KEY: null
IMAGE_PROXY: 'GEMINI'
MAILCHIMP_AUCTION_LIST_ID: 'b7b9959ee0'
MAILCHIMP_KEY: null
MOBILE_MARKETING_SIGNUP_MODALS: '[{"slug":"ca1","copy":"An art collection for every budget","image":"http://files.artsy.net/images/modal-collect-art.jpg"},{"slug":"ca2","copy":"Buy art from the best galleries and auction houses","image":"http://files.artsy.net/images/modal-collect-art.jpg"},{"slug":"ca3","copy":"Discover and Buy Works from Seattle Art Fair 2017","image":"http://files.artsy.net/images/seattle-art-fair-modal.jpg","photoCredit":"PI:NAME:<NAME>END_PI, waves, 2016; Courtesy of the artist and Galerie Lelong & Co., New York"}]'
MAX_POLLS_FOR_MAX_BIDS: 20
MAX_SOCKETS: -1
METAPHYSICS_ENDPOINT: 'https://metaphysics-production.artsy.net'
MIXPANEL_ID: null
NODE_ENV: 'development'
OPENREDIS_URL: null
PORT: 3003
POSITRON_URL: 'http://writer.artsy.net'
PREDICTION_URL: 'https://live.artsy.net'
S3_BUCKET: null
S3_KEY: null
S3_SECRET: null
SAILTHRU_KEY: ''
SAILTHRU_MASTER_LIST: 'Master List'
SAILTHRU_SECRET: ''
SECURE_IMAGES_URL: 'https://d1ycxz9plii3tb.cloudfront.net'
SEGMENT_WRITE_KEY: null
SEGMENT_WRITE_KEY_SERVER: null
SENTRY_PRIVATE_DSN: null
SENTRY_PUBLIC_DSN: null
SESSION_COOKIE_KEY: 'PI:KEY:<KEY>END_PI'
SESSION_COOKIE_MAX_AGE: 31536000000
SESSION_SECRET: 'PI:KEY:<KEY>END_PIsyoss'
SHOW_ANALYTICS_CALLS: false
STRIPE_PUBLISHABLE_KEY: null
TARGET_CAMPAIGN_URL: '/seattle-art-fair-2017'
TRACK_PAGELOAD_PATHS: null
TWITTER_CONSUMER_KEY: null
TWITTER_CONSUMER_SECRET: null
VENICE_2015_SECTION: null
VOLLEY_ENDPOINT: null
# Override any values with env variables if they exist.
# You can set JSON-y values for env variables as well such as "true" or
# "['foo']" and config will attempt to JSON.parse them into non-string types.
for key, val of module.exports
val = (process.env[key] or val)
module.exports[key] = try JSON.parse(val) catch then val
# Warn if this file is included client-side
alert("WARNING: Do not require config.coffee, please require('sharify').data instead.") if window?
|
[
{
"context": "gorithm Name: TWOFISH\n#Principal Submitter: Bruce Schneier, Counterpane Systems\n#\n#\n\nout = []\nf = () ->\n ou",
"end": 277,
"score": 0.999869167804718,
"start": 263,
"tag": "NAME",
"value": "Bruce Schneier"
},
{
"context": "laintext : PT, ciphertext : CT } ... | test/gen/gen_twofish_ecb_tbl.iced | CyberFlameGO/triplesec | 274 | #
# From here: http://www.schneier.com/code/twofish-kat.zip
# FILENAME: "ecb_tbl.txt"
#
#Electronic Codebook (ECB) Mode
#Tables Known Answer Test
#Tests permutation tables and MDS matrix multiply tables.
#"
#Algorithm Name: TWOFISH
#Principal Submitter: Bruce Schneier, Counterpane Systems
#
#
out = []
f = () ->
out.push { key : KEY, plaintext : PT, ciphertext : CT }
I = "1"
KEY = "00000000000000000000000000000000"
PT = "00000000000000000000000000000000"
CT = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
f()
I = "2"
KEY = "00000000000000000000000000000000"
PT = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
CT = "D491DB16E7B1C39E86CB086B789F5419"
f()
I = "3"
KEY = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
PT = "D491DB16E7B1C39E86CB086B789F5419"
CT = "019F9809DE1711858FAAC3A3BA20FBC3"
f()
I = "4"
KEY = "D491DB16E7B1C39E86CB086B789F5419"
PT = "019F9809DE1711858FAAC3A3BA20FBC3"
CT = "6363977DE839486297E661C6C9D668EB"
f()
I = "5"
KEY = "019F9809DE1711858FAAC3A3BA20FBC3"
PT = "6363977DE839486297E661C6C9D668EB"
CT = "816D5BD0FAE35342BF2A7412C246F752"
f()
I = "6"
KEY = "6363977DE839486297E661C6C9D668EB"
PT = "816D5BD0FAE35342BF2A7412C246F752"
CT = "5449ECA008FF5921155F598AF4CED4D0"
f()
I = "7"
KEY = "816D5BD0FAE35342BF2A7412C246F752"
PT = "5449ECA008FF5921155F598AF4CED4D0"
CT = "6600522E97AEB3094ED5F92AFCBCDD10"
f()
I = "8"
KEY = "5449ECA008FF5921155F598AF4CED4D0"
PT = "6600522E97AEB3094ED5F92AFCBCDD10"
CT = "34C8A5FB2D3D08A170D120AC6D26DBFA"
f()
I = "9"
KEY = "6600522E97AEB3094ED5F92AFCBCDD10"
PT = "34C8A5FB2D3D08A170D120AC6D26DBFA"
CT = "28530B358C1B42EF277DE6D4407FC591"
f()
I = "10"
KEY = "34C8A5FB2D3D08A170D120AC6D26DBFA"
PT = "28530B358C1B42EF277DE6D4407FC591"
CT = "8A8AB983310ED78C8C0ECDE030B8DCA4"
f()
I = "11"
KEY = "28530B358C1B42EF277DE6D4407FC591"
PT = "8A8AB983310ED78C8C0ECDE030B8DCA4"
CT = "48C758A6DFC1DD8B259FA165E1CE2B3C"
f()
I = "12"
KEY = "8A8AB983310ED78C8C0ECDE030B8DCA4"
PT = "48C758A6DFC1DD8B259FA165E1CE2B3C"
CT = "CE73C65C101680BBC251C5C16ABCF214"
f()
I = "13"
KEY = "48C758A6DFC1DD8B259FA165E1CE2B3C"
PT = "CE73C65C101680BBC251C5C16ABCF214"
CT = "C7ABD74AA060F78B244E24C71342BA89"
f()
I = "14"
KEY = "CE73C65C101680BBC251C5C16ABCF214"
PT = "C7ABD74AA060F78B244E24C71342BA89"
CT = "D0F8B3B6409EBCB666D29C916565ABFC"
f()
I = "15"
KEY = "C7ABD74AA060F78B244E24C71342BA89"
PT = "D0F8B3B6409EBCB666D29C916565ABFC"
CT = "DD42662908070054544FE09DA4263130"
f()
I = "16"
KEY = "D0F8B3B6409EBCB666D29C916565ABFC"
PT = "DD42662908070054544FE09DA4263130"
CT = "7007BACB42F7BF989CF30F78BC50EDCA"
f()
I = "17"
KEY = "DD42662908070054544FE09DA4263130"
PT = "7007BACB42F7BF989CF30F78BC50EDCA"
CT = "57B9A18EE97D90F435A16F69F0AC6F16"
f()
I = "18"
KEY = "7007BACB42F7BF989CF30F78BC50EDCA"
PT = "57B9A18EE97D90F435A16F69F0AC6F16"
CT = "06181F0D53267ABD8F3BB28455B198AD"
f()
I = "19"
KEY = "57B9A18EE97D90F435A16F69F0AC6F16"
PT = "06181F0D53267ABD8F3BB28455B198AD"
CT = "81A12D8449E9040BAAE7196338D8C8F2"
f()
I = "20"
KEY = "06181F0D53267ABD8F3BB28455B198AD"
PT = "81A12D8449E9040BAAE7196338D8C8F2"
CT = "BE422651C56F2622DA0201815A95A820"
f()
I = "21"
KEY = "81A12D8449E9040BAAE7196338D8C8F2"
PT = "BE422651C56F2622DA0201815A95A820"
CT = "113B19F2D778473990480CEE4DA238D1"
f()
I = "22"
KEY = "BE422651C56F2622DA0201815A95A820"
PT = "113B19F2D778473990480CEE4DA238D1"
CT = "E6942E9A86E544CF3E3364F20BE011DF"
f()
I = "23"
KEY = "113B19F2D778473990480CEE4DA238D1"
PT = "E6942E9A86E544CF3E3364F20BE011DF"
CT = "87CDC6AA487BFD0EA70188257D9B3859"
f()
I = "24"
KEY = "E6942E9A86E544CF3E3364F20BE011DF"
PT = "87CDC6AA487BFD0EA70188257D9B3859"
CT = "D5E2701253DD75A11A4CFB243714BD14"
f()
I = "25"
KEY = "87CDC6AA487BFD0EA70188257D9B3859"
PT = "D5E2701253DD75A11A4CFB243714BD14"
CT = "FD24812EEA107A9E6FAB8EABE0F0F48C"
f()
I = "26"
KEY = "D5E2701253DD75A11A4CFB243714BD14"
PT = "FD24812EEA107A9E6FAB8EABE0F0F48C"
CT = "DAFA84E31A297F372C3A807100CD783D"
f()
I = "27"
KEY = "FD24812EEA107A9E6FAB8EABE0F0F48C"
PT = "DAFA84E31A297F372C3A807100CD783D"
CT = "A55ED2D955EC8950FC0CC93B76ACBF91"
f()
I = "28"
KEY = "DAFA84E31A297F372C3A807100CD783D"
PT = "A55ED2D955EC8950FC0CC93B76ACBF91"
CT = "2ABEA2A4BF27ABDC6B6F278993264744"
f()
I = "29"
KEY = "A55ED2D955EC8950FC0CC93B76ACBF91"
PT = "2ABEA2A4BF27ABDC6B6F278993264744"
CT = "045383E219321D5A4435C0E491E7DE10"
f()
I = "30"
KEY = "2ABEA2A4BF27ABDC6B6F278993264744"
PT = "045383E219321D5A4435C0E491E7DE10"
CT = "7460A4CD4F312F32B1C7A94FA004E934"
f()
I = "31"
KEY = "045383E219321D5A4435C0E491E7DE10"
PT = "7460A4CD4F312F32B1C7A94FA004E934"
CT = "6BBF9186D32C2C5895649D746566050A"
f()
I = "32"
KEY = "7460A4CD4F312F32B1C7A94FA004E934"
PT = "6BBF9186D32C2C5895649D746566050A"
CT = "CDBDD19ACF40B8AC0328C80054266068"
f()
I = "33"
KEY = "6BBF9186D32C2C5895649D746566050A"
PT = "CDBDD19ACF40B8AC0328C80054266068"
CT = "1D2836CAE4223EAB5066867A71B1A1C3"
f()
I = "34"
KEY = "CDBDD19ACF40B8AC0328C80054266068"
PT = "1D2836CAE4223EAB5066867A71B1A1C3"
CT = "2D7F37121D0D2416D5E2767FF202061B"
f()
I = "35"
KEY = "1D2836CAE4223EAB5066867A71B1A1C3"
PT = "2D7F37121D0D2416D5E2767FF202061B"
CT = "D70736D1ABC7427A121CC816CD66D7FF"
f()
I = "36"
KEY = "2D7F37121D0D2416D5E2767FF202061B"
PT = "D70736D1ABC7427A121CC816CD66D7FF"
CT = "AC6CA71CBCBEDCC0EA849FB2E9377865"
f()
I = "37"
KEY = "D70736D1ABC7427A121CC816CD66D7FF"
PT = "AC6CA71CBCBEDCC0EA849FB2E9377865"
CT = "307265FF145CBBC7104B3E51C6C1D6B4"
f()
I = "38"
KEY = "AC6CA71CBCBEDCC0EA849FB2E9377865"
PT = "307265FF145CBBC7104B3E51C6C1D6B4"
CT = "934B7DB4B3544854DBCA81C4C5DE4EB1"
f()
I = "39"
KEY = "307265FF145CBBC7104B3E51C6C1D6B4"
PT = "934B7DB4B3544854DBCA81C4C5DE4EB1"
CT = "18759824AD9823D5961F84377D7EAEBF"
f()
I = "40"
KEY = "934B7DB4B3544854DBCA81C4C5DE4EB1"
PT = "18759824AD9823D5961F84377D7EAEBF"
CT = "DEDDAC6029B01574D9BABB099DC6CA6C"
f()
I = "41"
KEY = "18759824AD9823D5961F84377D7EAEBF"
PT = "DEDDAC6029B01574D9BABB099DC6CA6C"
CT = "5EA82EEA2244DED42CCA2F835D5615DF"
f()
I = "42"
KEY = "DEDDAC6029B01574D9BABB099DC6CA6C"
PT = "5EA82EEA2244DED42CCA2F835D5615DF"
CT = "1E3853F7FFA57091771DD8CDEE9414DE"
f()
I = "43"
KEY = "5EA82EEA2244DED42CCA2F835D5615DF"
PT = "1E3853F7FFA57091771DD8CDEE9414DE"
CT = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
f()
I = "44"
KEY = "1E3853F7FFA57091771DD8CDEE9414DE"
PT = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
CT = "3A3CFA1F13A136C94D76E5FA4A1109FF"
f()
I = "45"
KEY = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
PT = "3A3CFA1F13A136C94D76E5FA4A1109FF"
CT = "91630CF96003B8032E695797E313A553"
f()
I = "46"
KEY = "3A3CFA1F13A136C94D76E5FA4A1109FF"
PT = "91630CF96003B8032E695797E313A553"
CT = "137A24CA47CD12BE818DF4D2F4355960"
f()
I = "47"
KEY = "91630CF96003B8032E695797E313A553"
PT = "137A24CA47CD12BE818DF4D2F4355960"
CT = "BCA724A54533C6987E14AA827952F921"
f()
I = "48"
KEY = "137A24CA47CD12BE818DF4D2F4355960"
PT = "BCA724A54533C6987E14AA827952F921"
CT = "6B459286F3FFD28D49F15B1581B08E42"
f()
I = "49"
KEY = "BCA724A54533C6987E14AA827952F921"
PT = "6B459286F3FFD28D49F15B1581B08E42"
CT = "5D9D4EEFFA9151575524F115815A12E0"
f()
I = "1"
KEY = "000000000000000000000000000000000000000000000000"
PT = "00000000000000000000000000000000"
CT = "EFA71F788965BD4453F860178FC19101"
f()
I = "2"
KEY = "000000000000000000000000000000000000000000000000"
PT = "EFA71F788965BD4453F860178FC19101"
CT = "88B2B2706B105E36B446BB6D731A1E88"
f()
I = "3"
KEY = "EFA71F788965BD4453F860178FC191010000000000000000"
PT = "88B2B2706B105E36B446BB6D731A1E88"
CT = "39DA69D6BA4997D585B6DC073CA341B2"
f()
I = "4"
KEY = "88B2B2706B105E36B446BB6D731A1E88EFA71F788965BD44"
PT = "39DA69D6BA4997D585B6DC073CA341B2"
CT = "182B02D81497EA45F9DAACDC29193A65"
f()
I = "5"
KEY = "39DA69D6BA4997D585B6DC073CA341B288B2B2706B105E36"
PT = "182B02D81497EA45F9DAACDC29193A65"
CT = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63"
f()
I = "6"
KEY = "182B02D81497EA45F9DAACDC29193A6539DA69D6BA4997D5"
PT = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63"
CT = "D1079B789F666649B6BD7D1629F1F77E"
f()
I = "7"
KEY = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63182B02D81497EA45"
PT = "D1079B789F666649B6BD7D1629F1F77E"
CT = "3AF6F7CE5BD35EF18BEC6FA787AB506B"
f()
I = "8"
KEY = "D1079B789F666649B6BD7D1629F1F77E7AFF7A70CA2FF28A"
PT = "3AF6F7CE5BD35EF18BEC6FA787AB506B"
CT = "AE8109BFDA85C1F2C5038B34ED691BFF"
f()
I = "9"
KEY = "3AF6F7CE5BD35EF18BEC6FA787AB506BD1079B789F666649"
PT = "AE8109BFDA85C1F2C5038B34ED691BFF"
CT = "893FD67B98C550073571BD631263FC78"
f()
I = "10"
KEY = "AE8109BFDA85C1F2C5038B34ED691BFF3AF6F7CE5BD35EF1"
PT = "893FD67B98C550073571BD631263FC78"
CT = "16434FC9C8841A63D58700B5578E8F67"
f()
I = "11"
KEY = "893FD67B98C550073571BD631263FC78AE8109BFDA85C1F2"
PT = "16434FC9C8841A63D58700B5578E8F67"
CT = "9594CF62D48ACD347A68A3161F0F3EE7"
f()
I = "12"
KEY = "16434FC9C8841A63D58700B5578E8F67893FD67B98C55007"
PT = "9594CF62D48ACD347A68A3161F0F3EE7"
CT = "B23E8C2C731C514017D1F2B88D77D208"
f()
I = "13"
KEY = "9594CF62D48ACD347A68A3161F0F3EE716434FC9C8841A63"
PT = "B23E8C2C731C514017D1F2B88D77D208"
CT = "93CC592BC96D95FA8AC32DA894F6AB89"
f()
I = "14"
KEY = "B23E8C2C731C514017D1F2B88D77D2089594CF62D48ACD34"
PT = "93CC592BC96D95FA8AC32DA894F6AB89"
CT = "26516E6BD4AEF86AF4F4AD58FA41A14C"
f()
I = "15"
KEY = "93CC592BC96D95FA8AC32DA894F6AB89B23E8C2C731C5140"
PT = "26516E6BD4AEF86AF4F4AD58FA41A14C"
CT = "00A8FFFAB8616BE710A6592438FC40BE"
f()
I = "16"
KEY = "26516E6BD4AEF86AF4F4AD58FA41A14C93CC592BC96D95FA"
PT = "00A8FFFAB8616BE710A6592438FC40BE"
CT = "A19B811C77482D97C842EC62DB2EDCCE"
f()
I = "17"
KEY = "00A8FFFAB8616BE710A6592438FC40BE26516E6BD4AEF86A"
PT = "A19B811C77482D97C842EC62DB2EDCCE"
CT = "D7FF438678D818CAA26A676342F98E8B"
f()
I = "18"
KEY = "A19B811C77482D97C842EC62DB2EDCCE00A8FFFAB8616BE7"
PT = "D7FF438678D818CAA26A676342F98E8B"
CT = "84EAFFC0C091582ABB717086E0807A5F"
f()
I = "19"
KEY = "D7FF438678D818CAA26A676342F98E8BA19B811C77482D97"
PT = "84EAFFC0C091582ABB717086E0807A5F"
CT = "5500AF1C79503FEF1BACF35A81DC2865"
f()
I = "20"
KEY = "84EAFFC0C091582ABB717086E0807A5FD7FF438678D818CA"
PT = "5500AF1C79503FEF1BACF35A81DC2865"
CT = "7282B2F3E766C83664930A19D201D7E7"
f()
I = "21"
KEY = "5500AF1C79503FEF1BACF35A81DC286584EAFFC0C091582A"
PT = "7282B2F3E766C83664930A19D201D7E7"
CT = "6796085C32FBDD2AB43E81EAC1262743"
f()
I = "22"
KEY = "7282B2F3E766C83664930A19D201D7E75500AF1C79503FEF"
PT = "6796085C32FBDD2AB43E81EAC1262743"
CT = "5839F9E148B9FD2B5A52751D4F178FDC"
f()
I = "23"
KEY = "6796085C32FBDD2AB43E81EAC12627437282B2F3E766C836"
PT = "5839F9E148B9FD2B5A52751D4F178FDC"
CT = "A88F34056742E55408A7A9E7B6D4C8C0"
f()
I = "24"
KEY = "5839F9E148B9FD2B5A52751D4F178FDC6796085C32FBDD2A"
PT = "A88F34056742E55408A7A9E7B6D4C8C0"
CT = "9C8C304CB1F937C6E42528459FA8872F"
f()
I = "25"
KEY = "A88F34056742E55408A7A9E7B6D4C8C05839F9E148B9FD2B"
PT = "9C8C304CB1F937C6E42528459FA8872F"
CT = "EA3668C0D96529A7F3BF0F7C2B5C5BE2"
f()
I = "26"
KEY = "9C8C304CB1F937C6E42528459FA8872FA88F34056742E554"
PT = "EA3668C0D96529A7F3BF0F7C2B5C5BE2"
CT = "A8FB6EEFCAAF9C4041072D570984CED2"
f()
I = "27"
KEY = "EA3668C0D96529A7F3BF0F7C2B5C5BE29C8C304CB1F937C6"
PT = "A8FB6EEFCAAF9C4041072D570984CED2"
CT = "ABF4662E5D50F71B15FE3B428AFE3500"
f()
I = "28"
KEY = "A8FB6EEFCAAF9C4041072D570984CED2EA3668C0D96529A7"
PT = "ABF4662E5D50F71B15FE3B428AFE3500"
CT = "3B3AED23958DA6E2FA4493BCBE59A806"
f()
I = "29"
KEY = "ABF4662E5D50F71B15FE3B428AFE3500A8FB6EEFCAAF9C40"
PT = "3B3AED23958DA6E2FA4493BCBE59A806"
CT = "CFBF446E33C3DCD4DD5161CA00D4BA8F"
f()
I = "30"
KEY = "3B3AED23958DA6E2FA4493BCBE59A806ABF4662E5D50F71B"
PT = "CFBF446E33C3DCD4DD5161CA00D4BA8F"
CT = "072ADBFA7EB962BA199AFC720339FF29"
f()
I = "31"
KEY = "CFBF446E33C3DCD4DD5161CA00D4BA8F3B3AED23958DA6E2"
PT = "072ADBFA7EB962BA199AFC720339FF29"
CT = "AAB346D9123A8140AC563EF17E7054C8"
f()
I = "32"
KEY = "072ADBFA7EB962BA199AFC720339FF29CFBF446E33C3DCD4"
PT = "AAB346D9123A8140AC563EF17E7054C8"
CT = "3C7DDC5FE6E5888FE61DEDA0C69CD320"
f()
I = "33"
KEY = "AAB346D9123A8140AC563EF17E7054C8072ADBFA7EB962BA"
PT = "3C7DDC5FE6E5888FE61DEDA0C69CD320"
CT = "34AA645C7E3532098ADA91BB128ED821"
f()
I = "34"
KEY = "3C7DDC5FE6E5888FE61DEDA0C69CD320AAB346D9123A8140"
PT = "34AA645C7E3532098ADA91BB128ED821"
CT = "7A5D12E315038522DA01EC0834B1322C"
f()
I = "35"
KEY = "34AA645C7E3532098ADA91BB128ED8213C7DDC5FE6E5888F"
PT = "7A5D12E315038522DA01EC0834B1322C"
CT = "86E235A1CB091FF7FE6FBBCA0D73BE58"
f()
I = "36"
KEY = "7A5D12E315038522DA01EC0834B1322C34AA645C7E353209"
PT = "86E235A1CB091FF7FE6FBBCA0D73BE58"
CT = "F3CE81CACE5D6BA9C558111DCDB22F5D"
f()
I = "37"
KEY = "86E235A1CB091FF7FE6FBBCA0D73BE587A5D12E315038522"
PT = "F3CE81CACE5D6BA9C558111DCDB22F5D"
CT = "FB30A5D67D5F5BE4048C77E2AD3BC9D1"
f()
I = "38"
KEY = "F3CE81CACE5D6BA9C558111DCDB22F5D86E235A1CB091FF7"
PT = "FB30A5D67D5F5BE4048C77E2AD3BC9D1"
CT = "31E69DD3D2D90E81C9EBFAC257E9823D"
f()
I = "39"
KEY = "FB30A5D67D5F5BE4048C77E2AD3BC9D1F3CE81CACE5D6BA9"
PT = "31E69DD3D2D90E81C9EBFAC257E9823D"
CT = "87579B3F19A9CDE12BB882FFEAF665AE"
f()
I = "40"
KEY = "31E69DD3D2D90E81C9EBFAC257E9823DFB30A5D67D5F5BE4"
PT = "87579B3F19A9CDE12BB882FFEAF665AE"
CT = "24A8572AA844FEF25F7670FAE30F1CD2"
f()
I = "41"
KEY = "87579B3F19A9CDE12BB882FFEAF665AE31E69DD3D2D90E81"
PT = "24A8572AA844FEF25F7670FAE30F1CD2"
CT = "0CFA32E5E33F3B2DAC9F34D25979319A"
f()
I = "42"
KEY = "24A8572AA844FEF25F7670FAE30F1CD287579B3F19A9CDE1"
PT = "0CFA32E5E33F3B2DAC9F34D25979319A"
CT = "3C64D7FC881B9B82ABA21FF122B98F54"
f()
I = "43"
KEY = "0CFA32E5E33F3B2DAC9F34D25979319A24A8572AA844FEF2"
PT = "3C64D7FC881B9B82ABA21FF122B98F54"
CT = "A794CAEE6756281B7A64894E4E4F70A8"
f()
I = "44"
KEY = "3C64D7FC881B9B82ABA21FF122B98F540CFA32E5E33F3B2D"
PT = "A794CAEE6756281B7A64894E4E4F70A8"
CT = "89A9BF6B893BC5E6FEF4C77F3D0F29A6"
f()
I = "45"
KEY = "A794CAEE6756281B7A64894E4E4F70A83C64D7FC881B9B82"
PT = "89A9BF6B893BC5E6FEF4C77F3D0F29A6"
CT = "5DBE44032769DF543EAD7AD13A5F3310"
f()
I = "46"
KEY = "89A9BF6B893BC5E6FEF4C77F3D0F29A6A794CAEE6756281B"
PT = "5DBE44032769DF543EAD7AD13A5F3310"
CT = "DEA4F3DA75EC7A8EAC3861A9912402CD"
f()
I = "47"
KEY = "5DBE44032769DF543EAD7AD13A5F331089A9BF6B893BC5E6"
PT = "DEA4F3DA75EC7A8EAC3861A9912402CD"
CT = "FB66522C332FCC4C042ABE32FA9E902F"
f()
I = "48"
KEY = "DEA4F3DA75EC7A8EAC3861A9912402CD5DBE44032769DF54"
PT = "FB66522C332FCC4C042ABE32FA9E902F"
CT = "F0AB73301125FA21EF70BE5385FB76B6"
f()
I = "49"
KEY = "FB66522C332FCC4C042ABE32FA9E902FDEA4F3DA75EC7A8E"
PT = "F0AB73301125FA21EF70BE5385FB76B6"
CT = "E75449212BEEF9F4A390BD860A640941"
f()
I = "1"
KEY = "0000000000000000000000000000000000000000000000000000000000000000"
PT = "00000000000000000000000000000000"
CT = "57FF739D4DC92C1BD7FC01700CC8216F"
f()
I = "2"
KEY = "0000000000000000000000000000000000000000000000000000000000000000"
PT = "57FF739D4DC92C1BD7FC01700CC8216F"
CT = "D43BB7556EA32E46F2A282B7D45B4E0D"
f()
I = "3"
KEY = "57FF739D4DC92C1BD7FC01700CC8216F00000000000000000000000000000000"
PT = "D43BB7556EA32E46F2A282B7D45B4E0D"
CT = "90AFE91BB288544F2C32DC239B2635E6"
f()
I = "4"
KEY = "D43BB7556EA32E46F2A282B7D45B4E0D57FF739D4DC92C1BD7FC01700CC8216F"
PT = "90AFE91BB288544F2C32DC239B2635E6"
CT = "6CB4561C40BF0A9705931CB6D408E7FA"
f()
I = "5"
KEY = "90AFE91BB288544F2C32DC239B2635E6D43BB7556EA32E46F2A282B7D45B4E0D"
PT = "6CB4561C40BF0A9705931CB6D408E7FA"
CT = "3059D6D61753B958D92F4781C8640E58"
f()
I = "6"
KEY = "6CB4561C40BF0A9705931CB6D408E7FA90AFE91BB288544F2C32DC239B2635E6"
PT = "3059D6D61753B958D92F4781C8640E58"
CT = "E69465770505D7F80EF68CA38AB3A3D6"
f()
I = "7"
KEY = "3059D6D61753B958D92F4781C8640E586CB4561C40BF0A9705931CB6D408E7FA"
PT = "E69465770505D7F80EF68CA38AB3A3D6"
CT = "5AB67A5F8539A4A5FD9F0373BA463466"
f()
I = "8"
KEY = "E69465770505D7F80EF68CA38AB3A3D63059D6D61753B958D92F4781C8640E58"
PT = "5AB67A5F8539A4A5FD9F0373BA463466"
CT = "DC096BCD99FC72F79936D4C748E75AF7"
f()
I = "9"
KEY = "5AB67A5F8539A4A5FD9F0373BA463466E69465770505D7F80EF68CA38AB3A3D6"
PT = "DC096BCD99FC72F79936D4C748E75AF7"
CT = "C5A3E7CEE0F1B7260528A68FB4EA05F2"
f()
I = "10"
KEY = "DC096BCD99FC72F79936D4C748E75AF75AB67A5F8539A4A5FD9F0373BA463466"
PT = "C5A3E7CEE0F1B7260528A68FB4EA05F2"
CT = "43D5CEC327B24AB90AD34A79D0469151"
f()
I = "11"
KEY = "C5A3E7CEE0F1B7260528A68FB4EA05F2DC096BCD99FC72F79936D4C748E75AF7"
PT = "43D5CEC327B24AB90AD34A79D0469151"
CT = "05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
f()
I = "12"
KEY = "43D5CEC327B24AB90AD34A79D0469151C5A3E7CEE0F1B7260528A68FB4EA05F2"
PT = "05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
CT = "89D513B989E3CECE4D2E3E4C15D4E01C"
f()
I = "13"
KEY = "05BA7BE9BE5274FBEB4AC5FD5FAAFA1043D5CEC327B24AB90AD34A79D0469151"
PT = "89D513B989E3CECE4D2E3E4C15D4E01C"
CT = "E56E462AE2E66F800CE2224C4CAFACBE"
f()
I = "14"
KEY = "89D513B989E3CECE4D2E3E4C15D4E01C05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
PT = "E56E462AE2E66F800CE2224C4CAFACBE"
CT = "F5DBEE5414D79CA46BE17346FDC72BF5"
f()
I = "15"
KEY = "E56E462AE2E66F800CE2224C4CAFACBE89D513B989E3CECE4D2E3E4C15D4E01C"
PT = "F5DBEE5414D79CA46BE17346FDC72BF5"
CT = "C780FC28815E667B3D224ECF1F4A7D65"
f()
I = "16"
KEY = "F5DBEE5414D79CA46BE17346FDC72BF5E56E462AE2E66F800CE2224C4CAFACBE"
PT = "C780FC28815E667B3D224ECF1F4A7D65"
CT = "2EF17A0D75440ECEC9AF18D29ABA3CDA"
f()
I = "17"
KEY = "C780FC28815E667B3D224ECF1F4A7D65F5DBEE5414D79CA46BE17346FDC72BF5"
PT = "2EF17A0D75440ECEC9AF18D29ABA3CDA"
CT = "D653FDFF96BCF89A929F6377AB41967A"
f()
I = "18"
KEY = "2EF17A0D75440ECEC9AF18D29ABA3CDAC780FC28815E667B3D224ECF1F4A7D65"
PT = "D653FDFF96BCF89A929F6377AB41967A"
CT = "36BBAD8838380C8793A7AEA0A11D04D2"
f()
I = "19"
KEY = "D653FDFF96BCF89A929F6377AB41967A2EF17A0D75440ECEC9AF18D29ABA3CDA"
PT = "36BBAD8838380C8793A7AEA0A11D04D2"
CT = "034EFECFC0EFC00D82C3345A8708AE78"
f()
I = "20"
KEY = "36BBAD8838380C8793A7AEA0A11D04D2D653FDFF96BCF89A929F6377AB41967A"
PT = "034EFECFC0EFC00D82C3345A8708AE78"
CT = "EA38887307337A2934A9EB802F6CFDDD"
f()
I = "21"
KEY = "034EFECFC0EFC00D82C3345A8708AE7836BBAD8838380C8793A7AEA0A11D04D2"
PT = "EA38887307337A2934A9EB802F6CFDDD"
CT = "7C279D47775AB4C26442E8A117943DD8"
f()
I = "22"
KEY = "EA38887307337A2934A9EB802F6CFDDD034EFECFC0EFC00D82C3345A8708AE78"
PT = "7C279D47775AB4C26442E8A117943DD8"
CT = "E3CF581056EBC1169CF451E930308726"
f()
I = "23"
KEY = "7C279D47775AB4C26442E8A117943DD8EA38887307337A2934A9EB802F6CFDDD"
PT = "E3CF581056EBC1169CF451E930308726"
CT = "65527B29D489C8CDDC62E5E7FFC4E924"
f()
I = "24"
KEY = "E3CF581056EBC1169CF451E9303087267C279D47775AB4C26442E8A117943DD8"
PT = "65527B29D489C8CDDC62E5E7FFC4E924"
CT = "3B491F930230A0170819CACB48F9030C"
f()
I = "25"
KEY = "65527B29D489C8CDDC62E5E7FFC4E924E3CF581056EBC1169CF451E930308726"
PT = "3B491F930230A0170819CACB48F9030C"
CT = "A7C8193F35AF63B51D7F9DEDCEC85866"
f()
I = "26"
KEY = "3B491F930230A0170819CACB48F9030C65527B29D489C8CDDC62E5E7FFC4E924"
PT = "A7C8193F35AF63B51D7F9DEDCEC85866"
CT = "009A48F9A1C27F9AA5F9BD909C848B60"
f()
I = "27"
KEY = "A7C8193F35AF63B51D7F9DEDCEC858663B491F930230A0170819CACB48F9030C"
PT = "009A48F9A1C27F9AA5F9BD909C848B60"
CT = "3B507E987B3F827A093B26C85CDFF6C6"
f()
I = "28"
KEY = "009A48F9A1C27F9AA5F9BD909C848B60A7C8193F35AF63B51D7F9DEDCEC85866"
PT = "3B507E987B3F827A093B26C85CDFF6C6"
CT = "EAFB3BA6C78E37214FE908669BC24F6F"
f()
I = "29"
KEY = "3B507E987B3F827A093B26C85CDFF6C6009A48F9A1C27F9AA5F9BD909C848B60"
PT = "EAFB3BA6C78E37214FE908669BC24F6F"
CT = "2B09F10D7F2B62A4D0DAFFB9F882B980"
f()
I = "30"
KEY = "EAFB3BA6C78E37214FE908669BC24F6F3B507E987B3F827A093B26C85CDFF6C6"
PT = "2B09F10D7F2B62A4D0DAFFB9F882B980"
CT = "E6B9FE907009B2DC649412DD630AE576"
f()
I = "31"
KEY = "2B09F10D7F2B62A4D0DAFFB9F882B980EAFB3BA6C78E37214FE908669BC24F6F"
PT = "E6B9FE907009B2DC649412DD630AE576"
CT = "F2E4DC89724CD58F85847BD09ED4500F"
f()
I = "32"
KEY = "E6B9FE907009B2DC649412DD630AE5762B09F10D7F2B62A4D0DAFFB9F882B980"
PT = "F2E4DC89724CD58F85847BD09ED4500F"
CT = "DC650444FCEBC2BD6E450EF4605FCABE"
f()
I = "33"
KEY = "F2E4DC89724CD58F85847BD09ED4500FE6B9FE907009B2DC649412DD630AE576"
PT = "DC650444FCEBC2BD6E450EF4605FCABE"
CT = "B7CFB31755FDA2AB0A67FBA09901A73E"
f()
I = "34"
KEY = "DC650444FCEBC2BD6E450EF4605FCABEF2E4DC89724CD58F85847BD09ED4500F"
PT = "B7CFB31755FDA2AB0A67FBA09901A73E"
CT = "9C7E7578F3D2923DCDABFED0A5EF86EE"
f()
I = "35"
KEY = "B7CFB31755FDA2AB0A67FBA09901A73EDC650444FCEBC2BD6E450EF4605FCABE"
PT = "9C7E7578F3D2923DCDABFED0A5EF86EE"
CT = "9465E9AE176483BD398C8033AA136F68"
f()
I = "36"
KEY = "9C7E7578F3D2923DCDABFED0A5EF86EEB7CFB31755FDA2AB0A67FBA09901A73E"
PT = "9465E9AE176483BD398C8033AA136F68"
CT = "8551279DE9991805FCFC1937C52AE9D4"
f()
I = "37"
KEY = "9465E9AE176483BD398C8033AA136F689C7E7578F3D2923DCDABFED0A5EF86EE"
PT = "8551279DE9991805FCFC1937C52AE9D4"
CT = "A830EB8DA52ECFF7F93B7627322CF9D7"
f()
I = "38"
KEY = "8551279DE9991805FCFC1937C52AE9D49465E9AE176483BD398C8033AA136F68"
PT = "A830EB8DA52ECFF7F93B7627322CF9D7"
CT = "EC89F0E5155425D4B92BD4B200C1A6E0"
f()
I = "39"
KEY = "A830EB8DA52ECFF7F93B7627322CF9D78551279DE9991805FCFC1937C52AE9D4"
PT = "EC89F0E5155425D4B92BD4B200C1A6E0"
CT = "8AF76A782061D38360C6C3CDDCBE1516"
f()
I = "40"
KEY = "EC89F0E5155425D4B92BD4B200C1A6E0A830EB8DA52ECFF7F93B7627322CF9D7"
PT = "8AF76A782061D38360C6C3CDDCBE1516"
CT = "D4EEAA6A069659D5D88590DE75515631"
f()
I = "41"
KEY = "8AF76A782061D38360C6C3CDDCBE1516EC89F0E5155425D4B92BD4B200C1A6E0"
PT = "D4EEAA6A069659D5D88590DE75515631"
CT = "259C021D37B077197B80FCDB07EA1AF2"
f()
I = "42"
KEY = "D4EEAA6A069659D5D88590DE755156318AF76A782061D38360C6C3CDDCBE1516"
PT = "259C021D37B077197B80FCDB07EA1AF2"
CT = "300CC8B4171F0E9BD75710FAD033C570"
f()
I = "43"
KEY = "259C021D37B077197B80FCDB07EA1AF2D4EEAA6A069659D5D88590DE75515631"
PT = "300CC8B4171F0E9BD75710FAD033C570"
CT = "5CFA5BD213A74F02E65390A4C14A1DF6"
f()
I = "44"
KEY = "300CC8B4171F0E9BD75710FAD033C570259C021D37B077197B80FCDB07EA1AF2"
PT = "5CFA5BD213A74F02E65390A4C14A1DF6"
CT = "A443EA1B2C5747CE7EC5F21D4FE0C147"
f()
I = "45"
KEY = "5CFA5BD213A74F02E65390A4C14A1DF6300CC8B4171F0E9BD75710FAD033C570"
PT = "A443EA1B2C5747CE7EC5F21D4FE0C147"
CT = "D2DED73E59319A8138E0331F0EA149EA"
f()
I = "46"
KEY = "A443EA1B2C5747CE7EC5F21D4FE0C1475CFA5BD213A74F02E65390A4C14A1DF6"
PT = "D2DED73E59319A8138E0331F0EA149EA"
CT = "2E2158BC3E5FC714C1EEECA0EA696D48"
f()
I = "47"
KEY = "D2DED73E59319A8138E0331F0EA149EAA443EA1B2C5747CE7EC5F21D4FE0C147"
PT = "2E2158BC3E5FC714C1EEECA0EA696D48"
CT = "248A7F3528B168ACFDD1386E3F51E30C"
f()
I = "48"
KEY = "2E2158BC3E5FC714C1EEECA0EA696D48D2DED73E59319A8138E0331F0EA149EA"
PT = "248A7F3528B168ACFDD1386E3F51E30C"
CT = "431058F4DBC7F734DA4F02F04CC4F459"
f()
I = "49"
KEY = "248A7F3528B168ACFDD1386E3F51E30C2E2158BC3E5FC714C1EEECA0EA696D48"
PT = "431058F4DBC7F734DA4F02F04CC4F459"
CT = "37FE26FF1CF66175F5DDF4C33B97A205"
f()
console.log "exports.data = #{JSON.stringify out, null, 4};"
| 143781 | #
# From here: http://www.schneier.com/code/twofish-kat.zip
# FILENAME: "ecb_tbl.txt"
#
#Electronic Codebook (ECB) Mode
#Tables Known Answer Test
#Tests permutation tables and MDS matrix multiply tables.
#"
#Algorithm Name: TWOFISH
#Principal Submitter: <NAME>, Counterpane Systems
#
#
out = []
f = () ->
out.push { key : KEY, plaintext : PT, ciphertext : CT }
I = "1"
KEY = "<KEY>"
PT = "00000000000000000000000000000000"
CT = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
f()
I = "2"
KEY = "<KEY>"
PT = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
CT = "D491DB16E7B1C39E86CB086B789F5<KEY>19"
f()
I = "3"
KEY = "<KEY>"
PT = "D491DB16E7B1C39E86CB086B789F5419"
CT = "019F9809DE1711858FAAC3A3BA20FBC3"
f()
I = "4"
KEY = "<KEY>9"
PT = "<KEY>"
CT = "6363977DE839486297E661C6C9D668EB"
f()
I = "5"
KEY = "<KEY>"
PT = "6363977DE839486297E661C6C9D668EB"
CT = "<KEY>"
f()
I = "6"
KEY = "<KEY>"
PT = "<KEY>"
CT = "5449ECA008FF5921155F598AF4CED4D0"
f()
I = "7"
KEY = "<KEY>"
PT = "5449ECA008FF5921155F598AF4CED4D0"
CT = "6600522E97AEB3094ED5F92AFCBCDD10"
f()
I = "8"
KEY = "<KEY>"
PT = "6600522E97AEB3094ED5F92AFCBCDD10"
CT = "34C8A5FB2D3D08A170D120AC6D26DBFA"
f()
I = "9"
KEY = "<KEY>"
PT = "<KEY>"
CT = "28<KEY>0<KEY>EF<KEY>1"
f()
I = "10"
KEY = "<KEY>"
PT = "285<KEY>0B<KEY>1B42EF<KEY>77<KEY>6D<KEY>407<KEY>5<KEY>1"
CT = "8A8AB983310ED78C8C0ECDE030B8DCA4"
f()
I = "11"
KEY = "<KEY>"
PT = "8A8AB983310ED78C8C0ECDE030B8DCA4"
CT = "48C758A6DFC1DD8B259<KEY>165<KEY>2B3C"
f()
I = "12"
KEY = "<KEY>"
PT = "4<KEY>"
CT = "CE<KEY>3C6<KEY>C<KEY>80BBC<KEY>"
f()
I = "13"
KEY = "<KEY>"
PT = "CE<KEY>"
CT = "C7ABD74AA060F<KEY>"
f()
I = "14"
KEY = "<KEY>"
PT = "C<KEY>"
CT = "D0F8B3B6409EBCB666D29C916565ABFC"
f()
I = "15"
KEY = "<KEY>"
PT = "D0F8B3B6409EBCB666D29C916565ABFC"
CT = "DD42662908070054544FE09DA4263130"
f()
I = "16"
KEY = "<KEY>"
PT = "DD42662908070054544FE09DA4263130"
CT = "<KEY>"
f()
I = "17"
KEY = "<KEY>"
PT = "<KEY>"
CT = "57B9A18EE97D90F435A16F69F0AC6F16"
f()
I = "18"
KEY = "<KEY>"
PT = "57B9A18EE97D9<KEY>"
CT = "06181F0D53267ABD8F3BB28455B198AD"
f()
I = "19"
KEY = "<KEY>"
PT = "06181F0D53267ABD8F3BB28455B198AD"
CT = "81A12D8449E9040BAAE7196338D8C8F2"
f()
I = "20"
KEY = "<KEY>"
PT = "81A12D8449E9040BAAE<KEY>6<KEY>8<KEY>8C8<KEY>2"
CT = "BE422651C56F2622DA0201815A95A820"
f()
I = "21"
KEY = "<KEY>"
PT = "BE422651C56F2622DA0201815A95A820"
CT = "113B19F2D778473990480CEE4DA238D1"
f()
I = "22"
KEY = "<KEY>"
PT = "113B19F2D778473990480CEE4DA238D1"
CT = "E6942E9A86E544CF3E3364F20BE011DF"
f()
I = "23"
KEY = "<KEY>"
PT = "E<KEY>"
CT = "87CDC6AA487BFD0EA70188257D9B3859"
f()
I = "24"
KEY = "<KEY>"
PT = "8<KEY>"
CT = "D5E2701253DD75A11A4CFB243714BD14"
f()
I = "25"
KEY = "<KEY>"
PT = "D5E2701253DD75A11A4CFB243714BD14"
CT = "FD24812EEA107A9E6FAB8EABE0F0F48C"
f()
I = "26"
KEY = "<KEY>"
PT = "FD24812EEA107A9E6FAB8EABE0F0F48C"
CT = "DAFA84E31A297F372C3A807100CD783D"
f()
I = "27"
KEY = "<KEY>"
PT = "DA<KEY>84E<KEY>2<KEY>CD7<KEY>"
CT = "A55ED2D955EC8950FC0CC93B76ACBF91"
f()
I = "28"
KEY = "<KEY>"
PT = "A55ED2D955EC8950FC0CC93B76ACBF91"
CT = "2ABEA2A4BF27ABDC6B6F278993264744"
f()
I = "29"
KEY = "<KEY>"
PT = "2ABEA2A4BF27ABDC6B6F278993264744"
CT = "045383E219321D5A4435C0E491E7DE10"
f()
I = "30"
KEY = "<KEY>"
PT = "045383E219321D5A4435C0E491E7DE10"
CT = "7460A4CD4F312F32B1C7A94<KEY>00<KEY>E934"
f()
I = "31"
KEY = "<KEY>"
PT = "7<KEY>"
CT = "6BBF9186D32C2C5895649D746566050A"
f()
I = "32"
KEY = "<KEY>C7A94FA004E934"
PT = "6BBF9186D32C2C5895649D746566050A"
CT = "CDBDD19ACF40B8AC0328C80054266068"
f()
I = "33"
KEY = "<KEY>"
PT = "CDBDD19ACF40B8AC0328C80054266068"
CT = "1D2836CAE4223EAB5066867A71B1A1C3"
f()
I = "34"
KEY = "<KEY>"
PT = "1D2836CAE4223EAB5066867A71B1A1C3"
CT = "2D7F37121D0D2416D5E2767FF202061B"
f()
I = "35"
KEY = "<KEY>"
PT = "2D7F37121D0D2416D5E2767FF202061B"
CT = "D70736D1ABC7427A121CC816CD66D7FF"
f()
I = "36"
KEY = "<KEY>"
PT = "D70736D1ABC7427A121CC816CD66D7FF"
CT = "AC6CA71CB<KEY>"
f()
I = "37"
KEY = "<KEY>"
PT = "<KEY>"
CT = "3<KEY>"
f()
I = "38"
KEY = "<KEY>"
PT = "<KEY>"
CT = "934B7DB4B3544854DBCA81C4C5DE4EB1"
f()
I = "39"
KEY = "<KEY>"
PT = "9<KEY>"
CT = "18759824AD9823D5961F84377D7EAEBF"
f()
I = "40"
KEY = "<KEY>"
PT = "18759824AD9823D5961F84377D7EAEBF"
CT = "DEDDAC6029B0<KEY>574D9BABB0<KEY>DC6CA6C"
f()
I = "41"
KEY = "<KEY>"
PT = "DEDDAC6029B0<KEY>4D9B<KEY>"
CT = "5EA82EEA2244DED42CCA2F835D5615DF"
f()
I = "42"
KEY = "<KEY>"
PT = "5EA82EEA2244DED42CCA2F835D5615DF"
CT = "1E3853F7FFA57091771DD8CDEE9414DE"
f()
I = "43"
KEY = "<KEY>"
PT = "1E3853F7FFA57091771DD8CDEE9414DE"
CT = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
f()
I = "44"
KEY = "<KEY>"
PT = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
CT = "3A3CFA1F13A136C94D76E5FA4A1109FF"
f()
I = "45"
KEY = "<KEY>"
PT = "3A3CFA1F13A136C94D76E5FA4A1109FF"
CT = "91630CF96003B8032E695797E313A553"
f()
I = "46"
KEY = "<KEY>A1109FF"
PT = "91630CF96003B8032E695797E313A553"
CT = "137A24CA47CD12BE818DF4D2F4355960"
f()
I = "47"
KEY = "<KEY>"
PT = "137A24CA47CD12BE818DF4D2F4355960"
CT = "BCA724A54533C6987E14AA8279<KEY>2<KEY>"
f()
I = "48"
KEY = "<KEY>"
PT = "BCA724A54533C6987E14AA827952F921"
CT = "6B459286F3FFD28D49F15B1581B08E42"
f()
I = "49"
KEY = "<KEY>"
PT = "6B459286F3FFD28D49F15B1581B08E42"
CT = "5D9D4EEFFA9151575524F115815A12E0"
f()
I = "1"
KEY = "<KEY>"
PT = "00000000000000000000000000000000"
CT = "E<KEY>"
f()
I = "2"
KEY = "<KEY>"
PT = "<KEY>"
CT = "8<KEY>88"
f()
I = "3"
KEY = "<KEY>"
PT = "<KEY>"
CT = "<KEY>"
f()
I = "4"
KEY = "<KEY>"
PT = "<KEY>"
CT = "182B02D81497EA45F9DAACDC29193A65"
f()
I = "5"
KEY = "<KEY>"
PT = "182B02D81497EA45F9DAACDC29193A<KEY>"
CT = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63"
f()
I = "6"
KEY = "<KEY>"
PT = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63"
CT = "D1079B789F666649B6BD7D1629F1F77E"
f()
I = "7"
KEY = "<KEY>"
PT = "D1079B789F666649B6BD7D1629F1F77E"
CT = "3AF6F7CE5BD35EF18BEC6FA787AB506B"
f()
I = "8"
KEY = "<KEY>"
PT = "3AF6F7CE5BD35EF18BEC6FA787AB506B"
CT = "AE8109BFDA85C1F2C5038B34ED691BFF"
f()
I = "9"
KEY = "<KEY>"
PT = "AE8109BFDA85C1F2C5038B34ED691BFF"
CT = "893FD67B98C550073571BD631263FC78"
f()
I = "10"
KEY = "<KEY>"
PT = "<KEY>"
CT = "16434FC9C8841A63D<KEY>"
f()
I = "11"
KEY = "<KEY>"
PT = "16<KEY>9C8841A6<KEY>"
CT = "9594CF62D48ACD347A68A3161F0F3EE7"
f()
I = "12"
KEY = "<KEY>"
PT = "9594CF62D48ACD347A68A3161F0F3EE7"
CT = "B23E8C2C731C<KEY>2B88D77D208"
f()
I = "13"
KEY = "<KEY>"
PT = "B23E8C2C731C514017D1F2B88D77D208"
CT = "93CC592BC96D95FA8AC32DA894F6AB89"
f()
I = "14"
KEY = "<KEY>"
PT = "<KEY>"
CT = "26516E6BD4AEF86AF4F4AD58FA41A14C"
f()
I = "15"
KEY = "<KEY>"
PT = "26516E6BD4AEF86AF4F4AD58FA41A14C"
CT = "00A8FFFAB8616BE710A6592438FC40BE"
f()
I = "16"
KEY = "<KEY>"
PT = "00A8FFFAB8616BE710A6592438FC40BE"
CT = "A19B811C77482D97C842EC62DB2EDCCE"
f()
I = "17"
KEY = "<KEY>"
PT = "A19B81<KEY>C77482D97C842EC62DB2EDCCE"
CT = "D7FF438678D818CAA26A676342F98E8B"
f()
I = "18"
KEY = "<KEY>EC62DB2EDCCE00A8FFFAB8616BE7"
PT = "D7FF438678D818CAA26A676342F98E8B"
CT = "<KEY>"
f()
I = "19"
KEY = "<KEY>"
PT = "<KEY>"
CT = "5500<KEY>03FEF1BAC<KEY>"
f()
I = "20"
KEY = "<KEY>"
PT = "<KEY>"
CT = "7282B2F3E766C83664930A19D201D7E7"
f()
I = "21"
KEY = "<KEY>"
PT = "7282B2F3E766C83664930A19D201D7E7"
CT = "6796085C32FBDD2AB43E81EAC1262743"
f()
I = "22"
KEY = "<KEY>00AF1C79503FEF"
PT = "6796085C32FBDD2AB43E81EAC1262743"
CT = "5839F9E148B9FD2B5A52751D4F178FDC"
f()
I = "23"
KEY = "<KEY>"
PT = "5839F9E148B9FD2B5A52751D4F178FDC"
CT = "A88F34056742E55408A7A9E7B6D4C8C0"
f()
I = "24"
KEY = "<KEY>"
PT = "A88F34056742E55408A7A9E7B6D4C8C0"
CT = "9C8C304CB1F937C6E4<KEY>2<KEY>459FA8872F"
f()
I = "25"
KEY = "<KEY>"
PT = "9C<KEY>C<KEY>0<KEY>CB1F937C6E4<KEY>8459FA8872F"
CT = "EA3668C0D96529A7F3BF0F7C2B5C5BE2"
f()
I = "26"
KEY = "<KEY>"
PT = "EA3668C0D96529A7F3BF0F7C2B5C5BE2"
CT = "A8FB6EEFCAAF9C4041072D570984CED2"
f()
I = "27"
KEY = "<KEY>"
PT = "A8FB6EEFCAAF9C4041072D570984CED2"
CT = "ABF4662E5D50F71B15FE3B428AFE3500"
f()
I = "28"
KEY = "<KEY>"
PT = "ABF4662E5D50F71B15FE3B428AFE3500"
CT = "3B3AED23958DA6E2FA4493BCBE59A806"
f()
I = "29"
KEY = "<KEY>"
PT = "3B3AED23958DA6E2FA4493BCBE59A806"
CT = "CFBF446E33C3DCD4DD5161CA00D4BA8F"
f()
I = "30"
KEY = "<KEY>"
PT = "CFBF<KEY>"
CT = "072ADBFA7EB962BA199AFC720339FF29"
f()
I = "31"
KEY = "<KEY>"
PT = "072ADBFA7EB962BA199AFC720339FF29"
CT = "AAB346D9123A8140AC563EF17E7054C8"
f()
I = "32"
KEY = "<KEY>"
PT = "AAB346D9123A8140AC563EF17E7054C8"
CT = "3C7DDC5FE6E5888FE61DEDA0C69CD320"
f()
I = "33"
KEY = "<KEY>"
PT = "3C7DDC5FE6E5888FE61DEDA0C69CD320"
CT = "34AA645C7E3532098ADA91BB128ED821"
f()
I = "34"
KEY = "<KEY>"
PT = "34AA645C7E3532098ADA91BB128ED821"
CT = "7A5D12E315038<KEY>"
f()
I = "35"
KEY = "<KEY>"
PT = "7A5D12E315038522DA01EC0834B1322C"
CT = "86E235A1CB091FF7FE6FBBCA0D73BE58"
f()
I = "36"
KEY = "<KEY>"
PT = "86E235A1CB091FF7FE6FBBCA0D73BE58"
CT = "F3CE81CACE5D6BA9C558111DCDB22F5D"
f()
I = "37"
KEY = "<KEY>"
PT = "F3CE81CACE5D6BA9C558111DCDB22F5D"
CT = "FB30A5D67D5F5BE4048C77E2AD3BC9D1"
f()
I = "38"
KEY = "<KEY>"
PT = "FB30A5D67D5F5BE4048C77E2AD3BC9D1"
CT = "31E69DD3D2D90E81C9EBFAC257E9823D"
f()
I = "3<KEY>"
KEY = "<KEY>"
PT = "31E69DD3D2D90E81C9EBFAC257E9823D"
CT = "87579B3F19A9CDE12BB882FFEAF665AE"
f()
I = "40"
KEY = "<KEY>"
PT = "87579B3F19A9CDE12BB882FFEAF665AE"
CT = "24A8572AA844FEF25F7670FAE30F1CD2"
f()
I = "41"
KEY = "<KEY>"
PT = "24A8572AA844FEF25F7670FAE30F1CD2"
CT = "0CFA32E5E33F3B2DAC9F34D25979319A"
f()
I = "42"
KEY = "<KEY>"
PT = "0CFA32E5E33F3B2DAC9F34D25979319A"
CT = "3C64D7FC881B9B82ABA21FF122B98F54"
f()
I = "43"
KEY = "<KEY>"
PT = "3<KEY>"
CT = "A794CAEE6756281B7A64894E4E4F70A8"
f()
I = "44"
KEY = "<KEY>"
PT = "A794CAEE6<KEY>62<KEY>1B<KEY>"
CT = "89A9BF6B893BC5E6FEF4C77F3D0F29A6"
f()
I = "45"
KEY = "<KEY>"
PT = "89A9BF6B893BC5E6FEF4C77F3D0F29A6"
CT = "5DBE44032769DF543EAD7AD13A5F3310"
f()
I = "46"
KEY = "<KEY>"
PT = "5DBE44032769DF543EAD7AD13A5F3310"
CT = "DEA4F3DA75EC7A8EAC3861A9912402CD"
f()
I = "47"
KEY = "<KEY>5F331089A9BF6B893BC5E6"
PT = "DEA4F3DA75EC7A8EAC3861A9912402CD"
CT = "FB66522C332FCC4C042ABE32FA9E902F"
f()
I = "48"
KEY = "<KEY>"
PT = "FB66522C332FCC4C042ABE32FA9E902F"
CT = "F0AB73301125FA21EF70BE5385FB76B6"
f()
I = "49"
KEY = "<KEY>"
PT = "F0AB73301125FA21EF70BE5385FB76B6"
CT = "E75449212BEEF9F4A390BD860A640941"
f()
I = "1"
KEY = "<KEY>"
PT = "0<KEY>00000000000000000000000000000"
CT = "57FF739D4DC92C1BD7FC01700CC8216F"
f()
I = "2"
KEY = "<KEY>000000000000000000000000000000000000000000"
PT = "57FF73<KEY>CC<KEY>F"
CT = "D43BB7556EA32E46F2A282B7D45B4E0D"
f()
I = "3"
KEY = "<KEY>"
PT = "D<KEY>BB<KEY>"
CT = "<KEY>"
f()
I = "4"
KEY = "<KEY>"
PT = "<KEY>"
CT = "<KEY>"
f()
I = "5"
KEY = "<KEY>"
PT = "<KEY>"
CT = "3059D6D61753B95<KEY>"
f()
I = "6"
KEY = "<KEY>"
PT = "3<KEY>"
CT = "E69465770505D7F80EF68CA38AB3A3D6"
f()
I = "7"
KEY = "<KEY>"
PT = "E69465770505D7F80EF68CA38AB3A3D6"
CT = "5AB67A5F8539A4A5FD9F0373BA463466"
f()
I = "8"
KEY = "<KEY>"
PT = "5AB67A5F8539A4A5FD9F0373BA463466"
CT = "DC096BCD99FC72F79936D4C748E75AF7"
f()
I = "9"
KEY = "<KEY>"
PT = "<KEY>"
CT = "C5A3E7CEE0F1B7260528A68FB4EA05F2"
f()
I = "10"
KEY = "<KEY>"
PT = "C5A3E7CEE0F1B7260528A68FB4EA05F2"
CT = "43D5CEC327B24AB90AD34A79D0469151"
f()
I = "11"
KEY = "<KEY>"
PT = "43D5CEC327B24AB90AD34A79D0469151"
CT = "05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
f()
I = "12"
KEY = "<KEY>"
PT = "05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
CT = "89D513B989E3CECE4D2E3E4C15D4E01C"
f()
I = "13"
KEY = "<KEY>FA10<KEY>3<KEY>"
PT = "89D513B989E3CECE4D2E3E4C15D4E01C"
CT = "E56E462AE2E66F800CE2224C4CAFACBE"
f()
I = "14"
KEY = "<KEY>"
PT = "E56E462AE2E66F800CE2224C4CAFACBE"
CT = "F5DBEE5414D79CA46BE17346FDC72BF5"
f()
I = "15"
KEY = "<KEY>"
PT = "F<KEY>"
CT = "C<KEY>80FC288<KEY>5<KEY>667B<KEY>"
f()
I = "16"
KEY = "<KEY>"
PT = "<KEY>"
CT = "2EF17A0D75440ECEC9AF18D29ABA3CDA"
f()
I = "17"
KEY = "<KEY>"
PT = "2EF17A0D75440ECEC9AF18D29ABA3CDA"
CT = "D653FDFF96BCF89A929F6377AB41967A"
f()
I = "18"
KEY = "<KEY>"
PT = "D653FDFF96BCF89A929F6377AB41967A"
CT = "36BBAD8838380C8793A7AEA0A11D04D2"
f()
I = "19"
KEY = "<KEY>"
PT = "36BBAD8838380C8793A7AEA0A11D04D2"
CT = "034EFECFC0EFC00D82C3345A8708AE78"
f()
I = "20"
KEY = "<KEY>"
PT = "034EFECFC0EFC00D82C3345A8708AE78"
CT = "EA3<KEY>"
f()
I = "21"
KEY = "<KEY>"
PT = "EA38887307337A2934A9EB802F6CFDDD"
CT = "7C279D47775AB4C26442E8A117943DD8"
f()
I = "22"
KEY = "<KEY>"
PT = "7C279D47775AB4C26442E8A117943DD8"
CT = "E3CF581056EBC1169CF451E930308726"
f()
I = "23"
KEY = "<KEY>"
PT = "E3CF581056EBC1169CF451E930308726"
CT = "65527B29D489C8CDDC62E5E7FFC4E924"
f()
I = "24"
KEY = "<KEY>943DD8"
PT = "65527B29D489C8CDDC62E5E7FFC4E924"
CT = "3B491F930230A0170819CACB48F9030C"
f()
I = "25"
KEY = "<KEY>"
PT = "3B491F930230A0170819CACB48F9030C"
CT = "A7C8193F35AF63B51D7F9DEDCEC85866"
f()
I = "26"
KEY = "<KEY>"
PT = "A7C8193F35AF63B51D7F9DEDCEC85866"
CT = "009A48F9A1C27F9AA5F9BD909C848B60"
f()
I = "27"
KEY = "<KEY>"
PT = "00<KEY>48<KEY>1C27<KEY>9AA5<KEY>9BD909C848<KEY>60"
CT = "3B507E987B3F827A093B26C85CDFF6C6"
f()
I = "28"
KEY = "<KEY>"
PT = "<KEY>"
CT = "EAFB3BA6C78E37214FE908669BC24F6F"
f()
I = "29"
KEY = "<KEY>"
PT = "EAFB3BA6C78E37214FE908669BC24F6F"
CT = "2B09F10D7F2B62A4D0DAFFB9F882B980"
f()
I = "30"
KEY = "<KEY>"
PT = "2B09F<KEY>0D7F2B62A4D0DAFFB9F882B980"
CT = "E6B9FE907009B2DC649412DD630AE576"
f()
I = "31"
KEY = "<KEY>"
PT = "E6B9FE907009B2DC649412DD<KEY>"
CT = "F2E4<KEY>8<KEY>724CD58<KEY>8<KEY>8<KEY>09ED4500F"
f()
I = "32"
KEY = "<KEY>"
PT = "F2E4<KEY>8<KEY>724CD58F85847<KEY>09ED4500F"
CT = "DC650<KEY>FCEBC2BD6E4<KEY>4605FCABE"
f()
I = "33"
KEY = "<KEY>"
PT = "DC<KEY>"
CT = "B7CFB31755FDA2AB0A67FBA09901A73E"
f()
I = "34"
KEY = "<KEY>"
PT = "B7CFB31755FDA2AB0A67FBA09901A73E"
CT = "9C7E7578F3D2923DCDABFED0A5EF86EE"
f()
I = "35"
KEY = "<KEY>"
PT = "<KEY>"
CT = "9465E9AE176483BD398C8033AA136F68"
f()
I = "36"
KEY = "<KEY>"
PT = "9465<KEY>76<KEY>83<KEY>3<KEY>13<KEY>68"
CT = "8551279DE9991805FCFC1937C52AE9D4"
f()
I = "37"
KEY = "<KEY>"
PT = "8551279DE9991805FCFC1937C52AE9D4"
CT = "A8<KEY>CF9D7"
f()
I = "38"
KEY = "<KEY>"
PT = "<KEY>"
CT = "EC89F0E5155425D4B92BD4B200C1A6E0"
f()
I = "39"
KEY = "<KEY>"
PT = "EC89F0E5155425D4B<KEY>2BD4B200C1A6E0"
CT = "8AF76A782061D38360C6C3CDDCBE1516"
f()
I = "40"
KEY = "<KEY>"
PT = "8AF76A782061D38360C6C3CDDCBE1516"
CT = "D4EEAA6A069659D5D88590DE75515631"
f()
I = "41"
KEY = "<KEY>"
PT = "D4EEAA6A069659D5D88590DE75515631"
CT = "259C021D37B077197B80FCDB07EA1AF2"
f()
I = "42"
KEY = "<KEY>"
PT = "25<KEY>"
CT = "300CC8B4171F0E9BD75710FAD033C570"
f()
I = "43"
KEY = "<KEY>"
PT = "3<KEY>0<KEY>"
CT = "5CFA5BD213A74F02E65390A4C14A1DF6"
f()
I = "44"
KEY = "<KEY>"
PT = "5CFA5BD213A74F02E65<KEY>0A4C14A1DF6"
CT = "A443EA1B2C5747CE7EC5F21D4FE0C147"
f()
I = "45"
KEY = "<KEY>"
PT = "A443EA1B2C5747CE7EC5F21D4FE0C147"
CT = "D2DED73E59319A8138E0331F0EA149EA"
f()
I = "46"
KEY = "<KEY>"
PT = "D2DED73E<KEY>"
CT = "2E2158BC3E5FC714C1EEECA0EA696D48"
f()
I = "47"
KEY = "<KEY>"
PT = "2E<KEY>8"
CT = "248<KEY>"
f()
I = "48"
KEY = "<KEY>"
PT = "2<KEY>C"
CT = "431058F4DBC7F734DA4F02F04CC4F459"
f()
I = "49"
KEY = "<KEY>"
PT = "<KEY>"
CT = "37FE26FF1CF66175F5DDF4C33B97A205"
f()
console.log "exports.data = #{JSON.stringify out, null, 4};"
| true | #
# From here: http://www.schneier.com/code/twofish-kat.zip
# FILENAME: "ecb_tbl.txt"
#
#Electronic Codebook (ECB) Mode
#Tables Known Answer Test
#Tests permutation tables and MDS matrix multiply tables.
#"
#Algorithm Name: TWOFISH
#Principal Submitter: PI:NAME:<NAME>END_PI, Counterpane Systems
#
#
out = []
f = () ->
out.push { key : KEY, plaintext : PT, ciphertext : CT }
I = "1"
KEY = "PI:KEY:<KEY>END_PI"
PT = "00000000000000000000000000000000"
CT = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
f()
I = "2"
KEY = "PI:KEY:<KEY>END_PI"
PT = "9F589F5CF6122C32B6BFEC2F2AE8C35A"
CT = "D491DB16E7B1C39E86CB086B789F5PI:KEY:<KEY>END_PI19"
f()
I = "3"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D491DB16E7B1C39E86CB086B789F5419"
CT = "019F9809DE1711858FAAC3A3BA20FBC3"
f()
I = "4"
KEY = "PI:KEY:<KEY>END_PI9"
PT = "PI:KEY:<KEY>END_PI"
CT = "6363977DE839486297E661C6C9D668EB"
f()
I = "5"
KEY = "PI:KEY:<KEY>END_PI"
PT = "6363977DE839486297E661C6C9D668EB"
CT = "PI:KEY:<KEY>END_PI"
f()
I = "6"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "5449ECA008FF5921155F598AF4CED4D0"
f()
I = "7"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5449ECA008FF5921155F598AF4CED4D0"
CT = "6600522E97AEB3094ED5F92AFCBCDD10"
f()
I = "8"
KEY = "PI:KEY:<KEY>END_PI"
PT = "6600522E97AEB3094ED5F92AFCBCDD10"
CT = "34C8A5FB2D3D08A170D120AC6D26DBFA"
f()
I = "9"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "28PI:KEY:<KEY>END_PI0PI:KEY:<KEY>END_PIEFPI:KEY:<KEY>END_PI1"
f()
I = "10"
KEY = "PI:KEY:<KEY>END_PI"
PT = "285PI:KEY:<KEY>END_PI0BPI:KEY:<KEY>END_PI1B42EFPI:KEY:<KEY>END_PI77PI:KEY:<KEY>END_PI6DPI:KEY:<KEY>END_PI407PI:KEY:<KEY>END_PI5PI:KEY:<KEY>END_PI1"
CT = "8A8AB983310ED78C8C0ECDE030B8DCA4"
f()
I = "11"
KEY = "PI:KEY:<KEY>END_PI"
PT = "8A8AB983310ED78C8C0ECDE030B8DCA4"
CT = "48C758A6DFC1DD8B259PI:KEY:<KEY>END_PI165PI:KEY:<KEY>END_PI2B3C"
f()
I = "12"
KEY = "PI:KEY:<KEY>END_PI"
PT = "4PI:KEY:<KEY>END_PI"
CT = "CEPI:KEY:<KEY>END_PI3C6PI:KEY:<KEY>END_PICPI:KEY:<KEY>END_PI80BBCPI:KEY:<KEY>END_PI"
f()
I = "13"
KEY = "PI:KEY:<KEY>END_PI"
PT = "CEPI:KEY:<KEY>END_PI"
CT = "C7ABD74AA060FPI:KEY:<KEY>END_PI"
f()
I = "14"
KEY = "PI:KEY:<KEY>END_PI"
PT = "CPI:KEY:<KEY>END_PI"
CT = "D0F8B3B6409EBCB666D29C916565ABFC"
f()
I = "15"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D0F8B3B6409EBCB666D29C916565ABFC"
CT = "DD42662908070054544FE09DA4263130"
f()
I = "16"
KEY = "PI:KEY:<KEY>END_PI"
PT = "DD42662908070054544FE09DA4263130"
CT = "PI:KEY:<KEY>END_PI"
f()
I = "17"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "57B9A18EE97D90F435A16F69F0AC6F16"
f()
I = "18"
KEY = "PI:KEY:<KEY>END_PI"
PT = "57B9A18EE97D9PI:KEY:<KEY>END_PI"
CT = "06181F0D53267ABD8F3BB28455B198AD"
f()
I = "19"
KEY = "PI:KEY:<KEY>END_PI"
PT = "06181F0D53267ABD8F3BB28455B198AD"
CT = "81A12D8449E9040BAAE7196338D8C8F2"
f()
I = "20"
KEY = "PI:KEY:<KEY>END_PI"
PT = "81A12D8449E9040BAAEPI:KEY:<KEY>END_PI6PI:KEY:<KEY>END_PI8PI:KEY:<KEY>END_PI8C8PI:KEY:<KEY>END_PI2"
CT = "BE422651C56F2622DA0201815A95A820"
f()
I = "21"
KEY = "PI:KEY:<KEY>END_PI"
PT = "BE422651C56F2622DA0201815A95A820"
CT = "113B19F2D778473990480CEE4DA238D1"
f()
I = "22"
KEY = "PI:KEY:<KEY>END_PI"
PT = "113B19F2D778473990480CEE4DA238D1"
CT = "E6942E9A86E544CF3E3364F20BE011DF"
f()
I = "23"
KEY = "PI:KEY:<KEY>END_PI"
PT = "EPI:KEY:<KEY>END_PI"
CT = "87CDC6AA487BFD0EA70188257D9B3859"
f()
I = "24"
KEY = "PI:KEY:<KEY>END_PI"
PT = "8PI:KEY:<KEY>END_PI"
CT = "D5E2701253DD75A11A4CFB243714BD14"
f()
I = "25"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D5E2701253DD75A11A4CFB243714BD14"
CT = "FD24812EEA107A9E6FAB8EABE0F0F48C"
f()
I = "26"
KEY = "PI:KEY:<KEY>END_PI"
PT = "FD24812EEA107A9E6FAB8EABE0F0F48C"
CT = "DAFA84E31A297F372C3A807100CD783D"
f()
I = "27"
KEY = "PI:KEY:<KEY>END_PI"
PT = "DAPI:KEY:<KEY>END_PI84EPI:KEY:<KEY>END_PI2PI:KEY:<KEY>END_PICD7PI:KEY:<KEY>END_PI"
CT = "A55ED2D955EC8950FC0CC93B76ACBF91"
f()
I = "28"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A55ED2D955EC8950FC0CC93B76ACBF91"
CT = "2ABEA2A4BF27ABDC6B6F278993264744"
f()
I = "29"
KEY = "PI:KEY:<KEY>END_PI"
PT = "2ABEA2A4BF27ABDC6B6F278993264744"
CT = "045383E219321D5A4435C0E491E7DE10"
f()
I = "30"
KEY = "PI:KEY:<KEY>END_PI"
PT = "045383E219321D5A4435C0E491E7DE10"
CT = "7460A4CD4F312F32B1C7A94PI:KEY:<KEY>END_PI00PI:KEY:<KEY>END_PIE934"
f()
I = "31"
KEY = "PI:KEY:<KEY>END_PI"
PT = "7PI:KEY:<KEY>END_PI"
CT = "6BBF9186D32C2C5895649D746566050A"
f()
I = "32"
KEY = "PI:KEY:<KEY>END_PIC7A94FA004E934"
PT = "6BBF9186D32C2C5895649D746566050A"
CT = "CDBDD19ACF40B8AC0328C80054266068"
f()
I = "33"
KEY = "PI:KEY:<KEY>END_PI"
PT = "CDBDD19ACF40B8AC0328C80054266068"
CT = "1D2836CAE4223EAB5066867A71B1A1C3"
f()
I = "34"
KEY = "PI:KEY:<KEY>END_PI"
PT = "1D2836CAE4223EAB5066867A71B1A1C3"
CT = "2D7F37121D0D2416D5E2767FF202061B"
f()
I = "35"
KEY = "PI:KEY:<KEY>END_PI"
PT = "2D7F37121D0D2416D5E2767FF202061B"
CT = "D70736D1ABC7427A121CC816CD66D7FF"
f()
I = "36"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D70736D1ABC7427A121CC816CD66D7FF"
CT = "AC6CA71CBPI:KEY:<KEY>END_PI"
f()
I = "37"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "3PI:KEY:<KEY>END_PI"
f()
I = "38"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "934B7DB4B3544854DBCA81C4C5DE4EB1"
f()
I = "39"
KEY = "PI:KEY:<KEY>END_PI"
PT = "9PI:KEY:<KEY>END_PI"
CT = "18759824AD9823D5961F84377D7EAEBF"
f()
I = "40"
KEY = "PI:KEY:<KEY>END_PI"
PT = "18759824AD9823D5961F84377D7EAEBF"
CT = "DEDDAC6029B0PI:KEY:<KEY>END_PI574D9BABB0PI:KEY:<KEY>END_PIDC6CA6C"
f()
I = "41"
KEY = "PI:KEY:<KEY>END_PI"
PT = "DEDDAC6029B0PI:KEY:<KEY>END_PI4D9BPI:KEY:<KEY>END_PI"
CT = "5EA82EEA2244DED42CCA2F835D5615DF"
f()
I = "42"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5EA82EEA2244DED42CCA2F835D5615DF"
CT = "1E3853F7FFA57091771DD8CDEE9414DE"
f()
I = "43"
KEY = "PI:KEY:<KEY>END_PI"
PT = "1E3853F7FFA57091771DD8CDEE9414DE"
CT = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
f()
I = "44"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5C2EBBF75D31F30B5EA26EAC8782D8D1"
CT = "3A3CFA1F13A136C94D76E5FA4A1109FF"
f()
I = "45"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3A3CFA1F13A136C94D76E5FA4A1109FF"
CT = "91630CF96003B8032E695797E313A553"
f()
I = "46"
KEY = "PI:KEY:<KEY>END_PIA1109FF"
PT = "91630CF96003B8032E695797E313A553"
CT = "137A24CA47CD12BE818DF4D2F4355960"
f()
I = "47"
KEY = "PI:KEY:<KEY>END_PI"
PT = "137A24CA47CD12BE818DF4D2F4355960"
CT = "BCA724A54533C6987E14AA8279PI:KEY:<KEY>END_PI2PI:KEY:<KEY>END_PI"
f()
I = "48"
KEY = "PI:KEY:<KEY>END_PI"
PT = "BCA724A54533C6987E14AA827952F921"
CT = "6B459286F3FFD28D49F15B1581B08E42"
f()
I = "49"
KEY = "PI:KEY:<KEY>END_PI"
PT = "6B459286F3FFD28D49F15B1581B08E42"
CT = "5D9D4EEFFA9151575524F115815A12E0"
f()
I = "1"
KEY = "PI:KEY:<KEY>END_PI"
PT = "00000000000000000000000000000000"
CT = "EPI:KEY:<KEY>END_PI"
f()
I = "2"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "8PI:KEY:<KEY>END_PI88"
f()
I = "3"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "PI:KEY:<KEY>END_PI"
f()
I = "4"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "182B02D81497EA45F9DAACDC29193A65"
f()
I = "5"
KEY = "PI:KEY:<KEY>END_PI"
PT = "182B02D81497EA45F9DAACDC29193API:KEY:<KEY>END_PI"
CT = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63"
f()
I = "6"
KEY = "PI:KEY:<KEY>END_PI"
PT = "7AFF7A70CA2FF28AC31DD8AE5DAAAB63"
CT = "D1079B789F666649B6BD7D1629F1F77E"
f()
I = "7"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D1079B789F666649B6BD7D1629F1F77E"
CT = "3AF6F7CE5BD35EF18BEC6FA787AB506B"
f()
I = "8"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3AF6F7CE5BD35EF18BEC6FA787AB506B"
CT = "AE8109BFDA85C1F2C5038B34ED691BFF"
f()
I = "9"
KEY = "PI:KEY:<KEY>END_PI"
PT = "AE8109BFDA85C1F2C5038B34ED691BFF"
CT = "893FD67B98C550073571BD631263FC78"
f()
I = "10"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "16434FC9C8841A63DPI:KEY:<KEY>END_PI"
f()
I = "11"
KEY = "PI:KEY:<KEY>END_PI"
PT = "16PI:KEY:<KEY>END_PI9C8841A6PI:KEY:<KEY>END_PI"
CT = "9594CF62D48ACD347A68A3161F0F3EE7"
f()
I = "12"
KEY = "PI:KEY:<KEY>END_PI"
PT = "9594CF62D48ACD347A68A3161F0F3EE7"
CT = "B23E8C2C731CPI:KEY:<KEY>END_PI2B88D77D208"
f()
I = "13"
KEY = "PI:KEY:<KEY>END_PI"
PT = "B23E8C2C731C514017D1F2B88D77D208"
CT = "93CC592BC96D95FA8AC32DA894F6AB89"
f()
I = "14"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "26516E6BD4AEF86AF4F4AD58FA41A14C"
f()
I = "15"
KEY = "PI:KEY:<KEY>END_PI"
PT = "26516E6BD4AEF86AF4F4AD58FA41A14C"
CT = "00A8FFFAB8616BE710A6592438FC40BE"
f()
I = "16"
KEY = "PI:KEY:<KEY>END_PI"
PT = "00A8FFFAB8616BE710A6592438FC40BE"
CT = "A19B811C77482D97C842EC62DB2EDCCE"
f()
I = "17"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A19B81PI:KEY:<KEY>END_PIC77482D97C842EC62DB2EDCCE"
CT = "D7FF438678D818CAA26A676342F98E8B"
f()
I = "18"
KEY = "PI:KEY:<KEY>END_PIEC62DB2EDCCE00A8FFFAB8616BE7"
PT = "D7FF438678D818CAA26A676342F98E8B"
CT = "PI:KEY:<KEY>END_PI"
f()
I = "19"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "5500PI:KEY:<KEY>END_PI03FEF1BACPI:KEY:<KEY>END_PI"
f()
I = "20"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "7282B2F3E766C83664930A19D201D7E7"
f()
I = "21"
KEY = "PI:KEY:<KEY>END_PI"
PT = "7282B2F3E766C83664930A19D201D7E7"
CT = "6796085C32FBDD2AB43E81EAC1262743"
f()
I = "22"
KEY = "PI:KEY:<KEY>END_PI00AF1C79503FEF"
PT = "6796085C32FBDD2AB43E81EAC1262743"
CT = "5839F9E148B9FD2B5A52751D4F178FDC"
f()
I = "23"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5839F9E148B9FD2B5A52751D4F178FDC"
CT = "A88F34056742E55408A7A9E7B6D4C8C0"
f()
I = "24"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A88F34056742E55408A7A9E7B6D4C8C0"
CT = "9C8C304CB1F937C6E4PI:KEY:<KEY>END_PI2PI:KEY:<KEY>END_PI459FA8872F"
f()
I = "25"
KEY = "PI:KEY:<KEY>END_PI"
PT = "9CPI:KEY:<KEY>END_PICPI:KEY:<KEY>END_PI0PI:KEY:<KEY>END_PICB1F937C6E4PI:KEY:<KEY>END_PI8459FA8872F"
CT = "EA3668C0D96529A7F3BF0F7C2B5C5BE2"
f()
I = "26"
KEY = "PI:KEY:<KEY>END_PI"
PT = "EA3668C0D96529A7F3BF0F7C2B5C5BE2"
CT = "A8FB6EEFCAAF9C4041072D570984CED2"
f()
I = "27"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A8FB6EEFCAAF9C4041072D570984CED2"
CT = "ABF4662E5D50F71B15FE3B428AFE3500"
f()
I = "28"
KEY = "PI:KEY:<KEY>END_PI"
PT = "ABF4662E5D50F71B15FE3B428AFE3500"
CT = "3B3AED23958DA6E2FA4493BCBE59A806"
f()
I = "29"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3B3AED23958DA6E2FA4493BCBE59A806"
CT = "CFBF446E33C3DCD4DD5161CA00D4BA8F"
f()
I = "30"
KEY = "PI:KEY:<KEY>END_PI"
PT = "CFBFPI:KEY:<KEY>END_PI"
CT = "072ADBFA7EB962BA199AFC720339FF29"
f()
I = "31"
KEY = "PI:KEY:<KEY>END_PI"
PT = "072ADBFA7EB962BA199AFC720339FF29"
CT = "AAB346D9123A8140AC563EF17E7054C8"
f()
I = "32"
KEY = "PI:KEY:<KEY>END_PI"
PT = "AAB346D9123A8140AC563EF17E7054C8"
CT = "3C7DDC5FE6E5888FE61DEDA0C69CD320"
f()
I = "33"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3C7DDC5FE6E5888FE61DEDA0C69CD320"
CT = "34AA645C7E3532098ADA91BB128ED821"
f()
I = "34"
KEY = "PI:KEY:<KEY>END_PI"
PT = "34AA645C7E3532098ADA91BB128ED821"
CT = "7A5D12E315038PI:KEY:<KEY>END_PI"
f()
I = "35"
KEY = "PI:KEY:<KEY>END_PI"
PT = "7A5D12E315038522DA01EC0834B1322C"
CT = "86E235A1CB091FF7FE6FBBCA0D73BE58"
f()
I = "36"
KEY = "PI:KEY:<KEY>END_PI"
PT = "86E235A1CB091FF7FE6FBBCA0D73BE58"
CT = "F3CE81CACE5D6BA9C558111DCDB22F5D"
f()
I = "37"
KEY = "PI:KEY:<KEY>END_PI"
PT = "F3CE81CACE5D6BA9C558111DCDB22F5D"
CT = "FB30A5D67D5F5BE4048C77E2AD3BC9D1"
f()
I = "38"
KEY = "PI:KEY:<KEY>END_PI"
PT = "FB30A5D67D5F5BE4048C77E2AD3BC9D1"
CT = "31E69DD3D2D90E81C9EBFAC257E9823D"
f()
I = "3PI:KEY:<KEY>END_PI"
KEY = "PI:KEY:<KEY>END_PI"
PT = "31E69DD3D2D90E81C9EBFAC257E9823D"
CT = "87579B3F19A9CDE12BB882FFEAF665AE"
f()
I = "40"
KEY = "PI:KEY:<KEY>END_PI"
PT = "87579B3F19A9CDE12BB882FFEAF665AE"
CT = "24A8572AA844FEF25F7670FAE30F1CD2"
f()
I = "41"
KEY = "PI:KEY:<KEY>END_PI"
PT = "24A8572AA844FEF25F7670FAE30F1CD2"
CT = "0CFA32E5E33F3B2DAC9F34D25979319A"
f()
I = "42"
KEY = "PI:KEY:<KEY>END_PI"
PT = "0CFA32E5E33F3B2DAC9F34D25979319A"
CT = "3C64D7FC881B9B82ABA21FF122B98F54"
f()
I = "43"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3PI:KEY:<KEY>END_PI"
CT = "A794CAEE6756281B7A64894E4E4F70A8"
f()
I = "44"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A794CAEE6PI:KEY:<KEY>END_PI62PI:KEY:<KEY>END_PI1BPI:KEY:<KEY>END_PI"
CT = "89A9BF6B893BC5E6FEF4C77F3D0F29A6"
f()
I = "45"
KEY = "PI:KEY:<KEY>END_PI"
PT = "89A9BF6B893BC5E6FEF4C77F3D0F29A6"
CT = "5DBE44032769DF543EAD7AD13A5F3310"
f()
I = "46"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5DBE44032769DF543EAD7AD13A5F3310"
CT = "DEA4F3DA75EC7A8EAC3861A9912402CD"
f()
I = "47"
KEY = "PI:KEY:<KEY>END_PI5F331089A9BF6B893BC5E6"
PT = "DEA4F3DA75EC7A8EAC3861A9912402CD"
CT = "FB66522C332FCC4C042ABE32FA9E902F"
f()
I = "48"
KEY = "PI:KEY:<KEY>END_PI"
PT = "FB66522C332FCC4C042ABE32FA9E902F"
CT = "F0AB73301125FA21EF70BE5385FB76B6"
f()
I = "49"
KEY = "PI:KEY:<KEY>END_PI"
PT = "F0AB73301125FA21EF70BE5385FB76B6"
CT = "E75449212BEEF9F4A390BD860A640941"
f()
I = "1"
KEY = "PI:KEY:<KEY>END_PI"
PT = "0PI:KEY:<KEY>END_PI00000000000000000000000000000"
CT = "57FF739D4DC92C1BD7FC01700CC8216F"
f()
I = "2"
KEY = "PI:KEY:<KEY>END_PI000000000000000000000000000000000000000000"
PT = "57FF73PI:KEY:<KEY>END_PICCPI:KEY:<KEY>END_PIF"
CT = "D43BB7556EA32E46F2A282B7D45B4E0D"
f()
I = "3"
KEY = "PI:KEY:<KEY>END_PI"
PT = "DPI:KEY:<KEY>END_PIBBPI:KEY:<KEY>END_PI"
CT = "PI:KEY:<KEY>END_PI"
f()
I = "4"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "PI:KEY:<KEY>END_PI"
f()
I = "5"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "3059D6D61753B95PI:KEY:<KEY>END_PI"
f()
I = "6"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3PI:KEY:<KEY>END_PI"
CT = "E69465770505D7F80EF68CA38AB3A3D6"
f()
I = "7"
KEY = "PI:KEY:<KEY>END_PI"
PT = "E69465770505D7F80EF68CA38AB3A3D6"
CT = "5AB67A5F8539A4A5FD9F0373BA463466"
f()
I = "8"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5AB67A5F8539A4A5FD9F0373BA463466"
CT = "DC096BCD99FC72F79936D4C748E75AF7"
f()
I = "9"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "C5A3E7CEE0F1B7260528A68FB4EA05F2"
f()
I = "10"
KEY = "PI:KEY:<KEY>END_PI"
PT = "C5A3E7CEE0F1B7260528A68FB4EA05F2"
CT = "43D5CEC327B24AB90AD34A79D0469151"
f()
I = "11"
KEY = "PI:KEY:<KEY>END_PI"
PT = "43D5CEC327B24AB90AD34A79D0469151"
CT = "05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
f()
I = "12"
KEY = "PI:KEY:<KEY>END_PI"
PT = "05BA7BE9BE5274FBEB4AC5FD5FAAFA10"
CT = "89D513B989E3CECE4D2E3E4C15D4E01C"
f()
I = "13"
KEY = "PI:KEY:<KEY>END_PIFA10PI:KEY:<KEY>END_PI3PI:KEY:<KEY>END_PI"
PT = "89D513B989E3CECE4D2E3E4C15D4E01C"
CT = "E56E462AE2E66F800CE2224C4CAFACBE"
f()
I = "14"
KEY = "PI:KEY:<KEY>END_PI"
PT = "E56E462AE2E66F800CE2224C4CAFACBE"
CT = "F5DBEE5414D79CA46BE17346FDC72BF5"
f()
I = "15"
KEY = "PI:KEY:<KEY>END_PI"
PT = "FPI:KEY:<KEY>END_PI"
CT = "CPI:KEY:<KEY>END_PI80FC288PI:KEY:<KEY>END_PI5PI:KEY:<KEY>END_PI667BPI:KEY:<KEY>END_PI"
f()
I = "16"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "2EF17A0D75440ECEC9AF18D29ABA3CDA"
f()
I = "17"
KEY = "PI:KEY:<KEY>END_PI"
PT = "2EF17A0D75440ECEC9AF18D29ABA3CDA"
CT = "D653FDFF96BCF89A929F6377AB41967A"
f()
I = "18"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D653FDFF96BCF89A929F6377AB41967A"
CT = "36BBAD8838380C8793A7AEA0A11D04D2"
f()
I = "19"
KEY = "PI:KEY:<KEY>END_PI"
PT = "36BBAD8838380C8793A7AEA0A11D04D2"
CT = "034EFECFC0EFC00D82C3345A8708AE78"
f()
I = "20"
KEY = "PI:KEY:<KEY>END_PI"
PT = "034EFECFC0EFC00D82C3345A8708AE78"
CT = "EA3PI:KEY:<KEY>END_PI"
f()
I = "21"
KEY = "PI:KEY:<KEY>END_PI"
PT = "EA38887307337A2934A9EB802F6CFDDD"
CT = "7C279D47775AB4C26442E8A117943DD8"
f()
I = "22"
KEY = "PI:KEY:<KEY>END_PI"
PT = "7C279D47775AB4C26442E8A117943DD8"
CT = "E3CF581056EBC1169CF451E930308726"
f()
I = "23"
KEY = "PI:KEY:<KEY>END_PI"
PT = "E3CF581056EBC1169CF451E930308726"
CT = "65527B29D489C8CDDC62E5E7FFC4E924"
f()
I = "24"
KEY = "PI:KEY:<KEY>END_PI943DD8"
PT = "65527B29D489C8CDDC62E5E7FFC4E924"
CT = "3B491F930230A0170819CACB48F9030C"
f()
I = "25"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3B491F930230A0170819CACB48F9030C"
CT = "A7C8193F35AF63B51D7F9DEDCEC85866"
f()
I = "26"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A7C8193F35AF63B51D7F9DEDCEC85866"
CT = "009A48F9A1C27F9AA5F9BD909C848B60"
f()
I = "27"
KEY = "PI:KEY:<KEY>END_PI"
PT = "00PI:KEY:<KEY>END_PI48PI:KEY:<KEY>END_PI1C27PI:KEY:<KEY>END_PI9AA5PI:KEY:<KEY>END_PI9BD909C848PI:KEY:<KEY>END_PI60"
CT = "3B507E987B3F827A093B26C85CDFF6C6"
f()
I = "28"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "EAFB3BA6C78E37214FE908669BC24F6F"
f()
I = "29"
KEY = "PI:KEY:<KEY>END_PI"
PT = "EAFB3BA6C78E37214FE908669BC24F6F"
CT = "2B09F10D7F2B62A4D0DAFFB9F882B980"
f()
I = "30"
KEY = "PI:KEY:<KEY>END_PI"
PT = "2B09FPI:KEY:<KEY>END_PI0D7F2B62A4D0DAFFB9F882B980"
CT = "E6B9FE907009B2DC649412DD630AE576"
f()
I = "31"
KEY = "PI:KEY:<KEY>END_PI"
PT = "E6B9FE907009B2DC649412DDPI:KEY:<KEY>END_PI"
CT = "F2E4PI:KEY:<KEY>END_PI8PI:KEY:<KEY>END_PI724CD58PI:KEY:<KEY>END_PI8PI:KEY:<KEY>END_PI8PI:KEY:<KEY>END_PI09ED4500F"
f()
I = "32"
KEY = "PI:KEY:<KEY>END_PI"
PT = "F2E4PI:KEY:<KEY>END_PI8PI:KEY:<KEY>END_PI724CD58F85847PI:KEY:<KEY>END_PI09ED4500F"
CT = "DC650PI:KEY:<KEY>END_PIFCEBC2BD6E4PI:KEY:<KEY>END_PI4605FCABE"
f()
I = "33"
KEY = "PI:KEY:<KEY>END_PI"
PT = "DCPI:KEY:<KEY>END_PI"
CT = "B7CFB31755FDA2AB0A67FBA09901A73E"
f()
I = "34"
KEY = "PI:KEY:<KEY>END_PI"
PT = "B7CFB31755FDA2AB0A67FBA09901A73E"
CT = "9C7E7578F3D2923DCDABFED0A5EF86EE"
f()
I = "35"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "9465E9AE176483BD398C8033AA136F68"
f()
I = "36"
KEY = "PI:KEY:<KEY>END_PI"
PT = "9465PI:KEY:<KEY>END_PI76PI:KEY:<KEY>END_PI83PI:KEY:<KEY>END_PI3PI:KEY:<KEY>END_PI13PI:KEY:<KEY>END_PI68"
CT = "8551279DE9991805FCFC1937C52AE9D4"
f()
I = "37"
KEY = "PI:KEY:<KEY>END_PI"
PT = "8551279DE9991805FCFC1937C52AE9D4"
CT = "A8PI:KEY:<KEY>END_PICF9D7"
f()
I = "38"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "EC89F0E5155425D4B92BD4B200C1A6E0"
f()
I = "39"
KEY = "PI:KEY:<KEY>END_PI"
PT = "EC89F0E5155425D4BPI:KEY:<KEY>END_PI2BD4B200C1A6E0"
CT = "8AF76A782061D38360C6C3CDDCBE1516"
f()
I = "40"
KEY = "PI:KEY:<KEY>END_PI"
PT = "8AF76A782061D38360C6C3CDDCBE1516"
CT = "D4EEAA6A069659D5D88590DE75515631"
f()
I = "41"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D4EEAA6A069659D5D88590DE75515631"
CT = "259C021D37B077197B80FCDB07EA1AF2"
f()
I = "42"
KEY = "PI:KEY:<KEY>END_PI"
PT = "25PI:KEY:<KEY>END_PI"
CT = "300CC8B4171F0E9BD75710FAD033C570"
f()
I = "43"
KEY = "PI:KEY:<KEY>END_PI"
PT = "3PI:KEY:<KEY>END_PI0PI:KEY:<KEY>END_PI"
CT = "5CFA5BD213A74F02E65390A4C14A1DF6"
f()
I = "44"
KEY = "PI:KEY:<KEY>END_PI"
PT = "5CFA5BD213A74F02E65PI:KEY:<KEY>END_PI0A4C14A1DF6"
CT = "A443EA1B2C5747CE7EC5F21D4FE0C147"
f()
I = "45"
KEY = "PI:KEY:<KEY>END_PI"
PT = "A443EA1B2C5747CE7EC5F21D4FE0C147"
CT = "D2DED73E59319A8138E0331F0EA149EA"
f()
I = "46"
KEY = "PI:KEY:<KEY>END_PI"
PT = "D2DED73EPI:KEY:<KEY>END_PI"
CT = "2E2158BC3E5FC714C1EEECA0EA696D48"
f()
I = "47"
KEY = "PI:KEY:<KEY>END_PI"
PT = "2EPI:KEY:<KEY>END_PI8"
CT = "248PI:KEY:<KEY>END_PI"
f()
I = "48"
KEY = "PI:KEY:<KEY>END_PI"
PT = "2PI:KEY:<KEY>END_PIC"
CT = "431058F4DBC7F734DA4F02F04CC4F459"
f()
I = "49"
KEY = "PI:KEY:<KEY>END_PI"
PT = "PI:KEY:<KEY>END_PI"
CT = "37FE26FF1CF66175F5DDF4C33B97A205"
f()
console.log "exports.data = #{JSON.stringify out, null, 4};"
|
[
{
"context": "# Try POH\n# author: Leonardone @ NEETSDKASU\n# ==================================",
"end": 30,
"score": 0.9992059469223022,
"start": 20,
"tag": "NAME",
"value": "Leonardone"
},
{
"context": "# Try POH\n# author: Leonardone @ NEETSDKASU\n# ==============================... | POH8/Mizugi/Main.coffee | neetsdkasu/Paiza-POH-MyAnswers | 3 | # Try POH
# author: Leonardone @ NEETSDKASU
# ===================================================
solve = (mr) ->
[n, m] = mr.gis()
s = mr.gs()
g = mr.gs()
x = (0 for _ in [0 .. 27])
a = 'a'.charCodeAt 0
for c in s
x[c.charCodeAt(0) - a]++
for c in g
x[c.charCodeAt(0) - a]--
needs = 0
needs -= i for i in x when i < 0
console.log needs
# ===================================================
(() ->
buf = ''
process.stdin.resume()
process.stdin.setEncoding 'utf8'
process.stdin.on 'data', (chunk) -> buf += chunk.toString()
process.stdin.on 'end', () ->
lines = buf.split '\n'
cur = 0
gs = -> lines[cur++]
gi = -> parseInt gs()
gss = -> gs().split ' '
gis = -> gss().map (x) -> parseInt x
ngt = (n, f) -> f() for _ in [1..n]
ngs = (n) -> ngt n, gs
ngi = (n) -> ngt n, gi
ngss = (n) -> ngt n, gss
ngis = (n) -> ngt n, gis
solve "gs": gs, "gi": gi, "gss": gss, "gis": gis, "ngs": ngs, "ngi": ngi, "ngss": ngss, "ngis": ngis
)()
# ===================================================
| 189946 | # Try POH
# author: <NAME> @ NEETSDKASU
# ===================================================
solve = (mr) ->
[n, m] = mr.gis()
s = mr.gs()
g = mr.gs()
x = (0 for _ in [0 .. 27])
a = 'a'.charCodeAt 0
for c in s
x[c.charCodeAt(0) - a]++
for c in g
x[c.charCodeAt(0) - a]--
needs = 0
needs -= i for i in x when i < 0
console.log needs
# ===================================================
(() ->
buf = ''
process.stdin.resume()
process.stdin.setEncoding 'utf8'
process.stdin.on 'data', (chunk) -> buf += chunk.toString()
process.stdin.on 'end', () ->
lines = buf.split '\n'
cur = 0
gs = -> lines[cur++]
gi = -> parseInt gs()
gss = -> gs().split ' '
gis = -> gss().map (x) -> parseInt x
ngt = (n, f) -> f() for _ in [1..n]
ngs = (n) -> ngt n, gs
ngi = (n) -> ngt n, gi
ngss = (n) -> ngt n, gss
ngis = (n) -> ngt n, gis
solve "gs": gs, "gi": gi, "gss": gss, "gis": gis, "ngs": ngs, "ngi": ngi, "ngss": ngss, "ngis": ngis
)()
# ===================================================
| true | # Try POH
# author: PI:NAME:<NAME>END_PI @ NEETSDKASU
# ===================================================
solve = (mr) ->
[n, m] = mr.gis()
s = mr.gs()
g = mr.gs()
x = (0 for _ in [0 .. 27])
a = 'a'.charCodeAt 0
for c in s
x[c.charCodeAt(0) - a]++
for c in g
x[c.charCodeAt(0) - a]--
needs = 0
needs -= i for i in x when i < 0
console.log needs
# ===================================================
(() ->
buf = ''
process.stdin.resume()
process.stdin.setEncoding 'utf8'
process.stdin.on 'data', (chunk) -> buf += chunk.toString()
process.stdin.on 'end', () ->
lines = buf.split '\n'
cur = 0
gs = -> lines[cur++]
gi = -> parseInt gs()
gss = -> gs().split ' '
gis = -> gss().map (x) -> parseInt x
ngt = (n, f) -> f() for _ in [1..n]
ngs = (n) -> ngt n, gs
ngi = (n) -> ngt n, gi
ngss = (n) -> ngt n, gss
ngis = (n) -> ngt n, gis
solve "gs": gs, "gi": gi, "gss": gss, "gis": gis, "ngs": ngs, "ngi": ngi, "ngss": ngss, "ngis": ngis
)()
# ===================================================
|
[
{
"context": "pyright 2014 VictorOps, Inc.\n# https://github.com/victorops/hubot-victorops/blob/master/LICENSE\n#============",
"end": 139,
"score": 0.9995520114898682,
"start": 130,
"tag": "USERNAME",
"value": "victorops"
},
{
"context": " 'wss://chat.victorops.com/chat' )\n @pas... | src/victorops.coffee | victorops/hubot-victorops | 2 | #==========================================================================
# Copyright 2014 VictorOps, Inc.
# https://github.com/victorops/hubot-victorops/blob/master/LICENSE
#==========================================================================
Path = require 'path'
Readline = require 'readline'
WebSocket = require 'ws'
{Adapter,Robot,TextMessage} = require 'hubot'
class Shell
constructor: (robot, vo) ->
@robot = robot
stdin = process.openStdin()
stdout = process.stdout
@vo = vo
@user = @robot.brain.userForId '1', name: 'Shell', room: 'Shell'
process.on 'uncaughtException', (err) =>
@robot.logger.error err.stack
@repl = Readline.createInterface stdin, stdout, null
@repl.on 'close', =>
@robot.logger.info()
stdin.destroy()
@robot.shutdown()
process.exit 0
@repl.on 'line', (buffer) =>
if buffer.trim().length > 0
@repl.close() if buffer.toLowerCase() is 'exit'
@vo.send( @robot.name, buffer )
@robot.receive new TextMessage @user, buffer, 'messageId'
@repl.prompt()
@repl.setPrompt "#{@robot.name} >> "
@repl.prompt()
prompt: ->
@repl.prompt()
class VictorOps extends Adapter
constructor: (robot) ->
@wsURL = @envWithDefault( process.env.HUBOT_VICTOROPS_URL, 'wss://chat.victorops.com/chat' )
@password = process.env.HUBOT_VICTOROPS_KEY
@robot = robot
@loggedIn = false
@loginAttempts = @getLoginAttempts()
@loginRetryInterval = @envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_INTERVAL, 5 ) * 1000
@pongLimit = @envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_PONG_LIMIT, 17000 )
@rcvdStatusList = false
super robot
envWithDefault: (envVar, defVal) ->
if (envVar?)
envVar
else
defVal
envIntWithDefault: (envVar, defVal) ->
parseInt( @envWithDefault( envVar, "#{defVal}" ), 10 )
getLoginAttempts: () ->
@envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_ATTEMPTS, 15 )
generateUUID: ->
d = new Date().getTime()
'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) ->
r = (d + Math.random()*16)%16 | 0;
v = if c is 'x' then r else (r & 0x3|0x8)
v.toString(16)
)
login: () ->
msg = {
"MESSAGE": "ROBOT_LOGIN_REQUEST_MESSAGE",
"TRANSACTION_ID": @generateUUID(),
"PAYLOAD": {
"PROTOCOL": "1.0",
"NAME": @robot.name,
"KEY": @password,
"DEVICE_NAME": "hubot"
}
}
chat: (msg) ->
msg = {
"MESSAGE": "CHAT_ACTION_MESSAGE",
"TRANSACTION_ID": @generateUUID(),
"PAYLOAD": {
"CHAT": {
"IS_ONCALL": false,
"IS_ROBOT": true,
"TEXT": msg,
"ROOM_ID": "*"
}
}
}
sendToVO: (js) ->
m = JSON.stringify(js)
message = "VO-MESSAGE:" + m.length + "\n" + m
@robot.logger.info "send to chat server: #{message}" if js.MESSAGE != "PING"
@ws.send( message )
send: (user, strings...) ->
@sendToVO( @chat( strings.join "\n" ) )
reply: (user, strings...) ->
strings = "@#{user.user.id}: #{strings.join "\n"}"
@sendToVO @chat( strings )
respond: (regex, callback) ->
@hear regex, callback
ping: () ->
msg = {
"MESSAGE": "PING",
"TRANSACTION_ID": @generateUUID()
}
@sendToVO(msg)
connectToVO: () ->
@disconnect()
if @loginAttempts-- <= 0
@robot.logger.info "Unable to connect; giving up."
process.exit 1
@robot.logger.info "Attempting connection to VictorOps at #{@wsURL}..."
@ws = new WebSocket(@wsURL)
@ws.on "open", () =>
@sendToVO(@login())
@ws.on "error", (error) =>
@disconnect(error)
@ws.on "message", (message) =>
@receiveWS(message)
@ws.on 'close', () =>
@loggedIn = false
@robot.logger.info 'WebSocket closed.'
disconnect: (error) ->
@lastPong = new Date()
@loggedIn = false
@rcvdStatusList = false
if @ws?
@robot.logger.info("#{error} - disconnecting...")
@ws.terminate()
@ws = null
rcvVOEvent: ( typ, obj ) ->
user = @robot.brain.userForId "VictorOps"
hubotMsg = "#{@robot.name} VictorOps #{typ} #{JSON.stringify(obj)}"
@robot.logger.info hubotMsg
@receive new TextMessage user, hubotMsg
receiveWS: (msg) ->
data = JSON.parse( msg.replace /VO-MESSAGE:[^\{]*/, "" )
@robot.logger.info "Received #{data.MESSAGE}" if data.MESSAGE != "PONG"
# Turn on for debugging
#@robot.logger.info msg
if data.MESSAGE == "CHAT_NOTIFY_MESSAGE" && data.PAYLOAD.CHAT.USER_ID != @robot.name
user = @robot.brain.userForId data.PAYLOAD.CHAT.USER_ID
@receive new TextMessage user, data.PAYLOAD.CHAT.TEXT.replace(/"/g,'"')
else if data.MESSAGE == "PONG"
@lastPong = new Date()
else if data.MESSAGE == "STATE_NOTIFY_MESSAGE" && data.PAYLOAD.ONCALL_LIST?
@rcvdStatusList = true
else if data.MESSAGE == "ENTITY_STATE_NOTIFY_MESSAGE"
user = @robot.brain.userForId "VictorOps"
@rcvVOEvent 'entitystate', entity for entity in data.PAYLOAD.SYSTEM_ALERT_STATE_LIST
else if data.MESSAGE == "TIMELINE_LIST_REPLY_MESSAGE"
for item in data.PAYLOAD.TIMELINE_LIST
if item.ALERT?
# get a list of current victor ops incident keys in the brain
voIKeys = @robot.brain.get "VO_INCIDENT_KEYS"
# catch null lists and init as blank
if not voIKeys?
voIKeys = []
# name the new key and set the brain
voCurIName = item.ALERT["INCIDENT_NAME"]
@robot.brain.set voCurIName, item.ALERT
# update the list of current victor ops incident keys in the brain
voIKeys.push
name: voCurIName
timestamp: new Date
@robot.brain.set "VO_INCIDENT_KEYS", voIKeys
# clean up victor ops incident keys in the brain
@cleanupBrain()
@robot.emit "alert", item.ALERT
@rcvVOEvent 'alert', item.ALERT
else
@robot.logger.info "Not an alert."
else if data.MESSAGE == "LOGIN_REPLY_MESSAGE"
if data.PAYLOAD.STATUS != "200"
@robot.logger.info "Failed to log in: #{data.PAYLOAD.DESCRIPTION}"
@loggedIn = false
@ws.terminate()
else
@loginAttempts = @getLoginAttempts()
@loggedIn = true
setTimeout =>
if ( ! @rcvdStatusList )
@robot.logger.info "Did not get status list in time; reconnecting..."
@disconnect()
, 5000
@shell.prompt() if data.MESSAGE != "PONG"
cleanupBrain: ->
# get a list of all the victor ops incident keys in the brain
voIKeys = @robot.brain.get "VO_INCIDENT_KEYS"
# remove keys from the victor ops incident keys list and from the brain
# if they are older than 24 hours
voIKeysFiltered = voIKeys.filter((item) ->
return (new Date(item.timestamp).getDate() + 1 >= new Date)
)
# set the victor ops incident keys list in the the brain to the updated
# list value
@robot.brain.set "VO_INCIDENT_KEYS", voIKeysFiltered
run: ->
pkg = require Path.join __dirname, '..', 'package.json'
@robot.logger.info "VictorOps adapter version #{pkg.version}"
@shell = new Shell( @robot, @ )
@connectToVO()
setInterval =>
pongInterval = new Date().getTime() - @lastPong.getTime()
if ( ! @ws? || @ws.readyState != WebSocket.OPEN || ! @loggedIn || pongInterval > @pongLimit )
@connectToVO()
else
@ping()
, @loginRetryInterval
@emit "connected"
exports.VictorOps = VictorOps
exports.use = (robot) ->
new VictorOps robot
| 173624 | #==========================================================================
# Copyright 2014 VictorOps, Inc.
# https://github.com/victorops/hubot-victorops/blob/master/LICENSE
#==========================================================================
Path = require 'path'
Readline = require 'readline'
WebSocket = require 'ws'
{Adapter,Robot,TextMessage} = require 'hubot'
class Shell
constructor: (robot, vo) ->
@robot = robot
stdin = process.openStdin()
stdout = process.stdout
@vo = vo
@user = @robot.brain.userForId '1', name: 'Shell', room: 'Shell'
process.on 'uncaughtException', (err) =>
@robot.logger.error err.stack
@repl = Readline.createInterface stdin, stdout, null
@repl.on 'close', =>
@robot.logger.info()
stdin.destroy()
@robot.shutdown()
process.exit 0
@repl.on 'line', (buffer) =>
if buffer.trim().length > 0
@repl.close() if buffer.toLowerCase() is 'exit'
@vo.send( @robot.name, buffer )
@robot.receive new TextMessage @user, buffer, 'messageId'
@repl.prompt()
@repl.setPrompt "#{@robot.name} >> "
@repl.prompt()
prompt: ->
@repl.prompt()
class VictorOps extends Adapter
constructor: (robot) ->
@wsURL = @envWithDefault( process.env.HUBOT_VICTOROPS_URL, 'wss://chat.victorops.com/chat' )
@password = <PASSWORD>OPS_KEY
@robot = robot
@loggedIn = false
@loginAttempts = @getLoginAttempts()
@loginRetryInterval = @envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_INTERVAL, 5 ) * 1000
@pongLimit = @envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_PONG_LIMIT, 17000 )
@rcvdStatusList = false
super robot
envWithDefault: (envVar, defVal) ->
if (envVar?)
envVar
else
defVal
envIntWithDefault: (envVar, defVal) ->
parseInt( @envWithDefault( envVar, "#{defVal}" ), 10 )
getLoginAttempts: () ->
@envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_ATTEMPTS, 15 )
generateUUID: ->
d = new Date().getTime()
'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) ->
r = (d + Math.random()*16)%16 | 0;
v = if c is 'x' then r else (r & 0x3|0x8)
v.toString(16)
)
login: () ->
msg = {
"MESSAGE": "ROBOT_LOGIN_REQUEST_MESSAGE",
"TRANSACTION_ID": @generateUUID(),
"PAYLOAD": {
"PROTOCOL": "1.0",
"NAME": @robot.name,
"KEY": @password,
"DEVICE_NAME": "hubot"
}
}
chat: (msg) ->
msg = {
"MESSAGE": "CHAT_ACTION_MESSAGE",
"TRANSACTION_ID": @generateUUID(),
"PAYLOAD": {
"CHAT": {
"IS_ONCALL": false,
"IS_ROBOT": true,
"TEXT": msg,
"ROOM_ID": "*"
}
}
}
sendToVO: (js) ->
m = JSON.stringify(js)
message = "VO-MESSAGE:" + m.length + "\n" + m
@robot.logger.info "send to chat server: #{message}" if js.MESSAGE != "PING"
@ws.send( message )
send: (user, strings...) ->
@sendToVO( @chat( strings.join "\n" ) )
reply: (user, strings...) ->
strings = "@#{user.user.id}: #{strings.join "\n"}"
@sendToVO @chat( strings )
respond: (regex, callback) ->
@hear regex, callback
ping: () ->
msg = {
"MESSAGE": "PING",
"TRANSACTION_ID": @generateUUID()
}
@sendToVO(msg)
connectToVO: () ->
@disconnect()
if @loginAttempts-- <= 0
@robot.logger.info "Unable to connect; giving up."
process.exit 1
@robot.logger.info "Attempting connection to VictorOps at #{@wsURL}..."
@ws = new WebSocket(@wsURL)
@ws.on "open", () =>
@sendToVO(@login())
@ws.on "error", (error) =>
@disconnect(error)
@ws.on "message", (message) =>
@receiveWS(message)
@ws.on 'close', () =>
@loggedIn = false
@robot.logger.info 'WebSocket closed.'
disconnect: (error) ->
@lastPong = new Date()
@loggedIn = false
@rcvdStatusList = false
if @ws?
@robot.logger.info("#{error} - disconnecting...")
@ws.terminate()
@ws = null
rcvVOEvent: ( typ, obj ) ->
user = @robot.brain.userForId "VictorOps"
hubotMsg = "#{@robot.name} VictorOps #{typ} #{JSON.stringify(obj)}"
@robot.logger.info hubotMsg
@receive new TextMessage user, hubotMsg
receiveWS: (msg) ->
data = JSON.parse( msg.replace /VO-MESSAGE:[^\{]*/, "" )
@robot.logger.info "Received #{data.MESSAGE}" if data.MESSAGE != "PONG"
# Turn on for debugging
#@robot.logger.info msg
if data.MESSAGE == "CHAT_NOTIFY_MESSAGE" && data.PAYLOAD.CHAT.USER_ID != @robot.name
user = @robot.brain.userForId data.PAYLOAD.CHAT.USER_ID
@receive new TextMessage user, data.PAYLOAD.CHAT.TEXT.replace(/"/g,'"')
else if data.MESSAGE == "PONG"
@lastPong = new Date()
else if data.MESSAGE == "STATE_NOTIFY_MESSAGE" && data.PAYLOAD.ONCALL_LIST?
@rcvdStatusList = true
else if data.MESSAGE == "ENTITY_STATE_NOTIFY_MESSAGE"
user = @robot.brain.userForId "VictorOps"
@rcvVOEvent 'entitystate', entity for entity in data.PAYLOAD.SYSTEM_ALERT_STATE_LIST
else if data.MESSAGE == "TIMELINE_LIST_REPLY_MESSAGE"
for item in data.PAYLOAD.TIMELINE_LIST
if item.ALERT?
# get a list of current victor ops incident keys in the brain
voIKeys = @robot.brain.get "VO_INCIDENT_KEYS"
# catch null lists and init as blank
if not voIKeys?
voIKeys = []
# name the new key and set the brain
voCurIName = item.ALERT["INCIDENT_NAME"]
@robot.brain.set voCurIName, item.ALERT
# update the list of current victor ops incident keys in the brain
voIKeys.push
name: voCurIName
timestamp: new Date
@robot.brain.set "VO_INCIDENT_KEYS", voIKeys
# clean up victor ops incident keys in the brain
@cleanupBrain()
@robot.emit "alert", item.ALERT
@rcvVOEvent 'alert', item.ALERT
else
@robot.logger.info "Not an alert."
else if data.MESSAGE == "LOGIN_REPLY_MESSAGE"
if data.PAYLOAD.STATUS != "200"
@robot.logger.info "Failed to log in: #{data.PAYLOAD.DESCRIPTION}"
@loggedIn = false
@ws.terminate()
else
@loginAttempts = @getLoginAttempts()
@loggedIn = true
setTimeout =>
if ( ! @rcvdStatusList )
@robot.logger.info "Did not get status list in time; reconnecting..."
@disconnect()
, 5000
@shell.prompt() if data.MESSAGE != "PONG"
cleanupBrain: ->
# get a list of all the victor ops incident keys in the brain
voIKeys = @robot.brain.get "VO_INCIDENT_KEYS"
# remove keys from the victor ops incident keys list and from the brain
# if they are older than 24 hours
voIKeysFiltered = voIKeys.filter((item) ->
return (new Date(item.timestamp).getDate() + 1 >= new Date)
)
# set the victor ops incident keys list in the the brain to the updated
# list value
@robot.brain.set "VO_INCIDENT_KEYS", voIKeysFiltered
run: ->
pkg = require Path.join __dirname, '..', 'package.json'
@robot.logger.info "VictorOps adapter version #{pkg.version}"
@shell = new Shell( @robot, @ )
@connectToVO()
setInterval =>
pongInterval = new Date().getTime() - @lastPong.getTime()
if ( ! @ws? || @ws.readyState != WebSocket.OPEN || ! @loggedIn || pongInterval > @pongLimit )
@connectToVO()
else
@ping()
, @loginRetryInterval
@emit "connected"
exports.VictorOps = VictorOps
exports.use = (robot) ->
new VictorOps robot
| true | #==========================================================================
# Copyright 2014 VictorOps, Inc.
# https://github.com/victorops/hubot-victorops/blob/master/LICENSE
#==========================================================================
Path = require 'path'
Readline = require 'readline'
WebSocket = require 'ws'
{Adapter,Robot,TextMessage} = require 'hubot'
class Shell
constructor: (robot, vo) ->
@robot = robot
stdin = process.openStdin()
stdout = process.stdout
@vo = vo
@user = @robot.brain.userForId '1', name: 'Shell', room: 'Shell'
process.on 'uncaughtException', (err) =>
@robot.logger.error err.stack
@repl = Readline.createInterface stdin, stdout, null
@repl.on 'close', =>
@robot.logger.info()
stdin.destroy()
@robot.shutdown()
process.exit 0
@repl.on 'line', (buffer) =>
if buffer.trim().length > 0
@repl.close() if buffer.toLowerCase() is 'exit'
@vo.send( @robot.name, buffer )
@robot.receive new TextMessage @user, buffer, 'messageId'
@repl.prompt()
@repl.setPrompt "#{@robot.name} >> "
@repl.prompt()
prompt: ->
@repl.prompt()
class VictorOps extends Adapter
constructor: (robot) ->
@wsURL = @envWithDefault( process.env.HUBOT_VICTOROPS_URL, 'wss://chat.victorops.com/chat' )
@password = PI:PASSWORD:<PASSWORD>END_PIOPS_KEY
@robot = robot
@loggedIn = false
@loginAttempts = @getLoginAttempts()
@loginRetryInterval = @envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_INTERVAL, 5 ) * 1000
@pongLimit = @envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_PONG_LIMIT, 17000 )
@rcvdStatusList = false
super robot
envWithDefault: (envVar, defVal) ->
if (envVar?)
envVar
else
defVal
envIntWithDefault: (envVar, defVal) ->
parseInt( @envWithDefault( envVar, "#{defVal}" ), 10 )
getLoginAttempts: () ->
@envIntWithDefault( process.env.HUBOT_VICTOROPS_LOGIN_ATTEMPTS, 15 )
generateUUID: ->
d = new Date().getTime()
'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) ->
r = (d + Math.random()*16)%16 | 0;
v = if c is 'x' then r else (r & 0x3|0x8)
v.toString(16)
)
login: () ->
msg = {
"MESSAGE": "ROBOT_LOGIN_REQUEST_MESSAGE",
"TRANSACTION_ID": @generateUUID(),
"PAYLOAD": {
"PROTOCOL": "1.0",
"NAME": @robot.name,
"KEY": @password,
"DEVICE_NAME": "hubot"
}
}
chat: (msg) ->
msg = {
"MESSAGE": "CHAT_ACTION_MESSAGE",
"TRANSACTION_ID": @generateUUID(),
"PAYLOAD": {
"CHAT": {
"IS_ONCALL": false,
"IS_ROBOT": true,
"TEXT": msg,
"ROOM_ID": "*"
}
}
}
sendToVO: (js) ->
m = JSON.stringify(js)
message = "VO-MESSAGE:" + m.length + "\n" + m
@robot.logger.info "send to chat server: #{message}" if js.MESSAGE != "PING"
@ws.send( message )
send: (user, strings...) ->
@sendToVO( @chat( strings.join "\n" ) )
reply: (user, strings...) ->
strings = "@#{user.user.id}: #{strings.join "\n"}"
@sendToVO @chat( strings )
respond: (regex, callback) ->
@hear regex, callback
ping: () ->
msg = {
"MESSAGE": "PING",
"TRANSACTION_ID": @generateUUID()
}
@sendToVO(msg)
connectToVO: () ->
@disconnect()
if @loginAttempts-- <= 0
@robot.logger.info "Unable to connect; giving up."
process.exit 1
@robot.logger.info "Attempting connection to VictorOps at #{@wsURL}..."
@ws = new WebSocket(@wsURL)
@ws.on "open", () =>
@sendToVO(@login())
@ws.on "error", (error) =>
@disconnect(error)
@ws.on "message", (message) =>
@receiveWS(message)
@ws.on 'close', () =>
@loggedIn = false
@robot.logger.info 'WebSocket closed.'
disconnect: (error) ->
@lastPong = new Date()
@loggedIn = false
@rcvdStatusList = false
if @ws?
@robot.logger.info("#{error} - disconnecting...")
@ws.terminate()
@ws = null
rcvVOEvent: ( typ, obj ) ->
user = @robot.brain.userForId "VictorOps"
hubotMsg = "#{@robot.name} VictorOps #{typ} #{JSON.stringify(obj)}"
@robot.logger.info hubotMsg
@receive new TextMessage user, hubotMsg
receiveWS: (msg) ->
data = JSON.parse( msg.replace /VO-MESSAGE:[^\{]*/, "" )
@robot.logger.info "Received #{data.MESSAGE}" if data.MESSAGE != "PONG"
# Turn on for debugging
#@robot.logger.info msg
if data.MESSAGE == "CHAT_NOTIFY_MESSAGE" && data.PAYLOAD.CHAT.USER_ID != @robot.name
user = @robot.brain.userForId data.PAYLOAD.CHAT.USER_ID
@receive new TextMessage user, data.PAYLOAD.CHAT.TEXT.replace(/"/g,'"')
else if data.MESSAGE == "PONG"
@lastPong = new Date()
else if data.MESSAGE == "STATE_NOTIFY_MESSAGE" && data.PAYLOAD.ONCALL_LIST?
@rcvdStatusList = true
else if data.MESSAGE == "ENTITY_STATE_NOTIFY_MESSAGE"
user = @robot.brain.userForId "VictorOps"
@rcvVOEvent 'entitystate', entity for entity in data.PAYLOAD.SYSTEM_ALERT_STATE_LIST
else if data.MESSAGE == "TIMELINE_LIST_REPLY_MESSAGE"
for item in data.PAYLOAD.TIMELINE_LIST
if item.ALERT?
# get a list of current victor ops incident keys in the brain
voIKeys = @robot.brain.get "VO_INCIDENT_KEYS"
# catch null lists and init as blank
if not voIKeys?
voIKeys = []
# name the new key and set the brain
voCurIName = item.ALERT["INCIDENT_NAME"]
@robot.brain.set voCurIName, item.ALERT
# update the list of current victor ops incident keys in the brain
voIKeys.push
name: voCurIName
timestamp: new Date
@robot.brain.set "VO_INCIDENT_KEYS", voIKeys
# clean up victor ops incident keys in the brain
@cleanupBrain()
@robot.emit "alert", item.ALERT
@rcvVOEvent 'alert', item.ALERT
else
@robot.logger.info "Not an alert."
else if data.MESSAGE == "LOGIN_REPLY_MESSAGE"
if data.PAYLOAD.STATUS != "200"
@robot.logger.info "Failed to log in: #{data.PAYLOAD.DESCRIPTION}"
@loggedIn = false
@ws.terminate()
else
@loginAttempts = @getLoginAttempts()
@loggedIn = true
setTimeout =>
if ( ! @rcvdStatusList )
@robot.logger.info "Did not get status list in time; reconnecting..."
@disconnect()
, 5000
@shell.prompt() if data.MESSAGE != "PONG"
cleanupBrain: ->
# get a list of all the victor ops incident keys in the brain
voIKeys = @robot.brain.get "VO_INCIDENT_KEYS"
# remove keys from the victor ops incident keys list and from the brain
# if they are older than 24 hours
voIKeysFiltered = voIKeys.filter((item) ->
return (new Date(item.timestamp).getDate() + 1 >= new Date)
)
# set the victor ops incident keys list in the the brain to the updated
# list value
@robot.brain.set "VO_INCIDENT_KEYS", voIKeysFiltered
run: ->
pkg = require Path.join __dirname, '..', 'package.json'
@robot.logger.info "VictorOps adapter version #{pkg.version}"
@shell = new Shell( @robot, @ )
@connectToVO()
setInterval =>
pongInterval = new Date().getTime() - @lastPong.getTime()
if ( ! @ws? || @ws.readyState != WebSocket.OPEN || ! @loggedIn || pongInterval > @pongLimit )
@connectToVO()
else
@ping()
, @loginRetryInterval
@emit "connected"
exports.VictorOps = VictorOps
exports.use = (robot) ->
new VictorOps robot
|
[
{
"context": " else\n user = {name: user.full_name_display, avatar: avatar}\n\n ctx = {\n ",
"end": 6322,
"score": 0.7698987126350403,
"start": 6310,
"tag": "USERNAME",
"value": "name_display"
},
{
"context": "te.instant(\"WIKI.DATETIME\"))\n ... | app/coffee/modules/wiki/main.coffee | threefoldtech/Threefold-Circles-front | 0 | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/wiki/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
groupBy = @.taiga.groupBy
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
module = angular.module("taigaWiki")
#############################################################################
## Wiki Detail Controller
#############################################################################
class WikiDetailController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgModel",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$filter",
"$log",
"tgAppMetaService",
"$tgNavUrls",
"$tgAnalytics",
"$translate",
"tgErrorHandlingService",
"tgProjectService"
]
constructor: (@scope, @rootscope, @repo, @model, @confirm, @rs, @params, @q, @location,
@filter, @log, @appMetaService, @navUrls, @analytics, @translate, @errorHandlingService, @projectService) ->
@scope.$on("wiki:links:move", @.moveLink)
@scope.$on("wikipage:add", @.loadWiki)
@scope.projectSlug = @params.pslug
@scope.wikiSlug = @params.slug
@scope.sectionName = "Wiki"
@scope.linksVisible = false
promise = @.loadInitialData()
# On Success
promise.then () => @._setMeta()
# On Error
promise.then null, @.onInitialDataError.bind(@)
_setMeta: ->
title = @translate.instant("WIKI.PAGE_TITLE", {
wikiPageName: @scope.wikiSlug
projectName: @scope.project.name
})
description = @translate.instant("WIKI.PAGE_DESCRIPTION", {
wikiPageContent: angular.element(@scope.wiki?.html or "").text()
totalEditions: @scope.wiki?.editions or 0
lastModifiedDate: moment(@scope.wiki?.modified_date).format(@translate.instant("WIKI.DATETIME"))
})
@appMetaService.setAll(title, description)
loadProject: ->
project = @projectService.project.toJS()
if not project.is_wiki_activated
@errorHandlingService.permissionDenied()
@scope.projectId = project.id
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadWiki: =>
promise = @rs.wiki.getBySlug(@scope.projectId, @params.slug)
promise.then (wiki) =>
@scope.wiki = wiki
@scope.wikiId = wiki.id
return @scope.wiki
promise.then null, (xhr) =>
@scope.wikiId = null
if @scope.project.my_permissions.indexOf("add_wiki_page") == -1
return null
data = {
project: @scope.projectId
slug: @scope.wikiSlug
content: ""
}
@scope.wiki = @model.make_model("wiki", data)
return @scope.wiki
loadWikiLinks: ->
return @rs.wiki.listLinks(@scope.projectId).then (wikiLinks) =>
@scope.wikiLinks = wikiLinks
for link in @scope.wikiLinks
link.url = @navUrls.resolve("project-wiki-page", {
project: @scope.projectSlug
slug: link.href
})
selectedWikiLink = _.find(wikiLinks, {href: @scope.wikiSlug})
loadInitialData: ->
project = @.loadProject()
@.fillUsersAndRoles(project.members, project.roles)
@q.all([@.loadWikiLinks(), @.loadWiki()]).then @.checkLinksPerms.bind(this)
checkLinksPerms: ->
if @scope.project.my_permissions.indexOf("add_wiki_link") != -1 ||
(@scope.project.my_permissions.indexOf("view_wiki_links") != -1 && @scope.wikiLinks.length)
@scope.linksVisible = true
delete: ->
title = @translate.instant("WIKI.DELETE_LIGHTBOX_TITLE")
message = @scope.wikiSlug
@confirm.askOnDelete(title, message).then (askResponse) =>
onSuccess = =>
askResponse.finish()
ctx = {project: @scope.projectSlug}
@location.path(@navUrls.resolve("project-wiki", ctx))
@confirm.notify("success")
@.loadWiki()
onError = =>
askResponse.finish(false)
@confirm.notify("error")
@repo.remove(@scope.wiki).then onSuccess, onError
moveLink: (ctx, item, itemIndex) =>
values = @scope.wikiLinks
r = values.indexOf(item)
values.splice(r, 1)
values.splice(itemIndex, 0, item)
_.each values, (value, index) ->
value.order = index
@repo.saveAll(values)
module.controller("WikiDetailController", WikiDetailController)
#############################################################################
## Wiki Summary Directive
#############################################################################
WikiSummaryDirective = ($log, $template, $compile, $translate, avatarService) ->
template = $template.get("wiki/wiki-summary.html", true)
link = ($scope, $el, $attrs, $model) ->
render = (wiki) ->
if not $scope.usersById?
$log.error "WikiSummaryDirective requires userById set in scope."
else
user = $scope.usersById[wiki.last_modifier]
avatar = avatarService.getAvatar(user)
if user is undefined
user = {name: "unknown", avatar: avatar}
else
user = {name: user.full_name_display, avatar: avatar}
ctx = {
totalEditions: wiki.editions
lastModifiedDate: moment(wiki.modified_date).format($translate.instant("WIKI.DATETIME"))
user: user
}
html = template(ctx)
html = $compile(html)($scope)
$el.html(html)
$scope.$watch $attrs.ngModel, (wikiPage) ->
return if not wikiPage
render(wikiPage)
$scope.$on "$destroy", ->
$el.off()
return {
link: link
restrict: "EA"
require: "ngModel"
}
module.directive("tgWikiSummary", ["$log", "$tgTemplate", "$compile", "$translate", "tgAvatarService", WikiSummaryDirective])
WikiWysiwyg = ($modelTransform, $rootscope, $confirm, attachmentsFullService,
$qqueue, $repo, $analytics, activityService) ->
link = ($scope, $el, $attrs) ->
$scope.editableDescription = false
$scope.saveDescription = $qqueue.bindAdd (description, cb) ->
onSuccess = (wikiPage) ->
if not $scope.item.id?
$analytics.trackEvent("wikipage", "create", "create wiki page", 1)
$scope.$emit("wikipage:add")
activityService.fetchEntries(true)
$confirm.notify("success")
onError = ->
$confirm.notify("error")
$scope.item.content = description
if $scope.item.id?
promise = $repo.save($scope.item).then(onSuccess, onError)
else
promise = $repo.create("wiki", $scope.item).then(onSuccess, onError)
promise.finally(cb)
uploadFile = (file, cb) ->
return attachmentsFullService.addAttachment($scope.project.id, $scope.item.id, 'wiki_page', file).then (result) ->
cb(result.getIn(['file', 'name']), result.getIn(['file', 'url']))
$scope.uploadFiles = (files, cb) ->
for file in files
uploadFile(file, cb)
$scope.$watch $attrs.model, (value) ->
return if not value
$scope.item = value
$scope.version = value.version
$scope.storageKey = $scope.project.id + "-" + value.id + "-wiki"
$scope.$watch 'project', (project) ->
return if !project
$scope.editableDescription = project.my_permissions.indexOf("modify_wiki_page") != -1
return {
scope: true,
link: link,
template: """
<div>
<tg-wysiwyg
ng-if="editableDescription"
version='version'
storage-key='storageKey'
content='item.content'
on-save='saveDescription(text, cb)'
on-upload-file='uploadFiles(files, cb)'>
</tg-wysiwyg>
<div
class="wysiwyg"
ng-if="!editableDescription && item.content.length"
ng-bind-html="item.content | markdownToHTML"></div>
<div
class="wysiwyg"
ng-if="!editableDescription && !item.content.length">
{{'COMMON.DESCRIPTION.NO_DESCRIPTION' | translate}}
</div>
</div>
"""
}
module.directive("tgWikiWysiwyg", [
"$tgQueueModelTransformation",
"$rootScope",
"$tgConfirm",
"tgAttachmentsFullService",
"$tgQqueue", "$tgRepo", "$tgAnalytics", "tgActivityService"
WikiWysiwyg])
| 99660 | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/wiki/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
groupBy = @.taiga.groupBy
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
module = angular.module("taigaWiki")
#############################################################################
## Wiki Detail Controller
#############################################################################
class WikiDetailController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgModel",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$filter",
"$log",
"tgAppMetaService",
"$tgNavUrls",
"$tgAnalytics",
"$translate",
"tgErrorHandlingService",
"tgProjectService"
]
constructor: (@scope, @rootscope, @repo, @model, @confirm, @rs, @params, @q, @location,
@filter, @log, @appMetaService, @navUrls, @analytics, @translate, @errorHandlingService, @projectService) ->
@scope.$on("wiki:links:move", @.moveLink)
@scope.$on("wikipage:add", @.loadWiki)
@scope.projectSlug = @params.pslug
@scope.wikiSlug = @params.slug
@scope.sectionName = "Wiki"
@scope.linksVisible = false
promise = @.loadInitialData()
# On Success
promise.then () => @._setMeta()
# On Error
promise.then null, @.onInitialDataError.bind(@)
_setMeta: ->
title = @translate.instant("WIKI.PAGE_TITLE", {
wikiPageName: @scope.wikiSlug
projectName: @scope.project.name
})
description = @translate.instant("WIKI.PAGE_DESCRIPTION", {
wikiPageContent: angular.element(@scope.wiki?.html or "").text()
totalEditions: @scope.wiki?.editions or 0
lastModifiedDate: moment(@scope.wiki?.modified_date).format(@translate.instant("WIKI.DATETIME"))
})
@appMetaService.setAll(title, description)
loadProject: ->
project = @projectService.project.toJS()
if not project.is_wiki_activated
@errorHandlingService.permissionDenied()
@scope.projectId = project.id
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadWiki: =>
promise = @rs.wiki.getBySlug(@scope.projectId, @params.slug)
promise.then (wiki) =>
@scope.wiki = wiki
@scope.wikiId = wiki.id
return @scope.wiki
promise.then null, (xhr) =>
@scope.wikiId = null
if @scope.project.my_permissions.indexOf("add_wiki_page") == -1
return null
data = {
project: @scope.projectId
slug: @scope.wikiSlug
content: ""
}
@scope.wiki = @model.make_model("wiki", data)
return @scope.wiki
loadWikiLinks: ->
return @rs.wiki.listLinks(@scope.projectId).then (wikiLinks) =>
@scope.wikiLinks = wikiLinks
for link in @scope.wikiLinks
link.url = @navUrls.resolve("project-wiki-page", {
project: @scope.projectSlug
slug: link.href
})
selectedWikiLink = _.find(wikiLinks, {href: @scope.wikiSlug})
loadInitialData: ->
project = @.loadProject()
@.fillUsersAndRoles(project.members, project.roles)
@q.all([@.loadWikiLinks(), @.loadWiki()]).then @.checkLinksPerms.bind(this)
checkLinksPerms: ->
if @scope.project.my_permissions.indexOf("add_wiki_link") != -1 ||
(@scope.project.my_permissions.indexOf("view_wiki_links") != -1 && @scope.wikiLinks.length)
@scope.linksVisible = true
delete: ->
title = @translate.instant("WIKI.DELETE_LIGHTBOX_TITLE")
message = @scope.wikiSlug
@confirm.askOnDelete(title, message).then (askResponse) =>
onSuccess = =>
askResponse.finish()
ctx = {project: @scope.projectSlug}
@location.path(@navUrls.resolve("project-wiki", ctx))
@confirm.notify("success")
@.loadWiki()
onError = =>
askResponse.finish(false)
@confirm.notify("error")
@repo.remove(@scope.wiki).then onSuccess, onError
moveLink: (ctx, item, itemIndex) =>
values = @scope.wikiLinks
r = values.indexOf(item)
values.splice(r, 1)
values.splice(itemIndex, 0, item)
_.each values, (value, index) ->
value.order = index
@repo.saveAll(values)
module.controller("WikiDetailController", WikiDetailController)
#############################################################################
## Wiki Summary Directive
#############################################################################
WikiSummaryDirective = ($log, $template, $compile, $translate, avatarService) ->
template = $template.get("wiki/wiki-summary.html", true)
link = ($scope, $el, $attrs, $model) ->
render = (wiki) ->
if not $scope.usersById?
$log.error "WikiSummaryDirective requires userById set in scope."
else
user = $scope.usersById[wiki.last_modifier]
avatar = avatarService.getAvatar(user)
if user is undefined
user = {name: "unknown", avatar: avatar}
else
user = {name: user.full_name_display, avatar: avatar}
ctx = {
totalEditions: wiki.editions
lastModifiedDate: moment(wiki.modified_date).format($translate.instant("WIKI.DATETIME"))
user: user
}
html = template(ctx)
html = $compile(html)($scope)
$el.html(html)
$scope.$watch $attrs.ngModel, (wikiPage) ->
return if not wikiPage
render(wikiPage)
$scope.$on "$destroy", ->
$el.off()
return {
link: link
restrict: "EA"
require: "ngModel"
}
module.directive("tgWikiSummary", ["$log", "$tgTemplate", "$compile", "$translate", "tgAvatarService", WikiSummaryDirective])
WikiWysiwyg = ($modelTransform, $rootscope, $confirm, attachmentsFullService,
$qqueue, $repo, $analytics, activityService) ->
link = ($scope, $el, $attrs) ->
$scope.editableDescription = false
$scope.saveDescription = $qqueue.bindAdd (description, cb) ->
onSuccess = (wikiPage) ->
if not $scope.item.id?
$analytics.trackEvent("wikipage", "create", "create wiki page", 1)
$scope.$emit("wikipage:add")
activityService.fetchEntries(true)
$confirm.notify("success")
onError = ->
$confirm.notify("error")
$scope.item.content = description
if $scope.item.id?
promise = $repo.save($scope.item).then(onSuccess, onError)
else
promise = $repo.create("wiki", $scope.item).then(onSuccess, onError)
promise.finally(cb)
uploadFile = (file, cb) ->
return attachmentsFullService.addAttachment($scope.project.id, $scope.item.id, 'wiki_page', file).then (result) ->
cb(result.getIn(['file', 'name']), result.getIn(['file', 'url']))
$scope.uploadFiles = (files, cb) ->
for file in files
uploadFile(file, cb)
$scope.$watch $attrs.model, (value) ->
return if not value
$scope.item = value
$scope.version = value.version
$scope.storageKey = $scope.project.<KEY> + <KEY> + value.<KEY> + <KEY>"
$scope.$watch 'project', (project) ->
return if !project
$scope.editableDescription = project.my_permissions.indexOf("modify_wiki_page") != -1
return {
scope: true,
link: link,
template: """
<div>
<tg-wysiwyg
ng-if="editableDescription"
version='version'
storage-key='storageKey'
content='item.content'
on-save='saveDescription(text, cb)'
on-upload-file='uploadFiles(files, cb)'>
</tg-wysiwyg>
<div
class="wysiwyg"
ng-if="!editableDescription && item.content.length"
ng-bind-html="item.content | markdownToHTML"></div>
<div
class="wysiwyg"
ng-if="!editableDescription && !item.content.length">
{{'COMMON.DESCRIPTION.NO_DESCRIPTION' | translate}}
</div>
</div>
"""
}
module.directive("tgWikiWysiwyg", [
"$tgQueueModelTransformation",
"$rootScope",
"$tgConfirm",
"tgAttachmentsFullService",
"$tgQqueue", "$tgRepo", "$tgAnalytics", "tgActivityService"
WikiWysiwyg])
| true | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/wiki/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
groupBy = @.taiga.groupBy
bindOnce = @.taiga.bindOnce
debounce = @.taiga.debounce
module = angular.module("taigaWiki")
#############################################################################
## Wiki Detail Controller
#############################################################################
class WikiDetailController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgModel",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$filter",
"$log",
"tgAppMetaService",
"$tgNavUrls",
"$tgAnalytics",
"$translate",
"tgErrorHandlingService",
"tgProjectService"
]
constructor: (@scope, @rootscope, @repo, @model, @confirm, @rs, @params, @q, @location,
@filter, @log, @appMetaService, @navUrls, @analytics, @translate, @errorHandlingService, @projectService) ->
@scope.$on("wiki:links:move", @.moveLink)
@scope.$on("wikipage:add", @.loadWiki)
@scope.projectSlug = @params.pslug
@scope.wikiSlug = @params.slug
@scope.sectionName = "Wiki"
@scope.linksVisible = false
promise = @.loadInitialData()
# On Success
promise.then () => @._setMeta()
# On Error
promise.then null, @.onInitialDataError.bind(@)
_setMeta: ->
title = @translate.instant("WIKI.PAGE_TITLE", {
wikiPageName: @scope.wikiSlug
projectName: @scope.project.name
})
description = @translate.instant("WIKI.PAGE_DESCRIPTION", {
wikiPageContent: angular.element(@scope.wiki?.html or "").text()
totalEditions: @scope.wiki?.editions or 0
lastModifiedDate: moment(@scope.wiki?.modified_date).format(@translate.instant("WIKI.DATETIME"))
})
@appMetaService.setAll(title, description)
loadProject: ->
project = @projectService.project.toJS()
if not project.is_wiki_activated
@errorHandlingService.permissionDenied()
@scope.projectId = project.id
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadWiki: =>
promise = @rs.wiki.getBySlug(@scope.projectId, @params.slug)
promise.then (wiki) =>
@scope.wiki = wiki
@scope.wikiId = wiki.id
return @scope.wiki
promise.then null, (xhr) =>
@scope.wikiId = null
if @scope.project.my_permissions.indexOf("add_wiki_page") == -1
return null
data = {
project: @scope.projectId
slug: @scope.wikiSlug
content: ""
}
@scope.wiki = @model.make_model("wiki", data)
return @scope.wiki
loadWikiLinks: ->
return @rs.wiki.listLinks(@scope.projectId).then (wikiLinks) =>
@scope.wikiLinks = wikiLinks
for link in @scope.wikiLinks
link.url = @navUrls.resolve("project-wiki-page", {
project: @scope.projectSlug
slug: link.href
})
selectedWikiLink = _.find(wikiLinks, {href: @scope.wikiSlug})
loadInitialData: ->
project = @.loadProject()
@.fillUsersAndRoles(project.members, project.roles)
@q.all([@.loadWikiLinks(), @.loadWiki()]).then @.checkLinksPerms.bind(this)
checkLinksPerms: ->
if @scope.project.my_permissions.indexOf("add_wiki_link") != -1 ||
(@scope.project.my_permissions.indexOf("view_wiki_links") != -1 && @scope.wikiLinks.length)
@scope.linksVisible = true
delete: ->
title = @translate.instant("WIKI.DELETE_LIGHTBOX_TITLE")
message = @scope.wikiSlug
@confirm.askOnDelete(title, message).then (askResponse) =>
onSuccess = =>
askResponse.finish()
ctx = {project: @scope.projectSlug}
@location.path(@navUrls.resolve("project-wiki", ctx))
@confirm.notify("success")
@.loadWiki()
onError = =>
askResponse.finish(false)
@confirm.notify("error")
@repo.remove(@scope.wiki).then onSuccess, onError
moveLink: (ctx, item, itemIndex) =>
values = @scope.wikiLinks
r = values.indexOf(item)
values.splice(r, 1)
values.splice(itemIndex, 0, item)
_.each values, (value, index) ->
value.order = index
@repo.saveAll(values)
module.controller("WikiDetailController", WikiDetailController)
#############################################################################
## Wiki Summary Directive
#############################################################################
WikiSummaryDirective = ($log, $template, $compile, $translate, avatarService) ->
template = $template.get("wiki/wiki-summary.html", true)
link = ($scope, $el, $attrs, $model) ->
render = (wiki) ->
if not $scope.usersById?
$log.error "WikiSummaryDirective requires userById set in scope."
else
user = $scope.usersById[wiki.last_modifier]
avatar = avatarService.getAvatar(user)
if user is undefined
user = {name: "unknown", avatar: avatar}
else
user = {name: user.full_name_display, avatar: avatar}
ctx = {
totalEditions: wiki.editions
lastModifiedDate: moment(wiki.modified_date).format($translate.instant("WIKI.DATETIME"))
user: user
}
html = template(ctx)
html = $compile(html)($scope)
$el.html(html)
$scope.$watch $attrs.ngModel, (wikiPage) ->
return if not wikiPage
render(wikiPage)
$scope.$on "$destroy", ->
$el.off()
return {
link: link
restrict: "EA"
require: "ngModel"
}
module.directive("tgWikiSummary", ["$log", "$tgTemplate", "$compile", "$translate", "tgAvatarService", WikiSummaryDirective])
WikiWysiwyg = ($modelTransform, $rootscope, $confirm, attachmentsFullService,
$qqueue, $repo, $analytics, activityService) ->
link = ($scope, $el, $attrs) ->
$scope.editableDescription = false
$scope.saveDescription = $qqueue.bindAdd (description, cb) ->
onSuccess = (wikiPage) ->
if not $scope.item.id?
$analytics.trackEvent("wikipage", "create", "create wiki page", 1)
$scope.$emit("wikipage:add")
activityService.fetchEntries(true)
$confirm.notify("success")
onError = ->
$confirm.notify("error")
$scope.item.content = description
if $scope.item.id?
promise = $repo.save($scope.item).then(onSuccess, onError)
else
promise = $repo.create("wiki", $scope.item).then(onSuccess, onError)
promise.finally(cb)
uploadFile = (file, cb) ->
return attachmentsFullService.addAttachment($scope.project.id, $scope.item.id, 'wiki_page', file).then (result) ->
cb(result.getIn(['file', 'name']), result.getIn(['file', 'url']))
$scope.uploadFiles = (files, cb) ->
for file in files
uploadFile(file, cb)
$scope.$watch $attrs.model, (value) ->
return if not value
$scope.item = value
$scope.version = value.version
$scope.storageKey = $scope.project.PI:KEY:<KEY>END_PI + PI:KEY:<KEY>END_PI + value.PI:KEY:<KEY>END_PI + PI:KEY:<KEY>END_PI"
$scope.$watch 'project', (project) ->
return if !project
$scope.editableDescription = project.my_permissions.indexOf("modify_wiki_page") != -1
return {
scope: true,
link: link,
template: """
<div>
<tg-wysiwyg
ng-if="editableDescription"
version='version'
storage-key='storageKey'
content='item.content'
on-save='saveDescription(text, cb)'
on-upload-file='uploadFiles(files, cb)'>
</tg-wysiwyg>
<div
class="wysiwyg"
ng-if="!editableDescription && item.content.length"
ng-bind-html="item.content | markdownToHTML"></div>
<div
class="wysiwyg"
ng-if="!editableDescription && !item.content.length">
{{'COMMON.DESCRIPTION.NO_DESCRIPTION' | translate}}
</div>
</div>
"""
}
module.directive("tgWikiWysiwyg", [
"$tgQueueModelTransformation",
"$rootScope",
"$tgConfirm",
"tgAttachmentsFullService",
"$tgQqueue", "$tgRepo", "$tgAnalytics", "tgActivityService"
WikiWysiwyg])
|
[
{
"context": "rtsy\", image: \"img.jpg\", photoCredit: \"Photo by Artsy\"}]\n @req = query: { 'm-id': 'foo' }\n @res =",
"end": 268,
"score": 0.7652468681335449,
"start": 265,
"tag": "NAME",
"value": "tsy"
},
{
"context": "he modal if logged in', ->\n @req.user = name: 'Andy... | src/test/lib/middleware/marketing_models.coffee | kanaabe/force | 0 | sinon = require 'sinon'
rewire = require 'rewire'
middleware = rewire '../../../lib/middleware/marketing_modals'
describe 'showMarketingSignupModal', ->
beforeEach ->
data = [{slug: "foo", copy: "welcome to artsy", image: "img.jpg", photoCredit: "Photo by Artsy"}]
@req = query: { 'm-id': 'foo' }
@res =
locals:
sd:
IS_MOBILE: true
APP_URL: 'http://www.artsy.net'
MOBILE_MARKETING_SIGNUP_MODALS: data
MARKETING_SIGNUP_MODALS: data
modal: {slug: "foo", copy: "welcome to artsy", image: "img.jpg"}
middleware.__set__ "JSONPage", class MockJSONPage
get: ->
new Promise (resolve, reject) ->
resolve({modals: data})
it 'shows the modal if coming from a campaign', ->
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.be.ok()
it 'does not show the modal if coming from artsy', ->
@req.query = {}
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.not.be.ok()
it 'does not show the modal if logged in', ->
@req.user = name: 'Andy'
@req.path = '/foo'
@req.get = sinon.stub().returns 'google.com'
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.not.be.ok()
| 50903 | sinon = require 'sinon'
rewire = require 'rewire'
middleware = rewire '../../../lib/middleware/marketing_modals'
describe 'showMarketingSignupModal', ->
beforeEach ->
data = [{slug: "foo", copy: "welcome to artsy", image: "img.jpg", photoCredit: "Photo by Ar<NAME>"}]
@req = query: { 'm-id': 'foo' }
@res =
locals:
sd:
IS_MOBILE: true
APP_URL: 'http://www.artsy.net'
MOBILE_MARKETING_SIGNUP_MODALS: data
MARKETING_SIGNUP_MODALS: data
modal: {slug: "foo", copy: "welcome to artsy", image: "img.jpg"}
middleware.__set__ "JSONPage", class MockJSONPage
get: ->
new Promise (resolve, reject) ->
resolve({modals: data})
it 'shows the modal if coming from a campaign', ->
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.be.ok()
it 'does not show the modal if coming from artsy', ->
@req.query = {}
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.not.be.ok()
it 'does not show the modal if logged in', ->
@req.user = name: '<NAME>'
@req.path = '/foo'
@req.get = sinon.stub().returns 'google.com'
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.not.be.ok()
| true | sinon = require 'sinon'
rewire = require 'rewire'
middleware = rewire '../../../lib/middleware/marketing_modals'
describe 'showMarketingSignupModal', ->
beforeEach ->
data = [{slug: "foo", copy: "welcome to artsy", image: "img.jpg", photoCredit: "Photo by ArPI:NAME:<NAME>END_PI"}]
@req = query: { 'm-id': 'foo' }
@res =
locals:
sd:
IS_MOBILE: true
APP_URL: 'http://www.artsy.net'
MOBILE_MARKETING_SIGNUP_MODALS: data
MARKETING_SIGNUP_MODALS: data
modal: {slug: "foo", copy: "welcome to artsy", image: "img.jpg"}
middleware.__set__ "JSONPage", class MockJSONPage
get: ->
new Promise (resolve, reject) ->
resolve({modals: data})
it 'shows the modal if coming from a campaign', ->
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.be.ok()
it 'does not show the modal if coming from artsy', ->
@req.query = {}
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.not.be.ok()
it 'does not show the modal if logged in', ->
@req.user = name: 'PI:NAME:<NAME>END_PI'
@req.path = '/foo'
@req.get = sinon.stub().returns 'google.com'
middleware @req, @res, =>
(@res.locals.showMarketingSignupModal?).should.not.be.ok()
|
[
{
"context": "ame=\"csrf-param\" />\n <meta content=\"123456\" name=\"csrf-token\" />\n \"\"\"\n expect(",
"end": 4337,
"score": 0.6709045767784119,
"start": 4336,
"tag": "PASSWORD",
"value": "6"
},
{
"context": "equestData()['test_authenticity_token']).toEqual... | testapp/app/assets/javascripts/spec.coffee | t3n3t/rest_in_place | 62 | rip = null
makeRip = (html) ->
$(html)
.find('span')
.restInPlace()
.data('restInPlaceEditor')
beforeEach -> rip = null
describe "Setup", ->
describe "looking up attributes in parents", ->
beforeEach ->
rip = makeRip """
<p data-url="localhorst" data-formtype="textarea" data-object="person" data-attribute="name" data-placeholder="Enter name">
<span>Blubb</span>
</p>"""
it "should find the data-url" , -> expect(rip.url).toEqual('localhorst')
it "should find the data-formtype" , -> expect(rip.formType).toEqual('textarea')
it "should find the data-object" , -> expect(rip.objectName).toEqual('person')
it "should find the data-attribute" , -> expect(rip.attributeName).toEqual('name')
it "should find the data-placeholder", -> expect(rip.placeholder).toEqual('Enter name')
it "should prefer inner settings over outer", ->
rip = makeRip """<div data-object="outer"><p data-url="inner"><span>Blubb</span></p></div>"""
expect(rip.url).toEqual('inner')
describe "guessing objectName from Rails", ->
describe 'without parent-provided info', ->
beforeEach -> rip = makeRip """<p id="person_123"><span>Blubb</span></p>"""
it "should derive the objectName from a railslike id", -> expect(rip.objectName).toEqual('person')
describe 'with parent-provided info', ->
beforeEach -> rip = makeRip """<div data-object="customer"><p id="person_123"><span>Blubb</span></p></div>"""
it "should not overwrite the explicit value with the guess", -> expect(rip.objectName).not.toEqual('person')
describe "own data attributes", ->
it "url should default to the current path", ->
rip = makeRip '<p><span>Blubb</span></p>'
expect(rip.url).toEqual(document.location.pathname)
it "formtype should default to input", ->
rip = makeRip '<p><span>Blubb</span></p>'
expect(rip.formType).toEqual('input')
it "should take precedence over anything set through parents", ->
rip = makeRip """
<p data-url="localhorst" data-formtype="textarea1" data-object="person" data-attribute="name" data-placeholder="placeholder">
<span data-url="localhorst2" data-formtype="textarea" data-object="person2" data-attribute="name2" data-placeholder="placeholder2">Blubb</span>
</p>"""
expect(rip.url).toEqual('localhorst2')
expect(rip.formType).toEqual('textarea')
expect(rip.objectName).toEqual('person2')
expect(rip.attributeName).toEqual('name2')
expect(rip.placeholder).toEqual('placeholder2')
describe "Server communication", ->
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age" data-placeholder="placeholder">Blubb</span></p>'
describe "when processing the response from the server", ->
it "should not include_root_in_json by defafult", ->
expect(rip.include_root_in_json).toBe(false)
it "should unwrap the object if include_root_in_json is set", ->
rip.include_root_in_json = true
expect(rip.extractAttributeFromData(person : {age:10})).toEqual(10)
it "should directly access the attribute if include_root_in_json is not set", ->
rip.include_root_in_json = false
expect(rip.extractAttributeFromData(age:12)).toEqual(12)
describe "when sending the update", ->
csrf_metatags = null
describe "when not changing fields", ->
beforeEach ->
spyOn(rip, 'getValue').andReturn('placeholder')
csrf_metatags = $('meta[name=csrf-param], meta[name=csrf-token]')
csrf_metatags.remove()
afterEach ->
csrf_metatags.appendTo($('head'))
it "should not send the data", ->
expect(rip.requestData()['person[age]']).toEqual(null)
describe "when changing fields", ->
beforeEach ->
spyOn(rip, 'getValue').andReturn(111)
csrf_metatags = $('meta[name=csrf-param], meta[name=csrf-token]')
csrf_metatags.remove()
afterEach ->
csrf_metatags.appendTo($('head'))
it "should include the data", ->
expect(rip.requestData()['person[age]']).toEqual(111)
it "should include rails csrf stuff if its in the HTML", ->
$('head').append """
<meta content="test_authenticity_token" name="csrf-param" />
<meta content="123456" name="csrf-token" />
"""
expect(rip.requestData()['test_authenticity_token']).toEqual('123456')
it "should not include rails csrf stuff if its not in the HTML", ->
expect(rip.requestData()['authenticity_token']).toBeUndefined()
describe "after updating", ->
jqXHR = null
beforeEach ->
jqXHR = new $.Deferred()
spyOn(rip, 'ajax').andCallFake (options = {}) ->
options.url = @url
options.dataType = "json"
jqXHR
describe "when receiving HTTP 204 No Content", ->
newValue = 12
beforeEach ->
spyOn(rip, 'getValue').andCallFake () ->
newValue
spyOn(rip, 'loadSuccessCallback')
rip.update()
it "should abort", ->
jqXHR.status = 204
jqXHR.resolve('', '', jqXHR)
expect(rip.loadSuccessCallback).toHaveBeenCalledWith(newValue, true)
describe "when receiving an empty body", ->
beforeEach ->
spyOn(rip, 'loadViaGET')
rip.update()
it "should load via get", ->
jqXHR.status = 200
jqXHR.resolve('', '', jqXHR)
expect(rip.loadViaGET).toHaveBeenCalled()
describe "when receiving a body with data", ->
response = age : 12
beforeEach ->
spyOn(rip, 'loadSuccessCallback')
rip.update(response)
it "should load the success callback", ->
jqXHR.resolve(response)
expect(rip.loadSuccessCallback).toHaveBeenCalledWith(response)
describe "when receiving unparseable data", ->
beforeEach ->
spyOn(rip, 'loadViaGET')
rip.update()
it "should load via get", ->
jqXHR.status = 200
jqXHR.reject(jqXHR, 'parsererror')
expect(rip.loadViaGET).toHaveBeenCalled()
describe "when receiving any other error", ->
beforeEach ->
spyOn(rip, 'abort')
rip.update()
it "should abort", ->
jqXHR.status = 500
jqXHR.reject(jqXHR)
expect(rip.abort).toHaveBeenCalled()
describe "when receiving HTML", ->
response = age : "<strong></strong>"
beforeEach ->
rip.update(response)
it "should escape the HTML", ->
jqXHR.resolve(response)
expect(rip.$element.html()).toEqual("<strong></strong>")
describe "User Interaction", ->
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age">Blubb</span></p>'
describe "when clicked", ->
it "should be turned rip-active", ->
rip.$element.click()
expect(rip.$element.hasClass('rip-active')).toBe(true)
it "should call activate", ->
spyOn(rip, 'activate')
rip.$element.click()
expect(rip.activate).toHaveBeenCalled()
xit "should remove the click handler"
describe "when aborting", ->
beforeEach ->
rip.activate()
it "should remove rip-active", ->
rip.abort()
expect(rip.$element.hasClass('rip-active')).toBe(false)
describe "jQuery Interface", ->
it "should automatically convert elements with class rest-in-place", ->
rip = $('#autoload-sample').data('restInPlaceEditor')
expect(typeof rip.activate).toEqual("function")
it "should convert jQuery objects with the restInPlace() function ", ->
rip = $('<p><span data-object="person" data-attribute="age">Blubb</span></p>')
.find('span')
.restInPlace()
.data('restInPlaceEditor')
expect(typeof rip.activate).toEqual("function")
describe "Events", ->
handlers = {}
jqXHR = null
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age">Blubb</span></p>'
handlers =
activate : ->
success : ->
failure : ->
update : ->
abort : ->
ready : ->
jqXHR = new $.Deferred()
spyOn(rip, 'ajax').andCallFake (options = {}) ->
options.url = @url
options.dataType = "json"
jqXHR
it "should dispatch activate.rest-in-place", ->
spyOn(handlers, 'activate')
rip.$element.bind("activate.rest-in-place", handlers.activate)
rip.activate()
expect(handlers.activate).toHaveBeenCalled()
it "should dispatch success.rest-in-place", ->
spyOn(handlers, 'success')
rip.$element.bind("success.rest-in-place", handlers.success)
rip.loadSuccessCallback({person: {age: 666}})
expect(handlers.success).toHaveBeenCalled()
describe "on failure", ->
responseJSON = {"some": "json"}
beforeEach ->
spyOn(handlers, 'failure')
rip.$element.bind("failure.rest-in-place", handlers.failure)
jqXHR.status = 404
jqXHR.responseJSON = responseJSON
jqXHR.reject(jqXHR, "Response text")
it "loadViaGET should dispatch failure.rest-in-place", ->
rip.loadViaGET()
expect(handlers.failure).toHaveBeenCalledWith(jasmine.any(jQuery.Event),responseJSON)
# Test POST failure
it "update should dispatch failure.rest-in-place", ->
rip.update()
expect(handlers.failure).toHaveBeenCalledWith(jasmine.any(jQuery.Event), responseJSON)
it "should dispatch update.rest-in-place", ->
spyOn(handlers, 'update')
rip.$element.bind("update.rest-in-place", handlers.update)
rip.update()
expect(handlers.update).toHaveBeenCalled()
it "should dispatch abort.rest-in-place", ->
spyOn(handlers, 'abort')
rip.$element.bind("abort.rest-in-place", handlers.abort)
rip.activate()
rip.abort()
expect(handlers.abort).toHaveBeenCalled()
it "should dispatch ready.rest-in-place", ->
spyOn(handlers, 'ready')
rip.$element.bind("ready.rest-in-place", handlers.ready)
rip.activate()
expect(handlers.ready).toHaveBeenCalled()
describe "Placeholder", ->
beforeEach ->
rip = makeRip '<p><span data-placeholder="Enter age"></span></p>'
it "sets a placeholder when the value is empty", ->
expect(rip.$element.html()).toEqual('<span class="rest-in-placeholder">Enter age</span>')
it "dosen't set a placeholder when a value is present", ->
rip = makeRip '<p><span data-object="person" data-attribute="age" data-placeholder="Enter age">123</span></p>'
expect(rip.$element.html()).toNotEqual('<span class="rest-in-placeholder">Enter age</span>')
it "switches to placeholder attribute when activated", ->
rip.activate()
expect(rip.$element.find('input')[0].hasAttribute('placeholder'));
| 81636 | rip = null
makeRip = (html) ->
$(html)
.find('span')
.restInPlace()
.data('restInPlaceEditor')
beforeEach -> rip = null
describe "Setup", ->
describe "looking up attributes in parents", ->
beforeEach ->
rip = makeRip """
<p data-url="localhorst" data-formtype="textarea" data-object="person" data-attribute="name" data-placeholder="Enter name">
<span>Blubb</span>
</p>"""
it "should find the data-url" , -> expect(rip.url).toEqual('localhorst')
it "should find the data-formtype" , -> expect(rip.formType).toEqual('textarea')
it "should find the data-object" , -> expect(rip.objectName).toEqual('person')
it "should find the data-attribute" , -> expect(rip.attributeName).toEqual('name')
it "should find the data-placeholder", -> expect(rip.placeholder).toEqual('Enter name')
it "should prefer inner settings over outer", ->
rip = makeRip """<div data-object="outer"><p data-url="inner"><span>Blubb</span></p></div>"""
expect(rip.url).toEqual('inner')
describe "guessing objectName from Rails", ->
describe 'without parent-provided info', ->
beforeEach -> rip = makeRip """<p id="person_123"><span>Blubb</span></p>"""
it "should derive the objectName from a railslike id", -> expect(rip.objectName).toEqual('person')
describe 'with parent-provided info', ->
beforeEach -> rip = makeRip """<div data-object="customer"><p id="person_123"><span>Blubb</span></p></div>"""
it "should not overwrite the explicit value with the guess", -> expect(rip.objectName).not.toEqual('person')
describe "own data attributes", ->
it "url should default to the current path", ->
rip = makeRip '<p><span>Blubb</span></p>'
expect(rip.url).toEqual(document.location.pathname)
it "formtype should default to input", ->
rip = makeRip '<p><span>Blubb</span></p>'
expect(rip.formType).toEqual('input')
it "should take precedence over anything set through parents", ->
rip = makeRip """
<p data-url="localhorst" data-formtype="textarea1" data-object="person" data-attribute="name" data-placeholder="placeholder">
<span data-url="localhorst2" data-formtype="textarea" data-object="person2" data-attribute="name2" data-placeholder="placeholder2">Blubb</span>
</p>"""
expect(rip.url).toEqual('localhorst2')
expect(rip.formType).toEqual('textarea')
expect(rip.objectName).toEqual('person2')
expect(rip.attributeName).toEqual('name2')
expect(rip.placeholder).toEqual('placeholder2')
describe "Server communication", ->
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age" data-placeholder="placeholder">Blubb</span></p>'
describe "when processing the response from the server", ->
it "should not include_root_in_json by defafult", ->
expect(rip.include_root_in_json).toBe(false)
it "should unwrap the object if include_root_in_json is set", ->
rip.include_root_in_json = true
expect(rip.extractAttributeFromData(person : {age:10})).toEqual(10)
it "should directly access the attribute if include_root_in_json is not set", ->
rip.include_root_in_json = false
expect(rip.extractAttributeFromData(age:12)).toEqual(12)
describe "when sending the update", ->
csrf_metatags = null
describe "when not changing fields", ->
beforeEach ->
spyOn(rip, 'getValue').andReturn('placeholder')
csrf_metatags = $('meta[name=csrf-param], meta[name=csrf-token]')
csrf_metatags.remove()
afterEach ->
csrf_metatags.appendTo($('head'))
it "should not send the data", ->
expect(rip.requestData()['person[age]']).toEqual(null)
describe "when changing fields", ->
beforeEach ->
spyOn(rip, 'getValue').andReturn(111)
csrf_metatags = $('meta[name=csrf-param], meta[name=csrf-token]')
csrf_metatags.remove()
afterEach ->
csrf_metatags.appendTo($('head'))
it "should include the data", ->
expect(rip.requestData()['person[age]']).toEqual(111)
it "should include rails csrf stuff if its in the HTML", ->
$('head').append """
<meta content="test_authenticity_token" name="csrf-param" />
<meta content="12345<PASSWORD>" name="csrf-token" />
"""
expect(rip.requestData()['test_authenticity_token']).toEqual('<PASSWORD>')
it "should not include rails csrf stuff if its not in the HTML", ->
expect(rip.requestData()['authenticity_token']).toBeUndefined()
describe "after updating", ->
jqXHR = null
beforeEach ->
jqXHR = new $.Deferred()
spyOn(rip, 'ajax').andCallFake (options = {}) ->
options.url = @url
options.dataType = "json"
jqXHR
describe "when receiving HTTP 204 No Content", ->
newValue = 12
beforeEach ->
spyOn(rip, 'getValue').andCallFake () ->
newValue
spyOn(rip, 'loadSuccessCallback')
rip.update()
it "should abort", ->
jqXHR.status = 204
jqXHR.resolve('', '', jqXHR)
expect(rip.loadSuccessCallback).toHaveBeenCalledWith(newValue, true)
describe "when receiving an empty body", ->
beforeEach ->
spyOn(rip, 'loadViaGET')
rip.update()
it "should load via get", ->
jqXHR.status = 200
jqXHR.resolve('', '', jqXHR)
expect(rip.loadViaGET).toHaveBeenCalled()
describe "when receiving a body with data", ->
response = age : 12
beforeEach ->
spyOn(rip, 'loadSuccessCallback')
rip.update(response)
it "should load the success callback", ->
jqXHR.resolve(response)
expect(rip.loadSuccessCallback).toHaveBeenCalledWith(response)
describe "when receiving unparseable data", ->
beforeEach ->
spyOn(rip, 'loadViaGET')
rip.update()
it "should load via get", ->
jqXHR.status = 200
jqXHR.reject(jqXHR, 'parsererror')
expect(rip.loadViaGET).toHaveBeenCalled()
describe "when receiving any other error", ->
beforeEach ->
spyOn(rip, 'abort')
rip.update()
it "should abort", ->
jqXHR.status = 500
jqXHR.reject(jqXHR)
expect(rip.abort).toHaveBeenCalled()
describe "when receiving HTML", ->
response = age : "<strong></strong>"
beforeEach ->
rip.update(response)
it "should escape the HTML", ->
jqXHR.resolve(response)
expect(rip.$element.html()).toEqual("<strong></strong>")
describe "User Interaction", ->
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age">Blubb</span></p>'
describe "when clicked", ->
it "should be turned rip-active", ->
rip.$element.click()
expect(rip.$element.hasClass('rip-active')).toBe(true)
it "should call activate", ->
spyOn(rip, 'activate')
rip.$element.click()
expect(rip.activate).toHaveBeenCalled()
xit "should remove the click handler"
describe "when aborting", ->
beforeEach ->
rip.activate()
it "should remove rip-active", ->
rip.abort()
expect(rip.$element.hasClass('rip-active')).toBe(false)
describe "jQuery Interface", ->
it "should automatically convert elements with class rest-in-place", ->
rip = $('#autoload-sample').data('restInPlaceEditor')
expect(typeof rip.activate).toEqual("function")
it "should convert jQuery objects with the restInPlace() function ", ->
rip = $('<p><span data-object="person" data-attribute="age">Blubb</span></p>')
.find('span')
.restInPlace()
.data('restInPlaceEditor')
expect(typeof rip.activate).toEqual("function")
describe "Events", ->
handlers = {}
jqXHR = null
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age">Blubb</span></p>'
handlers =
activate : ->
success : ->
failure : ->
update : ->
abort : ->
ready : ->
jqXHR = new $.Deferred()
spyOn(rip, 'ajax').andCallFake (options = {}) ->
options.url = @url
options.dataType = "json"
jqXHR
it "should dispatch activate.rest-in-place", ->
spyOn(handlers, 'activate')
rip.$element.bind("activate.rest-in-place", handlers.activate)
rip.activate()
expect(handlers.activate).toHaveBeenCalled()
it "should dispatch success.rest-in-place", ->
spyOn(handlers, 'success')
rip.$element.bind("success.rest-in-place", handlers.success)
rip.loadSuccessCallback({person: {age: 666}})
expect(handlers.success).toHaveBeenCalled()
describe "on failure", ->
responseJSON = {"some": "json"}
beforeEach ->
spyOn(handlers, 'failure')
rip.$element.bind("failure.rest-in-place", handlers.failure)
jqXHR.status = 404
jqXHR.responseJSON = responseJSON
jqXHR.reject(jqXHR, "Response text")
it "loadViaGET should dispatch failure.rest-in-place", ->
rip.loadViaGET()
expect(handlers.failure).toHaveBeenCalledWith(jasmine.any(jQuery.Event),responseJSON)
# Test POST failure
it "update should dispatch failure.rest-in-place", ->
rip.update()
expect(handlers.failure).toHaveBeenCalledWith(jasmine.any(jQuery.Event), responseJSON)
it "should dispatch update.rest-in-place", ->
spyOn(handlers, 'update')
rip.$element.bind("update.rest-in-place", handlers.update)
rip.update()
expect(handlers.update).toHaveBeenCalled()
it "should dispatch abort.rest-in-place", ->
spyOn(handlers, 'abort')
rip.$element.bind("abort.rest-in-place", handlers.abort)
rip.activate()
rip.abort()
expect(handlers.abort).toHaveBeenCalled()
it "should dispatch ready.rest-in-place", ->
spyOn(handlers, 'ready')
rip.$element.bind("ready.rest-in-place", handlers.ready)
rip.activate()
expect(handlers.ready).toHaveBeenCalled()
describe "Placeholder", ->
beforeEach ->
rip = makeRip '<p><span data-placeholder="Enter age"></span></p>'
it "sets a placeholder when the value is empty", ->
expect(rip.$element.html()).toEqual('<span class="rest-in-placeholder">Enter age</span>')
it "dosen't set a placeholder when a value is present", ->
rip = makeRip '<p><span data-object="person" data-attribute="age" data-placeholder="Enter age">123</span></p>'
expect(rip.$element.html()).toNotEqual('<span class="rest-in-placeholder">Enter age</span>')
it "switches to placeholder attribute when activated", ->
rip.activate()
expect(rip.$element.find('input')[0].hasAttribute('placeholder'));
| true | rip = null
makeRip = (html) ->
$(html)
.find('span')
.restInPlace()
.data('restInPlaceEditor')
beforeEach -> rip = null
describe "Setup", ->
describe "looking up attributes in parents", ->
beforeEach ->
rip = makeRip """
<p data-url="localhorst" data-formtype="textarea" data-object="person" data-attribute="name" data-placeholder="Enter name">
<span>Blubb</span>
</p>"""
it "should find the data-url" , -> expect(rip.url).toEqual('localhorst')
it "should find the data-formtype" , -> expect(rip.formType).toEqual('textarea')
it "should find the data-object" , -> expect(rip.objectName).toEqual('person')
it "should find the data-attribute" , -> expect(rip.attributeName).toEqual('name')
it "should find the data-placeholder", -> expect(rip.placeholder).toEqual('Enter name')
it "should prefer inner settings over outer", ->
rip = makeRip """<div data-object="outer"><p data-url="inner"><span>Blubb</span></p></div>"""
expect(rip.url).toEqual('inner')
describe "guessing objectName from Rails", ->
describe 'without parent-provided info', ->
beforeEach -> rip = makeRip """<p id="person_123"><span>Blubb</span></p>"""
it "should derive the objectName from a railslike id", -> expect(rip.objectName).toEqual('person')
describe 'with parent-provided info', ->
beforeEach -> rip = makeRip """<div data-object="customer"><p id="person_123"><span>Blubb</span></p></div>"""
it "should not overwrite the explicit value with the guess", -> expect(rip.objectName).not.toEqual('person')
describe "own data attributes", ->
it "url should default to the current path", ->
rip = makeRip '<p><span>Blubb</span></p>'
expect(rip.url).toEqual(document.location.pathname)
it "formtype should default to input", ->
rip = makeRip '<p><span>Blubb</span></p>'
expect(rip.formType).toEqual('input')
it "should take precedence over anything set through parents", ->
rip = makeRip """
<p data-url="localhorst" data-formtype="textarea1" data-object="person" data-attribute="name" data-placeholder="placeholder">
<span data-url="localhorst2" data-formtype="textarea" data-object="person2" data-attribute="name2" data-placeholder="placeholder2">Blubb</span>
</p>"""
expect(rip.url).toEqual('localhorst2')
expect(rip.formType).toEqual('textarea')
expect(rip.objectName).toEqual('person2')
expect(rip.attributeName).toEqual('name2')
expect(rip.placeholder).toEqual('placeholder2')
describe "Server communication", ->
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age" data-placeholder="placeholder">Blubb</span></p>'
describe "when processing the response from the server", ->
it "should not include_root_in_json by defafult", ->
expect(rip.include_root_in_json).toBe(false)
it "should unwrap the object if include_root_in_json is set", ->
rip.include_root_in_json = true
expect(rip.extractAttributeFromData(person : {age:10})).toEqual(10)
it "should directly access the attribute if include_root_in_json is not set", ->
rip.include_root_in_json = false
expect(rip.extractAttributeFromData(age:12)).toEqual(12)
describe "when sending the update", ->
csrf_metatags = null
describe "when not changing fields", ->
beforeEach ->
spyOn(rip, 'getValue').andReturn('placeholder')
csrf_metatags = $('meta[name=csrf-param], meta[name=csrf-token]')
csrf_metatags.remove()
afterEach ->
csrf_metatags.appendTo($('head'))
it "should not send the data", ->
expect(rip.requestData()['person[age]']).toEqual(null)
describe "when changing fields", ->
beforeEach ->
spyOn(rip, 'getValue').andReturn(111)
csrf_metatags = $('meta[name=csrf-param], meta[name=csrf-token]')
csrf_metatags.remove()
afterEach ->
csrf_metatags.appendTo($('head'))
it "should include the data", ->
expect(rip.requestData()['person[age]']).toEqual(111)
it "should include rails csrf stuff if its in the HTML", ->
$('head').append """
<meta content="test_authenticity_token" name="csrf-param" />
<meta content="12345PI:PASSWORD:<PASSWORD>END_PI" name="csrf-token" />
"""
expect(rip.requestData()['test_authenticity_token']).toEqual('PI:PASSWORD:<PASSWORD>END_PI')
it "should not include rails csrf stuff if its not in the HTML", ->
expect(rip.requestData()['authenticity_token']).toBeUndefined()
describe "after updating", ->
jqXHR = null
beforeEach ->
jqXHR = new $.Deferred()
spyOn(rip, 'ajax').andCallFake (options = {}) ->
options.url = @url
options.dataType = "json"
jqXHR
describe "when receiving HTTP 204 No Content", ->
newValue = 12
beforeEach ->
spyOn(rip, 'getValue').andCallFake () ->
newValue
spyOn(rip, 'loadSuccessCallback')
rip.update()
it "should abort", ->
jqXHR.status = 204
jqXHR.resolve('', '', jqXHR)
expect(rip.loadSuccessCallback).toHaveBeenCalledWith(newValue, true)
describe "when receiving an empty body", ->
beforeEach ->
spyOn(rip, 'loadViaGET')
rip.update()
it "should load via get", ->
jqXHR.status = 200
jqXHR.resolve('', '', jqXHR)
expect(rip.loadViaGET).toHaveBeenCalled()
describe "when receiving a body with data", ->
response = age : 12
beforeEach ->
spyOn(rip, 'loadSuccessCallback')
rip.update(response)
it "should load the success callback", ->
jqXHR.resolve(response)
expect(rip.loadSuccessCallback).toHaveBeenCalledWith(response)
describe "when receiving unparseable data", ->
beforeEach ->
spyOn(rip, 'loadViaGET')
rip.update()
it "should load via get", ->
jqXHR.status = 200
jqXHR.reject(jqXHR, 'parsererror')
expect(rip.loadViaGET).toHaveBeenCalled()
describe "when receiving any other error", ->
beforeEach ->
spyOn(rip, 'abort')
rip.update()
it "should abort", ->
jqXHR.status = 500
jqXHR.reject(jqXHR)
expect(rip.abort).toHaveBeenCalled()
describe "when receiving HTML", ->
response = age : "<strong></strong>"
beforeEach ->
rip.update(response)
it "should escape the HTML", ->
jqXHR.resolve(response)
expect(rip.$element.html()).toEqual("<strong></strong>")
describe "User Interaction", ->
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age">Blubb</span></p>'
describe "when clicked", ->
it "should be turned rip-active", ->
rip.$element.click()
expect(rip.$element.hasClass('rip-active')).toBe(true)
it "should call activate", ->
spyOn(rip, 'activate')
rip.$element.click()
expect(rip.activate).toHaveBeenCalled()
xit "should remove the click handler"
describe "when aborting", ->
beforeEach ->
rip.activate()
it "should remove rip-active", ->
rip.abort()
expect(rip.$element.hasClass('rip-active')).toBe(false)
describe "jQuery Interface", ->
it "should automatically convert elements with class rest-in-place", ->
rip = $('#autoload-sample').data('restInPlaceEditor')
expect(typeof rip.activate).toEqual("function")
it "should convert jQuery objects with the restInPlace() function ", ->
rip = $('<p><span data-object="person" data-attribute="age">Blubb</span></p>')
.find('span')
.restInPlace()
.data('restInPlaceEditor')
expect(typeof rip.activate).toEqual("function")
describe "Events", ->
handlers = {}
jqXHR = null
beforeEach ->
rip = makeRip '<p><span data-object="person" data-attribute="age">Blubb</span></p>'
handlers =
activate : ->
success : ->
failure : ->
update : ->
abort : ->
ready : ->
jqXHR = new $.Deferred()
spyOn(rip, 'ajax').andCallFake (options = {}) ->
options.url = @url
options.dataType = "json"
jqXHR
it "should dispatch activate.rest-in-place", ->
spyOn(handlers, 'activate')
rip.$element.bind("activate.rest-in-place", handlers.activate)
rip.activate()
expect(handlers.activate).toHaveBeenCalled()
it "should dispatch success.rest-in-place", ->
spyOn(handlers, 'success')
rip.$element.bind("success.rest-in-place", handlers.success)
rip.loadSuccessCallback({person: {age: 666}})
expect(handlers.success).toHaveBeenCalled()
describe "on failure", ->
responseJSON = {"some": "json"}
beforeEach ->
spyOn(handlers, 'failure')
rip.$element.bind("failure.rest-in-place", handlers.failure)
jqXHR.status = 404
jqXHR.responseJSON = responseJSON
jqXHR.reject(jqXHR, "Response text")
it "loadViaGET should dispatch failure.rest-in-place", ->
rip.loadViaGET()
expect(handlers.failure).toHaveBeenCalledWith(jasmine.any(jQuery.Event),responseJSON)
# Test POST failure
it "update should dispatch failure.rest-in-place", ->
rip.update()
expect(handlers.failure).toHaveBeenCalledWith(jasmine.any(jQuery.Event), responseJSON)
it "should dispatch update.rest-in-place", ->
spyOn(handlers, 'update')
rip.$element.bind("update.rest-in-place", handlers.update)
rip.update()
expect(handlers.update).toHaveBeenCalled()
it "should dispatch abort.rest-in-place", ->
spyOn(handlers, 'abort')
rip.$element.bind("abort.rest-in-place", handlers.abort)
rip.activate()
rip.abort()
expect(handlers.abort).toHaveBeenCalled()
it "should dispatch ready.rest-in-place", ->
spyOn(handlers, 'ready')
rip.$element.bind("ready.rest-in-place", handlers.ready)
rip.activate()
expect(handlers.ready).toHaveBeenCalled()
describe "Placeholder", ->
beforeEach ->
rip = makeRip '<p><span data-placeholder="Enter age"></span></p>'
it "sets a placeholder when the value is empty", ->
expect(rip.$element.html()).toEqual('<span class="rest-in-placeholder">Enter age</span>')
it "dosen't set a placeholder when a value is present", ->
rip = makeRip '<p><span data-object="person" data-attribute="age" data-placeholder="Enter age">123</span></p>'
expect(rip.$element.html()).toNotEqual('<span class="rest-in-placeholder">Enter age</span>')
it "switches to placeholder attribute when activated", ->
rip.activate()
expect(rip.$element.find('input')[0].hasAttribute('placeholder'));
|
[
{
"context": "mediately shown/hidden\n#\n# Copyright (C) 2011-2012 Nikolay Nemshilov\n#\nElement.include\n\n #\n # The basic attributes h",
"end": 314,
"score": 0.9998892545700073,
"start": 297,
"tag": "NAME",
"value": "Nikolay Nemshilov"
},
{
"context": "e])\n\n else if value i... | stl/dom/src/element/commons.coffee | lovely-io/lovely.io-stl | 2 | #
# This file contains the common use methods
#
# NOTE: some methods, like `#show`, `#hide`, etc
# might take a visual effect settings, those settings
# will work only if the `fx` module is included on the page,
# otherwise the element will be immediately shown/hidden
#
# Copyright (C) 2011-2012 Nikolay Nemshilov
#
Element.include
#
# The basic attributes handling method
#
# :coffee
# element.attr('name') # -> getting attribute
# element.attr('name', 'value') # -> setting attribute
# element.attr('name', null) # -> removing attribute
# element.attr('name') is null # -> checking attribute
#
# element.attr
# name1: 'value1'
# name2: 'value2'
# ....
#
# @param {String|Object} attribute name or a hash of attributes
# @param {String|undefined} attribute value
# @return {String|Element} attribute value or element reference
#
attr: (name, value) ->
if typeof(name) is 'string'
if value is undefined # reading an attribute
value = @_[name] || @_.getAttribute(name)
return if value is '' then null else value
else if value is null # erazing an attribute
@_.removeAttribute(name)
delete @_[name]
else if name is 'style' # bypassing the styles into the #style method
@style value
else # setting an attribute
element = @_
element.setAttribute(name, value) unless name of element
element[name] = value
else # assuming it's a hash to set
for value of name
@attr(value, name[value])
return @
#
# Sets/gets the `data-smth` data attribute and
# automatically converts everything in/out JSON
#
# @param {String} key name
# @param {mixed} data or `undefined` to erase
# @return {Element|mixed} self or extracted data
#
data: (key, value)->
if isObject(key)
for name of key
value = @data(name, key[name])
else if value is undefined
key = dasherize('data-'+key)
result = {}
match = false
for attr in @_.attributes
value = attr.value
try
value = JSON.parse(value)
catch e
if attr.name is key
result = value
match = true
break
else if attr.name.indexOf(key) is 0
result[camelize(attr.name.substring(key.length+1))] = value
match = true
value = if match then result else null
else
key = dasherize('data-'+ key)
value = {'': value} unless isObject(value)
for name of value
attr = if `name == false` then key else dasherize(key+'-'+name)
if value[name] is null
@_.removeAttribute(attr)
else
@_.setAttribute(attr, if isString(value[name]) then value[name] else JSON.stringify(value[name]))
value = @
return value
#
# Checks if the element is hidden
#
# @return {Boolean} check result
#
hidden: ->
@style('display') is 'none'
#
# Checks if the element is visible
#
# @return {Boolean} check result
#
visible: ->
!@hidden()
#
# Hides an element (optionally with fx)
#
# @return {Element} this
#
hide: ->
if @visible()
@_old_display = @style('display')
@_.style.display = 'none'
return @
#
# Shows an element (optionally with fx)
#
# @return {Element} this
#
show: ->
if @hidden()
element = @_
value = @_old_display
if !value || value is 'none'
dummy = new Element(element.tagName).insertTo(HTML)
value = dummy.style('display') || 'none'
dummy.remove()
element.style.display = if value is 'none' then 'block' else value
return @
#
# Toggles an element's visual state (optionally with fx)
#
# @return {Element} this
#
toggle: ->
if @hidden() then @show() else @hide()
#
# hides all the sibling elements and shows this one (optionally with fx)
#
# @return {Element} this
#
radio: ->
@siblings().forEach('hide')
@show()
#
# Returns the element's owner document reference
#
# @return {Document} wrapped owner document
#
document: ->
wrap @_.ownerDocument
#
# Returns the element's owner window reference
#
# @return {Window} wrapped owner window
#
#
window: ->
@document().window()
| 54084 | #
# This file contains the common use methods
#
# NOTE: some methods, like `#show`, `#hide`, etc
# might take a visual effect settings, those settings
# will work only if the `fx` module is included on the page,
# otherwise the element will be immediately shown/hidden
#
# Copyright (C) 2011-2012 <NAME>
#
Element.include
#
# The basic attributes handling method
#
# :coffee
# element.attr('name') # -> getting attribute
# element.attr('name', 'value') # -> setting attribute
# element.attr('name', null) # -> removing attribute
# element.attr('name') is null # -> checking attribute
#
# element.attr
# name1: 'value1'
# name2: 'value2'
# ....
#
# @param {String|Object} attribute name or a hash of attributes
# @param {String|undefined} attribute value
# @return {String|Element} attribute value or element reference
#
attr: (name, value) ->
if typeof(name) is 'string'
if value is undefined # reading an attribute
value = @_[name] || @_.getAttribute(name)
return if value is '' then null else value
else if value is null # erazing an attribute
@_.removeAttribute(name)
delete @_[name]
else if name is 'style' # bypassing the styles into the #style method
@style value
else # setting an attribute
element = @_
element.setAttribute(name, value) unless name of element
element[name] = value
else # assuming it's a hash to set
for value of name
@attr(value, name[value])
return @
#
# Sets/gets the `data-smth` data attribute and
# automatically converts everything in/out JSON
#
# @param {String} key name
# @param {mixed} data or `undefined` to erase
# @return {Element|mixed} self or extracted data
#
data: (key, value)->
if isObject(key)
for name of key
value = @data(name, key[name])
else if value is undefined
key = <KEY>er<KEY>('<KEY>'+key)
result = {}
match = false
for attr in @_.attributes
value = attr.value
try
value = JSON.parse(value)
catch e
if attr.name is key
result = value
match = true
break
else if attr.name.indexOf(key) is 0
result[camelize(attr.name.substring(key.length+1))] = value
match = true
value = if match then result else null
else
key = dasherize('data-'+ key)
value = {'': value} unless isObject(value)
for name of value
attr = if `name == false` then key else dasherize(key+'-'+name)
if value[name] is null
@_.removeAttribute(attr)
else
@_.setAttribute(attr, if isString(value[name]) then value[name] else JSON.stringify(value[name]))
value = @
return value
#
# Checks if the element is hidden
#
# @return {Boolean} check result
#
hidden: ->
@style('display') is 'none'
#
# Checks if the element is visible
#
# @return {Boolean} check result
#
visible: ->
!@hidden()
#
# Hides an element (optionally with fx)
#
# @return {Element} this
#
hide: ->
if @visible()
@_old_display = @style('display')
@_.style.display = 'none'
return @
#
# Shows an element (optionally with fx)
#
# @return {Element} this
#
show: ->
if @hidden()
element = @_
value = @_old_display
if !value || value is 'none'
dummy = new Element(element.tagName).insertTo(HTML)
value = dummy.style('display') || 'none'
dummy.remove()
element.style.display = if value is 'none' then 'block' else value
return @
#
# Toggles an element's visual state (optionally with fx)
#
# @return {Element} this
#
toggle: ->
if @hidden() then @show() else @hide()
#
# hides all the sibling elements and shows this one (optionally with fx)
#
# @return {Element} this
#
radio: ->
@siblings().forEach('hide')
@show()
#
# Returns the element's owner document reference
#
# @return {Document} wrapped owner document
#
document: ->
wrap @_.ownerDocument
#
# Returns the element's owner window reference
#
# @return {Window} wrapped owner window
#
#
window: ->
@document().window()
| true | #
# This file contains the common use methods
#
# NOTE: some methods, like `#show`, `#hide`, etc
# might take a visual effect settings, those settings
# will work only if the `fx` module is included on the page,
# otherwise the element will be immediately shown/hidden
#
# Copyright (C) 2011-2012 PI:NAME:<NAME>END_PI
#
Element.include
#
# The basic attributes handling method
#
# :coffee
# element.attr('name') # -> getting attribute
# element.attr('name', 'value') # -> setting attribute
# element.attr('name', null) # -> removing attribute
# element.attr('name') is null # -> checking attribute
#
# element.attr
# name1: 'value1'
# name2: 'value2'
# ....
#
# @param {String|Object} attribute name or a hash of attributes
# @param {String|undefined} attribute value
# @return {String|Element} attribute value or element reference
#
attr: (name, value) ->
if typeof(name) is 'string'
if value is undefined # reading an attribute
value = @_[name] || @_.getAttribute(name)
return if value is '' then null else value
else if value is null # erazing an attribute
@_.removeAttribute(name)
delete @_[name]
else if name is 'style' # bypassing the styles into the #style method
@style value
else # setting an attribute
element = @_
element.setAttribute(name, value) unless name of element
element[name] = value
else # assuming it's a hash to set
for value of name
@attr(value, name[value])
return @
#
# Sets/gets the `data-smth` data attribute and
# automatically converts everything in/out JSON
#
# @param {String} key name
# @param {mixed} data or `undefined` to erase
# @return {Element|mixed} self or extracted data
#
data: (key, value)->
if isObject(key)
for name of key
value = @data(name, key[name])
else if value is undefined
key = PI:KEY:<KEY>END_PIerPI:KEY:<KEY>END_PI('PI:KEY:<KEY>END_PI'+key)
result = {}
match = false
for attr in @_.attributes
value = attr.value
try
value = JSON.parse(value)
catch e
if attr.name is key
result = value
match = true
break
else if attr.name.indexOf(key) is 0
result[camelize(attr.name.substring(key.length+1))] = value
match = true
value = if match then result else null
else
key = dasherize('data-'+ key)
value = {'': value} unless isObject(value)
for name of value
attr = if `name == false` then key else dasherize(key+'-'+name)
if value[name] is null
@_.removeAttribute(attr)
else
@_.setAttribute(attr, if isString(value[name]) then value[name] else JSON.stringify(value[name]))
value = @
return value
#
# Checks if the element is hidden
#
# @return {Boolean} check result
#
hidden: ->
@style('display') is 'none'
#
# Checks if the element is visible
#
# @return {Boolean} check result
#
visible: ->
!@hidden()
#
# Hides an element (optionally with fx)
#
# @return {Element} this
#
hide: ->
if @visible()
@_old_display = @style('display')
@_.style.display = 'none'
return @
#
# Shows an element (optionally with fx)
#
# @return {Element} this
#
show: ->
if @hidden()
element = @_
value = @_old_display
if !value || value is 'none'
dummy = new Element(element.tagName).insertTo(HTML)
value = dummy.style('display') || 'none'
dummy.remove()
element.style.display = if value is 'none' then 'block' else value
return @
#
# Toggles an element's visual state (optionally with fx)
#
# @return {Element} this
#
toggle: ->
if @hidden() then @show() else @hide()
#
# hides all the sibling elements and shows this one (optionally with fx)
#
# @return {Element} this
#
radio: ->
@siblings().forEach('hide')
@show()
#
# Returns the element's owner document reference
#
# @return {Document} wrapped owner document
#
document: ->
wrap @_.ownerDocument
#
# Returns the element's owner window reference
#
# @return {Window} wrapped owner window
#
#
window: ->
@document().window()
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9818058013916016,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/clientFilePage/planTab/planTarget.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
Imm = require 'immutable'
Decorate = require 'es-decorate'
Term = require '../../term'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
{findDOMNode} = win.ReactDOM
{DragSource, DropTarget} = win.ReactDnD
StatusButtonGroup = require('./statusButtonGroup').load(win)
ModifyTargetStatusDialog = require('../modifyTargetStatusDialog').load(win)
MetricLookupField = require('../../metricLookupField').load(win)
ExpandingTextArea = require('../../expandingTextArea').load(win)
MetricWidget = require('../../metricWidget').load(win)
{FaIcon, scrollToElement} = require('../../utils').load(win)
PlanTarget = React.createClass
displayName: 'PlanTarget'
mixins: [React.addons.PureRenderMixin]
propTypes: {
# DnD
connectDragSource: React.PropTypes.func.isRequired
connectDragPreview: React.PropTypes.func.isRequired
connectDropTarget: React.PropTypes.func.isRequired
isDragging: React.PropTypes.bool.isRequired
# DnD props
index: React.PropTypes.number.isRequired
# Raw data
target: React.PropTypes.instanceOf(Imm.Map).isRequired
# Methods
reorderTargetId: React.PropTypes.func.isRequired
}
render: ->
{
target, sectionIsInactive
isSelected, isReadOnly, isCollapsed, isExistingTarget, hasChanges
connectDropTarget, connectDragPreview, connectDragSource
onExpandTarget
} = @props
{id, status, name, description, metricIds} = target.toObject()
canChangeStatus = (
isSelected and
not sectionIsInactive and
not isReadOnly and
isExistingTarget
)
isDisabled = (
isReadOnly or
status isnt 'default' or
sectionIsInactive or isReadOnly
)
return connectDropTarget connectDragPreview (
R.div({
id: "target-#{id}"
className: [
'planTarget'
"status-#{status}"
'isSelected' if isSelected
'hasChanges' if hasChanges or not isExistingTarget
'isCollapsed' if isCollapsed
'readOnly' if isReadOnly
'dragging' if @props.isDragging
].join ' '
onClick: @props.onTargetSelection
},
connectDragSource (
R.div({
className: 'dragSource'
},
FaIcon('arrows-v')
)
)
R.div({className: 'planTargetContainer'},
(if not isExistingTarget
R.div({className: 'deleteTarget', onClick: @props.onRemoveNewTarget},
FaIcon 'times'
)
)
R.div({className: 'nameContainer'},
(if isCollapsed
R.span({
className: 'name field static'
onClick: @props.onExpandTarget
},
name
)
else
R.input({
ref: 'nameField'
className: 'form-control name field'
type: 'text'
value: name
placeholder: "Name of #{Term 'target'}"
onChange: @_updateField.bind null, 'name'
onFocus: @props.onTargetSelection
onClick: @props.onTargetSelection
disabled: isDisabled
})
)
(if canChangeStatus
StatusButtonGroup({
planElementType: Term 'Target'
data: target
isExisting: isExistingTarget
status
onRemove: null
dialog: ModifyTargetStatusDialog
})
)
)
R.div({className: 'descriptionContainer'},
ExpandingTextArea({
className: 'description field'
placeholder: "Details (optional)"
value: description
disabled: isDisabled
onChange: @_updateField.bind null, 'description'
onFocus: @props.onTargetSelection
onClick: @props.onTargetSelection
activeTabId: @props.activeTabId
})
)
(if not metricIds.isEmpty() or @props.isSelected
R.div({className: 'metrics'},
R.div({className: 'metricsList'},
(metricIds.map (metricId) =>
metric = @props.metricsById.get(metricId)
MetricWidget({
name: metric.get('name')
definition: metric.get('definition')
value: metric.get('value')
key: metricId
tooltipViewport: '.sections'
isEditable: false
allowDeleting: not isDisabled
onDelete: @props.deleteMetricFromTarget.bind(
null, id, metricId
)
})
)
(if @props.isSelected and not isDisabled
R.button({
className: "btn btn-link addMetricButton animated fadeIn"
onClick: @_focusMetricLookupField.bind(null, id)
},
FaIcon('plus')
" Add #{Term 'metric'}"
)
)
)
(unless isDisabled
R.div({
ref: 'metricLookup'
className: 'metricLookupContainer'
},
MetricLookupField({
metrics: @props.metricsById.valueSeq().filter (metric) => metric.get('status') is 'default'
onSelection: @props.addMetricToTarget.bind(
null, id, @_hideMetricInput
)
placeholder: "Find / Define a #{Term 'Metric'}"
isReadOnly: @props.isReadOnly
onBlur: @_hideMetricInput
})
)
)
)
)
)
)
)
_updateField: (fieldName, event) ->
newValue = @props.target.set fieldName, event.target.value
@props.onTargetUpdate(newValue)
###
# todo: do we need this?
_onTargetClick: (event) ->
classList = event.target.classList
# Prevent distracting switching of selectedTarget while re-ordering targets
return if classList.contains 'dragSource'
# Clicking anywhere but the fields or buttons will focus the name field
shouldFocusNameField = not (
(classList.contains 'field') or
(classList.contains 'lookupField') or
(classList.contains 'btn') or
@props.isReadOnly
)
@props.setSelectedTarget @props.target.get('id'), =>
@_focusNameField() if shouldFocusNameField
###
_focusNameField: ->
@refs.nameField.focus() if @refs.nameField?
_focusMetricLookupField: ->
$(@refs.metricLookup).show()
$('.lookupField').focus()
# scroll down so metric results are never hidden below view
$parent = win.document.getElementById('planView')
$element = findDOMNode(@refs.metricLookup)
scrollToElement $parent, $element, 1000, 'easeInOutQuad', 200, (->)
_hideMetricInput: ->
$(@refs.metricLookup).hide()
# Drag source contract
targetSource = {
beginDrag: ({id, index, sectionIndex}) -> {id, index, sectionIndex}
}
targetDestination = {
hover: (props, monitor, component) ->
draggingTargetProps = monitor.getItem()
sectionIndex = draggingTargetProps.sectionIndex
dragIndex = draggingTargetProps.index
hoverIndex = props.index
# Don't replace items with themselves
return if dragIndex is hoverIndex
# Can't drag target to another section
return if sectionIndex isnt props.sectionIndex
# Determine rectangle on screen
hoverBoundingRect = findDOMNode(component).getBoundingClientRect()
# Get vertical middle
hoverMiddleTopY = (hoverBoundingRect.bottom - hoverBoundingRect.top) / 4
hoverMiddleBottomY = hoverMiddleTopY * 3
# Determine mouse position
clientOffset = monitor.getClientOffset()
# Get pixels to the top
hoverClientY = clientOffset.y - hoverBoundingRect.top
# Only perform the move when the mouse has crossed half of the item's height
# When dragging downwards, only move when the cursor is below 50%
# When dragging upwards, only move when the cursor is above 50%
# Dragging downwards
return if dragIndex < hoverIndex and hoverClientY < hoverMiddleTopY
# Dragging upwards
return if dragIndex > hoverIndex and hoverClientY > hoverMiddleBottomY
# Time to actually perform the action
props.reorderTargetId(sectionIndex, dragIndex, hoverIndex)
# (Example says to mutate here, but we're using Imm data)
monitor.getItem().index = hoverIndex;
}
# Specify props to inject into component
collectSource = (connect, monitor) -> {
connectDragSource: connect.dragSource()
connectDragPreview: connect.dragPreview()
isDragging: monitor.isDragging()
}
connectDestination = (connect) -> {
connectDropTarget: connect.dropTarget()
}
# Wrap (decorate) planTarget with drag-drop
return React.createFactory Decorate [
DropTarget('target', targetDestination, connectDestination)
DragSource('target', targetSource, collectSource)
], PlanTarget
module.exports = {load}
| 37143 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
Imm = require 'immutable'
Decorate = require 'es-decorate'
Term = require '../../term'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
{findDOMNode} = win.ReactDOM
{DragSource, DropTarget} = win.ReactDnD
StatusButtonGroup = require('./statusButtonGroup').load(win)
ModifyTargetStatusDialog = require('../modifyTargetStatusDialog').load(win)
MetricLookupField = require('../../metricLookupField').load(win)
ExpandingTextArea = require('../../expandingTextArea').load(win)
MetricWidget = require('../../metricWidget').load(win)
{FaIcon, scrollToElement} = require('../../utils').load(win)
PlanTarget = React.createClass
displayName: 'PlanTarget'
mixins: [React.addons.PureRenderMixin]
propTypes: {
# DnD
connectDragSource: React.PropTypes.func.isRequired
connectDragPreview: React.PropTypes.func.isRequired
connectDropTarget: React.PropTypes.func.isRequired
isDragging: React.PropTypes.bool.isRequired
# DnD props
index: React.PropTypes.number.isRequired
# Raw data
target: React.PropTypes.instanceOf(Imm.Map).isRequired
# Methods
reorderTargetId: React.PropTypes.func.isRequired
}
render: ->
{
target, sectionIsInactive
isSelected, isReadOnly, isCollapsed, isExistingTarget, hasChanges
connectDropTarget, connectDragPreview, connectDragSource
onExpandTarget
} = @props
{id, status, name, description, metricIds} = target.toObject()
canChangeStatus = (
isSelected and
not sectionIsInactive and
not isReadOnly and
isExistingTarget
)
isDisabled = (
isReadOnly or
status isnt 'default' or
sectionIsInactive or isReadOnly
)
return connectDropTarget connectDragPreview (
R.div({
id: "target-#{id}"
className: [
'planTarget'
"status-#{status}"
'isSelected' if isSelected
'hasChanges' if hasChanges or not isExistingTarget
'isCollapsed' if isCollapsed
'readOnly' if isReadOnly
'dragging' if @props.isDragging
].join ' '
onClick: @props.onTargetSelection
},
connectDragSource (
R.div({
className: 'dragSource'
},
FaIcon('arrows-v')
)
)
R.div({className: 'planTargetContainer'},
(if not isExistingTarget
R.div({className: 'deleteTarget', onClick: @props.onRemoveNewTarget},
FaIcon 'times'
)
)
R.div({className: 'nameContainer'},
(if isCollapsed
R.span({
className: 'name field static'
onClick: @props.onExpandTarget
},
name
)
else
R.input({
ref: 'nameField'
className: 'form-control name field'
type: 'text'
value: name
placeholder: "Name of #{Term 'target'}"
onChange: @_updateField.bind null, 'name'
onFocus: @props.onTargetSelection
onClick: @props.onTargetSelection
disabled: isDisabled
})
)
(if canChangeStatus
StatusButtonGroup({
planElementType: Term 'Target'
data: target
isExisting: isExistingTarget
status
onRemove: null
dialog: ModifyTargetStatusDialog
})
)
)
R.div({className: 'descriptionContainer'},
ExpandingTextArea({
className: 'description field'
placeholder: "Details (optional)"
value: description
disabled: isDisabled
onChange: @_updateField.bind null, 'description'
onFocus: @props.onTargetSelection
onClick: @props.onTargetSelection
activeTabId: @props.activeTabId
})
)
(if not metricIds.isEmpty() or @props.isSelected
R.div({className: 'metrics'},
R.div({className: 'metricsList'},
(metricIds.map (metricId) =>
metric = @props.metricsById.get(metricId)
MetricWidget({
name: metric.get('name')
definition: metric.get('definition')
value: metric.get('value')
key: metricId
tooltipViewport: '.sections'
isEditable: false
allowDeleting: not isDisabled
onDelete: @props.deleteMetricFromTarget.bind(
null, id, metricId
)
})
)
(if @props.isSelected and not isDisabled
R.button({
className: "btn btn-link addMetricButton animated fadeIn"
onClick: @_focusMetricLookupField.bind(null, id)
},
FaIcon('plus')
" Add #{Term 'metric'}"
)
)
)
(unless isDisabled
R.div({
ref: 'metricLookup'
className: 'metricLookupContainer'
},
MetricLookupField({
metrics: @props.metricsById.valueSeq().filter (metric) => metric.get('status') is 'default'
onSelection: @props.addMetricToTarget.bind(
null, id, @_hideMetricInput
)
placeholder: "Find / Define a #{Term 'Metric'}"
isReadOnly: @props.isReadOnly
onBlur: @_hideMetricInput
})
)
)
)
)
)
)
)
_updateField: (fieldName, event) ->
newValue = @props.target.set fieldName, event.target.value
@props.onTargetUpdate(newValue)
###
# todo: do we need this?
_onTargetClick: (event) ->
classList = event.target.classList
# Prevent distracting switching of selectedTarget while re-ordering targets
return if classList.contains 'dragSource'
# Clicking anywhere but the fields or buttons will focus the name field
shouldFocusNameField = not (
(classList.contains 'field') or
(classList.contains 'lookupField') or
(classList.contains 'btn') or
@props.isReadOnly
)
@props.setSelectedTarget @props.target.get('id'), =>
@_focusNameField() if shouldFocusNameField
###
_focusNameField: ->
@refs.nameField.focus() if @refs.nameField?
_focusMetricLookupField: ->
$(@refs.metricLookup).show()
$('.lookupField').focus()
# scroll down so metric results are never hidden below view
$parent = win.document.getElementById('planView')
$element = findDOMNode(@refs.metricLookup)
scrollToElement $parent, $element, 1000, 'easeInOutQuad', 200, (->)
_hideMetricInput: ->
$(@refs.metricLookup).hide()
# Drag source contract
targetSource = {
beginDrag: ({id, index, sectionIndex}) -> {id, index, sectionIndex}
}
targetDestination = {
hover: (props, monitor, component) ->
draggingTargetProps = monitor.getItem()
sectionIndex = draggingTargetProps.sectionIndex
dragIndex = draggingTargetProps.index
hoverIndex = props.index
# Don't replace items with themselves
return if dragIndex is hoverIndex
# Can't drag target to another section
return if sectionIndex isnt props.sectionIndex
# Determine rectangle on screen
hoverBoundingRect = findDOMNode(component).getBoundingClientRect()
# Get vertical middle
hoverMiddleTopY = (hoverBoundingRect.bottom - hoverBoundingRect.top) / 4
hoverMiddleBottomY = hoverMiddleTopY * 3
# Determine mouse position
clientOffset = monitor.getClientOffset()
# Get pixels to the top
hoverClientY = clientOffset.y - hoverBoundingRect.top
# Only perform the move when the mouse has crossed half of the item's height
# When dragging downwards, only move when the cursor is below 50%
# When dragging upwards, only move when the cursor is above 50%
# Dragging downwards
return if dragIndex < hoverIndex and hoverClientY < hoverMiddleTopY
# Dragging upwards
return if dragIndex > hoverIndex and hoverClientY > hoverMiddleBottomY
# Time to actually perform the action
props.reorderTargetId(sectionIndex, dragIndex, hoverIndex)
# (Example says to mutate here, but we're using Imm data)
monitor.getItem().index = hoverIndex;
}
# Specify props to inject into component
collectSource = (connect, monitor) -> {
connectDragSource: connect.dragSource()
connectDragPreview: connect.dragPreview()
isDragging: monitor.isDragging()
}
connectDestination = (connect) -> {
connectDropTarget: connect.dropTarget()
}
# Wrap (decorate) planTarget with drag-drop
return React.createFactory Decorate [
DropTarget('target', targetDestination, connectDestination)
DragSource('target', targetSource, collectSource)
], PlanTarget
module.exports = {load}
| true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
Imm = require 'immutable'
Decorate = require 'es-decorate'
Term = require '../../term'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
{findDOMNode} = win.ReactDOM
{DragSource, DropTarget} = win.ReactDnD
StatusButtonGroup = require('./statusButtonGroup').load(win)
ModifyTargetStatusDialog = require('../modifyTargetStatusDialog').load(win)
MetricLookupField = require('../../metricLookupField').load(win)
ExpandingTextArea = require('../../expandingTextArea').load(win)
MetricWidget = require('../../metricWidget').load(win)
{FaIcon, scrollToElement} = require('../../utils').load(win)
PlanTarget = React.createClass
displayName: 'PlanTarget'
mixins: [React.addons.PureRenderMixin]
propTypes: {
# DnD
connectDragSource: React.PropTypes.func.isRequired
connectDragPreview: React.PropTypes.func.isRequired
connectDropTarget: React.PropTypes.func.isRequired
isDragging: React.PropTypes.bool.isRequired
# DnD props
index: React.PropTypes.number.isRequired
# Raw data
target: React.PropTypes.instanceOf(Imm.Map).isRequired
# Methods
reorderTargetId: React.PropTypes.func.isRequired
}
render: ->
{
target, sectionIsInactive
isSelected, isReadOnly, isCollapsed, isExistingTarget, hasChanges
connectDropTarget, connectDragPreview, connectDragSource
onExpandTarget
} = @props
{id, status, name, description, metricIds} = target.toObject()
canChangeStatus = (
isSelected and
not sectionIsInactive and
not isReadOnly and
isExistingTarget
)
isDisabled = (
isReadOnly or
status isnt 'default' or
sectionIsInactive or isReadOnly
)
return connectDropTarget connectDragPreview (
R.div({
id: "target-#{id}"
className: [
'planTarget'
"status-#{status}"
'isSelected' if isSelected
'hasChanges' if hasChanges or not isExistingTarget
'isCollapsed' if isCollapsed
'readOnly' if isReadOnly
'dragging' if @props.isDragging
].join ' '
onClick: @props.onTargetSelection
},
connectDragSource (
R.div({
className: 'dragSource'
},
FaIcon('arrows-v')
)
)
R.div({className: 'planTargetContainer'},
(if not isExistingTarget
R.div({className: 'deleteTarget', onClick: @props.onRemoveNewTarget},
FaIcon 'times'
)
)
R.div({className: 'nameContainer'},
(if isCollapsed
R.span({
className: 'name field static'
onClick: @props.onExpandTarget
},
name
)
else
R.input({
ref: 'nameField'
className: 'form-control name field'
type: 'text'
value: name
placeholder: "Name of #{Term 'target'}"
onChange: @_updateField.bind null, 'name'
onFocus: @props.onTargetSelection
onClick: @props.onTargetSelection
disabled: isDisabled
})
)
(if canChangeStatus
StatusButtonGroup({
planElementType: Term 'Target'
data: target
isExisting: isExistingTarget
status
onRemove: null
dialog: ModifyTargetStatusDialog
})
)
)
R.div({className: 'descriptionContainer'},
ExpandingTextArea({
className: 'description field'
placeholder: "Details (optional)"
value: description
disabled: isDisabled
onChange: @_updateField.bind null, 'description'
onFocus: @props.onTargetSelection
onClick: @props.onTargetSelection
activeTabId: @props.activeTabId
})
)
(if not metricIds.isEmpty() or @props.isSelected
R.div({className: 'metrics'},
R.div({className: 'metricsList'},
(metricIds.map (metricId) =>
metric = @props.metricsById.get(metricId)
MetricWidget({
name: metric.get('name')
definition: metric.get('definition')
value: metric.get('value')
key: metricId
tooltipViewport: '.sections'
isEditable: false
allowDeleting: not isDisabled
onDelete: @props.deleteMetricFromTarget.bind(
null, id, metricId
)
})
)
(if @props.isSelected and not isDisabled
R.button({
className: "btn btn-link addMetricButton animated fadeIn"
onClick: @_focusMetricLookupField.bind(null, id)
},
FaIcon('plus')
" Add #{Term 'metric'}"
)
)
)
(unless isDisabled
R.div({
ref: 'metricLookup'
className: 'metricLookupContainer'
},
MetricLookupField({
metrics: @props.metricsById.valueSeq().filter (metric) => metric.get('status') is 'default'
onSelection: @props.addMetricToTarget.bind(
null, id, @_hideMetricInput
)
placeholder: "Find / Define a #{Term 'Metric'}"
isReadOnly: @props.isReadOnly
onBlur: @_hideMetricInput
})
)
)
)
)
)
)
)
_updateField: (fieldName, event) ->
newValue = @props.target.set fieldName, event.target.value
@props.onTargetUpdate(newValue)
###
# todo: do we need this?
_onTargetClick: (event) ->
classList = event.target.classList
# Prevent distracting switching of selectedTarget while re-ordering targets
return if classList.contains 'dragSource'
# Clicking anywhere but the fields or buttons will focus the name field
shouldFocusNameField = not (
(classList.contains 'field') or
(classList.contains 'lookupField') or
(classList.contains 'btn') or
@props.isReadOnly
)
@props.setSelectedTarget @props.target.get('id'), =>
@_focusNameField() if shouldFocusNameField
###
_focusNameField: ->
@refs.nameField.focus() if @refs.nameField?
		# Reveals the metric lookup input, focuses it, and scrolls it into view.
		# NOTE(review): call sites bind an `id` argument (see addMetricButton
		# onClick) but this handler ignores its arguments — confirm the bind
		# is still needed.
		_focusMetricLookupField: ->
			$(@refs.metricLookup).show()
			# Focus via class selector; assumes a single visible .lookupField — TODO confirm
			$('.lookupField').focus()
			# scroll down so metric results are never hidden below view
			$parent = win.document.getElementById('planView')
			$element = findDOMNode(@refs.metricLookup)
			# 1000ms animated scroll with a 200px offset and a no-op completion callback
			scrollToElement $parent, $element, 1000, 'easeInOutQuad', 200, (->)
_hideMetricInput: ->
$(@refs.metricLookup).hide()
# Drag source contract
targetSource = {
beginDrag: ({id, index, sectionIndex}) -> {id, index, sectionIndex}
}
	# Drop target contract (react-dnd): while another target is dragged over
	# this one, reorder the two within their shared section.
	targetDestination = {
		hover: (props, monitor, component) ->
			draggingTargetProps = monitor.getItem()
			sectionIndex = draggingTargetProps.sectionIndex
			dragIndex = draggingTargetProps.index
			hoverIndex = props.index
			# Don't replace items with themselves
			return if dragIndex is hoverIndex
			# Can't drag target to another section
			return if sectionIndex isnt props.sectionIndex
			# Determine rectangle on screen
			hoverBoundingRect = findDOMNode(component).getBoundingClientRect()
			# Get vertical middle
			# NOTE(review): despite the "Middle" names, these are the 25% and 75%
			# marks of the hovered item's height, leaving a dead zone in the
			# middle half — confirm this is the intended feel.
			hoverMiddleTopY = (hoverBoundingRect.bottom - hoverBoundingRect.top) / 4
			hoverMiddleBottomY = hoverMiddleTopY * 3
			# Determine mouse position
			clientOffset = monitor.getClientOffset()
			# Get pixels to the top
			hoverClientY = clientOffset.y - hoverBoundingRect.top
			# Only perform the move when the mouse has crossed half of the item's height
			# When dragging downwards, only move when the cursor is below 50%
			# When dragging upwards, only move when the cursor is above 50%
			# Dragging downwards
			return if dragIndex < hoverIndex and hoverClientY < hoverMiddleTopY
			# Dragging upwards
			return if dragIndex > hoverIndex and hoverClientY > hoverMiddleBottomY
			# Time to actually perform the action
			props.reorderTargetId(sectionIndex, dragIndex, hoverIndex)
			# (Example says to mutate here, but we're using Imm data)
			# Keep the dragged item's index in sync so hover comparisons stay correct.
			monitor.getItem().index = hoverIndex;
	}
# Specify props to inject into component
collectSource = (connect, monitor) -> {
connectDragSource: connect.dragSource()
connectDragPreview: connect.dragPreview()
isDragging: monitor.isDragging()
}
connectDestination = (connect) -> {
connectDropTarget: connect.dropTarget()
}
	# Wrap (decorate) planTarget with drag-drop
	# Both decorators use the 'target' item type, so a PlanTarget is at once a
	# drop destination (targetDestination) and a drag source (targetSource).
	return React.createFactory Decorate [
		DropTarget('target', targetDestination, connectDestination)
		DragSource('target', targetSource, collectSource)
	], PlanTarget
module.exports = {load}
|
[
{
"context": "espace COOLSTRAP.Fallback\n# @class iOS\n#\n# @author Abraham Barrera <abarrerac@gmail.com> || @abraham_barrera\n###\n\nCO",
"end": 118,
"score": 0.9998976588249207,
"start": 103,
"tag": "NAME",
"value": "Abraham Barrera"
},
{
"context": "allback\n# @class iOS\n#\n# @au... | coolstrap-core/app/assets/javascripts/coolstrap/fallback/_Coolstrap.Fallback.iOS.coffee | cristianferrarig/coolstrap | 0 | ###
# Fallback to iOS unexpected behaviors.
#
# @namespace COOLSTRAP.Fallback
# @class iOS
#
# @author Abraham Barrera <abarrerac@gmail.com> || @abraham_barrera
###
COOLSTRAP.Fallback.iOS = ((cool, document) ->
VIEWPORT_META = document.querySelector and document.querySelector("meta[name=\"viewport\"]")
###
# fix for iPhone viewport scale bug
# http://www.blog.highub.com/mobile-2/a-fix-for-iphone-viewport-scale-bug/
#
# @method scaleFix
###
scaleFix = ->
env = cool.Util.Platform.environment()
_gestureStart = ->
VIEWPORT_META.content = "width=device-width, minimum-scale=0.25, maximum-scale=1.6"
if VIEWPORT_META and (env.isMobile and env.os.name is "ios")
VIEWPORT_META.content = "width=device-width, minimum-scale=1.0, maximum-scale=1.0"
document.addEventListener "gesturestart", _gestureStart, false
scaleFix: scaleFix
)(COOLSTRAP, document) | 133879 | ###
# Fallback to iOS unexpected behaviors.
#
# @namespace COOLSTRAP.Fallback
# @class iOS
#
# @author <NAME> <<EMAIL>> || @abraham_barrera
###
COOLSTRAP.Fallback.iOS = ((cool, document) ->
VIEWPORT_META = document.querySelector and document.querySelector("meta[name=\"viewport\"]")
###
# fix for iPhone viewport scale bug
# http://www.blog.highub.com/mobile-2/a-fix-for-iphone-viewport-scale-bug/
#
# @method scaleFix
###
scaleFix = ->
env = cool.Util.Platform.environment()
_gestureStart = ->
VIEWPORT_META.content = "width=device-width, minimum-scale=0.25, maximum-scale=1.6"
if VIEWPORT_META and (env.isMobile and env.os.name is "ios")
VIEWPORT_META.content = "width=device-width, minimum-scale=1.0, maximum-scale=1.0"
document.addEventListener "gesturestart", _gestureStart, false
scaleFix: scaleFix
)(COOLSTRAP, document) | true | ###
# Fallback to iOS unexpected behaviors.
#
# @namespace COOLSTRAP.Fallback
# @class iOS
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @abraham_barrera
###
COOLSTRAP.Fallback.iOS = ((cool, document) ->
VIEWPORT_META = document.querySelector and document.querySelector("meta[name=\"viewport\"]")
###
# fix for iPhone viewport scale bug
# http://www.blog.highub.com/mobile-2/a-fix-for-iphone-viewport-scale-bug/
#
# @method scaleFix
###
scaleFix = ->
env = cool.Util.Platform.environment()
_gestureStart = ->
VIEWPORT_META.content = "width=device-width, minimum-scale=0.25, maximum-scale=1.6"
if VIEWPORT_META and (env.isMobile and env.os.name is "ios")
VIEWPORT_META.content = "width=device-width, minimum-scale=1.0, maximum-scale=1.0"
document.addEventListener "gesturestart", _gestureStart, false
scaleFix: scaleFix
)(COOLSTRAP, document) |
[
{
"context": "= []\n\n game.players.push({\n id: 0,\n name: 'Gonçalo',\n score: 0,\n orientation: 'right',\n pos",
"end": 665,
"score": 0.9997570514678955,
"start": 658,
"tag": "NAME",
"value": "Gonçalo"
}
] | res/jule.coffee | danimt/yummy-giggle | 1 | #
game = {
boardWidth: 300,
boardHeight: 300,
boardSpacing: 30,
gameInterval: 100,
playerHeight: 5,
playerDepth: 1,
camAngle: 0
camSmoothness: 5
}
Number::mod = (n) ->
(this % n + n) % n
#
# Run fn
#
game.initGame = ->
# Initialize board
game.initBoard()
# Bind
game.bind()
# Generate context
game.getCanvas()
window.setInterval ->
game.updateGame()
, game.gameInterval
#
#
#
game.initBoard = ->
game.board = []
for line in [0..game.boardHeight]
game.board.push([])
for col in [0..game.boardWidth]
game.board[line].push(null)
game.players = []
game.players.push({
id: 0,
name: 'Gonçalo',
score: 0,
orientation: 'right',
position: [1, 1],
lastPosition: null,
color: 0xff0000
})
#
#
#
game.detectCollision = (pos, dir) ->
switch dir
when 'up'
true if pos[1]-1 < 0 or game.board[pos[1]-1][pos[0]] isnt null
when 'down'
true if pos[1]+1 >= game.boardHeight or game.board[pos[1]+1][pos[0]] isnt null
when 'left'
true if pos[0]-1 < 0 or game.board[pos[1]][pos[0]-1] isnt null
when 'right'
true if pos[0]+1 >= game.boardWidth or game.board[pos[1]][pos[0]+1] isnt null
#
#
#
game.playerLost = (player) ->
player.lost = true
# Clear from board
for line, lineIndex in game.board
for col, colIndex in line
game.board[lineIndex][colIndex] = null if col is player.id
game.destroyPlayerObject(player)
game.initBoard()
#
#
#
game.updatePlayer = (player) ->
return if player.lost
switch player.orientation
when 'up'
game.board[player.position[1]-1][player.position[0]] = player.id
player.position[1]--
when 'down'
game.board[player.position[1]+1][player.position[0]] = player.id
player.position[1]++
when 'left'
game.board[player.position[1]][player.position[0]-1] = player.id
player.position[0]--
when 'right'
game.board[player.position[1]][player.position[0]+1] = player.id
player.position[0]++
player.lastOrientation = player.orientation
#
game.renderPlayer(player)
#
#
#
game.playerMove = (player, dir) ->
if !player
# Current player
player = game.players[0]
newDir = 'down' if dir is 'right' and player.lastOrientation is 'right'
newDir = 'up' if dir is 'left' and player.lastOrientation is 'right'
newDir = 'up' if dir is 'right' and player.lastOrientation is 'left'
newDir = 'down' if dir is 'left' and player.lastOrientation is 'left'
newDir = 'left' if dir is 'right' and player.lastOrientation is 'down'
newDir = 'right' if dir is 'left' and player.lastOrientation is 'down'
newDir = 'right' if dir is 'right' and player.lastOrientation is 'up'
newDir = 'left' if dir is 'left' and player.lastOrientation is 'up'
# Set player orientation
player.orientation = newDir
#
#
#
game.bind = ->
document.addEventListener 'keydown', (e) ->
#if e.keyCode is 87 or e.keyCode is 38
# game.playerMove(false, 'up')
#if e.keyCode is 83 or e.keyCode is 40
# game.playerMove(false, 'down')
if e.keyCode is 65 or e.keyCode is 37
game.playerMove(false, 'left')
if e.keyCode is 68 or e.keyCode is 39
game.playerMove(false, 'right')
, true
return
#
#
#
game.getPlayerById = (id) ->
for player in game.players
if id is player.id
player
#
#
#
game.updateGame = ->
# Calculate players board occupation
for player in game.players
game.playerLost(player) if game.detectCollision(player.position, player.orientation)
game.updatePlayer(player)
#
#
#
game.updateCamera = ->
# Get player
player = game.getPlayerById(0)[0]
#playerX = player.position[0] * game.boardSpacing - (game.boardSpacing * game.boardWidth / 2)
#playerY = player.position[1] * game.boardSpacing - (game.boardSpacing * game.boardHeight / 2)
if player._rawPos
playerX = player._rawPos[0]
playerY = player._rawPos[1]
# Computes camera position
game.camera.position.y = 150
game.camera.position.z = playerY - 100 * Math.cos(game.camAngle) - 100 * Math.sin(game.camAngle)
game.camera.position.x = playerX + 100 * Math.sin(game.camAngle) - 100 * Math.cos(game.camAngle)
# Calculate camera angle
if player.orientation is 'right'
desiredAngle = 0
else if player.orientation is 'left'
desiredAngle = Math.PI
else if player.orientation is 'down'
desiredAngle = Math.PI / 2
else if player.orientation is 'up'
desiredAngle = 3 * Math.PI / 2
difference = Math.abs(desiredAngle - game.camAngle)
# Select shortest way to rotate camera -> to be completed
if difference < 2 * Math.PI - difference
camStep = difference / game.camSmoothness
else
camStep = - (2 * Math.PI - difference) / game.camSmoothness
# Go counter clockwise
if desiredAngle - game.camAngle < 0
camStep = - camStep
if player._rawPos
# Update game
game.camera.lookAt(new THREE.Vector3(player._rawPos[0], 0, player._rawPos[1]))
game.camAngle = (game.camAngle + camStep).mod(2 * Math.PI)
#
#
#
game.renderPlayer = (player) ->
game.createPlayerObject(player)
#
# Get canvas env
#
game.getCanvas = ->
#
game.scene = new THREE.Scene()
#
game.camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 1000)
#
game.renderer = new THREE.WebGLRenderer()
game.renderer.setSize(window.innerWidth, window.innerHeight)
#
document.body.appendChild(game.renderer.domElement)
#
geometry = new THREE.BoxGeometry game.boardWidth*game.boardSpacing, 0, game.boardHeight*game.boardSpacing
material = new THREE.MeshBasicMaterial
color: 0x000000
game.floor = new THREE.Mesh geometry, material
game.scene.add game.floor
#
# Build board lines
#
boardLineMaterial = new THREE.LineBasicMaterial
color: 0x333333
for colIndex in [0..game.boardWidth]
geometry = new THREE.Geometry()
geometry.vertices.push(
new THREE.Vector3( game.boardSpacing*colIndex - (game.boardSpacing*game.boardWidth / 2) , 1, -(game.boardSpacing*game.boardHeight / 2)),
new THREE.Vector3( game.boardSpacing*colIndex - (game.boardSpacing*game.boardWidth / 2), 1, (game.boardSpacing*game.boardHeight / 2) )
)
line = new THREE.Line geometry, boardLineMaterial
game.scene.add line
for rowIndex in [0..game.boardHeight]
geometry = new THREE.Geometry()
geometry.vertices.push(
new THREE.Vector3( -(game.boardSpacing*game.boardWidth / 2) , 1, game.boardSpacing*rowIndex - (game.boardSpacing*game.boardHeight / 2)),
new THREE.Vector3( (game.boardSpacing*game.boardWidth / 2), 1, game.boardSpacing*rowIndex - (game.boardSpacing*game.boardHeight / 2))
)
line = new THREE.Line geometry, boardLineMaterial
game.scene.add line
#
game.camera.position.z = 10
game.camera.position.x = 0
game.camera.position.y = 50
game.camera.lookAt(new THREE.Vector3(0, 0, 0))
#
game.executeSequence()
game.createPlayerObject(0)
#
#
#
game.createPlayerObject = (player) ->
# Check player
return if !player
# global static
geometry = new THREE.BoxGeometry game.boardSpacing, game.playerHeight, game.playerDepth
material = new THREE.MeshBasicMaterial
color: player.color
obj = new THREE.Mesh geometry, material
# Initialize object array
if !player._object
player._object = []
# Set orientation
if player.orientation is 'up' or player.orientation is 'down'
# Rotate 90deg
obj.rotateOnAxis new THREE.Vector3(0, 1, 0), Math.PI/2
# Set current position
obj.position.x = player.position[0] * game.boardSpacing - (game.boardWidth / 2) * game.boardSpacing
obj.position.y = game.playerHeight / 2
obj.position.z = player.position[1] * game.boardSpacing - (game.boardHeight / 2) * game.boardSpacing
switch player.orientation
when 'up'
obj.position.z = obj.position.z + game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation(game.boardSpacing / 2, 0, 0))
when 'left'
obj.position.x = obj.position.x + game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation( -game.boardSpacing/2, 0, 0 ) )
when 'right'
obj.position.x = obj.position.x - game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation( game.boardSpacing/2, 0, 0 ) )
when 'down'
obj.position.z = obj.position.z - game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation(- game.boardSpacing/2, 0 , 0 ) )
obj.scale.x = 0.000001
# Create player object
player._object.push obj
# Add player object to scene
game.scene.add obj
animStep = 0
orientation = player.orientation
playerAnim = window.setInterval ->
animStep = animStep + 0.1
obj.scale.x = animStep
player._rawPos = [
obj.position.x ,
obj.position.z
]
switch orientation
when 'up'
player._rawPos[1] -= animStep * game.boardSpacing
when 'left'
player._rawPos[0] -= animStep * game.boardSpacing
when 'right'
player._rawPos[0] += animStep * game.boardSpacing
when 'down'
player._rawPos[1] += animStep * game.boardSpacing
console.log player._rawPos[0] + ' ' + player._rawPos[1]
if animStep >= 0.99
window.clearInterval playerAnim
, game.gameInterval / 10
#
#
#
game.destroyPlayerObject = (player) ->
# Check player
return if !player
#
for obj in player._object
game.scene.remove obj
#
#
#
game.executeSequence = ->
#
requestAnimationFrame(game.executeSequence);
# Update camera realtime
game.updateCamera()
#
game.renderer.render(game.scene, game.camera);
# Execute
game.initGame() | 191408 | #
game = {
boardWidth: 300,
boardHeight: 300,
boardSpacing: 30,
gameInterval: 100,
playerHeight: 5,
playerDepth: 1,
camAngle: 0
camSmoothness: 5
}
Number::mod = (n) ->
(this % n + n) % n
#
# Run fn
#
game.initGame = ->
# Initialize board
game.initBoard()
# Bind
game.bind()
# Generate context
game.getCanvas()
window.setInterval ->
game.updateGame()
, game.gameInterval
#
#
#
game.initBoard = ->
game.board = []
for line in [0..game.boardHeight]
game.board.push([])
for col in [0..game.boardWidth]
game.board[line].push(null)
game.players = []
game.players.push({
id: 0,
name: '<NAME>',
score: 0,
orientation: 'right',
position: [1, 1],
lastPosition: null,
color: 0xff0000
})
#
#
#
game.detectCollision = (pos, dir) ->
switch dir
when 'up'
true if pos[1]-1 < 0 or game.board[pos[1]-1][pos[0]] isnt null
when 'down'
true if pos[1]+1 >= game.boardHeight or game.board[pos[1]+1][pos[0]] isnt null
when 'left'
true if pos[0]-1 < 0 or game.board[pos[1]][pos[0]-1] isnt null
when 'right'
true if pos[0]+1 >= game.boardWidth or game.board[pos[1]][pos[0]+1] isnt null
#
#
#
game.playerLost = (player) ->
player.lost = true
# Clear from board
for line, lineIndex in game.board
for col, colIndex in line
game.board[lineIndex][colIndex] = null if col is player.id
game.destroyPlayerObject(player)
game.initBoard()
#
#
#
game.updatePlayer = (player) ->
return if player.lost
switch player.orientation
when 'up'
game.board[player.position[1]-1][player.position[0]] = player.id
player.position[1]--
when 'down'
game.board[player.position[1]+1][player.position[0]] = player.id
player.position[1]++
when 'left'
game.board[player.position[1]][player.position[0]-1] = player.id
player.position[0]--
when 'right'
game.board[player.position[1]][player.position[0]+1] = player.id
player.position[0]++
player.lastOrientation = player.orientation
#
game.renderPlayer(player)
#
#
#
game.playerMove = (player, dir) ->
if !player
# Current player
player = game.players[0]
newDir = 'down' if dir is 'right' and player.lastOrientation is 'right'
newDir = 'up' if dir is 'left' and player.lastOrientation is 'right'
newDir = 'up' if dir is 'right' and player.lastOrientation is 'left'
newDir = 'down' if dir is 'left' and player.lastOrientation is 'left'
newDir = 'left' if dir is 'right' and player.lastOrientation is 'down'
newDir = 'right' if dir is 'left' and player.lastOrientation is 'down'
newDir = 'right' if dir is 'right' and player.lastOrientation is 'up'
newDir = 'left' if dir is 'left' and player.lastOrientation is 'up'
# Set player orientation
player.orientation = newDir
#
#
#
game.bind = ->
document.addEventListener 'keydown', (e) ->
#if e.keyCode is 87 or e.keyCode is 38
# game.playerMove(false, 'up')
#if e.keyCode is 83 or e.keyCode is 40
# game.playerMove(false, 'down')
if e.keyCode is 65 or e.keyCode is 37
game.playerMove(false, 'left')
if e.keyCode is 68 or e.keyCode is 39
game.playerMove(false, 'right')
, true
return
#
#
#
game.getPlayerById = (id) ->
for player in game.players
if id is player.id
player
#
#
#
game.updateGame = ->
# Calculate players board occupation
for player in game.players
game.playerLost(player) if game.detectCollision(player.position, player.orientation)
game.updatePlayer(player)
#
#
#
game.updateCamera = ->
# Get player
player = game.getPlayerById(0)[0]
#playerX = player.position[0] * game.boardSpacing - (game.boardSpacing * game.boardWidth / 2)
#playerY = player.position[1] * game.boardSpacing - (game.boardSpacing * game.boardHeight / 2)
if player._rawPos
playerX = player._rawPos[0]
playerY = player._rawPos[1]
# Computes camera position
game.camera.position.y = 150
game.camera.position.z = playerY - 100 * Math.cos(game.camAngle) - 100 * Math.sin(game.camAngle)
game.camera.position.x = playerX + 100 * Math.sin(game.camAngle) - 100 * Math.cos(game.camAngle)
# Calculate camera angle
if player.orientation is 'right'
desiredAngle = 0
else if player.orientation is 'left'
desiredAngle = Math.PI
else if player.orientation is 'down'
desiredAngle = Math.PI / 2
else if player.orientation is 'up'
desiredAngle = 3 * Math.PI / 2
difference = Math.abs(desiredAngle - game.camAngle)
# Select shortest way to rotate camera -> to be completed
if difference < 2 * Math.PI - difference
camStep = difference / game.camSmoothness
else
camStep = - (2 * Math.PI - difference) / game.camSmoothness
# Go counter clockwise
if desiredAngle - game.camAngle < 0
camStep = - camStep
if player._rawPos
# Update game
game.camera.lookAt(new THREE.Vector3(player._rawPos[0], 0, player._rawPos[1]))
game.camAngle = (game.camAngle + camStep).mod(2 * Math.PI)
#
#
#
game.renderPlayer = (player) ->
game.createPlayerObject(player)
#
# Get canvas env
#
game.getCanvas = ->
#
game.scene = new THREE.Scene()
#
game.camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 1000)
#
game.renderer = new THREE.WebGLRenderer()
game.renderer.setSize(window.innerWidth, window.innerHeight)
#
document.body.appendChild(game.renderer.domElement)
#
geometry = new THREE.BoxGeometry game.boardWidth*game.boardSpacing, 0, game.boardHeight*game.boardSpacing
material = new THREE.MeshBasicMaterial
color: 0x000000
game.floor = new THREE.Mesh geometry, material
game.scene.add game.floor
#
# Build board lines
#
boardLineMaterial = new THREE.LineBasicMaterial
color: 0x333333
for colIndex in [0..game.boardWidth]
geometry = new THREE.Geometry()
geometry.vertices.push(
new THREE.Vector3( game.boardSpacing*colIndex - (game.boardSpacing*game.boardWidth / 2) , 1, -(game.boardSpacing*game.boardHeight / 2)),
new THREE.Vector3( game.boardSpacing*colIndex - (game.boardSpacing*game.boardWidth / 2), 1, (game.boardSpacing*game.boardHeight / 2) )
)
line = new THREE.Line geometry, boardLineMaterial
game.scene.add line
for rowIndex in [0..game.boardHeight]
geometry = new THREE.Geometry()
geometry.vertices.push(
new THREE.Vector3( -(game.boardSpacing*game.boardWidth / 2) , 1, game.boardSpacing*rowIndex - (game.boardSpacing*game.boardHeight / 2)),
new THREE.Vector3( (game.boardSpacing*game.boardWidth / 2), 1, game.boardSpacing*rowIndex - (game.boardSpacing*game.boardHeight / 2))
)
line = new THREE.Line geometry, boardLineMaterial
game.scene.add line
#
game.camera.position.z = 10
game.camera.position.x = 0
game.camera.position.y = 50
game.camera.lookAt(new THREE.Vector3(0, 0, 0))
#
game.executeSequence()
game.createPlayerObject(0)
#
#
#
game.createPlayerObject = (player) ->
# Check player
return if !player
# global static
geometry = new THREE.BoxGeometry game.boardSpacing, game.playerHeight, game.playerDepth
material = new THREE.MeshBasicMaterial
color: player.color
obj = new THREE.Mesh geometry, material
# Initialize object array
if !player._object
player._object = []
# Set orientation
if player.orientation is 'up' or player.orientation is 'down'
# Rotate 90deg
obj.rotateOnAxis new THREE.Vector3(0, 1, 0), Math.PI/2
# Set current position
obj.position.x = player.position[0] * game.boardSpacing - (game.boardWidth / 2) * game.boardSpacing
obj.position.y = game.playerHeight / 2
obj.position.z = player.position[1] * game.boardSpacing - (game.boardHeight / 2) * game.boardSpacing
switch player.orientation
when 'up'
obj.position.z = obj.position.z + game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation(game.boardSpacing / 2, 0, 0))
when 'left'
obj.position.x = obj.position.x + game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation( -game.boardSpacing/2, 0, 0 ) )
when 'right'
obj.position.x = obj.position.x - game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation( game.boardSpacing/2, 0, 0 ) )
when 'down'
obj.position.z = obj.position.z - game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation(- game.boardSpacing/2, 0 , 0 ) )
obj.scale.x = 0.000001
# Create player object
player._object.push obj
# Add player object to scene
game.scene.add obj
animStep = 0
orientation = player.orientation
playerAnim = window.setInterval ->
animStep = animStep + 0.1
obj.scale.x = animStep
player._rawPos = [
obj.position.x ,
obj.position.z
]
switch orientation
when 'up'
player._rawPos[1] -= animStep * game.boardSpacing
when 'left'
player._rawPos[0] -= animStep * game.boardSpacing
when 'right'
player._rawPos[0] += animStep * game.boardSpacing
when 'down'
player._rawPos[1] += animStep * game.boardSpacing
console.log player._rawPos[0] + ' ' + player._rawPos[1]
if animStep >= 0.99
window.clearInterval playerAnim
, game.gameInterval / 10
#
#
#
game.destroyPlayerObject = (player) ->
# Check player
return if !player
#
for obj in player._object
game.scene.remove obj
#
#
#
game.executeSequence = ->
#
requestAnimationFrame(game.executeSequence);
# Update camera realtime
game.updateCamera()
#
game.renderer.render(game.scene, game.camera);
# Execute
game.initGame() | true | #
game = {
boardWidth: 300,
boardHeight: 300,
boardSpacing: 30,
gameInterval: 100,
playerHeight: 5,
playerDepth: 1,
camAngle: 0
camSmoothness: 5
}
Number::mod = (n) ->
(this % n + n) % n
#
# Run fn
#
game.initGame = ->
# Initialize board
game.initBoard()
# Bind
game.bind()
# Generate context
game.getCanvas()
window.setInterval ->
game.updateGame()
, game.gameInterval
#
#
#
game.initBoard = ->
game.board = []
for line in [0..game.boardHeight]
game.board.push([])
for col in [0..game.boardWidth]
game.board[line].push(null)
game.players = []
game.players.push({
id: 0,
name: 'PI:NAME:<NAME>END_PI',
score: 0,
orientation: 'right',
position: [1, 1],
lastPosition: null,
color: 0xff0000
})
#
#
#
game.detectCollision = (pos, dir) ->
switch dir
when 'up'
true if pos[1]-1 < 0 or game.board[pos[1]-1][pos[0]] isnt null
when 'down'
true if pos[1]+1 >= game.boardHeight or game.board[pos[1]+1][pos[0]] isnt null
when 'left'
true if pos[0]-1 < 0 or game.board[pos[1]][pos[0]-1] isnt null
when 'right'
true if pos[0]+1 >= game.boardWidth or game.board[pos[1]][pos[0]+1] isnt null
#
#
#
game.playerLost = (player) ->
player.lost = true
# Clear from board
for line, lineIndex in game.board
for col, colIndex in line
game.board[lineIndex][colIndex] = null if col is player.id
game.destroyPlayerObject(player)
game.initBoard()
#
#
#
game.updatePlayer = (player) ->
return if player.lost
switch player.orientation
when 'up'
game.board[player.position[1]-1][player.position[0]] = player.id
player.position[1]--
when 'down'
game.board[player.position[1]+1][player.position[0]] = player.id
player.position[1]++
when 'left'
game.board[player.position[1]][player.position[0]-1] = player.id
player.position[0]--
when 'right'
game.board[player.position[1]][player.position[0]+1] = player.id
player.position[0]++
player.lastOrientation = player.orientation
#
game.renderPlayer(player)
#
#
#
game.playerMove = (player, dir) ->
if !player
# Current player
player = game.players[0]
newDir = 'down' if dir is 'right' and player.lastOrientation is 'right'
newDir = 'up' if dir is 'left' and player.lastOrientation is 'right'
newDir = 'up' if dir is 'right' and player.lastOrientation is 'left'
newDir = 'down' if dir is 'left' and player.lastOrientation is 'left'
newDir = 'left' if dir is 'right' and player.lastOrientation is 'down'
newDir = 'right' if dir is 'left' and player.lastOrientation is 'down'
newDir = 'right' if dir is 'right' and player.lastOrientation is 'up'
newDir = 'left' if dir is 'left' and player.lastOrientation is 'up'
# Set player orientation
player.orientation = newDir
#
#
#
game.bind = ->
document.addEventListener 'keydown', (e) ->
#if e.keyCode is 87 or e.keyCode is 38
# game.playerMove(false, 'up')
#if e.keyCode is 83 or e.keyCode is 40
# game.playerMove(false, 'down')
if e.keyCode is 65 or e.keyCode is 37
game.playerMove(false, 'left')
if e.keyCode is 68 or e.keyCode is 39
game.playerMove(false, 'right')
, true
return
#
#
#
game.getPlayerById = (id) ->
for player in game.players
if id is player.id
player
#
#
#
game.updateGame = ->
# Calculate players board occupation
for player in game.players
game.playerLost(player) if game.detectCollision(player.position, player.orientation)
game.updatePlayer(player)
#
#
#
game.updateCamera = ->
# Get player
player = game.getPlayerById(0)[0]
#playerX = player.position[0] * game.boardSpacing - (game.boardSpacing * game.boardWidth / 2)
#playerY = player.position[1] * game.boardSpacing - (game.boardSpacing * game.boardHeight / 2)
if player._rawPos
playerX = player._rawPos[0]
playerY = player._rawPos[1]
# Computes camera position
game.camera.position.y = 150
game.camera.position.z = playerY - 100 * Math.cos(game.camAngle) - 100 * Math.sin(game.camAngle)
game.camera.position.x = playerX + 100 * Math.sin(game.camAngle) - 100 * Math.cos(game.camAngle)
# Calculate camera angle
if player.orientation is 'right'
desiredAngle = 0
else if player.orientation is 'left'
desiredAngle = Math.PI
else if player.orientation is 'down'
desiredAngle = Math.PI / 2
else if player.orientation is 'up'
desiredAngle = 3 * Math.PI / 2
difference = Math.abs(desiredAngle - game.camAngle)
# Select shortest way to rotate camera -> to be completed
if difference < 2 * Math.PI - difference
camStep = difference / game.camSmoothness
else
camStep = - (2 * Math.PI - difference) / game.camSmoothness
# Go counter clockwise
if desiredAngle - game.camAngle < 0
camStep = - camStep
if player._rawPos
# Update game
game.camera.lookAt(new THREE.Vector3(player._rawPos[0], 0, player._rawPos[1]))
game.camAngle = (game.camAngle + camStep).mod(2 * Math.PI)
#
#
#
game.renderPlayer = (player) ->
game.createPlayerObject(player)
#
# Get canvas env
#
game.getCanvas = ->
#
game.scene = new THREE.Scene()
#
game.camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 1000)
#
game.renderer = new THREE.WebGLRenderer()
game.renderer.setSize(window.innerWidth, window.innerHeight)
#
document.body.appendChild(game.renderer.domElement)
#
geometry = new THREE.BoxGeometry game.boardWidth*game.boardSpacing, 0, game.boardHeight*game.boardSpacing
material = new THREE.MeshBasicMaterial
color: 0x000000
game.floor = new THREE.Mesh geometry, material
game.scene.add game.floor
#
# Build board lines
#
boardLineMaterial = new THREE.LineBasicMaterial
color: 0x333333
for colIndex in [0..game.boardWidth]
geometry = new THREE.Geometry()
geometry.vertices.push(
new THREE.Vector3( game.boardSpacing*colIndex - (game.boardSpacing*game.boardWidth / 2) , 1, -(game.boardSpacing*game.boardHeight / 2)),
new THREE.Vector3( game.boardSpacing*colIndex - (game.boardSpacing*game.boardWidth / 2), 1, (game.boardSpacing*game.boardHeight / 2) )
)
line = new THREE.Line geometry, boardLineMaterial
game.scene.add line
for rowIndex in [0..game.boardHeight]
geometry = new THREE.Geometry()
geometry.vertices.push(
new THREE.Vector3( -(game.boardSpacing*game.boardWidth / 2) , 1, game.boardSpacing*rowIndex - (game.boardSpacing*game.boardHeight / 2)),
new THREE.Vector3( (game.boardSpacing*game.boardWidth / 2), 1, game.boardSpacing*rowIndex - (game.boardSpacing*game.boardHeight / 2))
)
line = new THREE.Line geometry, boardLineMaterial
game.scene.add line
#
game.camera.position.z = 10
game.camera.position.x = 0
game.camera.position.y = 50
game.camera.lookAt(new THREE.Vector3(0, 0, 0))
#
game.executeSequence()
game.createPlayerObject(0)
#
#
#
game.createPlayerObject = (player) ->
# Check player
return if !player
# global static
geometry = new THREE.BoxGeometry game.boardSpacing, game.playerHeight, game.playerDepth
material = new THREE.MeshBasicMaterial
color: player.color
obj = new THREE.Mesh geometry, material
# Initialize object array
if !player._object
player._object = []
# Set orientation
if player.orientation is 'up' or player.orientation is 'down'
# Rotate 90deg
obj.rotateOnAxis new THREE.Vector3(0, 1, 0), Math.PI/2
# Set current position
obj.position.x = player.position[0] * game.boardSpacing - (game.boardWidth / 2) * game.boardSpacing
obj.position.y = game.playerHeight / 2
obj.position.z = player.position[1] * game.boardSpacing - (game.boardHeight / 2) * game.boardSpacing
switch player.orientation
when 'up'
obj.position.z = obj.position.z + game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation(game.boardSpacing / 2, 0, 0))
when 'left'
obj.position.x = obj.position.x + game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation( -game.boardSpacing/2, 0, 0 ) )
when 'right'
obj.position.x = obj.position.x - game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation( game.boardSpacing/2, 0, 0 ) )
when 'down'
obj.position.z = obj.position.z - game.boardSpacing
geometry.applyMatrix( new THREE.Matrix4().makeTranslation(- game.boardSpacing/2, 0 , 0 ) )
obj.scale.x = 0.000001
# Create player object
player._object.push obj
# Add player object to scene
game.scene.add obj
animStep = 0
orientation = player.orientation
playerAnim = window.setInterval ->
animStep = animStep + 0.1
obj.scale.x = animStep
player._rawPos = [
obj.position.x ,
obj.position.z
]
switch orientation
when 'up'
player._rawPos[1] -= animStep * game.boardSpacing
when 'left'
player._rawPos[0] -= animStep * game.boardSpacing
when 'right'
player._rawPos[0] += animStep * game.boardSpacing
when 'down'
player._rawPos[1] += animStep * game.boardSpacing
console.log player._rawPos[0] + ' ' + player._rawPos[1]
if animStep >= 0.99
window.clearInterval playerAnim
, game.gameInterval / 10
#
#
#
game.destroyPlayerObject = (player) ->
# Check player
return if !player
#
for obj in player._object
game.scene.remove obj
#
#
#
game.executeSequence = ->
#
requestAnimationFrame(game.executeSequence);
# Update camera realtime
game.updateCamera()
#
game.renderer.render(game.scene, game.camera);
# Execute
game.initGame() |
[
{
"context": "s.create = (test) ->\n loginData =\n userName: 'user'\n password: 'pw'\n appName: 'app'\n server",
"end": 146,
"score": 0.9989441633224487,
"start": 142,
"tag": "USERNAME",
"value": "user"
},
{
"context": " loginData =\n userName: 'user'\n password: ... | test/unit/login7-payload-test.coffee | arthurschreiber/tedious | 1 | require('../../src/buffertools')
Login7Payload = require('../../src/login7-payload')
exports.create = (test) ->
loginData =
userName: 'user'
password: 'pw'
appName: 'app'
serverName: 'server'
language: 'lang'
database: 'db'
packetSize: 1024
tdsVersion: '7_2'
#start = new Date().getTime()
#for c in [1..1000]
# payload = new Login7Payload(loginData)
#end = new Date().getTime()
#console.log(end - start)
payload = new Login7Payload(loginData)
expectedLength =
4 + # Length
32 + # Variable
2 + 2 + (2 * payload.hostname.length) +
2 + 2 + (2 * loginData.userName.length) +
2 + 2 + (2 * loginData.password.length) +
2 + 2 + (2 * loginData.appName.length) +
2 + 2 + (2 * loginData.serverName.length) +
2 + 2 + (2 * 0) + # Reserved
2 + 2 + (2 * payload.libraryName.length) +
2 + 2 + (2 * loginData.language.length) +
2 + 2 + (2 * loginData.database.length) +
payload.clientId.length +
2 + 2 + (2 * payload.sspi.length) +
2 + 2 + (2 * payload.attachDbFile.length) +
2 + 2 + (2 * payload.changePassword.length) +
4 # cbSSPILong
test.strictEqual(payload.data.length, expectedLength)
passwordStart = payload.data.readUInt16LE(4 + 32 + (2 * 4))
passwordEnd = passwordStart + (2 * loginData.password.length)
passwordExpected = new Buffer([0xa2, 0xa5, 0xd2, 0xa5])
test.ok(payload.data.slice(passwordStart, passwordEnd).equals(passwordExpected))
#console.log(payload.toString(''))
test.done()
exports.createNTLM = (test) ->
loginData =
userName: 'user'
password: 'pw'
appName: 'app'
serverName: 'server'
domain: 'domain'
language: 'lang'
database: 'db'
packetSize: 1024
tdsVersion: '7_2'
payload = new Login7Payload(loginData)
expectedLength =
4 + # Length
32 + # Variable
2 + 2 + (2 * payload.hostname.length) +
2 + 2 + (2 * loginData.userName.length) +
2 + 2 + (2 * loginData.password.length) +
2 + 2 + (2 * loginData.appName.length) +
2 + 2 + (2 * loginData.serverName.length) +
2 + 2 + (2 * 0) + # Reserved
2 + 2 + (2 * payload.libraryName.length) +
2 + 2 + (2 * loginData.language.length) +
2 + 2 + (2 * loginData.database.length) +
payload.clientId.length +
2 + 2 + payload.ntlmPacket.length + # NTLM
2 + 2 + (2 * payload.attachDbFile.length) +
2 + 2 + (2 * payload.changePassword.length) +
4 # cbSSPILong
test.strictEqual(payload.data.length, expectedLength)
protocolHeader = payload.ntlmPacket.slice(0, 8).toString('utf8')
test.strictEqual(protocolHeader, 'NTLMSSP\u0000')
domainName = payload.ntlmPacket.slice(payload.ntlmPacket.length - 6).toString('ascii')
test.strictEqual(domainName, 'DOMAIN')
passwordStart = payload.data.readUInt16LE(4 + 32 + (2 * 4))
passwordEnd = passwordStart + (2 * loginData.password.length)
passwordExpected = new Buffer([0xa2, 0xa5, 0xd2, 0xa5])
test.ok(payload.data.slice(passwordStart, passwordEnd).equals(passwordExpected))
test.done() | 17138 | require('../../src/buffertools')
Login7Payload = require('../../src/login7-payload')
exports.create = (test) ->
loginData =
userName: 'user'
password: '<PASSWORD>'
appName: 'app'
serverName: 'server'
language: 'lang'
database: 'db'
packetSize: 1024
tdsVersion: '7_2'
#start = new Date().getTime()
#for c in [1..1000]
# payload = new Login7Payload(loginData)
#end = new Date().getTime()
#console.log(end - start)
payload = new Login7Payload(loginData)
expectedLength =
4 + # Length
32 + # Variable
2 + 2 + (2 * payload.hostname.length) +
2 + 2 + (2 * loginData.userName.length) +
2 + 2 + (2 * loginData.password.length) +
2 + 2 + (2 * loginData.appName.length) +
2 + 2 + (2 * loginData.serverName.length) +
2 + 2 + (2 * 0) + # Reserved
2 + 2 + (2 * payload.libraryName.length) +
2 + 2 + (2 * loginData.language.length) +
2 + 2 + (2 * loginData.database.length) +
payload.clientId.length +
2 + 2 + (2 * payload.sspi.length) +
2 + 2 + (2 * payload.attachDbFile.length) +
2 + 2 + (2 * payload.changePassword.length) +
4 # cbSSPILong
test.strictEqual(payload.data.length, expectedLength)
passwordStart = payload.data.readUInt16LE(4 + 32 + (2 * 4))
passwordEnd = passwordStart + (2 * loginData.password.length)
passwordExpected = new Buffer([0xa2, 0xa5, 0xd2, 0xa5])
test.ok(payload.data.slice(passwordStart, passwordEnd).equals(passwordExpected))
#console.log(payload.toString(''))
test.done()
exports.createNTLM = (test) ->
loginData =
userName: 'user'
password: '<PASSWORD>'
appName: 'app'
serverName: 'server'
domain: 'domain'
language: 'lang'
database: 'db'
packetSize: 1024
tdsVersion: '7_2'
payload = new Login7Payload(loginData)
expectedLength =
4 + # Length
32 + # Variable
2 + 2 + (2 * payload.hostname.length) +
2 + 2 + (2 * loginData.userName.length) +
2 + 2 + (2 * loginData.password.length) +
2 + 2 + (2 * loginData.appName.length) +
2 + 2 + (2 * loginData.serverName.length) +
2 + 2 + (2 * 0) + # Reserved
2 + 2 + (2 * payload.libraryName.length) +
2 + 2 + (2 * loginData.language.length) +
2 + 2 + (2 * loginData.database.length) +
payload.clientId.length +
2 + 2 + payload.ntlmPacket.length + # NTLM
2 + 2 + (2 * payload.attachDbFile.length) +
2 + 2 + (2 * payload.changePassword.length) +
4 # cbSSPILong
test.strictEqual(payload.data.length, expectedLength)
protocolHeader = payload.ntlmPacket.slice(0, 8).toString('utf8')
test.strictEqual(protocolHeader, 'NTLMSSP\u0000')
domainName = payload.ntlmPacket.slice(payload.ntlmPacket.length - 6).toString('ascii')
test.strictEqual(domainName, 'DOMAIN')
passwordStart = payload.data.readUInt16LE(4 + 32 + (2 * 4))
passwordEnd = passwordStart + (2 * loginData.password.length)
passwordExpected = new Buffer([0xa2, 0xa5, 0xd2, 0xa5])
test.ok(payload.data.slice(passwordStart, passwordEnd).equals(passwordExpected))
test.done() | true | require('../../src/buffertools')
Login7Payload = require('../../src/login7-payload')
exports.create = (test) ->
loginData =
userName: 'user'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
appName: 'app'
serverName: 'server'
language: 'lang'
database: 'db'
packetSize: 1024
tdsVersion: '7_2'
#start = new Date().getTime()
#for c in [1..1000]
# payload = new Login7Payload(loginData)
#end = new Date().getTime()
#console.log(end - start)
payload = new Login7Payload(loginData)
expectedLength =
4 + # Length
32 + # Variable
2 + 2 + (2 * payload.hostname.length) +
2 + 2 + (2 * loginData.userName.length) +
2 + 2 + (2 * loginData.password.length) +
2 + 2 + (2 * loginData.appName.length) +
2 + 2 + (2 * loginData.serverName.length) +
2 + 2 + (2 * 0) + # Reserved
2 + 2 + (2 * payload.libraryName.length) +
2 + 2 + (2 * loginData.language.length) +
2 + 2 + (2 * loginData.database.length) +
payload.clientId.length +
2 + 2 + (2 * payload.sspi.length) +
2 + 2 + (2 * payload.attachDbFile.length) +
2 + 2 + (2 * payload.changePassword.length) +
4 # cbSSPILong
test.strictEqual(payload.data.length, expectedLength)
passwordStart = payload.data.readUInt16LE(4 + 32 + (2 * 4))
passwordEnd = passwordStart + (2 * loginData.password.length)
passwordExpected = new Buffer([0xa2, 0xa5, 0xd2, 0xa5])
test.ok(payload.data.slice(passwordStart, passwordEnd).equals(passwordExpected))
#console.log(payload.toString(''))
test.done()
exports.createNTLM = (test) ->
loginData =
userName: 'user'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
appName: 'app'
serverName: 'server'
domain: 'domain'
language: 'lang'
database: 'db'
packetSize: 1024
tdsVersion: '7_2'
payload = new Login7Payload(loginData)
expectedLength =
4 + # Length
32 + # Variable
2 + 2 + (2 * payload.hostname.length) +
2 + 2 + (2 * loginData.userName.length) +
2 + 2 + (2 * loginData.password.length) +
2 + 2 + (2 * loginData.appName.length) +
2 + 2 + (2 * loginData.serverName.length) +
2 + 2 + (2 * 0) + # Reserved
2 + 2 + (2 * payload.libraryName.length) +
2 + 2 + (2 * loginData.language.length) +
2 + 2 + (2 * loginData.database.length) +
payload.clientId.length +
2 + 2 + payload.ntlmPacket.length + # NTLM
2 + 2 + (2 * payload.attachDbFile.length) +
2 + 2 + (2 * payload.changePassword.length) +
4 # cbSSPILong
test.strictEqual(payload.data.length, expectedLength)
protocolHeader = payload.ntlmPacket.slice(0, 8).toString('utf8')
test.strictEqual(protocolHeader, 'NTLMSSP\u0000')
domainName = payload.ntlmPacket.slice(payload.ntlmPacket.length - 6).toString('ascii')
test.strictEqual(domainName, 'DOMAIN')
passwordStart = payload.data.readUInt16LE(4 + 32 + (2 * 4))
passwordEnd = passwordStart + (2 * loginData.password.length)
passwordExpected = new Buffer([0xa2, 0xa5, 0xd2, 0xa5])
test.ok(payload.data.slice(passwordStart, passwordEnd).equals(passwordExpected))
test.done() |
[
{
"context": "rlie: ['1', '2', '3']\n david:\n sophie: 'a'\n result = common.extend object, prope",
"end": 2436,
"score": 0.7841894030570984,
"start": 2430,
"tag": "NAME",
"value": "sophie"
}
] | test/server/common/index.coffee | valueflowquality/gi-util-update | 0 | assert = require('chai').assert
expect = require('chai').expect
sinon = require 'sinon'
proxyquire = require 'proxyquire'
path = require 'path'
globalcommon = require '../../common'
dal = require './dal'
crudModelFactory = require './crudModelFactory'
crudControllerFactory = require './crudControllerFactory'
module.exports = () ->
describe 'Common', ->
stubs = null
common = null
mongooseMock = null
beforeEach (done) ->
dir = path.normalize __dirname + '../../../../server'
stubs =
'./crudControllerFactory':
name: 'crudControllerFactory'
'./crudModelFactory':
name: 'crudModelFactory'
'./rest': 'restStub'
'./dal': 'dalStub'
'../../common/timePatterns': 'timePatternsStub'
mongooseMock = sinon.spy()
common = proxyquire dir + '/common', stubs
done()
describe 'Exports', ->
it 'crudControllerFactory', (done) ->
assert.property common, 'crudControllerFactory'
, 'common does not export crudControllerFactory'
expect(common.crudControllerFactory.name)
.to.equal 'crudControllerFactory'
done()
it 'crudModelFactory', (done) ->
assert.property common, 'crudModelFactory'
, 'common does not export crudModelFactory'
expect(common.crudModelFactory.name).to.equal 'crudModelFactory'
done()
it 'extend', (done) ->
expect(common, 'common does not export extend')
.to.have.ownProperty 'extend'
done()
it 'rest', (done) ->
expect(common, 'common does not export rest')
.to.have.ownProperty 'rest'
expect(common.rest).to.equal 'restStub'
done()
it 'timePatterns', (done) ->
expect(common, 'common does not export timePatterns')
.to.have.ownProperty 'timePatterns'
expect(common.timePatterns).to.equal 'timePatternsStub'
done()
it 'dal', (done) ->
expect(common, 'common does not export dal')
.to.have.ownProperty 'dal'
expect(common.dal).to.equal 'dalStub'
done()
crudControllerFactory()
crudModelFactory()
describe 'extend: (object,properties) -> object', ->
it 'appends properties onto object', (done) ->
object =
alice: '1'
properties =
bob: 2
charlie: ['1', '2', '3']
david:
sophie: 'a'
result = common.extend object, properties
expect(result).to.have.ownProperty 'alice', '1'
expect(result).to.have.ownProperty 'bob', 2
# expect(result).to.have.ownProperty('charlie').with.length 3
expect(result).to.have.ownProperty 'david'
expect(result.david).to.have.ownProperty 'sophie', 'a'
done()
globalcommon()
dal() | 146208 | assert = require('chai').assert
expect = require('chai').expect
sinon = require 'sinon'
proxyquire = require 'proxyquire'
path = require 'path'
globalcommon = require '../../common'
dal = require './dal'
crudModelFactory = require './crudModelFactory'
crudControllerFactory = require './crudControllerFactory'
module.exports = () ->
describe 'Common', ->
stubs = null
common = null
mongooseMock = null
beforeEach (done) ->
dir = path.normalize __dirname + '../../../../server'
stubs =
'./crudControllerFactory':
name: 'crudControllerFactory'
'./crudModelFactory':
name: 'crudModelFactory'
'./rest': 'restStub'
'./dal': 'dalStub'
'../../common/timePatterns': 'timePatternsStub'
mongooseMock = sinon.spy()
common = proxyquire dir + '/common', stubs
done()
describe 'Exports', ->
it 'crudControllerFactory', (done) ->
assert.property common, 'crudControllerFactory'
, 'common does not export crudControllerFactory'
expect(common.crudControllerFactory.name)
.to.equal 'crudControllerFactory'
done()
it 'crudModelFactory', (done) ->
assert.property common, 'crudModelFactory'
, 'common does not export crudModelFactory'
expect(common.crudModelFactory.name).to.equal 'crudModelFactory'
done()
it 'extend', (done) ->
expect(common, 'common does not export extend')
.to.have.ownProperty 'extend'
done()
it 'rest', (done) ->
expect(common, 'common does not export rest')
.to.have.ownProperty 'rest'
expect(common.rest).to.equal 'restStub'
done()
it 'timePatterns', (done) ->
expect(common, 'common does not export timePatterns')
.to.have.ownProperty 'timePatterns'
expect(common.timePatterns).to.equal 'timePatternsStub'
done()
it 'dal', (done) ->
expect(common, 'common does not export dal')
.to.have.ownProperty 'dal'
expect(common.dal).to.equal 'dalStub'
done()
crudControllerFactory()
crudModelFactory()
describe 'extend: (object,properties) -> object', ->
it 'appends properties onto object', (done) ->
object =
alice: '1'
properties =
bob: 2
charlie: ['1', '2', '3']
david:
<NAME>: 'a'
result = common.extend object, properties
expect(result).to.have.ownProperty 'alice', '1'
expect(result).to.have.ownProperty 'bob', 2
# expect(result).to.have.ownProperty('charlie').with.length 3
expect(result).to.have.ownProperty 'david'
expect(result.david).to.have.ownProperty 'sophie', 'a'
done()
globalcommon()
dal() | true | assert = require('chai').assert
expect = require('chai').expect
sinon = require 'sinon'
proxyquire = require 'proxyquire'
path = require 'path'
globalcommon = require '../../common'
dal = require './dal'
crudModelFactory = require './crudModelFactory'
crudControllerFactory = require './crudControllerFactory'
module.exports = () ->
describe 'Common', ->
stubs = null
common = null
mongooseMock = null
beforeEach (done) ->
dir = path.normalize __dirname + '../../../../server'
stubs =
'./crudControllerFactory':
name: 'crudControllerFactory'
'./crudModelFactory':
name: 'crudModelFactory'
'./rest': 'restStub'
'./dal': 'dalStub'
'../../common/timePatterns': 'timePatternsStub'
mongooseMock = sinon.spy()
common = proxyquire dir + '/common', stubs
done()
describe 'Exports', ->
it 'crudControllerFactory', (done) ->
assert.property common, 'crudControllerFactory'
, 'common does not export crudControllerFactory'
expect(common.crudControllerFactory.name)
.to.equal 'crudControllerFactory'
done()
it 'crudModelFactory', (done) ->
assert.property common, 'crudModelFactory'
, 'common does not export crudModelFactory'
expect(common.crudModelFactory.name).to.equal 'crudModelFactory'
done()
it 'extend', (done) ->
expect(common, 'common does not export extend')
.to.have.ownProperty 'extend'
done()
it 'rest', (done) ->
expect(common, 'common does not export rest')
.to.have.ownProperty 'rest'
expect(common.rest).to.equal 'restStub'
done()
it 'timePatterns', (done) ->
expect(common, 'common does not export timePatterns')
.to.have.ownProperty 'timePatterns'
expect(common.timePatterns).to.equal 'timePatternsStub'
done()
it 'dal', (done) ->
expect(common, 'common does not export dal')
.to.have.ownProperty 'dal'
expect(common.dal).to.equal 'dalStub'
done()
crudControllerFactory()
crudModelFactory()
describe 'extend: (object,properties) -> object', ->
it 'appends properties onto object', (done) ->
object =
alice: '1'
properties =
bob: 2
charlie: ['1', '2', '3']
david:
PI:NAME:<NAME>END_PI: 'a'
result = common.extend object, properties
expect(result).to.have.ownProperty 'alice', '1'
expect(result).to.have.ownProperty 'bob', 2
# expect(result).to.have.ownProperty('charlie').with.length 3
expect(result).to.have.ownProperty 'david'
expect(result.david).to.have.ownProperty 'sophie', 'a'
done()
globalcommon()
dal() |
[
{
"context": "e-Version: 1.0\nSubject: Test deliver webhook\nFrom: Bob <bob@mail.jianliao.com>\nTo: Alice <alice@example.",
"end": 668,
"score": 0.9997053742408752,
"start": 665,
"tag": "NAME",
"value": "Bob"
},
{
"context": "ion: 1.0\nSubject: Test deliver webhook\nFrom: Bob <bob@ma... | test/services/mailgun.coffee | jianliaoim/talk-services | 40 | should = require 'should'
Promise = require 'bluebird'
requireDir = require 'require-dir'
loader = require '../../src/loader'
{req} = require '../util'
$mailgun = loader.load 'mailgun'
payloads = requireDir './mailgun_assets'
describe 'Mailgun#Webhook', ->
it 'receive delivered', (done) ->
req.body = payloads['delivered'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Delivered messages
Received: by luna.mailgun.net with SMTP mgrt 8734663311733; Fri, 03 May 2013 18:26:27 +0000
Mime-Version: 1.0
Subject: Test deliver webhook
From: Bob <bob@mail.jianliao.com>
To: Alice <alice@example.com>
Message-Id: <20130503182626.18666.16540@mail.jianliao.com>
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 18:26:27 +0000
Sender: bob@mail.jianliao.com
'''
.nodeify done
it 'receive dropped', (done) ->
req.body = payloads['dropped'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Dropped messages
reason: hardfail
Received: by luna.mailgun.net with SMTP mgrt 8755546751405; Fri, 03 May 2013 19:26:59 +0000
Mime-Version: 1.0
Subject: Test drop webhook
From: Bob <bob@mail.jianliao.com>
To: Alice <alice@example.com>
Message-Id: <20130503192659.13651.20287@mail.jianliao.com>
List-Unsubscribe: <mailto:u+na6tmy3ege4tgnldmyytqojqmfsdembyme3tmy3cha4wcndbgaydqyrgoi6wszdpovrhi5dinfzw63tfmv4gs43uomstimdhnvqws3bomnxw2jtuhusteqjgmq6tm@mail.jianliao.com>
X-Mailgun-Sid: WyIwNzI5MCIsICJpZG91YnR0aGlzb25lZXhpc3RzQGdtYWlsLmNvbSIsICI2Il0=
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 19:26:59 +0000
Sender: bob@mail.jianliao.com
'''
.nodeify done
it 'receive clicks', (done) ->
req.body = payloads['clicks'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Clicks
url: http://mailgun.net
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
it 'receive hard', (done) ->
req.body = payloads['hard'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Hard bounces
error: 5.1.1 The email account that you tried to reach does not exist. Please try\n5.1.1 double-checking the recipient's email address for typos or\n5.1.1 unnecessary spaces. Learn more at\n5.1.1 http://support.example.com/mail/bin/answer.py
Received: by luna.mailgun.net with SMTP mgrt 8734663311733; Fri, 03 May 2013 18:26:27 +0000
Mime-Version: 1.0
Subject: Test bounces webhook
From: Bob <bob@mail.jianliao.com>
To: Alice <alice@example.com>
Message-Id: <20130503182626.18666.16540@mail.jianliao.com>
List-Unsubscribe: <mailto:u+na6tmy3ege4tgnldmyytqojqmfsdembyme3tmy3cha4wcndbgaydqyrgoi6wszdpovrhi5dinfzw63tfmv4gs43uomstimdhnvqws3bomnxw2jtuhusteqjgmq6tm@mail.jianliao.com>
X-Mailgun-Sid: WyIwNzI5MCIsICJhbGljZUBleGFtcGxlLmNvbSIsICI2Il0=
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 18:26:27 +0000
Sender: bob@mail.jianliao.com
'''
.nodeify done
it 'receive open', (done) ->
req.body = payloads['open'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Opens
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
it 'receive spam', (done) ->
req.body = payloads['spam'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Spam complaints
Mime-Version: 1.0
Return-Path: <bounce+ad27a4.345-alice=example.com@mail.jianliao.com>
Received-Spf: pass (mta1122.mail.sk1.example.com: domain of bc=example+example.com=example@mail.jianliao.com designates 173.193.210.33 as permitted sender)
X-Originating-Ip: [173.193.210.33]
Authentication-Results: mta1122.mail.sk1.example.com from=mail.jianliao.com; domainkeys=pass (ok); from=mail.jianliao.com; dkim=pass (ok)
Received: from 127.0.0.1 (EHLO mail-luna33.mailgun.org) (173.193.210.33) by mta1122.mail.sk1.example.com with SMTP; Mon, 14 Feb 2011 21:57:01 -0800
Dkim-Signature: a=rsa-sha256; v=1; c=relaxed/relaxed; d=mail.jianliao.com; q=dns/txt; s=mg; t=1297749420; h=MIME-Version: Subject: From: To: Date: Message-Id: List-Id: Sender: Content-Type: Content-Transfer-Encoding; bh=gYbP9hMgpeW3ea3yNJlie/Yt+URsh5LwB24aU1Oe1Uo=; b=Vr6ipa2P79dYKAtYtgZSiMXInPvthTzaQBs2XzJLEu7lc0s6bmHEApy3r2dVsI+MoJ+GtjWt pkQVbwX2ZipJsdGUigT60aiTX45ll1QG5X83N+mKR4cIDmVJD8vtwjJcLfSMdDTuOK6jI41B NSYVlT1YWPh3sh3Tdl0ZxolDlys=
Domainkey-Signature: a=rsa-sha1; c=nofws; d=mail.jianliao.com; s=mg; h=MIME-Version: Subject: From: To: Date: Message-Id: List-Id: Sender: Content-Type: Content-Transfer-Encoding; b=QhZX2rhdVYccjPsUTMw1WASPEgsDg0KSBGHHwItsZd0xopzvgK2iQAuSJiJXo7yomFgj5R /Cz/iTv9I4Jdt6JPaEc5wf5X2JWqBCO+F1FTyYcVWzMG+WhGCdFn6sw82ma8VVY7UUU0TGsS tJe+1JkAQ1ILlm4rdXmS9jlG4H/ZE=
Received: from web3 (184-106-70-82.static.cloud-ips.com [184.106.70.82]) by mxa.mailgun.org with ESMTPSA id EB508F0127B for <alice@example.com>; Tue, 15 Feb 2011 05:56:45 +0000 (UTC)
Subject: Hi Alice
From: Bob <bob@mail.jianliao.com>
To: Alice <alice@example.com>
Date: Tue, 15 Feb 2011 05:56:45 -0000
Message-Id: <20110215055645.25246.63817@mail.jianliao.com>
Sender: SRS0=1U0y=VM=example.com=example@mail.jianliao.com
Content-Length: 629
'''
.nodeify done
it 'receive unsubscribe', (done) ->
req.body = payloads['unsubscribe'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Unsubscribes
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
| 28827 | should = require 'should'
Promise = require 'bluebird'
requireDir = require 'require-dir'
loader = require '../../src/loader'
{req} = require '../util'
$mailgun = loader.load 'mailgun'
payloads = requireDir './mailgun_assets'
describe 'Mailgun#Webhook', ->
it 'receive delivered', (done) ->
req.body = payloads['delivered'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Delivered messages
Received: by luna.mailgun.net with SMTP mgrt 8734663311733; Fri, 03 May 2013 18:26:27 +0000
Mime-Version: 1.0
Subject: Test deliver webhook
From: <NAME> <<EMAIL>>
To: <NAME> <<EMAIL>>
Message-Id: <<EMAIL>>
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 18:26:27 +0000
Sender: <EMAIL>
'''
.nodeify done
it 'receive dropped', (done) ->
req.body = payloads['dropped'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Dropped messages
reason: hardfail
Received: by luna.mailgun.net with SMTP mgrt 8755546751405; Fri, 03 May 2013 19:26:59 +0000
Mime-Version: 1.0
Subject: Test drop webhook
From: <NAME> <<EMAIL>>
To: <NAME> <<EMAIL>>
Message-Id: <<EMAIL>03192<EMAIL>>
List-Unsubscribe: <mailto:<EMAIL>>
X-Mailgun-Sid: WyIwNzI5MCIsICJpZG91YnR0aGlzb25lZXhpc3RzQGdtYWlsLmNvbSIsICI2Il0=
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 19:26:59 +0000
Sender: <EMAIL>
'''
.nodeify done
it 'receive clicks', (done) ->
req.body = payloads['clicks'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Clicks
url: http://mailgun.net
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
it 'receive hard', (done) ->
req.body = payloads['hard'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Hard bounces
error: 5.1.1 The email account that you tried to reach does not exist. Please try\n5.1.1 double-checking the recipient's email address for typos or\n5.1.1 unnecessary spaces. Learn more at\n5.1.1 http://support.example.com/mail/bin/answer.py
Received: by luna.mailgun.net with SMTP mgrt 8734663311733; Fri, 03 May 2013 18:26:27 +0000
Mime-Version: 1.0
Subject: Test bounces webhook
From: <NAME> <<EMAIL>>
To: <NAME> <<EMAIL>>
Message-Id: <<EMAIL>>
List-Unsubscribe: <mailto:<EMAIL>>
X-Mailgun-Sid: WyIwNzI5MCIsICJhbGljZUBleGFtcGxlLmNvbSIsICI2Il0=
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 18:26:27 +0000
Sender: <EMAIL>
'''
.nodeify done
it 'receive open', (done) ->
req.body = payloads['open'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Opens
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
it 'receive spam', (done) ->
req.body = payloads['spam'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Spam complaints
Mime-Version: 1.0
Return-Path: <bounce+ad27a4.345-alice=<EMAIL>>
Received-Spf: pass (mta1122.mail.sk1.example.com: domain of bc=example+example.com=<EMAIL> designates 192.168.3.11 as permitted sender)
X-Originating-Ip: [192.168.3.11]
Authentication-Results: mta1122.mail.sk1.example.com from=mail.jianliao.com; domainkeys=pass (ok); from=mail.jianliao.com; dkim=pass (ok)
Received: from 127.0.0.1 (EHLO mail-luna33.mailgun.org) (192.168.3.11) by mta1122.mail.sk1.example.com with SMTP; Mon, 14 Feb 2011 21:57:01 -0800
Dkim-Signature: a=rsa-sha256; v=1; c=relaxed/relaxed; d=mail.jianliao.com; q=dns/txt; s=mg; t=1297749420; h=MIME-Version: Subject: From: To: Date: Message-Id: List-Id: Sender: Content-Type: Content-Transfer-Encoding; bh=gYbP9hMgpeW3ea3yNJlie/Yt+URsh5LwB24aU1Oe1Uo=; b=Vr6ipa2P79dYKAtYtgZSiMXInPvthTzaQBs2XzJLEu7lc0s6bmHEApy3r2dVsI+MoJ+GtjWt pkQVbwX2ZipJsdGUigT60aiTX45ll1QG5X83N+mKR4cIDmVJD8vtwjJcLfSMdDTuOK6jI41B NSYVlT1YWPh3sh3Tdl0ZxolDlys=
Domainkey-Signature: a=rsa-sha1; c=nofws; d=mail.jianliao.com; s=mg; h=MIME-Version: Subject: From: To: Date: Message-Id: List-Id: Sender: Content-Type: Content-Transfer-Encoding; b=QhZX2rhdVYccjPsUTMw1WASPEgsDg0KSBGHHwItsZd0xopzvgK2iQAuSJiJXo7yomFgj5R /Cz/iTv9I4Jdt6JPaEc5wf5X2JWqBCO+F1FTyYcVWzMG+WhGCdFn6sw82ma8VVY7UUU0TGsS tJe+1JkAQ1ILlm4rdXmS9jlG4H/ZE=
Received: from web3 (184-106-70-82.static.cloud-ips.com [184.106.70.82]) by mxa.mailgun.org with ESMTPSA id EB508F0127B for <<EMAIL>>; Tue, 15 Feb 2011 05:56:45 +0000 (UTC)
Subject: Hi <NAME>
From: <NAME> <<EMAIL>>
To: <NAME> <<EMAIL>>
Date: Tue, 15 Feb 2011 05:56:45 -0000
Message-Id: <201102<EMAIL>0<EMAIL>>
Sender: SRS0=1U0y=VM=example.com=<EMAIL>
Content-Length: 629
'''
.nodeify done
it 'receive unsubscribe', (done) ->
req.body = payloads['unsubscribe'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Unsubscribes
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
| true | should = require 'should'
Promise = require 'bluebird'
requireDir = require 'require-dir'
loader = require '../../src/loader'
{req} = require '../util'
$mailgun = loader.load 'mailgun'
payloads = requireDir './mailgun_assets'
describe 'Mailgun#Webhook', ->
it 'receive delivered', (done) ->
req.body = payloads['delivered'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Delivered messages
Received: by luna.mailgun.net with SMTP mgrt 8734663311733; Fri, 03 May 2013 18:26:27 +0000
Mime-Version: 1.0
Subject: Test deliver webhook
From: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
To: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Message-Id: <PI:EMAIL:<EMAIL>END_PI>
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 18:26:27 +0000
Sender: PI:EMAIL:<EMAIL>END_PI
'''
.nodeify done
it 'receive dropped', (done) ->
req.body = payloads['dropped'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Dropped messages
reason: hardfail
Received: by luna.mailgun.net with SMTP mgrt 8755546751405; Fri, 03 May 2013 19:26:59 +0000
Mime-Version: 1.0
Subject: Test drop webhook
From: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
To: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Message-Id: <PI:EMAIL:<EMAIL>END_PI03192PI:EMAIL:<EMAIL>END_PI>
List-Unsubscribe: <mailto:PI:EMAIL:<EMAIL>END_PI>
X-Mailgun-Sid: WyIwNzI5MCIsICJpZG91YnR0aGlzb25lZXhpc3RzQGdtYWlsLmNvbSIsICI2Il0=
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 19:26:59 +0000
Sender: PI:EMAIL:<EMAIL>END_PI
'''
.nodeify done
it 'receive clicks', (done) ->
req.body = payloads['clicks'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Clicks
url: http://mailgun.net
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
it 'receive hard', (done) ->
req.body = payloads['hard'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Hard bounces
error: 5.1.1 The email account that you tried to reach does not exist. Please try\n5.1.1 double-checking the recipient's email address for typos or\n5.1.1 unnecessary spaces. Learn more at\n5.1.1 http://support.example.com/mail/bin/answer.py
Received: by luna.mailgun.net with SMTP mgrt 8734663311733; Fri, 03 May 2013 18:26:27 +0000
Mime-Version: 1.0
Subject: Test bounces webhook
From: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
To: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Message-Id: <PI:EMAIL:<EMAIL>END_PI>
List-Unsubscribe: <mailto:PI:EMAIL:<EMAIL>END_PI>
X-Mailgun-Sid: WyIwNzI5MCIsICJhbGljZUBleGFtcGxlLmNvbSIsICI2Il0=
X-Mailgun-Variables: {"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}
Date: Fri, 03 May 2013 18:26:27 +0000
Sender: PI:EMAIL:<EMAIL>END_PI
'''
.nodeify done
it 'receive open', (done) ->
req.body = payloads['open'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Opens
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
it 'receive spam', (done) ->
req.body = payloads['spam'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Spam complaints
Mime-Version: 1.0
Return-Path: <bounce+ad27a4.345-alice=PI:EMAIL:<EMAIL>END_PI>
Received-Spf: pass (mta1122.mail.sk1.example.com: domain of bc=example+example.com=PI:EMAIL:<EMAIL>END_PI designates PI:IP_ADDRESS:192.168.3.11END_PI as permitted sender)
X-Originating-Ip: [PI:IP_ADDRESS:192.168.3.11END_PI]
Authentication-Results: mta1122.mail.sk1.example.com from=mail.jianliao.com; domainkeys=pass (ok); from=mail.jianliao.com; dkim=pass (ok)
Received: from 127.0.0.1 (EHLO mail-luna33.mailgun.org) (PI:IP_ADDRESS:192.168.3.11END_PI) by mta1122.mail.sk1.example.com with SMTP; Mon, 14 Feb 2011 21:57:01 -0800
Dkim-Signature: a=rsa-sha256; v=1; c=relaxed/relaxed; d=mail.jianliao.com; q=dns/txt; s=mg; t=1297749420; h=MIME-Version: Subject: From: To: Date: Message-Id: List-Id: Sender: Content-Type: Content-Transfer-Encoding; bh=gYbP9hMgpeW3ea3yNJlie/Yt+URsh5LwB24aU1Oe1Uo=; b=Vr6ipa2P79dYKAtYtgZSiMXInPvthTzaQBs2XzJLEu7lc0s6bmHEApy3r2dVsI+MoJ+GtjWt pkQVbwX2ZipJsdGUigT60aiTX45ll1QG5X83N+mKR4cIDmVJD8vtwjJcLfSMdDTuOK6jI41B NSYVlT1YWPh3sh3Tdl0ZxolDlys=
Domainkey-Signature: a=rsa-sha1; c=nofws; d=mail.jianliao.com; s=mg; h=MIME-Version: Subject: From: To: Date: Message-Id: List-Id: Sender: Content-Type: Content-Transfer-Encoding; b=QhZX2rhdVYccjPsUTMw1WASPEgsDg0KSBGHHwItsZd0xopzvgK2iQAuSJiJXo7yomFgj5R /Cz/iTv9I4Jdt6JPaEc5wf5X2JWqBCO+F1FTyYcVWzMG+WhGCdFn6sw82ma8VVY7UUU0TGsS tJe+1JkAQ1ILlm4rdXmS9jlG4H/ZE=
Received: from web3 (184-106-70-82.static.cloud-ips.com [184.106.70.82]) by mxa.mailgun.org with ESMTPSA id EB508F0127B for <PI:EMAIL:<EMAIL>END_PI>; Tue, 15 Feb 2011 05:56:45 +0000 (UTC)
Subject: Hi PI:NAME:<NAME>END_PI
From: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
To: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Date: Tue, 15 Feb 2011 05:56:45 -0000
Message-Id: <201102PI:EMAIL:<EMAIL>END_PI0PI:EMAIL:<EMAIL>END_PI>
Sender: SRS0=1U0y=VM=example.com=PI:EMAIL:<EMAIL>END_PI
Content-Length: 629
'''
.nodeify done
it 'receive unsubscribe', (done) ->
req.body = payloads['unsubscribe'].body
$mailgun.then (mailgun) ->
mailgun.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql '''
Unsubscribes
user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31
'''
.nodeify done
|
[
{
"context": "\n petType: 'dog'\n petName: 'Andrew'\n petId: 5\n\n @expectedOptions",
"end": 3910,
"score": 0.9992977380752563,
"start": 3904,
"tag": "NAME",
"value": "Andrew"
},
{
"context": " pet_type: \"dog\"\n pet_nam... | test/generator/yeoman-spec.coffee | octoblu/swagger-device-generator | 0 | _ = require 'lodash'
fs = require 'fs-extra'
os = require 'os'
path = require 'path'
assert = require('yeoman-generator').assert
helpers = require('yeoman-generator').test
ProxyDeviceYeoman = require '../../generators/app/index'
describe 'app', ->
describe 'when called with a swagger file', ->
beforeEach (done) ->
@optionsBuilderRoot = path.join __dirname, 'generated-files'
@optionsBuilderPath = path.join @optionsBuilderRoot, 'options-builder.coffee'
helpers.run(path.join(__dirname, '../../generators/app')).
inDir(@optionsBuilderRoot).
withOptions('skip-install': true).
withPrompts({
swaggerFile: '../../../test/samples/swagger/pet-store-2-0-swagger.json'
}).on 'end', =>
console.log fs.readFileSync @optionsBuilderPath, 'utf8'
done()
it 'creates files', ->
assert.file [
'options-builder.coffee'
]
afterEach ->
fs.removeSync @optionsBuilderRoot
_.each require.cache, (cacheValue, cacheName) =>
delete require.cache[cacheName] if _.contains cacheName, 'options-builder'
describe 'when OptionsBuilder is instantiated', ->
beforeEach ->
OptionsBuilder = require @optionsBuilderPath
@sut = new OptionsBuilder
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
describe 'when called with a proxy-config', ->
beforeEach (done) ->
@optionsBuilderRoot = path.join __dirname, 'generated-files'
@optionsBuilderPath = path.join @optionsBuilderRoot, 'options-builder.coffee'
helpers.run(path.join(__dirname, '../../generators/app')).
inDir(@optionsBuilderRoot).
withOptions({
'skip-install': true
'proxy-config': '../../samples/proxy-config/sample1.json'
}).
on 'end', =>
console.log fs.readFileSync @optionsBuilderPath, 'utf8'
done()
afterEach ->
fs.removeSync @optionsBuilderRoot
_.each require.cache, (cacheValue, cacheName) =>
delete require.cache[cacheName] if _.contains cacheName, 'options-builder'
delete require.cache[@optionsBuilderRoot]
it 'creates files', ->
assert.file [
'options-builder.coffee'
]
describe 'when OptionsBuilder is instantiated', ->
beforeEach ->
OptionsBuilder = require @optionsBuilderPath
@sut = new OptionsBuilder
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
describe 'when convertMessageNames is called with message options and a map', ->
beforeEach ->
@result = @sut.convertMessageNames(
{
bandit: "Ignito Montoya"
banditCaptain: "Tyrannosaurus Rex"
species: "dog"
}
{
bandit: "Bandit"
banditCaptain: "bandit_captain"
}
)
it 'should return a message with the transformed keys', ->
expect(@result).to.deep.equal(
Bandit: "Ignito Montoya"
bandit_captain: "Tyrannosaurus Rex"
species: "dog"
)
it 'should return a message with the transformed keys', ->
expect(@result).to.deep.equal(
Bandit: "Ignito Montoya"
bandit_captain: "Tyrannosaurus Rex"
species: "dog"
)
describe 'when OptionsBuilder.createPet is run', ->
beforeEach (done) ->
payload =
petType: 'dog'
petName: 'Andrew'
petId: 5
@expectedOptions =
method: 'GET'
uri: 'https://petfinder2.com/pets/5'
qs:
pet_type: "dog"
pet_name: 'Andrew'
@sut.getPet payload, (error, options) =>
@result = options
done()
it 'should return the appropriate request parameters', ->
expect(@result).to.deep.equal @expectedOptions
| 97694 | _ = require 'lodash'
fs = require 'fs-extra'
os = require 'os'
path = require 'path'
assert = require('yeoman-generator').assert
helpers = require('yeoman-generator').test
ProxyDeviceYeoman = require '../../generators/app/index'
describe 'app', ->
describe 'when called with a swagger file', ->
beforeEach (done) ->
@optionsBuilderRoot = path.join __dirname, 'generated-files'
@optionsBuilderPath = path.join @optionsBuilderRoot, 'options-builder.coffee'
helpers.run(path.join(__dirname, '../../generators/app')).
inDir(@optionsBuilderRoot).
withOptions('skip-install': true).
withPrompts({
swaggerFile: '../../../test/samples/swagger/pet-store-2-0-swagger.json'
}).on 'end', =>
console.log fs.readFileSync @optionsBuilderPath, 'utf8'
done()
it 'creates files', ->
assert.file [
'options-builder.coffee'
]
afterEach ->
fs.removeSync @optionsBuilderRoot
_.each require.cache, (cacheValue, cacheName) =>
delete require.cache[cacheName] if _.contains cacheName, 'options-builder'
describe 'when OptionsBuilder is instantiated', ->
beforeEach ->
OptionsBuilder = require @optionsBuilderPath
@sut = new OptionsBuilder
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
describe 'when called with a proxy-config', ->
beforeEach (done) ->
@optionsBuilderRoot = path.join __dirname, 'generated-files'
@optionsBuilderPath = path.join @optionsBuilderRoot, 'options-builder.coffee'
helpers.run(path.join(__dirname, '../../generators/app')).
inDir(@optionsBuilderRoot).
withOptions({
'skip-install': true
'proxy-config': '../../samples/proxy-config/sample1.json'
}).
on 'end', =>
console.log fs.readFileSync @optionsBuilderPath, 'utf8'
done()
afterEach ->
fs.removeSync @optionsBuilderRoot
_.each require.cache, (cacheValue, cacheName) =>
delete require.cache[cacheName] if _.contains cacheName, 'options-builder'
delete require.cache[@optionsBuilderRoot]
it 'creates files', ->
assert.file [
'options-builder.coffee'
]
describe 'when OptionsBuilder is instantiated', ->
beforeEach ->
OptionsBuilder = require @optionsBuilderPath
@sut = new OptionsBuilder
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
describe 'when convertMessageNames is called with message options and a map', ->
beforeEach ->
@result = @sut.convertMessageNames(
{
bandit: "Ignito Montoya"
banditCaptain: "Tyrannosaurus Rex"
species: "dog"
}
{
bandit: "Bandit"
banditCaptain: "bandit_captain"
}
)
it 'should return a message with the transformed keys', ->
expect(@result).to.deep.equal(
Bandit: "Ignito Montoya"
bandit_captain: "Tyrannosaurus Rex"
species: "dog"
)
it 'should return a message with the transformed keys', ->
expect(@result).to.deep.equal(
Bandit: "Ignito Montoya"
bandit_captain: "Tyrannosaurus Rex"
species: "dog"
)
describe 'when OptionsBuilder.createPet is run', ->
beforeEach (done) ->
payload =
petType: 'dog'
petName: '<NAME>'
petId: 5
@expectedOptions =
method: 'GET'
uri: 'https://petfinder2.com/pets/5'
qs:
pet_type: "dog"
pet_name: '<NAME>'
@sut.getPet payload, (error, options) =>
@result = options
done()
it 'should return the appropriate request parameters', ->
expect(@result).to.deep.equal @expectedOptions
| true | _ = require 'lodash'
fs = require 'fs-extra'
os = require 'os'
path = require 'path'
assert = require('yeoman-generator').assert
helpers = require('yeoman-generator').test
ProxyDeviceYeoman = require '../../generators/app/index'
describe 'app', ->
describe 'when called with a swagger file', ->
beforeEach (done) ->
@optionsBuilderRoot = path.join __dirname, 'generated-files'
@optionsBuilderPath = path.join @optionsBuilderRoot, 'options-builder.coffee'
helpers.run(path.join(__dirname, '../../generators/app')).
inDir(@optionsBuilderRoot).
withOptions('skip-install': true).
withPrompts({
swaggerFile: '../../../test/samples/swagger/pet-store-2-0-swagger.json'
}).on 'end', =>
console.log fs.readFileSync @optionsBuilderPath, 'utf8'
done()
it 'creates files', ->
assert.file [
'options-builder.coffee'
]
afterEach ->
fs.removeSync @optionsBuilderRoot
_.each require.cache, (cacheValue, cacheName) =>
delete require.cache[cacheName] if _.contains cacheName, 'options-builder'
describe 'when OptionsBuilder is instantiated', ->
beforeEach ->
OptionsBuilder = require @optionsBuilderPath
@sut = new OptionsBuilder
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
describe 'when called with a proxy-config', ->
beforeEach (done) ->
@optionsBuilderRoot = path.join __dirname, 'generated-files'
@optionsBuilderPath = path.join @optionsBuilderRoot, 'options-builder.coffee'
helpers.run(path.join(__dirname, '../../generators/app')).
inDir(@optionsBuilderRoot).
withOptions({
'skip-install': true
'proxy-config': '../../samples/proxy-config/sample1.json'
}).
on 'end', =>
console.log fs.readFileSync @optionsBuilderPath, 'utf8'
done()
afterEach ->
fs.removeSync @optionsBuilderRoot
_.each require.cache, (cacheValue, cacheName) =>
delete require.cache[cacheName] if _.contains cacheName, 'options-builder'
delete require.cache[@optionsBuilderRoot]
it 'creates files', ->
assert.file [
'options-builder.coffee'
]
describe 'when OptionsBuilder is instantiated', ->
beforeEach ->
OptionsBuilder = require @optionsBuilderPath
@sut = new OptionsBuilder
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
it 'should have getAllPets and createPets as keys', ->
functions = _.keys @sut
expect(functions).to.contain 'getAllPets', 'createPet, getPet'
describe 'when convertMessageNames is called with message options and a map', ->
beforeEach ->
@result = @sut.convertMessageNames(
{
bandit: "Ignito Montoya"
banditCaptain: "Tyrannosaurus Rex"
species: "dog"
}
{
bandit: "Bandit"
banditCaptain: "bandit_captain"
}
)
it 'should return a message with the transformed keys', ->
expect(@result).to.deep.equal(
Bandit: "Ignito Montoya"
bandit_captain: "Tyrannosaurus Rex"
species: "dog"
)
it 'should return a message with the transformed keys', ->
expect(@result).to.deep.equal(
Bandit: "Ignito Montoya"
bandit_captain: "Tyrannosaurus Rex"
species: "dog"
)
describe 'when OptionsBuilder.createPet is run', ->
beforeEach (done) ->
payload =
petType: 'dog'
petName: 'PI:NAME:<NAME>END_PI'
petId: 5
@expectedOptions =
method: 'GET'
uri: 'https://petfinder2.com/pets/5'
qs:
pet_type: "dog"
pet_name: 'PI:NAME:<NAME>END_PI'
@sut.getPet payload, (error, options) =>
@result = options
done()
it 'should return the appropriate request parameters', ->
expect(@result).to.deep.equal @expectedOptions
|
[
{
"context": " c1 = TextElement size: \"childrenSize\", text: \"Hi\"\n c2 = RectangleElement color: '#ccc', size:",
"end": 2532,
"score": 0.939603328704834,
"start": 2530,
"tag": "NAME",
"value": "Hi"
},
{
"context": " c1 = TextElement size: \"childrenSize\", text: \"Hi\"\... | test/tests/Art.Engine/Core/Layout/Flow/Basics.coffee | Imikimi-LLC/art.engine | 4 | Foundation = require 'art-foundation'
Atomic = require 'art-atomic'
Canvas = require 'art-canvas'
Engine = require 'art-engine'
StateEpochTestHelper = require '../../StateEpochTestHelper'
{inspect, log, isArray, min, max} = Foundation
{point, matrix, Matrix} = Atomic
{stateEpochTest, drawAndTestElement} = StateEpochTestHelper
{Element, TextElement, RectangleElement} = require 'art-engine/Factories'
{LinearLayout} = Engine.Layout
testLogBitmap = (name, setup, tests...) ->
test name, ->
{root, test} = setup()
root.toBitmapBasic area:"logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap, name
test?()
module.exports = suite: ->
testLogBitmap "flow layout", ->
root: root = Element
size: 100
childrenLayout: "flow"
RectangleElement color:"red", size: 30
RectangleElement color:"green", size: 50
RectangleElement color:"blue", size: 40
test: ->
assert.eq sizes = (c.currentSize for c in root.children), [point(30), point(50), point(40)]
assert.eq locations = (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(0, 50)]
log sizes: sizes, locations:locations
drawAndTestElement "flow and childrenLayout (constrained)", ->
element: root = Element
size:
w: (ps, cs) -> min 100, cs.x
hch: 1
name: "flow and childrenLayout element"
childrenLayout: "flow"
RectangleElement size: 30, color: "red"
RectangleElement size: 50, color: "green"
RectangleElement size: 40, color: "blue"
test: ->
assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(0, 50)]
assert.eq root.currentSize, point 80, 90
drawAndTestElement "flow and childrenLayout (unconstrained)", ->
element: root = Element
size:
wcw: 1
h: (ps, cs) -> min 100, cs.y
name: "flow and childrenLayout element"
childrenLayout: "flow"
RectangleElement size: 30, color: "red"
RectangleElement size: 50, color: "green"
RectangleElement size: 40, color: "blue"
test: ->
assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(80, 0)]
assert.eq root.currentSize, point 120, 50
testLogBitmap "horizontal line should be the width of the wider word", ->
root: root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
c1 = TextElement size: "childrenSize", text: "Hi"
c2 = RectangleElement color: '#ccc', size: wpw:1, h:10
c3 = TextElement size: "childrenSize", text: "world."
# test: ->
# assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(0, 20), point(0, 30)]
# assert.within c2.currentSize, point(41, 10), point(42, 10)
# assert.within root.currentSize, point(41, 50), point(42, 50)
testLogBitmap "horizontal line with right alignment", ->
root: root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
childrenAlignment: "right"
c1 = TextElement size: "childrenSize", text: "Hi"
c2 = RectangleElement color: '#ccc', size: wpw:1, h:10
c3 = TextElement size: "childrenSize", text: "world."
test: ->
assert.within c1.currentLocation, point(25,0), point(26,0)
assert.eq c2.currentLocation, point 0, 12
assert.eq c3.currentLocation, point 0, 22
assert.within c2.currentSize, point(41, 10), point(42, 10)
assert.within root.currentSize, point(41, 34), point(42, 34)
test "flow with layout {scs:1}: child with layout ss:1 should work the same with or without inFlow: false, ", ->
root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc' # has size:point0 for flow because it's size is parent-circular
c2 = RectangleElement color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "Hi"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(), point(0, 12)]
assert.eq c1.currentSize, root.currentSize
assert.eq c2.currentSize, root.currentSize
assert.within root.currentSize, point(41, 24), point(42, 24)
test "flow with fixed size: inFlow: false required to have background", ->
root = Element
size: 50
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "Hi"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(0, 12)]
assert.eq c1.currentSize, root.currentSize
test "flow with fixed size: ss:.5 child is placed in flow", ->
root = Element
size: 50
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc', size: ps:.5
TextElement size: "childrenSize", text: "Hi"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(25, 0), point(0, 25)]
assert.eq c1.currentSize, point 25
assert.eq root.currentSize, point 50
test "all full-width", ->
root = Element
size: hch:1, w:50
childrenLayout: "flow"
RectangleElement color: '#fcc', size: wpw:1, h:10
RectangleElement color: '#cfc', size: wpw:1, h:10
RectangleElement color: '#ccf', size: wpw:1, h:10
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(0, 10), point(0, 20)]
test "all full-height", ->
root = Element
size: wcw:1, h:50
childrenLayout: "flow"
RectangleElement color: '#fcc', size: hph:1, w:10
RectangleElement color: '#cfc', size: hph:1, w:10
RectangleElement color: '#ccf', size: hph:1, w:10
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(10, 0), point(20, 0)]
testLogBitmap "flow with child ss:1 and child ww:1, h:10", ->
root:newRoot = Element
size: cs:1
RectangleElement color: '#eee', size: ps:1
root = Element
size: cs:1
padding: 10
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc'
TextElement size: "childrenSize", text: "Hi"
c2 = RectangleElement color: '#777', size: wpw:1, h:10
TextElement size: "childrenSize", text: "world."
test: ->
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(0, 12), point(0, 22)]
assert.eq c1.currentSize, root.currentSize.sub(20)
assert.within root.currentSize, point(61, 54), point(62, 54)
testLogBitmap "padding, right-aligned with inFlow:false child", ->
root:
root = Element
size: cs:1 #, max: ww:1
padding: 10
childrenLayout: "flow"
childrenAlignment: "right"
c1 = RectangleElement name:"inflowfalse", color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "Hi"
c2 = RectangleElement name:"h-line", color: '#777', size: wpw:1, h:10
TextElement size: "childrenSize", text: "world."
test: ->
assert.eq root.currentSize.sub(20), c1.currentSize
stateEpochTest "min layout with children-dependent height", ->
p = Element
size:175
childrenLayout: "flow"
name: "parent"
c = Element
name: "child"
size:
x: (ps) -> ps.x
y: (ps, cs) -> max 35, cs.y
->
assert.eq c.currentSize, point 175, 35
stateEpochTest "flow and update", ->
Element
size: 200
childrenLayout: "flow"
Element
size: w:125, h:50
child = Element
size: w:125, hch:1
grandchild = RectangleElement
size: w:125, h:50
color: "red"
->
l1 = child.currentLocation
assert.neq l1, point()
grandchild.color = "blue"
->
l2 = child.currentLocation
assert.eq l1, l2
assert.neq l2, point()
| 98061 | Foundation = require 'art-foundation'
Atomic = require 'art-atomic'
Canvas = require 'art-canvas'
Engine = require 'art-engine'
StateEpochTestHelper = require '../../StateEpochTestHelper'
{inspect, log, isArray, min, max} = Foundation
{point, matrix, Matrix} = Atomic
{stateEpochTest, drawAndTestElement} = StateEpochTestHelper
{Element, TextElement, RectangleElement} = require 'art-engine/Factories'
{LinearLayout} = Engine.Layout
testLogBitmap = (name, setup, tests...) ->
test name, ->
{root, test} = setup()
root.toBitmapBasic area:"logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap, name
test?()
module.exports = suite: ->
testLogBitmap "flow layout", ->
root: root = Element
size: 100
childrenLayout: "flow"
RectangleElement color:"red", size: 30
RectangleElement color:"green", size: 50
RectangleElement color:"blue", size: 40
test: ->
assert.eq sizes = (c.currentSize for c in root.children), [point(30), point(50), point(40)]
assert.eq locations = (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(0, 50)]
log sizes: sizes, locations:locations
drawAndTestElement "flow and childrenLayout (constrained)", ->
element: root = Element
size:
w: (ps, cs) -> min 100, cs.x
hch: 1
name: "flow and childrenLayout element"
childrenLayout: "flow"
RectangleElement size: 30, color: "red"
RectangleElement size: 50, color: "green"
RectangleElement size: 40, color: "blue"
test: ->
assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(0, 50)]
assert.eq root.currentSize, point 80, 90
drawAndTestElement "flow and childrenLayout (unconstrained)", ->
element: root = Element
size:
wcw: 1
h: (ps, cs) -> min 100, cs.y
name: "flow and childrenLayout element"
childrenLayout: "flow"
RectangleElement size: 30, color: "red"
RectangleElement size: 50, color: "green"
RectangleElement size: 40, color: "blue"
test: ->
assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(80, 0)]
assert.eq root.currentSize, point 120, 50
testLogBitmap "horizontal line should be the width of the wider word", ->
root: root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
c1 = TextElement size: "childrenSize", text: "<NAME>"
c2 = RectangleElement color: '#ccc', size: wpw:1, h:10
c3 = TextElement size: "childrenSize", text: "world."
# test: ->
# assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(0, 20), point(0, 30)]
# assert.within c2.currentSize, point(41, 10), point(42, 10)
# assert.within root.currentSize, point(41, 50), point(42, 50)
testLogBitmap "horizontal line with right alignment", ->
root: root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
childrenAlignment: "right"
c1 = TextElement size: "childrenSize", text: "<NAME>"
c2 = RectangleElement color: '#ccc', size: wpw:1, h:10
c3 = TextElement size: "childrenSize", text: "world."
test: ->
assert.within c1.currentLocation, point(25,0), point(26,0)
assert.eq c2.currentLocation, point 0, 12
assert.eq c3.currentLocation, point 0, 22
assert.within c2.currentSize, point(41, 10), point(42, 10)
assert.within root.currentSize, point(41, 34), point(42, 34)
test "flow with layout {scs:1}: child with layout ss:1 should work the same with or without inFlow: false, ", ->
root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc' # has size:point0 for flow because it's size is parent-circular
c2 = RectangleElement color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "<NAME>"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(), point(0, 12)]
assert.eq c1.currentSize, root.currentSize
assert.eq c2.currentSize, root.currentSize
assert.within root.currentSize, point(41, 24), point(42, 24)
test "flow with fixed size: inFlow: false required to have background", ->
root = Element
size: 50
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "<NAME>"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(0, 12)]
assert.eq c1.currentSize, root.currentSize
test "flow with fixed size: ss:.5 child is placed in flow", ->
root = Element
size: 50
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc', size: ps:.5
TextElement size: "childrenSize", text: "<NAME>"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(25, 0), point(0, 25)]
assert.eq c1.currentSize, point 25
assert.eq root.currentSize, point 50
test "all full-width", ->
root = Element
size: hch:1, w:50
childrenLayout: "flow"
RectangleElement color: '#fcc', size: wpw:1, h:10
RectangleElement color: '#cfc', size: wpw:1, h:10
RectangleElement color: '#ccf', size: wpw:1, h:10
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(0, 10), point(0, 20)]
test "all full-height", ->
root = Element
size: wcw:1, h:50
childrenLayout: "flow"
RectangleElement color: '#fcc', size: hph:1, w:10
RectangleElement color: '#cfc', size: hph:1, w:10
RectangleElement color: '#ccf', size: hph:1, w:10
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(10, 0), point(20, 0)]
testLogBitmap "flow with child ss:1 and child ww:1, h:10", ->
root:newRoot = Element
size: cs:1
RectangleElement color: '#eee', size: ps:1
root = Element
size: cs:1
padding: 10
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc'
TextElement size: "childrenSize", text: "<NAME>"
c2 = RectangleElement color: '#777', size: wpw:1, h:10
TextElement size: "childrenSize", text: "world."
test: ->
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(0, 12), point(0, 22)]
assert.eq c1.currentSize, root.currentSize.sub(20)
assert.within root.currentSize, point(61, 54), point(62, 54)
testLogBitmap "padding, right-aligned with inFlow:false child", ->
root:
root = Element
size: cs:1 #, max: ww:1
padding: 10
childrenLayout: "flow"
childrenAlignment: "right"
c1 = RectangleElement name:"inflowfalse", color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "<NAME>"
c2 = RectangleElement name:"h-line", color: '#777', size: wpw:1, h:10
TextElement size: "childrenSize", text: "world."
test: ->
assert.eq root.currentSize.sub(20), c1.currentSize
stateEpochTest "min layout with children-dependent height", ->
p = Element
size:175
childrenLayout: "flow"
name: "parent"
c = Element
name: "child"
size:
x: (ps) -> ps.x
y: (ps, cs) -> max 35, cs.y
->
assert.eq c.currentSize, point 175, 35
stateEpochTest "flow and update", ->
Element
size: 200
childrenLayout: "flow"
Element
size: w:125, h:50
child = Element
size: w:125, hch:1
grandchild = RectangleElement
size: w:125, h:50
color: "red"
->
l1 = child.currentLocation
assert.neq l1, point()
grandchild.color = "blue"
->
l2 = child.currentLocation
assert.eq l1, l2
assert.neq l2, point()
| true | Foundation = require 'art-foundation'
Atomic = require 'art-atomic'
Canvas = require 'art-canvas'
Engine = require 'art-engine'
StateEpochTestHelper = require '../../StateEpochTestHelper'
{inspect, log, isArray, min, max} = Foundation
{point, matrix, Matrix} = Atomic
{stateEpochTest, drawAndTestElement} = StateEpochTestHelper
{Element, TextElement, RectangleElement} = require 'art-engine/Factories'
{LinearLayout} = Engine.Layout
testLogBitmap = (name, setup, tests...) ->
test name, ->
{root, test} = setup()
root.toBitmapBasic area:"logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap, name
test?()
module.exports = suite: ->
testLogBitmap "flow layout", ->
root: root = Element
size: 100
childrenLayout: "flow"
RectangleElement color:"red", size: 30
RectangleElement color:"green", size: 50
RectangleElement color:"blue", size: 40
test: ->
assert.eq sizes = (c.currentSize for c in root.children), [point(30), point(50), point(40)]
assert.eq locations = (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(0, 50)]
log sizes: sizes, locations:locations
drawAndTestElement "flow and childrenLayout (constrained)", ->
element: root = Element
size:
w: (ps, cs) -> min 100, cs.x
hch: 1
name: "flow and childrenLayout element"
childrenLayout: "flow"
RectangleElement size: 30, color: "red"
RectangleElement size: 50, color: "green"
RectangleElement size: 40, color: "blue"
test: ->
assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(0, 50)]
assert.eq root.currentSize, point 80, 90
drawAndTestElement "flow and childrenLayout (unconstrained)", ->
element: root = Element
size:
wcw: 1
h: (ps, cs) -> min 100, cs.y
name: "flow and childrenLayout element"
childrenLayout: "flow"
RectangleElement size: 30, color: "red"
RectangleElement size: 50, color: "green"
RectangleElement size: 40, color: "blue"
test: ->
assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(30, 0), point(80, 0)]
assert.eq root.currentSize, point 120, 50
testLogBitmap "horizontal line should be the width of the wider word", ->
root: root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
c1 = TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
c2 = RectangleElement color: '#ccc', size: wpw:1, h:10
c3 = TextElement size: "childrenSize", text: "world."
# test: ->
# assert.eq (c.currentLocation for c in root.children), [point(0, 0), point(0, 20), point(0, 30)]
# assert.within c2.currentSize, point(41, 10), point(42, 10)
# assert.within root.currentSize, point(41, 50), point(42, 50)
testLogBitmap "horizontal line with right alignment", ->
root: root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
childrenAlignment: "right"
c1 = TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
c2 = RectangleElement color: '#ccc', size: wpw:1, h:10
c3 = TextElement size: "childrenSize", text: "world."
test: ->
assert.within c1.currentLocation, point(25,0), point(26,0)
assert.eq c2.currentLocation, point 0, 12
assert.eq c3.currentLocation, point 0, 22
assert.within c2.currentSize, point(41, 10), point(42, 10)
assert.within root.currentSize, point(41, 34), point(42, 34)
test "flow with layout {scs:1}: child with layout ss:1 should work the same with or without inFlow: false, ", ->
root = Element
size:
w: (ps, cs) -> min 50, cs.x
hch: 1
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc' # has size:point0 for flow because it's size is parent-circular
c2 = RectangleElement color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(), point(0, 12)]
assert.eq c1.currentSize, root.currentSize
assert.eq c2.currentSize, root.currentSize
assert.within root.currentSize, point(41, 24), point(42, 24)
test "flow with fixed size: inFlow: false required to have background", ->
root = Element
size: 50
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(0, 12)]
assert.eq c1.currentSize, root.currentSize
test "flow with fixed size: ss:.5 child is placed in flow", ->
root = Element
size: 50
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc', size: ps:.5
TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
TextElement size: "childrenSize", text: "world."
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(25, 0), point(0, 25)]
assert.eq c1.currentSize, point 25
assert.eq root.currentSize, point 50
test "all full-width", ->
root = Element
size: hch:1, w:50
childrenLayout: "flow"
RectangleElement color: '#fcc', size: wpw:1, h:10
RectangleElement color: '#cfc', size: wpw:1, h:10
RectangleElement color: '#ccf', size: wpw:1, h:10
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(0, 10), point(0, 20)]
test "all full-height", ->
root = Element
size: wcw:1, h:50
childrenLayout: "flow"
RectangleElement color: '#fcc', size: hph:1, w:10
RectangleElement color: '#cfc', size: hph:1, w:10
RectangleElement color: '#ccf', size: hph:1, w:10
root.toBitmapBasic area: "logicalArea", elementToTargetMatrix:Matrix.scale(2)
.then (bitmap) ->
log bitmap
assert.eq (c.currentLocation for c in root.children), [point(), point(10, 0), point(20, 0)]
testLogBitmap "flow with child ss:1 and child ww:1, h:10", ->
root:newRoot = Element
size: cs:1
RectangleElement color: '#eee', size: ps:1
root = Element
size: cs:1
padding: 10
childrenLayout: "flow"
c1 = RectangleElement color: '#ccc'
TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
c2 = RectangleElement color: '#777', size: wpw:1, h:10
TextElement size: "childrenSize", text: "world."
test: ->
assert.eq (c.currentLocation for c in root.children), [point(), point(), point(0, 12), point(0, 22)]
assert.eq c1.currentSize, root.currentSize.sub(20)
assert.within root.currentSize, point(61, 54), point(62, 54)
testLogBitmap "padding, right-aligned with inFlow:false child", ->
root:
root = Element
size: cs:1 #, max: ww:1
padding: 10
childrenLayout: "flow"
childrenAlignment: "right"
c1 = RectangleElement name:"inflowfalse", color: '#ccc', inFlow: false
TextElement size: "childrenSize", text: "PI:NAME:<NAME>END_PI"
c2 = RectangleElement name:"h-line", color: '#777', size: wpw:1, h:10
TextElement size: "childrenSize", text: "world."
test: ->
assert.eq root.currentSize.sub(20), c1.currentSize
stateEpochTest "min layout with children-dependent height", ->
p = Element
size:175
childrenLayout: "flow"
name: "parent"
c = Element
name: "child"
size:
x: (ps) -> ps.x
y: (ps, cs) -> max 35, cs.y
->
assert.eq c.currentSize, point 175, 35
stateEpochTest "flow and update", ->
Element
size: 200
childrenLayout: "flow"
Element
size: w:125, h:50
child = Element
size: w:125, hch:1
grandchild = RectangleElement
size: w:125, h:50
color: "red"
->
l1 = child.currentLocation
assert.neq l1, point()
grandchild.color = "blue"
->
l2 = child.currentLocation
assert.eq l1, l2
assert.neq l2, point()
|
[
{
"context": "year it is in the simulation.\n\nhttps://github.com/NicMcPhee/whooping-crane-model\n\nCopyright (c) 2015 Nic McPh",
"end": 116,
"score": 0.9975280165672302,
"start": 107,
"tag": "USERNAME",
"value": "NicMcPhee"
},
{
"context": "NicMcPhee/whooping-crane-model\n\nCopyrigh... | src/lib/clock.coffee | NicMcPhee/whooping-crane-model | 0 | ###
The global clock for the simulation that knows what
year it is in the simulation.
https://github.com/NicMcPhee/whooping-crane-model
Copyright (c) 2015 Nic McPhee
Licensed under the MIT license.
###
'use strict'
# Global simulation clock: tracks the current simulation year as
# class-level state shared by the whole process.
class Clock
  # Current simulation year; starts at zero.
  @currentYear: 0

  # Rewind the clock to year zero.
  @reset: ->
    @currentYear = 0

  # Advance the simulation by a single year.
  @incrementYear: ->
    @currentYear += 1

  # Jump the clock directly to the given year.
  @setYear: (year) ->
    @currentYear = year

module.exports = Clock
| 187269 | ###
The global clock for the simulation that knows what
year it is in the simulation.
https://github.com/NicMcPhee/whooping-crane-model
Copyright (c) 2015 <NAME>
Licensed under the MIT license.
###
'use strict'
class Clock
@currentYear: 0
@reset: ->
@currentYear = 0
@incrementYear: ->
@currentYear = @currentYear + 1
@setYear: (year) ->
@currentYear = year
module.exports = Clock
| true | ###
The global clock for the simulation that knows what
year it is in the simulation.
https://github.com/NicMcPhee/whooping-crane-model
Copyright (c) 2015 PI:NAME:<NAME>END_PI
Licensed under the MIT license.
###
'use strict'
class Clock
@currentYear: 0
@reset: ->
@currentYear = 0
@incrementYear: ->
@currentYear = @currentYear + 1
@setYear: (year) ->
@currentYear = year
module.exports = Clock
|
[
{
"context": "agic constant to generate handshake\nKEY_SUFFIX = \"258EAFA5-E914-47DA-95CA-C5AB0DC85B11\"\n\nlowerObjKeys = (obj) ->\n keys = (key for key o",
"end": 380,
"score": 0.999731183052063,
"start": 344,
"tag": "KEY",
"value": "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
},
{
"con... | service/node_modules/node-simple-router/src/ws.coffee | sesam-community/slack | 21 | events = require("events")
http = require("http")
crypto = require("crypto")
util = require("util")
URL = require('url')
uuid = require('./uuid')
# opcodes for WebSocket frames
# http://tools.ietf.org/html/rfc6455#section-5.2
opcodes =
TEXT: 1
BINARY: 2
CLOSE: 8
PING: 9
PONG: 10
#magic constant to generate handshake
KEY_SUFFIX = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
# Return a copy of obj with every key lower-cased (values unchanged).
# Used to read HTTP headers case-insensitively.
lowerObjKeys = (obj) ->
  lowered = {}
  lowered[name.toLowerCase()] = value for name, value of obj
  lowered
# Compute the Sec-WebSocket-Accept value for a client handshake key:
# base64(SHA-1(key + magic GUID)), per RFC 6455 section 4.2.2.
hashWebSocketKey = (key) ->
  hasher = crypto.createHash "sha1"
  hasher.update key + KEY_SUFFIX, "ascii"
  hasher.digest "base64"
# Random 16-byte nonce, base64-encoded, for the client's
# Sec-WebSocket-Key request header (RFC 6455 section 4.1).
# NOTE(review): `new Buffer(n)` is deprecated and uninitialized, though
# every byte is overwritten below; Buffer.alloc would be safer.
# Math.random is not crypto-strength — acceptable here since the
# handshake key is a nonce, not a secret.
genWebSocketKey = ->
  key = new Buffer(16)
  for _, index in key
    key.writeUInt8 Math.floor(Math.random() * 256), index
  key.toString('base64')
# Random 4-byte masking key for client-to-server frames
# (RFC 6455 section 5.3).
genMask = ->
  mask = new Buffer(4)
  for _, index in mask
    mask.writeUInt8 Math.floor(Math.random() * 256), index
  mask
# XOR each data byte with the repeating 4-byte mask (RFC 6455 §5.3).
# XOR masking is symmetric, so this same routine both masks and unmasks.
unmask = (maskBytes, data) ->
  result = new Buffer(data.length)
  result[index] = (maskBytes[index % 4] ^ byte) for byte, index in data
  result
# Build one complete WebSocket frame (FIN always set) around `payload`.
# `opcode` selects the frame type (see `opcodes`); when `useMask` is true
# a fresh 4-byte mask is generated and applied — required for
# client-to-server frames (RFC 6455 sections 5.2 / 5.3).
encodeMessage = (opcode, payload, useMask = false) ->
  buf = undefined
  mask = undefined
  maskLen = if useMask then 4 else 0
  # first byte: fin and opcode
  b1 = 0x80 | opcode # always send message as one frame (fin)
  # second byte: mask and length part 1
  # followed by 0, 2, or 4 additional bytes of continued length
  b2 = if useMask then 0x80 else 0
  length = payload.length
  if useMask
    mask = genMask()
    payload = unmask(mask, payload) # XOR masking is symmetric: this masks
  if length < 126
    buf = new Buffer(payload.length + 2 + 0 + maskLen) # zero extra bytes
    b2 |= length
    buf.writeUInt8 b1, 0
    buf.writeUInt8 b2, 1
    payload.copy(buf, 2 + maskLen)
    mask.copy(buf, 2) if useMask
  else if length < (1 << 16)
    buf = new Buffer(payload.length + 2 + 2 + maskLen) # two bytes extra
    b2 |= 126
    buf.writeUInt8 b1, 0
    buf.writeUInt8 b2, 1
    # add two byte length
    buf.writeUInt16BE length, 2
    payload.copy(buf, 4 + maskLen)
    mask.copy(buf, 4) if useMask
  else
    buf = new Buffer(payload.length + 2 + 8 + maskLen) # eight bytes extra
    b2 |= 127
    buf.writeUInt8 b1, 0
    buf.writeUInt8 b2, 1
    # add eight byte length
    # note: this implementation cannot handle lengths greater than 2^32
    # the 32 bit length is prefixed with 0x0000
    buf.writeUInt32BE 0, 2
    buf.writeUInt32BE length, 6
    payload.copy buf, 10 + maskLen
    mask.copy buf, 10 if useMask
  #console.log "Returning this buffer:", buf
  buf
# Client-side WebSocket connection. Issues the HTTP Upgrade request for
# `url` ('ws:' or 'wss:' scheme required) and, once upgraded, wires frame
# parsing onto the raw socket. Emits 'open' after the upgrade completes
# and 'close' (1006) if the socket drops without a close handshake.
# `options` may carry Sec-WebSocket-Protocol / Sec-WebSocket-Extensions.
# NOTE(review): 'wss:' URLs are accepted (port defaults to 443) but the
# request always goes through the plain `http` module — TLS does not
# appear to be handled here; confirm before relying on wss.
WebSocketClientConnection = (url, options) ->
  parsed_url = URL.parse(url)
  throw new TypeError "URL scheme must be 'ws' or 'wss'" if parsed_url.protocol not in ['ws:', 'wss:']
  self = @
  @options =
    hostname: parsed_url.hostname
    port: parsed_url.port or (if parsed_url.protocol.match /ss/ then 443 else 80)
    path: parsed_url.path or "/"
    headers: {}
  @options.headers.Host = "#{@options.hostname}:#{@options.port}"
  @options.headers.Connection = "Upgrade"
  @options.headers.Upgrade = "websocket"
  @options.headers.Origin = "#{if parsed_url.protocol.match /ss/ then 'https' else 'http'}://#{@options.hostname}:#{@options.port}"
  @options.headers['Sec-WebSocket-Version'] = 13
  @options.headers['Sec-WebSocket-Key'] = genWebSocketKey()
  @options.headers['Sec-WebSocket-Protocol'] = options['Sec-WebSocket-Protocol'] if options?['Sec-WebSocket-Protocol']?
  @options.headers['Sec-WebSocket-Extensions'] = options['Sec-WebSocket-Extensions'] if options?['Sec-WebSocket-Extensions']?
  @request = http.request @options
  @request.on 'upgrade', (response, socket, upgradeHead) ->
    self.socket = socket
    self.socket.on 'error', (err) ->
      console.log 'Client Socket error:', err.message
    self.socket.on "data", (buf) ->
      #console.log "Raw data:", buf
      self.buffer = Buffer.concat([
        self.buffer
        buf
      ])
      # process buffer while it contains complete frames
      continue while self._processBuffer()
      return
    self.socket.on "close", (had_error) ->
      unless self.closed
        self.emit "close", 1006
        self.closed = true
      return
    self.emit 'open', if self.id then self.id else null
  @request.end()
  @buffer = new Buffer(0)
  @closed = false
  @currentRoundTrip = 0
  return
util.inherits WebSocketClientConnection, events.EventEmitter
Object.defineProperty WebSocketClientConnection::, 'readyState',
get: -> @socket?.readyState
# Client frames are always masked (useMask = true, RFC 6455 §5.1);
# silently a no-op while the socket is not (yet) established.
WebSocketClientConnection::_doSend = (opcode, payload) ->
  @socket?.write encodeMessage(opcode, payload, true)
  return
# Server-side WebSocket connection. Immediately writes the RFC 6455
# section 4.2.2 handshake response on `socket` (accept key derived from
# the client's sec-websocket-key; the first offered subprotocol is echoed
# back, if any) and wires frame parsing. Emits 'close' (1006) if the
# socket drops without a close handshake.
WebSocketServerConnection = (request, socket, upgradeHead) ->
  self = @
  key = hashWebSocketKey(lowerObjKeys(request.headers)["sec-websocket-key"])
  # pick the first protocol the client offered, or null if none
  protocol = (->
    if 'sec-websocket-protocol' of request.headers
      protocols = lowerObjKeys(request.headers)["sec-websocket-protocol"].split /\s*,\s*/
      #console.log "Protocol: #{protocols[0]}"
      return protocols[0]
    else
      return null
  )()
  lines = []
  # handshake response
  # http://tools.ietf.org/html/rfc6455#section-4.2.2
  lines.push "HTTP/1.1 101 Web Socket Protocol Handshake\r\n"
  lines.push "Upgrade: WebSocket\r\n"
  lines.push "Connection: Upgrade\r\n"
  lines.push "sec-websocket-accept: #{key}"
  lines.push "\r\nsec-websocket-protocol: #{protocol}" if protocol
  lines.push "\r\n\r\n"
  socket.write lines.join('')
  socket.on 'connect', (evt) ->
    self.emit 'open', if self.id then self.id else null
  socket.on 'error', (err) ->
    console.log 'Server Socket error:', err.message
  socket.on "data", (buf) ->
    self.buffer = Buffer.concat([
      self.buffer
      buf
    ])
    # process buffer while it contains complete frames
    continue while self._processBuffer()
    return
  socket.on "close", (had_error) ->
    unless self.closed
      self.emit "close", 1006
      self.closed = true
    return
  # initialize connection state
  @request = request
  @socket = socket
  @buffer = new Buffer(0)
  @closed = false
  @currentRoundTrip = 0
  return
util.inherits WebSocketServerConnection, events.EventEmitter
Object.defineProperty WebSocketServerConnection::, 'readyState',
get: -> @socket.readyState
# Ping method
# The payload carries the send time in epoch milliseconds so the PONG
# branch of _handleFrame can compute the round-trip time.
WebSocketClientConnection::ping = WebSocketServerConnection::ping = ->
  @_doSend opcodes.PING, new Buffer(new Date().getTime().toString())
# Send a text or binary message on the WebSocket connection
# Buffers go out as BINARY frames, strings as TEXT; any other value is
# JSON-stringified and sent as TEXT, with an Error thrown if that fails.
WebSocketClientConnection::send = WebSocketServerConnection::send = (obj) ->
  opcode = undefined
  payload = undefined
  if Buffer.isBuffer(obj)
    opcode = opcodes.BINARY
    payload = obj
  else if typeof obj is "string"
    opcode = opcodes.TEXT
    # create a new buffer containing the UTF-8 encoded string
    payload = new Buffer(obj, "utf8")
  else
    try
      obj = JSON.stringify obj
      opcode = opcodes.TEXT
      payload = new Buffer(obj, "utf8")
    catch e
      throw new Error("Cannot send object. Must be string or Buffer")
  @_doSend opcode, payload
  return
# Close the WebSocket connection
# Sends a CLOSE frame (2-byte status `code` followed by the `reason`
# text when a code is given; empty body otherwise) and then tears the
# raw socket down. Teardown errors are only logged.
WebSocketClientConnection::close = WebSocketServerConnection::close = (code, reason) ->
  opcode = opcodes.CLOSE
  buffer = undefined
  # encode close and reason
  if code
    buffer = new Buffer(Buffer.byteLength(reason) + 2)
    buffer.writeUInt16BE code, 0
    buffer.write reason, 2
  else
    buffer = new Buffer(0)
  @_doSend opcode, buffer
  @closed = true
  try
    @socket.end()
    @socket.destroy()
  catch e
    console.log "Error while destroying underlying raw socket:", e.message
  return
# Process incoming bytes
# Try to parse one frame from the front of @buffer. Returns true when a
# complete frame was consumed (the data handler loops to drain the rest);
# returns undefined when more bytes are needed.
# NOTE(review): `fin` is read but never used — fragmented messages are
# not reassembled; each frame is dispatched as-is.
WebSocketClientConnection::_processBuffer = WebSocketServerConnection::_processBuffer = ->
  buf = @buffer
  # insufficient data read
  return if buf.length < 2
  idx = 2
  b1 = buf.readUInt8(0)
  fin = b1 & 0x80
  opcode = b1 & 0x0f # low four bits
  b2 = buf.readUInt8(1)
  mask = b2 & 0x80
  length = b2 & 0x7f # low 7 bits
  if length > 125
    # insufficient data read
    return if buf.length < 8
    if length is 126
      length = buf.readUInt16BE(2)
      idx += 2
    else if length is 127
      # discard high 4 bits because this server cannot handle huge lengths
      highBits = buf.readUInt32BE(2)
      @close 1009, "" unless highBits is 0
      length = buf.readUInt32BE(6)
      idx += 8
  # insufficient data read
  return if buf.length < (idx + (if mask isnt 0 then 4 else 0) + length)
  if mask isnt 0
    maskBytes = buf.slice(idx, idx + 4)
    idx += 4
  payload = buf.slice(idx, idx + length)
  if mask isnt 0
    payload = unmask(maskBytes, payload)
  @_handleFrame opcode, payload
  @buffer = buf.slice(idx + length)
  true
# Dispatch one parsed frame by opcode: TEXT/BINARY emit 'data'; PING is
# answered with an echoing PONG; PONG updates @currentRoundTrip (seconds)
# and emits 'heartbeat'; CLOSE replies in kind and emits 'close'; any
# other opcode closes the connection with 1002 (protocol error).
WebSocketClientConnection::_handleFrame = WebSocketServerConnection::_handleFrame = (opcode, buffer) ->
  payload = undefined
  switch opcode
    when opcodes.TEXT
      payload = buffer.toString("utf8")
      @emit "data", opcode, payload
    when opcodes.BINARY
      payload = buffer
      @emit "data", opcode, payload
    when opcodes.PING
      # respond to pings with pongs
      @_doSend opcodes.PONG, buffer
    # process pongs
    when opcodes.PONG
      #process.stdout.write "\nReceived PONG: #{buffer.toString('utf8')}\n"
      # PONG payload echoes the epoch-ms timestamp ping() sent
      pong_millis = new Date().getTime()
      ping_millis = parseInt(buffer.toString('utf8'))
      @currentRoundTrip = (pong_millis - ping_millis) / 1000
      @emit "heartbeat", @currentRoundTrip, pong_millis
    when opcodes.CLOSE
      # parse close and reason
      code = undefined
      reason = undefined
      if buffer.length >= 2
        code = buffer.readUInt16BE(0)
        reason = buffer.toString("utf8", 2)
      @close code, reason
      @emit "close", code, reason
    else
      @close 1002, "unknown opcode"
      @emit "close", 1002, "unknown opcode"
  return
# Format and send a WebSocket message
# Server frames are sent unmasked (useMask = false, RFC 6455 §5.1);
# silently a no-op if there is no socket.
WebSocketServerConnection::_doSend = (opcode, payload) ->
  @socket?.write encodeMessage(opcode, payload, false)
  return
# Wraps a per-connection handler function; refuses construction unless
# a plain function is supplied.
WebSocketServer = (handler) ->
  valid = handler and handler.constructor.name is "Function"
  throw new Error("Must provide a socket handler function to instantiate a WebSocketServer") unless valid
  @connectionHandler = handler
  return
util.inherits WebSocketServer, events.EventEmitter
# Start serving WebSocket upgrades.
# `port` may be: an existing http.Server; a numeric or string port (a
# stub HTTP server is created and bound, `host` defaulting to 0.0.0.0);
# or any object exposing a _handle. Upgrade requests whose URL path is
# not exactly `route` are silently ignored. Each accepted connection is
# handed to @connectionHandler and pinged every 2 seconds (starting after
# 1s); the ping interval is cleared when the connection closes.
WebSocketServer::listen = (port, host, route = "/") ->
  srv = undefined
  self = @
  switch port.constructor.name
    when "Server"
      srv = port
    when "String"
      srv = http.createServer((request, response) ->
        response.end "websocket server"
        return
      )
      srv.listen port
    when "Number"
      srv = http.createServer((request, response) ->
        response.end "websocket server"
        return
      )
      srv.listen port, (if host then host else "0.0.0.0")
    else
      if port._handle
        srv = port
      else
        throw new TypeError "WebSocketServer only listens on something that has a _handle."
  srv.on 'listening', => @emit 'listening'
  srv.on "upgrade", (request, socket, upgradeHead) ->
    if URL.parse(request.url).path isnt route
      return
      #console.log "websocket out of path, aborting."
      #ws = new WebSocketServerConnection(request, socket, upgradeHead)
      #ws.close()
    else
      ws = new WebSocketServerConnection(request, socket, upgradeHead)
      self.connectionHandler ws
      setTimeout (-> ws.periodicPing = setInterval (-> ws.ping() if ws.readyState is 'open'), 2000), 1000
      ws.on 'close', ->
        #console.log "Closing server websocket connection", ws.id
        clearInterval ws.periodicPing if ws.periodicPing?
      self.emit 'upgrade'
###
# Didn't work because request doesn't register upgrade event. Must be done at server level.
WebSocketServer::listenOnRoute = (router, path, socket_handler_fn = null) ->
self = @
socket_handler_fn = socket_handler_fn or self.connectionHandler # use ad-hoc socket handler if provided, else use "default" socket handler
obj = router.get_route_handler(path, 'get')
if obj
http_handler_fn = obj.handler_obj.handler
else
http_handler_fn = (request, response) ->
response.end 'websocket server listening at ' + path
path = "/#{path}" unless path.charAt(0) is "/"
router.get path, (request, response) ->
if request.headers['upgrade'] or request.headers['Upgrade']
( (response, socket, upgradeHead) ->
console.log "Received upgrade request on path: #{request.url}"
ws = new WebSocketServerConnection(request, socket, upgradeHead)
socket_handler_fn ws
setTimeout (-> ws.periodicPing = setInterval (-> ws.ping() if ws.readyState is 'open'), 2000), 1000
ws.on 'close', ->
#console.log "Closing server websocket connection", ws.id
clearInterval ws.periodicPing if ws.periodicPing?
self.emit 'upgrade')(null, request.socket, '')
http_handler_fn request, response
###
# Convenience factory for WebSocketServer.
createWebSocketServer = (socket_handler_fn) ->
  new WebSocketServer(socket_handler_fn)
module?.exports = exports = {createWebSocketServer, WebSocketServer, WebSocketServerConnection, WebSocketClientConnection, opcodes}
#Test to execute when invoked stand-alone.
# Starts a demo server on port 8000 that echoes every text message back
# reversed.
test = ->
  reverseServer = createWebSocketServer((sock) ->
    sock.on "data", (opcode, data) ->
      sock.send data.split("").reverse().join("")
      return
    return
  )
  reverseServer.listen 8000
  console.log "Reverse WebSocket Server listening on port 8000"
  return
test() unless module?.parent
| 66009 | events = require("events")
http = require("http")
crypto = require("crypto")
util = require("util")
URL = require('url')
uuid = require('./uuid')
# opcodes for WebSocket frames
# http://tools.ietf.org/html/rfc6455#section-5.2
opcodes =
TEXT: 1
BINARY: 2
CLOSE: 8
PING: 9
PONG: 10
#magic constant to generate handshake
KEY_SUFFIX = "<KEY>"
lowerObjKeys = (obj) ->
keys = (key for key of obj)
lkeys = keys.map (key) -> key.toLowerCase()
resp = {}
resp[lkeys[index]] = obj[keys[index]] for key, index in keys
resp
hashWebSocketKey = (key) ->
sha1 = crypto.createHash("sha1")
sha1.update key + KEY_SUFFIX, "ascii"
sha1.digest "base64"
genWebSocketKey = ->
key = new Buffer(16)
for _, index in key
key.writeUInt8 Math.floor(Math.random() * 256), index
key.toString('base64')
genMask = ->
mask = new Buffer(4)
for _, index in mask
mask.writeUInt8 Math.floor(Math.random() * 256), index
mask
unmask = (maskBytes, data) ->
payload = new Buffer(data.length)
i = 0
while i < data.length
payload[i] = maskBytes[i % 4] ^ data[i]
i++
payload
encodeMessage = (opcode, payload, useMask = false) ->
buf = undefined
mask = undefined
maskLen = if useMask then 4 else 0
# first byte: fin and opcode
b1 = 0x80 | opcode # always send message as one frame (fin)
# second byte: mask and length part 1
# followed by 0, 2, or 4 additional bytes of continued length
b2 = if useMask then 0x80 else 0
length = payload.length
if useMask
mask = genMask()
payload = unmask(mask, payload)
if length < 126
buf = new Buffer(payload.length + 2 + 0 + maskLen) # zero extra bytes
b2 |= length
buf.writeUInt8 b1, 0
buf.writeUInt8 b2, 1
payload.copy(buf, 2 + maskLen)
mask.copy(buf, 2) if useMask
else if length < (1 << 16)
buf = new Buffer(payload.length + 2 + 2 + maskLen) # two bytes extra
b2 |= 126
buf.writeUInt8 b1, 0
buf.writeUInt8 b2, 1
# add two byte length
buf.writeUInt16BE length, 2
payload.copy(buf, 4 + maskLen)
mask.copy(buf, 4) if useMask
else
buf = new Buffer(payload.length + 2 + 8 + maskLen) # eight bytes extra
b2 |= 127
buf.writeUInt8 b1, 0
buf.writeUInt8 b2, 1
# add eight byte length
# note: this implementation cannot handle lengths greater than 2^32
# the 32 bit length is prefixed with 0x0000
buf.writeUInt32BE 0, 2
buf.writeUInt32BE length, 6
payload.copy buf, 10 + maskLen
mask.copy buf, 10 if useMask
#console.log "Returning this buffer:", buf
buf
WebSocketClientConnection = (url, options) ->
parsed_url = URL.parse(url)
throw new TypeError "URL scheme must be 'ws' or 'wss'" if parsed_url.protocol not in ['ws:', 'wss:']
self = @
@options =
hostname: parsed_url.hostname
port: parsed_url.port or (if parsed_url.protocol.match /ss/ then 443 else 80)
path: parsed_url.path or "/"
headers: {}
@options.headers.Host = "#{@options.hostname}:#{@options.port}"
@options.headers.Connection = "Upgrade"
@options.headers.Upgrade = "websocket"
@options.headers.Origin = "#{if parsed_url.protocol.match /ss/ then 'https' else 'http'}://#{@options.hostname}:#{@options.port}"
@options.headers['Sec-WebSocket-Version'] = 13
@options.headers['Sec-WebSocket-Key'] = genWebSocketKey()
@options.headers['Sec-WebSocket-Protocol'] = options['Sec-WebSocket-Protocol'] if options?['Sec-WebSocket-Protocol']?
@options.headers['Sec-WebSocket-Extensions'] = options['Sec-WebSocket-Extensions'] if options?['Sec-WebSocket-Extensions']?
@request = http.request @options
@request.on 'upgrade', (response, socket, upgradeHead) ->
self.socket = socket
self.socket.on 'error', (err) ->
console.log 'Client Socket error:', err.message
self.socket.on "data", (buf) ->
#console.log "Raw data:", buf
self.buffer = Buffer.concat([
self.buffer
buf
])
# process buffer while it contains complete frames
continue while self._processBuffer()
return
self.socket.on "close", (had_error) ->
unless self.closed
self.emit "close", 1006
self.closed = true
return
self.emit 'open', if self.id then self.id else null
@request.end()
@buffer = new Buffer(0)
@closed = false
@currentRoundTrip = 0
return
util.inherits WebSocketClientConnection, events.EventEmitter
Object.defineProperty WebSocketClientConnection::, 'readyState',
get: -> @socket?.readyState
WebSocketClientConnection::_doSend = (opcode, payload) ->
@socket?.write encodeMessage(opcode, payload, true)
return
WebSocketServerConnection = (request, socket, upgradeHead) ->
self = @
key = <KEY>(lowerObjKeys(request.headers)["<KEY>"])
protocol = (->
if 'sec-websocket-protocol' of request.headers
protocols = lowerObjKeys(request.headers)["sec-websocket-protocol"].split /\s*,\s*/
#console.log "Protocol: #{protocols[0]}"
return protocols[0]
else
return null
)()
lines = []
# handshake response
# http://tools.ietf.org/html/rfc6455#section-4.2.2
lines.push "HTTP/1.1 101 Web Socket Protocol Handshake\r\n"
lines.push "Upgrade: WebSocket\r\n"
lines.push "Connection: Upgrade\r\n"
lines.push "sec-websocket-accept: #{key}"
lines.push "\r\nsec-websocket-protocol: #{protocol}" if protocol
lines.push "\r\n\r\n"
socket.write lines.join('')
socket.on 'connect', (evt) ->
self.emit 'open', if self.id then self.id else null
socket.on 'error', (err) ->
console.log 'Server Socket error:', err.message
socket.on "data", (buf) ->
self.buffer = Buffer.concat([
self.buffer
buf
])
# process buffer while it contains complete frames
continue while self._processBuffer()
return
socket.on "close", (had_error) ->
unless self.closed
self.emit "close", 1006
self.closed = true
return
# initialize connection state
@request = request
@socket = socket
@buffer = new Buffer(0)
@closed = false
@currentRoundTrip = 0
return
util.inherits WebSocketServerConnection, events.EventEmitter
Object.defineProperty WebSocketServerConnection::, 'readyState',
get: -> @socket.readyState
# Ping method
WebSocketClientConnection::ping = WebSocketServerConnection::ping = ->
@_doSend opcodes.PING, new Buffer(new Date().getTime().toString())
# Send a text or binary message on the WebSocket connection
WebSocketClientConnection::send = WebSocketServerConnection::send = (obj) ->
opcode = undefined
payload = undefined
if Buffer.isBuffer(obj)
opcode = opcodes.BINARY
payload = obj
else if typeof obj is "string"
opcode = opcodes.TEXT
# create a new buffer containing the UTF-8 encoded string
payload = new Buffer(obj, "utf8")
else
try
obj = JSON.stringify obj
opcode = opcodes.TEXT
payload = new Buffer(obj, "utf8")
catch e
throw new Error("Cannot send object. Must be string or Buffer")
@_doSend opcode, payload
return
# Close the WebSocket connection
WebSocketClientConnection::close = WebSocketServerConnection::close = (code, reason) ->
opcode = opcodes.CLOSE
buffer = undefined
# encode close and reason
if code
buffer = new Buffer(Buffer.byteLength(reason) + 2)
buffer.writeUInt16BE code, 0
buffer.write reason, 2
else
buffer = new Buffer(0)
@_doSend opcode, buffer
@closed = true
try
@socket.end()
@socket.destroy()
catch e
console.log "Error while destroying underlying raw socket:", e.message
return
# Process incoming bytes
WebSocketClientConnection::_processBuffer = WebSocketServerConnection::_processBuffer = ->
buf = @buffer
# insufficient data read
return if buf.length < 2
idx = 2
b1 = buf.readUInt8(0)
fin = b1 & 0x80
opcode = b1 & 0x0f # low four bits
b2 = buf.readUInt8(1)
mask = b2 & 0x80
length = b2 & 0x7f # low 7 bits
if length > 125
# insufficient data read
return if buf.length < 8
if length is 126
length = buf.readUInt16BE(2)
idx += 2
else if length is 127
# discard high 4 bits because this server cannot handle huge lengths
highBits = buf.readUInt32BE(2)
@close 1009, "" unless highBits is 0
length = buf.readUInt32BE(6)
idx += 8
# insufficient data read
return if buf.length < (idx + (if mask isnt 0 then 4 else 0) + length)
if mask isnt 0
maskBytes = buf.slice(idx, idx + 4)
idx += 4
payload = buf.slice(idx, idx + length)
if mask isnt 0
payload = unmask(maskBytes, payload)
@_handleFrame opcode, payload
@buffer = buf.slice(idx + length)
true
WebSocketClientConnection::_handleFrame = WebSocketServerConnection::_handleFrame = (opcode, buffer) ->
payload = undefined
switch opcode
when opcodes.TEXT
payload = buffer.toString("utf8")
@emit "data", opcode, payload
when opcodes.BINARY
payload = buffer
@emit "data", opcode, payload
when opcodes.PING
# respond to pings with pongs
@_doSend opcodes.PONG, buffer
# process pongs
when opcodes.PONG
#process.stdout.write "\nReceived PONG: #{buffer.toString('utf8')}\n"
pong_millis = new Date().getTime()
ping_millis = parseInt(buffer.toString('utf8'))
@currentRoundTrip = (pong_millis - ping_millis) / 1000
@emit "heartbeat", @currentRoundTrip, pong_millis
when opcodes.CLOSE
# parse close and reason
code = undefined
reason = undefined
if buffer.length >= 2
code = buffer.readUInt16BE(0)
reason = buffer.toString("utf8", 2)
@close code, reason
@emit "close", code, reason
else
@close 1002, "unknown opcode"
@emit "close", 1002, "unknown opcode"
return
# Format and send a WebSocket message
WebSocketServerConnection::_doSend = (opcode, payload) ->
@socket?.write encodeMessage(opcode, payload, false)
return
WebSocketServer = (handler) ->
if handler and handler.constructor.name is "Function"
@connectionHandler = handler
else
throw new Error("Must provide a socket handler function to instantiate a WebSocketServer")
return
util.inherits WebSocketServer, events.EventEmitter
WebSocketServer::listen = (port, host, route = "/") ->
srv = undefined
self = @
switch port.constructor.name
when "Server"
srv = port
when "String"
srv = http.createServer((request, response) ->
response.end "websocket server"
return
)
srv.listen port
when "Number"
srv = http.createServer((request, response) ->
response.end "websocket server"
return
)
srv.listen port, (if host then host else "0.0.0.0")
else
if port._handle
srv = port
else
throw new TypeError "WebSocketServer only listens on something that has a _handle."
srv.on 'listening', => @emit 'listening'
srv.on "upgrade", (request, socket, upgradeHead) ->
if URL.parse(request.url).path isnt route
return
#console.log "websocket out of path, aborting."
#ws = new WebSocketServerConnection(request, socket, upgradeHead)
#ws.close()
else
ws = new WebSocketServerConnection(request, socket, upgradeHead)
self.connectionHandler ws
setTimeout (-> ws.periodicPing = setInterval (-> ws.ping() if ws.readyState is 'open'), 2000), 1000
ws.on 'close', ->
#console.log "Closing server websocket connection", ws.id
clearInterval ws.periodicPing if ws.periodicPing?
self.emit 'upgrade'
###
# Didn't work because request doesn't register upgrade event. Must be done at server level.
WebSocketServer::listenOnRoute = (router, path, socket_handler_fn = null) ->
self = @
socket_handler_fn = socket_handler_fn or self.connectionHandler # use ad-hoc socket handler if provided, else use "default" socket handler
obj = router.get_route_handler(path, 'get')
if obj
http_handler_fn = obj.handler_obj.handler
else
http_handler_fn = (request, response) ->
response.end 'websocket server listening at ' + path
path = "/#{path}" unless path.charAt(0) is "/"
router.get path, (request, response) ->
if request.headers['upgrade'] or request.headers['Upgrade']
( (response, socket, upgradeHead) ->
console.log "Received upgrade request on path: #{request.url}"
ws = new WebSocketServerConnection(request, socket, upgradeHead)
socket_handler_fn ws
setTimeout (-> ws.periodicPing = setInterval (-> ws.ping() if ws.readyState is 'open'), 2000), 1000
ws.on 'close', ->
#console.log "Closing server websocket connection", ws.id
clearInterval ws.periodicPing if ws.periodicPing?
self.emit 'upgrade')(null, request.socket, '')
http_handler_fn request, response
###
createWebSocketServer = (socket_handler_fn) ->
new WebSocketServer(socket_handler_fn)
module?.exports = exports = {createWebSocketServer, WebSocketServer, WebSocketServerConnection, WebSocketClientConnection, opcodes}
#Test to execute when invoked stand-alone.
test = ->
reverseServer = createWebSocketServer((sock) ->
sock.on "data", (opcode, data) ->
sock.send data.split("").reverse().join("")
return
return
)
reverseServer.listen 8000
console.log "Reverse WebSocket Server listening on port 8000"
return
test() unless module?.parent
| true | events = require("events")
http = require("http")
crypto = require("crypto")
util = require("util")
URL = require('url')
uuid = require('./uuid')
# opcodes for WebSocket frames
# http://tools.ietf.org/html/rfc6455#section-5.2
opcodes =
TEXT: 1
BINARY: 2
CLOSE: 8
PING: 9
PONG: 10
#magic constant to generate handshake
KEY_SUFFIX = "PI:KEY:<KEY>END_PI"
lowerObjKeys = (obj) ->
keys = (key for key of obj)
lkeys = keys.map (key) -> key.toLowerCase()
resp = {}
resp[lkeys[index]] = obj[keys[index]] for key, index in keys
resp
hashWebSocketKey = (key) ->
sha1 = crypto.createHash("sha1")
sha1.update key + KEY_SUFFIX, "ascii"
sha1.digest "base64"
genWebSocketKey = ->
key = new Buffer(16)
for _, index in key
key.writeUInt8 Math.floor(Math.random() * 256), index
key.toString('base64')
genMask = ->
mask = new Buffer(4)
for _, index in mask
mask.writeUInt8 Math.floor(Math.random() * 256), index
mask
unmask = (maskBytes, data) ->
payload = new Buffer(data.length)
i = 0
while i < data.length
payload[i] = maskBytes[i % 4] ^ data[i]
i++
payload
encodeMessage = (opcode, payload, useMask = false) ->
buf = undefined
mask = undefined
maskLen = if useMask then 4 else 0
# first byte: fin and opcode
b1 = 0x80 | opcode # always send message as one frame (fin)
# second byte: mask and length part 1
# followed by 0, 2, or 4 additional bytes of continued length
b2 = if useMask then 0x80 else 0
length = payload.length
if useMask
mask = genMask()
payload = unmask(mask, payload)
if length < 126
buf = new Buffer(payload.length + 2 + 0 + maskLen) # zero extra bytes
b2 |= length
buf.writeUInt8 b1, 0
buf.writeUInt8 b2, 1
payload.copy(buf, 2 + maskLen)
mask.copy(buf, 2) if useMask
else if length < (1 << 16)
buf = new Buffer(payload.length + 2 + 2 + maskLen) # two bytes extra
b2 |= 126
buf.writeUInt8 b1, 0
buf.writeUInt8 b2, 1
# add two byte length
buf.writeUInt16BE length, 2
payload.copy(buf, 4 + maskLen)
mask.copy(buf, 4) if useMask
else
buf = new Buffer(payload.length + 2 + 8 + maskLen) # eight bytes extra
b2 |= 127
buf.writeUInt8 b1, 0
buf.writeUInt8 b2, 1
# add eight byte length
# note: this implementation cannot handle lengths greater than 2^32
# the 32 bit length is prefixed with 0x0000
buf.writeUInt32BE 0, 2
buf.writeUInt32BE length, 6
payload.copy buf, 10 + maskLen
mask.copy buf, 10 if useMask
#console.log "Returning this buffer:", buf
buf
WebSocketClientConnection = (url, options) ->
parsed_url = URL.parse(url)
throw new TypeError "URL scheme must be 'ws' or 'wss'" if parsed_url.protocol not in ['ws:', 'wss:']
self = @
@options =
hostname: parsed_url.hostname
port: parsed_url.port or (if parsed_url.protocol.match /ss/ then 443 else 80)
path: parsed_url.path or "/"
headers: {}
@options.headers.Host = "#{@options.hostname}:#{@options.port}"
@options.headers.Connection = "Upgrade"
@options.headers.Upgrade = "websocket"
@options.headers.Origin = "#{if parsed_url.protocol.match /ss/ then 'https' else 'http'}://#{@options.hostname}:#{@options.port}"
@options.headers['Sec-WebSocket-Version'] = 13
@options.headers['Sec-WebSocket-Key'] = genWebSocketKey()
@options.headers['Sec-WebSocket-Protocol'] = options['Sec-WebSocket-Protocol'] if options?['Sec-WebSocket-Protocol']?
@options.headers['Sec-WebSocket-Extensions'] = options['Sec-WebSocket-Extensions'] if options?['Sec-WebSocket-Extensions']?
@request = http.request @options
@request.on 'upgrade', (response, socket, upgradeHead) ->
self.socket = socket
self.socket.on 'error', (err) ->
console.log 'Client Socket error:', err.message
self.socket.on "data", (buf) ->
#console.log "Raw data:", buf
self.buffer = Buffer.concat([
self.buffer
buf
])
# process buffer while it contains complete frames
continue while self._processBuffer()
return
self.socket.on "close", (had_error) ->
unless self.closed
self.emit "close", 1006
self.closed = true
return
self.emit 'open', if self.id then self.id else null
@request.end()
@buffer = new Buffer(0)
@closed = false
@currentRoundTrip = 0
return
util.inherits WebSocketClientConnection, events.EventEmitter
Object.defineProperty WebSocketClientConnection::, 'readyState',
get: -> @socket?.readyState
WebSocketClientConnection::_doSend = (opcode, payload) ->
@socket?.write encodeMessage(opcode, payload, true)
return
WebSocketServerConnection = (request, socket, upgradeHead) ->
self = @
key = PI:KEY:<KEY>END_PI(lowerObjKeys(request.headers)["PI:KEY:<KEY>END_PI"])
protocol = (->
if 'sec-websocket-protocol' of request.headers
protocols = lowerObjKeys(request.headers)["sec-websocket-protocol"].split /\s*,\s*/
#console.log "Protocol: #{protocols[0]}"
return protocols[0]
else
return null
)()
lines = []
# handshake response
# http://tools.ietf.org/html/rfc6455#section-4.2.2
lines.push "HTTP/1.1 101 Web Socket Protocol Handshake\r\n"
lines.push "Upgrade: WebSocket\r\n"
lines.push "Connection: Upgrade\r\n"
lines.push "sec-websocket-accept: #{key}"
lines.push "\r\nsec-websocket-protocol: #{protocol}" if protocol
lines.push "\r\n\r\n"
socket.write lines.join('')
socket.on 'connect', (evt) ->
self.emit 'open', if self.id then self.id else null
socket.on 'error', (err) ->
console.log 'Server Socket error:', err.message
socket.on "data", (buf) ->
self.buffer = Buffer.concat([
self.buffer
buf
])
# process buffer while it contains complete frames
continue while self._processBuffer()
return
socket.on "close", (had_error) ->
unless self.closed
self.emit "close", 1006
self.closed = true
return
# initialize connection state
@request = request
@socket = socket
@buffer = new Buffer(0)
@closed = false
@currentRoundTrip = 0
return
util.inherits WebSocketServerConnection, events.EventEmitter
Object.defineProperty WebSocketServerConnection::, 'readyState',
get: -> @socket.readyState
# Ping method
WebSocketClientConnection::ping = WebSocketServerConnection::ping = ->
@_doSend opcodes.PING, new Buffer(new Date().getTime().toString())
# Send a text or binary message on the WebSocket connection
WebSocketClientConnection::send = WebSocketServerConnection::send = (obj) ->
opcode = undefined
payload = undefined
if Buffer.isBuffer(obj)
opcode = opcodes.BINARY
payload = obj
else if typeof obj is "string"
opcode = opcodes.TEXT
# create a new buffer containing the UTF-8 encoded string
payload = new Buffer(obj, "utf8")
else
try
obj = JSON.stringify obj
opcode = opcodes.TEXT
payload = new Buffer(obj, "utf8")
catch e
throw new Error("Cannot send object. Must be string or Buffer")
@_doSend opcode, payload
return
# Close the WebSocket connection
WebSocketClientConnection::close = WebSocketServerConnection::close = (code, reason) ->
opcode = opcodes.CLOSE
buffer = undefined
# encode close and reason
if code
buffer = new Buffer(Buffer.byteLength(reason) + 2)
buffer.writeUInt16BE code, 0
buffer.write reason, 2
else
buffer = new Buffer(0)
@_doSend opcode, buffer
@closed = true
try
@socket.end()
@socket.destroy()
catch e
console.log "Error while destroying underlying raw socket:", e.message
return
# Process incoming bytes
WebSocketClientConnection::_processBuffer = WebSocketServerConnection::_processBuffer = ->
buf = @buffer
# insufficient data read
return if buf.length < 2
idx = 2
b1 = buf.readUInt8(0)
fin = b1 & 0x80
opcode = b1 & 0x0f # low four bits
b2 = buf.readUInt8(1)
mask = b2 & 0x80
length = b2 & 0x7f # low 7 bits
if length > 125
# insufficient data read
return if buf.length < 8
if length is 126
length = buf.readUInt16BE(2)
idx += 2
else if length is 127
# discard high 4 bits because this server cannot handle huge lengths
highBits = buf.readUInt32BE(2)
@close 1009, "" unless highBits is 0
length = buf.readUInt32BE(6)
idx += 8
# insufficient data read
return if buf.length < (idx + (if mask isnt 0 then 4 else 0) + length)
if mask isnt 0
maskBytes = buf.slice(idx, idx + 4)
idx += 4
payload = buf.slice(idx, idx + length)
if mask isnt 0
payload = unmask(maskBytes, payload)
@_handleFrame opcode, payload
@buffer = buf.slice(idx + length)
true
WebSocketClientConnection::_handleFrame = WebSocketServerConnection::_handleFrame = (opcode, buffer) ->
payload = undefined
switch opcode
when opcodes.TEXT
payload = buffer.toString("utf8")
@emit "data", opcode, payload
when opcodes.BINARY
payload = buffer
@emit "data", opcode, payload
when opcodes.PING
# respond to pings with pongs
@_doSend opcodes.PONG, buffer
# process pongs
when opcodes.PONG
#process.stdout.write "\nReceived PONG: #{buffer.toString('utf8')}\n"
pong_millis = new Date().getTime()
ping_millis = parseInt(buffer.toString('utf8'))
@currentRoundTrip = (pong_millis - ping_millis) / 1000
@emit "heartbeat", @currentRoundTrip, pong_millis
when opcodes.CLOSE
# parse close and reason
code = undefined
reason = undefined
if buffer.length >= 2
code = buffer.readUInt16BE(0)
reason = buffer.toString("utf8", 2)
@close code, reason
@emit "close", code, reason
else
@close 1002, "unknown opcode"
@emit "close", 1002, "unknown opcode"
return
# Format and send a WebSocket message
WebSocketServerConnection::_doSend = (opcode, payload) ->
@socket?.write encodeMessage(opcode, payload, false)
return
WebSocketServer = (handler) ->
if handler and handler.constructor.name is "Function"
@connectionHandler = handler
else
throw new Error("Must provide a socket handler function to instantiate a WebSocketServer")
return
util.inherits WebSocketServer, events.EventEmitter
WebSocketServer::listen = (port, host, route = "/") ->
srv = undefined
self = @
switch port.constructor.name
when "Server"
srv = port
when "String"
srv = http.createServer((request, response) ->
response.end "websocket server"
return
)
srv.listen port
when "Number"
srv = http.createServer((request, response) ->
response.end "websocket server"
return
)
srv.listen port, (if host then host else "0.0.0.0")
else
if port._handle
srv = port
else
throw new TypeError "WebSocketServer only listens on something that has a _handle."
srv.on 'listening', => @emit 'listening'
srv.on "upgrade", (request, socket, upgradeHead) ->
if URL.parse(request.url).path isnt route
return
#console.log "websocket out of path, aborting."
#ws = new WebSocketServerConnection(request, socket, upgradeHead)
#ws.close()
else
ws = new WebSocketServerConnection(request, socket, upgradeHead)
self.connectionHandler ws
setTimeout (-> ws.periodicPing = setInterval (-> ws.ping() if ws.readyState is 'open'), 2000), 1000
ws.on 'close', ->
#console.log "Closing server websocket connection", ws.id
clearInterval ws.periodicPing if ws.periodicPing?
self.emit 'upgrade'
###
# Didn't work because request doesn't register upgrade event. Must be done at server level.
WebSocketServer::listenOnRoute = (router, path, socket_handler_fn = null) ->
self = @
socket_handler_fn = socket_handler_fn or self.connectionHandler # use ad-hoc socket handler if provided, else use "default" socket handler
obj = router.get_route_handler(path, 'get')
if obj
http_handler_fn = obj.handler_obj.handler
else
http_handler_fn = (request, response) ->
response.end 'websocket server listening at ' + path
path = "/#{path}" unless path.charAt(0) is "/"
router.get path, (request, response) ->
if request.headers['upgrade'] or request.headers['Upgrade']
( (response, socket, upgradeHead) ->
console.log "Received upgrade request on path: #{request.url}"
ws = new WebSocketServerConnection(request, socket, upgradeHead)
socket_handler_fn ws
setTimeout (-> ws.periodicPing = setInterval (-> ws.ping() if ws.readyState is 'open'), 2000), 1000
ws.on 'close', ->
#console.log "Closing server websocket connection", ws.id
clearInterval ws.periodicPing if ws.periodicPing?
self.emit 'upgrade')(null, request.socket, '')
http_handler_fn request, response
###
createWebSocketServer = (socket_handler_fn) ->
new WebSocketServer(socket_handler_fn)
module?.exports = exports = {createWebSocketServer, WebSocketServer, WebSocketServerConnection, WebSocketClientConnection, opcodes}
#Test to execute when invoked stand-alone.
test = ->
reverseServer = createWebSocketServer((sock) ->
sock.on "data", (opcode, data) ->
sock.send data.split("").reverse().join("")
return
return
)
reverseServer.listen 8000
console.log "Reverse WebSocket Server listening on port 8000"
return
test() unless module?.parent
|
[
{
"context": "# Automatically creates project files\n#\n# Author: Anshul Kharbanda\n# Created: 10 - 20 - 2017\nSelectListView = requir",
"end": 89,
"score": 0.9998710751533508,
"start": 73,
"tag": "NAME",
"value": "Anshul Kharbanda"
}
] | lib/template-selector.coffee | andydevs/auto-create-files | 5 | # Auto Create Files
#
# Automatically creates project files
#
# Author: Anshul Kharbanda
# Created: 10 - 20 - 2017
SelectListView = require 'atom-select-list'
https = require 'https'
fs = require 'fs'
path = require 'path'
# Configuration for gitignore
GITHUB_API_CONFIG =
hostname: 'api.github.com'
headers:
'Accept': 'application/vnd.github.drax-preview+json'
'User-Agent': 'Atom-Gitignores-Package'
# Returns a github api config for the given path
#
# @param path the path of the api
#
# @return github api config
githubApiGet = (path) ->
config = GITHUB_API_CONFIG
config.path = path
config
# Pulls entire text from an https get
#
# @param url the url to get from
#
# @return entire text from https get
httpsPullText = (url, callback) ->
https.get githubApiGet(url), (response) ->
data = ''
response.on 'data', (chunk) ->
data += chunk
response.on 'end', ->
callback(data)
# Parses placeholders in text if text is MIT license
#
# @param text text to parse
#
# @return parsed text
parseMITLicense = (text) ->
return text.replace /\[(\w+)\]/, (match, word) ->
switch word
when 'year' then return (new Date()).getFullYear()
when 'fullname' then return atom.config.get('auto-create-files.fullname')
else return match
# Selection of Templates
class TemplateSelector
# Members
closePanel: null
filename: null
apiUrl: null
filepath: null
responseMapper: null
getSource: null
selectorView: null
# Creates a new TemplateSelectorView
constructor: (props) ->
# Get props
@closePanel = props.closePanel
@filename = props.filename
@apiUrl = props.apiUrl
getRootDir = () ->
dirs = atom.project.getDirectories()
defaultPath = dirs[0]?.path # first directory in tree view
return defaultPath if dirs.length < 2
activeFilePath = document.querySelector('.tree-view .selected')?.getPath?() ||
atom.workspace.getActivePaneItem()?.buffer?.file?.path
if activeFilePath?
for dir in dirs
return dir.path if activeFilePath.indexOf(dir.path) is 0
atom.notifications.addWarning(
"No project open in editor view: creating in '" + defaultPath + "'"
)
defaultPath
@filepath = path.join getRootDir(), @filename
@responseMapper = props.responseMapper
@getSource = props.getSource
# Create SelectListView
@selectorView = new SelectListView
items: []
elementForItem: (item) => @itemView(item)
didCancelSelection: => @closePanel()
didConfirmSelection: (type) => @create(type)
@selectorView.element.classList.add 'auto-create-files'
# Get gitignore templates
httpsPullText @apiUrl, (data) =>
console.log(data)
items = JSON.parse(data).map @responseMapper
console.log 'Available '+@filename+' templates:'
console.log items
@selectorView.update
items: items
# Destroy TemplateSelector
destroy: ->
@selectorView.destroy()
# View for item
itemView: (item) ->
elem = document.createElement 'li'
elem.textContent = item
elem
# Creates a new file of the given type
create: (type) ->
# Print message
console.log ('Creating '+type+'...')
# Get file and write
httpsPullText (@apiUrl+'/'+type), (data) =>
source = @getSource(JSON.parse data)
if type == 'MIT'
source = parseMITLicense source
fs.writeFile @filepath, source, (err) =>
throw err if err?
console.log (type+' '+@filename+' created!')
atom.notifications.addSuccess (type+' '+@filename+' created!')
# Close panel
@closePanel()
# Export class
module.exports = TemplateSelector
| 208159 | # Auto Create Files
#
# Automatically creates project files
#
# Author: <NAME>
# Created: 10 - 20 - 2017
SelectListView = require 'atom-select-list'
https = require 'https'
fs = require 'fs'
path = require 'path'
# Configuration for gitignore
GITHUB_API_CONFIG =
hostname: 'api.github.com'
headers:
'Accept': 'application/vnd.github.drax-preview+json'
'User-Agent': 'Atom-Gitignores-Package'
# Returns a github api config for the given path
#
# @param path the path of the api
#
# @return github api config
githubApiGet = (path) ->
config = GITHUB_API_CONFIG
config.path = path
config
# Pulls entire text from an https get
#
# @param url the url to get from
#
# @return entire text from https get
httpsPullText = (url, callback) ->
https.get githubApiGet(url), (response) ->
data = ''
response.on 'data', (chunk) ->
data += chunk
response.on 'end', ->
callback(data)
# Parses placeholders in text if text is MIT license
#
# @param text text to parse
#
# @return parsed text
parseMITLicense = (text) ->
return text.replace /\[(\w+)\]/, (match, word) ->
switch word
when 'year' then return (new Date()).getFullYear()
when 'fullname' then return atom.config.get('auto-create-files.fullname')
else return match
# Selection of Templates
class TemplateSelector
# Members
closePanel: null
filename: null
apiUrl: null
filepath: null
responseMapper: null
getSource: null
selectorView: null
# Creates a new TemplateSelectorView
constructor: (props) ->
# Get props
@closePanel = props.closePanel
@filename = props.filename
@apiUrl = props.apiUrl
getRootDir = () ->
dirs = atom.project.getDirectories()
defaultPath = dirs[0]?.path # first directory in tree view
return defaultPath if dirs.length < 2
activeFilePath = document.querySelector('.tree-view .selected')?.getPath?() ||
atom.workspace.getActivePaneItem()?.buffer?.file?.path
if activeFilePath?
for dir in dirs
return dir.path if activeFilePath.indexOf(dir.path) is 0
atom.notifications.addWarning(
"No project open in editor view: creating in '" + defaultPath + "'"
)
defaultPath
@filepath = path.join getRootDir(), @filename
@responseMapper = props.responseMapper
@getSource = props.getSource
# Create SelectListView
@selectorView = new SelectListView
items: []
elementForItem: (item) => @itemView(item)
didCancelSelection: => @closePanel()
didConfirmSelection: (type) => @create(type)
@selectorView.element.classList.add 'auto-create-files'
# Get gitignore templates
httpsPullText @apiUrl, (data) =>
console.log(data)
items = JSON.parse(data).map @responseMapper
console.log 'Available '+@filename+' templates:'
console.log items
@selectorView.update
items: items
# Destroy TemplateSelector
destroy: ->
@selectorView.destroy()
# View for item
itemView: (item) ->
elem = document.createElement 'li'
elem.textContent = item
elem
# Creates a new file of the given type
create: (type) ->
# Print message
console.log ('Creating '+type+'...')
# Get file and write
httpsPullText (@apiUrl+'/'+type), (data) =>
source = @getSource(JSON.parse data)
if type == 'MIT'
source = parseMITLicense source
fs.writeFile @filepath, source, (err) =>
throw err if err?
console.log (type+' '+@filename+' created!')
atom.notifications.addSuccess (type+' '+@filename+' created!')
# Close panel
@closePanel()
# Export class
module.exports = TemplateSelector
| true | # Auto Create Files
#
# Automatically creates project files
#
# Author: PI:NAME:<NAME>END_PI
# Created: 10 - 20 - 2017
SelectListView = require 'atom-select-list'
https = require 'https'
fs = require 'fs'
path = require 'path'
# Configuration for gitignore
GITHUB_API_CONFIG =
hostname: 'api.github.com'
headers:
'Accept': 'application/vnd.github.drax-preview+json'
'User-Agent': 'Atom-Gitignores-Package'
# Returns a github api config for the given path
#
# @param path the path of the api
#
# @return github api config
githubApiGet = (path) ->
config = GITHUB_API_CONFIG
config.path = path
config
# Pulls entire text from an https get
#
# @param url the url to get from
#
# @return entire text from https get
httpsPullText = (url, callback) ->
https.get githubApiGet(url), (response) ->
data = ''
response.on 'data', (chunk) ->
data += chunk
response.on 'end', ->
callback(data)
# Parses placeholders in text if text is MIT license
#
# @param text text to parse
#
# @return parsed text
parseMITLicense = (text) ->
return text.replace /\[(\w+)\]/, (match, word) ->
switch word
when 'year' then return (new Date()).getFullYear()
when 'fullname' then return atom.config.get('auto-create-files.fullname')
else return match
# Selection of Templates
class TemplateSelector
# Members
closePanel: null
filename: null
apiUrl: null
filepath: null
responseMapper: null
getSource: null
selectorView: null
# Creates a new TemplateSelectorView
constructor: (props) ->
# Get props
@closePanel = props.closePanel
@filename = props.filename
@apiUrl = props.apiUrl
getRootDir = () ->
dirs = atom.project.getDirectories()
defaultPath = dirs[0]?.path # first directory in tree view
return defaultPath if dirs.length < 2
activeFilePath = document.querySelector('.tree-view .selected')?.getPath?() ||
atom.workspace.getActivePaneItem()?.buffer?.file?.path
if activeFilePath?
for dir in dirs
return dir.path if activeFilePath.indexOf(dir.path) is 0
atom.notifications.addWarning(
"No project open in editor view: creating in '" + defaultPath + "'"
)
defaultPath
@filepath = path.join getRootDir(), @filename
@responseMapper = props.responseMapper
@getSource = props.getSource
# Create SelectListView
@selectorView = new SelectListView
items: []
elementForItem: (item) => @itemView(item)
didCancelSelection: => @closePanel()
didConfirmSelection: (type) => @create(type)
@selectorView.element.classList.add 'auto-create-files'
# Get gitignore templates
httpsPullText @apiUrl, (data) =>
console.log(data)
items = JSON.parse(data).map @responseMapper
console.log 'Available '+@filename+' templates:'
console.log items
@selectorView.update
items: items
# Destroy TemplateSelector
destroy: ->
@selectorView.destroy()
# View for item
itemView: (item) ->
elem = document.createElement 'li'
elem.textContent = item
elem
# Creates a new file of the given type
create: (type) ->
# Print message
console.log ('Creating '+type+'...')
# Get file and write
httpsPullText (@apiUrl+'/'+type), (data) =>
source = @getSource(JSON.parse data)
if type == 'MIT'
source = parseMITLicense source
fs.writeFile @filepath, source, (err) =>
throw err if err?
console.log (type+' '+@filename+' created!')
atom.notifications.addSuccess (type+' '+@filename+' created!')
# Close panel
@closePanel()
# Export class
module.exports = TemplateSelector
|
[
{
"context": "$output = getOutput $block\n\n if not key\n key = uniqueId()\n $block.attr('data-example', key)\n $outpu",
"end": 1008,
"score": 0.9951204061508179,
"start": 1000,
"tag": "KEY",
"value": "uniqueId"
}
] | node_modules/tether/docs/coffee/intro.coffee | ZebTheWizard/TwitchBot | 625 | {uniqueId} = Tether.Utils
SETUP_JS = """
yellowBox = $('.yellow-box', $output);
greenBox = $('.green-box', $output);
scrollBox = $('.scroll-box', $output);
"""
OUTPUT_HTML = (key) -> """
<div class="scroll-box">
<div class="scroll-content">
<div class="yellow-box" data-example="#{ key }"></div>
<div class="green-box" data-example="#{ key }"></div>
</div>
</div>
"""
tethers = {}
getOutput = ($block) ->
key = $block.data('example')
if key and typeof key is 'string'
return $("output[data-example='#{ key }']")
else
return $block.parents('pre').nextAll('output').first()
run = (key) ->
if typeof key is 'string'
$block = $("code[data-example='#{ key }']")
else
$block = key
key = $block.attr('data-example')
$output = getOutput $block
code = $block.text()
code = SETUP_JS + code
window.$output = $output
tethers[key] = eval code
setupBlock = ($block) ->
key = $block.data('example')
$output = getOutput $block
if not key
key = uniqueId()
$block.attr('data-example', key)
$output.attr('data-example', key)
$output.find('.tether-element').attr('data-example', key)
$output.html OUTPUT_HTML(key)
$scrollBox = $output.find('.scroll-box')
$scrollContent = $scrollBox.find('.scroll-content')
$scrollBox.scrollTop(parseInt($scrollContent.css('height')) / 2 - $scrollBox.height() / 2)
$scrollBox.scrollLeft(parseInt($scrollContent.css('width')) / 2 - $scrollBox.width() / 2)
setTimeout ->
$scrollBox.on 'scroll', ->
$output.addClass 'scrolled'
$scrollBox.css 'height', "#{ $block.parent().outerHeight() }px"
if not $output.attr('deactivated')?
run $block
$(document.body).on 'click', (e) ->
if $(e.target).is('output[deactivated]')
activate $(e.target)
false
else if $(e.target).is('output[activated]')
deactivate $(e.target)
false
activate = ($output) ->
$block = $output.prev().find('code')
run $block
$output.find('.tether-element').show()
key = $output.data('example')
$(tethers[key].element).show()
tethers[key].enable()
$output.removeAttr('deactivated')
$output.attr('activated', true)
deactivate = ($output) ->
$block = $output.prev().find('code')
key = $output.data('example')
tethers[key].disable()
$el = $(tethers[key].element)
$el.detach()
$output.find('.scroll-content').append $el
$el.hide()
$output.removeAttr('activated')
$output.attr('deactivated', true)
init = ->
$blocks = $('code[data-example]')
setupBlock($ block) for block in $blocks
window.EXECUTR_OPTIONS =
codeSelector: 'code[executable]'
$ init
| 35616 | {uniqueId} = Tether.Utils
SETUP_JS = """
yellowBox = $('.yellow-box', $output);
greenBox = $('.green-box', $output);
scrollBox = $('.scroll-box', $output);
"""
OUTPUT_HTML = (key) -> """
<div class="scroll-box">
<div class="scroll-content">
<div class="yellow-box" data-example="#{ key }"></div>
<div class="green-box" data-example="#{ key }"></div>
</div>
</div>
"""
tethers = {}
getOutput = ($block) ->
key = $block.data('example')
if key and typeof key is 'string'
return $("output[data-example='#{ key }']")
else
return $block.parents('pre').nextAll('output').first()
run = (key) ->
if typeof key is 'string'
$block = $("code[data-example='#{ key }']")
else
$block = key
key = $block.attr('data-example')
$output = getOutput $block
code = $block.text()
code = SETUP_JS + code
window.$output = $output
tethers[key] = eval code
setupBlock = ($block) ->
key = $block.data('example')
$output = getOutput $block
if not key
key = <KEY>()
$block.attr('data-example', key)
$output.attr('data-example', key)
$output.find('.tether-element').attr('data-example', key)
$output.html OUTPUT_HTML(key)
$scrollBox = $output.find('.scroll-box')
$scrollContent = $scrollBox.find('.scroll-content')
$scrollBox.scrollTop(parseInt($scrollContent.css('height')) / 2 - $scrollBox.height() / 2)
$scrollBox.scrollLeft(parseInt($scrollContent.css('width')) / 2 - $scrollBox.width() / 2)
setTimeout ->
$scrollBox.on 'scroll', ->
$output.addClass 'scrolled'
$scrollBox.css 'height', "#{ $block.parent().outerHeight() }px"
if not $output.attr('deactivated')?
run $block
$(document.body).on 'click', (e) ->
if $(e.target).is('output[deactivated]')
activate $(e.target)
false
else if $(e.target).is('output[activated]')
deactivate $(e.target)
false
activate = ($output) ->
$block = $output.prev().find('code')
run $block
$output.find('.tether-element').show()
key = $output.data('example')
$(tethers[key].element).show()
tethers[key].enable()
$output.removeAttr('deactivated')
$output.attr('activated', true)
deactivate = ($output) ->
$block = $output.prev().find('code')
key = $output.data('example')
tethers[key].disable()
$el = $(tethers[key].element)
$el.detach()
$output.find('.scroll-content').append $el
$el.hide()
$output.removeAttr('activated')
$output.attr('deactivated', true)
init = ->
$blocks = $('code[data-example]')
setupBlock($ block) for block in $blocks
window.EXECUTR_OPTIONS =
codeSelector: 'code[executable]'
$ init
| true | {uniqueId} = Tether.Utils
SETUP_JS = """
yellowBox = $('.yellow-box', $output);
greenBox = $('.green-box', $output);
scrollBox = $('.scroll-box', $output);
"""
OUTPUT_HTML = (key) -> """
<div class="scroll-box">
<div class="scroll-content">
<div class="yellow-box" data-example="#{ key }"></div>
<div class="green-box" data-example="#{ key }"></div>
</div>
</div>
"""
tethers = {}
getOutput = ($block) ->
key = $block.data('example')
if key and typeof key is 'string'
return $("output[data-example='#{ key }']")
else
return $block.parents('pre').nextAll('output').first()
run = (key) ->
if typeof key is 'string'
$block = $("code[data-example='#{ key }']")
else
$block = key
key = $block.attr('data-example')
$output = getOutput $block
code = $block.text()
code = SETUP_JS + code
window.$output = $output
tethers[key] = eval code
setupBlock = ($block) ->
key = $block.data('example')
$output = getOutput $block
if not key
key = PI:KEY:<KEY>END_PI()
$block.attr('data-example', key)
$output.attr('data-example', key)
$output.find('.tether-element').attr('data-example', key)
$output.html OUTPUT_HTML(key)
$scrollBox = $output.find('.scroll-box')
$scrollContent = $scrollBox.find('.scroll-content')
$scrollBox.scrollTop(parseInt($scrollContent.css('height')) / 2 - $scrollBox.height() / 2)
$scrollBox.scrollLeft(parseInt($scrollContent.css('width')) / 2 - $scrollBox.width() / 2)
setTimeout ->
$scrollBox.on 'scroll', ->
$output.addClass 'scrolled'
$scrollBox.css 'height', "#{ $block.parent().outerHeight() }px"
if not $output.attr('deactivated')?
run $block
$(document.body).on 'click', (e) ->
if $(e.target).is('output[deactivated]')
activate $(e.target)
false
else if $(e.target).is('output[activated]')
deactivate $(e.target)
false
activate = ($output) ->
$block = $output.prev().find('code')
run $block
$output.find('.tether-element').show()
key = $output.data('example')
$(tethers[key].element).show()
tethers[key].enable()
$output.removeAttr('deactivated')
$output.attr('activated', true)
deactivate = ($output) ->
$block = $output.prev().find('code')
key = $output.data('example')
tethers[key].disable()
$el = $(tethers[key].element)
$el.detach()
$output.find('.scroll-content').append $el
$el.hide()
$output.removeAttr('activated')
$output.attr('deactivated', true)
init = ->
$blocks = $('code[data-example]')
setupBlock($ block) for block in $blocks
window.EXECUTR_OPTIONS =
codeSelector: 'code[executable]'
$ init
|
[
{
"context": " (with the port!) to get to rundeck\n#\n# Author:\n# Liam Bennett\n\n_ = require('underscore')\nsys = require 'sys' # ",
"end": 1192,
"score": 0.9998548626899719,
"start": 1180,
"tag": "NAME",
"value": "Liam Bennett"
},
{
"context": " if executions.length > 0\n ... | src/rundeck.coffee | boie0025/hubot-rundeck | 18 | # Description
# Rundeck integration with hubot
#
# Dependencies:
# "underscore": "^1.6.0"
# "strftime": "^0.8.0"
# "xml2js": "^0.4.1"
#
# Commands:
# hubot rundeck projects [alias] - Gets a list of the projects for the given server alias
# hubut rundeck jobs '[project]' [alias] - Gets a list of all the jobs in the given project for the given server alias
# hubot rundeck trigger '[job]' '[project]' [alias] [args] - Triggers the given job for the given project
# hubot rundeck status '[job]' '[project]' [alias] - Shows the current status for the latest execution of the given job
# hubot rundeck show aliases - shows the aliases for the list of rundeck instances
# hubot rundeck add alias [alias name] [url] [authToken] - sets the alias for a given url and authentication token
# hubot rundeck clear alias [alias name] - removed the given alias
#
#rundeck show status of (.*) (?:in|for) (.*) (?:in|for) (.*)
# Notes:
# The server must be a fqdn (with the port!) to get to rundeck
#
# Author:
# Liam Bennett
_ = require('underscore')
sys = require 'sys' # Used for debugging
Parser = require('xml2js').Parser
_rundeckAliases = {}
class Rundeck
constructor: (@robot, @url, @authToken) ->
@logger = @robot.logger
@baseUrl = "#{@url}/api/12"
@headers =
"Accept": "application/xml"
"Content-Type": "application/xml"
"X-Rundeck-Auth-Token": "#{@authToken}"
@plainTextHeaders =
"Accept": "text/plain"
"Content-Type": "text/plain"
"X-Rundeck-Auth-Token": "#{@authToken}"
jobs: (project) -> new Jobs(@, project)
projects: -> new Projects(@)
executions: (job) -> new Executions(@, job)
getOutput: (url, cb) ->
@robot.http("#{@baseUrl}/#{url}").headers(@plainTextHeaders).get() (err, res, body) =>
if err?
@logger.err JSON.stringify(err)
else
cb body
get: (url, cb) ->
@logger.debug url
parser = new Parser()
@robot.http("#{@baseUrl}/#{url}").headers(@headers).get() (err, res, body) =>
console.log "#{@baseUrl}/#{url}"
if err?
@logger.error JSON.stringify(err)
else
parser.parseString body, (e, json) ->
cb json
class Projects
constructor: (@rundeck) ->
@logger = @rundeck.logger
list: (cb) ->
projects = []
@rundeck.get "projects", (results) ->
for project in results.projects.project
projects.push new Project(project)
cb projects
class Project
constructor: (data) ->
@name = data.name[0]
@description = data.description[0]
formatList: ->
"#{@name} - #{@description}"
class Jobs
constructor: (@rundeck, @project) ->
@logger = @rundeck.logger
list: (cb) ->
jobs = []
@rundeck.get "project/#{@project}/jobs", (results) ->
for job in results.jobs.job
jobs.push new Job(job)
cb jobs
find: (name, cb) ->
@list (jobs) =>
job = _.findWhere jobs, { name: name }
if job
cb job
else
cb false
run: (name, args, cb) ->
@find name, (job) =>
if job
uri = "job/#{job.id}/run"
if args?
uri += "?argString=#{args}"
@rundeck.get uri, (results) ->
cb job, results
else
cb null, false
class Job
constructor: (data) ->
@id = data["$"].id
@name = data.name[0]
@description = data.description[0]
@group = data.group[0]
@project = data.project[0]
formatList: ->
"#{@name} - #{@description}"
class Executions
constructor: (@rundeck, @job) ->
@logger = @rundeck.logger
list: (cb) ->
executions = []
@rundeck.get "job/#{@job.id}/executions", (results) ->
for execution in results.result.executions[0].execution
exec = new Execution(execution)
executions.push exec
cb executions
class Execution
constructor: (@data) ->
@id = data["$"].id
@href = data["$"].href
@status = data["$"].status
formatList: ->
"#{@id} - #{@status} - #{@href}"
module.exports = (robot) ->
logger = robot.logger
robot.brain.on 'loaded', ->
if robot.brain.data.rundeck_aliases?
_rundeckAliases = robot.brain.data.rundeck_aliases
showAliases = (msg) ->
if _rundeckAliases == null || Object.keys(_rundeckAliases).length == 0
msg.send("I cannot find any rundeck system aliases")
else
for alias of _rundeckAliases
msg.send("I found '#{alias}' as an alias for the system: #{_rundeckAliases[alias]['url']} - #{_rundeckAliases[alias]['authToken']}")
clearAlias = (msg, alias) ->
delete _rundeckAliases[alias]
robot.brain.data.rundeck_aliases = _rundeckAliases
msg.send("The rundeck system alias #{alias} has been removed")
setAlias = (msg, alias, url, token) ->
_rundeckAliases[alias] = { url: url, authToken: token }
robot.brain.data.rundeck_aliases = _rundeckAliases
msg.send("The rundeck system alias #{alias} for #{url} has been added to the brain")
#hubot rundeck projects myrundeck-alias
robot.respond /rundeck projects (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
alias = msg.match[1]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.projects().list (projects) ->
if projects.length > 0
for project in projects
msg.send project.formatList()
else
msg.send "No rundeck projects found."
#hubot rundeck 'MyProject' jobs myrundeck-alias
robot.respond /rundeck '(.*)' jobs (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
project = msg.match[1]
alias = msg.match[2]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).list (jobs) ->
if jobs.length > 0
for job in jobs
msg.send job.formatList()
else
msg.send "No jobs found for rundeck #{project}"
#hubot rundeck trigger 'my-job' 'MyProject' myrundeck-alias args:<optional args>
robot.respond /rundeck trigger '(.*)'\s'(.*)'\s([\w]+)(?: args:)?(.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
name = msg.match[1]
project = msg.match[2]
args = msg.match[4]
alias = msg.match[3]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).run name, args, (job, results) ->
if job
msg.send "Successfully triggered a run for the job: #{name}"
else
msg.send "Could not execute rundeck job \"#{name}\"."
robot.respond /rundeck status '(.*)' '(.*)' '(.*)'/i, (msg) ->
if msg.message.user.id is robot.name
return
name = msg.match[1]
project = msg.match[2]
alias = msg.match[3]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).find name, (job) ->
if job
rundeck.executions(job).list (executions) ->
if executions.length > 0
keys = []
for item in executions
keys.push item.id
key = keys.sort()[keys.length - 1]
for execution in executions
if execution.id == key
msg.send execution.formatList()
else
msg.send "No executions found"
else
msg.send "Could not find rundeck job \"#{name}\"."
robot.respond /rundeck show aliases/i, (msg) ->
if msg.message.user.id is robot.name
return
showAliases msg, (text) ->
msg.send(text)
robot.respond /rundeck add alias (.*) (.*) (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
setAlias msg, msg.match[1], msg.match[2], msg.match[3], (text) ->
msg.send(text)
robot.respond /rundeck clear alias (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
clearAlias msg, msg.match[1], (text) ->
msg.send(text)
| 147866 | # Description
# Rundeck integration with hubot
#
# Dependencies:
# "underscore": "^1.6.0"
# "strftime": "^0.8.0"
# "xml2js": "^0.4.1"
#
# Commands:
# hubot rundeck projects [alias] - Gets a list of the projects for the given server alias
# hubut rundeck jobs '[project]' [alias] - Gets a list of all the jobs in the given project for the given server alias
# hubot rundeck trigger '[job]' '[project]' [alias] [args] - Triggers the given job for the given project
# hubot rundeck status '[job]' '[project]' [alias] - Shows the current status for the latest execution of the given job
# hubot rundeck show aliases - shows the aliases for the list of rundeck instances
# hubot rundeck add alias [alias name] [url] [authToken] - sets the alias for a given url and authentication token
# hubot rundeck clear alias [alias name] - removed the given alias
#
#rundeck show status of (.*) (?:in|for) (.*) (?:in|for) (.*)
# Notes:
# The server must be a fqdn (with the port!) to get to rundeck
#
# Author:
# <NAME>
_ = require('underscore')
sys = require 'sys' # Used for debugging
Parser = require('xml2js').Parser
_rundeckAliases = {}
class Rundeck
constructor: (@robot, @url, @authToken) ->
@logger = @robot.logger
@baseUrl = "#{@url}/api/12"
@headers =
"Accept": "application/xml"
"Content-Type": "application/xml"
"X-Rundeck-Auth-Token": "#{@authToken}"
@plainTextHeaders =
"Accept": "text/plain"
"Content-Type": "text/plain"
"X-Rundeck-Auth-Token": "#{@authToken}"
jobs: (project) -> new Jobs(@, project)
projects: -> new Projects(@)
executions: (job) -> new Executions(@, job)
getOutput: (url, cb) ->
@robot.http("#{@baseUrl}/#{url}").headers(@plainTextHeaders).get() (err, res, body) =>
if err?
@logger.err JSON.stringify(err)
else
cb body
get: (url, cb) ->
@logger.debug url
parser = new Parser()
@robot.http("#{@baseUrl}/#{url}").headers(@headers).get() (err, res, body) =>
console.log "#{@baseUrl}/#{url}"
if err?
@logger.error JSON.stringify(err)
else
parser.parseString body, (e, json) ->
cb json
class Projects
constructor: (@rundeck) ->
@logger = @rundeck.logger
list: (cb) ->
projects = []
@rundeck.get "projects", (results) ->
for project in results.projects.project
projects.push new Project(project)
cb projects
class Project
constructor: (data) ->
@name = data.name[0]
@description = data.description[0]
formatList: ->
"#{@name} - #{@description}"
class Jobs
constructor: (@rundeck, @project) ->
@logger = @rundeck.logger
list: (cb) ->
jobs = []
@rundeck.get "project/#{@project}/jobs", (results) ->
for job in results.jobs.job
jobs.push new Job(job)
cb jobs
find: (name, cb) ->
@list (jobs) =>
job = _.findWhere jobs, { name: name }
if job
cb job
else
cb false
run: (name, args, cb) ->
@find name, (job) =>
if job
uri = "job/#{job.id}/run"
if args?
uri += "?argString=#{args}"
@rundeck.get uri, (results) ->
cb job, results
else
cb null, false
class Job
constructor: (data) ->
@id = data["$"].id
@name = data.name[0]
@description = data.description[0]
@group = data.group[0]
@project = data.project[0]
formatList: ->
"#{@name} - #{@description}"
class Executions
constructor: (@rundeck, @job) ->
@logger = @rundeck.logger
list: (cb) ->
executions = []
@rundeck.get "job/#{@job.id}/executions", (results) ->
for execution in results.result.executions[0].execution
exec = new Execution(execution)
executions.push exec
cb executions
class Execution
constructor: (@data) ->
@id = data["$"].id
@href = data["$"].href
@status = data["$"].status
formatList: ->
"#{@id} - #{@status} - #{@href}"
module.exports = (robot) ->
logger = robot.logger
robot.brain.on 'loaded', ->
if robot.brain.data.rundeck_aliases?
_rundeckAliases = robot.brain.data.rundeck_aliases
showAliases = (msg) ->
if _rundeckAliases == null || Object.keys(_rundeckAliases).length == 0
msg.send("I cannot find any rundeck system aliases")
else
for alias of _rundeckAliases
msg.send("I found '#{alias}' as an alias for the system: #{_rundeckAliases[alias]['url']} - #{_rundeckAliases[alias]['authToken']}")
clearAlias = (msg, alias) ->
delete _rundeckAliases[alias]
robot.brain.data.rundeck_aliases = _rundeckAliases
msg.send("The rundeck system alias #{alias} has been removed")
setAlias = (msg, alias, url, token) ->
_rundeckAliases[alias] = { url: url, authToken: token }
robot.brain.data.rundeck_aliases = _rundeckAliases
msg.send("The rundeck system alias #{alias} for #{url} has been added to the brain")
#hubot rundeck projects myrundeck-alias
robot.respond /rundeck projects (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
alias = msg.match[1]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.projects().list (projects) ->
if projects.length > 0
for project in projects
msg.send project.formatList()
else
msg.send "No rundeck projects found."
#hubot rundeck 'MyProject' jobs myrundeck-alias
robot.respond /rundeck '(.*)' jobs (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
project = msg.match[1]
alias = msg.match[2]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).list (jobs) ->
if jobs.length > 0
for job in jobs
msg.send job.formatList()
else
msg.send "No jobs found for rundeck #{project}"
#hubot rundeck trigger 'my-job' 'MyProject' myrundeck-alias args:<optional args>
robot.respond /rundeck trigger '(.*)'\s'(.*)'\s([\w]+)(?: args:)?(.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
name = msg.match[1]
project = msg.match[2]
args = msg.match[4]
alias = msg.match[3]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).run name, args, (job, results) ->
if job
msg.send "Successfully triggered a run for the job: #{name}"
else
msg.send "Could not execute rundeck job \"#{name}\"."
robot.respond /rundeck status '(.*)' '(.*)' '(.*)'/i, (msg) ->
if msg.message.user.id is robot.name
return
name = msg.match[1]
project = msg.match[2]
alias = msg.match[3]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).find name, (job) ->
if job
rundeck.executions(job).list (executions) ->
if executions.length > 0
keys = <KEY>
for item in executions
keys.push item.<KEY>
key = <KEY>keys.<KEY>
for execution in executions
if execution.id == key
msg.send execution.formatList()
else
msg.send "No executions found"
else
msg.send "Could not find rundeck job \"#{name}\"."
robot.respond /rundeck show aliases/i, (msg) ->
if msg.message.user.id is robot.name
return
showAliases msg, (text) ->
msg.send(text)
robot.respond /rundeck add alias (.*) (.*) (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
setAlias msg, msg.match[1], msg.match[2], msg.match[3], (text) ->
msg.send(text)
robot.respond /rundeck clear alias (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
clearAlias msg, msg.match[1], (text) ->
msg.send(text)
| true | # Description
# Rundeck integration with hubot
#
# Dependencies:
# "underscore": "^1.6.0"
# "strftime": "^0.8.0"
# "xml2js": "^0.4.1"
#
# Commands:
# hubot rundeck projects [alias] - Gets a list of the projects for the given server alias
# hubut rundeck jobs '[project]' [alias] - Gets a list of all the jobs in the given project for the given server alias
# hubot rundeck trigger '[job]' '[project]' [alias] [args] - Triggers the given job for the given project
# hubot rundeck status '[job]' '[project]' [alias] - Shows the current status for the latest execution of the given job
# hubot rundeck show aliases - shows the aliases for the list of rundeck instances
# hubot rundeck add alias [alias name] [url] [authToken] - sets the alias for a given url and authentication token
# hubot rundeck clear alias [alias name] - removed the given alias
#
#rundeck show status of (.*) (?:in|for) (.*) (?:in|for) (.*)
# Notes:
# The server must be a fqdn (with the port!) to get to rundeck
#
# Author:
# PI:NAME:<NAME>END_PI
_ = require('underscore')
sys = require 'sys' # Used for debugging
Parser = require('xml2js').Parser
_rundeckAliases = {}
class Rundeck
constructor: (@robot, @url, @authToken) ->
@logger = @robot.logger
@baseUrl = "#{@url}/api/12"
@headers =
"Accept": "application/xml"
"Content-Type": "application/xml"
"X-Rundeck-Auth-Token": "#{@authToken}"
@plainTextHeaders =
"Accept": "text/plain"
"Content-Type": "text/plain"
"X-Rundeck-Auth-Token": "#{@authToken}"
jobs: (project) -> new Jobs(@, project)
projects: -> new Projects(@)
executions: (job) -> new Executions(@, job)
getOutput: (url, cb) ->
@robot.http("#{@baseUrl}/#{url}").headers(@plainTextHeaders).get() (err, res, body) =>
if err?
@logger.err JSON.stringify(err)
else
cb body
get: (url, cb) ->
@logger.debug url
parser = new Parser()
@robot.http("#{@baseUrl}/#{url}").headers(@headers).get() (err, res, body) =>
console.log "#{@baseUrl}/#{url}"
if err?
@logger.error JSON.stringify(err)
else
parser.parseString body, (e, json) ->
cb json
class Projects
constructor: (@rundeck) ->
@logger = @rundeck.logger
list: (cb) ->
projects = []
@rundeck.get "projects", (results) ->
for project in results.projects.project
projects.push new Project(project)
cb projects
class Project
constructor: (data) ->
@name = data.name[0]
@description = data.description[0]
formatList: ->
"#{@name} - #{@description}"
class Jobs
constructor: (@rundeck, @project) ->
@logger = @rundeck.logger
list: (cb) ->
jobs = []
@rundeck.get "project/#{@project}/jobs", (results) ->
for job in results.jobs.job
jobs.push new Job(job)
cb jobs
find: (name, cb) ->
@list (jobs) =>
job = _.findWhere jobs, { name: name }
if job
cb job
else
cb false
run: (name, args, cb) ->
@find name, (job) =>
if job
uri = "job/#{job.id}/run"
if args?
uri += "?argString=#{args}"
@rundeck.get uri, (results) ->
cb job, results
else
cb null, false
class Job
constructor: (data) ->
@id = data["$"].id
@name = data.name[0]
@description = data.description[0]
@group = data.group[0]
@project = data.project[0]
formatList: ->
"#{@name} - #{@description}"
class Executions
constructor: (@rundeck, @job) ->
@logger = @rundeck.logger
list: (cb) ->
executions = []
@rundeck.get "job/#{@job.id}/executions", (results) ->
for execution in results.result.executions[0].execution
exec = new Execution(execution)
executions.push exec
cb executions
class Execution
constructor: (@data) ->
@id = data["$"].id
@href = data["$"].href
@status = data["$"].status
formatList: ->
"#{@id} - #{@status} - #{@href}"
module.exports = (robot) ->
logger = robot.logger
robot.brain.on 'loaded', ->
if robot.brain.data.rundeck_aliases?
_rundeckAliases = robot.brain.data.rundeck_aliases
showAliases = (msg) ->
if _rundeckAliases == null || Object.keys(_rundeckAliases).length == 0
msg.send("I cannot find any rundeck system aliases")
else
for alias of _rundeckAliases
msg.send("I found '#{alias}' as an alias for the system: #{_rundeckAliases[alias]['url']} - #{_rundeckAliases[alias]['authToken']}")
clearAlias = (msg, alias) ->
delete _rundeckAliases[alias]
robot.brain.data.rundeck_aliases = _rundeckAliases
msg.send("The rundeck system alias #{alias} has been removed")
setAlias = (msg, alias, url, token) ->
_rundeckAliases[alias] = { url: url, authToken: token }
robot.brain.data.rundeck_aliases = _rundeckAliases
msg.send("The rundeck system alias #{alias} for #{url} has been added to the brain")
#hubot rundeck projects myrundeck-alias
robot.respond /rundeck projects (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
alias = msg.match[1]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.projects().list (projects) ->
if projects.length > 0
for project in projects
msg.send project.formatList()
else
msg.send "No rundeck projects found."
#hubot rundeck 'MyProject' jobs myrundeck-alias
robot.respond /rundeck '(.*)' jobs (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
project = msg.match[1]
alias = msg.match[2]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).list (jobs) ->
if jobs.length > 0
for job in jobs
msg.send job.formatList()
else
msg.send "No jobs found for rundeck #{project}"
#hubot rundeck trigger 'my-job' 'MyProject' myrundeck-alias args:<optional args>
robot.respond /rundeck trigger '(.*)'\s'(.*)'\s([\w]+)(?: args:)?(.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
name = msg.match[1]
project = msg.match[2]
args = msg.match[4]
alias = msg.match[3]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).run name, args, (job, results) ->
if job
msg.send "Successfully triggered a run for the job: #{name}"
else
msg.send "Could not execute rundeck job \"#{name}\"."
robot.respond /rundeck status '(.*)' '(.*)' '(.*)'/i, (msg) ->
if msg.message.user.id is robot.name
return
name = msg.match[1]
project = msg.match[2]
alias = msg.match[3]
url = _rundeckAliases[alias]['url']
token = _rundeckAliases[alias]['authToken']
if url == null || url == undefined || token == null || token == undefined
msg.send "Do not recognise rundeck system alias #{alias}"
else
rundeck = new Rundeck(robot, url, token)
rundeck.jobs(project).find name, (job) ->
if job
rundeck.executions(job).list (executions) ->
if executions.length > 0
keys = PI:KEY:<KEY>END_PI
for item in executions
keys.push item.PI:KEY:<KEY>END_PI
key = PI:KEY:<KEY>END_PIkeys.PI:KEY:<KEY>END_PI
for execution in executions
if execution.id == key
msg.send execution.formatList()
else
msg.send "No executions found"
else
msg.send "Could not find rundeck job \"#{name}\"."
robot.respond /rundeck show aliases/i, (msg) ->
if msg.message.user.id is robot.name
return
showAliases msg, (text) ->
msg.send(text)
robot.respond /rundeck add alias (.*) (.*) (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
setAlias msg, msg.match[1], msg.match[2], msg.match[3], (text) ->
msg.send(text)
robot.respond /rundeck clear alias (.*)/i, (msg) ->
if msg.message.user.id is robot.name
return
clearAlias msg, msg.match[1], (text) ->
msg.send(text)
|
[
{
"context": " setPassword callback\n else\n password = setword\n if callback\n doc = {};\n doc[\"",
"end": 745,
"score": 0.9830820560455322,
"start": 738,
"tag": "PASSWORD",
"value": "setword"
}
] | src/index.coffee | NomadThanatos/CEVE-Login | 4 | fs = require "fs"
auth = require "./network.coffee"
execEVE = require "./exec.coffee"
{loadConfig, saveConfig} = require "./data.coffee"
doc = null
password = null
getData = (callback) ->
password = prompt "Please input local password.", ""
key = new Buffer password
loadConfig "./data.dat", key, (res, err) ->
if err
alert "Wrong local password!"
getData callback;
else
doc = res
callback doc
setPassword = (callback) ->
setword = prompt "Please set local password.", ""
if setword
confirmword = prompt "Please re-enter local password.", ""
if setword != confirmword
alert "The inputs do not match,please set again!"
setPassword callback
else
password = setword
if callback
doc = {};
doc["path"] = prompt "EVE run path:", "?:\\EVE\\bin\\exefile.exe"
doc["account"] = [];
key = new Buffer password
saveConfig "./data.dat", key, doc
callback doc
else
setPassword callback
render = (doc) ->
$("#commonSelect").empty()
$("#uncommonSelect").empty()
for acc in doc["account"]
if acc.common
$("#commonSelect").append("<option value=\"#{acc.username}\">#{acc.username}</option>");
else
$("#uncommonSelect").append("<option value=\"#{acc.username}\">#{acc.username}</option>");
$(document).ready ->
if fs.existsSync("./data.dat")
getData render
else
setPassword render
$("#moveLeft").click ->
list = []
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text();
for pending in list
for acc in doc["account"]
if pending == acc.username
acc.common = true
render doc
$("#moveRight").click ->
list = []
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text();
for pending in list
for acc in doc["account"]
if pending == acc.username
acc.common = false
render doc
$("#addAccount").click ->
tmp = {};
tmp["common"] = true;
tmp["username"] = prompt "Please input username.", ""
tmp["password"] = prompt "Please input password.", ""
if tmp["username"] and tmp["password"]
cardPath = prompt "Please input card path, if none, please enter nothing.", ""
if cardPath
file = fs.readFileSync cardPath, "utf-8"
alert file
tmp["card"] = {}
count = 0
codeArray = [];
for line in file.split('\r\n')
codeArray.push.apply codeArray, line.split(" ")
for i in ['1', '2', '3', '4', '5', '6', '7', '8', '9']
for c in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I']
tmp["card"][c + i] = codeArray[count]
count = count + 1
doc["account"].push tmp
render doc
else
alert "Username or password is null!"
$("#saveLayout").click ->
key = new Buffer password
saveConfig "./data.dat", key, doc
$("#delAccount").click ->
list = []
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text()
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text()
tmp = [];
for acc in doc["account"]
del = false
for pending in list
if pending == acc.username
del = true
if not del
tmp.push acc;
doc["account"] = tmp
render doc
$("#modifyPath").click ->
doc["path"] = prompt "EVE run path:", doc["path"]
$("#modifyPassword").click ->
setPassword null
$("#startCommon").click ->
for acc in doc["account"]
if acc.common
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
$("#startUncommon").click ->
for acc in doc["account"]
if not acc.common
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
$("#startSelected").click ->
list = [];
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text()
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text()
for pending in list
for acc in doc["account"]
if pending == acc.username
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
| 32026 | fs = require "fs"
auth = require "./network.coffee"
execEVE = require "./exec.coffee"
{loadConfig, saveConfig} = require "./data.coffee"
doc = null
password = null
getData = (callback) ->
password = prompt "Please input local password.", ""
key = new Buffer password
loadConfig "./data.dat", key, (res, err) ->
if err
alert "Wrong local password!"
getData callback;
else
doc = res
callback doc
setPassword = (callback) ->
setword = prompt "Please set local password.", ""
if setword
confirmword = prompt "Please re-enter local password.", ""
if setword != confirmword
alert "The inputs do not match,please set again!"
setPassword callback
else
password = <PASSWORD>
if callback
doc = {};
doc["path"] = prompt "EVE run path:", "?:\\EVE\\bin\\exefile.exe"
doc["account"] = [];
key = new Buffer password
saveConfig "./data.dat", key, doc
callback doc
else
setPassword callback
render = (doc) ->
$("#commonSelect").empty()
$("#uncommonSelect").empty()
for acc in doc["account"]
if acc.common
$("#commonSelect").append("<option value=\"#{acc.username}\">#{acc.username}</option>");
else
$("#uncommonSelect").append("<option value=\"#{acc.username}\">#{acc.username}</option>");
$(document).ready ->
if fs.existsSync("./data.dat")
getData render
else
setPassword render
$("#moveLeft").click ->
list = []
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text();
for pending in list
for acc in doc["account"]
if pending == acc.username
acc.common = true
render doc
$("#moveRight").click ->
list = []
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text();
for pending in list
for acc in doc["account"]
if pending == acc.username
acc.common = false
render doc
$("#addAccount").click ->
tmp = {};
tmp["common"] = true;
tmp["username"] = prompt "Please input username.", ""
tmp["password"] = prompt "Please input password.", ""
if tmp["username"] and tmp["password"]
cardPath = prompt "Please input card path, if none, please enter nothing.", ""
if cardPath
file = fs.readFileSync cardPath, "utf-8"
alert file
tmp["card"] = {}
count = 0
codeArray = [];
for line in file.split('\r\n')
codeArray.push.apply codeArray, line.split(" ")
for i in ['1', '2', '3', '4', '5', '6', '7', '8', '9']
for c in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I']
tmp["card"][c + i] = codeArray[count]
count = count + 1
doc["account"].push tmp
render doc
else
alert "Username or password is null!"
$("#saveLayout").click ->
key = new Buffer password
saveConfig "./data.dat", key, doc
$("#delAccount").click ->
list = []
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text()
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text()
tmp = [];
for acc in doc["account"]
del = false
for pending in list
if pending == acc.username
del = true
if not del
tmp.push acc;
doc["account"] = tmp
render doc
$("#modifyPath").click ->
doc["path"] = prompt "EVE run path:", doc["path"]
$("#modifyPassword").click ->
setPassword null
$("#startCommon").click ->
for acc in doc["account"]
if acc.common
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
$("#startUncommon").click ->
for acc in doc["account"]
if not acc.common
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
$("#startSelected").click ->
list = [];
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text()
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text()
for pending in list
for acc in doc["account"]
if pending == acc.username
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
| true | fs = require "fs"
auth = require "./network.coffee"
execEVE = require "./exec.coffee"
{loadConfig, saveConfig} = require "./data.coffee"
doc = null
password = null
getData = (callback) ->
password = prompt "Please input local password.", ""
key = new Buffer password
loadConfig "./data.dat", key, (res, err) ->
if err
alert "Wrong local password!"
getData callback;
else
doc = res
callback doc
setPassword = (callback) ->
setword = prompt "Please set local password.", ""
if setword
confirmword = prompt "Please re-enter local password.", ""
if setword != confirmword
alert "The inputs do not match,please set again!"
setPassword callback
else
password = PI:PASSWORD:<PASSWORD>END_PI
if callback
doc = {};
doc["path"] = prompt "EVE run path:", "?:\\EVE\\bin\\exefile.exe"
doc["account"] = [];
key = new Buffer password
saveConfig "./data.dat", key, doc
callback doc
else
setPassword callback
render = (doc) ->
$("#commonSelect").empty()
$("#uncommonSelect").empty()
for acc in doc["account"]
if acc.common
$("#commonSelect").append("<option value=\"#{acc.username}\">#{acc.username}</option>");
else
$("#uncommonSelect").append("<option value=\"#{acc.username}\">#{acc.username}</option>");
$(document).ready ->
if fs.existsSync("./data.dat")
getData render
else
setPassword render
$("#moveLeft").click ->
list = []
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text();
for pending in list
for acc in doc["account"]
if pending == acc.username
acc.common = true
render doc
$("#moveRight").click ->
list = []
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text();
for pending in list
for acc in doc["account"]
if pending == acc.username
acc.common = false
render doc
$("#addAccount").click ->
tmp = {};
tmp["common"] = true;
tmp["username"] = prompt "Please input username.", ""
tmp["password"] = prompt "Please input password.", ""
if tmp["username"] and tmp["password"]
cardPath = prompt "Please input card path, if none, please enter nothing.", ""
if cardPath
file = fs.readFileSync cardPath, "utf-8"
alert file
tmp["card"] = {}
count = 0
codeArray = [];
for line in file.split('\r\n')
codeArray.push.apply codeArray, line.split(" ")
for i in ['1', '2', '3', '4', '5', '6', '7', '8', '9']
for c in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I']
tmp["card"][c + i] = codeArray[count]
count = count + 1
doc["account"].push tmp
render doc
else
alert "Username or password is null!"
$("#saveLayout").click ->
key = new Buffer password
saveConfig "./data.dat", key, doc
$("#delAccount").click ->
list = []
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text()
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text()
tmp = [];
for acc in doc["account"]
del = false
for pending in list
if pending == acc.username
del = true
if not del
tmp.push acc;
doc["account"] = tmp
render doc
$("#modifyPath").click ->
doc["path"] = prompt "EVE run path:", doc["path"]
$("#modifyPassword").click ->
setPassword null
$("#startCommon").click ->
for acc in doc["account"]
if acc.common
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
$("#startUncommon").click ->
for acc in doc["account"]
if not acc.common
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
$("#startSelected").click ->
list = [];
$("#commonSelect :selected").each (i, selected) ->
list.push $(selected).text()
$("#uncommonSelect :selected").each (i, selected) ->
list.push $(selected).text()
for pending in list
for acc in doc["account"]
if pending == acc.username
auth acc.username, acc.password, acc.card, (token, err) ->
alert err if err
execEVE doc["path"], token
|
[
{
"context": " .expectJSON '',\n name: 'ウズラ'\n .expectJSON 'distributions.*',\n ",
"end": 11085,
"score": 0.9986138343811035,
"start": 11082,
"tag": "NAME",
"value": "ウズラ"
},
{
"context": " .expectJSON '',\n# name: 'ウズラ'... | app/specs/router_spec.coffee | KamataRyo/bird-api | 1 | frisby = require 'frisby'
APIurl = require('../utilities').getAPIurl
frisby
.create 'bad request(1)'
.get APIurl 'some_strange_directory'
.expectStatus 404
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.toss()
frisby
.create 'bad request(2)'
.get APIurl 'some_strange_directory/not_acceptable'
.expectStatus 404
.toss()
frisby
.create 'GET document, test of structure'
.get APIurl ''
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes 'document',
title: String
links: Array
.expectJSONTypes 'document.links.*',
rel: String
href: String
.toss()
frisby
.create 'GET birds'
.get APIurl 'birds'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONTypes 'species.*',
sc: String
ja: String
alien: Boolean
upper_id: String
.expectJSON 'species.*', rank: 'species'
.toss()
frisby
.create 'GET birds with fields query'
.get APIurl 'birds?fields=ja,rank'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONTypes 'species.*',
rank: String
sc: undefined
ja: String
alien: undefined
upper_id: undefined
.toss()
frisby
.create 'GET birds with limit query'
.get APIurl 'birds?limit=20'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONLength 'species', 20
.toss()
frisby
.create 'GET species with too many offset query'
.get APIurl 'species?offset=100000000'
.expectStatus 404
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.toss()
frisby
.create 'GET genuses'
.get APIurl 'genuses'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', genuses: Array
.expectJSONTypes 'genuses.*',
sc: String
ja: String
upper_id: String
.expectJSON 'genuses.*', rank: 'genus'
.toss()
frisby
.create 'GET families'
.get APIurl 'families'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', families: Array
.expectJSONTypes 'families.*',
sc: String
ja: String
upper_id: String
.expectJSON 'families.*', rank: 'family'
.toss()
frisby
.create 'GET orders'
.get APIurl 'orders'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', orders: Array
.expectJSONTypes 'orders.*',
sc: String
ja: String
.expectJSON 'orders.*', rank: 'order'
.toss()
frisby
.create 'GET birds/スズメ'
.get APIurl 'birds/スズメ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: String
ja: String
alien: Boolean
rank: String
upper_id: String
.expectJSONTypes 'taxonomies.*',
sc: String
ja: String
rank: String
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=ja,alien'
.get APIurl 'birds/ヒドリガモ?fields=ja,alien'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: undefined
ja: String
alien: Boolean
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.*',
sc: undefined
ja: String
rank: undefined
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=ja,unknownField'
.get APIurl 'birds/ヒドリガモ?fields=ja,unknownField'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: undefined
ja: String
alien: undefined
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.?',
sc: undefined
ja: String
rank: undefined
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=onlyUnknownField'
.get APIurl 'birds/ヒドリガモ?fields=onlyUnknownField'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
_id: String
sc: undefined
ja: undefined
alien: undefined
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.*',
_id: String
sc: undefined
ja: undefined
alien: undefined
rank: undefined
upper_id: undefined
.toss()
frisby
.create 'GET existence/undefined-bird-species'
.get APIurl 'existence/undefined-bird-species'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name: undefined
.expectJSON '',
existence: false
.toss()
frisby
.create 'GET existence/マガモ'
.get APIurl 'existence/マガモ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name: Object
.expectJSON '',
existence: true
.toss()
frisby
.create 'GET existence/マガモ?fields=ja'
.get APIurl 'existence/マガモ?fields=ja'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name:
_id: String
ja: String
sc: undefined
rank: undefined
upper_id: undefined
.expectJSON '',
existence: true
.toss()
content = '''
日本ではカモ類の多くは渡り鳥ですが、カルガモは留鳥で、年中観察することができます。
マガモは渡りを行いますが、日本で繁殖する場合もあります。
滋賀県米原市にある三島池はマガモの繁殖の南限地として有名です。
琵琶湖では、コガモ、オナガガモ、キンクロハジロ、ホシハジロ、スズガモなどのカモ類が多く見られます。
これらのうち、コガモ、オナガガモ、キンクロハジロ、ホシハジロは狩猟鳥です。
コガモは狩猟者から「べ」と呼ばれます。
'''
birdsRefered = ['カルガモ','マガモ','コガモ','オナガガモ','キンクロハジロ','ホシハジロ','スズガモ']
frisby
.create 'GET inclusion success'
.get APIurl "inclusion?content=#{content}"
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
histogram: Array
.expectJSONTypes 'histogram.*',
species: Object
frequency: Number
.expectJSONLength 'histogram', birdsRefered.length
.toss()
# frisby
# .create 'GET inclusion success with fields query'
# .get APIurl "inclusion?content=#{content}&fields=ja"
# .expectStatus 200
# .expectHeaderContains 'Content-Type', 'application/json'
# .expectHeaderContains 'Content-Type', 'charset=UTF-8'
# .expectHeaderContains 'Access-Control-Allow-Origin', '*'
# .expectJSONTypes '',
# histogram: Array
# .expectJSONTypes 'histogram.*',
# species:
# ja: String
# rank: undefined
# upper: undefined
# upper_id: undefined
# alien: undefined
# _id: undefined
# frequency: Number
# .expectJSONLength 'histogram', birdsRefered.length
# .toss()
frisby
.create 'GET inclusion with nocontent'
.get APIurl 'inclusion'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSON '',
histogram: []
.toss()
frisby
.create 'POST distributions'
.post APIurl('distributions'), {ja: 'ウズラ', place: '横浜'}, {json:true}
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.after (err, res, body) ->
frisby
.create 'GET AFTER POST'
.get APIurl 'distributions/ウズラ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSON '',
name: 'ウズラ'
.expectJSON 'distributions.*',
place: '横浜'
.expectJSONTypes 'distributions.*',
name_id: String
.toss()
.toss()
# frisby
# .create 'POST distributions'
# .post APIurl('distributions'), {ja: 'カワガラス', place: '川'}, {json:true}
# .post APIurl('distributions'), {ja: 'カワガラス', place: '渓谷'}, {json:true}
# .post APIurl('distributions'), {ja: 'ウミガラス', place: '海'}, {json:true}
# .after (err, res, body) ->
# frisby
# .create 'GET AFTER POST'
# .get APIurl 'distributions'
# .expectStatus 200
# .expectHeaderContains 'Content-Type', 'application/json'
# .expectHeaderContains 'Content-Type', 'charset=UTF-8'
# .expectHeaderContains 'Access-Control-Allow-Origin', '*'
# .expectJSON '',
# name: 'ウズラ'
# .expectJSON 'distributions.*',
# place: '横浜'
# .expectJSONTypes 'distributions.*',
# name_id: String
# .toss()
# .toss()
| 179797 | frisby = require 'frisby'
APIurl = require('../utilities').getAPIurl
frisby
.create 'bad request(1)'
.get APIurl 'some_strange_directory'
.expectStatus 404
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.toss()
frisby
.create 'bad request(2)'
.get APIurl 'some_strange_directory/not_acceptable'
.expectStatus 404
.toss()
frisby
.create 'GET document, test of structure'
.get APIurl ''
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes 'document',
title: String
links: Array
.expectJSONTypes 'document.links.*',
rel: String
href: String
.toss()
frisby
.create 'GET birds'
.get APIurl 'birds'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONTypes 'species.*',
sc: String
ja: String
alien: Boolean
upper_id: String
.expectJSON 'species.*', rank: 'species'
.toss()
frisby
.create 'GET birds with fields query'
.get APIurl 'birds?fields=ja,rank'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONTypes 'species.*',
rank: String
sc: undefined
ja: String
alien: undefined
upper_id: undefined
.toss()
frisby
.create 'GET birds with limit query'
.get APIurl 'birds?limit=20'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONLength 'species', 20
.toss()
frisby
.create 'GET species with too many offset query'
.get APIurl 'species?offset=100000000'
.expectStatus 404
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.toss()
frisby
.create 'GET genuses'
.get APIurl 'genuses'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', genuses: Array
.expectJSONTypes 'genuses.*',
sc: String
ja: String
upper_id: String
.expectJSON 'genuses.*', rank: 'genus'
.toss()
frisby
.create 'GET families'
.get APIurl 'families'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', families: Array
.expectJSONTypes 'families.*',
sc: String
ja: String
upper_id: String
.expectJSON 'families.*', rank: 'family'
.toss()
frisby
.create 'GET orders'
.get APIurl 'orders'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', orders: Array
.expectJSONTypes 'orders.*',
sc: String
ja: String
.expectJSON 'orders.*', rank: 'order'
.toss()
frisby
.create 'GET birds/スズメ'
.get APIurl 'birds/スズメ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: String
ja: String
alien: Boolean
rank: String
upper_id: String
.expectJSONTypes 'taxonomies.*',
sc: String
ja: String
rank: String
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=ja,alien'
.get APIurl 'birds/ヒドリガモ?fields=ja,alien'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: undefined
ja: String
alien: Boolean
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.*',
sc: undefined
ja: String
rank: undefined
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=ja,unknownField'
.get APIurl 'birds/ヒドリガモ?fields=ja,unknownField'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: undefined
ja: String
alien: undefined
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.?',
sc: undefined
ja: String
rank: undefined
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=onlyUnknownField'
.get APIurl 'birds/ヒドリガモ?fields=onlyUnknownField'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
_id: String
sc: undefined
ja: undefined
alien: undefined
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.*',
_id: String
sc: undefined
ja: undefined
alien: undefined
rank: undefined
upper_id: undefined
.toss()
frisby
.create 'GET existence/undefined-bird-species'
.get APIurl 'existence/undefined-bird-species'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name: undefined
.expectJSON '',
existence: false
.toss()
frisby
.create 'GET existence/マガモ'
.get APIurl 'existence/マガモ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name: Object
.expectJSON '',
existence: true
.toss()
frisby
.create 'GET existence/マガモ?fields=ja'
.get APIurl 'existence/マガモ?fields=ja'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name:
_id: String
ja: String
sc: undefined
rank: undefined
upper_id: undefined
.expectJSON '',
existence: true
.toss()
content = '''
日本ではカモ類の多くは渡り鳥ですが、カルガモは留鳥で、年中観察することができます。
マガモは渡りを行いますが、日本で繁殖する場合もあります。
滋賀県米原市にある三島池はマガモの繁殖の南限地として有名です。
琵琶湖では、コガモ、オナガガモ、キンクロハジロ、ホシハジロ、スズガモなどのカモ類が多く見られます。
これらのうち、コガモ、オナガガモ、キンクロハジロ、ホシハジロは狩猟鳥です。
コガモは狩猟者から「べ」と呼ばれます。
'''
birdsRefered = ['カルガモ','マガモ','コガモ','オナガガモ','キンクロハジロ','ホシハジロ','スズガモ']
frisby
.create 'GET inclusion success'
.get APIurl "inclusion?content=#{content}"
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
histogram: Array
.expectJSONTypes 'histogram.*',
species: Object
frequency: Number
.expectJSONLength 'histogram', birdsRefered.length
.toss()
# frisby
# .create 'GET inclusion success with fields query'
# .get APIurl "inclusion?content=#{content}&fields=ja"
# .expectStatus 200
# .expectHeaderContains 'Content-Type', 'application/json'
# .expectHeaderContains 'Content-Type', 'charset=UTF-8'
# .expectHeaderContains 'Access-Control-Allow-Origin', '*'
# .expectJSONTypes '',
# histogram: Array
# .expectJSONTypes 'histogram.*',
# species:
# ja: String
# rank: undefined
# upper: undefined
# upper_id: undefined
# alien: undefined
# _id: undefined
# frequency: Number
# .expectJSONLength 'histogram', birdsRefered.length
# .toss()
frisby
.create 'GET inclusion with nocontent'
.get APIurl 'inclusion'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSON '',
histogram: []
.toss()
frisby
.create 'POST distributions'
.post APIurl('distributions'), {ja: 'ウズラ', place: '横浜'}, {json:true}
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.after (err, res, body) ->
frisby
.create 'GET AFTER POST'
.get APIurl 'distributions/ウズラ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSON '',
name: '<NAME>'
.expectJSON 'distributions.*',
place: '横浜'
.expectJSONTypes 'distributions.*',
name_id: String
.toss()
.toss()
# frisby
# .create 'POST distributions'
# .post APIurl('distributions'), {ja: 'カワガラス', place: '川'}, {json:true}
# .post APIurl('distributions'), {ja: 'カワガラス', place: '渓谷'}, {json:true}
# .post APIurl('distributions'), {ja: 'ウミガラス', place: '海'}, {json:true}
# .after (err, res, body) ->
# frisby
# .create 'GET AFTER POST'
# .get APIurl 'distributions'
# .expectStatus 200
# .expectHeaderContains 'Content-Type', 'application/json'
# .expectHeaderContains 'Content-Type', 'charset=UTF-8'
# .expectHeaderContains 'Access-Control-Allow-Origin', '*'
# .expectJSON '',
# name: '<NAME>'
# .expectJSON 'distributions.*',
# place: '横浜'
# .expectJSONTypes 'distributions.*',
# name_id: String
# .toss()
# .toss()
| true | frisby = require 'frisby'
APIurl = require('../utilities').getAPIurl
frisby
.create 'bad request(1)'
.get APIurl 'some_strange_directory'
.expectStatus 404
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.toss()
frisby
.create 'bad request(2)'
.get APIurl 'some_strange_directory/not_acceptable'
.expectStatus 404
.toss()
frisby
.create 'GET document, test of structure'
.get APIurl ''
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes 'document',
title: String
links: Array
.expectJSONTypes 'document.links.*',
rel: String
href: String
.toss()
frisby
.create 'GET birds'
.get APIurl 'birds'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONTypes 'species.*',
sc: String
ja: String
alien: Boolean
upper_id: String
.expectJSON 'species.*', rank: 'species'
.toss()
frisby
.create 'GET birds with fields query'
.get APIurl 'birds?fields=ja,rank'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONTypes 'species.*',
rank: String
sc: undefined
ja: String
alien: undefined
upper_id: undefined
.toss()
frisby
.create 'GET birds with limit query'
.get APIurl 'birds?limit=20'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', species: Array
.expectJSONLength 'species', 20
.toss()
frisby
.create 'GET species with too many offset query'
.get APIurl 'species?offset=100000000'
.expectStatus 404
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.toss()
frisby
.create 'GET genuses'
.get APIurl 'genuses'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', genuses: Array
.expectJSONTypes 'genuses.*',
sc: String
ja: String
upper_id: String
.expectJSON 'genuses.*', rank: 'genus'
.toss()
frisby
.create 'GET families'
.get APIurl 'families'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', families: Array
.expectJSONTypes 'families.*',
sc: String
ja: String
upper_id: String
.expectJSON 'families.*', rank: 'family'
.toss()
frisby
.create 'GET orders'
.get APIurl 'orders'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '', orders: Array
.expectJSONTypes 'orders.*',
sc: String
ja: String
.expectJSON 'orders.*', rank: 'order'
.toss()
frisby
.create 'GET birds/スズメ'
.get APIurl 'birds/スズメ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: String
ja: String
alien: Boolean
rank: String
upper_id: String
.expectJSONTypes 'taxonomies.*',
sc: String
ja: String
rank: String
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=ja,alien'
.get APIurl 'birds/ヒドリガモ?fields=ja,alien'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: undefined
ja: String
alien: Boolean
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.*',
sc: undefined
ja: String
rank: undefined
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=ja,unknownField'
.get APIurl 'birds/ヒドリガモ?fields=ja,unknownField'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
sc: undefined
ja: String
alien: undefined
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.?',
sc: undefined
ja: String
rank: undefined
.toss()
frisby
.create 'GET birds/ヒドリガモ?fields=onlyUnknownField'
.get APIurl 'birds/ヒドリガモ?fields=onlyUnknownField'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
name: Object
binomen: String
taxonomies: Array
.expectJSONTypes 'name',
_id: String
sc: undefined
ja: undefined
alien: undefined
rank: undefined
upper_id: undefined
.expectJSONTypes 'taxonomies.*',
_id: String
sc: undefined
ja: undefined
alien: undefined
rank: undefined
upper_id: undefined
.toss()
frisby
.create 'GET existence/undefined-bird-species'
.get APIurl 'existence/undefined-bird-species'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name: undefined
.expectJSON '',
existence: false
.toss()
frisby
.create 'GET existence/マガモ'
.get APIurl 'existence/マガモ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name: Object
.expectJSON '',
existence: true
.toss()
frisby
.create 'GET existence/マガモ?fields=ja'
.get APIurl 'existence/マガモ?fields=ja'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
existence: Boolean
name:
_id: String
ja: String
sc: undefined
rank: undefined
upper_id: undefined
.expectJSON '',
existence: true
.toss()
content = '''
日本ではカモ類の多くは渡り鳥ですが、カルガモは留鳥で、年中観察することができます。
マガモは渡りを行いますが、日本で繁殖する場合もあります。
滋賀県米原市にある三島池はマガモの繁殖の南限地として有名です。
琵琶湖では、コガモ、オナガガモ、キンクロハジロ、ホシハジロ、スズガモなどのカモ類が多く見られます。
これらのうち、コガモ、オナガガモ、キンクロハジロ、ホシハジロは狩猟鳥です。
コガモは狩猟者から「べ」と呼ばれます。
'''
birdsRefered = ['カルガモ','マガモ','コガモ','オナガガモ','キンクロハジロ','ホシハジロ','スズガモ']
frisby
.create 'GET inclusion success'
.get APIurl "inclusion?content=#{content}"
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSONTypes '',
histogram: Array
.expectJSONTypes 'histogram.*',
species: Object
frequency: Number
.expectJSONLength 'histogram', birdsRefered.length
.toss()
# frisby
# .create 'GET inclusion success with fields query'
# .get APIurl "inclusion?content=#{content}&fields=ja"
# .expectStatus 200
# .expectHeaderContains 'Content-Type', 'application/json'
# .expectHeaderContains 'Content-Type', 'charset=UTF-8'
# .expectHeaderContains 'Access-Control-Allow-Origin', '*'
# .expectJSONTypes '',
# histogram: Array
# .expectJSONTypes 'histogram.*',
# species:
# ja: String
# rank: undefined
# upper: undefined
# upper_id: undefined
# alien: undefined
# _id: undefined
# frequency: Number
# .expectJSONLength 'histogram', birdsRefered.length
# .toss()
frisby
.create 'GET inclusion with nocontent'
.get APIurl 'inclusion'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSON '',
histogram: []
.toss()
frisby
.create 'POST distributions'
.post APIurl('distributions'), {ja: 'ウズラ', place: '横浜'}, {json:true}
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.after (err, res, body) ->
frisby
.create 'GET AFTER POST'
.get APIurl 'distributions/ウズラ'
.expectStatus 200
.expectHeaderContains 'Content-Type', 'application/json'
.expectHeaderContains 'Content-Type', 'charset=UTF-8'
.expectHeaderContains 'Access-Control-Allow-Origin', '*'
.expectJSON '',
name: 'PI:NAME:<NAME>END_PI'
.expectJSON 'distributions.*',
place: '横浜'
.expectJSONTypes 'distributions.*',
name_id: String
.toss()
.toss()
# frisby
# .create 'POST distributions'
# .post APIurl('distributions'), {ja: 'カワガラス', place: '川'}, {json:true}
# .post APIurl('distributions'), {ja: 'カワガラス', place: '渓谷'}, {json:true}
# .post APIurl('distributions'), {ja: 'ウミガラス', place: '海'}, {json:true}
# .after (err, res, body) ->
# frisby
# .create 'GET AFTER POST'
# .get APIurl 'distributions'
# .expectStatus 200
# .expectHeaderContains 'Content-Type', 'application/json'
# .expectHeaderContains 'Content-Type', 'charset=UTF-8'
# .expectHeaderContains 'Access-Control-Allow-Origin', '*'
# .expectJSON '',
# name: 'PI:NAME:<NAME>END_PI'
# .expectJSON 'distributions.*',
# place: '横浜'
# .expectJSONTypes 'distributions.*',
# name_id: String
# .toss()
# .toss()
|
[
{
"context": "up'\n\nmeryl.h 'GET /', (req, resp) ->\n people = ['bob', 'alice', 'meryl']\n resp.render 'layout', conte",
"end": 115,
"score": 0.9202876687049866,
"start": 112,
"tag": "USERNAME",
"value": "bob"
},
{
"context": "ryl.h 'GET /', (req, resp) ->\n people = ['bob', 'ali... | node_modules/ethercalc/node_modules/zappajs/node_modules/coffeecup/examples/meryl/app.coffee | kurakuradave/Etherboard | 21 | meryl = require 'meryl'
coffeecup = require '../../src/coffeecup'
meryl.h 'GET /', (req, resp) ->
people = ['bob', 'alice', 'meryl']
resp.render 'layout', content: 'index', context: {people: people}
meryl.run
templateDir: 'templates'
templateExt: '.coffee'
templateFunc: coffeecup.adapters.meryl
console.log 'Listening on 3000...'
| 117059 | meryl = require 'meryl'
coffeecup = require '../../src/coffeecup'
meryl.h 'GET /', (req, resp) ->
people = ['bob', '<NAME>', '<NAME>']
resp.render 'layout', content: 'index', context: {people: people}
meryl.run
templateDir: 'templates'
templateExt: '.coffee'
templateFunc: coffeecup.adapters.meryl
console.log 'Listening on 3000...'
| true | meryl = require 'meryl'
coffeecup = require '../../src/coffeecup'
meryl.h 'GET /', (req, resp) ->
people = ['bob', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
resp.render 'layout', content: 'index', context: {people: people}
meryl.run
templateDir: 'templates'
templateExt: '.coffee'
templateFunc: coffeecup.adapters.meryl
console.log 'Listening on 3000...'
|
[
{
"context": "ess task\n @TaskModel.create\n name: @name\n group: group\n progress_at:",
"end": 1790,
"score": 0.7852158546447754,
"start": 1790,
"tag": "NAME",
"value": ""
},
{
"context": "s task\n @TaskModel.create\n name: @name\n ... | task.coffee | jysperm/pomo-mailer | 1 | {EventEmitter} = require 'events'
Mabolo = require 'mabolo'
moment = require 'moment-timezone'
_ = require 'lodash'
Q = require 'q'
module.exports = class Task extends EventEmitter
defaults:
name: null
worker: ->
groupBy: ->
mongodb: 'mongodb://localhost/pomo-mailer'
timeout: 600 * 1000
nextGroup: -> 3600 * 1000
logger: console
###
Event: `error`
* `err` {Error}
* `context` (optional) {Object}
###
###
Public: Define a task.
* `task` {Object}
* `name` {String}
* `mongodb` {String} Uri of MongoDB.
* `groupBy` {Function} `-> String`.
* `worker` {Function} `(task) -> Promise`
* `timeout` (optional) {Number} Default `600 * 1000`.
* `nextGroup` (optional) {Function} `-> Number|Date|Moment`, default `-> 3600 * 1000`.
* `logger` (optional) {Object} Default `console`
###
constructor: (options) ->
_.extend @, _.defaults options, @defualts
@mabolo = new Mabolo @mongodb
@TaskModel = @mabolo.model 'Task',
name:
type: String
required: true
group:
type: String
required: true
progress: Object
progress_at: Date
finished_at: Date
@TaskModel.ensureIndex
name: 1
group: 1
,
unique: true
dropDups: true
.done =>
@triggerTask()
, (err) =>
@emit 'error', _.extend err,
context:
when: 'createTask'
@on 'error', (err) =>
@logger?.log err, err.context
###
Public: Stop task.
###
stop: ->
clearTimeout @timeoutId
@stopped = true
triggerTask: ->
group = @groupBy()
@TaskModel.findOne
name: @name
group: group
.done (task) =>
unless task
@TaskModel.create
name: @name
group: group
progress_at: new Date()
.done (task) =>
@runTask task
, (err) =>
if err.message.match /duplicate/
setImmediate @triggerTask
else
@emit 'error', _.extend err,
context:
when: 'createTask'
, (err) =>
@emit 'error', err,
when: 'findTask'
@resumeTasks()
@waitNextGroup()
runTask: (task) ->
if @stopped
return
Q(@worker task).progress (progress) ->
task.update
$set:
progress: progress
progress_at: new Date()
.catch (err) =>
@emit 'error', _.extend err,
context:
when: 'updateProgress'
task: task
progress: progress
.done =>
task.update
$set:
finished_at: new Date()
.catch (err) =>
@emit 'error', err,
context:
when: 'finishTask'
task: task
, (err) =>
@emit 'error', err,
context:
when: 'runTask'
task: task
resumeTasks: ->
@TaskModel.findOneAndUpdate
name: @name
finished_at:
$exists: false
progress_at:
$lt: new Date Date.now() - @timeout
,
$set:
progress_at: new Date()
.done (task) =>
if task
@runTask task
@resumeTasks()
, (err) =>
@emit 'error', err,
context:
when: 'resumeTasks'
waitNextGroup: ->
next = @nextGroup()
if moment.isMoment next
next = next.toDate()
if _.isDate next
interval = next.getTime() - Date.now()
else
interval = next
@timeoutId = setTimeout =>
@triggerTask()
, interval
| 24272 | {EventEmitter} = require 'events'
Mabolo = require 'mabolo'
moment = require 'moment-timezone'
_ = require 'lodash'
Q = require 'q'
module.exports = class Task extends EventEmitter
defaults:
name: null
worker: ->
groupBy: ->
mongodb: 'mongodb://localhost/pomo-mailer'
timeout: 600 * 1000
nextGroup: -> 3600 * 1000
logger: console
###
Event: `error`
* `err` {Error}
* `context` (optional) {Object}
###
###
Public: Define a task.
* `task` {Object}
* `name` {String}
* `mongodb` {String} Uri of MongoDB.
* `groupBy` {Function} `-> String`.
* `worker` {Function} `(task) -> Promise`
* `timeout` (optional) {Number} Default `600 * 1000`.
* `nextGroup` (optional) {Function} `-> Number|Date|Moment`, default `-> 3600 * 1000`.
* `logger` (optional) {Object} Default `console`
###
constructor: (options) ->
_.extend @, _.defaults options, @defualts
@mabolo = new Mabolo @mongodb
@TaskModel = @mabolo.model 'Task',
name:
type: String
required: true
group:
type: String
required: true
progress: Object
progress_at: Date
finished_at: Date
@TaskModel.ensureIndex
name: 1
group: 1
,
unique: true
dropDups: true
.done =>
@triggerTask()
, (err) =>
@emit 'error', _.extend err,
context:
when: 'createTask'
@on 'error', (err) =>
@logger?.log err, err.context
###
Public: Stop task.
###
stop: ->
clearTimeout @timeoutId
@stopped = true
triggerTask: ->
group = @groupBy()
@TaskModel.findOne
name: @name
group: group
.done (task) =>
unless task
@TaskModel.create
name:<NAME> @name
group: group
progress_at: new Date()
.done (task) =>
@runTask task
, (err) =>
if err.message.match /duplicate/
setImmediate @triggerTask
else
@emit 'error', _.extend err,
context:
when: 'createTask'
, (err) =>
@emit 'error', err,
when: 'findTask'
@resumeTasks()
@waitNextGroup()
runTask: (task) ->
if @stopped
return
Q(@worker task).progress (progress) ->
task.update
$set:
progress: progress
progress_at: new Date()
.catch (err) =>
@emit 'error', _.extend err,
context:
when: 'updateProgress'
task: task
progress: progress
.done =>
task.update
$set:
finished_at: new Date()
.catch (err) =>
@emit 'error', err,
context:
when: 'finishTask'
task: task
, (err) =>
@emit 'error', err,
context:
when: 'runTask'
task: task
resumeTasks: ->
@TaskModel.findOneAndUpdate
name:<NAME> @name
finished_at:
$exists: false
progress_at:
$lt: new Date Date.now() - @timeout
,
$set:
progress_at: new Date()
.done (task) =>
if task
@runTask task
@resumeTasks()
, (err) =>
@emit 'error', err,
context:
when: 'resumeTasks'
waitNextGroup: ->
next = @nextGroup()
if moment.isMoment next
next = next.toDate()
if _.isDate next
interval = next.getTime() - Date.now()
else
interval = next
@timeoutId = setTimeout =>
@triggerTask()
, interval
| true | {EventEmitter} = require 'events'
Mabolo = require 'mabolo'
moment = require 'moment-timezone'
_ = require 'lodash'
Q = require 'q'
module.exports = class Task extends EventEmitter
  # Per-instance option defaults; shallow-merged into `@` by the constructor.
  defaults:
    name: null
    worker: ->
    groupBy: ->
    mongodb: 'mongodb://localhost/pomo-mailer'
    timeout: 600 * 1000
    nextGroup: -> 3600 * 1000
    logger: console

  ###
  Event: `error`

  * `err` {Error}
  * `context` (optional) {Object}
  ###

  ###
  Public: Define a task.

  * `task` {Object}
    * `name` {String}
    * `mongodb` {String} Uri of MongoDB.
    * `groupBy` {Function} `-> String`.
    * `worker` {Function} `(task) -> Promise`
    * `timeout` (optional) {Number} Default `600 * 1000`.
    * `nextGroup` (optional) {Function} `-> Number|Date|Moment`, default `-> 3600 * 1000`.
    * `logger` (optional) {Object} Default `console`
  ###
  constructor: (options) ->
    # FIX: was `@defualts` (typo), so the defaults above were never applied.
    _.extend @, _.defaults options, @defaults
    @mabolo = new Mabolo @mongodb
    @TaskModel = @mabolo.model 'Task',
      name:
        type: String
        required: true
      group:
        type: String
        required: true
      progress: Object
      progress_at: Date
      finished_at: Date
    # `(name, group)` is unique so concurrent workers cannot create the same
    # task document twice; the loser of that race gets a duplicate-key error
    # which `triggerTask` handles by retrying the lookup.
    @TaskModel.ensureIndex
      name: 1
      group: 1
    ,
      unique: true
      dropDups: true
    .done =>
      @triggerTask()
    , (err) =>
      # FIX: context said 'createTask' (copy-paste); this failure comes from
      # the index setup, not from creating a task.
      @emit 'error', _.extend err,
        context:
          when: 'ensureIndex'
    @on 'error', (err) =>
      @logger?.log err, err.context

  ###
  Public: Stop task.
  ###
  stop: ->
    clearTimeout @timeoutId
    @stopped = true

  # Find (or create) the task document for the current group and run it.
  # Also kicks off recovery of stalled tasks and schedules the next group.
  triggerTask: ->
    group = @groupBy()
    @TaskModel.findOne
      name: @name
      group: group
    .done (task) =>
      unless task
        @TaskModel.create
          name: @name
          group: group
          progress_at: new Date()
        .done (task) =>
          @runTask task
        , (err) =>
          if err.message.match /duplicate/
            # Another worker created the document first; retry the lookup.
            # FIX: `setImmediate @triggerTask` passed the method unbound,
            # losing `this` when the callback fired.
            setImmediate => @triggerTask()
          else
            @emit 'error', _.extend err,
              context:
                when: 'createTask'
    , (err) =>
      # FIX: `when` was emitted without the `context:` wrapper, unlike every
      # other error emitted by this class.
      @emit 'error', err,
        context:
          when: 'findTask'
    @resumeTasks()
    @waitNextGroup()

  # Run `@worker` against a task document, persisting progress reports and
  # the finish timestamp. No-op once `stop()` has been called.
  runTask: (task) ->
    if @stopped
      return
    # FIX: the progress callback used a thin arrow (`->`), so `@emit` inside
    # the nested `.catch` was bound to the wrong `this`.
    Q(@worker task).progress (progress) =>
      task.update
        $set:
          progress: progress
          progress_at: new Date()
      .catch (err) =>
        @emit 'error', _.extend err,
          context:
            when: 'updateProgress'
            task: task
            progress: progress
    .done =>
      task.update
        $set:
          finished_at: new Date()
      .catch (err) =>
        @emit 'error', err,
          context:
            when: 'finishTask'
            task: task
    , (err) =>
      @emit 'error', err,
        context:
          when: 'runTask'
          task: task

  # Claim one unfinished task whose last progress report is older than
  # `@timeout` (by bumping `progress_at`), run it, then look for the next.
  resumeTasks: ->
    @TaskModel.findOneAndUpdate
      name: @name
      finished_at:
        $exists: false
      progress_at:
        $lt: new Date Date.now() - @timeout
    ,
      $set:
        progress_at: new Date()
    .done (task) =>
      if task
        @runTask task
        @resumeTasks()
    , (err) =>
      @emit 'error', err,
        context:
          when: 'resumeTasks'

  # Schedule `triggerTask` for the next group. `nextGroup()` may return a
  # delay in milliseconds, a Date, or a Moment.
  waitNextGroup: ->
    next = @nextGroup()
    if moment.isMoment next
      next = next.toDate()
    if _.isDate next
      interval = next.getTime() - Date.now()
    else
      interval = next
    @timeoutId = setTimeout =>
      @triggerTask()
    , interval
|
[
{
"context": "# Copyright 2010-2019 Dan Elliott, Russell Valentine\n#\n# Licensed under the Apach",
"end": 35,
"score": 0.9998342394828796,
"start": 24,
"tag": "NAME",
"value": "Dan Elliott"
},
{
"context": "# Copyright 2010-2019 Dan Elliott, Russell Valentine\n#\n# Licensed ... | clients/www/src/coffee/handler/YourAccountSettings.coffee | bluthen/isadore_server | 0 | # Copyright 2010-2019 Dan Elliott, Russell Valentine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class window.YourAccountSettings
  # Markup for the account-settings dialog: a "Details" tab (password change +
  # contact preferences) and an "Alarms" tab (per-alarm notification levels).
  @_TEMPLATE: """
    <div class="your_account_settings">
      <ul>
        <li><a href="#your_account_settings_details">Details</a></li>
        <li><a href="#your_account_settings_alarms">Alarms</a></li>
      </ul>
      <div id="your_account_settings_details">
        <h1>Change Password</h1>
        <div><ul id="your_account_settings_chpw_msgs"></ul></div>
        <div class="label">
          <label for="your_account_settings_old_pw">Current Password</label>
        </div>
        <div>
          <input type="password" id="your_account_settings_old_pw"/>
        </div>
        <div class="label">
          <label for="your_account_settings_new_pw">New Password</label>
        </div>
        <div>
          <input type="password" id="your_account_settings_new_pw"/>
        </div>
        <div class="label">
          <label for="your_account_settings_new_pw2">Re-typed New Password</label>
        </div>
        <div>
          <input type="password" id="your_account_settings_new_pw2"/>
        </div>
        <div class="label">
          <button id="your_account_settings_save_pw" type="button" value="Save">Change Password</button><span class="pwsave_status"></span>
        </div>
        <h1>Contact Settings</h1>
        <div class="label">Note: This setting does not effect alarms.</div>
        <div>
          <input type="checkbox" id="your_account_contact_preference_news"/><label for="your_account_contact_preference_news" class="label"> Contact me about Isadore news, status, and updates.</label>
        </div>
        <div class="label">
          <button id="your_account_settings_save_contact" type="button" value="Save">Save</button><span class="contact_save_status"></span>
        </div>
      </div>
      <div id="your_account_settings_alarms">
        <input id="your_account_settings_alarms_disable_all" type="checkbox"/><label for="your_account_settings_alarms_disable_all">Disable all notifications</label>
        <span style="font-weight: bold">Alarm:</span><select id="your_account_settings_alarms_select"></select>
        <div id="your_account_settings_alarms_levels"></div>
        <button id="your_account_settings_alarms_save" type="button" value="Save">Save</button><span class="ya_alarms_save_status"></span>
      </div>
    </div>
    """

  # Build the dialog, wire the header account link, and bind all tab /
  # input handlers. `$parentWrap` is the jQuery element the dialog markup is
  # appended to.
  constructor: ($parentWrap) ->
    #TODO: Somewhere need to contact alarms system to get blacklist like things
    $('.info_header > .account').html('<img src="imgs/icon_profile.png" style="position: relative; top: 6px;"/>'+IsadoreData.selfAccount.name).click(() =>
      @show()
    )
    @_$selfdiv = $(window.YourAccountSettings._TEMPLATE)
    $parentWrap.append(@_$selfdiv)
    # Working copy of the user's alarm settings; refreshed on tab activation.
    # FIX: initialize with an `alarm_setup` key so _updateWorkingAlarmSettings
    # never dereferences an undefined property before the tab is activated.
    @_workingAlarmSettings = {"alarm_setup": {}}
    @_$selfdiv.tabs({activate: () =>
      if IsadoreData.selfAccount.configs.alarm_settings?
        @_workingAlarmSettings = _.deepCopy(IsadoreData.selfAccount.configs.alarm_settings)
      else
        # FIX: was assigned to the misspelled `@_workingAlarmsSettings`,
        # leaving the real working copy untouched.
        @_workingAlarmSettings = {"alarm_setup": {}}
      @_alarmSelect()
    })
    @_$selfdiv.tabs("option", "disabled", [1])
    @_dialog = new IsadoreDialog(@_$selfdiv, {width: 700, title: 'Account Settings'});
    @_$pwtab = $('#your_account_settings_details', @_$selfdiv)
    @_$almtab = $('#your_account_settings_alarms', @_$selfdiv)
    $('#your_account_settings_save_pw', @_$pwtab).click(() =>
      @saveNewPassword()
    )
    $('#your_account_settings_old_pw, #your_account_settings_new_pw, #your_account_settings_new_pw2', @_$pwtab).on('input keypress focus blur change', () =>
      @_validatePasswords()
    )
    $('#your_account_settings_save_contact', @_$pwtab).click(() =>
      @_saveContactPrefs()
    )
    # FIX: the disable-all checkbox lives in the alarms tab, so searching
    # within @_$pwtab never found it and the handler was never bound.
    $("#your_account_settings_alarms_disable_all", @_$almtab).change(() =>
      @_updateWorkingAlarmSettings()
    )
    $select = $('#your_account_settings_alarms_select', @_$almtab)
    if IsadoreData.general.configs.alarms?
      for alarm_name, alarm_info of IsadoreData.general.configs.alarms
        $select.append('<option value="'+alarm_name+'">'+alarm_name+'</option>')
    $select.change(() =>
      @_alarmSelect()
    )

  # Render the per-level controls for the currently selected alarm and seed
  # them from the working settings.
  _alarmSelect: () ->
    levels = ['Info', 'Warn', 'Concern', 'Urgent']
    level_options= '<select multiple><option value="email">Email</option><option value="sms">SMS</option><option value="voice">Voice</option></select>'
    $select = $('#your_account_settings_alarms_select', @_$almtab)
    $alarmLevelDiv = $('#your_account_settings_alarms_levels', @_$almtab).empty()
    alarmName = $select.val()
    alarm = IsadoreData.general.configs.alarms[alarmName]
    # Guard: nothing to render when no alarm is selected/configured.
    return unless alarm?
    $alarmLevelDiv.append("<h1>#{alarmName}</h1>")
    for level in levels
      lclevel = level.toLowerCase()
      if alarm.condition?.hasOwnProperty(lclevel)
        $leveldiv = $('<div style="margin-left: 2ex;"></div>')
        label_for = "your_account_settings_alarms_#{lclevel}"
        # FIX: this string was single-quoted, so "#{label_for}"/"#{lclabel}"
        # were emitted literally (and `lclabel` never existed); build the ids
        # and data-level attribute by concatenation instead.
        $leveldiv.append('<div style="display:inline-block; vertical-align: top; margin-right: 2ex;">'+level+': <input id="'+label_for+'_enabled" data-level="'+lclevel+'" type="checkbox"/><label for="'+label_for+'_enabled">Enabled</label></div>')
        $select_level = $(level_options)
        $select_level.attr('id', label_for+"_"+lclevel)
        $select_level.attr('data-level', lclevel)
        $leveldiv.append($select_level)
        if alarm.level_descriptions?.hasOwnProperty(lclevel)
          $leveldiv.append('<div style="display:inline-block; vertical-align: top; margin-left: 2ex;">'+alarm.level_descriptions[lclevel]+'</div>')
        $alarmLevelDiv.append($leveldiv)
        $select_level.change(() =>
          @_updateWorkingAlarmSettings()
        )
        $('input', $leveldiv).change( () =>
          @_updateWorkingAlarmSettings()
        )
    if @_workingAlarmSettings.alarm_setup?.hasOwnProperty(alarmName)
      as = @_workingAlarmSettings.alarm_setup[alarmName]
      for level, values of as
        # FIX: was built from `lclevel`, a stale leftover of the rendering
        # loop above; the ids are keyed by this iteration's `level`.
        label_for = "your_account_settings_alarms_#{level}"
        $select_level = $('#'+label_for+"_"+level, $alarmLevelDiv)
        if values.enabled
          $('#'+label_for+'_enabled', $alarmLevelDiv).prop('checked', true)
        selected = []
        if values.email
          selected.push('email')
        if values.sms
          selected.push('sms')
        if values.voice
          selected.push('voice')
        # FIX: was `.val(values)` — the `selected` array built above was
        # never used, so the multi-select never reflected saved choices.
        $select_level.val(selected)

  # Read the rendered controls for the selected alarm back into the working
  # settings object.
  _updateWorkingAlarmSettings: () ->
    alarmName = $('#your_account_settings_alarms_select', @_$almtab).val()
    # FIX: $alarmLevelDiv was referenced but never defined in this scope.
    $alarmLevelDiv = $('#your_account_settings_alarms_levels', @_$almtab)
    if not @_workingAlarmSettings.alarm_setup.hasOwnProperty(alarmName)
      @_workingAlarmSettings.alarm_setup[alarmName] = {}
    $('input[type="checkbox"]', $alarmLevelDiv).each((index, value) =>
      $value = $(value)
      level = $value.attr('data-level')
      if not @_workingAlarmSettings.alarm_setup[alarmName].hasOwnProperty(level)
        @_workingAlarmSettings.alarm_setup[alarmName][level] = {}
      level_enabled = $value.is(':checked')
      @_workingAlarmSettings.alarm_setup[alarmName][level].enabled = level_enabled
    )
    $('select', $alarmLevelDiv).each((index, value) =>
      $value = $(value)
      level = $value.attr('data-level')
      selects = $value.val()
      @_workingAlarmSettings.alarm_setup[alarmName][level].sms = false
      @_workingAlarmSettings.alarm_setup[alarmName][level].email = false
      @_workingAlarmSettings.alarm_setup[alarmName][level].voice = false
      # `selects` may be null when nothing is chosen in the multi-select.
      for s in (selects ? [])
        @_workingAlarmSettings.alarm_setup[alarmName][level][s]=true
    )

  _saveAlarmSettings: () ->
    i = 1
    #TODO: save account config with new alarm_setup
    #TODO: Update IsadoreData.selfAccount

  # Validate the three password fields and enable/disable the save button,
  # listing any problems in the messages list.
  _validatePasswords: (hard=false) ->
    #TODO: Add zxcvbn password strength meter
    #https://github.com/dropbox/zxcvbn
    oldPW = $('#your_account_settings_old_pw', @_$pwtab).val()
    newPW = $('#your_account_settings_new_pw', @_$pwtab).val()
    newPW2 = $('#your_account_settings_new_pw2', @_$pwtab).val()
    errors = []
    if (!oldPW)
      errors.push('Please enter your old password.')
    # SECURITY FIX: removed console.log calls that wrote the plaintext
    # passwords to the browser console.
    $ul = $('ul#your_account_settings_chpw_msgs', @_$pwtab)
    if (!newPW || !newPW2 || newPW != newPW2)
      errors.push('New password and Re-typed Password must match.')
    if errors.length == 0
      $('#your_account_settings_save_pw', @_$pwtab).prop('disabled', false)
      $ul.empty()
    else
      $('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
      msg = ''
      for err in errors
        msg += '<li>'+err+'</li>'
      $ul.html(msg)

  # Reset the Details tab: clear password fields and messages, restore the
  # contact checkbox from the account record.
  _clearPasswords: () ->
    $('ul#your_account_settings_chpw_msgs', @_$pwtab).empty()
    # FIX: `.empty()` removes child nodes, not input values; use `.val('')`.
    $('input[type="password"]', @_$pwtab).val('')
    $('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
    $('.contact_save_status', @_$pwtab).empty()
    $('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
    $('#your_account_contact_preference_news', @_$pwtab).prop('checked', IsadoreData.selfAccount.contact_news)

  # PUT the old/new password pair to the server and report the result.
  saveNewPassword: () ->
    oldPW = $('#your_account_settings_old_pw', @_$pwtab).val()
    newPW = $('#your_account_settings_new_pw', @_$pwtab).val()
    #send current password and new password to change password server side
    $('.pwsave_status', @_$pwtab).text('Saving...').fadeIn(0)
    $('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
    $.ajax({
      url: "../resources/accounts/password"
      type: "PUT"
      dataType: 'text'
      data: {old_password: oldPW, new_password: newPW}
      success: (d) =>
        $('.pwsave_status', @_$pwtab).text('Password Changed!').fadeIn(0).fadeOut(3000)
        @_clearPasswords()
      error: (jqXHR, textStatus, errorThrown) =>
        $('.pwsave_status', @_$pwtab).text('Failed').fadeIn(0).fadeOut(3000)
        $ul = $('ul#your_account_settings_chpw_msgs', @_$pwtab)
        $ul.html('<li>Please enter the correct current password.</li>')
    })

  # PUT the contact-news preference to the server and report the result.
  _saveContactPrefs: () ->
    $('.contact_save_status', @_$pwtab).empty()
    contact_prefs = $('#your_account_contact_preference_news', @_$pwtab).is(':checked')
    $('.contact_save_status', @_$pwtab).text('Saving...').fadeIn(0)
    $('#your_account_settings_save_contact', @_$pwtab).prop('disabled', true)
    $.ajax({
      url: "../resources/accounts/"+IsadoreData.selfAccount.id
      type: "PUT"
      dataType: 'text'
      data: {contact_news: contact_prefs}
      success: (d) =>
        $('.contact_save_status', @_$pwtab).text('Saved!').fadeIn(0).fadeOut(3000)
        $('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
      error: (jqXHR, textStatus, errorThrown) =>
        $('.contact_save_status', @_$pwtab).text('Failed')
        $('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
    })

  # Open the dialog, resetting the password form first.
  show: () ->
    if !@_dialog.isOpen()
      #Clear password fields.
      @_clearPasswords()
      @_dialog.open()
| 91664 | # Copyright 2010-2019 <NAME>, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class window.YourAccountSettings
@_TEMPLATE: """
<div class="your_account_settings">
<ul>
<li><a href="#your_account_settings_details">Details</a></li>
<li><a href="#your_account_settings_alarms">Alarms</a></li>
</ul>
<div id="your_account_settings_details">
<h1>Change Password</h1>
<div><ul id="your_account_settings_chpw_msgs"></ul></div>
<div class="label">
<label for="your_account_settings_old_pw">Current Password</label>
</div>
<div>
<input type="password" id="your_account_settings_old_pw"/>
</div>
<div class="label">
<label for="your_account_settings_new_pw">New Password</label>
</div>
<div>
<input type="password" id="your_account_settings_new_pw"/>
</div>
<div class="label">
<label for="your_account_settings_new_pw2">Re-typed New Password</label>
</div>
<div>
<input type="password" id="your_account_settings_new_pw2"/>
</div>
<div class="label">
<button id="your_account_settings_save_pw" type="button" value="Save">Change Password</button><span class="pwsave_status"></span>
</div>
<h1>Contact Settings</h1>
<div class="label">Note: This setting does not effect alarms.</div>
<div>
<input type="checkbox" id="your_account_contact_preference_news"/><label for="your_account_contact_preference_news" class="label"> Contact me about Isadore news, status, and updates.</label>
</div>
<div class="label">
<button id="your_account_settings_save_contact" type="button" value="Save">Save</button><span class="contact_save_status"></span>
</div>
</div>
<div id="your_account_settings_alarms">
<input id="your_account_settings_alarms_disable_all" type="checkbox"/><label for="your_account_settings_alarms_disable_all">Disable all notifications</label>
<span style="font-weight: bold">Alarm:</span><select id="your_account_settings_alarms_select"></select>
<div id="your_account_settings_alarms_levels"></div>
<button id="your_account_settings_alarms_save" type="button" value="Save">Save</button><span class="ya_alarms_save_status"></span>
</div>
</div>
"""
constructor: ($parentWrap) ->
#TODO: Somewhere need to contact alarms system to get blacklist like things
$('.info_header > .account').html('<img src="imgs/icon_profile.png" style="position: relative; top: 6px;"/>'+IsadoreData.selfAccount.name).click(() =>
@show()
)
@_$selfdiv = $(window.YourAccountSettings._TEMPLATE)
$parentWrap.append(@_$selfdiv)
@_workingAlarmSettings = {}
@_$selfdiv.tabs({activate: () =>
if IsadoreData.selfAccount.configs.alarm_settings?
@_workingAlarmSettings = _.deepCopy(IsadoreData.selfAccount.configs.alarm_settings)
else
@_workingAlarmsSettings = {"alarm_setup": {}}
@_alarmSelect()
})
@_$selfdiv.tabs("option", "disabled", [1])
@_dialog = new IsadoreDialog(@_$selfdiv, {width: 700, title: 'Account Settings'});
@_$pwtab = $('#your_account_settings_details', @_$selfdiv)
@_$almtab = $('#your_account_settings_alarms', @_$selfdiv)
$('#your_account_settings_save_pw', @_$pwtab).click(() =>
@saveNewPassword()
)
$('#your_account_settings_old_pw, #your_account_settings_new_pw, #your_account_settings_new_pw2', @_$pwtab).on('input keypress focus blur change', () =>
@_validatePasswords()
)
$('#your_account_settings_save_contact', @_$pwtab).click(() =>
@_saveContactPrefs()
)
$("#your_account_settings_alarms_disable_all", @_$pwtab).change(() =>
@_updateWorkingAlarmSettings()
)
$select = $('#your_account_settings_alarms_select', @_$almtab)
if IsadoreData.general.configs.alarms?
for alarm_name, alarm_info of IsadoreData.general.configs.alarms
$select.append('<option value="'+alarm_name+'">'+alarm_name+'</option>')
$select.change(() =>
@_alarmSelect()
)
_alarmSelect: () ->
levels = ['Info', 'Warn', 'Concern', 'Urgent']
level_options= '<select multiple><option value="email">Email</option><option value="sms">SMS</option><option value="voice">Voice</option></select>'
$select = $('#your_account_settings_alarms_select', @_$almtab)
$alarmLevelDiv = $('#your_account_settings_alarms_levels', @_$almtab).empty()
alarmName = $select.val()
alarm = IsadoreData.general.configs.alarms[alarmName]
$alarmLevelDiv.append("<h1>#{alarmName}</h1>")
for level in levels
lclevel = level.toLowerCase()
if alarm.condition?.hasOwnProperty(lclevel)
$leveldiv = $('<div style="margin-left: 2ex;"></div>')
label_for = "your_account_settings_alarms_#{lclevel}"
$leveldiv.append('<div style="display:inline-block; vertical-align: top; margin-right: 2ex;">'+level+': <input id="#{label_for}_enabled" data-level="#{lclabel}" type="checkbox"/><label for="#{label_for}_enabled">Enabled</label></div>')
$select_level = $(level_options)
$select_level.attr('id', label_for+"_"+lclevel)
$select_level.attr('data-level', lclevel)
$leveldiv.append($select_level)
if alarm.level_descriptions?.hasOwnProperty(lclevel)
$leveldiv.append('<div style="display:inline-block; vertical-align: top; margin-left: 2ex;">'+alarm.level_descriptions[lclevel]+'</div>')
$alarmLevelDiv.append($leveldiv)
$select_level.change(() =>
@_updateWorkingAlarmSettings()
)
$('input', $leveldiv).change( () =>
@_updateWorkingAlarmSettings()
)
if @_workingAlarmSettings.alarm_setup?.hasOwnProperty(alarmName)
as = @_workingAlarmSettings.alarm_setup[alarmName]
for level, values of as
label_for = "your_account_settings_alarms_#{lclevel}"
$select_level = $('#'+label_for+"_"+level, $alarmLevelDiv)
if values.enabled
$('#'+label_for+'_enabled', $alarmLevelDiv).prop('checked', true)
selected = []
if values.email
selected.push('email')
if values.sms
selected.push('sms')
if values.voice
selected.push('voice')
$select_level.val(values)
_updateWorkingAlarmSettings: () ->
alarmName = $('#your_account_settings_alarms_select', @_$almtab).val()
if not @_workingAlarmSettings.alarm_setup.hasOwnProperty(alarmName)
@_workingAlarmSettings.alarm_setup[alarmName] = {}
$('input[type="checkbox"]', $alarmLevelDiv).each((index, value) =>
$value = $(value)
level = $value.attr('data-level')
if not @_workingAlarmSettings.alarm_setup[alarmName].hasOwnProperty(level)
@_workingAlarmSettings.alarm_setup[alarmName][level] = {}
level_enabled = $value.is(':checked')
@_workingAlarmSettings.alarm_setup[alarmName][level].enabled = level_enabled
)
$('select', $alarmLevelDiv).each((index, value) =>
$value = $(value)
level = $value.attr('data-level')
selects = $value.val()
@_workingAlarmSettings.alarm_setup[alarmName][level].sms = false
@_workingAlarmSettings.alarm_setup[alarmName][level].email = false
@_workingAlarmSettings.alarm_setup[alarmName][level].voice = false
for s in selects
@_workingAlarmSettings.alarm_setup[alarmName][level][s]=true
)
_saveAlarmSettings: () ->
i = 1
#TODO: save account config with new alarm_setup
#TODO: Update IsadoreData.selfAccount
_validatePasswords: (hard=false) ->
console.log('_validatePasswords')
#TODO: Add zxcvbn password strength meter
#https://github.com/dropbox/zxcvbn
oldPW = $('#your_account_settings_old_pw', @_$pwtab).val()
newPW = $('#your_account_settings_new_pw', @_$pwtab).val()
newPW2 = $('#your_account_settings_new_pw2', @_$pwtab).val()
errors = []
if (!oldPW)
errors.push('Please enter your old password.')
console.log(newPW)
console.log(newPW2)
$ul = $('ul#your_account_settings_chpw_msgs', @_$pwtab)
if (!newPW || !newPW2 || newPW != newPW2)
errors.push('New password and Re-typed Password must match.')
if errors.length == 0
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', false)
$ul.empty()
else
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
msg = ''
for err in errors
msg += '<li>'+err+'</li>'
$ul.html(msg)
_clearPasswords: () ->
$('ul#your_account_settings_chpw_msgs', @_$pwtab).empty()
$('input[type="password"]', @_$pwtab).empty()
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
$('.contact_save_status', @_$pwtab).empty()
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
$('#your_account_contact_preference_news', @_$pwtab).prop('checked', IsadoreData.selfAccount.contact_news)
saveNewPassword: () ->
oldPW = $('#your_account_settings_old_pw', @_$pwtab).val()
newPW = $('#your_account_settings_new_pw', @_$pwtab).val()
#send current password and new password to change password server side
$('.pwsave_status', @_$pwtab).text('Saving...').fadeIn(0)
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
$.ajax({
url: "../resources/accounts/password"
type: "PUT"
dataType: 'text'
data: {old_password: <PASSWORD>, new_password: <PASSWORD>}
success: (d) =>
$('.pwsave_status', @_$pwtab).text('Password Changed!').fadeIn(0).fadeOut(3000)
@_clearPasswords()
error: (jqXHR, textStatus, errorThrown) =>
$('.pwsave_status', @_$pwtab).text('Failed').fadeIn(0).fadeOut(3000)
$ul = $('ul#your_account_settings_chpw_msgs', @_$pwtab)
$ul.html('<li>Please enter the correct current password.</li>')
})
_saveContactPrefs: () ->
$('.contact_save_status', @_$pwtab).empty()
contact_prefs = $('#your_account_contact_preference_news', @_$pwtab).is(':checked')
#send current password and new password to change password server side
$('.contact_save_status', @_$pwtab).text('Saving...').fadeIn(0)
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', true)
$.ajax({
url: "../resources/accounts/"+IsadoreData.selfAccount.id
type: "PUT"
dataType: 'text'
data: {contact_news: contact_prefs}
success: (d) =>
$('.contact_save_status', @_$pwtab).text('Saved!').fadeIn(0).fadeOut(3000)
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
error: (jqXHR, textStatus, errorThrown) =>
$('.contact_save_status', @_$pwtab).text('Failed')
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
})
show: () ->
if !@_dialog.isOpen()
#Clear password fields.
@_clearPasswords()
@_dialog.open()
| true | # Copyright 2010-2019 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class window.YourAccountSettings
@_TEMPLATE: """
<div class="your_account_settings">
<ul>
<li><a href="#your_account_settings_details">Details</a></li>
<li><a href="#your_account_settings_alarms">Alarms</a></li>
</ul>
<div id="your_account_settings_details">
<h1>Change Password</h1>
<div><ul id="your_account_settings_chpw_msgs"></ul></div>
<div class="label">
<label for="your_account_settings_old_pw">Current Password</label>
</div>
<div>
<input type="password" id="your_account_settings_old_pw"/>
</div>
<div class="label">
<label for="your_account_settings_new_pw">New Password</label>
</div>
<div>
<input type="password" id="your_account_settings_new_pw"/>
</div>
<div class="label">
<label for="your_account_settings_new_pw2">Re-typed New Password</label>
</div>
<div>
<input type="password" id="your_account_settings_new_pw2"/>
</div>
<div class="label">
<button id="your_account_settings_save_pw" type="button" value="Save">Change Password</button><span class="pwsave_status"></span>
</div>
<h1>Contact Settings</h1>
<div class="label">Note: This setting does not effect alarms.</div>
<div>
<input type="checkbox" id="your_account_contact_preference_news"/><label for="your_account_contact_preference_news" class="label"> Contact me about Isadore news, status, and updates.</label>
</div>
<div class="label">
<button id="your_account_settings_save_contact" type="button" value="Save">Save</button><span class="contact_save_status"></span>
</div>
</div>
<div id="your_account_settings_alarms">
<input id="your_account_settings_alarms_disable_all" type="checkbox"/><label for="your_account_settings_alarms_disable_all">Disable all notifications</label>
<span style="font-weight: bold">Alarm:</span><select id="your_account_settings_alarms_select"></select>
<div id="your_account_settings_alarms_levels"></div>
<button id="your_account_settings_alarms_save" type="button" value="Save">Save</button><span class="ya_alarms_save_status"></span>
</div>
</div>
"""
constructor: ($parentWrap) ->
#TODO: Somewhere need to contact alarms system to get blacklist like things
$('.info_header > .account').html('<img src="imgs/icon_profile.png" style="position: relative; top: 6px;"/>'+IsadoreData.selfAccount.name).click(() =>
@show()
)
@_$selfdiv = $(window.YourAccountSettings._TEMPLATE)
$parentWrap.append(@_$selfdiv)
@_workingAlarmSettings = {}
@_$selfdiv.tabs({activate: () =>
if IsadoreData.selfAccount.configs.alarm_settings?
@_workingAlarmSettings = _.deepCopy(IsadoreData.selfAccount.configs.alarm_settings)
else
@_workingAlarmsSettings = {"alarm_setup": {}}
@_alarmSelect()
})
@_$selfdiv.tabs("option", "disabled", [1])
@_dialog = new IsadoreDialog(@_$selfdiv, {width: 700, title: 'Account Settings'});
@_$pwtab = $('#your_account_settings_details', @_$selfdiv)
@_$almtab = $('#your_account_settings_alarms', @_$selfdiv)
$('#your_account_settings_save_pw', @_$pwtab).click(() =>
@saveNewPassword()
)
$('#your_account_settings_old_pw, #your_account_settings_new_pw, #your_account_settings_new_pw2', @_$pwtab).on('input keypress focus blur change', () =>
@_validatePasswords()
)
$('#your_account_settings_save_contact', @_$pwtab).click(() =>
@_saveContactPrefs()
)
$("#your_account_settings_alarms_disable_all", @_$pwtab).change(() =>
@_updateWorkingAlarmSettings()
)
$select = $('#your_account_settings_alarms_select', @_$almtab)
if IsadoreData.general.configs.alarms?
for alarm_name, alarm_info of IsadoreData.general.configs.alarms
$select.append('<option value="'+alarm_name+'">'+alarm_name+'</option>')
$select.change(() =>
@_alarmSelect()
)
_alarmSelect: () ->
levels = ['Info', 'Warn', 'Concern', 'Urgent']
level_options= '<select multiple><option value="email">Email</option><option value="sms">SMS</option><option value="voice">Voice</option></select>'
$select = $('#your_account_settings_alarms_select', @_$almtab)
$alarmLevelDiv = $('#your_account_settings_alarms_levels', @_$almtab).empty()
alarmName = $select.val()
alarm = IsadoreData.general.configs.alarms[alarmName]
$alarmLevelDiv.append("<h1>#{alarmName}</h1>")
for level in levels
lclevel = level.toLowerCase()
if alarm.condition?.hasOwnProperty(lclevel)
$leveldiv = $('<div style="margin-left: 2ex;"></div>')
label_for = "your_account_settings_alarms_#{lclevel}"
$leveldiv.append('<div style="display:inline-block; vertical-align: top; margin-right: 2ex;">'+level+': <input id="#{label_for}_enabled" data-level="#{lclabel}" type="checkbox"/><label for="#{label_for}_enabled">Enabled</label></div>')
$select_level = $(level_options)
$select_level.attr('id', label_for+"_"+lclevel)
$select_level.attr('data-level', lclevel)
$leveldiv.append($select_level)
if alarm.level_descriptions?.hasOwnProperty(lclevel)
$leveldiv.append('<div style="display:inline-block; vertical-align: top; margin-left: 2ex;">'+alarm.level_descriptions[lclevel]+'</div>')
$alarmLevelDiv.append($leveldiv)
$select_level.change(() =>
@_updateWorkingAlarmSettings()
)
$('input', $leveldiv).change( () =>
@_updateWorkingAlarmSettings()
)
if @_workingAlarmSettings.alarm_setup?.hasOwnProperty(alarmName)
as = @_workingAlarmSettings.alarm_setup[alarmName]
for level, values of as
label_for = "your_account_settings_alarms_#{lclevel}"
$select_level = $('#'+label_for+"_"+level, $alarmLevelDiv)
if values.enabled
$('#'+label_for+'_enabled', $alarmLevelDiv).prop('checked', true)
selected = []
if values.email
selected.push('email')
if values.sms
selected.push('sms')
if values.voice
selected.push('voice')
$select_level.val(values)
_updateWorkingAlarmSettings: () ->
alarmName = $('#your_account_settings_alarms_select', @_$almtab).val()
if not @_workingAlarmSettings.alarm_setup.hasOwnProperty(alarmName)
@_workingAlarmSettings.alarm_setup[alarmName] = {}
$('input[type="checkbox"]', $alarmLevelDiv).each((index, value) =>
$value = $(value)
level = $value.attr('data-level')
if not @_workingAlarmSettings.alarm_setup[alarmName].hasOwnProperty(level)
@_workingAlarmSettings.alarm_setup[alarmName][level] = {}
level_enabled = $value.is(':checked')
@_workingAlarmSettings.alarm_setup[alarmName][level].enabled = level_enabled
)
$('select', $alarmLevelDiv).each((index, value) =>
$value = $(value)
level = $value.attr('data-level')
selects = $value.val()
@_workingAlarmSettings.alarm_setup[alarmName][level].sms = false
@_workingAlarmSettings.alarm_setup[alarmName][level].email = false
@_workingAlarmSettings.alarm_setup[alarmName][level].voice = false
for s in selects
@_workingAlarmSettings.alarm_setup[alarmName][level][s]=true
)
_saveAlarmSettings: () ->
i = 1
#TODO: save account config with new alarm_setup
#TODO: Update IsadoreData.selfAccount
_validatePasswords: (hard=false) ->
console.log('_validatePasswords')
#TODO: Add zxcvbn password strength meter
#https://github.com/dropbox/zxcvbn
oldPW = $('#your_account_settings_old_pw', @_$pwtab).val()
newPW = $('#your_account_settings_new_pw', @_$pwtab).val()
newPW2 = $('#your_account_settings_new_pw2', @_$pwtab).val()
errors = []
if (!oldPW)
errors.push('Please enter your old password.')
console.log(newPW)
console.log(newPW2)
$ul = $('ul#your_account_settings_chpw_msgs', @_$pwtab)
if (!newPW || !newPW2 || newPW != newPW2)
errors.push('New password and Re-typed Password must match.')
if errors.length == 0
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', false)
$ul.empty()
else
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
msg = ''
for err in errors
msg += '<li>'+err+'</li>'
$ul.html(msg)
_clearPasswords: () ->
$('ul#your_account_settings_chpw_msgs', @_$pwtab).empty()
$('input[type="password"]', @_$pwtab).empty()
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
$('.contact_save_status', @_$pwtab).empty()
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
$('#your_account_contact_preference_news', @_$pwtab).prop('checked', IsadoreData.selfAccount.contact_news)
saveNewPassword: () ->
oldPW = $('#your_account_settings_old_pw', @_$pwtab).val()
newPW = $('#your_account_settings_new_pw', @_$pwtab).val()
#send current password and new password to change password server side
$('.pwsave_status', @_$pwtab).text('Saving...').fadeIn(0)
$('#your_account_settings_save_pw', @_$pwtab).prop('disabled', true)
$.ajax({
url: "../resources/accounts/password"
type: "PUT"
dataType: 'text'
data: {old_password: PI:PASSWORD:<PASSWORD>END_PI, new_password: PI:PASSWORD:<PASSWORD>END_PI}
success: (d) =>
$('.pwsave_status', @_$pwtab).text('Password Changed!').fadeIn(0).fadeOut(3000)
@_clearPasswords()
error: (jqXHR, textStatus, errorThrown) =>
$('.pwsave_status', @_$pwtab).text('Failed').fadeIn(0).fadeOut(3000)
$ul = $('ul#your_account_settings_chpw_msgs', @_$pwtab)
$ul.html('<li>Please enter the correct current password.</li>')
})
_saveContactPrefs: () ->
$('.contact_save_status', @_$pwtab).empty()
contact_prefs = $('#your_account_contact_preference_news', @_$pwtab).is(':checked')
#send current password and new password to change password server side
$('.contact_save_status', @_$pwtab).text('Saving...').fadeIn(0)
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', true)
$.ajax({
url: "../resources/accounts/"+IsadoreData.selfAccount.id
type: "PUT"
dataType: 'text'
data: {contact_news: contact_prefs}
success: (d) =>
$('.contact_save_status', @_$pwtab).text('Saved!').fadeIn(0).fadeOut(3000)
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
error: (jqXHR, textStatus, errorThrown) =>
$('.contact_save_status', @_$pwtab).text('Failed')
$('#your_account_settings_save_contact', @_$pwtab).prop('disabled', false)
})
show: () ->
if !@_dialog.isOpen()
#Clear password fields.
@_clearPasswords()
@_dialog.open()
|
[
{
"context": " the artist top track Spotify URL>\n#\n# Author:\n# Lucas Moore <hey@thelucasmoore.com>\n\nmodule.exports = (robot)",
"end": 325,
"score": 0.9998676180839539,
"start": 314,
"tag": "NAME",
"value": "Lucas Moore"
},
{
"context": "p track Spotify URL>\n#\n# Author:\n# ... | src/spotify.coffee | TheLucasMoore/hubot-spotify-tracks | 0 | # Description
# grabs an artists top tracks
#
# Configuration:
# None needed. Uses Spotify Public API and my Last.fm API key. Youre welcome.
#
# Commands:
# spotify ARTIST NAME - <returns the artists top tracks Spotify URL>
# jam to ARTIST NAME - <returns the artist top track Spotify URL>
#
# Author:
# Lucas Moore <hey@thelucasmoore.com>
module.exports = (robot) ->
robot.hear /spotify (.*)/i, (response) ->
artistName = response.match[1].toLowerCase()
if artistName is "hoobastank"
response.send "Sorry, I only search for music."
else
searchName = artistName.replace(" ", "+")
robot.http("http://ws.audioscrobbler.com/2.0/?method=artist.getinfo&artist=#{searchName}&api_key=66e74ba0c979b3e6f0613f6830fc21a1&format=json")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
bio = data.artist.bio.summary.split("<a")
response.send "#{bio[0]}"
robot.http("https://api.spotify.com/v1/search?q=#{searchName}&type=artist")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
response.send "#{data.artists.items[0].external_urls.spotify}"
robot.hear /jam to (.*)/i, (response) ->
artistName = response.match[1]
searchName = artistName.replace(" ", "+")
robot.http("https://api.spotify.com/v1/search?q=#{searchName}&type=artist")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
id = data.artists.items[0].id
robot.http("https://api.spotify.com/v1/artists/#{id}/top-tracks?country=US")
.get() (err, res, body) ->
data = JSON.parse body
url = data.tracks[0].external_urls.spotify
artist_name = artistName.charAt(0).toUpperCase() + artistName.slice(1);
track_name = data.tracks[0].name
response.send "#{artist_name}'s top hit is #{track_name}.\n#{url}" | 203287 | # Description
# grabs an artists top tracks
#
# Configuration:
# None needed. Uses Spotify Public API and my Last.fm API key. Youre welcome.
#
# Commands:
# spotify ARTIST NAME - <returns the artists top tracks Spotify URL>
# jam to ARTIST NAME - <returns the artist top track Spotify URL>
#
# Author:
# <NAME> <<EMAIL>>
module.exports = (robot) ->
robot.hear /spotify (.*)/i, (response) ->
artistName = response.match[1].toLowerCase()
if artistName is "hoobastank"
response.send "Sorry, I only search for music."
else
searchName = artistName.replace(" ", "+")
robot.http("http://ws.audioscrobbler.com/2.0/?method=artist.getinfo&artist=#{searchName}&api_key=<KEY>&format=json")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
bio = data.artist.bio.summary.split("<a")
response.send "#{bio[0]}"
robot.http("https://api.spotify.com/v1/search?q=#{searchName}&type=artist")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
response.send "#{data.artists.items[0].external_urls.spotify}"
robot.hear /jam to (.*)/i, (response) ->
artistName = response.match[1]
searchName = artistName.replace(" ", "+")
robot.http("https://api.spotify.com/v1/search?q=#{searchName}&type=artist")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
id = data.artists.items[0].id
robot.http("https://api.spotify.com/v1/artists/#{id}/top-tracks?country=US")
.get() (err, res, body) ->
data = JSON.parse body
url = data.tracks[0].external_urls.spotify
artist_name = artistName.charAt(0).toUpperCase() + artistName.slice(1);
track_name = data.tracks[0].name
response.send "#{artist_name}'s top hit is #{track_name}.\n#{url}" | true | # Description
# grabs an artists top tracks
#
# Configuration:
# None needed. Uses Spotify Public API and my Last.fm API key. Youre welcome.
#
# Commands:
# spotify ARTIST NAME - <returns the artists top tracks Spotify URL>
# jam to ARTIST NAME - <returns the artist top track Spotify URL>
#
# Author:
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
module.exports = (robot) ->
robot.hear /spotify (.*)/i, (response) ->
artistName = response.match[1].toLowerCase()
if artistName is "hoobastank"
response.send "Sorry, I only search for music."
else
searchName = artistName.replace(" ", "+")
robot.http("http://ws.audioscrobbler.com/2.0/?method=artist.getinfo&artist=#{searchName}&api_key=PI:KEY:<KEY>END_PI&format=json")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
bio = data.artist.bio.summary.split("<a")
response.send "#{bio[0]}"
robot.http("https://api.spotify.com/v1/search?q=#{searchName}&type=artist")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
response.send "#{data.artists.items[0].external_urls.spotify}"
robot.hear /jam to (.*)/i, (response) ->
artistName = response.match[1]
searchName = artistName.replace(" ", "+")
robot.http("https://api.spotify.com/v1/search?q=#{searchName}&type=artist")
.get() (err, res, body) ->
if err
response.send "Oh noes! #{err}"
return
data = JSON.parse body
id = data.artists.items[0].id
robot.http("https://api.spotify.com/v1/artists/#{id}/top-tracks?country=US")
.get() (err, res, body) ->
data = JSON.parse body
url = data.tracks[0].external_urls.spotify
artist_name = artistName.charAt(0).toUpperCase() + artistName.slice(1);
track_name = data.tracks[0].name
response.send "#{artist_name}'s top hit is #{track_name}.\n#{url}" |
[
{
"context": "cess.env.HUBOT_WHATSAPP_NICKNAME\n password: process.env.HUBOT_WHATSAPP_PASSWORD\n ccode: process.env.HUBOT_WHATSAPP_COUNTRY",
"end": 479,
"score": 0.9992150664329529,
"start": 444,
"tag": "PASSWORD",
"value": "process.env.HUBOT_WHATSAPP_PASSWORD"
}
] | src/whatsapp.coffee | jannickfahlbusch/hubot-whatsapp | 7 | {Robot, Adapter, TextMessage, EnterMessage, LeaveMessage, TopicMessage} = require 'hubot'
whatsapi = require('whatsapi');
class Whatsapp extends Adapter
constructor: (@robot) ->
@robot = robot
receive: (message) ->
@robot.receive message
run: ->
self = @
@wa = whatsapi.createAdapter({
msisdn: process.env.HUBOT_WHATSAPP_PHONENUMBER
username: process.env.HUBOT_WHATSAPP_NICKNAME
password: process.env.HUBOT_WHATSAPP_PASSWORD
ccode: process.env.HUBOT_WHATSAPP_COUNTRYCODE
}, false)
@wa.connect (err) ->
if err
console.log err
return
console.log 'Connected'
self.wa.login self.logged()
@wa.on 'receivedMessage', (message) ->
self.newMsg message
@emit 'connected'
newMsg: (message) ->
from = message.from.split('@')[0]
@robot.receive new TextMessage from, message.body, message.id
logged: (err) ->
if err
console.log err
return
console.log 'Logged in'
@wa.sendMessage process.env.HUBOT_WHATSAPP_OWNERNUMBER, 'I started successfully', (err, id) ->
if err
console.log err
return
console.log 'Server recieved message ' + id
@sendIsOnline()
send: (envelope, strings...) ->
console.log 'Sending reply'
console.log envelope
console.log strings
recipient = envelope.user
for msg in strings
@wa.sendMessage recipient, msg, (err, id) ->
if err
console.log 'There was an ERROR!'
console.log err.message
return
console.log 'Server received message %s', id
return
emote: (envelope, strings...) ->
console.log 'emote'
@send envelope, "* #{str}" for str in strings
reply: (envelope, strings...) ->
console.log 'reply'
strings = strings.map (s) -> "#{envelope.user.name}: #{s}"
@send envelope, strings...
exports.use = (robot) ->
new Whatsapp robot
| 208460 | {Robot, Adapter, TextMessage, EnterMessage, LeaveMessage, TopicMessage} = require 'hubot'
whatsapi = require('whatsapi');
class Whatsapp extends Adapter
constructor: (@robot) ->
@robot = robot
receive: (message) ->
@robot.receive message
run: ->
self = @
@wa = whatsapi.createAdapter({
msisdn: process.env.HUBOT_WHATSAPP_PHONENUMBER
username: process.env.HUBOT_WHATSAPP_NICKNAME
password: <PASSWORD>
ccode: process.env.HUBOT_WHATSAPP_COUNTRYCODE
}, false)
@wa.connect (err) ->
if err
console.log err
return
console.log 'Connected'
self.wa.login self.logged()
@wa.on 'receivedMessage', (message) ->
self.newMsg message
@emit 'connected'
newMsg: (message) ->
from = message.from.split('@')[0]
@robot.receive new TextMessage from, message.body, message.id
logged: (err) ->
if err
console.log err
return
console.log 'Logged in'
@wa.sendMessage process.env.HUBOT_WHATSAPP_OWNERNUMBER, 'I started successfully', (err, id) ->
if err
console.log err
return
console.log 'Server recieved message ' + id
@sendIsOnline()
send: (envelope, strings...) ->
console.log 'Sending reply'
console.log envelope
console.log strings
recipient = envelope.user
for msg in strings
@wa.sendMessage recipient, msg, (err, id) ->
if err
console.log 'There was an ERROR!'
console.log err.message
return
console.log 'Server received message %s', id
return
emote: (envelope, strings...) ->
console.log 'emote'
@send envelope, "* #{str}" for str in strings
reply: (envelope, strings...) ->
console.log 'reply'
strings = strings.map (s) -> "#{envelope.user.name}: #{s}"
@send envelope, strings...
exports.use = (robot) ->
new Whatsapp robot
| true | {Robot, Adapter, TextMessage, EnterMessage, LeaveMessage, TopicMessage} = require 'hubot'
whatsapi = require('whatsapi');
class Whatsapp extends Adapter
constructor: (@robot) ->
@robot = robot
receive: (message) ->
@robot.receive message
run: ->
self = @
@wa = whatsapi.createAdapter({
msisdn: process.env.HUBOT_WHATSAPP_PHONENUMBER
username: process.env.HUBOT_WHATSAPP_NICKNAME
password: PI:PASSWORD:<PASSWORD>END_PI
ccode: process.env.HUBOT_WHATSAPP_COUNTRYCODE
}, false)
@wa.connect (err) ->
if err
console.log err
return
console.log 'Connected'
self.wa.login self.logged()
@wa.on 'receivedMessage', (message) ->
self.newMsg message
@emit 'connected'
newMsg: (message) ->
from = message.from.split('@')[0]
@robot.receive new TextMessage from, message.body, message.id
logged: (err) ->
if err
console.log err
return
console.log 'Logged in'
@wa.sendMessage process.env.HUBOT_WHATSAPP_OWNERNUMBER, 'I started successfully', (err, id) ->
if err
console.log err
return
console.log 'Server recieved message ' + id
@sendIsOnline()
send: (envelope, strings...) ->
console.log 'Sending reply'
console.log envelope
console.log strings
recipient = envelope.user
for msg in strings
@wa.sendMessage recipient, msg, (err, id) ->
if err
console.log 'There was an ERROR!'
console.log err.message
return
console.log 'Server received message %s', id
return
emote: (envelope, strings...) ->
console.log 'emote'
@send envelope, "* #{str}" for str in strings
reply: (envelope, strings...) ->
console.log 'reply'
strings = strings.map (s) -> "#{envelope.user.name}: #{s}"
@send envelope, strings...
exports.use = (robot) ->
new Whatsapp robot
|
[
{
"context": "'\n 'emm':\n 'prefix': 'EMM'\n 'body': 'Hola Elena :)'\n 'exp':\n 'prefix': 'EXP'\n 'body': 'EXP",
"end": 1601,
"score": 0.7481861710548401,
"start": 1597,
"tag": "NAME",
"value": "lena"
}
] | snippets/language-gsql.cson | alejandropoveda/language-gsql | 7 | '.source.gsql':
'load table':
'prefix': 'LOAD'
'body': 'LOAD ${1:filename}'
'flatten':
'prefix': 'FLATTEN'
'body': 'FLATTEN(${1:var})'
'matches':
'prefix': 'MATCHES'
'body': "MATCHES '${1:pattern}'"
'average':
'prefix': 'AVG'
'body': 'AVG(${1:expr})'
'concat':
'prefix': 'CONCAT'
'body': 'CONCAT(${1:expr}, ${2:expr})'
'count':
'prefix': 'COUNT'
'body': 'COUNT(${1:expr})'
'count_star':
'prefix': 'COUNT_STAR'
'body': 'COUNT_STAR(${1:expr})'
'diff':
'prefix': 'DIFF'
'body': 'DIFF(${1:var}, ${2:expr})'
'IsEmpty':
'prefix': 'IsEmpty'
'body': 'IsEmpty(${1:expr})'
'max':
'prefix': 'MAX'
'body': 'MAX(${1:expr})'
'min':
'prefix': 'MIN'
'body': 'MIN(${1:expr})'
'size':
'prefix': 'SIZE'
'body': 'SIZE(${1:expr})'
'sum':
'prefix': 'SUM'
'body': 'SUM(${1:expr})'
'tokenize':
'prefix': 'TOKENIZE'
'body': 'TOKENIZE(${1:expr})'
'tokenize w/ delim':
'prefix': 'TOKENIZE'
'body': "TOKENIZE(${1:expr}, '${2:field_delimiter}')"
'abs':
'prefix': 'ABS'
'body': 'ABS(${1:expr})'
'acos':
'prefix': 'ACOS'
'body': 'ACOS(${1:expr})'
'asin':
'prefix': 'ASIN'
'body': 'ASIN(${1:expr})'
'atan':
'prefix': 'ATAN'
'body': 'ATAN(${1:expr})'
'cbrt':
'prefix': 'CBRT'
'body': 'CBRT(${1:expr})'
'ceil':
'prefix': 'CEIL'
'body': 'CEIL(${1:expr})'
'cos':
'prefix': 'COS'
'body': 'COS(${1:expr})'
'cosh':
'prefix': 'COSH'
'body': 'COSH(${1:expr})'
'emm':
'prefix': 'EMM'
'body': 'Hola Elena :)'
'exp':
'prefix': 'EXP'
'body': 'EXP(${1:expr})'
'floor':
'prefix': 'FLOOR'
'body': 'FLOOR(${1:expr})'
'log':
'prefix': 'LOG'
'body': 'LOG(${1:expr})'
'log10':
'prefix': 'LOG10'
'body': 'LOG10(${1:expr})'
'random':
'prefix': 'RANDOM'
'body': 'RANDOM(${1:expr})'
'round':
'prefix': 'ROUND'
'body': 'ROUND(${1:expr})'
'sin':
'prefix': 'SIN'
'body': 'SIN(${1:expr})'
'sinh':
'prefix': 'SINH'
'body': 'SINH(${1:expr})'
'sqrt':
'prefix': 'SQRT'
'body': 'SQRT(${1:expr})'
'tan':
'prefix': 'TAN'
'body': 'TAN(${1:expr})'
'tanh':
'prefix': 'TANH'
'body': 'TANH(${1:expr})'
'indexof':
'prefix': 'INDEXOF'
'body': "INDEXOF(${1:string}, '${2:character}, ${3:startIndex})"
'last_index_of':
'prefix': 'LAST_INDEX_OF'
'body': "LAST_INDEX_OF(${1:string}, '${2:character}', ${3:startIndex})"
'lower':
'prefix': 'LOWER'
'body': 'LOWER(${1:string})'
'regex_extract':
'prefix': 'REGEX_EXTRACT'
'body': "REGEX_EXTRACT(${1:string}, '${2:expression}', ${3:index})"
'regex_extract_all':
'prefix': 'REGEX_EXTRACT_ALL '
'body': "REGEX_EXTRACT_ALL(${1:string}, '${2:expression}')"
'replace':
'prefix': 'REPLACE'
'body': "REPLACE(${1:string}, '${2:oldChar}', '${3:newChar}')"
'strsplit':
'prefix': 'STRSPLIT'
'body': "STRSPLIT(${1:string}, '${2:expression}', ${3:limit})'"
'substring':
'prefix': 'SUBSTRING'
'body': "SUBSTRING(${1:string}, ${2:startIndex}, ${3:stopIndex})"
'trim':
'prefix': 'TRIM'
'body': 'TRIM(${1:string})'
'upper':
'prefix': 'UPPER'
'body': 'UPPER(${1:string})'
"LTRIM":
'prefix': "LTRIM"
"body" : "LTRIM(${1:string})"
"RTRIM":
'prefix': "RTRIM"
"body" : "RTRIM(${1:string})"
"int":
'prefix': "int"
"long":
'prefix': "long"
"float":
'prefix': "float"
"double":
'prefix': "double"
"chararray":
'prefix': "chararray"
"bytearray":
'prefix': "bytearray"
"boolean":
'prefix': "boolean"
"datetime":
'prefix': "datetime"
"tuple":
'prefix': "tuple"
"bag":
'prefix': "bag"
"map":
'prefix': "map"
"ALL ":
'prefix': "ALL "
"AS ":
'prefix': "AS "
"ASSERT ":
'prefix': "ASSERT "
"BY ":
'prefix': "BY "
"CROSS ":
'prefix': "CROSS "
"DEFINE ":
'prefix': "DEFINE "
"DISTINCT ":
'prefix': "DISTINCT "
"FILTER ":
'prefix': "FILTER "
"FOREACH ":
'prefix': "FOREACH "
"GROUP ":
'prefix': "GROUP "
"IMPORT ":
'prefix': "IMPORT "
"INTO ":
'prefix': "INTO "
"JOIN ":
'prefix': "JOIN "
"LIMIT ":
'prefix': "LIMIT "
"LOAD ":
'prefix': "LOAD "
"MAPREDUCE ":
'prefix': "MAPREDUCE "
"ORDER":
'prefix': "ORDER BY "
"SAMPLE ":
'prefix': "SAMPLE "
"SPLIT ":
'prefix': "SPLIT "
"STORE ":
'prefix': "STORE "
"STREAM ":
'prefix': "STREAM "
"THROUGH ":
'prefix': "THROUGH "
"UNION ":
'prefix': "UNION "
"RETURNS ":
'prefix': "RETURNS "
"RANK ":
'prefix': "RANK "
"CUBE ":
'prefix': "CUBE "
"ROLLUP ":
'prefix': "ROLLUP "
"DESC ":
'prefix': "DESC "
"DUMP ":
'prefix': "DUMP "
"DESCRIBE ":
'prefix': "DESCRIBE "
"EXPLAIN ":
'prefix': "EXPLAIN "
"GENERATE ":
'prefix': "GENERATE "
"ILLUSTRATE ":
'prefix': "ILLUSTRATE "
"USING ":
'prefix': "USING "
# # "abs":
# # 'prefix': "abs"
# # "acos":
# # 'prefix': "acos"
# "add_days":
# 'prefix': "add_days"
# "add_months":
# 'prefix': "add_months"
# "and": 'prefix': "and"
# "append_array": 'prefix': "append_array"
# "array": 'prefix': "array"
# "array_contains": 'prefix': "array_contains"
# "array_freq_count": 'prefix': "array_freq_count"
# "array_index": 'prefix': "array_index"
# "ascii": 'prefix': "ascii"
# "asin": 'prefix': "asin"
# "assert_equals": 'prefix': "assert_equals"
# "assert_less_than": 'prefix': "assert_less_than"
# "assert_true": 'prefix': "assert_true"
# "atan": 'prefix': "atan"
# "avg": 'prefix': "avg"
# "base64": 'prefix': "base64"
# "between": 'prefix': "between"
# "bin": 'prefix': "bin"
# "bloom": 'prefix': "bloom"
# "bloom_and": 'prefix': "bloom_and"
# "bloom_contains": 'prefix': "bloom_contains"
# "bloom_not": 'prefix': "bloom_not"
# "bloom_or": 'prefix': "bloom_or"
# "booking_week": 'prefix': "booking_week"
# "case": 'prefix': "case"
# "cast_array": 'prefix': "cast_array"
# "cast_map": 'prefix': "cast_map"
# "ceil": 'prefix': "ceil"
# "ceiling": 'prefix': "ceiling"
# "coalesce": 'prefix': "coalesce"
# "collect": 'prefix': "collect"
# "collect_freq_count": 'prefix': "collect_freq_count"
# "collect_list": 'prefix': "collect_list"
# "collect_max": 'prefix': "collect_max"
# "collect_merge_max": 'prefix': "collect_merge_max"
# "collect_set": 'prefix': "collect_set"
# "combine": 'prefix': "combine"
# "combine_hyperloglog": 'prefix': "combine_hyperloglog"
# "combine_previous_sketch": 'prefix': "combine_previous_sketch"
# "combine_sketch": 'prefix': "combine_sketch"
# "combine_unique": 'prefix': "combine_unique"
# "compute_stats": 'prefix': "compute_stats"
# "concat": 'prefix': "concat"
# "concat_ws": 'prefix': "concat_ws"
# "conditional_emit": 'prefix': "conditional_emit"
# "context_ngrams": 'prefix': "context_ngrams"
# "conv": 'prefix': "conv"
# "convert_to_sketch": 'prefix': "convert_to_sketch"
# "corr": 'prefix': "corr"
# "cos": 'prefix': "cos"
# "count": 'prefix': "count"
# "covar_pop": 'prefix': "covar_pop"
# "covar_samp": 'prefix': "covar_samp"
# "create_union": 'prefix': "create_union"
# "cume_dist": 'prefix': "cume_dist"
# "current_database": 'prefix': "current_database"
# "current_date": 'prefix': "current_date"
# "current_timestamp": 'prefix': "current_timestamp"
# "current_user": 'prefix': "current_user"
# "date_add": 'prefix': "date_add"
# "date_range": 'prefix': "date_range"
# "date_sub": 'prefix': "date_sub"
# "date_to_start_quarter": 'prefix': "date_to_start_quarter"
# "date_to_start_week": 'prefix': "date_to_start_week"
# "datediff": 'prefix': "datediff"
# "day": 'prefix': "day"
# "dayofmonth": 'prefix': "dayofmonth"
# "decode": 'prefix': "decode"
# "degrees": 'prefix': "degrees"
# "dense_rank": 'prefix': "dense_rank"
# "distributed_bloom": 'prefix': "distributed_bloom"
# "distributed_map": 'prefix': "distributed_map"
# "div": 'prefix': "div"
# "e": 'prefix': "e"
# "elt": 'prefix': "elt"
# "encode": 'prefix': "encode"
# "estimated_reach": 'prefix': "estimated_reach"
# "event_parser": 'prefix': "event_parser"
# "ewah_bitmap": 'prefix': "ewah_bitmap"
# "ewah_bitmap_and": 'prefix': "ewah_bitmap_and"
# "ewah_bitmap_empty": 'prefix': "ewah_bitmap_empty"
# "ewah_bitmap_or": 'prefix': "ewah_bitmap_or"
# "exp": 'prefix': "exp"
# "experiments": 'prefix': "experiments"
# "explode": 'prefix': "explode"
# "field": 'prefix': "field"
# "find_in_set": 'prefix': "find_in_set"
# "first_index": 'prefix': "first_index"
# "first_value": 'prefix': "first_value"
# "flatten_array": 'prefix': "flatten_array"
# "floor": 'prefix': "floor"
# "format_number": 'prefix': "format_number"
# "from_camel_case": 'prefix': "from_camel_case"
# "from_json": 'prefix': "from_json"
# "from_unixtime": 'prefix': "from_unixtime"
# "from_utc_timestamp": 'prefix': "from_utc_timestamp"
# "get_json_object": 'prefix': "get_json_object"
# "geturl": 'prefix': "geturl"
# "greatest": 'prefix': "greatest"
# "group_concat": 'prefix': "group_concat"
# "group_count": 'prefix': "group_count"
# "grouped_rank": 'prefix': "grouped_rank"
# "hash": 'prefix': "hash"
# "hash_md5": 'prefix': "hash_md5"
# "hbase_balanced_key": 'prefix': "hbase_balanced_key"
# "hbase_batch_get": 'prefix': "hbase_batch_get"
# "hbase_batch_put": 'prefix': "hbase_batch_put"
# "hbase_cached_get": 'prefix': "hbase_cached_get"
# "hbase_get": 'prefix': "hbase_get"
# "hbase_put": 'prefix': "hbase_put"
# "hex": 'prefix': "hex"
# "histogram_numeric": 'prefix': "histogram_numeric"
# "hll_est_cardinality": 'prefix': "hll_est_cardinality"
# "hour": 'prefix': "hour"
# "hyperloglog": 'prefix': "hyperloglog"
# "if": 'prefix': "if"
# "in": 'prefix': "in"
# "in_file": 'prefix': "in_file"
# "index": 'prefix': "index"
# "inet_aton": 'prefix': "inet_aton"
# "inet_ntoa": 'prefix': "inet_ntoa"
# "initcap": 'prefix': "initcap"
# "inline": 'prefix': "inline"
# "instr": 'prefix': "instr"
# "intersect_array": 'prefix': "intersect_array"
# "ip2country": 'prefix': "ip2country"
# "ip2latlon": 'prefix': "ip2latlon"
# "ip2timezone": 'prefix': "ip2timezone"
# "ipcountry": 'prefix': "ipcountry"
# "isnotnull": 'prefix': "isnotnull"
# "isnull": 'prefix': "isnull"
# "java_method": 'prefix': "java_method"
# "join_array": 'prefix': "join_array"
# "json_map": 'prefix': "json_map"
# "json_split": 'prefix': "json_split"
# "json_tuple": 'prefix': "json_tuple"
# "label": 'prefix': "label"
# "lag": 'prefix': "lag"
# "last_day": 'prefix': "last_day"
# "last_index": 'prefix': "last_index"
# "last_value": 'prefix': "last_value"
# "lcase": 'prefix': "lcase"
# "lead": 'prefix': "lead"
# "least": 'prefix': "least"
# "length": 'prefix': "length"
# "like": 'prefix': "like"
# "ln": 'prefix': "ln"
# "locate": 'prefix': "locate"
# "log": 'prefix': "log"
# "log10": 'prefix': "log10"
# "log2": 'prefix': "log2"
# "lower": 'prefix': "lower"
# "lpad": 'prefix': "lpad"
# "ltrim": 'prefix': "ltrim"
# "map": 'prefix': "map"
# "map_filter_keys": 'prefix': "map_filter_keys"
# "map_index": 'prefix': "map_index"
# "map_key_values": 'prefix': "map_key_values"
# "map_keys": 'prefix': "map_keys"
# "map_mode": 'prefix': "map_mode"
# "map_values": 'prefix': "map_values"
# "matchpath": 'prefix': "matchpath"
# "max": 'prefix': "max"
# "md5": 'prefix': "md5"
# "min": 'prefix': "min"
# "minute": 'prefix': "minute"
# "month": 'prefix': "month"
# "moving_avg": 'prefix': "moving_avg"
# "multiday_count": 'prefix': "multiday_count"
# "named_struct": 'prefix': "named_struct"
# "negative": 'prefix': "negative"
# "next_day": 'prefix': "next_day"
# "ngrams": 'prefix': "ngrams"
# "noop": 'prefix': "noop"
# "noopstreaming": 'prefix': "noopstreaming"
# "noopwithmap": 'prefix': "noopwithmap"
# "noopwithmapstreaming": 'prefix': "noopwithmapstreaming"
# "not": 'prefix': "not"
# "now": 'prefix': "now"
# "ntile": 'prefix': "ntile"
# "numeric_range": 'prefix': "numeric_range"
# "nvl": 'prefix': "nvl"
# "or": 'prefix': "or"
# "parse_url": 'prefix': "parse_url"
# "parse_url_tuple": 'prefix': "parse_url_tuple"
# "percent_rank": 'prefix': "percent_rank"
# "percentile": 'prefix': "percentile"
# "percentile_approx": 'prefix': "percentile_approx"
# "pi": 'prefix': "pi"
# "pmod": 'prefix': "pmod"
# "posexplode": 'prefix': "posexplode"
# "positive": 'prefix': "positive"
# "pow": 'prefix': "pow"
# "power": 'prefix': "power"
# "printf": 'prefix': "printf"
# "quarter": 'prefix': "quarter"
# "radians": 'prefix': "radians"
# "rand": 'prefix': "rand"
# "rank": 'prefix': "rank"
# "ranked_long_diff": 'prefix': "ranked_long_diff"
# "ranked_long_sum": 'prefix': "ranked_long_sum"
# "reflect": 'prefix': "reflect"
# "reflect2": 'prefix': "reflect2"
# "regexp": 'prefix': "regexp"
# "regexp_extract": 'prefix': "regexp_extract"
# "regexp_replace": 'prefix': "regexp_replace"
# "repeat": 'prefix': "repeat"
# "reverse": 'prefix': "reverse"
# "rlike": 'prefix': "rlike"
# "round": 'prefix': "round"
# "row_number": 'prefix': "row_number"
# "row_sequence": 'prefix': "row_sequence"
# "rpad": 'prefix': "rpad"
# "rtrim": 'prefix': "rtrim"
# "running_count": 'prefix': "running_count"
# "running_sum": 'prefix': "running_sum"
# "salted_bigint": 'prefix': "salted_bigint"
# "salted_bigint_key": 'prefix': "salted_bigint_key"
# "second": 'prefix': "second"
# "sentences": 'prefix': "sentences"
# "set_difference": 'prefix': "set_difference"
# "set_similarity": 'prefix': "set_similarity"
# "sha1": 'prefix': "sha1"
# "sha2": 'prefix': "sha2"
# "sign": 'prefix': "sign"
# "sin": 'prefix': "sin"
# "size": 'prefix': "size"
# "sketch_hashes": 'prefix': "sketch_hashes"
# "sketch_set": 'prefix': "sketch_set"
# "sort_array": 'prefix': "sort_array"
# "space": 'prefix': "space"
# "split": 'prefix': "split"
# "sqrt": 'prefix': "sqrt"
# "stack": 'prefix': "stack"
# "std": 'prefix': "std"
# "stddev": 'prefix': "stddev"
# "stddev_pop": 'prefix': "stddev_pop"
# "stddev_samp": 'prefix': "stddev_samp"
# "str_to_map": 'prefix': "str_to_map"
# "struct": 'prefix': "struct"
# "substr": 'prefix': "substr"
# "substring": 'prefix': "substring"
# "sum": 'prefix': "sum"
# "sum_array": 'prefix': "sum_array"
# "tan": 'prefix': "tan"
# "throw_error": 'prefix': "throw_error"
# "to_camel_case": 'prefix': "to_camel_case"
# "to_date": 'prefix': "to_date"
# "to_json": 'prefix': "to_json"
# "to_unix_timestamp": 'prefix': "to_unix_timestamp"
# "to_utc_timestamp": 'prefix': "to_utc_timestamp"
# "translate": 'prefix': "translate"
# "trim": 'prefix': "trim"
# "truncate_array": 'prefix': "truncate_array"
# "ucase": 'prefix': "ucase"
# "unbase64": 'prefix': "unbase64"
# "unhex": 'prefix': "unhex"
# "union_hyperloglog": 'prefix': "union_hyperloglog"
# "union_map": 'prefix': "union_map"
# "union_max": 'prefix': "union_max"
# "union_sketch": 'prefix': "union_sketch"
# "union_vector_sum": 'prefix': "union_vector_sum"
# "unix_timestamp": 'prefix': "unix_timestamp"
# "upper": 'prefix': "upper"
# "var_pop": 'prefix': "var_pop"
# "var_samp": 'prefix': "var_samp"
# "variance": 'prefix': "variance"
# "vector_add": 'prefix': "vector_add"
# "vector_cross_product": 'prefix': "vector_cross_product"
# "vector_dot_product": 'prefix': "vector_dot_product"
# "vector_magnitude": 'prefix': "vector_magnitude"
# "vector_scalar_mult": 'prefix': "vector_scalar_mult"
# "visitor_deprecated": 'prefix': "visitor_deprecated"
# "weekofyear": 'prefix': "weekofyear"
# "when": 'prefix': "when"
# "windowingtablefunction": 'prefix': "windowingtablefunction"
# "write_to_graphite": 'prefix': "write_to_graphite"
# "write_to_tsdb": 'prefix': "write_to_tsdb"
# "xpath": 'prefix': "xpath"
# "xpath_boolean": 'prefix': "xpath_boolean"
# "xpath_double": 'prefix': "xpath_double"
# "xpath_float": 'prefix': "xpath_float"
# "xpath_int": 'prefix': "xpath_int"
# "xpath_long": 'prefix': "xpath_long"
# "xpath_number": 'prefix': "xpath_number"
# "xpath_short": 'prefix': "xpath_short"
# "xpath_string": 'prefix': "xpath_string"
# "year": 'prefix': "year"
| 170785 | '.source.gsql':
'load table':
'prefix': 'LOAD'
'body': 'LOAD ${1:filename}'
'flatten':
'prefix': 'FLATTEN'
'body': 'FLATTEN(${1:var})'
'matches':
'prefix': 'MATCHES'
'body': "MATCHES '${1:pattern}'"
'average':
'prefix': 'AVG'
'body': 'AVG(${1:expr})'
'concat':
'prefix': 'CONCAT'
'body': 'CONCAT(${1:expr}, ${2:expr})'
'count':
'prefix': 'COUNT'
'body': 'COUNT(${1:expr})'
'count_star':
'prefix': 'COUNT_STAR'
'body': 'COUNT_STAR(${1:expr})'
'diff':
'prefix': 'DIFF'
'body': 'DIFF(${1:var}, ${2:expr})'
'IsEmpty':
'prefix': 'IsEmpty'
'body': 'IsEmpty(${1:expr})'
'max':
'prefix': 'MAX'
'body': 'MAX(${1:expr})'
'min':
'prefix': 'MIN'
'body': 'MIN(${1:expr})'
'size':
'prefix': 'SIZE'
'body': 'SIZE(${1:expr})'
'sum':
'prefix': 'SUM'
'body': 'SUM(${1:expr})'
'tokenize':
'prefix': 'TOKENIZE'
'body': 'TOKENIZE(${1:expr})'
'tokenize w/ delim':
'prefix': 'TOKENIZE'
'body': "TOKENIZE(${1:expr}, '${2:field_delimiter}')"
'abs':
'prefix': 'ABS'
'body': 'ABS(${1:expr})'
'acos':
'prefix': 'ACOS'
'body': 'ACOS(${1:expr})'
'asin':
'prefix': 'ASIN'
'body': 'ASIN(${1:expr})'
'atan':
'prefix': 'ATAN'
'body': 'ATAN(${1:expr})'
'cbrt':
'prefix': 'CBRT'
'body': 'CBRT(${1:expr})'
'ceil':
'prefix': 'CEIL'
'body': 'CEIL(${1:expr})'
'cos':
'prefix': 'COS'
'body': 'COS(${1:expr})'
'cosh':
'prefix': 'COSH'
'body': 'COSH(${1:expr})'
'emm':
'prefix': 'EMM'
'body': 'Hola E<NAME> :)'
'exp':
'prefix': 'EXP'
'body': 'EXP(${1:expr})'
'floor':
'prefix': 'FLOOR'
'body': 'FLOOR(${1:expr})'
'log':
'prefix': 'LOG'
'body': 'LOG(${1:expr})'
'log10':
'prefix': 'LOG10'
'body': 'LOG10(${1:expr})'
'random':
'prefix': 'RANDOM'
'body': 'RANDOM(${1:expr})'
'round':
'prefix': 'ROUND'
'body': 'ROUND(${1:expr})'
'sin':
'prefix': 'SIN'
'body': 'SIN(${1:expr})'
'sinh':
'prefix': 'SINH'
'body': 'SINH(${1:expr})'
'sqrt':
'prefix': 'SQRT'
'body': 'SQRT(${1:expr})'
'tan':
'prefix': 'TAN'
'body': 'TAN(${1:expr})'
'tanh':
'prefix': 'TANH'
'body': 'TANH(${1:expr})'
'indexof':
'prefix': 'INDEXOF'
'body': "INDEXOF(${1:string}, '${2:character}, ${3:startIndex})"
'last_index_of':
'prefix': 'LAST_INDEX_OF'
'body': "LAST_INDEX_OF(${1:string}, '${2:character}', ${3:startIndex})"
'lower':
'prefix': 'LOWER'
'body': 'LOWER(${1:string})'
'regex_extract':
'prefix': 'REGEX_EXTRACT'
'body': "REGEX_EXTRACT(${1:string}, '${2:expression}', ${3:index})"
'regex_extract_all':
'prefix': 'REGEX_EXTRACT_ALL '
'body': "REGEX_EXTRACT_ALL(${1:string}, '${2:expression}')"
'replace':
'prefix': 'REPLACE'
'body': "REPLACE(${1:string}, '${2:oldChar}', '${3:newChar}')"
'strsplit':
'prefix': 'STRSPLIT'
'body': "STRSPLIT(${1:string}, '${2:expression}', ${3:limit})'"
'substring':
'prefix': 'SUBSTRING'
'body': "SUBSTRING(${1:string}, ${2:startIndex}, ${3:stopIndex})"
'trim':
'prefix': 'TRIM'
'body': 'TRIM(${1:string})'
'upper':
'prefix': 'UPPER'
'body': 'UPPER(${1:string})'
"LTRIM":
'prefix': "LTRIM"
"body" : "LTRIM(${1:string})"
"RTRIM":
'prefix': "RTRIM"
"body" : "RTRIM(${1:string})"
"int":
'prefix': "int"
"long":
'prefix': "long"
"float":
'prefix': "float"
"double":
'prefix': "double"
"chararray":
'prefix': "chararray"
"bytearray":
'prefix': "bytearray"
"boolean":
'prefix': "boolean"
"datetime":
'prefix': "datetime"
"tuple":
'prefix': "tuple"
"bag":
'prefix': "bag"
"map":
'prefix': "map"
"ALL ":
'prefix': "ALL "
"AS ":
'prefix': "AS "
"ASSERT ":
'prefix': "ASSERT "
"BY ":
'prefix': "BY "
"CROSS ":
'prefix': "CROSS "
"DEFINE ":
'prefix': "DEFINE "
"DISTINCT ":
'prefix': "DISTINCT "
"FILTER ":
'prefix': "FILTER "
"FOREACH ":
'prefix': "FOREACH "
"GROUP ":
'prefix': "GROUP "
"IMPORT ":
'prefix': "IMPORT "
"INTO ":
'prefix': "INTO "
"JOIN ":
'prefix': "JOIN "
"LIMIT ":
'prefix': "LIMIT "
"LOAD ":
'prefix': "LOAD "
"MAPREDUCE ":
'prefix': "MAPREDUCE "
"ORDER":
'prefix': "ORDER BY "
"SAMPLE ":
'prefix': "SAMPLE "
"SPLIT ":
'prefix': "SPLIT "
"STORE ":
'prefix': "STORE "
"STREAM ":
'prefix': "STREAM "
"THROUGH ":
'prefix': "THROUGH "
"UNION ":
'prefix': "UNION "
"RETURNS ":
'prefix': "RETURNS "
"RANK ":
'prefix': "RANK "
"CUBE ":
'prefix': "CUBE "
"ROLLUP ":
'prefix': "ROLLUP "
"DESC ":
'prefix': "DESC "
"DUMP ":
'prefix': "DUMP "
"DESCRIBE ":
'prefix': "DESCRIBE "
"EXPLAIN ":
'prefix': "EXPLAIN "
"GENERATE ":
'prefix': "GENERATE "
"ILLUSTRATE ":
'prefix': "ILLUSTRATE "
"USING ":
'prefix': "USING "
# # "abs":
# # 'prefix': "abs"
# # "acos":
# # 'prefix': "acos"
# "add_days":
# 'prefix': "add_days"
# "add_months":
# 'prefix': "add_months"
# "and": 'prefix': "and"
# "append_array": 'prefix': "append_array"
# "array": 'prefix': "array"
# "array_contains": 'prefix': "array_contains"
# "array_freq_count": 'prefix': "array_freq_count"
# "array_index": 'prefix': "array_index"
# "ascii": 'prefix': "ascii"
# "asin": 'prefix': "asin"
# "assert_equals": 'prefix': "assert_equals"
# "assert_less_than": 'prefix': "assert_less_than"
# "assert_true": 'prefix': "assert_true"
# "atan": 'prefix': "atan"
# "avg": 'prefix': "avg"
# "base64": 'prefix': "base64"
# "between": 'prefix': "between"
# "bin": 'prefix': "bin"
# "bloom": 'prefix': "bloom"
# "bloom_and": 'prefix': "bloom_and"
# "bloom_contains": 'prefix': "bloom_contains"
# "bloom_not": 'prefix': "bloom_not"
# "bloom_or": 'prefix': "bloom_or"
# "booking_week": 'prefix': "booking_week"
# "case": 'prefix': "case"
# "cast_array": 'prefix': "cast_array"
# "cast_map": 'prefix': "cast_map"
# "ceil": 'prefix': "ceil"
# "ceiling": 'prefix': "ceiling"
# "coalesce": 'prefix': "coalesce"
# "collect": 'prefix': "collect"
# "collect_freq_count": 'prefix': "collect_freq_count"
# "collect_list": 'prefix': "collect_list"
# "collect_max": 'prefix': "collect_max"
# "collect_merge_max": 'prefix': "collect_merge_max"
# "collect_set": 'prefix': "collect_set"
# "combine": 'prefix': "combine"
# "combine_hyperloglog": 'prefix': "combine_hyperloglog"
# "combine_previous_sketch": 'prefix': "combine_previous_sketch"
# "combine_sketch": 'prefix': "combine_sketch"
# "combine_unique": 'prefix': "combine_unique"
# "compute_stats": 'prefix': "compute_stats"
# "concat": 'prefix': "concat"
# "concat_ws": 'prefix': "concat_ws"
# "conditional_emit": 'prefix': "conditional_emit"
# "context_ngrams": 'prefix': "context_ngrams"
# "conv": 'prefix': "conv"
# "convert_to_sketch": 'prefix': "convert_to_sketch"
# "corr": 'prefix': "corr"
# "cos": 'prefix': "cos"
# "count": 'prefix': "count"
# "covar_pop": 'prefix': "covar_pop"
# "covar_samp": 'prefix': "covar_samp"
# "create_union": 'prefix': "create_union"
# "cume_dist": 'prefix': "cume_dist"
# "current_database": 'prefix': "current_database"
# "current_date": 'prefix': "current_date"
# "current_timestamp": 'prefix': "current_timestamp"
# "current_user": 'prefix': "current_user"
# "date_add": 'prefix': "date_add"
# "date_range": 'prefix': "date_range"
# "date_sub": 'prefix': "date_sub"
# "date_to_start_quarter": 'prefix': "date_to_start_quarter"
# "date_to_start_week": 'prefix': "date_to_start_week"
# "datediff": 'prefix': "datediff"
# "day": 'prefix': "day"
# "dayofmonth": 'prefix': "dayofmonth"
# "decode": 'prefix': "decode"
# "degrees": 'prefix': "degrees"
# "dense_rank": 'prefix': "dense_rank"
# "distributed_bloom": 'prefix': "distributed_bloom"
# "distributed_map": 'prefix': "distributed_map"
# "div": 'prefix': "div"
# "e": 'prefix': "e"
# "elt": 'prefix': "elt"
# "encode": 'prefix': "encode"
# "estimated_reach": 'prefix': "estimated_reach"
# "event_parser": 'prefix': "event_parser"
# "ewah_bitmap": 'prefix': "ewah_bitmap"
# "ewah_bitmap_and": 'prefix': "ewah_bitmap_and"
# "ewah_bitmap_empty": 'prefix': "ewah_bitmap_empty"
# "ewah_bitmap_or": 'prefix': "ewah_bitmap_or"
# "exp": 'prefix': "exp"
# "experiments": 'prefix': "experiments"
# "explode": 'prefix': "explode"
# "field": 'prefix': "field"
# "find_in_set": 'prefix': "find_in_set"
# "first_index": 'prefix': "first_index"
# "first_value": 'prefix': "first_value"
# "flatten_array": 'prefix': "flatten_array"
# "floor": 'prefix': "floor"
# "format_number": 'prefix': "format_number"
# "from_camel_case": 'prefix': "from_camel_case"
# "from_json": 'prefix': "from_json"
# "from_unixtime": 'prefix': "from_unixtime"
# "from_utc_timestamp": 'prefix': "from_utc_timestamp"
# "get_json_object": 'prefix': "get_json_object"
# "geturl": 'prefix': "geturl"
# "greatest": 'prefix': "greatest"
# "group_concat": 'prefix': "group_concat"
# "group_count": 'prefix': "group_count"
# "grouped_rank": 'prefix': "grouped_rank"
# "hash": 'prefix': "hash"
# "hash_md5": 'prefix': "hash_md5"
# "hbase_balanced_key": 'prefix': "hbase_balanced_key"
# "hbase_batch_get": 'prefix': "hbase_batch_get"
# "hbase_batch_put": 'prefix': "hbase_batch_put"
# "hbase_cached_get": 'prefix': "hbase_cached_get"
# "hbase_get": 'prefix': "hbase_get"
# "hbase_put": 'prefix': "hbase_put"
# "hex": 'prefix': "hex"
# "histogram_numeric": 'prefix': "histogram_numeric"
# "hll_est_cardinality": 'prefix': "hll_est_cardinality"
# "hour": 'prefix': "hour"
# "hyperloglog": 'prefix': "hyperloglog"
# "if": 'prefix': "if"
# "in": 'prefix': "in"
# "in_file": 'prefix': "in_file"
# "index": 'prefix': "index"
# "inet_aton": 'prefix': "inet_aton"
# "inet_ntoa": 'prefix': "inet_ntoa"
# "initcap": 'prefix': "initcap"
# "inline": 'prefix': "inline"
# "instr": 'prefix': "instr"
# "intersect_array": 'prefix': "intersect_array"
# "ip2country": 'prefix': "ip2country"
# "ip2latlon": 'prefix': "ip2latlon"
# "ip2timezone": 'prefix': "ip2timezone"
# "ipcountry": 'prefix': "ipcountry"
# "isnotnull": 'prefix': "isnotnull"
# "isnull": 'prefix': "isnull"
# "java_method": 'prefix': "java_method"
# "join_array": 'prefix': "join_array"
# "json_map": 'prefix': "json_map"
# "json_split": 'prefix': "json_split"
# "json_tuple": 'prefix': "json_tuple"
# "label": 'prefix': "label"
# "lag": 'prefix': "lag"
# "last_day": 'prefix': "last_day"
# "last_index": 'prefix': "last_index"
# "last_value": 'prefix': "last_value"
# "lcase": 'prefix': "lcase"
# "lead": 'prefix': "lead"
# "least": 'prefix': "least"
# "length": 'prefix': "length"
# "like": 'prefix': "like"
# "ln": 'prefix': "ln"
# "locate": 'prefix': "locate"
# "log": 'prefix': "log"
# "log10": 'prefix': "log10"
# "log2": 'prefix': "log2"
# "lower": 'prefix': "lower"
# "lpad": 'prefix': "lpad"
# "ltrim": 'prefix': "ltrim"
# "map": 'prefix': "map"
# "map_filter_keys": 'prefix': "map_filter_keys"
# "map_index": 'prefix': "map_index"
# "map_key_values": 'prefix': "map_key_values"
# "map_keys": 'prefix': "map_keys"
# "map_mode": 'prefix': "map_mode"
# "map_values": 'prefix': "map_values"
# "matchpath": 'prefix': "matchpath"
# "max": 'prefix': "max"
# "md5": 'prefix': "md5"
# "min": 'prefix': "min"
# "minute": 'prefix': "minute"
# "month": 'prefix': "month"
# "moving_avg": 'prefix': "moving_avg"
# "multiday_count": 'prefix': "multiday_count"
# "named_struct": 'prefix': "named_struct"
# "negative": 'prefix': "negative"
# "next_day": 'prefix': "next_day"
# "ngrams": 'prefix': "ngrams"
# "noop": 'prefix': "noop"
# "noopstreaming": 'prefix': "noopstreaming"
# "noopwithmap": 'prefix': "noopwithmap"
# "noopwithmapstreaming": 'prefix': "noopwithmapstreaming"
# "not": 'prefix': "not"
# "now": 'prefix': "now"
# "ntile": 'prefix': "ntile"
# "numeric_range": 'prefix': "numeric_range"
# "nvl": 'prefix': "nvl"
# "or": 'prefix': "or"
# "parse_url": 'prefix': "parse_url"
# "parse_url_tuple": 'prefix': "parse_url_tuple"
# "percent_rank": 'prefix': "percent_rank"
# "percentile": 'prefix': "percentile"
# "percentile_approx": 'prefix': "percentile_approx"
# "pi": 'prefix': "pi"
# "pmod": 'prefix': "pmod"
# "posexplode": 'prefix': "posexplode"
# "positive": 'prefix': "positive"
# "pow": 'prefix': "pow"
# "power": 'prefix': "power"
# "printf": 'prefix': "printf"
# "quarter": 'prefix': "quarter"
# "radians": 'prefix': "radians"
# "rand": 'prefix': "rand"
# "rank": 'prefix': "rank"
# "ranked_long_diff": 'prefix': "ranked_long_diff"
# "ranked_long_sum": 'prefix': "ranked_long_sum"
# "reflect": 'prefix': "reflect"
# "reflect2": 'prefix': "reflect2"
# "regexp": 'prefix': "regexp"
# "regexp_extract": 'prefix': "regexp_extract"
# "regexp_replace": 'prefix': "regexp_replace"
# "repeat": 'prefix': "repeat"
# "reverse": 'prefix': "reverse"
# "rlike": 'prefix': "rlike"
# "round": 'prefix': "round"
# "row_number": 'prefix': "row_number"
# "row_sequence": 'prefix': "row_sequence"
# "rpad": 'prefix': "rpad"
# "rtrim": 'prefix': "rtrim"
# "running_count": 'prefix': "running_count"
# "running_sum": 'prefix': "running_sum"
# "salted_bigint": 'prefix': "salted_bigint"
# "salted_bigint_key": 'prefix': "salted_bigint_key"
# "second": 'prefix': "second"
# "sentences": 'prefix': "sentences"
# "set_difference": 'prefix': "set_difference"
# "set_similarity": 'prefix': "set_similarity"
# "sha1": 'prefix': "sha1"
# "sha2": 'prefix': "sha2"
# "sign": 'prefix': "sign"
# "sin": 'prefix': "sin"
# "size": 'prefix': "size"
# "sketch_hashes": 'prefix': "sketch_hashes"
# "sketch_set": 'prefix': "sketch_set"
# "sort_array": 'prefix': "sort_array"
# "space": 'prefix': "space"
# "split": 'prefix': "split"
# "sqrt": 'prefix': "sqrt"
# "stack": 'prefix': "stack"
# "std": 'prefix': "std"
# "stddev": 'prefix': "stddev"
# "stddev_pop": 'prefix': "stddev_pop"
# "stddev_samp": 'prefix': "stddev_samp"
# "str_to_map": 'prefix': "str_to_map"
# "struct": 'prefix': "struct"
# "substr": 'prefix': "substr"
# "substring": 'prefix': "substring"
# "sum": 'prefix': "sum"
# "sum_array": 'prefix': "sum_array"
# "tan": 'prefix': "tan"
# "throw_error": 'prefix': "throw_error"
# "to_camel_case": 'prefix': "to_camel_case"
# "to_date": 'prefix': "to_date"
# "to_json": 'prefix': "to_json"
# "to_unix_timestamp": 'prefix': "to_unix_timestamp"
# "to_utc_timestamp": 'prefix': "to_utc_timestamp"
# "translate": 'prefix': "translate"
# "trim": 'prefix': "trim"
# "truncate_array": 'prefix': "truncate_array"
# "ucase": 'prefix': "ucase"
# "unbase64": 'prefix': "unbase64"
# "unhex": 'prefix': "unhex"
# "union_hyperloglog": 'prefix': "union_hyperloglog"
# "union_map": 'prefix': "union_map"
# "union_max": 'prefix': "union_max"
# "union_sketch": 'prefix': "union_sketch"
# "union_vector_sum": 'prefix': "union_vector_sum"
# "unix_timestamp": 'prefix': "unix_timestamp"
# "upper": 'prefix': "upper"
# "var_pop": 'prefix': "var_pop"
# "var_samp": 'prefix': "var_samp"
# "variance": 'prefix': "variance"
# "vector_add": 'prefix': "vector_add"
# "vector_cross_product": 'prefix': "vector_cross_product"
# "vector_dot_product": 'prefix': "vector_dot_product"
# "vector_magnitude": 'prefix': "vector_magnitude"
# "vector_scalar_mult": 'prefix': "vector_scalar_mult"
# "visitor_deprecated": 'prefix': "visitor_deprecated"
# "weekofyear": 'prefix': "weekofyear"
# "when": 'prefix': "when"
# "windowingtablefunction": 'prefix': "windowingtablefunction"
# "write_to_graphite": 'prefix': "write_to_graphite"
# "write_to_tsdb": 'prefix': "write_to_tsdb"
# "xpath": 'prefix': "xpath"
# "xpath_boolean": 'prefix': "xpath_boolean"
# "xpath_double": 'prefix': "xpath_double"
# "xpath_float": 'prefix': "xpath_float"
# "xpath_int": 'prefix': "xpath_int"
# "xpath_long": 'prefix': "xpath_long"
# "xpath_number": 'prefix': "xpath_number"
# "xpath_short": 'prefix': "xpath_short"
# "xpath_string": 'prefix': "xpath_string"
# "year": 'prefix': "year"
| true | '.source.gsql':
'load table':
'prefix': 'LOAD'
'body': 'LOAD ${1:filename}'
'flatten':
'prefix': 'FLATTEN'
'body': 'FLATTEN(${1:var})'
'matches':
'prefix': 'MATCHES'
'body': "MATCHES '${1:pattern}'"
'average':
'prefix': 'AVG'
'body': 'AVG(${1:expr})'
'concat':
'prefix': 'CONCAT'
'body': 'CONCAT(${1:expr}, ${2:expr})'
'count':
'prefix': 'COUNT'
'body': 'COUNT(${1:expr})'
'count_star':
'prefix': 'COUNT_STAR'
'body': 'COUNT_STAR(${1:expr})'
'diff':
'prefix': 'DIFF'
'body': 'DIFF(${1:var}, ${2:expr})'
'IsEmpty':
'prefix': 'IsEmpty'
'body': 'IsEmpty(${1:expr})'
'max':
'prefix': 'MAX'
'body': 'MAX(${1:expr})'
'min':
'prefix': 'MIN'
'body': 'MIN(${1:expr})'
'size':
'prefix': 'SIZE'
'body': 'SIZE(${1:expr})'
'sum':
'prefix': 'SUM'
'body': 'SUM(${1:expr})'
'tokenize':
'prefix': 'TOKENIZE'
'body': 'TOKENIZE(${1:expr})'
'tokenize w/ delim':
'prefix': 'TOKENIZE'
'body': "TOKENIZE(${1:expr}, '${2:field_delimiter}')"
'abs':
'prefix': 'ABS'
'body': 'ABS(${1:expr})'
'acos':
'prefix': 'ACOS'
'body': 'ACOS(${1:expr})'
'asin':
'prefix': 'ASIN'
'body': 'ASIN(${1:expr})'
'atan':
'prefix': 'ATAN'
'body': 'ATAN(${1:expr})'
'cbrt':
'prefix': 'CBRT'
'body': 'CBRT(${1:expr})'
'ceil':
'prefix': 'CEIL'
'body': 'CEIL(${1:expr})'
'cos':
'prefix': 'COS'
'body': 'COS(${1:expr})'
'cosh':
'prefix': 'COSH'
'body': 'COSH(${1:expr})'
'emm':
'prefix': 'EMM'
'body': 'Hola EPI:NAME:<NAME>END_PI :)'
'exp':
'prefix': 'EXP'
'body': 'EXP(${1:expr})'
'floor':
'prefix': 'FLOOR'
'body': 'FLOOR(${1:expr})'
'log':
'prefix': 'LOG'
'body': 'LOG(${1:expr})'
'log10':
'prefix': 'LOG10'
'body': 'LOG10(${1:expr})'
'random':
'prefix': 'RANDOM'
'body': 'RANDOM(${1:expr})'
'round':
'prefix': 'ROUND'
'body': 'ROUND(${1:expr})'
'sin':
'prefix': 'SIN'
'body': 'SIN(${1:expr})'
'sinh':
'prefix': 'SINH'
'body': 'SINH(${1:expr})'
'sqrt':
'prefix': 'SQRT'
'body': 'SQRT(${1:expr})'
'tan':
'prefix': 'TAN'
'body': 'TAN(${1:expr})'
'tanh':
'prefix': 'TANH'
'body': 'TANH(${1:expr})'
'indexof':
'prefix': 'INDEXOF'
'body': "INDEXOF(${1:string}, '${2:character}, ${3:startIndex})"
'last_index_of':
'prefix': 'LAST_INDEX_OF'
'body': "LAST_INDEX_OF(${1:string}, '${2:character}', ${3:startIndex})"
'lower':
'prefix': 'LOWER'
'body': 'LOWER(${1:string})'
'regex_extract':
'prefix': 'REGEX_EXTRACT'
'body': "REGEX_EXTRACT(${1:string}, '${2:expression}', ${3:index})"
'regex_extract_all':
'prefix': 'REGEX_EXTRACT_ALL '
'body': "REGEX_EXTRACT_ALL(${1:string}, '${2:expression}')"
'replace':
'prefix': 'REPLACE'
'body': "REPLACE(${1:string}, '${2:oldChar}', '${3:newChar}')"
'strsplit':
'prefix': 'STRSPLIT'
'body': "STRSPLIT(${1:string}, '${2:expression}', ${3:limit})'"
'substring':
'prefix': 'SUBSTRING'
'body': "SUBSTRING(${1:string}, ${2:startIndex}, ${3:stopIndex})"
'trim':
'prefix': 'TRIM'
'body': 'TRIM(${1:string})'
'upper':
'prefix': 'UPPER'
'body': 'UPPER(${1:string})'
"LTRIM":
'prefix': "LTRIM"
"body" : "LTRIM(${1:string})"
"RTRIM":
'prefix': "RTRIM"
"body" : "RTRIM(${1:string})"
"int":
'prefix': "int"
"long":
'prefix': "long"
"float":
'prefix': "float"
"double":
'prefix': "double"
"chararray":
'prefix': "chararray"
"bytearray":
'prefix': "bytearray"
"boolean":
'prefix': "boolean"
"datetime":
'prefix': "datetime"
"tuple":
'prefix': "tuple"
"bag":
'prefix': "bag"
"map":
'prefix': "map"
"ALL ":
'prefix': "ALL "
"AS ":
'prefix': "AS "
"ASSERT ":
'prefix': "ASSERT "
"BY ":
'prefix': "BY "
"CROSS ":
'prefix': "CROSS "
"DEFINE ":
'prefix': "DEFINE "
"DISTINCT ":
'prefix': "DISTINCT "
"FILTER ":
'prefix': "FILTER "
"FOREACH ":
'prefix': "FOREACH "
"GROUP ":
'prefix': "GROUP "
"IMPORT ":
'prefix': "IMPORT "
"INTO ":
'prefix': "INTO "
"JOIN ":
'prefix': "JOIN "
"LIMIT ":
'prefix': "LIMIT "
"LOAD ":
'prefix': "LOAD "
"MAPREDUCE ":
'prefix': "MAPREDUCE "
"ORDER":
'prefix': "ORDER BY "
"SAMPLE ":
'prefix': "SAMPLE "
"SPLIT ":
'prefix': "SPLIT "
"STORE ":
'prefix': "STORE "
"STREAM ":
'prefix': "STREAM "
"THROUGH ":
'prefix': "THROUGH "
"UNION ":
'prefix': "UNION "
"RETURNS ":
'prefix': "RETURNS "
"RANK ":
'prefix': "RANK "
"CUBE ":
'prefix': "CUBE "
"ROLLUP ":
'prefix': "ROLLUP "
"DESC ":
'prefix': "DESC "
"DUMP ":
'prefix': "DUMP "
"DESCRIBE ":
'prefix': "DESCRIBE "
"EXPLAIN ":
'prefix': "EXPLAIN "
"GENERATE ":
'prefix': "GENERATE "
"ILLUSTRATE ":
'prefix': "ILLUSTRATE "
"USING ":
'prefix': "USING "
# # "abs":
# # 'prefix': "abs"
# # "acos":
# # 'prefix': "acos"
# "add_days":
# 'prefix': "add_days"
# "add_months":
# 'prefix': "add_months"
# "and": 'prefix': "and"
# "append_array": 'prefix': "append_array"
# "array": 'prefix': "array"
# "array_contains": 'prefix': "array_contains"
# "array_freq_count": 'prefix': "array_freq_count"
# "array_index": 'prefix': "array_index"
# "ascii": 'prefix': "ascii"
# "asin": 'prefix': "asin"
# "assert_equals": 'prefix': "assert_equals"
# "assert_less_than": 'prefix': "assert_less_than"
# "assert_true": 'prefix': "assert_true"
# "atan": 'prefix': "atan"
# "avg": 'prefix': "avg"
# "base64": 'prefix': "base64"
# "between": 'prefix': "between"
# "bin": 'prefix': "bin"
# "bloom": 'prefix': "bloom"
# "bloom_and": 'prefix': "bloom_and"
# "bloom_contains": 'prefix': "bloom_contains"
# "bloom_not": 'prefix': "bloom_not"
# "bloom_or": 'prefix': "bloom_or"
# "booking_week": 'prefix': "booking_week"
# "case": 'prefix': "case"
# "cast_array": 'prefix': "cast_array"
# "cast_map": 'prefix': "cast_map"
# "ceil": 'prefix': "ceil"
# "ceiling": 'prefix': "ceiling"
# "coalesce": 'prefix': "coalesce"
# "collect": 'prefix': "collect"
# "collect_freq_count": 'prefix': "collect_freq_count"
# "collect_list": 'prefix': "collect_list"
# "collect_max": 'prefix': "collect_max"
# "collect_merge_max": 'prefix': "collect_merge_max"
# "collect_set": 'prefix': "collect_set"
# "combine": 'prefix': "combine"
# "combine_hyperloglog": 'prefix': "combine_hyperloglog"
# "combine_previous_sketch": 'prefix': "combine_previous_sketch"
# "combine_sketch": 'prefix': "combine_sketch"
# "combine_unique": 'prefix': "combine_unique"
# "compute_stats": 'prefix': "compute_stats"
# "concat": 'prefix': "concat"
# "concat_ws": 'prefix': "concat_ws"
# "conditional_emit": 'prefix': "conditional_emit"
# "context_ngrams": 'prefix': "context_ngrams"
# "conv": 'prefix': "conv"
# "convert_to_sketch": 'prefix': "convert_to_sketch"
# "corr": 'prefix': "corr"
# "cos": 'prefix': "cos"
# "count": 'prefix': "count"
# "covar_pop": 'prefix': "covar_pop"
# "covar_samp": 'prefix': "covar_samp"
# "create_union": 'prefix': "create_union"
# "cume_dist": 'prefix': "cume_dist"
# "current_database": 'prefix': "current_database"
# "current_date": 'prefix': "current_date"
# "current_timestamp": 'prefix': "current_timestamp"
# "current_user": 'prefix': "current_user"
# "date_add": 'prefix': "date_add"
# "date_range": 'prefix': "date_range"
# "date_sub": 'prefix': "date_sub"
# "date_to_start_quarter": 'prefix': "date_to_start_quarter"
# "date_to_start_week": 'prefix': "date_to_start_week"
# "datediff": 'prefix': "datediff"
# "day": 'prefix': "day"
# "dayofmonth": 'prefix': "dayofmonth"
# "decode": 'prefix': "decode"
# "degrees": 'prefix': "degrees"
# "dense_rank": 'prefix': "dense_rank"
# "distributed_bloom": 'prefix': "distributed_bloom"
# "distributed_map": 'prefix': "distributed_map"
# "div": 'prefix': "div"
# "e": 'prefix': "e"
# "elt": 'prefix': "elt"
# "encode": 'prefix': "encode"
# "estimated_reach": 'prefix': "estimated_reach"
# "event_parser": 'prefix': "event_parser"
# "ewah_bitmap": 'prefix': "ewah_bitmap"
# "ewah_bitmap_and": 'prefix': "ewah_bitmap_and"
# "ewah_bitmap_empty": 'prefix': "ewah_bitmap_empty"
# "ewah_bitmap_or": 'prefix': "ewah_bitmap_or"
# "exp": 'prefix': "exp"
# "experiments": 'prefix': "experiments"
# "explode": 'prefix': "explode"
# "field": 'prefix': "field"
# "find_in_set": 'prefix': "find_in_set"
# "first_index": 'prefix': "first_index"
# "first_value": 'prefix': "first_value"
# "flatten_array": 'prefix': "flatten_array"
# "floor": 'prefix': "floor"
# "format_number": 'prefix': "format_number"
# "from_camel_case": 'prefix': "from_camel_case"
# "from_json": 'prefix': "from_json"
# "from_unixtime": 'prefix': "from_unixtime"
# "from_utc_timestamp": 'prefix': "from_utc_timestamp"
# "get_json_object": 'prefix': "get_json_object"
# "geturl": 'prefix': "geturl"
# "greatest": 'prefix': "greatest"
# "group_concat": 'prefix': "group_concat"
# "group_count": 'prefix': "group_count"
# "grouped_rank": 'prefix': "grouped_rank"
# "hash": 'prefix': "hash"
# "hash_md5": 'prefix': "hash_md5"
# "hbase_balanced_key": 'prefix': "hbase_balanced_key"
# "hbase_batch_get": 'prefix': "hbase_batch_get"
# "hbase_batch_put": 'prefix': "hbase_batch_put"
# "hbase_cached_get": 'prefix': "hbase_cached_get"
# "hbase_get": 'prefix': "hbase_get"
# "hbase_put": 'prefix': "hbase_put"
# "hex": 'prefix': "hex"
# "histogram_numeric": 'prefix': "histogram_numeric"
# "hll_est_cardinality": 'prefix': "hll_est_cardinality"
# "hour": 'prefix': "hour"
# "hyperloglog": 'prefix': "hyperloglog"
# "if": 'prefix': "if"
# "in": 'prefix': "in"
# "in_file": 'prefix': "in_file"
# "index": 'prefix': "index"
# "inet_aton": 'prefix': "inet_aton"
# "inet_ntoa": 'prefix': "inet_ntoa"
# "initcap": 'prefix': "initcap"
# "inline": 'prefix': "inline"
# "instr": 'prefix': "instr"
# "intersect_array": 'prefix': "intersect_array"
# "ip2country": 'prefix': "ip2country"
# "ip2latlon": 'prefix': "ip2latlon"
# "ip2timezone": 'prefix': "ip2timezone"
# "ipcountry": 'prefix': "ipcountry"
# "isnotnull": 'prefix': "isnotnull"
# "isnull": 'prefix': "isnull"
# "java_method": 'prefix': "java_method"
# "join_array": 'prefix': "join_array"
# "json_map": 'prefix': "json_map"
# "json_split": 'prefix': "json_split"
# "json_tuple": 'prefix': "json_tuple"
# "label": 'prefix': "label"
# "lag": 'prefix': "lag"
# "last_day": 'prefix': "last_day"
# "last_index": 'prefix': "last_index"
# "last_value": 'prefix': "last_value"
# "lcase": 'prefix': "lcase"
# "lead": 'prefix': "lead"
# "least": 'prefix': "least"
# "length": 'prefix': "length"
# "like": 'prefix': "like"
# "ln": 'prefix': "ln"
# "locate": 'prefix': "locate"
# "log": 'prefix': "log"
# "log10": 'prefix': "log10"
# "log2": 'prefix': "log2"
# "lower": 'prefix': "lower"
# "lpad": 'prefix': "lpad"
# "ltrim": 'prefix': "ltrim"
# "map": 'prefix': "map"
# "map_filter_keys": 'prefix': "map_filter_keys"
# "map_index": 'prefix': "map_index"
# "map_key_values": 'prefix': "map_key_values"
# "map_keys": 'prefix': "map_keys"
# "map_mode": 'prefix': "map_mode"
# "map_values": 'prefix': "map_values"
# "matchpath": 'prefix': "matchpath"
# "max": 'prefix': "max"
# "md5": 'prefix': "md5"
# "min": 'prefix': "min"
# "minute": 'prefix': "minute"
# "month": 'prefix': "month"
# "moving_avg": 'prefix': "moving_avg"
# "multiday_count": 'prefix': "multiday_count"
# "named_struct": 'prefix': "named_struct"
# "negative": 'prefix': "negative"
# "next_day": 'prefix': "next_day"
# "ngrams": 'prefix': "ngrams"
# "noop": 'prefix': "noop"
# "noopstreaming": 'prefix': "noopstreaming"
# "noopwithmap": 'prefix': "noopwithmap"
# "noopwithmapstreaming": 'prefix': "noopwithmapstreaming"
# "not": 'prefix': "not"
# "now": 'prefix': "now"
# "ntile": 'prefix': "ntile"
# "numeric_range": 'prefix': "numeric_range"
# "nvl": 'prefix': "nvl"
# "or": 'prefix': "or"
# "parse_url": 'prefix': "parse_url"
# "parse_url_tuple": 'prefix': "parse_url_tuple"
# "percent_rank": 'prefix': "percent_rank"
# "percentile": 'prefix': "percentile"
# "percentile_approx": 'prefix': "percentile_approx"
# "pi": 'prefix': "pi"
# "pmod": 'prefix': "pmod"
# "posexplode": 'prefix': "posexplode"
# "positive": 'prefix': "positive"
# "pow": 'prefix': "pow"
# "power": 'prefix': "power"
# "printf": 'prefix': "printf"
# "quarter": 'prefix': "quarter"
# "radians": 'prefix': "radians"
# "rand": 'prefix': "rand"
# "rank": 'prefix': "rank"
# "ranked_long_diff": 'prefix': "ranked_long_diff"
# "ranked_long_sum": 'prefix': "ranked_long_sum"
# "reflect": 'prefix': "reflect"
# "reflect2": 'prefix': "reflect2"
# "regexp": 'prefix': "regexp"
# "regexp_extract": 'prefix': "regexp_extract"
# "regexp_replace": 'prefix': "regexp_replace"
# "repeat": 'prefix': "repeat"
# "reverse": 'prefix': "reverse"
# "rlike": 'prefix': "rlike"
# "round": 'prefix': "round"
# "row_number": 'prefix': "row_number"
# "row_sequence": 'prefix': "row_sequence"
# "rpad": 'prefix': "rpad"
# "rtrim": 'prefix': "rtrim"
# "running_count": 'prefix': "running_count"
# "running_sum": 'prefix': "running_sum"
# "salted_bigint": 'prefix': "salted_bigint"
# "salted_bigint_key": 'prefix': "salted_bigint_key"
# "second": 'prefix': "second"
# "sentences": 'prefix': "sentences"
# "set_difference": 'prefix': "set_difference"
# "set_similarity": 'prefix': "set_similarity"
# "sha1": 'prefix': "sha1"
# "sha2": 'prefix': "sha2"
# "sign": 'prefix': "sign"
# "sin": 'prefix': "sin"
# "size": 'prefix': "size"
# "sketch_hashes": 'prefix': "sketch_hashes"
# "sketch_set": 'prefix': "sketch_set"
# "sort_array": 'prefix': "sort_array"
# "space": 'prefix': "space"
# "split": 'prefix': "split"
# "sqrt": 'prefix': "sqrt"
# "stack": 'prefix': "stack"
# "std": 'prefix': "std"
# "stddev": 'prefix': "stddev"
# "stddev_pop": 'prefix': "stddev_pop"
# "stddev_samp": 'prefix': "stddev_samp"
# "str_to_map": 'prefix': "str_to_map"
# "struct": 'prefix': "struct"
# "substr": 'prefix': "substr"
# "substring": 'prefix': "substring"
# "sum": 'prefix': "sum"
# "sum_array": 'prefix': "sum_array"
# "tan": 'prefix': "tan"
# "throw_error": 'prefix': "throw_error"
# "to_camel_case": 'prefix': "to_camel_case"
# "to_date": 'prefix': "to_date"
# "to_json": 'prefix': "to_json"
# "to_unix_timestamp": 'prefix': "to_unix_timestamp"
# "to_utc_timestamp": 'prefix': "to_utc_timestamp"
# "translate": 'prefix': "translate"
# "trim": 'prefix': "trim"
# "truncate_array": 'prefix': "truncate_array"
# "ucase": 'prefix': "ucase"
# "unbase64": 'prefix': "unbase64"
# "unhex": 'prefix': "unhex"
# "union_hyperloglog": 'prefix': "union_hyperloglog"
# "union_map": 'prefix': "union_map"
# "union_max": 'prefix': "union_max"
# "union_sketch": 'prefix': "union_sketch"
# "union_vector_sum": 'prefix': "union_vector_sum"
# "unix_timestamp": 'prefix': "unix_timestamp"
# "upper": 'prefix': "upper"
# "var_pop": 'prefix': "var_pop"
# "var_samp": 'prefix': "var_samp"
# "variance": 'prefix': "variance"
# "vector_add": 'prefix': "vector_add"
# "vector_cross_product": 'prefix': "vector_cross_product"
# "vector_dot_product": 'prefix': "vector_dot_product"
# "vector_magnitude": 'prefix': "vector_magnitude"
# "vector_scalar_mult": 'prefix': "vector_scalar_mult"
# "visitor_deprecated": 'prefix': "visitor_deprecated"
# "weekofyear": 'prefix': "weekofyear"
# "when": 'prefix': "when"
# "windowingtablefunction": 'prefix': "windowingtablefunction"
# "write_to_graphite": 'prefix': "write_to_graphite"
# "write_to_tsdb": 'prefix': "write_to_tsdb"
# "xpath": 'prefix': "xpath"
# "xpath_boolean": 'prefix': "xpath_boolean"
# "xpath_double": 'prefix': "xpath_double"
# "xpath_float": 'prefix': "xpath_float"
# "xpath_int": 'prefix': "xpath_int"
# "xpath_long": 'prefix': "xpath_long"
# "xpath_number": 'prefix': "xpath_number"
# "xpath_short": 'prefix': "xpath_short"
# "xpath_string": 'prefix': "xpath_string"
# "year": 'prefix': "year"
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.999431312084198,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-microtask-queue-integration.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
test = (scheduleMicrotask) ->
nextTickCalled = false
expected++
scheduleMicrotask ->
process.nextTick ->
nextTickCalled = true
return
setTimeout (->
assert nextTickCalled
done++
return
), 0
return
return
common = require("../common")
assert = require("assert")
implementations = [
(fn) ->
Promise.resolve().then fn
(fn) ->
obj = {}
Object.observe obj, fn
obj.a = 1
]
expected = 0
done = 0
process.on "exit", ->
assert.equal done, expected
return
# first tick case
implementations.forEach test
# tick callback case
setTimeout (->
implementations.forEach (impl) ->
process.nextTick test.bind(null, impl)
return
return
), 0
| 58825 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
test = (scheduleMicrotask) ->
nextTickCalled = false
expected++
scheduleMicrotask ->
process.nextTick ->
nextTickCalled = true
return
setTimeout (->
assert nextTickCalled
done++
return
), 0
return
return
common = require("../common")
assert = require("assert")
implementations = [
(fn) ->
Promise.resolve().then fn
(fn) ->
obj = {}
Object.observe obj, fn
obj.a = 1
]
expected = 0
done = 0
process.on "exit", ->
assert.equal done, expected
return
# first tick case
implementations.forEach test
# tick callback case
setTimeout (->
implementations.forEach (impl) ->
process.nextTick test.bind(null, impl)
return
return
), 0
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
test = (scheduleMicrotask) ->
nextTickCalled = false
expected++
scheduleMicrotask ->
process.nextTick ->
nextTickCalled = true
return
setTimeout (->
assert nextTickCalled
done++
return
), 0
return
return
common = require("../common")
assert = require("assert")
implementations = [
(fn) ->
Promise.resolve().then fn
(fn) ->
obj = {}
Object.observe obj, fn
obj.a = 1
]
expected = 0
done = 0
process.on "exit", ->
assert.equal done, expected
return
# first tick case
implementations.forEach test
# tick callback case
setTimeout (->
implementations.forEach (impl) ->
process.nextTick test.bind(null, impl)
return
return
), 0
|
[
{
"context": " @fill('form#loginform',\n 'username': profile\n 'password': profile\n , true) # s",
"end": 726,
"score": 0.9947927594184875,
"start": 719,
"tag": "USERNAME",
"value": "profile"
},
{
"context": " 'username': profile\n 'passw... | tests/integration/utils.coffee | christophetd/courseadvisor | 19 | shared = require '../utils-shared.coffee'
# === casper-only utilities ===
utils = module.exports =
# saves a screenshot of the current page to tests/screenshots/{name}
screenshot: (name) -> casper.capture("../screenshots/#{name}.png")
# logs-in with margarita as {profile}. When {direct} is set to false (default),
# the session first navigates to /login (with the optional {next} parameter).
# When {direct} is set to true, it is assumed that casper is already on the login page.
login: ({profile, next, direct}) ->
if !direct
target = if next then '/login?next='+next else '/login'
casper.thenOpen utils.url(target)
casper.then ->
@fill('form#loginform',
'username': profile
'password': profile
, true) # submit
waitForPage: (cb) -> casper.waitFor( ( -> @evaluate -> @_loaded ), cb)
# Performs an XMLHttpRequest in the context of the page.
# sets global flags on the window object `window.TEST_XHR_DONE` and `window.TEST_XHR_RESULT`
# enableing the user to poll for XHR status using casper.evaluate.
doXHR: (url) ->
casper.evaluate ((url) ->
window.TEST_XHR_DONE = false
window.TEST_XHR_RESULT = null
xhr = new XMLHttpRequest();
xhr.open('GET', url)
xhr.onerror = -> window.TEST_XHR_RESULT = 'failure'
xhr.onload = -> window.TEST_XHR_RESULT = 'success'
xhr.onreadystatechange = -> window.TEST_XHR_DONE = (xhr.readyState == 4)
xhr.send()
), url
# extend shared utilities
shared.extend(utils, shared)
| 29440 | shared = require '../utils-shared.coffee'
# === casper-only utilities ===
utils = module.exports =
# saves a screenshot of the current page to tests/screenshots/{name}
screenshot: (name) -> casper.capture("../screenshots/#{name}.png")
# logs-in with margarita as {profile}. When {direct} is set to false (default),
# the session first navigates to /login (with the optional {next} parameter).
# When {direct} is set to true, it is assumed that casper is already on the login page.
login: ({profile, next, direct}) ->
if !direct
target = if next then '/login?next='+next else '/login'
casper.thenOpen utils.url(target)
casper.then ->
@fill('form#loginform',
'username': profile
'password': <PASSWORD>
, true) # submit
waitForPage: (cb) -> casper.waitFor( ( -> @evaluate -> @_loaded ), cb)
# Performs an XMLHttpRequest in the context of the page.
# sets global flags on the window object `window.TEST_XHR_DONE` and `window.TEST_XHR_RESULT`
# enableing the user to poll for XHR status using casper.evaluate.
doXHR: (url) ->
casper.evaluate ((url) ->
window.TEST_XHR_DONE = false
window.TEST_XHR_RESULT = null
xhr = new XMLHttpRequest();
xhr.open('GET', url)
xhr.onerror = -> window.TEST_XHR_RESULT = 'failure'
xhr.onload = -> window.TEST_XHR_RESULT = 'success'
xhr.onreadystatechange = -> window.TEST_XHR_DONE = (xhr.readyState == 4)
xhr.send()
), url
# extend shared utilities
shared.extend(utils, shared)
| true | shared = require '../utils-shared.coffee'
# === casper-only utilities ===
utils = module.exports =
# saves a screenshot of the current page to tests/screenshots/{name}
screenshot: (name) -> casper.capture("../screenshots/#{name}.png")
# logs-in with margarita as {profile}. When {direct} is set to false (default),
# the session first navigates to /login (with the optional {next} parameter).
# When {direct} is set to true, it is assumed that casper is already on the login page.
login: ({profile, next, direct}) ->
if !direct
target = if next then '/login?next='+next else '/login'
casper.thenOpen utils.url(target)
casper.then ->
@fill('form#loginform',
'username': profile
'password': PI:PASSWORD:<PASSWORD>END_PI
, true) # submit
waitForPage: (cb) -> casper.waitFor( ( -> @evaluate -> @_loaded ), cb)
# Performs an XMLHttpRequest in the context of the page.
# sets global flags on the window object `window.TEST_XHR_DONE` and `window.TEST_XHR_RESULT`
# enableing the user to poll for XHR status using casper.evaluate.
doXHR: (url) ->
casper.evaluate ((url) ->
window.TEST_XHR_DONE = false
window.TEST_XHR_RESULT = null
xhr = new XMLHttpRequest();
xhr.open('GET', url)
xhr.onerror = -> window.TEST_XHR_RESULT = 'failure'
xhr.onload = -> window.TEST_XHR_RESULT = 'success'
xhr.onreadystatechange = -> window.TEST_XHR_DONE = (xhr.readyState == 4)
xhr.send()
), url
# extend shared utilities
shared.extend(utils, shared)
|
[
{
"context": "\n sshPort: \"22\"\n username: \"cmcginty\"\n saveable: true\n }\n {\n ",
"end": 13868,
"score": 0.9996255040168762,
"start": 13860,
"tag": "USERNAME",
"value": "cmcginty"
},
{
"context": "\n sshPort: \"22\"\n ... | atom/config.cson | db-pj/dotfiles | 26 | "*":
"atom-beautify":
general:
_analyticsUserId: "aab204c6-1c8e-47d3-afaa-31c509007127"
"autocomplete-plus":
strictMatching: true
"autocomplete-python":
useKite: false
core:
automaticallyUpdate: false
customFileTypes:
"source.ini": [
".hgrc"
".buckconfig"
".flowconfig"
]
"source.json": [
".arcconfig"
".jfconfig"
".watchmanconfig"
"BUCK.autodeps"
]
"source.python": [
"BUILD_DEFS"
"TARGETS"
"BUCK_WIN"
"bzl"
"cconf"
"cinc"
"ctest"
"ctw"
"mcconf"
"tw"
"thrift-cvalidator"
"BUCK"
]
"source.reason": [
".re"
]
"source.yaml": [
"yarn.lock"
]
disabledPackages: [
"markdown-preview"
"metrics"
"encoding-selector"
"line-ending-selector"
"fuzzy-finder"
"linter"
"github"
"tree-view"
"image-view"
"git-diff"
"haskell-grammar"
]
ignoredNames: [
".git"
".hg"
".svn"
".DS_Store"
"._*"
"Thumbs.db"
"desktop.ini"
"**/xplat/**/.*.metadata"
]
telemetryConsent: "no"
themes: [
"one-dark-ui"
"monokai"
]
useTreeSitterParsers: false
editor:
fontFamily: "UbuntuMono Nerd Font"
lineHeight: 1.1
showIndentGuide: true
"exception-reporting":
userId: "b3bd855a-38b4-44af-961e-0c6cfa5383f2"
"markdown-preview-plus":
syncConfig:
syncPreviewOnChange: true
syntaxThemeName: "solarized-dark-syntax"
useGitHubStyle: true
minimap:
plugins:
"split-diff": true
"split-diffDecorationsZIndex": 0
notifications:
showErrorsInDevMode: true
nuclide:
"atom-ide-code-format":
formatOnSave: false
formatOnType: true
"atom-ide-console":
diffUrlPattern: "https://phabricator.intern.facebook.com/%s"
fontScale: 0.9
maximumMessageCount: 10000
maximumSerializedHistory: 20
maximumSerializedMessages: 10
taskUrlPattern: "https://our.intern.facebook.com/intern/tasks?t=%s"
"atom-ide-datatip":
datatipDebounceDelay: 200
datatipInteractedWithDebounceDelay: 1000
onlyTopDatatip: false
"atom-ide-debugger":
showDebuggerOnBpSet: true
"atom-ide-diagnostics-ui":
autoVisibility: false
showDirectoryColumn: false
statusBarPosition: "left"
"atom-ide-find-references":
defaultLocationForPane: "bottom"
"atom-ide-outline-view":
nameOnly: false
"atom-ide-signature-help":
enable: true
"atom-ide-terminal":
allowTransparency: false
black: "#2e3436"
blue: "#3465a4"
brightBlack: "#555753"
brightBlue: "#729fcf"
brightCyan: "#34e2e2"
brightGreen: "#8ae234"
brightMagenta: "#ad7fa8"
brightRed: "#ef2929"
brightWhite: "#eeeeec"
brightYellow: "#fce94f"
charAtlas: "dynamic"
cursorBlink: false
cursorStyle: "block"
cyan: "#06989a"
documentationMessage: true
fontFamily: "courier-new, courier, monospace"
fontScale: 1
green: "#4e9a06"
lineHeight: 1.5
magenta: "#75507b"
optionIsMeta: false
red: "#cc0000"
rendererType: "auto"
scrollback: 500000
white: "#d3d7cf"
yellow: "#c4a000"
"fb-ai-code-search":
usePrefixOnly: true
"fb-ai-complete-blocks":
enableDevelopmentMode: false
"fb-android-native-debugger":
"devserver-android-tunnel": true
"fb-arc-lint":
blacklistedLinters: [
"FBHACK"
"FLAKE8"
"FLOW"
"HACK"
"HACKSYNTAX"
"PEP8"
]
lintTimeout: 60000
"fb-atomprojects":
localPaths: []
remotePaths: []
"fb-aurora-language-server":
logLevel: "INFO"
"fb-biggrep":
filterToProject: true
"fb-codex-pages":
priority: 30
"fb-component-search":
resultCountLimit: -1
shouldDisplayFullJSX: false
"fb-componentscript":
warnGraphQLChanged: false
"fb-cquery":
"enable-libclang-logs": false
"indexer-threads": 0
"memory-limit": 30
"use-cquery": true
"use-cquery-gk-result": true
"fb-debugger-mobilejs":
autoAttachComponentScript: "None"
autoAttachReactNative: "None"
logInspectorProxyMessages: false
showConsoleMessages: false
"fb-eslint-client":
"disable-arc-lint-results": true
logLevel: "INFO"
"fb-generated-file-support":
allowEdits: true
"fb-go":
goLanguageFeaturesProvider: "Default"
"fb-graphql-metrics":
priority: 50
"fb-highlight-selected":
allowedCharactersToSelect: "$@%-"
hideHighlightOnSelectedWord: false
highlightBackground: false
highlightInPanes: true
ignoreCase: false
lightTheme: false
minimumLength: 2
onlyHighlightWholeWords: true
showInStatusBar: true
showResultsOnScrollBar: false
statusBarString: "Highlighted: %c"
timeout: 20
"fb-intellij":
intelliJKeybindings: false
"fb-interactive-smartlog":
autorunPostResolveScripts: false
conflictResolverEnabled: true
externalMergeToolArguments: ""
externalMergeToolEnabled: false
externalMergeToolPath: ""
fetchCommitTemplateOption: "File"
fetchOption: "jf get"
modalComparison: true
pullOption: "hg pull"
showCommitInfoViewInline: false
showTools: true
syncMessagesToPhabricator: false
"fb-java":
autoBuild: false
autoFold: true
coverageType: "Symbols"
eclipseLogLevel: "ERROR"
languageServer: "Java Language Server"
logLevel: "Finer"
problemsToIgnore: ""
showCompleteDiagnostics: true
"fb-jellyfish":
pathToJellyfish: "jf"
"fb-miso":
authType: "token"
gateway: "http://localhost:8888"
"fb-new-file-template":
oncall_name: "rel_eng"
"fb-notes":
"fb-notes": {}
"fb-on-demand":
autoSetCwd: true
"close-behavior": "ask"
"duo-push-immediately": false
"mobile-android-tunnel": true
"mobile-fbandroid-target": "fb4a-noexo"
"mobile-fbobjc-target": "fbios"
"mobile-subdirectory-notification": true
"mobile-xplat-metro": true
"fb-oneworld":
apertureAppExecPath: ""
apertureAppUri: "https://our.intern.facebook.com/intern/aperture/app"
"fb-osmeta-core":
hideInternalTargets: true
"fb-package-manager":
"fuzzy-finder": false
github: false
linter: false
metrics: false
overriddenThirdPartySettings: []
showChangelogs: true
"fb-phabricator-comments-client":
autoRefreshMinutes: 3
commentOpener: "Editor"
showComments: true
showCommentsInDiffView: true
showCommentsInEditors: true
showOnlyMostRecentDiff: true
showResolvedComments: false
"fb-populate-symbols":
populateSymbolsView: true
"fb-prettier":
formatAlreadyFormattedFilesOnSave: true
"fb-rating":
rating: 5
"fb-similar-code-detector":
debug: false
"fb-skip":
includeTypeErrors: true
logLevel: "INFO"
pathToSkip: ""
remotePathToSkip: "~/www/scripts/skip"
useLSP: true
"fb-symbol-wiki":
priority: 10
"fb-terminal-dash-provider":
defaultTerminalPane: "bottom"
"fb-vscode-ext":
"fb-cquery":
"disable-flag-fetching": false
"enable-code-lens": false
"enable-libclang-logs": false
"indexer-threads": 0
"local-cache-directory": ""
"memory-limit": 30
"remote-cache-directory": ""
"use-cquery": true
"use-cquery-gk-result": true
javascript:
format:
enable: false
validate:
enable: false
"js-imports":
componentModulePathFilter: "html/js"
"nuclide-js-imports-client":
componentModulePathFilter: "html/js"
diagnosticsWhitelist: [
"/nuclide"
"/www"
"/instance-ide"
"/xplat/js"
"/oculus-skyline"
"/Software/Apps/skyline"
"/www-hg"
"/trunk-hg-www"
]
logLevel: "INFO"
requiresWhitelist: [
"/www"
"/instance-ide"
"/xplat/js"
"/www-hg"
"/trunk-hg-www"
]
"nuclide-ocaml":
codeLens: true
codeLensCopy: true
codeLensMultiLine: false
logLevel: "DEBUG"
pathToMerlin: "ocamlmerlin"
pathToOcpIndent: ""
pathToRefmt: "refmt"
refmtFlags: ""
pyls:
BuckFormatOnSave: false
DsfmtFormatOnSave: false
InstagramFormatOnSave: false
formatAlreadyFormattedFilesOnSave: false
plugins:
fb_buck_format:
enabled: true
fb_dataswarm_format:
enabled: false
fb_dataswarm_linter: {}
fb_dataswarm_macros_definition:
enabled: true
fb_dataswarm_macros_hover:
enabled: true
fb_dataswarm_roots:
enabled: true
fb_dataswarm_table_definition:
enabled: true
fb_dataswarm_udf_completion:
enabled: true
fb_dataswarm_udf_hover:
enabled: true
fb_ig_format:
enabled: true
fb_ig_source_roots:
enabled: true
fb_linktree:
enabled: true
jedi_highlight:
enabled: true
source_roots:
enabled: true
hyperclick:
darwinTriggerKeys: "metaKey"
linuxTriggerKeys: "ctrlKey"
win32TriggerKeys: "ctrlKey"
installRecommendedPackages: true
"nuclide-adb-logcat":
pathToAdb: "adb"
whitelistedTags: "^((unknown:|)React|ReactNativeJS|ACRA|AndroidRuntime|TTI|BuckReporting|unknown)$"
"nuclide-bookshelf":
changeActiveBookmarkBehavior: "Always Ignore"
"nuclide-buck":
buildDbErrorNotify: true
compileErrorDiagnostics: true
pathToBuck: "buck"
suggestTaskRunner: false
"nuclide-clang":
defaultDiagnostics: false
enableDefaultFlags: true
libclangPath: ""
serverProcessMemoryLimit: 15
"nuclide-code-search":
localTool: ""
localUseVcsSearch: true
maxResults: 30
remoteTool: ""
remoteUseVcsSearch: true
"nuclide-css-lsp-client":
logLevel: "INFO"
"nuclide-ctags":
disableWithHack: true
"nuclide-debugger-java":
sourceFilePaths: ""
"nuclide-debugger-php":
hhvmRuntimeArgs: ""
hhvmRuntimePath: "/usr/local/bin/hhvm"
hhvmServerAttachPort: 8999
"nuclide-definition-preview":
editorHeight: 50
priority: 20
"nuclide-distraction-free-mode":
hideFindAndReplace: false
hideStatusBar: false
hideToolBar: false
"nuclide-file-tree":
allowKeyboardPrefixNavigation: true
autoExpandSingleChild: true
focusEditorOnFileSelection: false
hideIgnoredNames: true
hideVcsIgnoredPaths: true
revealFileOnSwitch: false
showOpenFiles: true
showUncommittedChanges: true
showUncommittedChangesKind: "Uncommitted changes"
"nuclide-flow":
canUseFlowBin: false
excludeOtherAutocomplete: false
flowAutocompleteResultsFirst: true
functionSnippetShouldIncludeArguments: true
liveSyntaxErrors: true
logLevel: "DEBUG"
pathToFlow: "flow"
stopFlowOnExit: true
warnOnNotInstalled: false
"nuclide-fuzzy-filename-provider":
smartCase: false
"nuclide-hack":
hhClientPath: ""
legacyHackIde: false
logLevel: "DEBUG"
"nuclide-health":
analyticsTimeout: 20
viewTimeout: 20
"nuclide-hg-repository":
enableDiffStats: true
"nuclide-home":
showHome: true
"nuclide-infer-al":
pathToInfer: "infer"
"nuclide-ios-simulator-logs":
pathToSyslog: "syslog"
whitelistedTags: "^(core\\.react|tid:com\\.facebook\\.react\\.JavaScript|com.apple.console)$"
"nuclide-js-imports-client":
componentModulePathFilter: "html/js"
diagnosticsWhitelist: [
"/nuclide"
"/www"
"/instance-ide"
"/xplat/js"
"/oculus-skyline"
"/Software/Apps/skyline"
"/www-hg"
"/trunk-hg-www"
]
logLevel: "INFO"
requiresWhitelist: [
"/www"
"/instance-ide"
"/xplat/js"
"/www-hg"
"/trunk-hg-www"
]
"nuclide-notifications":
onlyNativeFriendly: true
whenFocused: false
"nuclide-objc":
enableAutomaticSquareBracketInsertion: true
"nuclide-ocaml":
codeLens: true
codeLensCopy: true
logLevel: "DEBUG"
pathToMerlin: "ocamlmerlin"
pathToOcpIndent: ""
pathToRefmt: "refmt"
refmtFlags: ""
"nuclide-python":
autocompleteArguments: true
enableLinting: true
includeOptionalArguments: false
pathToFlake8: "flake8"
pathToPython: "python"
showGlobalVariables: true
showSignatureHelp: false
"nuclide-quick-open":
useSelection: true
"nuclide-remote-atom":
enableLocalCommandService: false
shouldNotifyWhenCommandLineIsWaitingOnFile: false
"nuclide-remote-projects":
connectionProfiles: [
{
deletable: true
displayTitle: "www"
params:
authMethod: "PASSWORD"
cwd: "/home/cmcginty/www"
displayTitle: "www"
pathToPrivateKey: "/Users/cmcginty/.ssh/fb_id_rsa"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "fbsource"
params:
authMethod: "PASSWORD"
cwd: "/home/cmcginty/fbsource"
displayTitle: "fbsource"
pathToPrivateKey: "/Users/cmcginty/.ssh/fb_id_rsa"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "configerator"
params:
authMethod: "SSL_AGENT"
cwd: "/data/users/cmcginty/configerator"
displayTitle: "configerator"
pathToPrivateKey: ""
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "configerator-dsi"
params:
cwd: "/home/cmcginty/configerator-dsi"
displayTitle: "configerator-dsi"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
]
shutdownServerAfterDisconnection: false
"nuclide-swift":
sourceKittenDisabled: false
sourceKittenPath: "/usr/local/bin/sourcekitten"
swiftToolchainPath: ""
"nuclide-type-coverage":
colorizeStatusBar: true
"nuclide-vcs-log":
showDifferentialRevision: true
"nuclide-welcome-page":
hiddenTopics: [
"fb-nuclide-welcome-page"
]
"nuclide-working-sets": {}
use:
"atom-ide-busy-signal": "default"
"atom-ide-code-actions": "default"
"atom-ide-code-format": "default"
"atom-ide-code-highlight": "default"
"atom-ide-console": "default"
"atom-ide-datatip": "default"
"atom-ide-debugger": "default"
"atom-ide-debugger-java": "default"
"atom-ide-debugger-java-android": "default"
"atom-ide-debugger-native-gdb": "default"
"atom-ide-debugger-node": "default"
"atom-ide-debugger-ocaml": "default"
"atom-ide-debugger-python": "default"
"atom-ide-debugger-react-native": "default"
"atom-ide-definitions": "default"
"atom-ide-diagnostics": "default"
"atom-ide-diagnostics-ui": "default"
"atom-ide-find-references": "default"
"atom-ide-global": "default"
"atom-ide-outline-view": "default"
"atom-ide-refactor": "default"
"atom-ide-signature-help": "default"
"atom-ide-terminal": "default"
"fb-ai-code-search": "default"
"fb-ai-complete-blocks": "default"
"fb-ai-google-code-search": "default"
"fb-alitepath-provider": "default"
"fb-analytics-client": "default"
"fb-android-menu-items": "default"
"fb-android-native-debugger": "default"
"fb-android-welcome-page": "default"
"fb-arc-lint": "default"
"fb-arc-paste": "default"
"fb-arcanist": "default"
"fb-arcanist-editor": "default"
"fb-atom-command-dash-provider": "default"
"fb-atom-snippets": "default"
"fb-atomprojects": "default"
"fb-aurora-language-server": "default"
"fb-biggrep": "default"
"fb-biggrep-dash-provider": "default"
"fb-breadcrumbs": "default"
"fb-buck-java": "default"
"fb-buck-mobilejs": "default"
"fb-code-patterns": "default"
"fb-codegraph": "default"
"fb-codex": "default"
"fb-codex-pages": "default"
"fb-component-search": "default"
"fb-componentscript": "default"
"fb-componentscript-fiddle": "default"
"fb-cquery": "default"
"fb-css-constants": "default"
"fb-dash": "default"
"fb-dataswarm": "default"
"fb-debugger-mobilejs": "default"
"fb-debugger-native-config-resolver": "default"
"fb-debugger-source-paths": "default"
"fb-devserver-checkup": "default"
"fb-diff-view": "default"
"fb-disable-lf-utf8": "default"
"fb-dumpapp": "default"
"fb-employee": "default"
"fb-entschema": "default"
"fb-eslint-client": "default"
"fb-file-a-bug": "default"
"fb-file-family-data-plugins": "default"
"fb-fuzzy-file-dash-provider": "default"
"fb-gatekeeper": "default"
"fb-generated-file-support": "default"
"fb-go": "default"
"fb-go-to-file-symbol-dash-provider": "default"
"fb-go-to-line-dash-provider": "default"
"fb-go-to-project-symbol-dash-provider": "default"
"fb-graphql-metrics": "default"
"fb-hack-playground": "default"
"fb-hackast-refactorizer": "default"
"fb-haskell-lsp": "default"
"fb-hg-dash-provider": "default"
"fb-home": "default"
"fb-http-request-sender": "default"
"fb-instarn": "default"
"fb-intellij": "default"
"fb-interactive-smartlog": "default"
"fb-intern-dash-provider": "default"
"fb-intl-fbt-str": "default"
"fb-java": "default"
"fb-java-test-runner": "default"
"fb-jellyfish": "default"
"fb-jest-test-runner": "default"
"fb-js-refactorizer": "default"
"fb-js-require-size-client": "default"
"fb-language-capirca": "default"
"fb-language-dataswarm": "default"
"fb-login": "default"
"fb-miso": "default"
"fb-neuralcodesearch-provider": "default"
"fb-new-file-template": "default"
"fb-notes": "default"
"fb-nuclide-beta-indicator": "default"
"fb-nuclide-node-playground": "default"
"fb-nuclide-onboarding": "default"
"fb-nuclide-onboarding-core-tasks": "default"
"fb-nuclide-remote-transfer": "default"
"fb-nux": "default"
"fb-observe-www-directory": "default"
"fb-on-demand": "default"
"fb-oneworld": "default"
"fb-osmeta": "default"
"fb-osmeta-core": "default"
"fb-package-center": "default"
"fb-package-manager": "default"
"fb-perf-gutter": "default"
"fb-phabricator": "default"
"fb-phabricator-comments-client": "default"
"fb-populate-symbols": "default"
"fb-prettier": "default"
"fb-product-alert": "default"
"fb-pyre-language-server": "default"
"fb-python-lsp": "default"
"fb-rating": "default"
"fb-relay": "default"
"fb-remote-uri-formatters": "default"
"fb-rust-language-server": "default"
"fb-sample-dash-provider": "never"
"fb-scala-lsp": "default"
"fb-scmquery": "default"
"fb-services": "default"
"fb-similar-code-detector": "default"
"fb-sitevar-notification": "default"
"fb-skip": "default"
"fb-slog": "default"
"fb-smoothscroll": "default"
"fb-sparse-file-preview": "default"
"fb-symbol-wiki": "default"
"fb-terminal-dash-provider": "default"
"fb-terminal-onboarding": "default"
"fb-test-pilot-test-runner": "default"
"fb-test-runner": "default"
"fb-test-runner-ui": "default"
"fb-thrift-client-service": "default"
"fb-thrift-language-server": "default"
"fb-trace-collector": "default"
"fb-ui-component-tools": "default"
"fb-xml": "default"
hyperclick: "default"
"nuclide-adb-logcat": "default"
"nuclide-atom-notifications": "default"
"nuclide-autocomplete": "default"
"nuclide-blame": "default"
"nuclide-blame-provider-hg": "default"
"nuclide-bookshelf": "default"
"nuclide-buck": "default"
"nuclide-buck-ios": "default"
"nuclide-clang": "default"
"nuclide-clipboard-path": "default"
"nuclide-code-search": "default"
"nuclide-context-view": "default"
"nuclide-css-lsp-client": "default"
"nuclide-ctags": "default"
"nuclide-current-working-directory": "default"
"nuclide-debugger-java": "default"
"nuclide-debugger-native": "default"
"nuclide-debugger-php": "default"
"nuclide-debugger-prepack": "default"
"nuclide-debugger-vsp": "default"
"nuclide-deep-link": "default"
"nuclide-definition-preview": "default"
"nuclide-definition-preview-provider": "default"
"nuclide-deprecation-cop": "default"
"nuclide-device-panel": "default"
"nuclide-device-panel-android": "default"
"nuclide-device-panel-ios": "default"
"nuclide-diagnostics-extras": "default"
"nuclide-diff-gutters": "default"
"nuclide-distraction-free-mode": "default"
"nuclide-file-family": "default"
"nuclide-file-family-projectionist": "default"
"nuclide-file-tree": "default"
"nuclide-file-watcher": "default"
"nuclide-flow": "default"
"nuclide-fuzzy-filename-provider": "default"
"nuclide-graphql": "default"
"nuclide-hack": "default"
"nuclide-health": "default"
"nuclide-hg-repository": "default"
"nuclide-hhvm": "default"
"nuclide-home": "default"
"nuclide-http-request-sender": "default"
"nuclide-image-view": "default"
"nuclide-infer-al": "default"
"nuclide-ios-simulator-logs": "default"
"nuclide-js-imports-client": "default"
"nuclide-json": "default"
"nuclide-key-binding-hint-status": "default"
"nuclide-language-hack": "default"
"nuclide-language-infer-al": "default"
"nuclide-language-reason": "default"
"nuclide-language-service-client": "default"
"nuclide-language-status": "default"
"nuclide-metro": "default"
"nuclide-move-item-to-available-pane": "default"
"nuclide-navigation-stack": "default"
"nuclide-navigation-stack-status-bar": "default"
"nuclide-notifications": "default"
"nuclide-nux": "default"
"nuclide-objc": "default"
"nuclide-ocaml": "default"
"nuclide-open-filenames-provider": "default"
"nuclide-outline-view-extras": "default"
"nuclide-project": "default"
"nuclide-python": "default"
"nuclide-quick-open": "default"
"nuclide-react-inspector": "default"
"nuclide-recent-files-provider": "default"
"nuclide-recent-files-service": "default"
"nuclide-reload-deep-link": "default"
"nuclide-remote-atom": "default"
"nuclide-remote-projects": "default"
"nuclide-settings": "default"
"nuclide-ssh-tunnel": "default"
"nuclide-swift": "default"
"nuclide-syntactic-selection": "default"
"nuclide-task-runner": "default"
"nuclide-test-runner": "default"
"nuclide-type-coverage": "default"
"nuclide-type-hint": "default"
"nuclide-ui": "default"
"nuclide-unicode-datatip": "default"
"nuclide-url-hyperclick": "default"
"nuclide-vcs-log": "default"
"nuclide-welcome-page": "default"
"nuclide-working-sets": "default"
useLocalRpc: true
"spell-check":
addKnownWords: true
grammars: [
"source.asciidoc"
"source.gfm"
"text.git-commit"
"text.plain"
"text.plain.null-grammar"
"source.rst"
"text.restructuredtext"
"text.html.php"
"text.html.hack"
"source.python"
"text.md"
]
knownWords: [
"awaitable"
"contbuild"
"contbuilds"
"darray"
"dict"
"nullthrows"
"oncall"
"unlanded"
"vec"
"deduped"
"Traversables"
]
"tool-bar":
iconSize: "16px"
position: "Left"
useTouchBar: false
"vim-mode-plus":
ignoreCaseForSearch: true
welcome:
showOnStartup: false
whitespace:
ignoreWhitespaceOnCurrentLine: false
".python.regexp.source":
editor:
preferredLineLength: 88
".python.source":
editor:
preferredLineLength: 88
".text.xml":
editor:
nonWordCharacters: "/\\()\"',.;<>~!@#$%^&*|+=[]{}`?…"
| 123829 | "*":
"atom-beautify":
general:
_analyticsUserId: "aab204c6-1c8e-47d3-afaa-31c509007127"
"autocomplete-plus":
strictMatching: true
"autocomplete-python":
useKite: false
core:
automaticallyUpdate: false
customFileTypes:
"source.ini": [
".hgrc"
".buckconfig"
".flowconfig"
]
"source.json": [
".arcconfig"
".jfconfig"
".watchmanconfig"
"BUCK.autodeps"
]
"source.python": [
"BUILD_DEFS"
"TARGETS"
"BUCK_WIN"
"bzl"
"cconf"
"cinc"
"ctest"
"ctw"
"mcconf"
"tw"
"thrift-cvalidator"
"BUCK"
]
"source.reason": [
".re"
]
"source.yaml": [
"yarn.lock"
]
disabledPackages: [
"markdown-preview"
"metrics"
"encoding-selector"
"line-ending-selector"
"fuzzy-finder"
"linter"
"github"
"tree-view"
"image-view"
"git-diff"
"haskell-grammar"
]
ignoredNames: [
".git"
".hg"
".svn"
".DS_Store"
"._*"
"Thumbs.db"
"desktop.ini"
"**/xplat/**/.*.metadata"
]
telemetryConsent: "no"
themes: [
"one-dark-ui"
"monokai"
]
useTreeSitterParsers: false
editor:
fontFamily: "UbuntuMono Nerd Font"
lineHeight: 1.1
showIndentGuide: true
"exception-reporting":
userId: "b3bd855a-38b4-44af-961e-0c6cfa5383f2"
"markdown-preview-plus":
syncConfig:
syncPreviewOnChange: true
syntaxThemeName: "solarized-dark-syntax"
useGitHubStyle: true
minimap:
plugins:
"split-diff": true
"split-diffDecorationsZIndex": 0
notifications:
showErrorsInDevMode: true
nuclide:
"atom-ide-code-format":
formatOnSave: false
formatOnType: true
"atom-ide-console":
diffUrlPattern: "https://phabricator.intern.facebook.com/%s"
fontScale: 0.9
maximumMessageCount: 10000
maximumSerializedHistory: 20
maximumSerializedMessages: 10
taskUrlPattern: "https://our.intern.facebook.com/intern/tasks?t=%s"
"atom-ide-datatip":
datatipDebounceDelay: 200
datatipInteractedWithDebounceDelay: 1000
onlyTopDatatip: false
"atom-ide-debugger":
showDebuggerOnBpSet: true
"atom-ide-diagnostics-ui":
autoVisibility: false
showDirectoryColumn: false
statusBarPosition: "left"
"atom-ide-find-references":
defaultLocationForPane: "bottom"
"atom-ide-outline-view":
nameOnly: false
"atom-ide-signature-help":
enable: true
"atom-ide-terminal":
allowTransparency: false
black: "#2e3436"
blue: "#3465a4"
brightBlack: "#555753"
brightBlue: "#729fcf"
brightCyan: "#34e2e2"
brightGreen: "#8ae234"
brightMagenta: "#ad7fa8"
brightRed: "#ef2929"
brightWhite: "#eeeeec"
brightYellow: "#fce94f"
charAtlas: "dynamic"
cursorBlink: false
cursorStyle: "block"
cyan: "#06989a"
documentationMessage: true
fontFamily: "courier-new, courier, monospace"
fontScale: 1
green: "#4e9a06"
lineHeight: 1.5
magenta: "#75507b"
optionIsMeta: false
red: "#cc0000"
rendererType: "auto"
scrollback: 500000
white: "#d3d7cf"
yellow: "#c4a000"
"fb-ai-code-search":
usePrefixOnly: true
"fb-ai-complete-blocks":
enableDevelopmentMode: false
"fb-android-native-debugger":
"devserver-android-tunnel": true
"fb-arc-lint":
blacklistedLinters: [
"FBHACK"
"FLAKE8"
"FLOW"
"HACK"
"HACKSYNTAX"
"PEP8"
]
lintTimeout: 60000
"fb-atomprojects":
localPaths: []
remotePaths: []
"fb-aurora-language-server":
logLevel: "INFO"
"fb-biggrep":
filterToProject: true
"fb-codex-pages":
priority: 30
"fb-component-search":
resultCountLimit: -1
shouldDisplayFullJSX: false
"fb-componentscript":
warnGraphQLChanged: false
"fb-cquery":
"enable-libclang-logs": false
"indexer-threads": 0
"memory-limit": 30
"use-cquery": true
"use-cquery-gk-result": true
"fb-debugger-mobilejs":
autoAttachComponentScript: "None"
autoAttachReactNative: "None"
logInspectorProxyMessages: false
showConsoleMessages: false
"fb-eslint-client":
"disable-arc-lint-results": true
logLevel: "INFO"
"fb-generated-file-support":
allowEdits: true
"fb-go":
goLanguageFeaturesProvider: "Default"
"fb-graphql-metrics":
priority: 50
"fb-highlight-selected":
allowedCharactersToSelect: "$@%-"
hideHighlightOnSelectedWord: false
highlightBackground: false
highlightInPanes: true
ignoreCase: false
lightTheme: false
minimumLength: 2
onlyHighlightWholeWords: true
showInStatusBar: true
showResultsOnScrollBar: false
statusBarString: "Highlighted: %c"
timeout: 20
"fb-intellij":
intelliJKeybindings: false
"fb-interactive-smartlog":
autorunPostResolveScripts: false
conflictResolverEnabled: true
externalMergeToolArguments: ""
externalMergeToolEnabled: false
externalMergeToolPath: ""
fetchCommitTemplateOption: "File"
fetchOption: "jf get"
modalComparison: true
pullOption: "hg pull"
showCommitInfoViewInline: false
showTools: true
syncMessagesToPhabricator: false
"fb-java":
autoBuild: false
autoFold: true
coverageType: "Symbols"
eclipseLogLevel: "ERROR"
languageServer: "Java Language Server"
logLevel: "Finer"
problemsToIgnore: ""
showCompleteDiagnostics: true
"fb-jellyfish":
pathToJellyfish: "jf"
"fb-miso":
authType: "token"
gateway: "http://localhost:8888"
"fb-new-file-template":
oncall_name: "rel_eng"
"fb-notes":
"fb-notes": {}
"fb-on-demand":
autoSetCwd: true
"close-behavior": "ask"
"duo-push-immediately": false
"mobile-android-tunnel": true
"mobile-fbandroid-target": "fb4a-noexo"
"mobile-fbobjc-target": "fbios"
"mobile-subdirectory-notification": true
"mobile-xplat-metro": true
"fb-oneworld":
apertureAppExecPath: ""
apertureAppUri: "https://our.intern.facebook.com/intern/aperture/app"
"fb-osmeta-core":
hideInternalTargets: true
"fb-package-manager":
"fuzzy-finder": false
github: false
linter: false
metrics: false
overriddenThirdPartySettings: []
showChangelogs: true
"fb-phabricator-comments-client":
autoRefreshMinutes: 3
commentOpener: "Editor"
showComments: true
showCommentsInDiffView: true
showCommentsInEditors: true
showOnlyMostRecentDiff: true
showResolvedComments: false
"fb-populate-symbols":
populateSymbolsView: true
"fb-prettier":
formatAlreadyFormattedFilesOnSave: true
"fb-rating":
rating: 5
"fb-similar-code-detector":
debug: false
"fb-skip":
includeTypeErrors: true
logLevel: "INFO"
pathToSkip: ""
remotePathToSkip: "~/www/scripts/skip"
useLSP: true
"fb-symbol-wiki":
priority: 10
"fb-terminal-dash-provider":
defaultTerminalPane: "bottom"
"fb-vscode-ext":
"fb-cquery":
"disable-flag-fetching": false
"enable-code-lens": false
"enable-libclang-logs": false
"indexer-threads": 0
"local-cache-directory": ""
"memory-limit": 30
"remote-cache-directory": ""
"use-cquery": true
"use-cquery-gk-result": true
javascript:
format:
enable: false
validate:
enable: false
"js-imports":
componentModulePathFilter: "html/js"
"nuclide-js-imports-client":
componentModulePathFilter: "html/js"
diagnosticsWhitelist: [
"/nuclide"
"/www"
"/instance-ide"
"/xplat/js"
"/oculus-skyline"
"/Software/Apps/skyline"
"/www-hg"
"/trunk-hg-www"
]
logLevel: "INFO"
requiresWhitelist: [
"/www"
"/instance-ide"
"/xplat/js"
"/www-hg"
"/trunk-hg-www"
]
"nuclide-ocaml":
codeLens: true
codeLensCopy: true
codeLensMultiLine: false
logLevel: "DEBUG"
pathToMerlin: "ocamlmerlin"
pathToOcpIndent: ""
pathToRefmt: "refmt"
refmtFlags: ""
pyls:
BuckFormatOnSave: false
DsfmtFormatOnSave: false
InstagramFormatOnSave: false
formatAlreadyFormattedFilesOnSave: false
plugins:
fb_buck_format:
enabled: true
fb_dataswarm_format:
enabled: false
fb_dataswarm_linter: {}
fb_dataswarm_macros_definition:
enabled: true
fb_dataswarm_macros_hover:
enabled: true
fb_dataswarm_roots:
enabled: true
fb_dataswarm_table_definition:
enabled: true
fb_dataswarm_udf_completion:
enabled: true
fb_dataswarm_udf_hover:
enabled: true
fb_ig_format:
enabled: true
fb_ig_source_roots:
enabled: true
fb_linktree:
enabled: true
jedi_highlight:
enabled: true
source_roots:
enabled: true
hyperclick:
darwinTriggerKeys: "metaKey"
linuxTriggerKeys: "ctrlKey"
win32TriggerKeys: "ctrlKey"
installRecommendedPackages: true
"nuclide-adb-logcat":
pathToAdb: "adb"
whitelistedTags: "^((unknown:|)React|ReactNativeJS|ACRA|AndroidRuntime|TTI|BuckReporting|unknown)$"
"nuclide-bookshelf":
changeActiveBookmarkBehavior: "Always Ignore"
"nuclide-buck":
buildDbErrorNotify: true
compileErrorDiagnostics: true
pathToBuck: "buck"
suggestTaskRunner: false
"nuclide-clang":
defaultDiagnostics: false
enableDefaultFlags: true
libclangPath: ""
serverProcessMemoryLimit: 15
"nuclide-code-search":
localTool: ""
localUseVcsSearch: true
maxResults: 30
remoteTool: ""
remoteUseVcsSearch: true
"nuclide-css-lsp-client":
logLevel: "INFO"
"nuclide-ctags":
disableWithHack: true
"nuclide-debugger-java":
sourceFilePaths: ""
"nuclide-debugger-php":
hhvmRuntimeArgs: ""
hhvmRuntimePath: "/usr/local/bin/hhvm"
hhvmServerAttachPort: 8999
"nuclide-definition-preview":
editorHeight: 50
priority: 20
"nuclide-distraction-free-mode":
hideFindAndReplace: false
hideStatusBar: false
hideToolBar: false
"nuclide-file-tree":
allowKeyboardPrefixNavigation: true
autoExpandSingleChild: true
focusEditorOnFileSelection: false
hideIgnoredNames: true
hideVcsIgnoredPaths: true
revealFileOnSwitch: false
showOpenFiles: true
showUncommittedChanges: true
showUncommittedChangesKind: "Uncommitted changes"
"nuclide-flow":
canUseFlowBin: false
excludeOtherAutocomplete: false
flowAutocompleteResultsFirst: true
functionSnippetShouldIncludeArguments: true
liveSyntaxErrors: true
logLevel: "DEBUG"
pathToFlow: "flow"
stopFlowOnExit: true
warnOnNotInstalled: false
"nuclide-fuzzy-filename-provider":
smartCase: false
"nuclide-hack":
hhClientPath: ""
legacyHackIde: false
logLevel: "DEBUG"
"nuclide-health":
analyticsTimeout: 20
viewTimeout: 20
"nuclide-hg-repository":
enableDiffStats: true
"nuclide-home":
showHome: true
"nuclide-infer-al":
pathToInfer: "infer"
"nuclide-ios-simulator-logs":
pathToSyslog: "syslog"
whitelistedTags: "^(core\\.react|tid:com\\.facebook\\.react\\.JavaScript|com.apple.console)$"
"nuclide-js-imports-client":
componentModulePathFilter: "html/js"
diagnosticsWhitelist: [
"/nuclide"
"/www"
"/instance-ide"
"/xplat/js"
"/oculus-skyline"
"/Software/Apps/skyline"
"/www-hg"
"/trunk-hg-www"
]
logLevel: "INFO"
requiresWhitelist: [
"/www"
"/instance-ide"
"/xplat/js"
"/www-hg"
"/trunk-hg-www"
]
"nuclide-notifications":
onlyNativeFriendly: true
whenFocused: false
"nuclide-objc":
enableAutomaticSquareBracketInsertion: true
"nuclide-ocaml":
codeLens: true
codeLensCopy: true
logLevel: "DEBUG"
pathToMerlin: "ocamlmerlin"
pathToOcpIndent: ""
pathToRefmt: "refmt"
refmtFlags: ""
"nuclide-python":
autocompleteArguments: true
enableLinting: true
includeOptionalArguments: false
pathToFlake8: "flake8"
pathToPython: "python"
showGlobalVariables: true
showSignatureHelp: false
"nuclide-quick-open":
useSelection: true
"nuclide-remote-atom":
enableLocalCommandService: false
shouldNotifyWhenCommandLineIsWaitingOnFile: false
"nuclide-remote-projects":
connectionProfiles: [
{
deletable: true
displayTitle: "www"
params:
authMethod: "PASSWORD"
cwd: "/home/cmcginty/www"
displayTitle: "www"
pathToPrivateKey: "/Users/cmcginty/.ssh/fb_id_rsa"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "fbsource"
params:
authMethod: "PASSWORD"
cwd: "/home/cmcginty/fbsource"
displayTitle: "fbsource"
pathToPrivateKey: "/Users/cmcginty/.ssh/fb_id_rsa"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "configerator"
params:
authMethod: "SSL_AGENT"
cwd: "/data/users/cmcginty/configerator"
displayTitle: "configerator"
pathToPrivateKey: ""
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "configerator-dsi"
params:
cwd: "/home/cmcginty/configerator-dsi"
displayTitle: "configerator-dsi"
remoteServerCommand: ""
server: "our.cmcginty.sb.<EMAIL>"
sshPort: "22"
username: "cmcginty"
saveable: true
}
]
shutdownServerAfterDisconnection: false
"nuclide-swift":
sourceKittenDisabled: false
sourceKittenPath: "/usr/local/bin/sourcekitten"
swiftToolchainPath: ""
"nuclide-type-coverage":
colorizeStatusBar: true
"nuclide-vcs-log":
showDifferentialRevision: true
"nuclide-welcome-page":
hiddenTopics: [
"fb-nuclide-welcome-page"
]
"nuclide-working-sets": {}
use:
"atom-ide-busy-signal": "default"
"atom-ide-code-actions": "default"
"atom-ide-code-format": "default"
"atom-ide-code-highlight": "default"
"atom-ide-console": "default"
"atom-ide-datatip": "default"
"atom-ide-debugger": "default"
"atom-ide-debugger-java": "default"
"atom-ide-debugger-java-android": "default"
"atom-ide-debugger-native-gdb": "default"
"atom-ide-debugger-node": "default"
"atom-ide-debugger-ocaml": "default"
"atom-ide-debugger-python": "default"
"atom-ide-debugger-react-native": "default"
"atom-ide-definitions": "default"
"atom-ide-diagnostics": "default"
"atom-ide-diagnostics-ui": "default"
"atom-ide-find-references": "default"
"atom-ide-global": "default"
"atom-ide-outline-view": "default"
"atom-ide-refactor": "default"
"atom-ide-signature-help": "default"
"atom-ide-terminal": "default"
"fb-ai-code-search": "default"
"fb-ai-complete-blocks": "default"
"fb-ai-google-code-search": "default"
"fb-alitepath-provider": "default"
"fb-analytics-client": "default"
"fb-android-menu-items": "default"
"fb-android-native-debugger": "default"
"fb-android-welcome-page": "default"
"fb-arc-lint": "default"
"fb-arc-paste": "default"
"fb-arcanist": "default"
"fb-arcanist-editor": "default"
"fb-atom-command-dash-provider": "default"
"fb-atom-snippets": "default"
"fb-atomprojects": "default"
"fb-aurora-language-server": "default"
"fb-biggrep": "default"
"fb-biggrep-dash-provider": "default"
"fb-breadcrumbs": "default"
"fb-buck-java": "default"
"fb-buck-mobilejs": "default"
"fb-code-patterns": "default"
"fb-codegraph": "default"
"fb-codex": "default"
"fb-codex-pages": "default"
"fb-component-search": "default"
"fb-componentscript": "default"
"fb-componentscript-fiddle": "default"
"fb-cquery": "default"
"fb-css-constants": "default"
"fb-dash": "default"
"fb-dataswarm": "default"
"fb-debugger-mobilejs": "default"
"fb-debugger-native-config-resolver": "default"
"fb-debugger-source-paths": "default"
"fb-devserver-checkup": "default"
"fb-diff-view": "default"
"fb-disable-lf-utf8": "default"
"fb-dumpapp": "default"
"fb-employee": "default"
"fb-entschema": "default"
"fb-eslint-client": "default"
"fb-file-a-bug": "default"
"fb-file-family-data-plugins": "default"
"fb-fuzzy-file-dash-provider": "default"
"fb-gatekeeper": "default"
"fb-generated-file-support": "default"
"fb-go": "default"
"fb-go-to-file-symbol-dash-provider": "default"
"fb-go-to-line-dash-provider": "default"
"fb-go-to-project-symbol-dash-provider": "default"
"fb-graphql-metrics": "default"
"fb-hack-playground": "default"
"fb-hackast-refactorizer": "default"
"fb-haskell-lsp": "default"
"fb-hg-dash-provider": "default"
"fb-home": "default"
"fb-http-request-sender": "default"
"fb-instarn": "default"
"fb-intellij": "default"
"fb-interactive-smartlog": "default"
"fb-intern-dash-provider": "default"
"fb-intl-fbt-str": "default"
"fb-java": "default"
"fb-java-test-runner": "default"
"fb-jellyfish": "default"
"fb-jest-test-runner": "default"
"fb-js-refactorizer": "default"
"fb-js-require-size-client": "default"
"fb-language-capirca": "default"
"fb-language-dataswarm": "default"
"fb-login": "default"
"fb-miso": "default"
"fb-neuralcodesearch-provider": "default"
"fb-new-file-template": "default"
"fb-notes": "default"
"fb-nuclide-beta-indicator": "default"
"fb-nuclide-node-playground": "default"
"fb-nuclide-onboarding": "default"
"fb-nuclide-onboarding-core-tasks": "default"
"fb-nuclide-remote-transfer": "default"
"fb-nux": "default"
"fb-observe-www-directory": "default"
"fb-on-demand": "default"
"fb-oneworld": "default"
"fb-osmeta": "default"
"fb-osmeta-core": "default"
"fb-package-center": "default"
"fb-package-manager": "default"
"fb-perf-gutter": "default"
"fb-phabricator": "default"
"fb-phabricator-comments-client": "default"
"fb-populate-symbols": "default"
"fb-prettier": "default"
"fb-product-alert": "default"
"fb-pyre-language-server": "default"
"fb-python-lsp": "default"
"fb-rating": "default"
"fb-relay": "default"
"fb-remote-uri-formatters": "default"
"fb-rust-language-server": "default"
"fb-sample-dash-provider": "never"
"fb-scala-lsp": "default"
"fb-scmquery": "default"
"fb-services": "default"
"fb-similar-code-detector": "default"
"fb-sitevar-notification": "default"
"fb-skip": "default"
"fb-slog": "default"
"fb-smoothscroll": "default"
"fb-sparse-file-preview": "default"
"fb-symbol-wiki": "default"
"fb-terminal-dash-provider": "default"
"fb-terminal-onboarding": "default"
"fb-test-pilot-test-runner": "default"
"fb-test-runner": "default"
"fb-test-runner-ui": "default"
"fb-thrift-client-service": "default"
"fb-thrift-language-server": "default"
"fb-trace-collector": "default"
"fb-ui-component-tools": "default"
"fb-xml": "default"
hyperclick: "default"
"nuclide-adb-logcat": "default"
"nuclide-atom-notifications": "default"
"nuclide-autocomplete": "default"
"nuclide-blame": "default"
"nuclide-blame-provider-hg": "default"
"nuclide-bookshelf": "default"
"nuclide-buck": "default"
"nuclide-buck-ios": "default"
"nuclide-clang": "default"
"nuclide-clipboard-path": "default"
"nuclide-code-search": "default"
"nuclide-context-view": "default"
"nuclide-css-lsp-client": "default"
"nuclide-ctags": "default"
"nuclide-current-working-directory": "default"
"nuclide-debugger-java": "default"
"nuclide-debugger-native": "default"
"nuclide-debugger-php": "default"
"nuclide-debugger-prepack": "default"
"nuclide-debugger-vsp": "default"
"nuclide-deep-link": "default"
"nuclide-definition-preview": "default"
"nuclide-definition-preview-provider": "default"
"nuclide-deprecation-cop": "default"
"nuclide-device-panel": "default"
"nuclide-device-panel-android": "default"
"nuclide-device-panel-ios": "default"
"nuclide-diagnostics-extras": "default"
"nuclide-diff-gutters": "default"
"nuclide-distraction-free-mode": "default"
"nuclide-file-family": "default"
"nuclide-file-family-projectionist": "default"
"nuclide-file-tree": "default"
"nuclide-file-watcher": "default"
"nuclide-flow": "default"
"nuclide-fuzzy-filename-provider": "default"
"nuclide-graphql": "default"
"nuclide-hack": "default"
"nuclide-health": "default"
"nuclide-hg-repository": "default"
"nuclide-hhvm": "default"
"nuclide-home": "default"
"nuclide-http-request-sender": "default"
"nuclide-image-view": "default"
"nuclide-infer-al": "default"
"nuclide-ios-simulator-logs": "default"
"nuclide-js-imports-client": "default"
"nuclide-json": "default"
"nuclide-key-binding-hint-status": "default"
"nuclide-language-hack": "default"
"nuclide-language-infer-al": "default"
"nuclide-language-reason": "default"
"nuclide-language-service-client": "default"
"nuclide-language-status": "default"
"nuclide-metro": "default"
"nuclide-move-item-to-available-pane": "default"
"nuclide-navigation-stack": "default"
"nuclide-navigation-stack-status-bar": "default"
"nuclide-notifications": "default"
"nuclide-nux": "default"
"nuclide-objc": "default"
"nuclide-ocaml": "default"
"nuclide-open-filenames-provider": "default"
"nuclide-outline-view-extras": "default"
"nuclide-project": "default"
"nuclide-python": "default"
"nuclide-quick-open": "default"
"nuclide-react-inspector": "default"
"nuclide-recent-files-provider": "default"
"nuclide-recent-files-service": "default"
"nuclide-reload-deep-link": "default"
"nuclide-remote-atom": "default"
"nuclide-remote-projects": "default"
"nuclide-settings": "default"
"nuclide-ssh-tunnel": "default"
"nuclide-swift": "default"
"nuclide-syntactic-selection": "default"
"nuclide-task-runner": "default"
"nuclide-test-runner": "default"
"nuclide-type-coverage": "default"
"nuclide-type-hint": "default"
"nuclide-ui": "default"
"nuclide-unicode-datatip": "default"
"nuclide-url-hyperclick": "default"
"nuclide-vcs-log": "default"
"nuclide-welcome-page": "default"
"nuclide-working-sets": "default"
useLocalRpc: true
"spell-check":
addKnownWords: true
grammars: [
"source.asciidoc"
"source.gfm"
"text.git-commit"
"text.plain"
"text.plain.null-grammar"
"source.rst"
"text.restructuredtext"
"text.html.php"
"text.html.hack"
"source.python"
"text.md"
]
knownWords: [
"awaitable"
"contbuild"
"contbuilds"
"darray"
"dict"
"nullthrows"
"oncall"
"unlanded"
"vec"
"deduped"
"Traversables"
]
"tool-bar":
iconSize: "16px"
position: "Left"
useTouchBar: false
"vim-mode-plus":
ignoreCaseForSearch: true
welcome:
showOnStartup: false
whitespace:
ignoreWhitespaceOnCurrentLine: false
".python.regexp.source":
editor:
preferredLineLength: 88
".python.source":
editor:
preferredLineLength: 88
".text.xml":
editor:
nonWordCharacters: "/\\()\"',.;<>~!@#$%^&*|+=[]{}`?…"
| true | "*":
"atom-beautify":
general:
_analyticsUserId: "aab204c6-1c8e-47d3-afaa-31c509007127"
"autocomplete-plus":
strictMatching: true
"autocomplete-python":
useKite: false
core:
automaticallyUpdate: false
customFileTypes:
"source.ini": [
".hgrc"
".buckconfig"
".flowconfig"
]
"source.json": [
".arcconfig"
".jfconfig"
".watchmanconfig"
"BUCK.autodeps"
]
"source.python": [
"BUILD_DEFS"
"TARGETS"
"BUCK_WIN"
"bzl"
"cconf"
"cinc"
"ctest"
"ctw"
"mcconf"
"tw"
"thrift-cvalidator"
"BUCK"
]
"source.reason": [
".re"
]
"source.yaml": [
"yarn.lock"
]
disabledPackages: [
"markdown-preview"
"metrics"
"encoding-selector"
"line-ending-selector"
"fuzzy-finder"
"linter"
"github"
"tree-view"
"image-view"
"git-diff"
"haskell-grammar"
]
ignoredNames: [
".git"
".hg"
".svn"
".DS_Store"
"._*"
"Thumbs.db"
"desktop.ini"
"**/xplat/**/.*.metadata"
]
telemetryConsent: "no"
themes: [
"one-dark-ui"
"monokai"
]
useTreeSitterParsers: false
editor:
fontFamily: "UbuntuMono Nerd Font"
lineHeight: 1.1
showIndentGuide: true
"exception-reporting":
userId: "b3bd855a-38b4-44af-961e-0c6cfa5383f2"
"markdown-preview-plus":
syncConfig:
syncPreviewOnChange: true
syntaxThemeName: "solarized-dark-syntax"
useGitHubStyle: true
minimap:
plugins:
"split-diff": true
"split-diffDecorationsZIndex": 0
notifications:
showErrorsInDevMode: true
nuclide:
"atom-ide-code-format":
formatOnSave: false
formatOnType: true
"atom-ide-console":
diffUrlPattern: "https://phabricator.intern.facebook.com/%s"
fontScale: 0.9
maximumMessageCount: 10000
maximumSerializedHistory: 20
maximumSerializedMessages: 10
taskUrlPattern: "https://our.intern.facebook.com/intern/tasks?t=%s"
"atom-ide-datatip":
datatipDebounceDelay: 200
datatipInteractedWithDebounceDelay: 1000
onlyTopDatatip: false
"atom-ide-debugger":
showDebuggerOnBpSet: true
"atom-ide-diagnostics-ui":
autoVisibility: false
showDirectoryColumn: false
statusBarPosition: "left"
"atom-ide-find-references":
defaultLocationForPane: "bottom"
"atom-ide-outline-view":
nameOnly: false
"atom-ide-signature-help":
enable: true
"atom-ide-terminal":
allowTransparency: false
black: "#2e3436"
blue: "#3465a4"
brightBlack: "#555753"
brightBlue: "#729fcf"
brightCyan: "#34e2e2"
brightGreen: "#8ae234"
brightMagenta: "#ad7fa8"
brightRed: "#ef2929"
brightWhite: "#eeeeec"
brightYellow: "#fce94f"
charAtlas: "dynamic"
cursorBlink: false
cursorStyle: "block"
cyan: "#06989a"
documentationMessage: true
fontFamily: "courier-new, courier, monospace"
fontScale: 1
green: "#4e9a06"
lineHeight: 1.5
magenta: "#75507b"
optionIsMeta: false
red: "#cc0000"
rendererType: "auto"
scrollback: 500000
white: "#d3d7cf"
yellow: "#c4a000"
"fb-ai-code-search":
usePrefixOnly: true
"fb-ai-complete-blocks":
enableDevelopmentMode: false
"fb-android-native-debugger":
"devserver-android-tunnel": true
"fb-arc-lint":
blacklistedLinters: [
"FBHACK"
"FLAKE8"
"FLOW"
"HACK"
"HACKSYNTAX"
"PEP8"
]
lintTimeout: 60000
"fb-atomprojects":
localPaths: []
remotePaths: []
"fb-aurora-language-server":
logLevel: "INFO"
"fb-biggrep":
filterToProject: true
"fb-codex-pages":
priority: 30
"fb-component-search":
resultCountLimit: -1
shouldDisplayFullJSX: false
"fb-componentscript":
warnGraphQLChanged: false
"fb-cquery":
"enable-libclang-logs": false
"indexer-threads": 0
"memory-limit": 30
"use-cquery": true
"use-cquery-gk-result": true
"fb-debugger-mobilejs":
autoAttachComponentScript: "None"
autoAttachReactNative: "None"
logInspectorProxyMessages: false
showConsoleMessages: false
"fb-eslint-client":
"disable-arc-lint-results": true
logLevel: "INFO"
"fb-generated-file-support":
allowEdits: true
"fb-go":
goLanguageFeaturesProvider: "Default"
"fb-graphql-metrics":
priority: 50
"fb-highlight-selected":
allowedCharactersToSelect: "$@%-"
hideHighlightOnSelectedWord: false
highlightBackground: false
highlightInPanes: true
ignoreCase: false
lightTheme: false
minimumLength: 2
onlyHighlightWholeWords: true
showInStatusBar: true
showResultsOnScrollBar: false
statusBarString: "Highlighted: %c"
timeout: 20
"fb-intellij":
intelliJKeybindings: false
"fb-interactive-smartlog":
autorunPostResolveScripts: false
conflictResolverEnabled: true
externalMergeToolArguments: ""
externalMergeToolEnabled: false
externalMergeToolPath: ""
fetchCommitTemplateOption: "File"
fetchOption: "jf get"
modalComparison: true
pullOption: "hg pull"
showCommitInfoViewInline: false
showTools: true
syncMessagesToPhabricator: false
"fb-java":
autoBuild: false
autoFold: true
coverageType: "Symbols"
eclipseLogLevel: "ERROR"
languageServer: "Java Language Server"
logLevel: "Finer"
problemsToIgnore: ""
showCompleteDiagnostics: true
"fb-jellyfish":
pathToJellyfish: "jf"
"fb-miso":
authType: "token"
gateway: "http://localhost:8888"
"fb-new-file-template":
oncall_name: "rel_eng"
"fb-notes":
"fb-notes": {}
"fb-on-demand":
autoSetCwd: true
"close-behavior": "ask"
"duo-push-immediately": false
"mobile-android-tunnel": true
"mobile-fbandroid-target": "fb4a-noexo"
"mobile-fbobjc-target": "fbios"
"mobile-subdirectory-notification": true
"mobile-xplat-metro": true
"fb-oneworld":
apertureAppExecPath: ""
apertureAppUri: "https://our.intern.facebook.com/intern/aperture/app"
"fb-osmeta-core":
hideInternalTargets: true
"fb-package-manager":
"fuzzy-finder": false
github: false
linter: false
metrics: false
overriddenThirdPartySettings: []
showChangelogs: true
"fb-phabricator-comments-client":
autoRefreshMinutes: 3
commentOpener: "Editor"
showComments: true
showCommentsInDiffView: true
showCommentsInEditors: true
showOnlyMostRecentDiff: true
showResolvedComments: false
"fb-populate-symbols":
populateSymbolsView: true
"fb-prettier":
formatAlreadyFormattedFilesOnSave: true
"fb-rating":
rating: 5
"fb-similar-code-detector":
debug: false
"fb-skip":
includeTypeErrors: true
logLevel: "INFO"
pathToSkip: ""
remotePathToSkip: "~/www/scripts/skip"
useLSP: true
"fb-symbol-wiki":
priority: 10
"fb-terminal-dash-provider":
defaultTerminalPane: "bottom"
"fb-vscode-ext":
"fb-cquery":
"disable-flag-fetching": false
"enable-code-lens": false
"enable-libclang-logs": false
"indexer-threads": 0
"local-cache-directory": ""
"memory-limit": 30
"remote-cache-directory": ""
"use-cquery": true
"use-cquery-gk-result": true
javascript:
format:
enable: false
validate:
enable: false
"js-imports":
componentModulePathFilter: "html/js"
"nuclide-js-imports-client":
componentModulePathFilter: "html/js"
diagnosticsWhitelist: [
"/nuclide"
"/www"
"/instance-ide"
"/xplat/js"
"/oculus-skyline"
"/Software/Apps/skyline"
"/www-hg"
"/trunk-hg-www"
]
logLevel: "INFO"
requiresWhitelist: [
"/www"
"/instance-ide"
"/xplat/js"
"/www-hg"
"/trunk-hg-www"
]
"nuclide-ocaml":
codeLens: true
codeLensCopy: true
codeLensMultiLine: false
logLevel: "DEBUG"
pathToMerlin: "ocamlmerlin"
pathToOcpIndent: ""
pathToRefmt: "refmt"
refmtFlags: ""
pyls:
BuckFormatOnSave: false
DsfmtFormatOnSave: false
InstagramFormatOnSave: false
formatAlreadyFormattedFilesOnSave: false
plugins:
fb_buck_format:
enabled: true
fb_dataswarm_format:
enabled: false
fb_dataswarm_linter: {}
fb_dataswarm_macros_definition:
enabled: true
fb_dataswarm_macros_hover:
enabled: true
fb_dataswarm_roots:
enabled: true
fb_dataswarm_table_definition:
enabled: true
fb_dataswarm_udf_completion:
enabled: true
fb_dataswarm_udf_hover:
enabled: true
fb_ig_format:
enabled: true
fb_ig_source_roots:
enabled: true
fb_linktree:
enabled: true
jedi_highlight:
enabled: true
source_roots:
enabled: true
hyperclick:
darwinTriggerKeys: "metaKey"
linuxTriggerKeys: "ctrlKey"
win32TriggerKeys: "ctrlKey"
installRecommendedPackages: true
"nuclide-adb-logcat":
pathToAdb: "adb"
whitelistedTags: "^((unknown:|)React|ReactNativeJS|ACRA|AndroidRuntime|TTI|BuckReporting|unknown)$"
"nuclide-bookshelf":
changeActiveBookmarkBehavior: "Always Ignore"
"nuclide-buck":
buildDbErrorNotify: true
compileErrorDiagnostics: true
pathToBuck: "buck"
suggestTaskRunner: false
"nuclide-clang":
defaultDiagnostics: false
enableDefaultFlags: true
libclangPath: ""
serverProcessMemoryLimit: 15
"nuclide-code-search":
localTool: ""
localUseVcsSearch: true
maxResults: 30
remoteTool: ""
remoteUseVcsSearch: true
"nuclide-css-lsp-client":
logLevel: "INFO"
"nuclide-ctags":
disableWithHack: true
"nuclide-debugger-java":
sourceFilePaths: ""
"nuclide-debugger-php":
hhvmRuntimeArgs: ""
hhvmRuntimePath: "/usr/local/bin/hhvm"
hhvmServerAttachPort: 8999
"nuclide-definition-preview":
editorHeight: 50
priority: 20
"nuclide-distraction-free-mode":
hideFindAndReplace: false
hideStatusBar: false
hideToolBar: false
"nuclide-file-tree":
allowKeyboardPrefixNavigation: true
autoExpandSingleChild: true
focusEditorOnFileSelection: false
hideIgnoredNames: true
hideVcsIgnoredPaths: true
revealFileOnSwitch: false
showOpenFiles: true
showUncommittedChanges: true
showUncommittedChangesKind: "Uncommitted changes"
"nuclide-flow":
canUseFlowBin: false
excludeOtherAutocomplete: false
flowAutocompleteResultsFirst: true
functionSnippetShouldIncludeArguments: true
liveSyntaxErrors: true
logLevel: "DEBUG"
pathToFlow: "flow"
stopFlowOnExit: true
warnOnNotInstalled: false
"nuclide-fuzzy-filename-provider":
smartCase: false
"nuclide-hack":
hhClientPath: ""
legacyHackIde: false
logLevel: "DEBUG"
"nuclide-health":
analyticsTimeout: 20
viewTimeout: 20
"nuclide-hg-repository":
enableDiffStats: true
"nuclide-home":
showHome: true
"nuclide-infer-al":
pathToInfer: "infer"
"nuclide-ios-simulator-logs":
pathToSyslog: "syslog"
whitelistedTags: "^(core\\.react|tid:com\\.facebook\\.react\\.JavaScript|com.apple.console)$"
"nuclide-js-imports-client":
componentModulePathFilter: "html/js"
diagnosticsWhitelist: [
"/nuclide"
"/www"
"/instance-ide"
"/xplat/js"
"/oculus-skyline"
"/Software/Apps/skyline"
"/www-hg"
"/trunk-hg-www"
]
logLevel: "INFO"
requiresWhitelist: [
"/www"
"/instance-ide"
"/xplat/js"
"/www-hg"
"/trunk-hg-www"
]
"nuclide-notifications":
onlyNativeFriendly: true
whenFocused: false
"nuclide-objc":
enableAutomaticSquareBracketInsertion: true
"nuclide-ocaml":
codeLens: true
codeLensCopy: true
logLevel: "DEBUG"
pathToMerlin: "ocamlmerlin"
pathToOcpIndent: ""
pathToRefmt: "refmt"
refmtFlags: ""
"nuclide-python":
autocompleteArguments: true
enableLinting: true
includeOptionalArguments: false
pathToFlake8: "flake8"
pathToPython: "python"
showGlobalVariables: true
showSignatureHelp: false
"nuclide-quick-open":
useSelection: true
"nuclide-remote-atom":
enableLocalCommandService: false
shouldNotifyWhenCommandLineIsWaitingOnFile: false
"nuclide-remote-projects":
connectionProfiles: [
{
deletable: true
displayTitle: "www"
params:
authMethod: "PASSWORD"
cwd: "/home/cmcginty/www"
displayTitle: "www"
pathToPrivateKey: "/Users/cmcginty/.ssh/fb_id_rsa"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "fbsource"
params:
authMethod: "PASSWORD"
cwd: "/home/cmcginty/fbsource"
displayTitle: "fbsource"
pathToPrivateKey: "/Users/cmcginty/.ssh/fb_id_rsa"
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "configerator"
params:
authMethod: "SSL_AGENT"
cwd: "/data/users/cmcginty/configerator"
displayTitle: "configerator"
pathToPrivateKey: ""
remoteServerCommand: ""
server: "our.cmcginty.sb.facebook.com"
sshPort: "22"
username: "cmcginty"
saveable: true
}
{
deletable: true
displayTitle: "configerator-dsi"
params:
cwd: "/home/cmcginty/configerator-dsi"
displayTitle: "configerator-dsi"
remoteServerCommand: ""
server: "our.cmcginty.sb.PI:EMAIL:<EMAIL>END_PI"
sshPort: "22"
username: "cmcginty"
saveable: true
}
]
shutdownServerAfterDisconnection: false
"nuclide-swift":
sourceKittenDisabled: false
sourceKittenPath: "/usr/local/bin/sourcekitten"
swiftToolchainPath: ""
"nuclide-type-coverage":
colorizeStatusBar: true
"nuclide-vcs-log":
showDifferentialRevision: true
"nuclide-welcome-page":
hiddenTopics: [
"fb-nuclide-welcome-page"
]
"nuclide-working-sets": {}
use:
"atom-ide-busy-signal": "default"
"atom-ide-code-actions": "default"
"atom-ide-code-format": "default"
"atom-ide-code-highlight": "default"
"atom-ide-console": "default"
"atom-ide-datatip": "default"
"atom-ide-debugger": "default"
"atom-ide-debugger-java": "default"
"atom-ide-debugger-java-android": "default"
"atom-ide-debugger-native-gdb": "default"
"atom-ide-debugger-node": "default"
"atom-ide-debugger-ocaml": "default"
"atom-ide-debugger-python": "default"
"atom-ide-debugger-react-native": "default"
"atom-ide-definitions": "default"
"atom-ide-diagnostics": "default"
"atom-ide-diagnostics-ui": "default"
"atom-ide-find-references": "default"
"atom-ide-global": "default"
"atom-ide-outline-view": "default"
"atom-ide-refactor": "default"
"atom-ide-signature-help": "default"
"atom-ide-terminal": "default"
"fb-ai-code-search": "default"
"fb-ai-complete-blocks": "default"
"fb-ai-google-code-search": "default"
"fb-alitepath-provider": "default"
"fb-analytics-client": "default"
"fb-android-menu-items": "default"
"fb-android-native-debugger": "default"
"fb-android-welcome-page": "default"
"fb-arc-lint": "default"
"fb-arc-paste": "default"
"fb-arcanist": "default"
"fb-arcanist-editor": "default"
"fb-atom-command-dash-provider": "default"
"fb-atom-snippets": "default"
"fb-atomprojects": "default"
"fb-aurora-language-server": "default"
"fb-biggrep": "default"
"fb-biggrep-dash-provider": "default"
"fb-breadcrumbs": "default"
"fb-buck-java": "default"
"fb-buck-mobilejs": "default"
"fb-code-patterns": "default"
"fb-codegraph": "default"
"fb-codex": "default"
"fb-codex-pages": "default"
"fb-component-search": "default"
"fb-componentscript": "default"
"fb-componentscript-fiddle": "default"
"fb-cquery": "default"
"fb-css-constants": "default"
"fb-dash": "default"
"fb-dataswarm": "default"
"fb-debugger-mobilejs": "default"
"fb-debugger-native-config-resolver": "default"
"fb-debugger-source-paths": "default"
"fb-devserver-checkup": "default"
"fb-diff-view": "default"
"fb-disable-lf-utf8": "default"
"fb-dumpapp": "default"
"fb-employee": "default"
"fb-entschema": "default"
"fb-eslint-client": "default"
"fb-file-a-bug": "default"
"fb-file-family-data-plugins": "default"
"fb-fuzzy-file-dash-provider": "default"
"fb-gatekeeper": "default"
"fb-generated-file-support": "default"
"fb-go": "default"
"fb-go-to-file-symbol-dash-provider": "default"
"fb-go-to-line-dash-provider": "default"
"fb-go-to-project-symbol-dash-provider": "default"
"fb-graphql-metrics": "default"
"fb-hack-playground": "default"
"fb-hackast-refactorizer": "default"
"fb-haskell-lsp": "default"
"fb-hg-dash-provider": "default"
"fb-home": "default"
"fb-http-request-sender": "default"
"fb-instarn": "default"
"fb-intellij": "default"
"fb-interactive-smartlog": "default"
"fb-intern-dash-provider": "default"
"fb-intl-fbt-str": "default"
"fb-java": "default"
"fb-java-test-runner": "default"
"fb-jellyfish": "default"
"fb-jest-test-runner": "default"
"fb-js-refactorizer": "default"
"fb-js-require-size-client": "default"
"fb-language-capirca": "default"
"fb-language-dataswarm": "default"
"fb-login": "default"
"fb-miso": "default"
"fb-neuralcodesearch-provider": "default"
"fb-new-file-template": "default"
"fb-notes": "default"
"fb-nuclide-beta-indicator": "default"
"fb-nuclide-node-playground": "default"
"fb-nuclide-onboarding": "default"
"fb-nuclide-onboarding-core-tasks": "default"
"fb-nuclide-remote-transfer": "default"
"fb-nux": "default"
"fb-observe-www-directory": "default"
"fb-on-demand": "default"
"fb-oneworld": "default"
"fb-osmeta": "default"
"fb-osmeta-core": "default"
"fb-package-center": "default"
"fb-package-manager": "default"
"fb-perf-gutter": "default"
"fb-phabricator": "default"
"fb-phabricator-comments-client": "default"
"fb-populate-symbols": "default"
"fb-prettier": "default"
"fb-product-alert": "default"
"fb-pyre-language-server": "default"
"fb-python-lsp": "default"
"fb-rating": "default"
"fb-relay": "default"
"fb-remote-uri-formatters": "default"
"fb-rust-language-server": "default"
"fb-sample-dash-provider": "never"
"fb-scala-lsp": "default"
"fb-scmquery": "default"
"fb-services": "default"
"fb-similar-code-detector": "default"
"fb-sitevar-notification": "default"
"fb-skip": "default"
"fb-slog": "default"
"fb-smoothscroll": "default"
"fb-sparse-file-preview": "default"
"fb-symbol-wiki": "default"
"fb-terminal-dash-provider": "default"
"fb-terminal-onboarding": "default"
"fb-test-pilot-test-runner": "default"
"fb-test-runner": "default"
"fb-test-runner-ui": "default"
"fb-thrift-client-service": "default"
"fb-thrift-language-server": "default"
"fb-trace-collector": "default"
"fb-ui-component-tools": "default"
"fb-xml": "default"
hyperclick: "default"
"nuclide-adb-logcat": "default"
"nuclide-atom-notifications": "default"
"nuclide-autocomplete": "default"
"nuclide-blame": "default"
"nuclide-blame-provider-hg": "default"
"nuclide-bookshelf": "default"
"nuclide-buck": "default"
"nuclide-buck-ios": "default"
"nuclide-clang": "default"
"nuclide-clipboard-path": "default"
"nuclide-code-search": "default"
"nuclide-context-view": "default"
"nuclide-css-lsp-client": "default"
"nuclide-ctags": "default"
"nuclide-current-working-directory": "default"
"nuclide-debugger-java": "default"
"nuclide-debugger-native": "default"
"nuclide-debugger-php": "default"
"nuclide-debugger-prepack": "default"
"nuclide-debugger-vsp": "default"
"nuclide-deep-link": "default"
"nuclide-definition-preview": "default"
"nuclide-definition-preview-provider": "default"
"nuclide-deprecation-cop": "default"
"nuclide-device-panel": "default"
"nuclide-device-panel-android": "default"
"nuclide-device-panel-ios": "default"
"nuclide-diagnostics-extras": "default"
"nuclide-diff-gutters": "default"
"nuclide-distraction-free-mode": "default"
"nuclide-file-family": "default"
"nuclide-file-family-projectionist": "default"
"nuclide-file-tree": "default"
"nuclide-file-watcher": "default"
"nuclide-flow": "default"
"nuclide-fuzzy-filename-provider": "default"
"nuclide-graphql": "default"
"nuclide-hack": "default"
"nuclide-health": "default"
"nuclide-hg-repository": "default"
"nuclide-hhvm": "default"
"nuclide-home": "default"
"nuclide-http-request-sender": "default"
"nuclide-image-view": "default"
"nuclide-infer-al": "default"
"nuclide-ios-simulator-logs": "default"
"nuclide-js-imports-client": "default"
"nuclide-json": "default"
"nuclide-key-binding-hint-status": "default"
"nuclide-language-hack": "default"
"nuclide-language-infer-al": "default"
"nuclide-language-reason": "default"
"nuclide-language-service-client": "default"
"nuclide-language-status": "default"
"nuclide-metro": "default"
"nuclide-move-item-to-available-pane": "default"
"nuclide-navigation-stack": "default"
"nuclide-navigation-stack-status-bar": "default"
"nuclide-notifications": "default"
"nuclide-nux": "default"
"nuclide-objc": "default"
"nuclide-ocaml": "default"
"nuclide-open-filenames-provider": "default"
"nuclide-outline-view-extras": "default"
"nuclide-project": "default"
"nuclide-python": "default"
"nuclide-quick-open": "default"
"nuclide-react-inspector": "default"
"nuclide-recent-files-provider": "default"
"nuclide-recent-files-service": "default"
"nuclide-reload-deep-link": "default"
"nuclide-remote-atom": "default"
"nuclide-remote-projects": "default"
"nuclide-settings": "default"
"nuclide-ssh-tunnel": "default"
"nuclide-swift": "default"
"nuclide-syntactic-selection": "default"
"nuclide-task-runner": "default"
"nuclide-test-runner": "default"
"nuclide-type-coverage": "default"
"nuclide-type-hint": "default"
"nuclide-ui": "default"
"nuclide-unicode-datatip": "default"
"nuclide-url-hyperclick": "default"
"nuclide-vcs-log": "default"
"nuclide-welcome-page": "default"
"nuclide-working-sets": "default"
useLocalRpc: true
"spell-check":
addKnownWords: true
grammars: [
"source.asciidoc"
"source.gfm"
"text.git-commit"
"text.plain"
"text.plain.null-grammar"
"source.rst"
"text.restructuredtext"
"text.html.php"
"text.html.hack"
"source.python"
"text.md"
]
knownWords: [
"awaitable"
"contbuild"
"contbuilds"
"darray"
"dict"
"nullthrows"
"oncall"
"unlanded"
"vec"
"deduped"
"Traversables"
]
"tool-bar":
iconSize: "16px"
position: "Left"
useTouchBar: false
"vim-mode-plus":
ignoreCaseForSearch: true
welcome:
showOnStartup: false
whitespace:
ignoreWhitespaceOnCurrentLine: false
".python.regexp.source":
editor:
preferredLineLength: 88
".python.source":
editor:
preferredLineLength: 88
".text.xml":
editor:
nonWordCharacters: "/\\()\"',.;<>~!@#$%^&*|+=[]{}`?…"
|
[
{
"context": " 24-bit pixel.\n These formulas were invented by David Dalrymple to obtain maximum contrast without going\n out ",
"end": 187,
"score": 0.9577504992485046,
"start": 172,
"tag": "NAME",
"value": "David Dalrymple"
},
{
"context": "nge 0-1.\n\n A saturation multip... | node_modules/chroma-js/src/converter/misc/lch2lab.coffee | Dozacode/ResumeChain | 15 | # @requires utils
lch2lab = () ->
###
Convert from a qualitative parameter h and a quantitative parameter l to a 24-bit pixel.
These formulas were invented by David Dalrymple to obtain maximum contrast without going
out of gamut if the parameters are in the range 0-1.
A saturation multiplier was added by Gregor Aisch
###
[l,c,h] = unpack arguments
h = h * DEG2RAD
[l, cos(h) * c, sin(h) * c]
| 41155 | # @requires utils
lch2lab = () ->
###
Convert from a qualitative parameter h and a quantitative parameter l to a 24-bit pixel.
These formulas were invented by <NAME> to obtain maximum contrast without going
out of gamut if the parameters are in the range 0-1.
A saturation multiplier was added by <NAME>
###
[l,c,h] = unpack arguments
h = h * DEG2RAD
[l, cos(h) * c, sin(h) * c]
| true | # @requires utils
lch2lab = () ->
###
Convert from a qualitative parameter h and a quantitative parameter l to a 24-bit pixel.
These formulas were invented by PI:NAME:<NAME>END_PI to obtain maximum contrast without going
out of gamut if the parameters are in the range 0-1.
A saturation multiplier was added by PI:NAME:<NAME>END_PI
###
[l,c,h] = unpack arguments
h = h * DEG2RAD
[l, cos(h) * c, sin(h) * c]
|
[
{
"context": " attributes', ->\n @object = new @Person(name: \"Jonas\", age: 28)\n serialized = @object.toJSON()\n ",
"end": 286,
"score": 0.9996823072433472,
"start": 281,
"tag": "NAME",
"value": "Jonas"
},
{
"context": "ject.toJSON()\n expect(serialized.name).to.eql(\"J... | test/model/to_json.spec.coffee | jnicklas/serenade.js | 1 | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe 'Serenade.Model#toJSON', ->
beforeEach ->
class @Person extends Serenade.Model
@attribute "name"
@attribute "age"
it 'serializes all attributes', ->
@object = new @Person(name: "Jonas", age: 28)
serialized = @object.toJSON()
expect(serialized.name).to.eql("Jonas")
expect(serialized.age).to.eql(28)
| 186531 | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe 'Serenade.Model#toJSON', ->
beforeEach ->
class @Person extends Serenade.Model
@attribute "name"
@attribute "age"
it 'serializes all attributes', ->
@object = new @Person(name: "<NAME>", age: 28)
serialized = @object.toJSON()
expect(serialized.name).to.eql("<NAME>")
expect(serialized.age).to.eql(28)
| true | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe 'Serenade.Model#toJSON', ->
beforeEach ->
class @Person extends Serenade.Model
@attribute "name"
@attribute "age"
it 'serializes all attributes', ->
@object = new @Person(name: "PI:NAME:<NAME>END_PI", age: 28)
serialized = @object.toJSON()
expect(serialized.name).to.eql("PI:NAME:<NAME>END_PI")
expect(serialized.age).to.eql(28)
|
[
{
"context": "undle file' or 'meteor build path'\n if key is 'bundle' or key is 'build'\n debug = '--debug' in pro",
"end": 6176,
"score": 0.933057963848114,
"start": 6170,
"tag": "KEY",
"value": "bundle"
},
{
"context": "eor build path'\n if key is 'bundle' or key is 'bui... | plugin/browserify.coffee | optilude/cosmos-browserify | 2 | Browserify = Npm.require 'browserify'
# use custom envify so we can specify the env based on the meteor command used
envify = Npm.require 'envify/custom'
# use exorcist transform to extract source map data
exorcist = Npm.require 'exorcist'
# get 'stream' to use PassThrough to provide a Buffer as a Readable stream
stream = Npm.require 'stream'
fs = Npm.require 'fs'
processFile = (step) ->
# check for extension as filename
checkFilename step
# get options for Browserify
browserifyOptions = getBrowserifyOptions step
# create a browserify instance passing our readable stream as input,
# and options object for debug and the basedir
browserify = Browserify [getReadable(step)], browserifyOptions
# extract envify tranform's options so it isn't used in loop
envifyOptions = browserifyOptions.transforms.envify
delete browserifyOptions.transforms.envify
# run each transform
for own transformName, transformOptions of browserifyOptions.transforms
browserify.transform transformName, transformOptions
# run the envify transform
browserify.transform envify envifyOptions
# have browserify process the file and include all required modules.
# we receive a readable stream as the result
bundle = browserify.bundle()
# set the readable stream's encoding so we read strings from it
bundle.setEncoding('utf8')
# extract the source map content from the generated file to give to Meteor
# explicitly by piping bundle thru `exorcist`
mapFileName = step.fullInputPath+'.map'
bundle = bundle.pipe exorcist mapFileName, step.pathForSourceMap
# use Meteor.wrapAsync to wrap `getString` so it's done synchronously
wrappedFn = Meteor.wrapAsync getString
try # try-catch for browserify errors
# call our wrapped function with the readable stream as its argument
string = wrappedFn bundle
# read the generated source map from the file
sourceMap = fs.readFileSync mapFileName, 'utf8'
fs.unlinkSync mapFileName
# now that we have the compiled result as a string we can add it using CompileStep
# inside try-catch because this shouldn't run when there's an error.
step.addJavaScript
path: step.inputPath # name of the file
sourcePath: step.inputPath # use same name, we've just browserified it
data: string # the actual browserified results
sourceMap: sourceMap
bare: step?.fileOptions?.bare
catch e
# output error via CompileStep#error()
# convert it to a string and then remove the 'Error: ' at the beginning.
step.error
message:e.toString().substring 7
sourcePath: step.inputPath
# add our function as the handler for files ending in 'browserify.js'
Plugin.registerSourceHandler 'browserify.js', processFile
# add a source handler for config files so that they are watched for changes
Plugin.registerSourceHandler 'browserify.options.json', ->
getBasedir = (step) ->
# basedir should point to the '.npm/package' folder containing the npm modules.
# step.fullInputPath is the full path to our browserify.js file. it may be:
# 1. in a package
# 2. in the app itself
# for both of the above it also may be:
# 1. in the root (of package or app)
# 2. in a subfolder
# NOTE:
# the app doesn't have npm support, so, no .npm/package.
# using meteorhacks:npm creates a package to contain the npm modules.
# so, if the browserify.js file is an app file, then let's look for
# packages/npm-container/.npm/package
# the basedir tail depends on whether this file is in the app or a package
# for an app file, we're going to assume they are using meteorhacks:npm
tail = if step?.packageName? then '.npm/package' else 'packages/npm-container/.npm/package'
# CompileStep has the absolute path to the file in `fullInputPath`
# CompileStep has the package/app relative path to the file in `inputPath`
# basedir is fullInputPath with inputPath replaced with the tail
basedir = step.fullInputPath[0...-(step.inputPath.length)] + tail
# TODO: use fs.existsSync basedir
# could print a more helpful message to user than the browserify error saying
# it can't find the module at this directory. can suggest checking package.js
# for Npm.depends(), or, if an app file, adding meteorhacks:npm and checking
# packages.json.
return basedir
getBrowserifyOptions = (step) ->
# empty user options to fill from file, if it exists
userOptions = {}
# look for a file with the same name, but .browserify.options.json extension
optionsFileName = step.fullInputPath[0...-2] + 'options.json'
if fs.existsSync optionsFileName
try
# read json file and convert it into an object
userOptions = JSON.parse fs.readFileSync optionsFileName, 'utf8'
catch e
step.error
message: 'Couldn\'t read JSON data: '+e.toString()
sourcePath: step.inputPath
# sane defaults for options; most important is the baseDir
defaultOptions =
# Browserify will look here for npm modules
basedir: getBasedir(step)
# must be true to produce source map which we extract via exorcist and
# provide to CompileStep
debug: true
# put the defaults for envify transform in here as well
# TODO: have an option which disables using envify
transforms:
envify:
NODE_ENV: if getDebug() then 'development' else 'production'
_:'purge'
# merge user options with defaults
_.defaults userOptions, defaultOptions
# when they supply transforms it clobbers the envify defaults because
# _.defaults works only on top level keys.
# so, if there's no envify then set the default options for it
userOptions.transforms?.envify ?= defaultOptions.transforms.envify
return userOptions
checkFilename = (step) ->
if step.inputPath is 'browserify.js'
console.log 'WARNING: using \'browserify.js\' as full filename may stop working.' +
' See Meteor Issue #3985. Please add something before it like: client.browserify.js'
getDebug = ->
debug = true
# check args used
for key in process.argv
# if 'meteor bundle file' or 'meteor build path'
if key is 'bundle' or key is 'build'
debug = '--debug' in process.argv
break;
return debug
getReadable = (step) ->
# Browserify accepts a Readable stream as input, so, we'll use a PassThrough
# stream to hold the Buffer
readable = new stream.PassThrough()
# Meteor's CompileStep provides the file as a Buffer from step.read()
# add the buffer into the stream and end the stream with one call to end()
readable.end step.read()
return readable
# async function for reading entire bundle output into a string
getString = (bundle, cb) ->
# holds all data read from bundle
string = ''
# concatenate data chunk to string
bundle.on 'data', (data) -> string += data
# when we reach the end, call Meteor.wrapAsync's callback with string result
bundle.once 'end', -> cb undefined, string # undefined = error
# when there's an error, give it to the callback
bundle.once 'error', (error) -> cb error
| 28005 | Browserify = Npm.require 'browserify'
# use custom envify so we can specify the env based on the meteor command used
envify = Npm.require 'envify/custom'
# use exorcist transform to extract source map data
exorcist = Npm.require 'exorcist'
# get 'stream' to use PassThrough to provide a Buffer as a Readable stream
stream = Npm.require 'stream'
fs = Npm.require 'fs'
processFile = (step) ->
# check for extension as filename
checkFilename step
# get options for Browserify
browserifyOptions = getBrowserifyOptions step
# create a browserify instance passing our readable stream as input,
# and options object for debug and the basedir
browserify = Browserify [getReadable(step)], browserifyOptions
# extract envify tranform's options so it isn't used in loop
envifyOptions = browserifyOptions.transforms.envify
delete browserifyOptions.transforms.envify
# run each transform
for own transformName, transformOptions of browserifyOptions.transforms
browserify.transform transformName, transformOptions
# run the envify transform
browserify.transform envify envifyOptions
# have browserify process the file and include all required modules.
# we receive a readable stream as the result
bundle = browserify.bundle()
# set the readable stream's encoding so we read strings from it
bundle.setEncoding('utf8')
# extract the source map content from the generated file to give to Meteor
# explicitly by piping bundle thru `exorcist`
mapFileName = step.fullInputPath+'.map'
bundle = bundle.pipe exorcist mapFileName, step.pathForSourceMap
# use Meteor.wrapAsync to wrap `getString` so it's done synchronously
wrappedFn = Meteor.wrapAsync getString
try # try-catch for browserify errors
# call our wrapped function with the readable stream as its argument
string = wrappedFn bundle
# read the generated source map from the file
sourceMap = fs.readFileSync mapFileName, 'utf8'
fs.unlinkSync mapFileName
# now that we have the compiled result as a string we can add it using CompileStep
# inside try-catch because this shouldn't run when there's an error.
step.addJavaScript
path: step.inputPath # name of the file
sourcePath: step.inputPath # use same name, we've just browserified it
data: string # the actual browserified results
sourceMap: sourceMap
bare: step?.fileOptions?.bare
catch e
# output error via CompileStep#error()
# convert it to a string and then remove the 'Error: ' at the beginning.
step.error
message:e.toString().substring 7
sourcePath: step.inputPath
# add our function as the handler for files ending in 'browserify.js'
Plugin.registerSourceHandler 'browserify.js', processFile
# add a source handler for config files so that they are watched for changes
Plugin.registerSourceHandler 'browserify.options.json', ->
getBasedir = (step) ->
# basedir should point to the '.npm/package' folder containing the npm modules.
# step.fullInputPath is the full path to our browserify.js file. it may be:
# 1. in a package
# 2. in the app itself
# for both of the above it also may be:
# 1. in the root (of package or app)
# 2. in a subfolder
# NOTE:
# the app doesn't have npm support, so, no .npm/package.
# using meteorhacks:npm creates a package to contain the npm modules.
# so, if the browserify.js file is an app file, then let's look for
# packages/npm-container/.npm/package
# the basedir tail depends on whether this file is in the app or a package
# for an app file, we're going to assume they are using meteorhacks:npm
tail = if step?.packageName? then '.npm/package' else 'packages/npm-container/.npm/package'
# CompileStep has the absolute path to the file in `fullInputPath`
# CompileStep has the package/app relative path to the file in `inputPath`
# basedir is fullInputPath with inputPath replaced with the tail
basedir = step.fullInputPath[0...-(step.inputPath.length)] + tail
# TODO: use fs.existsSync basedir
# could print a more helpful message to user than the browserify error saying
# it can't find the module at this directory. can suggest checking package.js
# for Npm.depends(), or, if an app file, adding meteorhacks:npm and checking
# packages.json.
return basedir
getBrowserifyOptions = (step) ->
# empty user options to fill from file, if it exists
userOptions = {}
# look for a file with the same name, but .browserify.options.json extension
optionsFileName = step.fullInputPath[0...-2] + 'options.json'
if fs.existsSync optionsFileName
try
# read json file and convert it into an object
userOptions = JSON.parse fs.readFileSync optionsFileName, 'utf8'
catch e
step.error
message: 'Couldn\'t read JSON data: '+e.toString()
sourcePath: step.inputPath
# sane defaults for options; most important is the baseDir
defaultOptions =
# Browserify will look here for npm modules
basedir: getBasedir(step)
# must be true to produce source map which we extract via exorcist and
# provide to CompileStep
debug: true
# put the defaults for envify transform in here as well
# TODO: have an option which disables using envify
transforms:
envify:
NODE_ENV: if getDebug() then 'development' else 'production'
_:'purge'
# merge user options with defaults
_.defaults userOptions, defaultOptions
# when they supply transforms it clobbers the envify defaults because
# _.defaults works only on top level keys.
# so, if there's no envify then set the default options for it
userOptions.transforms?.envify ?= defaultOptions.transforms.envify
return userOptions
checkFilename = (step) ->
if step.inputPath is 'browserify.js'
console.log 'WARNING: using \'browserify.js\' as full filename may stop working.' +
' See Meteor Issue #3985. Please add something before it like: client.browserify.js'
getDebug = ->
debug = true
# check args used
for key in process.argv
# if 'meteor bundle file' or 'meteor build path'
if key is '<KEY>' or key is '<KEY>'
debug = '--debug' in process.argv
break;
return debug
getReadable = (step) ->
# Browserify accepts a Readable stream as input, so, we'll use a PassThrough
# stream to hold the Buffer
readable = new stream.PassThrough()
# Meteor's CompileStep provides the file as a Buffer from step.read()
# add the buffer into the stream and end the stream with one call to end()
readable.end step.read()
return readable
# async function for reading entire bundle output into a string
getString = (bundle, cb) ->
# holds all data read from bundle
string = ''
# concatenate data chunk to string
bundle.on 'data', (data) -> string += data
# when we reach the end, call Meteor.wrapAsync's callback with string result
bundle.once 'end', -> cb undefined, string # undefined = error
# when there's an error, give it to the callback
bundle.once 'error', (error) -> cb error
| true | Browserify = Npm.require 'browserify'
# use custom envify so we can specify the env based on the meteor command used
envify = Npm.require 'envify/custom'
# use exorcist transform to extract source map data
exorcist = Npm.require 'exorcist'
# get 'stream' to use PassThrough to provide a Buffer as a Readable stream
stream = Npm.require 'stream'
fs = Npm.require 'fs'
processFile = (step) ->
# check for extension as filename
checkFilename step
# get options for Browserify
browserifyOptions = getBrowserifyOptions step
# create a browserify instance passing our readable stream as input,
# and options object for debug and the basedir
browserify = Browserify [getReadable(step)], browserifyOptions
# extract envify tranform's options so it isn't used in loop
envifyOptions = browserifyOptions.transforms.envify
delete browserifyOptions.transforms.envify
# run each transform
for own transformName, transformOptions of browserifyOptions.transforms
browserify.transform transformName, transformOptions
# run the envify transform
browserify.transform envify envifyOptions
# have browserify process the file and include all required modules.
# we receive a readable stream as the result
bundle = browserify.bundle()
# set the readable stream's encoding so we read strings from it
bundle.setEncoding('utf8')
# extract the source map content from the generated file to give to Meteor
# explicitly by piping bundle thru `exorcist`
mapFileName = step.fullInputPath+'.map'
bundle = bundle.pipe exorcist mapFileName, step.pathForSourceMap
# use Meteor.wrapAsync to wrap `getString` so it's done synchronously
wrappedFn = Meteor.wrapAsync getString
try # try-catch for browserify errors
# call our wrapped function with the readable stream as its argument
string = wrappedFn bundle
# read the generated source map from the file
sourceMap = fs.readFileSync mapFileName, 'utf8'
fs.unlinkSync mapFileName
# now that we have the compiled result as a string we can add it using CompileStep
# inside try-catch because this shouldn't run when there's an error.
step.addJavaScript
path: step.inputPath # name of the file
sourcePath: step.inputPath # use same name, we've just browserified it
data: string # the actual browserified results
sourceMap: sourceMap
bare: step?.fileOptions?.bare
catch e
# output error via CompileStep#error()
# convert it to a string and then remove the 'Error: ' at the beginning.
step.error
message:e.toString().substring 7
sourcePath: step.inputPath
# add our function as the handler for files ending in 'browserify.js'
Plugin.registerSourceHandler 'browserify.js', processFile
# add a source handler for config files so that they are watched for changes
Plugin.registerSourceHandler 'browserify.options.json', ->
getBasedir = (step) ->
# basedir should point to the '.npm/package' folder containing the npm modules.
# step.fullInputPath is the full path to our browserify.js file. it may be:
# 1. in a package
# 2. in the app itself
# for both of the above it also may be:
# 1. in the root (of package or app)
# 2. in a subfolder
# NOTE:
# the app doesn't have npm support, so, no .npm/package.
# using meteorhacks:npm creates a package to contain the npm modules.
# so, if the browserify.js file is an app file, then let's look for
# packages/npm-container/.npm/package
# the basedir tail depends on whether this file is in the app or a package
# for an app file, we're going to assume they are using meteorhacks:npm
tail = if step?.packageName? then '.npm/package' else 'packages/npm-container/.npm/package'
# CompileStep has the absolute path to the file in `fullInputPath`
# CompileStep has the package/app relative path to the file in `inputPath`
# basedir is fullInputPath with inputPath replaced with the tail
basedir = step.fullInputPath[0...-(step.inputPath.length)] + tail
# TODO: use fs.existsSync basedir
# could print a more helpful message to user than the browserify error saying
# it can't find the module at this directory. can suggest checking package.js
# for Npm.depends(), or, if an app file, adding meteorhacks:npm and checking
# packages.json.
return basedir
getBrowserifyOptions = (step) ->
# empty user options to fill from file, if it exists
userOptions = {}
# look for a file with the same name, but .browserify.options.json extension
optionsFileName = step.fullInputPath[0...-2] + 'options.json'
if fs.existsSync optionsFileName
try
# read json file and convert it into an object
userOptions = JSON.parse fs.readFileSync optionsFileName, 'utf8'
catch e
step.error
message: 'Couldn\'t read JSON data: '+e.toString()
sourcePath: step.inputPath
# sane defaults for options; most important is the baseDir
defaultOptions =
# Browserify will look here for npm modules
basedir: getBasedir(step)
# must be true to produce source map which we extract via exorcist and
# provide to CompileStep
debug: true
# put the defaults for envify transform in here as well
# TODO: have an option which disables using envify
transforms:
envify:
NODE_ENV: if getDebug() then 'development' else 'production'
_:'purge'
# merge user options with defaults
_.defaults userOptions, defaultOptions
# when they supply transforms it clobbers the envify defaults because
# _.defaults works only on top level keys.
# so, if there's no envify then set the default options for it
userOptions.transforms?.envify ?= defaultOptions.transforms.envify
return userOptions
checkFilename = (step) ->
if step.inputPath is 'browserify.js'
console.log 'WARNING: using \'browserify.js\' as full filename may stop working.' +
' See Meteor Issue #3985. Please add something before it like: client.browserify.js'
getDebug = ->
debug = true
# check args used
for key in process.argv
# if 'meteor bundle file' or 'meteor build path'
if key is 'PI:KEY:<KEY>END_PI' or key is 'PI:KEY:<KEY>END_PI'
debug = '--debug' in process.argv
break;
return debug
getReadable = (step) ->
# Browserify accepts a Readable stream as input, so, we'll use a PassThrough
# stream to hold the Buffer
readable = new stream.PassThrough()
# Meteor's CompileStep provides the file as a Buffer from step.read()
# add the buffer into the stream and end the stream with one call to end()
readable.end step.read()
return readable
# async function for reading entire bundle output into a string
getString = (bundle, cb) ->
# holds all data read from bundle
string = ''
# concatenate data chunk to string
bundle.on 'data', (data) -> string += data
# when we reach the end, call Meteor.wrapAsync's callback with string result
bundle.once 'end', -> cb undefined, string # undefined = error
# when there's an error, give it to the callback
bundle.once 'error', (error) -> cb error
|
[
{
"context": "thods for accessing parse javascript SDK\n# @author Michael Lin, Snaphappi Inc.\n# \n###\n\n### login notes:\n parseL",
"end": 133,
"score": 0.9993287324905396,
"start": 122,
"tag": "NAME",
"value": "Michael Lin"
},
{
"context": "Obj')\n }\n\n ANON_PREFIX = {\n ... | app/js/services/backend.coffee | mixersoft/ionic-parse-facebook-scaffold | 5 | 'use strict'
###*
# @ngdoc factory
# @name appParse
# @description methods for accessing parse javascript SDK
# @author Michael Lin, Snaphappi Inc.
#
###
### login notes:
parseLogin vs linkLogin w/authData
parseLogin
parseUser.get('authData')==null
parseUser.password
parseUser.emailVerified READ_ONLY
linkedLogin:
parseUser.get('authData')?
parseUser.username is random string
parseUser.username = linkedUser.name, BUT owner can update
parseUser.email == linkedUser.email
parseUser.linkedId = linkedUser.id # not sure what happens to parseUser when unlinked
ignore parseUser.password
ignore parseUser.emailVerfied
check Session.get('createdWith').authProvider = [anonymous,password,facebook,twitter]
you can recover an unlinked account, only for Parse.User.current(), i.e. not logged out
BUG:
for browser, linkUserP() returns a 60 day expiration, regardless of authData
for device, authData expiration is put into UserObj.authData correctly,
but Parse.User.current().get('authData') returns an expired token
BUG:
for device, inAppBrowser only appears first time, can't re-connect a 2nd time.
###
angular
.module 'parse.backend', ['auth']
.factory 'restApi', [
'$http', 'auth.KEYS'
($http, KEYS)->
parseHeaders_GET = {
'X-Parse-Application-Id': KEYS.APP_ID,
'X-Parse-REST-API-Key':KEYS.REST_API_KEY,
}
parseHeaders_SET = _.defaults { 'Content-Type':'application/json' }, parseHeaders_GET
self = {
getAll: (className)->
return $http.get('https://api.parse.com/1/classes/' + className, {
headers: parseHeaders_GET
})
get: (className, id)->
return $http.get('https://api.parse.com/1/classes/' + className + '/' + id, {
headers: parseHeaders_GET
})
create: (className, data)->
return $http.post('https://api.parse.com/1/classes/' + className, data, {
headers: parseHeaders_SET
})
edit: (className, id, data)->
return $http.put('https://api.parse.com/1/classes/' + className + '/' + id, data, {
headers: parseHeaders_SET
});
delete: (className, id)->
return $http.delete('https://api.parse.com/1/classes/' + className + '/' + id, {
headers: parseHeaders_SET
})
}
return self
]
.factory 'appParse', [
'$q', '$timeout', '$http', '$rootScope', 'deviceReady', 'auth.KEYS',
'exportDebug'
($q, $timeout, $http, $rootScope, deviceReady, KEYS, exportDebug)->
parseClass = {
BacklogObj : Parse.Object.extend('BacklogObj')
}
ANON_PREFIX = {
username: 'anonymous-'
password: 'password-'
}
ANON_USER = {
id: null
username: null
password: null
email: null
emailVerified: false
tosAgree: false
rememberMe: false
isRegistered: false
}
self = {
_authProvider : null
authProvider : (value)->
self._authProvider = value if `value!=null`
return self._authProvider
authProviderP: ()->
return Parse.Session.current()
.then (session)->
return self.authProvider( session.get('createdWith').authProvider )
isAnonymousUser: ()->
# should really check self.authProviderP()
if self.authProvider()?
return true if self.authProvider() == 'anonymous'
return false
return true if _.isEmpty $rootScope.parseUser
return false if $rootScope.parseUser.get('authData')
return true if $rootScope.parseUser.get('username').indexOf(ANON_PREFIX.username) == 0
# return true if $rootScope.parseUser.get('username') == 'browser'
return false
mergeSessionUser: (anonUser={})->
anonUser = _.extend _.clone(ANON_USER), anonUser
# merge from cookie into $rootScope.user
$rootScope.parseUser = Parse.User.current()
return anonUser if !($rootScope.parseUser instanceof Parse.Object)
isRegistered = !self.isAnonymousUser()
return anonUser if !isRegistered
userCred = _.pick( $rootScope.parseUser.toJSON(), [
'username', 'role',
'email', 'emailVerified',
'tosAgree', 'rememberMe'
] )
userCred.password = 'HIDDEN'
userCred.tosAgree = !!userCred.tosAgree # checkbox:ng-model expects a boolean
userCred.isRegistered = self.isAnonymousUser()
return _.extend anonUser, userCred
signUpP: (userCred)->
user = new Parse.User();
user.set("username", userCred.username.toLowerCase())
user.set("password", userCred.password)
user.set("email", userCred.email)
return user.signUp().then (user)->
promise = self.authProviderP()
return $rootScope.parseUser = Parse.User.current()
, (user, error)->
$rootScope.parseUser = null
$rootScope.user.username = ''
$rootScope.user.password = ''
$rootScope.user.email = ''
console.warn "parse User.signUp error, msg=" + JSON.stringify error
return $q.reject(error)
###
# parseUser login only, authProvider='password'
# @params userCred object, keys {username:, password:}
# or array of keys
###
loginP: (userCred, signOutOnErr=true)->
userCred = _.pick userCred, ['username', 'password']
return deviceReady.waitP().then ()->
return Parse.User.logIn( userCred.username.trim().toLowerCase(), userCred.password )
.then (user)->
promise = self.authProviderP()
$rootScope.parseUser = Parse.User.current()
$rootScope.user = self.mergeSessionUser($rootScope.user)
return user
, (error)->
if signOutOnErr
$rootScope.parseUser = null
$rootScope.$broadcast 'user:sign-out'
console.warn "User login error. msg=" + JSON.stringify error
$q.reject(error)
signUpOrLoginFromAuthDataP: (authData, cb)->
options = {
method: 'POST'
url: "https://api.parse.com/1/users"
headers:
'X-Parse-Application-Id': KEYS.parse.APP_ID
'X-Parse-REST-API-Key': KEYS.parse.REST_API_KEY
'Content-Type': 'application/json'
data: {authData: authData}
}
return deviceReady.waitP()
.then ()->
if deviceReady.device().isDevice
# for device Only or also Browser?
return Parse._getInstallationId()
.then (installationId)->
options.headers['X-Parse-Installation-Id'] = installationId
return options
else
return options
.then (options)->
return $http( options )
.then (resp)->
switch resp.status
when 200,201
sessionToken = resp.data.sessionToken
return Parse.User.become(sessionToken)
.then (user)->
$rootScope.parseUser = Parse.User.current()
$rootScope.user = self.mergeSessionUser($rootScope.user)
return resp
else
return $q.reject (resp)
.then (resp)->
# import FB attrs, see appFacebook._patchUserFieldsP(fbUser)
switch resp.status
when 200
console.log ">> Parse LOGIN from linkedUser", [resp.data, authData]
when 201 # created new Parse.User from linked Account
console.log ">> Parse user CREATED from linkedUser", [resp.data, authData]
if _.isFunction cb
return cb( resp ).then ()->
return resp
return resp
logoutSession: (anonUser)->
Parse.User.logOut()
$rootScope.parseUser = Parse.User.current()
$rootScope.user = ANON_USER
return
anonSignUpP: (seed)->
_uniqueId = (length=8) ->
id = ""
id += Math.random().toString(36).substr(2) while id.length < length
id.substr 0, length
seed = _uniqueId(8) if !seed
anon = {
username: ANON_PREFIX.username + seed
password: ANON_PREFIX.password + seed
}
return self.signUpP(anon)
.then (userObj)->
return userObj
, (userCred, error)->
console.warn "parseUser anonSignUpP() FAILED, userCred=" + JSON.stringify userCred
return $q.reject( error )
linkUserP: (authData)->
parseUser = Parse.User.current()
return $q.reject('linkUserP error: Parse.User not signed in ') if !parseUser
options = {
method: 'PUT'
url: "https://api.parse.com/1/users/" + parseUser.id
headers:
'X-Parse-Application-Id': KEYS.parse.APP_ID
'X-Parse-REST-API-Key': KEYS.parse.REST_API_KEY
'X-Parse-Session-Token': parseUser.getSessionToken()
'Content-Type': 'application/json'
params: null
data: {authData: authData}
}
return $http( options ).then (resp)->
console.log ">> Parse user LINKED to linkedUser", [resp.data, authData]
return $q.reject (resp) if resp.statusText != 'OK'
return parseUser
# confirm userCred or create anonymous user if Parse.User.current()==null
checkSessionUserP: (userCred, createAnonUser=true)->
if userCred # confirm userCred
authPromise = self.loginP(userCred, false).then null, (err)->
return $q.reject({
message: "userCred invalid"
code: 301
})
else if $rootScope.parseUser
authPromise = $q.when($rootScope.parseUser)
else
authPromise = $q.reject()
if createAnonUser
authPromise = authPromise.then (o)->
return o
, (error)->
return self.anonSignUpP()
return authPromise
saveSessionUserP : (updateKeys, userCred)->
# update or create
if _.isEmpty($rootScope.parseUser)
# create
promise = self.signUpP(userCred)
else if self.isAnonymousUser()
promise = $q.when()
else # verify userCred before updating user profile
reverify = {
username: userCred['username']
password: userCred['currentPassword']
}
promise = self.checkSessionUserP(reverify, false)
promise = promise.then ()->
# userCred should be valid, continue with update
_.each updateKeys, (key)->
return if key == 'currentPassword'
if key=='username'
userCred['username'] = userCred['username'].trim().toLowerCase()
$rootScope.parseUser.set(key, userCred[key])
return
return $rootScope.parseUser.save().then ()->
return $rootScope.user = self.mergeSessionUser($rootScope.user)
, (error)->
$rootScope.parseUser = null
$rootScope.user.username = ''
$rootScope.user.password = ''
$rootScope.user.email = ''
console.warn "parse User.save error, msg=" + JSON.stringify error
return $q.reject(error)
.then ()->
$rootScope.parseUser = Parse.User.current()
return $q.when($rootScope.parseUser)
, (err)->
return $q.reject(err) # end of line
updateUserProfileP : (options)->
keys = ['tosAgree', 'rememberMe']
options = _.pick options, keys
return $q.when() if _.isEmpty options
return deviceReady.waitP().then ()->
return self.checkSessionUserP(null, true)
.then ()->
return $rootScope.parseUser.save(options)
, (err)->
return err
###
# THESE METHODS ARE UNTESTED
###
uploadPhotoMetaP: (workorderObj, photo)->
return $q.reject("uploadPhotoMetaP: photo is empty") if !photo
# upload photo meta BEFORE file upload from native uploader
# photo.src == 'queued'
return deviceReady.waitP().then self.checkSessionUserP(null, false)
.then ()->
attrsForParse = [
'dateTaken', 'originalWidth', 'originalHeight',
'rating', 'favorite', 'caption', 'hidden'
'exif', 'orientation', 'location'
"mediaType", "mediaSubTypes", "burstIdentifier", "burstSelectionTypes", "representsBurst",
]
extendedAttrs = _.pick photo, attrsForParse
# console.log extendedAttrs
parseData = _.extend {
# assetId: photo.UUID # deprecate
UUID: photo.UUID
owner: $rootScope.parseUser
deviceId: deviceReady.device().id
src: "queued"
}
, extendedAttrs # , classDefaults
photoObj = new parseClass.PhotoObj parseData , {initClass: false }
# set default ACL, owner:rw, Curator:rw
photoACL = new Parse.ACL(parseData.owner)
photoACL.setRoleReadAccess('Curator', true)
photoACL.setRoleWriteAccess('Curator', true)
photoObj.setACL (photoACL)
return photoObj.save()
.then (o)->
# console.log "photoObj.save() complete: " + JSON.stringify o.attributes
return
, (err)->
console.warn "ERROR: uploadPhotoMetaP photoObj.save(), err=" + JSON.stringify err
return $q.reject(err)
uploadPhotoFileP : (options, dataURL)->
# called by parseUploader, _uploadNext()
# upload file then update PhotoObj photo.src, does not know workorder
# return parseFile = { UUID:, url(): }
return deviceReady.waitP().then self.checkSessionUserP(null, false)
.then ()->
if deviceReady.device().isBrowser
return $q.reject( {
UUID: UUID
message: "error: file upload not available from browser"
})
.then ()->
photo = {
UUID: options.UUID
filename: options.filename
data: dataURL
}
# photo.UUID, photo.data = dataURL
return self.uploadFileP(photo.data, photo)
.catch (error)->
skipErrorFile = {
UUID: error.UUID
url: ()-> return error.message
}
switch error.message
when "error: Base64 encoding failed", "Base64 encoding failed"
return $q.when skipErrorFile
when "error: UUID not found in CameraRoll", "Not found!"
return $q.when skipErrorFile
else
throw error
# 'parse' uploader only, requires DataURLs
uploadFileP : (base64src, photo)->
if /^data:image/.test(base64src)
# expecting this prefix: 'data:image/jpg;base64,' + rawBase64
mimeType = base64src[10..20]
ext = 'jpg' if (/jpg|jpeg/i.test(mimeType))
ext = 'png' if (/png/i.test(mimeType))
filename = photo.filename || photo.UUID.replace('/','_') + '.' + ext
console.log "\n\n >>> Parse file save, filename=" + filename
console.log "\n\n >>> Parse file save, dataURL=" + base64src[0..50]
# get mimeType, then strip off mimeType, as necessary
base64src = base64src.split(',')[1]
else
ext = 'jpg' # just assume
# save DataURL as image file on Parse
parseFile = new Parse.File(filename, {
base64: base64src
})
return parseFile.save()
}
exportDebug.appParse = self
return self
]
###
#
# @ngdoc factory
# @name appFacebook
# @description
# methods for accessing openFB lib, se https://github.com/ccoenraets/OpenFB
#
###
angular
.module( 'parse.backend')
.factory 'appFacebook', [
'$q'
'$rootScope'
'exportDebug'
'appParse', 'ngFB'
($q, $rootScope, exportDebug, appParse, ngFB)->
$rootScope.$on 'user:sign-out', ()->
self.disconnectP()
return
self = {
LINK_EXPIRATION_DAYS: 60
_findParseUserByLinkedIdP : (fbId)->
return $q.when( rootScope.parseUser ) if $rootScope.parseUser
userQ = new Parse.Query(Parse.User)
userQ.equalTo('linkedId', fbId)
return userQ.first().then (resp)->
return $q.reject('Parse User not found') if _.isEmpty resp
return resp
_getAuthData: (fbUser, fbLogin)->
return authData = {facebook:null} if fbUser==false
expireTime = 1000 * 3600 * 24 * self.LINK_EXPIRATION_DAYS + Date.now()
return authData = {
facebook:
id: fbUser.id
access_token: fbLogin.authResponse.accessToken
# authData.expiration sets to +60 days regardless of what we put here
expiration_date: new Date(expireTime).toJSON()
}
_patchUserFieldsP : (fbUser)->
parseUser = Parse.User.current()
updateFields = {
'name': fbUser.name
'linkedId': fbUser.id # save for admin recovery
'face': fbUser.picture.data.url
}
if parseUser.get('emailVerified') != true
updateFields['email'] = fbUser.email
return parseUser.save( updateFields)
checkLoginP: (user)->
user = $rootScope.user if `user==null`
return ngFB.getLoginStatus()
.then (resp)->
console.log "checkLoginP", resp
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return resp.status if resp.status == 'connected'
$rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return $q.reject('fbUser not connected')
loginP: (privileges)->
default_access = 'public_profile,email,user_friends'
privileges = default_access if !privileges
fbLogin = null
return ngFB.login({ scope: privileges })
.then (resp)->
fbLogin = resp
if resp.status != 'connected'
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
console.warn "Facebook login failed, resp=", resp
return $q.reject(resp)
# FbLogin sucessful
console.log 'FB connected=', resp.status
return self.getMeP()
.then (fbUser)->
# Parse login or create NEW parseUser
if $rootScope.parseUser == null
# create anon user and link with FbUser
authData = self._getAuthData(fbUser, fbLogin)
return appParse.signUpOrLoginFromAuthDataP( authData )
.then ()->
return self._patchUserFieldsP(fbUser)
if $rootScope.parseUser?
# link Parse.User & FbUser: replace any prior authData
authData = self._getAuthData(fbUser, fbLogin)
return appParse.linkUserP( authData )
.then ()->
return self._patchUserFieldsP(fbUser)
.then ()->
promise = appParse.authProviderP()
return fbLogin.status
disconnectP: ()->
# disconnect revokes the Fb sessionToken.
# calls to getMeP() should fail with "Invalid OAuth access token."
return ngFB.logout().then ()->
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return ngFB.getLoginStatus().then (resp)->
console.log "disconnnect status=",resp
unlinkP: ()->
# resets parseUser.authData
return appParse.authProviderP()
.then (authProvider)->
if authProvider != 'facebook'
return $q.reject('ERROR: unlinkP() session not linked from Facebook')
# reset the parseUser.authData field
authData = self._getAuthData(false)
return appParse.linkUserP(authData)
.then (resp)->
return self.disconnectP()
.then ()->
return $rootScope.parseUser.save({
'face':null
'name':null
'email':null # unique key
})
.then ()->
# manually set to 'password' this is only valid to next logout
promise = appParse.authProvider('password')
$rootScope.user['unlinked'] = true
# Parse.User.logOut();
$rootScope.$state.reload()
console.log "done"
getMeP: (fields)->
default_profile_fields = 'id,name,first_name,last_name,email,gender,location,locale,link,timezone,verified,picture,cover'
options = {
path : '/me'
params:
'fields': fields || default_profile_fields
}
return ngFB.api(options)
.then (resp)->
if appParse.isAnonymousUser()
_.extend $rootScope['user'], {
username: resp.name
email: resp.email
emailVerified: resp.email?
}
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = resp
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
console.log 'getFbUserP', resp
return resp
.catch (err)->
console.error 'getFbUserP', err
$rootScope.user['isConnected'] = false
return $q.reject(err)
getPermissions: ()->
options = {
path : '/me/permissions'
}
return ngFB.api(options)
.then (resp)->
return resp.data
}
exportDebug.appFacebook = self
return self
]
# # test cloudCode with js debugger
window.cloud = { }
| 118583 | 'use strict'
###*
# @ngdoc factory
# @name appParse
# @description methods for accessing parse javascript SDK
# @author <NAME>, Snaphappi Inc.
#
###
### login notes:
parseLogin vs linkLogin w/authData
parseLogin
parseUser.get('authData')==null
parseUser.password
parseUser.emailVerified READ_ONLY
linkedLogin:
parseUser.get('authData')?
parseUser.username is random string
parseUser.username = linkedUser.name, BUT owner can update
parseUser.email == linkedUser.email
parseUser.linkedId = linkedUser.id # not sure what happens to parseUser when unlinked
ignore parseUser.password
ignore parseUser.emailVerfied
check Session.get('createdWith').authProvider = [anonymous,password,facebook,twitter]
you can recover an unlinked account, only for Parse.User.current(), i.e. not logged out
BUG:
for browser, linkUserP() returns a 60 day expiration, regardless of authData
for device, authData expiration is put into UserObj.authData correctly,
but Parse.User.current().get('authData') returns an expired token
BUG:
for device, inAppBrowser only appears first time, can't re-connect a 2nd time.
###
angular
.module 'parse.backend', ['auth']
.factory 'restApi', [
'$http', 'auth.KEYS'
($http, KEYS)->
parseHeaders_GET = {
'X-Parse-Application-Id': KEYS.APP_ID,
'X-Parse-REST-API-Key':KEYS.REST_API_KEY,
}
parseHeaders_SET = _.defaults { 'Content-Type':'application/json' }, parseHeaders_GET
self = {
getAll: (className)->
return $http.get('https://api.parse.com/1/classes/' + className, {
headers: parseHeaders_GET
})
get: (className, id)->
return $http.get('https://api.parse.com/1/classes/' + className + '/' + id, {
headers: parseHeaders_GET
})
create: (className, data)->
return $http.post('https://api.parse.com/1/classes/' + className, data, {
headers: parseHeaders_SET
})
edit: (className, id, data)->
return $http.put('https://api.parse.com/1/classes/' + className + '/' + id, data, {
headers: parseHeaders_SET
});
delete: (className, id)->
return $http.delete('https://api.parse.com/1/classes/' + className + '/' + id, {
headers: parseHeaders_SET
})
}
return self
]
.factory 'appParse', [
'$q', '$timeout', '$http', '$rootScope', 'deviceReady', 'auth.KEYS',
'exportDebug'
($q, $timeout, $http, $rootScope, deviceReady, KEYS, exportDebug)->
parseClass = {
BacklogObj : Parse.Object.extend('BacklogObj')
}
ANON_PREFIX = {
username: 'anonymous-'
password: '<PASSWORD>-'
}
ANON_USER = {
id: null
username: null
password: <PASSWORD>
email: null
emailVerified: false
tosAgree: false
rememberMe: false
isRegistered: false
}
self = {
_authProvider : null
authProvider : (value)->
self._authProvider = value if `value!=null`
return self._authProvider
authProviderP: ()->
return Parse.Session.current()
.then (session)->
return self.authProvider( session.get('createdWith').authProvider )
isAnonymousUser: ()->
# should really check self.authProviderP()
if self.authProvider()?
return true if self.authProvider() == 'anonymous'
return false
return true if _.isEmpty $rootScope.parseUser
return false if $rootScope.parseUser.get('authData')
return true if $rootScope.parseUser.get('username').indexOf(ANON_PREFIX.username) == 0
# return true if $rootScope.parseUser.get('username') == 'browser'
return false
mergeSessionUser: (anonUser={})->
anonUser = _.extend _.clone(ANON_USER), anonUser
# merge from cookie into $rootScope.user
$rootScope.parseUser = Parse.User.current()
return anonUser if !($rootScope.parseUser instanceof Parse.Object)
isRegistered = !self.isAnonymousUser()
return anonUser if !isRegistered
userCred = _.pick( $rootScope.parseUser.toJSON(), [
'username', 'role',
'email', 'emailVerified',
'tosAgree', 'rememberMe'
] )
userCred.password = '<PASSWORD>'
userCred.tosAgree = !!userCred.tosAgree # checkbox:ng-model expects a boolean
userCred.isRegistered = self.isAnonymousUser()
return _.extend anonUser, userCred
signUpP: (userCred)->
user = new Parse.User();
user.set("username", userCred.username.toLowerCase())
user.set("password", <PASSWORD>)
user.set("email", userCred.email)
return user.signUp().then (user)->
promise = self.authProviderP()
return $rootScope.parseUser = Parse.User.current()
, (user, error)->
$rootScope.parseUser = null
$rootScope.user.username = ''
$rootScope.user.password =<PASSWORD> ''
$rootScope.user.email = ''
console.warn "parse User.signUp error, msg=" + JSON.stringify error
return $q.reject(error)
###
# parseUser login only, authProvider='password'
# @params userCred object, keys {username:, password:}
# or array of keys
###
loginP: (userCred, signOutOnErr=true)->
userCred = _.pick userCred, ['username', '<PASSWORD>']
return deviceReady.waitP().then ()->
return Parse.User.logIn( userCred.username.trim().toLowerCase(), userCred.password )
.then (user)->
promise = self.authProviderP()
$rootScope.parseUser = Parse.User.current()
$rootScope.user = self.mergeSessionUser($rootScope.user)
return user
, (error)->
if signOutOnErr
$rootScope.parseUser = null
$rootScope.$broadcast 'user:sign-out'
console.warn "User login error. msg=" + JSON.stringify error
$q.reject(error)
signUpOrLoginFromAuthDataP: (authData, cb)->
options = {
method: 'POST'
url: "https://api.parse.com/1/users"
headers:
'X-Parse-Application-Id': KEYS.parse.APP_ID
'X-Parse-REST-API-Key': KEYS.parse.REST_API_KEY
'Content-Type': 'application/json'
data: {authData: authData}
}
return deviceReady.waitP()
.then ()->
if deviceReady.device().isDevice
# for device Only or also Browser?
return Parse._getInstallationId()
.then (installationId)->
options.headers['X-Parse-Installation-Id'] = installationId
return options
else
return options
.then (options)->
return $http( options )
.then (resp)->
switch resp.status
when 200,201
sessionToken = resp.data.sessionToken
return Parse.User.become(sessionToken)
.then (user)->
$rootScope.parseUser = Parse.User.current()
$rootScope.user = self.mergeSessionUser($rootScope.user)
return resp
else
return $q.reject (resp)
.then (resp)->
# import FB attrs, see appFacebook._patchUserFieldsP(fbUser)
switch resp.status
when 200
console.log ">> Parse LOGIN from linkedUser", [resp.data, authData]
when 201 # created new Parse.User from linked Account
console.log ">> Parse user CREATED from linkedUser", [resp.data, authData]
if _.isFunction cb
return cb( resp ).then ()->
return resp
return resp
logoutSession: (anonUser)->
Parse.User.logOut()
$rootScope.parseUser = Parse.User.current()
$rootScope.user = ANON_USER
return
anonSignUpP: (seed)->
_uniqueId = (length=8) ->
id = ""
id += Math.random().toString(36).substr(2) while id.length < length
id.substr 0, length
seed = _uniqueId(8) if !seed
anon = {
username: ANON_PREFIX.username + seed
password: ANON_PREFIX.password + seed
}
return self.signUpP(anon)
.then (userObj)->
return userObj
, (userCred, error)->
console.warn "parseUser anonSignUpP() FAILED, userCred=" + JSON.stringify userCred
return $q.reject( error )
linkUserP: (authData)->
parseUser = Parse.User.current()
return $q.reject('linkUserP error: Parse.User not signed in ') if !parseUser
options = {
method: 'PUT'
url: "https://api.parse.com/1/users/" + parseUser.id
headers:
'X-Parse-Application-Id': KEYS.parse.APP_ID
'X-Parse-REST-API-Key': KEYS.parse.REST_API_KEY
'X-Parse-Session-Token': parseUser.getSessionToken()
'Content-Type': 'application/json'
params: null
data: {authData: authData}
}
return $http( options ).then (resp)->
console.log ">> Parse user LINKED to linkedUser", [resp.data, authData]
return $q.reject (resp) if resp.statusText != 'OK'
return parseUser
# confirm userCred or create anonymous user if Parse.User.current()==null
checkSessionUserP: (userCred, createAnonUser=true)->
if userCred # confirm userCred
authPromise = self.loginP(userCred, false).then null, (err)->
return $q.reject({
message: "userCred invalid"
code: 301
})
else if $rootScope.parseUser
authPromise = $q.when($rootScope.parseUser)
else
authPromise = $q.reject()
if createAnonUser
authPromise = authPromise.then (o)->
return o
, (error)->
return self.anonSignUpP()
return authPromise
saveSessionUserP : (updateKeys, userCred)->
# update or create
if _.isEmpty($rootScope.parseUser)
# create
promise = self.signUpP(userCred)
else if self.isAnonymousUser()
promise = $q.when()
else # verify userCred before updating user profile
reverify = {
username: userCred['username']
password: <PASSWORD>['<PASSWORD>']
}
promise = self.checkSessionUserP(reverify, false)
promise = promise.then ()->
# userCred should be valid, continue with update
_.each updateKeys, (key)->
return if key == 'currentPassword'
if key=='username'
userCred['username'] = userCred['username'].trim().toLowerCase()
$rootScope.parseUser.set(key, userCred[key])
return
return $rootScope.parseUser.save().then ()->
return $rootScope.user = self.mergeSessionUser($rootScope.user)
, (error)->
$rootScope.parseUser = null
$rootScope.user.username = ''
$rootScope.user.password =<PASSWORD> ''
$rootScope.user.email = ''
console.warn "parse User.save error, msg=" + JSON.stringify error
return $q.reject(error)
.then ()->
$rootScope.parseUser = Parse.User.current()
return $q.when($rootScope.parseUser)
, (err)->
return $q.reject(err) # end of line
updateUserProfileP : (options)->
keys = ['<KEY>', '<KEY>']
options = _.pick options, keys
return $q.when() if _.isEmpty options
return deviceReady.waitP().then ()->
return self.checkSessionUserP(null, true)
.then ()->
return $rootScope.parseUser.save(options)
, (err)->
return err
###
# THESE METHODS ARE UNTESTED
###
uploadPhotoMetaP: (workorderObj, photo)->
return $q.reject("uploadPhotoMetaP: photo is empty") if !photo
# upload photo meta BEFORE file upload from native uploader
# photo.src == 'queued'
return deviceReady.waitP().then self.checkSessionUserP(null, false)
.then ()->
attrsForParse = [
'dateTaken', 'originalWidth', 'originalHeight',
'rating', 'favorite', 'caption', 'hidden'
'exif', 'orientation', 'location'
"mediaType", "mediaSubTypes", "burstIdentifier", "burstSelectionTypes", "representsBurst",
]
extendedAttrs = _.pick photo, attrsForParse
# console.log extendedAttrs
parseData = _.extend {
# assetId: photo.UUID # deprecate
UUID: photo.UUID
owner: $rootScope.parseUser
deviceId: deviceReady.device().id
src: "queued"
}
, extendedAttrs # , classDefaults
photoObj = new parseClass.PhotoObj parseData , {initClass: false }
# set default ACL, owner:rw, Curator:rw
photoACL = new Parse.ACL(parseData.owner)
photoACL.setRoleReadAccess('Curator', true)
photoACL.setRoleWriteAccess('Curator', true)
photoObj.setACL (photoACL)
return photoObj.save()
.then (o)->
# console.log "photoObj.save() complete: " + JSON.stringify o.attributes
return
, (err)->
console.warn "ERROR: uploadPhotoMetaP photoObj.save(), err=" + JSON.stringify err
return $q.reject(err)
uploadPhotoFileP : (options, dataURL)->
# called by parseUploader, _uploadNext()
# upload file then update PhotoObj photo.src, does not know workorder
# return parseFile = { UUID:, url(): }
return deviceReady.waitP().then self.checkSessionUserP(null, false)
.then ()->
if deviceReady.device().isBrowser
return $q.reject( {
UUID: UUID
message: "error: file upload not available from browser"
})
.then ()->
photo = {
UUID: options.UUID
filename: options.filename
data: dataURL
}
# photo.UUID, photo.data = dataURL
return self.uploadFileP(photo.data, photo)
.catch (error)->
skipErrorFile = {
UUID: error.UUID
url: ()-> return error.message
}
switch error.message
when "error: Base64 encoding failed", "Base64 encoding failed"
return $q.when skipErrorFile
when "error: UUID not found in CameraRoll", "Not found!"
return $q.when skipErrorFile
else
throw error
# 'parse' uploader only, requires DataURLs
uploadFileP : (base64src, photo)->
if /^data:image/.test(base64src)
# expecting this prefix: 'data:image/jpg;base64,' + rawBase64
mimeType = base64src[10..20]
ext = 'jpg' if (/jpg|jpeg/i.test(mimeType))
ext = 'png' if (/png/i.test(mimeType))
filename = photo.filename || photo.UUID.replace('/','_') + '.' + ext
console.log "\n\n >>> Parse file save, filename=" + filename
console.log "\n\n >>> Parse file save, dataURL=" + base64src[0..50]
# get mimeType, then strip off mimeType, as necessary
base64src = base64src.split(',')[1]
else
ext = 'jpg' # just assume
# save DataURL as image file on Parse
parseFile = new Parse.File(filename, {
base64: base64src
})
return parseFile.save()
}
exportDebug.appParse = self
return self
]
###
#
# @ngdoc factory
# @name appFacebook
# @description
# methods for accessing openFB lib, se https://github.com/ccoenraets/OpenFB
#
###
angular
.module( 'parse.backend')
.factory 'appFacebook', [
'$q'
'$rootScope'
'exportDebug'
'appParse', 'ngFB'
($q, $rootScope, exportDebug, appParse, ngFB)->
$rootScope.$on 'user:sign-out', ()->
self.disconnectP()
return
self = {
LINK_EXPIRATION_DAYS: 60
_findParseUserByLinkedIdP : (fbId)->
return $q.when( rootScope.parseUser ) if $rootScope.parseUser
userQ = new Parse.Query(Parse.User)
userQ.equalTo('linkedId', fbId)
return userQ.first().then (resp)->
return $q.reject('Parse User not found') if _.isEmpty resp
return resp
_getAuthData: (fbUser, fbLogin)->
return authData = {facebook:null} if fbUser==false
expireTime = 1000 * 3600 * 24 * self.LINK_EXPIRATION_DAYS + Date.now()
return authData = {
facebook:
id: fbUser.id
access_token: fbLogin.authResponse.accessToken
# authData.expiration sets to +60 days regardless of what we put here
expiration_date: new Date(expireTime).toJSON()
}
_patchUserFieldsP : (fbUser)->
parseUser = Parse.User.current()
updateFields = {
'name': fbUser.name
'linkedId': fbUser.id # save for admin recovery
'face': fbUser.picture.data.url
}
if parseUser.get('emailVerified') != true
updateFields['email'] = fbUser.email
return parseUser.save( updateFields)
checkLoginP: (user)->
user = $rootScope.user if `user==null`
return ngFB.getLoginStatus()
.then (resp)->
console.log "checkLoginP", resp
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return resp.status if resp.status == 'connected'
$rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return $q.reject('fbUser not connected')
loginP: (privileges)->
default_access = 'public_profile,email,user_friends'
privileges = default_access if !privileges
fbLogin = null
return ngFB.login({ scope: privileges })
.then (resp)->
fbLogin = resp
if resp.status != 'connected'
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
console.warn "Facebook login failed, resp=", resp
return $q.reject(resp)
# FbLogin sucessful
console.log 'FB connected=', resp.status
return self.getMeP()
.then (fbUser)->
# Parse login or create NEW parseUser
if $rootScope.parseUser == null
# create anon user and link with FbUser
authData = self._getAuthData(fbUser, fbLogin)
return appParse.signUpOrLoginFromAuthDataP( authData )
.then ()->
return self._patchUserFieldsP(fbUser)
if $rootScope.parseUser?
# link Parse.User & FbUser: replace any prior authData
authData = self._getAuthData(fbUser, fbLogin)
return appParse.linkUserP( authData )
.then ()->
return self._patchUserFieldsP(fbUser)
.then ()->
promise = appParse.authProviderP()
return fbLogin.status
disconnectP: ()->
# disconnect revokes the Fb sessionToken.
# calls to getMeP() should fail with "Invalid OAuth access token."
return ngFB.logout().then ()->
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return ngFB.getLoginStatus().then (resp)->
console.log "disconnnect status=",resp
unlinkP: ()->
# resets parseUser.authData
return appParse.authProviderP()
.then (authProvider)->
if authProvider != 'facebook'
return $q.reject('ERROR: unlinkP() session not linked from Facebook')
# reset the parseUser.authData field
authData = self._getAuthData(false)
return appParse.linkUserP(authData)
.then (resp)->
return self.disconnectP()
.then ()->
return $rootScope.parseUser.save({
'face':null
'name':null
'email':null # unique key
})
.then ()->
# manually set to 'password' this is only valid to next logout
promise = appParse.authProvider('password')
$rootScope.user['unlinked'] = true
# Parse.User.logOut();
$rootScope.$state.reload()
console.log "done"
getMeP: (fields)->
default_profile_fields = 'id,name,first_name,last_name,email,gender,location,locale,link,timezone,verified,picture,cover'
options = {
path : '/me'
params:
'fields': fields || default_profile_fields
}
return ngFB.api(options)
.then (resp)->
if appParse.isAnonymousUser()
_.extend $rootScope['user'], {
username: resp.name
email: resp.email
emailVerified: resp.email?
}
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = resp
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
console.log 'getFbUserP', resp
return resp
.catch (err)->
console.error 'getFbUserP', err
$rootScope.user['isConnected'] = false
return $q.reject(err)
getPermissions: ()->
options = {
path : '/me/permissions'
}
return ngFB.api(options)
.then (resp)->
return resp.data
}
exportDebug.appFacebook = self
return self
]
# # test cloudCode with js debugger
window.cloud = { }
| true | 'use strict'
###*
# @ngdoc factory
# @name appParse
# @description methods for accessing parse javascript SDK
# @author PI:NAME:<NAME>END_PI, Snaphappi Inc.
#
###
### login notes:
parseLogin vs linkLogin w/authData
parseLogin
parseUser.get('authData')==null
parseUser.password
parseUser.emailVerified READ_ONLY
linkedLogin:
parseUser.get('authData')?
parseUser.username is random string
parseUser.username = linkedUser.name, BUT owner can update
parseUser.email == linkedUser.email
parseUser.linkedId = linkedUser.id # not sure what happens to parseUser when unlinked
ignore parseUser.password
ignore parseUser.emailVerfied
check Session.get('createdWith').authProvider = [anonymous,password,facebook,twitter]
you can recover an unlinked account, only for Parse.User.current(), i.e. not logged out
BUG:
for browser, linkUserP() returns a 60 day expiration, regardless of authData
for device, authData expiration is put into UserObj.authData correctly,
but Parse.User.current().get('authData') returns an expired token
BUG:
for device, inAppBrowser only appears first time, can't re-connect a 2nd time.
###
angular
.module 'parse.backend', ['auth']
.factory 'restApi', [
'$http', 'auth.KEYS'
($http, KEYS)->
parseHeaders_GET = {
'X-Parse-Application-Id': KEYS.APP_ID,
'X-Parse-REST-API-Key':KEYS.REST_API_KEY,
}
parseHeaders_SET = _.defaults { 'Content-Type':'application/json' }, parseHeaders_GET
self = {
getAll: (className)->
return $http.get('https://api.parse.com/1/classes/' + className, {
headers: parseHeaders_GET
})
get: (className, id)->
return $http.get('https://api.parse.com/1/classes/' + className + '/' + id, {
headers: parseHeaders_GET
})
create: (className, data)->
return $http.post('https://api.parse.com/1/classes/' + className, data, {
headers: parseHeaders_SET
})
edit: (className, id, data)->
return $http.put('https://api.parse.com/1/classes/' + className + '/' + id, data, {
headers: parseHeaders_SET
});
delete: (className, id)->
return $http.delete('https://api.parse.com/1/classes/' + className + '/' + id, {
headers: parseHeaders_SET
})
}
return self
]
.factory 'appParse', [
'$q', '$timeout', '$http', '$rootScope', 'deviceReady', 'auth.KEYS',
'exportDebug'
($q, $timeout, $http, $rootScope, deviceReady, KEYS, exportDebug)->
parseClass = {
BacklogObj : Parse.Object.extend('BacklogObj')
}
ANON_PREFIX = {
username: 'anonymous-'
password: 'PI:PASSWORD:<PASSWORD>END_PI-'
}
ANON_USER = {
id: null
username: null
password: PI:PASSWORD:<PASSWORD>END_PI
email: null
emailVerified: false
tosAgree: false
rememberMe: false
isRegistered: false
}
self = {
_authProvider : null
authProvider : (value)->
self._authProvider = value if `value!=null`
return self._authProvider
authProviderP: ()->
return Parse.Session.current()
.then (session)->
return self.authProvider( session.get('createdWith').authProvider )
isAnonymousUser: ()->
# should really check self.authProviderP()
if self.authProvider()?
return true if self.authProvider() == 'anonymous'
return false
return true if _.isEmpty $rootScope.parseUser
return false if $rootScope.parseUser.get('authData')
return true if $rootScope.parseUser.get('username').indexOf(ANON_PREFIX.username) == 0
# return true if $rootScope.parseUser.get('username') == 'browser'
return false
mergeSessionUser: (anonUser={})->
anonUser = _.extend _.clone(ANON_USER), anonUser
# merge from cookie into $rootScope.user
$rootScope.parseUser = Parse.User.current()
return anonUser if !($rootScope.parseUser instanceof Parse.Object)
isRegistered = !self.isAnonymousUser()
return anonUser if !isRegistered
userCred = _.pick( $rootScope.parseUser.toJSON(), [
'username', 'role',
'email', 'emailVerified',
'tosAgree', 'rememberMe'
] )
userCred.password = 'PI:PASSWORD:<PASSWORD>END_PI'
userCred.tosAgree = !!userCred.tosAgree # checkbox:ng-model expects a boolean
userCred.isRegistered = self.isAnonymousUser()
return _.extend anonUser, userCred
signUpP: (userCred)->
user = new Parse.User();
user.set("username", userCred.username.toLowerCase())
user.set("password", PI:PASSWORD:<PASSWORD>END_PI)
user.set("email", userCred.email)
return user.signUp().then (user)->
promise = self.authProviderP()
return $rootScope.parseUser = Parse.User.current()
, (user, error)->
$rootScope.parseUser = null
$rootScope.user.username = ''
$rootScope.user.password =PI:PASSWORD:<PASSWORD>END_PI ''
$rootScope.user.email = ''
console.warn "parse User.signUp error, msg=" + JSON.stringify error
return $q.reject(error)
###
# parseUser login only, authProvider='password'
# @params userCred object, keys {username:, password:}
# or array of keys
###
loginP: (userCred, signOutOnErr=true)->
userCred = _.pick userCred, ['username', 'PI:PASSWORD:<PASSWORD>END_PI']
return deviceReady.waitP().then ()->
return Parse.User.logIn( userCred.username.trim().toLowerCase(), userCred.password )
.then (user)->
promise = self.authProviderP()
$rootScope.parseUser = Parse.User.current()
$rootScope.user = self.mergeSessionUser($rootScope.user)
return user
, (error)->
if signOutOnErr
$rootScope.parseUser = null
$rootScope.$broadcast 'user:sign-out'
console.warn "User login error. msg=" + JSON.stringify error
$q.reject(error)
signUpOrLoginFromAuthDataP: (authData, cb)->
options = {
method: 'POST'
url: "https://api.parse.com/1/users"
headers:
'X-Parse-Application-Id': KEYS.parse.APP_ID
'X-Parse-REST-API-Key': KEYS.parse.REST_API_KEY
'Content-Type': 'application/json'
data: {authData: authData}
}
return deviceReady.waitP()
.then ()->
if deviceReady.device().isDevice
# for device Only or also Browser?
return Parse._getInstallationId()
.then (installationId)->
options.headers['X-Parse-Installation-Id'] = installationId
return options
else
return options
.then (options)->
return $http( options )
.then (resp)->
switch resp.status
when 200,201
sessionToken = resp.data.sessionToken
return Parse.User.become(sessionToken)
.then (user)->
$rootScope.parseUser = Parse.User.current()
$rootScope.user = self.mergeSessionUser($rootScope.user)
return resp
else
return $q.reject (resp)
.then (resp)->
# import FB attrs, see appFacebook._patchUserFieldsP(fbUser)
switch resp.status
when 200
console.log ">> Parse LOGIN from linkedUser", [resp.data, authData]
when 201 # created new Parse.User from linked Account
console.log ">> Parse user CREATED from linkedUser", [resp.data, authData]
if _.isFunction cb
return cb( resp ).then ()->
return resp
return resp
logoutSession: (anonUser)->
Parse.User.logOut()
$rootScope.parseUser = Parse.User.current()
$rootScope.user = ANON_USER
return
anonSignUpP: (seed)->
_uniqueId = (length=8) ->
id = ""
id += Math.random().toString(36).substr(2) while id.length < length
id.substr 0, length
seed = _uniqueId(8) if !seed
anon = {
username: ANON_PREFIX.username + seed
password: ANON_PREFIX.password + seed
}
return self.signUpP(anon)
.then (userObj)->
return userObj
, (userCred, error)->
console.warn "parseUser anonSignUpP() FAILED, userCred=" + JSON.stringify userCred
return $q.reject( error )
linkUserP: (authData)->
parseUser = Parse.User.current()
return $q.reject('linkUserP error: Parse.User not signed in ') if !parseUser
options = {
method: 'PUT'
url: "https://api.parse.com/1/users/" + parseUser.id
headers:
'X-Parse-Application-Id': KEYS.parse.APP_ID
'X-Parse-REST-API-Key': KEYS.parse.REST_API_KEY
'X-Parse-Session-Token': parseUser.getSessionToken()
'Content-Type': 'application/json'
params: null
data: {authData: authData}
}
return $http( options ).then (resp)->
console.log ">> Parse user LINKED to linkedUser", [resp.data, authData]
return $q.reject (resp) if resp.statusText != 'OK'
return parseUser
# confirm userCred or create anonymous user if Parse.User.current()==null
checkSessionUserP: (userCred, createAnonUser=true)->
if userCred # confirm userCred
authPromise = self.loginP(userCred, false).then null, (err)->
return $q.reject({
message: "userCred invalid"
code: 301
})
else if $rootScope.parseUser
authPromise = $q.when($rootScope.parseUser)
else
authPromise = $q.reject()
if createAnonUser
authPromise = authPromise.then (o)->
return o
, (error)->
return self.anonSignUpP()
return authPromise
saveSessionUserP : (updateKeys, userCred)->
# update or create
if _.isEmpty($rootScope.parseUser)
# create
promise = self.signUpP(userCred)
else if self.isAnonymousUser()
promise = $q.when()
else # verify userCred before updating user profile
reverify = {
username: userCred['username']
password: PI:PASSWORD:<PASSWORD>END_PI['PI:PASSWORD:<PASSWORD>END_PI']
}
promise = self.checkSessionUserP(reverify, false)
promise = promise.then ()->
# userCred should be valid, continue with update
_.each updateKeys, (key)->
return if key == 'currentPassword'
if key=='username'
userCred['username'] = userCred['username'].trim().toLowerCase()
$rootScope.parseUser.set(key, userCred[key])
return
return $rootScope.parseUser.save().then ()->
return $rootScope.user = self.mergeSessionUser($rootScope.user)
, (error)->
$rootScope.parseUser = null
$rootScope.user.username = ''
$rootScope.user.password =PI:PASSWORD:<PASSWORD>END_PI ''
$rootScope.user.email = ''
console.warn "parse User.save error, msg=" + JSON.stringify error
return $q.reject(error)
.then ()->
$rootScope.parseUser = Parse.User.current()
return $q.when($rootScope.parseUser)
, (err)->
return $q.reject(err) # end of line
      # Update a whitelisted subset of parseUser profile fields.
      updateUserProfileP : (options)->
        # NOTE(review): the key whitelist below was destroyed by a
        # secret-scrubbing pass ('PI:KEY:...' placeholders); restore the
        # original field names from VCS before relying on this method.
        keys = ['PI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI']
        options = _.pick options, keys
        return $q.when() if _.isEmpty options
        return deviceReady.waitP().then ()->
          return self.checkSessionUserP(null, true)
        .then ()->
          return $rootScope.parseUser.save(options)
        , (err)->
          # NOTE(review): returning err here converts the rejection into a
          # resolution; $q.reject(err) was probably intended — confirm
          return err
###
# THESE METHODS ARE UNTESTED
###
uploadPhotoMetaP: (workorderObj, photo)->
return $q.reject("uploadPhotoMetaP: photo is empty") if !photo
# upload photo meta BEFORE file upload from native uploader
# photo.src == 'queued'
return deviceReady.waitP().then self.checkSessionUserP(null, false)
.then ()->
attrsForParse = [
'dateTaken', 'originalWidth', 'originalHeight',
'rating', 'favorite', 'caption', 'hidden'
'exif', 'orientation', 'location'
"mediaType", "mediaSubTypes", "burstIdentifier", "burstSelectionTypes", "representsBurst",
]
extendedAttrs = _.pick photo, attrsForParse
# console.log extendedAttrs
parseData = _.extend {
# assetId: photo.UUID # deprecate
UUID: photo.UUID
owner: $rootScope.parseUser
deviceId: deviceReady.device().id
src: "queued"
}
, extendedAttrs # , classDefaults
photoObj = new parseClass.PhotoObj parseData , {initClass: false }
# set default ACL, owner:rw, Curator:rw
photoACL = new Parse.ACL(parseData.owner)
photoACL.setRoleReadAccess('Curator', true)
photoACL.setRoleWriteAccess('Curator', true)
photoObj.setACL (photoACL)
return photoObj.save()
.then (o)->
# console.log "photoObj.save() complete: " + JSON.stringify o.attributes
return
, (err)->
console.warn "ERROR: uploadPhotoMetaP photoObj.save(), err=" + JSON.stringify err
return $q.reject(err)
uploadPhotoFileP : (options, dataURL)->
# called by parseUploader, _uploadNext()
# upload file then update PhotoObj photo.src, does not know workorder
# return parseFile = { UUID:, url(): }
return deviceReady.waitP().then self.checkSessionUserP(null, false)
.then ()->
if deviceReady.device().isBrowser
return $q.reject( {
UUID: UUID
message: "error: file upload not available from browser"
})
.then ()->
photo = {
UUID: options.UUID
filename: options.filename
data: dataURL
}
# photo.UUID, photo.data = dataURL
return self.uploadFileP(photo.data, photo)
.catch (error)->
skipErrorFile = {
UUID: error.UUID
url: ()-> return error.message
}
switch error.message
when "error: Base64 encoding failed", "Base64 encoding failed"
return $q.when skipErrorFile
when "error: UUID not found in CameraRoll", "Not found!"
return $q.when skipErrorFile
else
throw error
# 'parse' uploader only, requires DataURLs
uploadFileP : (base64src, photo)->
if /^data:image/.test(base64src)
# expecting this prefix: 'data:image/jpg;base64,' + rawBase64
mimeType = base64src[10..20]
ext = 'jpg' if (/jpg|jpeg/i.test(mimeType))
ext = 'png' if (/png/i.test(mimeType))
filename = photo.filename || photo.UUID.replace('/','_') + '.' + ext
console.log "\n\n >>> Parse file save, filename=" + filename
console.log "\n\n >>> Parse file save, dataURL=" + base64src[0..50]
# get mimeType, then strip off mimeType, as necessary
base64src = base64src.split(',')[1]
else
ext = 'jpg' # just assume
# save DataURL as image file on Parse
parseFile = new Parse.File(filename, {
base64: base64src
})
return parseFile.save()
}
exportDebug.appParse = self
return self
]
###
#
# @ngdoc factory
# @name appFacebook
# @description
# methods for accessing openFB lib, se https://github.com/ccoenraets/OpenFB
#
###
angular
.module( 'parse.backend')
.factory 'appFacebook', [
'$q'
'$rootScope'
'exportDebug'
'appParse', 'ngFB'
($q, $rootScope, exportDebug, appParse, ngFB)->
$rootScope.$on 'user:sign-out', ()->
self.disconnectP()
return
self = {
LINK_EXPIRATION_DAYS: 60
_findParseUserByLinkedIdP : (fbId)->
return $q.when( rootScope.parseUser ) if $rootScope.parseUser
userQ = new Parse.Query(Parse.User)
userQ.equalTo('linkedId', fbId)
return userQ.first().then (resp)->
return $q.reject('Parse User not found') if _.isEmpty resp
return resp
_getAuthData: (fbUser, fbLogin)->
return authData = {facebook:null} if fbUser==false
expireTime = 1000 * 3600 * 24 * self.LINK_EXPIRATION_DAYS + Date.now()
return authData = {
facebook:
id: fbUser.id
access_token: fbLogin.authResponse.accessToken
# authData.expiration sets to +60 days regardless of what we put here
expiration_date: new Date(expireTime).toJSON()
}
_patchUserFieldsP : (fbUser)->
parseUser = Parse.User.current()
updateFields = {
'name': fbUser.name
'linkedId': fbUser.id # save for admin recovery
'face': fbUser.picture.data.url
}
if parseUser.get('emailVerified') != true
updateFields['email'] = fbUser.email
return parseUser.save( updateFields)
checkLoginP: (user)->
user = $rootScope.user if `user==null`
return ngFB.getLoginStatus()
.then (resp)->
console.log "checkLoginP", resp
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return resp.status if resp.status == 'connected'
$rootScope.user['fbProfile'] = null
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
return $q.reject('fbUser not connected')
      # Facebook login, then link or create the Parse session user.
      # @param privileges comma-separated fb scopes; defaults below
      # @return promise resolving fbLogin.status ('connected')
      loginP: (privileges)->
        default_access = 'public_profile,email,user_friends'
        privileges = default_access if !privileges
        fbLogin = null
        return ngFB.login({ scope: privileges })
        .then (resp)->
          fbLogin = resp
          if resp.status != 'connected'
            # clear any stale cached profile before rejecting
            $rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = null
            $rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
            console.warn "Facebook login failed, resp=", resp
            return $q.reject(resp)
          # FbLogin sucessful
          console.log 'FB connected=', resp.status
          return self.getMeP()
        .then (fbUser)->
          # Parse login or create NEW parseUser
          if $rootScope.parseUser == null
            # create anon user and link with FbUser
            authData = self._getAuthData(fbUser, fbLogin)
            return appParse.signUpOrLoginFromAuthDataP( authData )
            .then ()->
              return self._patchUserFieldsP(fbUser)
          if $rootScope.parseUser?
            # link Parse.User & FbUser: replace any prior authData
            authData = self._getAuthData(fbUser, fbLogin)
            return appParse.linkUserP( authData )
            .then ()->
              return self._patchUserFieldsP(fbUser)
        .then ()->
          # refresh the cached auth provider; result intentionally unused
          promise = appParse.authProviderP()
          return fbLogin.status
      disconnectP: ()->
        # disconnect revokes the Fb sessionToken.
        # calls to getMeP() should fail with "Invalid OAuth access token."
        return ngFB.logout().then ()->
          # clear cached profile and connection flag
          $rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = null
          $rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
          return ngFB.getLoginStatus().then (resp)->
            console.log "disconnnect status=",resp
      # Unlink facebook from the Parse session user: clears authData,
      # revokes the fb token, wipes cached profile fields, then reloads
      # the current ui-router state.
      unlinkP: ()->
        # resets parseUser.authData
        return appParse.authProviderP()
        .then (authProvider)->
          if authProvider != 'facebook'
            return $q.reject('ERROR: unlinkP() session not linked from Facebook')
          # reset the parseUser.authData field
          authData = self._getAuthData(false)
          return appParse.linkUserP(authData)
        .then (resp)->
          return self.disconnectP()
        .then ()->
          return $rootScope.parseUser.save({
            'face':null
            'name':null
            'email':null # unique key
          })
        .then ()->
          # manually set to 'password' this is only valid to next logout
          # NOTE(review): other call sites use authProviderP(); confirm a
          # synchronous authProvider() setter actually exists on appParse
          promise = appParse.authProvider('password')
          $rootScope.user['unlinked'] = true
          # Parse.User.logOut();
          $rootScope.$state.reload()
          console.log "done"
getMeP: (fields)->
default_profile_fields = 'id,name,first_name,last_name,email,gender,location,locale,link,timezone,verified,picture,cover'
options = {
path : '/me'
params:
'fields': fields || default_profile_fields
}
return ngFB.api(options)
.then (resp)->
if appParse.isAnonymousUser()
_.extend $rootScope['user'], {
username: resp.name
email: resp.email
emailVerified: resp.email?
}
$rootScope.localStorage['fbProfile'] = $rootScope.user['fbProfile'] = resp
$rootScope.user['isConnected'] = $rootScope.user['fbProfile']?
console.log 'getFbUserP', resp
return resp
.catch (err)->
console.error 'getFbUserP', err
$rootScope.user['isConnected'] = false
return $q.reject(err)
getPermissions: ()->
options = {
path : '/me/permissions'
}
return ngFB.api(options)
.then (resp)->
return resp.data
}
exportDebug.appFacebook = self
return self
]
# # test cloudCode with js debugger
window.cloud = { }
|
[
{
"context": "# Copyright 2010-2019 Dan Elliott, Russell Valentine\n#\n# Licensed under the Apach",
"end": 35,
"score": 0.9998146295547485,
"start": 24,
"tag": "NAME",
"value": "Dan Elliott"
},
{
"context": "# Copyright 2010-2019 Dan Elliott, Russell Valentine\n#\n# Licensed ... | clients/www/src/coffee/isadore_graphs/isadore_control_graphs.coffee | bluthen/isadore_server | 0 | # Copyright 2010-2019 Dan Elliott, Russell Valentine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
dygraph with slider controls for zoom and position.
###
class window.IsadoreControlGraph
@SIDE_LEFT: 0
@SIDE_RIGHT: 1
@TYPE_POSITION: 0
@TYPE_MAG: 1
@CTL_LEFT_POS: 0
@CTL_LEFT_MAG: 1
@CTL_RIGHT_MAG: 2
@CTL_RIGHT_POS: 3
constructor: (element, data, options) ->
self=this
icg=IsadoreControlGraph
@parent = $(element)
@icgWrapper=$('<div class="icg_wrapper"></div>')
@controlWrappers=[
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
]
@controls=[
$('<div class="icg_slider"></div>') #Left Position
$('<div class="icg_slider"></div>') #Left Mag
$('<div class="icg_slider"></div>') #Right Mag
$('<div class="icg_slider"></div>') #Right Position
]
for i in [0...@controlWrappers.length]
if(i==icg.CTL_RIGHT_MAG)
#Insert graph between controls
@graphDiv=$('<div class="icg_graph"></div>')
@icgWrapper.append(@graphDiv)
@controlWrappers[i].append(@controls[i])
@icgWrapper.append(@controlWrappers[i])
@parent.append(@icgWrapper)
@ranges=[null,null]
@plot = new Dygraph(
@graphDiv[0]
data
options
)
sliderOptions={
orientation: 'vertical'
value: 50
slide: (event, ui) ->
self.scrollSlides_(event, ui)
change: (event, ui) ->
self.scrollStop_(event, ui)
}
for control in @controls
control.slider(sliderOptions)
@relayout()
###
@side Y-axis side
@slider the jquery slider object.
@param sliderType
###
  scrollSlidesSide_: (side, slider, sliderType) ->
    icg=IsadoreControlGraph
    # 50 is the slider's rest position; nothing to do there
    if(slider.slider('option', 'value') != 50)
      # remember the axis range at drag start so motion is relative to it
      if(not @ranges[side])
        @ranges[side]=@plot.yAxisRange(side)
      # normalize slider position to (-1, 1]
      value = (slider.slider('option', 'value')-50)/50
      center = @ranges[side][0] + (@ranges[side][1] - @ranges[side][0])/2
      if(sliderType == icg.TYPE_POSITION)
        # pan: shift the window center, keep the half-range
        range = (@ranges[side][1] - @ranges[side][0])/2
        center = center+range*value
        newRange=[center-range, center+range]
      else
        # NOTE(review): icg.TYPE_MAG is 1 and `value` never exceeds 1, so
        # this branch is dead code (`value > 0` may have been intended);
        # the else branch below handles both zoom directions via the sign
        # flip — confirm before removing.
        if(value > icg.TYPE_MAG)
          range = (@ranges[side][1] - @ranges[side][0])/(2+2*value)
          newRange=[center - range, center + range]
        else
          # sign flip: positive slider value shrinks the half-range
          # (zoom in), negative widens it (zoom out)
          value=-value
          range = ((@ranges[side][1] - @ranges[side][0])/2)*(1+value)
          newRange=[center - range, center + range]
      if(newRange[0] != @ranges[side][0] or newRange[1] != @ranges[side][1])
        ax = {axes: {} }
        # side 0 -> axis 'y', side 1 -> axis 'y2'
        ax.axes['y'+(if side then '2' else '')]={valueRange: newRange}
        #ax.axes['y'+(if side then '' else '2')]={valueRange: @plot.yAxisRange(Math.abs(side-1))}
        console.log(ax)
        @plot.updateOptions(ax)
scrollSlides_: (event, ui) ->
icg=IsadoreControlGraph
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_LEFT_MAG][0])
side=icg.SIDE_LEFT
else
side=icg.SIDE_RIGHT
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_RIGHT_POS][0])
sliderType = icg.TYPE_POSITION #Position
else
sliderType = icg.TYPE_MAG
@scrollSlidesSide_(side, $(event.target), sliderType)
scrollStopSide_: (side, slider) ->
@ranges[side]=null
if(slider.slider('option', 'value') != 50)
slider.slider('option', 'value', 50)
scrollStop_: (event, ui) ->
icg=IsadoreControlGraph
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_LEFT_MAG][0])
@scrollStopSide_(icg.SIDE_LEFT, $(event.target))
else if(event.target == @controls[icg.CTL_RIGHT_POS][0] or event.target == @controls[icg.CTL_RIGHT_MAG][0])
@scrollStopSide_(icg.SIDE_RIGHT, $(event.target))
relayout: () ->
icg = IsadoreControlGraph
h = @icgWrapper.innerHeight()
bpad=20
tpad=10
for controlWrapper in @controlWrappers
controlWrapper.height(h-bpad-tpad)
vpad=20
pos=vpad
for ii in [0...@controlWrappers.length]
if(ii == icg.CTL_RIGHT_MAG)
#size graph
@graphDiv.height(@icgWrapper.innerHeight()-tpad)
#TODO: Why do we need the -1 to make it fit?
@graphDiv.width(@icgWrapper.innerWidth() - 2*pos-1)
@graphDiv.css({top: tpad, left: pos+'px'})
@plot.resize(@graphDiv.innerWidth(), @graphDiv.innerHeight())
pos+=@graphDiv.outerWidth()+vpad
controlWrapper = @controlWrappers[ii]
controlWrapper.css({top: tpad, left: pos+'px'})
pos+=controlWrapper.outerWidth()+vpad
updateOptions: (options) ->
@plot.updateOptions(options)
return this
xAxisRange: () ->
return @plot.xAxisRange()
###
Dual control graphs with range selector.
@param element The elemnt to put the dual graph in
@param The data [topdata, bottomdata, rangeselectordata]
@param IDCGOptions object with the following:
{
axisColors:[[topY1color, topY2color], [bottomY1color, bottomY2color]]
axisLabels:[[topY1Label, topY2Label], [bottomY1Label, bottomY2Label]]
seriesLabels:[[topSeries1label, topSeries2label,...], [bottomSeries1Label, ...]]
dataAxisMap:[[topSeries1AxisIndex, ...], [bottomSeries1AxisIndex, ...]]
}
Colors are html hex #FFAB02
AxisIndex 0 for y axis, 1 for y2 axis.
###
class window.IsadoreDualControlGraph
@GRAPH_TOP: 0
@GRAPH_BOTTOM: 1
constructor: (element, data, IDCGOptions) ->
self=this
idcg=IsadoreDualControlGraph
@parent = $(element)
@idcgWrapper = $('<div class="idcg_wrapper"></div>')
@graphDivs = [
$('<div class="idcg_graph idcg_graph_top"></div>'),
$('<div class="idcg_graph idcg_graph_bottom"></div>')
$('<div class="idcg_graph idcg_graph_range_selector"></div>')
]
for div in @graphDivs
@idcgWrapper.append(div)
@parent.append(@idcgWrapper)
options=[
@genDygraphOptions_(IDCGOptions, idcg.GRAPH_TOP),
@genDygraphOptions_(IDCGOptions, idcg.GRAPH_BOTTOM),
@genRangeSelectorOptions_(IDCGOptions)
]
iModel= {
mousedown: (event, g, context) ->
context.initializeMouseDown(event, g, context)
Dygraph.startPan(event, g, context)
mousemove: (event, g, context) ->
if (context.isPanning)
Dygraph.movePan(event, g, context)
mouseup: (event, g, context) ->
if (context.isPanning)
Dygraph.endPan(event, g, context)
}
options[0].interactionModel=iModel
options[1].interactionModel=iModel
@plots=[]
for i in [0...2]
@plots.push(
new IsadoreControlGraph(
@graphDivs[i]
data[i]
options[i]
)
)
#Range selector
@plots.push(
new Dygraph(
@graphDivs[2][0]
data[2]
options[2]
)
)
for i in [0...@graphDivs.length]
altCallback = options[i].drawCallback
@plots[i].updateOptions({
drawCallback: (graph, inited) ->
range=graph.xAxisRange()
for plot in self.plots
if(plot != graph)
orange = plot.xAxisRange()
if(range[1] - range[0] < 0)
graph.updateOptions({dateWindow: [orange[0], orange[1]]})
break
if(orange[0] != range[0] or orange[1] != range[1])
plot.updateOptions({dateWindow: [range[0], range[1]]})
if(altCallback)
altCallback(graph, inited)
})
@icons = [
$('<img class="idcg_icons" src="imgs/icon_position.png" alt="position"/>')
$('<img class="idcg_icons" src="imgs/icon_magnify.png" alt="magnify"/>')
$('<img class="idcg_icons" src="imgs/icon_magnify.png" alt="magnify"/>')
$('<img class="idcg_icons" src="imgs/icon_position.png" alt="position"/>')
]
for icon in @icons
@idcgWrapper.append(icon)
@relayout()
updateOptions: (options) ->
idcg=IsadoreDualControlGraph
optionsA = [
@genDygraphOptions_(options, idcg.GRAPH_TOP)
@genDygraphOptions_(options, idcg.GRAPH_BOTTOM)
]
@plots[idcg.GRAPH_TOP].updateOptions(optionsA[idcg.GRAPH_TOP])
@plots[idcg.GRAPH_BOTTOM].updateOptions(optionsA[idcg.GRAPH_BOTTOM])
# Add number to end of label if there are duplicates.
fixedSeriesLabel_: (labels, idx) ->
label = labels[idx]
count = 1
for i in [0...idx]
if labels[i] == label
count++
if count > 1
return (label + count)
else
return label
genDygraphOptions_: (options, graph) ->
idcg=IsadoreDualControlGraph
patterns=[null, Dygraph.DASHED_LINE, Dygraph.DOTTED_LINE, Dygraph.DOT_DASH_LINE]
newOptions={
legend: 'always'
panEdgeFraction: .0001
customBars: true
axes: {
y2: {}
}
ylabel: '<span style="color: '+options.axisColors[graph][0]+'">'+options.axisLabels[graph][0]+'</span>'
y2label: '<span style="color: '+options.axisColors[graph][1]+'">'+options.axisLabels[graph][1]+'</span>'
yAxisLabelWidth: 70
}
newOptions.colors=[]
for i in [0...options.seriesLabels[graph].length]
newOptions.colors.push(options.axisColors[graph][options.dataAxisMap[graph][i]])
newOptions.labels=['Date']
firstY2Label=null
ycount = 0
y2count = 0
for i in [0...options.seriesLabels[graph].length]
label=@fixedSeriesLabel_(options.seriesLabels[graph], i)
newOptions.labels.push(label)
if(options.dataAxisMap[graph][i]==1)
if(firstY2Label)
newOptions[label]={axis:firstY2Label, strokePattern: patterns[y2count%patterns.length]}
else
newOptions[label]={axis:{}, strokePattern: patterns[y2count%patterns.length]}
firstY2Label=label
y2count++
else
newOptions[label]={strokePattern: patterns[ycount%patterns.length]}
ycount++
if(graph == idcg.BOTTOM_GRAPH)
newOptions.xAxisLabelWidth = 0
console.log('newOptions='+newOptions)
return newOptions
genRangeSelectorOptions_: (options) ->
newOptions = {
drawYAxis: false
drawXAxis: false
customBars: true
showRangeSelector: true
rangeSelectorHeight: 40
xAxisLabelWidth: 0
}
return newOptions
relayout: () ->
rsHeight = $('.dygraph-rangesel-fgcanvas').height()
h=(@idcgWrapper.innerHeight()-rsHeight)/2
@graphDivs[0].height(h)
@graphDivs[1].height(h)
@graphDivs[2].height(rsHeight)
@plots[0].relayout()
@plots[1].relayout()
for ii in [0...@icons.length]
icon = @icons[ii]
wrapper = @plots[0].controlWrappers[ii] #slider wrapper
icon.css({top: wrapper.position().top+wrapper.height()+5, left: wrapper.position().left-3})
#Range selector position
@graphDivs[2].width(@plots[0].graphDiv.width()-110)
@graphDivs[2].css({top: -20, left: @plots[0].graphDiv.position().left+55})
@plots[2].resize(@graphDivs[2].width(), @graphDivs[2].height())
| 35800 | # Copyright 2010-2019 <NAME>, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
dygraph with slider controls for zoom and position.
###
class window.IsadoreControlGraph
@SIDE_LEFT: 0
@SIDE_RIGHT: 1
@TYPE_POSITION: 0
@TYPE_MAG: 1
@CTL_LEFT_POS: 0
@CTL_LEFT_MAG: 1
@CTL_RIGHT_MAG: 2
@CTL_RIGHT_POS: 3
constructor: (element, data, options) ->
self=this
icg=IsadoreControlGraph
@parent = $(element)
@icgWrapper=$('<div class="icg_wrapper"></div>')
@controlWrappers=[
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
]
@controls=[
$('<div class="icg_slider"></div>') #Left Position
$('<div class="icg_slider"></div>') #Left Mag
$('<div class="icg_slider"></div>') #Right Mag
$('<div class="icg_slider"></div>') #Right Position
]
for i in [0...@controlWrappers.length]
if(i==icg.CTL_RIGHT_MAG)
#Insert graph between controls
@graphDiv=$('<div class="icg_graph"></div>')
@icgWrapper.append(@graphDiv)
@controlWrappers[i].append(@controls[i])
@icgWrapper.append(@controlWrappers[i])
@parent.append(@icgWrapper)
@ranges=[null,null]
@plot = new Dygraph(
@graphDiv[0]
data
options
)
sliderOptions={
orientation: 'vertical'
value: 50
slide: (event, ui) ->
self.scrollSlides_(event, ui)
change: (event, ui) ->
self.scrollStop_(event, ui)
}
for control in @controls
control.slider(sliderOptions)
@relayout()
###
@side Y-axis side
@slider the jquery slider object.
@param sliderType
###
scrollSlidesSide_: (side, slider, sliderType) ->
icg=IsadoreControlGraph
if(slider.slider('option', 'value') != 50)
if(not @ranges[side])
@ranges[side]=@plot.yAxisRange(side)
value = (slider.slider('option', 'value')-50)/50
center = @ranges[side][0] + (@ranges[side][1] - @ranges[side][0])/2
if(sliderType == icg.TYPE_POSITION)
range = (@ranges[side][1] - @ranges[side][0])/2
center = center+range*value
newRange=[center-range, center+range]
else
if(value > icg.TYPE_MAG)
range = (@ranges[side][1] - @ranges[side][0])/(2+2*value)
newRange=[center - range, center + range]
else
value=-value
range = ((@ranges[side][1] - @ranges[side][0])/2)*(1+value)
newRange=[center - range, center + range]
if(newRange[0] != @ranges[side][0] or newRange[1] != @ranges[side][1])
ax = {axes: {} }
ax.axes['y'+(if side then '2' else '')]={valueRange: newRange}
#ax.axes['y'+(if side then '' else '2')]={valueRange: @plot.yAxisRange(Math.abs(side-1))}
console.log(ax)
@plot.updateOptions(ax)
scrollSlides_: (event, ui) ->
icg=IsadoreControlGraph
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_LEFT_MAG][0])
side=icg.SIDE_LEFT
else
side=icg.SIDE_RIGHT
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_RIGHT_POS][0])
sliderType = icg.TYPE_POSITION #Position
else
sliderType = icg.TYPE_MAG
@scrollSlidesSide_(side, $(event.target), sliderType)
scrollStopSide_: (side, slider) ->
@ranges[side]=null
if(slider.slider('option', 'value') != 50)
slider.slider('option', 'value', 50)
scrollStop_: (event, ui) ->
icg=IsadoreControlGraph
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_LEFT_MAG][0])
@scrollStopSide_(icg.SIDE_LEFT, $(event.target))
else if(event.target == @controls[icg.CTL_RIGHT_POS][0] or event.target == @controls[icg.CTL_RIGHT_MAG][0])
@scrollStopSide_(icg.SIDE_RIGHT, $(event.target))
relayout: () ->
icg = IsadoreControlGraph
h = @icgWrapper.innerHeight()
bpad=20
tpad=10
for controlWrapper in @controlWrappers
controlWrapper.height(h-bpad-tpad)
vpad=20
pos=vpad
for ii in [0...@controlWrappers.length]
if(ii == icg.CTL_RIGHT_MAG)
#size graph
@graphDiv.height(@icgWrapper.innerHeight()-tpad)
#TODO: Why do we need the -1 to make it fit?
@graphDiv.width(@icgWrapper.innerWidth() - 2*pos-1)
@graphDiv.css({top: tpad, left: pos+'px'})
@plot.resize(@graphDiv.innerWidth(), @graphDiv.innerHeight())
pos+=@graphDiv.outerWidth()+vpad
controlWrapper = @controlWrappers[ii]
controlWrapper.css({top: tpad, left: pos+'px'})
pos+=controlWrapper.outerWidth()+vpad
updateOptions: (options) ->
@plot.updateOptions(options)
return this
xAxisRange: () ->
return @plot.xAxisRange()
###
Dual control graphs with range selector.
@param element The elemnt to put the dual graph in
@param The data [topdata, bottomdata, rangeselectordata]
@param IDCGOptions object with the following:
{
axisColors:[[topY1color, topY2color], [bottomY1color, bottomY2color]]
axisLabels:[[topY1Label, topY2Label], [bottomY1Label, bottomY2Label]]
seriesLabels:[[topSeries1label, topSeries2label,...], [bottomSeries1Label, ...]]
dataAxisMap:[[topSeries1AxisIndex, ...], [bottomSeries1AxisIndex, ...]]
}
Colors are html hex #FFAB02
AxisIndex 0 for y axis, 1 for y2 axis.
###
class window.IsadoreDualControlGraph
@GRAPH_TOP: 0
@GRAPH_BOTTOM: 1
constructor: (element, data, IDCGOptions) ->
self=this
idcg=IsadoreDualControlGraph
@parent = $(element)
@idcgWrapper = $('<div class="idcg_wrapper"></div>')
@graphDivs = [
$('<div class="idcg_graph idcg_graph_top"></div>'),
$('<div class="idcg_graph idcg_graph_bottom"></div>')
$('<div class="idcg_graph idcg_graph_range_selector"></div>')
]
for div in @graphDivs
@idcgWrapper.append(div)
@parent.append(@idcgWrapper)
options=[
@genDygraphOptions_(IDCGOptions, idcg.GRAPH_TOP),
@genDygraphOptions_(IDCGOptions, idcg.GRAPH_BOTTOM),
@genRangeSelectorOptions_(IDCGOptions)
]
iModel= {
mousedown: (event, g, context) ->
context.initializeMouseDown(event, g, context)
Dygraph.startPan(event, g, context)
mousemove: (event, g, context) ->
if (context.isPanning)
Dygraph.movePan(event, g, context)
mouseup: (event, g, context) ->
if (context.isPanning)
Dygraph.endPan(event, g, context)
}
options[0].interactionModel=iModel
options[1].interactionModel=iModel
@plots=[]
for i in [0...2]
@plots.push(
new IsadoreControlGraph(
@graphDivs[i]
data[i]
options[i]
)
)
#Range selector
@plots.push(
new Dygraph(
@graphDivs[2][0]
data[2]
options[2]
)
)
for i in [0...@graphDivs.length]
altCallback = options[i].drawCallback
@plots[i].updateOptions({
drawCallback: (graph, inited) ->
range=graph.xAxisRange()
for plot in self.plots
if(plot != graph)
orange = plot.xAxisRange()
if(range[1] - range[0] < 0)
graph.updateOptions({dateWindow: [orange[0], orange[1]]})
break
if(orange[0] != range[0] or orange[1] != range[1])
plot.updateOptions({dateWindow: [range[0], range[1]]})
if(altCallback)
altCallback(graph, inited)
})
@icons = [
$('<img class="idcg_icons" src="imgs/icon_position.png" alt="position"/>')
$('<img class="idcg_icons" src="imgs/icon_magnify.png" alt="magnify"/>')
$('<img class="idcg_icons" src="imgs/icon_magnify.png" alt="magnify"/>')
$('<img class="idcg_icons" src="imgs/icon_position.png" alt="position"/>')
]
for icon in @icons
@idcgWrapper.append(icon)
@relayout()
updateOptions: (options) ->
idcg=IsadoreDualControlGraph
optionsA = [
@genDygraphOptions_(options, idcg.GRAPH_TOP)
@genDygraphOptions_(options, idcg.GRAPH_BOTTOM)
]
@plots[idcg.GRAPH_TOP].updateOptions(optionsA[idcg.GRAPH_TOP])
@plots[idcg.GRAPH_BOTTOM].updateOptions(optionsA[idcg.GRAPH_BOTTOM])
# Add number to end of label if there are duplicates.
fixedSeriesLabel_: (labels, idx) ->
label = labels[idx]
count = 1
for i in [0...idx]
if labels[i] == label
count++
if count > 1
return (label + count)
else
return label
genDygraphOptions_: (options, graph) ->
idcg=IsadoreDualControlGraph
patterns=[null, Dygraph.DASHED_LINE, Dygraph.DOTTED_LINE, Dygraph.DOT_DASH_LINE]
newOptions={
legend: 'always'
panEdgeFraction: .0001
customBars: true
axes: {
y2: {}
}
ylabel: '<span style="color: '+options.axisColors[graph][0]+'">'+options.axisLabels[graph][0]+'</span>'
y2label: '<span style="color: '+options.axisColors[graph][1]+'">'+options.axisLabels[graph][1]+'</span>'
yAxisLabelWidth: 70
}
newOptions.colors=[]
for i in [0...options.seriesLabels[graph].length]
newOptions.colors.push(options.axisColors[graph][options.dataAxisMap[graph][i]])
newOptions.labels=['Date']
firstY2Label=null
ycount = 0
y2count = 0
for i in [0...options.seriesLabels[graph].length]
label=@fixedSeriesLabel_(options.seriesLabels[graph], i)
newOptions.labels.push(label)
if(options.dataAxisMap[graph][i]==1)
if(firstY2Label)
newOptions[label]={axis:firstY2Label, strokePattern: patterns[y2count%patterns.length]}
else
newOptions[label]={axis:{}, strokePattern: patterns[y2count%patterns.length]}
firstY2Label=label
y2count++
else
newOptions[label]={strokePattern: patterns[ycount%patterns.length]}
ycount++
if(graph == idcg.BOTTOM_GRAPH)
newOptions.xAxisLabelWidth = 0
console.log('newOptions='+newOptions)
return newOptions
genRangeSelectorOptions_: (options) ->
newOptions = {
drawYAxis: false
drawXAxis: false
customBars: true
showRangeSelector: true
rangeSelectorHeight: 40
xAxisLabelWidth: 0
}
return newOptions
relayout: () ->
rsHeight = $('.dygraph-rangesel-fgcanvas').height()
h=(@idcgWrapper.innerHeight()-rsHeight)/2
@graphDivs[0].height(h)
@graphDivs[1].height(h)
@graphDivs[2].height(rsHeight)
@plots[0].relayout()
@plots[1].relayout()
for ii in [0...@icons.length]
icon = @icons[ii]
wrapper = @plots[0].controlWrappers[ii] #slider wrapper
icon.css({top: wrapper.position().top+wrapper.height()+5, left: wrapper.position().left-3})
#Range selector position
@graphDivs[2].width(@plots[0].graphDiv.width()-110)
@graphDivs[2].css({top: -20, left: @plots[0].graphDiv.position().left+55})
@plots[2].resize(@graphDivs[2].width(), @graphDivs[2].height())
| true | # Copyright 2010-2019 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
dygraph with slider controls for zoom and position.
###
class window.IsadoreControlGraph
@SIDE_LEFT: 0
@SIDE_RIGHT: 1
@TYPE_POSITION: 0
@TYPE_MAG: 1
@CTL_LEFT_POS: 0
@CTL_LEFT_MAG: 1
@CTL_RIGHT_MAG: 2
@CTL_RIGHT_POS: 3
constructor: (element, data, options) ->
self=this
icg=IsadoreControlGraph
@parent = $(element)
@icgWrapper=$('<div class="icg_wrapper"></div>')
@controlWrappers=[
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
$('<div class="icg_slider_wrapper"></div>')
]
@controls=[
$('<div class="icg_slider"></div>') #Left Position
$('<div class="icg_slider"></div>') #Left Mag
$('<div class="icg_slider"></div>') #Right Mag
$('<div class="icg_slider"></div>') #Right Position
]
for i in [0...@controlWrappers.length]
if(i==icg.CTL_RIGHT_MAG)
#Insert graph between controls
@graphDiv=$('<div class="icg_graph"></div>')
@icgWrapper.append(@graphDiv)
@controlWrappers[i].append(@controls[i])
@icgWrapper.append(@controlWrappers[i])
@parent.append(@icgWrapper)
@ranges=[null,null]
@plot = new Dygraph(
@graphDiv[0]
data
options
)
sliderOptions={
orientation: 'vertical'
value: 50
slide: (event, ui) ->
self.scrollSlides_(event, ui)
change: (event, ui) ->
self.scrollStop_(event, ui)
}
for control in @controls
control.slider(sliderOptions)
@relayout()
###
@side Y-axis side
@slider the jquery slider object.
@param sliderType
###
scrollSlidesSide_: (side, slider, sliderType) ->
icg=IsadoreControlGraph
if(slider.slider('option', 'value') != 50)
if(not @ranges[side])
@ranges[side]=@plot.yAxisRange(side)
value = (slider.slider('option', 'value')-50)/50
center = @ranges[side][0] + (@ranges[side][1] - @ranges[side][0])/2
if(sliderType == icg.TYPE_POSITION)
range = (@ranges[side][1] - @ranges[side][0])/2
center = center+range*value
newRange=[center-range, center+range]
else
if(value > icg.TYPE_MAG)
range = (@ranges[side][1] - @ranges[side][0])/(2+2*value)
newRange=[center - range, center + range]
else
value=-value
range = ((@ranges[side][1] - @ranges[side][0])/2)*(1+value)
newRange=[center - range, center + range]
if(newRange[0] != @ranges[side][0] or newRange[1] != @ranges[side][1])
ax = {axes: {} }
ax.axes['y'+(if side then '2' else '')]={valueRange: newRange}
#ax.axes['y'+(if side then '' else '2')]={valueRange: @plot.yAxisRange(Math.abs(side-1))}
console.log(ax)
@plot.updateOptions(ax)
scrollSlides_: (event, ui) ->
icg=IsadoreControlGraph
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_LEFT_MAG][0])
side=icg.SIDE_LEFT
else
side=icg.SIDE_RIGHT
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_RIGHT_POS][0])
sliderType = icg.TYPE_POSITION #Position
else
sliderType = icg.TYPE_MAG
@scrollSlidesSide_(side, $(event.target), sliderType)
scrollStopSide_: (side, slider) ->
@ranges[side]=null
if(slider.slider('option', 'value') != 50)
slider.slider('option', 'value', 50)
scrollStop_: (event, ui) ->
icg=IsadoreControlGraph
if(event.target == @controls[icg.CTL_LEFT_POS][0] or event.target == @controls[icg.CTL_LEFT_MAG][0])
@scrollStopSide_(icg.SIDE_LEFT, $(event.target))
else if(event.target == @controls[icg.CTL_RIGHT_POS][0] or event.target == @controls[icg.CTL_RIGHT_MAG][0])
@scrollStopSide_(icg.SIDE_RIGHT, $(event.target))
	# Recompute sizes/positions of the four slider wrappers and the graph div
	# inside the wrapper. Layout order matters: `pos` accumulates left offsets
	# as we walk the controls, and the graph is inserted before the
	# CTL_RIGHT_MAG control so it sits between the two control pairs.
	relayout: () ->
		icg = IsadoreControlGraph
		h = @icgWrapper.innerHeight()
		# bpad/tpad: bottom/top padding reserved around the sliders.
		bpad=20
		tpad=10
		for controlWrapper in @controlWrappers
			controlWrapper.height(h-bpad-tpad)
		# vpad: horizontal gap between adjacent controls / the graph.
		vpad=20
		pos=vpad
		for ii in [0...@controlWrappers.length]
			if(ii == icg.CTL_RIGHT_MAG)
				#size graph
				@graphDiv.height(@icgWrapper.innerHeight()-tpad)
				# Width leaves symmetric `pos` margins on both sides.
				#TODO: Why do we need the -1 to make it fit?
				@graphDiv.width(@icgWrapper.innerWidth() - 2*pos-1)
				@graphDiv.css({top: tpad, left: pos+'px'})
				# Tell Dygraphs its canvas size changed.
				@plot.resize(@graphDiv.innerWidth(), @graphDiv.innerHeight())
				pos+=@graphDiv.outerWidth()+vpad
			controlWrapper = @controlWrappers[ii]
			controlWrapper.css({top: tpad, left: pos+'px'})
			pos+=controlWrapper.outerWidth()+vpad
updateOptions: (options) ->
@plot.updateOptions(options)
return this
xAxisRange: () ->
return @plot.xAxisRange()
###
Dual control graphs with range selector.
@param element The element to put the dual graph in
@param data The data [topdata, bottomdata, rangeselectordata]
@param IDCGOptions object with the following:
{
axisColors:[[topY1color, topY2color], [bottomY1color, bottomY2color]]
axisLabels:[[topY1Label, topY2Label], [bottomY1Label, bottomY2Label]]
seriesLabels:[[topSeries1label, topSeries2label,...], [bottomSeries1Label, ...]]
dataAxisMap:[[topSeries1AxisIndex, ...], [bottomSeries1AxisIndex, ...]]
}
Colors are html hex #FFAB02
AxisIndex 0 for y axis, 1 for y2 axis.
###
class window.IsadoreDualControlGraph
	###
	Two stacked IsadoreControlGraph plots (top/bottom) that share an x range,
	plus a third plain Dygraph acting as the range selector. Panning/zooming
	any plot is mirrored onto the others via a chained drawCallback.
	###
	@GRAPH_TOP: 0
	@GRAPH_BOTTOM: 1
	constructor: (element, data, IDCGOptions) ->
		self=this
		idcg=IsadoreDualControlGraph
		@parent = $(element)
		@idcgWrapper = $('<div class="idcg_wrapper"></div>')
		@graphDivs = [
			$('<div class="idcg_graph idcg_graph_top"></div>'),
			$('<div class="idcg_graph idcg_graph_bottom"></div>')
			$('<div class="idcg_graph idcg_graph_range_selector"></div>')
		]
		for div in @graphDivs
			@idcgWrapper.append(div)
		@parent.append(@idcgWrapper)
		options=[
			@genDygraphOptions_(IDCGOptions, idcg.GRAPH_TOP),
			@genDygraphOptions_(IDCGOptions, idcg.GRAPH_BOTTOM),
			@genRangeSelectorOptions_(IDCGOptions)
		]
		# Pan-only interaction model for the two data plots (no zoom rectangle).
		iModel= {
			mousedown: (event, g, context) ->
				context.initializeMouseDown(event, g, context)
				Dygraph.startPan(event, g, context)
			mousemove: (event, g, context) ->
				if (context.isPanning)
					Dygraph.movePan(event, g, context)
			mouseup: (event, g, context) ->
				if (context.isPanning)
					Dygraph.endPan(event, g, context)
		}
		options[0].interactionModel=iModel
		options[1].interactionModel=iModel
		@plots=[]
		for i in [0...2]
			@plots.push(
				new IsadoreControlGraph(
					@graphDivs[i]
					data[i]
					options[i]
				)
			)
		#Range selector
		@plots.push(
			new Dygraph(
				@graphDivs[2][0]
				data[2]
				options[2]
			)
		)
		# Chain a synchronizing drawCallback onto each plot so an x-range change
		# on one plot is propagated to the other two.
		# BUGFIX: wrap the loop body in `do` so each callback captures its own
		# altCallback; CoffeeScript loop variables are function-scoped, so
		# previously all three closures shared the last iteration's value.
		for i in [0...@graphDivs.length]
			do (i) =>
				altCallback = options[i].drawCallback
				@plots[i].updateOptions({
					drawCallback: (graph, inited) ->
						range=graph.xAxisRange()
						for plot in self.plots
							if(plot != graph)
								orange = plot.xAxisRange()
								if(range[1] - range[0] < 0)
									# Degenerate/inverted range: restore from a sibling plot.
									graph.updateOptions({dateWindow: [orange[0], orange[1]]})
									break
								if(orange[0] != range[0] or orange[1] != range[1])
									plot.updateOptions({dateWindow: [range[0], range[1]]})
						if(altCallback)
							altCallback(graph, inited)
				})
		# Position/magnify icons shown under the four sliders (placed in relayout).
		@icons = [
			$('<img class="idcg_icons" src="imgs/icon_position.png" alt="position"/>')
			$('<img class="idcg_icons" src="imgs/icon_magnify.png" alt="magnify"/>')
			$('<img class="idcg_icons" src="imgs/icon_magnify.png" alt="magnify"/>')
			$('<img class="idcg_icons" src="imgs/icon_position.png" alt="position"/>')
		]
		for icon in @icons
			@idcgWrapper.append(icon)
		@relayout()
	# Regenerate per-graph Dygraphs options from IDCG options and push them to
	# the top and bottom plots (the range selector keeps its original options).
	updateOptions: (options) ->
		idcg=IsadoreDualControlGraph
		optionsA = [
			@genDygraphOptions_(options, idcg.GRAPH_TOP)
			@genDygraphOptions_(options, idcg.GRAPH_BOTTOM)
		]
		@plots[idcg.GRAPH_TOP].updateOptions(optionsA[idcg.GRAPH_TOP])
		@plots[idcg.GRAPH_BOTTOM].updateOptions(optionsA[idcg.GRAPH_BOTTOM])
	# Add number to end of label if there are duplicates.
	fixedSeriesLabel_: (labels, idx) ->
		label = labels[idx]
		count = 1
		for i in [0...idx]
			if labels[i] == label
				count++
		if count > 1
			return (label + count)
		else
			return label
	###
	Build the Dygraphs option object for one of the two data plots.
	@param options IDCGOptions (axisColors/axisLabels/seriesLabels/dataAxisMap).
	@param graph GRAPH_TOP or GRAPH_BOTTOM.
	###
	genDygraphOptions_: (options, graph) ->
		idcg=IsadoreDualControlGraph
		# Series sharing an axis cycle through these stroke patterns.
		patterns=[null, Dygraph.DASHED_LINE, Dygraph.DOTTED_LINE, Dygraph.DOT_DASH_LINE]
		newOptions={
			legend: 'always'
			panEdgeFraction: .0001
			customBars: true
			axes: {
				y2: {}
			}
			ylabel: '<span style="color: '+options.axisColors[graph][0]+'">'+options.axisLabels[graph][0]+'</span>'
			y2label: '<span style="color: '+options.axisColors[graph][1]+'">'+options.axisLabels[graph][1]+'</span>'
			yAxisLabelWidth: 70
		}
		# Each series inherits the color of the axis it is mapped to.
		newOptions.colors=[]
		for i in [0...options.seriesLabels[graph].length]
			newOptions.colors.push(options.axisColors[graph][options.dataAxisMap[graph][i]])
		newOptions.labels=['Date']
		firstY2Label=null
		ycount = 0
		y2count = 0
		for i in [0...options.seriesLabels[graph].length]
			label=@fixedSeriesLabel_(options.seriesLabels[graph], i)
			newOptions.labels.push(label)
			if(options.dataAxisMap[graph][i]==1)
				# Later y2 series reference the first y2 series so Dygraphs groups
				# them all onto the secondary axis.
				if(firstY2Label)
					newOptions[label]={axis:firstY2Label, strokePattern: patterns[y2count%patterns.length]}
				else
					newOptions[label]={axis:{}, strokePattern: patterns[y2count%patterns.length]}
					firstY2Label=label
				y2count++
			else
				newOptions[label]={strokePattern: patterns[ycount%patterns.length]}
				ycount++
		# BUGFIX: was `idcg.BOTTOM_GRAPH`, which is undefined (the class constant
		# is GRAPH_BOTTOM), so the bottom plot's x labels were never suppressed.
		if(graph == idcg.GRAPH_BOTTOM)
			newOptions.xAxisLabelWidth = 0
		return newOptions
	# Options for the range-selector Dygraph: no axes, just the selector strip.
	genRangeSelectorOptions_: (options) ->
		newOptions = {
			drawYAxis: false
			drawXAxis: false
			customBars: true
			showRangeSelector: true
			rangeSelectorHeight: 40
			xAxisLabelWidth: 0
		}
		return newOptions
	# Recompute heights/positions of the two plots, the slider icons and the
	# range selector after the wrapper has been (re)sized.
	relayout: () ->
		rsHeight = $('.dygraph-rangesel-fgcanvas').height()
		h=(@idcgWrapper.innerHeight()-rsHeight)/2
		@graphDivs[0].height(h)
		@graphDivs[1].height(h)
		@graphDivs[2].height(rsHeight)
		@plots[0].relayout()
		@plots[1].relayout()
		for ii in [0...@icons.length]
			icon = @icons[ii]
			wrapper = @plots[0].controlWrappers[ii] #slider wrapper
			icon.css({top: wrapper.position().top+wrapper.height()+5, left: wrapper.position().left-3})
		#Range selector position
		# NOTE(review): the -110/+55 offsets align the selector with the top
		# plot's drawing area; they appear tuned by hand — confirm against CSS.
		@graphDivs[2].width(@plots[0].graphDiv.width()-110)
		@graphDivs[2].css({top: -20, left: @plots[0].graphDiv.position().left+55})
		@plots[2].resize(@graphDivs[2].width(), @graphDivs[2].height())
|
[
{
"context": " a recipe', ->\n newCocktail =\n name: 'Margarita'\n description: '5 ingredients...'\n\n i",
"end": 815,
"score": 0.9995349049568176,
"start": 806,
"tag": "NAME",
"value": "Margarita"
},
{
"context": "l = new @Cocktail\n @scope.cocktail.na... | spec/javascripts/controllers/CocktailAddCtrl_spec.coffee | baberthal/cocktails | 0 | #= require spec_helper
# Jasmine spec for CocktailAddCtrl, the Angular controller behind the
# "add cocktail" $mdDialog.
describe 'CocktailAddCtrl', ->
  beforeEach ->
    # setupController/@model/@loadFixtures/@templateExpectations presumably
    # come from spec_helper — TODO confirm.
    @setupController('CocktailAddCtrl')
    @mdDialog = @injector.get('$mdDialog')
    @Cocktail = @model('Cocktail')
    spyOn(@mdDialog, 'cancel')
    @loadFixtures()
    # The controller fetches the ingredient list on startup.
    @http.whenGET('/ingredients').respond(200, @ingredients)
    @templateExpectations()
  describe 'controller initialization', ->
    describe 'listing available ingredients', ->
      it 'sets up the list of available ingredients', ->
        expect(@scope.ingredients).toEqualData(@ingredients)
  describe '$mdDialog', ->
    it 'knows how to close the dialog', ->
      expect(@scope.cancel).toBeDefined()
      @scope.cancel()
      expect(@mdDialog.cancel).toHaveBeenCalled()
  describe 'saving a recipe', ->
    # Canned response payload for the POST below.
    newCocktail =
      name: 'Margarita'
      description: '5 ingredients...'
    it 'posts to the backend', ->
      @scope.cocktail = new @Cocktail
      @scope.cocktail.name = "Margarita"
      # NOTE(review): description here ('..') differs from the fixture above
      # ('...') — presumably intentional since only the response is asserted
      # indirectly, but verify.
      @scope.cocktail.description = '5 ingredients..'
      @scope.saveRecipe(@scope.cocktail)
      # NOTE(review): expectPOST is declared after saveRecipe; this relies on
      # $httpBackend deferring the request until flush() — confirm ordering.
      @http.expectPOST('/cocktails').respond(201,newCocktail)
      @http.flush()
      # Successful save redirects back to the root route.
      expect(@location.path()).toBe("/")
| 203440 | #= require spec_helper
describe 'CocktailAddCtrl', ->
beforeEach ->
@setupController('CocktailAddCtrl')
@mdDialog = @injector.get('$mdDialog')
@Cocktail = @model('Cocktail')
spyOn(@mdDialog, 'cancel')
@loadFixtures()
@http.whenGET('/ingredients').respond(200, @ingredients)
@templateExpectations()
describe 'controller initialization', ->
describe 'listing available ingredients', ->
it 'sets up the list of available ingredients', ->
expect(@scope.ingredients).toEqualData(@ingredients)
describe '$mdDialog', ->
it 'knows how to close the dialog', ->
expect(@scope.cancel).toBeDefined()
@scope.cancel()
expect(@mdDialog.cancel).toHaveBeenCalled()
describe 'saving a recipe', ->
newCocktail =
name: '<NAME>'
description: '5 ingredients...'
it 'posts to the backend', ->
@scope.cocktail = new @Cocktail
@scope.cocktail.name = "<NAME>"
@scope.cocktail.description = '5 ingredients..'
@scope.saveRecipe(@scope.cocktail)
@http.expectPOST('/cocktails').respond(201,newCocktail)
@http.flush()
expect(@location.path()).toBe("/")
| true | #= require spec_helper
describe 'CocktailAddCtrl', ->
beforeEach ->
@setupController('CocktailAddCtrl')
@mdDialog = @injector.get('$mdDialog')
@Cocktail = @model('Cocktail')
spyOn(@mdDialog, 'cancel')
@loadFixtures()
@http.whenGET('/ingredients').respond(200, @ingredients)
@templateExpectations()
describe 'controller initialization', ->
describe 'listing available ingredients', ->
it 'sets up the list of available ingredients', ->
expect(@scope.ingredients).toEqualData(@ingredients)
describe '$mdDialog', ->
it 'knows how to close the dialog', ->
expect(@scope.cancel).toBeDefined()
@scope.cancel()
expect(@mdDialog.cancel).toHaveBeenCalled()
describe 'saving a recipe', ->
newCocktail =
name: 'PI:NAME:<NAME>END_PI'
description: '5 ingredients...'
it 'posts to the backend', ->
@scope.cocktail = new @Cocktail
@scope.cocktail.name = "PI:NAME:<NAME>END_PI"
@scope.cocktail.description = '5 ingredients..'
@scope.saveRecipe(@scope.cocktail)
@http.expectPOST('/cocktails').respond(201,newCocktail)
@http.flush()
expect(@location.path()).toBe("/")
|
[
{
"context": "options = Observable [\n {name: Observable(\"Napoleon\"), date: \"1850 AD\"}\n {name: Observable(\"Ba",
"end": 3939,
"score": 0.999819815158844,
"start": 3931,
"tag": "NAME",
"value": "Napoleon"
},
{
"context": "on\"), date: \"1850 AD\"}\n {name: Ob... | test/select.coffee | STRd6/jadelet | 18 | describe "SELECT", ->
describe "with an array of basic types for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should generate options", ->
model =
options: [1, 2, 3]
value: 2
select = template(model)
assert.equal select.querySelectorAll("option").length, model.options.length
it "should have it's value set", ->
model =
options: [1, 2, 3]
value: 2
select = template(model)
assert.equal select.value, model.value
it "should pass the option to the value binding on a change event", (done) ->
model =
options: [1, 2, 3]
value: Observable(1)
model.value.observe (value) ->
# NOTE: The value is a memebr of the options array
assert typeof value is "number"
assert.equal value, 3
done()
select = template(model)
# NOTE: To simulate a selection by choosing value you must pass a string
select.value = "3"
assert.equal select.value, "3"
select.onchange()
it "should get the correct value when another bound input changes", ->
template = makeTemplate """
div
select(@value @options)
input(@value)
"""
model =
options: [1, 2, 3]
value: Observable 2
element = template(model)
input = element.querySelector("input")
select = element.querySelector("select")
input.value = "3"
input.oninput()
assert.equal model.value(), 3
assert.equal select.value, 3
model.value 1
assert.equal select.value, 1
describe "with an array of objects for options", ->
template = makeTemplate """
select(@value @options)
"""
options = [
{name: "yolo", value: "badical"}
{name: "wat", value: "noice"}
]
model =
options: options
value: options[0]
it "should generate options", ->
select = template(model)
assert.equal select.querySelectorAll("option").length, model.options.length
it "option names should be the name property of the object", ->
select = template(model)
names = Array::map.call select.querySelectorAll("option"), (o) -> o.text
names.forEach (name, i) ->
assert.equal name, model.options[i].name
it "option values should be the value property of the object", ->
select = template(model)
values = Array::map.call select.querySelectorAll("option"), (o) -> o.value
values.forEach (value, i) ->
assert.equal value, model.options[i].value
it "should have it's value set", ->
select = template(model)
# TODO: This isn't a great check
assert.equal select._value, model.value
it "should trigger a call to value binding when changing", (done) ->
model =
options: options
model.value = Observable options[0], model
model.value.observe (v) ->
assert v.name is "wat"
done()
select = template(model)
# Simulate a selection
select.value = "noice"
select.onchange()
describe "An observable array of objects without value properties", ->
template = makeTemplate """
select(@value @options)
"""
options = Observable [
{name: "foo"}
{name: "bar"}
{name: "baz"}
]
model =
options: Observable options
value: Observable options[0]
it "should update the selected item when the model changes and the options don't have value properties", ->
select = template(model)
assert.equal select.selectedIndex, 0
model.value model.options.get(1)
assert.equal select.selectedIndex, 1
describe "with objects that have an observable name property", ->
template = makeTemplate """
select(@value @options)
"""
it "should observe the name as the text of the value options", ->
options = Observable [
{name: Observable("Napoleon"), date: "1850 AD"}
{name: Observable("Barrack"), date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
optionElements = select.querySelectorAll("option")
assert.equal optionElements[0].textContent, "Napoleon"
options.get(0).name("Yolo")
assert.equal optionElements[0].textContent, "Yolo"
describe "with objects that have an observable value property", ->
template = makeTemplate """
select(@value @options)
"""
it "should observe the value as the value of the value options", ->
options = Observable [
{name: Observable("Napoleon"), value: Observable("1850 AD")}
{name: Observable("Barrack"), value: Observable("1995 AD")}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.value, "1850 AD"
options.get(0).value "YOLO"
assert.equal select.value, "YOLO"
describe "with an observable array for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should add options added to the observable array", ->
options = Observable [
{name: "Napoleon", date: "1850 AD"}
{name: "Barrack", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options.push name: "Test", date: "2014 AD"
assert.equal select.querySelectorAll("option").length, 3
it "should remove options removed from the observable array", ->
options = Observable [
{name: "Napoleon", date: "1850 AD"}
{name: "Barrack", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options.remove options.get(0)
assert.equal select.querySelectorAll("option").length, 1
it "should have it's value set", ->
options = Observable [
{name: "Napoleon", date: "1850 AD"}
{name: "Barrack", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
# TODO: This isn't a great check
assert.equal select._value, model.value
describe "with an object for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should have an option for each key", ->
options = Observable
nap: "Napoleon"
bar: "Barrack"
model =
options: options
value: "bar"
select = template model
assert.equal select.value, "bar"
it "should add options added to the observable object", ->
options = Observable
nap: "Napoleon"
bar: "Barrack"
model =
options: options
value: "bar"
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options Object.assign {}, options(), {test: "Test"}
assert.equal select.querySelectorAll("option").length, 3
it "should remove options removed from the observable object", ->
options = Observable
nap: "Napoleon"
bar: "Barrack"
model =
options: options
value: "bar"
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
delete options().bar
options Object.assign {}, options()
assert.equal select.querySelectorAll("option").length, 1
it "should observe the value as the value of the value options", ->
options = Observable
nap: Observable "Napoleon"
bar: Observable "Barrack"
model =
options: options
value: "bar"
select = template model
optionElements = select.querySelectorAll("option")
assert.equal optionElements[1].textContent, "Barrack"
options().bar "YOLO"
assert.equal optionElements[1].textContent, "YOLO"
| 90912 | describe "SELECT", ->
describe "with an array of basic types for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should generate options", ->
model =
options: [1, 2, 3]
value: 2
select = template(model)
assert.equal select.querySelectorAll("option").length, model.options.length
it "should have it's value set", ->
model =
options: [1, 2, 3]
value: 2
select = template(model)
assert.equal select.value, model.value
it "should pass the option to the value binding on a change event", (done) ->
model =
options: [1, 2, 3]
value: Observable(1)
model.value.observe (value) ->
# NOTE: The value is a memebr of the options array
assert typeof value is "number"
assert.equal value, 3
done()
select = template(model)
# NOTE: To simulate a selection by choosing value you must pass a string
select.value = "3"
assert.equal select.value, "3"
select.onchange()
it "should get the correct value when another bound input changes", ->
template = makeTemplate """
div
select(@value @options)
input(@value)
"""
model =
options: [1, 2, 3]
value: Observable 2
element = template(model)
input = element.querySelector("input")
select = element.querySelector("select")
input.value = "3"
input.oninput()
assert.equal model.value(), 3
assert.equal select.value, 3
model.value 1
assert.equal select.value, 1
describe "with an array of objects for options", ->
template = makeTemplate """
select(@value @options)
"""
options = [
{name: "yolo", value: "badical"}
{name: "wat", value: "noice"}
]
model =
options: options
value: options[0]
it "should generate options", ->
select = template(model)
assert.equal select.querySelectorAll("option").length, model.options.length
it "option names should be the name property of the object", ->
select = template(model)
names = Array::map.call select.querySelectorAll("option"), (o) -> o.text
names.forEach (name, i) ->
assert.equal name, model.options[i].name
it "option values should be the value property of the object", ->
select = template(model)
values = Array::map.call select.querySelectorAll("option"), (o) -> o.value
values.forEach (value, i) ->
assert.equal value, model.options[i].value
it "should have it's value set", ->
select = template(model)
# TODO: This isn't a great check
assert.equal select._value, model.value
it "should trigger a call to value binding when changing", (done) ->
model =
options: options
model.value = Observable options[0], model
model.value.observe (v) ->
assert v.name is "wat"
done()
select = template(model)
# Simulate a selection
select.value = "noice"
select.onchange()
describe "An observable array of objects without value properties", ->
template = makeTemplate """
select(@value @options)
"""
options = Observable [
{name: "foo"}
{name: "bar"}
{name: "baz"}
]
model =
options: Observable options
value: Observable options[0]
it "should update the selected item when the model changes and the options don't have value properties", ->
select = template(model)
assert.equal select.selectedIndex, 0
model.value model.options.get(1)
assert.equal select.selectedIndex, 1
describe "with objects that have an observable name property", ->
template = makeTemplate """
select(@value @options)
"""
it "should observe the name as the text of the value options", ->
options = Observable [
{name: Observable("<NAME>"), date: "1850 AD"}
{name: Observable("<NAME>"), date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
optionElements = select.querySelectorAll("option")
assert.equal optionElements[0].textContent, "Napoleon"
options.get(0).name("Yolo")
assert.equal optionElements[0].textContent, "Yolo"
describe "with objects that have an observable value property", ->
template = makeTemplate """
select(@value @options)
"""
it "should observe the value as the value of the value options", ->
options = Observable [
{name: Observable("<NAME>"), value: Observable("1850 AD")}
{name: Observable("<NAME>"), value: Observable("1995 AD")}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.value, "1850 AD"
options.get(0).value "YOLO"
assert.equal select.value, "YOLO"
describe "with an observable array for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should add options added to the observable array", ->
options = Observable [
{name: "<NAME>", date: "1850 AD"}
{name: "<NAME>", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options.push name: "Test", date: "2014 AD"
assert.equal select.querySelectorAll("option").length, 3
it "should remove options removed from the observable array", ->
options = Observable [
{name: "<NAME>", date: "1850 AD"}
{name: "<NAME>", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options.remove options.get(0)
assert.equal select.querySelectorAll("option").length, 1
it "should have it's value set", ->
options = Observable [
{name: "<NAME>", date: "1850 AD"}
{name: "<NAME>", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
# TODO: This isn't a great check
assert.equal select._value, model.value
describe "with an object for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should have an option for each key", ->
options = Observable
nap: "N<NAME>"
bar: "Barrack"
model =
options: options
value: "bar"
select = template model
assert.equal select.value, "bar"
it "should add options added to the observable object", ->
options = Observable
nap: "N<NAME>"
bar: "Barrack"
model =
options: options
value: "bar"
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options Object.assign {}, options(), {test: "Test"}
assert.equal select.querySelectorAll("option").length, 3
it "should remove options removed from the observable object", ->
options = Observable
nap: "N<NAME>"
bar: "Barrack"
model =
options: options
value: "bar"
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
delete options().bar
options Object.assign {}, options()
assert.equal select.querySelectorAll("option").length, 1
it "should observe the value as the value of the value options", ->
options = Observable
nap: Observable "N<NAME>"
bar: Observable "Barrack"
model =
options: options
value: "bar"
select = template model
optionElements = select.querySelectorAll("option")
assert.equal optionElements[1].textContent, "Barrack"
options().bar "YOLO"
assert.equal optionElements[1].textContent, "YOLO"
| true | describe "SELECT", ->
describe "with an array of basic types for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should generate options", ->
model =
options: [1, 2, 3]
value: 2
select = template(model)
assert.equal select.querySelectorAll("option").length, model.options.length
it "should have it's value set", ->
model =
options: [1, 2, 3]
value: 2
select = template(model)
assert.equal select.value, model.value
it "should pass the option to the value binding on a change event", (done) ->
model =
options: [1, 2, 3]
value: Observable(1)
model.value.observe (value) ->
# NOTE: The value is a memebr of the options array
assert typeof value is "number"
assert.equal value, 3
done()
select = template(model)
# NOTE: To simulate a selection by choosing value you must pass a string
select.value = "3"
assert.equal select.value, "3"
select.onchange()
it "should get the correct value when another bound input changes", ->
template = makeTemplate """
div
select(@value @options)
input(@value)
"""
model =
options: [1, 2, 3]
value: Observable 2
element = template(model)
input = element.querySelector("input")
select = element.querySelector("select")
input.value = "3"
input.oninput()
assert.equal model.value(), 3
assert.equal select.value, 3
model.value 1
assert.equal select.value, 1
describe "with an array of objects for options", ->
template = makeTemplate """
select(@value @options)
"""
options = [
{name: "yolo", value: "badical"}
{name: "wat", value: "noice"}
]
model =
options: options
value: options[0]
it "should generate options", ->
select = template(model)
assert.equal select.querySelectorAll("option").length, model.options.length
it "option names should be the name property of the object", ->
select = template(model)
names = Array::map.call select.querySelectorAll("option"), (o) -> o.text
names.forEach (name, i) ->
assert.equal name, model.options[i].name
it "option values should be the value property of the object", ->
select = template(model)
values = Array::map.call select.querySelectorAll("option"), (o) -> o.value
values.forEach (value, i) ->
assert.equal value, model.options[i].value
it "should have it's value set", ->
select = template(model)
# TODO: This isn't a great check
assert.equal select._value, model.value
it "should trigger a call to value binding when changing", (done) ->
model =
options: options
model.value = Observable options[0], model
model.value.observe (v) ->
assert v.name is "wat"
done()
select = template(model)
# Simulate a selection
select.value = "noice"
select.onchange()
describe "An observable array of objects without value properties", ->
template = makeTemplate """
select(@value @options)
"""
options = Observable [
{name: "foo"}
{name: "bar"}
{name: "baz"}
]
model =
options: Observable options
value: Observable options[0]
it "should update the selected item when the model changes and the options don't have value properties", ->
select = template(model)
assert.equal select.selectedIndex, 0
model.value model.options.get(1)
assert.equal select.selectedIndex, 1
describe "with objects that have an observable name property", ->
template = makeTemplate """
select(@value @options)
"""
it "should observe the name as the text of the value options", ->
options = Observable [
{name: Observable("PI:NAME:<NAME>END_PI"), date: "1850 AD"}
{name: Observable("PI:NAME:<NAME>END_PI"), date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
optionElements = select.querySelectorAll("option")
assert.equal optionElements[0].textContent, "Napoleon"
options.get(0).name("Yolo")
assert.equal optionElements[0].textContent, "Yolo"
describe "with objects that have an observable value property", ->
template = makeTemplate """
select(@value @options)
"""
it "should observe the value as the value of the value options", ->
options = Observable [
{name: Observable("PI:NAME:<NAME>END_PI"), value: Observable("1850 AD")}
{name: Observable("PI:NAME:<NAME>END_PI"), value: Observable("1995 AD")}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.value, "1850 AD"
options.get(0).value "YOLO"
assert.equal select.value, "YOLO"
describe "with an observable array for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should add options added to the observable array", ->
options = Observable [
{name: "PI:NAME:<NAME>END_PI", date: "1850 AD"}
{name: "PI:NAME:<NAME>END_PI", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options.push name: "Test", date: "2014 AD"
assert.equal select.querySelectorAll("option").length, 3
it "should remove options removed from the observable array", ->
options = Observable [
{name: "PI:NAME:<NAME>END_PI", date: "1850 AD"}
{name: "PI:NAME:<NAME>END_PI", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options.remove options.get(0)
assert.equal select.querySelectorAll("option").length, 1
it "should have it's value set", ->
options = Observable [
{name: "PI:NAME:<NAME>END_PI", date: "1850 AD"}
{name: "PI:NAME:<NAME>END_PI", date: "1995 AD"}
]
model =
options: options
value: options.get(0)
select = template(model)
# TODO: This isn't a great check
assert.equal select._value, model.value
describe "with an object for options", ->
template = makeTemplate """
select(@value @options)
"""
it "should have an option for each key", ->
options = Observable
nap: "NPI:NAME:<NAME>END_PI"
bar: "Barrack"
model =
options: options
value: "bar"
select = template model
assert.equal select.value, "bar"
it "should add options added to the observable object", ->
options = Observable
nap: "NPI:NAME:<NAME>END_PI"
bar: "Barrack"
model =
options: options
value: "bar"
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
options Object.assign {}, options(), {test: "Test"}
assert.equal select.querySelectorAll("option").length, 3
it "should remove options removed from the observable object", ->
options = Observable
nap: "NPI:NAME:<NAME>END_PI"
bar: "Barrack"
model =
options: options
value: "bar"
select = template(model)
assert.equal select.querySelectorAll("option").length, 2
delete options().bar
options Object.assign {}, options()
assert.equal select.querySelectorAll("option").length, 1
it "should observe the value as the value of the value options", ->
options = Observable
nap: Observable "NPI:NAME:<NAME>END_PI"
bar: Observable "Barrack"
model =
options: options
value: "bar"
select = template model
optionElements = select.querySelectorAll("option")
assert.equal optionElements[1].textContent, "Barrack"
options().bar "YOLO"
assert.equal optionElements[1].textContent, "YOLO"
|
[
{
"context": "@\n name: name\n password: password\n \n model.auth callback\n",
"end": 1078,
"score": 0.999370276927948,
"start": 1070,
"tag": "PASSWORD",
"value": "password"
}
] | models/plugins/auth.coffee | cncolder/vcvs | 0 | { SchemaType: { ValidatorError } } = require 'mongoose'
crypto = require 'crypto'
sha2560 = (s) ->
for i in [ 1..10 ]
s = crypto.createHash('sha256').update(s).digest 'hex'
s
module.exports = (schema, options) ->
schema.add
password:
type: String
lowercase: true
trim: true
match: /^[\da-f]{64}$/
select: false
set: (value) ->
if value?.length then sha2560 value.toLowerCase() else undefined
schema.methods.auth = (callback) ->
{ name, password } = @
@constructor.findOne
name: name
'+password'
(err, doc) ->
return callback new ValidatorError 'name', 'exists' if not doc?
return callback new ValidatorError 'password', 'equal' if doc.password isnt password
callback err, doc
schema.statics.auth = (name, password, callback) ->
model = new @
name: name
password: password
model.auth callback
| 124400 | { SchemaType: { ValidatorError } } = require 'mongoose'
crypto = require 'crypto'
sha2560 = (s) ->
for i in [ 1..10 ]
s = crypto.createHash('sha256').update(s).digest 'hex'
s
module.exports = (schema, options) ->
schema.add
password:
type: String
lowercase: true
trim: true
match: /^[\da-f]{64}$/
select: false
set: (value) ->
if value?.length then sha2560 value.toLowerCase() else undefined
schema.methods.auth = (callback) ->
{ name, password } = @
@constructor.findOne
name: name
'+password'
(err, doc) ->
return callback new ValidatorError 'name', 'exists' if not doc?
return callback new ValidatorError 'password', 'equal' if doc.password isnt password
callback err, doc
schema.statics.auth = (name, password, callback) ->
model = new @
name: name
password: <PASSWORD>
model.auth callback
| true | { SchemaType: { ValidatorError } } = require 'mongoose'
crypto = require 'crypto'
sha2560 = (s) ->
for i in [ 1..10 ]
s = crypto.createHash('sha256').update(s).digest 'hex'
s
module.exports = (schema, options) ->
schema.add
password:
type: String
lowercase: true
trim: true
match: /^[\da-f]{64}$/
select: false
set: (value) ->
if value?.length then sha2560 value.toLowerCase() else undefined
schema.methods.auth = (callback) ->
{ name, password } = @
@constructor.findOne
name: name
'+password'
(err, doc) ->
return callback new ValidatorError 'name', 'exists' if not doc?
return callback new ValidatorError 'password', 'equal' if doc.password isnt password
callback err, doc
schema.statics.auth = (name, password, callback) ->
model = new @
name: name
password: PI:PASSWORD:<PASSWORD>END_PI
model.auth callback
|
[
{
"context": "ist of passed items', ->\n\n props = { query: 'mars', items }\n helpers.dropboxItemsTest(\n ",
"end": 1269,
"score": 0.635639488697052,
"start": 1265,
"tag": "NAME",
"value": "mars"
},
{
"context": "with selected item', ->\n\n props = { query : 'mar... | client/activity/lib/components/chatinputwidget/test/searchdropbox.coffee | ezgikaysi/koding | 1 | kd = require 'kd'
React = require 'kd-react'
expect = require 'expect'
toImmutable = require 'app/util/toImmutable'
SearchDropbox = require '../searchdropbox'
helpers = require './helpers'
mockingjay = require '../../../../../mocks/mockingjay'
describe 'SearchDropbox', ->
items = toImmutable [
{ message : mockingjay.getMockMessage 'Life on Mars', { id : '1' } }
{ message : mockingjay.getMockMessage 'Expedition on Mars', { id : '2' } }
]
afterEach -> helpers.clearDropboxes()
describe '::render', ->
it 'renders error if query is not empty but items are empty', ->
dropbox = helpers.renderDropbox { query: 'test' }, SearchDropbox
expect(dropbox.props.visible).toBe yes
content = dropbox.getContentElement()
error = content.querySelector '.ErrorDropboxItem'
expect(error).toExist()
it 'renders "continue typing" message if query is empty', ->
dropbox = helpers.renderDropbox {}, SearchDropbox
expect(dropbox.props.visible).toBe yes
content = dropbox.getContentElement()
error = content.querySelector '.emptyQueryMessage'
expect(error).toExist()
it 'renders dropbox with list of passed items', ->
props = { query: 'mars', items }
helpers.dropboxItemsTest(
props,
SearchDropbox,
(item, itemData) ->
body = item.querySelector 'article'
expect(body).toExist()
expect(body.textContent).toEqual itemData.getIn [ 'message', 'body' ]
)
it 'renders dropbox with selected item', ->
props = { query : 'mars', items, selectedIndex : 1 }
helpers.dropboxSelectedItemTest props, SearchDropbox
describe '::onItemSelected', ->
it 'should be called when dropbox item is hovered', ->
props = { query : 'mars', items, selectedIndex : 0 }
helpers.dropboxSelectedItemCallbackTest props, SearchDropbox
describe '::onItemConfirmed', ->
it 'should be called when dropbox item is clicked', ->
props = { query : 'mars', items, selectedIndex : 1 }
helpers.dropboxConfirmedItemCallbackTest props, SearchDropbox
| 68382 | kd = require 'kd'
React = require 'kd-react'
expect = require 'expect'
toImmutable = require 'app/util/toImmutable'
SearchDropbox = require '../searchdropbox'
helpers = require './helpers'
mockingjay = require '../../../../../mocks/mockingjay'
describe 'SearchDropbox', ->
items = toImmutable [
{ message : mockingjay.getMockMessage 'Life on Mars', { id : '1' } }
{ message : mockingjay.getMockMessage 'Expedition on Mars', { id : '2' } }
]
afterEach -> helpers.clearDropboxes()
describe '::render', ->
it 'renders error if query is not empty but items are empty', ->
dropbox = helpers.renderDropbox { query: 'test' }, SearchDropbox
expect(dropbox.props.visible).toBe yes
content = dropbox.getContentElement()
error = content.querySelector '.ErrorDropboxItem'
expect(error).toExist()
it 'renders "continue typing" message if query is empty', ->
dropbox = helpers.renderDropbox {}, SearchDropbox
expect(dropbox.props.visible).toBe yes
content = dropbox.getContentElement()
error = content.querySelector '.emptyQueryMessage'
expect(error).toExist()
it 'renders dropbox with list of passed items', ->
props = { query: '<NAME>', items }
helpers.dropboxItemsTest(
props,
SearchDropbox,
(item, itemData) ->
body = item.querySelector 'article'
expect(body).toExist()
expect(body.textContent).toEqual itemData.getIn [ 'message', 'body' ]
)
it 'renders dropbox with selected item', ->
props = { query : '<NAME>', items, selectedIndex : 1 }
helpers.dropboxSelectedItemTest props, SearchDropbox
describe '::onItemSelected', ->
it 'should be called when dropbox item is hovered', ->
props = { query : 'mars', items, selectedIndex : 0 }
helpers.dropboxSelectedItemCallbackTest props, SearchDropbox
describe '::onItemConfirmed', ->
it 'should be called when dropbox item is clicked', ->
props = { query : 'mars', items, selectedIndex : 1 }
helpers.dropboxConfirmedItemCallbackTest props, SearchDropbox
| true | kd = require 'kd'
React = require 'kd-react'
expect = require 'expect'
toImmutable = require 'app/util/toImmutable'
SearchDropbox = require '../searchdropbox'
helpers = require './helpers'
mockingjay = require '../../../../../mocks/mockingjay'
describe 'SearchDropbox', ->
items = toImmutable [
{ message : mockingjay.getMockMessage 'Life on Mars', { id : '1' } }
{ message : mockingjay.getMockMessage 'Expedition on Mars', { id : '2' } }
]
afterEach -> helpers.clearDropboxes()
describe '::render', ->
it 'renders error if query is not empty but items are empty', ->
dropbox = helpers.renderDropbox { query: 'test' }, SearchDropbox
expect(dropbox.props.visible).toBe yes
content = dropbox.getContentElement()
error = content.querySelector '.ErrorDropboxItem'
expect(error).toExist()
it 'renders "continue typing" message if query is empty', ->
dropbox = helpers.renderDropbox {}, SearchDropbox
expect(dropbox.props.visible).toBe yes
content = dropbox.getContentElement()
error = content.querySelector '.emptyQueryMessage'
expect(error).toExist()
it 'renders dropbox with list of passed items', ->
props = { query: 'PI:NAME:<NAME>END_PI', items }
helpers.dropboxItemsTest(
props,
SearchDropbox,
(item, itemData) ->
body = item.querySelector 'article'
expect(body).toExist()
expect(body.textContent).toEqual itemData.getIn [ 'message', 'body' ]
)
it 'renders dropbox with selected item', ->
props = { query : 'PI:NAME:<NAME>END_PI', items, selectedIndex : 1 }
helpers.dropboxSelectedItemTest props, SearchDropbox
describe '::onItemSelected', ->
it 'should be called when dropbox item is hovered', ->
props = { query : 'mars', items, selectedIndex : 0 }
helpers.dropboxSelectedItemCallbackTest props, SearchDropbox
describe '::onItemConfirmed', ->
it 'should be called when dropbox item is clicked', ->
props = { query : 'mars', items, selectedIndex : 1 }
helpers.dropboxConfirmedItemCallbackTest props, SearchDropbox
|
[
{
"context": ").getGridOrBoard().columnConfig.fields).toEqual ['HelloKitty']\n\n it 'should have the rowSettings field',",
"end": 6435,
"score": 0.6690595746040344,
"start": 6430,
"tag": "NAME",
"value": "Hello"
},
{
"context": "dOrBoard().columnConfig.fields).toEqual ['HelloKitty']... | test/spec/iterationtrackingboard/IterationTrackingBoardAppSpec.coffee | KilobytesandBits/app-catalog | 0 | Ext = window.Ext4 || window.Ext
Ext.require [
'Rally.apps.iterationtrackingboard.IterationTrackingBoardApp'
'Rally.ui.gridboard.GridBoard'
'Rally.util.DateTime'
'Rally.app.Context',
'Rally.domain.Subscription'
]
describe 'Rally.apps.iterationtrackingboard.IterationTrackingBoardApp', ->
helpers
createApp: (config = {}, dndRankEnabled = true) ->
now = new Date(1384305300 * 1000);
tomorrow = Rally.util.DateTime.add(now, 'day', 1)
nextDay = Rally.util.DateTime.add(tomorrow, 'day', 1)
dayAfter = Rally.util.DateTime.add(nextDay, 'day', 1)
@iterationData = [
{Name:'Iteration 1', StartDate: now, EndDate: tomorrow}
{Name:'Iteration 2', StartDate: nextDay, EndDate: dayAfter}
]
@iterationRecord = @mom.getRecord('iteration', values: @iterationData[0])
scopeRecord = if Ext.isDefined(config?.iterationRecord) then config.iterationRecord else @iterationRecord
@app = Ext.create('Rally.apps.iterationtrackingboard.IterationTrackingBoardApp', Ext.apply(
context: Ext.create('Rally.app.Context',
initialValues:
timebox: Ext.create 'Rally.app.TimeboxScope', type: 'iteration', record: scopeRecord
project:
_ref: @projectRef
workspace:
WorkspaceConfiguration:
DragDropRankingEnabled: dndRankEnabled
WorkDays: "Monday,Friday"
subscription: Rally.environment.getContext().getSubscription()
),
renderTo: 'testDiv'
height: 400
, config))
@waitForComponentReady(@app)
getIterationFilter: ->
iteration = @iterationData[0]
[
{ property: 'Iteration.Name', operator: '=', value: iteration.Name }
{ property: "Iteration.StartDate", operator: '=', value: Rally.util.DateTime.toIsoString(iteration.StartDate) }
{ property: "Iteration.EndDate", operator: '=', value: Rally.util.DateTime.toIsoString(iteration.EndDate) }
]
stubRequests: ->
@ajax.whenQueryingAllowedValues('userstory', 'ScheduleState').respondWith(["Defined", "In-Progress", "Completed", "Accepted"]);
@ajax.whenQuerying('artifact').respondWith [{
RevisionHistory:
_ref: '/revisionhistory/1'
}]
toggleToBoard: ->
@app.gridboard.setToggleState('board')
toggleToGrid: ->
@app.gridboard.setToggleState('grid')
stubFeatureToggle: (toggles, value = true) ->
stub = @stub(Rally.app.Context.prototype, 'isFeatureEnabled');
stub.withArgs(toggle).returns(value) for toggle in toggles
stub
beforeEach ->
@defaultToggleState = Rally.ui.gridboard.GridBoard.prototype.toggleState
Rally.ui.gridboard.GridBoard.prototype.toggleState = 'board' # tests assume board is default view
@ajax.whenReading('project').respondWith {
TeamMembers: []
Editors: []
}
@ajax.whenQuerying('iteration').respondWith([],
schema:
properties:
EndDate:
format:
tzOffset: 0
)
@stubRequests()
@tooltipHelper = new Helpers.TooltipHelper this
afterEach ->
@app?.destroy()
Rally.ui.gridboard.GridBoard.prototype.toggleState = @defaultToggleState
it 'should use anchor layout by default', ->
@createApp().then =>
expect(@app.layout.$className).toBe 'Ext.layout.container.Anchor'
it 'should not render a header', ->
try
@createApp().then =>
header = @app.down('container[cls=header]')
expect(header == null).toBe true
it 'sets current view on viewchange', ->
@createApp().then =>
removeSpy = @spy(@app, 'remove')
@app.down('#gridBoard').fireEvent 'viewchange'
expect(removeSpy).toHaveBeenCalledOnce()
expect(removeSpy).toHaveBeenCalledWith 'gridBoard'
expect(@app.down('#gridBoard')).toBeDefined()
it 'resets view on scope change', ->
@createApp().then =>
removeSpy = @spy(@app, 'remove')
newScope = Ext.create('Rally.app.TimeboxScope',
record: @mom.getRecord('iteration', values: @iterationData[1])
)
@app.onTimeboxScopeChange newScope
expect(removeSpy).toHaveBeenCalledOnce()
expect(removeSpy).toHaveBeenCalledWith 'gridBoard'
expect(@app.down('#gridBoard')).toBeDefined()
it 'fires storecurrentpagereset on scope change', ->
@createApp().then =>
treeGrid = Ext.create 'Ext.Component'
downStub = @stub(@app, 'down').withArgs('rallytreegrid').returns treeGrid
storeCurrentPageResetStub = @stub()
@app.down('rallytreegrid').on 'storecurrentpagereset', storeCurrentPageResetStub
newScope = Ext.create('Rally.app.TimeboxScope',
record: @mom.getRecord('iteration', values: @iterationData[1])
)
@app.onTimeboxScopeChange newScope
expect(storeCurrentPageResetStub).toHaveBeenCalledOnce()
describe 'stats banner', ->
it 'should show showStatsBanner settings field when app IS a full page app', ->
@createApp(isFullPageApp: true).then =>
expect(_.find(@app.getUserSettingsFields(), {xtype: 'rallystatsbannersettingsfield'})).toBeDefined()
it 'should NOT show showStatsBanner settings field when app IS NOT a full page app', ->
@createApp(isFullPageApp: false).then =>
expect(_.find(@app.getUserSettingsFields(), {xtype: 'rallystatsbannersettingsfield'})).not.toBeDefined()
it 'fires contentupdated event after board load', ->
contentUpdatedHandlerStub = @stub()
@createApp(
listeners:
contentupdated: contentUpdatedHandlerStub
).then =>
contentUpdatedHandlerStub.reset()
@app.gridboard.fireEvent('load')
expect(contentUpdatedHandlerStub).toHaveBeenCalledOnce()
it 'should include PortfolioItem in columnConfig.additionalFetchFields', ->
@createApp().then =>
expect(@app.gridboard.getGridOrBoard().columnConfig.additionalFetchFields).toContain 'PortfolioItem'
it 'should have a default card fields setting', ->
@createApp().then =>
expect(@app.down('rallygridboard').getGridOrBoard().columnConfig.fields).toEqual ['Parent', 'Tasks', 'Defects', 'Discussion', 'PlanEstimate', 'Iteration']
it 'should have use the cardFields setting if available', ->
@createApp(
settings:
cardFields: 'HelloKitty'
).then =>
expect(@app.down('rallygridboard').getGridOrBoard().columnConfig.fields).toEqual ['HelloKitty']
it 'should have the rowSettings field', ->
@createApp().then =>
expect(_.find(@app.getSettingsFields(), {xtype: 'rowsettingsfield'})).toBeDefined()
it 'adds the rowConfig property to the boardConfig', ->
@createApp(
settings:
showRows: true
).then =>
expect(@app.gridboard.getGridOrBoard().config.rowConfig).toBeDefined()
it 'adds the requiresModelSpecificFilters property to the boardConfig', ->
@createApp().then =>
expect(@app.gridboard.getGridOrBoard().columnConfig.requiresModelSpecificFilters).toBe false
it 'should show the field picker in board mode', ->
@createApp().then =>
@toggleToBoard()
expect(@app.down('#fieldpickerbtn').isVisible()).toBe true
it 'should enable bulk edit', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().enableBulkEdit).toBe true
it 'should show a treegrid when treegrid toggled on', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('rallytreegrid')).not.toBeNull()
expect(@app.down('rallygrid')).toBeNull()
describe 'iteration filtering', ->
describe 'with a scope', ->
it 'should filter the grid to the currently selected iteration', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith filters: @getIterationFilter()
it 'should filter the board to the currently selected iteration', ->
requests = @stubRequests()
@createApp().then =>
@toggleToBoard()
expect(request).toBeWsapiRequestWith(filters: @getIterationFilter()) for request in requests
describe 'unscheduled', ->
helpers
createLeafStoriesOnlyFilter: ->
storyTypeDefOid = Rally.test.mock.data.WsapiModelFactory.getModel('UserStory').typeDefOid
Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
value: storyTypeDefOid
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'DirectChildrenCount'
value: 0
)).or(Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
operator: '!='
value: storyTypeDefOid
))
createUnassociatedDefectsOnlyFilter: ->
defectTypeDefOid = Rally.test.mock.data.WsapiModelFactory.getModel('Defect').typeDefOid
Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
value: defectTypeDefOid
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'TestCase'
operator: '='
value: null
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'Requirement.Iteration'
operator: '!='
value: null
).or(Ext.create('Rally.data.wsapi.Filter',
property: 'Requirement'
operator: '='
value: null
)))
).or(Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
operator: '!='
value: defectTypeDefOid
))
describe 'stories', ->
it 'should exclude epic stories from the grid', ->
requestStub = @stubRequests()
@createApp(iterationRecord: null).then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith
filters: [@createLeafStoriesOnlyFilter()]
it 'should not attach leaf-stories-only filter if iteration is not null', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).not.toBeWsapiRequestWith
filters: [@createLeafStoriesOnlyFilter()]
describe 'defects', ->
it 'should exclude associated defects from the grid', ->
requestStub = @stubRequests()
@createApp(iterationRecord: null).then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith
filters: [@createUnassociatedDefectsOnlyFilter()]
it 'should not attach unassociated-defects-only filter if iteration is not null', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).not.toBeWsapiRequestWith
filters: [@createUnassociatedDefectsOnlyFilter()]
describe 'tree grid config', ->
it 'returns the columns with the FormattedID removed', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().initialConfig.columnCfgs).toEqual ['Name', 'ScheduleState', 'Blocked', 'PlanEstimate', 'Tasks', 'TaskEstimateTotal', 'TaskRemainingTotal', 'Owner', 'Defects', 'Discussion']
it 'should include test sets', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('rallytreegrid').getStore().parentTypes).toContain 'testset'
it 'should include dataContext', ->
buildSpy = @spy(Rally.data.wsapi.TreeStoreBuilder::, 'build')
@createApp().then (app) ->
expect(buildSpy.getCall(0).args[0].context).toEqual app.getContext().getDataContext()
it 'sets the expandAllInColumnHeaderEnabled to true', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().initialConfig.expandAllInColumnHeaderEnabled).toBe true
it 'should fetch PlanEstimate, Release and Iteration', ->
@createApp().then =>
@toggleToGrid()
store = @app.down('rallytreegrid').getStore()
expect(store.fetch).toContain 'PlanEstimate'
expect(store.fetch).toContain 'Release'
expect(store.fetch).toContain 'Iteration'
it 'should pass in enableAddPlusNewChildStories to inlineAddRowExpander plugin', ->
@createApp().then =>
@toggleToGrid()
inlineAddRowExpander = _.find(@app.down('rallytreegrid').plugins, {'ptype': 'rallyinlineaddrowexpander'})
expect(inlineAddRowExpander.enableAddPlusNewChildStories).toBe false
describe 'toggle grid/board cls to ensure overflow-y gets set for fixed header plugin', ->
it 'should add board-toggled class to app on initial load in board view', ->
@stub(Rally.ui.gridboard.GridBoard::, 'toggleState', 'board')
@createApp().then =>
expect(@app.getEl().dom.className).toContain 'board-toggled'
it 'should add board-toggled class to app when toggled to board view', ->
@createApp().then =>
@toggleToBoard()
expect(@app.getEl().dom.className).toContain 'board-toggled'
it 'should add grid-toggled class to app when toggled to grid view', ->
@createApp().then =>
@toggleToGrid()
expect(@app.getEl().dom.className).toContain 'grid-toggled'
describe 'sizing', ->
it 'should set an initial gridboard height', ->
@createApp().then =>
expect(@app.down('rallygridboard').getHeight()).toBe @app._getAvailableGridBoardHeight()
it 'should update the grid or board height', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
setHeightSpy = @spy gridBoard, 'setHeight'
currentHeight = gridBoard.getHeight()
@app.setHeight @app.getHeight() + 10
@waitForCallback(setHeightSpy).then =>
expect(gridBoard.getHeight()).toBe currentHeight + 10
describe 'custom filter popover', ->
it 'should add common storeConfig to gridboard', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
expect(gridBoard.storeConfig.filters.length).toBe 1
expect(gridBoard.storeConfig.filters[0].toString()).toBe @app.getContext().getTimeboxScope().getQueryFilter().toString()
it 'should use rallygridboard custom filter control', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardcustomfiltercontrol'
expect(plugin).toBeDefined()
expect(plugin.filterControlConfig.stateful).toBe true
expect(plugin.filterControlConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-custom-filter-button')
expect(plugin.showOwnerFilter).toBe true
expect(plugin.ownerFilterControlConfig.stateful).toBe true
expect(plugin.ownerFilterControlConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-owner-filter')
it 'should include the Milestones field in the available Fields', ->
@createApp().then =>
filterPlugin = _.find(@app.gridboard.plugins, ptype: 'rallygridboardcustomfiltercontrol')
expect(_.contains(filterPlugin.filterControlConfig.whiteListFields, 'Milestones')).toBe true
describe 'filtering panel plugin', ->
helpers
getPlugin: ->
gridBoard = @app.down 'rallygridboard'
_.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardinlinefiltercontrol'
beforeEach ->
@stubFeatureToggle ['F7336_ADVANCED_FILTERING'], true
it 'should use rallygridboard filtering plugin', ->
@createApp().then =>
expect(@getPlugin()).toBeDefined()
it 'should default to inline when a full page app', ->
@createApp().then =>
expect(@getPlugin().inline).toBe true
it 'should set inline false when NOT a full page app', ->
@createApp(isFullPageApp: false).then =>
expect(@getPlugin().inline).toBe false
describe 'quick filters', ->
it 'should add filters for search, owner and schedulestate', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
expect(config.fields[0]).toBe 'ArtifactSearch'
expect(config.fields[1]).toBe 'Owner'
expect(config.fields[2].name).toBe 'ScheduleState'
it 'schedulestate should be multiselect', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
expect(config.fields[2].multiSelect).toBe true
expect(config.fields[2].allowClear).toBe false
it 'should return an empty schedulestate filter', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
cmp = config.fields[2]
cmp.lastValue = []
expect(cmp.getFilter()).toBeNull()
it 'should return an or\'ed schedulestate filter', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
cmp = config.fields[2]
cmp.lastValue = ['In-Progress', 'Completed']
expect(cmp.getFilter().toString()).toBe '((ScheduleState = "In-Progress") OR (ScheduleState = "Completed"))'
describe 'shared view plugin', ->
it 'should use rallygridboard shared view plugin', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin).toBeDefined()
expect(plugin.sharedViewConfig.stateful).toBe true
expect(plugin.sharedViewConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-shared-view')
expect(plugin.sharedViewConfig.defaultViews).toBeDefined()
it 'should add correct rank field when manually ranked', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp({}, false).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
defaultViews = plugin.sharedViewConfig.defaultViews
expect(Ext.JSON.decode(defaultViews[0].Value, true).columns[0].dataIndex).toBe 'Rank'
it 'should add correct rank field when dnd ranked', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
defaultViews = plugin.sharedViewConfig.defaultViews
expect(Ext.JSON.decode(defaultViews[0].Value, true).columns[0].dataIndex).toBe 'DragAndDropRank'
it 'should enableGridEditing when S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE is true', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.enableGridEditing).toBe true
it 'should NOT enableGridEditing when S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE is false', ->
isFeatureEnabledStub = @stub(Rally.app.Context.prototype, 'isFeatureEnabled')
isFeatureEnabledStub.withArgs('F6028_ISP_SHARED_VIEWS').returns true
isFeatureEnabledStub.withArgs('S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE').returns false
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.enableGridEditing).toBe false
it 'should enableUrlSharing when isFullPageApp is true', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp(
isFullPageApp: true
).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.sharedViewConfig.enableUrlSharing).toBe true
it 'should NOT enableUrlSharing when isFullPageApp is false', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp(
isFullPageApp: false
).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.sharedViewConfig.enableUrlSharing).toBe false
describe 'page sizes', ->
beforeEach ->
@_isIE = Ext.isIE
afterEach ->
Ext.isIE = @_isIE
it 'should give the correct page sizes for non-ie', ->
@createApp().then =>
Ext.isIE = false
expect(@app.getGridPageSizes()).toEqual [10, 25, 50, 100]
it 'should give the correct page sizes for ie', ->
@createApp().then =>
Ext.isIE = true
expect(@app.getGridPageSizes()).toEqual [10, 25, 50]
describe 'grid configurations', ->
it 'should create a grid store with the correct page size', ->
@createApp().then =>
@toggleToGrid()
expect(@app.gridboard.getGridOrBoard().store.pageSize).toEqual 25 | 129437 | Ext = window.Ext4 || window.Ext
Ext.require [
'Rally.apps.iterationtrackingboard.IterationTrackingBoardApp'
'Rally.ui.gridboard.GridBoard'
'Rally.util.DateTime'
'Rally.app.Context',
'Rally.domain.Subscription'
]
describe 'Rally.apps.iterationtrackingboard.IterationTrackingBoardApp', ->
helpers
createApp: (config = {}, dndRankEnabled = true) ->
now = new Date(1384305300 * 1000);
tomorrow = Rally.util.DateTime.add(now, 'day', 1)
nextDay = Rally.util.DateTime.add(tomorrow, 'day', 1)
dayAfter = Rally.util.DateTime.add(nextDay, 'day', 1)
@iterationData = [
{Name:'Iteration 1', StartDate: now, EndDate: tomorrow}
{Name:'Iteration 2', StartDate: nextDay, EndDate: dayAfter}
]
@iterationRecord = @mom.getRecord('iteration', values: @iterationData[0])
scopeRecord = if Ext.isDefined(config?.iterationRecord) then config.iterationRecord else @iterationRecord
@app = Ext.create('Rally.apps.iterationtrackingboard.IterationTrackingBoardApp', Ext.apply(
context: Ext.create('Rally.app.Context',
initialValues:
timebox: Ext.create 'Rally.app.TimeboxScope', type: 'iteration', record: scopeRecord
project:
_ref: @projectRef
workspace:
WorkspaceConfiguration:
DragDropRankingEnabled: dndRankEnabled
WorkDays: "Monday,Friday"
subscription: Rally.environment.getContext().getSubscription()
),
renderTo: 'testDiv'
height: 400
, config))
@waitForComponentReady(@app)
getIterationFilter: ->
iteration = @iterationData[0]
[
{ property: 'Iteration.Name', operator: '=', value: iteration.Name }
{ property: "Iteration.StartDate", operator: '=', value: Rally.util.DateTime.toIsoString(iteration.StartDate) }
{ property: "Iteration.EndDate", operator: '=', value: Rally.util.DateTime.toIsoString(iteration.EndDate) }
]
stubRequests: ->
@ajax.whenQueryingAllowedValues('userstory', 'ScheduleState').respondWith(["Defined", "In-Progress", "Completed", "Accepted"]);
@ajax.whenQuerying('artifact').respondWith [{
RevisionHistory:
_ref: '/revisionhistory/1'
}]
toggleToBoard: ->
@app.gridboard.setToggleState('board')
toggleToGrid: ->
@app.gridboard.setToggleState('grid')
stubFeatureToggle: (toggles, value = true) ->
stub = @stub(Rally.app.Context.prototype, 'isFeatureEnabled');
stub.withArgs(toggle).returns(value) for toggle in toggles
stub
beforeEach ->
@defaultToggleState = Rally.ui.gridboard.GridBoard.prototype.toggleState
Rally.ui.gridboard.GridBoard.prototype.toggleState = 'board' # tests assume board is default view
@ajax.whenReading('project').respondWith {
TeamMembers: []
Editors: []
}
@ajax.whenQuerying('iteration').respondWith([],
schema:
properties:
EndDate:
format:
tzOffset: 0
)
@stubRequests()
@tooltipHelper = new Helpers.TooltipHelper this
afterEach ->
@app?.destroy()
Rally.ui.gridboard.GridBoard.prototype.toggleState = @defaultToggleState
it 'should use anchor layout by default', ->
@createApp().then =>
expect(@app.layout.$className).toBe 'Ext.layout.container.Anchor'
it 'should not render a header', ->
try
@createApp().then =>
header = @app.down('container[cls=header]')
expect(header == null).toBe true
it 'sets current view on viewchange', ->
@createApp().then =>
removeSpy = @spy(@app, 'remove')
@app.down('#gridBoard').fireEvent 'viewchange'
expect(removeSpy).toHaveBeenCalledOnce()
expect(removeSpy).toHaveBeenCalledWith 'gridBoard'
expect(@app.down('#gridBoard')).toBeDefined()
it 'resets view on scope change', ->
@createApp().then =>
removeSpy = @spy(@app, 'remove')
newScope = Ext.create('Rally.app.TimeboxScope',
record: @mom.getRecord('iteration', values: @iterationData[1])
)
@app.onTimeboxScopeChange newScope
expect(removeSpy).toHaveBeenCalledOnce()
expect(removeSpy).toHaveBeenCalledWith 'gridBoard'
expect(@app.down('#gridBoard')).toBeDefined()
it 'fires storecurrentpagereset on scope change', ->
@createApp().then =>
treeGrid = Ext.create 'Ext.Component'
downStub = @stub(@app, 'down').withArgs('rallytreegrid').returns treeGrid
storeCurrentPageResetStub = @stub()
@app.down('rallytreegrid').on 'storecurrentpagereset', storeCurrentPageResetStub
newScope = Ext.create('Rally.app.TimeboxScope',
record: @mom.getRecord('iteration', values: @iterationData[1])
)
@app.onTimeboxScopeChange newScope
expect(storeCurrentPageResetStub).toHaveBeenCalledOnce()
describe 'stats banner', ->
it 'should show showStatsBanner settings field when app IS a full page app', ->
@createApp(isFullPageApp: true).then =>
expect(_.find(@app.getUserSettingsFields(), {xtype: 'rallystatsbannersettingsfield'})).toBeDefined()
it 'should NOT show showStatsBanner settings field when app IS NOT a full page app', ->
@createApp(isFullPageApp: false).then =>
expect(_.find(@app.getUserSettingsFields(), {xtype: 'rallystatsbannersettingsfield'})).not.toBeDefined()
it 'fires contentupdated event after board load', ->
contentUpdatedHandlerStub = @stub()
@createApp(
listeners:
contentupdated: contentUpdatedHandlerStub
).then =>
contentUpdatedHandlerStub.reset()
@app.gridboard.fireEvent('load')
expect(contentUpdatedHandlerStub).toHaveBeenCalledOnce()
it 'should include PortfolioItem in columnConfig.additionalFetchFields', ->
@createApp().then =>
expect(@app.gridboard.getGridOrBoard().columnConfig.additionalFetchFields).toContain 'PortfolioItem'
it 'should have a default card fields setting', ->
@createApp().then =>
expect(@app.down('rallygridboard').getGridOrBoard().columnConfig.fields).toEqual ['Parent', 'Tasks', 'Defects', 'Discussion', 'PlanEstimate', 'Iteration']
it 'should have use the cardFields setting if available', ->
@createApp(
settings:
cardFields: 'HelloKitty'
).then =>
expect(@app.down('rallygridboard').getGridOrBoard().columnConfig.fields).toEqual ['<NAME>Kit<NAME>']
it 'should have the rowSettings field', ->
@createApp().then =>
expect(_.find(@app.getSettingsFields(), {xtype: 'rowsettingsfield'})).toBeDefined()
it 'adds the rowConfig property to the boardConfig', ->
@createApp(
settings:
showRows: true
).then =>
expect(@app.gridboard.getGridOrBoard().config.rowConfig).toBeDefined()
it 'adds the requiresModelSpecificFilters property to the boardConfig', ->
@createApp().then =>
expect(@app.gridboard.getGridOrBoard().columnConfig.requiresModelSpecificFilters).toBe false
it 'should show the field picker in board mode', ->
@createApp().then =>
@toggleToBoard()
expect(@app.down('#fieldpickerbtn').isVisible()).toBe true
it 'should enable bulk edit', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().enableBulkEdit).toBe true
it 'should show a treegrid when treegrid toggled on', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('rallytreegrid')).not.toBeNull()
expect(@app.down('rallygrid')).toBeNull()
describe 'iteration filtering', ->
describe 'with a scope', ->
it 'should filter the grid to the currently selected iteration', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith filters: @getIterationFilter()
it 'should filter the board to the currently selected iteration', ->
requests = @stubRequests()
@createApp().then =>
@toggleToBoard()
expect(request).toBeWsapiRequestWith(filters: @getIterationFilter()) for request in requests
describe 'unscheduled', ->
helpers
createLeafStoriesOnlyFilter: ->
storyTypeDefOid = Rally.test.mock.data.WsapiModelFactory.getModel('UserStory').typeDefOid
Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
value: storyTypeDefOid
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'DirectChildrenCount'
value: 0
)).or(Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
operator: '!='
value: storyTypeDefOid
))
createUnassociatedDefectsOnlyFilter: ->
defectTypeDefOid = Rally.test.mock.data.WsapiModelFactory.getModel('Defect').typeDefOid
Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
value: defectTypeDefOid
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'TestCase'
operator: '='
value: null
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'Requirement.Iteration'
operator: '!='
value: null
).or(Ext.create('Rally.data.wsapi.Filter',
property: 'Requirement'
operator: '='
value: null
)))
).or(Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
operator: '!='
value: defectTypeDefOid
))
describe 'stories', ->
it 'should exclude epic stories from the grid', ->
requestStub = @stubRequests()
@createApp(iterationRecord: null).then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith
filters: [@createLeafStoriesOnlyFilter()]
it 'should not attach leaf-stories-only filter if iteration is not null', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).not.toBeWsapiRequestWith
filters: [@createLeafStoriesOnlyFilter()]
describe 'defects', ->
it 'should exclude associated defects from the grid', ->
requestStub = @stubRequests()
@createApp(iterationRecord: null).then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith
filters: [@createUnassociatedDefectsOnlyFilter()]
it 'should not attach unassociated-defects-only filter if iteration is not null', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).not.toBeWsapiRequestWith
filters: [@createUnassociatedDefectsOnlyFilter()]
describe 'tree grid config', ->
it 'returns the columns with the FormattedID removed', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().initialConfig.columnCfgs).toEqual ['Name', 'ScheduleState', 'Blocked', 'PlanEstimate', 'Tasks', 'TaskEstimateTotal', 'TaskRemainingTotal', 'Owner', 'Defects', 'Discussion']
it 'should include test sets', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('rallytreegrid').getStore().parentTypes).toContain 'testset'
it 'should include dataContext', ->
buildSpy = @spy(Rally.data.wsapi.TreeStoreBuilder::, 'build')
@createApp().then (app) ->
expect(buildSpy.getCall(0).args[0].context).toEqual app.getContext().getDataContext()
it 'sets the expandAllInColumnHeaderEnabled to true', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().initialConfig.expandAllInColumnHeaderEnabled).toBe true
it 'should fetch PlanEstimate, Release and Iteration', ->
@createApp().then =>
@toggleToGrid()
store = @app.down('rallytreegrid').getStore()
expect(store.fetch).toContain 'PlanEstimate'
expect(store.fetch).toContain 'Release'
expect(store.fetch).toContain 'Iteration'
it 'should pass in enableAddPlusNewChildStories to inlineAddRowExpander plugin', ->
@createApp().then =>
@toggleToGrid()
inlineAddRowExpander = _.find(@app.down('rallytreegrid').plugins, {'ptype': 'rallyinlineaddrowexpander'})
expect(inlineAddRowExpander.enableAddPlusNewChildStories).toBe false
describe 'toggle grid/board cls to ensure overflow-y gets set for fixed header plugin', ->
it 'should add board-toggled class to app on initial load in board view', ->
@stub(Rally.ui.gridboard.GridBoard::, 'toggleState', 'board')
@createApp().then =>
expect(@app.getEl().dom.className).toContain 'board-toggled'
it 'should add board-toggled class to app when toggled to board view', ->
@createApp().then =>
@toggleToBoard()
expect(@app.getEl().dom.className).toContain 'board-toggled'
it 'should add grid-toggled class to app when toggled to grid view', ->
@createApp().then =>
@toggleToGrid()
expect(@app.getEl().dom.className).toContain 'grid-toggled'
describe 'sizing', ->
it 'should set an initial gridboard height', ->
@createApp().then =>
expect(@app.down('rallygridboard').getHeight()).toBe @app._getAvailableGridBoardHeight()
it 'should update the grid or board height', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
setHeightSpy = @spy gridBoard, 'setHeight'
currentHeight = gridBoard.getHeight()
@app.setHeight @app.getHeight() + 10
@waitForCallback(setHeightSpy).then =>
expect(gridBoard.getHeight()).toBe currentHeight + 10
describe 'custom filter popover', ->
it 'should add common storeConfig to gridboard', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
expect(gridBoard.storeConfig.filters.length).toBe 1
expect(gridBoard.storeConfig.filters[0].toString()).toBe @app.getContext().getTimeboxScope().getQueryFilter().toString()
it 'should use rallygridboard custom filter control', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardcustomfiltercontrol'
expect(plugin).toBeDefined()
expect(plugin.filterControlConfig.stateful).toBe true
expect(plugin.filterControlConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-custom-filter-button')
expect(plugin.showOwnerFilter).toBe true
expect(plugin.ownerFilterControlConfig.stateful).toBe true
expect(plugin.ownerFilterControlConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-owner-filter')
it 'should include the Milestones field in the available Fields', ->
@createApp().then =>
filterPlugin = _.find(@app.gridboard.plugins, ptype: 'rallygridboardcustomfiltercontrol')
expect(_.contains(filterPlugin.filterControlConfig.whiteListFields, 'Milestones')).toBe true
describe 'filtering panel plugin', ->
helpers
getPlugin: ->
gridBoard = @app.down 'rallygridboard'
_.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardinlinefiltercontrol'
beforeEach ->
@stubFeatureToggle ['F7336_ADVANCED_FILTERING'], true
it 'should use rallygridboard filtering plugin', ->
@createApp().then =>
expect(@getPlugin()).toBeDefined()
it 'should default to inline when a full page app', ->
@createApp().then =>
expect(@getPlugin().inline).toBe true
it 'should set inline false when NOT a full page app', ->
@createApp(isFullPageApp: false).then =>
expect(@getPlugin().inline).toBe false
describe 'quick filters', ->
it 'should add filters for search, owner and schedulestate', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
expect(config.fields[0]).toBe 'ArtifactSearch'
expect(config.fields[1]).toBe 'Owner'
expect(config.fields[2].name).toBe 'ScheduleState'
it 'schedulestate should be multiselect', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
expect(config.fields[2].multiSelect).toBe true
expect(config.fields[2].allowClear).toBe false
it 'should return an empty schedulestate filter', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
cmp = config.fields[2]
cmp.lastValue = []
expect(cmp.getFilter()).toBeNull()
it 'should return an or\'ed schedulestate filter', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
cmp = config.fields[2]
cmp.lastValue = ['In-Progress', 'Completed']
expect(cmp.getFilter().toString()).toBe '((ScheduleState = "In-Progress") OR (ScheduleState = "Completed"))'
describe 'shared view plugin', ->
it 'should use rallygridboard shared view plugin', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin).toBeDefined()
expect(plugin.sharedViewConfig.stateful).toBe true
expect(plugin.sharedViewConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-shared-view')
expect(plugin.sharedViewConfig.defaultViews).toBeDefined()
it 'should add correct rank field when manually ranked', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp({}, false).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
defaultViews = plugin.sharedViewConfig.defaultViews
expect(Ext.JSON.decode(defaultViews[0].Value, true).columns[0].dataIndex).toBe 'Rank'
it 'should add correct rank field when dnd ranked', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
defaultViews = plugin.sharedViewConfig.defaultViews
expect(Ext.JSON.decode(defaultViews[0].Value, true).columns[0].dataIndex).toBe 'DragAndDropRank'
it 'should enableGridEditing when S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE is true', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.enableGridEditing).toBe true
it 'should NOT enableGridEditing when S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE is false', ->
isFeatureEnabledStub = @stub(Rally.app.Context.prototype, 'isFeatureEnabled')
isFeatureEnabledStub.withArgs('F6028_ISP_SHARED_VIEWS').returns true
isFeatureEnabledStub.withArgs('S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE').returns false
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.enableGridEditing).toBe false
it 'should enableUrlSharing when isFullPageApp is true', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp(
isFullPageApp: true
).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.sharedViewConfig.enableUrlSharing).toBe true
it 'should NOT enableUrlSharing when isFullPageApp is false', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp(
isFullPageApp: false
).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.sharedViewConfig.enableUrlSharing).toBe false
describe 'page sizes', ->
beforeEach ->
@_isIE = Ext.isIE
afterEach ->
Ext.isIE = @_isIE
it 'should give the correct page sizes for non-ie', ->
@createApp().then =>
Ext.isIE = false
expect(@app.getGridPageSizes()).toEqual [10, 25, 50, 100]
it 'should give the correct page sizes for ie', ->
@createApp().then =>
Ext.isIE = true
expect(@app.getGridPageSizes()).toEqual [10, 25, 50]
describe 'grid configurations', ->
it 'should create a grid store with the correct page size', ->
@createApp().then =>
@toggleToGrid()
expect(@app.gridboard.getGridOrBoard().store.pageSize).toEqual 25 | true | Ext = window.Ext4 || window.Ext
Ext.require [
'Rally.apps.iterationtrackingboard.IterationTrackingBoardApp'
'Rally.ui.gridboard.GridBoard'
'Rally.util.DateTime'
'Rally.app.Context',
'Rally.domain.Subscription'
]
describe 'Rally.apps.iterationtrackingboard.IterationTrackingBoardApp', ->
helpers
createApp: (config = {}, dndRankEnabled = true) ->
now = new Date(1384305300 * 1000);
tomorrow = Rally.util.DateTime.add(now, 'day', 1)
nextDay = Rally.util.DateTime.add(tomorrow, 'day', 1)
dayAfter = Rally.util.DateTime.add(nextDay, 'day', 1)
@iterationData = [
{Name:'Iteration 1', StartDate: now, EndDate: tomorrow}
{Name:'Iteration 2', StartDate: nextDay, EndDate: dayAfter}
]
@iterationRecord = @mom.getRecord('iteration', values: @iterationData[0])
scopeRecord = if Ext.isDefined(config?.iterationRecord) then config.iterationRecord else @iterationRecord
@app = Ext.create('Rally.apps.iterationtrackingboard.IterationTrackingBoardApp', Ext.apply(
context: Ext.create('Rally.app.Context',
initialValues:
timebox: Ext.create 'Rally.app.TimeboxScope', type: 'iteration', record: scopeRecord
project:
_ref: @projectRef
workspace:
WorkspaceConfiguration:
DragDropRankingEnabled: dndRankEnabled
WorkDays: "Monday,Friday"
subscription: Rally.environment.getContext().getSubscription()
),
renderTo: 'testDiv'
height: 400
, config))
@waitForComponentReady(@app)
getIterationFilter: ->
iteration = @iterationData[0]
[
{ property: 'Iteration.Name', operator: '=', value: iteration.Name }
{ property: "Iteration.StartDate", operator: '=', value: Rally.util.DateTime.toIsoString(iteration.StartDate) }
{ property: "Iteration.EndDate", operator: '=', value: Rally.util.DateTime.toIsoString(iteration.EndDate) }
]
stubRequests: ->
@ajax.whenQueryingAllowedValues('userstory', 'ScheduleState').respondWith(["Defined", "In-Progress", "Completed", "Accepted"]);
@ajax.whenQuerying('artifact').respondWith [{
RevisionHistory:
_ref: '/revisionhistory/1'
}]
toggleToBoard: ->
@app.gridboard.setToggleState('board')
toggleToGrid: ->
@app.gridboard.setToggleState('grid')
stubFeatureToggle: (toggles, value = true) ->
stub = @stub(Rally.app.Context.prototype, 'isFeatureEnabled');
stub.withArgs(toggle).returns(value) for toggle in toggles
stub
beforeEach ->
@defaultToggleState = Rally.ui.gridboard.GridBoard.prototype.toggleState
Rally.ui.gridboard.GridBoard.prototype.toggleState = 'board' # tests assume board is default view
@ajax.whenReading('project').respondWith {
TeamMembers: []
Editors: []
}
@ajax.whenQuerying('iteration').respondWith([],
schema:
properties:
EndDate:
format:
tzOffset: 0
)
@stubRequests()
@tooltipHelper = new Helpers.TooltipHelper this
afterEach ->
@app?.destroy()
Rally.ui.gridboard.GridBoard.prototype.toggleState = @defaultToggleState
it 'should use anchor layout by default', ->
@createApp().then =>
expect(@app.layout.$className).toBe 'Ext.layout.container.Anchor'
it 'should not render a header', ->
try
@createApp().then =>
header = @app.down('container[cls=header]')
expect(header == null).toBe true
it 'sets current view on viewchange', ->
@createApp().then =>
removeSpy = @spy(@app, 'remove')
@app.down('#gridBoard').fireEvent 'viewchange'
expect(removeSpy).toHaveBeenCalledOnce()
expect(removeSpy).toHaveBeenCalledWith 'gridBoard'
expect(@app.down('#gridBoard')).toBeDefined()
it 'resets view on scope change', ->
@createApp().then =>
removeSpy = @spy(@app, 'remove')
newScope = Ext.create('Rally.app.TimeboxScope',
record: @mom.getRecord('iteration', values: @iterationData[1])
)
@app.onTimeboxScopeChange newScope
expect(removeSpy).toHaveBeenCalledOnce()
expect(removeSpy).toHaveBeenCalledWith 'gridBoard'
expect(@app.down('#gridBoard')).toBeDefined()
it 'fires storecurrentpagereset on scope change', ->
@createApp().then =>
treeGrid = Ext.create 'Ext.Component'
downStub = @stub(@app, 'down').withArgs('rallytreegrid').returns treeGrid
storeCurrentPageResetStub = @stub()
@app.down('rallytreegrid').on 'storecurrentpagereset', storeCurrentPageResetStub
newScope = Ext.create('Rally.app.TimeboxScope',
record: @mom.getRecord('iteration', values: @iterationData[1])
)
@app.onTimeboxScopeChange newScope
expect(storeCurrentPageResetStub).toHaveBeenCalledOnce()
describe 'stats banner', ->
it 'should show showStatsBanner settings field when app IS a full page app', ->
@createApp(isFullPageApp: true).then =>
expect(_.find(@app.getUserSettingsFields(), {xtype: 'rallystatsbannersettingsfield'})).toBeDefined()
it 'should NOT show showStatsBanner settings field when app IS NOT a full page app', ->
@createApp(isFullPageApp: false).then =>
expect(_.find(@app.getUserSettingsFields(), {xtype: 'rallystatsbannersettingsfield'})).not.toBeDefined()
it 'fires contentupdated event after board load', ->
contentUpdatedHandlerStub = @stub()
@createApp(
listeners:
contentupdated: contentUpdatedHandlerStub
).then =>
contentUpdatedHandlerStub.reset()
@app.gridboard.fireEvent('load')
expect(contentUpdatedHandlerStub).toHaveBeenCalledOnce()
it 'should include PortfolioItem in columnConfig.additionalFetchFields', ->
@createApp().then =>
expect(@app.gridboard.getGridOrBoard().columnConfig.additionalFetchFields).toContain 'PortfolioItem'
it 'should have a default card fields setting', ->
@createApp().then =>
expect(@app.down('rallygridboard').getGridOrBoard().columnConfig.fields).toEqual ['Parent', 'Tasks', 'Defects', 'Discussion', 'PlanEstimate', 'Iteration']
it 'should have use the cardFields setting if available', ->
@createApp(
settings:
cardFields: 'HelloKitty'
).then =>
expect(@app.down('rallygridboard').getGridOrBoard().columnConfig.fields).toEqual ['PI:NAME:<NAME>END_PIKitPI:NAME:<NAME>END_PI']
it 'should have the rowSettings field', ->
@createApp().then =>
expect(_.find(@app.getSettingsFields(), {xtype: 'rowsettingsfield'})).toBeDefined()
it 'adds the rowConfig property to the boardConfig', ->
@createApp(
settings:
showRows: true
).then =>
expect(@app.gridboard.getGridOrBoard().config.rowConfig).toBeDefined()
it 'adds the requiresModelSpecificFilters property to the boardConfig', ->
@createApp().then =>
expect(@app.gridboard.getGridOrBoard().columnConfig.requiresModelSpecificFilters).toBe false
it 'should show the field picker in board mode', ->
@createApp().then =>
@toggleToBoard()
expect(@app.down('#fieldpickerbtn').isVisible()).toBe true
it 'should enable bulk edit', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().enableBulkEdit).toBe true
it 'should show a treegrid when treegrid toggled on', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('rallytreegrid')).not.toBeNull()
expect(@app.down('rallygrid')).toBeNull()
describe 'iteration filtering', ->
describe 'with a scope', ->
it 'should filter the grid to the currently selected iteration', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith filters: @getIterationFilter()
it 'should filter the board to the currently selected iteration', ->
requests = @stubRequests()
@createApp().then =>
@toggleToBoard()
expect(request).toBeWsapiRequestWith(filters: @getIterationFilter()) for request in requests
describe 'unscheduled', ->
helpers
createLeafStoriesOnlyFilter: ->
storyTypeDefOid = Rally.test.mock.data.WsapiModelFactory.getModel('UserStory').typeDefOid
Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
value: storyTypeDefOid
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'DirectChildrenCount'
value: 0
)).or(Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
operator: '!='
value: storyTypeDefOid
))
createUnassociatedDefectsOnlyFilter: ->
defectTypeDefOid = Rally.test.mock.data.WsapiModelFactory.getModel('Defect').typeDefOid
Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
value: defectTypeDefOid
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'TestCase'
operator: '='
value: null
).and(Ext.create('Rally.data.wsapi.Filter',
property: 'Requirement.Iteration'
operator: '!='
value: null
).or(Ext.create('Rally.data.wsapi.Filter',
property: 'Requirement'
operator: '='
value: null
)))
).or(Ext.create('Rally.data.wsapi.Filter',
property: 'TypeDefOid'
operator: '!='
value: defectTypeDefOid
))
describe 'stories', ->
it 'should exclude epic stories from the grid', ->
requestStub = @stubRequests()
@createApp(iterationRecord: null).then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith
filters: [@createLeafStoriesOnlyFilter()]
it 'should not attach leaf-stories-only filter if iteration is not null', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).not.toBeWsapiRequestWith
filters: [@createLeafStoriesOnlyFilter()]
describe 'defects', ->
it 'should exclude associated defects from the grid', ->
requestStub = @stubRequests()
@createApp(iterationRecord: null).then =>
@toggleToGrid()
expect(requestStub).toBeWsapiRequestWith
filters: [@createUnassociatedDefectsOnlyFilter()]
it 'should not attach unassociated-defects-only filter if iteration is not null', ->
requestStub = @stubRequests()
@createApp().then =>
@toggleToGrid()
expect(requestStub).not.toBeWsapiRequestWith
filters: [@createUnassociatedDefectsOnlyFilter()]
describe 'tree grid config', ->
it 'returns the columns with the FormattedID removed', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().initialConfig.columnCfgs).toEqual ['Name', 'ScheduleState', 'Blocked', 'PlanEstimate', 'Tasks', 'TaskEstimateTotal', 'TaskRemainingTotal', 'Owner', 'Defects', 'Discussion']
it 'should include test sets', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('rallytreegrid').getStore().parentTypes).toContain 'testset'
it 'should include dataContext', ->
buildSpy = @spy(Rally.data.wsapi.TreeStoreBuilder::, 'build')
@createApp().then (app) ->
expect(buildSpy.getCall(0).args[0].context).toEqual app.getContext().getDataContext()
it 'sets the expandAllInColumnHeaderEnabled to true', ->
@createApp().then =>
@toggleToGrid()
expect(@app.down('#gridBoard').getGridOrBoard().initialConfig.expandAllInColumnHeaderEnabled).toBe true
it 'should fetch PlanEstimate, Release and Iteration', ->
@createApp().then =>
@toggleToGrid()
store = @app.down('rallytreegrid').getStore()
expect(store.fetch).toContain 'PlanEstimate'
expect(store.fetch).toContain 'Release'
expect(store.fetch).toContain 'Iteration'
it 'should pass in enableAddPlusNewChildStories to inlineAddRowExpander plugin', ->
@createApp().then =>
@toggleToGrid()
inlineAddRowExpander = _.find(@app.down('rallytreegrid').plugins, {'ptype': 'rallyinlineaddrowexpander'})
expect(inlineAddRowExpander.enableAddPlusNewChildStories).toBe false
describe 'toggle grid/board cls to ensure overflow-y gets set for fixed header plugin', ->
it 'should add board-toggled class to app on initial load in board view', ->
@stub(Rally.ui.gridboard.GridBoard::, 'toggleState', 'board')
@createApp().then =>
expect(@app.getEl().dom.className).toContain 'board-toggled'
it 'should add board-toggled class to app when toggled to board view', ->
@createApp().then =>
@toggleToBoard()
expect(@app.getEl().dom.className).toContain 'board-toggled'
it 'should add grid-toggled class to app when toggled to grid view', ->
@createApp().then =>
@toggleToGrid()
expect(@app.getEl().dom.className).toContain 'grid-toggled'
describe 'sizing', ->
it 'should set an initial gridboard height', ->
@createApp().then =>
expect(@app.down('rallygridboard').getHeight()).toBe @app._getAvailableGridBoardHeight()
it 'should update the grid or board height', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
setHeightSpy = @spy gridBoard, 'setHeight'
currentHeight = gridBoard.getHeight()
@app.setHeight @app.getHeight() + 10
@waitForCallback(setHeightSpy).then =>
expect(gridBoard.getHeight()).toBe currentHeight + 10
describe 'custom filter popover', ->
it 'should add common storeConfig to gridboard', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
expect(gridBoard.storeConfig.filters.length).toBe 1
expect(gridBoard.storeConfig.filters[0].toString()).toBe @app.getContext().getTimeboxScope().getQueryFilter().toString()
it 'should use rallygridboard custom filter control', ->
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardcustomfiltercontrol'
expect(plugin).toBeDefined()
expect(plugin.filterControlConfig.stateful).toBe true
expect(plugin.filterControlConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-custom-filter-button')
expect(plugin.showOwnerFilter).toBe true
expect(plugin.ownerFilterControlConfig.stateful).toBe true
expect(plugin.ownerFilterControlConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-owner-filter')
it 'should include the Milestones field in the available Fields', ->
@createApp().then =>
filterPlugin = _.find(@app.gridboard.plugins, ptype: 'rallygridboardcustomfiltercontrol')
expect(_.contains(filterPlugin.filterControlConfig.whiteListFields, 'Milestones')).toBe true
describe 'filtering panel plugin', ->
helpers
getPlugin: ->
gridBoard = @app.down 'rallygridboard'
_.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardinlinefiltercontrol'
beforeEach ->
@stubFeatureToggle ['F7336_ADVANCED_FILTERING'], true
it 'should use rallygridboard filtering plugin', ->
@createApp().then =>
expect(@getPlugin()).toBeDefined()
it 'should default to inline when a full page app', ->
@createApp().then =>
expect(@getPlugin().inline).toBe true
it 'should set inline false when NOT a full page app', ->
@createApp(isFullPageApp: false).then =>
expect(@getPlugin().inline).toBe false
describe 'quick filters', ->
it 'should add filters for search, owner and schedulestate', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
expect(config.fields[0]).toBe 'ArtifactSearch'
expect(config.fields[1]).toBe 'Owner'
expect(config.fields[2].name).toBe 'ScheduleState'
it 'schedulestate should be multiselect', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
expect(config.fields[2].multiSelect).toBe true
expect(config.fields[2].allowClear).toBe false
it 'should return an empty schedulestate filter', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
cmp = config.fields[2]
cmp.lastValue = []
expect(cmp.getFilter()).toBeNull()
it 'should return an or\'ed schedulestate filter', ->
@createApp().then =>
config = @getPlugin().inlineFilterButtonConfig.inlineFilterPanelConfig.quickFilterPanelConfig
cmp = config.fields[2]
cmp.lastValue = ['In-Progress', 'Completed']
expect(cmp.getFilter().toString()).toBe '((ScheduleState = "In-Progress") OR (ScheduleState = "Completed"))'
describe 'shared view plugin', ->
it 'should use rallygridboard shared view plugin', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin).toBeDefined()
expect(plugin.sharedViewConfig.stateful).toBe true
expect(plugin.sharedViewConfig.stateId).toBe @app.getContext().getScopedStateId('iteration-tracking-shared-view')
expect(plugin.sharedViewConfig.defaultViews).toBeDefined()
it 'should add correct rank field when manually ranked', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp({}, false).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
defaultViews = plugin.sharedViewConfig.defaultViews
expect(Ext.JSON.decode(defaultViews[0].Value, true).columns[0].dataIndex).toBe 'Rank'
it 'should add correct rank field when dnd ranked', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
defaultViews = plugin.sharedViewConfig.defaultViews
expect(Ext.JSON.decode(defaultViews[0].Value, true).columns[0].dataIndex).toBe 'DragAndDropRank'
it 'should enableGridEditing when S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE is true', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.enableGridEditing).toBe true
it 'should NOT enableGridEditing when S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE is false', ->
isFeatureEnabledStub = @stub(Rally.app.Context.prototype, 'isFeatureEnabled')
isFeatureEnabledStub.withArgs('F6028_ISP_SHARED_VIEWS').returns true
isFeatureEnabledStub.withArgs('S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE').returns false
@createApp().then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.enableGridEditing).toBe false
it 'should enableUrlSharing when isFullPageApp is true', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp(
isFullPageApp: true
).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.sharedViewConfig.enableUrlSharing).toBe true
it 'should NOT enableUrlSharing when isFullPageApp is false', ->
@stubFeatureToggle ['F6028_ISP_SHARED_VIEWS', 'S91174_ISP_SHARED_VIEWS_MAKE_PREFERENCE_NAMES_UPDATABLE'], true
@createApp(
isFullPageApp: false
).then =>
gridBoard = @app.down 'rallygridboard'
plugin = _.find gridBoard.plugins, (plugin) ->
plugin.ptype == 'rallygridboardsharedviewcontrol'
expect(plugin.sharedViewConfig.enableUrlSharing).toBe false
describe 'page sizes', ->
beforeEach ->
@_isIE = Ext.isIE
afterEach ->
Ext.isIE = @_isIE
it 'should give the correct page sizes for non-ie', ->
@createApp().then =>
Ext.isIE = false
expect(@app.getGridPageSizes()).toEqual [10, 25, 50, 100]
it 'should give the correct page sizes for ie', ->
@createApp().then =>
Ext.isIE = true
expect(@app.getGridPageSizes()).toEqual [10, 25, 50]
describe 'grid configurations', ->
it 'should create a grid store with the correct page size', ->
@createApp().then =>
@toggleToGrid()
expect(@app.gridboard.getGridOrBoard().store.pageSize).toEqual 25 |
[
{
"context": " Under MIT license, see LICENSE file for details\n Andrey Popp (c) 2013\n\n###\n\n{defer} = require 'kew'\npg ",
"end": 117,
"score": 0.9998383522033691,
"start": 106,
"tag": "NAME",
"value": "Andrey Popp"
}
] | db.coffee | andreypopp/wall | 2 | ###
Thin database access layer based on promises.
Under MIT license, see LICENSE file for details
Andrey Popp (c) 2013
###
{defer} = require 'kew'
pg = require 'pg'
sql = require 'sql'
{extend} = require 'underscore'
connect = (uri) ->
promise = defer()
pg.connect uri, (err, conn, done) ->
if err
promise.reject(err)
else
conn.release = done
promise.resolve(conn)
promise
query = (db, text, values...) ->
{text, values} = text.toQuery() if text.toQuery?
promise = defer()
db.query text, values, (err, result) ->
if err then promise.reject(err) else promise.resolve(result)
promise
queryRows = (args...) ->
query(args...).then (res) -> res.rows
queryRow = (args...) ->
query(args...).then (res) -> res.rows[0]
queryScalar = (args...) ->
query(args...).then (res) ->
row = res.rows[0]
if row
columnName = Object.keys(row)[0]
row[columnName]
begin = (db) ->
query(db, "BEGIN")
commit = (db) ->
query(db, "COMMIT")
rollback = (db) ->
query(db, "ROLLBACK")
items = sql.define
name: 'items'
columns: [
'id', 'title', 'uri', 'post',
'created', 'updated', 'creator',
'parent', 'child_count']
items_ordered = sql.define
name: 'items_ordered'
columns: [
'id', 'title', 'uri', 'post',
'created', 'updated', 'creator',
'parent', 'child_count', 'order']
module.exports = extend {}, pg, {
items, items_ordered,
connect, begin, commit, rollback,
query, queryRows, queryRow, queryScalar}
| 77547 | ###
Thin database access layer based on promises.
Under MIT license, see LICENSE file for details
<NAME> (c) 2013
###
{defer} = require 'kew'
pg = require 'pg'
sql = require 'sql'
{extend} = require 'underscore'
connect = (uri) ->
promise = defer()
pg.connect uri, (err, conn, done) ->
if err
promise.reject(err)
else
conn.release = done
promise.resolve(conn)
promise
query = (db, text, values...) ->
{text, values} = text.toQuery() if text.toQuery?
promise = defer()
db.query text, values, (err, result) ->
if err then promise.reject(err) else promise.resolve(result)
promise
queryRows = (args...) ->
query(args...).then (res) -> res.rows
queryRow = (args...) ->
query(args...).then (res) -> res.rows[0]
queryScalar = (args...) ->
query(args...).then (res) ->
row = res.rows[0]
if row
columnName = Object.keys(row)[0]
row[columnName]
begin = (db) ->
query(db, "BEGIN")
commit = (db) ->
query(db, "COMMIT")
rollback = (db) ->
query(db, "ROLLBACK")
items = sql.define
name: 'items'
columns: [
'id', 'title', 'uri', 'post',
'created', 'updated', 'creator',
'parent', 'child_count']
items_ordered = sql.define
name: 'items_ordered'
columns: [
'id', 'title', 'uri', 'post',
'created', 'updated', 'creator',
'parent', 'child_count', 'order']
module.exports = extend {}, pg, {
items, items_ordered,
connect, begin, commit, rollback,
query, queryRows, queryRow, queryScalar}
| true | ###
Thin database access layer based on promises.
Under MIT license, see LICENSE file for details
PI:NAME:<NAME>END_PI (c) 2013
###
{defer} = require 'kew'
pg = require 'pg'
sql = require 'sql'
{extend} = require 'underscore'
connect = (uri) ->
promise = defer()
pg.connect uri, (err, conn, done) ->
if err
promise.reject(err)
else
conn.release = done
promise.resolve(conn)
promise
query = (db, text, values...) ->
{text, values} = text.toQuery() if text.toQuery?
promise = defer()
db.query text, values, (err, result) ->
if err then promise.reject(err) else promise.resolve(result)
promise
queryRows = (args...) ->
query(args...).then (res) -> res.rows
queryRow = (args...) ->
query(args...).then (res) -> res.rows[0]
queryScalar = (args...) ->
query(args...).then (res) ->
row = res.rows[0]
if row
columnName = Object.keys(row)[0]
row[columnName]
begin = (db) ->
query(db, "BEGIN")
commit = (db) ->
query(db, "COMMIT")
rollback = (db) ->
query(db, "ROLLBACK")
items = sql.define
name: 'items'
columns: [
'id', 'title', 'uri', 'post',
'created', 'updated', 'creator',
'parent', 'child_count']
items_ordered = sql.define
name: 'items_ordered'
columns: [
'id', 'title', 'uri', 'post',
'created', 'updated', 'creator',
'parent', 'child_count', 'order']
module.exports = extend {}, pg, {
items, items_ordered,
connect, begin, commit, rollback,
query, queryRows, queryRow, queryScalar}
|
[
{
"context": "##\n knockback.js 1.2.3\n Copyright (c) 2011-2016 Kevin Malakoff.\n License: MIT (http://www.opensource.org/licens",
"end": 66,
"score": 0.9998288154602051,
"start": 52,
"tag": "NAME",
"value": "Kevin Malakoff"
},
{
"context": "ses/mit-license.php)\n Source: https:... | src/core/functions/unwrap_models.coffee | kmalakoff/knockback | 160 | ###
knockback.js 1.2.3
Copyright (c) 2011-2016 Kevin Malakoff.
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/kmalakoff/knockback
Dependencies: Knockout.js, Backbone.js, and Underscore.js (or LoDash.js).
Optional dependencies: Backbone.ModelRef.js and BackboneORM.
###
{_} = require '../kb'
# @nodoc
module.exports = unwrapModels = (obj) ->
return obj unless obj
return (if obj.__kb.hasOwnProperty('object') then obj.__kb.object else obj) if obj.__kb
return _.map(obj, (test) -> return unwrapModels(test)) if _.isArray(obj)
if _.isObject(obj) and (obj.constructor is {}.constructor) # a simple object
result = {}
result[key] = unwrapModels(value) for key, value of obj
return result
return obj
| 25918 | ###
knockback.js 1.2.3
Copyright (c) 2011-2016 <NAME>.
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/kmalakoff/knockback
Dependencies: Knockout.js, Backbone.js, and Underscore.js (or LoDash.js).
Optional dependencies: Backbone.ModelRef.js and BackboneORM.
###
{_} = require '../kb'
# @nodoc
module.exports = unwrapModels = (obj) ->
return obj unless obj
return (if obj.__kb.hasOwnProperty('object') then obj.__kb.object else obj) if obj.__kb
return _.map(obj, (test) -> return unwrapModels(test)) if _.isArray(obj)
if _.isObject(obj) and (obj.constructor is {}.constructor) # a simple object
result = {}
result[key] = unwrapModels(value) for key, value of obj
return result
return obj
| true | ###
knockback.js 1.2.3
Copyright (c) 2011-2016 PI:NAME:<NAME>END_PI.
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/kmalakoff/knockback
Dependencies: Knockout.js, Backbone.js, and Underscore.js (or LoDash.js).
Optional dependencies: Backbone.ModelRef.js and BackboneORM.
###
{_} = require '../kb'
# @nodoc
module.exports = unwrapModels = (obj) ->
return obj unless obj
return (if obj.__kb.hasOwnProperty('object') then obj.__kb.object else obj) if obj.__kb
return _.map(obj, (test) -> return unwrapModels(test)) if _.isArray(obj)
if _.isObject(obj) and (obj.constructor is {}.constructor) # a simple object
result = {}
result[key] = unwrapModels(value) for key, value of obj
return result
return obj
|
[
{
"context": "#\n# Copyright 2014 Carsten Klein\n#\n# Licensed under the Apache License, Version 2.",
"end": 32,
"score": 0.9998632669448853,
"start": 19,
"tag": "NAME",
"value": "Carsten Klein"
}
] | test/mixed-node-coffee-script-inheritance-test.coffee | vibejs/vibejs-subclassof | 0 | #
# Copyright 2014 Carsten Klein
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
vows = require 'vows'
assert = require 'assert'
util = require 'util'
require '../src/macros'
vows
.describe 'mixed coffee-script/node inheritance'
.addBatch
'monkey patches for Errors work' :
topic : ->
result =
base : TypeError
derived : class Child extends TypeError
'is subclassof TypeError' : (topic) ->
assert.equal subclassof(topic.derived, topic.base), true
'is subclassof Error' : (topic) ->
assert.equal subclassof(topic.derived, Error), true
'mixed node-coffee inheritance tree' :
topic : ->
class Child1 extends TypeError
Child2 = ->
util.inherits(Child2, Child1)
class Child3 extends Child2
result =
base : TypeError
base1 : Child1
base2 : Child2
derived : Child3
'is subclassof Child2' : (topic) ->
assert.equal subclassof(topic.derived, topic.base2), true
'is subclassof Child1' : (topic) ->
assert.equal subclassof(topic.derived, topic.base1), true
'is subclassof Super' : (topic) ->
assert.equal subclassof(topic.derived, topic.base), true
.export module
| 85683 | #
# Copyright 2014 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
vows = require 'vows'
assert = require 'assert'
util = require 'util'
require '../src/macros'
vows
.describe 'mixed coffee-script/node inheritance'
.addBatch
'monkey patches for Errors work' :
topic : ->
result =
base : TypeError
derived : class Child extends TypeError
'is subclassof TypeError' : (topic) ->
assert.equal subclassof(topic.derived, topic.base), true
'is subclassof Error' : (topic) ->
assert.equal subclassof(topic.derived, Error), true
'mixed node-coffee inheritance tree' :
topic : ->
class Child1 extends TypeError
Child2 = ->
util.inherits(Child2, Child1)
class Child3 extends Child2
result =
base : TypeError
base1 : Child1
base2 : Child2
derived : Child3
'is subclassof Child2' : (topic) ->
assert.equal subclassof(topic.derived, topic.base2), true
'is subclassof Child1' : (topic) ->
assert.equal subclassof(topic.derived, topic.base1), true
'is subclassof Super' : (topic) ->
assert.equal subclassof(topic.derived, topic.base), true
.export module
| true | #
# Copyright 2014 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
vows = require 'vows'
assert = require 'assert'
util = require 'util'
require '../src/macros'
vows
.describe 'mixed coffee-script/node inheritance'
.addBatch
'monkey patches for Errors work' :
topic : ->
result =
base : TypeError
derived : class Child extends TypeError
'is subclassof TypeError' : (topic) ->
assert.equal subclassof(topic.derived, topic.base), true
'is subclassof Error' : (topic) ->
assert.equal subclassof(topic.derived, Error), true
'mixed node-coffee inheritance tree' :
topic : ->
class Child1 extends TypeError
Child2 = ->
util.inherits(Child2, Child1)
class Child3 extends Child2
result =
base : TypeError
base1 : Child1
base2 : Child2
derived : Child3
'is subclassof Child2' : (topic) ->
assert.equal subclassof(topic.derived, topic.base2), true
'is subclassof Child1' : (topic) ->
assert.equal subclassof(topic.derived, topic.base1), true
'is subclassof Super' : (topic) ->
assert.equal subclassof(topic.derived, topic.base), true
.export module
|
[
{
"context": "her(true)\n traitsToReport.push('firstName', 'lastName')\n for userTrait in traitsToReport\n trait",
"end": 1515,
"score": 0.8437103629112244,
"start": 1507,
"tag": "NAME",
"value": "lastName"
}
] | app/core/Tracker.coffee | cihatislamdede/codecombat | 4,858 | {me} = require 'core/auth'
SuperModel = require 'models/SuperModel'
utils = require 'core/utils'
CocoClass = require 'core/CocoClass'
api = require('core/api')
debugAnalytics = false
module.exports = class Tracker extends CocoClass
initialized: false
cookies: {required: false, answered: false, consented: false, declined: false}
constructor: ->
super()
@supermodel = new SuperModel()
@isProduction = document.location.href.search('codecombat.com') isnt -1
finishInitialization: ->
return if @initialized
@initialized = true
@trackReferrers()
@identify() # Needs supermodel to exist first
trackReferrers: ->
elapsed = new Date() - new Date(me.get('dateCreated'))
return unless elapsed < 5 * 60 * 1000
return if me.get('siteref') or me.get('referrer')
changed = false
if siteref = utils.getQueryVariable '_r'
me.set 'siteref', siteref
changed = true
if referrer = document.referrer
me.set 'referrer', referrer
changed = true
me.patch() if changed
identify: (traits={}) ->
# Save explicit traits for internal tracking
@explicitTraits ?= {}
@explicitTraits[key] = value for key, value of traits
traitsToReport = [
'email', 'anonymous', 'dateCreated', 'hourOfCode', 'name', 'referrer', 'testGroupNumber', 'testGroupNumberUS',
'gender', 'lastLevel', 'siteref', 'ageRange', 'schoolName', 'coursePrepaidID', 'role'
]
if me.isTeacher(true)
traitsToReport.push('firstName', 'lastName')
for userTrait in traitsToReport
traits[userTrait] ?= me.get(userTrait) if me.get(userTrait)?
if me.isTeacher(true)
traits.teacher = true
traits.host = document.location.host
console.log 'Would identify', me.id, traits if debugAnalytics
@trackEventInternal('Identify', {id: me.id, traits})
return unless @shouldTrackExternalEvents()
trackPageView: (includeIntegrations = []) ->
name = Backbone.history.getFragment()
url = "/#{name}"
console.log "Would track analytics pageview: #{url}" if debugAnalytics
@trackEventInternal 'Pageview', url: name, href: window.location.href
return unless @shouldTrackExternalEvents()
# Google Analytics
# https://developers.google.com/analytics/devguides/collection/analyticsjs/pages
ga? 'send', 'pageview', url
trackEvent: (action, properties={}, includeIntegrations=[]) =>
console.log 'Tracking external analytics event:', action, properties, includeIntegrations if debugAnalytics
return unless @shouldTrackExternalEvents()
@trackEventInternal action, _.cloneDeep properties
unless action in ['View Load', 'Script Started', 'Script Ended', 'Heard Sprite']
# Google Analytics
# https://developers.google.com/analytics/devguides/collection/analyticsjs/events
gaFieldObject =
hitType: 'event'
eventCategory: properties.category ? 'All'
eventAction: action
gaFieldObject.eventLabel = properties.label if properties.label?
gaFieldObject.eventValue = properties.value if properties.value?
ga? 'send', gaFieldObject
trackEventInternal: (event, properties) =>
return if @shouldBlockAllTracking()
return if @isProduction and me.isAdmin()
return unless @supermodel?
# Skipping heavily logged actions we don't use internally
# TODO: 'Error in ssoConfirmView' event is only for detecting an error in prod. Tracking this only via GA. Remove when not required.
return if event in ['Simulator Result', 'Started Level Load', 'Finished Level Load', 'View Load', 'Error in ssoConfirmView']
# Trimming properties we don't use internally
# TODO: delete properites.level for 'Saw Victory' after 2/8/15. Should be using levelID instead.
if event in ['Clicked Start Level', 'Inventory Play', 'Heard Sprite', 'Started Level', 'Saw Victory', 'Click Play', 'Choose Inventory', 'Homepage Loaded', 'Change Hero']
delete properties.category
delete properties.label
else if event in ['Loaded World Map', 'Started Signup', 'Finished Signup', 'Login', 'Facebook Login', 'Google Login', 'Show subscription modal']
delete properties.category
properties[key] = value for key, value of @explicitTraits if @explicitTraits?
console.log 'Tracking internal analytics event:', event, properties if debugAnalytics
api.analyticsLogEvents.post({event, properties})
trackTiming: (duration, category, variable, label) ->
# https://developers.google.com/analytics/devguides/collection/analyticsjs/user-timings
return console.warn "Duration #{duration} invalid for trackTiming call." unless duration >= 0 and duration < 60 * 60 * 1000
console.log 'Would track timing event:', arguments if debugAnalytics
if @shouldTrackExternalEvents()
ga? 'send', 'timing', category, variable, duration, label
shouldBlockAllTracking: ->
doNotTrack = (navigator?.doNotTrack or window?.doNotTrack) and not (navigator?.doNotTrack is 'unspecified' or window?.doNotTrack is 'unspecified')
return me.isSmokeTestUser() or window.serverSession.amActually or doNotTrack or @cookies.declined
# Should we include application.testing in this?
shouldTrackExternalEvents: ->
return not @shouldBlockAllTracking() and @isProduction and not me.isAdmin()
| 109347 | {me} = require 'core/auth'
SuperModel = require 'models/SuperModel'
utils = require 'core/utils'
CocoClass = require 'core/CocoClass'
api = require('core/api')
debugAnalytics = false
module.exports = class Tracker extends CocoClass
initialized: false
cookies: {required: false, answered: false, consented: false, declined: false}
constructor: ->
super()
@supermodel = new SuperModel()
@isProduction = document.location.href.search('codecombat.com') isnt -1
finishInitialization: ->
return if @initialized
@initialized = true
@trackReferrers()
@identify() # Needs supermodel to exist first
trackReferrers: ->
elapsed = new Date() - new Date(me.get('dateCreated'))
return unless elapsed < 5 * 60 * 1000
return if me.get('siteref') or me.get('referrer')
changed = false
if siteref = utils.getQueryVariable '_r'
me.set 'siteref', siteref
changed = true
if referrer = document.referrer
me.set 'referrer', referrer
changed = true
me.patch() if changed
identify: (traits={}) ->
# Save explicit traits for internal tracking
@explicitTraits ?= {}
@explicitTraits[key] = value for key, value of traits
traitsToReport = [
'email', 'anonymous', 'dateCreated', 'hourOfCode', 'name', 'referrer', 'testGroupNumber', 'testGroupNumberUS',
'gender', 'lastLevel', 'siteref', 'ageRange', 'schoolName', 'coursePrepaidID', 'role'
]
if me.isTeacher(true)
traitsToReport.push('firstName', '<NAME>')
for userTrait in traitsToReport
traits[userTrait] ?= me.get(userTrait) if me.get(userTrait)?
if me.isTeacher(true)
traits.teacher = true
traits.host = document.location.host
console.log 'Would identify', me.id, traits if debugAnalytics
@trackEventInternal('Identify', {id: me.id, traits})
return unless @shouldTrackExternalEvents()
trackPageView: (includeIntegrations = []) ->
name = Backbone.history.getFragment()
url = "/#{name}"
console.log "Would track analytics pageview: #{url}" if debugAnalytics
@trackEventInternal 'Pageview', url: name, href: window.location.href
return unless @shouldTrackExternalEvents()
# Google Analytics
# https://developers.google.com/analytics/devguides/collection/analyticsjs/pages
ga? 'send', 'pageview', url
trackEvent: (action, properties={}, includeIntegrations=[]) =>
console.log 'Tracking external analytics event:', action, properties, includeIntegrations if debugAnalytics
return unless @shouldTrackExternalEvents()
@trackEventInternal action, _.cloneDeep properties
unless action in ['View Load', 'Script Started', 'Script Ended', 'Heard Sprite']
# Google Analytics
# https://developers.google.com/analytics/devguides/collection/analyticsjs/events
gaFieldObject =
hitType: 'event'
eventCategory: properties.category ? 'All'
eventAction: action
gaFieldObject.eventLabel = properties.label if properties.label?
gaFieldObject.eventValue = properties.value if properties.value?
ga? 'send', gaFieldObject
trackEventInternal: (event, properties) =>
return if @shouldBlockAllTracking()
return if @isProduction and me.isAdmin()
return unless @supermodel?
# Skipping heavily logged actions we don't use internally
# TODO: 'Error in ssoConfirmView' event is only for detecting an error in prod. Tracking this only via GA. Remove when not required.
return if event in ['Simulator Result', 'Started Level Load', 'Finished Level Load', 'View Load', 'Error in ssoConfirmView']
# Trimming properties we don't use internally
# TODO: delete properites.level for 'Saw Victory' after 2/8/15. Should be using levelID instead.
if event in ['Clicked Start Level', 'Inventory Play', 'Heard Sprite', 'Started Level', 'Saw Victory', 'Click Play', 'Choose Inventory', 'Homepage Loaded', 'Change Hero']
delete properties.category
delete properties.label
else if event in ['Loaded World Map', 'Started Signup', 'Finished Signup', 'Login', 'Facebook Login', 'Google Login', 'Show subscription modal']
delete properties.category
properties[key] = value for key, value of @explicitTraits if @explicitTraits?
console.log 'Tracking internal analytics event:', event, properties if debugAnalytics
api.analyticsLogEvents.post({event, properties})
trackTiming: (duration, category, variable, label) ->
# https://developers.google.com/analytics/devguides/collection/analyticsjs/user-timings
return console.warn "Duration #{duration} invalid for trackTiming call." unless duration >= 0 and duration < 60 * 60 * 1000
console.log 'Would track timing event:', arguments if debugAnalytics
if @shouldTrackExternalEvents()
ga? 'send', 'timing', category, variable, duration, label
shouldBlockAllTracking: ->
doNotTrack = (navigator?.doNotTrack or window?.doNotTrack) and not (navigator?.doNotTrack is 'unspecified' or window?.doNotTrack is 'unspecified')
return me.isSmokeTestUser() or window.serverSession.amActually or doNotTrack or @cookies.declined
# Should we include application.testing in this?
shouldTrackExternalEvents: ->
return not @shouldBlockAllTracking() and @isProduction and not me.isAdmin()
| true | {me} = require 'core/auth'
SuperModel = require 'models/SuperModel'
utils = require 'core/utils'
CocoClass = require 'core/CocoClass'
api = require('core/api')
debugAnalytics = false
module.exports = class Tracker extends CocoClass
initialized: false
cookies: {required: false, answered: false, consented: false, declined: false}
constructor: ->
super()
@supermodel = new SuperModel()
@isProduction = document.location.href.search('codecombat.com') isnt -1
finishInitialization: ->
return if @initialized
@initialized = true
@trackReferrers()
@identify() # Needs supermodel to exist first
trackReferrers: ->
elapsed = new Date() - new Date(me.get('dateCreated'))
return unless elapsed < 5 * 60 * 1000
return if me.get('siteref') or me.get('referrer')
changed = false
if siteref = utils.getQueryVariable '_r'
me.set 'siteref', siteref
changed = true
if referrer = document.referrer
me.set 'referrer', referrer
changed = true
me.patch() if changed
identify: (traits={}) ->
# Save explicit traits for internal tracking
@explicitTraits ?= {}
@explicitTraits[key] = value for key, value of traits
traitsToReport = [
'email', 'anonymous', 'dateCreated', 'hourOfCode', 'name', 'referrer', 'testGroupNumber', 'testGroupNumberUS',
'gender', 'lastLevel', 'siteref', 'ageRange', 'schoolName', 'coursePrepaidID', 'role'
]
if me.isTeacher(true)
traitsToReport.push('firstName', 'PI:NAME:<NAME>END_PI')
for userTrait in traitsToReport
traits[userTrait] ?= me.get(userTrait) if me.get(userTrait)?
if me.isTeacher(true)
traits.teacher = true
traits.host = document.location.host
console.log 'Would identify', me.id, traits if debugAnalytics
@trackEventInternal('Identify', {id: me.id, traits})
return unless @shouldTrackExternalEvents()
trackPageView: (includeIntegrations = []) ->
name = Backbone.history.getFragment()
url = "/#{name}"
console.log "Would track analytics pageview: #{url}" if debugAnalytics
@trackEventInternal 'Pageview', url: name, href: window.location.href
return unless @shouldTrackExternalEvents()
# Google Analytics
# https://developers.google.com/analytics/devguides/collection/analyticsjs/pages
ga? 'send', 'pageview', url
trackEvent: (action, properties={}, includeIntegrations=[]) =>
console.log 'Tracking external analytics event:', action, properties, includeIntegrations if debugAnalytics
return unless @shouldTrackExternalEvents()
@trackEventInternal action, _.cloneDeep properties
unless action in ['View Load', 'Script Started', 'Script Ended', 'Heard Sprite']
# Google Analytics
# https://developers.google.com/analytics/devguides/collection/analyticsjs/events
gaFieldObject =
hitType: 'event'
eventCategory: properties.category ? 'All'
eventAction: action
gaFieldObject.eventLabel = properties.label if properties.label?
gaFieldObject.eventValue = properties.value if properties.value?
ga? 'send', gaFieldObject
trackEventInternal: (event, properties) =>
return if @shouldBlockAllTracking()
return if @isProduction and me.isAdmin()
return unless @supermodel?
# Skipping heavily logged actions we don't use internally
# TODO: 'Error in ssoConfirmView' event is only for detecting an error in prod. Tracking this only via GA. Remove when not required.
return if event in ['Simulator Result', 'Started Level Load', 'Finished Level Load', 'View Load', 'Error in ssoConfirmView']
# Trimming properties we don't use internally
# TODO: delete properites.level for 'Saw Victory' after 2/8/15. Should be using levelID instead.
if event in ['Clicked Start Level', 'Inventory Play', 'Heard Sprite', 'Started Level', 'Saw Victory', 'Click Play', 'Choose Inventory', 'Homepage Loaded', 'Change Hero']
delete properties.category
delete properties.label
else if event in ['Loaded World Map', 'Started Signup', 'Finished Signup', 'Login', 'Facebook Login', 'Google Login', 'Show subscription modal']
delete properties.category
properties[key] = value for key, value of @explicitTraits if @explicitTraits?
console.log 'Tracking internal analytics event:', event, properties if debugAnalytics
api.analyticsLogEvents.post({event, properties})
trackTiming: (duration, category, variable, label) ->
# https://developers.google.com/analytics/devguides/collection/analyticsjs/user-timings
return console.warn "Duration #{duration} invalid for trackTiming call." unless duration >= 0 and duration < 60 * 60 * 1000
console.log 'Would track timing event:', arguments if debugAnalytics
if @shouldTrackExternalEvents()
ga? 'send', 'timing', category, variable, duration, label
shouldBlockAllTracking: ->
doNotTrack = (navigator?.doNotTrack or window?.doNotTrack) and not (navigator?.doNotTrack is 'unspecified' or window?.doNotTrack is 'unspecified')
return me.isSmokeTestUser() or window.serverSession.amActually or doNotTrack or @cookies.declined
# Should we include application.testing in this?
shouldTrackExternalEvents: ->
return not @shouldBlockAllTracking() and @isProduction and not me.isAdmin()
|
[
{
"context": "ows.describe('Kckup MQ')\n\nredis_config =\n host: \"127.0.0.1\"\n port: 6379\n db: null\n auth:\n password: nu",
"end": 128,
"score": 0.9997005462646484,
"start": 119,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "0.1\"\n port: 6379\n db: null\n ... | test/001_kckup-mq.coffee | MusicKickup/kckupmq | 1 | ###
###
vows = require('vows')
assert = require('assert')
suite = vows.describe('Kckup MQ')
redis_config =
host: "127.0.0.1"
port: 6379
db: null
auth:
password: null
# Redis config for use inside Heroku nodes
if process.env.REDISTOGO_URL
rtg = require("url").parse(process.env.REDISTOGO_URL)
redis_config.host = rtg.hostname
redis_config.port = rtg.port
redis_config.auth.password = rtg.auth.split(":")[1]
suite.addBatch
'kckupMQ':
topic: -> require("#{__dirname}/../lib/kckupmq")
'should be object': (topic) ->
assert.isObject topic
'should have method instance': (topic) ->
assert.isFunction topic.instance
'should have method KckupMQ': (topic) ->
assert.isFunction topic.KckupMQ
'should have method RedisMQ': (topic) ->
assert.isFunction topic.RedisMQ
'should have method RabbitMQ': (topic) ->
assert.isFunction topic.RabbitMQ
suite.addBatch
'kckupMQ.KckupMQ':
topic: ->
@instance = new (require("#{__dirname}/../lib/kckupmq").KckupMQ)()
require("#{__dirname}/../lib/kckupmq").KckupMQ
'should be function': (topic) ->
assert.isFunction topic
'should be instance of EventEmitter': (topic) ->
assert.instanceOf @instance, require("events").EventEmitter
'instance should have method initialize': (topic) ->
assert.isFunction @instance.initialize
'instance should have method subscribe': (topic) ->
assert.isFunction @instance.subscribe
'instance should have method unsubscribe': (topic) ->
assert.isFunction @instance.unsubscribe
'instance should have method publish': (topic) ->
assert.isFunction @instance.publish
'instance should have method getSize': (topic) ->
assert.isFunction @instance.getSize
'instance should have method clearTopicQueue': (topic) ->
assert.isFunction @instance.clearTopicQueue
'instance should have method getTopics': (topic) ->
assert.isFunction @instance.getTopics
'instance should have method disconnect': (topic) ->
assert.isFunction @instance.disconnect
'instance should throw error on unimplemented methods': (topic) ->
assert.throws =>
@instance.disconnect()
, Error
'instance constructor should pass extra arguments to initialization method': (topic) ->
klass = require("#{__dirname}/../lib/kckupmq").KckupMQ
assert.doesNotThrow ->
new klass({}, 'arg1', 'arg2')
, Error
suite.addBatch
'kckupMQ.RedisMQ':
topic: ->
require("#{__dirname}/../lib/kckupmq").RedisMQ
'should be function': (topic) ->
assert.isFunction topic
'instance should extend KckupMQ': (topic) ->
assert.instanceOf (new topic()), require("#{__dirname}/../lib/kckupmq").KckupMQ
'instance should generate clientId if not given': (topic) ->
instance = new topic()
assert.isNotNull instance.clientId
assert.lengthOf instance.clientId, 36
'instance should use given clientId': (topic) ->
instance = new topic({}, 'test-client-id')
assert.isNotNull instance.clientId
assert.equal instance.clientId, 'test-client-id'
suite.addBatch
'kckupMQ.RedisMQ instance':
topic: ->
@test_queue_name = 'test-queue'
@config = redis_config
@instance = new (require("#{__dirname}/../lib/kckupmq").RedisMQ)(@config)
@instance
'should subscribe to queue':
topic: (topic) ->
topic.subscribe @test_queue_name, @callback
return
'and get list of current subscriptions': (topics) ->
assert.isNotNull topics
assert.isArray topics
'should unsubscribe from queue':
topic: (topic) ->
topic.unsubscribe @test_queue_name, @callback
return
'and get list of remaining subscriptions': (topics) ->
assert.isNotNull topics
assert.isArray topics
'should publish to queue':
topic: (topic) ->
topic.publish @test_queue_name, {hello: 'world'}, @callback
return
'with proper id': (id) ->
assert.isNotNull id
assert.lengthOf id, 36
'and report queue size':
topic: ->
@instance.getSize @test_queue_name, @callback
return
'with correct size': (size) ->
assert.isNotNull size
assert.isNumber size
'should catch published message':
topic: ->
@instance.subscribe @test_queue_name, () =>
@instance.on @test_queue_name, @callback
@instance.publish @test_queue_name, {hello: 'world'}
return
'with correct id and data': (id, data) ->
assert.isNotNull id
assert.isNotNull data
'should be able to clear topic queue':
topic: ->
@instance.clearTopicQueue @test_queue_name, =>
@instance.getSize @test_queue_name, @callback
return
'with correct size': (size) ->
assert.isNotNull size
assert.isNumber size
assert.equal size, 0
'should disconnect properly':
topic: ->
instance = new (require("#{__dirname}/../lib/kckupmq").RedisMQ)(@config)
instance
'without error': (topic) ->
assert.isFalse topic.pub.connected
assert.isFalse topic.sub.connected
suite.addBatch
'kckupMQ.RabbitMQ':
topic: ->
require("#{__dirname}/../lib/kckupmq").RabbitMQ
'should be function': (topic) ->
assert.isFunction topic
'instance should extend KckupMQ': (topic) ->
assert.instanceOf (new topic()), require("#{__dirname}/../lib/kckupmq").KckupMQ
suite.addBatch
'kckupMQ.instance':
topic: ->
@config =
host: "localhost"
require("#{__dirname}/../lib/kckupmq")
'should return null with unknown type': (topic) ->
assert.isNull topic.instance('unknown')
'should return RedisMQ instance with type "redis"': (topic) ->
assert.instanceOf topic.instance('redis'), require("#{__dirname}/../lib/kckupmq").RedisMQ
'should return RabbitMQ instance with type "rabbit"': (topic) ->
assert.instanceOf topic.instance('rabbit'), require("#{__dirname}/../lib/kckupmq").RabbitMQ
suite.export module
| 85424 | ###
###
vows = require('vows')
assert = require('assert')
suite = vows.describe('Kckup MQ')
redis_config =
host: "127.0.0.1"
port: 6379
db: null
auth:
password: <PASSWORD>
# Redis config for use inside Heroku nodes
if process.env.REDISTOGO_URL
rtg = require("url").parse(process.env.REDISTOGO_URL)
redis_config.host = rtg.hostname
redis_config.port = rtg.port
redis_config.auth.password = rtg.auth.split(":")[1]
suite.addBatch
'kckupMQ':
topic: -> require("#{__dirname}/../lib/kckupmq")
'should be object': (topic) ->
assert.isObject topic
'should have method instance': (topic) ->
assert.isFunction topic.instance
'should have method KckupMQ': (topic) ->
assert.isFunction topic.KckupMQ
'should have method RedisMQ': (topic) ->
assert.isFunction topic.RedisMQ
'should have method RabbitMQ': (topic) ->
assert.isFunction topic.RabbitMQ
suite.addBatch
'kckupMQ.KckupMQ':
topic: ->
@instance = new (require("#{__dirname}/../lib/kckupmq").KckupMQ)()
require("#{__dirname}/../lib/kckupmq").KckupMQ
'should be function': (topic) ->
assert.isFunction topic
'should be instance of EventEmitter': (topic) ->
assert.instanceOf @instance, require("events").EventEmitter
'instance should have method initialize': (topic) ->
assert.isFunction @instance.initialize
'instance should have method subscribe': (topic) ->
assert.isFunction @instance.subscribe
'instance should have method unsubscribe': (topic) ->
assert.isFunction @instance.unsubscribe
'instance should have method publish': (topic) ->
assert.isFunction @instance.publish
'instance should have method getSize': (topic) ->
assert.isFunction @instance.getSize
'instance should have method clearTopicQueue': (topic) ->
assert.isFunction @instance.clearTopicQueue
'instance should have method getTopics': (topic) ->
assert.isFunction @instance.getTopics
'instance should have method disconnect': (topic) ->
assert.isFunction @instance.disconnect
'instance should throw error on unimplemented methods': (topic) ->
assert.throws =>
@instance.disconnect()
, Error
'instance constructor should pass extra arguments to initialization method': (topic) ->
klass = require("#{__dirname}/../lib/kckupmq").KckupMQ
assert.doesNotThrow ->
new klass({}, 'arg1', 'arg2')
, Error
suite.addBatch
'kckupMQ.RedisMQ':
topic: ->
require("#{__dirname}/../lib/kckupmq").RedisMQ
'should be function': (topic) ->
assert.isFunction topic
'instance should extend KckupMQ': (topic) ->
assert.instanceOf (new topic()), require("#{__dirname}/../lib/kckupmq").KckupMQ
'instance should generate clientId if not given': (topic) ->
instance = new topic()
assert.isNotNull instance.clientId
assert.lengthOf instance.clientId, 36
'instance should use given clientId': (topic) ->
instance = new topic({}, 'test-client-id')
assert.isNotNull instance.clientId
assert.equal instance.clientId, 'test-client-id'
suite.addBatch
'kckupMQ.RedisMQ instance':
topic: ->
@test_queue_name = 'test-queue'
@config = redis_config
@instance = new (require("#{__dirname}/../lib/kckupmq").RedisMQ)(@config)
@instance
'should subscribe to queue':
topic: (topic) ->
topic.subscribe @test_queue_name, @callback
return
'and get list of current subscriptions': (topics) ->
assert.isNotNull topics
assert.isArray topics
'should unsubscribe from queue':
topic: (topic) ->
topic.unsubscribe @test_queue_name, @callback
return
'and get list of remaining subscriptions': (topics) ->
assert.isNotNull topics
assert.isArray topics
'should publish to queue':
topic: (topic) ->
topic.publish @test_queue_name, {hello: 'world'}, @callback
return
'with proper id': (id) ->
assert.isNotNull id
assert.lengthOf id, 36
'and report queue size':
topic: ->
@instance.getSize @test_queue_name, @callback
return
'with correct size': (size) ->
assert.isNotNull size
assert.isNumber size
'should catch published message':
topic: ->
@instance.subscribe @test_queue_name, () =>
@instance.on @test_queue_name, @callback
@instance.publish @test_queue_name, {hello: 'world'}
return
'with correct id and data': (id, data) ->
assert.isNotNull id
assert.isNotNull data
'should be able to clear topic queue':
topic: ->
@instance.clearTopicQueue @test_queue_name, =>
@instance.getSize @test_queue_name, @callback
return
'with correct size': (size) ->
assert.isNotNull size
assert.isNumber size
assert.equal size, 0
'should disconnect properly':
topic: ->
instance = new (require("#{__dirname}/../lib/kckupmq").RedisMQ)(@config)
instance
'without error': (topic) ->
assert.isFalse topic.pub.connected
assert.isFalse topic.sub.connected
suite.addBatch
'kckupMQ.RabbitMQ':
topic: ->
require("#{__dirname}/../lib/kckupmq").RabbitMQ
'should be function': (topic) ->
assert.isFunction topic
'instance should extend KckupMQ': (topic) ->
assert.instanceOf (new topic()), require("#{__dirname}/../lib/kckupmq").KckupMQ
suite.addBatch
'kckupMQ.instance':
topic: ->
@config =
host: "localhost"
require("#{__dirname}/../lib/kckupmq")
'should return null with unknown type': (topic) ->
assert.isNull topic.instance('unknown')
'should return RedisMQ instance with type "redis"': (topic) ->
assert.instanceOf topic.instance('redis'), require("#{__dirname}/../lib/kckupmq").RedisMQ
'should return RabbitMQ instance with type "rabbit"': (topic) ->
assert.instanceOf topic.instance('rabbit'), require("#{__dirname}/../lib/kckupmq").RabbitMQ
suite.export module
| true | ###
###
vows = require('vows')
assert = require('assert')
suite = vows.describe('Kckup MQ')
redis_config =
host: "127.0.0.1"
port: 6379
db: null
auth:
password: PI:PASSWORD:<PASSWORD>END_PI
# Redis config for use inside Heroku nodes
if process.env.REDISTOGO_URL
rtg = require("url").parse(process.env.REDISTOGO_URL)
redis_config.host = rtg.hostname
redis_config.port = rtg.port
redis_config.auth.password = rtg.auth.split(":")[1]
suite.addBatch
'kckupMQ':
topic: -> require("#{__dirname}/../lib/kckupmq")
'should be object': (topic) ->
assert.isObject topic
'should have method instance': (topic) ->
assert.isFunction topic.instance
'should have method KckupMQ': (topic) ->
assert.isFunction topic.KckupMQ
'should have method RedisMQ': (topic) ->
assert.isFunction topic.RedisMQ
'should have method RabbitMQ': (topic) ->
assert.isFunction topic.RabbitMQ
suite.addBatch
'kckupMQ.KckupMQ':
topic: ->
@instance = new (require("#{__dirname}/../lib/kckupmq").KckupMQ)()
require("#{__dirname}/../lib/kckupmq").KckupMQ
'should be function': (topic) ->
assert.isFunction topic
'should be instance of EventEmitter': (topic) ->
assert.instanceOf @instance, require("events").EventEmitter
'instance should have method initialize': (topic) ->
assert.isFunction @instance.initialize
'instance should have method subscribe': (topic) ->
assert.isFunction @instance.subscribe
'instance should have method unsubscribe': (topic) ->
assert.isFunction @instance.unsubscribe
'instance should have method publish': (topic) ->
assert.isFunction @instance.publish
'instance should have method getSize': (topic) ->
assert.isFunction @instance.getSize
'instance should have method clearTopicQueue': (topic) ->
assert.isFunction @instance.clearTopicQueue
'instance should have method getTopics': (topic) ->
assert.isFunction @instance.getTopics
'instance should have method disconnect': (topic) ->
assert.isFunction @instance.disconnect
'instance should throw error on unimplemented methods': (topic) ->
assert.throws =>
@instance.disconnect()
, Error
'instance constructor should pass extra arguments to initialization method': (topic) ->
klass = require("#{__dirname}/../lib/kckupmq").KckupMQ
assert.doesNotThrow ->
new klass({}, 'arg1', 'arg2')
, Error
suite.addBatch
'kckupMQ.RedisMQ':
topic: ->
require("#{__dirname}/../lib/kckupmq").RedisMQ
'should be function': (topic) ->
assert.isFunction topic
'instance should extend KckupMQ': (topic) ->
assert.instanceOf (new topic()), require("#{__dirname}/../lib/kckupmq").KckupMQ
'instance should generate clientId if not given': (topic) ->
instance = new topic()
assert.isNotNull instance.clientId
assert.lengthOf instance.clientId, 36
'instance should use given clientId': (topic) ->
instance = new topic({}, 'test-client-id')
assert.isNotNull instance.clientId
assert.equal instance.clientId, 'test-client-id'
suite.addBatch
'kckupMQ.RedisMQ instance':
topic: ->
@test_queue_name = 'test-queue'
@config = redis_config
@instance = new (require("#{__dirname}/../lib/kckupmq").RedisMQ)(@config)
@instance
'should subscribe to queue':
topic: (topic) ->
topic.subscribe @test_queue_name, @callback
return
'and get list of current subscriptions': (topics) ->
assert.isNotNull topics
assert.isArray topics
'should unsubscribe from queue':
topic: (topic) ->
topic.unsubscribe @test_queue_name, @callback
return
'and get list of remaining subscriptions': (topics) ->
assert.isNotNull topics
assert.isArray topics
'should publish to queue':
topic: (topic) ->
topic.publish @test_queue_name, {hello: 'world'}, @callback
return
'with proper id': (id) ->
assert.isNotNull id
assert.lengthOf id, 36
'and report queue size':
topic: ->
@instance.getSize @test_queue_name, @callback
return
'with correct size': (size) ->
assert.isNotNull size
assert.isNumber size
'should catch published message':
topic: ->
@instance.subscribe @test_queue_name, () =>
@instance.on @test_queue_name, @callback
@instance.publish @test_queue_name, {hello: 'world'}
return
'with correct id and data': (id, data) ->
assert.isNotNull id
assert.isNotNull data
'should be able to clear topic queue':
topic: ->
@instance.clearTopicQueue @test_queue_name, =>
@instance.getSize @test_queue_name, @callback
return
'with correct size': (size) ->
assert.isNotNull size
assert.isNumber size
assert.equal size, 0
'should disconnect properly':
topic: ->
instance = new (require("#{__dirname}/../lib/kckupmq").RedisMQ)(@config)
instance
'without error': (topic) ->
assert.isFalse topic.pub.connected
assert.isFalse topic.sub.connected
suite.addBatch
'kckupMQ.RabbitMQ':
topic: ->
require("#{__dirname}/../lib/kckupmq").RabbitMQ
'should be function': (topic) ->
assert.isFunction topic
'instance should extend KckupMQ': (topic) ->
assert.instanceOf (new topic()), require("#{__dirname}/../lib/kckupmq").KckupMQ
suite.addBatch
'kckupMQ.instance':
topic: ->
@config =
host: "localhost"
require("#{__dirname}/../lib/kckupmq")
'should return null with unknown type': (topic) ->
assert.isNull topic.instance('unknown')
'should return RedisMQ instance with type "redis"': (topic) ->
assert.instanceOf topic.instance('redis'), require("#{__dirname}/../lib/kckupmq").RedisMQ
'should return RabbitMQ instance with type "rabbit"': (topic) ->
assert.instanceOf topic.instance('rabbit'), require("#{__dirname}/../lib/kckupmq").RabbitMQ
suite.export module
|
[
{
"context": "ode = \"SE\"\ndico =\n cardinals:\n written: [{m: \"ett\", n: \"en\"}, \"två\", \"tre\", \"fyra\", \"fem\", \"sex\"\n ",
"end": 176,
"score": 0.5294227600097656,
"start": 173,
"tag": "NAME",
"value": "ett"
},
{
"context": "\n cardinals:\n written: [{m: \"ett\",... | lang/written.se.coffee | stephenhutchings/underscore.strings | 11 | # Swedish language support for Written
# For compatibility, Swedish uses "m" for co(m)mon, and "n" for (n)euter genders.
code = "SE"
dico =
cardinals:
written: [{m: "ett", n: "en"}, "två", "tre", "fyra", "fem", "sex"
"sju", "åtta", "nio", "tio", "elva", "tolv"]
if typeof define is "function" and define.amd
define [], dico
else if typeof exports is "object"
module.exports = dico
else if typeof written is "object"
written.setLanguage(dico, code)
| 219453 | # Swedish language support for Written
# For compatibility, Swedish uses "m" for co(m)mon, and "n" for (n)euter genders.
code = "SE"
dico =
cardinals:
written: [{m: "<NAME>", n: "en"}, "<NAME>", "<NAME>", "<NAME>", "fem", "sex"
"sju", "<NAME>", "nio", "tio", "elva", "tolv"]
if typeof define is "function" and define.amd
define [], dico
else if typeof exports is "object"
module.exports = dico
else if typeof written is "object"
written.setLanguage(dico, code)
| true | # Swedish language support for Written
# For compatibility, Swedish uses "m" for co(m)mon, and "n" for (n)euter genders.
code = "SE"
dico =
cardinals:
written: [{m: "PI:NAME:<NAME>END_PI", n: "en"}, "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "fem", "sex"
"sju", "PI:NAME:<NAME>END_PI", "nio", "tio", "elva", "tolv"]
if typeof define is "function" and define.amd
define [], dico
else if typeof exports is "object"
module.exports = dico
else if typeof written is "object"
written.setLanguage(dico, code)
|
[
{
"context": "tHeight\n render: ->\n @data = [\n author: \"Toyama Satoshi\"\n comment: \"Year!\"\n ,\n author: \"Tam",
"end": 579,
"score": 0.9998156428337097,
"start": 565,
"tag": "NAME",
"value": "Toyama Satoshi"
},
{
"context": "shi\"\n comment: \"Year!... | src/components/main/commentBox/index.coffee | toyamarinyon/arda-testproject | 0 | template = require('./template').locals
CommentList: require './components/CommentList'
CommentForm: require './components/CommentForm'
module.exports = React.createClass
mixins: [Arda.mixin]
componentDidMount: ->
dom = @getDOMNode()
offsetTop = dom.getBoundingClientRect().top
stretchHeight = window.innerHeight - offsetTop
componentHeight = dom.clientHeight
if componentHeight < stretchHeight
componentHeight = stretchHeight
@dispatch "context:set-contents-height", componentHeight
render: ->
@data = [
author: "Toyama Satoshi"
comment: "Year!"
,
author: "Tamurine"
comment: "Ops!"
]
template @
| 154207 | template = require('./template').locals
CommentList: require './components/CommentList'
CommentForm: require './components/CommentForm'
module.exports = React.createClass
mixins: [Arda.mixin]
componentDidMount: ->
dom = @getDOMNode()
offsetTop = dom.getBoundingClientRect().top
stretchHeight = window.innerHeight - offsetTop
componentHeight = dom.clientHeight
if componentHeight < stretchHeight
componentHeight = stretchHeight
@dispatch "context:set-contents-height", componentHeight
render: ->
@data = [
author: "<NAME>"
comment: "Year!"
,
author: "<NAME>"
comment: "Ops!"
]
template @
| true | template = require('./template').locals
CommentList: require './components/CommentList'
CommentForm: require './components/CommentForm'
module.exports = React.createClass
mixins: [Arda.mixin]
componentDidMount: ->
dom = @getDOMNode()
offsetTop = dom.getBoundingClientRect().top
stretchHeight = window.innerHeight - offsetTop
componentHeight = dom.clientHeight
if componentHeight < stretchHeight
componentHeight = stretchHeight
@dispatch "context:set-contents-height", componentHeight
render: ->
@data = [
author: "PI:NAME:<NAME>END_PI"
comment: "Year!"
,
author: "PI:NAME:<NAME>END_PI"
comment: "Ops!"
]
template @
|
[
{
"context": "###\n backbone-http.js 0.5.5\n Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-http\n Lic",
"end": 58,
"score": 0.9987209439277649,
"start": 50,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": " Copyright (c) 2013 Vidigami - https://github.com/v... | src/cursor.coffee | michaelBenin/backbone-http | 1 | ###
backbone-http.js 0.5.5
Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-http
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, Moment.js, Inflection.js, BackboneORM, and Superagent.
###
_ = require 'underscore'
Cursor = require('backbone-orm').Cursor
JSONUtils = require('backbone-orm').JSONUtils
Utils = require('backbone-orm').Utils
module.exports = class HTTPCursor extends Cursor
##############################################
# Execution of the Query
##############################################
toJSON: (callback) ->
return callback(null, if @hasCursorQuery('$one') then null else []) if @hasCursorQuery('$zero')
req = @request.get(@url)
.query(query = JSONUtils.toQuery(_.extend(_.clone(@_find), @_cursor)))
.type('json')
@sync.beforeSend(req, null)
req.end (err, res) =>
return callback(err) if err
return callback(null, null) if query.$one and (res.status is 404) # not found
return callback(new Error "Ajax failed with status #{res.status} with: #{Utils.inspect(res.body)}") unless res.ok
result = JSONUtils.parse(res.body)
callback(null, if (@hasCursorQuery('$count') or @hasCursorQuery('$exists')) then result.result else result)
| 142089 | ###
backbone-http.js 0.5.5
Copyright (c) 2013 <NAME> - https://github.com/vidigami/backbone-http
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, Moment.js, Inflection.js, BackboneORM, and Superagent.
###
_ = require 'underscore'
Cursor = require('backbone-orm').Cursor
JSONUtils = require('backbone-orm').JSONUtils
Utils = require('backbone-orm').Utils
module.exports = class HTTPCursor extends Cursor
##############################################
# Execution of the Query
##############################################
toJSON: (callback) ->
return callback(null, if @hasCursorQuery('$one') then null else []) if @hasCursorQuery('$zero')
req = @request.get(@url)
.query(query = JSONUtils.toQuery(_.extend(_.clone(@_find), @_cursor)))
.type('json')
@sync.beforeSend(req, null)
req.end (err, res) =>
return callback(err) if err
return callback(null, null) if query.$one and (res.status is 404) # not found
return callback(new Error "Ajax failed with status #{res.status} with: #{Utils.inspect(res.body)}") unless res.ok
result = JSONUtils.parse(res.body)
callback(null, if (@hasCursorQuery('$count') or @hasCursorQuery('$exists')) then result.result else result)
| true | ###
backbone-http.js 0.5.5
Copyright (c) 2013 PI:NAME:<NAME>END_PI - https://github.com/vidigami/backbone-http
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, Moment.js, Inflection.js, BackboneORM, and Superagent.
###
_ = require 'underscore'
Cursor = require('backbone-orm').Cursor
JSONUtils = require('backbone-orm').JSONUtils
Utils = require('backbone-orm').Utils
module.exports = class HTTPCursor extends Cursor
##############################################
# Execution of the Query
##############################################
toJSON: (callback) ->
return callback(null, if @hasCursorQuery('$one') then null else []) if @hasCursorQuery('$zero')
req = @request.get(@url)
.query(query = JSONUtils.toQuery(_.extend(_.clone(@_find), @_cursor)))
.type('json')
@sync.beforeSend(req, null)
req.end (err, res) =>
return callback(err) if err
return callback(null, null) if query.$one and (res.status is 404) # not found
return callback(new Error "Ajax failed with status #{res.status} with: #{Utils.inspect(res.body)}") unless res.ok
result = JSONUtils.parse(res.body)
callback(null, if (@hasCursorQuery('$count') or @hasCursorQuery('$exists')) then result.result else result)
|
[
{
"context": "= $rootScope.$new()\n $scope.session = userid: 'egon@columbia.edu'\n\n disableUserPromise = {then: sandbox.stub()}",
"end": 1016,
"score": 0.9999128580093384,
"start": 999,
"tag": "EMAIL",
"value": "egon@columbia.edu"
},
{
"context": "ristine: sandbox.spy()\n ... | tests/js/controllers/account-management-test.coffee | RichardLitt/h | 0 | assert = chai.assert
sinon.assert.expose assert, prefix: null
sandbox = sinon.sandbox.create()
describe 'h.controllers.AccountManagement', ->
$scope = null
fakeFlash = null
fakeProfile = null
fakeIdentity = null
fakeFormHelpers = null
editProfilePromise = null
disableUserPromise = null
createController = null
beforeEach module ($provide, $filterProvider) ->
fakeProfile = {}
fakeFlash = sandbox.spy()
fakeIdentity =
logout: sandbox.spy()
fakeFormHelpers =
applyValidationErrors: sandbox.spy()
$filterProvider.register 'persona', ->
sandbox.stub().returns('STUBBED_PERSONA_FILTER')
$provide.value 'profile', fakeProfile
$provide.value 'flash', fakeFlash
$provide.value 'identity', fakeIdentity
$provide.value 'formHelpers', fakeFormHelpers
return
beforeEach module('h.controllers.AccountManagement')
beforeEach inject ($rootScope, $q, $controller) ->
$scope = $rootScope.$new()
$scope.session = userid: 'egon@columbia.edu'
disableUserPromise = {then: sandbox.stub()}
editProfilePromise = {then: sandbox.stub()}
fakeProfile.edit_profile = sandbox.stub().returns($promise: editProfilePromise)
fakeProfile.disable_user = sandbox.stub().returns($promise: disableUserPromise)
createController = ->
$controller('AccountManagement', {$scope: $scope})
it 'hides the sheet by default', ->
controller = createController()
assert.isFalse($scope.sheet)
describe 'event subscriptions', ->
it 'should show the sheet on "nav:account" event', ->
controller = createController()
$scope.$emit('nav:account')
assert.isTrue($scope.sheet)
it 'should hide the sheet on "logout" event', ->
controller = createController()
$scope.$emit('logout')
assert.isFalse($scope.sheet)
describe '.submit', ->
createFakeForm = (overrides={}) ->
defaults =
$name: 'changePasswordForm'
$valid: true
$setPristine: sandbox.spy()
pwd: $modelValue: 'gozer'
password: $modelValue: 'paranormal'
angular.extend(defaults, overrides)
it 'updates the password on the backend', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
assert.calledWith(fakeProfile.edit_profile, {
username: 'STUBBED_PERSONA_FILTER'
pwd: 'gozer'
password: 'paranormal'
})
it 'clears the fields', ->
controller = createController()
$scope.changePassword = {pwd: 'password', password: 'password'}
fakeForm = createFakeForm()
# Resolve the request.
editProfilePromise.then.yields(flash: {
success: ['Your profile has been updated.']
})
$scope.submit(fakeForm)
assert.deepEqual($scope.changePassword, {})
it 'updates the error fields on bad response', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 400
data:
errors:
pwd: 'this is wrong'
assert.calledWith fakeFormHelpers.applyValidationErrors, fakeForm,
pwd: 'this is wrong'
it 'displays a flash message on success', ->
fakeForm = createFakeForm()
# Resolve the request.
editProfilePromise.then.yields(flash: {
success: ['Your profile has been updated.']
})
controller = createController()
$scope.submit(fakeForm)
assert.calledWith(fakeFlash, 'success', [
'Your profile has been updated.'
])
it 'displays a flash message if a server error occurs', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 500
data:
flash:
error: ['Something bad happened']
assert.calledWith(fakeFlash, 'error', ['Something bad happened'])
it 'displays a fallback flash message if none are present', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 500
data: {}
assert.calledWith(fakeFlash, 'error', 'Sorry, we were unable to perform your request')
describe '.delete', ->
createFakeForm = (overrides={}) ->
defaults =
$name: 'deleteAccountForm'
$valid: true
$setPristine: sandbox.spy()
pwd: $modelValue: 'paranormal'
angular.extend(defaults, overrides)
it 'disables the user account', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
assert.calledWith fakeProfile.disable_user,
username: 'STUBBED_PERSONA_FILTER'
pwd: 'paranormal'
it 'logs the user out of the application', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 0,
status: 200
assert.calledWith(fakeIdentity.logout)
it 'clears the password field', ->
controller = createController()
fakeForm = createFakeForm()
$scope.deleteAccount = {pwd: ''}
$scope.delete(fakeForm)
disableUserPromise.then.callArg 0,
status: 200
assert.deepEqual($scope.deleteAccount, {})
it 'updates the error fields on bad response', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 400
data:
errors:
pwd: 'this is wrong'
assert.calledWith fakeFormHelpers.applyValidationErrors, fakeForm,
pwd: 'this is wrong'
it 'displays a flash message if a server error occurs', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 500
data:
flash:
error: ['Something bad happened']
assert.calledWith(fakeFlash, 'error', ['Something bad happened'])
it 'displays a fallback flash message if none are present', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 500
data: {}
assert.calledWith(fakeFlash, 'error', 'Sorry, we were unable to perform your request')
| 223112 | assert = chai.assert
sinon.assert.expose assert, prefix: null
sandbox = sinon.sandbox.create()
describe 'h.controllers.AccountManagement', ->
$scope = null
fakeFlash = null
fakeProfile = null
fakeIdentity = null
fakeFormHelpers = null
editProfilePromise = null
disableUserPromise = null
createController = null
beforeEach module ($provide, $filterProvider) ->
fakeProfile = {}
fakeFlash = sandbox.spy()
fakeIdentity =
logout: sandbox.spy()
fakeFormHelpers =
applyValidationErrors: sandbox.spy()
$filterProvider.register 'persona', ->
sandbox.stub().returns('STUBBED_PERSONA_FILTER')
$provide.value 'profile', fakeProfile
$provide.value 'flash', fakeFlash
$provide.value 'identity', fakeIdentity
$provide.value 'formHelpers', fakeFormHelpers
return
beforeEach module('h.controllers.AccountManagement')
beforeEach inject ($rootScope, $q, $controller) ->
$scope = $rootScope.$new()
$scope.session = userid: '<EMAIL>'
disableUserPromise = {then: sandbox.stub()}
editProfilePromise = {then: sandbox.stub()}
fakeProfile.edit_profile = sandbox.stub().returns($promise: editProfilePromise)
fakeProfile.disable_user = sandbox.stub().returns($promise: disableUserPromise)
createController = ->
$controller('AccountManagement', {$scope: $scope})
it 'hides the sheet by default', ->
controller = createController()
assert.isFalse($scope.sheet)
describe 'event subscriptions', ->
it 'should show the sheet on "nav:account" event', ->
controller = createController()
$scope.$emit('nav:account')
assert.isTrue($scope.sheet)
it 'should hide the sheet on "logout" event', ->
controller = createController()
$scope.$emit('logout')
assert.isFalse($scope.sheet)
describe '.submit', ->
createFakeForm = (overrides={}) ->
defaults =
$name: 'changePasswordForm'
$valid: true
$setPristine: sandbox.spy()
pwd: $modelValue: '<PASSWORD>'
password: $modelValue: '<PASSWORD>'
angular.extend(defaults, overrides)
it 'updates the password on the backend', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
assert.calledWith(fakeProfile.edit_profile, {
username: 'STUBBED_PERSONA_FILTER'
pwd: '<PASSWORD>'
password: '<PASSWORD>'
})
it 'clears the fields', ->
controller = createController()
$scope.changePassword = {pwd: '<PASSWORD>', password: '<PASSWORD>'}
fakeForm = createFakeForm()
# Resolve the request.
editProfilePromise.then.yields(flash: {
success: ['Your profile has been updated.']
})
$scope.submit(fakeForm)
assert.deepEqual($scope.changePassword, {})
it 'updates the error fields on bad response', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 400
data:
errors:
pwd: '<PASSWORD>'
assert.calledWith fakeFormHelpers.applyValidationErrors, fakeForm,
pwd: '<PASSWORD>'
it 'displays a flash message on success', ->
fakeForm = createFakeForm()
# Resolve the request.
editProfilePromise.then.yields(flash: {
success: ['Your profile has been updated.']
})
controller = createController()
$scope.submit(fakeForm)
assert.calledWith(fakeFlash, 'success', [
'Your profile has been updated.'
])
it 'displays a flash message if a server error occurs', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 500
data:
flash:
error: ['Something bad happened']
assert.calledWith(fakeFlash, 'error', ['Something bad happened'])
it 'displays a fallback flash message if none are present', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 500
data: {}
assert.calledWith(fakeFlash, 'error', 'Sorry, we were unable to perform your request')
describe '.delete', ->
createFakeForm = (overrides={}) ->
defaults =
$name: 'deleteAccountForm'
$valid: true
$setPristine: sandbox.spy()
pwd: $modelValue: '<PASSWORD>'
angular.extend(defaults, overrides)
it 'disables the user account', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
assert.calledWith fakeProfile.disable_user,
username: 'STUBBED_PERSONA_FILTER'
pwd: '<PASSWORD>'
it 'logs the user out of the application', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 0,
status: 200
assert.calledWith(fakeIdentity.logout)
it 'clears the password field', ->
controller = createController()
fakeForm = createFakeForm()
$scope.deleteAccount = {pwd: ''}
$scope.delete(fakeForm)
disableUserPromise.then.callArg 0,
status: 200
assert.deepEqual($scope.deleteAccount, {})
it 'updates the error fields on bad response', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 400
data:
errors:
pwd: '<PASSWORD>'
assert.calledWith fakeFormHelpers.applyValidationErrors, fakeForm,
pwd: '<PASSWORD>'
it 'displays a flash message if a server error occurs', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 500
data:
flash:
error: ['Something bad happened']
assert.calledWith(fakeFlash, 'error', ['Something bad happened'])
it 'displays a fallback flash message if none are present', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 500
data: {}
assert.calledWith(fakeFlash, 'error', 'Sorry, we were unable to perform your request')
| true | assert = chai.assert
sinon.assert.expose assert, prefix: null
sandbox = sinon.sandbox.create()
describe 'h.controllers.AccountManagement', ->
$scope = null
fakeFlash = null
fakeProfile = null
fakeIdentity = null
fakeFormHelpers = null
editProfilePromise = null
disableUserPromise = null
createController = null
beforeEach module ($provide, $filterProvider) ->
fakeProfile = {}
fakeFlash = sandbox.spy()
fakeIdentity =
logout: sandbox.spy()
fakeFormHelpers =
applyValidationErrors: sandbox.spy()
$filterProvider.register 'persona', ->
sandbox.stub().returns('STUBBED_PERSONA_FILTER')
$provide.value 'profile', fakeProfile
$provide.value 'flash', fakeFlash
$provide.value 'identity', fakeIdentity
$provide.value 'formHelpers', fakeFormHelpers
return
beforeEach module('h.controllers.AccountManagement')
beforeEach inject ($rootScope, $q, $controller) ->
$scope = $rootScope.$new()
$scope.session = userid: 'PI:EMAIL:<EMAIL>END_PI'
disableUserPromise = {then: sandbox.stub()}
editProfilePromise = {then: sandbox.stub()}
fakeProfile.edit_profile = sandbox.stub().returns($promise: editProfilePromise)
fakeProfile.disable_user = sandbox.stub().returns($promise: disableUserPromise)
createController = ->
$controller('AccountManagement', {$scope: $scope})
it 'hides the sheet by default', ->
controller = createController()
assert.isFalse($scope.sheet)
describe 'event subscriptions', ->
it 'should show the sheet on "nav:account" event', ->
controller = createController()
$scope.$emit('nav:account')
assert.isTrue($scope.sheet)
it 'should hide the sheet on "logout" event', ->
controller = createController()
$scope.$emit('logout')
assert.isFalse($scope.sheet)
describe '.submit', ->
createFakeForm = (overrides={}) ->
defaults =
$name: 'changePasswordForm'
$valid: true
$setPristine: sandbox.spy()
pwd: $modelValue: 'PI:PASSWORD:<PASSWORD>END_PI'
password: $modelValue: 'PI:PASSWORD:<PASSWORD>END_PI'
angular.extend(defaults, overrides)
it 'updates the password on the backend', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
assert.calledWith(fakeProfile.edit_profile, {
username: 'STUBBED_PERSONA_FILTER'
pwd: 'PI:PASSWORD:<PASSWORD>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
})
it 'clears the fields', ->
controller = createController()
$scope.changePassword = {pwd: 'PI:PASSWORD:<PASSWORD>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
fakeForm = createFakeForm()
# Resolve the request.
editProfilePromise.then.yields(flash: {
success: ['Your profile has been updated.']
})
$scope.submit(fakeForm)
assert.deepEqual($scope.changePassword, {})
it 'updates the error fields on bad response', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 400
data:
errors:
pwd: 'PI:PASSWORD:<PASSWORD>END_PI'
assert.calledWith fakeFormHelpers.applyValidationErrors, fakeForm,
pwd: 'PI:PASSWORD:<PASSWORD>END_PI'
it 'displays a flash message on success', ->
fakeForm = createFakeForm()
# Resolve the request.
editProfilePromise.then.yields(flash: {
success: ['Your profile has been updated.']
})
controller = createController()
$scope.submit(fakeForm)
assert.calledWith(fakeFlash, 'success', [
'Your profile has been updated.'
])
it 'displays a flash message if a server error occurs', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 500
data:
flash:
error: ['Something bad happened']
assert.calledWith(fakeFlash, 'error', ['Something bad happened'])
it 'displays a fallback flash message if none are present', ->
fakeForm = createFakeForm()
controller = createController()
$scope.submit(fakeForm)
# Resolve the request.
editProfilePromise.then.callArg 1,
status: 500
data: {}
assert.calledWith(fakeFlash, 'error', 'Sorry, we were unable to perform your request')
describe '.delete', ->
createFakeForm = (overrides={}) ->
defaults =
$name: 'deleteAccountForm'
$valid: true
$setPristine: sandbox.spy()
pwd: $modelValue: 'PI:PASSWORD:<PASSWORD>END_PI'
angular.extend(defaults, overrides)
it 'disables the user account', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
assert.calledWith fakeProfile.disable_user,
username: 'STUBBED_PERSONA_FILTER'
pwd: 'PI:PASSWORD:<PASSWORD>END_PI'
it 'logs the user out of the application', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 0,
status: 200
assert.calledWith(fakeIdentity.logout)
it 'clears the password field', ->
controller = createController()
fakeForm = createFakeForm()
$scope.deleteAccount = {pwd: ''}
$scope.delete(fakeForm)
disableUserPromise.then.callArg 0,
status: 200
assert.deepEqual($scope.deleteAccount, {})
it 'updates the error fields on bad response', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 400
data:
errors:
pwd: 'PI:PASSWORD:<PASSWORD>END_PI'
assert.calledWith fakeFormHelpers.applyValidationErrors, fakeForm,
pwd: 'PI:PASSWORD:<PASSWORD>END_PI'
it 'displays a flash message if a server error occurs', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 500
data:
flash:
error: ['Something bad happened']
assert.calledWith(fakeFlash, 'error', ['Something bad happened'])
it 'displays a fallback flash message if none are present', ->
fakeForm = createFakeForm()
controller = createController()
$scope.delete(fakeForm)
# Resolve the request.
disableUserPromise.then.callArg 1,
status: 500
data: {}
assert.calledWith(fakeFlash, 'error', 'Sorry, we were unable to perform your request')
|
[
{
"context": "finger tree implementation.\n#\n# Copyright (c) 2011 Olaf Delgado-Friedrichs (odf@github.com)\n# ------------------------------",
"end": 149,
"score": 0.999881386756897,
"start": 126,
"tag": "NAME",
"value": "Olaf Delgado-Friedrichs"
},
{
"context": ".\n#\n# Copyright (... | lib/finger_tree.coffee | odf/pazy.js | 0 | # --------------------------------------------------------------------
# A finger tree implementation.
#
# Copyright (c) 2011 Olaf Delgado-Friedrichs (odf@github.com)
# --------------------------------------------------------------------
if typeof(require) != 'undefined'
{ seq } = require('sequence')
{ bounce, suspend } = require('functional')
else
{ seq, bounce, suspend } = pazy
class Void
class DefaultExtensions
after: (x) -> if x == undefined then @ else new @constructor @data.after x
before: (x) -> if x == undefined then @ else new @constructor @data.before x
concat: (t) -> if not t? then this else new @constructor @data.concat t.data
reverse: -> new @constructor @data.reverse()
@::plus = @::before
class FingerTreeType
constructor: (measure, extensions = DefaultExtensions) ->
@build = -> seq.reduce arguments, empty, (s, a) -> s.plus a
single = (x) -> if x == Empty or x.constructor in internal
x.measure()
else
measure.single(x)
norm = -> seq.reduce arguments, measure.empty, (n, x) ->
if x? then measure.sum n, single x else n
rev = (x) -> if x?.constructor in [Node2, Node3] then x.reverse() else x
# Wrapper for finger tree instances
class Instance extends extensions
constructor: (@data) ->
empty: -> empty
isEmpty: -> @data.isEmpty()
reduceLeft: (z, op) -> @data.reduceLeft z, op
reduceRight: (op, z) -> @data.reduceRight op, z
first: -> @data.first()
last: -> @data.last()
rest: -> new Instance @data.rest()
init: -> new Instance @data.init()
measure: -> @data.measure()
split: (p) ->
if @data != Empty and p norm @data
[l, x, r] = @data.split p, measure.empty
[new Instance(l), x, new Instance(r)]
else
[this, undefined, new Instance(Empty)]
takeUntil: (p) -> @split(p)[0]
dropUntil: (p) ->
[l, x, r] = @split(p)
if x == undefined then r else new Instance r.data.after x
find: (p) -> @split(p)[1]
toSeq: -> @data.reduceRight ((x, s) -> seq.conj x, -> s), null
toString: -> @data.reduceLeft "", (s, x) -> s + ' ' + x
# A node.
class Node2
constructor: (@a, @b) -> @v = norm @a, @b
reduceLeft: (z, op) -> op(op(z, @a), @b)
reduceRight: (op, z) -> op(@a, op(@b, z))
asDigit: -> new Digit2 @a, @b
measure: -> @v
reverse: -> new Node2 rev(@b), rev(@a)
class Node3
constructor: (@a, @b, @c) -> @v = norm @a, @b, @c
reduceLeft: (z, op) -> op(op(op(z, @a), @b), @c)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, z)))
asDigit: -> new Digit3 @a, @b, @c
measure: -> @v
reverse: -> new Node3 rev(@c), rev(@b), rev(@a)
# A digit in a finger tree.
class Digit1
constructor: (@a) ->
reduceLeft: (z, op) -> op(z, @a)
reduceRight: (op, z) -> op(@a, z)
after: (x) -> new Digit2 x, @a
before: (x) -> new Digit2 @a, x
first: -> @a
last: -> @a
rest: -> Empty
init: -> Empty
measure: -> norm @a
split: (p, i) -> [Empty, @a, Empty]
reverse: -> new Digit1 rev(@a)
class Digit2
constructor: (@a, @b) ->
reduceLeft: (z, op) -> op(op(z, @a), @b)
reduceRight: (op, z) -> op(@a, op(@b, z))
after: (x) -> new Digit3 x, @a, @b
before: (x) -> new Digit3 @a, @b, x
first: -> @a
last: -> @b
rest: -> new Digit1 @b
init: -> new Digit1 @a
asNode: -> new Node2 @a, @b
measure: -> norm @a, @b
split: (p, i) ->
if p measure.sum i, norm @a
[Empty, @a, new Digit1(@b)]
else
[new Digit1(@a), @b, Empty]
reverse: -> new Digit2 rev(@b), rev(@a)
class Digit3
constructor: (@a, @b, @c) ->
reduceLeft: (z, op) -> op(op(op(z, @a), @b), @c)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, z)))
after: (x) -> new Digit4 x, @a, @b, @c
before: (x) -> new Digit4 @a, @b, @c, x
first: -> @a
last: -> @c
rest: -> new Digit2 @b, @c
init: -> new Digit2 @a, @b
asNode: -> new Node3 @a, @b, @c
measure: -> norm @a, @b, @c
split: (p, i) ->
i1 = measure.sum i, norm @a
if p i1
[Empty, @a, new Digit2(@b, @c)]
else if p measure.sum i1, norm @b
[new Digit1(@a), @b, new Digit1(@c)]
else
[new Digit2(@a, @b), @c, Empty]
reverse: -> new Digit3 rev(@c), rev(@b), rev(@a)
class Digit4
constructor: (@a, @b, @c, @d) ->
reduceLeft: (z, op) -> op(op(op(op(z, @a), @b), @c), @d)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, op(@d, z))))
first: -> @a
last: -> @d
rest: -> new Digit3 @b, @c, @d
init: -> new Digit3 @a, @b, @c
measure: -> norm @a, @b, @c, @d
split: (p, i) ->
i1 = measure.sum i, norm @a
if p i1
[Empty, @a, new Digit3(@b, @c, @d)]
else
i2 = measure.sum i1, norm @b
if p i2
[new Digit1(@a), @b, new Digit2(@c, @d)]
else if p measure.sum i2, norm @c
[new Digit2(@a, @b), @c, new Digit1(@d)]
else
[new Digit3(@a, @b, @c), @d, Empty]
reverse: -> new Digit4 rev(@d), rev(@c), rev(@b), rev(@a)
# An empty finger tree.
Empty = {
isEmpty: -> true
reduceLeft: (z, op) -> z
reduceRight: (op, z) -> z
after: (a) -> new Single a
before: (a) -> new Single a
first: ->
last: ->
rest: ->
init: ->
concat: (t) -> t
measure: -> norm()
reverse: -> this
}
empty = new Instance Empty
# A finger tree with a single element.
class Single
constructor: (@a) ->
isEmpty: -> false
reduceLeft: (z, op) -> op z, @a
reduceRight: (op, z) -> op @a, z
after: (x) -> new Deep new Digit1(x), (-> Empty), new Digit1(@a)
before: (x) -> new Deep new Digit1(@a), (-> Empty), new Digit1(x)
first: -> @a
last: -> @a
rest: -> Empty
init: -> Empty
concat: (t) -> t.after @a
measure: -> norm @a
split: (p, i) -> [Empty, @a, Empty]
reverse: -> this
# A deep finger tree.
class Deep
reduceLeft = (op) -> (z, x) -> x.reduceLeft(z, op)
reduceRight = (op) -> (x, z) -> x.reduceRight(op, z)
asTree = (s) -> s.reduceLeft Empty, (a, b) -> a.before b
asSeq = (s) -> s.reduceRight ((a, b) -> seq.conj a, -> b), null
constructor: (@l, @m, @r) ->
isEmpty: -> false
measure: -> val = norm(@l, @m(), @r); (@measure = -> val)()
reduceLeft: (z, op0) ->
op1 = reduceLeft op0
op2 = reduceLeft op1
op1(op2(op1(z, @l), @m()), @r)
reduceRight: (op0, z) ->
op1 = reduceRight op0
op2 = reduceRight op1
op1(@l, op2(@m(), op1(@r, z)))
after: (x) ->
if @l.constructor == Digit4
{ a, b, c, d } = @l
l = new Digit2 x, a
new Deep l, suspend(=> @m().after(new Node3(b, c, d))), @r
else
new Deep @l.after(x), @m, @r
before: (x) ->
if @r.constructor == Digit4
{ a, b, c, d } = @r
r = new Digit2(d, x)
new Deep @l, suspend(=> @m().before(new Node3(a, b, c))), r
else
new Deep @l, @m, @r.before(x)
first: -> @l.first()
last: -> @r.last()
deepL = (l, m, r) ->
if l == Empty
if m() == Empty
asTree r
else
new Deep m().first().asDigit(), suspend(=> m().rest()), r
else
new Deep l, m, r
deepR = (l, m, r) ->
if r == Empty
if m() == Empty
asTree l
else
new Deep l, suspend(=> m().init()), m().last().asDigit()
else
new Deep l, m, r
rest: -> deepL @l.rest(), suspend(=> @m()), @r
init: -> deepR @l, suspend(=> @m()), @r.init()
nodes = (n, s) ->
if n == 0
null
else if n == 1 or n < 0
throw new Error "this should not happen"
else if n == 2 or n % 3 == 1
seq.conj new Node2(s.take(2).into([])...), -> nodes n-2, s.drop 2
else
seq.conj new Node3(s.take(3).into([])...), -> nodes n-3, s.drop 3
app3 = (tLeft, list, tRight) ->
if tLeft == Empty
seq.reduce seq.reverse(list), tRight, (t, x) -> t.after x
else if tRight == Empty
seq.reduce list, tLeft, (t, x) -> t.before x
else if tLeft.constructor == Single
app3(Empty, list, tRight).after tLeft.a
else if tRight.constructor == Single
app3(tLeft, list, Empty).before tRight.a
else
tmp = seq.flatten [asSeq(tLeft.r), list, asSeq(tRight.l)]
s = nodes tmp.size(), tmp
new Deep tLeft.l, suspend(-> app3 tLeft.m(), s, tRight.m()), tRight.r
concat: (t) -> app3 this, null, t
split: (p, i) ->
i1 = measure.sum i, norm @l
if p i1
[l, x, r] = @l.split p, i
[asTree(l), x, deepL(r, suspend(=> @m()), @r)]
else
i2 = measure.sum i1, norm @m()
if p i2
[ml, xs, mr] = @m().split p, i1
[l, x, r] = xs.asDigit().split p, measure.sum i1, norm ml
[deepR(@l, (-> ml), l), x, deepL(r, (-> mr), @r)]
else
[l, x, r] = @r.split p, i2
[deepR(@l, suspend(=> @m()), l), x, asTree(r)]
reverse: -> new Deep @r.reverse(), suspend(=> @m().reverse()), @l.reverse()
internal = [
Node2
Node3
Digit1
Digit2
Digit3
Digit4
Single
Deep
]
# --------------------------------------------------------------------
# Specialisations
# --------------------------------------------------------------------
SizeMeasure =
empty: 0
single: (x) -> 1
sum: (a, b) -> a + b
class CountedExtensions extends DefaultExtensions
size: -> @measure()
get: (i) -> @find (m) -> m > i
splitAt: (i) -> [l, x, r] = @split((m) -> m > i); [l, r.after x]
CountedSeq = new FingerTreeType SizeMeasure, CountedExtensions
OrderMeasure =
empty: undefined
single: (x) -> x
sum: (a, b) -> if b? then b else a
SortedExtensions = (less, extensions) -> class extends extensions
after = (s, k) ->
if k == undefined then s else new s.constructor s.data.after k
before = (s, k) ->
if k == undefined then s else new s.constructor s.data.before k
concat = (s, t) -> new s.constructor s.data.concat t.data
partition: (k) ->
[l, x, r] = @split((m) -> not less m, k)
[l, after r, x]
insert: (k) ->
[l, r] = @partition k
concat l, after r, k
deleteAll: (k) ->
[l, r] = @partition k
concat l, r.dropUntil (m) -> less k, m
merge = (s, t1, t2) ->
if t2.isEmpty()
concat s, t1
else
k = t2.first()
[l, x, r] = t1.split (m) -> less k, m
->
a = concat s, before l, k
merge a, t2.rest(), after r, x
merge: (other) -> bounce merge @empty(), this, other
intersect = (s, t1, t2) ->
if t2.isEmpty()
s
else
k = t2.first()
[l, x, r] = t1.split (m) -> not less m, k
if less(k, x)
-> intersect s, t2.rest(), after r, x
else
-> intersect before(s, x), t2.rest(), r
intersect: (other) -> bounce intersect @empty(), this, other
@::plus = @::insert
class SortedSeqType extends FingerTreeType
constructor: (less = ((a, b) -> a < b), extensions = Void) ->
super OrderMeasure, SortedExtensions less, extensions
# --------------------------------------------------------------------
# Exports
# --------------------------------------------------------------------
exports = module?.exports or this.pazy ?= {}
exports.FingerTreeType = FingerTreeType
exports.CountedSeq = CountedSeq
exports.SortedSeqType = SortedSeqType
exports.SortedSeq = new SortedSeqType()
| 74169 | # --------------------------------------------------------------------
# A finger tree implementation.
#
# Copyright (c) 2011 <NAME> (<EMAIL>)
# --------------------------------------------------------------------
if typeof(require) != 'undefined'
{ seq } = require('sequence')
{ bounce, suspend } = require('functional')
else
{ seq, bounce, suspend } = pazy
class Void
class DefaultExtensions
after: (x) -> if x == undefined then @ else new @constructor @data.after x
before: (x) -> if x == undefined then @ else new @constructor @data.before x
concat: (t) -> if not t? then this else new @constructor @data.concat t.data
reverse: -> new @constructor @data.reverse()
@::plus = @::before
class FingerTreeType
constructor: (measure, extensions = DefaultExtensions) ->
@build = -> seq.reduce arguments, empty, (s, a) -> s.plus a
single = (x) -> if x == Empty or x.constructor in internal
x.measure()
else
measure.single(x)
norm = -> seq.reduce arguments, measure.empty, (n, x) ->
if x? then measure.sum n, single x else n
rev = (x) -> if x?.constructor in [Node2, Node3] then x.reverse() else x
# Wrapper for finger tree instances
class Instance extends extensions
constructor: (@data) ->
empty: -> empty
isEmpty: -> @data.isEmpty()
reduceLeft: (z, op) -> @data.reduceLeft z, op
reduceRight: (op, z) -> @data.reduceRight op, z
first: -> @data.first()
last: -> @data.last()
rest: -> new Instance @data.rest()
init: -> new Instance @data.init()
measure: -> @data.measure()
split: (p) ->
if @data != Empty and p norm @data
[l, x, r] = @data.split p, measure.empty
[new Instance(l), x, new Instance(r)]
else
[this, undefined, new Instance(Empty)]
takeUntil: (p) -> @split(p)[0]
dropUntil: (p) ->
[l, x, r] = @split(p)
if x == undefined then r else new Instance r.data.after x
find: (p) -> @split(p)[1]
toSeq: -> @data.reduceRight ((x, s) -> seq.conj x, -> s), null
toString: -> @data.reduceLeft "", (s, x) -> s + ' ' + x
# A node.
class Node2
constructor: (@a, @b) -> @v = norm @a, @b
reduceLeft: (z, op) -> op(op(z, @a), @b)
reduceRight: (op, z) -> op(@a, op(@b, z))
asDigit: -> new Digit2 @a, @b
measure: -> @v
reverse: -> new Node2 rev(@b), rev(@a)
class Node3
constructor: (@a, @b, @c) -> @v = norm @a, @b, @c
reduceLeft: (z, op) -> op(op(op(z, @a), @b), @c)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, z)))
asDigit: -> new Digit3 @a, @b, @c
measure: -> @v
reverse: -> new Node3 rev(@c), rev(@b), rev(@a)
# A digit in a finger tree.
class Digit1
constructor: (@a) ->
reduceLeft: (z, op) -> op(z, @a)
reduceRight: (op, z) -> op(@a, z)
after: (x) -> new Digit2 x, @a
before: (x) -> new Digit2 @a, x
first: -> @a
last: -> @a
rest: -> Empty
init: -> Empty
measure: -> norm @a
split: (p, i) -> [Empty, @a, Empty]
reverse: -> new Digit1 rev(@a)
class Digit2
constructor: (@a, @b) ->
reduceLeft: (z, op) -> op(op(z, @a), @b)
reduceRight: (op, z) -> op(@a, op(@b, z))
after: (x) -> new Digit3 x, @a, @b
before: (x) -> new Digit3 @a, @b, x
first: -> @a
last: -> @b
rest: -> new Digit1 @b
init: -> new Digit1 @a
asNode: -> new Node2 @a, @b
measure: -> norm @a, @b
split: (p, i) ->
if p measure.sum i, norm @a
[Empty, @a, new Digit1(@b)]
else
[new Digit1(@a), @b, Empty]
reverse: -> new Digit2 rev(@b), rev(@a)
class Digit3
constructor: (@a, @b, @c) ->
reduceLeft: (z, op) -> op(op(op(z, @a), @b), @c)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, z)))
after: (x) -> new Digit4 x, @a, @b, @c
before: (x) -> new Digit4 @a, @b, @c, x
first: -> @a
last: -> @c
rest: -> new Digit2 @b, @c
init: -> new Digit2 @a, @b
asNode: -> new Node3 @a, @b, @c
measure: -> norm @a, @b, @c
split: (p, i) ->
i1 = measure.sum i, norm @a
if p i1
[Empty, @a, new Digit2(@b, @c)]
else if p measure.sum i1, norm @b
[new Digit1(@a), @b, new Digit1(@c)]
else
[new Digit2(@a, @b), @c, Empty]
reverse: -> new Digit3 rev(@c), rev(@b), rev(@a)
class Digit4
constructor: (@a, @b, @c, @d) ->
reduceLeft: (z, op) -> op(op(op(op(z, @a), @b), @c), @d)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, op(@d, z))))
first: -> @a
last: -> @d
rest: -> new Digit3 @b, @c, @d
init: -> new Digit3 @a, @b, @c
measure: -> norm @a, @b, @c, @d
split: (p, i) ->
i1 = measure.sum i, norm @a
if p i1
[Empty, @a, new Digit3(@b, @c, @d)]
else
i2 = measure.sum i1, norm @b
if p i2
[new Digit1(@a), @b, new Digit2(@c, @d)]
else if p measure.sum i2, norm @c
[new Digit2(@a, @b), @c, new Digit1(@d)]
else
[new Digit3(@a, @b, @c), @d, Empty]
reverse: -> new Digit4 rev(@d), rev(@c), rev(@b), rev(@a)
# An empty finger tree.
Empty = {
isEmpty: -> true
reduceLeft: (z, op) -> z
reduceRight: (op, z) -> z
after: (a) -> new Single a
before: (a) -> new Single a
first: ->
last: ->
rest: ->
init: ->
concat: (t) -> t
measure: -> norm()
reverse: -> this
}
empty = new Instance Empty
# A finger tree with a single element.
class Single
constructor: (@a) ->
isEmpty: -> false
reduceLeft: (z, op) -> op z, @a
reduceRight: (op, z) -> op @a, z
after: (x) -> new Deep new Digit1(x), (-> Empty), new Digit1(@a)
before: (x) -> new Deep new Digit1(@a), (-> Empty), new Digit1(x)
first: -> @a
last: -> @a
rest: -> Empty
init: -> Empty
concat: (t) -> t.after @a
measure: -> norm @a
split: (p, i) -> [Empty, @a, Empty]
reverse: -> this
# A deep finger tree.
class Deep
reduceLeft = (op) -> (z, x) -> x.reduceLeft(z, op)
reduceRight = (op) -> (x, z) -> x.reduceRight(op, z)
asTree = (s) -> s.reduceLeft Empty, (a, b) -> a.before b
asSeq = (s) -> s.reduceRight ((a, b) -> seq.conj a, -> b), null
constructor: (@l, @m, @r) ->
isEmpty: -> false
measure: -> val = norm(@l, @m(), @r); (@measure = -> val)()
reduceLeft: (z, op0) ->
op1 = reduceLeft op0
op2 = reduceLeft op1
op1(op2(op1(z, @l), @m()), @r)
reduceRight: (op0, z) ->
op1 = reduceRight op0
op2 = reduceRight op1
op1(@l, op2(@m(), op1(@r, z)))
after: (x) ->
if @l.constructor == Digit4
{ a, b, c, d } = @l
l = new Digit2 x, a
new Deep l, suspend(=> @m().after(new Node3(b, c, d))), @r
else
new Deep @l.after(x), @m, @r
before: (x) ->
if @r.constructor == Digit4
{ a, b, c, d } = @r
r = new Digit2(d, x)
new Deep @l, suspend(=> @m().before(new Node3(a, b, c))), r
else
new Deep @l, @m, @r.before(x)
first: -> @l.first()
last: -> @r.last()
deepL = (l, m, r) ->
if l == Empty
if m() == Empty
asTree r
else
new Deep m().first().asDigit(), suspend(=> m().rest()), r
else
new Deep l, m, r
deepR = (l, m, r) ->
if r == Empty
if m() == Empty
asTree l
else
new Deep l, suspend(=> m().init()), m().last().asDigit()
else
new Deep l, m, r
rest: -> deepL @l.rest(), suspend(=> @m()), @r
init: -> deepR @l, suspend(=> @m()), @r.init()
nodes = (n, s) ->
if n == 0
null
else if n == 1 or n < 0
throw new Error "this should not happen"
else if n == 2 or n % 3 == 1
seq.conj new Node2(s.take(2).into([])...), -> nodes n-2, s.drop 2
else
seq.conj new Node3(s.take(3).into([])...), -> nodes n-3, s.drop 3
app3 = (tLeft, list, tRight) ->
if tLeft == Empty
seq.reduce seq.reverse(list), tRight, (t, x) -> t.after x
else if tRight == Empty
seq.reduce list, tLeft, (t, x) -> t.before x
else if tLeft.constructor == Single
app3(Empty, list, tRight).after tLeft.a
else if tRight.constructor == Single
app3(tLeft, list, Empty).before tRight.a
else
tmp = seq.flatten [asSeq(tLeft.r), list, asSeq(tRight.l)]
s = nodes tmp.size(), tmp
new Deep tLeft.l, suspend(-> app3 tLeft.m(), s, tRight.m()), tRight.r
concat: (t) -> app3 this, null, t
split: (p, i) ->
i1 = measure.sum i, norm @l
if p i1
[l, x, r] = @l.split p, i
[asTree(l), x, deepL(r, suspend(=> @m()), @r)]
else
i2 = measure.sum i1, norm @m()
if p i2
[ml, xs, mr] = @m().split p, i1
[l, x, r] = xs.asDigit().split p, measure.sum i1, norm ml
[deepR(@l, (-> ml), l), x, deepL(r, (-> mr), @r)]
else
[l, x, r] = @r.split p, i2
[deepR(@l, suspend(=> @m()), l), x, asTree(r)]
reverse: -> new Deep @r.reverse(), suspend(=> @m().reverse()), @l.reverse()
internal = [
Node2
Node3
Digit1
Digit2
Digit3
Digit4
Single
Deep
]
# --------------------------------------------------------------------
# Specialisations
# --------------------------------------------------------------------
SizeMeasure =
empty: 0
single: (x) -> 1
sum: (a, b) -> a + b
class CountedExtensions extends DefaultExtensions
size: -> @measure()
get: (i) -> @find (m) -> m > i
splitAt: (i) -> [l, x, r] = @split((m) -> m > i); [l, r.after x]
CountedSeq = new FingerTreeType SizeMeasure, CountedExtensions
OrderMeasure =
empty: undefined
single: (x) -> x
sum: (a, b) -> if b? then b else a
SortedExtensions = (less, extensions) -> class extends extensions
after = (s, k) ->
if k == undefined then s else new s.constructor s.data.after k
before = (s, k) ->
if k == undefined then s else new s.constructor s.data.before k
concat = (s, t) -> new s.constructor s.data.concat t.data
partition: (k) ->
[l, x, r] = @split((m) -> not less m, k)
[l, after r, x]
insert: (k) ->
[l, r] = @partition k
concat l, after r, k
deleteAll: (k) ->
[l, r] = @partition k
concat l, r.dropUntil (m) -> less k, m
merge = (s, t1, t2) ->
if t2.isEmpty()
concat s, t1
else
k = t2.first()
[l, x, r] = t1.split (m) -> less k, m
->
a = concat s, before l, k
merge a, t2.rest(), after r, x
merge: (other) -> bounce merge @empty(), this, other
intersect = (s, t1, t2) ->
if t2.isEmpty()
s
else
k = t2.first()
[l, x, r] = t1.split (m) -> not less m, k
if less(k, x)
-> intersect s, t2.rest(), after r, x
else
-> intersect before(s, x), t2.rest(), r
intersect: (other) -> bounce intersect @empty(), this, other
@::plus = @::insert
class SortedSeqType extends FingerTreeType
constructor: (less = ((a, b) -> a < b), extensions = Void) ->
super OrderMeasure, SortedExtensions less, extensions
# --------------------------------------------------------------------
# Exports
# --------------------------------------------------------------------
exports = module?.exports or this.pazy ?= {}
exports.FingerTreeType = FingerTreeType
exports.CountedSeq = CountedSeq
exports.SortedSeqType = SortedSeqType
exports.SortedSeq = new SortedSeqType()
| true | # --------------------------------------------------------------------
# A finger tree implementation.
#
# Copyright (c) 2011 PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# --------------------------------------------------------------------
if typeof(require) != 'undefined'
{ seq } = require('sequence')
{ bounce, suspend } = require('functional')
else
{ seq, bounce, suspend } = pazy
class Void
class DefaultExtensions
after: (x) -> if x == undefined then @ else new @constructor @data.after x
before: (x) -> if x == undefined then @ else new @constructor @data.before x
concat: (t) -> if not t? then this else new @constructor @data.concat t.data
reverse: -> new @constructor @data.reverse()
@::plus = @::before
class FingerTreeType
constructor: (measure, extensions = DefaultExtensions) ->
@build = -> seq.reduce arguments, empty, (s, a) -> s.plus a
single = (x) -> if x == Empty or x.constructor in internal
x.measure()
else
measure.single(x)
norm = -> seq.reduce arguments, measure.empty, (n, x) ->
if x? then measure.sum n, single x else n
rev = (x) -> if x?.constructor in [Node2, Node3] then x.reverse() else x
# Wrapper for finger tree instances
class Instance extends extensions
constructor: (@data) ->
empty: -> empty
isEmpty: -> @data.isEmpty()
reduceLeft: (z, op) -> @data.reduceLeft z, op
reduceRight: (op, z) -> @data.reduceRight op, z
first: -> @data.first()
last: -> @data.last()
rest: -> new Instance @data.rest()
init: -> new Instance @data.init()
measure: -> @data.measure()
split: (p) ->
if @data != Empty and p norm @data
[l, x, r] = @data.split p, measure.empty
[new Instance(l), x, new Instance(r)]
else
[this, undefined, new Instance(Empty)]
takeUntil: (p) -> @split(p)[0]
dropUntil: (p) ->
[l, x, r] = @split(p)
if x == undefined then r else new Instance r.data.after x
find: (p) -> @split(p)[1]
toSeq: -> @data.reduceRight ((x, s) -> seq.conj x, -> s), null
toString: -> @data.reduceLeft "", (s, x) -> s + ' ' + x
# A node.
class Node2
constructor: (@a, @b) -> @v = norm @a, @b
reduceLeft: (z, op) -> op(op(z, @a), @b)
reduceRight: (op, z) -> op(@a, op(@b, z))
asDigit: -> new Digit2 @a, @b
measure: -> @v
reverse: -> new Node2 rev(@b), rev(@a)
class Node3
constructor: (@a, @b, @c) -> @v = norm @a, @b, @c
reduceLeft: (z, op) -> op(op(op(z, @a), @b), @c)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, z)))
asDigit: -> new Digit3 @a, @b, @c
measure: -> @v
reverse: -> new Node3 rev(@c), rev(@b), rev(@a)
# A digit in a finger tree.
class Digit1
constructor: (@a) ->
reduceLeft: (z, op) -> op(z, @a)
reduceRight: (op, z) -> op(@a, z)
after: (x) -> new Digit2 x, @a
before: (x) -> new Digit2 @a, x
first: -> @a
last: -> @a
rest: -> Empty
init: -> Empty
measure: -> norm @a
split: (p, i) -> [Empty, @a, Empty]
reverse: -> new Digit1 rev(@a)
class Digit2
constructor: (@a, @b) ->
reduceLeft: (z, op) -> op(op(z, @a), @b)
reduceRight: (op, z) -> op(@a, op(@b, z))
after: (x) -> new Digit3 x, @a, @b
before: (x) -> new Digit3 @a, @b, x
first: -> @a
last: -> @b
rest: -> new Digit1 @b
init: -> new Digit1 @a
asNode: -> new Node2 @a, @b
measure: -> norm @a, @b
split: (p, i) ->
if p measure.sum i, norm @a
[Empty, @a, new Digit1(@b)]
else
[new Digit1(@a), @b, Empty]
reverse: -> new Digit2 rev(@b), rev(@a)
class Digit3
constructor: (@a, @b, @c) ->
reduceLeft: (z, op) -> op(op(op(z, @a), @b), @c)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, z)))
after: (x) -> new Digit4 x, @a, @b, @c
before: (x) -> new Digit4 @a, @b, @c, x
first: -> @a
last: -> @c
rest: -> new Digit2 @b, @c
init: -> new Digit2 @a, @b
asNode: -> new Node3 @a, @b, @c
measure: -> norm @a, @b, @c
split: (p, i) ->
i1 = measure.sum i, norm @a
if p i1
[Empty, @a, new Digit2(@b, @c)]
else if p measure.sum i1, norm @b
[new Digit1(@a), @b, new Digit1(@c)]
else
[new Digit2(@a, @b), @c, Empty]
reverse: -> new Digit3 rev(@c), rev(@b), rev(@a)
class Digit4
constructor: (@a, @b, @c, @d) ->
reduceLeft: (z, op) -> op(op(op(op(z, @a), @b), @c), @d)
reduceRight: (op, z) -> op(@a, op(@b, op(@c, op(@d, z))))
first: -> @a
last: -> @d
rest: -> new Digit3 @b, @c, @d
init: -> new Digit3 @a, @b, @c
measure: -> norm @a, @b, @c, @d
split: (p, i) ->
i1 = measure.sum i, norm @a
if p i1
[Empty, @a, new Digit3(@b, @c, @d)]
else
i2 = measure.sum i1, norm @b
if p i2
[new Digit1(@a), @b, new Digit2(@c, @d)]
else if p measure.sum i2, norm @c
[new Digit2(@a, @b), @c, new Digit1(@d)]
else
[new Digit3(@a, @b, @c), @d, Empty]
reverse: -> new Digit4 rev(@d), rev(@c), rev(@b), rev(@a)
# An empty finger tree.
Empty = {
isEmpty: -> true
reduceLeft: (z, op) -> z
reduceRight: (op, z) -> z
after: (a) -> new Single a
before: (a) -> new Single a
first: ->
last: ->
rest: ->
init: ->
concat: (t) -> t
measure: -> norm()
reverse: -> this
}
empty = new Instance Empty
# A finger tree with a single element.
class Single
constructor: (@a) ->
isEmpty: -> false
reduceLeft: (z, op) -> op z, @a
reduceRight: (op, z) -> op @a, z
after: (x) -> new Deep new Digit1(x), (-> Empty), new Digit1(@a)
before: (x) -> new Deep new Digit1(@a), (-> Empty), new Digit1(x)
first: -> @a
last: -> @a
rest: -> Empty
init: -> Empty
concat: (t) -> t.after @a
measure: -> norm @a
split: (p, i) -> [Empty, @a, Empty]
reverse: -> this
# A deep finger tree.
class Deep
reduceLeft = (op) -> (z, x) -> x.reduceLeft(z, op)
reduceRight = (op) -> (x, z) -> x.reduceRight(op, z)
asTree = (s) -> s.reduceLeft Empty, (a, b) -> a.before b
asSeq = (s) -> s.reduceRight ((a, b) -> seq.conj a, -> b), null
constructor: (@l, @m, @r) ->
isEmpty: -> false
measure: -> val = norm(@l, @m(), @r); (@measure = -> val)()
reduceLeft: (z, op0) ->
op1 = reduceLeft op0
op2 = reduceLeft op1
op1(op2(op1(z, @l), @m()), @r)
reduceRight: (op0, z) ->
op1 = reduceRight op0
op2 = reduceRight op1
op1(@l, op2(@m(), op1(@r, z)))
after: (x) ->
if @l.constructor == Digit4
{ a, b, c, d } = @l
l = new Digit2 x, a
new Deep l, suspend(=> @m().after(new Node3(b, c, d))), @r
else
new Deep @l.after(x), @m, @r
before: (x) ->
if @r.constructor == Digit4
{ a, b, c, d } = @r
r = new Digit2(d, x)
new Deep @l, suspend(=> @m().before(new Node3(a, b, c))), r
else
new Deep @l, @m, @r.before(x)
first: -> @l.first()
last: -> @r.last()
deepL = (l, m, r) ->
if l == Empty
if m() == Empty
asTree r
else
new Deep m().first().asDigit(), suspend(=> m().rest()), r
else
new Deep l, m, r
deepR = (l, m, r) ->
if r == Empty
if m() == Empty
asTree l
else
new Deep l, suspend(=> m().init()), m().last().asDigit()
else
new Deep l, m, r
rest: -> deepL @l.rest(), suspend(=> @m()), @r
init: -> deepR @l, suspend(=> @m()), @r.init()
nodes = (n, s) ->
if n == 0
null
else if n == 1 or n < 0
throw new Error "this should not happen"
else if n == 2 or n % 3 == 1
seq.conj new Node2(s.take(2).into([])...), -> nodes n-2, s.drop 2
else
seq.conj new Node3(s.take(3).into([])...), -> nodes n-3, s.drop 3
app3 = (tLeft, list, tRight) ->
if tLeft == Empty
seq.reduce seq.reverse(list), tRight, (t, x) -> t.after x
else if tRight == Empty
seq.reduce list, tLeft, (t, x) -> t.before x
else if tLeft.constructor == Single
app3(Empty, list, tRight).after tLeft.a
else if tRight.constructor == Single
app3(tLeft, list, Empty).before tRight.a
else
tmp = seq.flatten [asSeq(tLeft.r), list, asSeq(tRight.l)]
s = nodes tmp.size(), tmp
new Deep tLeft.l, suspend(-> app3 tLeft.m(), s, tRight.m()), tRight.r
concat: (t) -> app3 this, null, t
split: (p, i) ->
i1 = measure.sum i, norm @l
if p i1
[l, x, r] = @l.split p, i
[asTree(l), x, deepL(r, suspend(=> @m()), @r)]
else
i2 = measure.sum i1, norm @m()
if p i2
[ml, xs, mr] = @m().split p, i1
[l, x, r] = xs.asDigit().split p, measure.sum i1, norm ml
[deepR(@l, (-> ml), l), x, deepL(r, (-> mr), @r)]
else
[l, x, r] = @r.split p, i2
[deepR(@l, suspend(=> @m()), l), x, asTree(r)]
reverse: -> new Deep @r.reverse(), suspend(=> @m().reverse()), @l.reverse()
internal = [
Node2
Node3
Digit1
Digit2
Digit3
Digit4
Single
Deep
]
# --------------------------------------------------------------------
# Specialisations
# --------------------------------------------------------------------
SizeMeasure =
empty: 0
single: (x) -> 1
sum: (a, b) -> a + b
class CountedExtensions extends DefaultExtensions
size: -> @measure()
get: (i) -> @find (m) -> m > i
splitAt: (i) -> [l, x, r] = @split((m) -> m > i); [l, r.after x]
CountedSeq = new FingerTreeType SizeMeasure, CountedExtensions
OrderMeasure =
empty: undefined
single: (x) -> x
sum: (a, b) -> if b? then b else a
SortedExtensions = (less, extensions) -> class extends extensions
after = (s, k) ->
if k == undefined then s else new s.constructor s.data.after k
before = (s, k) ->
if k == undefined then s else new s.constructor s.data.before k
concat = (s, t) -> new s.constructor s.data.concat t.data
partition: (k) ->
[l, x, r] = @split((m) -> not less m, k)
[l, after r, x]
insert: (k) ->
[l, r] = @partition k
concat l, after r, k
deleteAll: (k) ->
[l, r] = @partition k
concat l, r.dropUntil (m) -> less k, m
merge = (s, t1, t2) ->
if t2.isEmpty()
concat s, t1
else
k = t2.first()
[l, x, r] = t1.split (m) -> less k, m
->
a = concat s, before l, k
merge a, t2.rest(), after r, x
merge: (other) -> bounce merge @empty(), this, other
intersect = (s, t1, t2) ->
if t2.isEmpty()
s
else
k = t2.first()
[l, x, r] = t1.split (m) -> not less m, k
if less(k, x)
-> intersect s, t2.rest(), after r, x
else
-> intersect before(s, x), t2.rest(), r
intersect: (other) -> bounce intersect @empty(), this, other
@::plus = @::insert
class SortedSeqType extends FingerTreeType
constructor: (less = ((a, b) -> a < b), extensions = Void) ->
super OrderMeasure, SortedExtensions less, extensions
# --------------------------------------------------------------------
# Exports
# --------------------------------------------------------------------
exports = module?.exports or this.pazy ?= {}
exports.FingerTreeType = FingerTreeType
exports.CountedSeq = CountedSeq
exports.SortedSeqType = SortedSeqType
exports.SortedSeq = new SortedSeqType()
|
[
{
"context": "# Description:\n# 당번이 할 일을 알려줍니다.\n#\n# Author: Leop0ld\n#\n# Commands:\n# 날씨! (위치) - 지정한 위치의 분별 날씨를 알려줍",
"end": 56,
"score": 0.9997279644012451,
"start": 49,
"tag": "USERNAME",
"value": "Leop0ld"
},
{
"context": "com.com/weather/current/minutely?version=1&appK... | scripts/weather.coffee | Leop0ld/leobot | 0 | # Description:
# 당번이 할 일을 알려줍니다.
#
# Author: Leop0ld
#
# Commands:
# 날씨! (위치) - 지정한 위치의 분별 날씨를 알려줍니다.
http = require 'http'
q = require 'q'
baseUrl = 'https://api2.sktelecom.com/weather/current/minutely?version=1&appKey=9ffe2c4e-4210-415c-a524-5b6190d3e286'
module.exports = (robot) ->
robot.hear /날씨! (.*)/i, (msg) ->
location = decodeURIComponent(unescape(msg.match[1]))
getGeocode(msg, location)
.then (geoCode) ->
getWeather(msg, geoCode, location)
getGeocode = (msg, location) ->
deferred = q.defer()
robot.http("https://maps.googleapis.com/maps/api/geocode/json")
.query({
address: location
})
.get() (err, res, body) ->
response = JSON.parse(body)
geo = response.results[0].geometry.location
if response.status is "OK"
geoCode = {
lat : geo.lat
lng : geo.lng
}
deferred.resolve(geoCode)
else
deferred.reject(err)
return deferred.promise
getWeather = (msg, geoCode, location) ->
targetUrl = "#{baseUrl}&lat=#{geoCode.lat}&lon=#{geoCode.lng}"
robot.http(targetUrl).get() (err, res, body) ->
dataObj = JSON.parse(body).weather.minutely[0]
stationName = dataObj.station.name
wspd = dataObj.wind.wspd
precipitation = dataObj.precipitation.type
sinceOntime = dataObj.precipitation.sinceOntime
skyName = dataObj.sky.name
tc = dataObj.temperature.tc
tmax = dataObj.temperature.tmax
tmin = dataObj.temperature.tmin
timeObservation = dataObj.timeObservation
resultMsg = "*#{timeObservation} 기준 #{stationName} 관측소 관측 결과* 입니다.\n"
resultMsg += "현재 기온은 *#{tc}도* 이며, 최고기온은 *#{tmax}도* 이고, 최저기온은 *#{tmin}도* 입니다.\n"
resultMsg += "현재 풍속은 *#{wspd}m/s* 이며, 하늘은 *#{skyName}* 입니다.\n"
if precipitation == 3
resultMsg += "또한 오늘은 눈 예보가 있으며, 적설량은 #{sinceOntime}cm 정도로 예상됩니다.\n"
else if precipitation == 2
resultMsg += "또한 오늘은 비/눈 예보가 있으며, 강수량은 #{sinceOntime}mm 정도로 예상됩니다.\n"
else if precipitation == 1
resultMsg += "또한 오늘은 비 예보가 있으며, 강수량은 #{sinceOntime}mm 정도로 예상됩니다.\n"
else
resultMsg += "또한 오늘은 비/눈 예보가 없습니다."
msg.send resultMsg
| 154794 | # Description:
# 당번이 할 일을 알려줍니다.
#
# Author: Leop0ld
#
# Commands:
# 날씨! (위치) - 지정한 위치의 분별 날씨를 알려줍니다.
http = require 'http'
q = require 'q'
baseUrl = 'https://api2.sktelecom.com/weather/current/minutely?version=1&appKey=<KEY>'
module.exports = (robot) ->
robot.hear /날씨! (.*)/i, (msg) ->
location = decodeURIComponent(unescape(msg.match[1]))
getGeocode(msg, location)
.then (geoCode) ->
getWeather(msg, geoCode, location)
getGeocode = (msg, location) ->
deferred = q.defer()
robot.http("https://maps.googleapis.com/maps/api/geocode/json")
.query({
address: location
})
.get() (err, res, body) ->
response = JSON.parse(body)
geo = response.results[0].geometry.location
if response.status is "OK"
geoCode = {
lat : geo.lat
lng : geo.lng
}
deferred.resolve(geoCode)
else
deferred.reject(err)
return deferred.promise
getWeather = (msg, geoCode, location) ->
targetUrl = "#{baseUrl}&lat=#{geoCode.lat}&lon=#{geoCode.lng}"
robot.http(targetUrl).get() (err, res, body) ->
dataObj = JSON.parse(body).weather.minutely[0]
stationName = dataObj.station.name
wspd = dataObj.wind.wspd
precipitation = dataObj.precipitation.type
sinceOntime = dataObj.precipitation.sinceOntime
skyName = dataObj.sky.name
tc = dataObj.temperature.tc
tmax = dataObj.temperature.tmax
tmin = dataObj.temperature.tmin
timeObservation = dataObj.timeObservation
resultMsg = "*#{timeObservation} 기준 #{stationName} 관측소 관측 결과* 입니다.\n"
resultMsg += "현재 기온은 *#{tc}도* 이며, 최고기온은 *#{tmax}도* 이고, 최저기온은 *#{tmin}도* 입니다.\n"
resultMsg += "현재 풍속은 *#{wspd}m/s* 이며, 하늘은 *#{skyName}* 입니다.\n"
if precipitation == 3
resultMsg += "또한 오늘은 눈 예보가 있으며, 적설량은 #{sinceOntime}cm 정도로 예상됩니다.\n"
else if precipitation == 2
resultMsg += "또한 오늘은 비/눈 예보가 있으며, 강수량은 #{sinceOntime}mm 정도로 예상됩니다.\n"
else if precipitation == 1
resultMsg += "또한 오늘은 비 예보가 있으며, 강수량은 #{sinceOntime}mm 정도로 예상됩니다.\n"
else
resultMsg += "또한 오늘은 비/눈 예보가 없습니다."
msg.send resultMsg
| true | # Description:
# 당번이 할 일을 알려줍니다.
#
# Author: Leop0ld
#
# Commands:
# 날씨! (위치) - 지정한 위치의 분별 날씨를 알려줍니다.
http = require 'http'
q = require 'q'
baseUrl = 'https://api2.sktelecom.com/weather/current/minutely?version=1&appKey=PI:KEY:<KEY>END_PI'
module.exports = (robot) ->
robot.hear /날씨! (.*)/i, (msg) ->
location = decodeURIComponent(unescape(msg.match[1]))
getGeocode(msg, location)
.then (geoCode) ->
getWeather(msg, geoCode, location)
getGeocode = (msg, location) ->
deferred = q.defer()
robot.http("https://maps.googleapis.com/maps/api/geocode/json")
.query({
address: location
})
.get() (err, res, body) ->
response = JSON.parse(body)
geo = response.results[0].geometry.location
if response.status is "OK"
geoCode = {
lat : geo.lat
lng : geo.lng
}
deferred.resolve(geoCode)
else
deferred.reject(err)
return deferred.promise
getWeather = (msg, geoCode, location) ->
targetUrl = "#{baseUrl}&lat=#{geoCode.lat}&lon=#{geoCode.lng}"
robot.http(targetUrl).get() (err, res, body) ->
dataObj = JSON.parse(body).weather.minutely[0]
stationName = dataObj.station.name
wspd = dataObj.wind.wspd
precipitation = dataObj.precipitation.type
sinceOntime = dataObj.precipitation.sinceOntime
skyName = dataObj.sky.name
tc = dataObj.temperature.tc
tmax = dataObj.temperature.tmax
tmin = dataObj.temperature.tmin
timeObservation = dataObj.timeObservation
resultMsg = "*#{timeObservation} 기준 #{stationName} 관측소 관측 결과* 입니다.\n"
resultMsg += "현재 기온은 *#{tc}도* 이며, 최고기온은 *#{tmax}도* 이고, 최저기온은 *#{tmin}도* 입니다.\n"
resultMsg += "현재 풍속은 *#{wspd}m/s* 이며, 하늘은 *#{skyName}* 입니다.\n"
if precipitation == 3
resultMsg += "또한 오늘은 눈 예보가 있으며, 적설량은 #{sinceOntime}cm 정도로 예상됩니다.\n"
else if precipitation == 2
resultMsg += "또한 오늘은 비/눈 예보가 있으며, 강수량은 #{sinceOntime}mm 정도로 예상됩니다.\n"
else if precipitation == 1
resultMsg += "또한 오늘은 비 예보가 있으며, 강수량은 #{sinceOntime}mm 정도로 예상됩니다.\n"
else
resultMsg += "또한 오늘은 비/눈 예보가 없습니다."
msg.send resultMsg
|
[
{
"context": " cfg.player_1 =\n user_id: 33\n name: \"Kunta Kinte\"\n cfg.player_2 =\n user_id: 666\n name",
"end": 4874,
"score": 0.9997552633285522,
"start": 4863,
"tag": "NAME",
"value": "Kunta Kinte"
},
{
"context": " cfg.player_2 =\n user_id: 666... | test/dummy/spec/javascripts/Game_config_spec.js.coffee | railsc0d0r/global8ball_game | 0 | #= require game/config/Game
describe 'Game config', ->
GameConfig = global8ball.config.Game
createConfig = ->
borders:
left: [
{
x: -1.25
y: 0.55
},
{
x: -1.25,
y: -0.55
},
{
x: -1.3,
y: -0.6
},
{
x: -1.3,
y: 0.6
}
]
leftBottom: [
{
x: -1.25,
y: 0.7
},
{
x: 0,
y: 0.7
},
{
x: 0,
y: 0.6
},
{
x: -1.2,
y: 0.6
}
]
leftTop: [
{
x: 0,
y: -0.7
},
{
x: -1.2,
y: -0.7
},
{
x: -1.2,
y: -0.6
},
{
x: 0,
y: -0.6
}
]
right: [
{
x: 1.3,
y: 0.6
},
{
x: 1.3,
y: -0.6
},
{
x: 1.25,
y: -0.5
},
{
x: 1.25,
y: 0.55
}
]
rightBottom: [
{
x: 0,
y: 0.7
},
{
x: 1.25,
y: 0.7
},
{
x: 1.2,
y: 0.6
},
{
x: 0.1,
y: 0.6
}
]
rightTop: [
{
x: 1.25,
y: -0.7
},
{
x: 0,
y: -0.7
},
{
x: 0.1,
y: -0.6
},
{
x: 1.2,
y: -0.6
}
]
holes:
centerBottom:
radius: 0.01
x: -0.2
y: 0.6
centerTop:
radius: 0.01
x: -0.2
y: 0.6
leftBottom:
radius: 0.01
x: -0.2
y: 0.6
leftTop:
radius: 0.01
x: -0.2
y: 0.6
rightBottom:
radius: 0.01
x: -0.2
y: 0.6
rightTop:
radius: 0.01
x: -0.2
y: 0.6
current_viewer:
name: ""
user_id: 1
player_1:
name: ""
user_id: 1
player_2:
name: ""
user_id: 2
table:
contact_materials:
ball_ball:
restitution: 0.9
stiffness: "INFINITY"
ball_border:
restitution: 0.95
stiffness: "INFINITY"
damping: 0.2
max_breakball_speed: 10
min_ball_speed: 0.01
scaling_factor: 400
describe 'Physics config', ->
it 'provides an mpx converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 300
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.mpx 2
expect(scaledValue).toEqual 600
it 'provides an mpxi converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 400
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.mpxi 0.5
expect(scaledValue).toEqual -200
it 'provides an pxm converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 250
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.pxm 125
expect(scaledValue).toEqual 0.5
it 'provides an pxmi converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 350
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.pxmi 700
expect(scaledValue).toEqual -2
it 'provides converted holes data', ->
cfg = createConfig()
cfg.table.scaling_factor = 200
cfg.holes.leftBottom =
x: -0.8
y: 0.6
radius: 0.01
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
leftBottomHole = gameConfig.holesData(width: 1000, height: 800).leftBottom
expect(leftBottomHole.radius).toEqual 2
expect(leftBottomHole.pos.x).toEqual 340
expect(leftBottomHole.pos.y).toEqual 520
it 'provides converted borders data', ->
cfg = createConfig()
cfg.table.scaling_factor = 300
cfg.borders.left = [
{
x: -1
y: -1
},
{
x: 1
y: 0.5
},
{
x: -0.5
y: 1
}
]
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
leftBorderPoints = gameConfig.borderData(width: 800, height: 600).left
expect(leftBorderPoints).toContain new Phaser.Point(100, 0)
expect(leftBorderPoints).toContain new Phaser.Point(700, 450)
expect(leftBorderPoints).toContain new Phaser.Point(250, 600)
it 'provides player configs', ->
cfg = createConfig()
cfg.player_1 =
user_id: 33
name: "Kunta Kinte"
cfg.player_2 =
user_id: 666
name: "NOT the devil"
gameConfig = new GameConfig cfg
playerData = gameConfig.getPlayerData()
expect(playerData.first.id).toEqual 33
expect(playerData.first.name).toEqual "Kunta Kinte"
expect(playerData.second.id).toEqual 666
expect(playerData.second.name).toEqual "NOT the devil"
it 'provides current viewer data', ->
cfg = createConfig()
cfg.current_viewer =
user_id: 42
name: "Mysterious Man"
gameConfig = new GameConfig cfg
currentViewerData = gameConfig.getCurrentViewerData()
expect(currentViewerData.id).toEqual 42
expect(currentViewerData.name).toEqual "Mysterious Man"
describe 'Table config', ->
it 'exposes table damping', ->
cfg = createConfig()
cfg.table.damping = 0.1
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getDamping()).toEqual 0.1
it 'exposes maximum breakball speed', ->
cfg = createConfig()
cfg.table.max_breakball_speed = 12
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getMaximumBreakballSpeed()).toEqual 12
it 'exposes ball/ball stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.stiffness = 0.8
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallStiffness()).toEqual 0.8
it 'exposes infinite ball/ball stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.stiffness = 'InfiNity'
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallStiffness()).toEqual +Infinity
it 'exposes ball/border stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.stiffness = 0.75
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderStiffness()).toEqual 0.75
it 'exposes infinite ball/border stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.stiffness = 'inFinitY'
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderStiffness()).toEqual +Infinity
it 'exposes ball/ball restitution', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.restitution = 0.75
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallRestitution()).toEqual 0.75
it 'exposes ball/border restitution', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.restitution = 0.875
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderRestitution()).toEqual 0.875
| 108083 | #= require game/config/Game
describe 'Game config', ->
GameConfig = global8ball.config.Game
createConfig = ->
borders:
left: [
{
x: -1.25
y: 0.55
},
{
x: -1.25,
y: -0.55
},
{
x: -1.3,
y: -0.6
},
{
x: -1.3,
y: 0.6
}
]
leftBottom: [
{
x: -1.25,
y: 0.7
},
{
x: 0,
y: 0.7
},
{
x: 0,
y: 0.6
},
{
x: -1.2,
y: 0.6
}
]
leftTop: [
{
x: 0,
y: -0.7
},
{
x: -1.2,
y: -0.7
},
{
x: -1.2,
y: -0.6
},
{
x: 0,
y: -0.6
}
]
right: [
{
x: 1.3,
y: 0.6
},
{
x: 1.3,
y: -0.6
},
{
x: 1.25,
y: -0.5
},
{
x: 1.25,
y: 0.55
}
]
rightBottom: [
{
x: 0,
y: 0.7
},
{
x: 1.25,
y: 0.7
},
{
x: 1.2,
y: 0.6
},
{
x: 0.1,
y: 0.6
}
]
rightTop: [
{
x: 1.25,
y: -0.7
},
{
x: 0,
y: -0.7
},
{
x: 0.1,
y: -0.6
},
{
x: 1.2,
y: -0.6
}
]
holes:
centerBottom:
radius: 0.01
x: -0.2
y: 0.6
centerTop:
radius: 0.01
x: -0.2
y: 0.6
leftBottom:
radius: 0.01
x: -0.2
y: 0.6
leftTop:
radius: 0.01
x: -0.2
y: 0.6
rightBottom:
radius: 0.01
x: -0.2
y: 0.6
rightTop:
radius: 0.01
x: -0.2
y: 0.6
current_viewer:
name: ""
user_id: 1
player_1:
name: ""
user_id: 1
player_2:
name: ""
user_id: 2
table:
contact_materials:
ball_ball:
restitution: 0.9
stiffness: "INFINITY"
ball_border:
restitution: 0.95
stiffness: "INFINITY"
damping: 0.2
max_breakball_speed: 10
min_ball_speed: 0.01
scaling_factor: 400
describe 'Physics config', ->
it 'provides an mpx converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 300
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.mpx 2
expect(scaledValue).toEqual 600
it 'provides an mpxi converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 400
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.mpxi 0.5
expect(scaledValue).toEqual -200
it 'provides an pxm converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 250
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.pxm 125
expect(scaledValue).toEqual 0.5
it 'provides an pxmi converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 350
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.pxmi 700
expect(scaledValue).toEqual -2
it 'provides converted holes data', ->
cfg = createConfig()
cfg.table.scaling_factor = 200
cfg.holes.leftBottom =
x: -0.8
y: 0.6
radius: 0.01
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
leftBottomHole = gameConfig.holesData(width: 1000, height: 800).leftBottom
expect(leftBottomHole.radius).toEqual 2
expect(leftBottomHole.pos.x).toEqual 340
expect(leftBottomHole.pos.y).toEqual 520
it 'provides converted borders data', ->
cfg = createConfig()
cfg.table.scaling_factor = 300
cfg.borders.left = [
{
x: -1
y: -1
},
{
x: 1
y: 0.5
},
{
x: -0.5
y: 1
}
]
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
leftBorderPoints = gameConfig.borderData(width: 800, height: 600).left
expect(leftBorderPoints).toContain new Phaser.Point(100, 0)
expect(leftBorderPoints).toContain new Phaser.Point(700, 450)
expect(leftBorderPoints).toContain new Phaser.Point(250, 600)
it 'provides player configs', ->
cfg = createConfig()
cfg.player_1 =
user_id: 33
name: "<NAME>"
cfg.player_2 =
user_id: 666
name: "<NAME>"
gameConfig = new GameConfig cfg
playerData = gameConfig.getPlayerData()
expect(playerData.first.id).toEqual 33
expect(playerData.first.name).toEqual "<NAME>"
expect(playerData.second.id).toEqual 666
expect(playerData.second.name).toEqual "<NAME>"
it 'provides current viewer data', ->
cfg = createConfig()
cfg.current_viewer =
user_id: 42
name: "<NAME>"
gameConfig = new GameConfig cfg
currentViewerData = gameConfig.getCurrentViewerData()
expect(currentViewerData.id).toEqual 42
expect(currentViewerData.name).toEqual "<NAME>"
describe 'Table config', ->
it 'exposes table damping', ->
cfg = createConfig()
cfg.table.damping = 0.1
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getDamping()).toEqual 0.1
it 'exposes maximum breakball speed', ->
cfg = createConfig()
cfg.table.max_breakball_speed = 12
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getMaximumBreakballSpeed()).toEqual 12
it 'exposes ball/ball stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.stiffness = 0.8
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallStiffness()).toEqual 0.8
it 'exposes infinite ball/ball stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.stiffness = 'InfiNity'
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallStiffness()).toEqual +Infinity
it 'exposes ball/border stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.stiffness = 0.75
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderStiffness()).toEqual 0.75
it 'exposes infinite ball/border stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.stiffness = 'inFinitY'
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderStiffness()).toEqual +Infinity
it 'exposes ball/ball restitution', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.restitution = 0.75
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallRestitution()).toEqual 0.75
it 'exposes ball/border restitution', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.restitution = 0.875
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderRestitution()).toEqual 0.875
| true | #= require game/config/Game
describe 'Game config', ->
GameConfig = global8ball.config.Game
createConfig = ->
borders:
left: [
{
x: -1.25
y: 0.55
},
{
x: -1.25,
y: -0.55
},
{
x: -1.3,
y: -0.6
},
{
x: -1.3,
y: 0.6
}
]
leftBottom: [
{
x: -1.25,
y: 0.7
},
{
x: 0,
y: 0.7
},
{
x: 0,
y: 0.6
},
{
x: -1.2,
y: 0.6
}
]
leftTop: [
{
x: 0,
y: -0.7
},
{
x: -1.2,
y: -0.7
},
{
x: -1.2,
y: -0.6
},
{
x: 0,
y: -0.6
}
]
right: [
{
x: 1.3,
y: 0.6
},
{
x: 1.3,
y: -0.6
},
{
x: 1.25,
y: -0.5
},
{
x: 1.25,
y: 0.55
}
]
rightBottom: [
{
x: 0,
y: 0.7
},
{
x: 1.25,
y: 0.7
},
{
x: 1.2,
y: 0.6
},
{
x: 0.1,
y: 0.6
}
]
rightTop: [
{
x: 1.25,
y: -0.7
},
{
x: 0,
y: -0.7
},
{
x: 0.1,
y: -0.6
},
{
x: 1.2,
y: -0.6
}
]
holes:
centerBottom:
radius: 0.01
x: -0.2
y: 0.6
centerTop:
radius: 0.01
x: -0.2
y: 0.6
leftBottom:
radius: 0.01
x: -0.2
y: 0.6
leftTop:
radius: 0.01
x: -0.2
y: 0.6
rightBottom:
radius: 0.01
x: -0.2
y: 0.6
rightTop:
radius: 0.01
x: -0.2
y: 0.6
current_viewer:
name: ""
user_id: 1
player_1:
name: ""
user_id: 1
player_2:
name: ""
user_id: 2
table:
contact_materials:
ball_ball:
restitution: 0.9
stiffness: "INFINITY"
ball_border:
restitution: 0.95
stiffness: "INFINITY"
damping: 0.2
max_breakball_speed: 10
min_ball_speed: 0.01
scaling_factor: 400
describe 'Physics config', ->
it 'provides an mpx converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 300
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.mpx 2
expect(scaledValue).toEqual 600
it 'provides an mpxi converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 400
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.mpxi 0.5
expect(scaledValue).toEqual -200
it 'provides an pxm converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 250
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.pxm 125
expect(scaledValue).toEqual 0.5
it 'provides an pxmi converter', ->
cfg = createConfig()
cfg.table.scaling_factor = 350
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
scaledValue = physicsConfig.pxmi 700
expect(scaledValue).toEqual -2
it 'provides converted holes data', ->
cfg = createConfig()
cfg.table.scaling_factor = 200
cfg.holes.leftBottom =
x: -0.8
y: 0.6
radius: 0.01
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
leftBottomHole = gameConfig.holesData(width: 1000, height: 800).leftBottom
expect(leftBottomHole.radius).toEqual 2
expect(leftBottomHole.pos.x).toEqual 340
expect(leftBottomHole.pos.y).toEqual 520
it 'provides converted borders data', ->
cfg = createConfig()
cfg.table.scaling_factor = 300
cfg.borders.left = [
{
x: -1
y: -1
},
{
x: 1
y: 0.5
},
{
x: -0.5
y: 1
}
]
gameConfig = new GameConfig cfg
physicsConfig = gameConfig.getPhysicsConfig()
leftBorderPoints = gameConfig.borderData(width: 800, height: 600).left
expect(leftBorderPoints).toContain new Phaser.Point(100, 0)
expect(leftBorderPoints).toContain new Phaser.Point(700, 450)
expect(leftBorderPoints).toContain new Phaser.Point(250, 600)
it 'provides player configs', ->
cfg = createConfig()
cfg.player_1 =
user_id: 33
name: "PI:NAME:<NAME>END_PI"
cfg.player_2 =
user_id: 666
name: "PI:NAME:<NAME>END_PI"
gameConfig = new GameConfig cfg
playerData = gameConfig.getPlayerData()
expect(playerData.first.id).toEqual 33
expect(playerData.first.name).toEqual "PI:NAME:<NAME>END_PI"
expect(playerData.second.id).toEqual 666
expect(playerData.second.name).toEqual "PI:NAME:<NAME>END_PI"
it 'provides current viewer data', ->
cfg = createConfig()
cfg.current_viewer =
user_id: 42
name: "PI:NAME:<NAME>END_PI"
gameConfig = new GameConfig cfg
currentViewerData = gameConfig.getCurrentViewerData()
expect(currentViewerData.id).toEqual 42
expect(currentViewerData.name).toEqual "PI:NAME:<NAME>END_PI"
describe 'Table config', ->
it 'exposes table damping', ->
cfg = createConfig()
cfg.table.damping = 0.1
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getDamping()).toEqual 0.1
it 'exposes maximum breakball speed', ->
cfg = createConfig()
cfg.table.max_breakball_speed = 12
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getMaximumBreakballSpeed()).toEqual 12
it 'exposes ball/ball stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.stiffness = 0.8
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallStiffness()).toEqual 0.8
it 'exposes infinite ball/ball stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.stiffness = 'InfiNity'
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallStiffness()).toEqual +Infinity
it 'exposes ball/border stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.stiffness = 0.75
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderStiffness()).toEqual 0.75
it 'exposes infinite ball/border stiffness', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.stiffness = 'inFinitY'
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderStiffness()).toEqual +Infinity
it 'exposes ball/ball restitution', ->
cfg = createConfig()
cfg.table.contact_materials.ball_ball.restitution = 0.75
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBallRestitution()).toEqual 0.75
it 'exposes ball/border restitution', ->
cfg = createConfig()
cfg.table.contact_materials.ball_border.restitution = 0.875
gameConfig = new GameConfig cfg
expect(gameConfig.getTable().getBallBorderRestitution()).toEqual 0.875
|
[
{
"context": "p\">\n <input\n placeholder='Your name'\n className=\"form-control\"\n ",
"end": 1205,
"score": 0.722706139087677,
"start": 1201,
"tag": "NAME",
"value": "name"
}
] | source/javascripts/components/post_form.js.jsx.coffee | garciaf/local_blog | 0 | @PostForm = React.createClass(
mixins: [Backbone.React.Component.mixin]
getInitialState: ->
{
title: ''
author: ''
text: ''
}
create: (e) ->
e.preventDefault()
author = @refs.author.value.trim()
text = @refs.text.value.trim()
title = @refs.title.value.trim()
new_post = new MyApp.Models.Post
new_post.set
author: author
text: text
title: title
if new_post.isValid()
@getCollection().add new_post
@refs.text.value = ''
@refs.title.value = ''
render: ->
`<form className="form-horizontal" onSubmit={this.create}>
<div className="form-group">
<input
placeholder='Title'
className="form-control"
type="text"
defaultValue={this.state.title}
ref="title"
/>
</div>
<div className="form-group">
<textarea
className="form-control"
rows="4" cols="50"
placeholder='Say something'
defaultValue={this.state.text}
ref="text"
/>
</div>
<div className="form-group">
<input
placeholder='Your name'
className="form-control"
type="text"
defaultValue={this.state.author}
ref="author"
/>
</div>
<div className="form-group">
<input type="submit" className="btn btn-default" value="Post" />
</div>
</form>`
)
MyApp.Views.PostForm = React.createFactory(@PostForm) | 174889 | @PostForm = React.createClass(
mixins: [Backbone.React.Component.mixin]
getInitialState: ->
{
title: ''
author: ''
text: ''
}
create: (e) ->
e.preventDefault()
author = @refs.author.value.trim()
text = @refs.text.value.trim()
title = @refs.title.value.trim()
new_post = new MyApp.Models.Post
new_post.set
author: author
text: text
title: title
if new_post.isValid()
@getCollection().add new_post
@refs.text.value = ''
@refs.title.value = ''
render: ->
`<form className="form-horizontal" onSubmit={this.create}>
<div className="form-group">
<input
placeholder='Title'
className="form-control"
type="text"
defaultValue={this.state.title}
ref="title"
/>
</div>
<div className="form-group">
<textarea
className="form-control"
rows="4" cols="50"
placeholder='Say something'
defaultValue={this.state.text}
ref="text"
/>
</div>
<div className="form-group">
<input
placeholder='Your <NAME>'
className="form-control"
type="text"
defaultValue={this.state.author}
ref="author"
/>
</div>
<div className="form-group">
<input type="submit" className="btn btn-default" value="Post" />
</div>
</form>`
)
MyApp.Views.PostForm = React.createFactory(@PostForm) | true | @PostForm = React.createClass(
mixins: [Backbone.React.Component.mixin]
getInitialState: ->
{
title: ''
author: ''
text: ''
}
create: (e) ->
e.preventDefault()
author = @refs.author.value.trim()
text = @refs.text.value.trim()
title = @refs.title.value.trim()
new_post = new MyApp.Models.Post
new_post.set
author: author
text: text
title: title
if new_post.isValid()
@getCollection().add new_post
@refs.text.value = ''
@refs.title.value = ''
render: ->
`<form className="form-horizontal" onSubmit={this.create}>
<div className="form-group">
<input
placeholder='Title'
className="form-control"
type="text"
defaultValue={this.state.title}
ref="title"
/>
</div>
<div className="form-group">
<textarea
className="form-control"
rows="4" cols="50"
placeholder='Say something'
defaultValue={this.state.text}
ref="text"
/>
</div>
<div className="form-group">
<input
placeholder='Your PI:NAME:<NAME>END_PI'
className="form-control"
type="text"
defaultValue={this.state.author}
ref="author"
/>
</div>
<div className="form-group">
<input type="submit" className="btn btn-default" value="Post" />
</div>
</form>`
)
MyApp.Views.PostForm = React.createFactory(@PostForm) |
[
{
"context": " sellStack: [\n {\n id: \"OLAUID\",\n type: \"sell\",\n p",
"end": 242,
"score": 0.7651041746139526,
"start": 236,
"tag": "USERNAME",
"value": "OLAUID"
},
{
"context": " buyStack: [\n {\n ... | testing/samplehandlers.coffee | JhonnyJason/interface-gen | 1 | ############################################################
scihandlers.getLatestOrders = (authCode, assetPairs, subscriber) ->
result = {}
###
{
"ether-euro": {
sellStack: [
{
id: "OLAUID",
type: "sell",
price: 199.0,
volume: 0.3
},
//...
],
buyStack: [
{
id: "SIAUID",
type: "buy",
price: 195.0,
volume: 0.3
},
//...
],
cancelledStack: [
{
id: "AAAAA",
time: 1671356172123,
type: "buy",
price: 191.0,
volume: 0.3
},
//...
],
filledStack: [
{
id: "XXXX",
time: 1671356172356,
type: "buy",
price: 197.0,
volume: 0.3
},
//...
]
}
}
###
return result
############################################################
scihandlers.getLatestTickers = (authCode, assetPairs, subscriber) ->
result = {}
###
{
"ether-euro": {
askPrice: 197,
bidPrice: 198,
closingPrice: 197.5,
timestamp: 132456789
}
}
###
return result
############################################################
scihandlers.getLatestBalances = (authCode, assets, subscriber) ->
result = {}
###
{
"euro": 999.90,
"ether": 12.23,
"usdt": 662.27
}
###
return result
############################################################
scihandlers.addRelevantAsset = (authCode, exchangeName, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.removeRelevantAsset = (authCode, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.addRelevantAssetPair = (authCode, exchangeName, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.removeRelevantAssetPair = (authCode, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.getRelevantAssets = (authCode) ->
result = {}
###
{
"relevantAssets": [
"bancor": {
"exchangeName":"BNT",
"ourName": "bancor"
},
"usdt": {
"exchangeName":"USDT",
"ourName": "usdt"
}
]
}
###
return result
############################################################
scihandlers.getRelevantAssetPairs = (authCode) ->
result = {}
###
{
"relevantAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDT",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getFailingIdentifiers = (authCode) ->
result = {}
###
{
"failingIdentifiers": [
"bancor": {
"exchangeName":"SBNT",
"ourName": "bancor"
},
"usdt": {
"exchangeName":"USDK",
"ourName": "usdt"
},
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getServiceStatus = (authCode) ->
result = {}
###
{
"failingAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getNodeId = (authCode) ->
result = {}
###
{
"failingAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
| 161150 | ############################################################
scihandlers.getLatestOrders = (authCode, assetPairs, subscriber) ->
result = {}
###
{
"ether-euro": {
sellStack: [
{
id: "OLAUID",
type: "sell",
price: 199.0,
volume: 0.3
},
//...
],
buyStack: [
{
id: "SIAUID",
type: "buy",
price: 195.0,
volume: 0.3
},
//...
],
cancelledStack: [
{
id: "AAAAA",
time: 1671356172123,
type: "buy",
price: 191.0,
volume: 0.3
},
//...
],
filledStack: [
{
id: "<NAME>",
time: 1671356172356,
type: "buy",
price: 197.0,
volume: 0.3
},
//...
]
}
}
###
return result
############################################################
scihandlers.getLatestTickers = (authCode, assetPairs, subscriber) ->
result = {}
###
{
"ether-euro": {
askPrice: 197,
bidPrice: 198,
closingPrice: 197.5,
timestamp: 132456789
}
}
###
return result
############################################################
scihandlers.getLatestBalances = (authCode, assets, subscriber) ->
result = {}
###
{
"euro": 999.90,
"ether": 12.23,
"usdt": 662.27
}
###
return result
############################################################
scihandlers.addRelevantAsset = (authCode, exchangeName, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.removeRelevantAsset = (authCode, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.addRelevantAssetPair = (authCode, exchangeName, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.removeRelevantAssetPair = (authCode, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.getRelevantAssets = (authCode) ->
result = {}
###
{
"relevantAssets": [
"bancor": {
"exchangeName":"BNT",
"ourName": "bancor"
},
"usdt": {
"exchangeName":"USDT",
"ourName": "usdt"
}
]
}
###
return result
############################################################
scihandlers.getRelevantAssetPairs = (authCode) ->
result = {}
###
{
"relevantAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDT",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getFailingIdentifiers = (authCode) ->
result = {}
###
{
"failingIdentifiers": [
"bancor": {
"exchangeName":"SBNT",
"ourName": "bancor"
},
"usdt": {
"exchangeName":"USDK",
"ourName": "usdt"
},
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getServiceStatus = (authCode) ->
result = {}
###
{
"failingAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getNodeId = (authCode) ->
result = {}
###
{
"failingAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
| true | ############################################################
scihandlers.getLatestOrders = (authCode, assetPairs, subscriber) ->
result = {}
###
{
"ether-euro": {
sellStack: [
{
id: "OLAUID",
type: "sell",
price: 199.0,
volume: 0.3
},
//...
],
buyStack: [
{
id: "SIAUID",
type: "buy",
price: 195.0,
volume: 0.3
},
//...
],
cancelledStack: [
{
id: "AAAAA",
time: 1671356172123,
type: "buy",
price: 191.0,
volume: 0.3
},
//...
],
filledStack: [
{
id: "PI:NAME:<NAME>END_PI",
time: 1671356172356,
type: "buy",
price: 197.0,
volume: 0.3
},
//...
]
}
}
###
return result
############################################################
scihandlers.getLatestTickers = (authCode, assetPairs, subscriber) ->
result = {}
###
{
"ether-euro": {
askPrice: 197,
bidPrice: 198,
closingPrice: 197.5,
timestamp: 132456789
}
}
###
return result
############################################################
scihandlers.getLatestBalances = (authCode, assets, subscriber) ->
result = {}
###
{
"euro": 999.90,
"ether": 12.23,
"usdt": 662.27
}
###
return result
############################################################
scihandlers.addRelevantAsset = (authCode, exchangeName, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.removeRelevantAsset = (authCode, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.addRelevantAssetPair = (authCode, exchangeName, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.removeRelevantAssetPair = (authCode, ourName) ->
result = {}
###
{
"ok": true
}
###
return result
############################################################
scihandlers.getRelevantAssets = (authCode) ->
result = {}
###
{
"relevantAssets": [
"bancor": {
"exchangeName":"BNT",
"ourName": "bancor"
},
"usdt": {
"exchangeName":"USDT",
"ourName": "usdt"
}
]
}
###
return result
############################################################
scihandlers.getRelevantAssetPairs = (authCode) ->
result = {}
###
{
"relevantAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDT",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getFailingIdentifiers = (authCode) ->
result = {}
###
{
"failingIdentifiers": [
"bancor": {
"exchangeName":"SBNT",
"ourName": "bancor"
},
"usdt": {
"exchangeName":"USDK",
"ourName": "usdt"
},
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getServiceStatus = (authCode) ->
result = {}
###
{
"failingAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
############################################################
scihandlers.getNodeId = (authCode) ->
result = {}
###
{
"failingAssetPairs": [
"bancor-usdt": {
"exchangeName":"BNTUSDK",
"ourName": "bancor-usdt"
}
]
}
###
return result
|
[
{
"context": "###\n# Gruntfile.js\n#\n# © 2014 Dan Nichols\n# See LICENSE for more details\n#\n# Do our grunt w",
"end": 41,
"score": 0.9996365308761597,
"start": 30,
"tag": "NAME",
"value": "Dan Nichols"
}
] | Gruntfile.coffee | dlnichols/h_media | 0 | ###
# Gruntfile.js
#
# © 2014 Dan Nichols
# See LICENSE for more details
#
# Do our grunt work!
###
'use strict'
# Require coffee-script/register so that require uses the project version of
# coffeescript (~1.7) instead of the grunt version (~1.3)
require 'coffee-script/register'
module.exports = (grunt) ->
_ = require 'lodash'
# Time how long tasks take to analyze / optimize
require('time-grunt') grunt
# Load NPM tasks
require('jit-grunt') grunt,
express: 'grunt-express-server'
useminPrepare: 'grunt-usemin'
db: 'grunt/database.coffee'
# Load Grunt config
# TODO: Fork load-grunt-config and make it JIT for configs?
gruntConfig = _.extend(
require('load-grunt-config') grunt,
loadGruntTasks: false
require './grunt/custom/min'
)
# Define the configuration for all the tasks
grunt.initConfig gruntConfig
# Used for delaying livereload until after server has restarted
grunt.registerTask 'wait', ->
grunt.log.ok 'Waiting for server reload...'
done = @async()
setTimeout (->
grunt.log.ok 'Done waiting!'
done()
return
), 1000
return
grunt.registerTask 'serve', [
#'concurrent:serve'
'express:dev'
'wait'
'open:serve'
'watch'
]
grunt.registerTask 'assets', 'Build assets.', [
'concurrent:serve'
]
grunt.registerTask 'build', 'Prepare for deployment.', [
'clean:build'
'useminPrepare' # prep concat/*min blocks
'concurrent:build' # compass/less/coffee+image/svg/htmlmin
'concat' # based on usemin block in html
'ngmin'
'cssmin'
'uglify'
'copy'
'rev'
'usemin'
]
grunt.registerTask 'test', 'Run tests.', [
'mochaTest'
]
grunt.registerTask 'default', 'Does nothing.', [
]
return
| 9973 | ###
# Gruntfile.js
#
# © 2014 <NAME>
# See LICENSE for more details
#
# Do our grunt work!
###
'use strict'
# Require coffee-script/register so that require uses the project version of
# coffeescript (~1.7) instead of the grunt version (~1.3)
require 'coffee-script/register'
module.exports = (grunt) ->
_ = require 'lodash'
# Time how long tasks take to analyze / optimize
require('time-grunt') grunt
# Load NPM tasks
require('jit-grunt') grunt,
express: 'grunt-express-server'
useminPrepare: 'grunt-usemin'
db: 'grunt/database.coffee'
# Load Grunt config
# TODO: Fork load-grunt-config and make it JIT for configs?
gruntConfig = _.extend(
require('load-grunt-config') grunt,
loadGruntTasks: false
require './grunt/custom/min'
)
# Define the configuration for all the tasks
grunt.initConfig gruntConfig
# Used for delaying livereload until after server has restarted
grunt.registerTask 'wait', ->
grunt.log.ok 'Waiting for server reload...'
done = @async()
setTimeout (->
grunt.log.ok 'Done waiting!'
done()
return
), 1000
return
grunt.registerTask 'serve', [
#'concurrent:serve'
'express:dev'
'wait'
'open:serve'
'watch'
]
grunt.registerTask 'assets', 'Build assets.', [
'concurrent:serve'
]
grunt.registerTask 'build', 'Prepare for deployment.', [
'clean:build'
'useminPrepare' # prep concat/*min blocks
'concurrent:build' # compass/less/coffee+image/svg/htmlmin
'concat' # based on usemin block in html
'ngmin'
'cssmin'
'uglify'
'copy'
'rev'
'usemin'
]
grunt.registerTask 'test', 'Run tests.', [
'mochaTest'
]
grunt.registerTask 'default', 'Does nothing.', [
]
return
| true | ###
# Gruntfile.js
#
# © 2014 PI:NAME:<NAME>END_PI
# See LICENSE for more details
#
# Do our grunt work!
###
'use strict'
# Require coffee-script/register so that require uses the project version of
# coffeescript (~1.7) instead of the grunt version (~1.3)
require 'coffee-script/register'
module.exports = (grunt) ->
_ = require 'lodash'
# Time how long tasks take to analyze / optimize
require('time-grunt') grunt
# Load NPM tasks
require('jit-grunt') grunt,
express: 'grunt-express-server'
useminPrepare: 'grunt-usemin'
db: 'grunt/database.coffee'
# Load Grunt config
# TODO: Fork load-grunt-config and make it JIT for configs?
gruntConfig = _.extend(
require('load-grunt-config') grunt,
loadGruntTasks: false
require './grunt/custom/min'
)
# Define the configuration for all the tasks
grunt.initConfig gruntConfig
# Used for delaying livereload until after server has restarted
grunt.registerTask 'wait', ->
grunt.log.ok 'Waiting for server reload...'
done = @async()
setTimeout (->
grunt.log.ok 'Done waiting!'
done()
return
), 1000
return
grunt.registerTask 'serve', [
#'concurrent:serve'
'express:dev'
'wait'
'open:serve'
'watch'
]
grunt.registerTask 'assets', 'Build assets.', [
'concurrent:serve'
]
grunt.registerTask 'build', 'Prepare for deployment.', [
'clean:build'
'useminPrepare' # prep concat/*min blocks
'concurrent:build' # compass/less/coffee+image/svg/htmlmin
'concat' # based on usemin block in html
'ngmin'
'cssmin'
'uglify'
'copy'
'rev'
'usemin'
]
grunt.registerTask 'test', 'Run tests.', [
'mochaTest'
]
grunt.registerTask 'default', 'Does nothing.', [
]
return
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.