CombinedText stringlengths 4 3.42M |
|---|
#!/usr/bin/env ruby
# coding: utf-8
REPO_PATH = ARGV.first || '/usr/src/ruby' # path to ruby repo
SECTION_NAME, G_PREFIX = case RUBY_PLATFORM
when /linux/i; ["rodata", ""]
when /darwin/i; ["const_data", "g"]
else ;["rdata", ""]
end
UNICODE_VERSION = "10.0.0"
SRC_DIR = "../src/org/jcodings"
DST_BIN_DIR = "../resources/tables"
INDENT = " " * 4
def generate_data
generate_encoding_list
generate_transcoder_list
generate_transoder_data
generate_coderange_data
generate_coderange_list
generate_fold_data
end
def process_binary obj_name
binary = open(obj_name, "rb"){|f|f.read}
offset = `#{G_PREFIX}objdump -h -j .#{SECTION_NAME} #{obj_name}`[/\.#{SECTION_NAME}.*?(\w+)\s+\S+$/, 1].to_i(16)
`#{G_PREFIX}nm --no-sort --defined-only #{obj_name}`.split("\n").map{|s|s.split(/\s+/)}.each do |address, _, name|
yield name, binary, address.to_i(16) + offset
end
end
def generate_encoding_list
enc_map = {
"ASCII-8BIT" => "ASCII",
"UTF-8" => "UTF8",
"US-ASCII" => "USASCII",
"Big5" => "BIG5",
"Big5-HKSCS" => "Big5HKSCS",
"Big5-UAO" => "Big5UAO",
"CP949" => "CP949",
"Emacs-Mule" => "EmacsMule",
"EUC-JP" => "EUCJP",
"EUC-KR" => "EUCKR",
"EUC-TW" => "EUCTW",
"GB2312" => "GB2312",
"GB18030" => "GB18030",
"GBK" => "GBK",
"ISO-8859-1" => "ISO8859_1",
"ISO-8859-2" => "ISO8859_2",
"ISO-8859-3" => "ISO8859_3",
"ISO-8859-4" => "ISO8859_4",
"ISO-8859-5" => "ISO8859_5",
"ISO-8859-6" => "ISO8859_6",
"ISO-8859-7" => "ISO8859_7",
"ISO-8859-8" => "ISO8859_8",
"ISO-8859-9" => "ISO8859_9",
"ISO-8859-10" => "ISO8859_10",
"ISO-8859-11" => "ISO8859_11",
"ISO-8859-13" => "ISO8859_13",
"ISO-8859-14" => "ISO8859_14",
"ISO-8859-15" => "ISO8859_15",
"ISO-8859-16" => "ISO8859_16",
"KOI8-R" => "KOI8R",
"KOI8-U" => "KOI8U",
"Shift_JIS" => "SJIS",
"UTF-16BE" => "UTF16BE",
"UTF-16LE" => "UTF16LE",
"UTF-32BE" => "UTF32BE",
"UTF-32LE" => "UTF32LE",
"Windows-31J" => "Windows_31J", # TODO: Windows-31J is actually a variant of SJIS
"Windows-1250" => "Windows_1250",
"Windows-1251" => "Windows_1251",
"Windows-1252" => "Windows_1252",
"Windows-1253" => "Windows_1253",
"Windows-1254" => "Windows_1254",
"Windows-1257" => "Windows_1257"
}
defines, other = open("#{REPO_PATH}/encdb.h").read.tr('()', '').scan(/ENC_([A-Z_]+)(.*?);/m).partition { |a, b| a =~ /DEFINE/ }
other << ["ALIAS", "\"MS932\", \"Windows-31J\""]
open("#{SRC_DIR}/EncodingList.java", "wb") { |f| f << open("EncodingListTemplate.java", "rb").read.
sub(/%\{defines\}/, defines.map { |cmd, name| "#{INDENT*2}EncodingDB.declare(#{name}, \"#{enc_map[name[/[^"]+/]] || (raise 'class not found for encoding ' + name)}\");" }.join("\n")).
sub(/%\{other\}/, other.map { |cmd, from, to| "#{INDENT*2}EncodingDB.#{cmd.downcase}(#{from}#{to.nil? ? "" : to});" }.join("\n")).
sub(/%\{switch\}/, defines.map { |cmd, name| "#{INDENT*3}case \"#{enc_map[name[/[^"]+/]]}\": return #{enc_map[name[/[^"]+/]]}Encoding.INSTANCE;"}.join("\n"))}
end
def generate_transcoder_list
generic_list = []
transcoder_list = []
Dir["#{REPO_PATH}/enc/trans/*.c"].reject{|f| f =~ /transdb/}.sort.each do |trans_file|
name = trans_file[/(\w+)\.c/, 1].split('_').map{|e| e.capitalize}.join("")
trans_src = open(trans_file){|f|f.read}
trans_src.scan(/static\s+const\s+rb_transcoder.*?(\w+)\s+=\s+\{(.+?)\};/m) do |t_name, body|
n = t_name.split('_')
t_name = n[1].capitalize
t_name += '_' + n[2..-1].join('_') unless n[2..-1].empty?
body = body.gsub(/(\/\*.*?\*\/)/, "").split(',').map{|e|e.strip}
src, dst, tree_start, table_info, iul, max_in, max_out, conv, state_size, state_init, state_fini, *funcs = body
tree_start = trans_src[/#define\s+#{tree_start}\s+WORDINDEX2INFO\((\d+)\)/, 1].to_i << 2
state_size = "0" if state_size == "sizeof(struct from_utf8_mac_status)"
generic = funcs.all?{|f|f == "NULL"}
generic_list << [src, dst, tree_start, "\"#{name}\"", iul, max_in, max_out, "AsciiCompatibility.#{conv.split('_').last.upcase}", state_size] if generic
transcoder_list << [src, dst, t_name, !generic]
end
end
open("#{SRC_DIR}/transcode/TranscoderList.java", "wb") << open("TranscoderListTemplate.java", "rb"){|f|f.read}.
sub(/%\{list\}/, transcoder_list.map{|src, dst, cls, specific| "#{INDENT*2}TranscoderDB.declare(#{src}, #{dst}, #{specific ? '"' + cls + '"' : 'null /*' + cls + '*/'});"}.join("\n")).
sub(/%\{generic\}/, generic_list.map{|g| "#{INDENT*2}new GenericTranscoderEntry(#{g.join(', ')})"}.join(",\n")).
sub(/%\{switch\}/, transcoder_list.map{|src, dst, cls, specific| "#{INDENT*3}case \"#{cls}\": return #{cls}_Transcoder.INSTANCE;" if specific}.compact.join("\n"))
end
def generate_transoder_data
Dir["#{REPO_PATH}/enc/trans/*.c"].reject{|f| f =~ /transdb/}.sort.each do |trans_file|
# next unless trans_file =~ /utf8/
trans_file = trans_file[/(.*)\./, 1]
src = open("#{trans_file}.c", "rb").read
make_name = -> (name) {name.split('_').map{|e|e.capitalize}.join('')}
process_binary "#{trans_file}.o" do |name, binary, address|
case name
when /(.*)_byte_array/
name = $1
size = src[/(\w+?_byte_array)\[(\d+?)\]/m, 2].to_i
open("#{DST_BIN_DIR}/" + "Transcoder_#{make_name.(name)}_ByteArray.bin", "wb") do |f|
f << [size].pack("N")
f << binary[address, size]
end
when /(.*)_word_array/
name = $1
size = src[/(\w+?_word_array)\[(\d+?)\]/m, 2].to_i
open("#{DST_BIN_DIR}/" + "Transcoder_#{make_name.(name)}_WordArray.bin", "wb") do |f|
f << [size].pack("N")
address.step(address + (size * 4 - 1), 4).each do |adr|
f << binary[adr, 4].unpack("l").pack("N")
end
end
end
end
end
end
def generate_coderange_data
process_binary "#{REPO_PATH}/enc/unicode.o" do |name, binary, address|
case name
when /CR_(.*)/
size = binary[address, 4].unpack("l")
address += 4
open("#{DST_BIN_DIR}/#{name}.bin", "wb") do |f|
f << [size[0] * 2 + 1].pack("N")
f << size.pack("N")
address.step(address + (size[0] * 2 * 4 - 1), 4).each do |adr|
f << binary[adr, 4].unpack("l").pack("N")
end
end
end
end
end
def generate_coderange_list
name2ctype_h = "#{REPO_PATH}/enc/unicode/#{UNICODE_VERSION}/name2ctype.h"
cr_map = open("#{name2ctype_h}", "rb"){|f|f.read}.scan(/#define CR_(.*?) CR_(.*)/).inject({}){|h, (k, v)|h[v] = k.tr('_', '').downcase; h}
unicode_src = `cpp #{name2ctype_h} -DUSE_UNICODE_PROPERTIES -DUSE_UNICODE_AGE_PROPERTIES | grep "^[^#;]"`
gperf_map = Hash[unicode_src[/struct\s+uniname2ctype_pool_t\s+\{(.*?)\}\;/m, 1].scan(/uniname2ctype_pool_str(\d+).*\"(\S+)\"/)]
aliases = unicode_src[/wordlist\[\]\s+=\s+\{(.*?)\}\;/m, 1].scan(/uniname2ctype_pool_str(\d+).*?(\d+)/).
inject(Hash.new{|h, k|h[k] = []}){|h, (name, num)|h[num] << gperf_map[name]; h}.inject({}) do |h, (k, v)|
h.merge! Hash[v.map{|val|[val, v - [val]]}]
h
end
ranges = unicode_src[/CodeRanges\[\]\s+=\s+\{(.*?)\}\;/m, 1].scan(/CR_(\w+)/).flatten
standard_char_type_range = 16
out = ranges.take(standard_char_type_range).map{|range|[range.tr('_', '').downcase, range]} +
ranges.drop(standard_char_type_range).map do |range|
name = range =~ /Age_(\d+)_(\d+)/ ? "age=#{$1}.#{$2}" : range.tr('_', '').downcase
name = cr_map.delete(range) || name
name = "#{$1}=#{$2}" if name =~ /(graphemeclusterbreak)(.*)/i
([name] + aliases[name].to_a).map{|n|[n, range]}
end.flatten(1)
max_length = out.max_by{|name, table|name.length}.first.length.to_s
open("#{SRC_DIR}/unicode/UnicodeProperties.java", "wb") do |f| f <<
open("UnicodePropertiesTemplate.java", "rb").read.sub(/%\{max_length\}/, max_length).sub(/%\{extcrs\}/, out.map{|name, table| "#{INDENT * 2}" + "new CodeRangeEntry(\"#{name}\", \"CR_#{table}\")"}.join(",\n"))
end
end
def generate_fold_data
src = open("#{REPO_PATH}/enc/unicode/#{UNICODE_VERSION}/casefold.h"){|f|f.read}
offsets = src.scan(/#define (Case\S+).*?\[(\w+)\].*?\+(\d+)/).inject({}){|h, (k, *v)| h[k] = v.map(&:to_i);h}
extract = -> (f, binary, address, from, range, from_w, to_w) do
f << [0].pack("N")
width = from_w + to_w
size = 0
start = address + from * width * 4
start.step(start + (range * width * 4 - 1), width * 4) do |adr|
f << binary[adr, from_w * 4].unpack("l*").pack("N*")
packed = binary[adr + from_w * 4, 4].unpack("l").first
length = packed & 7
size += length
f << [packed].pack("N")
f << binary[adr + from_w * 4 + 4, length * 4].unpack("l*").pack("N*")
end
f.seek(0)
vrange = size - (size - range)
f << [(range + vrange) / 2].pack("N")
end
process_binary "#{REPO_PATH}/enc/unicode.o" do |name, binary, address|
case name
when /(CaseFold)_11_Table/
name = $1
range, from = offsets[name]
range += offsets[name + '_Locale'].first
open("#{DST_BIN_DIR}/CaseFold.bin", "wb") do |f|
extract.(f, binary, address, from, range, 1, 4)
end
when /(CaseUnfold_(\d+))_Table/
name = $1
case $2
when '11'
range, from = offsets[name]
open("#{DST_BIN_DIR}/CaseUnfold_11.bin", "wb") do |f|
extract.(f, binary, address, from, range, 1, 4)
end
range, from = offsets[name + '_Locale']
open("#{DST_BIN_DIR}/CaseUnfold_11_Locale.bin", "wb") do |f|
extract.(f, binary, address, from, range, 1, 4)
end
when '12'
range, from = offsets[name]
open("#{DST_BIN_DIR}/CaseUnfold_12.bin", "wb") do |f|
extract.(f, binary, address, from, range, 2, 3)
end
range, from = offsets[name + '_Locale']
open("#{DST_BIN_DIR}/CaseUnfold_12_Locale.bin", "wb") do |f|
extract.(f, binary, address, from, range, 2, 3)
end
when '13'
range, from = offsets[name]
open("#{DST_BIN_DIR}/CaseUnfold_13.bin", "wb") do |f|
extract.(f, binary, address, from, range, 3, 3)
end
end
when /CaseMappingSpecials/
open("#{DST_BIN_DIR}/CaseMappingSpecials.bin", "wb") do |f|
size = src[/CaseMappingSpecials\[\]\s+=\s+\{(.*?)\}\;/m, 1].scan(/0x[0-9A-F]{4}/).size
f << [size].pack("N")
address.step(address + (size * 4 - 1), 4).each do |adr|
f << binary[adr, 4].unpack("l").pack("N")
end
end
end
end
end
generate_data
bump unicode version to 11.0
#!/usr/bin/env ruby
# coding: utf-8
REPO_PATH = ARGV.first || '/usr/src/ruby' # path to ruby repo
SECTION_NAME, G_PREFIX = case RUBY_PLATFORM
when /linux/i; ["rodata", ""]
when /darwin/i; ["const_data", "g"]
else ;["rdata", ""]
end
UNICODE_VERSION = "11.0.0"
SRC_DIR = "../src/org/jcodings"
DST_BIN_DIR = "../resources/tables"
INDENT = " " * 4
# Entry point: regenerates every jcodings artifact (Java source files and
# binary tables) from a local CRuby checkout at REPO_PATH. Outputs go to
# SRC_DIR (Java) and DST_BIN_DIR (.bin tables).
def generate_data
generate_encoding_list
generate_transcoder_list
generate_transoder_data # NOTE(review): typo for "transcoder"; renaming would break nothing here but is kept as-is
generate_coderange_data
generate_coderange_list
generate_fold_data
end
# Yields [symbol_name, raw_object_file_bytes, file_offset] for every defined
# symbol in obj_name. Shells out to binutils: objdump locates the file offset
# of the read-only data section (.rodata on Linux, __const_data on Darwin via
# the "g"-prefixed GNU tools), nm lists the symbols; each symbol's
# section-relative address plus the section offset gives its position in the
# raw file bytes.
def process_binary obj_name
binary = open(obj_name, "rb"){|f|f.read}
# objdump -h row format: Idx Name Size VMA LMA File-off Algn; the capture grabs the hex file offset.
offset = `#{G_PREFIX}objdump -h -j .#{SECTION_NAME} #{obj_name}`[/\.#{SECTION_NAME}.*?(\w+)\s+\S+$/, 1].to_i(16)
# nm line format: "<hex address> <type letter> <name>"
`#{G_PREFIX}nm --no-sort --defined-only #{obj_name}`.split("\n").map{|s|s.split(/\s+/)}.each do |address, _, name|
yield name, binary, address.to_i(16) + offset
end
end
# Generates SRC_DIR/EncodingList.java from EncodingListTemplate.java by
# scraping CRuby's encdb.h: DEFINE entries become EncodingDB.declare calls
# plus a switch arm, everything else (ALIAS/REPLICATE/...) becomes the
# corresponding EncodingDB call. enc_map translates CRuby encoding names to
# jcodings Java class-name stems; an unmapped DEFINE raises immediately.
def generate_encoding_list
# CRuby encoding name => jcodings class-name stem.
enc_map = {
"ASCII-8BIT" => "ASCII",
"UTF-8" => "UTF8",
"US-ASCII" => "USASCII",
"Big5" => "BIG5",
"Big5-HKSCS" => "Big5HKSCS",
"Big5-UAO" => "Big5UAO",
"CP949" => "CP949",
"Emacs-Mule" => "EmacsMule",
"EUC-JP" => "EUCJP",
"EUC-KR" => "EUCKR",
"EUC-TW" => "EUCTW",
"GB2312" => "GB2312",
"GB18030" => "GB18030",
"GBK" => "GBK",
"ISO-8859-1" => "ISO8859_1",
"ISO-8859-2" => "ISO8859_2",
"ISO-8859-3" => "ISO8859_3",
"ISO-8859-4" => "ISO8859_4",
"ISO-8859-5" => "ISO8859_5",
"ISO-8859-6" => "ISO8859_6",
"ISO-8859-7" => "ISO8859_7",
"ISO-8859-8" => "ISO8859_8",
"ISO-8859-9" => "ISO8859_9",
"ISO-8859-10" => "ISO8859_10",
"ISO-8859-11" => "ISO8859_11",
"ISO-8859-13" => "ISO8859_13",
"ISO-8859-14" => "ISO8859_14",
"ISO-8859-15" => "ISO8859_15",
"ISO-8859-16" => "ISO8859_16",
"KOI8-R" => "KOI8R",
"KOI8-U" => "KOI8U",
"Shift_JIS" => "SJIS",
"UTF-16BE" => "UTF16BE",
"UTF-16LE" => "UTF16LE",
"UTF-32BE" => "UTF32BE",
"UTF-32LE" => "UTF32LE",
"Windows-31J" => "Windows_31J", # TODO: Windows-31J is actually a variant of SJIS
"Windows-1250" => "Windows_1250",
"Windows-1251" => "Windows_1251",
"Windows-1252" => "Windows_1252",
"Windows-1253" => "Windows_1253",
"Windows-1254" => "Windows_1254",
"Windows-1257" => "Windows_1257"
}
# Strip parens so the regex sees bare arguments; split macro invocations into
# ENC_DEFINE-style entries (defines) vs ENC_ALIAS/ENC_REPLICATE/... (other).
defines, other = open("#{REPO_PATH}/encdb.h").read.tr('()', '').scan(/ENC_([A-Z_]+)(.*?);/m).partition { |a, b| a =~ /DEFINE/ }
# MS932 alias is not in encdb.h; add it by hand.
other << ["ALIAS", "\"MS932\", \"Windows-31J\""]
# Fill the three template placeholders and write the Java source.
open("#{SRC_DIR}/EncodingList.java", "wb") { |f| f << open("EncodingListTemplate.java", "rb").read.
sub(/%\{defines\}/, defines.map { |cmd, name| "#{INDENT*2}EncodingDB.declare(#{name}, \"#{enc_map[name[/[^"]+/]] || (raise 'class not found for encoding ' + name)}\");" }.join("\n")).
sub(/%\{other\}/, other.map { |cmd, from, to| "#{INDENT*2}EncodingDB.#{cmd.downcase}(#{from}#{to.nil? ? "" : to});" }.join("\n")).
sub(/%\{switch\}/, defines.map { |cmd, name| "#{INDENT*3}case \"#{enc_map[name[/[^"]+/]]}\": return #{enc_map[name[/[^"]+/]]}Encoding.INSTANCE;"}.join("\n"))}
end
# Generates SRC_DIR/transcode/TranscoderList.java by scanning every
# rb_transcoder struct literal in CRuby's enc/trans/*.c sources. A transcoder
# with all function-pointer slots NULL is "generic" (fully table-driven) and
# also gets a GenericTranscoderEntry; the rest need a hand-written
# <Name>_Transcoder Java class and appear in the switch.
def generate_transcoder_list
generic_list = []
transcoder_list = []
Dir["#{REPO_PATH}/enc/trans/*.c"].reject{|f| f =~ /transdb/}.sort.each do |trans_file|
# CamelCase table name derived from the C file name, e.g. utf8_mac -> Utf8Mac.
name = trans_file[/(\w+)\.c/, 1].split('_').map{|e| e.capitalize}.join("")
trans_src = open(trans_file){|f|f.read}
trans_src.scan(/static\s+const\s+rb_transcoder.*?(\w+)\s+=\s+\{(.+?)\};/m) do |t_name, body|
# Drop the leading "rb_"-style prefix component, keep the rest as the class stem.
n = t_name.split('_')
t_name = n[1].capitalize
t_name += '_' + n[2..-1].join('_') unless n[2..-1].empty?
# Strip C comments, then split the struct initializer into its fields.
body = body.gsub(/(\/\*.*?\*\/)/, "").split(',').map{|e|e.strip}
src, dst, tree_start, table_info, iul, max_in, max_out, conv, state_size, state_init, state_fini, *funcs = body
# tree_start is a macro name; resolve it to its WORDINDEX2INFO word index, then to a byte offset (<< 2).
tree_start = trans_src[/#define\s+#{tree_start}\s+WORDINDEX2INFO\((\d+)\)/, 1].to_i << 2
# sizeof(...) cannot be evaluated here; that state is handled natively on the Java side.
state_size = "0" if state_size == "sizeof(struct from_utf8_mac_status)"
generic = funcs.all?{|f|f == "NULL"}
generic_list << [src, dst, tree_start, "\"#{name}\"", iul, max_in, max_out, "AsciiCompatibility.#{conv.split('_').last.upcase}", state_size] if generic
transcoder_list << [src, dst, t_name, !generic]
end
end
open("#{SRC_DIR}/transcode/TranscoderList.java", "wb") << open("TranscoderListTemplate.java", "rb"){|f|f.read}.
sub(/%\{list\}/, transcoder_list.map{|src, dst, cls, specific| "#{INDENT*2}TranscoderDB.declare(#{src}, #{dst}, #{specific ? '"' + cls + '"' : 'null /*' + cls + '*/'});"}.join("\n")).
sub(/%\{generic\}/, generic_list.map{|g| "#{INDENT*2}new GenericTranscoderEntry(#{g.join(', ')})"}.join(",\n")).
sub(/%\{switch\}/, transcoder_list.map{|src, dst, cls, specific| "#{INDENT*3}case \"#{cls}\": return #{cls}_Transcoder.INSTANCE;" if specific}.compact.join("\n"))
end
# Dumps each transcoder's lookup tables out of the compiled enc/trans/*.o
# object files into DST_BIN_DIR as big-endian .bin files:
#   *_byte_array -> "<u32 size><size raw bytes>"
#   *_word_array -> "<u32 size><size big-endian u32 words>"
# Sizes are taken from the array declarations in the matching .c source.
# NOTE(review): method name is a typo for "transcoder"; kept for compatibility.
def generate_transoder_data
Dir["#{REPO_PATH}/enc/trans/*.c"].reject{|f| f =~ /transdb/}.sort.each do |trans_file|
# next unless trans_file =~ /utf8/
# Base path without extension: "<dir>/utf8_mac" -> read .c for sizes, .o for bytes.
trans_file = trans_file[/(.*)\./, 1]
src = open("#{trans_file}.c", "rb").read
make_name = -> (name) {name.split('_').map{|e|e.capitalize}.join('')}
process_binary "#{trans_file}.o" do |name, binary, address|
case name
when /(.*)_byte_array/
name = $1
# Element count from the C declaration "..._byte_array[N]".
size = src[/(\w+?_byte_array)\[(\d+?)\]/m, 2].to_i
open("#{DST_BIN_DIR}/" + "Transcoder_#{make_name.(name)}_ByteArray.bin", "wb") do |f|
f << [size].pack("N")
f << binary[address, size]
end
when /(.*)_word_array/
name = $1
size = src[/(\w+?_word_array)\[(\d+?)\]/m, 2].to_i
open("#{DST_BIN_DIR}/" + "Transcoder_#{make_name.(name)}_WordArray.bin", "wb") do |f|
f << [size].pack("N")
# Re-emit each native-endian int32 word ("l") as big-endian ("N").
address.step(address + (size * 4 - 1), 4).each do |adr|
f << binary[adr, 4].unpack("l").pack("N")
end
end
end
end
end
end
# Dumps every Unicode code-range table (symbols named CR_*) from the compiled
# enc/unicode.o into DST_BIN_DIR/<symbol>.bin. In the object file each table
# is "<int32 pair count><pair count * 2 int32 values>"; the .bin file is
# "<u32 total words incl. count><count><values...>", all big-endian.
def generate_coderange_data
process_binary "#{REPO_PATH}/enc/unicode.o" do |name, binary, address|
case name
when /CR_(.*)/
# First word of the table is the number of [low, high] pairs; kept as a
# one-element array so it can be pack("N")-ed directly below.
size = binary[address, 4].unpack("l")
address += 4
open("#{DST_BIN_DIR}/#{name}.bin", "wb") do |f|
# Total payload length in 4-byte words: the count word + 2 words per pair.
f << [size[0] * 2 + 1].pack("N")
f << size.pack("N")
# Convert each native-endian int32 to big-endian.
address.step(address + (size[0] * 2 * 4 - 1), 4).each do |adr|
f << binary[adr, 4].unpack("l").pack("N")
end
end
end
end
end
# Generates SRC_DIR/unicode/UnicodeProperties.java: the mapping from Unicode
# property names (plus their gperf-generated aliases) to CR_* table names.
# Works by preprocessing CRuby's name2ctype.h with cpp and scraping the
# resulting gperf string pool, wordlist and CodeRanges array.
def generate_coderange_list
name2ctype_h = "#{REPO_PATH}/enc/unicode/#{UNICODE_VERSION}/name2ctype.h"
# Alias #defines "CR_X CR_Y": remember Y -> normalized name of X.
cr_map = open("#{name2ctype_h}", "rb"){|f|f.read}.scan(/#define CR_(.*?) CR_(.*)/).inject({}){|h, (k, v)|h[v] = k.tr('_', '').downcase; h}
# Expand macros; grep keeps only non-directive, non-comment lines.
unicode_src = `cpp #{name2ctype_h} -DUSE_UNICODE_PROPERTIES -DUSE_UNICODE_AGE_PROPERTIES | grep "^[^#;]"`
# gperf string pool: pool index -> property-name string.
gperf_map = Hash[unicode_src[/struct\s+uniname2ctype_pool_t\s+\{(.*?)\}\;/m, 1].scan(/uniname2ctype_pool_str(\d+).*\"(\S+)\"/)]
# Pool entries sharing a ctype number are aliases of each other:
# build name -> [other names with the same number].
aliases = unicode_src[/wordlist\[\]\s+=\s+\{(.*?)\}\;/m, 1].scan(/uniname2ctype_pool_str(\d+).*?(\d+)/).
inject(Hash.new{|h, k|h[k] = []}){|h, (name, num)|h[num] << gperf_map[name]; h}.inject({}) do |h, (k, v)|
h.merge! Hash[v.map{|val|[val, v - [val]]}]
h
end
ranges = unicode_src[/CodeRanges\[\]\s+=\s+\{(.*?)\}\;/m, 1].scan(/CR_(\w+)/).flatten
# The first 16 entries are POSIX-style ctype classes; they get no aliases.
standard_char_type_range = 16
out = ranges.take(standard_char_type_range).map{|range|[range.tr('_', '').downcase, range]} +
ranges.drop(standard_char_type_range).map do |range|
# Age_N_M tables become "age=N.M"; others are lowercased without underscores.
name = range =~ /Age_(\d+)_(\d+)/ ? "age=#{$1}.#{$2}" : range.tr('_', '').downcase
name = cr_map.delete(range) || name
name = "#{$1}=#{$2}" if name =~ /(graphemeclusterbreak)(.*)/i
([name] + aliases[name].to_a).map{|n|[n, range]}
end.flatten(1)
# Longest property name, used by the Java side to size its lookup buffer.
max_length = out.max_by{|name, table|name.length}.first.length.to_s
open("#{SRC_DIR}/unicode/UnicodeProperties.java", "wb") do |f| f <<
open("UnicodePropertiesTemplate.java", "rb").read.sub(/%\{max_length\}/, max_length).sub(/%\{extcrs\}/, out.map{|name, table| "#{INDENT * 2}" + "new CodeRangeEntry(\"#{name}\", \"CR_#{table}\")"}.join(",\n"))
end
end
# Dumps the Unicode case-folding/unfolding tables from the compiled
# enc/unicode.o into DST_BIN_DIR/*.bin. Table entry counts and start offsets
# come from the #defines in casefold.h; the raw int32 data comes from the
# object file and is re-emitted big-endian.
def generate_fold_data
src = open("#{REPO_PATH}/enc/unicode/#{UNICODE_VERSION}/casefold.h"){|f|f.read}
# "#define CaseX ... [COUNT] ... +OFFSET" -> { "CaseX" => [count, offset] }
offsets = src.scan(/#define (Case\S+).*?\[(\w+)\].*?\+(\d+)/).inject({}){|h, (k, *v)| h[k] = v.map(&:to_i);h}
# Writes `range` table entries of shape [from_w key words, packed flags word,
# (flags & 7) value words] starting at entry index `from`. A placeholder
# header word is written first and patched afterwards via seek(0).
extract = -> (f, binary, address, from, range, from_w, to_w) do
f << [0].pack("N")
width = from_w + to_w
size = 0
start = address + from * width * 4
start.step(start + (range * width * 4 - 1), width * 4) do |adr|
f << binary[adr, from_w * 4].unpack("l*").pack("N*")
# Packed word: low 3 bits hold the number of mapped codepoints that follow.
packed = binary[adr + from_w * 4, 4].unpack("l").first
length = packed & 7
size += length
f << [packed].pack("N")
f << binary[adr + from_w * 4 + 4, length * 4].unpack("l*").pack("N*")
end
# Overwrite the placeholder header with the entry count.
f.seek(0)
# NOTE(review): vrange simplifies to `range`, so the header is just `range`;
# looks like a leftover from an older header formula — TODO confirm.
vrange = size - (size - range)
f << [(range + vrange) / 2].pack("N")
end
process_binary "#{REPO_PATH}/enc/unicode.o" do |name, binary, address|
case name
when /(CaseFold)_11_Table/
name = $1
# The locale-specific rows live in the same table, directly after the
# plain rows; fold both into one CaseFold.bin.
range, from = offsets[name]
range += offsets[name + '_Locale'].first
open("#{DST_BIN_DIR}/CaseFold.bin", "wb") do |f|
extract.(f, binary, address, from, range, 1, 4)
end
when /(CaseUnfold_(\d+))_Table/
name = $1
# Suffix encodes key width: _11 = 1 codepoint key, _12 = 2, _13 = 3.
case $2
when '11'
range, from = offsets[name]
open("#{DST_BIN_DIR}/CaseUnfold_11.bin", "wb") do |f|
extract.(f, binary, address, from, range, 1, 4)
end
range, from = offsets[name + '_Locale']
open("#{DST_BIN_DIR}/CaseUnfold_11_Locale.bin", "wb") do |f|
extract.(f, binary, address, from, range, 1, 4)
end
when '12'
range, from = offsets[name]
open("#{DST_BIN_DIR}/CaseUnfold_12.bin", "wb") do |f|
extract.(f, binary, address, from, range, 2, 3)
end
range, from = offsets[name + '_Locale']
open("#{DST_BIN_DIR}/CaseUnfold_12_Locale.bin", "wb") do |f|
extract.(f, binary, address, from, range, 2, 3)
end
when '13'
range, from = offsets[name]
open("#{DST_BIN_DIR}/CaseUnfold_13.bin", "wb") do |f|
extract.(f, binary, address, from, range, 3, 3)
end
end
when /CaseMappingSpecials/
# Flat int32 array; its length is counted from the 0xNNNN literals in the header.
open("#{DST_BIN_DIR}/CaseMappingSpecials.bin", "wb") do |f|
size = src[/CaseMappingSpecials\[\]\s+=\s+\{(.*?)\}\;/m, 1].scan(/0x[0-9A-F]{4}/).size
f << [size].pack("N")
address.step(address + (size * 4 - 1), 4).each do |adr|
f << binary[adr, 4].unpack("l").pack("N")
end
end
end
end
end
generate_data
|
# coding: utf-8
# Gemspec for fast_bitarray 0.0.1 (C-extension backed BitArray).
Gem::Specification.new do |spec|
spec.name = 'fast_bitarray'
spec.version = '0.0.1'
spec.authors = ['Maxim Dobryakov']
spec.email = %w(maxim.dobryakov@gmail.com)
spec.description = %q{Fast BitArray implementation for Ruby}
spec.summary = %q{Fast BitArray implementation for Ruby base on C extension}
spec.homepage = ''
spec.license = 'MIT'
# Package exactly what git tracks; bin/ entries become executables.
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = %w(lib)
spec.add_development_dependency 'bundler', '~> 1.3'
spec.add_development_dependency 'rake'
spec.add_development_dependency 'rake-compiler'
end
Build extconf.rb after gem installation
# coding: utf-8
# Gemspec for fast_bitarray 0.0.1 (C-extension backed BitArray).
Gem::Specification.new do |spec|
  # --- identity ---
  spec.name    = 'fast_bitarray'
  spec.version = '0.0.1'
  spec.authors = ['Maxim Dobryakov']
  spec.email   = ['maxim.dobryakov@gmail.com']

  spec.description = 'Fast BitArray implementation for Ruby'
  spec.summary     = 'Fast BitArray implementation for Ruby base on C extension'
  spec.homepage    = ''
  spec.license     = 'MIT'

  # --- packaged files: exactly what git tracks ---
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  # Native extension, compiled via extconf.rb at install time.
  spec.extensions = ['ext/fast_bitarray/extconf.rb']

  # --- development dependencies ---
  spec.add_development_dependency 'bundler', '~> 1.3'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rake-compiler'
end
|
SOURCE = File.expand_path "~/Downloads/Data"
DESTINATION = File.expand_path "~/Poker"
require 'zip'
require 'date'
require 'shellwords'
CURRENT_YEAR = Date.today.year
YEARS = 2010.upto(CURRENT_YEAR).to_a
LIMITS = {
"5-10" => '-5-10-',
"10-20" => '-10-20-',
"7.50-15" => '-7.50-15-',
"8-16" => '-8-16-',
"10-10" => '-10-10-',
"15-30" => '-15-30-',
"25-50" => '-25-50-',
"30-60" => '-30-60-',
"50-100" => '-50-100-',
"75-150" => '-75-150-',
"100-200" => '-100-200-',
"200-400" => '-200-400-',
"250-500" => '-250-500-',
"300-600" => '-300-600-',
"400-800" => '-400-800-',
"500-1000" => '-500-1,000-',
"1000-2000" => '-1,000-2,000-',
}
POKER_MAPPING = {
"NL Holdem-PokerStars" => [
"NL Holdem-PokerStars",
"NoLimitHoldem-PokerStars",
"PokerStars-NoLimitHoldem"
],
"FL Holdem-PokerStars" => [
"FL Holdem-PokerStars",
"FixedLimitHoldem-PokerStars",
"PokerStars-FixedLimitHoldem"
],
"PL Omaha-FullTilt" => [
"PotLimitOmaha-FullTilt",
"FullTilt-PotLimitOmaha"
],
"PL Omaha-Pacific" => [
"PotLimitOmaha-Pacific",
"Pacific-PotLimitOmaha"
],
"PL Omaha-PokerStars" => [
"PLO-PokerStars",
"PotLimitOmaha-PokerStars",
"PokerStars-PotLimitOmaha"
],
"PL Omaha-MicroGaming" => [
"MicroGaming-PotLimitOmaha",
"PotLimitOmaha-MicroGaming"
],
"PL Omaha-OnGame" => [
"PotLimitOmaha-OnGame",
"OnGame-PotLimitOmaha"
],
"FL Holdem-OnGame" => [
"FL Holdem-OnGame",
"FixedLimitHoldem-OnGame",
"OnGame-FixedLimitHoldem"
],
"NL Holdem-OnGame" => [
"NoLimitHoldem-OnGame",
"NL Holdem-OnGame",
"OnGame-NoLimitHoldem"
],
"NL Holdem-FullTilt" => [
"NoLimitHoldem-FullTilt",
"FullTilt-NoLimitHoldem"
],
"NL Holdem-Pacific" => [
"NoLimitHoldem-Pacific",
"Pacific-NoLimitHoldem"
],
"FL Holdem-Pacific" => [
"FixedLimitHoldem-Pacific",
"Pacific-FixedLimitHoldem"
],
"FL Omaha Hi-Lo-PokerStars" => [
"FixedLimitOmahaHiLo-PokerStars",
"FL Omaha Hi-Lo-PokerStars",
"PokerStars-FixedLimitOmahaHiLo"
],
"FL Holdem-FullTilt" => [
"FixedLimitHoldem-FullTilt",
"FullTilt-FixedLimitHoldem"
],
"NL Holdem-MicroGaming" => [
"NoLimitHoldem-MicroGaming",
"MicroGaming-NoLimitHoldem"
],
"FL Holdem-MicroGaming" => [
"FixedLimitHoldem-MicroGaming",
"MicroGaming-FixedLimitHoldem"
],
"PL Holdem-PokerStars" => [
"PotLimitHoldem-PokerStars",
"Pot Limit Holdem-PokerStars",
"PokerStars-PotLimitHoldem"
]
}
def poker_pattern_limit
acc = []
POKER_MAPPING.each do |type, patterns|
patterns.each do |pattern|
LIMITS.each_key do |limit|
acc << [type, pattern, limit]
end
end
end
return acc
end
def get_content_from_zip zipped_file, name
require 'zip'
Zip::File.open zipped_file do |zipfile|
entry = zipfile.entries.select {|e| e.name == name}.first
entry.get_input_stream.read
end
end
def detect_year filepath
found_years = YEARS.select do |year|
filepath.include? year.to_s
end
case found_years.length
when 0
raise "No years found"
when 1
return found_years.first.to_s
else
raise "Found multiple years"
end
end
def names_with_suffix filepath
ext = File.extname filepath
name = File.basename filepath, ext
path = File.dirname filepath
suffix = 0
suffix_name = nil
acc = [filepath]
while true
suffix += 1
suffix_name = File.join(path, "#{name}_#{suffix}#{ext}")
acc << suffix_name
break unless File.exists? suffix_name
end
acc
end
def log2 msg
log msg
$stderr.puts msg
end
def empty_dir? folder
f = Shellwords.escape folder
Dir.glob("#{f}/*").empty?
end
def kind_move source_file, dest_folder
require 'fileutils'
dest_folder = File.expand_path dest_folder
basename = File.basename source_file
dest_file = File.join dest_folder, basename
unless File.exists? dest_file
mkdir dest_folder
return rename(source_file, dest_file)
end
names = names_with_suffix dest_file
is_unique = names.select do |n|
File.exists?(n) && FileUtils.cmp(n, source_file)
end.empty?
if is_unique
uniq_name = names.last
log2 "Synonym found, copying to #{uniq_name}"
mkdir dest_folder
rename source_file, uniq_name
else
log2 "Duplicate found, removing #{source_file}"
remove(source_file)
end
end
Maid.rules do
rule "Remove empty folders" do
dirs = Dir.glob("#{SOURCE}/**/")
dirs.delete "#{SOURCE}/"
dirs.each do |folder|
remove(folder) if empty_dir? folder
end
end
poker_pattern_limit.each do |poker, poker_pattern, limit|
limit_pattern = LIMITS[limit]
rule "[TXT] #{poker_pattern} - limit #{limit}" do
# Process txt files
txt_pattern = "*#{poker_pattern}*.txt"
dir("#{SOURCE}/**/#{txt_pattern}").each do |path|
# skip pathes without limit pattern
next unless path.include?(limit_pattern)
year = detect_year path
destination = File.join(DESTINATION, poker, limit, year)
kind_move path, destination
end
end
rule "[DAT] #{poker_pattern} - limit #{limit}" do
dat_pattern = "*#{poker_pattern}*.dat"
dir("#{SOURCE}/**/#{dat_pattern}").each do |path|
# skip pathes without limit pattern
next unless path.include?(limit_pattern)
year = detect_year path
destination = File.join(DESTINATION, poker, limit, year)
kind_move path, destination
end
end
rule "[ZIP] #{poker_pattern} - limit #{limit}" do
zip_pattern = "*#{poker_pattern}*.zip"
dir("#{SOURCE}/**/#{zip_pattern}").each do |path|
# skip pathes without limit pattern
next unless path.include?(limit_pattern)
log2("Found zip-file #{path} to operate")
year = detect_year path
destination = File.join(DESTINATION, poker, limit, year)
mkdir(destination)
# Create temp directory and extract zip
# contents there
Dir.mktmpdir 'poker_' do |tempdir|
Zip::File.open(path) do |zipfile|
zipfile.each do |file|
filename = file.name
file.extract(File.join tempdir, filename)
end
end
# Walk through extracted files and process them
Dir.entries(tempdir).select{ |n|
!File.directory? (File.join(tempdir, n))
}.each do |file|
new_path = File.join(tempdir, file)
log2 "processing from zip #{new_path}"
kind_move new_path, destination
end
end
# We can remove zip file once it was processed
remove path
end
end
end
end
Added new limit 2000-4000
SOURCE = File.expand_path "~/Downloads/Data"
DESTINATION = File.expand_path "~/Poker"
require 'zip'
require 'date'
require 'shellwords'
CURRENT_YEAR = Date.today.year
YEARS = 2010.upto(CURRENT_YEAR).to_a
LIMITS = {
"5-10" => '-5-10-',
"10-20" => '-10-20-',
"7.50-15" => '-7.50-15-',
"8-16" => '-8-16-',
"10-10" => '-10-10-',
"15-30" => '-15-30-',
"25-50" => '-25-50-',
"30-60" => '-30-60-',
"50-100" => '-50-100-',
"75-150" => '-75-150-',
"100-200" => '-100-200-',
"200-400" => '-200-400-',
"250-500" => '-250-500-',
"300-600" => '-300-600-',
"400-800" => '-400-800-',
"500-1000" => '-500-1,000-',
"1000-2000" => '-1,000-2,000-',
"2000-4000" => '-2,000-4,000-'
}
POKER_MAPPING = {
"NL Holdem-PokerStars" => [
"NL Holdem-PokerStars",
"NoLimitHoldem-PokerStars",
"PokerStars-NoLimitHoldem"
],
"FL Holdem-PokerStars" => [
"FL Holdem-PokerStars",
"FixedLimitHoldem-PokerStars",
"PokerStars-FixedLimitHoldem"
],
"PL Omaha-FullTilt" => [
"PotLimitOmaha-FullTilt",
"FullTilt-PotLimitOmaha"
],
"PL Omaha-Pacific" => [
"PotLimitOmaha-Pacific",
"Pacific-PotLimitOmaha"
],
"PL Omaha-PokerStars" => [
"PLO-PokerStars",
"PotLimitOmaha-PokerStars",
"PokerStars-PotLimitOmaha"
],
"PL Omaha-MicroGaming" => [
"MicroGaming-PotLimitOmaha",
"PotLimitOmaha-MicroGaming"
],
"PL Omaha-OnGame" => [
"PotLimitOmaha-OnGame",
"OnGame-PotLimitOmaha"
],
"FL Holdem-OnGame" => [
"FL Holdem-OnGame",
"FixedLimitHoldem-OnGame",
"OnGame-FixedLimitHoldem"
],
"NL Holdem-OnGame" => [
"NoLimitHoldem-OnGame",
"NL Holdem-OnGame",
"OnGame-NoLimitHoldem"
],
"NL Holdem-FullTilt" => [
"NoLimitHoldem-FullTilt",
"FullTilt-NoLimitHoldem"
],
"NL Holdem-Pacific" => [
"NoLimitHoldem-Pacific",
"Pacific-NoLimitHoldem"
],
"FL Holdem-Pacific" => [
"FixedLimitHoldem-Pacific",
"Pacific-FixedLimitHoldem"
],
"FL Omaha Hi-Lo-PokerStars" => [
"FixedLimitOmahaHiLo-PokerStars",
"FL Omaha Hi-Lo-PokerStars",
"PokerStars-FixedLimitOmahaHiLo"
],
"FL Holdem-FullTilt" => [
"FixedLimitHoldem-FullTilt",
"FullTilt-FixedLimitHoldem"
],
"NL Holdem-MicroGaming" => [
"NoLimitHoldem-MicroGaming",
"MicroGaming-NoLimitHoldem"
],
"FL Holdem-MicroGaming" => [
"FixedLimitHoldem-MicroGaming",
"MicroGaming-FixedLimitHoldem"
],
"PL Holdem-PokerStars" => [
"PotLimitHoldem-PokerStars",
"Pot Limit Holdem-PokerStars",
"PokerStars-PotLimitHoldem"
]
}
# Builds the cartesian product of (game type, filename pattern, limit key):
# one [type, pattern, limit] triple per Maid rule to be generated.
def poker_pattern_limit
  POKER_MAPPING.flat_map do |type, patterns|
    patterns.flat_map do |pattern|
      LIMITS.keys.map { |limit| [type, pattern, limit] }
    end
  end
end
# Returns the full decompressed body of the entry called `name` inside the
# archive `zipped_file`. Raises NoMethodError (on nil) when no entry matches.
# Depends on the third-party rubyzip gem (Zip::File).
def get_content_from_zip zipped_file, name
require 'zip' # redundant with the top-level require; kept as a local guard
Zip::File.open zipped_file do |zipfile|
entry = zipfile.entries.select {|e| e.name == name}.first
entry.get_input_stream.read
end
end
# Returns (as a String) the single known year from YEARS that appears in
# `filepath`. Raises when the path mentions no known year, or more than one.
def detect_year filepath
  matches = YEARS.select { |year| filepath.include?(year.to_s) }
  raise "No years found" if matches.empty?
  raise "Found multiple years" if matches.length > 1
  matches.first.to_s
end
# Returns [filepath, filepath_1, ..., filepath_N] where the final entry is
# the first "_N"-suffixed variant that does not yet exist on disk — i.e. a
# free destination name for a colliding move. The original path is always
# the first element, whether or not it exists.
def names_with_suffix filepath
  ext  = File.extname filepath
  name = File.basename filepath, ext
  path = File.dirname filepath
  acc = [filepath]
  suffix = 0
  loop do
    suffix += 1
    candidate = File.join(path, "#{name}_#{suffix}#{ext}")
    acc << candidate
    # Stop at the first free name. (Was File.exists?, which was removed
    # in Ruby 3.2 and raised NoMethodError on modern interpreters.)
    break unless File.exist? candidate
  end
  acc
end
# Logs `msg` twice: via Maid's DSL `log` (into the Maid log file) and to
# stderr so it is visible when running interactively. Only callable inside
# a Maid rule context, where `log` is defined.
def log2 msg
log msg
$stderr.puts msg
end
# True when `folder` contains no visible (non-dotfile) entries; also true
# for a nonexistent folder, matching Dir.glob's behavior.
def empty_dir? folder
  # Use the :base option rather than interpolating an escaped path into the
  # glob string: the previous Shellwords.escape targets /bin/sh quoting, not
  # Dir.glob syntax, and was fragile for paths with glob metacharacters.
  Dir.glob("*", base: folder).empty?
end
# Moves source_file into dest_folder with de-duplication:
# - no name collision: plain move;
# - collision, but some existing copy (base or _N suffix) has identical
#   content: the source is a duplicate, delete it;
# - collision with different content: move under the next free _N suffix.
# Relies on the Maid DSL helpers mkdir/rename/remove, on log2, and on
# names_with_suffix defined above.
def kind_move source_file, dest_folder
  require 'fileutils' # FileUtils.cmp for byte-wise comparison below
  dest_folder = File.expand_path dest_folder
  basename = File.basename source_file
  dest_file = File.join dest_folder, basename
  # Fast path: nothing at the destination yet. (File.exists? was removed
  # in Ruby 3.2; File.exist? is the supported spelling.)
  unless File.exist? dest_file
    mkdir dest_folder
    return rename(source_file, dest_file)
  end
  names = names_with_suffix dest_file
  # Unique iff no existing variant has identical content to the source.
  is_unique = names.none? do |n|
    File.exist?(n) && FileUtils.cmp(n, source_file)
  end
  if is_unique
    uniq_name = names.last
    log2 "Synonym found, copying to #{uniq_name}"
    mkdir dest_folder
    rename source_file, uniq_name
  else
    log2 "Duplicate found, removing #{source_file}"
    remove(source_file)
  end
end
# Maid rule set: sorts downloaded poker hand histories from SOURCE into
# DESTINATION/<game type>/<limit>/<year>/, handling .txt, .dat and .zip
# inputs and de-duplicating via kind_move.
#
# Dir.mktmpdir lives in the 'tmpdir' stdlib; it was never required at the
# top of the file, so the ZIP rules crashed with NoMethodError at runtime.
require 'tmpdir'
Maid.rules do
  rule "Remove empty folders" do
    dirs = Dir.glob("#{SOURCE}/**/")
    dirs.delete "#{SOURCE}/" # never delete the source root itself
    dirs.each do |folder|
      remove(folder) if empty_dir? folder
    end
  end
  poker_pattern_limit.each do |poker, poker_pattern, limit|
    limit_pattern = LIMITS[limit]
    rule "[TXT] #{poker_pattern} - limit #{limit}" do
      # Process txt files
      txt_pattern = "*#{poker_pattern}*.txt"
      dir("#{SOURCE}/**/#{txt_pattern}").each do |path|
        # skip pathes without limit pattern
        next unless path.include?(limit_pattern)
        year = detect_year path
        destination = File.join(DESTINATION, poker, limit, year)
        kind_move path, destination
      end
    end
    rule "[DAT] #{poker_pattern} - limit #{limit}" do
      # Same handling as txt, for .dat histories.
      dat_pattern = "*#{poker_pattern}*.dat"
      dir("#{SOURCE}/**/#{dat_pattern}").each do |path|
        # skip pathes without limit pattern
        next unless path.include?(limit_pattern)
        year = detect_year path
        destination = File.join(DESTINATION, poker, limit, year)
        kind_move path, destination
      end
    end
    rule "[ZIP] #{poker_pattern} - limit #{limit}" do
      zip_pattern = "*#{poker_pattern}*.zip"
      dir("#{SOURCE}/**/#{zip_pattern}").each do |path|
        # skip pathes without limit pattern
        next unless path.include?(limit_pattern)
        log2("Found zip-file #{path} to operate")
        year = detect_year path
        destination = File.join(DESTINATION, poker, limit, year)
        mkdir(destination)
        # Extract the archive into a throwaway temp directory, then sort
        # the extracted files individually.
        Dir.mktmpdir 'poker_' do |tempdir|
          Zip::File.open(path) do |zipfile|
            zipfile.each do |file|
              # SECURITY(review): entry names are used verbatim; a crafted
              # archive with "../" components could escape tempdir (zip slip).
              file.extract(File.join(tempdir, file.name))
            end
          end
          # Move every extracted regular file (skip subdirectories).
          Dir.entries(tempdir).select { |n|
            !File.directory?(File.join(tempdir, n))
          }.each do |file|
            new_path = File.join(tempdir, file)
            log2 "processing from zip #{new_path}"
            kind_move new_path, destination
          end
        end
        # The archive itself is no longer needed once processed.
        remove path
      end
    end
  end
end
|
# Gemspec for prospectus_pkgforge 0.0.8 (Prospectus helpers for pkgforge).
Gem::Specification.new do |s|
s.name = 'prospectus_pkgforge'
s.version = '0.0.8'
# Release date is stamped at build time, not fixed per release.
s.date = Time.now.strftime('%Y-%m-%d')
s.summary = 'Prospectus helpers for pkgforge'
s.description = 'Prospectus helpers for pkgforge'
s.authors = ['Les Aker']
s.email = 'me@lesaker.org'
s.homepage = 'https://github.com/amylum/prospectus_pkgforge'
s.license = 'MIT'
# Ship everything git tracks; specs double as the test files.
s.files = `git ls-files`.split
s.test_files = `git ls-files spec/*`.split
s.add_dependency 'pkgforge', '~> 0.20.0'
s.add_dependency 'prospectus', '~> 0.5.0'
s.add_development_dependency 'codecov', '~> 0.1.1'
s.add_development_dependency 'fuubar', '~> 2.3.0'
s.add_development_dependency 'goodcop', '~> 0.5.0'
s.add_development_dependency 'rake', '~> 12.3.0'
s.add_development_dependency 'rspec', '~> 3.7.0'
s.add_development_dependency 'rubocop', '~> 0.57.1'
end
Updated version of rubocop to 0.58.0
# Gemspec for prospectus_pkgforge 0.0.8 (Prospectus helpers for pkgforge).
Gem::Specification.new do |s|
  # Gem identity and metadata; date is stamped at build time.
  s.name        = 'prospectus_pkgforge'
  s.version     = '0.0.8'
  s.date        = Time.now.strftime('%Y-%m-%d')
  s.summary     = 'Prospectus helpers for pkgforge'
  s.description = 'Prospectus helpers for pkgforge'
  s.authors     = ['Les Aker']
  s.email       = 'me@lesaker.org'
  s.homepage    = 'https://github.com/amylum/prospectus_pkgforge'
  s.license     = 'MIT'

  # Ship everything git tracks; specs double as the test files.
  s.files      = `git ls-files`.split
  s.test_files = `git ls-files spec/*`.split

  # Runtime dependencies.
  s.add_dependency 'pkgforge', '~> 0.20.0'
  s.add_dependency 'prospectus', '~> 0.5.0'

  # Development / CI tooling.
  s.add_development_dependency 'codecov', '~> 0.1.1'
  s.add_development_dependency 'fuubar', '~> 2.3.0'
  s.add_development_dependency 'goodcop', '~> 0.5.0'
  s.add_development_dependency 'rake', '~> 12.3.0'
  s.add_development_dependency 'rspec', '~> 3.7.0'
  s.add_development_dependency 'rubocop', '~> 0.58.0'
end
|
require 'matrix'
require 'rdl'
$error_threshold = 0.000001
class Matrix
class << self
extend RDL
spec :identity do
pre_task do |arg|
$matrix_identity_arg = arg
end
arg 0, (RDL.flat {|a| a > 0})
ret (RDL.flat {|r|
num_rows = r.instance_variable_get(:@rows).size
num_cols = r.instance_variable_get(:@column_size)
arg = $matrix_identity_arg
c = true
for i in 0..num_rows-1
for j in 0..num_cols-1
if i == j
c = c & (r[i,j] == 1)
else
c = c & (r[i,j] == 0)
end
end
end
(num_cols == arg) & (num_rows == arg) & c
})
end
spec :[] do
pre_task do |*args|
$matrix_create_args = args
end
pre_cond do |*args|
arg_sizes = args.map {|x| x.size}
(arg_sizes.uniq.size == 1) | (arg_sizes.uniq.size == 0)
end
ret (RDL.flat {|r|
args = $matrix_create_args
ret_rows = r.instance_variable_get(:@rows)
args == ret_rows
})
end
spec :build do
pre_task do |*args|
$matrix_build_args = args
end
pre_cond do |*args|
arg_sizes = args.map {|x| x.size}
(arg_sizes.uniq.size == 1) | (arg_sizes.uniq.size == 0)
end
arg 0, RDL.flat {|a| a >= 0}
rest 1, RDL.flat {|a| a >= 0}
ret (RDL.flat {|r|
row_size_arg = $matrix_build_args[0]
if $matrix_build_args.size == 1
# 2nd arg is a default arg == 1st_arg
column_size_arg = row_size_arg
else
column_size_arg = $matrix_build_args[1]
end
ret_row_count = r.instance_variable_get(:@rows).size
ret_col_count = r.instance_variable_get(:@column_size)
(row_size_arg == ret_row_count) & (column_size_arg == ret_col_count)
})
end
spec :column_vector do
pre_task do |arg|
$matrix_column_vector_arg = arg
end
ret (RDL.flat {|r|
arg = $matrix_column_vector_arg
ret_rows = r.instance_variable_get(:@rows)
ret_row_count = ret_rows.size
ret_col_count = r.instance_variable_get(:@column_size)
c = true
for i in 0..ret_row_count-1
c = c & (ret_rows[i] == [arg[i]])
end
(ret_row_count == arg.size) & (ret_col_count == 1) & c
})
end
spec :columns do
pre_task do |arg|
$matrix_columns_arg = arg
end
ret (RDL.flat {|r|
arg = $matrix_columns_arg
transpose_ret_rows = r.transpose.instance_variable_get(:@rows)
transpose_ret_rows == arg
})
end
spec :diagonal do
pre_task do |*args|
$matrix_diagonal_args = args
end
ret (RDL.flat {|r|
args = $matrix_diagonal_args
ret_row_count = r.instance_variable_get(:@rows).size
ret_col_count = r.instance_variable_get(:@column_size)
c = true
for i in 0..ret_row_count-1
for j in 0..ret_col_count-1
if i == j
c = c & (r[i,j] == args[i])
else
c = c & (r[i,j] == 0)
end
end
end
(ret_row_count == ret_col_count) & (ret_row_count == args.size) & c
})
end
spec :empty do
pre_task do |*args|
$matrix_empty_args = args
end
pre_cond do |*args|
if args.size == 0
row_size = 0
col_size = 0
elsif args.size == 1
row_size = args[0]
col_size = 0
else
row_size = args[0]
col_size = args[1]
end
(row_size >= 0) & (col_size >= 0) & ((row_size == 0) | (col_size == 0))
end
ret (RDL.flat {|r|
args = $matrix_empty_args
if args.size == 0
row_size = 0
col_size = 0
elsif args.size == 1
row_size = args[0]
col_size = 0
else
row_size = args[0]
col_size = args[1]
end
ret_rows = r.instance_variable_get(:@rows)
ret_row_count = ret_rows.size
ret_col_count = r.instance_variable_get(:@column_size)
c = ret_rows.all? {|x| x == []}
(ret_row_count == row_size) & (ret_col_count == col_size) & c
})
end
spec :row_vector do
pre_task do |arg|
$matrix_row_vector_arg = arg
end
ret (RDL.flat {|r|
arg = $matrix_row_vector_arg
ret_rows = r.instance_variable_get(:@rows)
ret_row_count = ret_rows.size
ret_col_count = r.instance_variable_get(:@column_size)
(ret_rows == [arg]) & (ret_row_count == 1) & (ret_col_count == arg.size)
})
end
spec :rows do
pre_task do |*args|
$matrix_rows_args = args
end
pre_cond do |*args|
arg_sizes = args[0].map {|x| x.size}
(arg_sizes.uniq.size == 1) | (arg_sizes.uniq.size == 0)
end
ret (RDL.flat {|r|
arg_row = $matrix_rows_args[0]
# There is a second optional arg.
# the optional argument copy is false, use the given arrays as the
# internal structure of the matrix without copying
# the post condition involving dup is not listed here
ret_rows = r.instance_variable_get(:@rows)
ret_rows == arg_row
})
end
spec :scalar do
pre_task do |*args|
$matrix_scalar_args = args
end
arg 0, RDL.flat {|a| a > 0}
ret (RDL.flat {|r|
arg_n = $matrix_scalar_args[0]
arg_value = $matrix_scalar_args[1]
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in 0..ret_row_size-1
for j in 0..ret_col_size-1
if i == j
c = c & (r[i,j] == arg_value)
else
c = c & (r[i,j] == 0)
end
end
end
(ret_row_size == ret_col_size) & (ret_col_size == arg_n) & c
})
end
spec :zero do
pre_task do |arg|
$matrix_zero_arg = arg
end
arg 0, RDL.flat {|a| a > 0}
ret (RDL.flat {|r|
arg = $matrix_zero_arg
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in 0..ret_row_size-1
for j in 0..ret_col_size-1
c = c & (r[i,j] == 0)
end
end
(ret_row_size == ret_col_size) & (ret_col_size == arg) & c
})
end
end
end
class Matrix
extend RDL
spec :* do
pre_task do |arg|
$matrix_mult_arg = arg
$matrix_mult_self = self
end
# an RTC pre_condition would require all elements to be numbers
ret (RDL.flat {|r|
arg = $matrix_mult_arg
slf = $matrix_mult_self
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
slf_row_size = slf.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
(ret_row_size == slf_row_size) & (ret_col_size == arg_col_size)
})
end
spec :** do
pre_task do |arg|
$matrix_exp_arg = arg
$matrix_exp_self = self
end
ret (RDL.flat {|r|
slf = $matrix_exp_self
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
slf_row_size = slf.instance_variable_get(:@rows).size
(ret_row_size == ret_col_size) & (slf_row_size == ret_col_size)
})
end
spec :- do
pre_task do |arg|
$matrix_minus_arg = arg
$matrix_minus_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_minus_arg
slf = $matrix_minus_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in 0..ret_row_size-1
for j in 0..ret_col_size-1
c = c & (r[i,j] == slf[i,j] - arg[i,j])
end
end
(ret_row_size == arg_row_size) & (ret_col_size == arg_col_size) & c
})
end
spec :+ do
pre_task do |arg|
$matrix_add_arg = arg
$matrix_add_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_add_arg
slf = $matrix_add_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in 0..ret_row_size-1
for j in 0..ret_col_size-1
c = c & (r[i,j] == slf[i,j] + arg[i,j])
end
end
(ret_row_size == arg_row_size) & (ret_col_size == arg_col_size) & c
})
end
# Post-condition spec for Matrix#/ (right division): the result should have
# the argument's dimensions and approximate slf * arg.inverse entry-by-entry
# within $error_threshold.
spec :/ do
  pre_task do |arg|
    $matrix_div_arg = arg
    $matrix_div_slf = self
  end
  ret (RDL.flat {|r|
    arg = $matrix_div_arg
    slf = $matrix_div_slf
    arg_row_size = arg.instance_variable_get(:@rows).size
    arg_col_size = arg.instance_variable_get(:@column_size)
    ret_row_size = r.instance_variable_get(:@rows).size
    ret_col_size = r.instance_variable_get(:@column_size)
    ret_approx = slf * arg.inverse
    diff = r - ret_approx
    diff_row_size = diff.instance_variable_get(:@rows).size
    diff_col_size = diff.instance_variable_get(:@column_size)
    c = true
    for i in 0..diff_row_size-1
      # BUG FIX: was `for j in 0...diff_col_size-1` — an exclusive range
      # combined with -1, which skipped the last column of every row.
      # Use the inclusive range, matching the row loop and sibling specs.
      for j in 0..diff_col_size-1
        c = c & (diff[i,j].abs < $error_threshold)
      end
    end
    (ret_row_size == arg_row_size) & (ret_col_size == arg_col_size) & c
  })
end
spec :== do
pre_task do |arg|
$matrix_eq_arg = arg
$matrix_eq_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_eq_arg
slf = $matrix_eq_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
matrices_eq = true
if not (arg_row_size == slf_row_size and arg_col_size == slf_col_size)
matrices_eq = false
else
for i in 0..arg_row_size-1
for j in 0..arg_col_size-1
matrices_eq = matrices_eq & (arg[i,j] == slf[i,j])
end
end
end
((r == true) & matrices_eq) | ((r == false) & (not matrices_eq))
})
end
# Post-condition spec for Matrix#[](i, j) (element access).
spec :[] do
pre_task do |*args|
$matrix_index_args = args
$matrix_index_slf = self
end
ret (RDL.flat {|r|
args = $matrix_index_args
slf = $matrix_index_slf
arg_i = args[0]
arg_j = args[1]
slf_rows = slf.instance_variable_get(:@rows)
slf_row_size = slf_rows.size
slf_col_size = slf.instance_variable_get(:@column_size)
# Either index outside [0, size) counts as out of range here; Matrix#[]
# returns nil for indices past the end.
args_out_of_range = (arg_i < 0) | (arg_i >= slf_row_size) | (arg_j < 0) | (arg_j >= slf_col_size)
# NOTE(review): this only checks that the returned value occurs SOMEWHERE in
# the matrix, not that it is the entry at (i, j) — weaker than exact lookup.
# Negative indices are classified out-of-range above but still satisfy the
# second disjunct when Matrix#[] resolves them; TODO confirm intent.
slf_includes_arg = slf_rows.any? {|x| x.any? {|x2| x2 == r} }
((r == nil) & args_out_of_range) | slf_includes_arg
})
end
spec :clone do
# skipped
end
spec :coerce do
# skipped
end
spec :collect do
pre_task do |*args|
$matrix_collect_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_collect_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
(slf_row_size == ret_row_size) & (slf_col_size == ret_col_size)
})
end
spec :column do
pre_task do |arg, &blk|
$matrix_column_arg = arg
$matrix_column_blk = blk
$matrix_column_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_column_arg
blk = $matrix_column_blk
slf = $matrix_column_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in 0..slf_row_size-1
c = c & (r[i] == slf[i, arg]) if r != nil
end
arg_out_of_range = arg >= slf_col_size || arg < -slf_col_size
((not blk) & ((r == nil) & arg_out_of_range) | c) | blk
})
end
spec :column_vectors do
pre_task do |*args|
$matrix_column_vectors_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_column_vectors_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in 0..r.size-1
c = c & (slf.column(i) == r[i])
end
(r.size == slf_col_size) & c
})
end
spec :conjugate do
pre_task do |*args|
$matrix_conjugate_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_conjugate_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r_row_size = r.instance_variable_get(:@rows).size
r_col_size = r.instance_variable_get(:@column_size)
(r_row_size == slf_row_size) & (r_col_size == slf_col_size)
})
end
spec :determinant do
pre_task do |*args|
$matrix_determinant_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_determinant_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
# could do something like m.determinant == m.transpose.determinant
# but seems like there are some recursive call problems with RDL
true
})
end
spec :each do
# skipped, it can do something with the block argument
end
spec :each_with_index do
# skipped, it can do something with the block argument
end
spec :elements_to_f do
pre_task do |*args|
$matrix_elements_to_f_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_elements_to_f_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in 0..slf_row_size-1
for j in 0..slf_col_size-1
diff = (slf[i,j] - r[i,j]).abs
c = c & (diff < $error_threshold)
end
end
(slf_row_size == ret_row_size) & (slf_col_size == ret_col_size) & c
})
end
# Post-condition spec for Matrix#elements_to_i: same dimensions as the
# receiver, each converted entry within $error_threshold of the original.
spec :elements_to_i do
  pre_task do |*args|
    # FIX: use a dedicated global; this previously reused
    # $matrix_elements_to_f_slf and collided with the :elements_to_f spec.
    $matrix_elements_to_i_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_elements_to_i_slf
    slf_row_size = slf.instance_variable_get(:@rows).size
    slf_col_size = slf.instance_variable_get(:@column_size)
    ret_row_size = r.instance_variable_get(:@rows).size
    ret_col_size = r.instance_variable_get(:@column_size)
    c = true
    for i in 0..slf_row_size-1
      for j in 0..slf_col_size-1
        diff = (slf[i,j] - r[i,j]).abs
        c = c & (diff < $error_threshold)
      end
    end
    (slf_row_size == ret_row_size) & (slf_col_size == ret_col_size) & c
  })
end
# Post-condition spec for Matrix#elements_to_r: same dimensions as the
# receiver, each converted entry within $error_threshold of the original.
spec :elements_to_r do
  pre_task do |*args|
    # FIX: use a dedicated global; this previously reused
    # $matrix_elements_to_f_slf and collided with the :elements_to_f spec.
    $matrix_elements_to_r_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_elements_to_r_slf
    slf_row_size = slf.instance_variable_get(:@rows).size
    slf_col_size = slf.instance_variable_get(:@column_size)
    ret_row_size = r.instance_variable_get(:@rows).size
    ret_col_size = r.instance_variable_get(:@column_size)
    c = true
    for i in 0..slf_row_size-1
      for j in 0..slf_col_size-1
        diff = (slf[i,j] - r[i,j]).abs
        c = c & (diff < $error_threshold)
      end
    end
    (slf_row_size == ret_row_size) & (slf_col_size == ret_col_size) & c
  })
end
spec :empty? do
pre_task do |*args|
$matrix_empty_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_empty_q_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
# this post condition is pretty much the method definition
((r == true) & ((slf_row_size == 0) & (slf_col_size == 0))) | ((r == false) & (not ((slf_row_size == 0) & (slf_col_size == 0))))
})
end
spec :hash do
# no post cond?
# could do something like a bunch of returns are different from difference matrices
ret (RDL.flat {|r|
true
})
end
spec :imaginary do
pre_task do |*args|
$matrix_imaginary_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_imaginary_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r_row_size = r.instance_variable_get(:@rows).size
r_col_size = r.instance_variable_get(:@column_size)
(r_row_size == slf_row_size) & (r_col_size == slf_col_size)
})
end
spec :inspect do
# depends on the inspect of each individual element's inspect
end
spec :inverse do
pre_task do |*args|
$matrix_inverse_slf = self
end
pre_cond do |*args|
slf_row_size = self.instance_variable_get(:@rows).size
slf_col_size = self.instance_variable_get(:@column_size)
slf_row_size == slf_col_size
end
ret (RDL.flat {|r|
slf = $matrix_inverse_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
# could do something like A * A.inverse = I
# but seems like RDL does not support such a recursive call
true
})
end
spec :minor do
pre_task do |*args|
$matrix_minor_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_minor_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
slf_elements = slf.column_vectors.map {|x| x.instance_variable_get(:@elements)}
slf_elements = slf_elements.flatten(1)
ret_elements = r.column_vectors.map {|x| x.instance_variable_get(:@elements)}
ret_elements = ret_elements.flatten(1)
c = ret_elements.all? {|x| slf_elements.include?(x)}
(ret_row_size <= slf_row_size) & (ret_col_size <= slf_col_size) & c
})
end
spec :rank do
pre_task do |*args|
$matrix_rank_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_rank_slf
slf_row_size = slf.instance_variable_get(:@rows).size
(r >= 0) & (r <= slf_row_size)
})
end
spec :real do
pre_task do |*args|
$matrix_real_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_real_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r_row_size = r.instance_variable_get(:@rows).size
r_col_size = r.instance_variable_get(:@column_size)
(r_row_size == slf_row_size) & (r_col_size == slf_col_size)
})
end
# Post-condition spec for Matrix#real?: returns true iff every entry is real.
spec :real? do
  pre_task do |*args|
    $matrix_real_q_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_real_q_slf
    all_are_real = slf.all? {|x| x.real?}
    # BUG FIX: `r == true & all_are_real` parsed as `r == (true & all_are_real)`
    # because & binds tighter than ==, so any false return satisfied the spec
    # unconditionally. Parenthesize the comparisons like the sibling specs.
    ((r == true) & all_are_real) | ((r == false) & (not all_are_real))
  })
end
spec :row do
pre_task do |arg, &blk|
$matrix_row_arg = arg
$matrix_row_blk = blk
$matrix_row_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_row_arg
blk = $matrix_row_blk
slf = $matrix_row_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in 0..slf_col_size-1
c = c & (r[i] == slf[arg, i]) if r != nil
end
arg_out_of_range = arg >= slf_row_size || arg < -slf_row_size
((not blk) & ((r == nil) & arg_out_of_range) | c) | blk
})
end
spec :regular? do
pre_task do |*args|
$matrix_regular_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_regular_q_slf
((r == true) & (not (slf.singular?))) | ((r == false) & (slf.singular?))
})
end
spec :row_size do
ret (RDL.flat {|r|
r >= 0
})
end
spec :row_vectors do
pre_task do |*args|
$matrix_row_vectors_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_row_vectors_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in 0..r.size-1
c = c & (slf.row(i) == r[i])
end
(r.size == slf_row_size) & c
})
end
spec :singular? do
pre_task do |*args|
$matrix_singular_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_singular_q_slf
((r == true) & (slf.determinant == 0)) | ((r == false) & (slf.determinant != 0))
})
end
spec :square? do
pre_task do |*args|
$matrix_square_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_square_q_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
((r == true) & (slf_row_size == slf_col_size)) | ((r == false) & (slf_row_size != slf_col_size))
})
end
# Post-condition spec for Matrix#to_a: the result equals the receiver's
# internal row array.
spec :to_a do
  pre_task do |*args|
    # FIX: use a dedicated global; this previously reused $matrix_trace_slf
    # (copy-paste from the :trace spec), colliding with its saved receiver.
    $matrix_to_a_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_to_a_slf
    slf_rows = slf.instance_variable_get(:@rows)
    r == slf_rows
  })
end
spec :to_s do
# depends on individual element's to_s
end
spec :trace do
pre_task do |*args|
$matrix_trace_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_trace_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r2 = 0
for i in 0..slf_row_size-1
for j in 0..slf_col_size-1
r2 += slf[i,j] if i == j
end
end
r == r2
})
end
spec :transpose do
pre_task do |*args|
$matrix_transpose_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_transpose_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in 0..slf_row_size-1
for j in 0..slf_col_size-1
c = c & (slf[i,j] == r[j, i])
end
end
(slf_row_size == ret_col_size) & (slf_col_size == ret_row_size) & c
})
end
end
# Test cases
Matrix.identity(2)
#Matrix.identity(-1)
Matrix[ [25, 93], [-1, 66] ]
Matrix.build(2, 4) {|row, col| col - row }
Matrix.build(3) { rand }
Matrix.column_vector([4,5,6])
Matrix.columns([[25, 93], [-1, 66]])
Matrix.diagonal(9, 5, -3)
Matrix.empty()
Matrix.empty(0)
Matrix.empty(1)
Matrix.empty(2, 0)
Matrix.empty(0, 3)
Matrix.row_vector([4,5,6])
Matrix.rows([[25, 93], [-1, 66]])
Matrix.scalar(2, 5)
Matrix.zero(2)
Matrix[[2,4], [6,8]] * Matrix.identity(2)
Matrix[[7,6], [3,9]] ** 2
Matrix.scalar(2,5) + Matrix[[1,0], [-4,7]]
Matrix[[1,5], [4,2]] - Matrix[[9,3], [-4,1]]
Matrix[[7,6], [3,9]] / Matrix[[2,9], [3,1]]
Matrix[[7,6], [3,9]] == Matrix[[2,9], [3,1]]
Matrix[[7,6], [3,9]] == Matrix[[7,6], [3,9]]
Matrix[[7,6], [3,9]] == Matrix[[7,6], [2,4], [3,9]]
Matrix[[7,6], [3,9]] == Matrix[[7,6], [2,3], [3,9]]
Matrix[[7,6], [3,9]][0,1]
Matrix[[7,6], [3,9]][0,8]
Matrix[[7,6], [3,9]][1,8]
Matrix[[7,6], [3,9]][8,8]
Matrix[ [1,2], [3,4] ].collect { |e| e**2 }
Matrix[[1,2], [3,4], [5, 6]].column(0)
Matrix[[1,2], [3,4], [5, 6]].column(1)
Matrix[[1,2], [3,4], [5, 6]].column(2)
Matrix[[1,2], [3,4], [5, 6]].column(3)
Matrix[[1,2], [3,4], [5, 6]].column(-2)
Matrix[[1,2], [3,4], [5, 6]].column(-1)
Matrix[[1,2], [3,4], [5, 6]].column(-100)
Matrix[[1,2], [3,4], [5, 6]].column(200)
Matrix[[3,4], [5, 6]].column_vectors
Matrix[[Complex(1,2), Complex(0,1), 0], [1, 2, 3]].conjugate
Matrix[[7,6], [3,9]].determinant
Matrix[[7,6], [3,9]].elements_to_f
Matrix[[7,6], [3,9]].elements_to_i
Matrix[[7,6], [3,9]].elements_to_r
Matrix[[7,6], [3,9]].empty?
Matrix[].elements_to_r
Matrix[[7,6], [3,9]].hash
Matrix[].hash
Matrix[[Complex(1,2), Complex(0,1), 0], [1, 2, 3]].imaginary
Matrix.diagonal(9, 5, -3).minor(0..1, 0..2)
Matrix[[7,6], [3,9]].rank
Matrix[[Complex(1,2), Complex(0,1), 0], [1, 2, 3]].real
Matrix[[1,2], [3,4], [5, 6]].row(0)
Matrix[[1,2], [3,4], [5, 6]].row(1)
Matrix[[1,2], [3,4], [5, 6]].row(2)
Matrix[[1,2], [3,4], [5, 6]].row(3)
Matrix[[1,2], [3,4], [5, 6]].row(-2)
Matrix[[1,2], [3,4], [5, 6]].row(-1)
Matrix[[1,2], [3,4], [5, 6]].row(-100)
Matrix[[1,2], [3,4], [5, 6]].row(200)
Matrix[[7,6], [3,9]].regular?
Matrix[[1,0], [0,1]].regular?
Matrix[[1,2], [3,4], [5, 6]].row_size
Matrix[[3,4], [5, 6]].row_vectors
Matrix[[7,6], [3,9]].singular?
Matrix[[1,0], [0,1]].singular?
Matrix[[7,6], [3,9]].square?
Matrix[[7,6], [3,9], [1,2]].square?
Matrix[[7,6], [3,9]].to_a
Matrix[[7,6], [3,9]].trace
Matrix[[1,2], [3,4], [5,6]].transpose
A few improvements to the Matrix RDL specs: use inclusive `Range.new` loops and keyword boolean operators in the post-conditions
require 'matrix'
require 'rdl'
$error_threshold = 0.000001
class Matrix
class << self
extend RDL
spec :identity do
pre_task do |arg|
$matrix_identity_arg = arg
end
arg 0, (RDL.flat {|a| a > 0})
ret (RDL.flat {|r|
num_rows = r.instance_variable_get(:@rows).size
num_cols = r.instance_variable_get(:@column_size)
arg = $matrix_identity_arg
c = true
for i in Range.new(0, num_rows-1)
for j in Range.new(0, num_cols-1)
if i == j
c = (c and (r[i,j] == 1))
else
c = (c and (r[i,j] == 0))
end
end
end
(num_cols == arg) and (num_rows == arg) and c
})
end
spec :[] do
pre_task do |*args|
$matrix_create_args = args
end
pre_cond do |*args|
arg_sizes = args.map {|x| x.size}
(arg_sizes.uniq.size == 1) | (arg_sizes.uniq.size == 0)
end
ret (RDL.flat {|r|
args = $matrix_create_args
ret_rows = r.instance_variable_get(:@rows)
args == ret_rows
})
end
spec :build do
pre_task do |*args|
$matrix_build_args = args
end
pre_cond do |*args|
arg_sizes = args.map {|x| x.size}
(arg_sizes.uniq.size == 1) | (arg_sizes.uniq.size == 0)
end
arg 0, RDL.flat {|a| a >= 0}
rest 1, RDL.flat {|a| a >= 0}
ret (RDL.flat {|r|
row_size_arg = $matrix_build_args[0]
if $matrix_build_args.size == 1
# 2nd arg is a default arg == 1st_arg
column_size_arg = row_size_arg
else
column_size_arg = $matrix_build_args[1]
end
ret_row_count = r.instance_variable_get(:@rows).size
ret_col_count = r.instance_variable_get(:@column_size)
(row_size_arg == ret_row_count) and (column_size_arg == ret_col_count)
})
end
spec :column_vector do
pre_task do |arg|
$matrix_column_vector_arg = arg
end
ret (RDL.flat {|r|
arg = $matrix_column_vector_arg
ret_rows = r.instance_variable_get(:@rows)
ret_row_count = ret_rows.size
ret_col_count = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, ret_row_count-1)
c = (c and (ret_rows[i] == [arg[i]]))
end
(ret_row_count == arg.size) and (ret_col_count == 1) and c
})
end
spec :columns do
pre_task do |arg|
$matrix_columns_arg = arg
end
ret (RDL.flat {|r|
arg = $matrix_columns_arg
transpose_ret_rows = r.transpose.instance_variable_get(:@rows)
transpose_ret_rows == arg
})
end
spec :diagonal do
pre_task do |*args|
$matrix_diagonal_args = args
end
ret (RDL.flat {|r|
args = $matrix_diagonal_args
ret_row_count = r.instance_variable_get(:@rows).size
ret_col_count = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, ret_row_count-1)
for j in Range.new(0, ret_col_count-1)
if i == j
c = (c and (r[i,j] == args[i]))
else
c = (c and (r[i,j] == 0))
end
end
end
(ret_row_count == ret_col_count) and (ret_row_count == args.size) and c
})
end
spec :empty do
pre_task do |*args|
$matrix_empty_args = args
end
pre_cond do |*args|
if args.size == 0
row_size = 0
col_size = 0
elsif args.size == 1
row_size = args[0]
col_size = 0
else
row_size = args[0]
col_size = args[1]
end
(row_size >= 0) and (col_size >= 0) and ((row_size == 0) or (col_size == 0))
end
ret (RDL.flat {|r|
args = $matrix_empty_args
if args.size == 0
row_size = 0
col_size = 0
elsif args.size == 1
row_size = args[0]
col_size = 0
else
row_size = args[0]
col_size = args[1]
end
ret_rows = r.instance_variable_get(:@rows)
ret_row_count = ret_rows.size
ret_col_count = r.instance_variable_get(:@column_size)
c = ret_rows.all? {|x| x == []}
(ret_row_count == row_size) and (ret_col_count == col_size) and c
})
end
spec :row_vector do
pre_task do |arg|
$matrix_row_vector_arg = arg
end
ret (RDL.flat {|r|
arg = $matrix_row_vector_arg
ret_rows = r.instance_variable_get(:@rows)
ret_row_count = ret_rows.size
ret_col_count = r.instance_variable_get(:@column_size)
(ret_rows == [arg]) and (ret_row_count == 1) and (ret_col_count == arg.size)
})
end
spec :rows do
pre_task do |*args|
$matrix_rows_args = args
end
pre_cond do |*args|
arg_sizes = args[0].map {|x| x.size}
(arg_sizes.uniq.size == 1) or (arg_sizes.uniq.size == 0)
end
ret (RDL.flat {|r|
arg_row = $matrix_rows_args[0]
# There is a second optional arg.
# the optional argument copy is false, use the given arrays as the
# internal structure of the matrix without copying
# the post condition involving dup is not listed here
ret_rows = r.instance_variable_get(:@rows)
ret_rows == arg_row
})
end
spec :scalar do
pre_task do |*args|
$matrix_scalar_args = args
end
arg 0, RDL.flat {|a| a > 0}
ret (RDL.flat {|r|
arg_n = $matrix_scalar_args[0]
arg_value = $matrix_scalar_args[1]
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, ret_row_size-1)
for j in Range.new(0, ret_col_size-1)
if i == j
c = (c and (r[i,j] == arg_value))
else
c = (c and (r[i,j] == 0))
end
end
end
(ret_row_size == ret_col_size) and (ret_col_size == arg_n) and c
})
end
spec :zero do
pre_task do |arg|
$matrix_zero_arg = arg
end
arg 0, RDL.flat {|a| a > 0}
ret (RDL.flat {|r|
arg = $matrix_zero_arg
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, ret_row_size-1)
for j in Range.new(0, ret_col_size-1)
c = (c and (r[i,j] == 0))
end
end
(ret_row_size == ret_col_size) and (ret_col_size == arg) and c
})
end
end
end
class Matrix
extend RDL
spec :* do
pre_task do |arg|
$matrix_mult_arg = arg
$matrix_mult_self = self
end
# an RTC pre_condition would require all elements to be numbers
ret (RDL.flat {|r|
arg = $matrix_mult_arg
slf = $matrix_mult_self
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
slf_row_size = slf.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
(ret_row_size == slf_row_size) and (ret_col_size == arg_col_size)
})
end
spec :** do
pre_task do |arg|
$matrix_exp_arg = arg
$matrix_exp_self = self
end
ret (RDL.flat {|r|
slf = $matrix_exp_self
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
slf_row_size = slf.instance_variable_get(:@rows).size
(ret_row_size == ret_col_size) and (slf_row_size == ret_col_size)
})
end
spec :- do
pre_task do |arg|
$matrix_minus_arg = arg
$matrix_minus_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_minus_arg
slf = $matrix_minus_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, ret_row_size-1)
for j in Range.new(0, ret_col_size-1)
c = (c and (r[i,j] == slf[i,j] - arg[i,j]))
end
end
(ret_row_size == arg_row_size) and (ret_col_size == arg_col_size) and c
})
end
spec :+ do
pre_task do |arg|
$matrix_add_arg = arg
$matrix_add_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_add_arg
slf = $matrix_add_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, ret_row_size-1)
for j in Range.new(0, ret_col_size-1)
c = (c and (r[i,j] == slf[i,j] + arg[i,j]))
end
end
(ret_row_size == arg_row_size) and (ret_col_size == arg_col_size) and c
})
end
spec :/ do
pre_task do |arg|
$matrix_div_arg = arg
$matrix_div_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_div_arg
slf = $matrix_div_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
ret_approx = slf * arg.inverse
diff = r - ret_approx
diff_row_size = diff.instance_variable_get(:@rows).size
diff_col_size = diff.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, diff_row_size-1)
for j in Range.new(0, diff_col_size-1)
c = (c and (diff[i,j].abs < $error_threshold))
end
end
(ret_row_size == arg_row_size) and (ret_col_size == arg_col_size) and c
})
end
spec :== do
pre_task do |arg|
$matrix_eq_arg = arg
$matrix_eq_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_eq_arg
slf = $matrix_eq_slf
arg_row_size = arg.instance_variable_get(:@rows).size
arg_col_size = arg.instance_variable_get(:@column_size)
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
matrices_eq = true
if not (arg_row_size == slf_row_size and arg_col_size == slf_col_size)
matrices_eq = false
else
for i in Range.new(0, arg_row_size-1)
for j in Range.new(0, arg_col_size-1)
matrices_eq = (matrices_eq and (arg[i,j] == slf[i,j]))
end
end
end
(r and matrices_eq) or ((not r) and (not matrices_eq))
})
end
spec :[] do
pre_task do |*args|
$matrix_index_args = args
$matrix_index_slf = self
end
ret (RDL.flat {|r|
args = $matrix_index_args
slf = $matrix_index_slf
arg_i = args[0]
arg_j = args[1]
slf_rows = slf.instance_variable_get(:@rows)
slf_row_size = slf_rows.size
slf_col_size = slf.instance_variable_get(:@column_size)
args_out_of_range = (arg_i < 0) | (arg_i >= slf_row_size) | (arg_j < 0) | (arg_j >= slf_col_size)
slf_includes_arg = slf_rows.any? {|x| x.any? {|x2| x2 == r} }
((r == nil) and args_out_of_range) or slf_includes_arg
})
end
spec :clone do
# skipped
end
spec :coerce do
# skipped
end
spec :collect do
pre_task do |*args|
$matrix_collect_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_collect_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
(slf_row_size == ret_row_size) and (slf_col_size == ret_col_size)
})
end
spec :column do
pre_task do |arg, &blk|
$matrix_column_arg = arg
$matrix_column_blk = blk
$matrix_column_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_column_arg
blk = $matrix_column_blk
slf = $matrix_column_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, slf_row_size-1)
c = (c and (r[i] == slf[i, arg])) if r != nil
end
arg_out_of_range = arg >= slf_col_size || arg < -slf_col_size
((not blk) and ((r == nil) and arg_out_of_range) or c) or blk
})
end
spec :column_vectors do
pre_task do |*args|
$matrix_column_vectors_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_column_vectors_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, r.size-1)
c = (c and (slf.column(i) == r[i]))
end
(r.size == slf_col_size) and c
})
end
spec :conjugate do
pre_task do |*args|
$matrix_conjugate_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_conjugate_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r_row_size = r.instance_variable_get(:@rows).size
r_col_size = r.instance_variable_get(:@column_size)
(r_row_size == slf_row_size) and (r_col_size == slf_col_size)
})
end
# Matrix#determinant: no checkable post-condition here (see inline comment).
spec :determinant do
pre_task do |*args|
$matrix_determinant_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_determinant_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
# could do something like m.determinant == m.transpose.determinant
# but seems like there are some recursive call problems with RDL
true
})
end
spec :each do
# skipped, it can do something with the block argument
end
spec :each_with_index do
# skipped, it can do something with the block argument
end
# Matrix#elements_to_f: result has the same dimensions and each element is
# within $error_threshold of the original.
spec :elements_to_f do
pre_task do |*args|
$matrix_elements_to_f_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_elements_to_f_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, slf_row_size-1)
for j in Range.new(0, slf_col_size-1)
diff = (slf[i,j] - r[i,j]).abs
c = (c and (diff < $error_threshold))
end
end
(slf_row_size == ret_row_size) and (slf_col_size == ret_col_size) and c
})
end
# Matrix#elements_to_i: result has the same dimensions and each element is
# within $error_threshold of the original.
spec :elements_to_i do
  # Capture the receiver under a method-specific global. The previous
  # version copy-pasted $matrix_elements_to_f_slf from the :elements_to_f
  # spec, so the two specs clobbered each other's captured receiver.
  pre_task do |*args|
    $matrix_elements_to_i_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_elements_to_i_slf
    slf_row_size = slf.instance_variable_get(:@rows).size
    slf_col_size = slf.instance_variable_get(:@column_size)
    ret_row_size = r.instance_variable_get(:@rows).size
    ret_col_size = r.instance_variable_get(:@column_size)
    c = true
    for i in Range.new(0, slf_row_size-1)
      for j in Range.new(0, slf_col_size-1)
        diff = (slf[i,j] - r[i,j]).abs
        c = (c and (diff < $error_threshold))
      end
    end
    (slf_row_size == ret_row_size) and (slf_col_size == ret_col_size) and c
  })
end
# Matrix#elements_to_r: result has the same dimensions and each element is
# within $error_threshold of the original.
spec :elements_to_r do
  # Capture the receiver under a method-specific global. The previous
  # version copy-pasted $matrix_elements_to_f_slf from the :elements_to_f
  # spec, so the two specs clobbered each other's captured receiver.
  pre_task do |*args|
    $matrix_elements_to_r_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_elements_to_r_slf
    slf_row_size = slf.instance_variable_get(:@rows).size
    slf_col_size = slf.instance_variable_get(:@column_size)
    ret_row_size = r.instance_variable_get(:@rows).size
    ret_col_size = r.instance_variable_get(:@column_size)
    c = true
    for i in Range.new(0, slf_row_size-1)
      for j in Range.new(0, slf_col_size-1)
        diff = (slf[i,j] - r[i,j]).abs
        c = (c and (diff < $error_threshold))
      end
    end
    (slf_row_size == ret_row_size) and (slf_col_size == ret_col_size) and c
  })
end
# Matrix#empty?: true iff the matrix has no rows and no columns.
spec :empty? do
pre_task do |*args|
$matrix_empty_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_empty_q_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
# this post condition is pretty much the method definition
(r and ((slf_row_size == 0) and (slf_col_size == 0))) or ((not r) and (not ((slf_row_size == 0) and (slf_col_size == 0))))
})
end
# Matrix#hash: no meaningful post-condition is checked.
spec :hash do
# no post cond?
# could do something like a bunch of returns are different from difference matrices
ret (RDL.flat {|r|
true
})
end
# Matrix#imaginary: dimensions are preserved.
spec :imaginary do
pre_task do |*args|
$matrix_imaginary_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_imaginary_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r_row_size = r.instance_variable_get(:@rows).size
r_col_size = r.instance_variable_get(:@column_size)
(r_row_size == slf_row_size) and (r_col_size == slf_col_size)
})
end
spec :inspect do
# depends on the inspect of each individual element's inspect
end
# Matrix#inverse: pre-condition requires a square receiver; no checkable
# post-condition (see inline comment).
spec :inverse do
pre_task do |*args|
$matrix_inverse_slf = self
end
pre_cond do |*args|
slf_row_size = self.instance_variable_get(:@rows).size
slf_col_size = self.instance_variable_get(:@column_size)
slf_row_size == slf_col_size
end
ret (RDL.flat {|r|
slf = $matrix_inverse_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
# could do something like A * A.inverse = I
# but seems like RDL does not support such a recursive call
true
})
end
# Matrix#minor: the result is no larger than the receiver and every element
# of the result appears somewhere in the receiver.
spec :minor do
pre_task do |*args|
$matrix_minor_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_minor_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
slf_elements = slf.column_vectors.map {|x| x.instance_variable_get(:@elements)}
slf_elements = slf_elements.flatten(1)
ret_elements = r.column_vectors.map {|x| x.instance_variable_get(:@elements)}
ret_elements = ret_elements.flatten(1)
c = ret_elements.all? {|x| slf_elements.include?(x)}
(ret_row_size <= slf_row_size) and (ret_col_size <= slf_col_size) and c
})
end
# Matrix#rank: non-negative and bounded by the row count.
# NOTE(review): rank is also bounded by the column count; only the row bound
# is checked here -- confirm whether the weaker bound is intentional.
spec :rank do
pre_task do |*args|
$matrix_rank_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_rank_slf
slf_row_size = slf.instance_variable_get(:@rows).size
(r >= 0) and (r <= slf_row_size)
})
end
# Matrix#real: dimensions are preserved.
spec :real do
pre_task do |*args|
$matrix_real_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_real_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r_row_size = r.instance_variable_get(:@rows).size
r_col_size = r.instance_variable_get(:@column_size)
(r_row_size == slf_row_size) and (r_col_size == slf_col_size)
})
end
# Matrix#real?: true iff every element is real.
spec :real? do
pre_task do |*args|
$matrix_real_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_real_q_slf
all_are_real = slf.all? {|x| x.real?}
(r and all_are_real) or ((not r) and (not all_are_real))
})
end
# Matrix#row(i): mirror of the :column spec -- the returned vector matches
# the receiver's i-th row; nil is only accepted when i is out of range; a
# block-form call is not checked.
spec :row do
pre_task do |arg, &blk|
$matrix_row_arg = arg
$matrix_row_blk = blk
$matrix_row_slf = self
end
ret (RDL.flat {|r|
arg = $matrix_row_arg
blk = $matrix_row_blk
slf = $matrix_row_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, slf_col_size-1)
c = (c and (r[i] == slf[arg, i])) if r != nil
end
arg_out_of_range = arg >= slf_row_size || arg < -slf_row_size
# NOTE(review): `and`/`or` share precedence in Ruby -- confirm grouping.
((not blk) and ((r == nil) and arg_out_of_range) or c) or blk
})
end
# Matrix#regular?: the negation of singular?.
spec :regular? do
pre_task do |*args|
$matrix_regular_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_regular_q_slf
(r and (not (slf.singular?))) or ((not r) and (slf.singular?))
})
end
# Matrix#row_size: never negative.
spec :row_size do
ret (RDL.flat {|r|
r >= 0
})
end
# Matrix#row_vectors: one vector per row, each equal to that row.
spec :row_vectors do
pre_task do |*args|
$matrix_row_vectors_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_row_vectors_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, r.size-1)
c = (c and (slf.row(i) == r[i]))
end
(r.size == slf_row_size) and c
})
end
# Matrix#singular?: true iff the determinant is zero.
spec :singular? do
pre_task do |*args|
$matrix_singular_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_singular_q_slf
(r and (slf.determinant == 0)) or ((not r) and (slf.determinant != 0))
})
end
# Matrix#square?: true iff row count equals column count.
spec :square? do
pre_task do |*args|
$matrix_square_q_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_square_q_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
(r and (slf_row_size == slf_col_size)) or ((not r) and (slf_row_size != slf_col_size))
})
end
# Matrix#to_a: the result equals the receiver's internal row array.
spec :to_a do
  # Capture the receiver under a method-specific global. The previous
  # version copy-pasted $matrix_trace_slf, silently clobbering the :trace
  # spec's captured receiver.
  pre_task do |*args|
    $matrix_to_a_slf = self
  end
  ret (RDL.flat {|r|
    slf = $matrix_to_a_slf
    # Unused row/column size locals from the original were removed.
    r == slf.instance_variable_get(:@rows)
  })
end
spec :to_s do
# depends on individual element's to_s
end
# Matrix#trace: equals the sum of the diagonal elements.
spec :trace do
pre_task do |*args|
$matrix_trace_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_trace_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
r2 = 0
for i in Range.new(0, slf_row_size-1)
for j in Range.new(0, slf_col_size-1)
r2 += slf[i,j] if i == j
end
end
r == r2
})
end
# Matrix#transpose: dimensions swap and element (i,j) moves to (j,i).
spec :transpose do
pre_task do |*args|
$matrix_transpose_slf = self
end
ret (RDL.flat {|r|
slf = $matrix_transpose_slf
slf_row_size = slf.instance_variable_get(:@rows).size
slf_col_size = slf.instance_variable_get(:@column_size)
ret_row_size = r.instance_variable_get(:@rows).size
ret_col_size = r.instance_variable_get(:@column_size)
c = true
for i in Range.new(0, slf_row_size-1)
for j in Range.new(0, slf_col_size-1)
c = (c and (slf[i,j] == r[j, i]))
end
end
(slf_row_size == ret_col_size) and (slf_col_size == ret_row_size) and c
})
end
end
# Test cases
# -- constructors --
Matrix.identity(2)
#Matrix.identity(-1)
Matrix[ [25, 93], [-1, 66] ]
Matrix.build(2, 4) {|row, col| col - row }
Matrix.build(3) { rand }
Matrix.column_vector([4,5,6])
Matrix.columns([[25, 93], [-1, 66]])
Matrix.diagonal(9, 5, -3)
Matrix.empty()
Matrix.empty(0)
Matrix.empty(1)
Matrix.empty(2, 0)
Matrix.empty(0, 3)
Matrix.row_vector([4,5,6])
Matrix.rows([[25, 93], [-1, 66]])
Matrix.scalar(2, 5)
Matrix.zero(2)
# -- arithmetic and comparison --
Matrix[[2,4], [6,8]] * Matrix.identity(2)
Matrix[[7,6], [3,9]] ** 2
Matrix.scalar(2,5) + Matrix[[1,0], [-4,7]]
Matrix[[1,5], [4,2]] - Matrix[[9,3], [-4,1]]
Matrix[[7,6], [3,9]] / Matrix[[2,9], [3,1]]
Matrix[[7,6], [3,9]] == Matrix[[2,9], [3,1]]
Matrix[[7,6], [3,9]] == Matrix[[7,6], [3,9]]
Matrix[[7,6], [3,9]] == Matrix[[7,6], [2,4], [3,9]]
Matrix[[7,6], [3,9]] == Matrix[[7,6], [2,3], [3,9]]
# -- element access, including out-of-range indices --
Matrix[[7,6], [3,9]][0,1]
Matrix[[7,6], [3,9]][0,8]
Matrix[[7,6], [3,9]][1,8]
Matrix[[7,6], [3,9]][8,8]
# -- instance methods exercised by the specs above --
Matrix[ [1,2], [3,4] ].collect { |e| e**2 }
Matrix[[1,2], [3,4], [5, 6]].column(0)
Matrix[[1,2], [3,4], [5, 6]].column(1)
Matrix[[1,2], [3,4], [5, 6]].column(2)
Matrix[[1,2], [3,4], [5, 6]].column(3)
Matrix[[1,2], [3,4], [5, 6]].column(-2)
Matrix[[1,2], [3,4], [5, 6]].column(-1)
Matrix[[1,2], [3,4], [5, 6]].column(-100)
Matrix[[1,2], [3,4], [5, 6]].column(200)
Matrix[[3,4], [5, 6]].column_vectors
Matrix[[Complex(1,2), Complex(0,1), 0], [1, 2, 3]].conjugate
Matrix[[7,6], [3,9]].determinant
Matrix[[7,6], [3,9]].elements_to_f
Matrix[[7,6], [3,9]].elements_to_i
Matrix[[7,6], [3,9]].elements_to_r
Matrix[[7,6], [3,9]].empty?
Matrix[].elements_to_r
Matrix[[7,6], [3,9]].hash
Matrix[].hash
Matrix[[Complex(1,2), Complex(0,1), 0], [1, 2, 3]].imaginary
Matrix.diagonal(9, 5, -3).minor(0..1, 0..2)
Matrix[[7,6], [3,9]].rank
Matrix[[Complex(1,2), Complex(0,1), 0], [1, 2, 3]].real
Matrix[[1,2], [3,4], [5, 6]].row(0)
Matrix[[1,2], [3,4], [5, 6]].row(1)
Matrix[[1,2], [3,4], [5, 6]].row(2)
Matrix[[1,2], [3,4], [5, 6]].row(3)
Matrix[[1,2], [3,4], [5, 6]].row(-2)
Matrix[[1,2], [3,4], [5, 6]].row(-1)
Matrix[[1,2], [3,4], [5, 6]].row(-100)
Matrix[[1,2], [3,4], [5, 6]].row(200)
Matrix[[7,6], [3,9]].regular?
Matrix[[1,0], [0,1]].regular?
Matrix[[1,2], [3,4], [5, 6]].row_size
Matrix[[3,4], [5, 6]].row_vectors
Matrix[[7,6], [3,9]].singular?
Matrix[[1,0], [0,1]].singular?
Matrix[[7,6], [3,9]].square?
Matrix[[7,6], [3,9], [1,2]].square?
Matrix[[7,6], [3,9]].to_a
Matrix[[7,6], [3,9]].trace
Matrix[[1,2], [3,4], [5,6]].transpose
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'service_mock/version'

# Gem specification for service_mock, a simple Ruby wrapper over WireMock.
Gem::Specification.new do |spec|
  spec.name          = "service_mock"
  spec.version       = ServiceMock::VERSION
  spec.authors       = ["Jeffrey S. Morgan"]
  spec.email         = ["jeff.morgan@leandog.com"]

  spec.summary       = %q{Simple wrapper over WireMock}
  spec.description   = %q{Simple wrapper over WireMock}
  # Was the bundler generator's literal "TODO: ..." placeholder, which is
  # not a valid URI and makes `gem build` fail metadata validation.
  spec.homepage      = "https://github.com/cheezy/service_mock"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_dependency "childprocess", "~>0.5"

  spec.add_development_dependency "bundler", "~> 1.12"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
end
added website to gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'service_mock/version'
# Gem specification for service_mock, a simple Ruby wrapper over WireMock.
Gem::Specification.new do |spec|
spec.name = "service_mock"
spec.version = ServiceMock::VERSION
spec.authors = ["Jeffrey S. Morgan"]
spec.email = ["jeff.morgan@leandog.com"]
spec.summary = %q{Simple wrapper over WireMock}
spec.description = %q{Simple wrapper over WireMock}
spec.homepage = "https://github.com/cheezy/service_mock"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency "childprocess", "~>0.5"
spec.add_development_dependency "bundler", "~> 1.12"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
|
# Gem specification for fluent-plugin-google-cloud v0.7.6.
# Runtime dependencies are pinned to exact versions.
Gem::Specification.new do |gem|
gem.name = 'fluent-plugin-google-cloud'
gem.description = <<-eos
Fluentd plugins for the Stackdriver Logging API, which will make logs
viewable in the Stackdriver Logs Viewer and can optionally store them
in Google Cloud Storage and/or BigQuery.
This is an official Google Ruby gem.
eos
gem.summary = 'fluentd plugins for the Stackdriver Logging API'
gem.homepage =
'https://github.com/GoogleCloudPlatform/fluent-plugin-google-cloud'
gem.license = 'Apache-2.0'
gem.version = '0.7.6'
gem.authors = ['Stackdriver Agents Team']
gem.email = ['stackdriver-agents@google.com']
gem.required_ruby_version = Gem::Requirement.new('>= 2.2')
gem.files = Dir['**/*'].keep_if { |file| File.file?(file) }
gem.test_files = gem.files.grep(/^(test)/)
gem.require_paths = ['lib']
gem.add_runtime_dependency 'fluentd', '1.2.5'
gem.add_runtime_dependency 'googleapis-common-protos', '1.3.7'
gem.add_runtime_dependency 'googleauth', '0.6.6'
gem.add_runtime_dependency 'google-api-client', '0.28.4'
gem.add_runtime_dependency 'google-cloud-logging', '1.6.0'
gem.add_runtime_dependency 'google-protobuf', '3.6.1'
gem.add_runtime_dependency 'grpc', '1.14.2'
gem.add_runtime_dependency 'json', '2.1.0'
gem.add_development_dependency 'mocha', '~> 1.1'
gem.add_development_dependency 'prometheus-client', '~> 0.7.1'
gem.add_development_dependency 'rake', '~> 10.3'
gem.add_development_dependency 'rubocop', '~> 0.39.0'
gem.add_development_dependency 'test-unit', '~> 3.0'
gem.add_development_dependency 'webmock', '~> 2.3.1'
end
Automated: Bump gem version for future releases. (#301)
# Gem specification for fluent-plugin-google-cloud v0.7.7
# (post-release version bump; otherwise identical to v0.7.6).
Gem::Specification.new do |gem|
gem.name = 'fluent-plugin-google-cloud'
gem.description = <<-eos
Fluentd plugins for the Stackdriver Logging API, which will make logs
viewable in the Stackdriver Logs Viewer and can optionally store them
in Google Cloud Storage and/or BigQuery.
This is an official Google Ruby gem.
eos
gem.summary = 'fluentd plugins for the Stackdriver Logging API'
gem.homepage =
'https://github.com/GoogleCloudPlatform/fluent-plugin-google-cloud'
gem.license = 'Apache-2.0'
gem.version = '0.7.7'
gem.authors = ['Stackdriver Agents Team']
gem.email = ['stackdriver-agents@google.com']
gem.required_ruby_version = Gem::Requirement.new('>= 2.2')
gem.files = Dir['**/*'].keep_if { |file| File.file?(file) }
gem.test_files = gem.files.grep(/^(test)/)
gem.require_paths = ['lib']
gem.add_runtime_dependency 'fluentd', '1.2.5'
gem.add_runtime_dependency 'googleapis-common-protos', '1.3.7'
gem.add_runtime_dependency 'googleauth', '0.6.6'
gem.add_runtime_dependency 'google-api-client', '0.28.4'
gem.add_runtime_dependency 'google-cloud-logging', '1.6.0'
gem.add_runtime_dependency 'google-protobuf', '3.6.1'
gem.add_runtime_dependency 'grpc', '1.14.2'
gem.add_runtime_dependency 'json', '2.1.0'
gem.add_development_dependency 'mocha', '~> 1.1'
gem.add_development_dependency 'prometheus-client', '~> 0.7.1'
gem.add_development_dependency 'rake', '~> 10.3'
gem.add_development_dependency 'rubocop', '~> 0.39.0'
gem.add_development_dependency 'test-unit', '~> 3.0'
gem.add_development_dependency 'webmock', '~> 2.3.1'
end
|
# Gem specification for fluent-plugin-google-cloud v0.7.19.
# All dependencies (including development ones) are pinned exactly here.
Gem::Specification.new do |gem|
gem.name = 'fluent-plugin-google-cloud'
gem.description = <<-eos
Fluentd plugins for the Stackdriver Logging API, which will make logs
viewable in the Stackdriver Logs Viewer and can optionally store them
in Google Cloud Storage and/or BigQuery.
This is an official Google Ruby gem.
eos
gem.summary = 'fluentd plugins for the Stackdriver Logging API'
gem.homepage =
'https://github.com/GoogleCloudPlatform/fluent-plugin-google-cloud'
gem.license = 'Apache-2.0'
gem.version = '0.7.19'
gem.authors = ['Stackdriver Agents Team']
gem.email = ['stackdriver-agents@google.com']
gem.required_ruby_version = Gem::Requirement.new('>= 2.2')
gem.files = Dir['**/*'].keep_if { |file| File.file?(file) }
gem.test_files = gem.files.grep(/^(test)/)
gem.require_paths = ['lib']
gem.add_runtime_dependency 'fluentd', '1.6.3'
gem.add_runtime_dependency 'googleapis-common-protos', '1.3.9'
gem.add_runtime_dependency 'googleauth', '0.9.0'
gem.add_runtime_dependency 'google-api-client', '0.30.8'
gem.add_runtime_dependency 'google-cloud-logging', '1.6.6'
gem.add_runtime_dependency 'google-protobuf', '3.9.0'
gem.add_runtime_dependency 'grpc', '1.22.0'
gem.add_runtime_dependency 'json', '2.2.0'
gem.add_development_dependency 'mocha', '1.9.0'
gem.add_development_dependency 'prometheus-client', '0.9.0'
# TODO(qingling128): Upgrade rake to 11.0+ after the following issues are
# fixed because rake (11.0+) requires ALL variables to be explicitly
# initialized.
# https://github.com/googleapis/google-auth-library-ruby/issues/227
# https://github.com/farcaller/rly/issues/2
gem.add_development_dependency 'rake', '10.5.0'
gem.add_development_dependency 'rubocop', '0.39.0'
gem.add_development_dependency 'test-unit', '3.3.3'
gem.add_development_dependency 'webmock', '3.6.2'
end
Automated: Bump gem version to 0.7.20 for future releases. (#341)
# Gem specification for fluent-plugin-google-cloud v0.7.20
# (post-release version bump; otherwise identical to v0.7.19).
Gem::Specification.new do |gem|
gem.name = 'fluent-plugin-google-cloud'
gem.description = <<-eos
Fluentd plugins for the Stackdriver Logging API, which will make logs
viewable in the Stackdriver Logs Viewer and can optionally store them
in Google Cloud Storage and/or BigQuery.
This is an official Google Ruby gem.
eos
gem.summary = 'fluentd plugins for the Stackdriver Logging API'
gem.homepage =
'https://github.com/GoogleCloudPlatform/fluent-plugin-google-cloud'
gem.license = 'Apache-2.0'
gem.version = '0.7.20'
gem.authors = ['Stackdriver Agents Team']
gem.email = ['stackdriver-agents@google.com']
gem.required_ruby_version = Gem::Requirement.new('>= 2.2')
gem.files = Dir['**/*'].keep_if { |file| File.file?(file) }
gem.test_files = gem.files.grep(/^(test)/)
gem.require_paths = ['lib']
gem.add_runtime_dependency 'fluentd', '1.6.3'
gem.add_runtime_dependency 'googleapis-common-protos', '1.3.9'
gem.add_runtime_dependency 'googleauth', '0.9.0'
gem.add_runtime_dependency 'google-api-client', '0.30.8'
gem.add_runtime_dependency 'google-cloud-logging', '1.6.6'
gem.add_runtime_dependency 'google-protobuf', '3.9.0'
gem.add_runtime_dependency 'grpc', '1.22.0'
gem.add_runtime_dependency 'json', '2.2.0'
gem.add_development_dependency 'mocha', '1.9.0'
gem.add_development_dependency 'prometheus-client', '0.9.0'
# TODO(qingling128): Upgrade rake to 11.0+ after the following issues are
# fixed because rake (11.0+) requires ALL variables to be explicitly
# initialized.
# https://github.com/googleapis/google-auth-library-ruby/issues/227
# https://github.com/farcaller/rly/issues/2
gem.add_development_dependency 'rake', '10.5.0'
gem.add_development_dependency 'rubocop', '0.39.0'
gem.add_development_dependency 'test-unit', '3.3.3'
gem.add_development_dependency 'webmock', '3.6.2'
end
|
require_relative 'app_services_test_base'
# Posts deliberately malformed kata_new requests to the runner services and
# checks the exception returned in the JSON reply.
class HttpTest < AppServicesTestBase
def self.hex_prefix
'F02B3E'
end
# - - - - - - - - - - - - - - - - - - - - -
test '620',
'test using runner-stateful' do
# Empty image_name/kata_id are malformed on purpose.
json = http.post('runner-stateful', '4557', 'kata_new', {
image_name:'',
kata_id:''
})
assert_equal 'image_name:malformed', json['exception']
end
# - - - - - - - - - - - - - - - - - - - - -
test '621',
'test using runner-stateless' do
json = http.post('runner-stateless', '4597', 'kata_new', {
image_name:'',
kata_id:''
})
assert_equal 'image_name:malformed', json['exception']
end
end
update runner-service tests
require_relative 'app_services_test_base'
# Posts deliberately malformed kata_new requests to the runner services.
# The stateless runner now returns a structured exception object
# (class/message/backtrace) rather than a bare string.
class HttpTest < AppServicesTestBase
def self.hex_prefix
'F02B3E'
end
# - - - - - - - - - - - - - - - - - - - - -
test '620',
'test using runner-stateful' do
json = http.post('runner-stateful', '4557', 'kata_new', {
image_name:'',
kata_id:''
})
assert_equal 'image_name:malformed', json['exception']
end
# - - - - - - - - - - - - - - - - - - - - -
test '621',
'test using runner-stateless' do
json = http.post('runner-stateless', '4597', 'kata_new', {
image_name:'',
kata_id:''
})
# Structured exception: class name, message, and a backtrace array.
ex = json['exception']
assert_equal 'ClientError', ex['class']
assert_equal 'image_name:malformed', ex['message']
assert_equal 'Array', ex['backtrace'].class.name
end
end
|
proc { |p| $:.unshift(p) unless $:.any? { |lp| File.expand_path(lp) == p } }.call(File.expand_path('.', File.dirname(__FILE__)))
require 'helper'
# End-to-end spec: boots a Zy::Server in a background thread and talks to it
# over a ZMQ REQ socket.
describe(Zy::Server) do
before do
# Bind a throwaway TCP server just to find a free port, then release it
# for the Zy server to bind.
server = TCPServer.new('127.0.0.1', 0)
@server_port = server_port = server.addr[1]
server.close
STDERR.puts "firing server"
Thread.abort_on_exception = true
@server_thread = Thread.new do
Zy::Server.new(
:app => proc { |request| {} },
:bind => "tcp://*:#{server_port}",
).start
end
STDERR.puts "fired server"
end
after do
@server_thread.kill
end
let(:client_socket) do
Zy.zmq_context.socket(ZMQ::REQ).tap do |client_socket|
client_socket.connect("tcp://localhost:#{@server_port}")
end
end
# Sends one request frame and returns the reply payload as a string.
def request_s(request_s)
send_rc = client_socket.send_string(request_s)
assert(send_rc >= 0)
reply_s = ''
recv_rc = client_socket.recv_string(reply_s)
assert(recv_rc >= 0)
reply_s
end
it 'requests and replies' do
# Only checks that the reply parses as JSON.
reply_s = request_s '{}'
reply = JSON.parse(reply_s)
end
it 'rejects non-json' do
reply_s = request_s('hello!')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'request', 'not_json']}, reply)
end
it 'rejects non-object in json' do
reply_s = request_s('["a request"]')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'request', 'not_object']}, reply)
end
end
tests with protocol frame
proc { |p| $:.unshift(p) unless $:.any? { |lp| File.expand_path(lp) == p } }.call(File.expand_path('.', File.dirname(__FILE__)))
require 'helper'
# End-to-end spec for the multipart protocol: each request is sent as a
# protocol frame (e.g. 'zy 0.0 json') followed by the request body frame.
describe(Zy::Server) do
before do
# Bind a throwaway TCP server just to find a free port, then release it
# for the Zy server to bind.
server = TCPServer.new('127.0.0.1', 0)
@server_port = server_port = server.addr[1]
server.close
STDERR.puts "firing server"
Thread.abort_on_exception = true
@server_thread = Thread.new do
Zy::Server.new(
:app => proc { |request| {} },
:bind => "tcp://*:#{server_port}",
).start
end
STDERR.puts "fired server"
end
after do
@server_thread.kill
end
let(:client_socket) do
Zy.zmq_context.socket(ZMQ::REQ).tap do |client_socket|
client_socket.connect("tcp://localhost:#{@server_port}")
end
end
# Sends each string as one ZMQ frame (SNDMORE on all but the last) and
# returns the reply payload as a string.
def request_s(*request_strings)
request_strings.each_with_index do |request_s, i|
flags = i < request_strings.size - 1 ? ZMQ::SNDMORE : 0
send_rc = client_socket.send_string(request_s, flags)
assert(send_rc >= 0)
end
reply_s = ''
recv_rc = client_socket.recv_string(reply_s)
assert(recv_rc >= 0)
reply_s
end
it 'requests and replies' do
# Only checks that the reply parses as JSON.
reply_s = request_s('zy 0.0 json', '{}')
reply = JSON.parse(reply_s)
end
it 'rejects non-json' do
reply_s = request_s('zy 0.0 json', 'hello!')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'request', 'not_json']}, reply)
end
it 'rejects non-object in json' do
reply_s = request_s('zy 0.0 json', '["a request"]')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'request', 'not_object']}, reply)
end
it 'rejects missing request frame' do
reply_s = request_s('zy 0.0 json')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'request', 'request_not_specified']}, reply)
end
it 'rejects too many frames' do
reply_s = request_s('zy 0.0 json', '{}', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'request', 'too_many_frames']}, reply)
end
it 'rejects missing format' do
reply_s = request_s('zy 0.0', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'protocol', 'format_not_specified']}, reply)
end
it 'rejects missing version' do
reply_s = request_s('zy', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'protocol', 'version_not_specified']}, reply)
end
it 'rejects missing protocol' do
reply_s = request_s('', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'protocol', 'protocol_name_not_specified']}, reply)
end
it 'rejects unrecognized format' do
reply_s = request_s('zy 0.0 xml', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'protocol', 'format_not_supported']}, reply)
end
it 'rejects unsupported version' do
reply_s = request_s('zy 9.0 json', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'protocol', 'version_not_supported']}, reply)
end
it 'rejects wrong protocol' do
reply_s = request_s('http 0.0 json', '{}')
reply = JSON.parse(reply_s)
assert_equal({'status' => ['error', 'protocol', 'protocol_not_supported']}, reply)
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# grape-swagger v0.0.0 gemspec (jeweler-generated: file list is inlined and
# dependency declarations are duplicated per RubyGems capability branch).
Gem::Specification.new do |s|
s.name = "grape-swagger"
s.version = "0.0.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Tim Vandecasteele"]
s.date = "2012-07-19"
s.description = "A simple way to add proper auto generated documentation - that can be displayed with swagger - to your inline described grape API"
s.email = "tim.vandecasteele@gmail.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.markdown"
]
s.files = [
".document",
".rvmrc",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.markdown",
"Rakefile",
"VERSION",
"grape-swagger.gemspec",
"lib/grape-swagger.rb",
"test/dummy/README.rdoc",
"test/dummy/Rakefile",
"test/dummy/app/assets/javascripts/application.js",
"test/dummy/app/assets/stylesheets/application.css",
"test/dummy/app/controllers/application_controller.rb",
"test/dummy/app/helpers/application_helper.rb",
"test/dummy/app/mailers/.gitkeep",
"test/dummy/app/models/.gitkeep",
"test/dummy/app/views/layouts/application.html.erb",
"test/dummy/config.ru",
"test/dummy/config/application.rb",
"test/dummy/config/boot.rb",
"test/dummy/config/database.yml",
"test/dummy/config/environment.rb",
"test/dummy/config/environments/development.rb",
"test/dummy/config/environments/production.rb",
"test/dummy/config/environments/test.rb",
"test/dummy/config/initializers/backtrace_silencers.rb",
"test/dummy/config/initializers/inflections.rb",
"test/dummy/config/initializers/mime_types.rb",
"test/dummy/config/initializers/secret_token.rb",
"test/dummy/config/initializers/session_store.rb",
"test/dummy/config/initializers/wrap_parameters.rb",
"test/dummy/config/locales/en.yml",
"test/dummy/config/routes.rb",
"test/dummy/lib/assets/.gitkeep",
"test/dummy/log/.gitkeep",
"test/dummy/public/404.html",
"test/dummy/public/422.html",
"test/dummy/public/500.html",
"test/dummy/public/favicon.ico",
"test/dummy/script/rails",
"test/grape-swagger_test.rb",
"test/test_helper.rb"
]
s.homepage = "http://github.com/tim-vandecasteele/grape-swagger"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "Add swagger compliant documentation to your grape API"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<grape>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<jquery-rails>, [">= 0"])
s.add_development_dependency(%q<rails>, ["~> 3.2"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
else
s.add_dependency(%q<grape>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<jquery-rails>, [">= 0"])
s.add_dependency(%q<rails>, ["~> 3.2"])
s.add_dependency(%q<sqlite3>, [">= 0"])
end
else
s.add_dependency(%q<grape>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<jquery-rails>, [">= 0"])
s.add_dependency(%q<rails>, ["~> 3.2"])
s.add_dependency(%q<sqlite3>, [">= 0"])
end
end
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# grape-swagger v0.1.0 gemspec (regenerated by jeweler; only the version
# differs from the 0.0.0 snapshot).
Gem::Specification.new do |s|
s.name = "grape-swagger"
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Tim Vandecasteele"]
s.date = "2012-07-19"
s.description = "A simple way to add proper auto generated documentation - that can be displayed with swagger - to your inline described grape API"
s.email = "tim.vandecasteele@gmail.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.markdown"
]
s.files = [
".document",
".rvmrc",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.markdown",
"Rakefile",
"VERSION",
"grape-swagger.gemspec",
"lib/grape-swagger.rb",
"test/dummy/README.rdoc",
"test/dummy/Rakefile",
"test/dummy/app/assets/javascripts/application.js",
"test/dummy/app/assets/stylesheets/application.css",
"test/dummy/app/controllers/application_controller.rb",
"test/dummy/app/helpers/application_helper.rb",
"test/dummy/app/mailers/.gitkeep",
"test/dummy/app/models/.gitkeep",
"test/dummy/app/views/layouts/application.html.erb",
"test/dummy/config.ru",
"test/dummy/config/application.rb",
"test/dummy/config/boot.rb",
"test/dummy/config/database.yml",
"test/dummy/config/environment.rb",
"test/dummy/config/environments/development.rb",
"test/dummy/config/environments/production.rb",
"test/dummy/config/environments/test.rb",
"test/dummy/config/initializers/backtrace_silencers.rb",
"test/dummy/config/initializers/inflections.rb",
"test/dummy/config/initializers/mime_types.rb",
"test/dummy/config/initializers/secret_token.rb",
"test/dummy/config/initializers/session_store.rb",
"test/dummy/config/initializers/wrap_parameters.rb",
"test/dummy/config/locales/en.yml",
"test/dummy/config/routes.rb",
"test/dummy/lib/assets/.gitkeep",
"test/dummy/log/.gitkeep",
"test/dummy/public/404.html",
"test/dummy/public/422.html",
"test/dummy/public/500.html",
"test/dummy/public/favicon.ico",
"test/dummy/script/rails",
"test/grape-swagger_test.rb",
"test/test_helper.rb"
]
s.homepage = "http://github.com/tim-vandecasteele/grape-swagger"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "Add swagger compliant documentation to your grape API"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<grape>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<jquery-rails>, [">= 0"])
s.add_development_dependency(%q<rails>, ["~> 3.2"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
else
s.add_dependency(%q<grape>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<jquery-rails>, [">= 0"])
s.add_dependency(%q<rails>, ["~> 3.2"])
s.add_dependency(%q<sqlite3>, [">= 0"])
end
else
s.add_dependency(%q<grape>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<jquery-rails>, [">= 0"])
s.add_dependency(%q<rails>, ["~> 3.2"])
s.add_dependency(%q<sqlite3>, [">= 0"])
end
end
|
require File.expand_path(File.join(File.dirname(__FILE__), 'test_helper'))
require File.expand_path(File.join(File.dirname(__FILE__), 'schema'))
# Test models exercising the columns_on_demand class method.

# Names its lazily-loaded columns explicitly.
class Explicit < ActiveRecord::Base
  columns_on_demand :file_data, :processing_log, :original_filename
end

# No arguments: all :binary and :text columns are loaded on demand (see tests below).
class Implicit < ActiveRecord::Base
  columns_on_demand
end

# Parent/Child exercise the behaviour across has_many / belongs_to associations.
class Parent < ActiveRecord::Base
  columns_on_demand
  has_many :children
end

class Child < ActiveRecord::Base
  columns_on_demand
  belongs_to :parent
end
# Exercises the columns_on_demand plugin: large (:binary/:text) columns are
# excluded from the default SELECT and loaded lazily on first access.
class ColumnsOnDemandTest < ActiveSupport::TestCase
  # Asserts that the attribute has not been fetched yet: lazy columns are
  # absent from the raw @attributes hash until first accessed.
  def assert_not_loaded(record, attr_name)
    assert_equal nil, record.instance_variable_get("@attributes")[attr_name.to_s]
  end

  fixtures :all
  self.use_transactional_fixtures = true

  test "it lists explicitly given columns for loading on demand" do
    assert_equal ["file_data", "processing_log", "original_filename"], Explicit.columns_to_load_on_demand
  end

  test "it lists all :binary and :text columns for loading on demand if none are explicitly given" do
    assert_equal ["file_data", "processing_log", "results"], Implicit.columns_to_load_on_demand
  end

  test "it selects all the other columns for loading eagerly" do
    assert_match /\W*id\W*, \W*results\W*, \W*processed_at\W*/, Explicit.default_select(false)
    assert_match /\W*explicits\W*.results/, Explicit.default_select(true)
    assert_match /\W*id\W*, \W*original_filename\W*, \W*processed_at\W*/, Implicit.default_select(false)
    assert_match /\W*implicits\W*.original_filename/, Implicit.default_select(true)
  end

  test "it doesn't load the columns_to_load_on_demand straight away when finding the records" do
    record = Implicit.find(:first)
    assert_not_equal nil, record
    assert_not_loaded record, "file_data"
    assert_not_loaded record, "processing_log"
    record = Implicit.find(:all).first
    assert_not_equal nil, record
    assert_not_loaded record, "file_data"
    assert_not_loaded record, "processing_log"
  end

  test "it loads the columns when accessed as an attribute" do
    record = Implicit.find(:first)
    assert_equal "This is the file data!", record.file_data
    assert_equal "Processed 0 entries OK", record.results
    assert_equal record.results.object_id, record.results.object_id # should not have to re-find
    record = Implicit.find(:all).first
    assert_not_equal nil, record.file_data
  end

  test "it loads the column when accessed using read_attribute" do
    record = Implicit.find(:first)
    assert_equal "This is the file data!", record.read_attribute(:file_data)
    assert_equal "This is the file data!", record.read_attribute("file_data")
    assert_equal "Processed 0 entries OK", record.read_attribute("results")
    assert_equal record.read_attribute(:results).object_id, record.read_attribute("results").object_id # should not have to re-find
  end

  test "it loads the column when accessed using read_attribute_before_type_cast" do
    record = Implicit.find(:first)
    assert_equal "This is the file data!", record.read_attribute_before_type_cast("file_data")
    assert_equal "Processed 0 entries OK", record.read_attribute_before_type_cast("results")
    # read_attribute_before_type_cast doesn't tolerate symbol arguments as read_attribute does
  end

  test "it loads the column when generating #attributes" do
    attributes = Implicit.find(:first).attributes
    assert_equal "This is the file data!", attributes["file_data"]
  end

  test "it loads the column when generating #to_json" do
    json = Implicit.find(:first).to_json
    assert_equal "This is the file data!", ActiveSupport::JSON.decode(json)["implicit"]["file_data"]
  end

  test "it loads the column for #clone" do
    record = Implicit.find(:first).clone
    assert_equal "This is the file data!", record.file_data
    record = Implicit.find(:first).clone.tap(&:save!)
    assert_equal "This is the file data!", Implicit.find(record.id).file_data
  end

  test "it clears the column on reload, and can load it again" do
    record = Implicit.find(:first)
    old_object_id = record.file_data.object_id # forces the column to load before reload
    Implicit.update_all(:file_data => "New file data")
    record.reload
    assert_not_loaded record, "file_data"
    assert_equal "New file data", record.file_data
  end

  test "it doesn't override custom :select finds" do
    record = Implicit.find(:first, :select => "id, file_data")
    # MissingAttributeError moved between ActiveRecord and ActiveModel across Rails versions.
    klass = ActiveModel.const_defined?(:MissingAttributeError) ? ActiveModel::MissingAttributeError : ActiveRecord::MissingAttributeError
    assert_raise klass do
      record.processed_at # explicitly not loaded, overriding default
    end
    assert_equal "This is the file data!", record.instance_variable_get("@attributes")["file_data"] # already loaded, overriding default
  end

  test "it raises normal ActiveRecord::RecordNotFound if the record is deleted before the column load" do
    record = Implicit.find(:first)
    Implicit.delete_all
    assert_raise ActiveRecord::RecordNotFound do
      record.file_data
    end
  end

  test "it doesn't raise on column access if the record is deleted after the column load" do
    record = Implicit.find(:first)
    record.file_data
    Implicit.delete_all
    assert_equal "This is the file data!", record.file_data # check it doesn't raise
  end

  test "it updates the select strings when columns are changed and the column information is reset" do
    ActiveRecord::Schema.define(:version => 1) do
      create_table :dummies, :force => true do |t|
        t.string :some_field
        t.binary :big_field
      end
    end
    class Dummy < ActiveRecord::Base
      columns_on_demand
    end
    assert_match /\W*id\W*, \W*some_field\W*/, Dummy.default_select(false)
    ActiveRecord::Schema.define(:version => 2) do
      create_table :dummies, :force => true do |t|
        t.string :some_field
        t.binary :big_field
        t.string :another_field
      end
    end
    # Stale until reset_column_information is called below.
    assert_match /\W*id\W*, \W*some_field\W*/, Dummy.default_select(false)
    Dummy.reset_column_information
    assert_match /\W*id\W*, \W*some_field\W*, \W*another_field\W*/, Dummy.default_select(false)
  end

  test "it handles STI models" do
    ActiveRecord::Schema.define(:version => 1) do
      create_table :stis, :force => true do |t|
        t.string :type
        t.string :some_field
        t.binary :big_field
      end
    end
    class Sti < ActiveRecord::Base
      columns_on_demand
    end
    class StiChild < Sti
      columns_on_demand :some_field
    end
    assert_match /\W*id\W*, \W*type\W*, \W*some_field\W*/, Sti.default_select(false)
    assert_match /\W*id\W*, \W*type\W*, \W*big_field\W*/, StiChild.default_select(false)
  end

  test "it works on child records loaded from associations" do
    parent = parents(:some_parent)
    child = parent.children.find(:first)
    assert_not_loaded child, "test_data"
    assert_equal "Some test data", child.test_data
  end

  test "it works on parent records loaded from associations" do
    child = children(:a_child_of_some_parent)
    parent = child.parent
    assert_not_loaded parent, "info"
    assert_equal "Here's some info.", parent.info
  end

  test "it doesn't break validates_presence_of" do
    class ValidatedImplicit < ActiveRecord::Base
      set_table_name "implicits"
      columns_on_demand
      validates_presence_of :original_filename, :file_data, :results
    end
    assert !ValidatedImplicit.new(:original_filename => "test.txt").valid?
    instance = ValidatedImplicit.create!(:original_filename => "test.txt", :file_data => "test file data", :results => "test results")
    instance.update_attributes!({}) # file_data and results are already loaded
    new_instance = ValidatedImplicit.find(instance.id)
    new_instance.update_attributes!({}) # file_data and results aren't loaded yet, but will be loaded to validate
  end

  test "it works with serialized columns" do
    class Serializing < ActiveRecord::Base
      columns_on_demand
      serialize :data
    end
    data = {:foo => '1', :bar => '2', :baz => '3'}
    original_record = Serializing.create!(:data => data)
    assert_equal data, original_record.data
    record = Serializing.find(:first)
    assert_not_loaded record, "data"
    assert_equal data, record.data
    assert_equal false, record.data_changed?
    assert_equal false, record.changed?
    assert_equal data, record.data
    record.data = "replacement"
    assert_equal true, record.data_changed?
    assert_equal true, record.changed?
    record.save!
    record = Serializing.find(:first)
    assert_not_loaded record, "data"
    assert_equal "replacement", record.data
  end
end
Add compatibility with Rails 2.3 for a couple of tests
require File.expand_path(File.join(File.dirname(__FILE__), 'test_helper'))
require File.expand_path(File.join(File.dirname(__FILE__), 'schema'))
# Test models exercising the columns_on_demand class method.

# Names its lazily-loaded columns explicitly.
class Explicit < ActiveRecord::Base
  columns_on_demand :file_data, :processing_log, :original_filename
end

# No arguments: all :binary and :text columns are loaded on demand (see tests below).
class Implicit < ActiveRecord::Base
  columns_on_demand
end

# Parent/Child exercise the behaviour across has_many / belongs_to associations.
class Parent < ActiveRecord::Base
  columns_on_demand
  has_many :children
end

class Child < ActiveRecord::Base
  columns_on_demand
  belongs_to :parent
end
# Exercises the columns_on_demand plugin: large (:binary/:text) columns are
# excluded from the default SELECT and loaded lazily on first access.
# This revision adds compatibility tweaks for older Rails (see the #to_json
# and MissingAttributeError tests below).
class ColumnsOnDemandTest < ActiveSupport::TestCase
  # Asserts that the attribute has not been fetched yet: lazy columns are
  # absent from the raw @attributes hash until first accessed.
  def assert_not_loaded(record, attr_name)
    assert_equal nil, record.instance_variable_get("@attributes")[attr_name.to_s]
  end

  fixtures :all
  self.use_transactional_fixtures = true

  test "it lists explicitly given columns for loading on demand" do
    assert_equal ["file_data", "processing_log", "original_filename"], Explicit.columns_to_load_on_demand
  end

  test "it lists all :binary and :text columns for loading on demand if none are explicitly given" do
    assert_equal ["file_data", "processing_log", "results"], Implicit.columns_to_load_on_demand
  end

  test "it selects all the other columns for loading eagerly" do
    assert_match /\W*id\W*, \W*results\W*, \W*processed_at\W*/, Explicit.default_select(false)
    assert_match /\W*explicits\W*.results/, Explicit.default_select(true)
    assert_match /\W*id\W*, \W*original_filename\W*, \W*processed_at\W*/, Implicit.default_select(false)
    assert_match /\W*implicits\W*.original_filename/, Implicit.default_select(true)
  end

  test "it doesn't load the columns_to_load_on_demand straight away when finding the records" do
    record = Implicit.find(:first)
    assert_not_equal nil, record
    assert_not_loaded record, "file_data"
    assert_not_loaded record, "processing_log"
    record = Implicit.find(:all).first
    assert_not_equal nil, record
    assert_not_loaded record, "file_data"
    assert_not_loaded record, "processing_log"
  end

  test "it loads the columns when accessed as an attribute" do
    record = Implicit.find(:first)
    assert_equal "This is the file data!", record.file_data
    assert_equal "Processed 0 entries OK", record.results
    assert_equal record.results.object_id, record.results.object_id # should not have to re-find
    record = Implicit.find(:all).first
    assert_not_equal nil, record.file_data
  end

  test "it loads the column when accessed using read_attribute" do
    record = Implicit.find(:first)
    assert_equal "This is the file data!", record.read_attribute(:file_data)
    assert_equal "This is the file data!", record.read_attribute("file_data")
    assert_equal "Processed 0 entries OK", record.read_attribute("results")
    assert_equal record.read_attribute(:results).object_id, record.read_attribute("results").object_id # should not have to re-find
  end

  test "it loads the column when accessed using read_attribute_before_type_cast" do
    record = Implicit.find(:first)
    assert_equal "This is the file data!", record.read_attribute_before_type_cast("file_data")
    assert_equal "Processed 0 entries OK", record.read_attribute_before_type_cast("results")
    # read_attribute_before_type_cast doesn't tolerate symbol arguments as read_attribute does
  end

  test "it loads the column when generating #attributes" do
    attributes = Implicit.find(:first).attributes
    assert_equal "This is the file data!", attributes["file_data"]
  end

  test "it loads the column when generating #to_json" do
    # Force the root key so the decoded JSON shape is the same on all Rails versions.
    ActiveRecord::Base.include_root_in_json = true
    json = Implicit.find(:first).to_json
    assert_equal "This is the file data!", ActiveSupport::JSON.decode(json)["implicit"]["file_data"]
  end

  test "it loads the column for #clone" do
    record = Implicit.find(:first).clone
    assert_equal "This is the file data!", record.file_data
    record = Implicit.find(:first).clone.tap(&:save!)
    assert_equal "This is the file data!", Implicit.find(record.id).file_data
  end

  test "it clears the column on reload, and can load it again" do
    record = Implicit.find(:first)
    old_object_id = record.file_data.object_id # forces the column to load before reload
    Implicit.update_all(:file_data => "New file data")
    record.reload
    assert_not_loaded record, "file_data"
    assert_equal "New file data", record.file_data
  end

  test "it doesn't override custom :select finds" do
    record = Implicit.find(:first, :select => "id, file_data")
    # MissingAttributeError moved between ActiveRecord and ActiveModel across Rails versions.
    klass = ActiveRecord.const_defined?(:MissingAttributeError) ? ActiveRecord::MissingAttributeError : ActiveModel::MissingAttributeError
    assert_raise klass do
      record.processed_at # explicitly not loaded, overriding default
    end
    assert_equal "This is the file data!", record.instance_variable_get("@attributes")["file_data"] # already loaded, overriding default
  end

  test "it raises normal ActiveRecord::RecordNotFound if the record is deleted before the column load" do
    record = Implicit.find(:first)
    Implicit.delete_all
    assert_raise ActiveRecord::RecordNotFound do
      record.file_data
    end
  end

  test "it doesn't raise on column access if the record is deleted after the column load" do
    record = Implicit.find(:first)
    record.file_data
    Implicit.delete_all
    assert_equal "This is the file data!", record.file_data # check it doesn't raise
  end

  test "it updates the select strings when columns are changed and the column information is reset" do
    ActiveRecord::Schema.define(:version => 1) do
      create_table :dummies, :force => true do |t|
        t.string :some_field
        t.binary :big_field
      end
    end
    class Dummy < ActiveRecord::Base
      columns_on_demand
    end
    assert_match /\W*id\W*, \W*some_field\W*/, Dummy.default_select(false)
    ActiveRecord::Schema.define(:version => 2) do
      create_table :dummies, :force => true do |t|
        t.string :some_field
        t.binary :big_field
        t.string :another_field
      end
    end
    # Stale until reset_column_information is called below.
    assert_match /\W*id\W*, \W*some_field\W*/, Dummy.default_select(false)
    Dummy.reset_column_information
    assert_match /\W*id\W*, \W*some_field\W*, \W*another_field\W*/, Dummy.default_select(false)
  end

  test "it handles STI models" do
    ActiveRecord::Schema.define(:version => 1) do
      create_table :stis, :force => true do |t|
        t.string :type
        t.string :some_field
        t.binary :big_field
      end
    end
    class Sti < ActiveRecord::Base
      columns_on_demand
    end
    class StiChild < Sti
      columns_on_demand :some_field
    end
    assert_match /\W*id\W*, \W*type\W*, \W*some_field\W*/, Sti.default_select(false)
    assert_match /\W*id\W*, \W*type\W*, \W*big_field\W*/, StiChild.default_select(false)
  end

  test "it works on child records loaded from associations" do
    parent = parents(:some_parent)
    child = parent.children.find(:first)
    assert_not_loaded child, "test_data"
    assert_equal "Some test data", child.test_data
  end

  test "it works on parent records loaded from associations" do
    child = children(:a_child_of_some_parent)
    parent = child.parent
    assert_not_loaded parent, "info"
    assert_equal "Here's some info.", parent.info
  end

  test "it doesn't break validates_presence_of" do
    class ValidatedImplicit < ActiveRecord::Base
      set_table_name "implicits"
      columns_on_demand
      validates_presence_of :original_filename, :file_data, :results
    end
    assert !ValidatedImplicit.new(:original_filename => "test.txt").valid?
    instance = ValidatedImplicit.create!(:original_filename => "test.txt", :file_data => "test file data", :results => "test results")
    instance.update_attributes!({}) # file_data and results are already loaded
    new_instance = ValidatedImplicit.find(instance.id)
    new_instance.update_attributes!({}) # file_data and results aren't loaded yet, but will be loaded to validate
  end

  test "it works with serialized columns" do
    class Serializing < ActiveRecord::Base
      columns_on_demand
      serialize :data
    end
    data = {:foo => '1', :bar => '2', :baz => '3'}
    original_record = Serializing.create!(:data => data)
    assert_equal data, original_record.data
    record = Serializing.find(:first)
    assert_not_loaded record, "data"
    assert_equal data, record.data
    assert_equal false, record.data_changed?
    assert_equal false, record.changed?
    assert_equal data, record.data
    record.data = "replacement"
    assert_equal true, record.data_changed?
    assert_equal true, record.changed?
    record.save!
    record = Serializing.find(:first)
    assert_not_loaded record, "data"
    assert_equal "replacement", record.data
  end
end
|
module Spree
  module Admin
    # Admin CRUD plus state-machine actions for return authorizations,
    # nested under an order (looked up by its number).
    class ReturnAuthorizationsController < ResourceController
      belongs_to 'spree/order', find_by: :number
      before_action :load_form_data, only: [:new, :edit]
      create.fails :load_form_data
      update.fails :load_form_data

      # Fires the state-machine event named in params[:e] (e.g. "cancel")
      # on the return authorization, then redirects back.
      def fire
        @return_authorization.send("#{params[:e]}!")
        flash[:success] = t('spree.return_authorization_updated')
        # BUG FIX: the original fallback called admin_order_return_authorization(@order),
        # which is not a generated URL helper (missing the `_path` suffix and the
        # plural); use the index path for the order's return authorizations.
        redirect_back(fallback_location: admin_order_return_authorizations_path(@order))
      end

      private

      # Loads everything the new/edit form needs.
      def load_form_data
        load_return_items
        load_reimbursement_types
        load_return_reasons
        load_stock_locations
      end

      # To satisfy how nested attributes works we want to create placeholder ReturnItems for
      # any InventoryUnits that have not already been added to the ReturnAuthorization.
      def load_return_items
        all_inventory_units = @return_authorization.order.inventory_units
        associated_inventory_units = @return_authorization.return_items.map(&:inventory_unit)
        unassociated_inventory_units = all_inventory_units - associated_inventory_units
        new_return_items = unassociated_inventory_units.map do |new_unit|
          Spree::ReturnItem.new(inventory_unit: new_unit).tap(&:set_default_amount)
        end
        @form_return_items = (@return_authorization.return_items + new_return_items).sort_by(&:inventory_unit_id)
      end

      # Only reimbursement types the current admin may read, and only active ones.
      def load_reimbursement_types
        @reimbursement_types = Spree::ReimbursementType.accessible_by(current_ability, :read).active
      end

      def load_return_reasons
        @reasons = Spree::ReturnReason.reasons_for_return_items(@return_authorization.return_items)
      end

      def load_stock_locations
        @stock_locations = Spree::StockLocation.order_default.active
      end
    end
  end
end
Fix return URL after fire in return_authorizations controller
module Spree
  module Admin
    # Admin CRUD plus state-machine actions for return authorizations,
    # nested under an order (looked up by its number).
    class ReturnAuthorizationsController < ResourceController
      belongs_to 'spree/order', find_by: :number
      before_action :load_form_data, only: [:new, :edit]
      create.fails :load_form_data
      update.fails :load_form_data

      # Fires the state-machine event named in params[:e] (e.g. "cancel")
      # on the return authorization, then redirects back with a flash.
      def fire
        @return_authorization.send("#{params[:e]}!")
        redirect_back(fallback_location: admin_order_return_authorizations_path(@order),
                      flash: { success: t('spree.return_authorization_updated') })
      end

      private

      # Loads everything the new/edit form needs.
      def load_form_data
        load_return_items
        load_reimbursement_types
        load_return_reasons
        load_stock_locations
      end

      # To satisfy how nested attributes works we want to create placeholder ReturnItems for
      # any InventoryUnits that have not already been added to the ReturnAuthorization.
      def load_return_items
        all_inventory_units = @return_authorization.order.inventory_units
        associated_inventory_units = @return_authorization.return_items.map(&:inventory_unit)
        unassociated_inventory_units = all_inventory_units - associated_inventory_units
        new_return_items = unassociated_inventory_units.map do |new_unit|
          Spree::ReturnItem.new(inventory_unit: new_unit).tap(&:set_default_amount)
        end
        @form_return_items = (@return_authorization.return_items + new_return_items).sort_by(&:inventory_unit_id)
      end

      # Only reimbursement types the current admin may read, and only active ones.
      def load_reimbursement_types
        @reimbursement_types = Spree::ReimbursementType.accessible_by(current_ability, :read).active
      end

      def load_return_reasons
        @reasons = Spree::ReturnReason.reasons_for_return_items(@return_authorization.return_items)
      end

      def load_stock_locations
        @stock_locations = Spree::StockLocation.order_default.active
      end
    end
  end
end
|
require 'rib/test'
require 'rib/core/history_file'
# Specs for the history_file plugin: persisting shell history to disk
# around the shell's before_loop/after_loop lifecycle.
describe Rib::HistoryFile do
  behaves_like :rib

  before do
    Rib::HistoryFile.enable
    # Unique temp path per run so parallel/repeated runs don't collide.
    @history = "/tmp/test_rib_#{rand}"
    @shell = Rib::Shell.new(:history_file => @history).before_loop
  end

  after do
    FileUtils.rm_f(@history)
  end

  should '#after_loop save history' do
    inputs = %w[blih blah]
    @shell.history.replace(inputs)
    @shell.after_loop
    File.read(@history).should.eq "#{inputs.join("\n")}\n"
  end

  should '#before_loop load previous history' do
    File.open(@history, 'w'){ |f| f.write "check\nthe\nmike" }
    @shell.before_loop
    @shell.history.to_a.should.eq %w[check the mike]
  end

  should '#before_loop have empty history if no history file exists' do
    @shell.before_loop
    @shell.history.to_a.should.eq []
  end

  should '#read_history be accessible to plugins in #before_loop' do
    # The plugin overrides read_history; before_loop must call it.
    mod = Module.new do
      def read_history
        config[:history] = ['pong_read_history']
      end
    end
    shell = Rib::Shell.dup
    shell.use(mod)
    shell.new.before_loop.history.should.eq ['pong_read_history']
  end

  should '#write_history be accessible to plugins in #after_loop' do
    # The plugin overrides write_history; after_loop must call it.
    mod = Module.new do
      def write_history
        config[:history] = ['pong_write_history']
      end
    end
    shell = Rib::Shell.dup
    shell.use(mod)
    shell.new.before_loop.after_loop.history.should.eq ['pong_write_history']
  end
end
test_history_file.rb: rename @history to @history_path to avoid confusion with the shell's history
require 'rib/test'
require 'rib/core/history_file'
# Specs for the history_file plugin: persisting shell history to disk
# around the shell's before_loop/after_loop lifecycle.
# (@history_path names the file; @shell.history is the in-memory history.)
describe Rib::HistoryFile do
  behaves_like :rib

  before do
    Rib::HistoryFile.enable
    # Unique temp path per run so parallel/repeated runs don't collide.
    @history_path = "/tmp/test_rib_#{rand}"
    @shell = Rib::Shell.new(:history_file => @history_path).before_loop
  end

  after do
    FileUtils.rm_f(@history_path)
  end

  should '#after_loop save history' do
    inputs = %w[blih blah]
    @shell.history.replace(inputs)
    @shell.after_loop
    File.read(@history_path).should.eq "#{inputs.join("\n")}\n"
  end

  should '#before_loop load previous history' do
    File.open(@history_path, 'w'){ |f| f.write "check\nthe\nmike" }
    @shell.before_loop
    @shell.history.to_a.should.eq %w[check the mike]
  end

  should '#before_loop have empty history if no history file exists' do
    @shell.before_loop
    @shell.history.to_a.should.eq []
  end

  should '#read_history be accessible to plugins in #before_loop' do
    # The plugin overrides read_history; before_loop must call it.
    mod = Module.new do
      def read_history
        config[:history] = ['pong_read_history']
      end
    end
    shell = Rib::Shell.dup
    shell.use(mod)
    shell.new.before_loop.history.should.eq ['pong_read_history']
  end

  should '#write_history be accessible to plugins in #after_loop' do
    # The plugin overrides write_history; after_loop must call it.
    mod = Module.new do
      def write_history
        config[:history] = ['pong_write_history']
      end
    end
    shell = Rib::Shell.dup
    shell.use(mod)
    shell.new.before_loop.after_loop.history.should.eq ['pong_write_history']
  end
end
|
# Boot file: loads all gem dependencies and then the application code
# (models, core, adapters, middleware, API) via require_all.
$LOAD_PATH.unshift('lib')
require 'grape'
require 'grape-entity'
require 'grape-swagger'
require 'require_all'
require 'logger'
require 'kwalify'
require 'daybreak'
require 'lmdb'
require 'moneta'
require 'tmpdir'
require 'find'
require 'filesize'
require 'time'
require 'securerandom'
require 'rack/body_proxy'
require 'rack/utils'
require 'rack/response'
require 'rack/auth/basic'
# Currently we need excon AND rest_client, due to excon not supporting multipart requests. See also:
# https://github.com/excon/excon/issues/353
require 'excon'
require 'rest_client'
# faye is used to fetch logs from cloud foundry, using websocket communication
require 'faye/websocket'
require 'rack/stream'
require 'protobuf'
require 'eventmachine'
require 'request_store'
require 'git'
require 'mime-types'
# require archive dependencies
require 'zip'
require 'zlib'
require 'rubygems/package'
# require url regex
# NOTE(review): 'regexp' is not a Ruby stdlib feature name — presumably a
# lib/regexp.rb on the load path prepended above; confirm.
require 'regexp'
# OS detection
# BUG FIX: the os gem's file is lowercase 'os'; requiring 'OS' only works on
# case-insensitive filesystems and fails on Linux.
require 'os'
# models
require_all 'app/models'
# core
require_all 'app/core'
# adapters
require_all 'app/adapters'
# rack middleware
require_all 'app/middleware'
# api
require_all 'app/api/entities'
require_all 'app/api/helpers'
# This is a workaround to properly load all swagger-documentation:
# Load each api version, but start with the protected controllers
Paasal::ApiDetector.api_versions.each do |api_version|
  require_all "app/api/versions/#{api_version}/protected"
  require_all "app/api/versions/#{api_version}"
end
# Finally load the complete API to make sure we did not miss anything
require_all 'app/api'
Require lowercase 'os' instead of capitalized 'OS' (breaks on case-sensitive filesystems)
# Boot file: loads all gem dependencies and then the application code
# (models, core, adapters, middleware, API) via require_all.
$LOAD_PATH.unshift('lib')
require 'grape'
require 'grape-entity'
require 'grape-swagger'
require 'require_all'
require 'logger'
require 'kwalify'
require 'daybreak'
require 'lmdb'
require 'moneta'
require 'tmpdir'
require 'find'
require 'filesize'
require 'time'
require 'securerandom'
require 'rack/body_proxy'
require 'rack/utils'
require 'rack/response'
require 'rack/auth/basic'
# Currently we need excon AND rest_client, due to excon not supporting multipart requests. See also:
# https://github.com/excon/excon/issues/353
require 'excon'
require 'rest_client'
# faye is used to fetch logs from cloud foundry, using websocket communication
require 'faye/websocket'
require 'rack/stream'
require 'protobuf'
require 'eventmachine'
require 'request_store'
require 'git'
require 'mime-types'
# require archive dependencies
require 'zip'
require 'zlib'
require 'rubygems/package'
# require url regex
# NOTE(review): 'regexp' is not a Ruby stdlib feature name — presumably a
# lib/regexp.rb on the load path prepended above; confirm.
require 'regexp'
# OS detection
require 'os'
# models
require_all 'app/models'
# core
require_all 'app/core'
# adapters
require_all 'app/adapters'
# rack middleware
require_all 'app/middleware'
# api
require_all 'app/api/entities'
require_all 'app/api/helpers'
# This is a workaround to properly load all swagger-documentation:
# Load each api version, but start with the protected controllers
Paasal::ApiDetector.api_versions.each do |api_version|
  require_all "app/api/versions/#{api_version}/protected"
  require_all "app/api/versions/#{api_version}"
end
# Finally load the complete API to make sure we did not miss anything
require_all 'app/api'
|
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'rspec/autorun'
require 'database_cleaner'
require 'capybara/rails'
require 'capybara/rspec'
require 'factory_girl_rails'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  # ## Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  #config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = false

  # Cleanup via DatabaseCleaner instead of transactional fixtures: truncate
  # before each example and reload factory definitions.
  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
    DatabaseCleaner.clean_with(:truncation)
    FactoryGirl.reload
  end

  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = "random"
  config.include Capybara::DSL
  config.include FactoryGirl::Syntax::Methods
end
# Compares the database-side ordering produced by model.sql_sort against an
# in-Ruby sort of +arr+, looking only at the de-duplicated values of +field+.
# Returns true when both orderings agree (ascending when +asc+ is true,
# descending otherwise).
def same_order?(model, field, arr, asc = true)
  extract = proc { |record| record.send(field) }
  db_values = model.sql_sort(field.to_sym, asc ? :asc : :desc).map(&extract).uniq
  expected = arr.sort_by(&extract).map(&extract).uniq
  expected.reverse! unless asc
  db_values == expected
end
# Runs the gem's install generator against the dummy app (quietly,
# overwriting previously generated files), echoing the command first.
def run_generator
  command = 'rails g sql_search_n_sort:install --quiet --force'
  puts "\n#{command}"
  %x(#{command})
end
# Reverses run_generator: destroys the files the install generator created.
def run_destroy
  command = 'rails d sql_search_n_sort:install --quiet --force'
  puts "\n#{command}"
  %x(#{command})
end
remove spec_helper in order to import stash
|
Release 0.5.1
|
# encoding: utf-8
require 'pp'
require 'fileutils' # FIX: FileUtils is used below but was never required
# TODO: Should pull report dir (if any) from cucumber command options
REPORT_DIR = 'report'
FileUtils.mkdir_p REPORT_DIR
# Converts an arbitrary string into a safe, lower-case filename fragment.
# Runs of characters outside [a-z0-9-_] become +sep+; repeated separators are
# collapsed and leading/trailing separators stripped. A nil or empty +sep+
# simply deletes the disallowed characters.
def filenameify(string, sep = '-')
  filename = string.downcase
  # BUG FIX: gsub! raises TypeError when sep is nil (the guard below already
  # anticipated a nil sep); normalise it to a string first.
  filename.gsub!(/[^a-z0-9\-_]+/, sep.to_s)
  unless sep.nil? || sep.empty?
    re_sep = Regexp.escape(sep)
    filename.gsub!(/#{re_sep}{2,}/, sep)        # collapse repeated separators
    filename.gsub!(/^#{re_sep}|#{re_sep}$/, '') # trim leading/trailing separator
  end
  filename
end
# Captures a browser screenshot, embeds it in the cucumber report and also
# writes it under REPORT_DIR on disk.
def add_screenshot(name)
  filename = "#{filenameify(name)}.png"
  screenshot = @browser.screenshot
  embed screenshot.base64, 'image/png', "Screenshot"
  # FIX: the interpolation was garbled ("#(unknown)"); `filename` was computed
  # above and is clearly the intended on-disk name.
  screenshot.save "#{REPORT_DIR}/#{filename}" # Keep on disk, as well
end
# BEFORE HOOKS will run in the same order of which they are registered.
# Resets per-scenario state.
Before do
  @context = {}
  @active = {}  # Hash of active context objects (Book, Patron, Branch, etc.)
  @cleanup = [] # A stack of operations to be undone in reversed order after feature
end
# Lazily creates the shared browser/site unless the scenario is tagged
# @no-browser. Defaults to phantomjs; the BROWSER env var overrides the
# driver (and skips the window resize).
Before do |scenario|
  if !scenario.source_tag_names.include?("@no-browser")
    # NOTE: the space before the parenthesis is deliberate — it makes .to_sym
    # apply to the (ENV[...] || "phantomjs") argument, not to the browser.
    @browser = @browser || (Watir::Browser.new (ENV['BROWSER'] || "phantomjs").to_sym)
    @browser.window.resize_to(800, 600) unless ENV['BROWSER']
    @site = @site || Site.new(@browser)
  end
end
# AFTER HOOKS will run in the OPPOSITE order of which they are registered.
After do |scenario| # The final hook
  @site = nil
  @browser.close if @browser
  # NOTE(review): this starts a *fresh* PhantomJS driver just to quit it —
  # confirm it actually reaps a stalled instance rather than leaking one.
  Selenium::WebDriver.for(:phantomjs).quit # force PhantomJS to quit if stalled
end
# Returns a human-readable title for either a plain scenario (which has a
# #name) or a scenario-outline example (which exposes it via #scenario_outline).
def title_of(scenario)
  if defined? scenario.name
    scenario.name
  else
    scenario.scenario_outline.name
  end
end
# Runs the registered @cleanup lambdas in reverse order, dumping context and
# browser-console output for debugging. A cleanup failure on an otherwise
# passing scenario is re-raised at the end so it is not silently lost.
After do |scenario| # cleanup based on @cleanup - in reverse order
  STDOUT.puts "--------------- Context: #{title_of(scenario)} "
  STDOUT.puts @context.pretty_inspect
  last_cleanup_exception = nil
  if @browser && @browser.driver.manage.logs.get("browser").length > 0
    STDOUT.puts "--------------- Messages to browser console:"
    STDOUT.puts @browser.driver.manage.logs.get "browser"
  end
  STDOUT.puts "--------------- Cleanup: #{title_of(scenario)} "
  if @browser
    step "at jeg er logget inn som adminbruker" if @cleanup.length > 0 # TODO Only relevant for Koha-related cleanups
  end
  @cleanup.reverse.each do |hash|
    cleanup_desc = " cleanup '#{hash.keys.first}'"
    cleanup_func = hash.values.first
    begin
      cleanup_func.call
      STDOUT.puts "#{cleanup_desc} completed"
    rescue Exception => e
      # Keep going: remaining cleanups still run; only the last failure is kept.
      last_cleanup_exception = e
      STDOUT.puts "#{cleanup_desc} failed: #{e}"
      e.backtrace.each_with_index { |line, i| STDOUT.puts("  #{line}") if i < 3 }
      STDOUT.puts "--------------- Active context upon failure: #{cleanup_desc} "
      STDOUT.puts "#{@active.pretty_inspect}"
      add_screenshot("#{cleanup_desc}")
    end
  end
  STDOUT.puts("Scenario failed: #{scenario.exception}") if scenario.failed?
  STDOUT.puts("Cleanup failed: #{last_cleanup_exception}") if last_cleanup_exception
  STDOUT.puts "======================================================================================== "
  STDOUT.flush
  raise Exception.new("Cleanup failed: #{last_cleanup_exception}") if !scenario.failed? && last_cleanup_exception
end
# On failure, attach a screenshot and dump the active context for debugging.
After do |scenario|
  if scenario.failed? && @browser
    add_screenshot(title_of(scenario))
    STDOUT.puts "--------------- Active context upon failure: #{title_of(scenario)} "
    STDOUT.puts "#{@active.pretty_inspect}"
  end
end
Revert "Force PhantomJS to quit and release browser if stalled"
It did not solve the timeout-on-login problem after all.
This reverts commit 1aae59f4340c89b9acd6cd53d7609533073b1152.
# encoding: utf-8
require 'pp'
require 'fileutils' # FIX: FileUtils is used below but was never required
# TODO: Should pull report dir (if any) from cucumber command options
REPORT_DIR = 'report'
FileUtils.mkdir_p REPORT_DIR
# Converts an arbitrary string into a safe, lower-case filename fragment.
# Runs of characters outside [a-z0-9-_] become +sep+; repeated separators are
# collapsed and leading/trailing separators stripped. A nil or empty +sep+
# simply deletes the disallowed characters.
def filenameify(string, sep = '-')
  filename = string.downcase
  # BUG FIX: gsub! raises TypeError when sep is nil (the guard below already
  # anticipated a nil sep); normalise it to a string first.
  filename.gsub!(/[^a-z0-9\-_]+/, sep.to_s)
  unless sep.nil? || sep.empty?
    re_sep = Regexp.escape(sep)
    filename.gsub!(/#{re_sep}{2,}/, sep)        # collapse repeated separators
    filename.gsub!(/^#{re_sep}|#{re_sep}$/, '') # trim leading/trailing separator
  end
  filename
end
# Captures a browser screenshot, embeds it in the cucumber report and also
# writes it under REPORT_DIR on disk.
def add_screenshot(name)
  filename = "#{filenameify(name)}.png"
  screenshot = @browser.screenshot
  embed screenshot.base64, 'image/png', "Screenshot"
  # FIX: the interpolation was garbled ("#(unknown)"); `filename` was computed
  # above and is clearly the intended on-disk name.
  screenshot.save "#{REPORT_DIR}/#{filename}" # Keep on disk, as well
end
# BEFORE HOOKS will run in the same order of which they are registered.
# Resets per-scenario state.
Before do
  @context = {}
  @active = {}  # Hash of active context objects (Book, Patron, Branch, etc.)
  @cleanup = [] # A stack of operations to be undone in reversed order after feature
end
# Lazily creates the shared browser/site unless the scenario is tagged
# @no-browser. Defaults to phantomjs; the BROWSER env var overrides the
# driver (and skips the window resize).
Before do |scenario|
  if !scenario.source_tag_names.include?("@no-browser")
    # NOTE: the space before the parenthesis is deliberate — it makes .to_sym
    # apply to the (ENV[...] || "phantomjs") argument, not to the browser.
    @browser = @browser || (Watir::Browser.new (ENV['BROWSER'] || "phantomjs").to_sym)
    @browser.window.resize_to(800, 600) unless ENV['BROWSER']
    @site = @site || Site.new(@browser)
  end
end
# AFTER HOOKS will run in the OPPOSITE order of which they are registered.
# Closes the shared browser and drops the site wrapper.
After do |scenario| # The final hook
  @site = nil
  @browser.close if @browser
end
# Returns a printable title for either a plain Scenario (responds to #name)
# or a Scenario Outline row (title lives on the enclosing outline).
def title_of(scenario)
  if defined?(scenario.name)
    scenario.name
  else
    scenario.scenario_outline.name
  end
end
After do |scenario| # cleanup based on @cleanup - in reverse order
STDOUT.puts "--------------- Context: #{title_of(scenario)} "
STDOUT.puts @context.pretty_inspect
last_cleanup_exception = nil
# Surface anything the page logged to the browser console before cleanup.
if @browser && @browser.driver.manage.logs.get("browser").length > 0
STDOUT.puts "--------------- Messages to browser console:"
STDOUT.puts @browser.driver.manage.logs.get "browser"
end
STDOUT.puts "--------------- Cleanup: #{title_of(scenario)} "
if @browser
step "at jeg er logget inn som adminbruker" if @cleanup.length > 0 # TODO Only relevant for Koha-related cleanups
end
# Undo registered operations in reverse (LIFO) order. Each entry is a
# one-pair hash of { description => callable }.
@cleanup.reverse.each do |hash|
cleanup_desc = " cleanup '#{hash.keys.first}'"
cleanup_func = hash.values.first
begin
cleanup_func.call
STDOUT.puts "#{cleanup_desc} completed"
rescue Exception => e
# Only the LAST failure is kept; earlier ones are logged then overwritten.
last_cleanup_exception = e
STDOUT.puts "#{cleanup_desc} failed: #{e}"
e.backtrace.each_with_index { |line, i| STDOUT.puts(" #{line}") if i < 3 }
STDOUT.puts "--------------- Active context upon failure: #{cleanup_desc} "
STDOUT.puts "#{@active.pretty_inspect}"
add_screenshot("#{cleanup_desc}")
end
end
STDOUT.puts("Scenario failed: #{scenario.exception}") if scenario.failed?
STDOUT.puts("Cleanup failed: #{last_cleanup_exception}") if last_cleanup_exception
STDOUT.puts "======================================================================================== "
STDOUT.flush
# Fail an otherwise-green scenario if its cleanup broke.
raise Exception.new("Cleanup failed: #{last_cleanup_exception}") if !scenario.failed? && last_cleanup_exception
end
After do |scenario|
# Registered last, so this runs FIRST of the After hooks: capture the
# failure evidence before any cleanup mutates application state.
if scenario.failed? && @browser
add_screenshot(title_of(scenario))
STDOUT.puts "--------------- Active context upon failure: #{title_of(scenario)} "
STDOUT.puts "#{@active.pretty_inspect}"
end
end
|
# encoding: utf-8
# Unit tests for the Nanoc UglifyJS filter (thin wrapper over the `uglifier` gem).
class Nanoc::Filters::UglifyJSTest < MiniTest::Unit::TestCase
include Nanoc::TestHelpers
def test_filter
if_have 'uglifier' do
# Create filter
filter = ::Nanoc::Filters::UglifyJS.new
# Run filter
# NOTE(review): the expected mangled variable name (`a`) depends on the
# installed Uglifier version — confirm against the current gem.
result = filter.run("foo = 1; (function(bar) { if (true) alert(bar); })(foo)")
assert_equal("foo=1,function(a){alert(a)}(foo);", result)
end
end
def test_filter_with_options
if_have 'uglifier' do
# Create filter
filter = ::Nanoc::Filters::UglifyJS.new
# Run filter
result = filter.run("foo = 1; (function(bar) { if (true) alert(bar); })(foo)", :toplevel => true)
assert_equal("foo=1,function(a){alert(a)}(foo);", result)
end
end
end
Fixed failing UglifyJS test
# encoding: utf-8
# Unit tests for the Nanoc UglifyJS filter (thin wrapper over the `uglifier` gem).
class Nanoc::Filters::UglifyJSTest < MiniTest::Unit::TestCase
  include Nanoc::TestHelpers

  # JavaScript fed to the filter in both tests.
  JS_SOURCE = "foo = 1; (function(bar) { if (true) alert(bar); })(foo)"
  # Minified output expected from Uglifier (local mangled to `e`).
  JS_EXPECTED = "foo=1,function(e){alert(e)}(foo);"

  def test_filter
    if_have 'uglifier' do
      filter = ::Nanoc::Filters::UglifyJS.new
      assert_equal(JS_EXPECTED, filter.run(JS_SOURCE))
    end
  end

  def test_filter_with_options
    if_have 'uglifier' do
      filter = ::Nanoc::Filters::UglifyJS.new
      assert_equal(JS_EXPECTED, filter.run(JS_SOURCE, :toplevel => true))
    end
  end
end
|
require_relative "setup"
# Why must this be here at global scope?
# NOTE(review): API discovery happens at load time against a hard-coded local
# dev endpoint; the block supplies the auth Context used by every spec below.
BV = BitVault::Client.discover("http://localhost:8999/") { BitVault::Client::Context.new }
# Shorthand aliases used throughout the specs.
Resources = BitVault::Client::Resources
PassphraseBox = BitVault::Crypto::PassphraseBox
MultiWallet = BitVault::Bitcoin::MultiWallet
describe "Using the BitVault API" do
######################################################################
# Cached access to various test objects
######################################################################
=begin
def BV
@BV ||= BitVault::Client.discover("http://localhost:8999/") {
BitVault::Client::Context.new
}
end
=end
# ----------------------------------------------------------------------
# Memoized helpers. Each lazily builds the next object in the API graph:
# client -> user -> application -> wallet -> account -> address.
# Specs below depend on this construction order and on memoization
# (each object is created exactly once per run).
# ----------------------------------------------------------------------
def client
@client ||= begin
client = BV.spawn
client.context.password = "incredibly secure"
client
end
end
def context
@context ||= client.context
end
def resources
@resources ||= client.resources
end
def users
@users ||= resources.users
end
def user
@user ||= users.create(
:email => "matthew@bitvault.io",
:first_name => "Matthew",
:last_name => "King",
:password => "incredibly_secure"
)
end
def applications
@applications ||= user.applications
end
def application_names
# This won't actually work while we're returning only mock data
["bitcoin-emporium", "bitcoin-extravaganza", "bitcoins-r-us"]
end
def application_list
@application_list ||= application_names.map do |name|
applications.create(
:name => name,
:callback_url => "https://api.#{name}.io/events"
)
end
end
def application
application_list[0]
end
# Client-side multi-tree wallet (primary + backup key trees).
def new_wallet
@new_wallet ||= MultiWallet.generate [:primary, :backup]
end
def passphrase
"wrong pony generator brad"
end
def wallets
@wallets ||= begin
# Needed for wallets.list, wallet operations
client.context.api_token = application.api_token
application.wallets
end
end
def wallet
@wallet ||= begin
# The primary seed is encrypted client-side before it is sent to the server.
primary_seed = new_wallet.trees[:primary].to_serialized_address(:private)
encrypted_seed = PassphraseBox.encrypt(passphrase, primary_seed)
# Must have the authentication token to create a wallet
wallets.create(
:name => "my favorite wallet",
:network => "bitcoin_testnet",
:backup_address => new_wallet.trees[:backup].to_serialized_address,
:primary_address => new_wallet.trees[:primary].to_serialized_address,
:primary_seed => encrypted_seed
)
end
end
def accounts
@accounts ||= wallet.accounts
end
def account
@account ||= accounts.create :name => "office supplies"
end
def addresses
@addresses ||= account.addresses
end
def incoming_address
@incoming_address ||= addresses.create
end
def payee
@payee ||= begin
payee = Bitcoin::Key.new
payee.generate
payee
end
end
######################################################################
# Test API discovery
######################################################################
describe "BitVault API discovery" do
# N.B.: The tests reflect the API even when we know, e.g. that the function
# exists because we called it in the code above.
specify "expected class actions" do
assert_respond_to BitVault::Client, :discover
end
specify "correct class" do
assert_kind_of BitVault::Client, BV
end
specify "expected actions" do
assert_respond_to BV, :spawn
end
end
######################################################################
# Test client creation
######################################################################
describe "client" do
specify "correct class" do
assert_kind_of Patchboard::Client, client
end
specify "expected actions" do
[:resources, :context].each do |method|
assert_respond_to client, method
end
end
end
######################################################################
# Test client context
######################################################################
describe "client.context" do
specify "expected actions" do
[:authorizer].each do |method|
assert_respond_to context, method
end
# These are not required according to client_usage.rb, but exist in the
# code
[:password, :api_token, :inspect].each do |method|
assert_respond_to context, method
end
end
end
######################################################################
# Test client resources
######################################################################
describe "client.resources" do
specify "expected actions" do
assert_respond_to resources, :users
end
end
######################################################################
# Test users resource
######################################################################
describe "client.resources.users" do
specify "expected actions" do
assert_respond_to client.resources.users, :create
end
end
######################################################################
# Test users.create
######################################################################
describe "users.create" do
specify "correct type" do
assert_kind_of Resources::User, user
end
specify "expected actions" do
[:get, :update, :reset].each do |method|
assert_respond_to user, method
end
# TODO: add tests of each method
assert_kind_of Resources::User, user.get
end
specify "expected attributes" do
[:email, :first_name, :last_name, :applications].each do |method|
assert_respond_to user, method
end
end
end
######################################################################
# Test applications resource
######################################################################
describe "applications" do
specify "user.applications is a resource" do
assert_kind_of Resources::Applications, applications
end
specify "expected actions" do
[:create, :list].each do |method|
assert_respond_to applications, method
end
end
end
######################################################################
# Test applications methods
######################################################################
describe "test applications.create, applications.list" do
specify "correct type" do
application_list.each do |app|
assert_kind_of Resources::Application, app
end
# Here so that we know that the applications have been created
# the below is for the future, it won't work while the server
# is returning mock data.
#assert_equal applications.list.length, application_list.length
assert_equal applications.list.length, 1
applications.list.each do |app|
assert_kind_of Resources::Application, app
end
end
end
######################################################################
# Test application methods
######################################################################
describe "test application methods" do
specify "expected actions" do
[:get, :update, :reset, :delete].each do |method|
application_list.each do |app|
assert_respond_to app, method
end
end
# TODO: test each method
application_list.each do |app|
assert_kind_of Resources::Application, app.get
end
end
specify "expected attributes" do
[:name, :api_token, :owner, :wallets, :callback_url].each do |method|
application_list.each do |app|
assert_respond_to app, method
end
end
end
specify "test application.update" do
application_list.each do |app|
app.update(:name => app.name + "-updated")
end
# TODO: after mock-data, check that the names are changed
end
specify "test application.reset" do
# No actual reset with mock data
application_list.each do |app|
reset = app.reset
assert_kind_of Resources::Application, reset
assert_respond_to reset, :api_token
refute_equal reset.api_token, app.api_token
end
end
specify "test application.delete" do
# No actual reset with mock data
application_list.each do |app|
app.delete
end
# TODO: after mock-data, test that they were deleted
end
end
######################################################################
# Test wallets resource
######################################################################
describe "test application.wallets" do
specify "correct type" do
assert_kind_of Resources::Wallets, wallets
end
specify "expected actions" do
[:create, :list].each do |method|
assert_respond_to wallets, method
end
end
end
######################################################################
# Test MultiWallet creation
######################################################################
describe "test MultiWallet creation" do
specify "MultiWallet.generate" do
assert_kind_of MultiWallet, new_wallet
end
end
######################################################################
# Test wallet creation
######################################################################
describe "test wallet creation" do
specify "correct type" do
assert_kind_of Resources::Wallet, wallet
end
specify "expected actions" do
[:get].each do |method|
assert_respond_to wallet, method
end
# TODO: test each method
assert_kind_of Resources::Wallet, wallet.get
end
specify "test wallets.list" do
assert_equal wallets.list.length, 1
# NOTE(review): the block parameter shadows the `wallet` helper method;
# works, but consider renaming to avoid confusion.
wallets.list.each do |wallet|
assert_kind_of Resources::Wallet, wallet
end
end
end
######################################################################
# Test accounts resource
######################################################################
describe "test wallet.accounts resource" do
specify "correct type" do
assert_kind_of Resources::Accounts, accounts
end
specify "expected actions" do
[:create, :list].each do |method|
assert_respond_to accounts, method
end
end
end
######################################################################
# Test account creation
######################################################################
# Account creation: type, available actions, and listing.
describe "test account creation" do
  specify "correct type" do
    assert_kind_of Resources::Account, account
  end
  specify "expected actions" do
    [:get, :update].each do |method|
      assert_respond_to account, method
    end
    # TODO: test each method
    assert_kind_of Resources::Account, account.get
    assert_kind_of Resources::Account, account.update(:name => "rubber bands")
  end
  specify "accounts.list" do
    assert_equal accounts.list.length, 1
    accounts.list.each do |acct|
      # BUG FIX: previously asserted `account` (the memoized helper) instead
      # of the block parameter, so listed items were never actually checked.
      assert_kind_of Resources::Account, acct
    end
  end
end
######################################################################
# Test addresses resource
######################################################################
describe "test account.addresses resource" do
specify "correct type" do
assert_kind_of Resources::Addresses, addresses
end
specify "expected actions" do
[:create].each do |method|
assert_respond_to addresses, method
end
end
end
######################################################################
# Test address creation
######################################################################
describe "test address creation" do
specify "correct type" do
# NOTE(review): addresses.create currently yields a bare Hashie::Mash,
# not a typed resource — confirm this is intended.
assert_kind_of Hashie::Mash, incoming_address
end
end
######################################################################
# Test payee creation
######################################################################
describe "test payee creation" do
specify "correct type" do
assert_kind_of Bitcoin::Key, payee
end
specify "expected actions" do
[:addr].each do |method|
assert_respond_to payee, method
end
end
end
end
Test payments resource
require_relative "setup"
# Why must this be here at global scope?
# NOTE(review): API discovery happens at load time against a hard-coded local
# dev endpoint; the block supplies the auth Context used by every spec below.
BV = BitVault::Client.discover("http://localhost:8999/") { BitVault::Client::Context.new }
# Shorthand aliases used throughout the specs.
Resources = BitVault::Client::Resources
PassphraseBox = BitVault::Crypto::PassphraseBox
MultiWallet = BitVault::Bitcoin::MultiWallet
describe "Using the BitVault API" do
######################################################################
# Cached access to various test objects
######################################################################
=begin
def BV
@BV ||= BitVault::Client.discover("http://localhost:8999/") {
BitVault::Client::Context.new
}
end
=end
# ----------------------------------------------------------------------
# Memoized helpers. Each lazily builds the next object in the API graph:
# client -> user -> application -> wallet -> account -> address/payment.
# Specs below depend on this construction order and on memoization
# (each object is created exactly once per run).
# ----------------------------------------------------------------------
def client
@client ||= begin
client = BV.spawn
client.context.password = "incredibly secure"
client
end
end
def context
@context ||= client.context
end
def resources
@resources ||= client.resources
end
def users
@users ||= resources.users
end
def user
@user ||= users.create(
:email => "matthew@bitvault.io",
:first_name => "Matthew",
:last_name => "King",
:password => "incredibly_secure"
)
end
def applications
@applications ||= user.applications
end
def application_names
# This won't actually work while we're returning only mock data
["bitcoin-emporium", "bitcoin-extravaganza", "bitcoins-r-us"]
end
def application_list
@application_list ||= application_names.map do |name|
applications.create(
:name => name,
:callback_url => "https://api.#{name}.io/events"
)
end
end
def application
application_list[0]
end
# Client-side multi-tree wallet (primary + backup key trees).
def new_wallet
@new_wallet ||= MultiWallet.generate [:primary, :backup]
end
def passphrase
"wrong pony generator brad"
end
def wallets
@wallets ||= begin
# Needed for wallets.list, wallet operations
client.context.api_token = application.api_token
application.wallets
end
end
def wallet
@wallet ||= begin
# The primary seed is encrypted client-side before it is sent to the server.
primary_seed = new_wallet.trees[:primary].to_serialized_address(:private)
encrypted_seed = PassphraseBox.encrypt(passphrase, primary_seed)
# Must have the authentication token to create a wallet
wallets.create(
:name => "my favorite wallet",
:network => "bitcoin_testnet",
:backup_address => new_wallet.trees[:backup].to_serialized_address,
:primary_address => new_wallet.trees[:primary].to_serialized_address,
:primary_seed => encrypted_seed
)
end
end
def accounts
@accounts ||= wallet.accounts
end
def account
@account ||= accounts.create :name => "office supplies"
end
def addresses
@addresses ||= account.addresses
end
def incoming_address
@incoming_address ||= addresses.create
end
def payee
@payee ||= begin
payee = Bitcoin::Key.new
payee.generate
payee
end
end
def payments
@payments ||= account.payments
end
# Bitcoin address string of the locally generated payee key.
def payee_address
@payee_address ||= payee.addr
end
######################################################################
# Test API discovery
######################################################################
describe "BitVault API discovery" do
# N.B.: The tests reflect the API even when we know, e.g. that the function
# exists because we called it in the code above.
specify "expected class actions" do
assert_respond_to BitVault::Client, :discover
end
specify "correct class" do
assert_kind_of BitVault::Client, BV
end
specify "expected actions" do
assert_respond_to BV, :spawn
end
end
######################################################################
# Test client creation
######################################################################
describe "client" do
specify "correct class" do
assert_kind_of Patchboard::Client, client
end
specify "expected actions" do
[:resources, :context].each do |method|
assert_respond_to client, method
end
end
end
######################################################################
# Test client context
######################################################################
describe "client.context" do
specify "expected actions" do
[:authorizer].each do |method|
assert_respond_to context, method
end
# These are not required according to client_usage.rb, but exist in the
# code
[:password, :api_token, :inspect].each do |method|
assert_respond_to context, method
end
end
end
######################################################################
# Test client resources
######################################################################
describe "client.resources" do
specify "expected actions" do
assert_respond_to resources, :users
end
end
######################################################################
# Test users resource
######################################################################
describe "client.resources.users" do
specify "expected actions" do
assert_respond_to client.resources.users, :create
end
end
######################################################################
# Test users.create
######################################################################
describe "users.create" do
specify "correct type" do
assert_kind_of Resources::User, user
end
specify "expected actions" do
[:get, :update, :reset].each do |method|
assert_respond_to user, method
end
# TODO: add tests of each method
assert_kind_of Resources::User, user.get
end
specify "expected attributes" do
[:email, :first_name, :last_name, :applications].each do |method|
assert_respond_to user, method
end
end
end
######################################################################
# Test applications resource
######################################################################
describe "applications" do
specify "user.applications is a resource" do
assert_kind_of Resources::Applications, applications
end
specify "expected actions" do
[:create, :list].each do |method|
assert_respond_to applications, method
end
end
end
######################################################################
# Test applications methods
######################################################################
describe "test applications.create, applications.list" do
specify "correct type" do
application_list.each do |app|
assert_kind_of Resources::Application, app
end
# Here so that we know that the applications have been created
# the below is for the future, it won't work while the server
# is returning mock data.
#assert_equal applications.list.length, application_list.length
assert_equal applications.list.length, 1
applications.list.each do |app|
assert_kind_of Resources::Application, app
end
end
end
######################################################################
# Test application methods
######################################################################
describe "test application methods" do
specify "expected actions" do
[:get, :update, :reset, :delete].each do |method|
application_list.each do |app|
assert_respond_to app, method
end
end
# TODO: test each method
application_list.each do |app|
assert_kind_of Resources::Application, app.get
end
end
specify "expected attributes" do
[:name, :api_token, :owner, :wallets, :callback_url].each do |method|
application_list.each do |app|
assert_respond_to app, method
end
end
end
specify "test application.update" do
application_list.each do |app|
app.update(:name => app.name + "-updated")
end
# TODO: after mock-data, check that the names are changed
end
specify "test application.reset" do
# No actual reset with mock data
application_list.each do |app|
reset = app.reset
assert_kind_of Resources::Application, reset
assert_respond_to reset, :api_token
refute_equal reset.api_token, app.api_token
end
end
specify "test application.delete" do
# No actual reset with mock data
application_list.each do |app|
app.delete
end
# TODO: after mock-data, test that they were deleted
end
end
######################################################################
# Test wallets resource
######################################################################
describe "test application.wallets" do
specify "correct type" do
assert_kind_of Resources::Wallets, wallets
end
specify "expected actions" do
[:create, :list].each do |method|
assert_respond_to wallets, method
end
end
end
######################################################################
# Test MultiWallet creation
######################################################################
describe "test MultiWallet creation" do
specify "MultiWallet.generate" do
assert_kind_of MultiWallet, new_wallet
end
end
######################################################################
# Test wallet creation
######################################################################
describe "test wallet creation" do
specify "correct type" do
assert_kind_of Resources::Wallet, wallet
end
specify "expected actions" do
[:get].each do |method|
assert_respond_to wallet, method
end
# TODO: test each method
assert_kind_of Resources::Wallet, wallet.get
end
specify "test wallets.list" do
assert_equal wallets.list.length, 1
# NOTE(review): the block parameter shadows the `wallet` helper method;
# works, but consider renaming to avoid confusion.
wallets.list.each do |wallet|
assert_kind_of Resources::Wallet, wallet
end
end
end
######################################################################
# Test accounts resource
######################################################################
describe "test wallet.accounts resource" do
specify "correct type" do
assert_kind_of Resources::Accounts, accounts
end
specify "expected actions" do
[:create, :list].each do |method|
assert_respond_to accounts, method
end
end
end
######################################################################
# Test account creation
######################################################################
# Account creation: type, available actions, and listing.
describe "test account creation" do
  specify "correct type" do
    assert_kind_of Resources::Account, account
  end
  specify "expected actions" do
    [:get, :update].each do |method|
      assert_respond_to account, method
    end
    # TODO: test each method
    assert_kind_of Resources::Account, account.get
    assert_kind_of Resources::Account, account.update(:name => "rubber bands")
  end
  specify "accounts.list" do
    assert_equal accounts.list.length, 1
    accounts.list.each do |acct|
      # BUG FIX: previously asserted `account` (the memoized helper) instead
      # of the block parameter, so listed items were never actually checked.
      assert_kind_of Resources::Account, acct
    end
  end
end
######################################################################
# Test addresses resource
######################################################################
describe "test account.addresses resource" do
specify "correct type" do
assert_kind_of Resources::Addresses, addresses
end
specify "expected actions" do
[:create].each do |method|
assert_respond_to addresses, method
end
end
end
######################################################################
# Test address creation
######################################################################
describe "test address creation" do
specify "correct type" do
# NOTE(review): addresses.create currently yields a bare Hashie::Mash,
# not a typed resource — confirm this is intended.
assert_kind_of Hashie::Mash, incoming_address
end
end
######################################################################
# Test payee creation
######################################################################
describe "test payee creation" do
specify "correct type" do
assert_kind_of Bitcoin::Key, payee
end
specify "expected actions" do
[:addr].each do |method|
assert_respond_to payee, method
end
end
end
######################################################################
# Test payments resource
######################################################################
describe "test payments resource" do
specify "correct type" do
assert_kind_of Resources::Payments, payments
end
specify "expected actions" do
[:create].each do |method|
assert_respond_to payments, method
end
end
end
end
|
class Service::Sprintly < Service
# Push handler: forwards each pushed commit, serialized as JSON, to
# sprint.ly's GitHub integration endpoint using HTTP basic auth
# (sprint.ly username + API key from the service configuration).
def receive_push
if data['api_key'].to_s.empty?
raise_config_error "Must provide an api key"
end
if data['username'].to_s.empty?
raise_config_error "Must provide a sprint.ly username"
end
# @@@ Auth? username + api_key?
http.basic_auth(data['username'], data['api_key'])
http.headers['Content-Type'] = 'application/json'
http.url_prefix = "https://sprint.ly/integration/github/" # @@@
payload['commits'].each do |commit|
# POST https://url_prefix/api_key?
http_post data['api_key'], commit.to_json
end
end
end
Closer to a more realistic thing I might actually consider deploying. Refs #3101
class Service::Sprintly < Service
  # Subscribe to (almost) every GitHub event type; sprint.ly decides
  # server-side which events it cares about.
  default_events :commit_comment, :create, :delete, :download,
    :follow, :fork, :fork_apply, :gist, :gollum, :issue_comment,
    :issues, :member, :public, :pull_request, :push, :team_add,
    :watch, :pull_request_review_comment, :status

  string :email, :api_key, :product_id
  white_list :email, :product_id

  # Forwards the raw event payload, as JSON, to sprint.ly's GitHub
  # integration endpoint for the configured product, authenticating with
  # email + API key.
  def receive_event
    raise_config_error "Must provide an api key" if data['api_key'].to_s.empty?
    raise_config_error "Must provide an email address." if data['email'].to_s.empty?
    raise_config_error "Must provide a product id." if data['product_id'].to_s.empty?

    http.headers['Content-Type'] = 'application/json'
    http.basic_auth(data['email'], data['api_key'])
    # BUG FIX: `product_id` was referenced as a bare method, but service
    # configuration values live in the `data` hash (no accessor is generated
    # for `string` fields) — this would raise NameError at runtime. `event`
    # is a real Service attribute and stays as-is.
    http.url_prefix = "https://sprint.ly/integration/github/#{data['product_id']}/#{event}/"
    http_post data['api_key'], payload.to_json
  end
end
|
Create hashes.rb
###
# type to run:
# $ ruby ./hashes.rb

require "digest"

# Returns the hex-encoded SHA-256 digest of the given message string.
def sha256(msg)
  Digest::SHA256.hexdigest(msg)
end

# Demo: hash "Hello, world!<nonce>" for a few nonces. Note how nonce 4250
# yields a digest with four leading zero hex digits (proof-of-work style).
["Hello, world!0", "Hello, world!1", "Hello, world!2"].each { |m| p sha256(m) }
#=> "1312af178c253f84028d480a6adc1e25e81caa44c749ec81976192e2ec934c64"
#=> "e9afc424b79e4f6ab42d99c81156d3a17228d6e1eef4139be78e948a9332a7d8"
#=> "ae37343a357a8297591625e7134cbea22f5928be8ca2a32aa475cf05fd4266b7"
# ...
["Hello, world!4248", "Hello, world!4249", "Hello, world!4250"].each { |m| p sha256(m) }
#=> "6e110d98b388e77e9c6f042ac6b497cec46660deef75a55ebc7cfdf65cc0b965"
#=> "c004190b822f1669cac8dc37e761cb73652e7832fb814565702245cf26ebb9e6"
#=> "0000c3af42fc31103f1fdc0151fa747ff87349a4714df7cc52ea464e12dcd4e9"
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'greek_stemmer/version'

# Gem packaging metadata for greek_stemmer.
Gem::Specification.new do |spec|
  spec.name          = "greek_stemmer"
  spec.version       = GreekStemmer::VERSION
  spec.authors       = ["Tasos Stathopoulos", "Giorgos Tsiftsis"]
  spec.email         = ["stathopa@skroutz.gr", "giorgos.tsiftsis@skroutz.gr"]
  spec.summary       = %q{A simple Greek stemmer}
  spec.description   = %q{A simple Greek stemmer}
  spec.homepage      = "https://github.com/skroutz/greek_stemmer"
  spec.license       = "MIT"

  spec.files = `git ls-files`.split($/)
  # BUG FIX: `reject! { Dir['benchmarks'] }` ignored the block argument, and
  # since any Array (even []) is truthy in Ruby it rejected EVERY file,
  # producing an empty gem. Exclude only the benchmark files themselves.
  spec.files.reject! { |f| f.start_with?("benchmarks/") }
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"

  # pry-byebug needs Ruby >= 2.0; fall back to plain pry on older rubies.
  if RUBY_VERSION >= "2.0.0"
    spec.add_development_dependency "pry-byebug"
  else
    spec.add_development_dependency "pry"
  end
end
Comment rejecting bench files
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'greek_stemmer/version'
# Gem packaging metadata for greek_stemmer.
Gem::Specification.new do |spec|
spec.name = "greek_stemmer"
spec.version = GreekStemmer::VERSION
spec.authors = ["Tasos Stathopoulos", "Giorgos Tsiftsis"]
spec.email = ["stathopa@skroutz.gr", "giorgos.tsiftsis@skroutz.gr"]
spec.summary = %q{A simple Greek stemmer}
spec.description = %q{A simple Greek stemmer}
spec.homepage = "https://github.com/skroutz/greek_stemmer"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
# NOTE(review): disabled because the block ignored its argument and rejected
# every file; if benchmarks must be excluded, use
# `spec.files.reject! { |f| f.start_with?("benchmarks/") }` instead.
# spec.files.reject! { Dir['benchmarks'] }
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec"
# pry-byebug needs Ruby >= 2.0; fall back to plain pry on older rubies.
if RUBY_VERSION >= "2.0.0"
spec.add_development_dependency "pry-byebug"
else
spec.add_development_dependency "pry"
end
end
|
Bump to version 0.6.0
This mainly enables the experimental feature that uses the Co-Authored-By trailer instead of Signed-Off-By.
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'alephant/lookup/version'

# Gem packaging metadata for alephant-lookup.
Gem::Specification.new do |spec|
  spec.name          = "alephant-lookup"
  spec.version       = Alephant::Lookup::VERSION
  spec.authors       = ["Robert Kenny"]
  spec.email         = ["kenoir@gmail.com"]
  # BUG FIX: summary/description were generator "TODO" placeholders and the
  # homepage was empty — `gem build` rejects gemspecs whose summary still
  # starts with "TODO". Filled in with the project's real metadata.
  spec.summary       = %q{Lookup a location in S3 using DynamoDB.}
  spec.description   = %q{Lookup a location in S3 using DynamoDB.}
  spec.homepage      = "https://github.com/BBC-News/alephant-lookup"
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "rspec"
  spec.add_development_dependency "rspec-nc"
  spec.add_development_dependency "guard"
  spec.add_development_dependency "guard-rspec"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "pry-remote"
  spec.add_development_dependency "pry-nav"
  spec.add_development_dependency "bundler", "~> 1.5"
  spec.add_development_dependency "rake"
end
updates gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'alephant/lookup/version'
# Gem packaging metadata for alephant-lookup.
Gem::Specification.new do |spec|
spec.name = "alephant-lookup"
spec.version = Alephant::Lookup::VERSION
spec.authors = ["Robert Kenny"]
spec.email = ["kenoir@gmail.com"]
spec.summary = %q{Lookup a location in S3 using DynamoDB.}
spec.homepage = "https://github.com/BBC-News/alephant-lookup"
spec.license = "MIT"
# -z/"\x0" split handles file names containing spaces or newlines.
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "rspec"
spec.add_development_dependency "rspec-nc"
spec.add_development_dependency "guard"
spec.add_development_dependency "guard-rspec"
spec.add_development_dependency "pry"
spec.add_development_dependency "pry-remote"
spec.add_development_dependency "pry-nav"
spec.add_development_dependency "bundler", "~> 1.5"
spec.add_development_dependency "rake"
end
|
Fixed gemspec
|
add de
|
# Reopens Boson::Scientist so command output can be turned into an
# interactive Hirb menu (row selection + follow-up command invocation)
# when the :menu global option is present.
module ::Boson::Scientist
  alias_method :_render_or_raw, :render_or_raw

  # Renders +result+ as an interactive menu when @global_options contains
  # :menu (returning nil); otherwise delegates to the original method.
  def render_or_raw(result)
    if (menu_options = @global_options.delete(:menu))
      # Layer the per-command :menu config (if readable) under the CLI options.
      menu_options = ((@command.config[:menu] || {}) rescue {}).merge menu_options
      filters = @global_options.delete(:filters)
      # :return_rows=>true makes AutoTable hand back the row hashes it rendered.
      new_result = ::Hirb::Helpers::AutoTable.render(result, @global_options.merge(:return_rows=>true))
      Menu.run(new_result, menu_options.merge(:filters=>filters, :items=>result), @global_options)
      nil
    else
      # @global_options[:render] = true
      _render_or_raw(result)
    end
  end

  # Interactive menu: prompts the user to pick rows and a command, then
  # invokes that command with the chosen values substituted in.
  class Menu
    require 'shellwords'
    # Token that selects rows: a leading digit, optional non-colon chars
    # (e.g. ranges like "1-3"), then an optional ":field" suffix.
    # NOTE(review): "(?:[^:]+)?" triggers a "nested repeat operator" regexp
    # warning on some rubies; a later revision drops the trailing "?".
    CHOSEN_REGEXP = /^(\d(?:[^:]+)?)(?::)?(\S+)?/
    # Options a user may embed in a menu input line.
    OPTIONS = {:default_field=>:string, :shell=>:boolean, :pretend=>:boolean, :once=>:boolean, :help=>:boolean,
      :multi=>:boolean, :object=>:boolean, :command=>:string, :args=>:string, :splat=>:boolean}

    # Convenience: build a Menu and immediately run it.
    def self.run(items, options, global_options)
      new(items, options, global_options).run
    end

    # Memoized option parser for OPTIONS.
    def self.option_parser
      @option_parser ||= ::Boson::OptionParser.new OPTIONS
    end

    # items:: rendered rows shown to the user
    # options:: menu defaults, re-merged with per-input options on each parse
    # global_options:: command-level options (:fields etc.)
    def initialize(items, options, global_options)
      @items, @default_options, @global_options = items, options, global_options
      @options = @default_options.dup
      @is_hash = items[0].is_a?(Hash)
      # Field list preference: :fields, then :change_fields, then the first
      # row's hash keys, falling back to :to_s for plain objects.
      @fields = @global_options[:fields] ? @global_options[:fields] :
        @global_options[:change_fields] ? @global_options[:change_fields] :
        items[0].is_a?(Hash) ? items[0].keys : [:to_s]
    end

    # One prompt/invoke cycle, or a repeat-until-'q' loop with :shell.
    def run
      input = get_input
      if @options[:shell]
        while input != 'q'
          parse_and_invoke input
          input = get_input
        end
      else
        parse_and_invoke input
      end
    end

    # Prompts via Hirb's menu helper and returns the raw input line.
    def get_input
      prompt = @options[:object] ? "Choose objects: " : "Default field: #{default_field}\nChoose rows: "
      ::Boson.invoke(:menu, @items, :return_input=>true, :fields=>@fields, :prompt=>prompt, :readline=>true)
    end

    # Parses one input line; prints usage for :help, otherwise invokes the
    # command once (:once) or once per chosen value (default).
    def parse_and_invoke(input)
      cmd, *args = parse_input(input)
      if @options[:help]
        self.class.option_parser.print_usage_table
      else
        @options[:once] ? invoke(cmd, args) : args.flatten.each {|e| invoke(cmd, [e]) }
      end
    end

    # Shell-splits the input, re-parses menu options, and dispatches to the
    # multi or template parser. Returns nil when :help was requested.
    def parse_input(input)
      args = Shellwords.shellwords(input)
      @options = @default_options.merge self.class.option_parser.parse(args, :opts_before_args=>true)
      return if @options[:help]
      @options[:multi] ? parse_multi(args) : parse_template(args)
    end

    # Invokes cmd through Boson, splatting args when the indexed command
    # takes splat args; :pretend only prints the would-be call.
    def invoke(cmd, args)
      @options[:splat] = true if ::Boson::Index.read && (cmd_obj = ::Boson::Index.find_command(cmd)) &&
        cmd_obj.has_splat_args?
      if @options[:pretend]
        puts "#{cmd} #{@options[:splat] ? '*' : ''}#{args.inspect}"
      else
        output = @options[:splat] ? (::Boson.full_invoke cmd, args.flatten) : ::Boson.full_invoke(cmd, args)
        unless ::Boson::View.silent_object?(output)
          opts = output.is_a?(String) ? {:method=>'puts'} : {:inspect=>!output.is_a?(Array) }
          ::Boson::View.render(output, opts)
        end
      end
    end

    # Expands "%s"/":field" placeholders in the arg template and maps each
    # chosen row through it, producing one formatted string per row.
    def process_template(args)
      return args if @options[:object]
      template = @options[:args] && args.size <= 1 ? @options[:args] : args.join(' ')
      if template.empty?
        template_args = [default_field]
        template = "%s"
      else
        template_args = []
        template.gsub!(/%s|:\w+/) {|e|
          template_args << (e == '%s' ? default_field : unalias_field(e[/\w+/]))
          "%s"
        }
      end
      Array(@chosen).map {|e| sprintf(template, *template_args.map {|field| map_item(e, field) }) }
    end

    # Replaces the first row-choosing token with a placeholder (or, in
    # :object mode, the chosen rows), extracts the command name, and fills
    # the remaining args through process_template.
    def parse_template(args)
      args = args.map do |word|
        if word[CHOSEN_REGEXP] && !@seen
          field = $2 ? ":#{unalias_field($2)}" : '%s'
          @chosen = ::Hirb::Util.choose_from_array(items, $1)
          @seen = true
          @options[:object] ? @chosen : field
        else
          word
        end
      end
      # Command comes from the :command option when only the selection token
      # was given, otherwise from the first remaining arg.
      cmd = args.size == 1 ? @options[:command] : args.shift
      raise "No command given" unless cmd
      [cmd] + process_template(args)
    end

    # doesn't work w/ :object
    # Expands every row-choosing token in place into the mapped field
    # values of the chosen rows.
    def parse_multi(args)
      args.map {|word|
        if word[CHOSEN_REGEXP]
          field = $2 ? unalias_field($2) : default_field
          ::Hirb::Util.choose_from_array(items, $1).map {|e| map_item(e, field) }
        else
          word
        end
      }.flatten
    end

    # Source rows: original result objects in :object mode, else rendered rows.
    def items
      @options[:object] ? @default_options[:items] : @items
    end

    # Field used when no explicit ":field" suffix is given.
    def default_field
      @options[:default_field] ? unalias_field(@options[:default_field]) : @fields[0]
    end

    # Reads +field+ off a row (hash lookup or method send).
    def map_item(obj, field)
      @is_hash ? obj[field] : obj.send(field)
    end

    # Resolves a field abbreviation to the first field it prefixes.
    def unalias_field(field)
      @fields.sort_by {|e| e.to_s }.find {|e| e.to_s[/^#{field}/] } || field
    end
  end
end
# Register the :menu pipe option (alias -m) so commands can opt into the
# interactive menu; accepted hash keys mirror Menu::OPTIONS.
::Boson::OptionCommand::PIPE_OPTIONS[:menu] = { :bool_default=>{},
  :alias=>['m'], :type=>:hash, :keys=>::Boson::Scientist::Menu::OPTIONS.keys
}
menu fix for regex warning: nested repeat operator
# Reopens Boson::Scientist to render command output as an interactive
# Hirb menu when the :menu global option is present.
# (Revision of the same plugin with the regexp warning fixed.)
module ::Boson::Scientist
  alias_method :_render_or_raw, :render_or_raw

  # With :menu, render a table, run Menu over it and return nil;
  # otherwise delegate to the original render_or_raw.
  def render_or_raw(result)
    if (menu_options = @global_options.delete(:menu))
      # Per-command :menu config (if readable) merged under CLI options.
      menu_options = ((@command.config[:menu] || {}) rescue {}).merge menu_options
      filters = @global_options.delete(:filters)
      new_result = ::Hirb::Helpers::AutoTable.render(result, @global_options.merge(:return_rows=>true))
      Menu.run(new_result, menu_options.merge(:filters=>filters, :items=>result), @global_options)
      nil
    else
      # @global_options[:render] = true
      _render_or_raw(result)
    end
  end

  # Interactive row-choosing menu; see the method comments below.
  class Menu
    require 'shellwords'
    # Row-selection token: digit plus at least one more non-colon char,
    # then an optional ":field" suffix. The trailing "?" was removed to
    # silence a "nested repeat operator" warning.
    # NOTE(review): as written, a bare single-digit token like "1" no
    # longer matches — confirm this is intended.
    CHOSEN_REGEXP = /^(\d(?:[^:]+))(?::)?(\S+)?/
    # Options a user may embed in a menu input line.
    OPTIONS = {:default_field=>:string, :shell=>:boolean, :pretend=>:boolean, :once=>:boolean, :help=>:boolean,
      :multi=>:boolean, :object=>:boolean, :command=>:string, :args=>:string, :splat=>:boolean}

    # Build a Menu and immediately run it.
    def self.run(items, options, global_options)
      new(items, options, global_options).run
    end

    # Memoized option parser for OPTIONS.
    def self.option_parser
      @option_parser ||= ::Boson::OptionParser.new OPTIONS
    end

    # items:: rendered rows; options:: menu defaults; global_options::
    # command-level options (:fields etc.).
    def initialize(items, options, global_options)
      @items, @default_options, @global_options = items, options, global_options
      @options = @default_options.dup
      @is_hash = items[0].is_a?(Hash)
      # :fields, then :change_fields, then hash keys, then :to_s.
      @fields = @global_options[:fields] ? @global_options[:fields] :
        @global_options[:change_fields] ? @global_options[:change_fields] :
        items[0].is_a?(Hash) ? items[0].keys : [:to_s]
    end

    # One prompt/invoke cycle, or a repeat-until-'q' loop with :shell.
    def run
      input = get_input
      if @options[:shell]
        while input != 'q'
          parse_and_invoke input
          input = get_input
        end
      else
        parse_and_invoke input
      end
    end

    # Prompts via Hirb's menu helper; returns the raw input line.
    def get_input
      prompt = @options[:object] ? "Choose objects: " : "Default field: #{default_field}\nChoose rows: "
      ::Boson.invoke(:menu, @items, :return_input=>true, :fields=>@fields, :prompt=>prompt, :readline=>true)
    end

    # Parse one line; print usage for :help, else invoke once (:once) or
    # once per chosen value.
    def parse_and_invoke(input)
      cmd, *args = parse_input(input)
      if @options[:help]
        self.class.option_parser.print_usage_table
      else
        @options[:once] ? invoke(cmd, args) : args.flatten.each {|e| invoke(cmd, [e]) }
      end
    end

    # Shell-split, re-parse options, dispatch to multi/template parser;
    # nil when :help was requested.
    def parse_input(input)
      args = Shellwords.shellwords(input)
      @options = @default_options.merge self.class.option_parser.parse(args, :opts_before_args=>true)
      return if @options[:help]
      @options[:multi] ? parse_multi(args) : parse_template(args)
    end

    # Invoke cmd through Boson, splatting when the indexed command takes
    # splat args; :pretend only prints the would-be call.
    def invoke(cmd, args)
      @options[:splat] = true if ::Boson::Index.read && (cmd_obj = ::Boson::Index.find_command(cmd)) &&
        cmd_obj.has_splat_args?
      if @options[:pretend]
        puts "#{cmd} #{@options[:splat] ? '*' : ''}#{args.inspect}"
      else
        output = @options[:splat] ? (::Boson.full_invoke cmd, args.flatten) : ::Boson.full_invoke(cmd, args)
        unless ::Boson::View.silent_object?(output)
          opts = output.is_a?(String) ? {:method=>'puts'} : {:inspect=>!output.is_a?(Array) }
          ::Boson::View.render(output, opts)
        end
      end
    end

    # Expand "%s"/":field" placeholders and map each chosen row through
    # the template, one formatted string per row.
    def process_template(args)
      return args if @options[:object]
      template = @options[:args] && args.size <= 1 ? @options[:args] : args.join(' ')
      if template.empty?
        template_args = [default_field]
        template = "%s"
      else
        template_args = []
        template.gsub!(/%s|:\w+/) {|e|
          template_args << (e == '%s' ? default_field : unalias_field(e[/\w+/]))
          "%s"
        }
      end
      Array(@chosen).map {|e| sprintf(template, *template_args.map {|field| map_item(e, field) }) }
    end

    # Replace the first selection token with a placeholder (or chosen
    # objects), extract the command, fill the template.
    def parse_template(args)
      args = args.map do |word|
        if word[CHOSEN_REGEXP] && !@seen
          field = $2 ? ":#{unalias_field($2)}" : '%s'
          @chosen = ::Hirb::Util.choose_from_array(items, $1)
          @seen = true
          @options[:object] ? @chosen : field
        else
          word
        end
      end
      cmd = args.size == 1 ? @options[:command] : args.shift
      raise "No command given" unless cmd
      [cmd] + process_template(args)
    end

    # doesn't work w/ :object
    # Expand every selection token in place into mapped field values.
    def parse_multi(args)
      args.map {|word|
        if word[CHOSEN_REGEXP]
          field = $2 ? unalias_field($2) : default_field
          ::Hirb::Util.choose_from_array(items, $1).map {|e| map_item(e, field) }
        else
          word
        end
      }.flatten
    end

    # Original result objects in :object mode, else rendered rows.
    def items
      @options[:object] ? @default_options[:items] : @items
    end

    # Field used when no explicit ":field" suffix is given.
    def default_field
      @options[:default_field] ? unalias_field(@options[:default_field]) : @fields[0]
    end

    # Read +field+ off a row (hash lookup or method send).
    def map_item(obj, field)
      @is_hash ? obj[field] : obj.send(field)
    end

    # Resolve a field abbreviation to the first field it prefixes.
    def unalias_field(field)
      @fields.sort_by {|e| e.to_s }.find {|e| e.to_s[/^#{field}/] } || field
    end
  end
end
# Register the :menu pipe option (alias -m); accepted hash keys mirror
# Menu::OPTIONS.
::Boson::OptionCommand::PIPE_OPTIONS[:menu] = { :bool_default=>{},
  :alias=>['m'], :type=>:hash, :keys=>::Boson::Scientist::Menu::OPTIONS.keys
}
#! /usr/bin/ruby
require_relative 'helper'
require 'ostruct'
# Test the option parser
# Exercises GitOp construction against a Backend built from a canned
# option hash (mirrors what the CLI option parser would produce).
class GitarroGitop < Minitest::Test
  def test_gitop
    @full_hash = {
      repo: 'openSUSE/gitarro', context: 'python-t',
      description: 'functional', test_file: 'gino.sh',
      file_type: '.sh', git_dir: 'gitty',
      https: true, changed_since: -1
    }
    backend = Backend.new(@full_hash)
    # Stand-in PR object for GitOp's external-repo internals.
    # FIXME: replace with a full mock object.
    fake_pr = 'fake'
    GitOp.new(backend.git_dir, fake_pr, @full_hash)
    puts backend.git_dir
  end
end
Add unit_test for gitop
Fix #76
#! /usr/bin/ruby
require_relative 'helper'
require 'ostruct'
# Test the option parser
# Tests for GitOp: plain construction, and merging into a git dir that
# does not exist yet (it should be created).
class GitarroGitop < Minitest::Test
  def test_gitop
    @full_hash = { repo: 'openSUSE/gitarro', context: 'python-t', description:
                   'functional', test_file: 'gino.sh', file_type: '.sh',
                   git_dir: 'gitty', https: true, changed_since: -1 }
    gb = Backend.new(@full_hash)
    # create a fake object for the internal class / external repo
    # FIXME: this could be improved by creating a full mock obj
    pr = 'fake'
    GitOp.new(gb.git_dir, pr, @full_hash)
    puts gb.git_dir
  end

  def test_gitop_dirnonexist
    @full_hash = { repo: 'openSUSE/gitarro', context: 'python-t', description:
                   'functional', test_file: 'gino.sh', file_type: '.sh',
                   git_dir: '/tmp/gitarro_test',
                   https: true, changed_since: -1 }
    gb = Backend.new(@full_hash)
    # create a fake object for the internal class / external repo
    # FIXME: this could be improved by creating a full mock obj
    pr = 'fake'
    git = GitOp.new(gb.git_dir, pr, @full_hash)
    git.merge_pr_totarget('master', 'master')
    # BUG FIX: assert(true, msg) always passed because the predicate was
    # given as the failure *message*; actually assert the directory exists.
    assert File.directory?(gb.git_dir)
  end
end
|
# Creates the echos table: one row per shared/"echoed" item, owned by a user.
class CreateEchos < ActiveRecord::Migration
  def change
    create_table :echos do |t|
      t.references :user        # owning user (adds user_id column)
      t.string :echo_type       # kind of echo being recorded
      t.text :user_text         # free-form text entered by the user
      t.text :selected_string   # the text selection the echo was made from
      t.string :long_url        # original URL
      t.string :short_url       # shortened URL
    end
  end
end
Echo; divide db by each social media transaction.
# Creates the echos table (revised schema: echo_type replaced by
# sent_to_venue to record each social-media transaction separately).
class CreateEchos < ActiveRecord::Migration
  def change
    create_table :echos do |t|
      t.references :user        # owning user (adds user_id column)
      t.string :sent_to_venue   # venue/service this echo was sent to — presumably a social network name; verify against callers
      t.text :user_text         # free-form text entered by the user
      t.text :selected_string   # the text selection the echo was made from
      t.string :long_url        # original URL
      t.string :short_url       # shortened URL
    end
  end
end
|
#
# ResponseDetective.podspec
#
# Copyright (c) 2016-2017 Netguru Sp. z o.o. All rights reserved.
# Licensed under the MIT License.
#
# CocoaPods spec for ResponseDetective 0.5: identity, source location,
# linking requirements and per-platform deployment targets.
Pod::Spec.new do |spec|
  # Description
  spec.name = 'ResponseDetective'
  spec.version = '0.5'
  spec.summary = 'Sherlock Holmes of the networking layer'
  spec.homepage = 'https://github.com/netguru/ResponseDetective'

  # License
  spec.license = {
    type: 'MIT',
    file: 'LICENSE.md'
  }
  spec.authors = {
    'Adrian Kashivskyy' => 'adrian.kashivskyy@netguru.co',
    'Aleksander Popko' => 'aleksander.popko@netguru.co'
  }

  # Source
  spec.source = {
    git: 'https://github.com/netguru/ResponseDetective.git',
    tag: spec.version.to_s  # release tags match the version string
  }
  spec.source_files = 'Sources'

  # Linking
  spec.frameworks = 'Foundation'
  spec.libraries = 'xml2'  # libxml2 for body deserialization
  spec.ios.frameworks = 'UIKit'
  spec.osx.frameworks = 'AppKit'

  # Settings
  spec.requires_arc = true
  spec.ios.deployment_target = '8.0'
  spec.osx.deployment_target = '10.10'
  spec.tvos.deployment_target = '9.0'
  spec.xcconfig = {
    'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libxml2'
  }
end
update path for source_files
#
# ResponseDetective.podspec
#
# Copyright (c) 2016-2017 Netguru Sp. z o.o. All rights reserved.
# Licensed under the MIT License.
#
# CocoaPods spec for ResponseDetective 0.5: identity, source location,
# linking requirements and per-platform deployment targets.
Pod::Spec.new do |s|
  # Identity metadata.
  s.name     = 'ResponseDetective'
  s.version  = '0.5'
  s.summary  = 'Sherlock Holmes of the networking layer'
  s.homepage = 'https://github.com/netguru/ResponseDetective'

  # Licensing and authorship.
  s.license = { :type => 'MIT', :file => 'LICENSE.md' }
  s.authors = {
    'Adrian Kashivskyy' => 'adrian.kashivskyy@netguru.co',
    'Aleksander Popko' => 'aleksander.popko@netguru.co'
  }

  # Where to fetch the code from (release tags match the version string).
  s.source = {
    :git => 'https://github.com/netguru/ResponseDetective.git',
    :tag => s.version.to_s
  }
  s.source_files = 'ResponseDetective/Sources'

  # Frameworks and libraries to link against.
  s.frameworks     = 'Foundation'
  s.libraries      = 'xml2'
  s.ios.frameworks = 'UIKit'
  s.osx.frameworks = 'AppKit'

  # Build settings and supported platforms.
  s.requires_arc           = true
  s.ios.deployment_target  = '8.0'
  s.osx.deployment_target  = '10.10'
  s.tvos.deployment_target = '9.0'
  s.xcconfig = { 'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libxml2' }
end
|
Renamed service_test to service_gen_class_test to clarify the difference from other service tests
require File.expand_path("test_helper", File.dirname(__FILE__))
require 'fileutils'
# Generates a Ruboto app, adds a Service via `ruboto gen class`, rewrites
# the generated activity/service/test sources, and runs the on-device
# tests. FIX: deprecated File.exists? replaced with File.exist?.
class ServiceGenClassTest < Test::Unit::TestCase
  SRC_DIR = "#{APP_DIR}/src"

  def setup
    generate_app
  end

  def teardown
    cleanup_app
  end

  def test_service_startup
    Dir.chdir APP_DIR do
      system "#{RUBOTO_CMD} gen class Service --name RubotoTestService"

      # Overwrite the generated activity with one that starts the service
      # from a button click. Heredoc bodies are written verbatim to disk.
      activity_filename = "#{SRC_DIR}/ruboto_test_app_activity.rb"
      assert File.exist?(activity_filename)
      File.open(activity_filename, 'w') { |f| f << <<EOF }
require 'ruboto/activity'
require 'ruboto/widget'
require 'ruboto/service'
ruboto_import_widgets :Button, :LinearLayout, :TextView
class RubotoTestAppActivity
def onCreate(bundle)
super
$ruboto_test_app_activity = self
set_title 'Domo arigato, Mr Ruboto!'
self.content_view =
linear_layout :orientation => :vertical do
@text_view = text_view :text => 'What hath Matz wrought?', :id => 42,
:layout => {:width => :fill_parent},
:gravity => android.view.Gravity::CENTER, :text_size => 48.0
button :text => 'M-x butterfly', :layout => {:width => :fill_parent},
:id => 43, :on_click_listener => proc { butterfly }
end
rescue
puts "Exception creating activity: \#{$!}"
puts $!.backtrace.join("\\n")
end
def set_text(text)
@text_view.text = text
end
private
def butterfly
puts 'butterfly'
Thread.start do
begin
start_ruboto_service("RubotoTestService")
rescue Exception
puts "Exception starting the service: \#{$!}"
puts $!.backtrace.join("\\n")
end
end
puts 'butterfly OK'
end
end
EOF

      # Overwrite the generated service with one that updates the activity.
      service_filename = "#{SRC_DIR}/ruboto_test_service.rb"
      assert File.exist?(service_filename)
      File.open(service_filename, 'w') { |f| f << <<EOF }
class RubotoTestService
TARGET_TEXT = 'What hath Matz wrought!'
def onCreate
puts "service onCreate"
Thread.start do
loop do
sleep 1
puts "\#{self.class} running..."
end
end
puts "\#{self.class} started."
$ruboto_test_app_activity.set_title 'onCreate'
android.app.Service::START_STICKY
end
def onStartCommand(intent, flags, start_id)
puts "service on_start_command(\#{intent}, \#{flags}, \#{start_id})"
$ruboto_test_app_activity.set_title 'on_start_command'
$ruboto_test_app_activity.set_text TARGET_TEXT
android.app.Service::START_STICKY
end
end
EOF

      # Overwrite the instrumentation test: clicking the button must make
      # the service change the text view within 60 seconds.
      service_test_filename = "#{APP_DIR}/test/src/ruboto_test_app_activity_test.rb"
      assert File.exist?(service_test_filename)
      File.open(service_test_filename, 'w') { |f| f << <<EOF }
activity Java::org.ruboto.test_app.RubotoTestAppActivity
setup do |activity|
start = Time.now
loop do
@text_view = activity.findViewById(42)
break if @text_view || (Time.now - start > 60)
sleep 1
end
assert @text_view
end
test 'button changes text', :ui => false do |activity|
button = activity.findViewById(43)
puts 'Clicking...'
activity.run_on_ui_thread{button.performClick}
puts 'Clicked!'
start = Time.now
loop do
break if @text_view.text == 'What hath Matz wrought!' || (Time.now - start > 60)
sleep 1
end
assert_equal 'What hath Matz wrought!', @text_view.text
end
EOF
    end
    run_app_tests
  end
end
|
# BareTest suites for Styler: association-to-styler mapping, delegation
# to the model, and rendering via #to_s.
BareTest.suite do
  suite "Styler" do
    suite "Style" do
      suite "ClassMethods" do
        suite "#association" do
          # Two :assoc variants: single association vs. a collection.
          setup :assoc, "a single association" do
            @style = Styler.new_style_for(Model::Foo.new).foo
            @result_class = Style::Bar
          end
          setup :assoc, "an association collection" do
            @style = Styler.new_style_for(Model::Foo.new).fooz
            @result_class = Style::Bar
          end
          assert "it maps models to stylers in :assoc" do
            # Collections are checked element-wise; single styles directly.
            if @style.respond_to? :each
              @style.all? {|style| equal(style.__class__,@result_class)}
            else
              equal(@style.__class__, @result_class)
            end
          end
        end
        suite "#delegate" do
          setup do
            @styler = Styler.new_style_for(Model::Foo.new)
          end
          assert "it delegates to the model" do
            equal :hello, @styler.hello
          end
        end
      end
      suite "InstanceMethods" do
        suite "#to_s" do
          # Each :style setup pairs a style with its expected render tuple
          # of [template paths, assigned instance variables].
          setup :style, "a style" do
            @model = Model::Foo.new
            @style = ::Styler.new_style_for(@model)
            @result = [["style/foo/default"], {:@model => @model, :@type => :default}]
          end
          setup :style, "a complex style" do
            @model = Model::Foo.new
            @style = ::Styler.new_style_for(@model)
            @style.with(:bla => "foo")
            @result = [["style/foo/default"], {:@model => @model, :@type => :default, :@bla => "foo"}]
          end
          assert "it renders correctly" do
            equal(@result, @style.to_s)
          end
        end
      end
    end
  end
end
added a style with context to tests
# BareTest suites for Styler, extended with a "style with context" case:
# an associated style should inherit the parent's extra variables.
BareTest.suite do
  suite "Styler" do
    suite "Style" do
      suite "ClassMethods" do
        suite "#association" do
          # Two :assoc variants: single association vs. a collection.
          setup :assoc, "a single association" do
            @style = Styler.new_style_for(Model::Foo.new).foo
            @result_class = Style::Bar
          end
          setup :assoc, "an association collection" do
            @style = Styler.new_style_for(Model::Foo.new).fooz
            @result_class = Style::Bar
          end
          assert "it maps models to stylers in :assoc" do
            if @style.respond_to? :each
              @style.all? {|style| equal(style.__class__,@result_class)}
            else
              equal(@style.__class__, @result_class)
            end
          end
        end
        suite "#delegate" do
          setup do
            @styler = Styler.new_style_for(Model::Foo.new)
          end
          assert "it delegates to the model" do
            equal :hello, @styler.hello
          end
        end
      end
      suite "InstanceMethods" do
        suite "#to_s" do
          # Each :style setup pairs a style with its expected render tuple
          # of [template paths, assigned instance variables].
          setup :style, "a style" do
            @model = Model::Foo.new
            @style = ::Styler.new_style_for(@model)
            @result = [["style/foo/default"], {:@model => @model, :@type => :default}]
          end
          setup :style, "a complex style" do
            @model = Model::Foo.new
            @style = ::Styler.new_style_for(@model)
            @style.with(:bla => "foo")
            @result = [["style/foo/default"], {:@model => @model, :@type => :default, :@bla => "foo"}]
          end
          # The association's style carries the parent style as :@foo and
          # inherits its :@bla context variable.
          setup :style, "a style with context" do
            model = Model::Foo.new
            style = ::Styler.new_style_for(model)
            style.with(:bla => "foo")
            @style = style.foo
            @result = [["style/bar/default"], {:@foo => style, :@model => @style.model, :@type => :default, :@bla => "foo"}]
          end
          assert "it renders correctly" do
            equal(@result, @style.to_s)
          end
        end
      end
    end
  end
end
|
require 'tmpdir'
require 'chef/knife/kitchen'
# Spec helpers for running code inside (or merely alongside) a generated
# Chef "kitchen" directory.
module KitchenHelper
  # Creates a throwaway kitchen in a temp dir and yields inside it.
  # DRY fix: reuses outside_kitchen instead of duplicating the
  # mktmpdir/chdir dance.
  def in_kitchen
    outside_kitchen do
      knife_command(Chef::Knife::Kitchen, ".").run
      yield
    end
  end

  # Yields inside a fresh temporary directory (no kitchen generated);
  # the directory is removed when the block returns.
  def outside_kitchen
    Dir.mktmpdir do |dir|
      Dir.chdir(dir) do
        yield
      end
    end
  end
end
Fix DRY in kitchen test helper
require 'tmpdir'
require 'chef/knife/kitchen'
# Spec helpers for executing code within (or merely beside) a kitchen.
module KitchenHelper
  # Generate a kitchen in a scratch directory, then yield inside it.
  def in_kitchen
    outside_kitchen do
      knife_command(Chef::Knife::Kitchen, ".").run
      yield
    end
  end

  # Run the block from within a brand-new scratch directory; the
  # directory is cleaned up afterwards.
  def outside_kitchen
    Dir.mktmpdir do |scratch|
      Dir.chdir(scratch) { yield }
    end
  end
end
|
require 'test_helper'
require 'tbar/chart_of_accounts'
# Tests for Tbar::ChartOfAccounts: the default chart plus additions.
# FIX: removed a stray trailing `end` that left the file unparsable.
module Tbar
  class ChartOfAccountsTest < Test
    def setup
      @chart = ChartOfAccounts.new
    end

    def test_add_account
      # The default chart ships with 5 nodes.
      assert_equal 5, @chart.size
      @chart.add_account( 'Expenses/Banking/Service Fee' )
      assert_equal 3, @chart.depth
      assert_equal 7, @chart.size
    end

    def test_load_paths
      assert_equal 5, @chart.size
      @chart.load_paths( data_file( 'chart_of_accounts.dat' ).readlines )
      assert_equal 28, @chart.size
      assert_equal 4, @chart.depth
    end
  end
end
test alternate path separator
require 'test_helper'
require 'tbar/chart_of_accounts'
# Tests for Tbar::ChartOfAccounts: default chart, additions, and a
# custom path separator. FIX: test method name typo ("alterante").
module Tbar
  class ChartOfAccountsTest < Test
    def setup
      @chart = ChartOfAccounts.new
    end

    def test_add_account
      # The default chart ships with 5 nodes.
      assert_equal 5, @chart.size
      @chart.add_account( 'Expenses/Banking/Service Fee' )
      assert_equal 3, @chart.depth
      assert_equal 7, @chart.size
    end

    def test_load_paths
      assert_equal 5, @chart.size
      @chart.load_paths( data_file( 'chart_of_accounts.dat' ).readlines )
      assert_equal 28, @chart.size
      assert_equal 4, @chart.depth
    end

    def test_alternate_path_separator
      chart = ChartOfAccounts.new( :options => { :path_separator => "::" } )
      assert_equal 5, chart.size
      chart.add_account( 'Expenses::Banking::Service Fee' )
      assert_equal 3, chart.depth
      assert_equal 7, chart.size
    end
  end
end
|
require 'test_helper'
require 'support/formats'
# Exercises TeXMath::Converter across every reader/writer pair plus the
# error paths for invalid input and unknown formats/executables.
class ConverterTest < Minitest::Test
  include Formats
  include TeXMath

  # Generate one test per reader/writer combination.
  Converter::READERS.each_key do |reader|
    Converter::WRITERS.each_key do |writer|
      define_method "test_#{reader}_to_#{writer}" do
        assert_converts reader, writer
      end
    end
  end

  def test_symbol_reader_and_writer
    assert_converts :tex, :mathml
  end

  def test_string_reader_and_writer
    assert_converts 'tex', 'mathml'
  end

  # Reader defaults to :tex, writer to :mathml.
  def test_default_reader
    assert_equal :tex, Converter.new(to: :mathml).reader
  end

  def test_default_writer
    assert_equal :mathml, Converter.new(from: :tex).writer
  end

  # Unbalanced TeX must raise ConversionError.
  def test_invalid_input
    assert_raises ConversionError do
      Converter.new.convert('\sqrt{3')
    end
  end

  def test_executable_not_found
    assert_raises NoExecutableError, "Can't find the 'ogremath' executable." do
      Converter.new('ogremath').convert('\sqrt{3}')
    end
  end

  def test_string_reader_not_found
    assert_raises InvalidReaderError, "Can't find 'pears' reader." do
      Converter.new(from: 'pears', to: :mathml)
    end
  end

  def test_string_writer_not_found
    assert_raises InvalidWriterError, "Can't find 'pears' writer." do
      Converter.new(from: :tex, to: 'pears')
    end
  end

  def test_symbol_reader_not_found
    assert_raises InvalidReaderError, "Can't find 'pears' reader." do
      Converter.new(from: :pears, to: :mathml)
    end
  end

  def test_symbol_writer_not_found
    assert_raises InvalidWriterError, "Can't find 'pears' writer." do
      Converter.new(from: :tex, to: :pears)
    end
  end

  private

  # Converts the +from+ fixture and compares against the +to+ fixture
  # (fixtures come from the Formats mixin).
  def assert_converts(from, to)
    input, expected_output = send(from), send(to)
    actual_output = Converter.new(from: from, to: to).convert(input)
    assert_equal expected_output, actual_output
  end
end
Use TeXMath.convert as primary test interface
This is much more likely to be used in practice than
TeXMath::Converter#convert.
require 'test_helper'
require 'support/formats'
# Exercises TeXMath conversion across every reader/writer pair, driving
# the module-level TeXMath.convert API (the primary public interface).
class ConverterTest < Minitest::Test
  include Formats
  include TeXMath

  # Generate one test per reader/writer combination.
  Converter::READERS.each_key do |reader|
    Converter::WRITERS.each_key do |writer|
      define_method "test_#{reader}_to_#{writer}" do
        assert_converts reader, writer
      end
    end
  end

  def test_symbol_reader_and_writer
    assert_converts :tex, :mathml
  end

  def test_string_reader_and_writer
    assert_converts 'tex', 'mathml'
  end

  # Reader defaults to :tex, writer to :mathml.
  def test_default_reader
    assert_equal :tex, Converter.new(to: :mathml).reader
  end

  def test_default_writer
    assert_equal :mathml, Converter.new(from: :tex).writer
  end

  # Unbalanced TeX must raise ConversionError.
  def test_invalid_input
    assert_raises ConversionError do
      Converter.new.convert('\sqrt{3')
    end
  end

  def test_executable_not_found
    assert_raises NoExecutableError, "Can't find the 'ogremath' executable." do
      Converter.new('ogremath').convert('\sqrt{3}')
    end
  end

  def test_string_reader_not_found
    assert_raises InvalidReaderError, "Can't find 'pears' reader." do
      Converter.new(from: 'pears', to: :mathml)
    end
  end

  def test_string_writer_not_found
    assert_raises InvalidWriterError, "Can't find 'pears' writer." do
      Converter.new(from: :tex, to: 'pears')
    end
  end

  def test_symbol_reader_not_found
    assert_raises InvalidReaderError, "Can't find 'pears' reader." do
      Converter.new(from: :pears, to: :mathml)
    end
  end

  def test_symbol_writer_not_found
    assert_raises InvalidWriterError, "Can't find 'pears' writer." do
      Converter.new(from: :tex, to: :pears)
    end
  end

  private

  # Converts the +from+ fixture via TeXMath.convert and compares against
  # the +to+ fixture (fixtures come from the Formats mixin).
  def assert_converts(from, to)
    input, expected_output = send(from), send(to)
    actual_output = TeXMath.convert(input, from: from, to: to)
    assert_equal expected_output, actual_output
  end
end
|
# frozen_string_literal: true
require 'timeout'
# K8sConformanceTest represents the Kubernetes upstream conformance tests
# Runs a containerized test image (rkt on metal platforms, docker
# elsewhere) against a cluster, wiring in its kubeconfig and console
# credentials. FIX: the image is now pulled/fetched immediately before
# running, so a stale local cache is never used.
class TestContainer
  # image:: container image reference to run
  # cluster:: object exposing kubeconfig, env_variables and console creds
  # vpn_tunnel:: true when the cluster is only reachable through a VPN
  def initialize(image, cluster, vpn_tunnel)
    @image = image
    @cluster = cluster
    @vpn_tunnel = vpn_tunnel
  end

  # Executes the test image, bounded by a 3 hour timeout.
  # Raises when the container exits non-zero.
  def run
    ::Timeout.timeout(3 * 60 * 60) do # 3 hours
      command = if @cluster.env_variables['PLATFORM'].include?('metal')
                  # Fetch first so rkt does not run an out-of-date cached image.
                  "sudo rkt fetch --insecure-options=image #{@image}; \
sudo rkt run --volume kubecfg,kind=host,readOnly=false,source=#{@cluster.kubeconfig} \
--mount volume=kubecfg,target=/kubeconfig #{network_config} --dns=host \
#{container_env('rkt')} --insecure-options=image #{@image}"
                else
                  # Pull first so docker does not run an out-of-date cached image.
                  "docker pull #{@image}; \
docker run -v #{@cluster.kubeconfig}:/kubeconfig \
#{network_config} #{container_env('docker')} #{@image}"
                end
      succeeded = system(command)
      raise 'Running container tests failed' unless succeeded
    end
  end

  private

  # When the cluster is only reachable via a VPN connection, the
  # kube-conformance container has to share the same linux network namespace
  # like the current container to be able to use the same VPN tunnel.
  def network_config
    return '--net=host' unless @vpn_tunnel
    hostname = `hostname`.chomp
    "--net=container:#{hostname}"
  end

  # Some tests require a few environment variables to run properly,
  # build the environment parameters here.
  def container_env(engine)
    env = {
      'KUBECONFIG' => '/kubeconfig',
      'BRIDGE_AUTH_USERNAME' => @cluster.tectonic_admin_email,
      'BRIDGE_AUTH_PASSWORD' => @cluster.tectonic_admin_password,
      'BRIDGE_BASE_ADDRESS' => 'https://' + @cluster.tectonic_console_url,
      'BRIDGE_BASE_PATH' => '/'
    }
    return env.map { |k, v| "-e #{k}='#{v}'" }.join(' ').chomp if engine == 'docker'
    return env.map { |k, v| "--set-env #{k}='#{v}'" }.join(' ').chomp if engine == 'rkt'
    raise 'unknown container engine'
  end
end
rspec/lib: pulls test image before running it
TestContainer should pull the component test image before running it.
Otherwise it could run a stale, locally cached copy of the image.
# frozen_string_literal: true
require 'timeout'
# K8sConformanceTest represents the Kubernetes upstream conformance tests
# Runs a containerized test image (rkt on metal platforms, docker
# elsewhere) against a cluster. The image is pulled/fetched just before
# running so a stale local cache is never used.
class TestContainer
  # image:: container image reference; cluster:: exposes kubeconfig,
  # env_variables and console creds; vpn_tunnel:: true when the cluster
  # is only reachable through a VPN.
  def initialize(image, cluster, vpn_tunnel)
    @image = image
    @cluster = cluster
    @vpn_tunnel = vpn_tunnel
  end

  # Executes the test image, bounded by a 3 hour timeout; raises when the
  # container exits non-zero.
  def run
    ::Timeout.timeout(3 * 60 * 60) do # 3 hours
      command = if @cluster.env_variables['PLATFORM'].include?('metal')
                  # Fetch first so rkt never runs an out-of-date cached image.
                  "sudo rkt fetch --insecure-options=image #{@image}; \
sudo rkt run --volume kubecfg,kind=host,readOnly=false,source=#{@cluster.kubeconfig} \
--mount volume=kubecfg,target=/kubeconfig #{network_config} --dns=host \
#{container_env('rkt')} --insecure-options=image #{@image}"
                else
                  # Pull first so docker never runs an out-of-date cached image.
                  "docker pull #{@image}; \
docker run -v #{@cluster.kubeconfig}:/kubeconfig \
#{network_config} #{container_env('docker')} #{@image}"
                end
      succeeded = system(command)
      raise 'Running container tests failed' unless succeeded
    end
  end

  private

  # When the cluster is only reachable via a VPN connection, the
  # kube-conformance container has to share the same linux network namespace
  # like the current container to be able to use the same VPN tunnel.
  def network_config
    return '--net=host' unless @vpn_tunnel
    hostname = `hostname`.chomp
    "--net=container:#{hostname}"
  end

  # Some tests require a few environment variables to run properly,
  # build the environment parameters here.
  def container_env(engine)
    env = {
      'KUBECONFIG' => '/kubeconfig',
      'BRIDGE_AUTH_USERNAME' => @cluster.tectonic_admin_email,
      'BRIDGE_AUTH_PASSWORD' => @cluster.tectonic_admin_password,
      'BRIDGE_BASE_ADDRESS' => 'https://' + @cluster.tectonic_console_url,
      'BRIDGE_BASE_PATH' => '/'
    }
    return env.map { |k, v| "-e #{k}='#{v}'" }.join(' ').chomp if engine == 'docker'
    return env.map { |k, v| "--set-env #{k}='#{v}'" }.join(' ').chomp if engine == 'rkt'
    raise 'unknown container engine'
  end
end
|
#!/usr/bin/env ruby1.9.3
# 2015, s.andres@syseleven.de
#
# Sensu Handler: sms
#
# This handler formats alerts as sms and sends them off to defined sms contacts
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-handler'
require 'timeout'
# load sys11handler
require "#{File.dirname(__FILE__)}/sys11"
# Sensu SMS handler: formats the event as a short text message and sends
# it to every configured target via txt2sms.
# FIX: removed an unreachable exit() that followed a raise; fixed
# "successully" typos in the log messages.
class Sms < Sys11Handler
  def handle
    if ENV['DEBUG'] == 'true'
      debug = true
      puts settings['notifications'].inspect
    end
    if settings['notifications']['sms'] == false
      # SMS notifications disabled entirely; nothing to do.
      exit()
    end
    # Only send notifications between 0900 - 1659 when nine_to_five is true
    if settings['notifications']['sms']['nine_to_five'] == true
      if not Time.now.hour.between?(9, 16)
        # raise aborts the handler here; the exit() that used to follow
        # was unreachable.
        raise 'Not sending SMS. nine_to_five is enabled and it is not between 0900 and 1659.'
      end
    end
    # NOTE(review): the `== true` arm looks odd — a bare `true` config
    # would already have failed the ['nine_to_five'] lookup above; verify.
    if not settings['notifications']['sms'].include? 'source' or settings['notifications']['sms'] == true
      raise 'Missing sms source address. Got no default'
    else
      source = settings['notifications']['sms']['source']
    end
    output = "#{@event['check']['output']}"
    text = "#{status_to_string}: #{@event['client']['name']} #{@event['check']['name']} #{output}"
    # Cut the SMS text to 159 characters
    text = text[0..158]
    settings['notifications']['sms']['targets'].each do |target|
      if debug
        # Dry-run: echo the command instead of executing txt2sms.
        ret = `echo txt2sms -s "#{source}" -d "#{target}" -m "#{text}" 2>&1`
      else
        ret = `txt2sms -s "#{source}" -d "#{target}" -m "#{text}" 2>&1`
      end
      if $?.success?
        puts "txt2sms successfully sent sms: #{target} (#{text}): #{ret}"
      else
        puts "txt2sms did not successfully finish for #{target} (#{text}): #{ret}"
      end
    end
  end
end
os-967 remove useless exit
#!/usr/bin/env ruby1.9.3
# 2015, s.andres@syseleven.de
#
# Sensu Handler: sms
#
# This handler formats alerts as sms and sends them off to defined sms contacts
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-handler'
require 'timeout'
# load sys11handler
require "#{File.dirname(__FILE__)}/sys11"
# Sensu SMS handler: formats the event as a short text message and sends
# it to every configured target via txt2sms.
class Sms < Sys11Handler
  def handle
    if ENV['DEBUG'] == 'true'
      debug = true
      puts settings['notifications'].inspect
    end
    if settings['notifications']['sms'] == false
      # SMS notifications disabled entirely; nothing to do.
      exit()
    end
    # Only send notifications between 0900 - 1659 when nine_to_five is true
    if settings['notifications']['sms']['nine_to_five'] == true
      if not Time.now.hour.between?(9, 16)
        # raise aborts the handler outside the allowed window.
        raise 'Not sending SMS. nine_to_five is enabled and it is not between 0900 and 1659.'
      end
    end
    # NOTE(review): the `== true` arm looks odd — a bare `true` config
    # would already have failed the ['nine_to_five'] lookup above; verify.
    if not settings['notifications']['sms'].include? 'source' or settings['notifications']['sms'] == true
      raise 'Missing sms source address. Got no default'
    else
      source = settings['notifications']['sms']['source']
    end
    output = "#{@event['check']['output']}"
    text = "#{status_to_string}: #{@event['client']['name']} #{@event['check']['name']} #{output}"
    # Cut the SMS text to 159 characters
    text = text[0..158]
    settings['notifications']['sms']['targets'].each do |target|
      if debug
        # Dry-run: echo the command instead of executing txt2sms.
        ret = `echo txt2sms -s "#{source}" -d "#{target}" -m "#{text}" 2>&1`
      else
        ret = `txt2sms -s "#{source}" -d "#{target}" -m "#{text}" 2>&1`
      end
      if $?.success?
        puts "txt2sms successully send sms: #{target} (#{text}): #{ret}"
      else
        puts "txt2sms did not successully finish for #{target} (#{text}): #{ret}"
      end
    end
  end
end
|
#!/usr/bin/env ruby
require 'rubygems'
gem 'sinatra', '>= 1.3.2' # for :public_folder
require 'sinatra/base'
require 'active_support/core_ext'
require 'pp'
require 'optparse'
$DOCROOT = File.expand_path(File.join(File.dirname(__FILE__), '../priv/public'))
# Regenerates js/all-images.js under $DOCROOT: a JS assignment exposing
# every image file path beneath images/ as the global AllImages array.
def build_all_images_js!
  image_paths = Dir.chdir($DOCROOT) do
    Dir.glob("images/**/*").reject { |entry| File.directory?(entry) }
  end
  File.write("#{$DOCROOT}/js/all-images.js", "var AllImages = #{image_paths.to_json};\n")
end
# Build the image manifest once at startup.
build_all_images_js!
# Rack middleware in front of the static server:
# * injects /js/hooks.js into index.html responses
# * serves files under /js/ directly when present on disk
# FIX: IO.readlines(path, 'r') passed 'r' as the *line separator*
# (splitting the body on every "r"); the file is now served verbatim.
# Also swapped ActiveSupport's starts_with? for stdlib start_with?.
class Middleware
  def initialize(app)
    @app = app
  end

  def call(env)
    req = Rack::Request.new(env)
    if req.path_info == "/index.html"
      # Inject the hooks script just before </body>.
      text = IO.read($DOCROOT + "/index.html").gsub("</body>", "<script src='/js/hooks.js'></script></body>")
      return [200, {'Content-Type' => 'text/html; charset=utf-8'}, [text]]
    elsif req.path_info.start_with?('/js/')
      # NOTE(review): this resolves relative to the process cwd rather
      # than $DOCROOT — presumably for locally generated js; verify.
      path = req.path_info
      if File.file?(path)
        return [200, {'Content-Type' => 'application/javascript'}, [IO.read(path)]]
      end
    end
    @app.call(env)
  end
end
# Thin Sinatra app serving the static docroot; "/" redirects to index.html.
class NSServer < Sinatra::Base
  use Middleware
  set :public_folder, $DOCROOT
  get "/" do
    redirect "/index.html"
  end
end
# Parse CLI flags and push each one into NSServer's Sinatra settings.
OptionParser.new do |opts|
script_name = File.basename($0)
opts.banner = "Usage: #{script_name} [options]"
# Default port; applied eagerly so it holds when -p is not given.
NSServer.set :port, 8080
opts.on('-x', 'Turn on the mutex lock (default is off)') do
NSServer.set :lock, true
end
opts.on('-e env', 'Set the environment (default is development)') do |opt|
NSServer.set :environment, opt.to_sym
end
opts.on('-s server', 'Specify rack server/handler (default is thin)') do |opt|
NSServer.set :server, opt
end
opts.on('-p port', 'Set the port (default is 8080)') do |opt|
NSServer.set :port, opt.to_i
end
opts.on('-o addr', 'Set the host (default is localhost)') do |opt|
NSServer.set :bind, opt
end
# Sets a global flag read by the NSServer.run! block below.
opts.on('-t', '--shots', 'Make application screenshots') do |opt|
$do_screenshots = true
end
opts.on_tail('-h', '--help', 'Show this message') do
puts opts.help
exit
end
end.parse!
# Boot the server; when --shots was given, fork a detached child that runs
# the casperjs test suite against the live server to capture screenshots.
NSServer.run! do
if $do_screenshots
phantomjs_pid = fork do
system "casperjs test tests/ --base-url=http://#{NSServer.settings.bind}:#{NSServer.settings.port.to_s}/index.html --screenshots-output-path=tests/screenshots-output/ "
end
# Detach so the child is reaped without a blocking wait.
Process.detach(phantomjs_pid)
end
end
MB-8972: cleanup ruby code to spawn casperjs a bit
Particularly we're avoiding fork and making it all work without any
additional args and from inside any directory
Change-Id: I39ccd0a2ee5eab6c247fc9e9e3827b47f9a9df90
Reviewed-on: http://review.couchbase.org/29138
Reviewed-by: Pavel Blagodov <596b71e626fc58dbdad141b817770e3e4810bb9b@gmail.com>
Tested-by: Pavel Blagodov <596b71e626fc58dbdad141b817770e3e4810bb9b@gmail.com>
#!/usr/bin/env ruby
require 'rubygems'
gem 'sinatra', '>= 1.3.2' # for :public_folder
require 'sinatra/base'
require 'active_support/core_ext'
require 'pp'
require 'optparse'
$DOCROOT = File.expand_path(File.join(File.dirname(__FILE__), '../priv/public'))
# Regenerate js/all-images.js: a JS manifest (JSON array) of every image
# file (non-directory entry) under $DOCROOT/images.
def build_all_images_js!
all_images_names = Dir.chdir($DOCROOT) do
Dir.glob("images/**/*").reject {|n| File.directory? n}
end
File.open($DOCROOT + "/js/all-images.js", "wt") do |file|
file << "var AllImages = " << all_images_names.to_json << ";\n"
end
end
build_all_images_js!
# Rack middleware for the dev server: injects the hooks script into
# index.html and serves /js/ assets straight from $DOCROOT.
class Middleware
  def initialize(app)
    @app = app
  end

  def call(env)
    req = Rack::Request.new(env)
    if req.path_info == "/index.html"
      # Splice the dev hooks script in just before </body>.
      text = IO.read($DOCROOT + "/index.html").gsub("</body>", "<script src='/js/hooks.js'></script></body>")
      return [200, {'Content-Type' => 'text/html; charset=utf-8'}, [text]]
    elsif req.path_info.starts_with?('/js/')
      # BUG FIX: resolve against $DOCROOT — bare path_info was checked
      # relative to the process cwd — and serve the file contents whole.
      # The original IO.readlines(path, 'r') passed 'r' as the line
      # *separator* (readlines has no mode argument), splitting the file
      # on every letter "r".
      path = $DOCROOT + req.path_info
      if File.file?(path)
        return [200, {'Content-Type' => 'application/javascript'}, [IO.read(path)]]
      end
    end
    @app.call(env)
  end
end
# Sinatra app serving the static UI from $DOCROOT, with the hooks-injecting
# Middleware in front. "/" redirects to the injected index page.
class NSServer < Sinatra::Base
use Middleware
set :public_folder, $DOCROOT
get "/" do
redirect "/index.html"
end
end
# Parse CLI flags and push each one into NSServer's Sinatra settings.
OptionParser.new do |opts|
script_name = File.basename($0)
opts.banner = "Usage: #{script_name} [options]"
# Default port; applied eagerly so it holds when -p is not given.
NSServer.set :port, 8080
opts.on('-x', 'Turn on the mutex lock (default is off)') do
NSServer.set :lock, true
end
opts.on('-e env', 'Set the environment (default is development)') do |opt|
NSServer.set :environment, opt.to_sym
end
opts.on('-s server', 'Specify rack server/handler (default is thin)') do |opt|
NSServer.set :server, opt
end
opts.on('-p port', 'Set the port (default is 8080)') do |opt|
NSServer.set :port, opt.to_i
end
opts.on('-o addr', 'Set the host (default is localhost)') do |opt|
NSServer.set :bind, opt
end
# Sets a global flag read by the NSServer.run! block below.
opts.on('-t', '--shots', 'Make application screenshots') do |opt|
$do_screenshots = true
end
opts.on_tail('-h', '--help', 'Show this message') do
puts opts.help
exit
end
end.parse!
# Run from the script's own directory so the relative casperjs paths below
# resolve regardless of where the script was launched from.
Dir.chdir(File.dirname(__FILE__))
# Boot the server; when --shots was given, run the casperjs suite in a
# background thread and hard-exit the process with its pass/fail status.
NSServer.run! do
if $do_screenshots
Thread.new do
cmd = "casperjs test tests/ --base-url=http://#{NSServer.settings.bind || "127.0.0.1"}:#{NSServer.settings.port.to_s}/index.html --screenshots-output-path=tests/screenshots-output/ "
puts "cmd: #{cmd}"
ok = system(cmd)
unless ok
puts("casperjs command failed")
end
# exit! skips at_exit hooks; exit code mirrors the casperjs result.
Process.exit!(ok ? 0 : 1)
end
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'BioTrackTHC/version'
# Gem specification for BioTrackTHC: a gem that pulls lab tests from and
# pushes results to BioTrackTHC on behalf of a LIMS.
Gem::Specification.new do |spec|
  spec.name          = 'BioTrackTHC'
  spec.version       = BioTrackTHC::VERSION
  spec.authors       = ['Emanuele Tozzato']
  spec.email         = ['etozzato@gmail.com']

  spec.summary       = %q{Pull and push lab data between a LIMS and BioTrackTHC}
  spec.description   = %q{A simple gem to pull lab tests and push results to BioTrackTHC }
  # FIXME: unreplaced generator placeholder — RubyGems will reject a gem
  # whose homepage is a TODO string; set the real repo URL before release.
  spec.homepage      = "TODO: Put your gem's website or public repo URL here."
  spec.license       = 'MIT'

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.add_dependency 'mechanize'
  # bundler/rake/rspec/pry are tooling, not runtime requirements: declare
  # them as development dependencies so gem consumers don't install them.
  spec.add_development_dependency 'bundler', '~> 1.11'
  spec.add_development_dependency 'rake', '~> 0.9.2.2'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'pry'
end
ruby version update
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'BioTrackTHC/version'
# Gem specification for BioTrackTHC: a gem that pulls lab tests from and
# pushes results to BioTrackTHC on behalf of a LIMS.
Gem::Specification.new do |spec|
  spec.name          = 'BioTrackTHC'
  spec.version       = BioTrackTHC::VERSION
  spec.authors       = ['Emanuele Tozzato']
  spec.email         = ['etozzato@gmail.com']

  spec.summary       = %q{Pull and push lab data between a LIMS and BioTrackTHC}
  spec.description   = %q{A simple gem to pull lab tests and push results to BioTrackTHC }
  # FIXME: unreplaced generator placeholder — RubyGems will reject a gem
  # whose homepage is a TODO string; set the real repo URL before release.
  spec.homepage      = "TODO: Put your gem's website or public repo URL here."
  spec.license       = 'MIT'

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.add_dependency 'mechanize'
  # bundler/rake/rspec/pry are tooling, not runtime requirements: declare
  # them as development dependencies so gem consumers don't install them.
  spec.add_development_dependency 'bundler', '~> 1.11'
  spec.add_development_dependency 'rake', '~> 10.1.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'pry'
end
|
#
# Be sure to run `pod lib lint BoltsExtras.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for BoltsExtras 0.3.0: iOS 7+ helpers (block-based
# UIAlertView/UIActionSheet wrapped into BFTasks) built on Bolts ~> 1.1.2.
Pod::Spec.new do |s|
s.name = "BoltsExtras"
s.version = "0.3.0"
s.summary = "A collection of stuff to make IOS Programming Easier with Bolts."
s.description = <<-DESC
A collection of stuff to make IOS Programming Easier with Bolts.
UIAlertView and UIActionSheet block based implementations that let wrap them into BFTasks
* Markdown format.
* Don't worry about the indent, we strip it!
DESC
s.homepage = "https://github.com/mishagray/BoltsExtras"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "Michael Gray" => "mishagray@gmail.com" }
# Tag in the git repo must match the version string above.
s.source = { :git => "https://github.com/mishagray/BoltsExtras.git", :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/mishagray'
s.platform = :ios, '7.0'
s.requires_arc = true
s.source_files = 'Pod/Classes/*.{h,m}'
# s.resources = 'Pod/Assets/*.png'
# s.public_header_files = 'Pod/Classes/**/*.h'
s.frameworks = 'UIKit'
s.dependency 'Bolts', '~> 1.1.2'
end
podspec changes
#
# Be sure to run `pod lib lint BoltsExtras.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for BoltsExtras 0.4.0: iOS 7+ helpers (block-based
# UIAlertView/UIActionSheet wrapped into BFTasks) built on Bolts ~> 1.1.2.
Pod::Spec.new do |s|
s.name = "BoltsExtras"
s.version = "0.4.0"
s.summary = "A collection of stuff to make IOS Programming Easier with Bolts."
s.description = <<-DESC
A collection of stuff to make IOS Programming Easier with Bolts.
UIAlertView and UIActionSheet block based implementations that let wrap them into BFTasks
* Markdown format.
* Don't worry about the indent, we strip it!
DESC
s.homepage = "https://github.com/mishagray/BoltsExtras"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "Michael Gray" => "mishagray@gmail.com" }
# Tag in the git repo must match the version string above.
s.source = { :git => "https://github.com/mishagray/BoltsExtras.git", :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/mishagray'
s.platform = :ios, '7.0'
s.requires_arc = true
s.source_files = 'Pod/Classes/*.{h,m}'
# s.resources = 'Pod/Assets/*.png'
# s.public_header_files = 'Pod/Classes/**/*.h'
s.frameworks = 'UIKit'
s.dependency 'Bolts', '~> 1.1.2'
end
|
# CocoaPods spec for BreinifyApi 2.0.19 (Swift 5, iOS 9+); sources pulled
# from the git tag matching the version.
Pod::Spec.new do |s|
s.name = 'BreinifyApi'
s.version = '2.0.19'
s.summary = 'Breinify´s DigitalDNA API puts dynamic behavior-based, people-driven data right at your fingertips'
s.description = 'Breinify´s DigitalDNA API puts dynamic behavior-based, people-driven data right at your fingertips. We believe that in many situations, a critical component of a great user experience is personalization. With all the data available on the web it should be easy to provide a unique experience to every visitor, and yet, sometimes you may find yourself wondering why it is so difficult.'
s.homepage = 'https://github.com/Breinify/brein-api-library-ios'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Breinify Inc.' => 'support@breinify.com' }
s.source = { :git => 'https://github.com/Breinify/brein-api-library-ios.git', :tag => s.version.to_s }
s.swift_version = '5.0'
s.ios.deployment_target = '9.0'
s.source_files = 'BreinifyApi/**/*.swift'
end
new version number added
# CocoaPods spec for BreinifyApi 2.0.20 (Swift 5, iOS 9+); sources pulled
# from the git tag matching the version.
Pod::Spec.new do |s|
s.name = 'BreinifyApi'
s.version = '2.0.20'
s.summary = 'Breinify´s DigitalDNA API puts dynamic behavior-based, people-driven data right at your fingertips'
s.description = 'Breinify´s DigitalDNA API puts dynamic behavior-based, people-driven data right at your fingertips. We believe that in many situations, a critical component of a great user experience is personalization. With all the data available on the web it should be easy to provide a unique experience to every visitor, and yet, sometimes you may find yourself wondering why it is so difficult.'
s.homepage = 'https://github.com/Breinify/brein-api-library-ios'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Breinify Inc.' => 'support@breinify.com' }
s.source = { :git => 'https://github.com/Breinify/brein-api-library-ios.git', :tag => s.version.to_s }
s.swift_version = '5.0'
s.ios.deployment_target = '9.0'
s.source_files = 'BreinifyApi/**/*.swift'
end
|
# Errbit development-environment configuration.
Errbit::Application.configure do
# Settings specified here will take precedence over those in config/environment.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.consider_all_requests_local = true
# NOTE(review): debug_rjs was dropped from Rails along with RJS — confirm
# this app's Rails version still supports it.
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = true
config.action_mailer.default_url_options = { :host => 'localhost:3000' }

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log
end
no more debug_rjs in rails
# Errbit development-environment configuration (debug_rjs removed).
Errbit::Application.configure do
# Settings specified here will take precedence over those in config/environment.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = true
config.action_mailer.default_url_options = { :host => 'localhost:3000' }

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log
end
|
# Development-environment settings fragment (body of a Rails configure
# block; the enclosing `Application.configure do` lives outside this view).
# Settings specified here will take precedence over those in config/environment.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
config.action_mailer.default_url_options = { :host => "192.168.1.42:3000" }

ActionMailer::Base.delivery_method = :smtp
# SECURITY FIX: real Gmail credentials were hard-coded here and committed
# to source control. Read them from the environment instead; the exposed
# password must also be rotated.
ActionMailer::Base.smtp_settings = {
  :enable_starttls_auto => true,
  :address => "smtp.gmail.com",
  :port => 587,
  :domain => "gmail.com",
  :user_name => ENV['GMAIL_USERNAME'],
  :password => ENV['GMAIL_PASSWORD'],
  :authentication => :plain
}
Update development.rb
# Development-environment settings fragment (body of a Rails configure
# block; the enclosing `Application.configure do` lives outside this view).
# Settings specified here will take precedence over those in config/environment.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
config.action_mailer.default_url_options = { :host => "192.168.1.42:3000" }

ActionMailer::Base.delivery_method = :smtp
# SECURITY FIX: the account name was hard-coded and the password was a
# committed placeholder that SMTP auth would fail on anyway. Read both
# from the environment.
ActionMailer::Base.smtp_settings = {
  :enable_starttls_auto => true,
  :address => "smtp.gmail.com",
  :port => 587,
  :domain => "gmail.com",
  :user_name => ENV['GMAIL_USERNAME'],
  :password => ENV['GMAIL_PASSWORD'],
  :authentication => :plain
}
|
# Development-environment configuration: code reloading, full error
# reports, debug assets, letter_opener for mail preview, Paperclip paths.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Do not eager load code on boot.
config.eager_load = false

# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log

# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load

# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true

# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true

# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# defined default url options
config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }
# letter_opener previews outgoing mail in the browser instead of sending.
config.action_mailer.delivery_method = :letter_opener
# Paperclip needs ImageMagick binaries; this points at the Homebrew path.
Paperclip.options[:command_path] = "/usr/local/bin/"
end
trigger build
# Development-environment configuration: code reloading, full error
# reports, debug assets, letter_opener for mail preview, Paperclip paths.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Do not eager load code on boot.
config.eager_load = false

# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log

# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load

# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true

# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true

# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# defined default url options
config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }
# letter_opener previews outgoing mail in the browser instead of sending.
config.action_mailer.delivery_method = :letter_opener
# Paperclip needs ImageMagick binaries; this points at the Homebrew path.
Paperclip.options[:command_path] = "/usr/local/bin/"
end
|
require 'exception_notifier'
require 'exception_notification/rack'
require 'exception_notification/rails'
require 'exception_notification'
# Stash development-environment configuration, including exception
# notification emails delivered via local sendmail.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Do not eager load code on boot.
config.eager_load = false

# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log

# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load

# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true

# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true

# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# Email uncaught exceptions to the team via the ExceptionNotification rack
# middleware; recipients are hard-coded for this deployment.
Rails.application.config.middleware.use ExceptionNotification::Rack,
:email => {
# :deliver_with => :deliver, # Rails >= 4.2.1 do not need this option since it defaults to :deliver_now
:email_prefix => "[Stash Exception]",
:sender_address => %{"notifier" no-reply-dash2-dev@ucop.edu },
:exception_recipients => %w{david.moles@ucop.edu marisa.strong@ucop.edu scott.fisher@ucop.edu bhavi.vedula@ucop.edu}
}
config.action_mailer.delivery_method = :sendmail
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
config.action_mailer.default_url_options = { :host => "https://dash2-dev.ucop.edu/stash" }
end
removing bad lines from development.rb
# Stash development-environment configuration, including exception
# notification emails delivered via local sendmail.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Do not eager load code on boot.
config.eager_load = false

# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log

# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load

# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true

# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true

# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# Email uncaught exceptions to the team via the ExceptionNotification rack
# middleware; NOTE(review): the requires for exception_notification were
# removed in this revision — confirm the constant is loaded elsewhere.
Rails.application.config.middleware.use ExceptionNotification::Rack,
:email => {
# :deliver_with => :deliver, # Rails >= 4.2.1 do not need this option since it defaults to :deliver_now
:email_prefix => "[Stash Exception]",
:sender_address => %{"notifier" no-reply-dash2-dev@ucop.edu },
:exception_recipients => %w{david.moles@ucop.edu marisa.strong@ucop.edu scott.fisher@ucop.edu bhavi.vedula@ucop.edu}
}
config.action_mailer.delivery_method = :sendmail
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = true
config.action_mailer.default_url_options = { :host => "https://dash2-dev.ucop.edu/stash" }
end
|
# Development-environment configuration with toggleable caching
# (tmp/caching-dev.txt) and SMTP routed to a MailCatcher instance.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Do not eager load code on boot.
config.eager_load = false

# Show full error reports.
config.consider_all_requests_local = true

# Enable/disable caching. By default caching is disabled.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true

config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => 'public, max-age=172800'
}
else
config.action_controller.perform_caching = false

config.cache_store = :null_store
end

# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false

config.action_mailer.perform_caching = false

# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log

# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load

# Mailcatcher configuration
config.action_mailer.default_url_options = { :host => 'localhost:4200' }
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = { :address => 'mailcatcher', :port => 1025 }

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
Fixing mailcatcher address
# Development-environment configuration with toggleable caching
# (tmp/caching-dev.txt) and SMTP routed to a MailCatcher instance;
# mailer URLs point at localhost:3000.
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Do not eager load code on boot.
config.eager_load = false

# Show full error reports.
config.consider_all_requests_local = true

# Enable/disable caching. By default caching is disabled.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true

config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => 'public, max-age=172800'
}
else
config.action_controller.perform_caching = false

config.cache_store = :null_store
end

# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false

config.action_mailer.perform_caching = false

# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log

# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load

# Mailcatcher configuration
config.action_mailer.default_url_options = { :host => 'localhost:3000' }
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = { :address => 'mailcatcher', :port => 1025 }

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
|
# RgsocTeams development-environment configuration; outgoing mail is
# delivered to a Mailtrap inbox via SMTP.
RgsocTeams::Application.configure do
# Settings specified here will take precedence over those in config/application.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log

# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin

# Do not compress assets
config.assets.compress = false

# Expands the lines which load the assets
config.assets.debug = true

config.action_mailer.default_url_options = { host: 'localhost:3000' }
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
# ENV.fetch raises KeyError at boot when the Mailtrap vars are missing.
user_name: ENV.fetch('MAILTRAP_USER'),
password: ENV.fetch('MAILTRAP_PASSWORD'),
address: 'mailtrap.io',
domain: 'mailtrap.io',
port: '2525',
authentication: :cram_md5,
enable_starttls_auto: true
}

config.eager_load = false
end
Ignore missing Mailtrap env
[ci skip]
# RgsocTeams development-environment configuration; outgoing mail is
# delivered to a Mailtrap inbox via SMTP.
RgsocTeams::Application.configure do
# Settings specified here will take precedence over those in config/application.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log

# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin

# Do not compress assets
config.assets.compress = false

# Expands the lines which load the assets
config.assets.debug = true

config.action_mailer.default_url_options = { host: 'localhost:3000' }
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = {
# ENV[] returns nil when the Mailtrap vars are missing, so the app still
# boots without them (unlike ENV.fetch).
user_name: ENV['MAILTRAP_USER'],
password: ENV['MAILTRAP_PASSWORD'],
address: 'mailtrap.io',
domain: 'mailtrap.io',
port: '2525',
authentication: :cram_md5,
enable_starttls_auto: true
}

config.eager_load = false
end
|
# Limelight development-environment configuration.
Limelight::Application.configure do
# Settings specified here will take precedence over those in config/application.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log

# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin

# Do not compress assets
config.assets.compress = false

# Expands the lines which load the assets
config.assets.debug = true
end
re-enable jasmine spec directory as asset path for development, allows running tests in browser
# Limelight development-environment configuration; also exposes the
# jasmine spec directory as an asset path so tests can run in the browser.
Limelight::Application.configure do
# Settings specified here will take precedence over those in config/application.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log

# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin

# Do not compress assets
config.assets.compress = false

# Expands the lines which load the assets
config.assets.debug = true

# Serve spec/javascripts through the asset pipeline for in-browser tests.
config.assets.paths << Rails.root.join("spec", "javascripts")
end
|
# Digitalsocial development-environment configuration: Rails settings plus
# local SPARQL endpoints for Tripod and a memcached-backed Tripod cache.
Digitalsocial::Application.configure do

Digitalsocial::NOMINATIM_EMAIL = 'nominatim-dev@digitalsocial.eu'

# Settings specified here will take precedence over those in config/application.rb

# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false

# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true

# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false

# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log

# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin

# Raise exception on mass assignment protection for Active Record models
#config.active_record.mass_assignment_sanitizer = :strict

# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
#config.active_record.auto_explain_threshold_in_seconds = 0.5

# Do not compress assets
config.assets.compress = false

# Expands the lines which load the assets
config.assets.debug = true

config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = { host: "localhost", port: 1025 }
config.action_mailer.default_url_options = { host: 'localhost:3000' }

Digitalsocial::DATA_ENDPOINT = 'http://127.0.0.1:3030/dsidev/data'

# Tripod triple-store endpoints for local dev. NOTE: the block parameter
# `config` here shadows the Rails `config` above — inside this block all
# assignments go to Tripod's configuration.
Tripod.configure do |config|
config.update_endpoint = 'http://127.0.0.1:3030/dsidev/update'
config.query_endpoint = 'http://127.0.0.1:3030/dsidev/sparql'
config.timeout_seconds = 30
# Dev cache store: memcached on the non-default port 11214.
config.cache_store = Tripod::CacheStores::MemcachedCacheStore.new('localhost:11214')
end

end
Dev cache config comment
Digitalsocial::Application.configure do
Digitalsocial::NOMINATIM_EMAIL = 'nominatim-dev@digitalsocial.eu'
# Settings specified here will take precedence over those in config/application.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log
# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin
# Raise exception on mass assignment protection for Active Record models
#config.active_record.mass_assignment_sanitizer = :strict
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
#config.active_record.auto_explain_threshold_in_seconds = 0.5
# Do not compress assets
config.assets.compress = false
# Expands the lines which load the assets
config.assets.debug = true
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings = { host: "localhost", port: 1025 }
config.action_mailer.default_url_options = { host: 'localhost:3000' }
Digitalsocial::DATA_ENDPOINT = 'http://127.0.0.1:3030/dsidev/data'
Tripod.configure do |config|
config.update_endpoint = 'http://127.0.0.1:3030/dsidev/update'
config.query_endpoint = 'http://127.0.0.1:3030/dsidev/sparql'
config.timeout_seconds = 30
# e.g memcached -m 1024 -p 11214 -I 5M -u memcache -l 127.0.0.1
config.cache_store = Tripod::CacheStores::MemcachedCacheStore.new('localhost:11214')
end
end
|
require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded any time
# it changes. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=#{2.days.to_i}",
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise exceptions for disallowed deprecations.
config.active_support.disallowed_deprecation = :raise
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.i18n.raise_on_missing_translations = true
# Annotate rendered view with file names.
# config.action_view.annotate_rendered_view_with_filenames = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
# Uncomment if you wish to allow Action Cable access from any origin.
# config.action_cable.disable_request_forgery_protection = true
# Allow access to the application using dev.gov.uk hostnames
config.hosts += %w[
specialist-publisher.dev.gov.uk
specialist-publisher.govuk.dev.cbaines.net
]
end
Allow all hosts in development mode
We seem to have had a developers local hostname left behind in here.
Since GOV.UK developers rarely use localhost for development its quite
conventional to just clear the restriction, see [1] for prior art
[1]: https://github.com/alphagov/content-publisher/commit/42cc6297b3ce2970510ec3fc31e2cd8d1e2de310
require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded any time
# it changes. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=#{2.days.to_i}",
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise exceptions for disallowed deprecations.
config.active_support.disallowed_deprecation = :raise
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.i18n.raise_on_missing_translations = true
# Annotate rendered view with file names.
# config.action_view.annotate_rendered_view_with_filenames = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
# Uncomment if you wish to allow Action Cable access from any origin.
# config.action_cable.disable_request_forgery_protection = true
# Allow requests for all domains e.g. <app>.dev.gov.uk
config.hosts.clear
end
|
# Settings specified here will take precedence over those in config/environment.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.action_view.debug_rjs = true
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false
config.active_record.allow_concurrency = true
config.after_initialize do background() end
Remove depricated config.active_record.allow_concurrency setting
git-svn-id: f67d969b640da65cb7bc1d229e09fd6d2db44ae1@748 392d7b7b-3f31-0410-9dc3-c95a9635ea79
# Settings specified here will take precedence over those in config/environment.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.action_view.debug_rjs = true
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false
config.after_initialize do background() end
|
Edgar::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Add the fonts path
config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
# Precompile additional assets
config.assets.precompile += %w( jquery.js jquery_ujs contacts_import.js contacts_index.js contacts.js tag.js contacts_form.js scheduling.js tasks_group.js simple/application.js onepage/application.js tour/home-tour-fr.js onepage.js onepage.css .svg .eot .woff .ttf multipage.css )
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
config.action_mailer.default_url_options = { :host => ENV['DOMAIN'] }
# ActionMailer Config
# Setup for production - deliveries, no errors raised
config.action_mailer.delivery_method = :smtp
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = false
config.action_mailer.default :charset => "utf-8"
config.action_mailer.smtp_settings = {
:address => "smtp.mandrillapp.com",
:port => 25,
:user_name => ENV["MANDRILL_USERNAME"],
:password => ENV["MANDRILL_API_KEY"]
}
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
# config.active_record.auto_explain_threshold_in_seconds = 0.5
end
add contacts_show.js for precompilation
Edgar::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Add the fonts path
config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
# Precompile additional assets
config.assets.precompile += %w( jquery.js jquery_ujs jquery.ui.addresspicker.js contacts_import.js contacts_index.js contacts_show.js contacts.js tag.js contacts_form.js scheduling.js tasks_group.js simple/application.js onepage/application.js tour/home-tour-fr.js onepage.js onepage.css .svg .eot .woff .ttf multipage.css )
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
config.action_mailer.default_url_options = { :host => ENV['DOMAIN'] }
# ActionMailer Config
# Setup for production - deliveries, no errors raised
config.action_mailer.delivery_method = :smtp
config.action_mailer.perform_deliveries = true
config.action_mailer.raise_delivery_errors = false
config.action_mailer.default :charset => "utf-8"
config.action_mailer.smtp_settings = {
:address => "smtp.mandrillapp.com",
:port => 25,
:user_name => ENV["MANDRILL_USERNAME"],
:password => ENV["MANDRILL_API_KEY"]
}
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
# config.active_record.auto_explain_threshold_in_seconds = 0.5
end
|
#
# Be sure to run `pod lib lint BumbleB-iOS.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = "BumbleB-iOS"
s.version = "0.1.1"
s.summary = "convenient access to the BumbleB API for objective-c developers"
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = "BumbleB-iOS provides convenient access to the BumbleB API for objective-c developers. The interface converts Soundy API responses into NSObjects"
s.homepage = "https://github.com/BumbleB-IO/BumbleB-iOS"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "Ram Greenberg" => "ramgreenberg@hotmail.com" }
s.source = { :git => "https://github.com/BumbleB-IO/BumbleB-iOS.git", :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.platform = :ios, '7.0'
s.requires_arc = true
s.source_files = 'Pod/Classes/**/*'
s.resource_bundles = {
'BumbleB-iOS' => ['Pod/Assets/*.png']
}
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
s.dependency 'AFNetworking', '~> 2.6'
end
Release 0.1.2
#
# Be sure to run `pod lib lint BumbleB-iOS.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = "BumbleB-iOS"
s.version = "0.1.2"
s.summary = "convenient access to the BumbleB API for objective-c developers"
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = "BumbleB-iOS provides convenient access to the BumbleB API for objective-c developers. The interface converts Soundy API responses into NSObjects"
s.homepage = "https://github.com/BumbleB-IO/BumbleB-iOS"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "Ram Greenberg" => "ramgreenberg@hotmail.com" }
s.source = { :git => "https://github.com/BumbleB-IO/BumbleB-iOS.git", :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.platform = :ios, '7.0'
s.requires_arc = true
s.source_files = 'Pod/Classes/**/*'
s.resource_bundles = {
'BumbleB-iOS' => ['Pod/Assets/*.png']
}
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
s.dependency 'AFNetworking', '~> 2.6'
end
|
#
# Be sure to run `pod spec lint BxObjC.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "BxTextField"
s.version = "1.10.11"
s.summary = "Swift UI component improving features UITextField"
s.description = "This component will help iOS developers with different functions of putting text"
s.homepage = "https://github.com/ByteriX/BxTextField.git"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = { :type => "MIT", :file => "LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "Sergey Balalaev" => "sof.bix@mail.ru" }
# Or just: s.author = "ByteriX"
# s.authors = { "ByteriX" => "email@address.com" }
# s.social_media_url = "http://twitter.com/ByteriX"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
s.platform = :ios, "8.0"
s.swift_versions = ['4.0', '4.1', '4.2', '5.0', '5.1', '5.2']
#s.ios.deployment_target = "5.0"
#s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
s.source = { :git => "https://github.com/ByteriX/BxTextField.git", :tag => s.version }
s.frameworks = ["Foundation", "UIKit"]
#s.resources = "BxTextField/Sources/Assets.xcassets", "BxTextField/Sources/**/*.xib"
s.source_files = "BxTextField/Sources/**/*.{swift}", "BxTextField/Sources/*.{swift}"
#s.test_spec 'Tests' do |test_spec|
#test_spec.platform = :ios, "11.0"
# test_spec.source_files = "BxTextFieldTests/*.swift"
#, "BxTextFieldUITests/*.swift"
# test_spec.framework = 'XCTest'
#end
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
end
1.10.12 release
#
# Be sure to run `pod spec lint BxObjC.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "BxTextField"
s.version = "1.10.12"
s.summary = "Swift UI component improving features UITextField"
s.description = "This component will help iOS developers with different functions of putting text"
s.homepage = "https://github.com/ByteriX/BxTextField.git"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = { :type => "MIT", :file => "LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "Sergey Balalaev" => "sof.bix@mail.ru" }
# Or just: s.author = "ByteriX"
# s.authors = { "ByteriX" => "email@address.com" }
# s.social_media_url = "http://twitter.com/ByteriX"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
s.platform = :ios, "8.0"
s.swift_versions = ['3.2', '4.0', '4.1', '4.2', '5.0', '5.1', '5.2']
#s.ios.deployment_target = "5.0"
#s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
s.source = { :git => "https://github.com/ByteriX/BxTextField.git", :tag => s.version }
s.frameworks = ["Foundation", "UIKit"]
#s.resources = "BxTextField/Sources/Assets.xcassets", "BxTextField/Sources/**/*.xib"
s.source_files = "BxTextField/Sources/**/*.{swift}", "BxTextField/Sources/*.{swift}"
#s.test_spec 'Tests' do |test_spec|
#test_spec.platform = :ios, "11.0"
# test_spec.source_files = "BxTextFieldTests/*.swift"
#, "BxTextFieldUITests/*.swift"
# test_spec.framework = 'XCTest'
#end
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
end
|
CarrierWave.configure do |config|
config.fog_provider = 'fog/aws' # required
config.fog_credentials = {
provider: 'AWS', # required
aws_access_key_id: Rails.application.secrets.aws_access_key_id, # required
aws_secret_access_key: Rails.application.secrets.aws_secret_access_key, # required
region: 'eu-west-1', # optional, defaults to 'us-east-1'
# host: 's3.example.com', # optional, defaults to nil
# endpoint: 'https://s3.example.com:8080' # optional, defaults to nil
}
config.fog_directory = 'inmo-a' # required
# config.fog_public = false # optional, defaults to true
config.fog_attributes = { 'Cache-Control' => "max-age=#{365.day.to_i}" } # optional, defaults to {}
end
Default dummy values for aws credentials
CarrierWave.configure do |config|
aws_access_key_id = Rails.application.secrets.aws_access_key_id || "dummy"
aws_secret_access_key = Rails.application.secrets.aws_secret_access_key || "dummy"
# defaulting to dummy above ensures app will start
# even though uploading will not work
config.fog_provider = 'fog/aws' # required
config.fog_credentials = {
provider: 'AWS', # required
aws_access_key_id: aws_access_key_id, # required
aws_secret_access_key: aws_secret_access_key, # required
region: 'eu-west-1', # optional, defaults to 'us-east-1'
# host: 's3.example.com', # optional, defaults to nil
# endpoint: 'https://s3.example.com:8080' # optional, defaults to nil
}
config.fog_directory = 'pwb' # required
# config.fog_public = false # optional, defaults to true
config.fog_attributes = { 'Cache-Control' => "max-age=#{365.day.to_i}" } # optional, defaults to {}
end
|
# Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, '\1en'
# inflect.singular /^(ox)en/i, '\1'
# inflect.irregular 'person', 'people'
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym 'RESTful'
# end
ActiveSupport::Inflector.inflections(:en) do |inflect|
inflect.plural /(friend) of (ruby together)/i, '\1s of \2'
end
remove now-unused friend inflection
# Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, '\1en'
# inflect.singular /^(ox)en/i, '\1'
# inflect.irregular 'person', 'people'
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym 'RESTful'
# end
|
APPLICATION_ENGINES = []
Rails.application.railties.engines.collect do |engine|
engine_name = engine.class.to_s.split('::')[0]
if eval(engine_name).respond_to? :mcp
APPLICATION_ENGINES << {railtie: engine, name: engine_name.underscore}
end
end
Rails.logger.debug "Mapped mcp engines: #{APPLICATION_ENGINES.map {|e| e[:name]}} from #{__FILE__}"
config_file = "#{Rails.root}/config/application.yml"
if File.exists? config_file
# Load Application Settings - common across applications
APPLICATION_CONFIG = YAML.load_file(config_file)[Rails.env]
# Set the relative url if app is deployed to a subdirectory
# See: https://github.com/rails/rails/issues/6933
if APPLICATION_CONFIG['relative_url']
Rails.application.routes.default_url_options[:script_name] = "/#{APPLICATION_CONFIG['relative_url']}"
end
# If the 'asset_dir' config variable is set then ensure the directory tree exists and set a couple of system variables
if APPLICATION_CONFIG['asset_dir']
APPLICATION_CONFIG['view_path'] = "/#{APPLICATION_CONFIG['asset_dir']}"
APPLICATION_CONFIG['asset_path'] = "#{Rails.root}/public/#{APPLICATION_CONFIG['asset_dir']}"
if not File.directory? "#{APPLICATION_CONFIG['asset_path']}/"
FileUtils.mkdir_p "#{APPLICATION_CONFIG['asset_path']}/"
end
end
# Override ActionMailer settings if they exist in the config file
if APPLICATION_CONFIG['smtp_settings']
ActionMailer::Base.delivery_method = :smtp
ActionMailer::Base.smtp_settings = Hash[APPLICATION_CONFIG['smtp_settings'].map{ |k, v| [k.to_sym, v] }]
end
else
APPLICATION_CONFIG = []
end
removing parsing of engines
# can do this after deciding how config files will be deployed to the application
# for now it is just assumed that it is fan_club
#APPLICATION_ENGINES = [{name: ENV['APPLICATION_ENGINE']}]
APPLICATION_ENGINES = [{name: 'fan_club'}]
dir_p = "#{Rails.root}/public/cache/facebook"
FileUtils.mkdir_p dir_p unless File.directory?(dir_p)
=begin
APPLICATION_ENGINES = []
Rails.application.railties.engines.collect do |engine|
engine_name = engine.class.to_s.split('::')[0]
if eval(engine_name).respond_to? :mcp
APPLICATION_ENGINES << {railtie: engine, name: engine_name.underscore}
end
end
Rails.logger.debug "Mapped mcp engines: #{APPLICATION_ENGINES.map {|e| e[:name]}} from #{__FILE__}"
config_file = "#{Rails.root}/config/application.yml"
if File.exists? config_file
# Load Application Settings - common across applications
APPLICATION_CONFIG = YAML.load_file(config_file)[Rails.env]
# Set the relative url if app is deployed to a subdirectory
# See: https://github.com/rails/rails/issues/6933
if APPLICATION_CONFIG['relative_url']
Rails.application.routes.default_url_options[:script_name] = "/#{APPLICATION_CONFIG['relative_url']}"
end
# If the 'asset_dir' config variable is set then ensure the directory tree exists and set a couple of system variables
if APPLICATION_CONFIG['asset_dir']
APPLICATION_CONFIG['view_path'] = "/#{APPLICATION_CONFIG['asset_dir']}"
APPLICATION_CONFIG['asset_path'] = "#{Rails.root}/public/#{APPLICATION_CONFIG['asset_dir']}"
if not File.directory? "#{APPLICATION_CONFIG['asset_path']}/"
FileUtils.mkdir_p "#{APPLICATION_CONFIG['asset_path']}/"
end
end
# Override ActionMailer settings if they exist in the config file
if APPLICATION_CONFIG['smtp_settings']
ActionMailer::Base.delivery_method = :smtp
ActionMailer::Base.smtp_settings = Hash[APPLICATION_CONFIG['smtp_settings'].map{ |k, v| [k.to_sym, v] }]
end
else
APPLICATION_CONFIG = []
end
=end
|
Add papertrail initializer
Stops the deprecation `DEPRECATION WARNING: PaperTrail.config.track_associations has not been set. As of PaperTrail 5, it defaults to false`
PaperTrail.config.track_associations = false
|
Whitehall.public_host = ENV['FACTER_govuk_platform'] == 'production' ? 'www.gov.uk' : 'www.dev.gov.uk'
Make this more clear with parentheses
Whitehall.public_host = (ENV['FACTER_govuk_platform'] == 'production' ? 'www.gov.uk' : 'www.dev.gov.uk')
|
static_data = {}
Dir["#{Rails.root}/config/data/**/*.yml"].each do |file|
basename = File.basename(file, ".yml")
static_data[basename] = YAML.load_file(file)
end
STATIC_DATA = static_data.freeze
Remove static_data initializer
|
# frozen_string_literal: true
# New with Rails 6+, we need to define the list of locales outside the context of
# the Database since thiss runs during startup. Trying to access the DB causes
# issues with autoloading; 'DEPRECATION WARNING: Initialization autoloaded the constants ... Language'
#
# Note that the entries here must have a corresponding directory in config/locale, a
# YAML file in config/locales and should also have an entry in the DB's languages table
# SUPPORTED_LOCALES = %w[de en-CA en-GB en-US es fi fr-CA fr-FR pt-BR sv-FI tr-TR].freeze
SUPPORTED_LOCALES = %w[en-US pt-BR].freeze
# You can define a subset of the locales for your instance's version of Translation.io if applicable
# CLIENT_LOCALES = %w[de en-CA en-GB en-US es fi fr-CA fr-FR pt-BR sv-FI tr-TR].freeze
CLIENT_LOCALES = %w[en-US pt-BR].freeze
# DEFAULT_LOCALE = 'en-GB'
DEFAULT_LOCALE = 'en-US'
# Here we define the translation domains for the Roadmap application, `app` will
# contain translations from the open-source repository and ignore the contents
# of the `app/views/branded` directory. The `client` domain will
#
# When running the application, the `app` domain should be specified in your environment.
# the `app` domain will be searched first, falling back to `client`
#
# When generating the translations, the rake:tasks will need to be run with each
# domain specified in order to generate both sets of translation keys.
if !ENV['DOMAIN'] || ENV.fetch('DOMAIN', nil) == 'app'
TranslationIO.configure do |config|
config.api_key = Rails.configuration.x.dmproadmap.translation_io_key_app
config.source_locale = 'en'
config.target_locales = SUPPORTED_LOCALES
config.text_domain = 'app'
config.bound_text_domains = %w[app client]
config.ignored_source_paths = Dir.glob('**/*').select { |f| File.directory? f }
.collect { |name| "#{name}/" }
.select do |path|
path.include?('branded/') ||
path.include?('dmptool/') ||
path.include?('node_modules/')
end
config.locales_path = Rails.root.join('config', 'locale')
end
elsif ENV.fetch('DOMAIN', nil) == 'client'
# Control ignored source paths
# Note, all prefixes of the directory you want to translate must be defined here!
#
# To sync translations with the Translation IO server run:
# > rails translation:sync_and_purge DOMAIN=client
TranslationIO.configure do |config|
config.api_key = Rails.configuration.x.dmproadmap.translation_io_key_client
config.source_locale = 'en'
config.target_locales = CLIENT_LOCALES
config.text_domain = 'client'
config.bound_text_domains = ['client']
config.ignored_source_paths = Dir.glob('**/*').select { |f| File.directory? f }
.collect { |name| "#{name}/" }
.reject do |path|
path == 'app/' || path == 'app/views/' ||
path.include?('branded/') || path.include?('dmptool/')
end
config.disable_yaml = true
config.locales_path = Rails.root.join('config', 'locale')
end
end
# Setup languages
def default_locale
DEFAULT_LOCALE
end
def available_locales
SUPPORTED_LOCALES.sort { |a, b| a <=> b }
end
I18n.available_locales = SUPPORTED_LOCALES
I18n.default_locale = DEFAULT_LOCALE
added Spanish to languages list
# frozen_string_literal: true
# New with Rails 6+, we need to define the list of locales outside the context of
# the Database since thiss runs during startup. Trying to access the DB causes
# issues with autoloading; 'DEPRECATION WARNING: Initialization autoloaded the constants ... Language'
#
# Note that the entries here must have a corresponding directory in config/locale, a
# YAML file in config/locales and should also have an entry in the DB's languages table
# SUPPORTED_LOCALES = %w[de en-CA en-GB en-US es fi fr-CA fr-FR pt-BR sv-FI tr-TR].freeze
SUPPORTED_LOCALES = %w[en-US es pt-BR].freeze
# You can define a subset of the locales for your instance's version of Translation.io if applicable
# CLIENT_LOCALES = %w[de en-CA en-GB en-US es fi fr-CA fr-FR pt-BR sv-FI tr-TR].freeze
CLIENT_LOCALES = %w[en-US es pt-BR].freeze
# DEFAULT_LOCALE = 'en-GB'
DEFAULT_LOCALE = 'en-US'
# Here we define the translation domains for the Roadmap application, `app` will
# contain translations from the open-source repository and ignore the contents
# of the `app/views/branded` directory. The `client` domain will
#
# When running the application, the `app` domain should be specified in your environment.
# the `app` domain will be searched first, falling back to `client`
#
# When generating the translations, the rake:tasks will need to be run with each
# domain specified in order to generate both sets of translation keys.
if !ENV['DOMAIN'] || ENV.fetch('DOMAIN', nil) == 'app'
TranslationIO.configure do |config|
config.api_key = Rails.configuration.x.dmproadmap.translation_io_key_app
config.source_locale = 'en'
config.target_locales = SUPPORTED_LOCALES
config.text_domain = 'app'
config.bound_text_domains = %w[app client]
config.ignored_source_paths = Dir.glob('**/*').select { |f| File.directory? f }
.collect { |name| "#{name}/" }
.select do |path|
path.include?('branded/') ||
path.include?('dmptool/') ||
path.include?('node_modules/')
end
config.locales_path = Rails.root.join('config', 'locale')
end
elsif ENV.fetch('DOMAIN', nil) == 'client'
# Control ignored source paths
# Note, all prefixes of the directory you want to translate must be defined here!
#
# To sync translations with the Translation IO server run:
# > rails translation:sync_and_purge DOMAIN=client
TranslationIO.configure do |config|
config.api_key = Rails.configuration.x.dmproadmap.translation_io_key_client
config.source_locale = 'en'
config.target_locales = CLIENT_LOCALES
config.text_domain = 'client'
config.bound_text_domains = ['client']
config.ignored_source_paths = Dir.glob('**/*').select { |f| File.directory? f }
.collect { |name| "#{name}/" }
.reject do |path|
path == 'app/' || path == 'app/views/' ||
path.include?('branded/') || path.include?('dmptool/')
end
config.disable_yaml = true
config.locales_path = Rails.root.join('config', 'locale')
end
end
# Setup languages
def default_locale
DEFAULT_LOCALE
end
def available_locales
SUPPORTED_LOCALES.sort { |a, b| a <=> b }
end
I18n.available_locales = SUPPORTED_LOCALES
I18n.default_locale = DEFAULT_LOCALE
|
cask '4k-stogram' do
version '2.6.16.1610'
sha256 'fee0e8c78aa2fb1fcf82dbcbd1df660ecc873014cb4100609eacb9ffd6efea3a'
url "https://dl.4kdownload.com/app/4kstogram_#{version.major_minor_patch}.dmg"
appcast 'https://www.4kdownload.com/download'
name '4K Stogram'
homepage 'https://www.4kdownload.com/products/product-stogram'
app '4K Stogram.app'
zap trash: '~/Pictures/4K Stogram'
end
Update 4k-stogram to 2.6.17.1620 (#50662)
cask '4k-stogram' do
version '2.6.17.1620'
sha256 '5bcea91e67cfb24bdfc72fc8faa7e2694ac03db6c41f80f8def8407d8a3f3441'
url "https://dl.4kdownload.com/app/4kstogram_#{version.major_minor_patch}.dmg"
appcast 'https://www.4kdownload.com/download'
name '4K Stogram'
homepage 'https://www.4kdownload.com/products/product-stogram'
app '4K Stogram.app'
zap trash: '~/Pictures/4K Stogram'
end
|
class Actprinter < Cask
version '3.1.4'
sha256 '7f92b093be3ddc08f5ce00cd19b3edb13c01a4c4626948b204c484123d3e25e8'
url "http://www.actprinter.com/mac/ACTPrinter%20for%20Mac%20#{version}.zip"
homepage 'http://houdah.com/ACTPrinter/'
license :gratis
app 'ACTPrinter for Mac.app'
end
new-style header in actprinter
cask :v1 => 'actprinter' do
version '3.1.4'
sha256 '7f92b093be3ddc08f5ce00cd19b3edb13c01a4c4626948b204c484123d3e25e8'
url "http://www.actprinter.com/mac/ACTPrinter%20for%20Mac%20#{version}.zip"
homepage 'http://houdah.com/ACTPrinter/'
license :gratis
app 'ACTPrinter for Mac.app'
end
|
class Aquamacs25 < Cask
version '2.5'
sha256 '5857848d8d46bba43d160c02393b098a370e2156608be24b288419f668210be9'
url 'http://braeburn.aquamacs.org/releases/Aquamacs-Emacs-2.5.dmg'
homepage 'http://aquamacs.org/'
app 'Aquamacs.app'
end
re-use version in aquamacs25
class Aquamacs25 < Cask
version '2.5'
sha256 '5857848d8d46bba43d160c02393b098a370e2156608be24b288419f668210be9'
url "http://braeburn.aquamacs.org/releases/Aquamacs-Emacs-#{version}.dmg"
homepage 'http://aquamacs.org/'
app 'Aquamacs.app'
end
|
class Chronosync < Cask
version :latest
sha256 :no_check
url 'http://downloads.econtechnologies.com/CS4_Download.dmg'
homepage 'http://www.econtechnologies.com'
pkg 'Install.pkg'
uninstall :pkgutil => 'com.econtechnologies.pkg.ChronoSyncApplication'
end
add license stanza to chronosync
class Chronosync < Cask
version :latest
sha256 :no_check
url 'http://downloads.econtechnologies.com/CS4_Download.dmg'
homepage 'http://www.econtechnologies.com'
license :unknown
pkg 'Install.pkg'
uninstall :pkgutil => 'com.econtechnologies.pkg.ChronoSyncApplication'
end
|
cask "cleanmymac" do
version "4.6.11"
sha256 "0477c63b9e9da4dbdb7afabab7a00300f608e364d6d7751a00da2cf33d6fe2e0"
# dl.devmate.com/com.macpaw.CleanMyMac was verified as official when first introduced to the cask
url "https://dl.devmate.com/com.macpaw.CleanMyMac#{version.major}/CleanMyMacX.dmg"
appcast "https://updates.devmate.com/com.macpaw.CleanMyMac#{version.major}.xml"
name "CleanMyMac X"
homepage "https://macpaw.com/cleanmymac"
auto_updates true
app "CleanMyMac X.app"
uninstall delete: [
"/Library/PrivilegedHelperTools/com.macpaw.CleanMyMac#{version.major}.Agent",
"/private/var/run/com.macpaw.CleanMyMac#{version.major}.Agent.socket",
],
launchctl: [
"com.macpaw.CleanMyMac#{version.major}.Agent",
"com.macpaw.CleanMyMac#{version.major}.HealthMonitor",
"com.macpaw.CleanMyMac#{version.major}.Menu",
"com.macpaw.CleanMyMac#{version.major}.Scheduler",
"com.macpaw.CleanMyMac#{version.major}.Updater",
],
login_item: "CleanMyMac #{version.major} Menu",
quit: [
"com.macpaw.CleanMyMac#{version.major}",
"com.macpaw.CleanMyMac#{version.major}.HealthMonitor",
"com.macpaw.CleanMyMac#{version.major}.Menu",
]
zap trash: [
"/Users/Shared/CleanMyMac X",
"/Users/Shared/CleanMyMac X Menu",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.macpaw.cleanmymac#{version.major}.sfl*",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.macpaw.cleanmymac#{version.major}.scheduler.sfl*",
"~/Library/Application Support/CleanMyMac X",
"~/Library/Application Support/CleanMyMac X Menu",
"~/Library/Caches/CleanMyMac #{version.major}",
"~/Library/Caches/com.apple.helpd/SDMHelpData/Other/English/HelpSDMIndexFile/com.macpaw.CleanMyMac#{version.major}.help*",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.HealthMonitor",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.KnowledgeBase",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.Menu",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.Scheduler",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.Updater",
"~/Library/LaunchAgents/com.macpaw.CleanMyMac#{version.major}.HealthMonitor.plist",
"~/Library/LaunchAgents/com.macpaw.CleanMyMac#{version.major}.Updater.plist",
"~/Library/Logs/CleanMyMac #{version.major}.log",
"~/Library/Logs/CleanMyMac X Menu/HealthMonitor",
"~/Library/Logs/com.macpaw.CleanMyMac#{version.major}",
"~/Library/Preferences/com.macpaw.CleanMyMac-#{version.major}-Helper.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.HealthMonitor.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.KnowledgeBase.plist",
"~/Library/Preferences/com.macpaw.cleanmymac#{version.major}.Menu.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.Scheduler.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.plist",
"~/Pictures/Photos Library.photoslibrary/private/com.macpaw.CleanMyMac#{version.major}",
]
end
Update cleanmymac (#88356)
Co-authored-by: Achyuth Ajoy <a3127433a0dee143b74c56cbc4e91a0a360b0083@Achyuths-MacBook-Pro.local>
cask "cleanmymac" do
version "4.6.11"
sha256 "025f91a0b253ec58e9a3574b906f96f79ad6881d1d27ac1cfca5d3d3a7f16b57"
# dl.devmate.com/com.macpaw.CleanMyMac was verified as official when first introduced to the cask
url "https://dl.devmate.com/com.macpaw.CleanMyMac#{version.major}/CleanMyMacX.dmg"
appcast "https://updates.devmate.com/com.macpaw.CleanMyMac#{version.major}.xml"
name "CleanMyMac X"
homepage "https://macpaw.com/cleanmymac"
auto_updates true
app "CleanMyMac X.app"
uninstall delete: [
"/Library/PrivilegedHelperTools/com.macpaw.CleanMyMac#{version.major}.Agent",
"/private/var/run/com.macpaw.CleanMyMac#{version.major}.Agent.socket",
],
launchctl: [
"com.macpaw.CleanMyMac#{version.major}.Agent",
"com.macpaw.CleanMyMac#{version.major}.HealthMonitor",
"com.macpaw.CleanMyMac#{version.major}.Menu",
"com.macpaw.CleanMyMac#{version.major}.Scheduler",
"com.macpaw.CleanMyMac#{version.major}.Updater",
],
login_item: "CleanMyMac #{version.major} Menu",
quit: [
"com.macpaw.CleanMyMac#{version.major}",
"com.macpaw.CleanMyMac#{version.major}.HealthMonitor",
"com.macpaw.CleanMyMac#{version.major}.Menu",
]
zap trash: [
"/Users/Shared/CleanMyMac X",
"/Users/Shared/CleanMyMac X Menu",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.macpaw.cleanmymac#{version.major}.sfl*",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.macpaw.cleanmymac#{version.major}.scheduler.sfl*",
"~/Library/Application Support/CleanMyMac X",
"~/Library/Application Support/CleanMyMac X Menu",
"~/Library/Caches/CleanMyMac #{version.major}",
"~/Library/Caches/com.apple.helpd/SDMHelpData/Other/English/HelpSDMIndexFile/com.macpaw.CleanMyMac#{version.major}.help*",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.HealthMonitor",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.KnowledgeBase",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.Menu",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.Scheduler",
"~/Library/Caches/com.macpaw.CleanMyMac#{version.major}.Updater",
"~/Library/LaunchAgents/com.macpaw.CleanMyMac#{version.major}.HealthMonitor.plist",
"~/Library/LaunchAgents/com.macpaw.CleanMyMac#{version.major}.Updater.plist",
"~/Library/Logs/CleanMyMac #{version.major}.log",
"~/Library/Logs/CleanMyMac X Menu/HealthMonitor",
"~/Library/Logs/com.macpaw.CleanMyMac#{version.major}",
"~/Library/Preferences/com.macpaw.CleanMyMac-#{version.major}-Helper.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.HealthMonitor.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.KnowledgeBase.plist",
"~/Library/Preferences/com.macpaw.cleanmymac#{version.major}.Menu.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.Scheduler.plist",
"~/Library/Preferences/com.macpaw.CleanMyMac#{version.major}.plist",
"~/Pictures/Photos Library.photoslibrary/private/com.macpaw.CleanMyMac#{version.major}",
]
end
|
class Cloudytabs < Cask
url 'https://github.com/josh-/CloudyTabs/releases/download/v1.2/CloudyTabs.zip'
appcast 'http://joshparnham.com/projects/cloudytabs/appcast.xml'
homepage 'https://github.com/josh-/CloudyTabs/'
version '1.2'
sha256 '7409dcce88355799a65060bdd3c957c4a3146886f4fb489d41208d4d9ed7d905'
link 'CloudyTabs.app'
end
Reformat cloudytabs.rb according to readability conventions
class Cloudytabs < Cask
version '1.2'
sha256 '7409dcce88355799a65060bdd3c957c4a3146886f4fb489d41208d4d9ed7d905'
url 'https://github.com/josh-/CloudyTabs/releases/download/v1.2/CloudyTabs.zip'
appcast 'http://joshparnham.com/projects/cloudytabs/appcast.xml'
homepage 'https://github.com/josh-/CloudyTabs/'
link 'CloudyTabs.app'
end
|
cask 'coderunner' do
version '2.1.1'
sha256 '8b18a0db9966c537b6eb7b36502e74fa93cfed20aa06e4c56c57ecf3cc9e304c'
# cloudfront.net is the official download host per the vendor homepage
url "https://dktfof1z89xc1.cloudfront.net/CodeRunner%20#{version}.zip"
appcast 'https://coderunnerapp.com/appcast.xml',
:sha256 => '9b2e8558235267858dfc4af85fc8c5e7f24c41f0535256ea5300409c5571866c'
name 'CodeRunner'
homepage 'https://coderunnerapp.com/'
license :commercial
app 'CodeRunner.app'
end
coderunner.rb: appcast sha256 without pubDate
cask 'coderunner' do
version '2.1.1'
sha256 '8b18a0db9966c537b6eb7b36502e74fa93cfed20aa06e4c56c57ecf3cc9e304c'
# cloudfront.net is the official download host per the vendor homepage
url "https://dktfof1z89xc1.cloudfront.net/CodeRunner%20#{version}.zip"
appcast 'https://coderunnerapp.com/appcast.xml',
:sha256 => '96376821ad5f0fb6a193e5af27632f2410294268afd895602a4e22f533a86a76'
name 'CodeRunner'
homepage 'https://coderunnerapp.com/'
license :commercial
app 'CodeRunner.app'
end
|
cask 'compositor' do
version '1.4.1'
sha256 '7a8cceb474307773c7f51853d7c802aba69840395d2e09281922e0b4fe8b9b6f'
url "https://compositorapp.com/updates/Compositor_#{version}.zip"
appcast 'https://compositorapp.com/updates/appcast.xml'
name 'Compositor'
homepage 'https://compositorapp.com/'
app 'Compositor.app'
zap trash: [
'~/Library/Application Scripts/com.microlarge.Compositor',
'~/Library/Containers/com.microlarge.Compositor',
'~/Library/Preferences/com.microlarge.Compositor.plist',
]
end
Update compositor to 1.5.2 (#53276)
cask 'compositor' do
version '1.5.2'
sha256 'd99ff99cc384b317b2d039c2dcda0e7a7bf1fe8fda0b7139d240554629c88a61'
url "https://compositorapp.com/updates/Compositor_#{version}.zip"
appcast 'https://compositorapp.com/updates/appcast.xml'
name 'Compositor'
homepage 'https://compositorapp.com/'
app 'Compositor.app'
zap trash: [
'~/Library/Application Scripts/com.microlarge.Compositor',
'~/Library/Containers/com.microlarge.Compositor',
'~/Library/Preferences/com.microlarge.Compositor.plist',
]
end
|
cask "cozy-drive" do
version "3.30.1"
sha256 "5a48cb95dff4bac04f5ab904dd08cc85c0d8b7c49ef5d9d09a97338b6695eebd"
url "https://github.com/cozy-labs/cozy-desktop/releases/download/v#{version}/Cozy-Drive-#{version}.dmg",
verified: "github.com/cozy-labs/cozy-desktop"
name "Cozy Drive"
desc "File synchronization for Cozy Cloud"
homepage "https://cozy.io/"
livecheck do
url "https://nuts.cozycloud.cc/download/channel/stable/osx"
strategy :header_match
end
depends_on macos: ">= :sierra"
app "Cozy Drive.app"
end
Update cozy-drive from 3.30.1 to 3.31.0 (#114258)
cask "cozy-drive" do
version "3.31.0"
sha256 "ddba5b4ed3679c167c80cdfae6f0c7e2876549fd8f8d35bfde84314ea983d1d1"
url "https://github.com/cozy-labs/cozy-desktop/releases/download/v#{version}/Cozy-Drive-#{version}.dmg",
verified: "github.com/cozy-labs/cozy-desktop"
name "Cozy Drive"
desc "File synchronization for Cozy Cloud"
homepage "https://cozy.io/"
livecheck do
url "https://nuts.cozycloud.cc/download/channel/stable/osx"
strategy :header_match
end
depends_on macos: ">= :sierra"
app "Cozy Drive.app"
end
|
cask 'displaycal' do
version '3.8.0.0'
sha256 '696163477d45925d7c36517ff4f117592d097911bdea53ba08b74d80cac96006'
# sourceforge.net/dispcalgui was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/dispcalgui/release/#{version}/DisplayCAL-#{version}.pkg"
appcast 'https://sourceforge.net/projects/dispcalgui/rss?path=/release'
name 'DisplayCAL'
homepage 'https://displaycal.net/'
auto_updates true
depends_on formula: 'argyll-cms'
pkg "DisplayCAL-#{version}.pkg"
uninstall pkgutil: 'net.displaycal.*.DisplayCAL.*'
zap trash: [
'~/Library/Application Support/dispcalGUI',
'~/Library/Logs/dispcalGUI',
'~/Library/Preferences/dispcalGUI',
]
end
Update displaycal from 3.8.0.0 to 3.8.1.0 (#63515)
cask 'displaycal' do
version '3.8.1.0'
sha256 '421dde8ea1365efc06b1254bd26ed28b08559d81c2453d3d6b83c31f5987b540'
# sourceforge.net/dispcalgui was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/dispcalgui/release/#{version}/DisplayCAL-#{version}.pkg"
appcast 'https://sourceforge.net/projects/dispcalgui/rss?path=/release'
name 'DisplayCAL'
homepage 'https://displaycal.net/'
auto_updates true
depends_on formula: 'argyll-cms'
pkg "DisplayCAL-#{version}.pkg"
uninstall pkgutil: 'net.displaycal.*.DisplayCAL.*'
zap trash: [
'~/Library/Application Support/dispcalGUI',
'~/Library/Logs/dispcalGUI',
'~/Library/Preferences/dispcalGUI',
]
end
|
class Expandrive < Cask
version :latest
sha256 :no_check
url 'http://updates.expandrive.com/apps/expandrive/download_latest'
appcast 'http://updates.expandrive.com/appcast/expandrive.xml?version=3'
homepage 'http://www.expandrive.com/expandrive'
license :unknown
app 'ExpanDrive.app'
zap :delete => [
'~/Library/Application Support/ExpanDrive',
'~/Preferences/com.expandrive.ExpanDrive2.plist',
'~/Preferences/com.expandrive.ExpanDrive3.plist',
]
end
new-style header in expandrive
cask :v1 => 'expandrive' do
version :latest
sha256 :no_check
url 'http://updates.expandrive.com/apps/expandrive/download_latest'
appcast 'http://updates.expandrive.com/appcast/expandrive.xml?version=3'
homepage 'http://www.expandrive.com/expandrive'
license :unknown
app 'ExpanDrive.app'
zap :delete => [
'~/Library/Application Support/ExpanDrive',
'~/Preferences/com.expandrive.ExpanDrive2.plist',
'~/Preferences/com.expandrive.ExpanDrive3.plist',
]
end
|
cask :v1 => 'fingerlock' do
version :latest
sha256 :no_check
url 'http://www.fingerkeyapp.com/download/FingerLock.dmg'
homepage 'http://www.fingerkeyapp.com/'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'FingerLock.app'
end
fingerlock.rb: added name
cask :v1 => 'fingerlock' do
version :latest
sha256 :no_check
url 'http://www.fingerkeyapp.com/download/FingerLock.dmg'
name 'FingerLock'
homepage 'http://www.fingerkeyapp.com/'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'FingerLock.app'
end
|
class FirefoxFr < Cask
url 'https://download.mozilla.org/?product=firefox-latest&os=osx&lang=fr'
homepage 'http://www.mozilla.org/fr/firefox/new/'
version 'latest'
sha256 :no_check
link 'Firefox.app'
end
Format FireFox FR
class FirefoxFr < Cask
version 'latest'
sha256 :no_check
url 'https://download.mozilla.org/?product=firefox-latest&os=osx&lang=fr'
homepage 'http://www.mozilla.org/fr/firefox/new/'
link 'Firefox.app'
end
|
class FirefoxFr < Cask
version 'latest'
sha256 :no_check
url 'https://download.mozilla.org/?product=firefox-latest&os=osx&lang=fr'
homepage 'http://www.mozilla.org/fr/firefox/new/'
app 'Firefox.app'
end
:latest as symbol, firefox-fr
class FirefoxFr < Cask
version :latest
sha256 :no_check
url 'https://download.mozilla.org/?product=firefox-latest&os=osx&lang=fr'
homepage 'http://www.mozilla.org/fr/firefox/new/'
app 'Firefox.app'
end
|
cask "font-carme" do
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/raw/master/ofl/carme/Carme-Regular.ttf"
name "Carme"
homepage "https://fonts.google.com/specimen/Carme"
font "Carme-Regular.ttf"
end
font-carme.rb: Update verified parameter (#2990)
cask "font-carme" do
version :latest
sha256 :no_check
url "https://github.com/google/fonts/raw/master/ofl/carme/Carme-Regular.ttf",
verified: "github.com/google/fonts/"
name "Carme"
homepage "https://fonts.google.com/specimen/Carme"
font "Carme-Regular.ttf"
end
|
class FontCarme < Cask
url 'https://googlefontdirectory.googlecode.com/hg-history/67342bc472599b4c32201ee4a002fe59a6447a42/ofl/carme/Carme-Regular.ttf'
homepage 'http://www.google.com/fonts/specimen/Carme'
version '1.000'
sha256 '2be32ebe8f62944b1df0f1bf0c76957bda369567b99454d749e51192a393db90'
font 'Carme-Regular.ttf'
end
standard Cask layout for font-carme.rb
class FontCarme < Cask
version '1.000'
sha256 '2be32ebe8f62944b1df0f1bf0c76957bda369567b99454d749e51192a393db90'
url 'https://googlefontdirectory.googlecode.com/hg-history/67342bc472599b4c32201ee4a002fe59a6447a42/ofl/carme/Carme-Regular.ttf'
homepage 'http://www.google.com/fonts/specimen/Carme'
font 'Carme-Regular.ttf'
end
|
cask 'font-kanit' do
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url 'https://github.com/google/fonts/trunk/ofl/kanit',
using: :svn,
trust_cert: true
name 'Kanit'
homepage 'https://fonts.google.com/specimen/Kanit'
depends_on macos: '>= :sierra'
font 'Kanit-Black.ttf'
font 'Kanit-BlackItalic.ttf'
font 'Kanit-Bold.ttf'
font 'Kanit-BoldItalic.ttf'
font 'Kanit-ExtraBold.ttf'
font 'Kanit-ExtraBoldItalic.ttf'
font 'Kanit-ExtraLight.ttf'
font 'Kanit-ExtraLightItalic.ttf'
font 'Kanit-Italic.ttf'
font 'Kanit-Light.ttf'
font 'Kanit-LightItalic.ttf'
font 'Kanit-Medium.ttf'
font 'Kanit-MediumItalic.ttf'
font 'Kanit-Regular.ttf'
font 'Kanit-SemiBold.ttf'
font 'Kanit-SemiBoldItalic.ttf'
font 'Kanit-Thin.ttf'
font 'Kanit-ThinItalic.ttf'
end
font-kanit.rb: fix for new style
cask "font-kanit" do
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/trunk/ofl/kanit",
using: :svn,
trust_cert: true
name "Kanit"
homepage "https://fonts.google.com/specimen/Kanit"
depends_on macos: ">= :sierra"
font "Kanit-Black.ttf"
font "Kanit-BlackItalic.ttf"
font "Kanit-Bold.ttf"
font "Kanit-BoldItalic.ttf"
font "Kanit-ExtraBold.ttf"
font "Kanit-ExtraBoldItalic.ttf"
font "Kanit-ExtraLight.ttf"
font "Kanit-ExtraLightItalic.ttf"
font "Kanit-Italic.ttf"
font "Kanit-Light.ttf"
font "Kanit-LightItalic.ttf"
font "Kanit-Medium.ttf"
font "Kanit-MediumItalic.ttf"
font "Kanit-Regular.ttf"
font "Kanit-SemiBold.ttf"
font "Kanit-SemiBoldItalic.ttf"
font "Kanit-Thin.ttf"
font "Kanit-ThinItalic.ttf"
end
|
cask "font-karma" do
version "2.000"
sha256 "ebbe01be41c18aed6e538ea8d88eec65bb1bca046afc36b2fc6a84e808bda7e4"
url "https://github.com/itfoundry/karma/releases/download/v2.000/karma-2_000.zip"
appcast "https://github.com/itfoundry/karma/releases.atom"
name "Karma"
homepage "https://github.com/itfoundry/karma"
font "Karma-Bold.otf"
font "Karma-Light.otf"
font "Karma-Medium.otf"
font "Karma-Regular.otf"
font "Karma-SemiBold.otf"
end
font-karma: interpolate version (#3107)
cask "font-karma" do
version "2.000"
sha256 "ebbe01be41c18aed6e538ea8d88eec65bb1bca046afc36b2fc6a84e808bda7e4"
url "https://github.com/itfoundry/karma/releases/download/v#{version}/karma-#{version.dots_to_underscores}.zip"
appcast "https://github.com/itfoundry/karma/releases.atom"
name "Karma"
homepage "https://github.com/itfoundry/karma"
font "Karma-Bold.otf"
font "Karma-Light.otf"
font "Karma-Medium.otf"
font "Karma-Regular.otf"
font "Karma-SemiBold.otf"
end
|
cask 'font-lilex' do
version '1.000'
sha256 'c73b0241fedac898014cbfe9b8701593e8d6472f43adb6b8bc31a5c45d44c904'
url "https://github.com/mishamyrt/Lilex/releases/download/#{version}/Lilex-#{version}.zip"
appcast 'https://github.com/mishamyrt/Lilex/releases.atom'
name 'Lilex'
homepage 'https://github.com/mishamyrt/Lilex'
font 'otf/Lilex-Bold.otf'
font 'otf/Lilex-Light.otf'
font 'otf/Lilex-Medium.otf'
font 'otf/Lilex-Regular.otf'
end
font-lilex.rb: fix for new style
cask "font-lilex" do
version "1.000"
sha256 "c73b0241fedac898014cbfe9b8701593e8d6472f43adb6b8bc31a5c45d44c904"
url "https://github.com/mishamyrt/Lilex/releases/download/#{version}/Lilex-#{version}.zip"
appcast "https://github.com/mishamyrt/Lilex/releases.atom"
name "Lilex"
homepage "https://github.com/mishamyrt/Lilex"
font "otf/Lilex-Bold.otf"
font "otf/Lilex-Light.otf"
font "otf/Lilex-Medium.otf"
font "otf/Lilex-Regular.otf"
end
|
cask :v1 => 'free-ruler' do
version '1.7b5'
sha256 'f746e49ae012bee5ccef9871592276de47d3b6affbe92ab3f45f2591cb05b915'
url "http://www.pascal.com/software/freeruler/FreeRuler#{version}.zip"
homepage 'http://www.pascal.com/software/freeruler/'
license :unknown
app "Free Ruler #{version}.app"
end
license todo comment in free-ruler
cask :v1 => 'free-ruler' do
version '1.7b5'
sha256 'f746e49ae012bee5ccef9871592276de47d3b6affbe92ab3f45f2591cb05b915'
url "http://www.pascal.com/software/freeruler/FreeRuler#{version}.zip"
homepage 'http://www.pascal.com/software/freeruler/'
license :unknown # todo: improve this machine-generated value
app "Free Ruler #{version}.app"
end
|
cask 'free-telly' do
version '16.1'
sha256 '1cd2e5e9e2ea8132640884623cd172682297225cc1f501693711981fe1b2e276'
url do
# FreeTelly is hosted on androidfilehost.com which looks up mirrors to download from
require 'net/http'
require 'json'
# androidfilehost.com was verified as official when first introduced to the cask
uri = URI('https://www.androidfilehost.com/libs/otf/mirrors.otf.php')
req = Net::HTTP::Post.new(uri)
file_id = '24588232905720770'
req.set_form_data('submit' => 'submit', 'action' => 'getdownloadmirrors', 'fid' => file_id)
req['Referer'] = "https://www.androidfilehost.com/?fid=#{file_id}"
res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') { |http| http.request(req) }
JSON.parse(res.body)['MIRRORS'][0]['url']
end
name 'FreeTelly'
homepage 'https://www.tvaddons.ag/freetelly-mac-kodi/'
app 'FreeTelly.app'
zap delete: [
'~/Library/Logs/freetelly.log',
'~/Library/Logs/freetelly.old.log',
'~/Library/Application Support/FreeTelly',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/ag.tvaddons.freetelly.sfl',
'~/Library/Saved Application State/ag.tvaddons.FreeTelly.savedState',
]
end
Remove freetelly (#35933)
|
cask 'gog-galaxy' do
version '1.2.8.3'
sha256 'afb0d42a2dc540a6cb6d2572c10b0557ebd5b92f80ce8e03a13f26dcbd0b645d'
url "https://cdn.gog.com/open/galaxy/client/galaxy_client_#{version}.pkg"
name 'GOG Galaxy Client'
homepage 'https://www.gog.com/galaxy'
depends_on macos: '>= :mountain_lion'
pkg "galaxy_client_#{version}.pkg"
uninstall delete: '/Applications/GalaxyClient.app',
launchctl: [
'com.gog.galaxy.ClientService',
'com.gog.galaxy.commservice',
]
zap delete: [
'/Library/PrivilegedHelperTools/com.gog.galaxy.ClientService',
'/Users/Shared/GOG.com',
'~/Library/Preferences/com.gog.galaxy.cef.renderer.plist',
'~/Library/Saved Application State/com.gog.galaxy.savedState',
]
end
Update gog-galaxy to 1.2.9.4 (#35239)
cask 'gog-galaxy' do
version '1.2.9.4'
sha256 '4377f65a7be35821537d8dbad75344bd18838314b8b142004ec8264235590e07'
url "https://cdn.gog.com/open/galaxy/client/galaxy_client_#{version}.pkg"
name 'GOG Galaxy Client'
homepage 'https://www.gog.com/galaxy'
depends_on macos: '>= :mountain_lion'
pkg "galaxy_client_#{version}.pkg"
uninstall delete: '/Applications/GalaxyClient.app',
launchctl: [
'com.gog.galaxy.ClientService',
'com.gog.galaxy.commservice',
]
zap delete: [
'/Library/PrivilegedHelperTools/com.gog.galaxy.ClientService',
'/Users/Shared/GOG.com',
'~/Library/Preferences/com.gog.galaxy.cef.renderer.plist',
'~/Library/Saved Application State/com.gog.galaxy.savedState',
]
end
|
cask 'grandtotal' do
version '5.1.5'
sha256 'aeda8d79516e5fd6bdfb29b50e606ff05bf638cae404f142c207f57bb4420eef'
url "https://mediaatelier.com/GrandTotal#{version.major}/GrandTotal_#{version}.zip"
appcast "https://mediaatelier.com/GrandTotal#{version.major}/feed.php",
checkpoint: '6960c34c6da5c8d319ed744a3f16c11db989a2503fb24dc679573b33a88361af'
name 'GrandTotal'
homepage "https://www.mediaatelier.com/GrandTotal#{version.major}/"
depends_on macos: '>= :mountain_lion'
app 'GrandTotal.app'
end
Update grandtotal to 5.1.6
cask 'grandtotal' do
version '5.1.6'
sha256 '876884c0ea61177feda2333cdd0506b537b3309ed62e9f452ecfe93c952f8a7b'
url "https://mediaatelier.com/GrandTotal#{version.major}/GrandTotal_#{version}.zip"
appcast "https://mediaatelier.com/GrandTotal#{version.major}/feed.php",
checkpoint: '0b2fa9f3239db87404ea1e2b821f3213e232a3cc1e2ec5b8ea6089a1070b601c'
name 'GrandTotal'
homepage "https://www.mediaatelier.com/GrandTotal#{version.major}/"
depends_on macos: '>= :mavericks'
app 'GrandTotal.app'
end
|
cask 'houdahspot' do
version '4.2.2'
sha256 '609e002a403b6d561e883d7901cfc585d7845ac46419faa04bcce7590a53b0d3'
url "https://www.houdah.com/houdahSpot/updates/cast#{version.major}_assets/HoudahSpot#{version}.zip"
appcast "https://www.houdah.com/houdahSpot/updates/cast#{version.major}.xml",
checkpoint: '58dd82d6c9de805828c06f22655e4ea2b8bdebddc33174a70b9e6ddfd5284bbd'
name 'HoudahSpot'
homepage 'https://www.houdah.com/houdahSpot/'
license :commercial
app 'HoudahSpot.app'
postflight do
suppress_move_to_applications
end
end
updated houdahspot (4.2.3) (#23652)
cask 'houdahspot' do
version '4.2.3'
sha256 '84fd159f36012569c10ab2fb62a774b39ea32a2b4ae00634388441d28b82ce6e'
url "https://www.houdah.com/houdahSpot/updates/cast#{version.major}_assets/HoudahSpot#{version}.zip"
appcast "https://www.houdah.com/houdahSpot/updates/cast#{version.major}.xml",
checkpoint: '5c99b50c655ceef16c50d5056d4bd749a89f12beb701b6057732fdaf94fe2026'
name 'HoudahSpot'
homepage 'https://www.houdah.com/houdahSpot/'
license :commercial
app 'HoudahSpot.app'
postflight do
suppress_move_to_applications
end
end
|
class IntelHaxm < Cask
version '1.0.8'
sha256 'cee233cf1a0293d9e19b15c375f2c4cb7cf0c6948b7fd579bec28719e0b51d35'
url 'https://software.intel.com/sites/default/files/managed/68/45/haxm-macosx_r04.zip'
homepage 'https://software.intel.com/en-us/android/articles/intel-hardware-accelerated-execution-manager'
nested_container "haxm-macosx_r04/IntelHAXM_#{version}.dmg"
install "IntelHAXM_#{version}.mpkg"
uninstall :script => { :executable => '/System/Library/Extensions/intelhaxm.kext/Contents/Resources/uninstall.sh', :input => 'y' }
end
intel-haxm.rb
class IntelHaxm < Cask
version '1.0.8'
sha256 'cee233cf1a0293d9e19b15c375f2c4cb7cf0c6948b7fd579bec28719e0b51d35'
url 'https://software.intel.com/sites/default/files/managed/68/45/haxm-macosx_r04.zip'
homepage 'https://software.intel.com/en-us/android/articles/intel-hardware-accelerated-execution-manager'
nested_container "haxm-macosx_r04/IntelHAXM_#{version}.dmg"
pkg "IntelHAXM_#{version}.mpkg"
uninstall :script => { :executable => '/System/Library/Extensions/intelhaxm.kext/Contents/Resources/uninstall.sh', :input => 'y' }
end
|
cask 'lightproxy' do
version '1.1.7'
sha256 'dad8d44ee3b092f046a767676d0131df8d70ab930b5620237dbbe8c7ff102d8f'
# gw.alipayobjects.com/os/LightProxy was verified as official when first introduced to the cask
url 'https://gw.alipayobjects.com/os/LightProxy/LightProxy.dmg'
appcast 'https://github.com/alibaba/lightproxy/tree/develop/CHANGELOG'
name 'LightProxy'
homepage 'https://alibaba.github.io/lightproxy/'
app 'LightProxy.app'
end
Update lightproxy from 1.1.7 to 1.1.8 (#76714)
cask 'lightproxy' do
version '1.1.8'
sha256 '287cfc258692bf89acb450111b0c7f1ac63ccc66f82f9b13982cf6a1d29cd27c'
# gw.alipayobjects.com/os/LightProxy was verified as official when first introduced to the cask
url 'https://gw.alipayobjects.com/os/LightProxy/LightProxy.dmg'
appcast 'https://github.com/alibaba/lightproxy/tree/develop/CHANGELOG'
name 'LightProxy'
homepage 'https://alibaba.github.io/lightproxy/'
app 'LightProxy.app'
end
|
cask :v1 => 'mactracker' do
version '7.4'
sha256 '186a2c1e46188023e11619f9af24f4a2d41b5805989dee5790d78eedfe8fda27'
url "http://www.mactracker.ca/downloads/Mactracker_#{version}.zip"
appcast 'http://update.mactracker.ca/appcast-b.xml',
:sha256 => '4792d02bbece6924e8ddb068327d0814b733475b1f95dddf33bb3cc9ecf0810c'
homepage 'http://mactracker.ca/'
license :unknown
app 'Mactracker.app'
end
license todo comment in mactracker
cask :v1 => 'mactracker' do
version '7.4'
sha256 '186a2c1e46188023e11619f9af24f4a2d41b5805989dee5790d78eedfe8fda27'
url "http://www.mactracker.ca/downloads/Mactracker_#{version}.zip"
appcast 'http://update.mactracker.ca/appcast-b.xml',
:sha256 => '4792d02bbece6924e8ddb068327d0814b733475b1f95dddf33bb3cc9ecf0810c'
homepage 'http://mactracker.ca/'
license :unknown # todo: improve this machine-generated value
app 'Mactracker.app'
end
|
cask "macupdater" do
version "2.0.2,11385"
sha256 "8a53e29657634e19ac217f7442bc59a079f7503fe0832cddc411f6c9b4a9457f"
url "https://www.corecode.io/downloads/macupdater_#{version.before_comma}.dmg"
name "MacUpdater"
desc "Track and update to the latest versions of installed software"
homepage "https://www.corecode.io/macupdater/index.html"
livecheck do
url "https://www.corecode.io/macupdater/macupdater2.xml"
strategy :sparkle
end
auto_updates true
app "MacUpdater.app"
binary "#{appdir}/MacUpdater.app/Contents/Resources/macupdater_client"
uninstall quit: "com.corecode.MacUpdater",
launchctl: "com.corecode.MacUpdaterLaunchHelper"
zap trash: [
"~/Library/Application Scripts/com.corecode.MacUpdaterLaunchHelper",
"~/Library/Application Support/MacUpdater",
"~/Library/Application Support/MacUpdaterInstallHelper",
"~/Library/Caches/com.corecode.MacUpdater",
"~/Library/Containers/com.corecode.MacUpdaterLaunchHelper",
"~/Library/Cookies/com.corecode.MacUpdater.binarycookies",
"~/Library/Preferences/com.corecode.MacUpdater.plist",
]
end
Update macupdater from 2.0.2,11385 to 2.0.2,11385 (#103033)
cask "macupdater" do
version "2.0.2,11385"
sha256 "4005bdb3140d6387be8dedd6c28bab1d5ab4af0d2410c9aabff6fffad42f1e67"
url "https://www.corecode.io/downloads/macupdater_#{version.before_comma}.dmg"
name "MacUpdater"
desc "Track and update to the latest versions of installed software"
homepage "https://www.corecode.io/macupdater/index.html"
livecheck do
url "https://www.corecode.io/macupdater/macupdater2.xml"
strategy :sparkle
end
auto_updates true
app "MacUpdater.app"
binary "#{appdir}/MacUpdater.app/Contents/Resources/macupdater_client"
uninstall quit: "com.corecode.MacUpdater",
launchctl: "com.corecode.MacUpdaterLaunchHelper"
zap trash: [
"~/Library/Application Scripts/com.corecode.MacUpdaterLaunchHelper",
"~/Library/Application Support/MacUpdater",
"~/Library/Application Support/MacUpdaterInstallHelper",
"~/Library/Caches/com.corecode.MacUpdater",
"~/Library/Containers/com.corecode.MacUpdaterLaunchHelper",
"~/Library/Cookies/com.corecode.MacUpdater.binarycookies",
"~/Library/Preferences/com.corecode.MacUpdater.plist",
]
end
|
cask 'mailbutler' do
version '2,1523-11286'
sha256 'f44467d9e133a7a494a8363ff9f5da52fe9234f2b8516369189d1551b7595bc3'
url "https://downloads.mailbutler.io/sparkle/public/Mailbutler_#{version.after_comma}.zip"
appcast "https://www.mailbutler.io/appcast#{version.major}.php"
name 'MailButler'
homepage 'https://www.mailbutler.io/'
depends_on macos: '>= :sierra'
app 'MailButler.app'
end
Update mailbutler.rb (#63467)
- Renamed name: Mailbutler stylizes it's name as "Mailbutler"
- Remamed app: Mailbutler will quit with error message,
if the filename is not "Mailbutler.app" but "MailButler.app".
cask 'mailbutler' do
version '2,1523-11286'
sha256 'f44467d9e133a7a494a8363ff9f5da52fe9234f2b8516369189d1551b7595bc3'
url "https://downloads.mailbutler.io/sparkle/public/Mailbutler_#{version.after_comma}.zip"
appcast "https://www.mailbutler.io/appcast#{version.major}.php"
name 'Mailbutler'
homepage 'https://www.mailbutler.io/'
depends_on macos: '>= :sierra'
app 'Mailbutler.app'
end
|
cask 'mcbopomofo' do
version '0.9.6.11'
sha256 '1c19735b97cf8319dba71a2dce443c332bba137260eab8e57b10baf2e6157f0f'
url "https://app.openvanilla.org/file/mcbopomofo/McBopomofo-Installer-#{version}.zip"
name 'McBopomofo'
homepage 'https://mcbopomofo.openvanilla.org/'
installer manual: 'McBopomofoInstaller.app'
uninstall delete: '~/Library/Input Methods/McBopomofo.app'
end
Update mcbopomofo to 0.9.7 (#26862)
cask 'mcbopomofo' do
version '0.9.7'
sha256 '67781854868a4186e47eb431d0290eff7161f41cdfb7a200d1deb30c379bdcd2'
# github.com was verified as official when first introduced to the cask
url "https://github.com/openvanilla/McBopomofo/releases/download/#{version}/McBopomofo-Installer-#{version}.zip"
appcast 'https://github.com/openvanilla/McBopomofo/releases.atom',
checkpoint: 'd664ecb07c906c48fd9f09a28b29e76413768ddb20f897e5272b73cee27aeccb'
name 'McBopomofo'
homepage 'https://mcbopomofo.openvanilla.org/'
installer manual: 'McBopomofoInstaller.app'
uninstall delete: '~/Library/Input Methods/McBopomofo.app'
end
|
cask :v1 => 'menumeters' do
version :latest
sha256 :no_check
url 'http://www.ragingmenace.com/software/download/MenuMeters.dmg'
homepage 'http://www.ragingmenace.com/software/menumeters/'
license :unknown
prefpane 'MenuMeters Installer.app/Contents/Resources/MenuMeters.prefPane'
zap :delete => '~/Library/Preferences/com.ragingmenace.MenuMeters.plist'
end
license todo comment in menumeters
cask :v1 => 'menumeters' do
version :latest
sha256 :no_check
url 'http://www.ragingmenace.com/software/download/MenuMeters.dmg'
homepage 'http://www.ragingmenace.com/software/menumeters/'
license :unknown # todo: improve this machine-generated value
prefpane 'MenuMeters Installer.app/Contents/Resources/MenuMeters.prefPane'
zap :delete => '~/Library/Preferences/com.ragingmenace.MenuMeters.plist'
end
|
cask "menumeters" do
version "2.0.8"
sha256 "acb318ef826f1eb66ea5e9d22c5495c2d334157b946a50eb3b60c8f40bc560c8"
# github.com/yujitach/MenuMeters was verified as official when first introduced to the cask
url "https://github.com/yujitach/MenuMeters/releases/download/#{version}/MenuMeters_#{version}.zip"
appcast "https://github.com/yujitach/MenuMeters/releases.atom"
name "MenuMeters for El Capitan (and later)"
desc "Set of CPU, memory, disk, and network monitoring tools"
homepage "https://member.ipmu.jp/yuji.tachikawa/MenuMetersElCapitan/"
auto_updates true
depends_on macos: ">= :el_capitan"
app "MenuMeters.app"
uninstall quit: "com.yujitach.MenuMeters"
zap trash: [
"~/Library/Caches/com.yujitach.MenuMeters",
"~/Library/PreferencesPanes/MenuMeters.prefPane",
"~/Library/Preferences/com.ragingmenace.MenuMeters.plist",
"~/Library/Preferences/com.yujitach.MenuMeters.plist",
]
end
Update menumeters from 2.0.8 to 2.0.8.1 (#91778)
cask "menumeters" do
version "2.0.8.1"
sha256 "f1ef3195ae46bf3ba3a6b3d7a6f802742fa21bb2e726396b58dccfdda66b5cae"
# github.com/yujitach/MenuMeters was verified as official when first introduced to the cask
url "https://github.com/yujitach/MenuMeters/releases/download/#{version}/MenuMeters_#{version}.zip"
appcast "https://github.com/yujitach/MenuMeters/releases.atom"
name "MenuMeters for El Capitan (and later)"
desc "Set of CPU, memory, disk, and network monitoring tools"
homepage "https://member.ipmu.jp/yuji.tachikawa/MenuMetersElCapitan/"
auto_updates true
depends_on macos: ">= :el_capitan"
app "MenuMeters.app"
uninstall quit: "com.yujitach.MenuMeters"
zap trash: [
"~/Library/Caches/com.yujitach.MenuMeters",
"~/Library/PreferencesPanes/MenuMeters.prefPane",
"~/Library/Preferences/com.ragingmenace.MenuMeters.plist",
"~/Library/Preferences/com.yujitach.MenuMeters.plist",
]
end
|
cask "metasploit" do
version "6.2.4,20220622103151"
sha256 "22d02f6260a9aa6882b10018b9318b7788d6d725c4bcb98b1ffaf763f54d908c"
url "https://osx.metasploit.com/metasploit-framework-#{version.csv.first}%2B#{version.csv.second}-1rapid7-1.x86_64.pkg"
name "Metasploit Framework"
desc "Penetration testing framework"
homepage "https://www.metasploit.com/"
livecheck do
url "https://osx.metasploit.com/LATEST"
strategy :page_match do |page|
match = page.match(/metasploit-framework-(\d+(?:\.\d+)+)\+(\d+)-1rapid7-1\.x86_64\.pkg/i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
depends_on formula: "nmap"
pkg "metasploit-framework-#{version.csv.first}+#{version.csv.second}-1rapid7-1.x86_64.pkg"
binary "/opt/metasploit-framework/bin/msfbinscan"
binary "/opt/metasploit-framework/bin/msfconsole"
binary "/opt/metasploit-framework/bin/msfd"
binary "/opt/metasploit-framework/bin/msfdb"
binary "/opt/metasploit-framework/bin/msfelfscan"
binary "/opt/metasploit-framework/bin/msfmachscan"
binary "/opt/metasploit-framework/bin/msfpescan"
binary "/opt/metasploit-framework/bin/msfrop"
binary "/opt/metasploit-framework/bin/msfrpc"
binary "/opt/metasploit-framework/bin/msfrpcd"
binary "/opt/metasploit-framework/bin/msfvenom"
uninstall script: {
executable: "/opt/metasploit-framework/bin/msfremove",
input: ["y"],
sudo: true,
},
rmdir: "/opt/metasploit-framework"
zap trash: "~/.msf4"
end
metasploit 6.2.4,20220623102950
Update metasploit from 6.2.4,20220622103151 to 6.2.4,20220623102950
Closes #126392.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
cask "metasploit" do
version "6.2.4,20220623102950"
sha256 "635d9adb584712a0fe7e9adff8c3c45cdce4b75c46bf3fc846f090b4d599f12c"
url "https://osx.metasploit.com/metasploit-framework-#{version.csv.first}%2B#{version.csv.second}-1rapid7-1.x86_64.pkg"
name "Metasploit Framework"
desc "Penetration testing framework"
homepage "https://www.metasploit.com/"
livecheck do
url "https://osx.metasploit.com/LATEST"
strategy :page_match do |page|
match = page.match(/metasploit-framework-(\d+(?:\.\d+)+)\+(\d+)-1rapid7-1\.x86_64\.pkg/i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
depends_on formula: "nmap"
pkg "metasploit-framework-#{version.csv.first}+#{version.csv.second}-1rapid7-1.x86_64.pkg"
binary "/opt/metasploit-framework/bin/msfbinscan"
binary "/opt/metasploit-framework/bin/msfconsole"
binary "/opt/metasploit-framework/bin/msfd"
binary "/opt/metasploit-framework/bin/msfdb"
binary "/opt/metasploit-framework/bin/msfelfscan"
binary "/opt/metasploit-framework/bin/msfmachscan"
binary "/opt/metasploit-framework/bin/msfpescan"
binary "/opt/metasploit-framework/bin/msfrop"
binary "/opt/metasploit-framework/bin/msfrpc"
binary "/opt/metasploit-framework/bin/msfrpcd"
binary "/opt/metasploit-framework/bin/msfvenom"
uninstall script: {
executable: "/opt/metasploit-framework/bin/msfremove",
input: ["y"],
sudo: true,
},
rmdir: "/opt/metasploit-framework"
zap trash: "~/.msf4"
end
|
cask 'metasploit' do
version '4.16.31+20180117130324'
sha256 '981cbe8a8800632d53714c9994f5d07975f1cc49f241b346eb4bf78cc052bcdb'
url "https://osx.metasploit.com/metasploit-framework-#{version}-1rapid7-1.pkg"
appcast 'https://osx.metasploit.com/LATEST',
checkpoint: 'a579fa84fec77289aaf547d7d4ded17288ac698f8772d80b669e7ec2071f5047'
name 'Metasploit Framework'
homepage 'https://www.metasploit.com/'
gpg "#{url}.asc", key_id: '2007B954'
depends_on formula: 'nmap'
pkg "metasploit-framework-#{version.gsub('+', ' ')}-1rapid7-1.pkg"
binary '/opt/metasploit-framework/bin/metasploit-aggregator'
binary '/opt/metasploit-framework/bin/msfbinscan'
binary '/opt/metasploit-framework/bin/msfconsole'
binary '/opt/metasploit-framework/bin/msfd'
binary '/opt/metasploit-framework/bin/msfdb'
binary '/opt/metasploit-framework/bin/msfelfscan'
binary '/opt/metasploit-framework/bin/msfmachscan'
binary '/opt/metasploit-framework/bin/msfpescan'
binary '/opt/metasploit-framework/bin/msfrop'
binary '/opt/metasploit-framework/bin/msfrpc'
binary '/opt/metasploit-framework/bin/msfrpcd'
binary '/opt/metasploit-framework/bin/msfupdate'
binary '/opt/metasploit-framework/bin/msfvenom'
uninstall script: {
executable: '/opt/metasploit-framework/bin/msfremove',
input: ['y'],
sudo: true,
},
rmdir: '/opt/metasploit-framework'
zap trash: '~/.msf4'
end
Update metasploit to 4.16.33+20180124010438.git.2.6f2f0ed (#43133)
cask 'metasploit' do
version '4.16.33+20180124010438.git.2.6f2f0ed'
sha256 'aeb3772152a8235c8beedf4452f3b2a9cf055e31f8ff0faf7c8010982ba23eb0'
url "https://osx.metasploit.com/metasploit-framework-#{version}-1rapid7-1.pkg"
appcast 'https://osx.metasploit.com/LATEST',
checkpoint: '97120f971d61ab720bf7908fc09e20823fc3b1c02a1fa151eeb5f7cf5f966094'
name 'Metasploit Framework'
homepage 'https://www.metasploit.com/'
gpg "#{url}.asc", key_id: '2007B954'
depends_on formula: 'nmap'
pkg "metasploit-framework-#{version.gsub('+', ' ')}-1rapid7-1.pkg"
binary '/opt/metasploit-framework/bin/metasploit-aggregator'
binary '/opt/metasploit-framework/bin/msfbinscan'
binary '/opt/metasploit-framework/bin/msfconsole'
binary '/opt/metasploit-framework/bin/msfd'
binary '/opt/metasploit-framework/bin/msfdb'
binary '/opt/metasploit-framework/bin/msfelfscan'
binary '/opt/metasploit-framework/bin/msfmachscan'
binary '/opt/metasploit-framework/bin/msfpescan'
binary '/opt/metasploit-framework/bin/msfrop'
binary '/opt/metasploit-framework/bin/msfrpc'
binary '/opt/metasploit-framework/bin/msfrpcd'
binary '/opt/metasploit-framework/bin/msfupdate'
binary '/opt/metasploit-framework/bin/msfvenom'
uninstall script: {
executable: '/opt/metasploit-framework/bin/msfremove',
input: ['y'],
sudo: true,
},
rmdir: '/opt/metasploit-framework'
zap trash: '~/.msf4'
end
|
cask "metasploit" do
version "6.2.23,20221020102923"
sha256 "a54dcc7c6ea2cc31a2bf53706157469b89ee239ac2df9d0d7a2b3ee58562c5fe"
url "https://osx.metasploit.com/metasploit-framework-#{version.csv.first}%2B#{version.csv.second}-1rapid7-1.x86_64.pkg"
name "Metasploit Framework"
desc "Penetration testing framework"
homepage "https://www.metasploit.com/"
livecheck do
url "https://osx.metasploit.com/LATEST"
strategy :page_match do |page|
match = page.match(/metasploit-framework-(\d+(?:\.\d+)+)\+(\d+)-1rapid7-1\.x86_64\.pkg/i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
depends_on formula: "nmap"
pkg "metasploit-framework-#{version.csv.first}+#{version.csv.second}-1rapid7-1.x86_64.pkg"
binary "/opt/metasploit-framework/bin/msfbinscan"
binary "/opt/metasploit-framework/bin/msfconsole"
binary "/opt/metasploit-framework/bin/msfd"
binary "/opt/metasploit-framework/bin/msfdb"
binary "/opt/metasploit-framework/bin/msfelfscan"
binary "/opt/metasploit-framework/bin/msfmachscan"
binary "/opt/metasploit-framework/bin/msfpescan"
binary "/opt/metasploit-framework/bin/msfrop"
binary "/opt/metasploit-framework/bin/msfrpc"
binary "/opt/metasploit-framework/bin/msfrpcd"
binary "/opt/metasploit-framework/bin/msfvenom"
uninstall script: {
executable: "/opt/metasploit-framework/bin/msfremove",
input: ["y"],
sudo: true,
},
rmdir: "/opt/metasploit-framework"
zap trash: "~/.msf4"
end
metasploit 6.2.24,20221021102956
Update metasploit from 6.2.23 to 6.2.24
Closes #133979.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
cask "metasploit" do
version "6.2.24,20221021102956"
sha256 "334d3f2e8697d8c215bc036985848870ba1d085eeecaaa76b8d6fef8c6b8e273"
url "https://osx.metasploit.com/metasploit-framework-#{version.csv.first}%2B#{version.csv.second}-1rapid7-1.x86_64.pkg"
name "Metasploit Framework"
desc "Penetration testing framework"
homepage "https://www.metasploit.com/"
livecheck do
url "https://osx.metasploit.com/LATEST"
strategy :page_match do |page|
match = page.match(/metasploit-framework-(\d+(?:\.\d+)+)\+(\d+)-1rapid7-1\.x86_64\.pkg/i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
depends_on formula: "nmap"
pkg "metasploit-framework-#{version.csv.first}+#{version.csv.second}-1rapid7-1.x86_64.pkg"
binary "/opt/metasploit-framework/bin/msfbinscan"
binary "/opt/metasploit-framework/bin/msfconsole"
binary "/opt/metasploit-framework/bin/msfd"
binary "/opt/metasploit-framework/bin/msfdb"
binary "/opt/metasploit-framework/bin/msfelfscan"
binary "/opt/metasploit-framework/bin/msfmachscan"
binary "/opt/metasploit-framework/bin/msfpescan"
binary "/opt/metasploit-framework/bin/msfrop"
binary "/opt/metasploit-framework/bin/msfrpc"
binary "/opt/metasploit-framework/bin/msfrpcd"
binary "/opt/metasploit-framework/bin/msfvenom"
uninstall script: {
executable: "/opt/metasploit-framework/bin/msfremove",
input: ["y"],
sudo: true,
},
rmdir: "/opt/metasploit-framework"
zap trash: "~/.msf4"
end
|
cask "metasploit" do
version "6.2.24,20221021102956"
sha256 "334d3f2e8697d8c215bc036985848870ba1d085eeecaaa76b8d6fef8c6b8e273"
url "https://osx.metasploit.com/metasploit-framework-#{version.csv.first}%2B#{version.csv.second}-1rapid7-1.x86_64.pkg"
name "Metasploit Framework"
desc "Penetration testing framework"
homepage "https://www.metasploit.com/"
livecheck do
url "https://osx.metasploit.com/LATEST"
strategy :page_match do |page|
match = page.match(/metasploit-framework-(\d+(?:\.\d+)+)\+(\d+)-1rapid7-1\.x86_64\.pkg/i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
depends_on formula: "nmap"
pkg "metasploit-framework-#{version.csv.first}+#{version.csv.second}-1rapid7-1.x86_64.pkg"
binary "/opt/metasploit-framework/bin/msfbinscan"
binary "/opt/metasploit-framework/bin/msfconsole"
binary "/opt/metasploit-framework/bin/msfd"
binary "/opt/metasploit-framework/bin/msfdb"
binary "/opt/metasploit-framework/bin/msfelfscan"
binary "/opt/metasploit-framework/bin/msfmachscan"
binary "/opt/metasploit-framework/bin/msfpescan"
binary "/opt/metasploit-framework/bin/msfrop"
binary "/opt/metasploit-framework/bin/msfrpc"
binary "/opt/metasploit-framework/bin/msfrpcd"
binary "/opt/metasploit-framework/bin/msfvenom"
uninstall script: {
executable: "/opt/metasploit-framework/bin/msfremove",
input: ["y"],
sudo: true,
},
rmdir: "/opt/metasploit-framework"
zap trash: "~/.msf4"
end
metasploit 6.2.24,20221023102835
Update metasploit from 6.2.24,20221021102956 to 6.2.24,20221023102835
Closes #134109.
Signed-off-by: Bevan Kay <a88b7dcd1a9e3e17770bbaa6d7515b31a2d7e85d@bevankay.me>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
cask "metasploit" do
version "6.2.24,20221023102835"
sha256 "5043a643e8918befd8dcb79d1675c7623e76c6b33d956910476b61194350e406"
url "https://osx.metasploit.com/metasploit-framework-#{version.csv.first}%2B#{version.csv.second}-1rapid7-1.x86_64.pkg"
name "Metasploit Framework"
desc "Penetration testing framework"
homepage "https://www.metasploit.com/"
livecheck do
url "https://osx.metasploit.com/LATEST"
strategy :page_match do |page|
match = page.match(/metasploit-framework-(\d+(?:\.\d+)+)\+(\d+)-1rapid7-1\.x86_64\.pkg/i)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
depends_on formula: "nmap"
pkg "metasploit-framework-#{version.csv.first}+#{version.csv.second}-1rapid7-1.x86_64.pkg"
binary "/opt/metasploit-framework/bin/msfbinscan"
binary "/opt/metasploit-framework/bin/msfconsole"
binary "/opt/metasploit-framework/bin/msfd"
binary "/opt/metasploit-framework/bin/msfdb"
binary "/opt/metasploit-framework/bin/msfelfscan"
binary "/opt/metasploit-framework/bin/msfmachscan"
binary "/opt/metasploit-framework/bin/msfpescan"
binary "/opt/metasploit-framework/bin/msfrop"
binary "/opt/metasploit-framework/bin/msfrpc"
binary "/opt/metasploit-framework/bin/msfrpcd"
binary "/opt/metasploit-framework/bin/msfvenom"
uninstall script: {
executable: "/opt/metasploit-framework/bin/msfremove",
input: ["y"],
sudo: true,
},
rmdir: "/opt/metasploit-framework"
zap trash: "~/.msf4"
end
|
cask "modern-csv" do
version "1.3.3"
sha256 "69a6133cdcc4ef15b7d7c1ec089a0216ed3c1e41f1c6b9ea236bd5a5a52cad45"
url "https://www.moderncsv.com/release/ModernCSV-Mac-v#{version}.dmg"
appcast "https://www.moderncsv.com/latest-version/"
name "Modern CSV"
desc "CSV editor"
homepage "https://www.moderncsv.com/"
app "Modern CSV.app"
zap trash: "~/Library/Application Support/Modern CSV"
end
Update modern-csv from 1.3.3 to 1.3.4 (#91415)
cask "modern-csv" do
version "1.3.4"
sha256 "b6bb1c6550cea551f7fb0e4d3d65521a55577460522dfb2bc464958ceb354c1e"
url "https://www.moderncsv.com/release/ModernCSV-Mac-v#{version}.dmg"
appcast "https://www.moderncsv.com/latest-version/"
name "Modern CSV"
desc "CSV editor"
homepage "https://www.moderncsv.com/"
app "Modern CSV.app"
zap trash: "~/Library/Application Support/Modern CSV"
end
|
cask "moneydance" do
version "2021.1,3069"
sha256 "b45db81dd83c2d188131f25b352d0a8921baa23f6482cfaaae62e230e32373d8"
url "https://infinitekind.com/stabledl/#{version.before_comma}_#{version.after_comma}/Moneydance.zip"
name "Moneydance"
desc "Personal financial management application focused on privacy"
homepage "https://infinitekind.com/moneydance"
livecheck do
url "https://infinitekind.com/download-moneydance-personal-finance-software"
strategy :page_match do |page|
page.scan(/Moneydance\s*v?(\d+(?:\.\d+)*)\s*\((\d+)\)/i).map do |match|
"#{match[0]},#{match[1]}"
end
end
end
depends_on macos: ">= :el_capitan"
app "Moneydance.app"
end
Update moneydance from 2021.1,3069 to 2022,4056 (#112859)
cask "moneydance" do
version "2022,4056"
sha256 "bfe2711707ce42809e4c92f7458059c7aa793e6c788f8febd92aafe6022b227a"
url "https://infinitekind.com/stabledl/#{version.before_comma}_#{version.after_comma}/Moneydance.zip"
name "Moneydance"
desc "Personal financial management application focused on privacy"
homepage "https://infinitekind.com/moneydance"
livecheck do
url "https://infinitekind.com/download-moneydance-personal-finance-software"
strategy :page_match do |page|
page.scan(/Moneydance\s*v?(\d+(?:\.\d+)*)\s*\((\d+)\)/i).map do |match|
"#{match[0]},#{match[1]}"
end
end
end
depends_on macos: ">= :el_capitan"
app "Moneydance.app"
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.