CombinedText stringlengths 4 3.42M |
|---|
require 'terminal-table'

# Prints a formatted channel guide (one table per channel) to stdout.
desc 'Generate a Channel Guide'
namespace :network_executive do
  task :guide => :environment do
    time_format = '%H:%M'
    guide       = NetworkExecutive::Guide.new
    window_from = guide.start_time.strftime( '%B %-e, %Y between %H:%M' )
    window_to   = guide.stop_time.strftime( '%H:%M' )

    puts "\n\n"
    puts "Channel Guide for #{window_from} and #{window_to}"
    puts "\n"

    guide[:channels].each do |channel_entry|
      puts channel_entry[:channel].to_s.titleize

      # One row per program: "HH:MM - HH:MM | <display name>"
      schedule = Terminal::Table.new do |rows|
        channel_entry[:programs].each do |program|
          airs_at = program.occurrence.start_time.strftime( time_format )
          ends_at = program.occurrence.end_time.strftime( time_format )
          rows << [ "#{airs_at} - #{ends_at}", program.display_name ]
        end
      end

      puts schedule
      puts "\n"
    end
  end
end
Added start and stop arguments to Channel Guide generation task
require 'terminal-table'
require 'time' # Time.parse lives in the 'time' stdlib; don't rely on the
               # Rails environment having loaded it already.

desc 'Generate a Channel Guide (options: start=14:00 stop="Oct 10, 2013 14:30")'
namespace :network_executive do
  # Prints a formatted channel guide to stdout. The guide window can be
  # narrowed with the optional ENV vars `start` and `stop`; when absent they
  # are nil and Guide falls back to its own defaults.
  task :guide => :environment do
    format = '%H:%M'

    guide_start = Time.parse( ENV['start'] ) if ENV['start']
    guide_stop  = Time.parse( ENV['stop'] )  if ENV['stop']

    guide = NetworkExecutive::Guide.new guide_start, guide_stop

    start_time = guide.start_time.strftime( '%B %-e, %Y between %H:%M' )
    stop_time  = guide.stop_time.strftime( '%H:%M' )

    puts "\n\n"
    puts "Channel Guide for #{start_time} and #{stop_time}"
    puts "\n"

    guide[:channels].each do |ch|
      puts ch[:channel].to_s.titleize

      table = Terminal::Table.new do |t|
        ch[:programs].each do |prog|
          # NOTE: these locals were previously named `start`, shadowing the
          # parsed ENV value above; renamed to avoid the shadowing.
          prog_start = prog.occurrence.start_time.strftime( format )
          prog_end   = prog.occurrence.end_time.strftime( format )
          t << [ "#{prog_start} - #{prog_end}", prog.display_name ]
        end
      end

      puts table
      puts "\n"
    end
  end
end
Added a task to hitch VMTs (virtual machine templates) to Appliance Types
namespace :compute do
  desc "Hitch VMs to Appliance Types"
  # Synchronizes VM templates with each compute site's cloud images:
  # every image is created/updated as a template, and templates that no
  # longer correspond to any image are destroyed (if old enough).
  task hitch: :environment do
    ComputeSite.all.each do |site|
      # template_filters is stored as a JSON string; nil means "no filtering".
      filters = site.template_filters ? JSON.parse(site.template_filters) : nil
      images = site.cloud_client.images.all(filters)
      # Snapshot of the site's known templates; entries still present after
      # the loop had no matching image upstream.
      all_site_templates = site.virtual_machine_templates.to_a
      images.each do |image|
        # NOTE(review): assumes VmtUpdater#update returns the created/updated
        # template record — confirm against Cloud::VmtUpdater.
        updated_vmt = Cloud::VmtUpdater.new(site, image, all: true).update
        all_site_templates.delete(updated_vmt)
      end
      # Remove templates whose image has disappeared upstream.
      all_site_templates.each do |vmt|
        # NOTE(review): destroy(false) — presumably skips some callback or
        # safety check; confirm the semantics on VirtualMachineTemplate.
        vmt.destroy(false) if vmt.old?
      end
    end
  end
end
|
# rubocop:disable Style/FrozenStringLiteralComment
require "asciidoctor"
require "octokit"
require "time"
require "digest/sha1"
# Index translated ("l10n") documentation into the database.
# filter_tags: proc yielding [tag_name, commit_sha, tree_sha, timestamp];
# doc_list:    proc mapping a tree sha to [[path, blob_sha], ...];
# get_content: proc mapping a blob sha to its content.
def index_l10n_doc(filter_tags, doc_list, get_content)
  ActiveRecord::Base.logger.level = Logger::WARN
  rebuild = ENV["REBUILD_DOC"]
  rerun = ENV["RERUN"] || rebuild || false
  # Process oldest tags first so later bookkeeping reflects the newest one.
  filter_tags.call(rebuild, false).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }.each do |tag|
    name, commit_sha, tree_sha, ts = tag
    puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
    # l10n versions are stored under an "l10n"-prefixed name.
    stag = Version.where(name: name.gsub("v", "l10n")).first_or_create
    next if (stag.commit_sha == commit_sha) && !rerun
    stag.commit_sha = commit_sha
    stag.tree_sha = tree_sha
    stag.committed = ts
    stag.save
    tag_files = doc_list.call(tree_sha)
    # BUGFIX: language directories may contain '-' (e.g. zh_HANS-CN), so the
    # character class must include it or those translations are skipped.
    # (Matches the updated version of this script.)
    doc_files = tag_files.select { |ent| ent.first =~
      /^([-_\w]+)\/(
      (
      git.*
      )\.txt)/x
    }
    puts "Found #{doc_files.size} entries"
    doc_limit = ENV["ONLY_BUILD_DOC"]
    # Resolve an include target relative to the including file.
    # Returns [content-or-nil, resolved-name].
    get_content_f = Proc.new do |source, target|
      name = File.join(File.dirname(source), target)
      content_file = tag_files.detect { |ent| ent.first == name }
      if content_file
        new_content = get_content.call(content_file.second)
      else
        puts "Included file #{name} was not translated. Processing anyway\n"
      end
      [new_content, name]
    end
    # Recursively expand include:: directives in place; untranslated includes
    # are replaced with a visible [WARNING] block.
    def expand!(path, content, get_f_content , categories)
      content.gsub!(/include::(\S+)\.txt/) do |line|
        line.gsub!("include::", "")
        if categories[line]
          new_content = categories[line]
        else
          new_content, path = get_f_content.call(path, line)
        end
        if new_content
          expand!(path, new_content, get_f_content, categories)
        else
          "\n\n[WARNING]\n====\nMissing `#{path}`\n\nSee original version for this content.\n====\n\n"
        end
      end
      return content
    end
    doc_files.each do |entry|
      full_path, sha = entry
      lang = File.dirname(full_path)
      path = File.basename(full_path, ".txt")
      #next if doc_limit && path !~ /#{doc_limit}/
      file = DocFile.where(name: path).first_or_create
      puts " build: #{path} for #{lang}"
      content = get_content.call sha
      categories = {}
      expand!(full_path, content, get_content_f, categories)
      # Rewrite cross-page links to the site's /docs/<page>/<lang> layout.
      content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1/#{lang}\\2[\\3]")
      asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
      asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
      # Docs are content-addressed by the sha of their expanded source.
      doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
      if rerun || !doc.plain || !doc.html
        html = asciidoc.render
        # linkgit:cmd[n] -> link to that command's page in the same language.
        html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
          x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
          line = "<a href='/docs/#{x[1]}/#{lang}'>#{x[1]}[#{x[2]}]</a>"
        end
        # HTML anchor on hdlist1 (i.e. command options)
        html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
          text = $1.tr("^A-Za-z0-9-", "")
          anchor = "#{path}-#{text}"
          "<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
        end
        doc.plain = asciidoc.source
        doc.html = html
        doc.save
      end
      dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: lang).first_or_create
      dv.doc_id = doc.id
      dv.language = lang
      dv.save
    end
  end
end
# Index English documentation pages for each release tag into the database.
# filter_tags: proc yielding [tag_name, commit_sha, tree_sha, timestamp];
# doc_list:    proc mapping a tree sha to [[path, blob_sha], ...];
# get_content: proc mapping a blob sha to its content.
def index_doc(filter_tags, doc_list, get_content)
  ActiveRecord::Base.logger.level = Logger::WARN
  rebuild = ENV["REBUILD_DOC"]
  rerun = ENV["RERUN"] || rebuild || false
  # Process oldest tags first so "latest-version" below ends up correct.
  filter_tags.call(rebuild).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }.each do |tag|
    name, commit_sha, tree_sha, ts = tag
    puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
    stag = Version.where(name: name.gsub("v", "")).first
    # Skip versions that were already indexed, unless a re-run was requested.
    next if stag && !rerun
    stag = Version.where(name: name.gsub("v", "")).first_or_create
    stag.commit_sha = commit_sha
    stag.tree_sha = tree_sha
    stag.committed = ts
    stag.save
    tag_files = doc_list.call(tree_sha)
    # Only the man pages / guides matched below get indexed.
    doc_files = tag_files.select { |ent| ent.first =~
      /^Documentation\/(
      SubmittingPatches |
      MyFirstContribution.txt |
      (
      git.* |
      everyday |
      howto-index |
      user-manual |
      diff.* |
      fetch.* |
      merge.* |
      rev.* |
      pretty.* |
      pull.* |
      technical\/.*
      )\.txt)/x
    }
    puts "Found #{doc_files.size} entries"
    doc_limit = ENV["ONLY_BUILD_DOC"]
    # generate command-list content
    generated = {}
    cmd = tag_files.detect { |f| f.first =~ /command-list\.txt/ }
    if cmd
      # Parse command-list.txt into { kind => [[command, attribute], ...] }.
      cmd_list = get_content.call(cmd.second).match(/(### command list.*|# command name.*)/m)[0].split("\n").reject { |l| l =~ /^#/ }.inject({}) do |list, cmd|
        name, kind, attr = cmd.split(/\s+/)
        list[kind] ||= []
        list[kind] << [name, attr]
        list
      end
      # Pre-render cmds-<category>.txt include files as linkgit lists built
      # from each command's NAME section.
      generated = cmd_list.keys.inject({}) do |list, category|
        links = cmd_list[category].map do |cmd, attr|
          if cmd_file = tag_files.detect { |ent| ent.first == "Documentation/#{cmd}.txt" }
            if match = get_content.call(cmd_file.second).match(/NAME\n----\n\S+ - (.*)$/)
              "linkgit:#{cmd}[1]::\n\t#{attr == 'deprecated' ? '(deprecated) ' : ''}#{match[1]}\n"
            end
          end
        end
        list.merge!("Documentation/cmds-#{category}.txt" => links.compact.join("\n"))
      end
      # Pre-render the mergetools-{diff,merge}.txt include files by scanning
      # the mergetools/ helper scripts.
      tools = tag_files.select { |ent| ent.first =~/^mergetools\// }.map do |entry|
        path, sha = entry
        tool = File.basename path
        content = get_content.call sha
        merge = (content.include? "can_merge") ? "" : " * #{tool}\n"
        diff = (content.include? "can_diff") ? "" : " * #{tool}\n"
        [merge, diff]
      end
      can_merge, can_diff = tools.transpose.map { |strs| strs.join "" }
      generated["Documentation/mergetools-diff.txt"] = can_diff
      generated["Documentation/mergetools-merge.txt"] = can_merge
      # Fetch an include file's content by exact path; nil when absent.
      get_content_f = Proc.new do |name|
        content_file = tag_files.detect { |ent| ent.first == name }
        if content_file
          new_content = get_content.call (content_file.second)
        end
        new_content
      end
      # Recursively expand include:: directives, preferring pre-generated
      # content; returns the expanded text.
      def expand_content(content, path, get_f_content , generated)
        content.gsub(/include::(\S+)\.txt\[\]/) do |line|
          if File.dirname(path)=="."
            new_fname = "#{$1}.txt"
          else
            # Resolve the include relative to the including file's directory.
            new_fname = (Pathname.new(path).dirname + Pathname.new("#{$1}.txt")).cleanpath.to_s
          end
          if generated[new_fname]
            new_content = generated[new_fname]
          else
            new_content = get_f_content.call(new_fname)
            if new_content
              expand_content(new_content.force_encoding("UTF-8"), new_fname, get_f_content, generated)
            else
              puts "#{new_fname} could not be resolved for expansion"
            end
          end
        end
      end
      doc_files.each do |entry|
        path, sha = entry
        docname = File.basename(path, ".txt")
        # ONLY_BUILD_DOC restricts the build to matching paths.
        next if doc_limit && path !~ /#{doc_limit}/
        file = DocFile.where(name: docname).first_or_create
        puts " build: #{docname}"
        content = expand_content((get_content.call sha).force_encoding("UTF-8"), path, get_content_f, generated)
        # Rewrite cross-page links to the site's /docs/<page> layout.
        content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1\\2[\\3]")
        asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
        asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
        # Docs are content-addressed by the sha of their expanded source.
        doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
        if rerun || !doc.plain || !doc.html
          html = asciidoc.render
          # linkgit:cmd[n] -> link to that command's docs page.
          html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
            x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
            line = "<a href='/docs/#{x[1]}'>#{x[1]}[#{x[2]}]</a>"
          end
          #HTML anchor on hdlist1 (i.e. command options)
          html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
            text = $1.tr("^A-Za-z0-9-", "")
            anchor = "#{path}-#{text}"
            "<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
          end
          doc.plain = asciidoc.source
          doc.html = html
          doc.save
        end
        dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: "en").first_or_create
        dv.doc_id = doc.id
        dv.language = "en"
        dv.save
      end
    end
    Rails.cache.write("latest-version", Version.latest_version.name)
  end
end
# Index documentation straight from GitHub via the Octokit API.
# index_fun: :index_doc or :index_l10n_doc; repo: "owner/name"
# (overridable through ENV["GIT_REPO"]).
def github_index_doc(index_fun, repo)
  Octokit.auto_paginate = true
  # Prefer a personal access token; fall back to basic-auth credentials.
  if ENV["GITHUB_API_TOKEN"]
    @octokit = Octokit::Client.new(access_token: ENV["GITHUB_API_TOKEN"])
  else
    @octokit = Octokit::Client.new(login: ENV["API_USER"], password: ENV["API_PASS"])
  end
  repo = ENV["GIT_REPO"] || repo
  # Memoizing blob fetcher: each blob is downloaded once, keyed by sha.
  blob_content = Hash.new do |blobs, sha|
    content = Base64.decode64(@octokit.blob(repo, sha, encoding: "base64").content)
    blobs[sha] = content.force_encoding("UTF-8")
  end
  tag_filter = -> (tagname, gettags = true) do
    # find all tags
    if gettags
      tags = @octokit.tags(repo).select { |tag| !tag.nil? && tag.name =~ /v\d([\.\d])+$/ } # just get release tags
      if tagname
        tags = tags.select { |t| t.name == tagname }
      end
    else
      # gettags == false: index the tip of master instead of release tags.
      tags=[Struct.new(:name).new("heads/master")]
    end
    tags.collect do |tag|
      # extract metadata
      commit_info = @octokit.commit(repo, tag.name)
      commit_sha = commit_info.sha
      tree_sha = commit_info.commit.tree.sha
      # ts = Time.parse( commit_info.commit.committer.date )
      ts = commit_info.commit.committer.date
      [tag.name, commit_sha, tree_sha, ts]
    end
  end
  get_content = -> (sha) do blob_content[sha] end
  get_file_list = -> (tree_sha) do
    tree_info = @octokit.tree(repo, tree_sha, recursive: true)
    tree_info.tree.collect { |ent| [ent.path, ent.sha] }
  end
  send(index_fun, tag_filter, get_file_list, get_content)
end
# Index documentation from a local git checkout (ENV["GIT_REPO"]) by
# shelling out to git, mirroring github_index_doc's interface.
def local_index_doc(index_fun)
  dir = ENV["GIT_REPO"]
  Dir.chdir(dir) do
    tag_filter = -> (tagname, gettags = true) do
      if gettags
        # find all tags
        tags = `git tag | egrep 'v1|v2'`.strip.split("\n")
        tags = tags.select { |tag| tag =~ /v\d([\.\d])+$/ } # just get release tags
        if tagname
          tags = tags.select { |t| t == tagname }
        end
      else
        # gettags == false: index master instead of release tags.
        tags=["master"]
      end
      tags.collect do |tag|
        # extract metadata
        commit_sha = `git rev-parse #{tag}`.chomp
        tree_sha = `git rev-parse #{tag}^{tree}`.chomp
        # "committer Name <mail> <epoch> <tz>" -> pop tz, then the epoch.
        tagger = `git cat-file commit #{tag} | grep committer`.chomp.split(" ")
        tz = tagger.pop
        ts = tagger.pop
        ts = Time.at(ts.to_i)
        [tag, commit_sha, tree_sha, ts]
      end
    end
    get_content = -> (sha) do `git cat-file blob #{sha}` end
    get_file_list = -> (tree_sha) do
      entries = `git ls-tree -r #{tree_sha}`.strip.split("\n")
      tree = entries. map do |e|
        mode, type, sha, path = e.split(" ")
        [path, sha]
      end
    end
    send(index_fun, tag_filter, get_file_list, get_content)
  end
end
# Index English docs from a local clone (ENV["GIT_REPO"]).
task local_index: :environment do
  local_index_doc(:index_doc)
end
# Index translated docs from a local clone (ENV["GIT_REPO"]).
task local_index_l10n: :environment do
  local_index_doc(:index_l10n_doc)
end
# Index English docs from GitHub.
task preindex: :environment do
  github_index_doc(:index_doc, "gitster/git")
end
# Index translated docs from GitHub.
task preindex_l10n: :environment do
  github_index_doc(:index_l10n_doc, "jnavila/git-html-l10n")
end
manpage-l10n: fix the import script to handle zh_HANS-CN.
The language-directory regex must include '-' so the zh_HANS pages are imported.
# rubocop:disable Style/FrozenStringLiteralComment
require "asciidoctor"
require "octokit"
require "time"
require "digest/sha1"
# Index translated ("l10n") documentation into the database.
# filter_tags: proc yielding [tag_name, commit_sha, tree_sha, timestamp];
# doc_list:    proc mapping a tree sha to [[path, blob_sha], ...];
# get_content: proc mapping a blob sha to its content.
def index_l10n_doc(filter_tags, doc_list, get_content)
  ActiveRecord::Base.logger.level = Logger::WARN
  rebuild = ENV["REBUILD_DOC"]
  rerun = ENV["RERUN"] || rebuild || false
  # Process oldest tags first so later bookkeeping reflects the newest one.
  filter_tags.call(rebuild, false).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }.each do |tag|
    name, commit_sha, tree_sha, ts = tag
    puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
    # l10n versions are stored under an "l10n"-prefixed name.
    stag = Version.where(name: name.gsub("v", "l10n")).first_or_create
    next if (stag.commit_sha == commit_sha) && !rerun
    stag.commit_sha = commit_sha
    stag.tree_sha = tree_sha
    stag.committed = ts
    stag.save
    tag_files = doc_list.call(tree_sha)
    # Translated pages live in per-language directories: <lang>/git*.txt.
    # The '-' in the class is required for dirs like zh_HANS-CN.
    doc_files = tag_files.select { |ent| ent.first =~
      /^([-_\w]+)\/(
      (
      git.*
      )\.txt)/x
    }
    puts "Found #{doc_files.size} entries"
    doc_limit = ENV["ONLY_BUILD_DOC"]
    # Resolve an include target relative to the including file.
    # Returns [content-or-nil, resolved-name].
    get_content_f = Proc.new do |source, target|
      name = File.join(File.dirname(source), target)
      content_file = tag_files.detect { |ent| ent.first == name }
      if content_file
        new_content = get_content.call (content_file.second)
      else
        puts "Included file #{name} was not translated. Processing anyway\n"
      end
      [new_content, name]
    end
    # Recursively expand include:: directives in place; untranslated includes
    # are replaced with a visible [WARNING] block.
    def expand!(path, content, get_f_content , categories)
      content.gsub!(/include::(\S+)\.txt/) do |line|
        line.gsub!("include::", "")
        if categories[line]
          new_content = categories[line]
        else
          new_content, path = get_f_content.call(path, line)
        end
        if new_content
          expand!(path, new_content, get_f_content, categories)
        else
          "\n\n[WARNING]\n====\nMissing `#{path}`\n\nSee original version for this content.\n====\n\n"
        end
      end
      return content
    end
    doc_files.each do |entry|
      full_path, sha = entry
      lang = File.dirname(full_path)
      path = File.basename(full_path, ".txt")
      #next if doc_limit && path !~ /#{doc_limit}/
      file = DocFile.where(name: path).first_or_create
      puts " build: #{path} for #{lang}"
      content = get_content.call sha
      categories = {}
      expand!(full_path, content, get_content_f, categories)
      # Rewrite cross-page links to the site's /docs/<page>/<lang> layout.
      content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1/#{lang}\\2[\\3]")
      asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
      asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
      # Docs are content-addressed by the sha of their expanded source.
      doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
      if rerun || !doc.plain || !doc.html
        html = asciidoc.render
        # linkgit:cmd[n] -> link to that command's page in the same language.
        html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
          x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
          line = "<a href='/docs/#{x[1]}/#{lang}'>#{x[1]}[#{x[2]}]</a>"
        end
        #HTML anchor on hdlist1 (i.e. command options)
        html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
          text = $1.tr("^A-Za-z0-9-", "")
          anchor = "#{path}-#{text}"
          "<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
        end
        doc.plain = asciidoc.source
        doc.html = html
        doc.save
      end
      dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: lang).first_or_create
      dv.doc_id = doc.id
      dv.language = lang
      dv.save
    end
  end
end
# Index English documentation pages for each release tag into the database.
# filter_tags: proc yielding [tag_name, commit_sha, tree_sha, timestamp];
# doc_list:    proc mapping a tree sha to [[path, blob_sha], ...];
# get_content: proc mapping a blob sha to its content.
def index_doc(filter_tags, doc_list, get_content)
  ActiveRecord::Base.logger.level = Logger::WARN
  rebuild = ENV["REBUILD_DOC"]
  rerun = ENV["RERUN"] || rebuild || false
  # Process oldest tags first so "latest-version" below ends up correct.
  filter_tags.call(rebuild).sort_by { |tag| Version.version_to_num(tag.first[1..-1]) }.each do |tag|
    name, commit_sha, tree_sha, ts = tag
    puts "#{name}: #{ts}, #{commit_sha[0, 8]}, #{tree_sha[0, 8]}"
    stag = Version.where(name: name.gsub("v", "")).first
    # Skip versions that were already indexed, unless a re-run was requested.
    next if stag && !rerun
    stag = Version.where(name: name.gsub("v", "")).first_or_create
    stag.commit_sha = commit_sha
    stag.tree_sha = tree_sha
    stag.committed = ts
    stag.save
    tag_files = doc_list.call(tree_sha)
    # Only the man pages / guides matched below get indexed.
    doc_files = tag_files.select { |ent| ent.first =~
      /^Documentation\/(
      SubmittingPatches |
      MyFirstContribution.txt |
      (
      git.* |
      everyday |
      howto-index |
      user-manual |
      diff.* |
      fetch.* |
      merge.* |
      rev.* |
      pretty.* |
      pull.* |
      technical\/.*
      )\.txt)/x
    }
    puts "Found #{doc_files.size} entries"
    doc_limit = ENV["ONLY_BUILD_DOC"]
    # generate command-list content
    generated = {}
    cmd = tag_files.detect { |f| f.first =~ /command-list\.txt/ }
    if cmd
      # Parse command-list.txt into { kind => [[command, attribute], ...] }.
      cmd_list = get_content.call(cmd.second).match(/(### command list.*|# command name.*)/m)[0].split("\n").reject { |l| l =~ /^#/ }.inject({}) do |list, cmd|
        name, kind, attr = cmd.split(/\s+/)
        list[kind] ||= []
        list[kind] << [name, attr]
        list
      end
      # Pre-render cmds-<category>.txt include files as linkgit lists built
      # from each command's NAME section.
      generated = cmd_list.keys.inject({}) do |list, category|
        links = cmd_list[category].map do |cmd, attr|
          if cmd_file = tag_files.detect { |ent| ent.first == "Documentation/#{cmd}.txt" }
            if match = get_content.call(cmd_file.second).match(/NAME\n----\n\S+ - (.*)$/)
              "linkgit:#{cmd}[1]::\n\t#{attr == 'deprecated' ? '(deprecated) ' : ''}#{match[1]}\n"
            end
          end
        end
        list.merge!("Documentation/cmds-#{category}.txt" => links.compact.join("\n"))
      end
      # Pre-render the mergetools-{diff,merge}.txt include files by scanning
      # the mergetools/ helper scripts.
      tools = tag_files.select { |ent| ent.first =~/^mergetools\// }.map do |entry|
        path, sha = entry
        tool = File.basename path
        content = get_content.call sha
        merge = (content.include? "can_merge") ? "" : " * #{tool}\n"
        diff = (content.include? "can_diff") ? "" : " * #{tool}\n"
        [merge, diff]
      end
      can_merge, can_diff = tools.transpose.map { |strs| strs.join "" }
      generated["Documentation/mergetools-diff.txt"] = can_diff
      generated["Documentation/mergetools-merge.txt"] = can_merge
      # Fetch an include file's content by exact path; nil when absent.
      get_content_f = Proc.new do |name|
        content_file = tag_files.detect { |ent| ent.first == name }
        if content_file
          new_content = get_content.call (content_file.second)
        end
        new_content
      end
      # Recursively expand include:: directives, preferring pre-generated
      # content; returns the expanded text.
      def expand_content(content, path, get_f_content , generated)
        content.gsub(/include::(\S+)\.txt\[\]/) do |line|
          if File.dirname(path)=="."
            new_fname = "#{$1}.txt"
          else
            # Resolve the include relative to the including file's directory.
            new_fname = (Pathname.new(path).dirname + Pathname.new("#{$1}.txt")).cleanpath.to_s
          end
          if generated[new_fname]
            new_content = generated[new_fname]
          else
            new_content = get_f_content.call(new_fname)
            if new_content
              expand_content(new_content.force_encoding("UTF-8"), new_fname, get_f_content, generated)
            else
              puts "#{new_fname} could not be resolved for expansion"
            end
          end
        end
      end
      doc_files.each do |entry|
        path, sha = entry
        docname = File.basename(path, ".txt")
        # ONLY_BUILD_DOC restricts the build to matching paths.
        next if doc_limit && path !~ /#{doc_limit}/
        file = DocFile.where(name: docname).first_or_create
        puts " build: #{docname}"
        content = expand_content((get_content.call sha).force_encoding("UTF-8"), path, get_content_f, generated)
        # Rewrite cross-page links to the site's /docs/<page> layout.
        content.gsub!(/link:(?:technical\/)?(\S*?)\.html(\#\S*?)?\[(.*?)\]/m, "link:/docs/\\1\\2[\\3]")
        asciidoc = Asciidoctor::Document.new(content, attributes: {"sectanchors" => ""}, doctype: "book")
        asciidoc_sha = Digest::SHA1.hexdigest(asciidoc.source)
        # Docs are content-addressed by the sha of their expanded source.
        doc = Doc.where(blob_sha: asciidoc_sha).first_or_create
        if rerun || !doc.plain || !doc.html
          html = asciidoc.render
          # linkgit:cmd[n] -> link to that command's docs page.
          html.gsub!(/linkgit:(\S+)\[(\d+)\]/) do |line|
            x = /^linkgit:(\S+)\[(\d+)\]/.match(line)
            line = "<a href='/docs/#{x[1]}'>#{x[1]}[#{x[2]}]</a>"
          end
          #HTML anchor on hdlist1 (i.e. command options)
          html.gsub!(/<dt class="hdlist1">(.*?)<\/dt>/) do |m|
            text = $1.tr("^A-Za-z0-9-", "")
            anchor = "#{path}-#{text}"
            "<dt class=\"hdlist1\" id=\"#{anchor}\"> <a class=\"anchor\" href=\"##{anchor}\"></a>#{$1} </dt>"
          end
          doc.plain = asciidoc.source
          doc.html = html
          doc.save
        end
        dv = DocVersion.where(version_id: stag.id, doc_file_id: file.id, language: "en").first_or_create
        dv.doc_id = doc.id
        dv.language = "en"
        dv.save
      end
    end
    Rails.cache.write("latest-version", Version.latest_version.name)
  end
end
# Index documentation straight from GitHub via the Octokit API.
# index_fun: :index_doc or :index_l10n_doc; repo: "owner/name"
# (overridable through ENV["GIT_REPO"]).
def github_index_doc(index_fun, repo)
  Octokit.auto_paginate = true
  # Prefer a personal access token; fall back to basic-auth credentials.
  if ENV["GITHUB_API_TOKEN"]
    @octokit = Octokit::Client.new(access_token: ENV["GITHUB_API_TOKEN"])
  else
    @octokit = Octokit::Client.new(login: ENV["API_USER"], password: ENV["API_PASS"])
  end
  repo = ENV["GIT_REPO"] || repo
  # Memoizing blob fetcher: each blob is downloaded once, keyed by sha.
  blob_content = Hash.new do |blobs, sha|
    content = Base64.decode64(@octokit.blob(repo, sha, encoding: "base64").content)
    blobs[sha] = content.force_encoding("UTF-8")
  end
  tag_filter = -> (tagname, gettags = true) do
    # find all tags
    if gettags
      tags = @octokit.tags(repo).select { |tag| !tag.nil? && tag.name =~ /v\d([\.\d])+$/ } # just get release tags
      if tagname
        tags = tags.select { |t| t.name == tagname }
      end
    else
      # gettags == false: index the tip of master instead of release tags.
      tags=[Struct.new(:name).new("heads/master")]
    end
    tags.collect do |tag|
      # extract metadata
      commit_info = @octokit.commit(repo, tag.name)
      commit_sha = commit_info.sha
      tree_sha = commit_info.commit.tree.sha
      # ts = Time.parse( commit_info.commit.committer.date )
      ts = commit_info.commit.committer.date
      [tag.name, commit_sha, tree_sha, ts]
    end
  end
  get_content = -> (sha) do blob_content[sha] end
  get_file_list = -> (tree_sha) do
    tree_info = @octokit.tree(repo, tree_sha, recursive: true)
    tree_info.tree.collect { |ent| [ent.path, ent.sha] }
  end
  send(index_fun, tag_filter, get_file_list, get_content)
end
# Index documentation from a local git checkout (ENV["GIT_REPO"]) by
# shelling out to git, mirroring github_index_doc's interface.
def local_index_doc(index_fun)
  dir = ENV["GIT_REPO"]
  Dir.chdir(dir) do
    tag_filter = -> (tagname, gettags = true) do
      if gettags
        # find all tags
        tags = `git tag | egrep 'v1|v2'`.strip.split("\n")
        tags = tags.select { |tag| tag =~ /v\d([\.\d])+$/ } # just get release tags
        if tagname
          tags = tags.select { |t| t == tagname }
        end
      else
        # gettags == false: index master instead of release tags.
        tags=["master"]
      end
      tags.collect do |tag|
        # extract metadata
        commit_sha = `git rev-parse #{tag}`.chomp
        tree_sha = `git rev-parse #{tag}^{tree}`.chomp
        # "committer Name <mail> <epoch> <tz>" -> pop tz, then the epoch.
        tagger = `git cat-file commit #{tag} | grep committer`.chomp.split(" ")
        tz = tagger.pop
        ts = tagger.pop
        ts = Time.at(ts.to_i)
        [tag, commit_sha, tree_sha, ts]
      end
    end
    get_content = -> (sha) do `git cat-file blob #{sha}` end
    get_file_list = -> (tree_sha) do
      entries = `git ls-tree -r #{tree_sha}`.strip.split("\n")
      tree = entries. map do |e|
        mode, type, sha, path = e.split(" ")
        [path, sha]
      end
    end
    send(index_fun, tag_filter, get_file_list, get_content)
  end
end
# Index English docs from a local clone (ENV["GIT_REPO"]).
task local_index: :environment do
  local_index_doc(:index_doc)
end
# Index translated docs from a local clone (ENV["GIT_REPO"]).
task local_index_l10n: :environment do
  local_index_doc(:index_l10n_doc)
end
# Index English docs from GitHub.
task preindex: :environment do
  github_index_doc(:index_doc, "gitster/git")
end
# Index translated docs from GitHub.
task preindex_l10n: :environment do
  github_index_doc(:index_l10n_doc, "jnavila/git-html-l10n")
end
|
require "uri"
require "net/http"
# Fetches each unique link and records non-200 responses.
# Returns the (mutated) `broken` array; each entry is a hash with
# :link, :resp (status code string), :file and, for 3xx, :redirect.
def check_links(links_to_check, broken, file)
  links_to_check.uniq.each { |link|
    begin
      puts "Checking link: #{link}" # log BEFORE the request so slow/failing URLs are attributable
      uri = URI.parse(link)
      http = Net::HTTP.new(uri.host, uri.port)
      # start_with? avoids false positives on URLs that merely contain
      # "https" somewhere later in the string.
      if link.start_with?("https")
        http.use_ssl = true
        http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      end
      response = http.get(uri.request_uri)
      unless response.is_a?(Net::HTTPOK)
        new_hash = { :link => link, :resp => response.code, :file => file }
        new_hash[:redirect] = response.header['location'] if response.code[0] == "3"
        broken.push(new_hash)
      end
    rescue StandardError
      # Some regex matches are not real URLs; ignore them rather than abort.
      # (Was `rescue Exception`, which also swallowed SignalException /
      # SystemExit — never rescue Exception.)
    end
  }
  broken
end
# Prepends the GOV.UK origin to relative paths; absolute http(s) URLs are
# returned untouched. Uses start_with? (not include?) so a relative path
# that merely contains "http" (e.g. "/guide-to-http-codes") is still prefixed.
def prefix_link(link)
  link = "https://www.gov.uk#{link}" unless link.start_with?("http")
  link
end
# Extracts markdown link targets from a locale file's contents.
# BUGFIX: the markdown title in [text](href "title") is OPTIONAL — the old
# regex required it and silently skipped title-less links. Match any
# [text](…) and strip a trailing title from the href when present.
def check_locales_file(contents)
  links_to_check = []
  contents.gsub(/\[(.+)\]\((.+)\)/) { |match|
    link = prefix_link($2.gsub(/ "(.+)"$/, ''))
    links_to_check << link
  }
  links_to_check
end
# Collects link targets of the form `: /path` (one per line) from a data
# YAML file, prefixing each with the GOV.UK origin.
def check_data_file(contents)
  contents.scan(/: (\/.+)$/).map { |(target)| prefix_link(target) }
end
namespace :links do
  desc 'Checks all URLs within Smart Answers for errors.'
  # Usage: `rake links:check[/relative/path.yml]` checks one locale file;
  # with no argument every locale and data YAML under lib/ is checked.
  # All broken links go to log/broken_links.log, plus per-class logs for
  # 3xx / 4xx / 5xx responses.
  task :check, :file do |t, args|
    broken = []
    pwd = Dir.pwd
    # check a single file the user has passed in
    if args.file
      file = args.file
      path = File.expand_path("#{pwd}#{file}")
      puts "Checking #{file}"
      links_to_check = check_locales_file(IO.read(file))
      broken = check_links(links_to_check, broken, file)
    else
      base_path = File.expand_path("#{pwd}/lib")
      Dir.glob("#{base_path}/flows/locales/**/*.yml") { |file|
        puts "Checking #{file}"
        links_to_check = check_locales_file(IO.read(file))
        broken = check_links(links_to_check, broken, file)
      }
      Dir.glob("#{base_path}/data/*.yml") { |file|
        puts "Checking #{file}"
        links_to_check = check_data_file(IO.read(file))
        broken = check_links(links_to_check, broken, file)
      }
    end
    File.open("log/broken_links.log", "w") { |file|
      file.puts broken
    }
    # Bucket results by the first digit of the response code.
    fives = broken.select { |item| item[:resp][0] == "5" }
    four_oh_fours = broken.select { |item| item[:resp][0] == "4" }
    three_oh_threes = broken.select { |item| item[:resp][0] == "3" }
    File.open("log/300_links.log", "w") { |file|
      file.puts three_oh_threes
    }
    File.open("log/404_links.log", "w") { |file|
      file.puts four_oh_fours
    }
    File.open("log/500_links.log", "w") { |file|
      file.puts fives
    }
    if three_oh_threes.length > 0
      puts "Warning: Found links that give a 3XX response. Look in log/300_links.log"
    else
      puts "No 3XX links found"
    end
    if four_oh_fours.length > 0
      puts "Warning: Found 404s. Look in log/404_links.log"
    else
      puts "No 404s found"
    end
    if fives.length > 0
      puts "Warning: Found links that give a 5XX response. Look in log/500_links.log"
    else
      puts "No 5XX links found"
    end
  end
end
Extending the links rake task to handle optional titles
require "uri"
require "net/http"
# Fetches each unique link and records non-200 responses.
# Returns the (mutated) `broken` array; each entry is a hash with
# :link, :resp (status code string), :file and, for 3xx, :redirect.
def check_links(links_to_check, broken, file)
  links_to_check.uniq.each { |link|
    begin
      puts "Checking link: #{link}" # log BEFORE the request so slow/failing URLs are attributable
      uri = URI.parse(link)
      http = Net::HTTP.new(uri.host, uri.port)
      # start_with? avoids false positives on URLs that merely contain
      # "https" somewhere later in the string.
      if link.start_with?("https")
        http.use_ssl = true
        http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      end
      response = http.get(uri.request_uri)
      unless response.is_a?(Net::HTTPOK)
        new_hash = { :link => link, :resp => response.code, :file => file }
        new_hash[:redirect] = response.header['location'] if response.code[0] == "3"
        broken.push(new_hash)
      end
    rescue StandardError
      # Some regex matches are not real URLs; ignore them rather than abort.
      # (Was `rescue Exception`, which also swallowed SignalException /
      # SystemExit — never rescue Exception.)
    end
  }
  broken
end
# Prepends the GOV.UK origin to relative paths; absolute http(s) URLs are
# returned untouched. Uses start_with? (not include?) so a relative path
# that merely contains "http" (e.g. "/guide-to-http-codes") is still prefixed.
def prefix_link(link)
  link = "https://www.gov.uk#{link}" unless link.start_with?("http")
  link
end
# Extracts markdown link targets from a locale file's contents.
# The optional title in [text](href "title") is stripped from the href.
def check_locales_file(contents)
  [].tap do |links|
    contents.gsub(/\[(.+)\]\((.+)\)/) do
      links << prefix_link(Regexp.last_match(2).gsub(/ "(.+)"$/, ''))
    end
  end
end
# Collects link targets of the form `: /path` (one per line) from a data
# YAML file, prefixing each with the GOV.UK origin.
def check_data_file(contents)
  contents.scan(/: (\/.+)$/).map { |(target)| prefix_link(target) }
end
namespace :links do
  desc 'Checks all URLs within Smart Answers for errors.'
  # Usage: `rake links:check[/relative/path.yml]` checks one locale file;
  # with no argument every locale and data YAML under lib/ is checked.
  # All broken links go to log/broken_links.log, plus per-class logs for
  # 3xx / 4xx / 5xx responses.
  task :check, :file do |t, args|
    broken = []
    pwd = Dir.pwd
    # check a single file the user has passed in
    if args.file
      file = args.file
      path = File.expand_path("#{pwd}#{file}")
      puts "Checking #{file}"
      links_to_check = check_locales_file(IO.read(file))
      broken = check_links(links_to_check, broken, file)
    else
      base_path = File.expand_path("#{pwd}/lib")
      Dir.glob("#{base_path}/flows/locales/**/*.yml") { |file|
        puts "Checking #{file}"
        links_to_check = check_locales_file(IO.read(file))
        broken = check_links(links_to_check, broken, file)
      }
      Dir.glob("#{base_path}/data/*.yml") { |file|
        puts "Checking #{file}"
        links_to_check = check_data_file(IO.read(file))
        broken = check_links(links_to_check, broken, file)
      }
    end
    File.open("log/broken_links.log", "w") { |file|
      file.puts broken
    }
    # Bucket results by the first digit of the response code.
    fives = broken.select { |item| item[:resp][0] == "5" }
    four_oh_fours = broken.select { |item| item[:resp][0] == "4" }
    three_oh_threes = broken.select { |item| item[:resp][0] == "3" }
    File.open("log/300_links.log", "w") { |file|
      file.puts three_oh_threes
    }
    File.open("log/404_links.log", "w") { |file|
      file.puts four_oh_fours
    }
    File.open("log/500_links.log", "w") { |file|
      file.puts fives
    }
    if three_oh_threes.length > 0
      puts "Warning: Found links that give a 3XX response. Look in log/300_links.log"
    else
      puts "No 3XX links found"
    end
    if four_oh_fours.length > 0
      puts "Warning: Found 404s. Look in log/404_links.log"
    else
      puts "No 404s found"
    end
    if fives.length > 0
      puts "Warning: Found links that give a 5XX response. Look in log/500_links.log"
    else
      puts "No 5XX links found"
    end
  end
end
|
namespace :stops do
  desc "Fetch the tram stops from the HSL data"
  # Imports tram stops from the HSL/Digitransit GraphQL API. Stops sharing a
  # name are merged into one Stop record whose coordinates are the geographic
  # centre of the individual platforms.
  task fetch: :environment do
    query = %(
      {
        routes(modes: "TRAM") {
          id
          agency {
            id
          }
          shortName
          longName
          desc
          stops {
            id
            gtfsId
            name
            code
            lat
            lon
          }
        }
      }
    )
    resp = HTTP.post("https://api.digitransit.fi/routing/v1/routers/finland/index/graphql", body: query)
    data = JSON.load(resp.body.to_s)
    # BUGFIX: the same stop can appear on several routes — de-duplicate by id
    # before grouping, otherwise shared stops are processed multiple times.
    stop_data = data.dig("data", "routes").
      map { |r| r["stops"] }.flatten.
      uniq { |s| s["id"] }.
      group_by { |s| s["name"] }
    Stop.transaction do
      stop_data.each do |name, data|
        print "Creating #{name}"
        points = data.map { |s| [s["lat"], s["lon"]] }
        centre = Geocoder::Calculations.geographic_center points
        print "."
        stop = Stop.find_or_initialize_by(name: name)
        print "."
        stop.latitude, stop.longitude = centre
        print "."
        # The API's ids arrive Base64-encoded; store the decoded form.
        stop.hsl_ids = data.map { |s| Base64.decode64(s["id"]) }
        print "."
        stop.stop_numbers = data.map { |s| s["code"] }
        stop.save!
        puts " done"
      end
    end
    puts "Known stops: #{Stop.count}"
  end
end
Ensure that the stops are unique
namespace :stops do
  desc "Fetch the tram stops from the HSL data"
  # Imports tram stops from the HSL/Digitransit GraphQL API. Stops sharing a
  # name are merged into one Stop record whose coordinates are the geographic
  # centre of the individual platforms.
  task fetch: :environment do
    query = %(
      {
        routes(modes: "TRAM") {
          id
          agency {
            id
          }
          shortName
          longName
          desc
          stops {
            id
            gtfsId
            name
            code
            lat
            lon
          }
        }
      }
    )
    resp = HTTP.post("https://api.digitransit.fi/routing/v1/routers/finland/index/graphql", body: query)
    data = JSON.load(resp.body.to_s)
    # A stop may appear on several routes: de-duplicate by id first, then
    # group platforms that share a name into a single logical stop.
    stop_data = data.dig("data", "routes").
      map { |r| r["stops"] }.flatten.
      uniq { |s| s["id"] }.
      group_by { |s| s["name"] }
    Stop.transaction do
      stop_data.each do |name, data|
        print "Creating #{name}"
        points = data.map {|s| [s["lat"], s["lon"]] }
        centre = Geocoder::Calculations.geographic_center points
        print "."
        stop = Stop.find_or_initialize_by(name: name)
        print "."
        stop.latitude, stop.longitude = centre
        print "."
        # The API's ids arrive Base64-encoded; store the decoded form.
        stop.hsl_ids = data.map {|s| Base64.decode64(s["id"]) }
        print "."
        stop.stop_numbers = data.map {|s| s["code"] }
        stop.save!
        puts " done"
      end
    end
    puts "Known stops: #{Stop.count}"
  end
end
|
Add a RuboCop style-check task to Rake (and make it the default task)
namespace :style do
  require 'rubocop/rake_task'
  desc 'Run RuboCop for style checking'
  # Defines style:rubocop with Rails cops enabled and cop names shown in
  # offense output.
  RuboCop::RakeTask.new(:rubocop) do |task|
    task.options = ['--rails', '--display-cop-names']
  end
end
# Convenience alias: `rake style` runs style:rubocop.
task :style do
  Rake::Task['style:rubocop'].invoke
end
# Bare `rake` runs the style check.
task default: :style
|
# Ruby wrapper around the tenHsServer Homeseer API.
require "ten_hs_server/version"
module TenHsServer
  # Each component is loaded lazily on first reference.
  autoload :Adapter, "ten_hs_server/adapter"
  autoload :Device, "ten_hs_server/device"
  autoload :Event, "ten_hs_server/event"
  autoload :Room, "ten_hs_server/room"
end
Add author comment
# This is a ruby wrapper around the tenHsServer Homeseer API
#
# Author: Espen Høgbakk
# Email: espen@hogbakk.no
require "ten_hs_server/version"
module TenHsServer
  # Each component is loaded lazily on first reference.
  autoload :Adapter, "ten_hs_server/adapter"
  autoload :Device, "ten_hs_server/device"
  autoload :Event, "ten_hs_server/event"
  autoload :Room, "ten_hs_server/room"
end
# encoding: UTF-8
module Tetra
# encapsulates a Tetra project directory
class Project
include Logging
attr_accessor :full_path
attr_accessor :git
# Opens the project containing `path`: walks up the directory tree (like
# git does) to find the project root, then opens its git repository.
# Raises NoProjectDirectoryError (via find_project_dir) when none is found.
def initialize(path)
  @full_path = Tetra::Project.find_project_dir(File.expand_path(path))
  @git = Tetra::Git.new(@full_path)
end
# Project name: the basename of the project directory.
def name
  File.basename(@full_path)
end
# Project version: the number of completed dry-runs (count of
# dry_run_finished tags).
def version
  latest_tag_count(:dry_run_finished)
end
# finds the project directory up in the tree, like git does;
# raises NoProjectDirectoryError when the filesystem root is reached
# without finding one
def self.find_project_dir(starting_dir)
  candidate = starting_dir
  candidate = File.expand_path("..", candidate) until project?(candidate) || candidate == "/"
  fail NoProjectDirectoryError, starting_dir if candidate == "/"
  candidate
end
# returns true if the specified directory is a valid tetra project:
# it must contain src/, kit/ and a .git directory
def self.project?(dir)
  %w[src kit .git].all? { |entry| File.directory?(File.join(dir, entry)) }
end
# returns the package name corresponding to the specified dir, if any
# raises NoPackageDirectoryError if dir is not a (sub)directory of a package
def get_package_name(dir)
  dir_path = Pathname.new(File.expand_path(dir)).relative_path_from(Pathname.new(@full_path))
  components = dir_path.to_s.split(File::SEPARATOR)
  # a package directory looks like <project>/src/<package>/...
  if components.count >= 2 &&
    components.first == "src" &&
    Dir.exist?(File.join(@full_path, components[0], components[1]))
    components[1]
  else
    fail NoPackageDirectoryError
  end
rescue ArgumentError, NoProjectDirectoryError
  # relative_path_from raises ArgumentError when dir lies outside the project
  raise NoPackageDirectoryError, dir
end
# inits a new project directory structure
def self.init(dir)
  Dir.chdir(dir) do
    Tetra::Git.new(".").init
    FileUtils.mkdir_p("src")
    FileUtils.mkdir_p("kit")
    # populate the project with templates and take a snapshot
    project = Project.new(".")
    template_path = File.join(File.dirname(__FILE__), "..", "template")
    # template source (under template/) => destination in the new project
    templates = {
      "output" => ".",
      "kit" => ".",
      "src" => ".",
      "gitignore" => ".gitignore"
    }
    templates.each do |source, destination|
      FileUtils.cp_r(File.join(template_path, source), destination)
    end
    project.take_snapshot("Template files added", :init)
  end
end
# starts a dry running phase: files added to kit/ will be added
# to the kit package, src/ will be reset at the current state
# when finished
def dry_run
return false if dry_running?
current_directory = Pathname.new(Dir.pwd).relative_path_from(Pathname.new(@full_path))
take_snapshot("Dry-run started", :dry_run_started, current_directory)
true
end
# returns true iff we are currently dry-running
def dry_running?
latest_tag_count(:dry_run_started) > latest_tag_count(:dry_run_finished)
end
# ends a dry-run.
# if abort is true, reverts the whole directory
# if abort is false, reverts sources and updates output file lists
def finish(abort)
if dry_running?
if abort
@git.revert_whole_directory(".", latest_tag(:dry_run_started))
@git.delete_tag(latest_tag(:dry_run_started))
else
take_snapshot("Changes during dry-run", :dry_run_changed)
@git.revert_whole_directory("src", latest_tag(:dry_run_started))
take_snapshot("Dry run finished", :dry_run_finished)
end
return true
end
false
end
# takes a revertable snapshot of this project
def take_snapshot(message, tag_prefix = nil, tag_message = nil)
tag = (
if tag_prefix
"#{tag_prefix}_#{latest_tag_count(tag_prefix) + 1}"
else
nil
end
)
@git.commit_whole_directory(message, tag, tag_message)
end
# replaces content in path with new_content, takes a snapshot using
# snapshot_message and tag_prefix and 3-way merges new and old content
# with a previous snapshotted file same path tag_prefix, if it exists.
# returns the number of conflicts
def merge_new_content(new_content, path, snapshot_message, tag_prefix)
from_directory do
log.debug "merging new content to #{path} with prefix #{tag_prefix}"
already_existing = File.exist? path
previous_tag = latest_tag(tag_prefix)
if already_existing
log.debug "moving #{path} to #{path}.tetra_user_edited"
File.rename path, "#{path}.tetra_user_edited"
end
File.open(path, "w") { |io| io.write(new_content) }
log.debug "taking snapshot with new content: #{snapshot_message}"
take_snapshot(snapshot_message, tag_prefix)
if already_existing
if previous_tag == ""
previous_tag = latest_tag(tag_prefix)
log.debug "there was no tag with prefix #{tag_prefix} before snapshot"
log.debug "defaulting to #{previous_tag} after snapshot"
end
# 3-way merge
conflict_count = @git.merge_with_tag("#{path}", "#{path}.tetra_user_edited", previous_tag)
File.delete "#{path}.tetra_user_edited"
return conflict_count
end
return 0
end
end
# returns the tag with maximum count for a given tag prefix
def latest_tag(prefix)
"#{prefix}_#{latest_tag_count(prefix)}"
end
# returns the maximum tag count for a given tag prefix
def latest_tag_count(prefix)
@git.get_tag_maximum_suffix(prefix)
end
# runs a block from the project directory or a subdirectory
def from_directory(subdirectory = "")
Dir.chdir(File.join(@full_path, subdirectory)) do
yield
end
end
# returns the latest dry run start directory
def latest_dry_run_directory
@git.get_message(latest_tag(:dry_run_started))
end
# returns a list of files produced during dry-runs in a certain package
def get_produced_files(package)
dry_run_count = latest_tag_count(:dry_run_changed)
log.debug "Getting produced files from #{dry_run_count} dry runs"
if dry_run_count >= 1
package_dir = File.join("src", package)
(1..dry_run_count).map do |i|
@git.changed_files_between("dry_run_started_#{i}", "dry_run_changed_#{i}", package_dir)
end
.flatten
.uniq
.sort
.map { |file| Pathname.new(file).relative_path_from(Pathname.new(package_dir)).to_s }
else
[]
end
end
# moves any .jar from src/ to kit/ and links it back
def purge_jars
from_directory do
result = []
Find.find("src") do |file|
next unless file =~ /.jar$/ && !File.symlink?(file)
new_location = File.join("kit", "jars", Pathname.new(file).split[1])
FileUtils.mv(file, new_location)
link_target = Pathname.new(new_location)
.relative_path_from(Pathname.new(file).split.first)
.to_s
File.symlink(link_target, file)
result << [file, new_location]
end
result
end
end
end
# current directory is not a tetra project
class NoProjectDirectoryError < StandardError
attr_reader :directory
def initialize(directory)
@directory = directory
end
end
# current directory is not a tetra package directory
class NoPackageDirectoryError < StandardError
attr_reader :directory
def initialize(directory)
@directory = directory
end
end
end
Refactoring: extract next_tag
# encoding: UTF-8
module Tetra
# encapsulates a Tetra project directory
class Project
include Logging
attr_accessor :full_path
attr_accessor :git
def initialize(path)
@full_path = Tetra::Project.find_project_dir(File.expand_path(path))
@git = Tetra::Git.new(@full_path)
end
def name
File.basename(@full_path)
end
def version
latest_tag_count(:dry_run_finished)
end
# finds the project directory up in the tree, like git does
def self.find_project_dir(starting_dir)
result = starting_dir
while project?(result) == false && result != "/"
result = File.expand_path("..", result)
end
fail NoProjectDirectoryError, starting_dir if result == "/"
result
end
# returns true if the specified directory is a valid tetra project
def self.project?(dir)
File.directory?(File.join(dir, "src")) &&
File.directory?(File.join(dir, "kit")) &&
File.directory?(File.join(dir, ".git"))
end
# returns the package name corresponding to the specified dir, if any
# raises NoPackageDirectoryError if dir is not a (sub)directory of a package
def get_package_name(dir)
dir_path = Pathname.new(File.expand_path(dir)).relative_path_from(Pathname.new(@full_path))
components = dir_path.to_s.split(File::SEPARATOR)
if components.count >= 2 &&
components.first == "src" &&
Dir.exist?(File.join(@full_path, components[0], components[1]))
components[1]
else
fail NoPackageDirectoryError
end
rescue ArgumentError, NoProjectDirectoryError
raise NoPackageDirectoryError, dir
end
# inits a new project directory structure
def self.init(dir)
Dir.chdir(dir) do
Tetra::Git.new(".").init
FileUtils.mkdir_p("src")
FileUtils.mkdir_p("kit")
# populate the project with templates and take a snapshot
project = Project.new(".")
template_path = File.join(File.dirname(__FILE__), "..", "template")
templates = {
"output" => ".",
"kit" => ".",
"src" => ".",
"gitignore" => ".gitignore"
}
templates.each do |source, destination|
FileUtils.cp_r(File.join(template_path, source), destination)
end
project.take_snapshot("Template files added", :init)
end
end
# starts a dry running phase: files added to kit/ will be added
# to the kit package, src/ will be reset at the current state
# when finished
def dry_run
return false if dry_running?
current_directory = Pathname.new(Dir.pwd).relative_path_from(Pathname.new(@full_path))
take_snapshot("Dry-run started", :dry_run_started, current_directory)
true
end
# returns true iff we are currently dry-running
def dry_running?
latest_tag_count(:dry_run_started) > latest_tag_count(:dry_run_finished)
end
# ends a dry-run.
# if abort is true, reverts the whole directory
# if abort is false, reverts sources and updates output file lists
def finish(abort)
if dry_running?
if abort
@git.revert_whole_directory(".", latest_tag(:dry_run_started))
@git.delete_tag(latest_tag(:dry_run_started))
else
take_snapshot("Changes during dry-run", :dry_run_changed)
@git.revert_whole_directory("src", latest_tag(:dry_run_started))
take_snapshot("Dry run finished", :dry_run_finished)
end
return true
end
false
end
# takes a revertable snapshot of this project
def take_snapshot(message, tag_prefix, tag_message = nil)
@git.commit_whole_directory(message, next_tag(tag_prefix), tag_message)
end
# replaces content in path with new_content, takes a snapshot using
# snapshot_message and tag_prefix and 3-way merges new and old content
# with a previous snapshotted file same path tag_prefix, if it exists.
# returns the number of conflicts
def merge_new_content(new_content, path, snapshot_message, tag_prefix)
from_directory do
log.debug "merging new content to #{path} with prefix #{tag_prefix}"
already_existing = File.exist? path
previous_tag = latest_tag(tag_prefix)
if already_existing
log.debug "moving #{path} to #{path}.tetra_user_edited"
File.rename path, "#{path}.tetra_user_edited"
end
File.open(path, "w") { |io| io.write(new_content) }
log.debug "taking snapshot with new content: #{snapshot_message}"
take_snapshot(snapshot_message, tag_prefix)
if already_existing
if previous_tag == ""
previous_tag = latest_tag(tag_prefix)
log.debug "there was no tag with prefix #{tag_prefix} before snapshot"
log.debug "defaulting to #{previous_tag} after snapshot"
end
# 3-way merge
conflict_count = @git.merge_with_tag("#{path}", "#{path}.tetra_user_edited", previous_tag)
File.delete "#{path}.tetra_user_edited"
return conflict_count
end
return 0
end
end
# returns the tag with maximum count for a given tag prefix
def latest_tag(prefix)
"#{prefix}_#{latest_tag_count(prefix)}"
end
# returns the maximum tag count for a given tag prefix
def latest_tag_count(prefix)
@git.get_tag_maximum_suffix(prefix)
end
# returns the next tag for a given tag prefix
def next_tag(prefix)
"#{prefix}_#{latest_tag_count(prefix) + 1}"
end
# runs a block from the project directory or a subdirectory
def from_directory(subdirectory = "")
Dir.chdir(File.join(@full_path, subdirectory)) do
yield
end
end
# returns the latest dry run start directory
def latest_dry_run_directory
@git.get_message(latest_tag(:dry_run_started))
end
# returns a list of files produced during dry-runs in a certain package
def get_produced_files(package)
dry_run_count = latest_tag_count(:dry_run_changed)
log.debug "Getting produced files from #{dry_run_count} dry runs"
if dry_run_count >= 1
package_dir = File.join("src", package)
(1..dry_run_count).map do |i|
@git.changed_files_between("dry_run_started_#{i}", "dry_run_changed_#{i}", package_dir)
end
.flatten
.uniq
.sort
.map { |file| Pathname.new(file).relative_path_from(Pathname.new(package_dir)).to_s }
else
[]
end
end
# moves any .jar from src/ to kit/ and links it back
def purge_jars
from_directory do
result = []
Find.find("src") do |file|
next unless file =~ /.jar$/ && !File.symlink?(file)
new_location = File.join("kit", "jars", Pathname.new(file).split[1])
FileUtils.mv(file, new_location)
link_target = Pathname.new(new_location)
.relative_path_from(Pathname.new(file).split.first)
.to_s
File.symlink(link_target, file)
result << [file, new_location]
end
result
end
end
end
# current directory is not a tetra project
class NoProjectDirectoryError < StandardError
attr_reader :directory
def initialize(directory)
@directory = directory
end
end
# current directory is not a tetra package directory
class NoPackageDirectoryError < StandardError
attr_reader :directory
def initialize(directory)
@directory = directory
end
end
end
|
require 'trello'
require 'kramdown'
# Helper around the ruby-trello API: configures credentials, resolves the
# team-to-board mapping, and memoizes boards, lists, cards, labels, members
# and checklists to limit API round trips. All remote calls funnel through
# trello_do, which retries on failure.
class TrelloHelper
  # Trello Config
  attr_accessor :consumer_key, :consumer_secret, :oauth_token, :oauth_token_secret, :teams,
                :documentation_id, :organization_id, :roadmap_board, :roadmap_id,
                :public_roadmap_id, :public_roadmap_board, :documentation_board,
                :documentation_next_list, :docs_planning_id, :organization_name,
                :sprint_length_in_weeks, :sprint_start_day, :sprint_end_day, :logo,
                :docs_new_list_name, :roadmap_board_lists, :max_lists_per_board,
                :current_release_labels, :default_product, :other_products,
                :sprint_card

  # per-object memoization caches keyed by Trello ids
  attr_accessor :boards, :trello_login_to_email, :cards_by_list, :labels_by_card, :list_by_card, :members_by_card, :checklists_by_card, :lists_by_board

  # retry policy for trello_do
  DEFAULT_RETRIES = 3
  DEFAULT_RETRY_SLEEP = 10

  FUTURE_TAG = '[future]'
  FUTURE_LABEL = 'future'

  # list names: "Sprint <n>" or "Done: <x>.<y>[.<z>[.<w>]]"
  SPRINT_REGEX = /^Sprint (\d+)/
  DONE_REGEX = /^Done: ((\d+)\.(\d+)(.(\d+))?(.(\d+))?)/
  SPRINT_REGEXES = Regexp.union([SPRINT_REGEX, DONE_REGEX])

  # list-name groupings used to classify card state
  ACCEPTED_STATES = {
    'Accepted' => true,
    'Done' => true
  }
  COMPLETE_STATES = {
    'Complete' => true
  }
  IN_PROGRESS_STATES = {
    'In Progress' => true,
    'Design' => true
  }
  NEXT_STATES = {
    'Stalled' => true,
    'Next' => true
  }
  BACKLOG_STATES = {
    'Backlog' => true
  }
  NEW_STATES = {
    'New' => true
  }
  CURRENT_SPRINT_NOT_ACCEPTED_STATES = IN_PROGRESS_STATES.merge(COMPLETE_STATES)
  CURRENT_SPRINT_NOT_IN_PROGRESS_STATES = COMPLETE_STATES.merge(ACCEPTED_STATES)
  CURRENT_SPRINT_STATES = IN_PROGRESS_STATES.merge(CURRENT_SPRINT_NOT_IN_PROGRESS_STATES)

  # release-state ordering and display names
  RELEASE_STATE_ORDER = {
    'committed' => 0,
    'targeted' => 1,
    'proposed' => 2
  }
  RELEASE_STATES = ['committed', 'targeted', 'proposed']
  RELEASE_STATE_DISPLAY_NAME = {
    'committed' => 'Complete or Committed',
    'targeted' => 'Targeted',
    'proposed' => 'Proposed'
  }
  LIST_POSITION_ADJUSTMENT = {
    'Done' => 10,
    'Accepted' => 50,
    'Complete' => 100,
    'In Progress' => 200,
    'Design' => 250,
    'Next' => 300,
    'Stalled' => 350,
    'Backlog' => 400,
    'New' => 800
  }
  MAX_LIST_POSITION_ADJUSTMENT = 1000

  # names of the special per-epic checklists
  UNASSIGNED_RELEASE = "Unassigned Release"
  FUTURE_RELEASE = "Future Release"

  # opts: attribute-name => value pairs assigned via the accessors above;
  # also configures the global Trello client credentials
  def initialize(opts)
    opts.each do |k,v|
      send("#{k}=",v)
    end
    Trello.configure do |config|
      config.consumer_key = @consumer_key
      config.consumer_secret = @consumer_secret
      config.oauth_token = @oauth_token
      config.oauth_token_secret = @oauth_token_secret
    end
    # initialize the memoization caches
    @cards_by_list = {}
    @labels_by_card = {}
    @list_by_card = {}
    @members_by_card = {}
    @checklists_by_card = {}
    @lists_by_board = {}
  end

  # all board ids across teams; teams flagged :exclude_from_sprint_report
  # are skipped when for_sprint_report is true
  def board_ids(for_sprint_report=false)
    board_ids = []
    teams.each do |team, team_map|
      team_boards_map = team_boards_map(team_map)
      unless for_sprint_report && team_map[:exclude_from_sprint_report]
        team_boards_map.each do |b_name, b_id|
          board_ids << b_id
        end
      end
    end
    return board_ids
  end

  # human-readable reference "<board_name>_<short_id>" for a card
  def card_ref(card)
    board_name = nil
    teams.each do |team_name, team_map|
      team_boards_map = team_boards_map(team_map)
      team_boards_map.each do |b_name, b_id|
        if b_id == card.board_id
          board_name = b_name
          break
        end
      end
    end
    return "#{board_name}_#{card.short_id}"
  end

  # all Trello::Board objects belonging to the named team
  def team_boards(team_name)
    team_map = teams[team_name.to_sym]
    team_boards = []
    team_boards_map = team_boards_map(team_map)
    team_boards_map.each do |board_name, board_id|
      team_boards << boards[board_id]
    end
    team_boards
  end

  # a team config either nests its boards under :boards or IS the
  # name => id map itself
  def team_boards_map(team_map)
    team_boards_map = nil
    if team_map.has_key?(:boards)
      team_boards_map = team_map[:boards]
    else
      team_boards_map = team_map
    end
    return team_boards_map
  end

  # the Trello::Board with the given configured name, searching all teams
  def team_board(board_name)
    board_name = board_name.to_sym
    teams.each do |team_name, team_map|
      team_boards_map = team_boards_map(team_map)
      team_boards_map.each do |b_name, b_id|
        return boards[b_id] if b_name == board_name
      end
    end
  end

  # the name of the team owning the card's board (nil-ish if not found)
  def team_name(card)
    teams.each do |team_name, team_map|
      team_boards_map = team_boards_map(team_map)
      team_boards_map.each do |b_name, b_id|
        if b_id == card.board_id
          return team_name.to_s
        end
      end
    end
  end

  # memoized id => Trello::Board map of all configured boards
  def boards
    return @boards if @boards
    @boards = {}
    org_boards.each do |board|
      if board_ids.include?(board.id)
        @boards[board.id] = board
      end
    end
    @boards
  end

  # like #boards but honoring :exclude_from_sprint_report; not memoized
  def boards_for_sprint_report
    boards = {}
    org_boards.each do |board|
      if board_ids(true).include?(board.id)
        boards[board.id] = board
      end
    end
    boards
  end

  # the top card of the first in-progress list on the last configured board
  # (used as the "sprint card"); nil if none found
  def sprint_card
    return @sprint_card if @sprint_card
    board = board(board_ids.last)
    board_lists(board).each do |list|
      if IN_PROGRESS_STATES.include?(list.name)
        @sprint_card = list_cards(list).sort_by { |card| card.pos }.first
        return @sprint_card
      end
    end
    nil
  end

  def documentation_board
    @documentation_board = find_board(documentation_id) unless @documentation_board
    @documentation_board
  end

  # docs planning board; falls back to the documentation board when no
  # docs_planning_id is configured
  def docs_planning_board
    unless @docs_planning_board
      if docs_planning_id
        @docs_planning_board = find_board(docs_planning_id)
      else
        @docs_planning_board = documentation_board
      end
    end
    @docs_planning_board
  end

  def roadmap_board
    if roadmap_id
      @roadmap_board = find_board(roadmap_id) unless @roadmap_board
    end
    @roadmap_board
  end

  def public_roadmap_board
    if public_roadmap_id
      @public_roadmap_board = find_board(public_roadmap_id) unless @public_roadmap_board
    end
    @public_roadmap_board
  end

  # remote lookups, each retried via trello_do
  def find_board(board_id)
    trello_do('find_board') do
      return Trello::Board.find(board_id)
    end
  end

  def find_card_by_short_id(board, card_id)
    trello_do('find_card_by_short_id') do
      return board.find_card(card_id)
    end
  end

  def find_card(card_id)
    trello_do('find_card') do
      return Trello::Card.find(card_id)
    end
  end

  # the configured roadmap boards (public first), skipping unconfigured ones
  def roadmap_boards
    rbs = []
    rbs << public_roadmap_board if public_roadmap_board
    rbs << roadmap_board if roadmap_board
    rbs
  end

  # label name => color map for the roadmap board
  def roadmap_label_colors_by_name
    roadmap_labels = board_labels(roadmap_board)
    roadmap_label_colors_by_name = {}
    roadmap_labels.each do |label|
      roadmap_label_colors_by_name[label.name] = label.color
    end
    roadmap_label_colors_by_name
  end

  # maps epic tags to their epic cards across all roadmap boards.
  # An epic is referenced either by an "epic-*" label or by "[tag]"
  # markers embedded in the card name; labels take precedence.
  def tag_to_epics
    tag_to_epics = {}
    roadmap_boards.each do |roadmap_board|
      epic_lists = epic_lists(roadmap_board)
      epic_lists.each do |epic_list|
        list_cards(epic_list).each do |epic_card|
          card_labels(epic_card).each do |label|
            if label.name.start_with? 'epic-'
              tag_to_epics[label.name] = [] unless tag_to_epics[label.name]
              tag_to_epics[label.name] << epic_card
            end
          end
          epic_card.name.scan(/\[[^\]]+\]/).each do |tag|
            if tag != FUTURE_TAG && !tag_to_epics["epic-#{tag[1..-2]}"]
              tag_to_epics[tag] = [] unless tag_to_epics[tag]
              tag_to_epics[tag] << epic_card
            end
          end
        end
      end
    end
    tag_to_epics
  end

  # lists of a board, newest sprint first, dropping closed non-sprint lists.
  # Caching interacts with list_limit: the full list is only cached when the
  # limit covers at least max_lists_per_board (or there is no limit).
  # NOTE: the sort_by relies on $1..$8 set by the =~ in the same block.
  def board_lists(board, list_limit=max_lists_per_board)
    lists = nil
    lists = @lists_by_board[board.id] if max_lists_per_board.nil? || (list_limit && list_limit <= max_lists_per_board)
    unless lists
      trello_do('lists') do
        lists = board.lists(:filter => [:all])
        lists = lists.delete_if{ |list| list.name !~ TrelloHelper::SPRINT_REGEXES && list.closed? }
        lists.sort_by!{ |list| [list.name =~ TrelloHelper::SPRINT_REGEXES ? ($1.to_i) : 9999999, $3.to_i, $4.to_i, $6.to_i, $8.to_i]}
        lists.reverse!
      end
    end
    @lists_by_board[board.id] = lists if ((list_limit && max_lists_per_board && (list_limit >= max_lists_per_board)) || list_limit.nil?) && !@lists_by_board[board.id]
    lists = lists.first(list_limit) if list_limit
    return lists
  end

  # the lists holding epics on a roadmap board (configurable via
  # roadmap_board_lists, default 'Epic Backlog')
  def epic_lists(board)
    lists = []
    target_boards = roadmap_board_lists || ['Epic Backlog']
    board_lists(board).each do |l|
      if target_boards.include?(l.name)
        lists.push(l)
      end
    end
    lists
  end

  # the docs planning list to add new cards to (configurable via
  # docs_new_list_name, default 'Next Sprint'); memoized
  def documentation_next_list
    unless @documentation_next_list
      new_list_name = docs_new_list_name || 'Next Sprint'
      board_lists(docs_planning_board).each do |l|
        if l.name == new_list_name
          @documentation_next_list = l
          break
        end
      end
    end
    @documentation_next_list
  end

  # the card's checklist with the given name, or nil
  def checklist(card, checklist_name)
    checklists = list_checklists(card)
    checklists.each do |checklist|
      if checklist.name == checklist_name
        return checklist
      end
    end
    return nil
  end

  # deletes all generated card-reference items (markdown link + two
  # parenthesized annotations) from the epic's checklists, then makes sure
  # the two special release checklists exist
  def clear_epic_refs(epic_card)
    checklists = list_checklists(epic_card)
    checklists.each do |cl|
      cl.items.each do |item|
        if item.name =~ /\[.*\]\(https?:\/\/trello\.com\/[^\)]+\) \([^\)]+\) \([^\)]+\)/
          begin
            trello_do('checklist', 2) do
              cl.delete_checklist_item(item.id)
            end
          rescue => e
            $stderr.puts "Error deleting checklist item: #{e.message}"
          end
        end
      end
    end
    create_checklist(epic_card, UNASSIGNED_RELEASE)
    create_checklist(epic_card, FUTURE_RELEASE)
  end

  # finds or creates a checklist on the card; invalidates the card's
  # checklist cache on creation
  def create_checklist(card, checklist_name)
    cl = checklist(card, checklist_name)
    unless cl
      puts "Adding #{checklist_name} to #{card.name}"
      cl = Trello::Checklist.create({:name => checklist_name, :board_id => card.board_id, :card_id => card.id})
      #card.add_checklist(cl)
      @checklists_by_card.delete(card.id)
    end
    cl
  end

  # renames an existing checklist on the card, if present
  def rename_checklist(card, old_checklist_name, new_checklist_name)
    cl = checklist(card, old_checklist_name)
    if cl
      puts "Renaming #{old_checklist_name} on #{new_checklist_name}"
      cl.name = new_checklist_name
      cl.save
    end
    cl
  end

  # removes empty checklists from an epic card, keeping the two special
  # release checklists even when empty
  def delete_empty_epic_checklists(epic_card)
    checklists = list_checklists(epic_card)
    checklists.each do |cl|
      next if [UNASSIGNED_RELEASE, FUTURE_RELEASE].include? cl.name
      if cl.items.empty?
        begin
          trello_do('checklist') do
            cl.delete
            @checklists_by_card.delete(epic_card.id)
          end
        rescue => e
          $stderr.puts "Error deleting checklist: #{e.message}"
        end
      end
    end
  end

  # dereferences a lazy ruby-trello association, with retries
  def target(ref, name='target')
    trello_do(name) do
      t = ref.target
      return t
    end
  end

  # cached per-card label fetch
  def card_labels(card)
    labels = @labels_by_card[card.id]
    return labels if labels
    trello_do('card_labels') do
      labels = card.labels
    end
    @labels_by_card[card.id] = labels if labels
    labels
  end

  # cached per-card list fetch
  def card_list(card)
    list = @list_by_card[card.id]
    return list if list
    trello_do('card_list') do
      list = card.list
    end
    @list_by_card[card.id] = list if list
    list
  end

  # cached per-card member fetch
  def card_members(card)
    members = @members_by_card[card.id]
    return members if members
    trello_do('card_members') do
      members = card.members
    end
    @members_by_card[card.id] = members if members
    members
  end

  # all labels of a board; raises if the API page limit is hit so that
  # truncated results are never silently used
  def board_labels(board)
    labels = nil
    label_limit = 1000
    trello_do('board_labels') do
      labels = board.labels(:limit => label_limit)
    end
    raise "Reached label API limit of 1000 entries" if labels.length >= label_limit
    labels
  end

  def create_label(name, color, board_id)
    Trello::Label.create(:name => name, :color => color, :board_id => board_id)
  end

  def update_label(label)
    trello_do('update_label') do
      label.save
    end
  end

  def delete_label(label)
    trello_do('delete_label') do
      label.delete
    end
  end

  # cached per-card checklist fetch (dereferenced via target)
  def list_checklists(card)
    checklists = @checklists_by_card[card.id]
    return checklists if checklists
    trello_do('checklists') do
      checklists = card.checklists
    end
    if checklists
      checklists = target(checklists, 'checklists')
      @checklists_by_card[card.id] = checklists
    end
    checklists
  end

  # cached per-list card fetch (dereferenced via target)
  def list_cards(list)
    cards = @cards_by_list[list.id]
    return cards if cards
    trello_do('cards') do
      cards = list.cards
    end
    if cards
      cards = target(cards, 'cards')
      @cards_by_list[list.id] = cards
    end
    cards
  end

  # prints a one-line card summary (optionally numbered) plus assignees
  def print_card(card, num=nil)
    print " "
    print "#{num}) " if num
    puts "#{card.name} (##{card.short_id})"
    members = card_members(card)
    if !members.empty?
      puts " Assignee(s): #{members.map{|member| member.full_name}.join(',')}"
    end
  end

  # prints a list header and all of its cards (skips empty lists)
  def print_list(list)
    cards = list_cards(list)
    if !cards.empty?
      puts "\n List: #{list.name} (#cards #{cards.length})"
      puts " Cards:"
      cards.each_with_index do |card, index|
        print_card(card, index+1)
      end
    end
  end

  # resolves a "<board_name>_<short_id>" reference (see card_ref) to a card
  def card_by_ref(card_ref)
    card = nil
    if card_ref =~ /^(\w+)_(\d+)/i
      board_name = $1
      card_short_id = $2
      board = team_board(board_name)
      card = find_card_by_short_id(board, card_short_id)
    end
    card
  end

  # resolves a trello.com card URL to a card; nil on failure
  def card_by_url(card_url)
    card = nil
    # https://trello.com/c/6EhPEbM4
    if card_url =~ /^https?:\/\/trello\.com\/c\/([[:alnum:]]+)/
      card_id = $1
      begin
        card = find_card(card_id)
      rescue
      end
    end
    card
  end

  # memoized Trello::Organization for the configured organization_id
  def org
    trello_do('org') do
      @org ||= Trello::Organization.find(organization_id)
      return @org
    end
  end

  def org_boards
    trello_do('org_boards') do
      return target(org.boards)
    end
  end

  def board(board_id)
    boards[board_id]
  end

  def member(member_name)
    Trello::Member.find(member_name)
  end

  # maps Trello members to email addresses using an optional
  # ~/trello_login_to_email.json lookup file; unknown members are skipped
  def member_emails(members)
    unless @trello_login_to_email
      @trello_login_to_email = {}
      trello_login_to_email_json = File.expand_path('~/trello_login_to_email.json')
      if File.exist? trello_login_to_email_json
        @trello_login_to_email = JSON.parse(File.read(trello_login_to_email_json))
      end
    end
    member_emails = []
    members.each do |member|
      email = @trello_login_to_email[member.username]
      if email
        member_emails << email
      end
    end
    member_emails
  end

  def markdown_to_html(text)
    Kramdown::Document.new(text).to_html
  end

  # runs the block, retrying up to `retries` times with a fixed sleep on
  # any raised error; re-raises once retries are exhausted.
  # NOTE(review): rescues Exception, which also traps Interrupt/SystemExit —
  # consider narrowing to StandardError; confirm no caller depends on this.
  def trello_do(type, retries=DEFAULT_RETRIES)
    i = 0
    while true
      begin
        yield
        break
      rescue Exception => e
        $stderr.puts "Error with #{type}: #{e.message}"
        raise if i >= retries
        sleep DEFAULT_RETRY_SLEEP
        i += 1
      end
    end
  end
end
Add actions
require 'trello'
require 'kramdown'
class TrelloHelper
# Trello Config
attr_accessor :consumer_key, :consumer_secret, :oauth_token, :oauth_token_secret, :teams,
:documentation_id, :organization_id, :roadmap_board, :roadmap_id,
:public_roadmap_id, :public_roadmap_board, :documentation_board,
:documentation_next_list, :docs_planning_id, :organization_name,
:sprint_length_in_weeks, :sprint_start_day, :sprint_end_day, :logo,
:docs_new_list_name, :roadmap_board_lists, :max_lists_per_board,
:current_release_labels, :default_product, :other_products,
:sprint_card
attr_accessor :boards, :trello_login_to_email, :cards_by_list, :labels_by_card, :list_by_card, :members_by_card, :checklists_by_card, :lists_by_board, :comments_by_card
DEFAULT_RETRIES = 3
DEFAULT_RETRY_SLEEP = 10
FUTURE_TAG = '[future]'
FUTURE_LABEL = 'future'
SPRINT_REGEX = /^Sprint (\d+)/
DONE_REGEX = /^Done: ((\d+)\.(\d+)(.(\d+))?(.(\d+))?)/
SPRINT_REGEXES = Regexp.union([SPRINT_REGEX, DONE_REGEX])
ACCEPTED_STATES = {
'Accepted' => true,
'Done' => true
}
COMPLETE_STATES = {
'Complete' => true
}
IN_PROGRESS_STATES = {
'In Progress' => true,
'Design' => true
}
NEXT_STATES = {
'Stalled' => true,
'Next' => true
}
BACKLOG_STATES = {
'Backlog' => true
}
NEW_STATES = {
'New' => true
}
CURRENT_SPRINT_NOT_ACCEPTED_STATES = IN_PROGRESS_STATES.merge(COMPLETE_STATES)
CURRENT_SPRINT_NOT_IN_PROGRESS_STATES = COMPLETE_STATES.merge(ACCEPTED_STATES)
CURRENT_SPRINT_STATES = IN_PROGRESS_STATES.merge(CURRENT_SPRINT_NOT_IN_PROGRESS_STATES)
RELEASE_STATE_ORDER = {
'committed' => 0,
'targeted' => 1,
'proposed' => 2
}
RELEASE_STATES = ['committed', 'targeted', 'proposed']
RELEASE_STATE_DISPLAY_NAME = {
'committed' => 'Complete or Committed',
'targeted' => 'Targeted',
'proposed' => 'Proposed'
}
LIST_POSITION_ADJUSTMENT = {
'Done' => 10,
'Accepted' => 50,
'Complete' => 100,
'In Progress' => 200,
'Design' => 250,
'Next' => 300,
'Stalled' => 350,
'Backlog' => 400,
'New' => 800
}
MAX_LIST_POSITION_ADJUSTMENT = 1000
UNASSIGNED_RELEASE = "Unassigned Release"
FUTURE_RELEASE = "Future Release"
def initialize(opts)
opts.each do |k,v|
send("#{k}=",v)
end
Trello.configure do |config|
config.consumer_key = @consumer_key
config.consumer_secret = @consumer_secret
config.oauth_token = @oauth_token
config.oauth_token_secret = @oauth_token_secret
end
@cards_by_list = {}
@labels_by_card = {}
@list_by_card = {}
@members_by_card = {}
@checklists_by_card = {}
@lists_by_board = {}
end
def board_ids(for_sprint_report=false)
board_ids = []
teams.each do |team, team_map|
team_boards_map = team_boards_map(team_map)
unless for_sprint_report && team_map[:exclude_from_sprint_report]
team_boards_map.each do |b_name, b_id|
board_ids << b_id
end
end
end
return board_ids
end
def card_ref(card)
board_name = nil
teams.each do |team_name, team_map|
team_boards_map = team_boards_map(team_map)
team_boards_map.each do |b_name, b_id|
if b_id == card.board_id
board_name = b_name
break
end
end
end
return "#{board_name}_#{card.short_id}"
end
def team_boards(team_name)
team_map = teams[team_name.to_sym]
team_boards = []
team_boards_map = team_boards_map(team_map)
team_boards_map.each do |board_name, board_id|
team_boards << boards[board_id]
end
team_boards
end
def team_boards_map(team_map)
team_boards_map = nil
if team_map.has_key?(:boards)
team_boards_map = team_map[:boards]
else
team_boards_map = team_map
end
return team_boards_map
end
def team_board(board_name)
board_name = board_name.to_sym
teams.each do |team_name, team_map|
team_boards_map = team_boards_map(team_map)
team_boards_map.each do |b_name, b_id|
return boards[b_id] if b_name == board_name
end
end
end
def team_name(card)
teams.each do |team_name, team_map|
team_boards_map = team_boards_map(team_map)
team_boards_map.each do |b_name, b_id|
if b_id == card.board_id
return team_name.to_s
end
end
end
end
def boards
return @boards if @boards
@boards = {}
org_boards.each do |board|
if board_ids.include?(board.id)
@boards[board.id] = board
end
end
@boards
end
def boards_for_sprint_report
boards = {}
org_boards.each do |board|
if board_ids(true).include?(board.id)
boards[board.id] = board
end
end
boards
end
def sprint_card
return @sprint_card if @sprint_card
board = board(board_ids.last)
board_lists(board).each do |list|
if IN_PROGRESS_STATES.include?(list.name)
@sprint_card = list_cards(list).sort_by { |card| card.pos }.first
return @sprint_card
end
end
nil
end
def documentation_board
@documentation_board = find_board(documentation_id) unless @documentation_board
@documentation_board
end
def docs_planning_board
unless @docs_planning_board
if docs_planning_id
@docs_planning_board = find_board(docs_planning_id)
else
@docs_planning_board = documentation_board
end
end
@docs_planning_board
end
def roadmap_board
if roadmap_id
@roadmap_board = find_board(roadmap_id) unless @roadmap_board
end
@roadmap_board
end
def public_roadmap_board
if public_roadmap_id
@public_roadmap_board = find_board(public_roadmap_id) unless @public_roadmap_board
end
@public_roadmap_board
end
def find_board(board_id)
trello_do('find_board') do
return Trello::Board.find(board_id)
end
end
def find_card_by_short_id(board, card_id)
trello_do('find_card_by_short_id') do
return board.find_card(card_id)
end
end
def find_card(card_id)
trello_do('find_card') do
return Trello::Card.find(card_id)
end
end
def roadmap_boards
rbs = []
rbs << public_roadmap_board if public_roadmap_board
rbs << roadmap_board if roadmap_board
rbs
end
def roadmap_label_colors_by_name
roadmap_labels = board_labels(roadmap_board)
roadmap_label_colors_by_name = {}
roadmap_labels.each do |label|
roadmap_label_colors_by_name[label.name] = label.color
end
roadmap_label_colors_by_name
end
def tag_to_epics
tag_to_epics = {}
roadmap_boards.each do |roadmap_board|
epic_lists = epic_lists(roadmap_board)
epic_lists.each do |epic_list|
list_cards(epic_list).each do |epic_card|
card_labels(epic_card).each do |label|
if label.name.start_with? 'epic-'
tag_to_epics[label.name] = [] unless tag_to_epics[label.name]
tag_to_epics[label.name] << epic_card
end
end
epic_card.name.scan(/\[[^\]]+\]/).each do |tag|
if tag != FUTURE_TAG && !tag_to_epics["epic-#{tag[1..-2]}"]
tag_to_epics[tag] = [] unless tag_to_epics[tag]
tag_to_epics[tag] << epic_card
end
end
end
end
end
tag_to_epics
end
def board_lists(board, list_limit=max_lists_per_board)
lists = nil
lists = @lists_by_board[board.id] if max_lists_per_board.nil? || (list_limit && list_limit <= max_lists_per_board)
unless lists
trello_do('lists') do
lists = board.lists(:filter => [:all])
lists = lists.delete_if{ |list| list.name !~ TrelloHelper::SPRINT_REGEXES && list.closed? }
lists.sort_by!{ |list| [list.name =~ TrelloHelper::SPRINT_REGEXES ? ($1.to_i) : 9999999, $3.to_i, $4.to_i, $6.to_i, $8.to_i]}
lists.reverse!
end
end
@lists_by_board[board.id] = lists if ((list_limit && max_lists_per_board && (list_limit >= max_lists_per_board)) || list_limit.nil?) && !@lists_by_board[board.id]
lists = lists.first(list_limit) if list_limit
return lists
end
def epic_lists(board)
lists = []
target_boards = roadmap_board_lists || ['Epic Backlog']
board_lists(board).each do |l|
if target_boards.include?(l.name)
lists.push(l)
end
end
lists
end
def documentation_next_list
unless @documentation_next_list
new_list_name = docs_new_list_name || 'Next Sprint'
board_lists(docs_planning_board).each do |l|
if l.name == new_list_name
@documentation_next_list = l
break
end
end
end
@documentation_next_list
end
def checklist(card, checklist_name)
checklists = list_checklists(card)
checklists.each do |checklist|
if checklist.name == checklist_name
return checklist
end
end
return nil
end
def clear_epic_refs(epic_card)
checklists = list_checklists(epic_card)
checklists.each do |cl|
cl.items.each do |item|
if item.name =~ /\[.*\]\(https?:\/\/trello\.com\/[^\)]+\) \([^\)]+\) \([^\)]+\)/
begin
trello_do('checklist', 2) do
cl.delete_checklist_item(item.id)
end
rescue => e
$stderr.puts "Error deleting checklist item: #{e.message}"
end
end
end
end
create_checklist(epic_card, UNASSIGNED_RELEASE)
create_checklist(epic_card, FUTURE_RELEASE)
end
def create_checklist(card, checklist_name)
cl = checklist(card, checklist_name)
unless cl
puts "Adding #{checklist_name} to #{card.name}"
cl = Trello::Checklist.create({:name => checklist_name, :board_id => card.board_id, :card_id => card.id})
#card.add_checklist(cl)
@checklists_by_card.delete(card.id)
end
cl
end
# Renames the checklist titled +old_checklist_name+ on the card to
# +new_checklist_name+ and saves it. Returns the checklist, or nil when
# the card has no checklist with the old name.
def rename_checklist(card, old_checklist_name, new_checklist_name)
  cl = checklist(card, old_checklist_name)
  if cl
    # Message previously read "Renaming X on Y", which misstated the action.
    puts "Renaming #{old_checklist_name} to #{new_checklist_name}"
    cl.name = new_checklist_name
    cl.save
  end
  cl
end
# Deletes every empty checklist on the epic card, except the two release
# buckets (UNASSIGNED_RELEASE / FUTURE_RELEASE) which are kept even when
# empty. Each successful delete invalidates the card's checklist cache.
def delete_empty_epic_checklists(epic_card)
  checklists = list_checklists(epic_card)
  checklists.each do |cl|
    next if [UNASSIGNED_RELEASE, FUTURE_RELEASE].include? cl.name
    if cl.items.empty?
      begin
        trello_do('checklist') do
          cl.delete
          @checklists_by_card.delete(epic_card.id)
        end
      rescue => e
        # Best effort: continue with the remaining checklists.
        $stderr.puts "Error deleting checklist: #{e.message}"
      end
    end
  end
end
# Dereferences a lazy Trello association (e.g. the object returned by
# org.boards) with retries; returns the materialized target. The `return`
# inside the block exits this method on the first successful attempt.
def target(ref, name='target')
  trello_do(name) do
    t = ref.target
    return t
  end
end
# Labels on a card, memoized per card id; fetched with retries.
def card_labels(card)
  cached = @labels_by_card[card.id]
  return cached if cached
  fetched = nil
  trello_do('card_labels') do
    fetched = card.labels
  end
  @labels_by_card[card.id] = fetched if fetched
  fetched
end
# The list containing a card, memoized per card id; fetched with retries.
def card_list(card)
  cached = @list_by_card[card.id]
  return cached if cached
  fetched = nil
  trello_do('card_list') do
    fetched = card.list
  end
  @list_by_card[card.id] = fetched if fetched
  fetched
end
# Members assigned to a card, memoized per card id; fetched with retries.
def card_members(card)
  cached = @members_by_card[card.id]
  return cached if cached
  fetched = nil
  trello_do('card_members') do
    fetched = card.members
  end
  @members_by_card[card.id] = fetched if fetched
  fetched
end
# All labels on the board. A single request is capped at label_limit
# entries; hitting the cap means the list may be truncated, so fail
# loudly rather than silently return a partial result.
def board_labels(board)
  labels = nil
  label_limit = 1000
  trello_do('board_labels') do
    labels = board.labels(:limit => label_limit)
  end
  # Interpolate the limit so the message stays accurate if it changes.
  raise "Reached label API limit of #{label_limit} entries" if labels.length >= label_limit
  labels
end
# Creates a new label on the given board (no retry wrapper).
def create_label(name, color, board_id)
  Trello::Label.create(:name => name, :color => color, :board_id => board_id)
end
# Persists changes to a label, with retries.
def update_label(label)
  trello_do('update_label') do
    label.save
  end
end
# Deletes a label, with retries.
def delete_label(label)
  trello_do('delete_label') do
    label.delete
  end
end
# Checklists attached to a card, memoized per card id. The association
# returned by the API is dereferenced through #target before caching.
def list_checklists(card)
  cached = @checklists_by_card[card.id]
  return cached if cached
  fetched = nil
  trello_do('checklists') do
    fetched = card.checklists
  end
  return fetched unless fetched
  @checklists_by_card[card.id] = target(fetched, 'checklists')
end
# Cards in a list, memoized per list id. The association returned by the
# API is dereferenced through #target before caching.
def list_cards(list)
  cached = @cards_by_list[list.id]
  return cached if cached
  fetched = nil
  trello_do('cards') do
    fetched = list.cards
  end
  return fetched unless fetched
  @cards_by_list[list.id] = target(fetched, 'cards')
end
# Fetches a card's actions with retries (no caching) and dereferences
# the returned association.
def list_actions(card)
  fetched = nil
  trello_do('actions') do
    fetched = card.actions
  end
  fetched ? target(fetched, 'actions') : fetched
end
# Comment texts on a card, memoized per card id.
def list_comments(card)
  comments = @comments_by_card[card.id]
  return comments if comments
  actions = list_actions(card)
  comments = []
  actions.each do |action|
    # Was `action == 'createComment'`, which compared the action object
    # itself to a string and therefore never matched, so every card
    # appeared to have no comments.
    if action.type == 'createComment'
      comments << action.data['text']
    end
  end
  @comments_by_card[card.id] = comments
  comments
end
# Prints a one-card summary: "name (#short_id)", assignees if any, then
# the card's updateCard/createCard actions with their raw action data.
# num, when given, prefixes the line with an ordinal ("3) ...").
def print_card(card, num=nil)
  print " "
  print "#{num}) " if num
  puts "#{card.name} (##{card.short_id})"
  members = card_members(card)
  if !members.empty?
    puts " Assignee(s): #{members.map{|member| member.full_name}.join(',')}"
  end
  puts "\nActions:\n"
  list_actions(card).each do |action|
    if action.type == 'updateCard'
      # For updates, show who changed which field.
      puts "#{action.type}(#{action.member_creator.username} changed #{action.data['old'].keys.first}):"
      puts action.data
      puts
    elsif action.type == 'createCard'
      # For creations, look the creator up by id.
      puts "#{action.type}(#{member(action.member_creator_id).username}):"
      puts action.data
      puts
    end
  end
end
# Prints a list header (name and card count) followed by every card in
# the list via print_card. Empty lists produce no output at all.
def print_list(list)
  cards = list_cards(list)
  if !cards.empty?
    puts "\n List: #{list.name} (#cards #{cards.length})"
    puts " Cards:"
    cards.each_with_index do |card, index|
      print_card(card, index+1)
    end
  end
end
# Resolves a "<board>_<short_id>" style reference (e.g. "api_123") to a
# card on the matching team board; nil when the format does not match.
def card_by_ref(card_ref)
  card = nil
  if card_ref =~ /^(\w+)_(\d+)/i
    board_name = $1
    card_short_id = $2
    board = team_board(board_name)
    card = find_card_by_short_id(board, card_short_id)
  end
  card
end
# Resolves a Trello card URL to a card. Returns nil when the URL does
# not match or the lookup raises; the bare rescue deliberately turns
# lookup failures into a nil result for callers.
def card_by_url(card_url)
  card = nil
  # https://trello.com/c/6EhPEbM4
  if card_url =~ /^https?:\/\/trello\.com\/c\/([[:alnum:]]+)/
    card_id = $1
    begin
      card = find_card(card_id)
    rescue
    end
  end
  card
end
# The Trello organization, memoized; fetched with retries. The `return`
# inside the block exits on the first successful attempt.
def org
  trello_do('org') do
    @org ||= Trello::Organization.find(organization_id)
    return @org
  end
end
# All boards of the organization, dereferenced via #target, with retries.
def org_boards
  trello_do('org_boards') do
    return target(org.boards)
  end
end
# Board lookup by id via the boards map (defined elsewhere in this class).
def board(board_id)
  boards[board_id]
end
# Fetches a member straight from the API (no caching, no retry wrapper).
def member(member_name)
  Trello::Member.find(member_name)
end
# Maps Trello members to email addresses using the username-to-email
# table in ~/trello_login_to_email.json (loaded once and memoized).
# Members without a mapping are silently skipped.
def member_emails(members)
  unless @trello_login_to_email
    mapping_file = File.expand_path('~/trello_login_to_email.json')
    @trello_login_to_email =
      File.exist?(mapping_file) ? JSON.parse(File.read(mapping_file)) : {}
  end
  members.each_with_object([]) do |member, emails|
    email = @trello_login_to_email[member.username]
    emails << email if email
  end
end
# Renders a Markdown string to HTML via Kramdown.
def markdown_to_html(text)
  Kramdown::Document.new(text).to_html
end
# Runs the given block, retrying up to +retries+ times on error with a
# fixed sleep between attempts; re-raises once the budget is exhausted.
# +type+ only labels the error output.
def trello_do(type, retries=DEFAULT_RETRIES)
  i = 0
  while true
    begin
      yield
      break
    rescue StandardError => e
      # Rescue StandardError, not Exception, so interrupts (Ctrl-C),
      # SystemExit, etc. propagate instead of being retried.
      $stderr.puts "Error with #{type}: #{e.message}"
      raise if i >= retries
      sleep DEFAULT_RETRY_SLEEP
      i += 1
    end
  end
end
end
|
require 'ostruct'
module Twain
  # A tiny DSL for assembling text: subclasses queue paragraphs with
  # .write and register phrase variations with .translate; .build then
  # compiles every queued paragraph against a subject.
  class Builder
    @@translations = {}
    @@paras = []

    # Queue a paragraph built from the block's return value.
    def self.write(options = {}, &block)
      @@paras << Paragraph.new(block.call, options) # TODO: Pass whole block and call it later
    end

    # Register the possible wordings for +key+.
    def self.translate(key, *variations)
      @@translations[key] = variations
    end

    # Compile every queued paragraph against +subject+ into one string.
    def self.build(subject)
      @@paras.map { |para| para.compile(subject) }.join
    end

    # A random registered wording for +key+; raises KeyError when unknown.
    def self.get_translation(key)
      @@translations.fetch(key.to_sym).sample
    end

    # Value lookup: a real method wins, otherwise fall back to payload.
    def get(key)
      respond_to?(key) ? public_send(key) : payload[key.to_sym]
    end

    def tr(key)
      self.class.get_translation(key)
    end

    def generate
      self.class.build(self)
    end
  end
end
Use class methods
require 'ostruct'
module Twain
  # Class-method-only variant of the text DSL: paragraphs are queued with
  # .write, wordings registered with .translate, and .generate compiles
  # everything with the class itself as the subject.
  class Builder
    @@translations = {}
    @@paras = []

    # Queue a paragraph built from the block's return value.
    def self.write(options = {}, &block)
      @@paras << Paragraph.new(block.call, options) # TODO: Pass whole block and call it later
    end

    # Register the possible wordings for +key+.
    def self.translate(key, *variations)
      @@translations[key] = variations
    end

    # Compile every queued paragraph against +subject+ into one string.
    def self.build(subject)
      @@paras.map { |para| para.compile(subject) }.join
    end

    # A random registered wording for +key+; raises KeyError when unknown.
    def self.tr(key)
      @@translations.fetch(key.to_sym).sample
    end

    def self.generate
      build(self)
    end
  end
end
|
# coding: utf-8
module Tweetwine
module CLI
DEFAULT_COMMAND = :home
DEFAULT_CONFIG = {
:config_file => "#{(ENV['HOME'] || ENV['USERPROFILE'])}/.tweetwine",
:env_lookouts => [],
:exec_name => "tweetwine",
:oauth => {},
:username => ENV['USER']
}.freeze
class << self
def start(args = ARGV, overriding_default_conf = nil)
init(args, overriding_default_conf)
run(args)
end
def config
@config ||= read_config
end
def http
@http ||= Http::Client.new(config)
end
def oauth
@oauth ||= OAuth.new(config[:oauth])
end
def twitter
@twitter ||= Twitter.new(config)
end
def ui
@ui ||= UI.new(config)
end
# Lazily-built URL shortener service.
def url_shortener
  # Memoize under @url_shortener — the ivar that #init resets. The
  # previous @url_shorterer typo meant #init's reset never cleared this
  # cache, so a re-init kept the stale shortener.
  @url_shortener ||= UrlShortener.new(config[:shorten_urls])
end
def commands
@commands ||= {
:primaries => {},
:secondaries => {}
}
end
def register_command(cmd_class, names)
commands[:primaries][names.first.to_sym] = cmd_class
names[1..-1].each { |name| commands[:secondaries][name.to_sym] = cmd_class }
end
def find_command(name)
name = name.to_sym
commands[:primaries][name] || commands[:secondaries][name]
end
def global_option_parser
@global_option_parser ||= OptionParser.new do |parser, options|
parser.on '-c', '--colors', 'Enable ANSI colors for output.' do
options[:colors] = true
end
parser.on '-h', '--help', 'Show this help and exit.' do
options[:command] = :help
end
parser.on '--http-proxy <url>', String, 'Enable HTTP(S) proxy.' do |arg|
options[:http_proxy] = arg
end
parser.on '--no-colors', 'Disable ANSI colors for output.' do
options[:colors] = false
end
parser.on '--no-http-proxy', 'Disable HTTP(S) proxy.' do
options[:http_proxy] = nil
end
parser.on '--no-url-shorten', 'Disable URL shortening.' do
options.delete :shorten_urls
end
parser.on '-n', '--num <n>', Integer, "Number of statuses per page (default #{Twitter::DEFAULT_NUM_STATUSES})." do |arg|
options[:num_statuses] = arg
end
parser.on '-p', '--page <p>', Integer, "Page number for statuses (default #{Twitter::DEFAULT_PAGE_NUM})." do |arg|
options[:page] = arg
end
parser.on '-u', '--username <user>', String, "User to authenticate (default '#{DEFAULT_CONFIG[:username]}')." do |arg|
options[:username] = arg
end
parser.on '-v', '--version', "Show version and exit." do
options[:command] = :version
end
end
end
private
def init(args, overriding_default_conf = nil)
@config = read_config(args, overriding_default_conf)
@http, @oauth, @twitter, @ui, @url_shortener = nil # reset
end
def run(args)
proposed_command = config[:command]
found_command = find_command proposed_command
raise UnknownCommandError, "unknown command: #{proposed_command}" unless found_command
found_command.new(args).run
self
end
def read_config(cmdline_args = [], overriding_default_config = nil)
default_config = overriding_default_config ? DEFAULT_CONFIG.merge(overriding_default_config) : DEFAULT_CONFIG
config = Config.read(cmdline_args, default_config) do |args|
parse_config_from_cmdline(args)
end
config
end
def parse_config_from_cmdline(args)
options = global_option_parser.parse(args)
unless options[:command]
cmd_via_arg = args.shift
options[:command] = cmd_via_arg ? cmd_via_arg.to_sym : DEFAULT_COMMAND
end
options
end
end
end
class Command
class << self
def inherited(child)
# Silence warnings about uninitialized variables if a child does not
# set its about, name, or usage.
child.instance_eval do
@about, @name, @usage = nil
end
end
def about(description = nil)
return @about unless description
@about = description.chomp
end
def register(*names)
@name = names.first
CLI.register_command(self, names)
end
def name
@name
end
# Usage description for the command, use if overriding #parse.
def usage(description = nil)
return @usage unless description
@usage = description
end
def show_usage(about_cmd = self)
about = about_cmd.about
exec_name = CLI.config[:exec_name]
name = about_cmd.name
usage = about_cmd.usage
result = <<-END
#{about}
Usage: #{exec_name} #{name} #{usage}
END
CLI.ui.info result.strip!
end
def abort_with_usage
show_usage
exit CommandLineError.status_code
end
end
def initialize(args)
parsing_succeeded = parse(args)
self.class.abort_with_usage unless parsing_succeeded
end
# Default behavior, which succeeds always; override for real argument
# parsing if the command needs arguments.
def parse(args)
true
end
end
class HelpCommand < Command
register "help"
about "Show help and exit. Try it with <command> argument."
usage <<-END
[<command>]
If <command> is given, show specific help about that command. If no
<command> is given, show general help.
END
def parse(args)
  # Did we arrive here via '-h' option? If so, we cannot have
  # +proposed_command+ because '-h' does not take an argument. Otherwise,
  # try to read the argument.
  proposed_command = args.include?('-h') ? nil : args.shift
  if proposed_command
    @command = CLI.find_command proposed_command
    # Name the offending command so the user knows what was mistyped
    # (matches the later revision of this file).
    CLI.ui.error "unknown command: #{proposed_command}\n\n" unless @command
    @command
  else
    @command = nil
    true
  end
end
def run
if @command
show_command_help
else
show_general_help
end
end
private
def show_command_help
self.class.show_usage @command
end
def show_general_help
exec_name = CLI.config[:exec_name]
command_descriptions = CLI.commands[:primaries].
entries.
sort { |a, b| a.first.to_s <=> b.first.to_s }.
map { |cmd, klass| [cmd, klass.about] }
CLI.ui.info <<-END
A simple but tasty Twitter agent for command line use, made for fun.
Usage: #{exec_name} [global_options...] [<command>] [command_options...]
Global options:
#{CLI.global_option_parser.help}
Commands:
#{command_descriptions.map { |cmd, desc| " %-14s%s" % [cmd, desc] }.join("\n") }
END
end
end
class HomeCommand < Command
register "home", "h"
about "Show authenticated user's home timeline (the default command)."
def run
CLI.twitter.home
end
end
class FollowersCommand < Command
register "followers", "fo"
about "Show authenticated user's followers and their latest tweets."
def run
CLI.twitter.followers
end
end
class FriendsCommand < Command
register "friends", "fr"
about "Show authenticated user's friends and their latest tweets."
def run
CLI.twitter.friends
end
end
class MentionsCommand < Command
register "mentions", "men", "m"
about "Show latest tweets that mention or are replies to the authenticated user."
def run
CLI.twitter.mentions
end
end
class SearchCommand < Command
def self.parser
@parser ||= OptionParser.new do |parser, options|
parser.on '-a', '--and', 'All words match (default).' do
options[:operator] = :and
end
parser.on '-o', '--or', 'Any word matches.' do
options[:operator] = :or
end
end
end
register "search", "s"
about "Search latest public tweets."
usage(Promise.new {<<-END
[--all | --or] <word>...
Command options:
#{parser.help}
END
})
def parse(args)
options = self.class.parser.parse(args)
@operator = options[:operator]
@words = args
if @words.empty?
CLI.ui.error "No search words.\n\n"
false
else
true
end
end
def run
CLI.twitter.search @words, @operator
end
end
class UpdateCommand < Command
register "update", "up"
about "Send new tweet."
usage <<-END
[<status>]
If <status> is not given, read the contents for the tweet from STDIN.
END
# Consume every remaining argument as the tweet text. Always succeeds:
# an empty status is allowed (the tweet is then read from STDIN).
def parse(args)
  @msg = args.slice!(0..-1).join(' ')
  true
end
def run
CLI.twitter.update @msg
end
end
class UserCommand < Command
register "user", "usr"
about "Show user's timeline."
usage <<-END
[<username>]
If <username> is not given, show authenticated user's timeline.
END
def parse(args)
@user = args.empty? ? CLI.config[:username] : args.shift
end
def run
CLI.twitter.user(@user)
end
end
class VersionCommand < Command
register "version", "ver", "v"
about "Show program version and exit."
def run
CLI.ui.info "tweetwine #{Tweetwine::VERSION}"
end
end
end
Report the name of the unknown command when help is requested for an invalid command
# coding: utf-8
module Tweetwine
module CLI
DEFAULT_COMMAND = :home
DEFAULT_CONFIG = {
:config_file => "#{(ENV['HOME'] || ENV['USERPROFILE'])}/.tweetwine",
:env_lookouts => [],
:exec_name => "tweetwine",
:oauth => {},
:username => ENV['USER']
}.freeze
class << self
def start(args = ARGV, overriding_default_conf = nil)
init(args, overriding_default_conf)
run(args)
end
def config
@config ||= read_config
end
def http
@http ||= Http::Client.new(config)
end
def oauth
@oauth ||= OAuth.new(config[:oauth])
end
def twitter
@twitter ||= Twitter.new(config)
end
def ui
@ui ||= UI.new(config)
end
# Lazily-built URL shortener service.
def url_shortener
  # Memoize under @url_shortener — the ivar that #init resets. The
  # previous @url_shorterer typo meant #init's reset never cleared this
  # cache, so a re-init kept the stale shortener.
  @url_shortener ||= UrlShortener.new(config[:shorten_urls])
end
def commands
@commands ||= {
:primaries => {},
:secondaries => {}
}
end
def register_command(cmd_class, names)
commands[:primaries][names.first.to_sym] = cmd_class
names[1..-1].each { |name| commands[:secondaries][name.to_sym] = cmd_class }
end
def find_command(name)
name = name.to_sym
commands[:primaries][name] || commands[:secondaries][name]
end
def global_option_parser
@global_option_parser ||= OptionParser.new do |parser, options|
parser.on '-c', '--colors', 'Enable ANSI colors for output.' do
options[:colors] = true
end
parser.on '-h', '--help', 'Show this help and exit.' do
options[:command] = :help
end
parser.on '--http-proxy <url>', String, 'Enable HTTP(S) proxy.' do |arg|
options[:http_proxy] = arg
end
parser.on '--no-colors', 'Disable ANSI colors for output.' do
options[:colors] = false
end
parser.on '--no-http-proxy', 'Disable HTTP(S) proxy.' do
options[:http_proxy] = nil
end
parser.on '--no-url-shorten', 'Disable URL shortening.' do
options.delete :shorten_urls
end
parser.on '-n', '--num <n>', Integer, "Number of statuses per page (default #{Twitter::DEFAULT_NUM_STATUSES})." do |arg|
options[:num_statuses] = arg
end
parser.on '-p', '--page <p>', Integer, "Page number for statuses (default #{Twitter::DEFAULT_PAGE_NUM})." do |arg|
options[:page] = arg
end
parser.on '-u', '--username <user>', String, "User to authenticate (default '#{DEFAULT_CONFIG[:username]}')." do |arg|
options[:username] = arg
end
parser.on '-v', '--version', "Show version and exit." do
options[:command] = :version
end
end
end
private
def init(args, overriding_default_conf = nil)
@config = read_config(args, overriding_default_conf)
@http, @oauth, @twitter, @ui, @url_shortener = nil # reset
end
def run(args)
proposed_command = config[:command]
found_command = find_command proposed_command
raise UnknownCommandError, "unknown command: #{proposed_command}" unless found_command
found_command.new(args).run
self
end
def read_config(cmdline_args = [], overriding_default_config = nil)
default_config = overriding_default_config ? DEFAULT_CONFIG.merge(overriding_default_config) : DEFAULT_CONFIG
config = Config.read(cmdline_args, default_config) do |args|
parse_config_from_cmdline(args)
end
config
end
def parse_config_from_cmdline(args)
options = global_option_parser.parse(args)
unless options[:command]
cmd_via_arg = args.shift
options[:command] = cmd_via_arg ? cmd_via_arg.to_sym : DEFAULT_COMMAND
end
options
end
end
end
class Command
class << self
def inherited(child)
# Silence warnings about uninitialized variables if a child does not
# set its about, name, or usage.
child.instance_eval do
@about, @name, @usage = nil
end
end
def about(description = nil)
return @about unless description
@about = description.chomp
end
def register(*names)
@name = names.first
CLI.register_command(self, names)
end
def name
@name
end
# Usage description for the command, use if overriding #parse.
def usage(description = nil)
return @usage unless description
@usage = description
end
def show_usage(about_cmd = self)
about = about_cmd.about
exec_name = CLI.config[:exec_name]
name = about_cmd.name
usage = about_cmd.usage
result = <<-END
#{about}
Usage: #{exec_name} #{name} #{usage}
END
CLI.ui.info result.strip!
end
def abort_with_usage
show_usage
exit CommandLineError.status_code
end
end
def initialize(args)
parsing_succeeded = parse(args)
self.class.abort_with_usage unless parsing_succeeded
end
# Default behavior, which succeeds always; override for real argument
# parsing if the command needs arguments.
def parse(args)
true
end
end
class HelpCommand < Command
register "help"
about "Show help and exit. Try it with <command> argument."
usage <<-END
[<command>]
If <command> is given, show specific help about that command. If no
<command> is given, show general help.
END
def parse(args)
# Did we arrive here via '-h' option? If so, we cannot have
# +proposed_command+ because '-h' does not take an argument. Otherwise,
# try to read the argument.
proposed_command = args.include?('-h') ? nil : args.shift
if proposed_command
@command = CLI.find_command proposed_command
CLI.ui.error "unknown command: #{proposed_command}\n\n" unless @command
@command
else
@command = nil
true
end
end
def run
if @command
show_command_help
else
show_general_help
end
end
private
def show_command_help
self.class.show_usage @command
end
def show_general_help
exec_name = CLI.config[:exec_name]
command_descriptions = CLI.commands[:primaries].
entries.
sort { |a, b| a.first.to_s <=> b.first.to_s }.
map { |cmd, klass| [cmd, klass.about] }
CLI.ui.info <<-END
A simple but tasty Twitter agent for command line use, made for fun.
Usage: #{exec_name} [global_options...] [<command>] [command_options...]
Global options:
#{CLI.global_option_parser.help}
Commands:
#{command_descriptions.map { |cmd, desc| " %-14s%s" % [cmd, desc] }.join("\n") }
END
end
end
class HomeCommand < Command
register "home", "h"
about "Show authenticated user's home timeline (the default command)."
def run
CLI.twitter.home
end
end
class FollowersCommand < Command
register "followers", "fo"
about "Show authenticated user's followers and their latest tweets."
def run
CLI.twitter.followers
end
end
class FriendsCommand < Command
register "friends", "fr"
about "Show authenticated user's friends and their latest tweets."
def run
CLI.twitter.friends
end
end
class MentionsCommand < Command
register "mentions", "men", "m"
about "Show latest tweets that mention or are replies to the authenticated user."
def run
CLI.twitter.mentions
end
end
class SearchCommand < Command
def self.parser
@parser ||= OptionParser.new do |parser, options|
parser.on '-a', '--and', 'All words match (default).' do
options[:operator] = :and
end
parser.on '-o', '--or', 'Any word matches.' do
options[:operator] = :or
end
end
end
register "search", "s"
about "Search latest public tweets."
usage(Promise.new {<<-END
[--all | --or] <word>...
Command options:
#{parser.help}
END
})
def parse(args)
options = self.class.parser.parse(args)
@operator = options[:operator]
@words = args
if @words.empty?
CLI.ui.error "No search words.\n\n"
false
else
true
end
end
def run
CLI.twitter.search @words, @operator
end
end
class UpdateCommand < Command
register "update", "up"
about "Send new tweet."
usage <<-END
[<status>]
If <status> is not given, read the contents for the tweet from STDIN.
END
# Consume every remaining argument as the tweet text. Always succeeds:
# an empty status is allowed (the tweet is then read from STDIN).
def parse(args)
  @msg = args.slice!(0..-1).join(' ')
  true
end
def run
CLI.twitter.update @msg
end
end
class UserCommand < Command
register "user", "usr"
about "Show user's timeline."
usage <<-END
[<username>]
If <username> is not given, show authenticated user's timeline.
END
def parse(args)
@user = args.empty? ? CLI.config[:username] : args.shift
end
def run
CLI.twitter.user(@user)
end
end
class VersionCommand < Command
register "version", "ver", "v"
about "Show program version and exit."
def run
CLI.ui.info "tweetwine #{Tweetwine::VERSION}"
end
end
end
|
module Usable
class Config < BasicObject
def each(&block)
@spec.to_h.each(&block)
end
def spec(key, value = nil)
@spec ||= ::OpenStruct.new
if value
@spec[key.to_s.tr('=', '')] = value
else
@spec[key]
end
end
def [](key)
spec key
end
def []=(key, val)
spec key, val
end
def method_missing(method_name, *args, &_block)
spec method_name, *args
end
def respond_to_missing?(method_name, _private = false)
method_name.end_with?('=') || spec.respond_to?(method_name)
end
def available_methods
modules.each_with_object(::Hash.new(default_method)) do |mod, result|
mod.instance_methods.each do |method_name|
result[method_name] = mod.instance_method method_name
end
end
end
def add_module(mod)
modules << mod
end
#
# Internal
#
def modules
@modules ||= []
end
#
# Private
#
def default_method
Null.instance_method(:default_method)
end
module Null
def default_method(*, &block)
end
end
end
end
Add Config#inspect since it now inherits from BasicObject
module Usable
class Config < BasicObject
def each(&block)
@spec.to_h.each(&block)
end
def spec(key, value = nil)
@spec ||= ::OpenStruct.new
if value
@spec[key.to_s.tr('=', '')] = value
else
@spec[key]
end
end
def [](key)
spec key
end
def []=(key, val)
spec key, val
end
def method_missing(method_name, *args, &_block)
spec method_name, *args
end
def respond_to_missing?(method_name, _private = false)
method_name.end_with?('=') || spec.respond_to?(method_name)
end
def available_methods
modules.each_with_object(::Hash.new(default_method)) do |mod, result|
mod.instance_methods.each do |method_name|
result[method_name] = mod.instance_method method_name
end
end
end
def add_module(mod)
modules << mod
end
def inspect
::Object.instance_method(:inspect).bind(self).call
end
alias_method :to_s, :inspect
#
# Internal
#
def modules
@modules ||= []
end
#
# Private
#
def default_method
Null.instance_method(:default_method)
end
module Null
def default_method(*, &block)
end
end
end
end
|
# frozen_string_literal: true
module UsesThis
  # A single link shared on the site, loaded from a YAML metadata file.
  class Link
    attr_accessor :slug, :name, :url, :description

    # path - YAML file whose basename (minus extension) becomes the slug.
    # A missing 'description' key defaults to an empty string.
    def initialize(path)
      data = YAML.load_file(path)
      @slug = File.basename(path, File.extname(path))
      @name = data['name']
      @url = data['url']
      @description = data['description'] || ''
    end

    def inspect
      format('#<%s @slug=%s @name=%s @url=%s @description=%s>',
             self.class, @slug, @name, @url, @description)
    end
  end
end
Remove the link class
|
require "vagrant/registry"
module Vagrant
  # This class imports and processes CLI aliases stored in ~/.vagrant.d/aliases
  class Alias
    # env - the Vagrant environment; env.home_path must point at the
    # directory holding the "aliases" file (one "keyword = command" per
    # line). Both `env` and `command` are captured by the per-alias
    # lambdas via closure.
    def initialize(env)
      @aliases = Registry.new
      aliases_file = env.home_path.join("aliases")
      if aliases_file.file?
        aliases_file.readlines.each do |line|
          # skip comments
          next if line.strip.start_with?("#")
          # separate keyword-command pairs
          keyword, command = line.split("=").collect(&:strip)
          @aliases.register(keyword.to_sym) do
            lambda do |args|
              # directly execute shell commands
              if command.start_with?("!")
                return exec "#{command[1..-1]} #{args.join(" ")}".strip
              end
              # everything else is re-dispatched through the Vagrant CLI
              return CLI.new(command.split.concat(args), env).execute
            end
          end
        end
      end
    end
    # Returns the Registry of alias keyword => lambda entries.
    def commands
      @aliases
    end
  end
end
move alias registration into a separate register() method
require "vagrant/registry"
module Vagrant
  # This class imports and processes CLI aliases stored in ~/.vagrant.d/aliases
  class Alias
    def initialize(env)
      # Keep the environment for #register: the alias lambdas need it to
      # re-dispatch through the Vagrant CLI. (Previously `env` was
      # referenced inside #register where it was not in scope, so running
      # any non-shell alias raised NameError.)
      @env = env
      @aliases = Registry.new
      aliases_file = env.home_path.join("aliases")
      if aliases_file.file?
        aliases_file.readlines.each do |line|
          # skip comments
          next if line.strip.start_with?("#")
          # separate keyword-command pairs
          keyword, command = line.split("=").collect(&:strip)
          register(keyword, command)
        end
      end
    end

    # This returns all the registered alias commands.
    def commands
      @aliases
    end

    # This registers an alias.
    def register(keyword, command)
      @aliases.register(keyword.to_sym) do
        lambda do |args|
          # directly execute shell commands
          if command.start_with?("!")
            return exec "#{command[1..-1]} #{args.join(" ")}".strip
          end
          return CLI.new(command.split.concat(args), @env).execute
        end
      end
    end
  end
end
|
require 'vcloud'
module Vcloud
  class Launch
    def initialize
      @config_loader = Vcloud::ConfigLoader.new
    end

    # Provisions every vApp in the config file and powers each on unless
    # :no_power_on is set. A provisioning failure stops the run unless
    # :continue_on_error is set.
    #
    # Previously the body read mostly from @cli_options but checked the
    # local `options` for :continue_on_error; use the parameter
    # consistently and drop the unnecessary instance variable.
    def run(config_file = nil, options = {})
      puts "cli_options:" if options[:debug]
      pp options if options[:debug]
      config = @config_loader.load_config(config_file)
      config[:vapps].each do |vapp_config|
        Vcloud.logger.info("Configuring vApp #{vapp_config[:name]}.")
        begin
          vapp = ::Vcloud::VappOrchestrator.provision(vapp_config)
          vapp.power_on unless options[:no_power_on]
        rescue RuntimeError => e
          Vcloud.logger.error("Could not provision vApp: #{e.message}")
          break unless options[:continue_on_error]
        end
      end
    end
  end
end
got rid of unnecessary instance variable
require 'vcloud'
module Vcloud
  class Launch
    def initialize
      @config_loader = Vcloud::ConfigLoader.new
    end
    # Provisions every vApp in the config file and powers each on unless
    # :no_power_on is set. A provisioning failure stops the run unless
    # :continue_on_error is set.
    def run(config_file = nil, cli_options = {})
      puts "cli_options:" if cli_options[:debug]
      pp cli_options if cli_options[:debug]
      config = @config_loader.load_config(config_file)
      config[:vapps].each do |vapp_config|
        Vcloud.logger.info("Configuring vApp #{vapp_config[:name]}.")
        begin
          vapp = ::Vcloud::VappOrchestrator.provision(vapp_config)
          vapp.power_on unless cli_options[:no_power_on]
        rescue RuntimeError => e
          Vcloud.logger.error("Could not provision vApp: #{e.message}")
          break unless cli_options[:continue_on_error]
        end
      end
    end
  end
end
|
$:.unshift(File.dirname(__FILE__)) unless
$:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
require 'versatile_rjs/page'
require 'versatile_rjs/proxy'
require 'versatile_rjs/template_handler'
require 'versatile_rjs/railtie'
module VersatileRJS
  VERSION = '0.0.2'

  class << self
    # Selected JS framework and RJS debug flag, set by the host app.
    attr_accessor :javascript_framework
    attr_accessor :debug_rjs
  end
end
Add alias debug_rjs? / implementation_class_of method
$:.unshift(File.dirname(__FILE__)) unless
$:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
require 'versatile_rjs/page'
require 'versatile_rjs/proxy'
require 'versatile_rjs/template_handler'
require 'versatile_rjs/railtie'
module VersatileRJS
  VERSION = '0.1.0'
  class <<self
    attr_accessor :javascript_framework, :debug_rjs
    # Predicate-style alias; note it returns the raw attribute value,
    # which may be nil rather than false.
    alias_method :debug_rjs?, :debug_rjs
  end
  # Module name fragment for the selected framework, e.g. :jquery -> "Jquery".
  # NOTE(review): relies on ActiveSupport's String#camelcase — confirm.
  def self.framework_module
    javascript_framework.to_s.camelcase
  end
  # Maps an abstract class A::B::C to its framework-specific counterpart
  # A::B::<Framework>::C and resolves it via constantize (ActiveSupport).
  def self.implementation_class_of(mod)
    class_name_tree = mod.name.split('::')
    class_dirnames = class_name_tree[0...-1]
    class_basename = class_name_tree[-1]
    implementation_class_name = [class_dirnames, framework_module, class_basename].flatten.join('::')
    implementation_class_name.constantize
  end
end
|
Loomio::Version::PATCH = 13
bump version
Loomio::Version::PATCH = 14 |
Loomio::Version::PATCH = 18
bump version
Loomio::Version::PATCH = 19 |
module Vultr
  # Gem version, kept as components so each part can be inspected.
  module VERSION
    MAJOR = 2
    MINOR = 0
    TINY = 0
    PRE = nil

    # The dotted version string, e.g. "2.0.0". Previously the join was
    # the module body's last expression and its result was discarded,
    # leaving no way to read the assembled version.
    STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
  end
end
Update version.rb
module Vultr
  # Current gem release.
  VERSION = "2.0.0"
end
|
require 'yaml'
module Watir
class Cookies
def initialize(control)
@control = control
end
#
# Returns array of cookies.
#
# @example
# browser.cookies.to_a
# #=> {:name=>"my_session", :value=>"BAh7B0kiD3Nlc3Npb25faWQGOgZFRkk", :domain=>"mysite.com"}
#
# @return [Array<Hash>]
#
def to_a
@control.all_cookies.map do |e|
e.merge(expires: e[:expires] ? e[:expires].to_time : nil)
end
end
#
# Returns a cookie by name.
#
# @example
# browser.cookies[:my_session]
# #=> {:name=>"my_session", :value=>"BAh7B0kiD3Nlc3Npb25faWQGOgZFRkk", :domain=>"mysite.com"}
#
# @param [Symbol] name
# @return <Hash> or nil if not found
#
def [](name)
to_a.find { |c| c[:name] == name.to_s }
end
#
# Adds new cookie.
#
# @example
# browser.cookies.add 'my_session', 'BAh7B0kiD3Nlc3Npb25faWQGOgZFRkk', secure: true
#
# @param [String] name
# @param [String] value
# @param [Hash] opts
# @option opts [Boolean] :secure
# @option opts [String] :path
# @option opts [Time, DateTime, NilClass] :expires
# @option opts [String] :domain
#
def add(name, value, opts = {})
cookie = {
name: name,
value: value
}
cookie[:secure] = opts[:secure] if opts.key?(:secure)
cookie[:path] = opts[:path] if opts.key?(:path)
expires = opts[:expires]
if expires
cookie[:expires] = expires.is_a?(String) ? ::Time.parse(expires) : expires
end
cookie[:domain] = opts[:domain] if opts.key?(:domain)
@control.add_cookie cookie
end
#
# Deletes cookie by given name.
#
# @example
# browser.cookies.delete 'my_session'
#
# @param [String] name
#
def delete(name)
@control.delete_cookie(name)
end
#
# Deletes all cookies.
#
# @example
# browser.cookies.clear
#
def clear
@control.delete_all_cookies
end
#
# Save cookies to file
#
# @example
# browser.cookies.save '.cookies'
#
# @param [String] file
#
def save(file = '.cookies')
IO.write(file, to_a.to_yaml)
end
#
# Load cookies from file
#
# @example
#   browser.cookies.load '.cookies'
#
# @param [String] file
#
def load(file = '.cookies')
  # Psych 3.1+ keyword form; the positional whitelist argument this code
  # previously used was removed in Psych 4.
  YAML.safe_load(IO.read(file), permitted_classes: [::Symbol, ::Time]).each do |c|
    add(c.delete(:name), c.delete(:value), c)
  end
end
end # Cookies
end # Watir
update to non-deprecated method for psych
require 'yaml'
module Watir
class Cookies
def initialize(control)
@control = control
end
#
# Returns array of cookies.
#
# @example
# browser.cookies.to_a
# #=> {:name=>"my_session", :value=>"BAh7B0kiD3Nlc3Npb25faWQGOgZFRkk", :domain=>"mysite.com"}
#
# @return [Array<Hash>]
#
def to_a
@control.all_cookies.map do |e|
e.merge(expires: e[:expires] ? e[:expires].to_time : nil)
end
end
#
# Returns a cookie by name.
#
# @example
# browser.cookies[:my_session]
# #=> {:name=>"my_session", :value=>"BAh7B0kiD3Nlc3Npb25faWQGOgZFRkk", :domain=>"mysite.com"}
#
# @param [Symbol] name
# @return <Hash> or nil if not found
#
def [](name)
to_a.find { |c| c[:name] == name.to_s }
end
#
# Adds new cookie.
#
# @example
# browser.cookies.add 'my_session', 'BAh7B0kiD3Nlc3Npb25faWQGOgZFRkk', secure: true
#
# @param [String] name
# @param [String] value
# @param [Hash] opts
# @option opts [Boolean] :secure
# @option opts [String] :path
# @option opts [Time, DateTime, NilClass] :expires
# @option opts [String] :domain
#
def add(name, value, opts = {})
  cookie = { name: name, value: value }
  cookie[:secure] = opts[:secure] if opts.key?(:secure)
  cookie[:path] = opts[:path] if opts.key?(:path)
  if (expires = opts[:expires])
    # Accept both Time-like objects and parseable strings.
    cookie[:expires] = expires.is_a?(String) ? ::Time.parse(expires) : expires
  end
  cookie[:domain] = opts[:domain] if opts.key?(:domain)
  @control.add_cookie cookie
end
#
# Deletes cookie by given name.
#
# @example
# browser.cookies.delete 'my_session'
#
# @param [String] name
#
def delete(name)
@control.delete_cookie(name)
end
#
# Deletes all cookies.
#
# @example
# browser.cookies.clear
#
def clear
@control.delete_all_cookies
end
#
# Save cookies to file
#
# @example
# browser.cookies.save '.cookies'
#
# @param [String] file
#
def save(file = '.cookies')
IO.write(file, to_a.to_yaml)
end
#
# Load cookies from file
#
# @example
# browser.cookies.load '.cookies'
#
# @param [String] file
#
def load(file = '.cookies')
YAML.safe_load(IO.read(file), permitted_classes: [::Symbol, ::Time]).each do |c|
add(c.delete(:name), c.delete(:value), c)
end
end
end # Cookies
end # Watir
|
# Namespace for the Weary HTTP client library.
module Weary
  # Released gem version.
  VERSION = "1.1.2"
end
Update to v1.1.3
# Namespace for the Weary HTTP client library.
module Weary
  # Released gem version.
  VERSION = "1.1.3"
end
|
# frozen_string_literal: true
require 'faraday'
require 'faraday_middleware'
# Per-request socket timeouts (seconds) applied to every connection
# built by WebTest::Util.connection.
TIMEOUT_IN_SECONDS = 5
OPEN_TIMEOUT_IN_SECONDS = 5

module WebTest
  # HTTP probing helpers: status lookups (with optional redirect
  # following) and SSL certificate checks for a URL or bare domain name.
  module Util
    # Joins errors into one readable message: anything responding to
    # #message is returned verbatim, otherwise the collection is
    # stringified and joined with semicolons.
    def self.error_message(errors)
      return errors.message if errors.respond_to?(:message)
      errors
        .map(&:to_s)
        .join('; ')
        .capitalize
    end

    # HTTP status code for the target; retries with GET when the server
    # answers HEAD with 405 Method Not Allowed.
    def self.status(url_or_domain_name, follow: false)
      code = head(url_or_domain_name, follow: follow)[0]
      return code if code != 405
      get(url_or_domain_name, follow: follow)[0]
    end

    def self.head(url_or_domain_name, follow: false)
      request(:head, url_or_domain_name, follow: follow)
    end

    def self.get(url_or_domain_name, follow: false)
      request(:get, url_or_domain_name, follow: follow)
    end

    # Performs the request and returns [status_code, headers].
    def self.request(method, url_or_domain_name, follow: false)
      url = make_url(url_or_domain_name)
      response = recheck_on_timeout { connection(follow: follow).send(method, url) }
      [response.status, response.headers]
    end

    # @return true if the given page has status 200,
    # and follow a few redirects if necessary.
    def self.up?(url_or_domain_name)
      url = make_url(url_or_domain_name)
      conn = connection(follow: true)
      response = recheck_on_timeout { conn.head(url) }
      response.status == 200
    end

    # True when an HTTPS connection can be established with a valid cert.
    def self.valid_cert?(domain_name_or_url)
      try_ssl_connection(domain_name_or_url)
      true
    rescue
      # Not serving SSL, expired, or incorrect domain name in certificate
      false
    end

    # Attempts an HTTPS HEAD request; raises on any SSL/connection error.
    def self.try_ssl_connection(domain_name_or_url)
      url = "https://#{remove_protocol(domain_name_or_url)}"
      recheck_on_timeout { connection.head(url) }
      true
    end

    # private

    # Builds a Faraday connection with the module-wide timeouts;
    # optionally follows up to 4 redirects.
    def self.connection(follow: false)
      Faraday.new do |c|
        c.options[:timeout] = TIMEOUT_IN_SECONDS
        c.options[:open_timeout] = OPEN_TIMEOUT_IN_SECONDS
        c.use(FaradayMiddleware::FollowRedirects, limit: 4) if follow
        c.adapter :net_http
      end
    end

    # Ensure that the given string is a URL,
    # making it into one if necessary.
    def self.make_url(url_or_domain_name)
      if %r{^https?://} =~ url_or_domain_name
        url_or_domain_name
      else
        "http://#{url_or_domain_name}"
      end
    end

    # Returns everything after a leading http(s):// scheme, or the input
    # unchanged when no scheme is present.
    def self.make_domain_name(url_or_domain_name)
      if %r{^https?://(.+)} =~ url_or_domain_name
        $1
      else
        url_or_domain_name
      end
    end

    # Normalize the input: remove 'http(s)://' if it's there
    def self.remove_protocol(domain_name_or_url)
      %r{^https?://(?<name>.+)$} =~ domain_name_or_url
      name || domain_name_or_url
    end

    # Retries the block exactly once on a Faraday timeout.
    # NOTE(review): Faraday::Error::TimeoutError is the pre-1.0 constant;
    # Faraday >= 1.0 renamed it Faraday::TimeoutError — confirm the
    # pinned faraday version before upgrading.
    def self.recheck_on_timeout
      yield
    rescue Faraday::Error::TimeoutError
      yield
    end
  end
end
docs: new function
# frozen_string_literal: true
require 'faraday'
require 'faraday_middleware'
# Per-request socket timeouts (seconds) applied to every connection
# built by WebTest::Util.connection.
TIMEOUT_IN_SECONDS = 5
OPEN_TIMEOUT_IN_SECONDS = 5

module WebTest
  # HTTP probing helpers: status lookups (with optional redirect
  # following) and SSL certificate checks for a URL or bare domain name.
  module Util
    # Joins errors into one readable message: anything responding to
    # #message is returned verbatim, otherwise the collection is
    # stringified and joined with semicolons.
    def self.error_message(errors)
      return errors.message if errors.respond_to?(:message)
      errors
        .map(&:to_s)
        .join('; ')
        .capitalize
    end

    # HTTP status code for the target; retries with GET when the server
    # answers HEAD with 405 Method Not Allowed.
    def self.status(url_or_domain_name, follow: false)
      code = head(url_or_domain_name, follow: follow)[0]
      return code if code != 405
      get(url_or_domain_name, follow: follow)[0]
    end

    def self.head(url_or_domain_name, follow: false)
      request(:head, url_or_domain_name, follow: follow)
    end

    def self.get(url_or_domain_name, follow: false)
      request(:get, url_or_domain_name, follow: follow)
    end

    # Performs the request and returns [status_code, headers].
    def self.request(method, url_or_domain_name, follow: false)
      url = make_url(url_or_domain_name)
      response = recheck_on_timeout { connection(follow: follow).send(method, url) }
      [response.status, response.headers]
    end

    # @return true if the given page has status 200,
    # and follow a few redirects if necessary.
    def self.up?(url_or_domain_name)
      url = make_url(url_or_domain_name)
      conn = connection(follow: true)
      response = recheck_on_timeout { conn.head(url) }
      response.status == 200
    end

    # True when an HTTPS connection can be established with a valid cert.
    def self.valid_cert?(domain_name_or_url)
      try_ssl_connection(domain_name_or_url)
      true
    rescue
      # Not serving SSL, expired, or incorrect domain name in certificate
      false
    end

    # Attempts an HTTPS HEAD request; raises on any SSL/connection error.
    def self.try_ssl_connection(domain_name_or_url)
      url = "https://#{remove_protocol(domain_name_or_url)}"
      recheck_on_timeout { connection.head(url) }
      true
    end

    # private

    # Builds a Faraday connection with the module-wide timeouts;
    # optionally follows up to 4 redirects.
    def self.connection(follow: false)
      Faraday.new do |c|
        c.options[:timeout] = TIMEOUT_IN_SECONDS
        c.options[:open_timeout] = OPEN_TIMEOUT_IN_SECONDS
        c.use(FaradayMiddleware::FollowRedirects, limit: 4) if follow
        c.adapter :net_http
      end
    end

    # Ensure that the given string is a URL,
    # making it into one if necessary.
    def self.make_url(url_or_domain_name)
      if %r{^https?://} =~ url_or_domain_name
        url_or_domain_name
      else
        "http://#{url_or_domain_name}"
      end
    end

    # Return just the domain name portion of a URL if
    # it's simply of the form http://name.tld
    def self.make_domain_name(url_or_domain_name)
      if %r{^https?://(.+)} =~ url_or_domain_name
        $1
      else
        url_or_domain_name
      end
    end

    # Normalize the input: remove 'http(s)://' if it's there
    def self.remove_protocol(domain_name_or_url)
      %r{^https?://(?<name>.+)$} =~ domain_name_or_url
      name || domain_name_or_url
    end

    # Retries the block exactly once on a Faraday timeout.
    # NOTE(review): Faraday::Error::TimeoutError is the pre-1.0 constant;
    # Faraday >= 1.0 renamed it Faraday::TimeoutError — confirm the
    # pinned faraday version before upgrading.
    def self.recheck_on_timeout
      yield
    rescue Faraday::Error::TimeoutError
      yield
    end
  end
end
|
module Woli
  # CLI commands (Thor-style desc/long_desc DSL) for the diary.
  class Cli
    desc 'edit DAY', 'Edit a diary entry for a given day.'
    long_desc <<-END
Edit a diary entry for a given day.
#{DateParser.parse_date_long_desc}
    END
    # Opens the entry for +fuzzy_date+ (default: 'today') in the user's
    # editor via a temp file, then writes the edited text back and
    # persists the entry. A missing entry is created empty.
    def edit(fuzzy_date = 'today')
      date = DateParser.parse_date(fuzzy_date)
      entry = Woli.repository.load_entry(date) || DiaryEntry.new(date, '', Woli.repository)
      temp_file_name = generate_temp_file_name(entry)
      save_text_to_temp_file(entry, temp_file_name)
      edit_file_in_editor(temp_file_name)
      load_text_from_temp_file(entry, temp_file_name)
      entry.persist
    end

    private

    def save_text_to_temp_file(entry, temp_file_name)
      File.write(temp_file_name, entry.text)
    end

    # Reads the edited text back into the entry and removes the temp file.
    def load_text_from_temp_file(entry, temp_file_name)
      entry.text = File.read(temp_file_name)
      File.delete(temp_file_name)
    end

    # Predictable per-date path under /tmp; extension comes from config.
    # NOTE(review): a fixed, predictable /tmp path is symlink-attack prone
    # on shared hosts — consider Tempfile; confirm the threat model.
    def generate_temp_file_name(entry)
      "/tmp/woli_edit_entry_#{entry.date.strftime('%Y_%m_%d')}.#{Woli.config['edit_entry_extension']}"
    end

    # Runs the configured editor attached to the current terminal.
    # NOTE(review): Woli.editor and file_name are interpolated into a
    # shell command unescaped — acceptable only for trusted config;
    # confirm no untrusted input reaches here.
    def edit_file_in_editor(file_name)
      tty = `tty`.strip
      `#{Woli.editor} < #{tty} > #{tty} #{file_name}`
    end
  end
end
use diary#load_or_create_entry for edit command
module Woli
  # CLI commands (Thor-style desc/long_desc DSL) for the diary.
  class Cli
    desc 'edit DAY', 'Edit a diary entry for a given day.'
    long_desc <<-END
Edit a diary entry for a given day.
#{DateParser.parse_date_long_desc}
    END
    # Opens the entry for +fuzzy_date+ (default: 'today') in the user's
    # editor via a temp file, then writes the edited text back and
    # persists the entry. Entry creation is delegated to the diary.
    def edit(fuzzy_date = 'today')
      date = DateParser.parse_date(fuzzy_date)
      entry = Woli.diary.load_or_create_entry(date)
      temp_file_name = generate_temp_file_name(entry)
      save_text_to_temp_file(entry, temp_file_name)
      edit_file_in_editor(temp_file_name)
      load_text_from_temp_file(entry, temp_file_name)
      entry.persist
    end

    private

    def save_text_to_temp_file(entry, temp_file_name)
      File.write(temp_file_name, entry.text)
    end

    # Reads the edited text back into the entry and removes the temp file.
    def load_text_from_temp_file(entry, temp_file_name)
      entry.text = File.read(temp_file_name)
      File.delete(temp_file_name)
    end

    # Predictable per-date path under /tmp; extension comes from config.
    # NOTE(review): a fixed, predictable /tmp path is symlink-attack prone
    # on shared hosts — consider Tempfile; confirm the threat model.
    def generate_temp_file_name(entry)
      "/tmp/woli_edit_entry_#{entry.date.strftime('%Y_%m_%d')}.#{Woli.config['edit_entry_extension']}"
    end

    # Runs the configured editor attached to the current terminal.
    # NOTE(review): Woli.editor and file_name are interpolated into a
    # shell command unescaped — acceptable only for trusted config;
    # confirm no untrusted input reaches here.
    def edit_file_in_editor(file_name)
      tty = `tty`.strip
      `#{Woli.editor} < #{tty} > #{tty} #{file_name}`
    end
  end
end
|
require 'active_support/core_ext/object/blank'
module Wprapper
  # Wraps a WordPress post fetched over the XML-RPC/JSON APIs exposed by
  # the `wordpress` / `wordpress_json_api` clients on Base.
  class Post < Base
    property :categories
    property :content
    property :identifier
    property :image_url
    property :portrait_image_url
    property :published_at
    property :title
    property :title_position
    property :url
    property :status
    property :author_id
    property :custom_fields

    # Translates a raw WordPress post hash into the property hash
    # expected by Post.new.
    class Mapper
      def initialize(wp_post_hash)
        @wp_post_hash = wp_post_hash
      end

      # @return [Hash] keyword attributes for Post.new; required keys are
      #   read with #fetch so a malformed payload raises KeyError.
      def to_h
        r = @wp_post_hash
        {
          categories: fetch_categories,
          content: r.fetch('post_content'),
          identifier: r.fetch('post_id'),
          image_url: fetch_image_url,
          portrait_image_url: fetch_custom_field('portrait_image', nil),
          published_at: r.fetch('post_date_gmt').to_time,
          title: r.fetch('post_title'),
          title_position: fetch_custom_field('title_position', nil),
          url: r.fetch('link'),
          status: r.fetch('post_status'),
          author_id: r.fetch('post_author'),
          custom_fields: fetch_custom_fields
        }
      end

      # post_thumbnail arrives either as a hash (with 'link') or as an
      # array whose first element is the URL.
      def fetch_image_url
        post_thumbnail = @wp_post_hash.fetch('post_thumbnail', {})
        if post_thumbnail.is_a?(Hash)
          post_thumbnail.fetch('link', nil)
        else
          post_thumbnail.first
        end
      end

      def fetch_custom_fields
        @custom_fields ||= @wp_post_hash.fetch('custom_fields', [])
      end

      def terms
        @wp_post_hash.fetch('terms', [])
      end

      # Value of the custom field named +key+, or +default+ when absent.
      def fetch_custom_field(key, default)
        field = fetch_custom_fields.find { |f|
          f.fetch('key') == key
        }
        if field.present?
          field.fetch('value')
        else
          default
        end
      end

      # Category terms mapped into Category objects.
      def fetch_categories
        terms.select{|t| t['taxonomy'] == 'category'}
          .map{|c| Category.new_from_wp(c)}
      end

      # Name of the first term of +taxonomy+, or +default+ when absent.
      def fetch_term(taxonomy, default)
        term = terms.find { |t|
          t.fetch('taxonomy') == taxonomy
        }
        if term.present?
          term.fetch('name')
        else
          default
        end
      end
    end

    class << self
      def new_from_wp(r)
        new(Mapper.new(r).to_h)
      end

      # Most recent +number+ published posts, newest first.
      def latest(number)
        filters = {
          number: number,
          order: 'desc',
          orderby: 'post_date_gmt',
          post_status: 'publish',
          post_type: 'post'
        }
        wordpress.posts(filters).map do |r|
          Post.new_from_wp(r)
        end
      end

      def find(post_id)
        wp_post = wordpress.post(post_id)
        Post.new_from_wp(wp_post)
      end

      # Uploads the image bytes as media and sets it as the post's
      # featured image.
      def upload_feature_image(post_id, filename, image_bytes)
        media = wordpress_json_api.upload_media(filename, image_bytes)
        Post.set_featured_image(post_id, media['ID'])
      end

      def set_featured_image(post_id, media_id)
        Post.wordpress.update_post(post_id, { post_thumbnail: media_id })
      end
    end

    def published?
      status == 'publish'
    end

    # Merges +new_custom_fields+ (nil values dropped) into the post's
    # existing custom fields and pushes the result to WordPress.
    def update_custom_fields(new_custom_fields)
      new_custom_fields = cleanup_hash_of_nil_values(new_custom_fields)
      custom_fields_to_update = merge_custom_fields(new_custom_fields)
      Post.wordpress.update_post(identifier, custom_fields: custom_fields_to_update)
    end

    def attributes
      to_h.except(:categories, :author_id)
    end

    def fetch_custom_field(key, default = nil)
      field = find_custom_field_by_key(key)
      return field['value'] if field.present?
      default
    end

    private

    def find_custom_field_by_key(key)
      custom_fields.find{|e| key == e['key'] }
    end

    def cleanup_hash_of_nil_values(hash)
      hash.select { |key, value| !value.nil? }
    end

    # Reuses existing field hashes (preserving their WordPress ids) and
    # overwrites key/value; unknown keys become new field hashes.
    def merge_custom_fields(new_custom_fields)
      new_custom_fields.map do |key, value|
        field = find_custom_field_by_key(key) || {}
        field['key'] = key
        field['value'] = value
        field
      end
    end
  end
end
Code style
require 'active_support/core_ext/object/blank'
module Wprapper
  # Wraps a WordPress post fetched over the XML-RPC/JSON APIs exposed by
  # the `wordpress` / `wordpress_json_api` clients on Base.
  class Post < Base
    property :categories
    property :content
    property :identifier
    property :image_url
    property :portrait_image_url
    property :published_at
    property :title
    property :title_position
    property :url
    property :status
    property :author_id
    property :custom_fields

    # Translates a raw WordPress post hash into the property hash
    # expected by Post.new.
    class Mapper
      def initialize(wp_post_hash)
        @wp_post_hash = wp_post_hash
      end

      # @return [Hash] keyword attributes for Post.new; required keys are
      #   read with #fetch so a malformed payload raises KeyError.
      def to_h
        r = @wp_post_hash
        {
          categories: fetch_categories,
          content: r.fetch('post_content'),
          identifier: r.fetch('post_id'),
          image_url: fetch_image_url,
          portrait_image_url: fetch_custom_field('portrait_image', nil),
          published_at: r.fetch('post_date_gmt').to_time,
          title: r.fetch('post_title'),
          title_position: fetch_custom_field('title_position', nil),
          url: r.fetch('link'),
          status: r.fetch('post_status'),
          author_id: r.fetch('post_author'),
          custom_fields: fetch_custom_fields
        }
      end

      # post_thumbnail arrives either as a hash (with 'link') or as an
      # array whose first element is the URL.
      def fetch_image_url
        post_thumbnail = @wp_post_hash.fetch('post_thumbnail', {})
        if post_thumbnail.is_a?(Hash)
          post_thumbnail.fetch('link', nil)
        else
          post_thumbnail.first
        end
      end

      def fetch_custom_fields
        @custom_fields ||= @wp_post_hash.fetch('custom_fields', [])
      end

      def terms
        @wp_post_hash.fetch('terms', [])
      end

      # Value of the custom field named +key+, or +default+ when absent.
      def fetch_custom_field(key, default)
        field = fetch_custom_fields.find do |f|
          f.fetch('key') == key
        end
        if field.present?
          field.fetch('value')
        else
          default
        end
      end

      # Category terms mapped into Category objects.
      def fetch_categories
        terms
          .select { |t| t['taxonomy'] == 'category' }
          .map { |c| Category.new_from_wp(c) }
      end

      # Name of the first term of +taxonomy+, or +default+ when absent.
      def fetch_term(taxonomy, default)
        term = terms.find do |t|
          t.fetch('taxonomy') == taxonomy
        end
        if term.present?
          term.fetch('name')
        else
          default
        end
      end
    end

    class << self
      def new_from_wp(r)
        new(Mapper.new(r).to_h)
      end

      # Most recent +number+ published posts, newest first.
      def latest(number)
        filters = {
          number: number,
          order: 'desc',
          orderby: 'post_date_gmt',
          post_status: 'publish',
          post_type: 'post'
        }
        wordpress.posts(filters).map do |r|
          Post.new_from_wp(r)
        end
      end

      def find(post_id)
        wp_post = wordpress.post(post_id)
        Post.new_from_wp(wp_post)
      end

      # Uploads the image bytes as media and sets it as the post's
      # featured image.
      def upload_feature_image(post_id, filename, image_bytes)
        media = wordpress_json_api.upload_media(filename, image_bytes)
        Post.set_featured_image(post_id, media['ID'])
      end

      def set_featured_image(post_id, media_id)
        Post.wordpress.update_post(post_id, post_thumbnail: media_id)
      end
    end

    def published?
      status == 'publish'
    end

    # Merges +new_custom_fields+ (blank values dropped) into the post's
    # existing custom fields and pushes the result to WordPress.
    def update_custom_fields(new_custom_fields)
      new_custom_fields = cleanup_hash_of_nil_values(new_custom_fields)
      custom_fields_to_update = merge_custom_fields(new_custom_fields)
      Post.wordpress.update_post(identifier, custom_fields: custom_fields_to_update)
    end

    def attributes
      to_h.except(:categories, :author_id)
    end

    def fetch_custom_field(key, default = nil)
      field = find_custom_field_by_key(key)
      return field['value'] if field.present?
      default
    end

    private

    def find_custom_field_by_key(key)
      custom_fields.find { |e| key == e['key'] }
    end

    # NOTE(review): despite the name, present? also rejects empty
    # strings/arrays and false — stricter than a pure nil filter;
    # confirm that is intended for custom-field values.
    def cleanup_hash_of_nil_values(hash)
      hash.select { |_, value| value.present? }
    end

    # Reuses existing field hashes (preserving their WordPress ids) and
    # overwrites key/value; unknown keys become new field hashes.
    def merge_custom_fields(new_custom_fields)
      new_custom_fields.map do |key, value|
        field = find_custom_field_by_key(key) || {}
        field['key'] = key
        field['value'] = value
        field
      end
    end
  end
end
require 'json'
require 'xcode/resource'
require 'xcode/target'
require 'xcode/configuration'
require 'xcode/scheme'
module Xcode
  # Wraps an .xcodeproj bundle: parses project.pbxproj (converted to JSON
  # via the `plutil` command-line tool) and the contained .xcscheme files.
  class Project
    attr_reader :name, :targets, :sdk, :path, :schemes, :groups

    # @param path path to the .xcodeproj bundle
    # @param sdk  build SDK, defaults to "iphoneos"
    def initialize(path, sdk=nil)
      @sdk = sdk || "iphoneos"  # FIXME: should support OSX/simulator too
      @path = File.expand_path path
      @targets = []
      @schemes = []
      @groups = []
      # project name = bundle basename without the .xcodeproj extension
      @name = File.basename(@path).gsub(/\.xcodeproj/,'')
      parse_pbxproj
      parse_schemes
    end

    # Looks up a scheme by name; raises when absent; yields it when a
    # block is given.
    def scheme(name)
      scheme = @schemes.select {|t| t.name == name.to_s}.first
      raise "No such scheme #{name}, available schemes are #{@schemes.map {|t| t.name}.join(', ')}" if scheme.nil?
      yield scheme if block_given?
      scheme
    end

    # Looks up a target by name; raises when absent; yields it when a
    # block is given.
    def target(name)
      target = @targets.select {|t| t.name == name.to_s}.first
      raise "No such target #{name}, available targets are #{@targets.map {|t| t.name}.join(', ')}" if target.nil?
      yield target if block_given?
      target
    end

    # Prints a human-readable summary of targets, configs and schemes.
    def describe
      puts "Project #{name} contains"
      targets.each do |t|
        puts " + target:#{t.name}"
        t.configs.each do |c|
          puts " + config:#{c.name}"
        end
      end
      schemes.each do |s|
        puts " + scheme #{s.name}"
        puts " + Launch action => target:#{s.launch.target.name}, config:#{s.launch.name}" unless s.launch.nil?
        puts " + Test action => target:#{s.test.target.name}, config:#{s.test.name}" unless s.test.nil?
      end
    end

    private

    def parse_schemes
      # schemes are in project/**/xcschemes/*.xcscheme
      Dir["#{@path}/**/xcschemes/*.xcscheme"].each do |scheme|
        @schemes << Xcode::Scheme.new(self, scheme)
      end
    end

    # Converts project.pbxproj to JSON with plutil, then registers every
    # target found under the root object, back-referencing this project.
    def parse_pbxproj
      json = JSON.parse(`plutil -convert json -o - "#{@path}/project.pbxproj"`)
      project = Xcode::Resource.new json['rootObject'], json
      project.targets.each do |target|
        target.project = self
        @targets << target
      end
    end
  end
end
parse_pbxproj now returns the project object; targets are created when asked for
require 'json'
require 'xcode/resource'
require 'xcode/target'
require 'xcode/configuration'
require 'xcode/scheme'
module Xcode
  # Wraps an .xcodeproj bundle: parses project.pbxproj (converted to JSON
  # via the `plutil` command-line tool) and the contained .xcscheme files.
  # Targets are materialized lazily from the parsed project on demand.
  class Project
    attr_reader :name, :sdk, :path, :schemes, :groups

    # @param path path to the .xcodeproj bundle
    # @param sdk  build SDK, defaults to "iphoneos"
    def initialize(path, sdk=nil)
      @sdk = sdk || "iphoneos"  # FIXME: should support OSX/simulator too
      @path = File.expand_path path
      @schemes = []
      @groups = []
      # project name = bundle basename without the .xcodeproj extension
      @name = File.basename(@path).gsub(/\.xcodeproj/,'')
      @project = parse_pbxproj
      parse_schemes
    end

    # Looks up a scheme by name; raises when absent; yields it when a
    # block is given.
    def scheme(name)
      scheme = @schemes.select {|t| t.name == name.to_s}.first
      raise "No such scheme #{name}, available schemes are #{@schemes.map {|t| t.name}.join(', ')}" if scheme.nil?
      yield scheme if block_given?
      scheme
    end

    # Targets of the parsed project, each back-referencing this Project.
    # NOTE(review): recomputed (and project= reassigned) on every call —
    # consider memoizing if callers iterate repeatedly.
    def targets
      @project.targets.map do |target|
        target.project = self
        target
      end
    end

    # Looks up a target by name; raises when absent; yields it when a
    # block is given.
    def target(name)
      target = targets.select {|t| t.name == name.to_s}.first
      raise "No such target #{name}, available targets are #{targets.map {|t| t.name}.join(', ')}" if target.nil?
      yield target if block_given?
      target
    end

    # Prints a human-readable summary of targets, configs and schemes.
    def describe
      puts "Project #{name} contains"
      targets.each do |t|
        puts " + target:#{t.name}"
        t.configs.each do |c|
          puts " + config:#{c.name}"
        end
      end
      schemes.each do |s|
        puts " + scheme #{s.name}"
        puts " + Launch action => target:#{s.launch.target.name}, config:#{s.launch.name}" unless s.launch.nil?
        puts " + Test action => target:#{s.test.target.name}, config:#{s.test.name}" unless s.test.nil?
      end
    end

    private

    def parse_schemes
      # schemes are in project/**/xcschemes/*.xcscheme
      Dir["#{@path}/**/xcschemes/*.xcscheme"].each do |scheme|
        @schemes << Xcode::Scheme.new(self, scheme)
      end
    end

    # Converts project.pbxproj to JSON with plutil and returns the root
    # resource object.
    def parse_pbxproj
      json = JSON.parse(`plutil -convert json -o - "#{@path}/project.pbxproj"`)
      Xcode::Resource.new json['rootObject'], json
    end
  end
end
|
class XPool::Process
  #
  # @return [XPool::Process]
  # Returns an instance of XPool::Process
  #
  def initialize
    @id = spawn
  end

  #
  # A graceful shutdown of the process.
  #
  # The signal 'SIGUSR1' is caught in the subprocess and exit is
  # performed through Kernel#exit after the process has finished
  # executing its work.
  #
  # @return [void]
  #
  def shutdown
    _shutdown :graceful
  end

  #
  # A non-graceful shutdown through SIGKILL.
  #
  # @return [void]
  #
  def shutdown!
    _shutdown :force
  end

  #
  # @return [Fixnum]
  # The number of times the process has been asked to schedule work.
  #
  def frequency
    @frequency
  end

  #
  # @param [#run] unit
  # The unit of work
  #
  # @param [Object] *args
  # A variable number of arguments to be passed to #run
  #
  # @raise [RuntimeError]
  # When the process is dead.
  #
  # @return [XPool::Process]
  # Returns self
  #
  def schedule(unit,*args)
    if dead?
      raise RuntimeError,
        "cannot schedule work on a dead process (with ID: #{@id})"
    end
    @frequency += 1
    @channel.put unit: unit, args: args
    self
  end

  #
  # @return [Boolean]
  # Returns true when the process is executing work.
  #
  def busy?
    if dead?
      false
    else
      synchronize!
      @states[:busy]
    end
  end

  #
  # @return [Boolean]
  # Returns true when the subprocess raised while running a unit of work.
  #
  def failed?
    synchronize!
    @states[:failed]
  end

  #
  # @return [Boolean]
  # Returns true when the process is alive.
  #
  def alive?
    !dead?
  end

  #
  # @return [Boolean]
  # Returns true when the process is no longer running.
  #
  def dead?
    synchronize!
    @states[:dead]
  end

  #
  # If a process has failed (see #failed?) this method returns the backtrace of
  # the exception that caused the process to fail.
  #
  # @return [Array<String>]
  # Returns the backtrace.
  #
  def backtrace
    synchronize!
    @states[:backtrace]
  end

  #
  # @return [Fixnum]
  # Returns the process ID of the new process.
  #
  def restart
    shutdown
    @id = spawn
  end

  private

  #
  # Signals and reaps the subprocess, then marks this wrapper shut down
  # and closes both IPC channels. ECHILD/ESRCH are swallowed because the
  # child may already have exited.
  #
  def _shutdown(action)
    sig = action == :force ? 'SIGKILL' : 'SIGUSR1'
    Process.kill sig, @id
    Process.wait @id
  rescue Errno::ECHILD, Errno::ESRCH
  ensure
    if action == :force
      # SIGKILL gives the child no chance to report state itself.
      @states = {dead: true}
    else
      synchronize!
    end
    @shutdown = true
    @channel.close
    @s_channel.close
  end

  #
  # Drains every pending state update from the subprocess' status channel
  # into @states. No-op once the wrapper has been shut down.
  #
  def synchronize!
    return if @shutdown
    while @s_channel.readable?
      @states = @s_channel.get
    end
  end

  #
  # Re-creates the IPC channels and resets bookkeeping before a (re)spawn.
  #
  def reset
    @channel = IChannel.new Marshal
    @s_channel = IChannel.new Marshal
    @shutdown = false
    @states = {}
    @frequency = 0
  end

  #
  # Forks the worker subprocess. SIGUSR1 only flags a shutdown request;
  # the worker loops over read_loop and exits once the queue is drained.
  #
  def spawn
    reset
    fork do
      trap :SIGUSR1 do
        XPool.log "#{::Process.pid} got request to shutdown."
        @shutdown_requested = true
      end
      loop &method(:read_loop)
    end
  end

  #
  # One iteration of the worker loop: pull a unit of work if available,
  # run it, and report busy/failed/dead state back over @s_channel.
  # A raised exception is reported, then re-raised to kill the worker.
  #
  def read_loop
    if @channel.readable?
      @s_channel.put busy: true
      msg = @channel.get
      msg[:unit].run *msg[:args]
      @s_channel.put busy: false
    end
  rescue Exception => e
    XPool.log "#{::Process.pid} has failed."
    @s_channel.put failed: true, dead: true, backtrace: e.backtrace
    raise e
  ensure
    # Graceful exit: only after a SIGUSR1 AND an empty work queue.
    if @shutdown_requested && !@channel.readable?
      @s_channel.put dead: true
      XPool.log "#{::Process.pid} is about to exit."
      exit 0
    end
  end
end
quick refactor
class XPool::Process
  #
  # @return [XPool::Process]
  # Returns an instance of XPool::Process
  #
  def initialize
    @id = spawn
  end

  #
  # A graceful shutdown of the process.
  #
  # The signal 'SIGUSR1' is caught in the subprocess and exit is
  # performed through Kernel#exit after the process has finished
  # executing its work.
  #
  # @return [void]
  #
  def shutdown
    _shutdown :graceful
  end

  #
  # A non-graceful shutdown through SIGKILL.
  #
  # @return [void]
  #
  def shutdown!
    _shutdown :force
  end

  #
  # @return [Fixnum]
  # The number of times the process has been asked to schedule work.
  #
  def frequency
    @frequency
  end

  #
  # @param [#run] unit
  # The unit of work
  #
  # @param [Object] *args
  # A variable number of arguments to be passed to #run
  #
  # @raise [RuntimeError]
  # When the process is dead.
  #
  # @return [XPool::Process]
  # Returns self
  #
  def schedule(unit,*args)
    if dead?
      raise RuntimeError,
        "cannot schedule work on a dead process (with ID: #{@id})"
    end
    @frequency += 1
    @channel.put unit: unit, args: args
    self
  end

  #
  # @return [Boolean]
  # Returns true when the process is executing work.
  #
  def busy?
    if dead?
      false
    else
      synchronize!
      @states[:busy]
    end
  end

  #
  # @return [Boolean]
  # Returns true when the subprocess raised while running a unit of work.
  #
  def failed?
    synchronize!
    @states[:failed]
  end

  #
  # @return [Boolean]
  # Returns true when the process is alive.
  #
  def alive?
    !dead?
  end

  #
  # @return [Boolean]
  # Returns true when the process is no longer running.
  #
  def dead?
    synchronize!
    @states[:dead]
  end

  #
  # If a process has failed (see #failed?) this method returns the backtrace of
  # the exception that caused the process to fail.
  #
  # @return [Array<String>]
  # Returns the backtrace.
  #
  def backtrace
    synchronize!
    @states[:backtrace]
  end

  #
  # @return [Fixnum]
  # Returns the process ID of the new process.
  #
  def restart
    shutdown
    @id = spawn
  end

  private

  #
  # Signals and reaps the subprocess, then marks this wrapper shut down
  # and closes both IPC channels. ECHILD/ESRCH are swallowed because the
  # child may already have exited.
  #
  # NOTE(review): the ternary relies on synchronize! returning @states.
  # If _shutdown is ever entered with @shutdown already true (e.g. a
  # second shutdown call), synchronize!'s early return yields nil and
  # @states becomes nil, so later dead?/busy? would raise NoMethodError
  # — confirm double-shutdown cannot happen, or guard it.
  #
  def _shutdown(action)
    sig = action == :force ? 'SIGKILL' : 'SIGUSR1'
    Process.kill sig, @id
    Process.wait @id
  rescue Errno::ECHILD, Errno::ESRCH
  ensure
    @states = action == :force ? {dead: true} : synchronize!
    @shutdown = true
    @channel.close
    @s_channel.close
  end

  #
  # Drains every pending state update from the subprocess' status channel
  # into @states and returns the resulting state hash. No-op (returns
  # nil) once the wrapper has been shut down.
  #
  def synchronize!
    return if @shutdown
    while @s_channel.readable?
      @states = @s_channel.get
    end
    @states
  end

  #
  # Re-creates the IPC channels and resets bookkeeping before a (re)spawn.
  #
  def reset
    @channel = IChannel.new Marshal
    @s_channel = IChannel.new Marshal
    @shutdown = false
    @states = {}
    @frequency = 0
  end

  #
  # Forks the worker subprocess. SIGUSR1 only flags a shutdown request;
  # the worker loops over read_loop and exits once the queue is drained.
  #
  def spawn
    reset
    fork do
      trap :SIGUSR1 do
        XPool.log "#{::Process.pid} got request to shutdown."
        @shutdown_requested = true
      end
      loop &method(:read_loop)
    end
  end

  #
  # One iteration of the worker loop: pull a unit of work if available,
  # run it, and report busy/failed/dead state back over @s_channel.
  # A raised exception is reported, then re-raised to kill the worker.
  #
  def read_loop
    if @channel.readable?
      @s_channel.put busy: true
      msg = @channel.get
      msg[:unit].run *msg[:args]
      @s_channel.put busy: false
    end
  rescue Exception => e
    XPool.log "#{::Process.pid} has failed."
    @s_channel.put failed: true, dead: true, backtrace: e.backtrace
    raise e
  ensure
    # Graceful exit: only after a SIGUSR1 AND an empty work queue.
    if @shutdown_requested && !@channel.readable?
      @s_channel.put dead: true
      XPool.log "#{::Process.pid} is about to exit."
      exit 0
    end
  end
end
|
module YARD
  module CLI
    # @since 0.6.0
    class Gems < Command
      def initialize
        @rebuild = false
        @gems = []
      end

      def description; "Builds YARD index for gems" end

      # Runs the commandline utility, parsing arguments and generating
      # YARD indexes for gems.
      #
      # @param [Array<String>] args the list of arguments
      # @return [void]
      def run(*args)
        require 'rubygems'
        optparse(*args)
        build_gems
      end

      private

      # Builds .yardoc files for all non-existing gems
      def build_gems
        require 'rubygems'
        @gems.each do |spec|
          ver = "= #{spec.version}"
          dir = Registry.yardoc_file_for_gem(spec.name, ver)
          if dir && File.directory?(dir) && !@rebuild
            log.debug "#{spec.name} index already exists at '#{dir}'"
          else
            yfile = Registry.yardoc_file_for_gem(spec.name, ver, true)
            next unless yfile
            next unless File.directory?(spec.full_gem_path)
            Registry.clear
            # Use Dir.chdir's block form so the previous working directory
            # is restored after each gem — even when Yardoc.run raises.
            # The non-block form leaked the chdir into later iterations
            # and into the caller's process state.
            Dir.chdir(spec.full_gem_path) do
              log.info "Building yardoc index for gem: #{spec.full_name}"
              Yardoc.run('--no-stats', '-n', '-b', yfile)
            end
          end
        end
      end

      # Resolves (name, version-requirement) argument pairs into gem
      # specifications, warning about gems that cannot be found.
      def add_gems(gems)
        0.step(gems.size - 1, 2) do |index|
          gem, ver_require = gems[index], gems[index + 1] || ">= 0"
          # NOTE(review): Gem.source_index was removed in RubyGems 2.0 —
          # confirm the supported rubygems version range.
          specs = Gem.source_index.find_name(gem, ver_require)
          if specs.empty?
            log.warn "#{gem} #{ver_require} could not be found in RubyGems index"
          else
            @gems += specs
          end
        end
      end

      # Parses options
      def optparse(*args)
        opts = OptionParser.new
        opts.banner = 'Usage: yard gems [options] [gem_name [version]]'
        opts.separator ""
        opts.separator "#{description}. If no gem_name is given,"
        opts.separator "all gems are built."
        opts.separator ""
        opts.on('--rebuild', 'Rebuilds index') do
          @rebuild = true
        end
        common_options(opts)
        parse_options(opts, args)
        add_gems(args)
        if !args.empty? && @gems.empty?
          log.error "No specified gems could be found for command"
        elsif @gems.empty?
          # no arguments: index every installed gem
          @gems += Gem.source_index.find_name('') if @gems.empty?
        end
      end
    end
  end
end
yard gems: use transactional chdir
Avoids changing pwd state when running yard gems
module YARD
  module CLI
    # @since 0.6.0
    class Gems < Command
      def initialize
        @rebuild = false
        @gems = []
      end

      def description; "Builds YARD index for gems" end

      # Runs the commandline utility, parsing arguments and generating
      # YARD indexes for gems.
      #
      # @param [Array<String>] args the list of arguments
      # @return [void]
      def run(*args)
        require 'rubygems'
        optparse(*args)
        build_gems
      end

      private

      # Builds .yardoc files for all non-existing gems
      def build_gems
        require 'rubygems'
        @gems.each do |spec|
          ver = "= #{spec.version}"
          dir = Registry.yardoc_file_for_gem(spec.name, ver)
          if dir && File.directory?(dir) && !@rebuild
            log.debug "#{spec.name} index already exists at '#{dir}'"
          else
            yfile = Registry.yardoc_file_for_gem(spec.name, ver, true)
            next unless yfile
            next unless File.directory?(spec.full_gem_path)
            Registry.clear
            # block form restores the previous cwd even if Yardoc.run raises
            Dir.chdir(spec.full_gem_path) do
              log.info "Building yardoc index for gem: #{spec.full_name}"
              Yardoc.run('--no-stats', '-n', '-b', yfile)
            end
          end
        end
      end

      # Resolves (name, version-requirement) argument pairs into gem
      # specifications, warning about gems that cannot be found.
      def add_gems(gems)
        0.step(gems.size - 1, 2) do |index|
          gem, ver_require = gems[index], gems[index + 1] || ">= 0"
          # NOTE(review): Gem.source_index was removed in RubyGems 2.0 —
          # confirm the supported rubygems version range.
          specs = Gem.source_index.find_name(gem, ver_require)
          if specs.empty?
            log.warn "#{gem} #{ver_require} could not be found in RubyGems index"
          else
            @gems += specs
          end
        end
      end

      # Parses options
      def optparse(*args)
        opts = OptionParser.new
        opts.banner = 'Usage: yard gems [options] [gem_name [version]]'
        opts.separator ""
        opts.separator "#{description}. If no gem_name is given,"
        opts.separator "all gems are built."
        opts.separator ""
        opts.on('--rebuild', 'Rebuilds index') do
          @rebuild = true
        end
        common_options(opts)
        parse_options(opts, args)
        add_gems(args)
        if !args.empty? && @gems.empty?
          log.error "No specified gems could be found for command"
        elsif @gems.empty?
          # no arguments: index every installed gem
          @gems += Gem.source_index.find_name('') if @gems.empty?
        end
      end
    end
  end
end
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem metadata for neo4jr-simple 0.1.5 (jeweler output; regenerate
# rather than hand-editing).
Gem::Specification.new do |s|
  s.name = %q{neo4jr-simple}
  s.version = "0.1.5"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Matthew Deiters"]
  s.date = %q{2009-12-23}
  s.default_executable = %q{neosh}
  s.description = %q{A simple, ready to go JRuby wrapper for the Neo4j graph database engine. Nothing more then Neo4j and Ruby goodness}
  s.email = %q{matthew_deiters@mckinsey.com}
  s.executables = ["neosh"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    "bin/neosh",
    "lib/jars/graph-algo-0.2-20090815.182816-1.jar",
    "lib/jars/jta-1.1.jar",
    "lib/jars/neo-1.0-b10.jar",
    "lib/jars/shell-1.0-b10.jar",
    "lib/neo4jr-simple.rb",
    "lib/neo4jr/cli.rb",
    "lib/neo4jr/configuration.rb",
    "lib/neo4jr/db.rb",
    "lib/neo4jr/direction.rb",
    "lib/neo4jr/embedded_neo_extension.rb",
    "lib/neo4jr/int_array_iterator_extension.rb",
    "lib/neo4jr/node_extension.rb",
    "lib/neo4jr/order.rb",
    "lib/neo4jr/property_container_extension.rb",
    "lib/neo4jr/relationship_extension.rb",
    "lib/neo4jr/relationship_type.rb",
    "lib/neo4jr/returnable_evaluator.rb",
    "lib/neo4jr/stop_evaluator.rb",
    "lib/neo4jr/traverser_extension.rb",
    "lib/neo4jr/version.rb"
  ]
  s.homepage = %q{http://github.com/mdeiters/neo4jr-simple}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.5}
  s.summary = %q{A simple, ready to go JRuby wrapper for the Neo4j graph database engine.}
  s.test_files = [
    "VERSION",
    "spec/db_spec.rb",
    "spec/direction_spec.rb",
    "spec/embedded_neo_extension_spec.rb",
    "spec/functional_example_spec.rb",
    "spec/int_array_iterator_extension_spec.rb",
    "spec/node_extension_spec.rb",
    "spec/property_container_extension_spec.rb",
    "spec/relationship_type_spec.rb",
    "spec/returnable_evaluator_spec.rb",
    "spec/spec.opts",
    "spec/spec_helper.rb",
    "spec/stop_evaluator_spec.rb",
    "spec/test-imdb-database"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, [">= 1.2.9"])
    else
      s.add_dependency(%q<rspec>, [">= 1.2.9"])
    end
  else
    s.add_dependency(%q<rspec>, [">= 1.2.9"])
  end
end
Regenerated gemspec for version 0.1.6
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem metadata for neo4jr-simple 0.1.6 (jeweler output; regenerate
# rather than hand-editing).
Gem::Specification.new do |s|
  s.name = %q{neo4jr-simple}
  s.version = "0.1.6"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Matthew Deiters"]
  s.date = %q{2009-12-24}
  s.default_executable = %q{neosh}
  s.description = %q{A simple, ready to go JRuby wrapper for the Neo4j graph database engine. Nothing more then Neo4j and Ruby goodness}
  s.email = %q{matthew_deiters@mckinsey.com}
  s.executables = ["neosh"]
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    "bin/neosh",
    "lib/jars/graph-algo-0.2-20090815.182816-1.jar",
    "lib/jars/jta-1.1.jar",
    "lib/jars/neo-1.0-b10.jar",
    "lib/jars/shell-1.0-b10.jar",
    "lib/neo4jr-simple.rb",
    "lib/neo4jr/cli.rb",
    "lib/neo4jr/configuration.rb",
    "lib/neo4jr/db.rb",
    "lib/neo4jr/direction.rb",
    "lib/neo4jr/embedded_neo_extension.rb",
    "lib/neo4jr/int_array_iterator_extension.rb",
    "lib/neo4jr/node_extension.rb",
    "lib/neo4jr/order.rb",
    "lib/neo4jr/property_container_extension.rb",
    "lib/neo4jr/relationship_extension.rb",
    "lib/neo4jr/relationship_type.rb",
    "lib/neo4jr/returnable_evaluator.rb",
    "lib/neo4jr/stop_evaluator.rb",
    "lib/neo4jr/traverser_extension.rb",
    "lib/neo4jr/version.rb"
  ]
  s.homepage = %q{http://github.com/mdeiters/neo4jr-simple}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.5}
  s.summary = %q{A simple, ready to go JRuby wrapper for the Neo4j graph database engine.}
  s.test_files = [
    "VERSION",
    "spec/db_spec.rb",
    "spec/direction_spec.rb",
    "spec/embedded_neo_extension_spec.rb",
    "spec/functional_example_spec.rb",
    "spec/int_array_iterator_extension_spec.rb",
    "spec/node_extension_spec.rb",
    "spec/property_container_extension_spec.rb",
    "spec/relationship_type_spec.rb",
    "spec/returnable_evaluator_spec.rb",
    "spec/spec.opts",
    "spec/spec_helper.rb",
    "spec/stop_evaluator_spec.rb",
    "spec/test-imdb-database"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, [">= 1.2.9"])
    else
      s.add_dependency(%q<rspec>, [">= 1.2.9"])
    end
  else
    s.add_dependency(%q<rspec>, [">= 1.2.9"])
  end
end
|
require 'ipaddr'
require 'json'
# True when this run is managed by Crowbar, i.e. the Barclamp recipe
# class has been loaded into the Chef run.
def crowbar?
  barclamp = defined?(Chef::Recipe::Barclamp)
  !barclamp.nil?
end
# Returns the Chef nodes acting as ceph monitors.
#
# Under Crowbar the monitors are discovered through the crowbar-* roles
# whose run lists carry role[ceph-mon], scoped to this node's ceph
# config environment; otherwise a plain attribute search in the current
# Chef environment is used. +extra_search+, when given, is ANDed onto
# the final query.
def get_mon_nodes(extra_search = nil)
  if crowbar?
    mon_roles = search(:role, 'name:crowbar-* AND run_list:role\[ceph-mon\]')
    unless mon_roles.empty?
      search_string = mon_roles.map { |role_object| 'roles:' + role_object.name }.join(' OR ')
      search_string = "(#{search_string}) AND ceph_config_environment:#{node['ceph']['config']['environment']}"
    end
  else
    search_string = "ceph_is_mon:true AND chef_environment:#{node.chef_environment}"
  end
  unless extra_search.nil?
    search_string = "(#{search_string}) AND (#{extra_search})"
  end
  search(:node, search_string)
end
# If public_network is specified, we need to search for the monitor IP
# among the node's configured interface addresses:
# 1. Parse the network to determine whether it is IPv4 or IPv6.
# 2. Find an interface address that falls inside that network.
# 3. Return that IP together with the monitor port (6789).
def find_node_ip_in_network(network, nodeish = nil)
nodeish = node unless nodeish
net = IPAddr.new(network)
nodeish['network']['interfaces'].each do |iface, addrs|
addresses = addrs['addresses'] || []
addresses.each do |ip, params|
return ip_address_to_ceph_address(ip, params) if net.include?(ip)
end
end
nil
end
# Formats an interface address as a ceph monitor address on port 6789,
# bracketing IPv6 addresses as required by ceph.conf syntax.
#
# BUG FIX: the previous version re-checked `net.include?(ip)` here, but `net`
# is a local of find_node_ip_in_network and is undefined in this scope, so
# every call raised NameError. The caller already verifies network membership,
# so this method only needs to format by address family.
#
# @param ip [String] the interface IP address
# @param params [Hash] ohai address parameters, keyed by 'family'
# @return [String, nil] formatted address, or nil for an unknown family
def ip_address_to_ceph_address(ip, params)
  if params['family'].eql?('inet6')
    "[#{ip}]:6789"
  elsif params['family'].eql?('inet')
    "#{ip}:6789"
  end
end
# Collects the "ip:port" addresses of all ceph monitors.
#
# When the local ceph-mon admin socket exists, the live quorum membership is
# used; otherwise monitors are discovered via Chef search and addressed from
# the Crowbar admin network, the configured public network, or the node's
# default ipaddress, depending on the deployment.
#
# @return [Array<String>] unique, non-nil monitor addresses
def mon_addresses
  mon_ips = []
  if File.exist?("/var/run/ceph/ceph-mon.#{node['hostname']}.asok")
    mon_ips = quorum_members_ips
  else
    mons = []
    # make sure if this node runs ceph-mon, it's always included even if
    # search is laggy; put it first in the hopes that clients will talk
    # primarily to local node
    mons << node if node['ceph']['is_mon']
    mons += get_mon_nodes
    if crowbar?
      # block param deliberately shadows the `node` method: each entry is a mon node
      # NOTE(review): the admin-network address has no :6789 suffix — confirm
      # downstream consumers expect a bare address here.
      mon_ips = mons.map { |node| Chef::Recipe::Barclamp::Inventory.get_network_by_type(node, 'admin').address }
    else
      if node['ceph']['config']['global'] && node['ceph']['config']['global']['public network']
        mon_ips = mons.map { |nodeish| find_node_ip_in_network(node['ceph']['config']['global']['public network'], nodeish) }
      else
        mon_ips = mons.map { |node| node['ipaddress'] + ':6789' }
      end
    end
  end
  mon_ips.reject { |m| m.nil? }.uniq
end
# Looks up the ceph monitor secret: first from any discovered monitor node,
# then from this node's own attributes; warns and yields '' when absent.
#
# @return [String] the monitor secret, or '' when none can be found
def mon_secret
  mons = get_mon_nodes
  if mons.any?
    mons.first['ceph']['monitor-secret']
  elsif node['ceph']['monitor-secret']
    node['ceph']['monitor-secret']
  else
    Chef::Log.warn('No monitor secret found')
    ''
  end
end
# Asks the local ceph-mon daemon (via its admin socket) for the current
# monmap and returns the member addresses.
#
# @return [Array<String>] monitor addresses from the live monmap
# @raise [Mixlib::ShellOut::ShellCommandFailed] when the admin command fails
def quorum_members_ips
  cmd = Mixlib::ShellOut.new("ceph --admin-daemon /var/run/ceph/ceph-mon.#{node['hostname']}.asok mon_status")
  cmd.run_command
  cmd.error!
  monmap = JSON.parse(cmd.stdout)['monmap']
  # drop the trailing "/0" nonce suffix from each monitor address
  monmap['mons'].map { |mon| mon['addr'][0..-3] }
end
# Monitor states that indicate membership in the quorum.
# BUG FIX: %w() splits on whitespace only — the previous %w(leader, peon)
# produced ["leader,", "peon"], so include?('leader') was always false and
# quorum? could never detect a leader.
QUORUM_STATES = %w(leader peon).freeze
# Reports whether the local ceph-mon is currently part of the quorum.
#
# "ceph auth get-or-create-key" would hang if the monitor wasn't
# in quorum yet, which is highly likely on the first run. This
# helper lets us delay the key generation into the next
# chef-client run, instead of hanging.
#
# Also, as the UNIX domain socket connection has no timeout logic
# in the ceph tool, this exits immediately if the ceph-mon is not
# running for any reason; trying to connect via TCP/IP would wait
# for a relatively long timeout.
#
# @return [Boolean] true when the monitor reports a quorum state
def quorum?
  cmd = Mixlib::ShellOut.new("ceph --admin-daemon /var/run/ceph/ceph-mon.#{node['hostname']}.asok mon_status")
  cmd.run_command
  cmd.error!
  state = JSON.parse(cmd.stdout)['state']
  QUORUM_STATES.include?(state)
end
Fixes the mon_addresses refactor: extracts the network-membership test into ip_address_in_network so that ip_address_to_ceph_address no longer references the undefined local `net` (which raised NameError at runtime).
require 'ipaddr'
require 'json'
# Whether this run is happening under Crowbar (barclamp library present).
def crowbar?
  !!defined?(Chef::Recipe::Barclamp)
end
def get_mon_nodes(extra_search = nil)
if crowbar?
mon_roles = search(:role, 'name:crowbar-* AND run_list:role\[ceph-mon\]')
unless mon_roles.empty?
search_string = mon_roles.map { |role_object| 'roles:' + role_object.name }.join(' OR ')
search_string = "(#{search_string}) AND ceph_config_environment:#{node['ceph']['config']['environment']}"
end
else
search_string = "ceph_is_mon:true AND chef_environment:#{node.chef_environment}"
end
unless extra_search.nil?
search_string = "(#{search_string}) AND (#{extra_search})"
end
search(:node, search_string)
end
# If public_network is specified
# we need to search for the monitor IP
# in the node environment.
# 1. We look if the network is IPv6 or IPv4
# 2. We look for a route matching the network
# 3. We grab the IP and return it with the port
def find_node_ip_in_network(network, nodeish = nil)
nodeish = node unless nodeish
net = IPAddr.new(network)
nodeish['network']['interfaces'].each do |iface, addrs|
addresses = addrs['addresses'] || []
addresses.each do |ip, params|
return ip_address_to_ceph_address(ip, params) if ip_address_in_network(ip, params, net)
end
end
nil
end
# True when the address belongs to a recognized IP family and falls inside
# the given network.
#
# @param ip [String] the interface IP address
# @param params [Hash] ohai address parameters, keyed by 'family'
# @param net [IPAddr] the network to test membership against
# @return [Boolean]
def ip_address_in_network(ip, params, net)
  family = params['family']
  return false unless family == 'inet' || family == 'inet6'
  net.include?(ip)
end
# Formats an interface address as a ceph monitor address on port 6789,
# bracketing IPv6 addresses as ceph.conf requires.
#
# @param ip [String] the interface IP address
# @param params [Hash] ohai address parameters, keyed by 'family'
# @return [String, nil] formatted address, or nil for an unknown family
def ip_address_to_ceph_address(ip, params)
  case params['family']
  when 'inet6' then "[#{ip}]:6789"
  when 'inet'  then "#{ip}:6789"
  end
end
def mon_addresses
mon_ips = []
if File.exist?("/var/run/ceph/ceph-mon.#{node['hostname']}.asok")
mon_ips = quorum_members_ips
else
mons = []
# make sure if this node runs ceph-mon, it's always included even if
# search is laggy; put it first in the hopes that clients will talk
# primarily to local node
mons << node if node['ceph']['is_mon']
mons += get_mon_nodes
if crowbar?
mon_ips = mons.map { |node| Chef::Recipe::Barclamp::Inventory.get_network_by_type(node, 'admin').address }
else
if node['ceph']['config']['global'] && node['ceph']['config']['global']['public network']
mon_ips = mons.map { |nodeish| find_node_ip_in_network(node['ceph']['config']['global']['public network'], nodeish) }
else
mon_ips = mons.map { |node| node['ipaddress'] + ':6789' }
end
end
end
mon_ips.reject { |m| m.nil? }.uniq
end
# Resolves the ceph monitor secret, preferring a discovered monitor node,
# then this node's own attribute; warns and returns '' when nothing is set.
#
# @return [String] the monitor secret, or '' when none can be found
def mon_secret
  mons = get_mon_nodes
  return mons[0]['ceph']['monitor-secret'] unless mons.empty?
  return node['ceph']['monitor-secret'] if node['ceph']['monitor-secret']
  Chef::Log.warn('No monitor secret found')
  ''
end
# Queries the local ceph-mon admin socket for the monmap and returns the
# member monitor addresses.
#
# @return [Array<String>] monitor addresses from the live monmap
# @raise [Mixlib::ShellOut::ShellCommandFailed] when the admin command fails
def quorum_members_ips
  sock = "/var/run/ceph/ceph-mon.#{node['hostname']}.asok"
  cmd = Mixlib::ShellOut.new("ceph --admin-daemon #{sock} mon_status")
  cmd.run_command
  cmd.error!
  # each addr carries a trailing "/0" nonce; strip the last two characters
  JSON.parse(cmd.stdout)['monmap']['mons'].map { |mon| mon['addr'][0..-3] }
end
# Monitor states that count as being in quorum.
# BUG FIX: %w() does not use commas — %w(leader, peon) built ["leader,", "peon"],
# so the 'leader' state never matched and quorum? returned false for leaders.
QUORUM_STATES = %w(leader peon).freeze
# Reports whether the local ceph-mon currently participates in the quorum.
#
# "ceph auth get-or-create-key" would hang if the monitor wasn't
# in quorum yet, which is highly likely on the first run. This
# helper lets us delay the key generation into the next
# chef-client run, instead of hanging.
#
# Also, as the UNIX domain socket connection has no timeout logic
# in the ceph tool, this exits immediately if the ceph-mon is not
# running for any reason; trying to connect via TCP/IP would wait
# for a relatively long timeout.
#
# @return [Boolean] true when the monitor reports a quorum state
def quorum?
  cmd = Mixlib::ShellOut.new("ceph --admin-daemon /var/run/ceph/ceph-mon.#{node['hostname']}.asok mon_status")
  cmd.run_command
  cmd.error!
  state = JSON.parse(cmd.stdout)['state']
  QUORUM_STATES.include?(state)
end
end
|
module HttpdCookbook
module Helpers
# Apache version explicitly requested on the resource, falling back to the
# platform's default version.
def parsed_version
  new_resource.version || default_apache_version
end

# Same fallback logic for the resource's httpd_version attribute.
def parsed_httpd_version
  new_resource.httpd_version || default_apache_version
end
# Default Apache httpd version shipped by the node's platform.
#
# @return [String, nil] '2.2' or '2.4', nil for an unrecognized platform
def default_apache_version
  family = node['platform_family']
  version = node['platform_version']
  case family
  when 'debian'
    return '2.2' if %w(10.04 12.04 13.04 13.10).include?(version)
    return '2.2' if [6, 7].include?(version.to_i)
    return '2.4' if %w(14.04 14.10 jessie/sid).include?(version)
  when 'freebsd', 'omnios', 'suse'
    return '2.2'
  when 'rhel'
    return '2.2' if [5, 6].include?(version.to_i)
    return '2.4' if [7, 2013, 2014].include?(version.to_i)
  when 'fedora', 'smartos'
    return '2.4'
  end
end
# Module symbol name registered with httpd, honoring an explicit override.
def parsed_symbolname
  return new_resource.symbolname if new_resource.symbolname
  # PHP builds register their symbol as php5_module regardless of module name
  return 'php5_module' if %w(php php-zts).include?(module_name)
  "#{module_name}_module"
end
# Shared-object filename for the module, honoring an explicit override and a
# handful of per-platform exceptions before the conventional mod_<name>.so.
def parsed_filename
  return new_resource.filename if new_resource.filename
  family = node['platform_family']
  if family == 'debian' && %w(php5 php).include?(module_name)
    return 'libphp5.so'
  elsif family == 'rhel'
    case module_name
    when 'nss'       then return 'libmodnss.so'
    when 'revocator' then return 'mod_rev.so'
    when 'php'       then return 'libphp5.so'
    when 'php-zts'   then return 'libphp5-zts.so'
    end
  end
  "mod_#{module_name}.so"
end
def parsed_module_package_name
return new_resource.package_name if new_resource.package_name
package_name_for_module(
module_name,
parsed_httpd_version,
node['platform'],
node['platform_family'],
node['platform_version']
)
end
def parsed_service_package_name
return new_resource.package_name if new_resource.package_name
package_name_for_service(
node['platform'],
node['platform_family'],
node['platform_version'],
parsed_version
)
end
def parsed_maxclients
return new_resource.maxclients if new_resource.maxclients
default_value_for(parsed_version, parsed_mpm, :maxclients)
end
def parsed_maxconnectionsperchild
return new_resource.maxconnectionsperchild if new_resource.maxconnectionsperchild
default_value_for(parsed_version, parsed_mpm, :maxconnectionsperchild)
end
def parsed_maxrequestsperchild
return new_resource.maxrequestsperchild if new_resource.maxrequestsperchild
default_value_for(parsed_version, parsed_mpm, :maxrequestsperchild)
end
def parsed_maxrequestworkers
return new_resource.maxrequestworkers if new_resource.maxrequestworkers
default_value_for(parsed_version, parsed_mpm, :maxrequestworkers)
end
def parsed_maxspareservers
return new_resource.maxspareservers if new_resource.maxspareservers
default_value_for(parsed_version, parsed_mpm, :maxspareservers)
end
def parsed_maxsparethreads
return new_resource.maxsparethreads if new_resource.maxsparethreads
default_value_for(parsed_version, parsed_mpm, :maxsparethreads)
end
def parsed_minspareservers
return new_resource.minspareservers if new_resource.minspareservers
default_value_for(parsed_version, parsed_mpm, :minspareservers)
end
def parsed_minsparethreads
return new_resource.minsparethreads if new_resource.minsparethreads
default_value_for(parsed_version, parsed_mpm, :minsparethreads)
end
def parsed_modules
return new_resource.modules if new_resource.modules
return %w(
alias autoindex dir
env mime negotiation
setenvif status auth_basic
deflate authz_default
authz_user authz_groupfile
authn_file authz_host
reqtimeout
) if parsed_version == '2.2'
return %w(
authz_core authz_host authn_core
auth_basic access_compat authn_file
authz_user alias dir autoindex
env mime negotiation setenvif
filter deflate status
) if parsed_version == '2.4'
end
def parsed_mpm
return new_resource.mpm if new_resource.mpm
parsed_version == '2.4' ? 'event' : 'worker'
end
def parsed_run_group
return new_resource.run_group if new_resource.run_group
node['platform_family'] == 'debian' ? 'www-data' : 'apache'
end
def parsed_run_user
return new_resource.run_user if new_resource.run_user
node['platform_family'] == 'debian' ? 'www-data' : 'apache'
end
def parsed_servername
return new_resource.servername if new_resource.servername
node['hostname']
end
def parsed_startservers
return new_resource.startservers if new_resource.startservers
default_value_for(parsed_version, parsed_mpm, :startservers)
end
def parsed_threadlimit
return new_resource.threadlimit if new_resource.threadlimit
default_value_for(parsed_version, parsed_mpm, :threadlimit)
end
def parsed_threadsperchild
return new_resource.threadsperchild if new_resource.threadsperchild
default_value_for(parsed_version, parsed_mpm, :threadsperchild)
end
end
end
Limit the Debian libphp5.so filename exception to the php5 module only (previously it also matched the php module).
module HttpdCookbook
module Helpers
def parsed_version
return new_resource.version if new_resource.version
default_apache_version
end
def parsed_httpd_version
return new_resource.httpd_version if new_resource.httpd_version
default_apache_version
end
# Default Apache httpd version shipped by the node's platform.
#
# @return [String, nil] '2.2' or '2.4', nil for an unrecognized platform
def default_apache_version
  family = node['platform_family']
  version = node['platform_version']
  return '2.2' if %w(freebsd omnios suse).include?(family)
  return '2.4' if %w(fedora smartos).include?(family)
  if family == 'debian'
    return '2.2' if %w(10.04 12.04 13.04 13.10).include?(version) || [6, 7].include?(version.to_i)
    return '2.4' if %w(14.04 14.10 jessie/sid).include?(version)
  elsif family == 'rhel'
    return '2.2' if [5, 6].include?(version.to_i)
    return '2.4' if [2013, 2014, 7].include?(version.to_i)
  end
end
def parsed_symbolname
return new_resource.symbolname if new_resource.symbolname
# Put all exceptions here
return 'php5_module' if module_name == 'php'
return 'php5_module' if module_name == 'php-zts'
"#{module_name}_module"
end
# Shared-object filename for the module being configured.
#
# An explicit new_resource.filename always wins; otherwise a small set of
# per-platform exceptions applies before the conventional mod_<name>.so.
#
# @return [String] the module's .so filename
def parsed_filename
  return new_resource.filename if new_resource.filename
  # Put all exceptions here
  case node['platform_family']
  when 'debian'
    # deliberately matches only 'php5' here, not 'php'
    return 'libphp5.so' if module_name == 'php5'
  when 'rhel'
    return 'libmodnss.so' if module_name == 'nss'
    return 'mod_rev.so' if module_name == 'revocator'
    return 'libphp5.so' if module_name == 'php'
    return 'libphp5-zts.so' if module_name == 'php-zts'
  end
  "mod_#{module_name}.so"
end
def parsed_module_package_name
return new_resource.package_name if new_resource.package_name
package_name_for_module(
module_name,
parsed_httpd_version,
node['platform'],
node['platform_family'],
node['platform_version']
)
end
def parsed_service_package_name
return new_resource.package_name if new_resource.package_name
package_name_for_service(
node['platform'],
node['platform_family'],
node['platform_version'],
parsed_version
)
end
def parsed_maxclients
return new_resource.maxclients if new_resource.maxclients
default_value_for(parsed_version, parsed_mpm, :maxclients)
end
def parsed_maxconnectionsperchild
return new_resource.maxconnectionsperchild if new_resource.maxconnectionsperchild
default_value_for(parsed_version, parsed_mpm, :maxconnectionsperchild)
end
def parsed_maxrequestsperchild
return new_resource.maxrequestsperchild if new_resource.maxrequestsperchild
default_value_for(parsed_version, parsed_mpm, :maxrequestsperchild)
end
def parsed_maxrequestworkers
return new_resource.maxrequestworkers if new_resource.maxrequestworkers
default_value_for(parsed_version, parsed_mpm, :maxrequestworkers)
end
def parsed_maxspareservers
return new_resource.maxspareservers if new_resource.maxspareservers
default_value_for(parsed_version, parsed_mpm, :maxspareservers)
end
def parsed_maxsparethreads
return new_resource.maxsparethreads if new_resource.maxsparethreads
default_value_for(parsed_version, parsed_mpm, :maxsparethreads)
end
def parsed_minspareservers
return new_resource.minspareservers if new_resource.minspareservers
default_value_for(parsed_version, parsed_mpm, :minspareservers)
end
def parsed_minsparethreads
return new_resource.minsparethreads if new_resource.minsparethreads
default_value_for(parsed_version, parsed_mpm, :minsparethreads)
end
def parsed_modules
return new_resource.modules if new_resource.modules
return %w(
alias autoindex dir
env mime negotiation
setenvif status auth_basic
deflate authz_default
authz_user authz_groupfile
authn_file authz_host
reqtimeout
) if parsed_version == '2.2'
return %w(
authz_core authz_host authn_core
auth_basic access_compat authn_file
authz_user alias dir autoindex
env mime negotiation setenvif
filter deflate status
) if parsed_version == '2.4'
end
def parsed_mpm
return new_resource.mpm if new_resource.mpm
parsed_version == '2.4' ? 'event' : 'worker'
end
def parsed_run_group
return new_resource.run_group if new_resource.run_group
node['platform_family'] == 'debian' ? 'www-data' : 'apache'
end
def parsed_run_user
return new_resource.run_user if new_resource.run_user
node['platform_family'] == 'debian' ? 'www-data' : 'apache'
end
def parsed_servername
return new_resource.servername if new_resource.servername
node['hostname']
end
def parsed_startservers
return new_resource.startservers if new_resource.startservers
default_value_for(parsed_version, parsed_mpm, :startservers)
end
def parsed_threadlimit
return new_resource.threadlimit if new_resource.threadlimit
default_value_for(parsed_version, parsed_mpm, :threadlimit)
end
def parsed_threadsperchild
return new_resource.threadsperchild if new_resource.threadsperchild
default_value_for(parsed_version, parsed_mpm, :threadsperchild)
end
end
end
|
#
# Cookbook Name:: rvm
# Library:: helpers
#
# Author:: Fletcher Nichol <fnichol@nichol.ca>
#
# Copyright 2011, Fletcher Nichol
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
begin
require 'rvm'
rescue LoadError
Chef::Log.warn("Missing gem 'rvm'")
end
##
# Determines if a given ruby string is moderately sane and potentially legal
#
# @param rubie [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby string sane?
def ruby_string_sane?(rubie)
  return true if "goruby" == rubie # goruby has no versions yet
  # must look like xxx-vvv at least; !! normalizes the match to a Boolean
  # (previously this returned nil — falsy, but not the documented false)
  !!(rubie =~ /^[^-]+-[^-]+/)
end
##
# Lists all installed RVM rubies on the system.
#
# **Note** that these values are cached for lookup speed. To flush these
# values and force an update, call #update_installed_rubies.
#
# @return [Array] the cached list of currently installed rvm rubies
def installed_rubies
@installed_rubies ||= update_installed_rubies
end
##
# Updates the list of all installed RVM rubies on the system
#
# @return [Array] the list of currently installed rvm rubies
def update_installed_rubies
@installed_rubies = RVM.list_strings
@installed_rubies
end
##
# Determines whether or not the given ruby is already installed
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby installed?
def ruby_installed?(rubie)
return false unless ruby_string_sane?(rubie)
! installed_rubies.select { |r| r.start_with?(rubie) }.empty?
end
##
# Inverse of #ruby_installed?
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby not installed?
def ruby_not_installed?(rubie)
!ruby_installed?(rubie)
end
##
# Determines whether or not the given ruby is a known ruby string
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby in the known ruby string list?
def ruby_known?(rubie)
return false unless ruby_string_sane?(rubie)
matches = known_rubies.select { |r| r.start_with?(rubie) }
if matches.empty?
# last-ditch attempt at matching. we'll drop the last -.*$ bit off the
# string assuming that the rubie contains a non-default patchlevel that
# will actually exist
fuzzier_rubie = rubie.sub(/-[^-]+$/, '')
return ! known_rubies.select { |r| r.start_with?(fuzzier_rubie) }.empty?
else
return true
end
end
##
# List all known RVM ruby strings.
#
# **Note** that these values are cached for lookup speed. To flush these
# values and force an update, call #update_known_rubies.
#
# @return [Array] the cached list of known ruby strings
def known_rubies
@known_rubies ||= update_known_rubies
end
##
# Updates the list of all known RVM strings.
#
# @return [Array] the list of known ruby strings
def update_known_rubies
@known_rubies = RVM.list_known_strings
@known_rubies
end
##
# Inverse of #ruby_known?
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby an unknown ruby string?
def ruby_unknown?(rubie)
!ruby_known?(rubie)
end
##
# Fetches the current default ruby string, potentially with gemset
#
# @return [String] the fully qualified RVM ruby string, nil if none is set
def current_ruby_default
RVM.list_default
end
##
# Determines whether or not the given ruby is the default one
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby the default one?
def ruby_default?(rubie)
return false unless ruby_string_sane?(rubie)
current_default = current_ruby_default
return false if current_default.nil?
current_default.start_with?(rubie)
end
##
# Determines whether or not and ruby/gemset environment exists
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] does this environment exist?
def env_exists?(ruby_string)
rubie = select_ruby(ruby_string)
gemset = select_gemset(ruby_string)
if gemset
gemset_exists?(:ruby => rubie, :gemset => gemset)
else
ruby_installed?(rubie)
end
end
##
# Lists all gemsets for a given RVM ruby.
#
# **Note** that these values are cached for lookup speed. To flush these
# values and force an update, call #update_installed_gemsets.
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Array] a cached list of gemset names
def installed_gemsets(rubie)
@installed_gemsets = Hash.new if @installed_gemsets.nil?
@installed_gemsets[rubie] ||= update_installed_gemsets(rubie)
end
##
# Updates the list of all gemsets for a given RVM ruby on the system
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Array] the current list of gemsets
def update_installed_gemsets(rubie)
env = RVM::Environment.new
env.use rubie
@installed_gemsets ||= {}
@installed_gemsets[rubie] = env.gemset_list
@installed_gemsets[rubie]
end
##
# Determines whether or not a gemset exists for a given ruby
#
# @param [Hash] the options to query a gemset with
# @option opts [String] :ruby the ruby the query within
# @option opts [String] :gemset the gemset to look for
def gemset_exists?(opts={})
return false if opts[:ruby].nil? || opts[:gemset].nil?
return false unless ruby_string_sane?(opts[:ruby])
return false unless ruby_installed?(opts[:ruby])
installed_gemsets(opts[:ruby]).include?(opts[:gemset])
end
##
# Reports whether an RVM ruby string carries a gemset component.
#
# @param ruby_string [String] the fully qualified RVM ruby string
# @return [Boolean] true when the string embeds a gemset
def string_include_gemset?(ruby_string)
  ruby_string.include?('@')
end
##
# Extracts the ruby portion of an RVM ruby string, dropping any gemset.
#
# @param ruby_string [String] the fully qualified RVM ruby string
# @return [String, nil] the ruby string without its gemset part
def select_ruby(ruby_string)
  rubie, _gemset = ruby_string.split('@', 2)
  rubie
end
##
# Extracts the gemset portion of an RVM ruby string, if present.
#
# @param ruby_string [String] the fully qualified RVM ruby string
# @return [String, nil] the gemset name, or nil when none is given
def select_gemset(ruby_string)
  string_include_gemset?(ruby_string) ? ruby_string.split('@').last : nil
end
##
# Sanitizes a ruby string so that it's more normalized.
#
# @param ruby_string [String, #to_s] an RVM ruby string
# @return [String] a fully qualified RVM ruby string
def normalize_ruby_string(ruby_string)
  # resolve "default" into the concrete ruby string it stands for
  return ruby_string unless ruby_string.start_with?("default")
  ruby_string.sub(/default/, current_ruby_default)
end
##
# Finds the correct shell profile to source to init an RVM-aware
# shell environment
#
# @return [String] full path to the shell profile
def find_profile_to_source
  if ::File.directory?("/etc/profile.d")
    "/etc/profile.d/rvm.sh"
  else
    "/etc/profile"
  end
end
##
# Returns a shell command that is RVM-aware
#
# @param cmd [String, #to_s] the shell command to be wrapped
# @return [String] the command wrapped in an RVM-initialized bash command
def rvm_wrap_cmd(cmd)
  # A single %{} literal replaces the old heredoc + sub() dance and no longer
  # appends a trailing newline to the generated command.
  %{bash -c "source #{find_profile_to_source} && #{cmd.gsub(/"/, '\"')}"}
end
##
# Installs any package dependencies needed by a given ruby
#
# @param rubie [String, #to_s] the fully qualified RVM ruby string
def install_ruby_dependencies(rubie)
  pkgs = []
  # MRI 1.8/1.9, REE and ruby-* strings are built from source and need a
  # toolchain plus common library headers; per-platform package names below.
  if rubie =~ /^1\.[89]\../ || rubie =~ /^ree/ || rubie =~ /^ruby-/
    case node[:platform]
    when "debian","ubuntu"
      pkgs = %w{ build-essential bison openssl libreadline6 libreadline6-dev
                 zlib1g zlib1g-dev libssl-dev libyaml-dev libsqlite3-0
                 libsqlite3-dev sqlite3 libxml2-dev libxslt1-dev ssl-cert }
      # ruby-head builds straight from the repository, so add VCS tooling
      pkgs += %w{ git-core subversion autoconf } if rubie =~ /^ruby-head$/
    when "suse"
      pkgs = %w{ gcc-c++ patch zlib zlib-devel libffi-devel
                 sqlite3-devel libxml2-devel libxslt-devel }
      # readline/openssl package names changed with SUSE 11
      if node.platform_version.to_f >= 11.0
        pkgs += %w{ libreadline5 readline-devel libopenssl-devel }
      else
        pkgs += %w{ readline readline-devel openssl-devel }
      end
      pkgs += %w{ git subversion autoconf } if rubie =~ /^ruby-head$/
    when "centos","redhat","fedora"
      pkgs = %w{ gcc-c++ patch readline readline-devel zlib zlib-devel
                 libyaml-devel libffi-devel openssl-devel }
      pkgs += %w{ git subversion autoconf } if rubie =~ /^ruby-head$/
    end
  elsif rubie =~ /^jruby/
    # TODO: need to figure out how to pull in java recipe only when needed. For
    # now, users of jruby will have to add the "java" recipe to their run_list.
    #include_recipe "java"
    pkgs << "g++"
  end
  # Install immediately (run_action) rather than queueing, so the packages are
  # present before the rvm install that follows in this same Chef phase.
  pkgs.each do |pkg|
    p = package pkg do
      action :nothing
    end
    p.run_action(:install)
  end
end
Simplify rvm_wrap_cmd: replace the heredoc-plus-sub construction with a single %{} string literal (this also drops the trailing newline from the generated command).
#
# Cookbook Name:: rvm
# Library:: helpers
#
# Author:: Fletcher Nichol <fnichol@nichol.ca>
#
# Copyright 2011, Fletcher Nichol
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
begin
require 'rvm'
rescue LoadError
Chef::Log.warn("Missing gem 'rvm'")
end
##
# Determines if a given ruby string is moderately sane and potentially legal
#
# @param rubie [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby string sane?
def ruby_string_sane?(rubie)
  return true if "goruby" == rubie # goruby has no versions yet
  # must be xxx-vvv at least; coerce the regex match to a real Boolean so the
  # method honors its documented contract (it previously returned nil)
  !!(rubie =~ /^[^-]+-[^-]+/)
end
##
# Lists all installed RVM rubies on the system.
#
# **Note** that these values are cached for lookup speed. To flush these
# values and force an update, call #update_installed_rubies.
#
# @return [Array] the cached list of currently installed rvm rubies
def installed_rubies
@installed_rubies ||= update_installed_rubies
end
##
# Updates the list of all installed RVM rubies on the system
#
# @return [Array] the list of currently installed rvm rubies
def update_installed_rubies
@installed_rubies = RVM.list_strings
@installed_rubies
end
##
# Determines whether or not the given ruby is already installed
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby installed?
def ruby_installed?(rubie)
return false unless ruby_string_sane?(rubie)
! installed_rubies.select { |r| r.start_with?(rubie) }.empty?
end
##
# Inverse of #ruby_installed?
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby not installed?
def ruby_not_installed?(rubie)
!ruby_installed?(rubie)
end
##
# Determines whether or not the given ruby is a known ruby string
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby in the known ruby string list?
def ruby_known?(rubie)
return false unless ruby_string_sane?(rubie)
matches = known_rubies.select { |r| r.start_with?(rubie) }
if matches.empty?
# last-ditch attempt at matching. we'll drop the last -.*$ bit off the
# string assuming that the rubie contains a non-default patchlevel that
# will actually exist
fuzzier_rubie = rubie.sub(/-[^-]+$/, '')
return ! known_rubies.select { |r| r.start_with?(fuzzier_rubie) }.empty?
else
return true
end
end
##
# List all known RVM ruby strings.
#
# **Note** that these values are cached for lookup speed. To flush these
# values and force an update, call #update_known_rubies.
#
# @return [Array] the cached list of known ruby strings
def known_rubies
@known_rubies ||= update_known_rubies
end
##
# Updates the list of all known RVM strings.
#
# @return [Array] the list of known ruby strings
def update_known_rubies
@known_rubies = RVM.list_known_strings
@known_rubies
end
##
# Inverse of #ruby_known?
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby an unknown ruby string?
def ruby_unknown?(rubie)
!ruby_known?(rubie)
end
##
# Fetches the current default ruby string, potentially with gemset
#
# @return [String] the fully qualified RVM ruby string, nil if none is set
def current_ruby_default
RVM.list_default
end
##
# Determines whether or not the given ruby is the default one
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] is this ruby the default one?
def ruby_default?(rubie)
return false unless ruby_string_sane?(rubie)
current_default = current_ruby_default
return false if current_default.nil?
current_default.start_with?(rubie)
end
##
# Determines whether or not and ruby/gemset environment exists
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Boolean] does this environment exist?
def env_exists?(ruby_string)
rubie = select_ruby(ruby_string)
gemset = select_gemset(ruby_string)
if gemset
gemset_exists?(:ruby => rubie, :gemset => gemset)
else
ruby_installed?(rubie)
end
end
##
# Lists all gemsets for a given RVM ruby.
#
# **Note** that these values are cached for lookup speed. To flush these
# values and force an update, call #update_installed_gemsets.
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Array] a cached list of gemset names
def installed_gemsets(rubie)
@installed_gemsets = Hash.new if @installed_gemsets.nil?
@installed_gemsets[rubie] ||= update_installed_gemsets(rubie)
end
##
# Updates the list of all gemsets for a given RVM ruby on the system
#
# @param [String, #to_s] the fully qualified RVM ruby string
# @return [Array] the current list of gemsets
def update_installed_gemsets(rubie)
env = RVM::Environment.new
env.use rubie
@installed_gemsets ||= {}
@installed_gemsets[rubie] = env.gemset_list
@installed_gemsets[rubie]
end
##
# Determines whether or not a gemset exists for a given ruby.
#
# @param [Hash] opts the options to query a gemset with
# @option opts [String] :ruby the ruby to query within
# @option opts [String] :gemset the gemset to look for
def gemset_exists?(opts={})
  rubie, gemset = opts[:ruby], opts[:gemset]
  return false if rubie.nil? || gemset.nil?
  return false unless ruby_string_sane?(rubie) && ruby_installed?(rubie)
  installed_gemsets(rubie).include?(gemset)
end
##
# Determines whether or not there is a gemset defined in a given ruby string.
#
# @param [String, #to_s] ruby_string the fully qualified RVM ruby string
# @return [Boolean] does the ruby string appear to have a gemset included?
def string_include_gemset?(ruby_string)
  # A gemset is always delimited from the ruby by an '@'.
  !ruby_string.index('@').nil?
end
##
# Filters out any gemset declarations in an RVM ruby string.
#
# @param [String, #to_s] ruby_string the fully qualified RVM ruby string
# @return [String] the ruby string, minus gemset
def select_ruby(ruby_string)
  # Only the part before the first '@' names the ruby itself.
  ruby_string.split('@', 2).first
end
##
# Filters out any ruby declaration in an RVM ruby string.
#
# @param [String, #to_s] ruby_string the fully qualified RVM ruby string
# @return [String] the gemset string, minus ruby or nil if no gemset given
def select_gemset(ruby_string)
  # Inlined the '@' membership check; nil signals "no gemset present".
  ruby_string.include?('@') ? ruby_string.split('@').last : nil
end
##
# Sanitizes a ruby string so that it's more normalized.
#
# @param [String, #to_s] ruby_string an RVM ruby string
# @return [String] a fully qualified RVM ruby string
def normalize_ruby_string(ruby_string)
  # Anything not using the "default" alias is already fully qualified.
  return ruby_string unless ruby_string.start_with?("default")
  # Expand "default" into the concrete ruby string it points at.
  ruby_string.sub(/default/, current_ruby_default)
end
##
# Finds the correct shell profile to source to init an RVM-aware
# shell environment.
#
# @return [String] full path to the shell profile
def find_profile_to_source
  profile_dir = "/etc/profile.d"
  # Prefer a profile.d drop-in when the distro supports it.
  ::File.directory?(profile_dir) ? ::File.join(profile_dir, "rvm.sh") : "/etc/profile"
end
##
# Returns a shell command that is RVM-aware.
#
# @param [String, #to_s] cmd the shell command to be wrapped
# @return [String] the command wrapped in an RVM-initialized bash command
def rvm_wrap_cmd(cmd)
  # Escape embedded double quotes so the command survives the outer quoting.
  escaped = cmd.gsub(/"/, '\"')
  %Q{bash -c "source #{find_profile_to_source} && #{escaped}"}
end
##
# Installs any package dependencies needed by a given ruby.
#
# Maps the requested ruby flavor (MRI/REE/named builds vs. JRuby) and the
# node's platform to the distro packages needed to compile/run it.
#
# @param [String, #to_s] rubie the fully qualified RVM ruby string
def install_ruby_dependencies(rubie)
pkgs = []
# MRI 1.8/1.9, REE and ruby-* builds are compiled from source, so they
# need a toolchain plus common development headers.
if rubie =~ /^1\.[89]\../ || rubie =~ /^ree/ || rubie =~ /^ruby-/
case node[:platform]
when "debian","ubuntu"
pkgs = %w{ build-essential bison openssl libreadline6 libreadline6-dev
zlib1g zlib1g-dev libssl-dev libyaml-dev libsqlite3-0
libsqlite3-dev sqlite3 libxml2-dev libxslt1-dev ssl-cert }
# ruby-head is built from a source checkout, so VCS tooling is needed.
pkgs += %w{ git-core subversion autoconf } if rubie =~ /^ruby-head$/
when "suse"
pkgs = %w{ gcc-c++ patch zlib zlib-devel libffi-devel
sqlite3-devel libxml2-devel libxslt-devel }
# SUSE renamed its readline/openssl packages starting with 11.0.
if node.platform_version.to_f >= 11.0
pkgs += %w{ libreadline5 readline-devel libopenssl-devel }
else
pkgs += %w{ readline readline-devel openssl-devel }
end
pkgs += %w{ git subversion autoconf } if rubie =~ /^ruby-head$/
when "centos","redhat","fedora"
pkgs = %w{ gcc-c++ patch readline readline-devel zlib zlib-devel
libyaml-devel libffi-devel openssl-devel }
pkgs += %w{ git subversion autoconf } if rubie =~ /^ruby-head$/
end
elsif rubie =~ /^jruby/
# TODO: need to figure out how to pull in java recipe only when needed. For
# now, users of jruby will have to add the "java" recipe to their run_list.
#include_recipe "java"
pkgs << "g++"
end
# Install each package immediately via run_action instead of queueing it
# in the resource collection, since this helper runs at compile time.
pkgs.each do |pkg|
p = package pkg do
action :nothing
end
p.run_action(:install)
end
end
|
# Helpers for computing DC/OS installer download locations from node
# attributes.
module Dcos
module Helpers
# Resolves the download URL for dcos_generate_config.sh.
#
# An explicit node['dcos']['dcos_generate_config_url'] attribute takes
# precedence; otherwise the URL is derived from dcos_version.
def dcos_generate_config_url
return node['dcos']['dcos_generate_config_url'] if node['dcos'].key?('dcos_generate_config_url')
case node['dcos']['dcos_version']
when 'EarlyAccess', 'earlyaccess'
"#{dcos_base_url}/dcos_generate_config.sh"
else
# Released builds are addressed by the commit hash of the installer.
"#{dcos_base_url}/commit/#{dcos_commit_id}/dcos_generate_config.sh"
end
end
private
# Base download URL for the configured DC/OS version. Versioned stable
# releases live under their own subdirectory.
def dcos_base_url
case node['dcos']['dcos_version']
when '1.9.4', '1.9.3', '1.9.2', '1.9.1', '1.8.9'
"https://downloads.dcos.io/dcos/stable/#{node['dcos']['dcos_version']}"
when 'EarlyAccess', 'earlyaccess'
'https://downloads.dcos.io/dcos/EarlyAccess'
else # stable or older releases
'https://downloads.dcos.io/dcos/stable'
end
end
# Commit hash of the installer build for the configured version;
# returns nil (falls through the case) for unknown versions.
def dcos_commit_id
case node['dcos']['dcos_version']
when 'stable', '1.10.0'
'e38ab2aa282077c8eb7bf103c6fff7b0f08db1a4'
when '1.9.4'
'ff2481b1d2c1008010bdc52554f872d66d9e5904'
when '1.9.3'
'744f5ea28fc52517e344a5250a5fd12554da91b8'
when '1.9.2'
'af6ddc2f5e95b1c1d9bd9fd3d3ef1891928136b9'
when '1.9.1'
'008d3bfe4acca190100fcafad9a18a205a919590'
when '1.9.0'
'0ce03387884523f02624d3fb56c7fbe2e06e181b'
when '1.8.9'
'65d66d7f399fe13bba8960c1f2c42ef9fa5dcf8d'
when '1.8.8'
'602edc1b4da9364297d166d4857fc8ed7b0b65ca'
when '1.8.7'
'1b43ff7a0b9124db9439299b789f2e2dc3cc086c'
end
end
end
end
# Make the helpers available inside recipes and resources.
Chef::Recipe.send(:include, Dcos::Helpers)
Chef::Resource.send(:include, Dcos::Helpers)
Add DC/OS 1.9.5 and 1.10.1 to private helpers
Signed-off-by: Chris Gianelloni <ef2880787b7c7d40508a6b0a5368add313fc9ad5@gmail.com>
# Helpers for computing DC/OS installer download locations from node
# attributes.
module Dcos
  module Helpers
    # Stable releases published under a version-specific subdirectory.
    VERSIONED_RELEASES = %w[1.10.1 1.10.0 1.9.5 1.9.4 1.9.3 1.9.2 1.9.1 1.8.9].freeze

    # Maps each dcos_version to the commit hash of its installer build.
    COMMIT_IDS = {
      'stable' => 'd932fc405eb80d8e5b3516eaabf2bd41a2c25c9f',
      '1.10.1' => 'd932fc405eb80d8e5b3516eaabf2bd41a2c25c9f',
      '1.10.0' => 'e38ab2aa282077c8eb7bf103c6fff7b0f08db1a4',
      '1.9.5'  => '4308d88bfc0dd979703f4ef500ce415c5683b3c5',
      '1.9.4'  => 'ff2481b1d2c1008010bdc52554f872d66d9e5904',
      '1.9.3'  => '744f5ea28fc52517e344a5250a5fd12554da91b8',
      '1.9.2'  => 'af6ddc2f5e95b1c1d9bd9fd3d3ef1891928136b9',
      '1.9.1'  => '008d3bfe4acca190100fcafad9a18a205a919590',
      '1.9.0'  => '0ce03387884523f02624d3fb56c7fbe2e06e181b',
      '1.8.9'  => '65d66d7f399fe13bba8960c1f2c42ef9fa5dcf8d',
      '1.8.8'  => '602edc1b4da9364297d166d4857fc8ed7b0b65ca',
      '1.8.7'  => '1b43ff7a0b9124db9439299b789f2e2dc3cc086c',
    }.freeze

    # Resolves the download URL for dcos_generate_config.sh. An explicit
    # node['dcos']['dcos_generate_config_url'] attribute takes precedence;
    # otherwise the URL is derived from dcos_version.
    def dcos_generate_config_url
      dcos = node['dcos']
      return dcos['dcos_generate_config_url'] if dcos.key?('dcos_generate_config_url')
      if %w[EarlyAccess earlyaccess].include?(dcos['dcos_version'])
        "#{dcos_base_url}/dcos_generate_config.sh"
      else
        # Released builds are addressed by the commit hash of the installer.
        "#{dcos_base_url}/commit/#{dcos_commit_id}/dcos_generate_config.sh"
      end
    end

    private

    # Base download URL for the configured DC/OS version.
    def dcos_base_url
      version = node['dcos']['dcos_version']
      if VERSIONED_RELEASES.include?(version)
        "https://downloads.dcos.io/dcos/stable/#{version}"
      elsif %w[EarlyAccess earlyaccess].include?(version)
        'https://downloads.dcos.io/dcos/EarlyAccess'
      else # stable or older releases
        'https://downloads.dcos.io/dcos/stable'
      end
    end

    # Commit hash of the installer for the configured version, or nil for
    # versions without a known hash.
    def dcos_commit_id
      COMMIT_IDS[node['dcos']['dcos_version']]
    end
  end
end
# Make the helpers available inside recipes and resources.
Chef::Recipe.send(:include, Dcos::Helpers)
Chef::Resource.send(:include, Dcos::Helpers)
|
# Kalen build status bot
require 'daemons'
Daemons.run_proc('kalen.rb') do
require 'cinch'
require_relative 'config.rb'
require 'cinch/plugins/identify'
require 'sinatra/base'
require 'ipaddr'
require 'multi_json'
require 'net/http'
require 'googl'
# Listener
class POSTListener
include Cinch::Plugin
@@start_messages = ["@project@ \#@build@: On your marks, get set, build!",
"@project@ \#@build@: Building... with blocks.",
"@project@ \#@build@: Starting build. If it's broken, blame the person sitting next to me.",
"@project@ \#@build@: <press any key to build>",
"@project@ \#@build@: Starting build. What, you expected me to say something witty?",
"@project@ \#@build@: Let's build this thing, yo!",
"@project@ \#@build@: Tab A into Slot B? Where's Slot B?!",
"@project@ \#@build@: Prepare the smelteries!",
"@project@ \#@build@: Starting build. If it's broken, blame Sun.",
"@project@ \#@build@: Starting build. If it's broken, blame Sun- no, Oracle!",
"@project@ \#@build@: Starting build. If it's broken, blame Diyo.",
"@project@ \#@build@: Starting build. If it's broken, blame fudgy.",
"@project@ \#@build@: Starting build. If it's broken, blame Soaryn.",
"@project@ \#@build@: Starting build. If it's broken, blame Direwolf. Somehow. How does that work anyway?",
"@project@ \#@build@: For the love of builds, remember the fish!",
"@project@ \#@build@: Cheer for success, blame Soaryn for failure.",
"@project@ \#@build@: Starting build. If it's broken, remember, Diyo loves you",
"@project@ \#@build@: Build... build... BUILD! MUAHAHAHA!"]
@@success_messages = ["@project@ (@channel@): All your build @version@ are belong to @link@",
"@project@ (@channel@): I'MA FIRING MY LAZOR!!! @version@ @link@",
"@project@ (@channel@): Build @version@ completed. What else did you want? @link@",
"@project@ (@channel@): Build @version@: Buried treasure ahoy! Set sail for adventure! @link@",
"@project@ (@channel@): Build @version@: Arrr.... We got that ticking gator @link@",
"@project@ (@channel@): My little @version@, my little @link@",
"@project@ (@channel@): We've got high @version@, we've got high @link@",
"@project@ (@channel@): Build @version@ feels pretty good! @link@",
"@project@ (@channel@): Build @version@ has high hopes for the future. @link@",
"@project@ (@channel@): @link@. Oops, shouldn't that have been for @version@?",
"@project@ (@channel@): Build @version@. Heyo! @link@",
"@project@ (@channel@): Gardevoir is the best pokemon! Build @version@, @link@",
"@project@ (@channel@): We like a bit of Shinx too. Build @version@, @link@",
"@project@ (@channel@): Build @version@ would like to thank their mother, father, and the great cube in the sky. @link@",
"@project@ (@channel@): Prepare for the jarpocalypse! Build @version@, @link@",
"@project@ (@channel@): Build @version@, a developer's best friend. @link@",
"@project@ (@channel@): Once upon a time, there was a build @version@ that visited their grandma @link@",
"@project@ (@channel@): Build @version@ Press any key to start. Well, where's the any key? @link@",
"@project@ (@channel@): Build @version@ belongs to @link@. My precioussssss",
"@project@ (@channel@): Bob the Builder! Can we fix it!? Bob the builder! Yes we can! @version@, @link@",
"@project@ (@channel@): Ahhh yes, the sound of molten metal ready for casting. @version@, @link@",
"@project@ (@channel@): Wow, some sorcery really made @version@ work! @link@",
"@project@ (@channel@): You did it, @version@. Come, grab a cookie. @link@",
"@project@ (@channel@): Diyo was here at @link@ during @version@.",
"@project@ (@channel@): I LIKE TO MOVE IT MOVE IT. Build @version@, @link@"]
@@failure_messages = ["@project@ build \#@build@ thought ice cream was better.",
"@project@ build \#@build@ walked the plank.",
"@project@ build \#@build@ fell off a cliff.",
"@project@ build \#@build@ stubbed its toe.",
"@project@ build \#@build@ walked the plank, yaarrr.",
"@project@ build \#@build@ was assassinated by pudding.",
"@project@ build \#@build@ has made a mess on the floor.",
"@project@ build \#@build@ was playing on the tracks.",
"@project@ build \#@build@ couldn't find the 'any' key.",
"@project@ build \#@build@ fell into a smeltery. Head first.",
"@project@ build \#@build@: Bob the Builder! Can we fix it!? Bob the builder! No we can't!",
"@project@ build \#@build@: Hullo, Kalen here, have you tried turning it off and on again?",
"@project@ build \#@build@: Halp! It's borked! It's on fire! D:",
"@project@ build \#@build@: The Carrot Parrot of Doom was here.",
"@project@ build \#@build@: Soaryn broke it, don't look at me.",
"@project@ build \#@build@: It wasn't me, I swear!",
"@project@ build \#@build@: Moar, moar, MOAR bugs!",
"@project@ build \#@build@: Your choice was... poor.",
"@project@ build \#@build@: Who stole the semicolon?!",
"@project@ build \#@build@: `git blame Soaryn`",
"@project@ build \#@build@: `git blame Sun`",
"@project@ build \#@build@: `git blame Diyo`",
"@project@ build \#@build@: `git blame fudgy`",
"@project@ build \#@build@: `git blame prog`",
"@project@ build \#@build@: `git blame Direwolf`",
"@project@ build \#@build@: prog broke the build again! :<",
"@project@ build \#@build@: Alright who did it? WHO BROKE THE BUILD NOW!?",
"(@project@) Cleanup on aisle @build@...",
"@project@ build \#@build@: Vex was here."]
@@commitless_messages = ["@project@ (@channel@): I herd you liek weird deploys. @version@, @link@",
"@project@ (@channel@): I'm Kalen, and I'm not a Mac or PC - This... Is... LINUX! *sparta-kicks @version@ into @link@*",
"@project@ (@channel@): The cookie monster was here. @version@, @link@",
"@project@ (@channel@): Don't mind the explosions, I'm sure @version@ is perfectly behaved back there... @link@"]
def initialize(bot)
super bot
t = Thread.new(self) { |callback|
SinatraServer.set :controller, callback
SinatraServer.set :port, config[:port]
SinatraServer.run!
}
end
# Announces a successful build in the IRC channel, including a shortened
# build link, then relays the build's changesets. Failures are logged
# instead of raised so the bot keeps running.
def report(data)
  link = Googl.shorten(data['url']).short_url
  replacements = {
    '@project@' => data['project'],
    '@channel@' => data['channel'],
    '@version@' => data['version'],
    '@link@'    => link,
  }
  announcement = replacements.reduce(@@success_messages.sample) do |text, (token, value)|
    text.gsub(token, value)
  end
  Channel(config[:channel]).msg announcement
  reportChangesForBuild(data)
rescue Exception => e
  warn "Failed to send message: #{e.message}"
end
def report_start(data)
begin
channel = Channel(config[:channel])
project = data['project']
build = data['build']
msg = @@start_messages.sample.gsub("@project@", project).gsub("@build@", build)
channel.msg msg
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
def report_fail(data)
begin
channel = Channel(config[:channel])
project = data['project']
build = data['build']
msg = @@failure_messages.sample.gsub("@project@", project).gsub("@build@", build)
channel.msg msg
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
def report_commitless(data)
begin
channel = Channel(config[:channel])
project = data['project']
devchannel = data['channel']
version = data['version']
url = Googl.shorten(data['url']).short_url
channel.msg @@commitless_messages.sample.gsub("@project@", project).gsub("@channel@", devchannel).gsub("@version@", version).gsub("@link@", url)
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
def handle(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build endpoint: #{data.inspect}"
report(data)
end
ret
end
def handle_start(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-start endpoint: #{data.inspect}"
report_start(data)
end
ret
end
def handle_fail(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-fail endpoint: #{data.inspect}"
report_fail(data)
end
ret
end
def handle_commitless(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-commitless endpoint: #{data.inspect}"
report_commitless(data)
end
ret
end
def reportChangesForBuild(data)
channel = Channel(config[:channel])
msgs = getChangesFromREST(data['buildkey'])
msgs.each do |m|
channel.msg m
end
end
def getChangesFromREST(buildkey)
uri = URI("#{config[:api]}/result/#{buildkey}.json?expand=changes.change")
req = Net::HTTP::Get.new(uri)
req.basic_auth config[:user], config[:password]
res = Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https', :verify_mode => OpenSSL::SSL::VERIFY_NONE) do |http|
http.request(req)
end
if res.is_a?(Net::HTTPSuccess)
json = MultiJson.decode res.body
puts json.inspect
puts json['changes']
if json['changes']['size'] <= 0
matches = /.+>(.+)<\/a>/.match json['buildReason']
unless matches == nil
return ["This build appears to be a manual run by #{matches[1]}."]
else
return []
end
else
out = []
json['changes']['change'].each do |change|
puts change.inspect
out.push("[#{change['changesetId'][0..5]}] #{change['comment'].chomp}")
end
return out
end
end
end
end
class SinatraServer < Sinatra::Base
set :port, 9090
set :environment, :production
server.delete 'webrick'
post '/build' do
settings.controller.handle(request, params[:payload])
end
post '/build-start' do
settings.controller.handle_start(request, params[:payload])
end
post '/build-fail' do
settings.controller.handle_fail(request, params[:payload])
end
post '/build-commitless' do
settings.controller.handle_commitless(request, params[:payload])
end
end
# END Listener
# START !messages
class MessagesCommand
include Cinch::Plugin
match /messages?/i, {:method => :messages}
def messages(msg)
msg.reply("Give us more build messages! http://goo.gl/639t7S")
end
end
# END !messages
plugins = [Cinch::Plugins::Identify, POSTListener, MessagesCommand]
bot = Cinch::Bot.new do
configure do |c|
c.server = SERVER
c.port = PORT
c.nick = NICK
c.user = NICK
c.realname = "Kalen::Cinch Status Bot"
c.channels = [CHANNEL]
c.plugins.plugins = plugins
c.messages_per_second = 5
c.local_host = "0.0.0.0"
if NS_ENABLED
c.plugins.options[Cinch::Plugins::Identify] = {
:username => NICK,
:password => NS_PASSWORD,
:type => :nickserv,
}
end
c.plugins.options[POSTListener] = {
:port => POST_PORT,
:channel => CHANNEL,
:api => REST_API,
:user => REST_USERNAME,
:password => REST_PASSWORD
}
end
end
bot.start
end
Add a build-hidden endpoint for announcing builds without a download link.
# Kalen build status bot
require 'daemons'
Daemons.run_proc('kalen.rb') do
require 'cinch'
require_relative 'config.rb'
require 'cinch/plugins/identify'
require 'sinatra/base'
require 'ipaddr'
require 'multi_json'
require 'net/http'
require 'googl'
# Listener
class POSTListener
include Cinch::Plugin
@@start_messages = ["@project@ \#@build@: On your marks, get set, build!",
"@project@ \#@build@: Building... with blocks.",
"@project@ \#@build@: Starting build. If it's broken, blame the person sitting next to me.",
"@project@ \#@build@: <press any key to build>",
"@project@ \#@build@: Starting build. What, you expected me to say something witty?",
"@project@ \#@build@: Let's build this thing, yo!",
"@project@ \#@build@: Tab A into Slot B? Where's Slot B?!",
"@project@ \#@build@: Prepare the smelteries!",
"@project@ \#@build@: Starting build. If it's broken, blame Sun.",
"@project@ \#@build@: Starting build. If it's broken, blame Sun- no, Oracle!",
"@project@ \#@build@: Starting build. If it's broken, blame Diyo.",
"@project@ \#@build@: Starting build. If it's broken, blame fudgy.",
"@project@ \#@build@: Starting build. If it's broken, blame Soaryn.",
"@project@ \#@build@: Starting build. If it's broken, blame Direwolf. Somehow. How does that work anyway?",
"@project@ \#@build@: For the love of builds, remember the fish!",
"@project@ \#@build@: Cheer for success, blame Soaryn for failure.",
"@project@ \#@build@: Starting build. If it's broken, remember, Diyo loves you",
"@project@ \#@build@: Build... build... BUILD! MUAHAHAHA!"]
@@success_messages = ["@project@ (@channel@): All your build @version@ are belong to @link@",
"@project@ (@channel@): I'MA FIRING MY LAZOR!!! @version@ @link@",
"@project@ (@channel@): Build @version@ completed. What else did you want? @link@",
"@project@ (@channel@): Build @version@: Buried treasure ahoy! Set sail for adventure! @link@",
"@project@ (@channel@): Build @version@: Arrr.... We got that ticking gator @link@",
"@project@ (@channel@): My little @version@, my little @link@",
"@project@ (@channel@): We've got high @version@, we've got high @link@",
"@project@ (@channel@): Build @version@ feels pretty good! @link@",
"@project@ (@channel@): Build @version@ has high hopes for the future. @link@",
"@project@ (@channel@): @link@. Oops, shouldn't that have been for @version@?",
"@project@ (@channel@): Build @version@. Heyo! @link@",
"@project@ (@channel@): Gardevoir is the best pokemon! Build @version@, @link@",
"@project@ (@channel@): We like a bit of Shinx too. Build @version@, @link@",
"@project@ (@channel@): Build @version@ would like to thank their mother, father, and the great cube in the sky. @link@",
"@project@ (@channel@): Prepare for the jarpocalypse! Build @version@, @link@",
"@project@ (@channel@): Build @version@, a developer's best friend. @link@",
"@project@ (@channel@): Once upon a time, there was a build @version@ that visited their grandma @link@",
"@project@ (@channel@): Build @version@ Press any key to start. Well, where's the any key? @link@",
"@project@ (@channel@): Build @version@ belongs to @link@. My precioussssss",
"@project@ (@channel@): Bob the Builder! Can we fix it!? Bob the builder! Yes we can! @version@, @link@",
"@project@ (@channel@): Ahhh yes, the sound of molten metal ready for casting. @version@, @link@",
"@project@ (@channel@): Wow, some sorcery really made @version@ work! @link@",
"@project@ (@channel@): You did it, @version@. Come, grab a cookie. @link@",
"@project@ (@channel@): Diyo was here at @link@ during @version@.",
"@project@ (@channel@): I LIKE TO MOVE IT MOVE IT. Build @version@, @link@"]
@@failure_messages = ["@project@ build \#@build@ thought ice cream was better.",
"@project@ build \#@build@ walked the plank.",
"@project@ build \#@build@ fell off a cliff.",
"@project@ build \#@build@ stubbed its toe.",
"@project@ build \#@build@ walked the plank, yaarrr.",
"@project@ build \#@build@ was assassinated by pudding.",
"@project@ build \#@build@ has made a mess on the floor.",
"@project@ build \#@build@ was playing on the tracks.",
"@project@ build \#@build@ couldn't find the 'any' key.",
"@project@ build \#@build@ fell into a smeltery. Head first.",
"@project@ build \#@build@: Bob the Builder! Can we fix it!? Bob the builder! No we can't!",
"@project@ build \#@build@: Hullo, Kalen here, have you tried turning it off and on again?",
"@project@ build \#@build@: Halp! It's borked! It's on fire! D:",
"@project@ build \#@build@: The Carrot Parrot of Doom was here.",
"@project@ build \#@build@: Soaryn broke it, don't look at me.",
"@project@ build \#@build@: It wasn't me, I swear!",
"@project@ build \#@build@: Moar, moar, MOAR bugs!",
"@project@ build \#@build@: Your choice was... poor.",
"@project@ build \#@build@: Who stole the semicolon?!",
"@project@ build \#@build@: `git blame Soaryn`",
"@project@ build \#@build@: `git blame Sun`",
"@project@ build \#@build@: `git blame Diyo`",
"@project@ build \#@build@: `git blame fudgy`",
"@project@ build \#@build@: `git blame prog`",
"@project@ build \#@build@: `git blame Direwolf`",
"@project@ build \#@build@: prog broke the build again! :<",
"@project@ build \#@build@: Alright who did it? WHO BROKE THE BUILD NOW!?",
"(@project@) Cleanup on aisle @build@...",
"@project@ build \#@build@: Vex was here."]
@@commitless_messages = ["@project@ (@channel@): I herd you liek weird deploys. @version@, @link@",
"@project@ (@channel@): I'm Kalen, and I'm not a Mac or PC - This... Is... LINUX! *sparta-kicks @version@ into @link@*",
"@project@ (@channel@): The cookie monster was here. @version@, @link@",
"@project@ (@channel@): Don't mind the explosions, I'm sure @version@ is perfectly behaved back there... @link@"]
# Announcement templates for "hidden" builds (announced without a link).
# The @project@/@channel@/@version@ placeholders are substituted via gsub.
# Fix: the second template used "@version" (missing trailing '@'), so the
# placeholder was never replaced in the announcement.
@@hidden_messages = ["@project@ (@channel@): Pssst. @version@ is a secret!",
"@project@ (@channel@): Hey, @version@, give me a link :<",
"@project@ (@channel@): @version@ is trapped under an NDA.",
"@project@ (@channel@): @version@ was here, stealing all the builds.",
"@project@ (@channel@): Wtf is this shit? @version@",
"@project@ (@channel@): Hey, hey! Hey listen! @version@",
"@project@ (@channel@): I was talking to Aeon yesterday about @version@. Boring-ass Maven server."]
def initialize(bot)
super bot
t = Thread.new(self) { |callback|
SinatraServer.set :controller, callback
SinatraServer.set :port, config[:port]
SinatraServer.run!
}
end
def report(data)
begin
channel = Channel(config[:channel])
project = data['project']
devchannel = data['channel']
version = data['version']
url = Googl.shorten(data['url']).short_url
channel.msg @@success_messages.sample.gsub("@project@", project).gsub("@channel@", devchannel).gsub("@version@", version).gsub("@link@", url)
reportChangesForBuild(data)
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
def report_start(data)
begin
channel = Channel(config[:channel])
project = data['project']
build = data['build']
msg = @@start_messages.sample.gsub("@project@", project).gsub("@build@", build)
channel.msg msg
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
def report_fail(data)
begin
channel = Channel(config[:channel])
project = data['project']
build = data['build']
msg = @@failure_messages.sample.gsub("@project@", project).gsub("@build@", build)
channel.msg msg
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
def report_commitless(data)
begin
channel = Channel(config[:channel])
project = data['project']
devchannel = data['channel']
version = data['version']
url = Googl.shorten(data['url']).short_url
channel.msg @@commitless_messages.sample.gsub("@project@", project).gsub("@channel@", devchannel).gsub("@version@", version).gsub("@link@", url)
rescue Exception => e
warn "Failed to send message: #{e.message}"
end
end
# Announces a hidden (link-less) build to the IRC channel, then relays
# its changesets. Failures are logged instead of raised so the bot
# keeps running.
def report_hidden(data)
  replacements = {
    '@project@' => data['project'],
    '@channel@' => data['channel'],
    '@version@' => data['version'],
  }
  announcement = replacements.reduce(@@hidden_messages.sample) do |text, (token, value)|
    text.gsub(token, value)
  end
  Channel(config[:channel]).msg announcement
  reportChangesForBuild(data)
rescue Exception => e
  warn "Failed to send message: #{e.message}"
end
def handle(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build endpoint: #{data.inspect}"
report(data)
end
ret
end
def handle_start(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-start endpoint: #{data.inspect}"
report_start(data)
end
ret
end
def handle_fail(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-fail endpoint: #{data.inspect}"
report_fail(data)
end
ret
end
def handle_commitless(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-commitless endpoint: #{data.inspect}"
report_commitless(data)
end
ret
end
def handle_hidden(req, payload)
ret = 200
begin
data = MultiJson.decode payload
info "Got POST from build-hidden endpoint: #{data.inspect}"
report_hidden(data)
end
ret
end
def reportChangesForBuild(data)
channel = Channel(config[:channel])
msgs = getChangesFromREST(data['buildkey'])
msgs.each do |m|
channel.msg m
end
end
# Fetches the changesets for a build from the CI server's REST API and
# formats them as IRC-ready strings.
#
# Returns an array of "[shortsha] comment" strings; a one-element
# "manual run" notice when the build had no changes but a triggering
# user can be parsed from buildReason; an empty array otherwise; or
# nil when the HTTP request is not successful.
def getChangesFromREST(buildkey)
uri = URI("#{config[:api]}/result/#{buildkey}.json?expand=changes.change")
req = Net::HTTP::Get.new(uri)
req.basic_auth config[:user], config[:password]
# NOTE(review): VERIFY_NONE disables TLS certificate validation --
# presumably to tolerate a self-signed CI certificate; confirm this
# is intentional before reusing elsewhere.
res = Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https', :verify_mode => OpenSSL::SSL::VERIFY_NONE) do |http|
http.request(req)
end
if res.is_a?(Net::HTTPSuccess)
json = MultiJson.decode res.body
puts json.inspect
puts json['changes']
if json['changes']['size'] <= 0
# No changesets: try to extract the triggering user from the HTML
# anchor embedded in the buildReason field.
matches = /.+>(.+)<\/a>/.match json['buildReason']
unless matches == nil
return ["This build appears to be a manual run by #{matches[1]}."]
else
return []
end
else
out = []
json['changes']['change'].each do |change|
puts change.inspect
# Abbreviate each changeset id to its first six characters.
out.push("[#{change['changesetId'][0..5]}] #{change['comment'].chomp}")
end
return out
end
end
end
end
# Embedded HTTP server that receives build-status webhook POSTs and
# forwards them to the POSTListener plugin stored in the :controller
# setting (wired up in POSTListener#initialize).
class SinatraServer < Sinatra::Base
set :port, 9090
set :environment, :production
# Remove WEBrick from the handler candidates so another rack server is
# picked up instead.
server.delete 'webrick'
post '/build' do
settings.controller.handle(request, params[:payload])
end
post '/build-start' do
settings.controller.handle_start(request, params[:payload])
end
post '/build-fail' do
settings.controller.handle_fail(request, params[:payload])
end
post '/build-commitless' do
settings.controller.handle_commitless(request, params[:payload])
end
post '/build-hidden' do
settings.controller.handle_hidden(request, params[:payload])
end
end
# END Listener
# START !messages
# IRC command plugin: replies to "message"/"messages" requests with a
# link for suggesting new build announcement templates.
class MessagesCommand
include Cinch::Plugin
# Matches "message" or "messages", case-insensitively.
match /messages?/i, {:method => :messages}
def messages(msg)
msg.reply("Give us more build messages! http://goo.gl/639t7S")
end
end
# END !messages
plugins = [Cinch::Plugins::Identify, POSTListener, MessagesCommand]
bot = Cinch::Bot.new do
configure do |c|
c.server = SERVER
c.port = PORT
c.nick = NICK
c.user = NICK
c.realname = "Kalen::Cinch Status Bot"
c.channels = [CHANNEL]
c.plugins.plugins = plugins
c.messages_per_second = 5
c.local_host = "0.0.0.0"
if NS_ENABLED
c.plugins.options[Cinch::Plugins::Identify] = {
:username => NICK,
:password => NS_PASSWORD,
:type => :nickserv,
}
end
c.plugins.options[POSTListener] = {
:port => POST_PORT,
:channel => CHANNEL,
:api => REST_API,
:user => REST_USERNAME,
:password => REST_PASSWORD
}
end
end
bot.start
end
|
# make sure we're running inside Merb
if defined?(Merb::Plugins)
  # Merb gives you a Merb::Plugins.config hash...feel free to put your stuff in your piece of it
  # Fix: the original key `:.` is not a valid symbol literal and does not
  # parse; quote it so the file at least loads. This looks like an
  # unfilled generator placeholder -- replace with the plugin's real name.
  Merb::Plugins.config[:"."] = {
    :chickens => false
  }
  Merb::BootLoader.before_app_loads do
    # require code that must be loaded before the application
  end
  Merb::BootLoader.after_app_loads do
    # code that can be required after the application loads
  end
  Merb::Plugins.add_rakefiles "./merbtasks"
end
Delete unused Merb plugin boilerplate from the repository.
|
class Lmfit < Formula
  desc "Levenberg-Marquardt least-squares minimization and curve fitting."
  homepage "http://apps.jcns.fz-juelich.de/doku/sc/lmfit"
  url "http://apps.jcns.fz-juelich.de/src/lmfit/lmfit-6.1.tgz"
  sha256 "54366788400e3b1eb47cff44c9dae9906da079400cec2df2fb0b865c9e04c6a0"

  bottle do
    cellar :any
    sha256 "454e223c39a4a049c9001584137077147f85d12a3657c10d6888b4db8415106d" => :sierra
    sha256 "4998c7cfa91014494c02dc1601c11951348b999269d6fed9cd1a4cc50283608e" => :el_capitan
    sha256 "85cd1061ef09f90b819f611d4b1e86a81f04e88e49f9c67e674cf47b171c484c" => :yosemite
  end

  def install
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
    # Keep the demo sources for the test block and for users.
    (pkgshare/"demos").install Dir["demo/*.c"]
  end

  def caveats
    # Fix: demos are installed under pkgshare/"demos" (pkgshare already
    # includes the formula name, so "lmfit/demos" pointed at a path that
    # does not exist), and String#+ raises TypeError when given a
    # Pathname -- interpolate instead.
    "Demo sources have been placed in #{pkgshare}/demos"
  end

  test do
    # curve1.c tests lmcurve.h
    system ENV.cc, (pkgshare/"demos/curve1.c"), "-I#{include}", "-L#{lib}", "-llmfit", "-o", "curve1"
    system "./curve1"
    # surface1.c tests lmmin.h
    system ENV.cc, (pkgshare/"demos/surface1.c"), "-I#{include}", "-L#{lib}", "-llmfit", "-o", "surface1"
    system "./surface1"
  end
end
lmfit: update 6.1 bottle for Linuxbrew.
class Lmfit < Formula
  desc "Levenberg-Marquardt least-squares minimization and curve fitting."
  homepage "http://apps.jcns.fz-juelich.de/doku/sc/lmfit"
  url "http://apps.jcns.fz-juelich.de/src/lmfit/lmfit-6.1.tgz"
  sha256 "54366788400e3b1eb47cff44c9dae9906da079400cec2df2fb0b865c9e04c6a0"

  bottle do
    cellar :any
    sha256 "454e223c39a4a049c9001584137077147f85d12a3657c10d6888b4db8415106d" => :sierra
    sha256 "4998c7cfa91014494c02dc1601c11951348b999269d6fed9cd1a4cc50283608e" => :el_capitan
    sha256 "85cd1061ef09f90b819f611d4b1e86a81f04e88e49f9c67e674cf47b171c484c" => :yosemite
    sha256 "9fbd898fc70d99e318676e1d32f0802f522019453bb6cea594353df604345e97" => :x86_64_linux
  end

  def install
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
    # Keep the demo sources for the test block and for users.
    (pkgshare/"demos").install Dir["demo/*.c"]
  end

  def caveats
    # Fix: demos are installed under pkgshare/"demos" (pkgshare already
    # includes the formula name, so "lmfit/demos" pointed at a path that
    # does not exist), and String#+ raises TypeError when given a
    # Pathname -- interpolate instead.
    "Demo sources have been placed in #{pkgshare}/demos"
  end

  test do
    # curve1.c tests lmcurve.h
    system ENV.cc, (pkgshare/"demos/curve1.c"), "-I#{include}", "-L#{lib}", "-llmfit", "-o", "curve1"
    system "./curve1"
    # surface1.c tests lmmin.h
    system ENV.cc, (pkgshare/"demos/surface1.c"), "-I#{include}", "-L#{lib}", "-llmfit", "-o", "surface1"
    system "./surface1"
  end
end
|
#!/usr/bin/ruby -w
#
# Copyright (c) 2013 Andrew "Jamoozy" Correa,
#
# This file is part of Picture Viewer.
#
# Picture Viewer is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
require 'yaml'
require 'ftools'
require 'sqlite3'
require 'optparse'
# Makes a page for an album according to the entry.
# entry: a Hash loaded from the entries YAML file.  Keys used here:
#   :loc    -- path to the album's source image directory
#   :sln    -- symlink name for the album inside $tmp_dir
#   :bname  -- basename of the generated HTML file
#   :title  -- human-readable album title
#   :images -- per-image tuples; index 0 looks like the thumbnail path,
#              1 the image file name, 2 the title (inferred from usage
#              below -- TODO confirm against entries.yaml)
# Side effects: creates a symlink and a per-album comments.db (SQLite)
# under $tmp_dir, and writes "#{$tmp_dir}/#{entry[:bname]}.html".
def make_page(entry)
  # Link the album's source directory into the staging dir (idempotent).
  `ln -s "#{entry[:loc]}" "#$tmp_dir/#{entry[:sln]}"` unless File.exists?("#$tmp_dir/#{entry[:sln]}")

  # Check if DB already exists. If not, create it.
  full_db_name = "#$tmp_dir/#{entry[:sln]}/comments.db"
  unless File.exists?(full_db_name)
    begin
      db = SQLite3::Database.new(full_db_name)
    rescue SQLite3::CantOpenException => e
      # NOTE(review): `db` stays unset after this failure, so the execute
      # calls below would raise NoMethodError -- this likely wants a return.
      puts "Can't open db: ", full_db_name
    end
    db.execute("create table images (
id integer primary key,
name text,
title text
)")
    db.execute("create table comments (
id integer primary key,
name text,
comment text,
utime datetime,
ip text,
img_id integer,
foreign key(img_id) references images(id)
)")

    # Initial entries for all the images.
    db.transaction
    entry[:images].each do |img|
      db.execute('insert into images (name,title) values (?,?)', [img[1], img[2]])
    end
    db.commit

    # Set the right permissions for the webserver to handle the DB & prompt
    # the user to run the chown on the DB and its directory.
    `chmod 664 #{full_db_name}`
    puts "Please run \"sudo chown :www-data #{full_db_name} && sudo chown :www-data #$tmp_dir/#{entry[:sln]}\""
  else
    begin
      db = SQLite3::Database.new(full_db_name)
    rescue SQLite3::CantOpenException => e
      # NOTE(review): same unset-`db` hazard as above.
      puts "Can't open db: ", full_db_name
    end

    # Make sure all the images' titles are up to date.
    entry[:images].each do |img|
      r = db.execute('select * from images where name=?', [img[1]])
      if !r || r.size < 1
        # New image: insert it.
        db.execute('insert into images (name,title) values (?,?)', [img[1], img[2]])
      elsif r.size > 1
        # name should be unique; report duplicates rather than guessing.
        puts "Error: got #{r.size} results for img #{img[1]}"
      elsif r[0][2] != img[2]
        # Title changed in the YAML; sync it into the DB.
        puts "Updating #{img[1]} title to \"#{img[2]}\""
        db.execute('update images set title=? where id=?', [img[2], r[0][0]])
      end
    end
  end

  # Write HTML file.
  html_name = "#$tmp_dir/#{entry[:bname]}.html"
  f = File.new(html_name, 'w')
  f.write('<!DOCTYPE html>')
  f.write('<html><head>')
  f.write('<meta charset="utf-8">')
  f.write("<title>#{entry[:title]}</title>")
  f.write('<link rel="stylesheet" type="text/css" href="style.css">')
  f.write('<script src="jquery-1.10.1.min.js" type="text/javascript"></script>')
  f.write('<script src="jail.min.js" type="text/javascript"></script>')
  f.write('<script src="photos.js" type="text/javascript"></script>')
  f.write('<link rel="stylesheet" type="text/css" href="http://fonts.googleapis.com/css?family=Tangerine|Oregano">')
  f.write('</head><body>')
  f.write('<div id="background"><img src="background.jpg" class="stretch"/></div>')
  f.write('<h1 class="subtitle">')
  f.write(entry[:title])
  f.write('</h1><div class="content"><ul>')
  # One list item per image; photos.js lazily swaps thumbnails for full images.
  entry[:images].each do |image|
    thumb_path = "#{entry[:sln]}/#{image[0]}"
    path = "#{entry[:sln]}/#{image[1]}"
    f.write("<li><span src=\"#{path}\"><img class=\"lazy\" data-src=\"#{thumb_path}\" src=\"loading.png\" title=\"#{image[1]}\" /><noscript><img src=\"#{thumb_path}\" title=\"#{image[1]}\"></noscript><div class=\"fname\">#{image[1]}</div></span>")
  end
  # Lightbox overlay markup used by photos.js (navigation arrows + comment form).
  f.write('</ul></div><div id="exit-bg"><div id="overlay"><div id="x"><img src="x.png""></div><div id="img-pane"><div id="left" class="navs"><img src="left-arrow.png"></div><div id="right" class="navs"><img src="right-arrow.png"></div><img id="image" src=""></div><div id="desc"></div><div id="comments"><ul class="comments-list"></ul><div id="form">Leave a comment!<br>Name:<input size="30" value="" id="name" type="text"><br><textarea cols="34" rows="5" id="comment"></textarea><input type="button" id="submit" value="Submit"></div><div id="full-size-dl"><a href="">(full size)</a></div></div></div></div>')
  # NOTE(review): closing tag is '<html>' -- should presumably be '</html>'.
  f.write('</body><html>')
  f.close
end
# Simple value object holding the parsed command-line options.
class Options
  attr_accessor :verbose  # print extra progress output
  attr_accessor :tmp      # staging directory for generated files
  attr_accessor :dst      # final destination directory (rsync target)
  attr_accessor :entries  # path to the YAML entries file
  # parse_args assigns options.tp for --transfer-protocol, but no accessor
  # existed, which raised NoMethodError when -T was used; declare it.
  attr_accessor :tp
end
# Parses command-line arguments (destructively, from ARGV) and returns an
# Options instance with defaults applied for anything not given.
def parse_args
  options = Options.new
  options.verbose = false
  options.tmp = '.gen'
  options.dst = '/home/jamoozy/www/pv'
  options.entries = 'entries.yaml'
  OptionParser.new do |opts|
    opts.banner = "Usage: maker.rb [options]"
    opts.on('-v', '--[no-]verbose', 'Run verbosely') do |v|
      puts 'verbose: ' + v.to_s
      options.verbose = v
    end
    # Fixed help-text typo: "Speicfy" -> "Specify".
    opts.on('-TTP', '--transfer-protocol=TP', 'Specify transfer protocol') do |tp|
      puts 'transfer-protocol: ' + tp.to_s
      options.tp = tp
    end
    opts.on('-dDST', '--destination=DST', 'Specify a destination') do |d|
      puts 'destination: ' + d.to_s
      options.dst = d
    end
    # Fixed help-text typo: "temorary" -> "temporary".
    opts.on('-tTMP', '--tmp=TMP', 'Specify temporary directory.') do |t|
      puts 'tmp: ' + t.to_s
      options.tmp = t
    end
    # Completed the truncated help text.
    opts.on('-yYML', '--yaml=YML', 'Specify YAML-formatted entries file') do |y|
      puts 'yml: ' + y.to_s
      options.entries = y
    end
  end.parse!
  options
end
# Entry point: stage all assets into $tmp_dir, generate one HTML page per
# album plus an index, then rsync the staging dir to the destination.
if __FILE__ == $0
  options = parse_args
  $tmp_dir = options.tmp
  $dst_dir = options.dst
  $entries = options.entries
  # NOTE(review): File.makedirs/File.exists? come from ftools (Ruby 1.8
  # era); ftools was removed in 1.9 -- confirm the target Ruby version.
  File.makedirs($tmp_dir) unless File.exists?($tmp_dir)
  File.makedirs($dst_dir) unless File.exists?($dst_dir)
  # Copy static assets (JS, CSS, icons, CGI scripts) into the staging dir.
  `cp icons/*.png .htaccess upload.rb dbi.rb *.js style.css background.jpg #$tmp_dir`

  # Load entries from the yaml file.
  entries = File.open($entries) {|f| YAML.load(f)}

  # Generate the list of albums for the main page.
  content = '<ul>'
  entries.each do |entry|
    html_name = "#{entry[:bname]}.html"
    content << "<li><a href=\"#{html_name}\"><img src=\"#{entry[:sln]}/#{entry[:thumb]}\"><br>#{entry[:title]}</a>"
    make_page(entry)
  end
  content << '</ul>'

  # Write index.html with the above content.
  f = File.new("#$tmp_dir/index.html", 'w')
  f.write('<!DOCTYPE html><html><head><meta charset="utf-8"><link rel="stylesheet" type="text/css" href="http://fonts.googleapis.com/css?family=Tangerine|Oregano"><link rel="stylesheet" type="text/css" href="style.css">')
  f.write('<script src="jquery-1.10.1.min.js" type="text/javascript"></script>')
  f.write('<script src="upload.js" type="text/javascript"></script>')
  f.write('</head><body><div id="background"><img src="background.jpg" class="stretch"/></div><h1 class="title">Ashley & Andrew</h1><p>Please feel free to leave comments ^_^</p>')
  f.write('<div class="p">Have something you\'d like to share? Upload it and I\'ll post it ASAP:<br/><form enctype="multipart/form-data"><input name="files[]" type="file" multiple/><input type="button" value="Upload!" disabled="disabled"></form><progress style="display:none;"></progress><div id="status"></div></div>')
  f.write("<div class='content'>#{content}</div>")
  # Per-album copyright / license notices.
  f.write('<div class="co-notice"><a class="left" rel="license" href="http://creativecommons.org/licenses/by-nc-nd/3.0/deed.en_US"><img alt="Creative Commons License" style="border-width:0" src="http://i.creativecommons.org/l/by-nc-nd/3.0/88x31.png"/></a>All work in the albums "Maui!" and "Maui Underwater" are licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-nc-nd/3.0/deed.en_US">Creative Commons Attribution-NonCommercial-NoDerivs 3.0 Unported License</a>.<br/>©Andrew Sabisch and Ashley Sabisch 2013–2014.</div>')
  f.write('<div class="co-notice">All work in the "Engagement" Album: © Lindsay Newcomb <a href="http://www.lindsaynewcomb.com/">http://www.lindsaynewcomb.com/</a></div>')
  f.write('<div class="co-notice">All work in the "Details", "Getting Ready", "Ceremony", "Bride and Groom", "Wedding Party", "Formal Portraits", "Reception, Part 1", and "Reception 2" Albums: ©Burns Photography <a href="http://burnsphotographystudio.com/">http://burnsphotographystudio.com/</a></div>')
  f.write('</body></html>')
  f.close

  # Copy tmp dir to final location.
  `rsync -avzP #$tmp_dir/ #$dst_dir/`
end
Moved info about images to image dirs.
#!/usr/bin/ruby -w
#
# Copyright (c) 2013 Andrew "Jamoozy" Correa,
#
# This file is part of Picture Viewer.
#
# Picture Viewer is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
require 'yaml'
require 'ftools'
require 'sqlite3'
require 'optparse'
include SQLite3
# Makes a page for an album according to the entry.
# entry: a Hash loaded from the entries YAML file.  Keys used here:
#   :loc (source image dir, containing data.yml), :sln (symlink name in
#   $tmp_dir), :bname (HTML basename), :title (album title).
# Per-image metadata now lives in "#{entry[:loc]}/data.yml".
# Side effects: creates a symlink and a per-album comments.db (SQLite)
# under $tmp_dir, and writes "#{$tmp_dir}/#{entry[:bname]}.html".
def make_page(entry)
  # Link the album's source directory into the staging dir (idempotent).
  `ln -s "#{entry[:loc]}" "#$tmp_dir/#{entry[:sln]}"` unless File.exists?("#$tmp_dir/#{entry[:sln]}")

  # Load the per-image metadata from the album directory.  YAML.load was
  # parsing the *path string* itself as YAML, not the file contents; use
  # load_file to read the file.
  images = YAML.load_file(entry[:loc] + '/data.yml')

  # Check if DB already exists. If not, create it.
  full_db_name = "#$tmp_dir/#{entry[:sln]}/comments.db"
  unless File.exists?(full_db_name)
    begin
      puts 'Making new DB at: ' + full_db_name
      $db = Database.new(full_db_name)
    rescue CantOpenException => e
      puts "Can't open db: ", full_db_name
      return
    end
    $db.execute("create table images (
id integer primary key,
name text,
title text
)")
    $db.execute("create table comments (
id integer primary key,
name text,
comment text,
utime datetime,
ip text,
img_id integer,
foreign key(img_id) references images(id)
)")

    # Initial entries for all the images.
    $db.transaction
    images.each do |img|
      $db.execute('insert into images (name,title) values (?,?)', [img[1], img[2]])
    end
    $db.commit

    # Set the right permissions for the webserver to handle the DB & prompt
    # the user to run the chown on the DB and its directory.
    `chmod 664 #{full_db_name}`
    puts "Please run \"sudo chown :www-data #{full_db_name} && sudo chown :www-data #$tmp_dir/#{entry[:sln]}\""
  else
    begin
      $db = Database.new(full_db_name)
    rescue CantOpenException => e
      puts "Can't open db: ", full_db_name
      # Bail out like the creation path above: $db is unset here, so
      # continuing would raise NoMethodError on nil.
      return
    end

    # Make sure all the images' titles are up to date.
    images.each do |img|
      r = $db.execute('select * from images where name=?', [img[1]])
      if !r || r.size < 1
        # New image: insert it.
        $db.execute('insert into images (name,title) values (?,?)', [img[1], img[2]])
      elsif r.size > 1
        # name should be unique; report duplicates rather than guessing.
        puts "Error: got #{r.size} results for img #{img[1]}"
      elsif r[0][2] != img[2]
        # Title changed in data.yml; sync it into the DB.
        puts "Updating #{img[1]} title to \"#{img[2]}\""
        $db.execute('update images set title=? where id=?', [img[2], r[0][0]])
      end
    end
  end

  # Write HTML file.
  html_name = "#$tmp_dir/#{entry[:bname]}.html"
  f = File.new(html_name, 'w')
  f.write('<!DOCTYPE html>')
  f.write('<html><head>')
  f.write('<meta charset="utf-8">')
  f.write("<title>#{entry[:title]}</title>")
  f.write('<link rel="stylesheet" type="text/css" href="style.css">')
  f.write('<script src="jquery-1.10.1.min.js" type="text/javascript"></script>')
  f.write('<script src="jail.min.js" type="text/javascript"></script>')
  f.write('<script src="photos.js" type="text/javascript"></script>')
  f.write('<link rel="stylesheet" type="text/css" href="http://fonts.googleapis.com/css?family=Tangerine|Oregano">')
  f.write('</head><body>')
  f.write('<div id="background"><img src="background.jpg" class="stretch"/></div>')
  f.write('<h1 class="subtitle">')
  f.write(entry[:title])
  f.write('</h1><div class="content"><ul>')
  # One list item per image; photos.js lazily swaps thumbnails for full images.
  images.each do |image|
    thumb_path = "#{entry[:sln]}/#{image[0]}"
    path = "#{entry[:sln]}/#{image[1]}"
    f.write("<li><span src=\"#{path}\"><img class=\"lazy\" data-src=\"#{thumb_path}\" src=\"loading.png\" title=\"#{image[1]}\" /><noscript><img src=\"#{thumb_path}\" title=\"#{image[1]}\"></noscript><div class=\"fname\">#{image[1]}</div></span>")
  end
  # Lightbox overlay markup used by photos.js (navigation arrows + comment form).
  f.write('</ul></div><div id="exit-bg"><div id="overlay"><div id="x"><img src="x.png""></div><div id="img-pane"><div id="left" class="navs"><img src="left-arrow.png"></div><div id="right" class="navs"><img src="right-arrow.png"></div><img id="image" src=""></div><div id="desc"></div><div id="comments"><ul class="comments-list"></ul><div id="form">Leave a comment!<br>Name:<input size="30" value="" id="name" type="text"><br><textarea cols="34" rows="5" id="comment"></textarea><input type="button" id="submit" value="Submit"></div><div id="full-size-dl"><a href="">(full size)</a></div></div></div></div>')
  # Fixed malformed closing tag ('<html>' -> '</html>').
  f.write('</body></html>')
  f.close
end
# Simple value object holding the parsed command-line options.
class Options
  attr_accessor :verbose  # print extra progress output
  attr_accessor :tmp      # staging directory for generated files
  attr_accessor :dst      # final destination directory (rsync target)
  attr_accessor :entries  # path to the YAML entries file
  # parse_args assigns options.tp for --transfer-protocol, but no accessor
  # existed, which raised NoMethodError when -T was used; declare it.
  attr_accessor :tp
end
# Parses command-line arguments (destructively, from ARGV) and returns an
# Options instance with defaults applied for anything not given.
def parse_args
  options = Options.new
  options.verbose = false
  options.tmp = '.gen'
  options.dst = '/home/jamoozy/www/pv'
  options.entries = 'entries.yaml'
  OptionParser.new do |opts|
    opts.banner = "Usage: maker.rb [options]"
    opts.on('-v', '--[no-]verbose', 'Run verbosely') do |v|
      puts 'verbose: ' + v.to_s
      options.verbose = v
    end
    # Fixed help-text typo: "Speicfy" -> "Specify".
    opts.on('-TTP', '--transfer-protocol=TP', 'Specify transfer protocol') do |tp|
      puts 'transfer-protocol: ' + tp.to_s
      options.tp = tp
    end
    opts.on('-dDST', '--destination=DST', 'Specify a destination') do |d|
      dst = File.expand_path(d.to_s)
      puts 'destination: ' + dst
      # Store the expanded path: the original printed the expanded path but
      # stored the raw one, inconsistently with -t and -y below.
      options.dst = dst
    end
    # Fixed help-text typo: "temorary" -> "temporary".
    opts.on('-tTMP', '--tmp=TMP', 'Specify temporary directory.') do |t|
      puts 'tmp: ' + t.to_s
      options.tmp = File.expand_path(t)
    end
    # Completed the truncated help text.
    opts.on('-yYML', '--yaml=YML', 'Specify YAML-formatted entries file') do |y|
      puts 'yml: ' + y.to_s
      options.entries = File.expand_path(y)
    end
  end.parse!
  options
end
# Entry point: stage all assets into $tmp_dir, generate one HTML page per
# album plus an index, then rsync the staging dir to the destination.
if __FILE__ == $0
  options = parse_args
  $tmp_dir = options.tmp
  $dst_dir = options.dst
  $entries = options.entries
  # NOTE(review): File.makedirs/File.exists? come from ftools (Ruby 1.8
  # era); ftools was removed in 1.9 -- confirm the target Ruby version.
  File.makedirs($tmp_dir) unless File.exists?($tmp_dir)
  File.makedirs($dst_dir) unless File.exists?($dst_dir)
  # Copy static assets (JS, CSS, icons, CGI scripts) into the staging dir.
  `cp icons/*.png .htaccess upload.rb dbi.rb *.js style.css background.jpg #$tmp_dir`

  # Load entries from the yaml file.
  entries = File.open($entries) {|f| YAML.load(f)}

  # Generate the list of albums for the main page.
  content = '<ul>'
  entries.each do |entry|
    html_name = "#{entry[:bname]}.html"
    content << "<li><a href=\"#{html_name}\"><img src=\"#{entry[:sln]}/#{entry[:thumb]}\"><br>#{entry[:title]}</a>"
    make_page(entry)
  end
  content << '</ul>'

  # Write index.html with the above content.
  f = File.new("#$tmp_dir/index.html", 'w')
  f.write('<!DOCTYPE html><html><head><meta charset="utf-8"><link rel="stylesheet" type="text/css" href="http://fonts.googleapis.com/css?family=Tangerine|Oregano"><link rel="stylesheet" type="text/css" href="style.css">')
  f.write('<script src="jquery-1.10.1.min.js" type="text/javascript"></script>')
  f.write('<script src="upload.js" type="text/javascript"></script>')
  f.write('</head><body><div id="background"><img src="background.jpg" class="stretch"/></div><h1 class="title">Ashley & Andrew</h1><p>Please feel free to leave comments ^_^</p>')
  f.write('<div class="p">Have something you\'d like to share? Upload it and I\'ll post it ASAP:<br/><form enctype="multipart/form-data"><input name="files[]" type="file" multiple/><input type="button" value="Upload!" disabled="disabled"></form><progress style="display:none;"></progress><div id="status"></div></div>')
  f.write("<div class='content'>#{content}</div>")
  # Per-album copyright / license notices.
  f.write('<div class="co-notice"><a class="left" rel="license" href="http://creativecommons.org/licenses/by-nc-nd/3.0/deed.en_US"><img alt="Creative Commons License" style="border-width:0" src="http://i.creativecommons.org/l/by-nc-nd/3.0/88x31.png"/></a>All work in the albums "Maui!" and "Maui Underwater" are licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-nc-nd/3.0/deed.en_US">Creative Commons Attribution-NonCommercial-NoDerivs 3.0 Unported License</a>.<br/>©Andrew Sabisch and Ashley Sabisch 2013–2014.</div>')
  f.write('<div class="co-notice">All work in the "Engagement" Album: © Lindsay Newcomb <a href="http://www.lindsaynewcomb.com/">http://www.lindsaynewcomb.com/</a></div>')
  f.write('<div class="co-notice">All work in the "Details", "Getting Ready", "Ceremony", "Bride and Groom", "Wedding Party", "Formal Portraits", "Reception, Part 1", and "Reception 2" Albums: ©Burns Photography <a href="http://burnsphotographystudio.com/">http://burnsphotographystudio.com/</a></div>')
  f.write('</body></html>')
  f.close

  # Copy tmp dir to final location.
  `rsync -avzP #$tmp_dir/ #$dst_dir/`
end
|
require 'erb'
require 'highline/import'
# A power/performance metric of the form energy^m * delay^n.
Metric = Struct.new(:energy_exp, :delay_exp)
# System power envelope bounds (W).  NOTE(review): the meaning of the
# "FPE" acronym isn't evident from this file -- confirm/rename.
FPE = Struct.new(:max_power, :min_power)
# A measured workload; power is derived as energy / time.
Code = Struct.new(:name, :energy, :time, :power)

# Predefined energy-delay metrics (exponents m, n for E^m * T^n).
module Metrics
  Energy = Metric.new(1.0, 0.0) # Simple energy
  EDP = Metric.new(1.0, 1.0) # Energy Delay Product
  ED2P = Metric.new(1.0, 2.0) # Energy Delay Squared Product
  ED3P = Metric.new(1.0, 3.0) # Energy Delay Cubed Product
end
# Interactively selects one of the predefined energy-delay metrics, or
# builds a custom Metric from user-supplied exponents.  Returns a Metric
# (the value of the chosen HighLine menu block).
def metric_parameters
  choose do |menu|
    menu.prompt = 'Please select a metric: '
    menu.choice('Energy') { Metrics::Energy }
    menu.choice('Energy Delay Product') { Metrics::EDP }
    menu.choice('Energy Delay Squared Product') { Metrics::ED2P }
    menu.choice('Energy Delay Cubed Product') { Metrics::ED3P }
    menu.choice('Custom') do
      # Build E^m * T^n with user-chosen exponents.
      custom = Metric.new
      custom.energy_exp = ask("Energy Exponent (m): ", Float)
      custom.delay_exp = ask("Delay Exponent (n): ", Float)
      custom
    end
  end
end
# Interactively prompts for the system power envelope (min first, so the
# max prompt can be validated against it) and returns an FPE.
def envelope_parameters
  min = ask('System Min Power (W): ', Float) { |q| q.above = 0 }
  max = ask('System Max Power (W): ', Float) { |q| q.above = min }
  FPE.new(max, min)
end
# Interactively prompts for a workload's name, energy, and runtime, and
# returns a Code whose power is derived as energy / time.
def code_parameters
  name = ask('Code Name: ')
  energy = ask('Code Energy (J): ', Float) { |q| q.above = 0 }
  time = ask('Code Time (S): ', Float) { |q| q.above = 0 }
  Code.new(name, energy, time, energy / time)
end
# TODO - add bounds checking for parameters
# TODO - support arguments or queries (make it query for missing arguments)
# TODO - support 2 out of 3 runtime / power / energy

# Interactive driver: collect metric, envelope, and code parameters,
# validate that the code's average power lies inside the envelope, then
# render the ERB report template to the chosen output file.
say 'POSE Model creation'
metric = metric_parameters()
envelope = envelope_parameters()
code = code_parameters()

# The measured power must fall inside the machine's power envelope.
# (Use || instead of `or`: `or` has surprisingly low precedence.)
if code.power > envelope.max_power || code.power < envelope.min_power
  raise 'Invalid Model Parameters'
end

erbfile = ask('Report Template: ') { |t| t.default = 'templates/report.erb' }
template = ERB.new(File.new(erbfile).read, nil, '-')
# Removed the space in `ask (...)`, which made Ruby warn about an
# ambiguous argument.
outfile = ask('Output Filename: ') { |o| o.default = 'report.tex' }
report = template.result(binding)
File.write(outfile, report)
Renamed the FPE struct to Envelope to better describe the system power envelope it represents
require 'erb'
require 'highline/import'
# A power/performance metric of the form energy^m * delay^n.
Metric = Struct.new(:energy_exp, :delay_exp)
# System power envelope bounds (W).
Envelope = Struct.new(:max_power, :min_power)
# A measured workload; power is derived as energy / time.
Code = Struct.new(:name, :energy, :time, :power)

# Predefined energy-delay metrics (exponents m, n for E^m * T^n).
module Metrics
  Energy = Metric.new(1.0, 0.0) # Simple energy
  EDP = Metric.new(1.0, 1.0) # Energy Delay Product
  ED2P = Metric.new(1.0, 2.0) # Energy Delay Squared Product
  ED3P = Metric.new(1.0, 3.0) # Energy Delay Cubed Product
end
# Interactively selects one of the predefined energy-delay metrics, or
# builds a custom Metric from user-supplied exponents.  Returns a Metric
# (the value of the chosen HighLine menu block).
def metric_parameters
  choose do |menu|
    menu.prompt = 'Please select a metric: '
    menu.choice('Energy') { Metrics::Energy }
    menu.choice('Energy Delay Product') { Metrics::EDP }
    menu.choice('Energy Delay Squared Product') { Metrics::ED2P }
    menu.choice('Energy Delay Cubed Product') { Metrics::ED3P }
    menu.choice('Custom') do
      # Build E^m * T^n with user-chosen exponents.
      custom = Metric.new
      custom.energy_exp = ask("Energy Exponent (m): ", Float)
      custom.delay_exp = ask("Delay Exponent (n): ", Float)
      custom
    end
  end
end
# Interactively prompts for the system power envelope (min first, so the
# max prompt can be validated against it) and returns an Envelope.
def envelope_parameters
  min = ask('System Min Power (W): ', Float) { |q| q.above = 0 }
  max = ask('System Max Power (W): ', Float) { |q| q.above = min }
  Envelope.new(max, min)
end
# Interactively prompts for a workload's name, energy, and runtime, and
# returns a Code whose power is derived as energy / time.
def code_parameters
  name = ask('Code Name: ')
  energy = ask('Code Energy (J): ', Float) { |q| q.above = 0 }
  time = ask('Code Time (S): ', Float) { |q| q.above = 0 }
  Code.new(name, energy, time, energy / time)
end
# TODO - add bounds checking for parameters
# TODO - support arguments or queries (make it query for missing arguments)
# TODO - support 2 out of 3 runtime / power / energy

# Interactive driver: collect metric, envelope, and code parameters,
# validate that the code's average power lies inside the envelope, then
# render the ERB report template to the chosen output file.
say 'POSE Model creation'
metric = metric_parameters()
envelope = envelope_parameters()
code = code_parameters()

# The measured power must fall inside the machine's power envelope.
# (Use || instead of `or`: `or` has surprisingly low precedence.)
if code.power > envelope.max_power || code.power < envelope.min_power
  raise 'Invalid Model Parameters'
end

erbfile = ask('Report Template: ') { |t| t.default = 'templates/report.erb' }
template = ERB.new(File.new(erbfile).read, nil, '-')
# Removed the space in `ask (...)`, which made Ruby warn about an
# ambiguous argument.
outfile = ask('Output Filename: ') { |o| o.default = 'report.tex' }
report = template.result(binding)
File.write(outfile, report)
|
require 'uri'
require 'net/http'
# Wraps a single true-random integer fetched from random.org's HTTP API.
# NOTE(review): the class name shadows ::Integer inside this namespace;
# consider renaming (kept as-is for interface compatibility).
module RandomOrg
  class Integer
    # Fetches one random integer in [min, max] from random.org.
    def initialize(min, max)
      begin
        res = Net::HTTP.get('www.random.org', "/integers/?num=1&min=#{min}&max=#{max}&col=1&base=10&format=plain&rnd=new").chomp
        @value = res.to_i
      rescue Net::HTTPFatalError => e
        # Interpolate the exception: `"Error: " + e` raised TypeError
        # (cannot concatenate an exception onto a String), masking the
        # original error.
        puts "Error: #{e}"
      end
    end

    # The fetched value (nil if the request failed before assignment).
    def to_i
      @value
    end
  end
end
Replaced the random.org-backed RandomOrg::Integer class with the PerfectRandom module: a linear congruential generator seeded once from random.org
require 'net/http'
# Linear congruential PRNG seeded with one true-random value fetched from
# random.org.  Uses the classic Numerical Recipes LCG constants:
# modulus 2^32, multiplier 1664525, increment 1013904223.
module PerfectRandom
  extend self

  M = 2**32
  A = 1664525
  C = 1013904223

  # Current generator state; advanced by each call to rand.
  @cur_val = 0

  # Seeds the generator from random.org.  min/max bound only the seed,
  # not subsequent rand output.  On HTTP failure the previous state is
  # kept (initially 0).
  def seed min=0, max=100000
    begin
      @cur_val = Net::HTTP.get('www.random.org', "/integers/?num=1&min=#{min}&max=#{max}&col=1&base=10&format=plain&rnd=new").chomp.to_i
    rescue Net::HTTPFatalError => e
      puts e
    end
  end

  # Advances the LCG and returns the new state.
  # NOTE(review): returns the raw value in 0...2^32, unlike Kernel#rand --
  # callers must scale/clamp themselves.
  def rand
    @cur_val = (A*@cur_val + C)%M
  end
end
# Demo: seed once from random.org (network call), then print three values.
PerfectRandom::seed
puts PerfectRandom::rand
puts PerfectRandom::rand
puts PerfectRandom::rand
require 'rubygems'
require 'bud'
require 'progress_timer'
# Raft leader election implemented in Bud (Bloom).  Work in progress:
# election timeout and vote counting are sketched; log replication and
# leader heartbeats are not implemented yet.
module Raft
  import ProgressTimer => :timer

  state do
    # see Figure 2 in Raft paper to see definitions of RPCs
    # TODO: do we need from field in responses?
    channel :request_vote_request, [:@dest, :from, :term, :last_log_index, :last_log_term]
    channel :request_vote_response, [:@dest, :from, :term, :is_granted]
    # NOTE(review): the :entries column name may collide with a reserved
    # Bud identifier -- confirm.
    channel :append_entries_request, [:@dest, :from, :term, :prev_log_index, :prev_log_term, :entries, :commit_index]
    channel :append_entries_response, [:@dest, :from, :term, :is_success]
    # all of the members in the system, host is respective ip_port
    table :members, [:host]
    table :server_state, [] => [:state]
    table :current_term, [] => [:term]
    # keep record of all votes
    table :votes, [:term, :from] => [:is_granted]
    scratch :votes_granted_in_current_term, [:from]
    scratch :request_vote_term_max, current_term.schema
    # NOTE(review): :vote_responses below references
    # all_votes_for_given_term, which is never declared here -- it
    # presumably needs a scratch declaration.
  end

  # TODO: is <= right to update an empty key in a table? does it overwrite or result in error?
  bootstrap do
    # add all the members of the system except yourself
    # TODO: create mechanism to add all members programatically
    members <= [['localhost:54321'], ['localhost:54322'], ['localhost:54323']]
    # TODO: is this going to work to remove yourself? need it to happen now, not later
    members <- [[ip_port]]
    server_state <= [['follower']]
    current_term <= [[1]]
    # start the timer with random timeout between 100-500 ms
    timer.set_alarm <= [['electionTimeout', 100 + rand(400)]]
  end

  # Election timeout fired: become a candidate and start a new election.
  bloom :timeout do
    # TODO: change timer so that we can just reset it, not name it every time
    # increment current term
    current_term <= (timer.alarm * current_term).pairs {|a,t| [t.term + 1]}
    # transition to candidate state
    server_state <= timer.alarm {|t| [['candidate']]}
    # vote for yourself
    votes <= (timer.alarm * current_term).pairs {|a,t| [t.term, ip_port, true]}
    # reset timer
    # TODO: do this correctly
    timer.set_alarm <= [['electionTimeout', 100 + rand(400)]]
    # send out request vote RPCs
    request_vote_request <= (timer.alarm * members * current_term).combos do |a,m,t|
      # TODO: put actual indicies in here after we implement logs
      [m.host, ip_port, t.term, 0, 0]
    end
  end

  # Candidate-side tallying of RequestVote responses.
  bloom :vote_counting do
    # step down to follower if our term is stale
    server_state <= (server_state * request_vote_response * current_term).combos do |s, v, t|
      ['follower'] if s.state == 'candidate' and v.term > t.term
    end
    # record votes if we are in the correct term
    votes <= (server_state * request_vote_response * current_term).combos do |s, v, t|
      [v.term, v.from, v.is_granted] if s.state == 'candidate' and v.term == t.term
    end
    # store votes granted in the current term
    votes_granted_in_current_term <= (server_state * votes * current_term).combos(votes.term => current_term.term) do |s, v, t|
      [v.from] if s.state == 'candidate' and v.is_granted
    end
    # if we have the majority of votes, then we are leader
    server_state <= server_state do |s|
      ['leader'] if s.state == 'candidate' and votes_granted_in_current_term.count > (members.count/2)
    end
  end

  bloom :vote_responses do
    # NOTE(review): all_votes_for_given_term is not declared in the state
    # block, so Bud cannot resolve this rule -- declare it or remove.
    all_votes_for_given_term <= (request_vote_response * current_term).pairs do |rv, ct|
      if ct.term <= rv.term
        # our terms match, or our term is stale
        [rv.term, rv.from, rv.from]
      end
      # otherwise the receiver term is stale and we do nothing
    end
    # update our term
    # NOTE(review): request_vote is not a declared collection -- probably
    # meant request_vote_response.
    request_vote_term_max <= request_vote.argmax([:term], :term) do |rv|
      [rv.term]
    end
    current_term <= (request_vote_term_max * current_term).pairs do |reqmax, ct|
      # NOTE(review): ct is a tuple; `ct < reqmax.term` likely intended
      # `ct.term < reqmax.term`.
      reqmax if ct < reqmax.term
    end
  end

  # TODO: leader heartbeat broadcasting is not implemented yet.
  bloom :send_heartbeats do
  end
end
Change column name from Bud reserved keyword
require 'rubygems'
require 'bud'
require 'progress_timer'
# Raft leader election implemented in Bud (Bloom).  Work in progress:
# election timeout and vote counting are sketched; log replication and
# leader heartbeats are not implemented yet.
module Raft
  import ProgressTimer => :timer

  state do
    # see Figure 2 in Raft paper to see definitions of RPCs
    # TODO: do we need from field in responses?
    channel :request_vote_request, [:@dest, :from, :term, :last_log_index, :last_log_term]
    channel :request_vote_response, [:@dest, :from, :term, :is_granted]
    # :request_entry renamed from :entries, which collided with a Bud
    # reserved identifier.
    channel :append_entries_request, [:@dest, :from, :term, :prev_log_index, :prev_log_term, :request_entry, :commit_index]
    channel :append_entries_response, [:@dest, :from, :term, :is_success]
    # all of the members in the system, host is respective ip_port
    table :members, [:host]
    table :server_state, [] => [:state]
    table :current_term, [] => [:term]
    # keep record of all votes
    table :votes, [:term, :from] => [:is_granted]
    scratch :votes_granted_in_current_term, [:from]
    scratch :request_vote_term_max, current_term.schema
    # NOTE(review): :vote_responses below references
    # all_votes_for_given_term, which is never declared here -- it
    # presumably needs a scratch declaration.
  end

  # TODO: is <= right to update an empty key in a table? does it overwrite or result in error?
  bootstrap do
    # add all the members of the system except yourself
    # TODO: create mechanism to add all members programatically
    members <= [['localhost:54321'], ['localhost:54322'], ['localhost:54323']]
    # TODO: is this going to work to remove yourself? need it to happen now, not later
    members <- [[ip_port]]
    server_state <= [['follower']]
    current_term <= [[1]]
    # start the timer with random timeout between 100-500 ms
    timer.set_alarm <= [['electionTimeout', 100 + rand(400)]]
  end

  # Election timeout fired: become a candidate and start a new election.
  bloom :timeout do
    # TODO: change timer so that we can just reset it, not name it every time
    # increment current term
    current_term <= (timer.alarm * current_term).pairs {|a,t| [t.term + 1]}
    # transition to candidate state
    server_state <= timer.alarm {|t| [['candidate']]}
    # vote for yourself
    votes <= (timer.alarm * current_term).pairs {|a,t| [t.term, ip_port, true]}
    # reset timer
    # TODO: do this correctly
    timer.set_alarm <= [['electionTimeout', 100 + rand(400)]]
    # send out request vote RPCs
    request_vote_request <= (timer.alarm * members * current_term).combos do |a,m,t|
      # TODO: put actual indicies in here after we implement logs
      [m.host, ip_port, t.term, 0, 0]
    end
  end

  # Candidate-side tallying of RequestVote responses.
  bloom :vote_counting do
    # step down to follower if our term is stale
    server_state <= (server_state * request_vote_response * current_term).combos do |s, v, t|
      ['follower'] if s.state == 'candidate' and v.term > t.term
    end
    # record votes if we are in the correct term
    votes <= (server_state * request_vote_response * current_term).combos do |s, v, t|
      [v.term, v.from, v.is_granted] if s.state == 'candidate' and v.term == t.term
    end
    # store votes granted in the current term
    votes_granted_in_current_term <= (server_state * votes * current_term).combos(votes.term => current_term.term) do |s, v, t|
      [v.from] if s.state == 'candidate' and v.is_granted
    end
    # if we have the majority of votes, then we are leader
    server_state <= server_state do |s|
      ['leader'] if s.state == 'candidate' and votes_granted_in_current_term.count > (members.count/2)
    end
  end

  bloom :vote_responses do
    # NOTE(review): all_votes_for_given_term is not declared in the state
    # block, so Bud cannot resolve this rule -- declare it or remove.
    all_votes_for_given_term <= (request_vote_response * current_term).pairs do |rv, ct|
      if ct.term <= rv.term
        # our terms match, or our term is stale
        [rv.term, rv.from, rv.from]
      end
      # otherwise the receiver term is stale and we do nothing
    end
    # update our term
    # NOTE(review): request_vote is not a declared collection -- probably
    # meant request_vote_response.
    request_vote_term_max <= request_vote.argmax([:term], :term) do |rv|
      [rv.term]
    end
    current_term <= (request_vote_term_max * current_term).pairs do |reqmax, ct|
      # NOTE(review): ct is a tuple; `ct < reqmax.term` likely intended
      # `ct.term < reqmax.term`.
      reqmax if ct < reqmax.term
    end
  end

  # TODO: leader heartbeat broadcasting is not implemented yet.
  bloom :send_heartbeats do
  end
end
|
# encoding: US-ASCII
require 'util/miq_winrm'
require 'Scvmm/miq_scvmm_parse_powershell'
require 'base64'
require 'securerandom'
require 'memory_buffer'
require 'rufus/lru'
# Reads the contents of a Hyper-V virtual disk remotely: byte ranges are
# fetched over WinRM by running PowerShell snippets on the Hyper-V host,
# fronted by an LRU block cache.
class MiqHyperVDisk
  MIN_SECTORS_TO_CACHE = 8 # granularity (in blocks) of cache entries
  DEF_BLOCK_CACHE_SIZE = 300 # max entries kept in the LRU block cache
  DEBUG_CACHE_STATS = false # when true, close dumps hit/miss statistics
  BREAD_RETRIES = 3 # attempts per remote read before giving up
  attr_reader :hostname, :virtual_disk, :file_offset, :file_size, :parser, :vm_name, :temp_snapshot_name
# Establishes a WinRM session to the Hyper-V host and initializes the
# block cache and timing counters.
# hyperv_host:: host name/IP of the Hyper-V server
# user/pass::   WinRM credentials
# port::        WinRM port (defaults to 5985)
# network::     when truthy, PowerShell runs elevated (see run_correct_powershell)
def initialize(hyperv_host, user, pass, port = nil, network = nil)
  @hostname = hyperv_host
  @winrm = MiqWinRM.new
  port ||= 5985
  @winrm.connect(:port => port, :user => user, :pass => pass, :hostname => @hostname)
  @parser = MiqScvmmParsePowershell.new
  # Fixed read granularity in bytes; all sector math below uses this.
  @block_size = 4096
  @file_size = 0
  # LRU cache mapping sector Ranges to raw data strings.
  @block_cache = LruHash.new(DEF_BLOCK_CACHE_SIZE)
  @cache_hits = Hash.new(0)
  @cache_misses = Hash.new(0)
  @network = network
  @total_read_execution_time = @total_copy_from_remote_time = 0
end
# Points this reader at a virtual disk file on the host and fetches its
# size via PowerShell (Get-Item ... .length).
def open(vm_disk)
  @virtual_disk = vm_disk
  @file_offset = 0
  stat_script = <<-STAT_EOL
(Get-Item "#{@virtual_disk}").length
  STAT_EOL
  file_size, stderr = @parser.parse_single_powershell_value(run_correct_powershell(stat_script))
  # "At line:" is how PowerShell error output is detected here.
  # NOTE(review): assumes stderr is always a String -- confirm the parser
  # never returns nil for it.
  raise "Unable to obtain virtual disk size for #{vm_disk}" if stderr.include?("At line:")
  @file_size = file_size.to_i
  @end_byte_addr = @file_size - 1
  # Round the size up to a whole number of blocks.
  @size_in_blocks, rem = @file_size.divmod(@block_size)
  @size_in_blocks += 1 if rem > 0
  @lba_end = @size_in_blocks - 1
end
# Size of the opened virtual disk in bytes (0 before open is called).
def size
  @file_size
end
# Resets the read position and drops the WinRM session.  Optionally dumps
# cache statistics first.
def close
  hit_or_miss if DEBUG_CACHE_STATS
  @file_offset = 0
  @winrm = nil
end
# Dumps per-sector cache hit/miss counters and timing totals to the debug
# log.  Only invoked when DEBUG_CACHE_STATS is enabled.
def hit_or_miss
  $log.debug "\nmiq_hyperv_disk cache hits:"
  @cache_hits.sort.each do |blk, count|
    $log.debug "block #{blk} - #{count}"
  end
  $log.debug "\nmiq_hyperv_disk cache misses:"
  @cache_misses.sort.each do |blk, count|
    $log.debug "block #{blk} - #{count}"
  end
  $log.debug "Total time spent copying reads from remote system is #{@total_copy_from_remote_time}"
  $log.debug "Total time spent transferring and decoding reads on local system is #{@total_read_execution_time - @total_copy_from_remote_time}"
  $log.debug "Total time spent processing remote reads is #{@total_read_execution_time}"
end
# Moves the logical read position, IO#seek-style.  whence selects whether
# offset is absolute (SEEK_SET), relative to the current position
# (SEEK_CUR), or relative to the last byte (SEEK_END).  Returns the new
# absolute offset.
def seek(offset, whence = IO::SEEK_SET)
  $log.debug "miq_hyperv_disk.seek(#{offset})"
  if whence == IO::SEEK_CUR
    @file_offset += offset
  elsif whence == IO::SEEK_END
    @file_offset = @end_byte_addr + offset
  elsif whence == IO::SEEK_SET
    @file_offset = offset
  end
  @file_offset
end
# Reads up to `size` bytes from the current position (clamped to EOF) and
# advances the position.  Returns nil once the position is at or past EOF.
def read(size)
  $log.debug "miq_hyperv_disk.read(#{size})"
  return nil if @file_offset >= @file_size
  size = [size, @file_size - @file_offset].min
  first_sector, intra_offset = @file_offset.divmod(@block_size)
  last_sector = (@file_offset + size - 1) / @block_size
  data = bread_cached(first_sector, last_sector - first_sector + 1)
  @file_offset += size
  data[intra_offset, size]
end
# Reads number_sectors blocks starting at start_sector, satisfying as much
# of the request as possible from the LRU block cache.  Ranges not found
# are fetched via bread at entry_range granularity and cached.
def bread_cached(start_sector, number_sectors)
  $log.debug "miq_hyperv_disk.bread_cached(#{start_sector}, #{number_sectors})"
  @block_cache.keys.each do |block_range|
    sector_offset = start_sector - block_range.first
    buffer_offset = sector_offset * @block_size
    if block_range.include?(start_sector) && block_range.include?(start_sector + number_sectors - 1)
      # Full hit: the whole request lies inside this cached range.
      length = number_sectors * @block_size
      @cache_hits[start_sector] += 1
      return @block_cache[block_range][buffer_offset, length]
    elsif block_range.include?(start_sector)
      # Head hit: the request starts inside this range and runs past its
      # end.  The range covers (last - start + 1) sectors of the request;
      # the original off-by-one (missing +1) dropped the range's last
      # sector from the returned data.
      sectors_in_range = block_range.last - start_sector + 1
      length = sectors_in_range * @block_size
      remaining_blocks = number_sectors - sectors_in_range
      @cache_hits[start_sector] += 1
      return @block_cache[block_range][buffer_offset, length] + bread_cached(block_range.last + 1, remaining_blocks)
    elsif block_range.include?(start_sector + number_sectors - 1)
      # Tail hit: the request ends inside this range but starts before it.
      # The cached range's data begins at buffer byte 0; the original
      # sliced from byte block_range.first (a sector number, not a byte
      # offset), returning the wrong bytes.
      sectors_in_range = (start_sector + number_sectors) - block_range.first
      length = sectors_in_range * @block_size
      remaining_blocks = number_sectors - sectors_in_range
      @cache_hits[start_sector] += 1
      return bread_cached(start_sector, remaining_blocks) + @block_cache[block_range][0, length]
    end
  end
  # Miss: fetch a full cache-aligned range remotely and add it to the cache.
  block_range = entry_range(start_sector, number_sectors)
  @block_cache[block_range] = bread(block_range.first, block_range.last - block_range.first + 1)
  @cache_misses[start_sector] += 1
  sector_offset = start_sector - block_range.first
  buffer_offset = sector_offset * @block_size
  length = number_sectors * @block_size
  @block_cache[block_range][buffer_offset, length]
end
# Read `number_sectors` sectors starting at `start_sector` from the remote
# VHD via a PowerShell script run over WinRM; the bytes come back
# Base64-encoded. Retries up to BREAD_RETRIES times on a short read and
# raises if the expected byte count is never received.
def bread(start_sector, number_sectors)
  log_header = "MIQ(#{self.class.name}.#{__method__}:"
  $log.debug "#{log_header} (#{start_sector}, #{number_sectors})"
  return nil if start_sector > @lba_end
  # Clamp reads that would run past the end of the disk.
  number_sectors = @size_in_blocks - start_sector if (start_sector + number_sectors) > @size_in_blocks
  expected_bytes = number_sectors * @block_size
  read_script = <<-READ_EOL
$file_stream = [System.IO.File]::Open("#{@virtual_disk}", "Open", "Read", "Read")
$buffer = New-Object System.Byte[] #{number_sectors * @block_size}
$file_stream.seek(#{start_sector * @block_size}, 0)
$file_stream.read($buffer, 0, #{expected_bytes})
[System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes($buffer))
$file_stream.Close()
  READ_EOL
  # Bug fix: `buffer` was local to the retry block, so the raise below hit a
  # NameError instead of reporting the short-read size. Track the last size
  # in the enclosing scope.
  last_size = 0
  (0...BREAD_RETRIES).each do
    t1 = Time.now.getlocal
    encoded_data = @parser.output_to_attribute(run_correct_powershell(read_script))
    t2 = Time.now.getlocal
    buffer = ""
    Base64.decode64(encoded_data).split(' ').each { |c| buffer += c.to_i.chr }
    @total_copy_from_remote_time += t2 - t1
    @total_read_execution_time += Time.now.getlocal - t1
    return buffer if expected_bytes == buffer.size
    last_size = buffer.size
    $log.debug "#{log_header} expected #{expected_bytes} bytes - got #{buffer.size}"
  end
  raise "#{log_header} expected #{expected_bytes} bytes - got #{last_size}"
end
# Create a temporary checkpoint of the named VM and remember its name so
# delete_snap can remove it later.
#
# Bug fix: @temp_snapshot_name was assigned twice with two different
# SecureRandom.hex values — the checkpoint was created with the first name
# while the second was stored, so delete_snap could never find it.
def snap(vm_name)
  @vm_name = vm_name
  @temp_snapshot_name = vm_name + SecureRandom.hex
  snap_script = <<-SNAP_EOL
Checkpoint-VM -Name #{@vm_name} -SnapshotName #{@temp_snapshot_name}
  SNAP_EOL
  @winrm.run_powershell_script(snap_script)
end
# Remove the temporary checkpoint previously created by #snap, using the
# VM name and snapshot name remembered there.
def delete_snap
delete_snap_script = <<-DELETE_SNAP_EOL
Remove-VMSnapShot -VMName #{@vm_name} -Name #{@temp_snapshot_name}
DELETE_SNAP_EOL
@winrm.run_powershell_script(delete_snap_script)
end
private
# Execute the script over WinRM, elevating when the disk lives on a
# network path (elevation is required to read it).
def run_correct_powershell(script)
  if @network
    @winrm.run_elevated_powershell_script(script)
  else
    @winrm.run_powershell_script(script)
  end
end
# Align a sector request onto MIN_SECTORS_TO_CACHE-sized cache blocks and
# return the inclusive sector Range that fully covers the request.
#
# Bug fix: the old code used `number_sectors % MIN_SECTORS_TO_CACHE` as a
# block count, which produced an empty range whenever number_sectors was an
# exact multiple of the block size (entry_range(0, 64) => 0..-1) and wildly
# over-allocated otherwise. Use ceiling arithmetic on block indexes.
def entry_range(start_sector, number_sectors)
  first_block = start_sector / MIN_SECTORS_TO_CACHE
  last_block = (start_sector + number_sectors - 1) / MIN_SECTORS_TO_CACHE
  real_start_sector = first_block * MIN_SECTORS_TO_CACHE
  end_sector = (last_block + 1) * MIN_SECTORS_TO_CACHE - 1
  Range.new(real_start_sector, end_sector)
end
end
Rewrite Powershell Encoding and Ruby Decoding for bread method
1) Change buffer size from 32K to 128K.
2) Increase number of buffer cache entries from 300 to 1200.
3) Change the PowerShell encoding from ToBase64String to ToBase64CharArray and use
[string]::join
4) The join above will simplify the ruby decoding step - no longer need the loop to split
the string on spaces.
# encoding: US-ASCII
require 'util/miq_winrm'
require 'Scvmm/miq_scvmm_parse_powershell'
require 'base64'
require 'securerandom'
require 'memory_buffer'
require 'rufus/lru'
# Remote-read access to a Hyper-V virtual disk over WinRM/PowerShell, with
# an LRU cache of sector ranges to amortize the round-trip cost.
class MiqHyperVDisk
  # Remote reads and cache entries are aligned to this many sectors.
  MIN_SECTORS_TO_CACHE = 64
  # Maximum number of entries in the LRU block cache.
  DEF_BLOCK_CACHE_SIZE = 1200
  # When true, dump cache hit/miss statistics on close.
  DEBUG_CACHE_STATS = false
  # Remote reads are retried this many times before raising.
  BREAD_RETRIES = 3

  attr_reader :hostname, :virtual_disk, :file_offset, :file_size, :parser, :vm_name, :temp_snapshot_name

  # Open a WinRM session to the Hyper-V host. `network` marks a disk on a
  # network path, which requires elevated PowerShell to read.
  def initialize(hyperv_host, user, pass, port = nil, network = nil)
    @hostname = hyperv_host
    @winrm = MiqWinRM.new
    port ||= 5985
    @winrm.connect(:port => port, :user => user, :pass => pass, :hostname => @hostname)
    @parser = MiqScvmmParsePowershell.new
    @block_size = 4096
    @file_size = 0
    @block_cache = LruHash.new(DEF_BLOCK_CACHE_SIZE)
    @cache_hits = Hash.new(0)
    @cache_misses = Hash.new(0)
    @network = network
    @total_read_execution_time = @total_copy_from_remote_time = 0
  end

  # Stat the remote disk file and derive its size in bytes and sectors.
  # Raises when the remote Get-Item call reports an error.
  def open(vm_disk)
    @virtual_disk = vm_disk
    @file_offset = 0
    stat_script = <<-STAT_EOL
(Get-Item "#{@virtual_disk}").length
    STAT_EOL
    file_size, stderr = @parser.parse_single_powershell_value(run_correct_powershell(stat_script))
    raise "Unable to obtain virtual disk size for #{vm_disk}" if stderr.include?("At line:")
    @file_size = file_size.to_i
    @end_byte_addr = @file_size - 1
    @size_in_blocks, rem = @file_size.divmod(@block_size)
    @size_in_blocks += 1 if rem > 0
    @lba_end = @size_in_blocks - 1
  end

  # Size of the remote disk file in bytes.
  def size
    @file_size
  end

  # Drop the WinRM handle; optionally dump cache statistics first.
  def close
    hit_or_miss if DEBUG_CACHE_STATS
    @file_offset = 0
    @winrm = nil
  end

  # Log per-sector cache hit/miss counts and accumulated read timings.
  def hit_or_miss
    $log.debug "\nmiq_hyperv_disk cache hits:"
    @cache_hits.keys.sort.each do |block|
      $log.debug "block #{block} - #{@cache_hits[block]}"
    end
    $log.debug "\nmiq_hyperv_disk cache misses:"
    @cache_misses.keys.sort.each do |block|
      $log.debug "block #{block} - #{@cache_misses[block]}"
    end
    $log.debug "Total time spent copying reads from remote system is #{@total_copy_from_remote_time}"
    $log.debug "Total time spent transferring and decoding reads on local system is #{@total_read_execution_time - @total_copy_from_remote_time}"
    $log.debug "Total time spent processing remote reads is #{@total_read_execution_time}"
  end

  # IO#seek-compatible positioning; returns the new absolute offset.
  def seek(offset, whence = IO::SEEK_SET)
    $log.debug "miq_hyperv_disk.seek(#{offset})"
    case whence
    when IO::SEEK_CUR
      @file_offset += offset
    when IO::SEEK_END
      @file_offset = @end_byte_addr + offset
    when IO::SEEK_SET
      @file_offset = offset
    end
    @file_offset
  end

  # Read `size` bytes from the current offset through the sector cache.
  # Returns nil at/after end of file; reads are clamped to the file size.
  def read(size)
    $log.debug "miq_hyperv_disk.read(#{size})"
    return nil if @file_offset >= @file_size
    size = @file_size - @file_offset if (@file_offset + size) > @file_size
    start_sector, start_offset = @file_offset.divmod(@block_size)
    end_sector = (@file_offset + size - 1) / @block_size
    number_sectors = end_sector - start_sector + 1
    @file_offset += size
    bread_cached(start_sector, number_sectors)[start_offset, size]
  end

  # Serve a sector-range read from the LRU cache, recursing for requests
  # that only partially overlap a cached entry and falling back to a remote
  # read on a full miss. Cache keys are inclusive sector Ranges.
  def bread_cached(start_sector, number_sectors)
    $log.debug "miq_hyperv_disk.bread_cached(#{start_sector}, #{number_sectors})"
    @block_cache.keys.each do |block_range|
      sector_offset = start_sector - block_range.first
      buffer_offset = sector_offset * @block_size
      if block_range.include?(start_sector) && block_range.include?(start_sector + number_sectors - 1)
        # Fully cached: slice directly out of the cached buffer.
        length = number_sectors * @block_size
        @cache_hits[start_sector] += 1
        return @block_cache[block_range][buffer_offset, length]
      elsif block_range.include?(start_sector)
        # Head cached, tail not. Bug fix: the range is inclusive, so the
        # cached head spans (last - start + 1) sectors; the old count left
        # the sector at block_range.last covered by neither piece.
        sectors_in_range = block_range.last - start_sector + 1
        length = sectors_in_range * @block_size
        remaining_blocks = number_sectors - sectors_in_range
        @cache_hits[start_sector] += 1
        return @block_cache[block_range][buffer_offset, length] + bread_cached(block_range.last + 1, remaining_blocks)
      elsif block_range.include?(start_sector + number_sectors - 1)
        # Tail cached, head not. Bug fix: the cached slice starts at byte 0
        # of the entry's own buffer, not at byte block_range.first.
        sectors_in_range = (start_sector + number_sectors) - block_range.first
        length = sectors_in_range * @block_size
        remaining_blocks = number_sectors - sectors_in_range
        @cache_hits[start_sector] += 1
        return bread_cached(start_sector, remaining_blocks) + @block_cache[block_range][0, length]
      end
    end
    # Full miss: fetch an aligned super-range remotely, cache it, and slice.
    block_range = entry_range(start_sector, number_sectors)
    @block_cache[block_range] = bread(block_range.first, block_range.last - block_range.first + 1)
    @cache_misses[start_sector] += 1
    sector_offset = start_sector - block_range.first
    buffer_offset = sector_offset * @block_size
    length = number_sectors * @block_size
    @block_cache[block_range][buffer_offset, length]
  end

  # Read sectors from the remote VHD via PowerShell over WinRM. The remote
  # side Base64-encodes the bytes with ToBase64CharArray + join; decode with
  # a single Base64.decode64. Retries short/empty reads, then raises.
  def bread(start_sector, number_sectors)
    log_header = "MIQ(#{self.class.name}.#{__method__}:"
    $log.debug "#{log_header} (#{start_sector}, #{number_sectors})"
    return nil if start_sector > @lba_end
    number_sectors = @size_in_blocks - start_sector if (start_sector + number_sectors) > @size_in_blocks
    expected_bytes = number_sectors * @block_size
    read_script = <<-READ_EOL
$file_stream = [System.IO.File]::Open("#{@virtual_disk}", "Open", "Read", "Read")
$bufsize = #{number_sectors * @block_size}
$buffer = New-Object System.Byte[] $bufsize
$encodedbuflen = $bufsize * 4 / 3
if (($encodedbuflen % 4) -ne 0)
{
$encodedbuflen += 4 - ($encodedbuflen % 4)
}
$encodedarray = New-Object Char[] $encodedbuflen
$file_stream.seek(#{start_sector * @block_size}, 0)
$file_stream.read($buffer, 0, #{expected_bytes})
$file_stream.Close()
[System.Convert]::ToBase64CharArray($buffer, 0, $bufsize, $encodedarray, 0)
[string]::join("", $encodedarray)
    READ_EOL
    # Bug fixes: `continue` is not Ruby (use `next`), and decoded_size was
    # local to the retry block, so the raise below hit a NameError.
    decoded_size = 0
    BREAD_RETRIES.times do |attempt|
      t1 = Time.now.getlocal
      encoded_data = @parser.output_to_attribute(run_correct_powershell(read_script))
      if encoded_data.empty?
        $log.debug "#{log_header} no encoded data returned on attempt #{attempt}"
        next
      end
      t2 = Time.now.getlocal
      decoded_data = Base64.decode64(encoded_data)
      @total_copy_from_remote_time += t2 - t1
      @total_read_execution_time += Time.now.getlocal - t1
      decoded_size = decoded_data.size
      return decoded_data if expected_bytes == decoded_size
      $log.debug "#{log_header} expected #{expected_bytes} bytes - got #{decoded_size} on attempt #{attempt}"
    end
    raise "#{log_header} expected #{expected_bytes} bytes - got #{decoded_size}"
  end

  # Create a temporary checkpoint of the named VM, remembering its name for
  # delete_snap. Bug fix: @temp_snapshot_name was assigned twice with two
  # different random suffixes — the checkpoint used the first name while the
  # second was stored, so delete_snap could never find it.
  def snap(vm_name)
    @vm_name = vm_name
    @temp_snapshot_name = vm_name + SecureRandom.hex
    snap_script = <<-SNAP_EOL
Checkpoint-VM -Name #{@vm_name} -SnapshotName #{@temp_snapshot_name}
    SNAP_EOL
    @winrm.run_powershell_script(snap_script)
  end

  # Remove the checkpoint previously created by #snap.
  def delete_snap
    delete_snap_script = <<-DELETE_SNAP_EOL
Remove-VMSnapShot -VMName #{@vm_name} -Name #{@temp_snapshot_name}
    DELETE_SNAP_EOL
    @winrm.run_powershell_script(delete_snap_script)
  end

  private

  # Run the script over WinRM, elevated when the disk is on a network path.
  def run_correct_powershell(script)
    return @winrm.run_elevated_powershell_script(script) if @network
    @winrm.run_powershell_script(script)
  end

  # Align a sector request onto MIN_SECTORS_TO_CACHE-sized cache blocks and
  # return the inclusive sector Range covering the request. Bug fix: the old
  # `number_sectors % MIN_SECTORS_TO_CACHE` collapsed to an empty range when
  # number_sectors was an exact multiple of the block size and over-allocated
  # otherwise; use ceiling arithmetic on block indexes.
  def entry_range(start_sector, number_sectors)
    first_block = start_sector / MIN_SECTORS_TO_CACHE
    last_block = (start_sector + number_sectors - 1) / MIN_SECTORS_TO_CACHE
    real_start_sector = first_block * MIN_SECTORS_TO_CACHE
    end_sector = (last_block + 1) * MIN_SECTORS_TO_CACHE - 1
    Range.new(real_start_sector, end_sector)
  end
end
|
#!/usr/bin/ruby
require 'optparse'
# Default configuration, applied before CLI parsing.
options = {
  :exclusion_prefixes => "NS|UI|CA|CG|CI",
  :derived_data_paths => ["~/Library/Developer/Xcode/DerivedData", "~/Library/Caches/appCode*/DerivedData"],
  :project_name       => ""
}
# DerivedData project directories look like "<Project>-<hash>" by default.
derived_data_project_pattern = "*-*"
# CLI definition. Note: o.parse! is invoked inside the OptionParser.new
# block, so ARGV is consumed during construction.
parser = OptionParser.new do |o|
o.separator "General options:"
o.on('-p PATH', "Path to directory where are your .o files were placed by the compiler") { |directory|
options[:search_directory] = directory
}
# Overriding DerivedData also relaxes the project-directory pattern.
o.on('-D DERIVED_DATA', "Path to directory where DerivedData is") { |derived_data|
options[:derived_data_paths] = [derived_data]
derived_data_project_pattern = "*"
}
o.on('-s PROJECT_NAME', "Search project .o files by specified project name") { |project_name|
options[:project_name] = project_name
}
o.on('-t TARGET_NAME', "Target of project") { |target_name|
options[:target_name] = target_name
}
o.on('-e PREFIXES', "Prefixes of classes those will be exсluded from visualization. \n\t\t\t\t\tNS|UI\n\t\t\t\t\tUI|CA|MF") { |exclusion_prefixes|
options[:exclusion_prefixes] = exclusion_prefixes
}
o.on("-d", "--use-dwarf-info", "Use DWARF Information also") { |v|
options[:use_dwarf] = v
}
o.separator "Common options:"
o.on_tail('-h', "Prints this help") { puts o; exit }
o.parse!
end
# No explicit -p path: locate the most recently modified per-arch object
# directory under DerivedData for the requested project/target.
if !options[:search_directory]
  paths = []
  # looking for derived data
  options[:derived_data_paths].each do |derived_data_path|
    # NOTE(review): without grouping parens the inner find's -exec binds only
    # to the last -name clause; appears to rely on BSD find behavior.
    IO.popen("find #{derived_data_path} -name \"#{options[:project_name]}#{derived_data_project_pattern}\" -type d -depth 1 -exec find {} -type d -name \"i386\" -o -name \"armv*\" -o -name \"x86_64\" \\; ") { |f|
      f.each do |line|
        paths << line
      end
    }
  end
  $stderr.puts "There were #{paths.length} directories found"
  if paths.empty?
    $stderr.puts "Cannot find projects that starts with '#{options[:project_name]}'"
    exit 1
  end
  filtered_by_target_paths = paths
  if options[:target_name]
    filtered_by_target_paths = paths.find_all { |path| /#{options[:target_name]}[^\.]*\.build\/Objects-normal/.match path }
    $stderr.puts "After target filtration there is #{filtered_by_target_paths.length} directories left"
    # Bug fix: previously tested `paths.empty?`, which cannot be true here
    # (we exited above otherwise), so an empty filtered list fell through and
    # crashed later on `paths_sorted_by_time.last.chomp`.
    if filtered_by_target_paths.empty?
      $stderr.puts "Cannot find projects that starts with '#{options[:project_name]}'' and has target name that starts with '#{options[:target_name]}'"
      exit 1
    end
  end
  # Pick the directory with the newest change time.
  paths_sorted_by_time = filtered_by_target_paths.sort_by{ |f|
    File.ctime(f.chomp)
  }
  last_modified_dir = paths_sorted_by_time.last.chomp
  $stderr.puts "Last modifications were in\n#{last_modified_dir}\ndirectory at\n#{File.ctime(last_modified_dir)}"
  options[:search_directory] = last_modified_dir
end
# Still no search directory: print usage and bail.
unless options[:search_directory]
  puts parser.help
  exit 1
end
# Emit the opening of the JS data structure consumed by the visualizer.
puts <<-THEEND
var dependencies = {
links:
[
THEEND
# source class name => { destination class name => marker }
links = {}
#Searching all the .o files and showing its information through nm call
# Each undefined _OBJC_CLASS_$_<Name> symbol in <file>.o becomes an edge
# <file> -> <Name>, unless <Name> starts with an excluded prefix.
IO.popen("find \"#{options[:search_directory]}\" -name \"*.o\" -exec /usr/bin/nm -o {} \\;") { |f|
f.each do |line|
# Gathering only OBC_CLASSES
match = /_OBJC_CLASS_\$_/.match line
if match != nil
exclusion_match = /_OBJC_CLASS_\$_(#{options[:exclusion_prefixes]})/.match line
# Excluding base frameworks prefixes
if exclusion_match == nil
#Capturing filename (We'll think that this is source)
#And dependency (We'll think that this is the destination)
source,dest = /[^\w]*([^\.\/]+)\.o.*_OBJC_CLASS_\$_(.*)/.match(line)[1,2]
if source != dest
# Lazily create the destination set for this source file.
destinations = links[source] ? links[source] : (links[source] = {})
destinations[dest] = "set up"
end
end
end
end
}
# Optional second pass: mine DWARF type info for pointer-type references,
# which catches dependencies that never show up as ObjC class symbols.
if options[:use_dwarf]
# Search files again
IO.popen("find \"#{options[:search_directory]}\" -name \"*.o\"") { |f|
f.each do |line|
# puts "Running dwarfdump #{line} | grep -A1 TAG_pointer_type"
source = /.*\/(.+)\.o/.match(line)[1]
IO.popen("dwarfdump #{line.strip} | grep -A1 TAG_pointer_type") { |fd|
fd.each do |line2|
# Finding the name in types
# AT_type( {0x00000456} ( objc_object ) )
name = /.*?AT_type\(\s\{.*?\}.*\(\s((function|const)\s)?([A-Z][^\)]+?)\*?\s\).*/.match(line2)
if name != nil
dest = name[3]
# Skip excluded framework prefixes; BOOL is a false positive.
if /^(#{options[:exclusion_prefixes]})/.match(dest) == nil
if source != dest and dest != "BOOL"
destinations = links[source] ? links[source] : (links[source] = {})
destinations[dest] = "set up"
end
end
end
end
}
end
}
end
# Emit each edge, then close the JS object with summary counts.
sources_count = links.length
links_count = links.values.inject(0) { |sum, dests| sum + dests.length }
links.each do |source, dest_hash|
  dest_hash.each_key do |dest|
    puts " { \"source\" : \"#{source}\", \"dest\" : \"#{dest}\" },"
  end
end
puts <<-THEEND
],
"source_files_count":#{sources_count},
"links_count":#{links_count},
}
;
THEEND
Added Swift dependencies support
#!/usr/bin/ruby
require 'optparse'
require 'yaml'
# Default configuration, applied before CLI parsing.
options = {
  :exclusion_prefixes => "NS|UI|CA|CG|CI|CF",
  :derived_data_paths => ["~/Library/Developer/Xcode/DerivedData", "~/Library/Caches/appCode*/DerivedData"],
  :project_name       => ""
}
# DerivedData project directories look like "<Project>-<hash>" by default.
derived_data_project_pattern = "*-*"
# CLI definition. Note: o.parse! is invoked inside the OptionParser.new
# block, so ARGV is consumed during construction.
parser = OptionParser.new do |o|
o.separator "General options:"
o.on('-p PATH', "Path to directory where are your .o files were placed by the compiler") { |directory|
options[:search_directory] = directory
}
# Overriding DerivedData also relaxes the project-directory pattern.
o.on('-D DERIVED_DATA', "Path to directory where DerivedData is") { |derived_data|
options[:derived_data_paths] = [derived_data]
derived_data_project_pattern = "*"
}
o.on('-s PROJECT_NAME', "Search project .o files by specified project name") { |project_name|
options[:project_name] = project_name
}
o.on('-t TARGET_NAME', "Target of project") { |target_name|
options[:target_name] = target_name
}
o.on('-e PREFIXES', "Prefixes of classes those will be exсluded from visualization. \n\t\t\t\t\tNS|UI\n\t\t\t\t\tUI|CA|MF") { |exclusion_prefixes|
options[:exclusion_prefixes] = exclusion_prefixes
}
o.on("-d", "--use-dwarf-info", "Use DWARF Information also") { |v|
options[:use_dwarf] = v
}
# Swift mode reads .swiftdeps YAML instead of nm/dwarf output.
o.on("-w", "--swift-dependencies", "Generate swift project dependencies") { |v|
options[:swift_dependencies] = v
}
o.separator "Common options:"
o.on_tail('-h', "Prints this help") { puts o; exit }
o.parse!
end
# No explicit -p path: locate the most recently modified per-arch object
# directory under DerivedData for the requested project/target.
if !options[:search_directory]
  paths = []
  # looking for derived data
  options[:derived_data_paths].each do |derived_data_path|
    # NOTE(review): without grouping parens the inner find's -exec binds only
    # to the last -name clause; appears to rely on BSD find behavior.
    IO.popen("find #{derived_data_path} -name \"#{options[:project_name]}#{derived_data_project_pattern}\" -type d -depth 1 -exec find {} -type d -name \"i386\" -o -name \"armv*\" -o -name \"x86_64\" \\; ") { |f|
      f.each do |line|
        paths << line
      end
    }
  end
  $stderr.puts "There were #{paths.length} directories found"
  if paths.empty?
    $stderr.puts "Cannot find projects that starts with '#{options[:project_name]}'"
    exit 1
  end
  filtered_by_target_paths = paths
  if options[:target_name]
    filtered_by_target_paths = paths.find_all { |path| /#{options[:target_name]}[^\.]*\.build\/Objects-normal/.match path }
    $stderr.puts "After target filtration there is #{filtered_by_target_paths.length} directories left"
    # Bug fix: previously tested `paths.empty?`, which cannot be true here
    # (we exited above otherwise), so an empty filtered list fell through and
    # crashed later on `paths_sorted_by_time.last.chomp`.
    if filtered_by_target_paths.empty?
      $stderr.puts "Cannot find projects that starts with '#{options[:project_name]}'' and has target name that starts with '#{options[:target_name]}'"
      exit 1
    end
  end
  # Pick the directory with the newest change time.
  paths_sorted_by_time = filtered_by_target_paths.sort_by { |f|
    File.ctime(f.chomp)
  }
  last_modified_dir = paths_sorted_by_time.last.chomp
  $stderr.puts "Last modifications were in\n#{last_modified_dir}\ndirectory at\n#{File.ctime(last_modified_dir)}"
  options[:search_directory] = last_modified_dir
end
# Still no search directory: print usage and bail.
unless options[:search_directory]
  puts parser.help
  exit 1
end
# Emit the opening of the JS data structure consumed by the visualizer.
puts <<-THEEND
var dependencies = {
links:
[
THEEND
# source class name => { destination class name => marker }
links = {}
# True for Swift standard-library value/primitive type names that should
# never appear as dependency-graph destinations.
def is_primitive_swift_type?(dest)
  !/^(Int|Int32|Int64|Int16|Int8|UInt|UInt32|UInt64|UInt16|UInt8|String|Character|Bool|Float|Double|Dictionary|Array|Set|AnyObject|Void)$/.match(dest).nil?
end
# True for compiler-generated protocol suffixes (Cluster/Scalar/Literal
# types) that only add noise to the graph.
def is_filtered_swift_type?(dest)
  !(dest =~ /(ClusterType|ScalarType|LiteralType)$/).nil? #or /^([a-z])/.match(dest) != nil
end
# A name is a valid graph destination when it does not carry an excluded
# framework prefix, starts with a word character, and is neither a Swift
# primitive nor a filtered protocol-suffix type.
def can_be_used_as_destination(dest, exclusion_prefixs)
  return false unless /^(#{exclusion_prefixs})/.match(dest).nil?
  return false if /^\w/.match(dest).nil?
  !is_primitive_swift_type?(dest) && !is_filtered_swift_type?(dest)
end
# Swift mode: read the compiler's .swiftdeps YAML. Files providing a single
# declaration link it straight to its top-level references; files providing
# several declarations get an intermediate "< filename >" node so the graph
# shows which file groups them.
if options[:swift_dependencies]
# This thing need to be commented :) It's removes too many connections
# YAML.add_domain_type("", "private") { |type, val|
# 'AnyObject'
# }
Dir.glob("#{options[:search_directory]}/*.swiftdeps") do |my_text_file|
# puts my_text_file
# NOTE(review): full YAML.load_file — acceptable for trusted build output.
swiftdeps = YAML.load_file(my_text_file)
if swiftdeps["provides"] && swiftdeps["provides"].length == 1
swiftdeps["provides"].each { |source|
destinations = links[source] ? links[source] : (links[source] = {})
swiftdeps["top-level"].each { |unparseddest|
# puts unparseddest
dest = unparseddest
if can_be_used_as_destination(dest, options[:exclusion_prefixes])
destinations[dest] = "set up"
end
}
}
elsif swiftdeps["provides"]
# Multiple declarations in one file: route them through a file node.
classes_declared_in_file = swiftdeps["provides"]
filename = '< ' + File.basename(my_text_file, ".swiftdeps") +' >'
swiftdeps["provides"].each { |source|
destinations = links[source] ? links[source] : (links[source] = {})
destinations[filename] = "set up"
}
source = filename
destinations = links[source] ? links[source] : (links[source] = {})
swiftdeps["top-level"].each { |unparseddest|
# puts unparseddest
dest = unparseddest
if can_be_used_as_destination(dest, options[:exclusion_prefixes]) and not classes_declared_in_file.include?(dest)
destinations[dest] = "set up"
end
}
end
end
else
#Searching all the .o files and showing its information through nm call
# ObjC mode: each undefined _OBJC_CLASS_$_<Name> symbol in <file>.o becomes
# an edge <file> -> <Name>.
IO.popen("find \"#{options[:search_directory]}\" -name \"*.o\" -exec /usr/bin/nm -o {} \\;") { |f|
f.each do |line|
# Gathering only OBC_CLASSES
match = /_OBJC_CLASS_\$_/.match line
if match != nil
exclusion_match = /_OBJC_CLASS_\$_(#{options[:exclusion_prefixes]})/.match line
# Excluding base frameworks prefixes
if exclusion_match == nil
#Capturing filename (We'll think that this is source)
#And dependency (We'll think that this is the destination)
source, dest = /[^\w]*([^\.\/]+)\.o.*_OBJC_CLASS_\$_(.*)/.match(line)[1, 2]
if source != dest
destinations = links[source] ? links[source] : (links[source] = {})
destinations[dest] = "set up"
end
end
end
end
}
# Optional second pass: mine DWARF pointer-type info for extra references.
if options[:use_dwarf]
# Search files again
IO.popen("find \"#{options[:search_directory]}\" -name \"*.o\"") { |f|
f.each do |line|
# puts "Running dwarfdump #{line} | grep -A1 TAG_pointer_type"
source = /.*\/(.+)\.o/.match(line)[1]
IO.popen("dwarfdump #{line.strip} | grep -A1 TAG_pointer_type") { |fd|
fd.each do |line2|
# Finding the name in types
# AT_type( {0x00000456} ( objc_object ) )
name = /.*?AT_type\(\s\{.*?\}.*\(\s((function|const)\s)?([A-Z][^\)]+?)\*?\s\).*/.match(line2)
if name != nil
dest = name[3]
if can_be_used_as_destination(dest, options[:exclusion_prefixes])
if source != dest and dest != "BOOL"
destinations = links[source] ? links[source] : (links[source] = {})
destinations[dest] = "set up"
end
end
end
end
}
end
}
end
end
# Emit each edge, then close the JS object with summary counts.
sources_count = links.length
links_count = links.values.inject(0) { |sum, dests| sum + dests.length }
links.each do |source, dest_hash|
  dest_hash.each_key do |dest|
    puts " { \"source\" : \"#{source}\", \"dest\" : \"#{dest}\" },"
  end
end
puts <<-THEEND
],
"source_files_count":#{sources_count},
"links_count":#{links_count},
}
;
THEEND
|
update
|
# Serverspec suite: the my-program daemon must be enabled, running, and
# watched by monit — as a Process on sysvinit hosts, as a Program on
# systemd hosts. The init system is detected via /bin/systemctl.
require 'spec_helper'
describe 'daemon' do
describe service('my-program') do
it { should be_enabled }
it { should be_running }
end
# With *sysvinit* we check monit for process
check_initv = "monit summary | grep -qE 'Process+.+my-program'"
# With *systemd* we check monit for program
check_systemd = "monit summary | grep -qE 'Program+.+my-program+.+ok$'"
describe command("if (test -e /bin/systemctl); then #{check_systemd}; else #{check_initv}; fi") do
its(:exit_status) { should eq 0 }
end
# With *sysvinit* we don't test anything
check_initv = "echo ok"
# With *systemd* we check that monit has alerted
check_systemd = "journalctl -u monit --no-pager | grep -qE 'my-program+.+\/bin\/systemctl+.+failed'"
describe command("if (test -e /bin/systemctl); then #{check_systemd}; else #{check_initv}; fi") do
its(:exit_status) { should eq 0 }
end
end
debug spec
# Serverspec suite: the my-program daemon must be enabled, running, and
# watched by monit — as a Process on sysvinit hosts, as a Program on
# systemd hosts. (The trailing $ anchor on the Program check was dropped to
# tolerate trailing monit output.)
require 'spec_helper'
describe 'daemon' do
describe service('my-program') do
it { should be_enabled }
it { should be_running }
end
# With *sysvinit* we check monit for process
check_initv = "monit summary | grep -qE 'Process+.+my-program'"
# With *systemd* we check monit for program
check_systemd = "monit summary | grep -qE 'Program+.+my-program+.+ok'"
describe command("if (test -e /bin/systemctl); then #{check_systemd}; else #{check_initv}; fi") do
its(:exit_status) { should eq 0 }
end
# With *sysvinit* we don't test anything
check_initv = "echo ok"
# With *systemd* we check that monit has alerted
check_systemd = "journalctl -u monit --no-pager | grep -qE 'my-program+.+\/bin\/systemctl+.+failed'"
describe command("if (test -e /bin/systemctl); then #{check_systemd}; else #{check_initv}; fi") do
its(:exit_status) { should eq 0 }
end
end
|
require 'open3'
# Compile the TypeScript server; abort immediately on compiler failure.
puts 'Building'
`rm -rf build/server`
puts `tsc -p server`
if $?.exitstatus > 0
puts "Build failed"
exit false
end
# Fixture users (one server each); $error records any newman suite failure.
$users = [ "alice", "bob", "frank" ]
$error = false
# Run each newman collection named in `tests`, echoing its output.
# Bug fix: plain assignment to $error forgot earlier failures whenever a
# later suite succeeded; accumulate with ||= instead ($error starts false).
def run_tests(tests)
  tests.each do |t|
    puts "Running tests for #{t}"
    puts `newman run tests/#{t}.json`
    $error ||= $?.exitstatus > 0
  end
end
# Start one npm server per user, wait until every one prints
# "Server running at", then run the given newman suites once and shut all
# servers down with SIGINT.
def run_servers(tests)
ths = []
started = []
$users.each do |user|
ths << Thread.new do
Open3.popen3("npm run start:#{user}") do |stdin, stdout, stderr, thread|
puts "Starting server for #{user} with PID #{thread.pid}"
# Drain stderr in the background so the child cannot block on a full pipe.
Thread.new do
while line=stderr.gets
puts line
end
end
while line=stdout.gets do
if line =~ /Server running at/
puts `ps -ef | grep "build/server"`
puts `ps -ef | grep "npm"`
started << thread.pid;
puts "#{started.length}/#{$users.length} servers started"
# The last server to come up runs the tests and tears everything down.
if started.length == $users.length
puts "All servers running!"
run_tests(tests)
border = $users.length - 1
for i in 0..border do
pid = started[i]
Process.kill(:SIGINT, pid)
puts "Killed process #{pid}"
thr = ths[i]
thr.exit unless thr == Thread.current
end
Thread.current.exit
end
end
end
end
end
end
ths.each { |thr| thr.join }
end
# Reset databases with test data, run the content suites, then reset with
# normal data and run the friends suite. Exit nonzero if anything failed.
puts `./resetdb.sh #{$users.join(" ")} --test`
run_servers([ "me", "posts", "comments", "reactions" ])
puts `./resetdb.sh #{$users.join(" ")}`
run_servers([ "friends" ])
exit !$error
Trying to run with bash
require 'open3'
# Compile the TypeScript server; abort immediately on compiler failure.
puts 'Building'
`rm -rf build/server`
puts `tsc -p server`
if $?.exitstatus > 0
puts "Build failed"
exit false
end
# Fixture users (one server each); $error records any newman suite failure.
$users = [ "alice", "bob", "frank" ]
$error = false
# Run each newman collection named in `tests`, echoing its output.
# Bug fix: plain assignment to $error forgot earlier failures whenever a
# later suite succeeded; accumulate with ||= instead ($error starts false).
def run_tests(tests)
  tests.each do |t|
    puts "Running tests for #{t}"
    puts `newman run tests/#{t}.json`
    $error ||= $?.exitstatus > 0
  end
end
# Start one npm server per user (via bash -c for shell compatibility), wait
# until every one prints "Server running at", then run the given newman
# suites once and shut all servers down with SIGINT.
def run_servers(tests)
ths = []
started = []
$users.each do |user|
ths << Thread.new do
Open3.popen3("bash -c \"npm run start:#{user}\"") do |stdin, stdout, stderr, thread|
puts "Starting server for #{user} with PID #{thread.pid}"
# Drain stderr in the background so the child cannot block on a full pipe.
Thread.new do
while line=stderr.gets
puts line
end
end
while line=stdout.gets do
if line =~ /Server running at/
puts `ps -ef | grep "build/server"`
puts `ps -ef | grep "npm"`
started << thread.pid;
puts "#{started.length}/#{$users.length} servers started"
# The last server to come up runs the tests and tears everything down.
if started.length == $users.length
puts "All servers running!"
run_tests(tests)
border = $users.length - 1
for i in 0..border do
pid = started[i]
Process.kill(:SIGINT, pid)
puts "Killed process #{pid}"
thr = ths[i]
thr.exit unless thr == Thread.current
end
Thread.current.exit
end
end
end
end
end
end
ths.each { |thr| thr.join }
end
# Reset databases with test data, run the content suites, then reset with
# normal data and run the friends suite. Exit nonzero if anything failed.
puts `./resetdb.sh #{$users.join(" ")} --test`
run_servers([ "me", "posts", "comments", "reactions" ])
puts `./resetdb.sh #{$users.join(" ")}`
run_servers([ "friends" ])
exit !$error
|
#!/usr/local/bin/ruby
require 'xmlrpc/client'
require 'uri'
require 'net/http'
require 'net/ftp'
#Credentials for Loopia DNS API
username = 'registered_api_username@loopiaapi'
password = 'registered_api_password'
global_domain_server_url = "https://api.loopia.se/RPCSERV"
client = XMLRPC::Client.new2(global_domain_server_url)
#If no arguments are provided the script lists registered domain names
if ARGV.empty?
  response = client.call("getDomains", username, password)
  response.each do |r|
    puts r['domain']
  end
else
  #Get external IP of the current host
  url = URI('http://www.myexternalip.com/raw')
  extip = Net::HTTP.get(url).strip
  #Get record ID and IP of A record for @ (root domain)
  records = client.call("getZoneRecords", username, password, ARGV[0], "@")
  records.each do |r|
    @arecord = r['record_id'] if r['type'] == "A"
    @dnsip = r['rdata'] if r['type'] == "A"
  end
  if extip == @dnsip
    system("logger -t LoopiaDns 'External IP is the same as current DNS IP, exiting'")
  else
    #If updateMikrotik is appended as a second argument, an update script is generated and uploaded to Mikrotik
    if ARGV[1] == "updateMikrotik"
      #Generate script to update existing IPsec policies and GRE interfaces with the new IP
      file = File.open("/srv/newip.auto.rsc", "w+")
      file.write("/log info \"Updating ipsec with new IP\"\n")
      file.write("/ip ipsec policy set src-address=#{extip}/32 sa-src-address=#{extip} [ find src-address=#{@dnsip}/32 ]\n")
      file.write("/ip ipsec policy set dst-address=#{extip}/32 sa-dst-address=#{extip} [ find dst-address=#{@dnsip}/32 ]\n")
      file.write("/log info \"Done updating ipsec\"\n")
      file.write("/log info \"Updating gre tunnels with new IP\"\n")
      file.write("/interface gre set local-address=#{extip} [ find local-address=#{@dnsip} ]\n")
      file.write("/log info \"Done updating gre tunnels\"\n")
      file.write("/log info \"Killing current IPsec connections\"\n")
      # Bug fix: these two commands previously ended with a stray \" which
      # injected a literal quote into the generated RouterOS script.
      file.write("/ip ipsec remote-peers kill-connections\n")
      file.write("/log info \"Flushing installed SA\"\n")
      file.write("/ip ipsec installed-sa flush\n")
      file.close
      #Upload script to mikrotik device for automatic execution
      ftp = Net::FTP.new('url_to_mikrotik')
      ftp.login("mikrotik_ftp_username", "mikrotik_ftp_password")
      # Bug fix: upload the file generated above (the old path
      # "./updateIpsec.auto.rsc" did not exist).
      ftp.puttextfile("/srv/newip.auto.rsc")
      ftp.close
      system("logger -t LoopiaDns 'Updating IPsec related settings on Mikrotik'")
    end
    #Update the A record for @ (root domain) with new IP
    # Bug fix: `ip` was never defined (NameError); the external address is
    # `extip`, which is already stripped.
    record = { 'type' => 'A', 'ttl' => 600, 'rdata' => extip, 'record_id' => @arecord, 'priority' => 1 }
    response = client.call("updateZoneRecord", username, password, ARGV[0], "@", record)
    system("logger -t LoopiaDns 'Updating #{ARGV[0]}: #{response.inspect}'")
  end
end
Update dynIP.rb
Updated with correct path when uploading script to Mikrotik.
#!/usr/local/bin/ruby
require 'xmlrpc/client'
require 'uri'
require 'net/http'
require 'net/ftp'
#Credentials for Loopia DNS API
username = 'registered_api_username@loopiaapi'
password = 'registered_api_password'
global_domain_server_url = "https://api.loopia.se/RPCSERV"
client = XMLRPC::Client.new2(global_domain_server_url)
#If no arguments are provided the script lists registered domain names
if ARGV.empty?
  response = client.call("getDomains", username, password)
  response.each do |r|
    puts r['domain']
  end
else
  #Get external IP of the current host
  url = URI('http://www.myexternalip.com/raw')
  extip = Net::HTTP.get(url).strip
  #Get record ID and IP of A record for @ (root domain)
  records = client.call("getZoneRecords", username, password, ARGV[0], "@")
  records.each do |r|
    @arecord = r['record_id'] if r['type'] == "A"
    @dnsip = r['rdata'] if r['type'] == "A"
  end
  if extip == @dnsip
    system("logger -t LoopiaDns 'External IP is the same as current DNS IP, exiting'")
  else
    #If updateMikrotik is appended as a second argument, an update script is generated and uploaded to Mikrotik
    if ARGV[1] == "updateMikrotik"
      #Generate script to update existing IPsec policies and GRE interfaces with the new IP
      file = File.open("/srv/newip.auto.rsc", "w+")
      file.write("/log info \"Updating ipsec with new IP\"\n")
      file.write("/ip ipsec policy set src-address=#{extip}/32 sa-src-address=#{extip} [ find src-address=#{@dnsip}/32 ]\n")
      file.write("/ip ipsec policy set dst-address=#{extip}/32 sa-dst-address=#{extip} [ find dst-address=#{@dnsip}/32 ]\n")
      file.write("/log info \"Done updating ipsec\"\n")
      file.write("/log info \"Updating gre tunnels with new IP\"\n")
      file.write("/interface gre set local-address=#{extip} [ find local-address=#{@dnsip} ]\n")
      file.write("/log info \"Done updating gre tunnels\"\n")
      file.write("/log info \"Killing current IPsec connections\"\n")
      # Bug fix: these two commands previously ended with a stray \" which
      # injected a literal quote into the generated RouterOS script.
      file.write("/ip ipsec remote-peers kill-connections\n")
      file.write("/log info \"Flushing installed SA\"\n")
      file.write("/ip ipsec installed-sa flush\n")
      file.close
      #Upload script to mikrotik device for automatic execution
      ftp = Net::FTP.new('url_to_mikrotik')
      ftp.login("mikrotik_ftp_username", "mikrotik_ftp_password")
      # Bug fix: upload the file generated above (the old path
      # "/srv/updateIpsec.auto.rsc" does not match the file written).
      ftp.puttextfile("/srv/newip.auto.rsc")
      ftp.close
      system("logger -t LoopiaDns 'Updating IPsec related settings on Mikrotik'")
    end
    #Update the A record for @ (root domain) with new IP
    # Bug fix: `ip` was never defined (NameError); the external address is
    # `extip`, which is already stripped.
    record = { 'type' => 'A', 'ttl' => 600, 'rdata' => extip, 'record_id' => @arecord, 'priority' => 1 }
    response = client.call("updateZoneRecord", username, password, ARGV[0], "@", record)
    system("logger -t LoopiaDns 'Updating #{ARGV[0]}: #{response.inspect}'")
  end
end
|
# Rails application template: base gems, optional seed generators, git init.
gem 'haml-rails'
gem 'meta-tags', :require => 'meta_tags'
gem 'devise'
# Bug fix: resolve the seed gem relative to this template instead of a
# hard-coded developer-machine path, so the template works on any checkout.
gem 'seed', :path => File.dirname(__FILE__)
# Disable Turbolinks in the generated Gemfile.
gsub_file "Gemfile", /^#\s*Turbolinks.*$/,'# No one likes Turbolinks.'
gsub_file "Gemfile", /^gem\s+["']turbolinks["'].*$/,'# gem \'turbolinks\''
# Run the base generator
generate "seed:foreman"
if yes? "Would you like to install bootstrap?"
  generate "seed:bootstrap"
  if yes? "Would you like to install splash page?"
    generate "seed:splash"
  end
end
if yes? "Would you like to install devise?"
  generate "seed:devise"
  if yes? "Would you like to install twitter?"
    generate "seed:twitter"
  end
  if yes? "Would you like to install facebook connect?"
    generate "seed:facebook"
  end
end
if yes? "Would you like to install active admin?"
  generate "seed:admin"
end
puts "Setting up git"
git :init
Remove hard-coded seed gem path
# Rails application template: base gems, optional seed generators, git init.
gem 'haml-rails'
gem 'meta-tags', :require => 'meta_tags'
gem 'devise'
# The seed gem is resolved relative to this template file.
gem 'seed', :path => File.dirname(__FILE__)
# Disable Turbolinks in the generated Gemfile.
gsub_file "Gemfile", /^#\s*Turbolinks.*$/,'# No one likes Turbolinks.'
gsub_file "Gemfile", /^gem\s+["']turbolinks["'].*$/,'# gem \'turbolinks\''
# Run the base generator
generate "seed:foreman"
if yes? "Would you like to install bootstrap?"
generate "seed:bootstrap"
if yes? "Would you like to install splash page?"
generate "seed:splash"
end
end
if yes? "Would you like to install devise?"
generate "seed:devise"
if yes? "Would you like to install twitter?"
generate "seed:twitter"
end
if yes? "Would you like to install facebook connect?"
generate "seed:facebook"
end
end
if yes? "Would you like to install active admin?"
generate "seed:admin"
end
puts "Setting up git"
git :init
|
require 'matrix'
require 'pry'
require_relative 'hole'
class Ship
attr_accessor :length, :ship_holes
def initialize length
@length = length
@ship_holes = []
end
def place x, y, horizontal
if @ship_holes.empty?
@length.times do |a, b|
@ship_holes << Hole.new(x,y)
if horizontal
x += 1
else
y += 1
end
end
end
end
def covers? x,y
@ship_holes.find{|hole| hole.x == x && hole.y == y}
end
def overlaps_with? ship
self.ship_holes.each do |hole|
hole.to_arr
end
end
def fire_at x, y
hole = covers? x, y
if hole
hole.hit!
return true
else
return false
end
end
def sunk?
end
end
=begin
ship1 = Ship.new(4)
ship1.place(2, 1, true)
ship2 = Ship.new(4)
ship2.place(3, 1, true)
ship3 = Ship.new(4)
ship3.place(2, 1, false)
binding.pry
=end
Most recent.
require 'matrix'
require 'pry'
require_relative 'hole'
# A battleship occupying +length+ consecutive Holes on the board grid.
# Depends on Hole (require_relative 'hole'), which exposes x, y, hit! and
# a #state that becomes :hit after hit! (the original code compared the
# same attribute).
class Ship
  attr_accessor :length, :ship_holes

  def initialize length
    @length = length
    @ship_holes = []
  end

  # Lay the ship down starting at (x, y): horizontal extends along x,
  # vertical along y. A ship can only be placed once; later calls no-op.
  # FIX: Integer#times yields a single index, so the original
  # `do |a, b|` block params were phantom (b was always nil) — dropped.
  def place x, y, horizontal
    if @ship_holes.empty?
      @length.times do
        @ship_holes << Hole.new(x, y)
        if horizontal
          x += 1
        else
          y += 1
        end
      end
    end
  end

  # Returns the Hole at (x, y), or nil when the ship does not cover it.
  def covers? x, y
    @ship_holes.find { |hole| hole.x == x && hole.y == y }
  end

  # Truthy (the first shared Hole) when any of this ship's holes is also
  # covered by other_ship; nil otherwise.
  def overlaps_with? other_ship
    @ship_holes.find { |hole| other_ship.covers?(hole.x, hole.y) }
  end

  # Registers a shot at (x, y); returns true on a hit, false on a miss.
  def fire_at x, y
    hole = covers? x, y
    if hole
      hole.hit!
      return true
    else
      return false
    end
  end

  # True when every hole of the ship has been hit.
  # FIX: the original called .count on the single Hole (or nil) returned
  # by find, which raised NoMethodError; count the hit holes instead.
  def sunk?
    @ship_holes.count { |hole| hole.state == :hit } == @length
  end
end
#=begin
# NOTE(review): because the =begin/=end pair is commented out with a
# leading "#", the lines below are LIVE code — they run whenever this file
# is loaded and drop into an interactive pry session via binding.pry.
# Restore the =begin/=end guard (or delete this block) before requiring
# this file from non-interactive code.
ship1 = Ship.new(4)
ship1.place(2, 1, true)
ship2 = Ship.new(4)
ship2.place(3, 1, true)
ship3 = Ship.new(4)
ship3.place(2, 1, false)
binding.pry
#=end
|
class Ship < OwnedObject
include Health
attr_reader :window, :shape, :damage, :owner, :ship_range
SIZE = 5
SPEED = 0.5
ROTATE_SPEED = 0.00333
SHIP_RANGE = 10
def initialize(window, owner)
super(window, owner, SIZE * 0.75)
@shape.layers = 0b11
@shape.object = self
@shape.collision_type = :ship
# Range attack shape
@shape_range = CP::Shape::Circle.new(@body, size * SHIP_RANGE, CP::Vec2.new(0, 0))
@shape_range.layers = 0b01
@shape_range.object = self
@shape_range.collision_type = :ship_range
window.space.add_shape(@shape_range)
@damage = 1
@facing_angle = nil
init_health(100)
end
def size
SIZE
end
def accelerate
@body.apply_force(@body.a.radians_to_vec2 * SPEED, CP::Vec2.new(0.0, 0.0))
end
def stop
@body.apply_force(CP::Vec2.new(-@body.f.x, -@body.f.y), CP::Vec2.new(0.0, 0.0))
end
def move
auto_movement
auto_rotation
auto_attack
@body.p.x += @body.f.x
@body.p.y += @body.f.y
@body.p.x %= window.width
@body.p.y %= window.height
end
def auto_movement
if @move_to_x && @move_to_y
move_to_x = @move_to_x
move_to_y = @move_to_y
elsif @move_to_obj
move_to_x = @move_to_obj.x
move_to_y = @move_to_obj.y
end
if move_to_x && move_to_y
@body.a = TwoDeeGeo.angle_between_points(x, y, move_to_x, move_to_y)
if close_to?(move_to_x, move_to_y, size * 3)
stop
else
accelerate
end
end
end
def auto_rotation
if @rotate_around_obj
obj = @rotate_around_obj
radius = obj.shape.radius
# Add 180 since Y is negative
@rotating_angle ||= (TwoDeeGeo.angle_between_points(x, y, obj.x, obj.y) + 180).gosu_to_radians
@rotating_angle += ROTATE_SPEED;
new_x = Math.cos(@rotating_angle) * radius;
new_y = Math.sin(@rotating_angle) * radius;
jump_to(obj.x + new_x, obj.y + new_y)
@body.a = TwoDeeGeo.angle_between_points(x, y, obj.x, obj.y)
end
end
def auto_attack
if @attack_ship
@facing_angle = TwoDeeGeo.angle_between_points(x, y, @attack_ship.x, @attack_ship.y)
attack(@attack_ship)
elsif @attack_base
if @attack_base.health <= 0 || owner.owns?(@attack_base)
stop_attacking_base
else
attack(@attack_base)
end
end
end
def take_damage_from(obj)
take_damage(obj.damage) do
@destroy = true
end
end
def attack_ship(obj)
@attack_ship = obj
end
def stop_attacking_ship
@attack_ship = nil
@facing_angle = nil
end
def attack_base(obj)
@attack_base = obj unless owner.owns?(obj)
stop
rotate_around(obj)
end
def stop_attacking_base
@attack_base = nil
end
def attack(obj)
obj.take_damage_from(self)
end
def move_to_coords(x, y)
clear_orders
@move_to_x = x
@move_to_y = y
end
def move_to_obj(obj)
return if @move_to_obj == obj || @rotate_around_obj == obj
clear_orders
@move_to_obj = obj
end
def moving_to?(obj)
return false unless obj
@move_to_obj == obj || @rotate_around_obj == obj
end
def rotate_around(obj)
clear_moving_orders
@rotate_around_obj = obj
end
def clear_orders
clear_moving_orders
@attack_base = nil
end
def clear_moving_orders
@move_to_x = nil
@move_to_y = nil
@move_to_obj = nil
@rotating_angle = nil
@rotate_around_obj = nil
end
def facing_angle
if @facing_angle
@facing_angle
else
angle
end
end
def destroy?
!!@destroy
end
def remove_from_owner
owner.remove_ship(self)
end
def draw
x1 = x + size
x2 = x
x3 = x - size
y1 = y3 = y
y2 = y - size * 2
c = owner.color
Gosu.rotate(facing_angle, x, y - size) do
Gosu.draw_triangle(x1, y1, c, x2, y2, c, x3, y3, c)
Gosu.draw_triangle(x1, y1, c, x2, y2 + size * 3, c, x3, y3, c)
end
# Draw bullet line attacking ship
if @attack_ship
Gosu.draw_line(x, y, c, @attack_ship.x, @attack_ship.y, c)
end
# Draw bullet line attacking base
if @attack_base
Gosu.draw_line(x, y, c, @attack_base.x, @attack_base.y, c)
end
end
end
Fixes starting point of ship bullet draw
# A player-owned warship rendered as a Gosu triangle and simulated with a
# chipmunk (CP) body. Inherits from OwnedObject, which (not visible here)
# appears to provide @body, @shape, x, y, angle, close_to? and jump_to —
# TODO confirm against owned_object.rb. Mixes in Health for hit points.
class Ship < OwnedObject
  include Health

  attr_reader :window, :shape, :damage, :owner, :ship_range

  SIZE = 5
  SPEED = 0.5
  ROTATE_SPEED = 0.00333
  # Attack-range circle radius, expressed in multiples of SIZE.
  SHIP_RANGE = 10

  def initialize(window, owner)
    super(window, owner, SIZE * 0.75)
    # Collision layers: the hull shape participates in both layers, the
    # range sensor only in layer 1 — presumably so ranges don't collide
    # with each other; verify against the space's collision handlers.
    @shape.layers = 0b11
    @shape.object = self
    @shape.collision_type = :ship
    # Range attack shape
    @shape_range = CP::Shape::Circle.new(@body, size * SHIP_RANGE, CP::Vec2.new(0, 0))
    @shape_range.layers = 0b01
    @shape_range.object = self
    @shape_range.collision_type = :ship_range
    window.space.add_shape(@shape_range)
    @damage = 1
    @facing_angle = nil
    init_health(100)
  end

  def size
    SIZE
  end

  # Push the body forward along its current angle.
  def accelerate
    @body.apply_force(@body.a.radians_to_vec2 * SPEED, CP::Vec2.new(0.0, 0.0))
  end

  # Cancel the currently applied force so the ship coasts to a halt.
  def stop
    @body.apply_force(CP::Vec2.new(-@body.f.x, -@body.f.y), CP::Vec2.new(0.0, 0.0))
  end

  # Per-frame update: run the standing orders, then integrate position.
  # NOTE(review): position is advanced by the accumulated force vector
  # directly (not velocity) and wraps around the window edges.
  def move
    auto_movement
    auto_rotation
    auto_attack
    @body.p.x += @body.f.x
    @body.p.y += @body.f.y
    @body.p.x %= window.width
    @body.p.y %= window.height
  end

  # Steer toward either a fixed coordinate or a tracked object; stop when
  # within 3 ship-sizes of the goal.
  def auto_movement
    if @move_to_x && @move_to_y
      move_to_x = @move_to_x
      move_to_y = @move_to_y
    elsif @move_to_obj
      move_to_x = @move_to_obj.x
      move_to_y = @move_to_obj.y
    end
    if move_to_x && move_to_y
      @body.a = TwoDeeGeo.angle_between_points(x, y, move_to_x, move_to_y)
      if close_to?(move_to_x, move_to_y, size * 3)
        stop
      else
        accelerate
      end
    end
  end

  # Orbit @rotate_around_obj at the radius of its collision shape,
  # advancing ROTATE_SPEED radians per frame and facing the center.
  def auto_rotation
    if @rotate_around_obj
      obj = @rotate_around_obj
      radius = obj.shape.radius
      # Add 180 since Y is negative
      @rotating_angle ||= (TwoDeeGeo.angle_between_points(x, y, obj.x, obj.y) + 180).gosu_to_radians
      @rotating_angle += ROTATE_SPEED;
      new_x = Math.cos(@rotating_angle) * radius;
      new_y = Math.sin(@rotating_angle) * radius;
      jump_to(obj.x + new_x, obj.y + new_y)
      @body.a = TwoDeeGeo.angle_between_points(x, y, obj.x, obj.y)
    end
  end

  # Fire on the current target every frame. Base attacks self-cancel once
  # the base is dead or captured by this ship's owner.
  def auto_attack
    if @attack_ship
      @facing_angle = TwoDeeGeo.angle_between_points(x, y, @attack_ship.x, @attack_ship.y)
      attack(@attack_ship)
    elsif @attack_base
      if @attack_base.health <= 0 || owner.owns?(@attack_base)
        stop_attacking_base
      else
        attack(@attack_base)
      end
    end
  end

  # Apply incoming damage; the Health mixin invokes the block on death.
  def take_damage_from(obj)
    take_damage(obj.damage) do
      @destroy = true
    end
  end

  def attack_ship(obj)
    @attack_ship = obj
  end

  def stop_attacking_ship
    @attack_ship = nil
    @facing_angle = nil
  end

  # Engage an enemy base: halt and circle it while firing.
  def attack_base(obj)
    @attack_base = obj unless owner.owns?(obj)
    stop
    rotate_around(obj)
  end

  def stop_attacking_base
    @attack_base = nil
  end

  def attack(obj)
    obj.take_damage_from(self)
  end

  # Order: sail to a fixed point (cancels previous movement/base orders).
  def move_to_coords(x, y)
    clear_orders
    @move_to_x = x
    @move_to_y = y
  end

  # Order: follow an object, unless we are already following/orbiting it.
  def move_to_obj(obj)
    return if @move_to_obj == obj || @rotate_around_obj == obj
    clear_orders
    @move_to_obj = obj
  end

  def moving_to?(obj)
    return false unless obj
    @move_to_obj == obj || @rotate_around_obj == obj
  end

  def rotate_around(obj)
    clear_moving_orders
    @rotate_around_obj = obj
  end

  def clear_orders
    clear_moving_orders
    @attack_base = nil
  end

  def clear_moving_orders
    @move_to_x = nil
    @move_to_y = nil
    @move_to_obj = nil
    @rotating_angle = nil
    @rotate_around_obj = nil
  end

  # Render angle: locked onto the target while attacking a ship,
  # otherwise the physics body's heading.
  def facing_angle
    if @facing_angle
      @facing_angle
    else
      angle
    end
  end

  def destroy?
    !!@destroy
  end

  def remove_from_owner
    owner.remove_ship(self)
  end

  # Draw the hull as two triangles rotated to facing_angle, plus a
  # "bullet" line from the ship's nose to the current target.
  def draw
    x1 = x + size
    x2 = x
    x3 = x - size
    y1 = y3 = y
    y2 = y - size * 2
    c = owner.color
    Gosu.rotate(facing_angle, x, y) do
      Gosu.draw_triangle(x1, y1, c, x2, y2, c, x3, y3, c)
      Gosu.draw_triangle(x1, y1, c, x2, y2 + size * 3, c, x3, y3, c)
    end
    if @attack_ship || @attack_base
      # Offset the line origin to the nose so shots appear to leave the bow.
      bx = x + Gosu.offset_x(facing_angle, size)
      by = y + Gosu.offset_y(facing_angle, size)
      # Draw bullet line attacking ship
      if @attack_ship
        Gosu.draw_line(bx, by, c, @attack_ship.x, @attack_ship.y, c)
      end
      # Draw bullet line attacking base
      if @attack_base
        Gosu.draw_line(bx, by, c, @attack_base.x, @attack_base.y, c)
      end
    end
  end
end
|
moved solo.rb
# Chef Solo configuration (solo.rb): where downloaded artifacts are cached
# and where cookbooks are resolved on this node.
file_cache_path '/root/chef-solo'
cookbook_path '/root/chef-repo/cookbooks'
|
#! /bin/env ruby
#
############################################################
# Author: Alice "Duchess" Archer
# Name: rirc
# Description: IRC framework for IRC bots written in ruby
############################################################
require 'socket'
require 'openssl'
class IRC_message
def initialize(command, nick, channel, message, ircmsg)
@command = command
@nick = nick
@channel = channel
@message = message
@ircmsg = ircmsg
end
def ircmsg
return @ircmsg
end
def message
return @message
end
def nick
return @nick
end
def command
return @command
end
def channel
return @channel
end
def check_regex(type, regex)
if type == "command"
if @command.match(regex) then return true end
elsif type == "nick"
if @nick.match(regex) then return true end
elsif type == "channel"
if @channel.match(regex) then return true end
elsif type == "message"
if @message.match(regex) then return true end
else
if @message.match(regex) then return true end
end
return false
end
def message_regex(regex)
if @message.match(regex) then return true end
return false
end
end
class Pluginf
def initialize(regex, name, file_name, help)
@regexp = Regexp.new(regex.to_s)
@name = name.to_s
@file_name = file_name.to_s
@help = help
@chan_list = []
@chan_list.push("any")
end
# default function
def script(message, nick, chan)
end
def regex
return @regexp
end
def chans
return @chan_list
end
def name
return @name
end
def file_name
return @file_name
end
def help
return @help
end
def cleanup
return ""
end
end
class Plugin_manager
def initialize(plugin_folder)
@plugins = []
@plugin_folder = plugin_folder
end
# returns all the plugins
def plugins
if @plugins.length == 0
return []
end
return @plugins
end
# search functions
def get_names
if @plugins.length == 0
return []
end
names = []
@plugins.each { |a| names.push(a.name) }
return names
end
def get_helps
if @plugins.length == 0
return []
end
names = []
@plugins.each { |a| names.push(a.help) }
return names
end
def get_files
if @plugins.length == 0
return []
end
names = []
@plugins.each { |a| names.push(a.file_name) }
return names
end
def get_chans
if @plugins.length == 0
return []
end
names = []
@plugins.each { |a| names.push(a.chans) }
return names
end
def get_regexps
if @plugins.length == 0
return []
end
names = []
@plugins.each { |a| names.push(a.regex) }
return names
end
def get_plugin(name) # gets a plugin by name or nil if it is not loaded
if @plugins.length == 0
return nil
end
@plugins.each { |a| if a.name == name then return a end }
return nil
end
def plugin_help(name) # gets the help for a plugin
if @plugins.length == 0
return nil
end
@plugins.each { |a| if a.name == name then return a.help end }
return nil
end
def plugin_file_name(name) # gets the file name for a plugin
if @plugins.length == 0
return nil
end
@plugins.each { |a| if a.name == name then return a.file_name end }
return nil
end
def plugin_chans(name) # gets the array of channels for a plugin
if @plugins.length == 0
return nil
end
@plugins.each { |a| if a.name == name then return a.chans end }
return nil
end
def plugin_regex(name) # gets the regex for a plugin
if @plugins.length == 0
return nil
end
@plugins.each { |a| if a.name == name then return a.regex end }
return nil
end
# check if a plugin is loaded
def plugin_loaded(name)
if @plugins.length == 0
return false
end
@plugins.each do |a|
if a.name == name
return true
end
end
return false
end
# regex check function
# this function uses the IRC_message object for message input
# inputs:
# - name
# - IRC_message object
# - array of admins [can be an empty array]
# - backlog array [can be an empty array]
# output: string
def check_plugin(name, message, admins, backlog) #checks an individual plugin's (by name) regex against message
if @plugins.length == 0
return ""
end
if !plugin_loaded(name)
return ""
else
if message.message.match(get_plugin(name).regex) and (get_plugin(name).chans.include? "any" or get_plugin(name).chans.include? message.channel)
begin
return get_plugin(name).script(message, admins, backlog) # plugins use the IRC_message object
rescue => e
return "an error occured for plugin: #{name}"
end
end
end
return ""
end
# regex check function that returns responses for all plugins in an array
# inputs:
# - IRC_message object
# - array of admins [can be an empty array]
# - backlog array [can be an empty array]
# output: array of strings
def check_all(message, admins, backlog)
if @plugins.length == 0
return []
end
response = []
# this is incredibly inneficient but it makes check_plugin flexible
@plugins.each { |a| response.push(check_plugin(a.name, message, admins, backlog)) }
return response
end
# load
def plugin_load(name)
$LOAD_PATH << "#{@plugin_folder}"
response = ""
temp_plugin = nil
if name.match(/.rb$/)
begin
load "#{name}"
plugin_loader = Loader.new
temp_plugin = plugin_loader.get_plugin
if plugin_loaded(temp_plugin.name)
temp_plugin = nil
return "Plugin #{name} is already loaded"
end
@plugins.push(temp_plugin)
temp_plugin = nil
response = "#{name[0..-4]} loaded"
rescue => e
response = "cannot load plugin"
end
else
begin
load "#{name}.rb"
plugin_loader = Loader.new
temp_plugin = plugin_loader.get_plugin
if plugin_loaded(temp_plugin.name)
temp_plugin = nil
return "Plugin #{name} is already loaded"
end
@plugins.push(temp_plugin)
temp_plugin = nil
response = "#{name} loaded"
rescue => e
response = "cannot load plugin"
end
end
$LOAD_PATH << './'
return response
end
# unload
def unload(name)
if !plugin_loaded(name)
return "plugin is not loaded"
end
get_plugin(name).cleanup
@plugins.delete_if { |a| a.name == name }
return "plugin #{name} unloaded"
end
# reload
def reload(name)
if !plugin_loaded(name)
return "plugin is not loaded"
end
temp_file_name = get_plugin(name).file_name
unload(name)
plugin_load(temp_file_name)
return "plugin #{name} reloaded"
end
end
class IRCBot
def initialize(network, port, nick, user_name, real_name)
@network = network
@port = port
@nick = nick
@user_name = user_name
@real_name = real_name
@socket = nil
@channels = []
@admins = []
@ignore = []
@hooks = {}
@backlog = []
end
def backlog
return @backlog
end
def ignore
return @ignore
end
def channels
return @channels
end
def admins
return @admins
end
def network
return @network
end
def port
return @port
end
def nick_name
return @nick
end
def user_name
return @user_name
end
def real_name
return @real_name
end
def socket
return @socket
end
def say(message)
@socket.puts message
end
def join(channel)
say "JOIN #{channel}"
if !@channels.include? channel then @channels.push(channel) end
end
def connect
@socket = TCPSocket.open(@network, @port)
end
def connect_ssl
ssl_context = OpenSSL::SSL::SSLContext.new
ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
@socket = OpenSSL::SSL::SSLSocket.new(@socket, ssl_context)
@socket.sync = true
@socket.connect
end
def connect_pass(pass)
say "PASS #{pass}"
end
def nick(nick)
@nick = nick
say "NICK #{nick}"
end
def privmsg(dest, message)
say "PRIVMSG #{dest} :#{message}"
end
def action(dest, message)
privmsg(dest, "\01ACTION #{message}\07\01")
end
def notice(dest, message)
say "NOTICE #{dest} :#{message}"
end
def ctcp(dest, message)
privmsg(dest, "\01VERSION #{message}\07\01")
end
def part(dest, message)
say "PART #{dest} :#{message}"
end
def quit(message)
say "QUIT :#{message}"
end
def names(dest)
say "NAMES #{dest}"
end
def identify(nickserv_pass)
say "PRIVMSG nickserv :identify #{nickserv_pass}"
end
def auth(nickserv_pass)
say "VERSION"
say "USER #{@user_name} * * :#{@real_name}"
nick(@nick)
if nickserv_pass != "" and nickserv_pass != nil
identify(nickserv_pass)
end
end
def read
if !@socket.eof
msg = @socket.gets
if msg.match(/^PING :(.*)$/)
say "PONG #{$~[1]}"
return "PING"
end
return msg
else
return nil
end
end
def parse(msg)
message_reg = msg.match(/^(:(?<prefix>\S+) )?(?<command>\S+)( (?!:)(?<params>.+?))?( :(?<trail>.+))?$/)
nick_n = message_reg[:prefix].to_s.split("!")[0]
command = message_reg[:command].to_s
chan = message_reg[:params].to_s
message = message_reg[:trail].to_s
message = message.chomp
if chan == @nick then chan = nick_n end
ircmsg = IRC_message.new(command, nick_n, chan, message, msg)
return ircmsg
end
def add_admin(nick)
@admins.push(nick)
end
def remove_admin(nick)
@admins.delete_if { |a| a == nick }
end
def add_ignore(nick)
@ignore.push(nick)
end
def remove_ignore(nick)
@ignore.delete_if { |a| a == nick }
end
def on(type, &block)
type = type.to_s
@hooks[type] ||= []
@hooks[type] << block
end
def set_admins(admins_s)
admins_s.each { |a| self.add_admin(a) }
end
def join_channels(channels_s)
channels_s.each { |a| self.join(a) }
end
def create_log
if !File.exist?("./log")
File.open("./log", "w+") { |fw| fw.write("Command and Privmsg LOGS") }
end
end
def setup(use_ssl, use_pass, pass, nickserv_pass, channels_s)
self.connect
if use_ssl then self.connect_ssl end
if use_pass then self.connect_pass(pass) end
self.auth(nickserv_pass)
self.create_log
self.join_channels(channels_s)
self.on :message do |msg|
if msg.channel == msg.nick
File.write("./log", msg.ircmsg, File.size("./log"), mode: 'a')
end
if !self.nick_name == msg.nick and !self.ignore.include? msg.nick
@backlog.push(msg)
end
end
self.on :message do |msg|
if self.admins.include? msg.nick and msg.message_regex(/^`plsgo$/) then abort end
end
end
def start!
until self.socket.eof? do
ircmsg = self.read
msg = self.parse(ircmsg)
if ircmsg == "PING" or self.ignore.include?(msg.nick) then next end
begin
hooks = @hooks['message']
if hooks != nil
hooks.each { |h| h.call(msg) }
end
rescue => e
# do not do anything
end
begin
hooks = @hooks['command']
if hooks != nil
hooks.each { |h| h.call(msg.channel, msg.command) }
end
rescue => e
# do not do anything
end
begin
hooks = @hooks['ircmsg']
if hooks != nil
hooks.each { |h| h.call(msg.nick, msg.command, msg.channel, msg.message) }
end
rescue => e
# do not do anything
end
end
end
end
class Commands_manager
def initialize
@reg_s = []
@hook_s = []
@size = 0
end
def on(reg, &block)
reg = Regexp.new(reg.to_s)
p reg.to_s
@reg_s.push(reg)
p @reg_s.to_s
@hook_s << block
@size += 1
end
def check_all(bot, msg, plugins)
0.upto(@size - 1) do |i|
p "reg: #{reg_s[i]}"
p "hook #{hook_s[i]}"
if msg.message_regex(@reg_s[i])
@hook_s[i].call(bot, msg, plugins)
end
end
end
def hooks
return @hook_s
end
def regexes
return @reg_s
end
def size
return @size
end
end
update
#! /bin/env ruby
#
############################################################
# Author: Alice "Duchess" Archer
# Name: rirc
# Description: IRC framework for IRC bots written in ruby
############################################################
require 'socket'
require 'openssl'
# Value object describing one parsed IRC line: the command, the sender's
# nick, the target channel, the trailing message text and the raw line.
class IRC_message
  attr_reader :command, :nick, :channel, :message, :ircmsg

  def initialize(command, nick, channel, message, ircmsg)
    @command = command
    @nick = nick
    @channel = channel
    @message = message
    @ircmsg = ircmsg
  end

  # Match +regex+ against the field named by +type+ ("command", "nick",
  # "channel" or "message"); any other type falls back to the message
  # text. Always returns true or false.
  def check_regex(type, regex)
    target =
      case type
      when "command" then @command
      when "nick"    then @nick
      when "channel" then @channel
      else                @message
      end
    target.match(regex) ? true : false
  end

  # True when the message text matches +regex+, false otherwise.
  def message_regex(regex)
    @message.match(regex) ? true : false
  end
end
# Base class for bot plugins: couples a trigger regex with the plugin's
# metadata (name, source file, help text) and its channel whitelist.
class Pluginf
  attr_reader :name, :file_name, :help

  def initialize(regex, name, file_name, help)
    @regexp = Regexp.new(regex.to_s)
    @name = name.to_s
    @file_name = file_name.to_s
    @help = help
    # "any" means the plugin fires in every channel.
    @chan_list = ["any"]
  end

  # Default hook body; concrete plugins override this.
  def script(message, nick, chan)
  end

  # Trigger regex for this plugin.
  def regex
    @regexp
  end

  # Channels the plugin is active in.
  def chans
    @chan_list
  end

  # Hook invoked before unloading; plugins may override.
  def cleanup
    ""
  end
end
# Tracks the set of loaded plugins (Pluginf subclasses exposed through a
# per-file Loader class) and provides loading, unloading and regex-based
# dispatch of IRC messages to them.
class Plugin_manager
  def initialize(plugin_folder)
    @plugins = []
    @plugin_folder = plugin_folder
  end

  # returns all the plugins ([] when none are loaded)
  def plugins
    @plugins
  end

  # search functions — each returns [] when no plugins are loaded
  def get_names
    @plugins.map { |a| a.name }
  end

  def get_helps
    @plugins.map { |a| a.help }
  end

  def get_files
    @plugins.map { |a| a.file_name }
  end

  def get_chans
    @plugins.map { |a| a.chans }
  end

  def get_regexps
    @plugins.map { |a| a.regex }
  end

  # gets a plugin by name or nil if it is not loaded
  def get_plugin(name)
    @plugins.find { |a| a.name == name }
  end

  # gets the help for a plugin (nil when not loaded)
  def plugin_help(name)
    plugin = get_plugin(name)
    plugin ? plugin.help : nil
  end

  # gets the file name for a plugin (nil when not loaded)
  def plugin_file_name(name)
    plugin = get_plugin(name)
    plugin ? plugin.file_name : nil
  end

  # gets the array of channels for a plugin (nil when not loaded)
  def plugin_chans(name)
    plugin = get_plugin(name)
    plugin ? plugin.chans : nil
  end

  # gets the regex for a plugin (nil when not loaded)
  def plugin_regex(name)
    plugin = get_plugin(name)
    plugin ? plugin.regex : nil
  end

  # check if a plugin is loaded
  def plugin_loaded(name)
    !get_plugin(name).nil?
  end

  # Checks an individual plugin's (by name) regex against the message.
  # inputs:
  #  - name
  #  - IRC_message object
  #  - array of admins [can be an empty array]
  #  - backlog array [can be an empty array]
  # output: the plugin's response string, or "" when the plugin is not
  # loaded, does not match, or is not active in the message's channel.
  def check_plugin(name, message, admins, backlog)
    plugin = get_plugin(name)
    return "" if plugin.nil?
    if message.message.match(plugin.regex) and (plugin.chans.include? "any" or plugin.chans.include? message.channel)
      begin
        return plugin.script(message, admins, backlog) # plugins use the IRC_message object
      rescue => e
        return "an error occured for plugin: #{name}"
      end
    end
    ""
  end

  # Runs every loaded plugin against the message and collects the
  # responses ("" entries for plugins that did not fire).
  def check_all(message, admins, backlog)
    @plugins.map { |a| check_plugin(a.name, message, admins, backlog) }
  end

  # Loads a plugin file from the plugin folder; +name+ may be given with
  # or without the ".rb" extension. Returns a status string.
  # FIX: the extension check used /.rb$/ with an unescaped dot, so any
  # name merely ending in "rb" was treated as a filename.
  def plugin_load(name)
    $LOAD_PATH << "#{@plugin_folder}"
    response =
      if name.match(/\.rb$/)
        load_plugin_file(name, name[0..-4], name)
      else
        load_plugin_file("#{name}.rb", name, name)
      end
    $LOAD_PATH << './'
    response
  end

  # unload a plugin, giving it a chance to clean up first
  def unload(name)
    plugin = get_plugin(name)
    return "plugin is not loaded" if plugin.nil?
    plugin.cleanup
    @plugins.delete_if { |a| a.name == name }
    "plugin #{name} unloaded"
  end

  # reload a plugin from its original source file
  def reload(name)
    plugin = get_plugin(name)
    return "plugin is not loaded" if plugin.nil?
    temp_file_name = plugin.file_name
    unload(name)
    plugin_load(temp_file_name)
    "plugin #{name} reloaded"
  end

  private

  # Loads +file+ via Kernel#load, fetches the plugin its Loader exposes,
  # and registers it. +display+ is the name used in the success message,
  # +original_name+ the name used in the "already loaded" message.
  # FIX: Kernel#load raises LoadError, which is a ScriptError and NOT a
  # StandardError — the original bare `rescue => e` never caught a
  # missing plugin file, crashing the bot instead of reporting failure.
  def load_plugin_file(file, display, original_name)
    load file
    plugin = Loader.new.get_plugin
    return "Plugin #{original_name} is already loaded" if plugin_loaded(plugin.name)
    @plugins.push(plugin)
    "#{display} loaded"
  rescue ScriptError, StandardError => e
    "cannot load plugin"
  end
end
# Minimal IRC client: owns the socket (plain or SSL), performs login and
# channel management, parses incoming lines into IRC_message objects and
# dispatches them through a small hook system (:message, :command,
# :ircmsg).
class IRCBot
  # Read-only views of the bot's state.
  attr_reader :backlog, :ignore, :channels, :admins, :network, :port,
              :user_name, :real_name, :socket

  def initialize(network, port, nick, user_name, real_name)
    @network = network
    @port = port
    @nick = nick
    @user_name = user_name
    @real_name = real_name
    @socket = nil
    @channels = []
    @admins = []
    @ignore = []
    @hooks = {}
    @backlog = []
  end

  # Current nick (#nick is the writer — it also informs the server).
  def nick_name
    @nick
  end

  # Send a raw IRC line to the server.
  def say(message)
    @socket.puts message
  end

  def join(channel)
    say "JOIN #{channel}"
    @channels.push(channel) unless @channels.include?(channel)
  end

  def connect
    @socket = TCPSocket.open(@network, @port)
  end

  # Wrap the already-connected TCP socket in TLS.
  # NOTE(review): VERIFY_NONE disables certificate validation — the
  # connection is encrypted but not authenticated.
  def connect_ssl
    ssl_context = OpenSSL::SSL::SSLContext.new
    ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @socket = OpenSSL::SSL::SSLSocket.new(@socket, ssl_context)
    @socket.sync = true
    @socket.connect
  end

  # Send the server password (must precede NICK/USER).
  def connect_pass(pass)
    say "PASS #{pass}"
  end

  # Change nick locally and on the server.
  def nick(nick)
    @nick = nick
    say "NICK #{nick}"
  end

  def privmsg(dest, message)
    say "PRIVMSG #{dest} :#{message}"
  end

  # NOTE(review): CTCP messages are conventionally delimited by \001 on
  # both ends; the trailing "\07\01" (BEL + SOH) here looks suspect but
  # is preserved as-is — confirm against a real server before changing.
  def action(dest, message)
    privmsg(dest, "\01ACTION #{message}\07\01")
  end

  def notice(dest, message)
    say "NOTICE #{dest} :#{message}"
  end

  def ctcp(dest, message)
    privmsg(dest, "\01VERSION #{message}\07\01")
  end

  def part(dest, message)
    say "PART #{dest} :#{message}"
  end

  def quit(message)
    say "QUIT :#{message}"
  end

  def names(dest)
    say "NAMES #{dest}"
  end

  def identify(nickserv_pass)
    say "PRIVMSG nickserv :identify #{nickserv_pass}"
  end

  # Perform the login handshake, optionally identifying with NickServ.
  def auth(nickserv_pass)
    say "VERSION"
    say "USER #{@user_name} * * :#{@real_name}"
    nick(@nick)
    if nickserv_pass != "" and nickserv_pass != nil
      identify(nickserv_pass)
    end
  end

  # Read one line from the server, transparently answering PINGs.
  # Returns "PING" for ping lines, the raw line otherwise, nil on EOF.
  def read
    if !@socket.eof
      msg = @socket.gets
      if msg.match(/^PING :(.*)$/)
        say "PONG #{$~[1]}"
        return "PING"
      end
      return msg
    else
      return nil
    end
  end

  # Parse a raw IRC line into an IRC_message. Private messages sent
  # directly to the bot are re-targeted at the sender's nick so replies
  # go to the right place.
  def parse(msg)
    message_reg = msg.match(/^(:(?<prefix>\S+) )?(?<command>\S+)( (?!:)(?<params>.+?))?( :(?<trail>.+))?$/)
    nick_n = message_reg[:prefix].to_s.split("!")[0]
    command = message_reg[:command].to_s
    chan = message_reg[:params].to_s
    message = message_reg[:trail].to_s
    message = message.chomp
    if chan == @nick then chan = nick_n end
    ircmsg = IRC_message.new(command, nick_n, chan, message, msg)
    return ircmsg
  end

  def add_admin(nick)
    @admins.push(nick)
  end

  def remove_admin(nick)
    @admins.delete_if { |a| a == nick }
  end

  def add_ignore(nick)
    @ignore.push(nick)
  end

  def remove_ignore(nick)
    @ignore.delete_if { |a| a == nick }
  end

  # Register a handler block for a hook type (:message, :command, :ircmsg).
  def on(type, &block)
    type = type.to_s
    @hooks[type] ||= []
    @hooks[type] << block
  end

  def set_admins(admins_s)
    admins_s.each { |a| self.add_admin(a) }
  end

  def join_channels(channels_s)
    channels_s.each { |a| self.join(a) }
  end

  def create_log
    if !File.exist?("./log")
      File.open("./log", "w+") { |fw| fw.write("Command and Privmsg LOGS") }
    end
  end

  # Connect, authenticate, join channels and install the built-in hooks
  # (private-message logging, backlog collection, admin kill switch).
  def setup(use_ssl, use_pass, pass, nickserv_pass, channels_s)
    self.connect
    if use_ssl then self.connect_ssl end
    if use_pass then self.connect_pass(pass) end
    self.auth(nickserv_pass)
    self.create_log
    self.join_channels(channels_s)
    self.on :message do |msg|
      if msg.channel == msg.nick
        File.write("./log", msg.ircmsg, File.size("./log"), mode: 'a')
      end
      # FIX: the original condition was `!self.nick_name == msg.nick`,
      # which Ruby parses as `(!@nick) == msg.nick` — always false — so
      # the backlog was never populated. Compare the nicks directly.
      if msg.nick != self.nick_name and !self.ignore.include? msg.nick
        @backlog.push(msg)
      end
    end
    self.on :message do |msg|
      if self.admins.include? msg.nick and msg.message_regex(/^`plsgo$/) then abort end
    end
  end

  # Main loop: read, parse and fan each line out to the registered hooks.
  # Hook exceptions are intentionally swallowed so a broken handler
  # cannot kill the connection.
  def start!
    until self.socket.eof? do
      ircmsg = self.read
      msg = self.parse(ircmsg)
      if ircmsg == "PING" or self.ignore.include?(msg.nick) then next end
      begin
        hooks = @hooks['message']
        if hooks != nil
          hooks.each { |h| h.call(msg) }
        end
      rescue => e
        # do not do anything
      end
      begin
        hooks = @hooks['command']
        if hooks != nil
          hooks.each { |h| h.call(msg.channel, msg.command) }
        end
      rescue => e
        # do not do anything
      end
      begin
        hooks = @hooks['ircmsg']
        if hooks != nil
          hooks.each { |h| h.call(msg.nick, msg.command, msg.channel, msg.message) }
        end
      rescue => e
        # do not do anything
      end
    end
  end
end
# Registers command regexes with handler blocks and dispatches each
# incoming message to every handler whose regex matches.
class Commands_manager
  def initialize
    @reg_s = []
    @hook_s = []
    @size = 0
  end

  # Register a handler: +reg+ is coerced to a Regexp; +block+ is invoked
  # as block.call(bot, msg, plugins) when a matching message arrives.
  # FIX: removed the leftover `puts block` debug output.
  def on(reg, &block)
    reg = Regexp.new(reg.to_s)
    @reg_s.push(reg)
    @hook_s << block
    @size += 1
  end

  # Run every registered hook whose regex matches +msg+ (an IRC_message).
  # FIX: the original referenced the undefined bare locals reg_s/hook_s
  # (missing @) inside leftover `p` debug calls, raising NameError on
  # every dispatch; the debug lines are removed and the ivars used.
  def check_all(bot, msg, plugins)
    0.upto(@size - 1) do |i|
      if msg.message_regex(@reg_s[i])
        @hook_s[i].call(bot, msg, plugins)
      end
    end
  end

  def hooks
    @hook_s
  end

  def regexes
    @reg_s
  end

  def size
    @size
  end
end
|
# roto.rb - simple Ruby rotation module v1.0
# 2D/3D rotation of arbitrary point in the space.
# version 1.0 released on December 21, 2014
'''
The MIT License (MIT)
Copyright (c) 2014 Jaime Ortiz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
module Roto
def Roto.info()
return "rotor - rotates a given point around an arbitrary defined axis"
end
# Vector and Quaternion algebra
def Roto.vectorCrossProduct( u, v )
"""Returns the vector resulting from the cross product between two vectors"""
return [ u[1] * v[2] - u[2] * v[1], u[2] * v[0] - u[0] * v[2], u[0] * v[1] - u[1] * v[0] ]
end
def Roto.vectorDotProduct( v0, v1 )
"""Returns the scalar quantity representing the dot product of two vectors"""
return v0[0] * v1[0] + v0[1] * v1[1] + v0[2] * v1[2]
end
def Roto.vectorSum( v0, v1 )
"""Returns the sum of two vectors"""
return [ v0[0] + v1[0], v0[1] + v1[1], v0[2] + v1[2] ]
end
def Roto.vectorScaling( scale, v )
"""Returns the multiplication of a vector and a scalar"""
return [ scale * v[0], scale * v[1], scale * v[2] ]
end
def Roto.vectorMagnitude( v )
"""Returns the magnitude of the vector"""
return ( ( v[0] ** 2 + v[1] ** 2 + v[2] ** 2 ) ** 0.5 )
end
def Roto.vectorNormalized( v )
"""Returns de normalized vector"""
v_mag = Roto.vectorMagnitude( v );
return [ v[0] / v_mag, v[1] / v_mag, v[2] / v_mag ]
end
def Roto.angleBetween2VectorsRad( a, b )
a_mag = Roto.vectorMagnitude( a )
b_mag = Roto.vectorMagnitude( b )
adotb = Roto.vectorDotProduct( a, b )
return Math.acos( adotb / ( a_mag * b_mag ) )
end
def Roto.angleBetween2VectorsDeg( a, b )
return Roto.angleBetween2VectorsRad( a, b ) * 180.0 / Math::PI
end
def Roto.quaternionDotProduct( q0, q1 )
"""Returns the scalar quantiry representing the dot product of two vectors"""
return q0[0] * q1[0] + q0[1] * q1[1] + q0[2] * q1[2] + q0[3] * q1[3]
end
def Roto.quaternionProduct( q0, q1 )
s0 = q0[0]
s1 = q1[0]
v0 = [ q0[1], q0[2], q0[3] ]
v1 = [ q1[1], q1[2], q1[3] ]
real_part = s0 * s1 - Roto.vectorDotProduct( v0, v1 )
vector_scaling_1 = Roto.vectorScaling( s0, v1 )
vector_scaling_2 = Roto.vectorScaling( s1, v0 )
vector_cross_product_1 = Roto.vectorCrossProduct( v0, v1 )
vector_sum_1 = Roto.vectorSum( vector_scaling_1, vector_scaling_2 )
vector_sum_2 = Roto.vectorSum( vector_sum_1, vector_cross_product_1 )
return[ real_part, vector_sum_2[0], vector_sum_2[1], vector_sum_2[2] ]
end
def Roto.quaternionMagnitude( q )
"""Returns the magnitude of a quaternion"""
return ( ( q[0] ** 2 + q[1] ** 2 + q[2] ** 2 + q[3] ** 2 ) ** 0.5 )
end
def Roto.quaternionInverse( q )
"""Returns the inverse of a quaternion"""
return ( [ q[0], -q[1], -q[2], -q[3] ] )
end
def Roto.quaternionRotor( v, phi )
"""Returns the quaternion representing the rotation around the vector v by an angle phi expressed in radians"""
return [ Math.cos( phi / 2.0 ),
Math.sin( phi / 2.0 ) * v[0],
Math.sin( phi / 2.0 ) * v[1],
Math.sin( phi / 2.0 ) * v[2] ]
end
def Roto.deg2rad( angle_deg )
"""Converts the given angle to radians"""
return angle_deg * 2.0 * Math::PI / 360.0
end
# === Rotation functions ===
def Roto.rotate( p0, angle, v )
"""Rotates an arbitrary point p0 around an arbitrary axis v by an angle expessed in degrees"""
v = Roto.vectorNormalized( v )
p = [ 0, p0[0], p0[1], p0[2] ]
angle_rad = Roto.deg2rad( angle )
q = Roto.quaternionRotor( v, angle_rad )
invq = Roto.quaternionInverse( q )
qp = Roto.quaternionProduct( q, p )
qpinvq = Roto.quaternionProduct( qp, invq )
return [ qpinvq[ 1 ], qpinvq[ 2 ], qpinvq[ 3 ] ]
end
def Roto.rotateX( p0, angle )
"""Rotates an arbitrary point p0 around the X axis by an angle expressed in degrees"""
q1 = Roto.rotate( p0, angle, [ 1, 0, 0 ] )
return [ q1[ 0 ], q1[ 1 ], q1[ 2 ] ]
end
def Roto.rotateY( p0, angle )
"""Rotates an arbitrary point p0 around the Y axis by an angle expressed in degrees"""
q1 = Roto.rotate( p0, angle, [ 0, 1, 0 ] )
return [ q1[ 0 ], q1[ 1 ], q1[ 2 ] ]
end
def Roto.rotateZ( p0, angle )
"""Rotates an arbitrary point p0 around the Z axis by an angle expressed in degrees"""
q1 = Roto.rotate( p0, angle, [ 0, 0, 1 ] )
return [ q1[ 0 ], q1[ 1 ], q1[ 2 ] ]
end
end
changed some variable names, added rad2deg function
# roto.rb - simple Ruby rotation module v1.0
# 2D/3D rotation of arbitrary point in the space.
# version 1.0 released on December 21, 2014
'''
The MIT License (MIT)
Copyright (c) 2014 Jaime Ortiz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
module Roto
def Roto.info()
return "rotor - rotates a given point around an arbitrary defined axis"
end
# Vector and Quaternion algebra
def Roto.vectorCrossProduct( u, v )
"""Returns the vector resulting from the cross product between two vectors"""
return [ u[1] * v[2] - u[2] * v[1], u[2] * v[0] - u[0] * v[2], u[0] * v[1] - u[1] * v[0] ]
end
def Roto.vectorDotProduct( u, v )
"""Returns the scalar quantity representing the dot product of two vectors"""
return u[0] * v[0] + u[1] * v[1] + u[2] * v[2]
end
def Roto.vectorSum( u, v )
"""Returns the sum of two vectors"""
return [ u[0] + v[0], u[1] + v[1], u[2] + v[2] ]
end
def Roto.vectorScaling( scale, v )
"""Returns the multiplication of a vector and a scalar"""
return [ scale * v[0], scale * v[1], scale * v[2] ]
end
def Roto.vectorMagnitude( v )
"""Returns the magnitude of the vector"""
return ( ( v[0] ** 2 + v[1] ** 2 + v[2] ** 2 ) ** 0.5 )
end
def Roto.vectorNormalized( v )
"""Returns de normalized vector"""
v_mag = Roto.vectorMagnitude( v );
return [ v[0] / v_mag, v[1] / v_mag, v[2] / v_mag ]
end
def Roto.angleBetween2VectorsRad( u, v )
u_mag = Roto.vectorMagnitude( u )
v_mag = Roto.vectorMagnitude( v )
udotv = Roto.vectorDotProduct( u, v )
return Math.acos( udotv / ( u_mag * v_mag ) )
end
def Roto.angleBetween2VectorsDeg( u, v )
return Roto.rad2deg( Roto.angleBetween2VectorsRad( u, v ) )
end
def Roto.quaternionDotProduct( q0, q1 )
"""Returns the scalar quantiry representing the dot product of two vectors"""
return q0[0] * q1[0] + q0[1] * q1[1] + q0[2] * q1[2] + q0[3] * q1[3]
end
def Roto.quaternionProduct( q0, q1 )
s0 = q0[0]
s1 = q1[0]
v0 = [ q0[1], q0[2], q0[3] ]
v1 = [ q1[1], q1[2], q1[3] ]
real_part = s0 * s1 - Roto.vectorDotProduct( v0, v1 )
vector_scaling_1 = Roto.vectorScaling( s0, v1 )
vector_scaling_2 = Roto.vectorScaling( s1, v0 )
vector_cross_product_1 = Roto.vectorCrossProduct( v0, v1 )
vector_sum_1 = Roto.vectorSum( vector_scaling_1, vector_scaling_2 )
vector_sum_2 = Roto.vectorSum( vector_sum_1, vector_cross_product_1 )
return[ real_part, vector_sum_2[0], vector_sum_2[1], vector_sum_2[2] ]
end
def Roto.quaternionMagnitude( q )
"""Returns the magnitude of a quaternion"""
return ( ( q[0] ** 2 + q[1] ** 2 + q[2] ** 2 + q[3] ** 2 ) ** 0.5 )
end
def Roto.quaternionInverse( q )
"""Returns the inverse of a quaternion"""
return ( [ q[0], -q[1], -q[2], -q[3] ] )
end
def Roto.quaternionRotor( v, phi )
"""Returns the quaternion representing the rotation around the vector v by an angle phi expressed in radians"""
return [ Math.cos( phi / 2.0 ),
Math.sin( phi / 2.0 ) * v[0],
Math.sin( phi / 2.0 ) * v[1],
Math.sin( phi / 2.0 ) * v[2] ]
end
def Roto.deg2rad( angle_deg )
"""Converts the given angle in degrees to radians"""
return angle_deg * Math::PI / 180.0
end
def Roto.rad2deg( angle_rad )
"""Converts the given angle in radians to degrees"""
return angle_rad * 180.0 / Math::PI
end
# === Rotation functions ===
def Roto.rotate( p0, angle, v )
"""Rotates an arbitrary point p0 around an arbitrary axis v by an angle expessed in degrees"""
v = Roto.vectorNormalized( v )
p = [ 0, p0[0], p0[1], p0[2] ]
angle_rad = Roto.deg2rad( angle )
q = Roto.quaternionRotor( v, angle_rad )
invq = Roto.quaternionInverse( q )
qp = Roto.quaternionProduct( q, p )
qpinvq = Roto.quaternionProduct( qp, invq )
return [ qpinvq[ 1 ], qpinvq[ 2 ], qpinvq[ 3 ] ]
end
def Roto.rotateX( p0, angle )
"""Rotates an arbitrary point p0 around the X axis by an angle expressed in degrees"""
q1 = Roto.rotate( p0, angle, [ 1, 0, 0 ] )
return [ q1[ 0 ], q1[ 1 ], q1[ 2 ] ]
end
def Roto.rotateY( p0, angle )
"""Rotates an arbitrary point p0 around the Y axis by an angle expressed in degrees"""
q1 = Roto.rotate( p0, angle, [ 0, 1, 0 ] )
return [ q1[ 0 ], q1[ 1 ], q1[ 2 ] ]
end
def Roto.rotateZ( p0, angle )
"""Rotates an arbitrary point p0 around the Z axis by an angle expressed in degrees"""
q1 = Roto.rotate( p0, angle, [ 0, 0, 1 ] )
return [ q1[ 0 ], q1[ 1 ], q1[ 2 ] ]
end
end
|
require 'formula'
# Homebrew formula building rpm.org's RPM 4.11.1 (conflicts with the other
# fork packaged as 'rpm'). The inline patch after __END__ renames
# setprogname to xsetprogname and maps fdatasync to fsync for OS X.
class Rpm4 < Formula
  homepage 'http://www.rpm.org/'
  url 'http://rpm.org/releases/rpm-4.11.x/rpm-4.11.1.tar.bz2'
  sha1 '31ddc4185137ce3f718c99e91dcb040614fe820c'
  depends_on 'pkg-config' => :build
  depends_on 'nss'
  depends_on 'nspr'
  depends_on 'libmagic'
  depends_on 'popt'
  depends_on 'lua'
  depends_on 'berkeley-db'
  depends_on 'xz'
  depends_on :python
  conflicts_with 'rpm', :because => 'These are two different forks of the same tool.'

  # Supplies the patch embedded after __END__ in this file.
  def patches
    DATA
  end

  def install
    # some of nss/nspr formulae might be keg-only:
    ENV.append 'CPPFLAGS', "-I#{Formula.factory('nss').include}/nss"
    ENV.append 'CPPFLAGS', "-I#{Formula.factory('nspr').include}/nspr"
    ENV.append 'LDFLAGS', "-L#{python.libdir}"
    # pkg-config support was removed from lua 5.2:
    ENV['LUA_CFLAGS'] = "-I#{HOMEBREW_PREFIX}/include"
    ENV['LUA_LIBS'] = "-L#{HOMEBREW_PREFIX}/lib -llua"
    ENV['__PYTHON'] = python.binary
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --sysconfdir=#{HOMEBREW_PREFIX}/etc
      --localstatedir=#{HOMEBREW_PREFIX}/var
      --with-external-db
      --with-lua
      --without-hackingdocs
      --enable-python
    ]
    system './configure', *args
    system "make"
    system "make install"
    # the default install makes /usr/bin/rpmquery a symlink to /bin/rpm
    # by using ../.. but that doesn't really work with any other prefix.
    ln_sf "rpm", "#{bin}/rpmquery"
    ln_sf "rpm", "#{bin}/rpmverify"
  end
end
__END__
diff --git a/lib/poptALL.c b/lib/poptALL.c
index 541e8c4..5cecc2a 100644
--- a/lib/poptALL.c
+++ b/lib/poptALL.c
@@ -244,7 +244,7 @@ rpmcliInit(int argc, char *const argv[], struct poptOption * optionsTable)
int rc;
const char *ctx, *execPath;
- setprogname(argv[0]); /* Retrofit glibc __progname */
+ xsetprogname(argv[0]); /* Retrofit glibc __progname */
/* XXX glibc churn sanity */
if (__progname == NULL) {
diff --git a/rpm2cpio.c b/rpm2cpio.c
index 89ebdfa..f35c7c8 100644
--- a/rpm2cpio.c
+++ b/rpm2cpio.c
@@ -21,7 +21,7 @@ int main(int argc, char *argv[])
off_t payload_size;
FD_t gzdi;
- setprogname(argv[0]); /* Retrofit glibc __progname */
+ xsetprogname(argv[0]); /* Retrofit glibc __progname */
rpmReadConfigFiles(NULL, NULL);
if (argc == 1)
fdi = fdDup(STDIN_FILENO);
diff --git a/rpmqv.c b/rpmqv.c
index da5f2ca..d033d21 100644
--- a/rpmqv.c
+++ b/rpmqv.c
@@ -92,8 +92,8 @@ int main(int argc, char *argv[])
/* Set the major mode based on argv[0] */
#ifdef IAM_RPMQV
- if (rstreq(__progname, "rpmquery")) bigMode = MODE_QUERY;
- if (rstreq(__progname, "rpmverify")) bigMode = MODE_VERIFY;
+ if (rstreq(__progname ? __progname : "", "rpmquery")) bigMode = MODE_QUERY;
+ if (rstreq(__progname ? __progname : "", "rpmverify")) bigMode = MODE_VERIFY;
#endif
#if defined(IAM_RPMQV)
diff --git a/system.h b/system.h
index f3b1bab..bf264f5 100644
--- a/system.h
+++ b/system.h
@@ -21,6 +21,7 @@
#ifdef __APPLE__
#include <crt_externs.h>
#define environ (*_NSGetEnviron())
+#define fdatasync fsync
#else
extern char ** environ;
#endif /* __APPLE__ */
@@ -116,10 +117,10 @@ typedef char * security_context_t;
#if __GLIBC_MINOR__ >= 1
#define __progname __assert_program_name
#endif
-#define setprogname(pn)
+#define xsetprogname(pn)
#else
#define __progname program_name
-#define setprogname(pn) \
+#define xsetprogname(pn) \
{ if ((__progname = strrchr(pn, '/')) != NULL) __progname++; \
else __progname = pn; \
}
Fix for changed python behavior in homebrew
require 'formula'
# NOTE(review): a stray garbled fragment ("n -c 'import sys;...'") followed
# this require; it was not valid Ruby at runtime (NameError on `n`) and has
# been removed. The python-version probe it came from lives in #install below.
# Homebrew formula building rpm.org's RPM 4.11.1 (conflicts with the other
# fork packaged as 'rpm'). This revision probes the system python directly
# because Homebrew's python DSL (python.libdir / python.binary) changed.
# The inline patch after __END__ renames setprogname to xsetprogname and
# maps fdatasync to fsync for OS X.
class Rpm4 < Formula
  homepage 'http://www.rpm.org/'
  url 'http://rpm.org/releases/rpm-4.11.x/rpm-4.11.1.tar.bz2'
  sha1 '31ddc4185137ce3f718c99e91dcb040614fe820c'
  depends_on 'pkg-config' => :build
  depends_on 'nss'
  depends_on 'nspr'
  depends_on 'libmagic'
  depends_on 'popt'
  depends_on 'lua'
  depends_on 'berkeley-db'
  depends_on 'xz'
  depends_on :python
  conflicts_with 'rpm', :because => 'These are two different forks of the same tool.'

  # Supplies the patch embedded after __END__ in this file.
  def patches
    DATA
  end

  def install
    # Probe the active python directly instead of Homebrew's removed DSL.
    pyvers = "python" + %x(python -c 'import sys;print(sys.version[:3])').chomp
    pypref = %x(python-config --prefix).chomp
    # BUGFIX: -L expects a directory; the original passed the dylib file
    # path ("#{pypref}/lib/lib#{pyvers}.dylib") here.
    pylibdir = "#{pypref}/lib"
    pybin = "#{pypref}/bin/python"
    # some of nss/nspr formulae might be keg-only:
    ENV.append 'CPPFLAGS', "-I#{Formula.factory('nss').include}/nss"
    ENV.append 'CPPFLAGS', "-I#{Formula.factory('nspr').include}/nspr"
    ENV.append 'LDFLAGS', "-L#{pylibdir}"
    # pkg-config support was removed from lua 5.2:
    ENV['LUA_CFLAGS'] = "-I#{HOMEBREW_PREFIX}/include"
    ENV['LUA_LIBS'] = "-L#{HOMEBREW_PREFIX}/lib -llua"
    # BUGFIX: the original read the undefined global $pybin (always nil);
    # the probed path lives in the local `pybin`.
    ENV['__PYTHON'] = pybin
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --sysconfdir=#{HOMEBREW_PREFIX}/etc
      --localstatedir=#{HOMEBREW_PREFIX}/var
      --with-external-db
      --with-lua
      --without-hackingdocs
      --enable-python
    ]
    system './configure', *args
    system "make"
    system "make install"
    # the default install makes /usr/bin/rpmquery a symlink to /bin/rpm
    # by using ../.. but that doesn't really work with any other prefix.
    ln_sf "rpm", "#{bin}/rpmquery"
    ln_sf "rpm", "#{bin}/rpmverify"
  end
end
__END__
diff --git a/lib/poptALL.c b/lib/poptALL.c
index 541e8c4..5cecc2a 100644
--- a/lib/poptALL.c
+++ b/lib/poptALL.c
@@ -244,7 +244,7 @@ rpmcliInit(int argc, char *const argv[], struct poptOption * optionsTable)
int rc;
const char *ctx, *execPath;
- setprogname(argv[0]); /* Retrofit glibc __progname */
+ xsetprogname(argv[0]); /* Retrofit glibc __progname */
/* XXX glibc churn sanity */
if (__progname == NULL) {
diff --git a/rpm2cpio.c b/rpm2cpio.c
index 89ebdfa..f35c7c8 100644
--- a/rpm2cpio.c
+++ b/rpm2cpio.c
@@ -21,7 +21,7 @@ int main(int argc, char *argv[])
off_t payload_size;
FD_t gzdi;
- setprogname(argv[0]); /* Retrofit glibc __progname */
+ xsetprogname(argv[0]); /* Retrofit glibc __progname */
rpmReadConfigFiles(NULL, NULL);
if (argc == 1)
fdi = fdDup(STDIN_FILENO);
diff --git a/rpmqv.c b/rpmqv.c
index da5f2ca..d033d21 100644
--- a/rpmqv.c
+++ b/rpmqv.c
@@ -92,8 +92,8 @@ int main(int argc, char *argv[])
/* Set the major mode based on argv[0] */
#ifdef IAM_RPMQV
- if (rstreq(__progname, "rpmquery")) bigMode = MODE_QUERY;
- if (rstreq(__progname, "rpmverify")) bigMode = MODE_VERIFY;
+ if (rstreq(__progname ? __progname : "", "rpmquery")) bigMode = MODE_QUERY;
+ if (rstreq(__progname ? __progname : "", "rpmverify")) bigMode = MODE_VERIFY;
#endif
#if defined(IAM_RPMQV)
diff --git a/system.h b/system.h
index f3b1bab..bf264f5 100644
--- a/system.h
+++ b/system.h
@@ -21,6 +21,7 @@
#ifdef __APPLE__
#include <crt_externs.h>
#define environ (*_NSGetEnviron())
+#define fdatasync fsync
#else
extern char ** environ;
#endif /* __APPLE__ */
@@ -116,10 +117,10 @@ typedef char * security_context_t;
#if __GLIBC_MINOR__ >= 1
#define __progname __assert_program_name
#endif
-#define setprogname(pn)
+#define xsetprogname(pn)
#else
#define __progname program_name
-#define setprogname(pn) \
+#define xsetprogname(pn) \
{ if ((__progname = strrchr(pn, '/')) != NULL) __progname++; \
else __progname = pn; \
}
|
# encoding: UTF-8
# Author:: Akira FUNAI
# Copyright:: Copyright (c) 2009 Akira FUNAI
# Sofa: Rack application entry point. Dispatches requests to login/logout/
# get/confirm/post handlers, tracks in-progress edits per transaction id
# (tid) in the session, and renders Rack response triples.
class Sofa
  Dir['./sofa/*.rb'].sort.each {|file| require file }

  # Regular expressions shared across the application.
  module REX
    ID_SHORT = /[a-z][a-z0-9\_\-]*/
    ID = /^(\d{8})_(\d{4,}|#{ID_SHORT})/
    ID_NEW = /^_\d/
    COND = /^(.+?)=(.+)$/
    COND_D = /^(19\d\d|2\d\d\d|9999)\d{0,4}$/
    PATH_ID = /\/((?:19|2\d)\d{6})\/(\d+)/
    TID = /\d{10}\.\d+/
  end

  # Lazily loads ./sofa.yaml and returns the config value for +name+.
  def self.[](name)
    @config ||= YAML.load_file './sofa.yaml'
    @config[name]
  end

  # Per-request storage keyed off the current thread.
  def self.current
    Thread.current
  end

  # The Rack session, or a process-global fake session (e.g. outside Rack).
  def self.session
    self.current[:session] || ($fake_session ||= {})
  end

  # Hash of in-progress transactions, keyed by tid, stored in the session.
  def self.transaction
    self.session[:transaction] ||= {}
  end

  # Current client id; defaults to 'nobody' when not logged in.
  def self.client
    self.session[:client] ||= 'nobody'
  end

  def self.client=(id)
    self.session[:client] = id
  end

  # The base field/folder the current request is operating on.
  def self.base
    self.current[:base]
  end

  # Rack entry point: resolves the base from the transaction (by tid) or
  # from the path, then dispatches on action/method. A Forbidden error
  # re-renders as a login form (422) or a plain 403.
  def call(env)
    req = Rack::Request.new env
    method = req.request_method.downcase
    params = params_from_request req
    path = req.path_info
    tid = Sofa::Path.tid_of path
    Sofa.current[:env] = env
    Sofa.current[:req] = req
    Sofa.current[:session] = env['rack.session']
    base = Sofa.transaction[tid] || Sofa::Path.base_of(path)
    return response_not_found unless base
    base[:tid] = tid
    Sofa.current[:base] = base
    begin
      if params[:action] == :logout
        logout(base,params)
      elsif method == 'get'
        get(base,params)
      elsif params[:action] == :login
        login(base,params)
      elsif params[:action] == :confirm
        confirm(base,params)
      else
        post(base,params)
      end
    rescue Sofa::Error::Forbidden
      # Anonymous client: turn the failed action into a login prompt,
      # remembering where to go after a successful login.
      if params[:action] && Sofa.client == 'nobody'
        params[:dest_action] = (method == 'post') ? :index : params[:action]
        params[:action] = :login
      end
      begin
        response_unprocessable_entity :body => _get(base,params)
      rescue Sofa::Error::Forbidden
        response_forbidden
      end
    end
  end

  private

  # Authenticates against the '_users' folder (crypt()-hashed password),
  # then redirects to the requested destination action.
  def login(base,params)
    user = Sofa::Set::Static::Folder.root.item('_users','main',params['id'].to_s)
    if user && params['pw'].to_s.crypt(user.val('password')) == user.val('password')
      Sofa.client = params['id']
    else
      Sofa.client = nil
      raise Sofa::Error::Forbidden
    end
    path = Sofa::Path.path_of params[:conds]
    # Only allow word-character action names from the form input.
    action = (params['dest_action'] =~ /\A\w+\z/) ? params['dest_action'] : 'index'
    response_see_other(
      :location => "#{base[:path]}/#{path}#{action}.html"
    )
  end

  # Clears the client and redirects back to the index view.
  def logout(base,params)
    Sofa.client = nil
    path = Sofa::Path.path_of params[:conds]
    response_see_other(
      :location => "#{base[:path]}/#{path}index.html"
    )
  end

  # Plain read: renders the base with the given params.
  def get(base,params)
    response_ok :body => _get(base,params)
  end

  # Stores pending updates under the tid and redirects to a confirmation
  # view; on validation failure re-renders the update form (422).
  def confirm(base,params)
    Sofa.transaction[base[:tid]] ||= base
    base.update params
    base.commit :temp
    if base.result
      action = "confirm_#{params[:sub_action]}"
      id_step = Sofa::Path.path_of(
        :id => base.result.values.collect {|item| item[:id] }
      )
      response_see_other(
        :location => base[:path] + "/#{base[:tid]}/#{id_step}#{action}.html"
      )
    else
      params = {:action => :update}
      params[:conds] = {:id => base.send(:pending_items).keys}
      return response_unprocessable_entity(:body => _get(base,params))
    end
  end

  # Applies updates; with params[:status] commits persistently and follows
  # the workflow's next action, otherwise keeps the edit pending under the
  # tid and redirects back to the update form.
  def post(base,params)
    base.update params
    if params[:status]
      base[:folder].commit :persistent
      if base.result
        Sofa.transaction[base[:tid]] = nil
        action = base.workflow.next_action params
        id_step = Sofa::Path.path_of(
          :id => base.result.values.collect {|item| item[:id] }
        ) if base[:parent] == base[:folder] && action != :done
        response_see_other(
          :location => base[:path] + "/#{base[:tid]}/#{id_step}#{action}.html"
        )
      else
        params = {:action => :update}
        params[:conds] = {:id => base.send(:pending_items).keys}
        response_unprocessable_entity :body => _get(base,params)
      end
    else
      Sofa.transaction[base[:tid]] ||= base
      id_step = Sofa::Path.path_of(:id => base.send(:pending_items).keys)
      base.commit :temp
      response_see_other(
        :location => base[:path] + "/#{base[:tid]}/#{id_step}update.html"
      )
    end
  end

  # Walks up from a dynamic set to its enclosing folder, nesting params so
  # the folder renders the originally-requested field.
  def _get(f,params)
    until f.is_a? Sofa::Set::Static::Folder
      params = {
        :action => (f.default_action == :read) ? :read : nil,
        :sub_action => f.send(:summary?,params) ? nil : :detail,
        f[:id] => params,
      }
      params[:conds] = {:id => f[:id]} if f[:parent].is_a? Sofa::Set::Dynamic
      f = f[:parent]
    end if f.is_a? Sofa::Set::Dynamic
    f.get params
  end

  # Builds the params hash from the path (action/sub_action/conds) merged
  # with the rebuilt form parameters.
  def params_from_request(req)
    params = {
      :action => Sofa::Path.action_of(req.path_info),
      :sub_action => Sofa::Path.sub_action_of(req.path_info),
    }
    params.merge!(rebuild_params req.params)
    params[:conds] ||= {}
    params[:conds].merge!(Sofa::Path.conds_of req.path_info)
    params
  end

  # Rebuilds nested params from flat form field names: hyphens in a name
  # denote nesting steps, and a ".suffix" marks special keys (action,
  # status, conds) that become symbol entries on the enclosing hash.
  def rebuild_params(src)
    src.each_key.sort.reverse.inject({}) {|params,key|
      name,special = key.split('.',2)
      steps = name.split '-'
      if special
        special_id,special_val = special.split('-',2)
      else
        item_id = steps.pop
      end
      hash = steps.inject(params) {|v,k| v[k] ||= {} }
      val = src[key]
      if special_id == 'action'
        action,sub_action = (special_val || val).split('_',2)
        hash[:action] = action.intern
        hash[:sub_action] = sub_action.intern if sub_action
      elsif special_id == 'status'
        hash[:status] = (special_val || val).intern
      elsif special_id == 'conds'
        hash[:conds] ||= {}
        hash[:conds][special_val.intern] = val
      elsif hash[item_id].is_a? ::Hash
        # A nested hash already exists for this id; store the scalar
        # under :self rather than clobbering the children.
        hash[item_id][:self] = val
      else
        hash[item_id] = val
      end
      params
    }
  end

  # --- Rack response triples ---

  def response_ok(result = {})
    [
      200,
      (
        result[:headers] ||
        {
          'Content-Type' => 'text/html',
          'Content-Length' => result[:body].size.to_s,
        }
      ),
      result[:body],
    ]
  end

  def response_no_content(result = {})
    [
      204,
      (result[:headers] || {}),
      []
    ]
  end

  # 303 redirect with a minimal HTML body linking to the new location.
  # NOTE(review): the host is hard-coded to localhost:9292 - confirm this
  # is development-only.
  def response_see_other(result = {})
    location = 'http://localhost:9292' + result[:location]
    body = <<_html
<a href="#{location}">updated</a>
_html
    [
      303,
      {
        'Content-Type' => 'text/html',
        'Content-Length' => body.size.to_s,
        'Location' => location,
      },
      body
    ]
  end

  def response_forbidden(result = {})
    [
      403,
      {},
      result[:body] || 'Forbidden'
    ]
  end

  def response_not_found(result = {})
    [
      404,
      {},
      'Not Found'
    ]
  end

  def response_unprocessable_entity(result = {})
    [
      422,
      (
        result[:headers] ||
        {
          'Content-Type' => 'text/html',
          'Content-Length' => result[:body].size.to_s,
        }
      ),
      result[:body],
    ]
  end
end
check if Sofa.transaction[tid] is a Sofa::Field (could be a message).
# encoding: UTF-8
# Author:: Akira FUNAI
# Copyright:: Copyright (c) 2009 Akira FUNAI
# Sofa: Rack application entry point (revision: the transaction lookup is
# only trusted when it holds a Sofa::Field, since the slot may carry a
# message instead). Dispatches requests to login/logout/get/confirm/post
# handlers and renders Rack response triples.
class Sofa
  Dir['./sofa/*.rb'].sort.each {|file| require file }

  # Regular expressions shared across the application.
  module REX
    ID_SHORT = /[a-z][a-z0-9\_\-]*/
    ID = /^(\d{8})_(\d{4,}|#{ID_SHORT})/
    ID_NEW = /^_\d/
    COND = /^(.+?)=(.+)$/
    COND_D = /^(19\d\d|2\d\d\d|9999)\d{0,4}$/
    PATH_ID = /\/((?:19|2\d)\d{6})\/(\d+)/
    TID = /\d{10}\.\d+/
  end

  # Lazily loads ./sofa.yaml and returns the config value for +name+.
  def self.[](name)
    @config ||= YAML.load_file './sofa.yaml'
    @config[name]
  end

  # Per-request storage keyed off the current thread.
  def self.current
    Thread.current
  end

  # The Rack session, or a process-global fake session (e.g. outside Rack).
  def self.session
    self.current[:session] || ($fake_session ||= {})
  end

  # Hash of in-progress transactions, keyed by tid, stored in the session.
  def self.transaction
    self.session[:transaction] ||= {}
  end

  # Current client id; defaults to 'nobody' when not logged in.
  def self.client
    self.session[:client] ||= 'nobody'
  end

  def self.client=(id)
    self.session[:client] = id
  end

  # The base field/folder the current request is operating on.
  def self.base
    self.current[:base]
  end

  # Rack entry point: resolves the base from the transaction (by tid) when
  # it is a real field, falling back to the path, then dispatches on
  # action/method. A Forbidden error re-renders as a login form (422) or a
  # plain 403.
  def call(env)
    req = Rack::Request.new env
    method = req.request_method.downcase
    params = params_from_request req
    path = req.path_info
    tid = Sofa::Path.tid_of path
    Sofa.current[:env] = env
    Sofa.current[:req] = req
    Sofa.current[:session] = env['rack.session']
    base = Sofa.transaction[tid]
    base = Sofa::Path.base_of(path) unless base.is_a? Sofa::Field # could be a message.
    return response_not_found unless base
    base[:tid] = tid
    Sofa.current[:base] = base
    begin
      if params[:action] == :logout
        logout(base,params)
      elsif method == 'get'
        get(base,params)
      elsif params[:action] == :login
        login(base,params)
      elsif params[:action] == :confirm
        confirm(base,params)
      else
        post(base,params)
      end
    rescue Sofa::Error::Forbidden
      # Anonymous client: turn the failed action into a login prompt,
      # remembering where to go after a successful login.
      if params[:action] && Sofa.client == 'nobody'
        params[:dest_action] = (method == 'post') ? :index : params[:action]
        params[:action] = :login
      end
      begin
        response_unprocessable_entity :body => _get(base,params)
      rescue Sofa::Error::Forbidden
        response_forbidden
      end
    end
  end

  private

  # Authenticates against the '_users' folder (crypt()-hashed password),
  # then redirects to the requested destination action.
  def login(base,params)
    user = Sofa::Set::Static::Folder.root.item('_users','main',params['id'].to_s)
    if user && params['pw'].to_s.crypt(user.val('password')) == user.val('password')
      Sofa.client = params['id']
    else
      Sofa.client = nil
      raise Sofa::Error::Forbidden
    end
    path = Sofa::Path.path_of params[:conds]
    # Only allow word-character action names from the form input.
    action = (params['dest_action'] =~ /\A\w+\z/) ? params['dest_action'] : 'index'
    response_see_other(
      :location => "#{base[:path]}/#{path}#{action}.html"
    )
  end

  # Clears the client and redirects back to the index view.
  def logout(base,params)
    Sofa.client = nil
    path = Sofa::Path.path_of params[:conds]
    response_see_other(
      :location => "#{base[:path]}/#{path}index.html"
    )
  end

  # Plain read: renders the base with the given params.
  def get(base,params)
    response_ok :body => _get(base,params)
  end

  # Stores pending updates under the tid and redirects to a confirmation
  # view; on validation failure re-renders the update form (422).
  def confirm(base,params)
    Sofa.transaction[base[:tid]] ||= base
    base.update params
    base.commit :temp
    if base.result
      action = "confirm_#{params[:sub_action]}"
      id_step = Sofa::Path.path_of(
        :id => base.result.values.collect {|item| item[:id] }
      )
      response_see_other(
        :location => base[:path] + "/#{base[:tid]}/#{id_step}#{action}.html"
      )
    else
      params = {:action => :update}
      params[:conds] = {:id => base.send(:pending_items).keys}
      return response_unprocessable_entity(:body => _get(base,params))
    end
  end

  # Applies updates; with params[:status] commits persistently and follows
  # the workflow's next action, otherwise keeps the edit pending under the
  # tid and redirects back to the update form.
  def post(base,params)
    base.update params
    if params[:status]
      base[:folder].commit :persistent
      if base.result
        Sofa.transaction[base[:tid]] = nil
        action = base.workflow.next_action params
        id_step = Sofa::Path.path_of(
          :id => base.result.values.collect {|item| item[:id] }
        ) if base[:parent] == base[:folder] && action != :done
        response_see_other(
          :location => base[:path] + "/#{base[:tid]}/#{id_step}#{action}.html"
        )
      else
        params = {:action => :update}
        params[:conds] = {:id => base.send(:pending_items).keys}
        response_unprocessable_entity :body => _get(base,params)
      end
    else
      Sofa.transaction[base[:tid]] ||= base
      id_step = Sofa::Path.path_of(:id => base.send(:pending_items).keys)
      base.commit :temp
      response_see_other(
        :location => base[:path] + "/#{base[:tid]}/#{id_step}update.html"
      )
    end
  end

  # Walks up from a dynamic set to its enclosing folder, nesting params so
  # the folder renders the originally-requested field.
  def _get(f,params)
    until f.is_a? Sofa::Set::Static::Folder
      params = {
        :action => (f.default_action == :read) ? :read : nil,
        :sub_action => f.send(:summary?,params) ? nil : :detail,
        f[:id] => params,
      }
      params[:conds] = {:id => f[:id]} if f[:parent].is_a? Sofa::Set::Dynamic
      f = f[:parent]
    end if f.is_a? Sofa::Set::Dynamic
    f.get params
  end

  # Builds the params hash from the path (action/sub_action/conds) merged
  # with the rebuilt form parameters.
  def params_from_request(req)
    params = {
      :action => Sofa::Path.action_of(req.path_info),
      :sub_action => Sofa::Path.sub_action_of(req.path_info),
    }
    params.merge!(rebuild_params req.params)
    params[:conds] ||= {}
    params[:conds].merge!(Sofa::Path.conds_of req.path_info)
    params
  end

  # Rebuilds nested params from flat form field names: hyphens in a name
  # denote nesting steps, and a ".suffix" marks special keys (action,
  # status, conds) that become symbol entries on the enclosing hash.
  def rebuild_params(src)
    src.each_key.sort.reverse.inject({}) {|params,key|
      name,special = key.split('.',2)
      steps = name.split '-'
      if special
        special_id,special_val = special.split('-',2)
      else
        item_id = steps.pop
      end
      hash = steps.inject(params) {|v,k| v[k] ||= {} }
      val = src[key]
      if special_id == 'action'
        action,sub_action = (special_val || val).split('_',2)
        hash[:action] = action.intern
        hash[:sub_action] = sub_action.intern if sub_action
      elsif special_id == 'status'
        hash[:status] = (special_val || val).intern
      elsif special_id == 'conds'
        hash[:conds] ||= {}
        hash[:conds][special_val.intern] = val
      elsif hash[item_id].is_a? ::Hash
        # A nested hash already exists for this id; store the scalar
        # under :self rather than clobbering the children.
        hash[item_id][:self] = val
      else
        hash[item_id] = val
      end
      params
    }
  end

  # --- Rack response triples ---

  def response_ok(result = {})
    [
      200,
      (
        result[:headers] ||
        {
          'Content-Type' => 'text/html',
          'Content-Length' => result[:body].size.to_s,
        }
      ),
      result[:body],
    ]
  end

  def response_no_content(result = {})
    [
      204,
      (result[:headers] || {}),
      []
    ]
  end

  # 303 redirect with a minimal HTML body linking to the new location.
  # NOTE(review): the host is hard-coded to localhost:9292 - confirm this
  # is development-only.
  def response_see_other(result = {})
    location = 'http://localhost:9292' + result[:location]
    body = <<_html
<a href="#{location}">updated</a>
_html
    [
      303,
      {
        'Content-Type' => 'text/html',
        'Content-Length' => body.size.to_s,
        'Location' => location,
      },
      body
    ]
  end

  def response_forbidden(result = {})
    [
      403,
      {},
      result[:body] || 'Forbidden'
    ]
  end

  def response_not_found(result = {})
    [
      404,
      {},
      'Not Found'
    ]
  end

  def response_unprocessable_entity(result = {})
    [
      422,
      (
        result[:headers] ||
        {
          'Content-Type' => 'text/html',
          'Content-Length' => result[:body].size.to_s,
        }
      ),
      result[:body],
    ]
  end
end
|
require 'splunk-sdk-ruby'
require 'json'
require 'httparty'
require 'em-http-request'
require 'digest/sha1'
require 'byebug'
#class ElasticSearch
# include HTTParty
# attr_accessor :host
#
# def initialize(host="http://np32.c1.dev:9200", index="gds", type="timing")
# @host = host
# @index = index
# @type = type
# end
#
# def url(index, type, id)
# "#{@host}/#{index}/#{type}/#{id}"
# end
#
# def request(id)
# url(@index, @type, id)
# end
#end
# GDS: parses a raw GDS timing log line into an indexable record and can
# POST it to the local collector via EventMachine.
# NOTE(review): attr_accessor :method shadows Object#method on instances -
# confirm nothing relies on the original Object#method here.
class GDS
  include HTTParty
  attr_accessor :id, :index, :host, :date, :listing_id, :method, :timing

  def initialize(host="http://np32.c1.dev:9292")
    @host = host
    @data = {}
    @tags = []
  end

  # Builds a GDS record from a raw log line shaped like
  # "[date][type] ... <listing_id> <message>". The id is a SHA1 over
  # date|listing_id|method, so re-imports of the same event are idempotent.
  def self.from_str(str)
    gds = GDS.new
    hsh = str.match(/\[(?<date>.*)\]\[(?<type>.*)\].* (?<listing_id>[0-9]*) (?<message>.*)/)
    msg = gds.massage(hsh["message"])
    # Index name carries just the date part, e.g. "gds-2014-01-01".
    gds.index = "gds-#{hsh["date"].sub(/ .*/, '')}"
    # "YYYY-MM-DD HH:MM:SS ..." -> "YYYY-MM-DDTHH:MM:SS"
    gds.date = hsh["date"].sub(/ /, "T").sub(/ .*/, "")
    gds.listing_id = hsh["listing_id"]
    gds.timing = msg[:timing]
    gds.method = msg[:method]
    gds.id = Digest::SHA1.hexdigest "#{gds.date}|#{gds.listing_id}|#{gds.method}"
    gds
  end

  # Extracts method name and timing from an ISO-8601-duration-style
  # message like "someMethod: PT0.5S".
  def massage(msg)
    execution=/(?<method>.*): PT(?<time>.*)S/
    hsh=msg.match(execution)
    {timing: hsh["time"], method: hsh["method"]}
  end

  # def from_json(json)
  # hsh = JSON.parse(json)
  # gds = GDS.new
  # gds.date = hsh["date"]
  # gds.data = hsh["data"]
  # gds.tags = hsh["tags"]
  # gds.index = hsh["index"]
  # gds
  # end

  # Serializes the record's public fields as JSON.
  def to_json(opts={})
    {id: id, date: date, host: host, listing_id: listing_id, method: method, index: index, timing: timing}.to_json(opts)
  end

  # Async POST to the collector; invokes callback with (response, iter)
  # on completion. No errback is attached here.
  def self.save(json,iter, callback)
    http = EventMachine::HttpRequest.new("http://np32.c1.dev:9292/entries/gds").post body: json
    http.callback { callback.call(http.response, iter)}
  end
end
# Flattens a GDS entry JSON string into a hash ready for indexing:
# merges the nested "data" hash into the top level, drops the
# "data"/"created_at"/"updated_at" bookkeeping keys, derives an "index"
# name from the date, normalises the date to "YYYY-MM-DDTHH:MM:SS" and
# coerces "timing" to a Float.
#
# Returns nil when the input cannot be parsed or lacks the expected keys
# (the original could return a half-normalised hash on mid-pipeline
# failure; failures now uniformly yield nil).
def massage2(json_str)
  ret = nil
  begin
    obj = JSON.parse(json_str)
    obj["data"].each { |k, v| obj[k] = v }
    ret = obj.delete_if { |k, _v| ["data", "created_at", "updated_at"].include?(k) }
    ret["index"] = "test-gds-#{ret["date"].sub(/ .*/, '')}"
    ret["date"].sub!(/ /, "T")
    ret["date"].sub!(/ .*/, "")
    ret["timing"] = ret["timing"].to_f
  rescue StandardError
    # Malformed entry: signal failure with nil rather than partial data.
    ret = nil
  end
  ret
end
# Connect to Splunk with CLI credentials: host, port, user, password.
service = Splunk::connect(
  scheme: :https,
  host: ARGV[0],
  port: ARGV[1].to_i,
  username: ARGV[2],
  password: ARGV[3]
)
service.login
# NOTE(review): GDS.new.latest is not defined in the GDS class visible in
# this file - confirm where #latest comes from.
start_date = ARGV[5] || (DateTime.strptime(GDS.new.latest.date, "%Y-%m-%d %H:%M:%S ")).iso8601
end_date = ARGV[6] || "now"
puts "getting splunk data between #{start_date} and #{end_date}"
# Export the search results (ARGV[4] is the search expression).
stream = service.create_export("search #{ARGV[4]}",
  earliest_time: start_date,
  latest_time: end_date
)
readers= Splunk::MultiResultsReader.new(stream)

# Elasticsearch document URL for a timing record.
def request(id, index)
  "http://np32.c1.dev:9200/#{index}/timing/#{id}"
end

cnt = 0
# Pump each Splunk result into Elasticsearch via async PUTs, at most 25
# in flight (EM::Iterator concurrency). Any HTTP error stops the loop.
EventMachine.run do
  puts "em loop"
  readers.each do |reader|
    puts 'reader'
    EM::Iterator.new(reader, 25).each do |result, iter|
      gds = GDS.from_str(result["_raw"])
      http = EventMachine::HttpRequest.new(request(gds.id, gds.index)).put body: gds.to_json()
      http.callback {
        iter.next
      }
      http.errback { print http.error; EM.stop }
      cnt += 1
      # Progress ticker: overwrite the previous status every 100 records.
      if cnt % 100 == 0
        40.times {|i| print "\b" }
        print "#{gds.date}: #{cnt}"
      end
    end
  end
end
print "\n#{cnt}"
print "done"
EM.stop
print "\n"
update to unhinged id generation
require 'splunk-sdk-ruby'
require 'json'
require 'httparty'
require 'em-http-request'
require 'digest/sha1'
require 'byebug'
#class ElasticSearch
# include HTTParty
# attr_accessor :host
#
# def initialize(host="http://np32.c1.dev:9200", index="gds", type="timing")
# @host = host
# @index = index
# @type = type
# end
#
# def url(index, type, id)
# "#{@host}/#{index}/#{type}/#{id}"
# end
#
# def request(id)
# url(@index, @type, id)
# end
#end
# GDS: parses a raw GDS timing log line into an indexable record and can
# POST it to the local collector via EventMachine. (Second revision -
# identical class body to the first.)
# NOTE(review): attr_accessor :method shadows Object#method on instances -
# confirm nothing relies on the original Object#method here.
class GDS
  include HTTParty
  attr_accessor :id, :index, :host, :date, :listing_id, :method, :timing

  def initialize(host="http://np32.c1.dev:9292")
    @host = host
    @data = {}
    @tags = []
  end

  # Builds a GDS record from a raw log line shaped like
  # "[date][type] ... <listing_id> <message>". The id is a SHA1 over
  # date|listing_id|method, so re-imports of the same event are idempotent.
  def self.from_str(str)
    gds = GDS.new
    hsh = str.match(/\[(?<date>.*)\]\[(?<type>.*)\].* (?<listing_id>[0-9]*) (?<message>.*)/)
    msg = gds.massage(hsh["message"])
    # Index name carries just the date part, e.g. "gds-2014-01-01".
    gds.index = "gds-#{hsh["date"].sub(/ .*/, '')}"
    # "YYYY-MM-DD HH:MM:SS ..." -> "YYYY-MM-DDTHH:MM:SS"
    gds.date = hsh["date"].sub(/ /, "T").sub(/ .*/, "")
    gds.listing_id = hsh["listing_id"]
    gds.timing = msg[:timing]
    gds.method = msg[:method]
    gds.id = Digest::SHA1.hexdigest "#{gds.date}|#{gds.listing_id}|#{gds.method}"
    gds
  end

  # Extracts method name and timing from an ISO-8601-duration-style
  # message like "someMethod: PT0.5S".
  def massage(msg)
    execution=/(?<method>.*): PT(?<time>.*)S/
    hsh=msg.match(execution)
    {timing: hsh["time"], method: hsh["method"]}
  end

  # def from_json(json)
  # hsh = JSON.parse(json)
  # gds = GDS.new
  # gds.date = hsh["date"]
  # gds.data = hsh["data"]
  # gds.tags = hsh["tags"]
  # gds.index = hsh["index"]
  # gds
  # end

  # Serializes the record's public fields as JSON.
  def to_json(opts={})
    {id: id, date: date, host: host, listing_id: listing_id, method: method, index: index, timing: timing}.to_json(opts)
  end

  # Async POST to the collector; invokes callback with (response, iter)
  # on completion. No errback is attached here.
  def self.save(json,iter, callback)
    http = EventMachine::HttpRequest.new("http://np32.c1.dev:9292/entries/gds").post body: json
    http.callback { callback.call(http.response, iter)}
  end
end
# Flattens a GDS entry JSON string into an indexable hash: promotes the
# nested "data" pairs to the top level, drops bookkeeping keys, derives
# the "index" name from the date, normalises the date to
# "YYYY-MM-DDTHH:MM:SS" and coerces "timing" to a Float. Failures are
# swallowed (best-effort), yielding whatever was built so far (nil when
# parsing itself failed).
def massage2(json_str)
  begin
    record = JSON.parse(json_str)
    # Promote the nested "data" pairs to the top level.
    record["data"].each do |field, value|
      record[field] = value
    end
    bookkeeping = ["data", "created_at", "updated_at"]
    ret = record.delete_if { |field, _| bookkeeping.include?(field) }
    day = ret["date"].sub(/ .*/, '')
    ret["index"] = "test-gds-#{day}"
    ret["date"].sub!(/ /, "T")
    ret["date"].sub!(/ .*/, "")
    ret["timing"] = ret["timing"].to_f
  rescue
  end
  ret
end
service = Splunk::connect(
scheme: :https,
host: ARGV[0],
port: ARGV[1].to_i,
username: ARGV[2],
password: ARGV[3]
)
service.login
start_date = ARGV[5] || (DateTime.strptime(GDS.new.latest.date, "%Y-%m-%d %H:%M:%S ")).iso8601
end_date = ARGV[6] || "now"
puts "getting splunk data between #{start_date} and #{end_date}"
stream = service.create_export("search #{ARGV[4]}",
earliest_time: start_date,
latest_time: end_date
)
readers= Splunk::MultiResultsReader.new(stream)
# Build the Elasticsearch document URL for a timing entry.
def request(id, index)
  format('http://np32.c1.dev:9200/%s/timing/%s', index, id)
end
cnt = 0
# Pump every Splunk result through an EM iterator (concurrency 30), PUTing
# each parsed entry into Elasticsearch keyed by its deterministic id.
EventMachine.run do
  puts "em loop"
  readers.each do |reader|
    puts 'reader'
    EM::Iterator.new(reader, 30).each do |result, iter|
      gds = GDS.from_str(result["_raw"])
      http = EventMachine::HttpRequest.new(request(gds.id, gds.index)).put body: gds.to_json()
      http.callback {
        iter.next
      }
      # NOTE(review): the errback never calls iter.next, so one failed request
      # permanently stalls an iterator slot; nothing here calls EM.stop, so
      # the lines after EventMachine.run only run if the loop is interrupted.
      http.errback {
        print http.error;
        #EM.stop
      }
      cnt += 1
      # Progress marker every 100 submissions.
      if cnt % 100 == 0
        # 40.times {|i| print "\b" }
        puts "#{gds.date}: #{cnt}"
      end
    end
  end
end
print "\n#{cnt}"
print "done"
EM.stop
print "\n"
|
Add sample solo.rb
# Chef Solo configuration: resolve cookbook and role paths relative to this
# file so the repository is relocatable.
chef_dir = File.expand_path(File.dirname(__FILE__))
cookbook_path [
  "#{chef_dir}/chef-cookbooks",
  "#{chef_dir}/site-cookbooks",
]
role_path "#{chef_dir}/chef-repo/roles"
|
#
# Cookbook:: gdefault
# Recipe:: default
#
# Copyright:: 2017, The Authors, All Rights Reserved.
tmp_path = Chef::Config[:file_cache_path]
#Instalar ferramentas de apoio
package ['htop', 'iftop', 'iotop', 'iptables-persistent', 'unzip', 'mysql-client']
package ['automake', 'autotools-dev', 'g++', 'libcurl4-gnutls-dev', 'libfuse-dev', 'libssl-dev', 'libxml2-dev', 'make', 'pkg-config']
#Configurar o timezone
bash 'timezone' do
user 'root'
code <<-EOH
echo "America/Sao_Paulo" > /etc/timezone
dpkg-reconfigure --frontend noninteractive tzdata
EOH
end
#Criar pasta de configuração
directory '/comunidade21/config' do
owner 'root'
group 'tomcat'
mode '0775'
recursive true
action :create
end
#Definir configurações do iptables
template "/opt/iptables.rules" do
source 'iptables-save.erb'
owner 'root'
mode '0644'
end
bash 'iptables restore' do
user 'root'
code <<-EOH
iptables-restore < /opt/iptables.rules
EOH
end
#Criar pasta para logs das aplicações
directory '/var/log/comunidades21' do
owner 'root'
group 'tomcat'
mode '0660'
recursive true
action :create
end
#Criar pastas de apoio da aplicação
directory '/opt/comunidade21' do
owner 'root'
group 'tomcat'
mode '0770'
recursive true
action :create
end
directory '/opt/comunidades21' do
owner 'root'
group 'tomcat'
mode '0770'
recursive true
action :create
end
#Instalação do s3fs
remote_file "#{tmp_path}/s3fs.tgz" do
source 'https://s3.amazonaws.com/gopswordksdeps/s3fs-fuse.tgz'
owner 'root'
mode '0644'
action :create
end
template "/root/.passwd-s3fs" do
source 's3pass.erb'
owner 'root'
mode '0600'
end
directory '/opt/s3fs-fuse' do
owner 'root'
group 'root'
mode '0770'
recursive true
action :create
end
bash 'Extract s3fs archive' do
user 'root'
cwd '/opt/s3fs-fuse'
code <<-EOH
tar -zxvf #{tmp_path}/s3fs.tgz --strip 1
./configure
make clean && make
make install
s3fs -o allow_other -o passwd_file=/root/.passwd-s3fs comunidade21.config /comunidade21/
EOH
action :run
end
Ajustes na criação da pasta para o S3FS ser montado
#
# Cookbook:: gdefault
# Recipe:: default
#
# Copyright:: 2017, The Authors, All Rights Reserved.
tmp_path = Chef::Config[:file_cache_path]
#Instalar ferramentas de apoio
package ['htop', 'iftop', 'iotop', 'iptables-persistent', 'unzip', 'mysql-client']
package ['automake', 'autotools-dev', 'g++', 'libcurl4-gnutls-dev', 'libfuse-dev', 'libssl-dev', 'libxml2-dev', 'make', 'pkg-config']
#Configurar o timezone
bash 'timezone' do
user 'root'
code <<-EOH
echo "America/Sao_Paulo" > /etc/timezone
dpkg-reconfigure --frontend noninteractive tzdata
EOH
end
#Criar pasta de configuração
directory '/comunidade21' do
owner 'root'
group 'tomcat'
mode '0775'
recursive true
action :create
end
#Definir configurações do iptables
template "/opt/iptables.rules" do
source 'iptables-save.erb'
owner 'root'
mode '0644'
end
bash 'iptables restore' do
user 'root'
code <<-EOH
iptables-restore < /opt/iptables.rules
EOH
end
#Criar pasta para logs das aplicações
directory '/var/log/comunidades21' do
owner 'root'
group 'tomcat'
mode '0660'
recursive true
action :create
end
#Criar pastas de apoio da aplicação
directory '/opt/comunidade21' do
owner 'root'
group 'tomcat'
mode '0770'
recursive true
action :create
end
directory '/opt/comunidades21' do
owner 'root'
group 'tomcat'
mode '0770'
recursive true
action :create
end
#Instalação do s3fs
remote_file "#{tmp_path}/s3fs.tgz" do
source 'https://s3.amazonaws.com/gopswordksdeps/s3fs-fuse.tgz'
owner 'root'
mode '0644'
action :create
end
template "/root/.passwd-s3fs" do
source 's3pass.erb'
owner 'root'
mode '0600'
end
directory '/opt/s3fs-fuse' do
owner 'root'
group 'root'
mode '0770'
recursive true
action :create
end
bash 'Extract s3fs archive' do
user 'root'
cwd '/opt/s3fs-fuse'
code <<-EOH
tar -zxvf #{tmp_path}/s3fs.tgz --strip 1
./configure
make clean && make
make install
s3fs -o allow_other -o passwd_file=/root/.passwd-s3fs comunidade21.config /comunidade21/
EOH
action :run
end
|
# Backward-compatibility shims for the Ruby/GDK3 bindings: registers
# deprecated constant, enum/flag, and method aliases that warn (or raise)
# and point users at the current API names.
module Gdk
  extend GLib::Deprecatable
  define_deprecated_enums :GrabStatus, 'GRAB'
  define_deprecated_enums :Status
  define_deprecated_const :Image, :raise => "Use 'Gdk::Pixbuf' or 'Cairo::Surface' instead."
  define_deprecated_const :Colormap, :raise => "Use 'Gdk::Visual' instead."
  define_deprecated_const :Input, :raise => "Use 'GLib::IOChannel' instead."
  define_deprecated_const :X11, 'GdkX11'

  class Cursor
    extend GLib::Deprecatable
    # BUG FIX: the enum object is Gdk::CursorType (iterated three lines
    # below), not Gdk::Cursor::Type — registering :Type aliased a
    # nonexistent object.
    define_deprecated_enums :CursorType
    define_deprecated_method :pixmap?, :warn => "Don't use this method." do |_self|
      false
    end
    Gdk::CursorType.constants.each do |cursor_type|
      define_deprecated_const cursor_type, ['Gdk::CursorType', cursor_type].join('::')
    end
  end

  class Device
    extend GLib::Deprecatable
    define_deprecated_enums :AxisUse, 'AXIS'
    define_deprecated_enums :ExtensionMode, 'EXTENSION_EVENTS'
    define_deprecated_enums :InputMode, 'MODE'
    define_deprecated_enums :InputSource, 'SOURCE'
  end

  class Display
    extend GLib::Deprecatable
    define_deprecated_method :double_click_time, :raise => "Don't use this method."
    define_deprecated_method :double_click_distance, :raise => "Don't use this method."
    define_deprecated_method :button_click_time, :raise => "Don't use this method."
    define_deprecated_method :button_window, :raise => "Don't use this method."
    define_deprecated_method :button_number, :raise => "Don't use this method."
    define_deprecated_method :button_x, :raise => "Don't use this method."
    define_deprecated_method :button_y, :raise => "Don't use this method."
  end

  class DragContext
    extend GLib::Deprecatable
    define_deprecated_flags :Action, 'ACTION'
    define_deprecated_enums :Protocol, 'PROTO'
    define_deprecated_singleton_method :drag_begin, :warn => "Use 'Gdk::Window#drag_begin'." do |_self, window, targets|
      window.drag_begin(targets)
    end
    define_deprecated_singleton_method :get_protocol, :raise => "Use 'Gdk::Window#drag_protocol'."
    define_deprecated_method :initialize, :raise => "Use 'Gdk::Window#drag_begin'."
    define_deprecated_method :action, :selected_action
    define_deprecated_method :source?, :warn => "Don't use this method."
    define_deprecated_method :start_time, :raise => "Don't use this method."
  end

  class Event
    extend GLib::Deprecatable
    define_deprecated_const :Mask, "Gdk::EventMask"
    define_deprecated_enums "Gdk::EventMask"
    define_deprecated_const :Type, "Gdk::EventType"
    define_deprecated_enums "Gdk::EventType"
  end

  class EventCrossing
    extend GLib::Deprecatable
    define_deprecated_enums :Mode
    define_deprecated_enums :NotifyType, 'NOTIFY'
  end

  class EventOwnerChange
    extend GLib::Deprecatable
    define_deprecated_enums :OwnerChange
  end

  class EventProperty
    extend GLib::Deprecatable
    define_deprecated_enums :State, 'PROPERTY'
  end

  class EventScroll
    extend GLib::Deprecatable
    define_deprecated_enums :Direction
  end

  class EventSetting
    extend GLib::Deprecatable
    define_deprecated_enums :Action, 'ACTION'
  end

  class EventVisibility
    extend GLib::Deprecatable
    define_deprecated_enums :State
  end

  class EventWindowState
    extend GLib::Deprecatable
    define_deprecated_flags :WindowState
  end

  # Re-export every keyval constant under its pre-GDK3 names
  # (KEY_foo was GDK_KEY_foo and GDK_foo).
  module Keyval
    extend GLib::Deprecatable
    constants.each do |key|
      old_names = []
      old_names << key.to_s.sub(/^KEY_/, 'GDK_KEY_')
      old_names << key.to_s.sub(/^KEY_/, 'GDK_')
      old_names.each do |old_name|
        define_deprecated_const old_name, [self, key].join('::')
      end
    end
  end

  class Pixmap
    extend GLib::Deprecatable
    define_deprecated_method :initialize, :raise => "Use 'Gdk::Window#create_similar_surface'."
    define_deprecated_singleton_method :create_from_data, :raise => "Use 'Cairo::ImageSurface.new'."
    define_deprecated_singleton_method :create_from_xbm, :raise => "Use 'Cairo::ImageSurface.new'."
    define_deprecated_singleton_method :create_from_xpm, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :create_from_xpm_d, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :colormap_create_from_xpm, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :colormap_create_from_xpm_d, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :foreign_new, :raise => "Don't use this method."
    define_deprecated_singleton_method :lookup, :raise => "Don't use this method."
  end

  module Property
    extend GLib::Deprecatable
    define_deprecated_enums :PropMode, 'MODE'
  end

  class Visual
    extend GLib::Deprecatable
    define_deprecated_enums :ByteOrder
    define_deprecated_enums :Type
  end

  class Window
    extend GLib::Deprecatable
    define_deprecated_const :Gravity, "Gdk::Gravity"
    define_deprecated_enums "Gdk::Gravity"
    define_deprecated_const :AttributesType, "Gdk::WindowAttributesType"
    define_deprecated_enums "Gdk::WindowAttributesType"
    define_deprecated_const :Edge, "Gdk::WindowEdge"
    define_deprecated_enums "Gdk::WindowEdge"
    define_deprecated_const :FilterReturn, "Gdk::FilterReturn"
    define_deprecated_enums "Gdk::FilterReturn"
    define_deprecated_const :Hints, "Gdk::WindowHints"
    define_deprecated_enums "Gdk::WindowHints"
    define_deprecated_const :ModifierType, "Gdk::ModifierType"
    define_deprecated_enums "Gdk::ModifierType"
    define_deprecated_const :Type, "Gdk::WindowType"
    define_deprecated_enums "Gdk::WindowType"
    # NOTE(review): the enum names "Gdk::TypeHint" and "Gdk::WindowClass"
    # below do not match the const targets (WindowTypeHint /
    # WindowWindowClass) — confirm against upstream before changing.
    define_deprecated_const :TypeHint, "Gdk::WindowTypeHint"
    define_deprecated_enums "Gdk::TypeHint"
    define_deprecated_const :WindowClass, "Gdk::WindowWindowClass"
    define_deprecated_enums "Gdk::WindowClass"
    define_deprecated_const :Class, "Gdk::WindowWindowClass"
    define_deprecated_const :WMDecoration, "Gdk::WMDecoration"
    define_deprecated_enums "Gdk::WMDecoration"
    define_deprecated_const :WMFunction, "Gdk::WMFunction"
    define_deprecated_enums "Gdk::WMFunction"
    define_deprecated_const :PARENT_RELATIVE, "Gdk::PARENT_RELATIVE"
    define_deprecated_method :clear, :warn => "Don't use this method."
    define_deprecated_method :clear_area, :warn => "Don't use this method."
    define_deprecated_method :internal_paint_info, :raise => "Don't use this method."
    define_deprecated_method :shape_combine_mask, :warn => "Don't use this method."
    define_deprecated_method :input_shape_combine_mask, :warn => "Don't use this method."
    define_deprecated_method :set_back_pixmap, :warn => "Don't use this method."
  end

  class WindowAttr
    extend GLib::Deprecatable
    define_deprecated_method :colormap, :raise => "Don't use this method."
    define_deprecated_method :set_colormap, :warn => "Don't use this method."
    alias :colormap= :set_colormap
  end
end
gdk3: fix wrong flag object name
# Backward-compatibility shims for the Ruby/GDK3 bindings: registers
# deprecated constant, enum/flag, and method aliases that warn (or raise)
# and point users at the current API names.
module Gdk
  extend GLib::Deprecatable
  define_deprecated_enums :GrabStatus, 'GRAB'
  define_deprecated_enums :Status
  define_deprecated_const :Image, :raise => "Use 'Gdk::Pixbuf' or 'Cairo::Surface' instead."
  define_deprecated_const :Colormap, :raise => "Use 'Gdk::Visual' instead."
  define_deprecated_const :Input, :raise => "Use 'GLib::IOChannel' instead."
  define_deprecated_const :X11, 'GdkX11'

  class Cursor
    extend GLib::Deprecatable
    define_deprecated_enums :CursorType
    define_deprecated_method :pixmap?, :warn => "Don't use this method." do |_self|
      false
    end
    # Re-export each Gdk::CursorType value under the old Cursor::<NAME> name.
    Gdk::CursorType.constants.each do |cursor_type|
      define_deprecated_const cursor_type, ['Gdk::CursorType', cursor_type].join('::')
    end
  end

  class Device
    extend GLib::Deprecatable
    define_deprecated_enums :AxisUse, 'AXIS'
    define_deprecated_enums :ExtensionMode, 'EXTENSION_EVENTS'
    define_deprecated_enums :InputMode, 'MODE'
    define_deprecated_enums :InputSource, 'SOURCE'
  end

  class Display
    extend GLib::Deprecatable
    define_deprecated_method :double_click_time, :raise => "Don't use this method."
    define_deprecated_method :double_click_distance, :raise => "Don't use this method."
    define_deprecated_method :button_click_time, :raise => "Don't use this method."
    define_deprecated_method :button_window, :raise => "Don't use this method."
    define_deprecated_method :button_number, :raise => "Don't use this method."
    define_deprecated_method :button_x, :raise => "Don't use this method."
    define_deprecated_method :button_y, :raise => "Don't use this method."
  end

  class DragContext
    extend GLib::Deprecatable
    define_deprecated_flags :Action, 'ACTION'
    define_deprecated_enums :Protocol, 'PROTO'
    define_deprecated_singleton_method :drag_begin, :warn => "Use 'Gdk::Window#drag_begin'." do |_self, window, targets|
      window.drag_begin(targets)
    end
    define_deprecated_singleton_method :get_protocol, :raise => "Use 'Gdk::Window#drag_protocol'."
    define_deprecated_method :initialize, :raise => "Use 'Gdk::Window#drag_begin'."
    define_deprecated_method :action, :selected_action
    define_deprecated_method :source?, :warn => "Don't use this method."
    define_deprecated_method :start_time, :raise => "Don't use this method."
  end

  class Event
    extend GLib::Deprecatable
    define_deprecated_const :Mask, "Gdk::EventMask"
    define_deprecated_enums "Gdk::EventMask"
    define_deprecated_const :Type, "Gdk::EventType"
    define_deprecated_enums "Gdk::EventType"
  end

  class EventCrossing
    extend GLib::Deprecatable
    define_deprecated_enums :Mode
    define_deprecated_enums :NotifyType, 'NOTIFY'
  end

  class EventOwnerChange
    extend GLib::Deprecatable
    define_deprecated_enums :OwnerChange
  end

  class EventProperty
    extend GLib::Deprecatable
    define_deprecated_enums :State, 'PROPERTY'
  end

  class EventScroll
    extend GLib::Deprecatable
    define_deprecated_enums :Direction
  end

  class EventSetting
    extend GLib::Deprecatable
    define_deprecated_enums :Action, 'ACTION'
  end

  class EventVisibility
    extend GLib::Deprecatable
    define_deprecated_enums :State
  end

  class EventWindowState
    extend GLib::Deprecatable
    define_deprecated_flags :WindowState
  end

  # Re-export every keyval constant under its pre-GDK3 names
  # (KEY_foo was GDK_KEY_foo and GDK_foo).
  module Keyval
    extend GLib::Deprecatable
    constants.each do |key|
      old_names = []
      old_names << key.to_s.sub(/^KEY_/, 'GDK_KEY_')
      old_names << key.to_s.sub(/^KEY_/, 'GDK_')
      old_names.each do |old_name|
        define_deprecated_const old_name, [self, key].join('::')
      end
    end
  end

  class Pixmap
    extend GLib::Deprecatable
    define_deprecated_method :initialize, :raise => "Use 'Gdk::Window#create_similar_surface'."
    define_deprecated_singleton_method :create_from_data, :raise => "Use 'Cairo::ImageSurface.new'."
    define_deprecated_singleton_method :create_from_xbm, :raise => "Use 'Cairo::ImageSurface.new'."
    define_deprecated_singleton_method :create_from_xpm, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :create_from_xpm_d, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :colormap_create_from_xpm, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :colormap_create_from_xpm_d, :raise => "Use 'Gdk::Pixbuf.new'."
    define_deprecated_singleton_method :foreign_new, :raise => "Don't use this method."
    define_deprecated_singleton_method :lookup, :raise => "Don't use this method."
  end

  module Property
    extend GLib::Deprecatable
    define_deprecated_enums :PropMode, 'MODE'
  end

  class Visual
    extend GLib::Deprecatable
    define_deprecated_enums :ByteOrder
    define_deprecated_enums :Type
  end

  class Window
    extend GLib::Deprecatable
    define_deprecated_const :Gravity, "Gdk::Gravity"
    define_deprecated_enums "Gdk::Gravity"
    define_deprecated_const :AttributesType, "Gdk::WindowAttributesType"
    define_deprecated_enums "Gdk::WindowAttributesType"
    define_deprecated_const :Edge, "Gdk::WindowEdge"
    define_deprecated_enums "Gdk::WindowEdge"
    define_deprecated_const :FilterReturn, "Gdk::FilterReturn"
    define_deprecated_enums "Gdk::FilterReturn"
    define_deprecated_const :Hints, "Gdk::WindowHints"
    define_deprecated_enums "Gdk::WindowHints"
    define_deprecated_const :ModifierType, "Gdk::ModifierType"
    define_deprecated_enums "Gdk::ModifierType"
    define_deprecated_const :Type, "Gdk::WindowType"
    define_deprecated_enums "Gdk::WindowType"
    # NOTE(review): the enum names "Gdk::TypeHint" and "Gdk::WindowClass"
    # below do not match the const targets (WindowTypeHint /
    # WindowWindowClass) — confirm against upstream before changing.
    define_deprecated_const :TypeHint, "Gdk::WindowTypeHint"
    define_deprecated_enums "Gdk::TypeHint"
    define_deprecated_const :WindowClass, "Gdk::WindowWindowClass"
    define_deprecated_enums "Gdk::WindowClass"
    define_deprecated_const :Class, "Gdk::WindowWindowClass"
    define_deprecated_const :WMDecoration, "Gdk::WMDecoration"
    define_deprecated_enums "Gdk::WMDecoration"
    define_deprecated_const :WMFunction, "Gdk::WMFunction"
    define_deprecated_enums "Gdk::WMFunction"
    define_deprecated_const :PARENT_RELATIVE, "Gdk::PARENT_RELATIVE"
    define_deprecated_method :clear, :warn => "Don't use this method."
    define_deprecated_method :clear_area, :warn => "Don't use this method."
    define_deprecated_method :internal_paint_info, :raise => "Don't use this method."
    define_deprecated_method :shape_combine_mask, :warn => "Don't use this method."
    define_deprecated_method :input_shape_combine_mask, :warn => "Don't use this method."
    define_deprecated_method :set_back_pixmap, :warn => "Don't use this method."
  end

  class WindowAttr
    extend GLib::Deprecatable
    define_deprecated_method :colormap, :raise => "Don't use this method."
    define_deprecated_method :set_colormap, :warn => "Don't use this method."
    alias :colormap= :set_colormap
  end
end
|
# coding: utf-8
# Gem packaging metadata for globelabs_sms_rails (SMS via the Globe Labs API).
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'globelabs_sms_rails/version'

Gem::Specification.new do |spec|
  spec.name = "globelabs_sms_rails"
  spec.version = GlobelabsSmsRails::VERSION
  spec.authors = ["Nujian Den Mark Meralpis"]
  spec.email = ["meralpisdenmark@gmail.com"]
  spec.summary = "A simple gem for sending SMS through Globe Labs API."
  spec.homepage = "https://github.com/denmarkmeralpis/globelabs_sms_rails"
  spec.description = "This gem will help you send SMS using Globe Labs API. Please take note that this gem will work if the subscriber consent workflow is disabled. You can apply the process by asking Globe"
  spec.license = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  # if spec.respond_to?(:metadata)
  #   spec.metadata["allowed_push_host"] = "https://github.com/denmarkmeralpis/globelabs_sms_rails"
  # else
  #   raise "RubyGems 2.0 or newer is required to protect against " \
  #     "public gem pushes."
  # end

  # Package everything tracked by git except test/spec/feature files.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  # Development-only dependencies (test and tooling).
  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "dotenv", "~> 2.5"
  spec.add_development_dependency "webmock", "~> 3.4"
  spec.add_development_dependency "rails", "~> 4.2"
end
Set gemspec metadata (enable allowed_push_host)
# coding: utf-8
# Gem packaging metadata for globelabs_sms_rails (SMS via the Globe Labs API).
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'globelabs_sms_rails/version'

Gem::Specification.new do |spec|
  spec.name = "globelabs_sms_rails"
  spec.version = GlobelabsSmsRails::VERSION
  spec.authors = ["Nujian Den Mark Meralpis"]
  spec.email = ["meralpisdenmark@gmail.com"]
  spec.summary = "A simple gem for sending SMS through Globe Labs API."
  spec.homepage = "https://github.com/denmarkmeralpis/globelabs_sms_rails"
  spec.description = "This gem will help you send SMS using Globe Labs API. Please take note that this gem will work if the subscriber consent workflow is disabled. You can apply the process by asking Globe"
  spec.license = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  # NOTE(review): allowed_push_host is documented as a host, e.g.
  # "https://rubygems.org" — the gem-page URL used here may not be honored
  # by the push check; confirm.
  if spec.respond_to?(:metadata)
    spec.metadata["allowed_push_host"] = "https://rubygems.org/gems/globelabs_sms_rails"
  else
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end

  # Package everything tracked by git except test/spec/feature files.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  # Development-only dependencies (test and tooling).
  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "dotenv", "~> 2.5"
  spec.add_development_dependency "webmock", "~> 3.4"
  spec.add_development_dependency "rails", "~> 4.2"
end
|
require 'optparse'
require 'tmpdir'
require 'open3'
require 'json'
require_relative 'xamarin-builder/builder'
# -----------------------
# --- Constants
# -----------------------
# Bitrise workspace root and the NUnit result file the Test Cloud run writes.
@work_dir = ENV['BITRISE_SOURCE_DIR']
@result_log_path = File.join(@work_dir, 'TestResult.xml')
# -----------------------
# --- Functions
# -----------------------
# Print a message in blue, preceded by a blank line.
def log_info(message)
  puts "\n\e[34m#{message}\e[0m"
end
# Print an indented detail line.
def log_details(message)
  puts '  %s' % message
end
# Print an indented success line in green.
def log_done(message)
  puts "  \e[32m%s\e[0m" % message
end
# Print a warning line in yellow.
def log_warning(message)
  puts "\e[33m%s\e[0m" % message
end
# Print an error line in red (without terminating the step).
def log_error(message)
  puts "\e[31m%s\e[0m" % message
end
# Mark the step as failed in the Bitrise environment (via envman), print the
# message in red, and terminate the process with exit status 1.
def log_fail(message)
  system('envman add --key BITRISE_XAMARIN_TEST_RESULT --value failed')
  puts "\e[31m#{message}\e[0m"
  exit(1)
end
# Locate the app's .dSYM bundle inside an .xcarchive.
# Returns its path when exactly one is found, otherwise logs a warning and
# returns nil.
def export_dsym(archive_path)
  log_info "Exporting dSYM from archive at path #{archive_path}"

  dsym_dir = File.join(archive_path, 'dSYMs')
  dsym_paths = Dir[File.join(dsym_dir, '*.app.dSYM')]
  dsym_paths.each { |path| log_details "dSym found at path: #{path}" }

  case dsym_paths.count
  when 1
    dsym_paths.first
  when 0
    log_warning 'No dSym found'
    nil
  else
    log_warning 'Multiple dSyms found'
    nil
  end
end
# Export an .ipa from an iOS .xcarchive using xcodebuild. When no export
# options plist is supplied, one is generated via the bundled
# export-options helper. Returns the path of the exported .ipa.
def export_ios_xcarchive(archive_path, export_options)
  log_info("Exporting ios archive at path: #{archive_path}")

  # Scratch dir for the generated export-options plist and the exported IPA.
  # BUG FIX: this was previously created only inside the branch below, so a
  # caller-supplied export_options left temp_dir nil and xcodebuild received
  # an empty -exportPath.
  temp_dir = Dir.mktmpdir('_bitrise_')

  export_options_path = export_options
  unless export_options_path
    log_info('Generating export options')

    # Install the gems required by the export-options generator.
    current_dir = File.expand_path(File.dirname(__FILE__))
    gemfile_path = File.join(current_dir, 'export-options', 'Gemfile')

    bundle_install_command = [
      "BUNDLE_GEMFILE=\"#{gemfile_path}\"",
      'bundle',
      'install'
    ]
    log_info(bundle_install_command.join(' '))
    success = system(bundle_install_command.join(' '))
    # NOTE(review): fail_with_message is not defined in this script (log_fail
    # is the failure helper here) — confirm it is provided by a required file.
    fail_with_message('Failed to create export options (required gem install failed)') unless success

    # Run the generator to produce the export_options.plist for this archive.
    export_options_path = File.join(temp_dir, 'export_options.plist')
    export_options_generator = File.join(current_dir, 'export-options', 'generate_ios_export_options.rb')
    bundle_exec_command = [
      "BUNDLE_GEMFILE=\"#{gemfile_path}\"",
      'bundle',
      'exec',
      'ruby',
      export_options_generator,
      "-o \"#{export_options_path}\"",
      "-a \"#{archive_path}\""
    ]
    log_info(bundle_exec_command.join(' '))
    success = system(bundle_exec_command.join(' '))
    fail_with_message('Failed to create export options') unless success
  end

  # Export ipa
  export_command = [
    'xcodebuild',
    '-exportArchive',
    "-archivePath \"#{archive_path}\"",
    "-exportPath \"#{temp_dir}\"",
    "-exportOptionsPlist \"#{export_options_path}\""
  ]
  log_info(export_command.join(' '))
  success = system(export_command.join(' '))
  fail_with_message('Failed to export IPA') unless success

  temp_ipa_path = Dir[File.join(temp_dir, '*.ipa')].first
  fail_with_message('No generated IPA found') unless temp_ipa_path

  temp_ipa_path
end
# -----------------------
# --- Main
# -----------------------
#
# Parse options
# Step inputs with their defaults; empty-string CLI values are ignored so
# the defaults survive.
options = {
  project: nil,
  configuration: nil,
  platform: nil,
  api_key: nil,
  user: nil,
  devices: nil,
  async: 'yes',        # submit to Test Cloud asynchronously by default
  series: 'master',
  parallelization: nil,
  other_parameters: nil
}

# NOTE(review): -g populates :sign_parameters and there is no flag for
# :export_options, yet the main loop reads options[:export_options] and
# never reads :sign_parameters — confirm the intended wiring.
parser = OptionParser.new do |opts|
  opts.banner = 'Usage: step.rb [options]'
  opts.on('-s', '--project path', 'Project path') { |s| options[:project] = s unless s.to_s == '' }
  opts.on('-c', '--configuration config', 'Configuration') { |c| options[:configuration] = c unless c.to_s == '' }
  opts.on('-p', '--platform platform', 'Platform') { |p| options[:platform] = p unless p.to_s == '' }
  opts.on('-a', '--api key', 'Api key') { |a| options[:api_key] = a unless a.to_s == '' }
  opts.on('-u', '--user user', 'User') { |u| options[:user] = u unless u.to_s == '' }
  opts.on('-d', '--devices devices', 'Devices') { |d| options[:devices] = d unless d.to_s == '' }
  opts.on('-y', '--async async', 'Async') { |y| options[:async] = y unless y.to_s == '' }
  opts.on('-r', '--series series', 'Series') { |r| options[:series] = r unless r.to_s == '' }
  opts.on('-l', '--parallelization parallelization', 'Parallelization') { |l| options[:parallelization] = l unless l.to_s == '' }
  opts.on('-g', '--sign parameters', 'Sign') { |g| options[:sign_parameters] = g unless g.to_s == '' }
  opts.on('-m', '--other parameters', 'Other') { |m| options[:other_parameters] = m unless m.to_s == '' }
  opts.on('-h', '--help', 'Displays Help') do
    exit
  end
end
parser.parse!
#
# Print options
# Print the resolved configuration (api key masked).
log_info 'Configs:'
log_details("* project: #{options[:project]}")
log_details("* configuration: #{options[:configuration]}")
log_details("* platform: #{options[:platform]}")
log_details('* api_key: ***')
log_details("* user: #{options[:user]}")
log_details("* devices: #{options[:devices]}")
log_details("* async: #{options[:async]}")
log_details("* series: #{options[:series]}")
log_details("* parallelization: #{options[:parallelization]}")
log_details("* other_parameters: #{options[:other_parameters]}")

#
# Fail fast on missing required inputs (log_fail exits the process).
log_fail('No project file found') unless options[:project] && File.exist?(options[:project])
log_fail('configuration not specified') unless options[:configuration]
log_fail('platform not specified') unless options[:platform]
log_fail('api_key not specified') unless options[:api_key]
log_fail('user not specified') unless options[:user]
log_fail('devices not specified') unless options[:devices]
#
# Main
# Build the solution and its UITest assemblies; any build failure aborts the
# step via log_fail (which exits).
begin
  builder = Builder.new(options[:project], options[:configuration], options[:platform], [Api::IOS])
  builder.build
  builder.build_test
rescue => ex
  log_error(ex.inspect.to_s)
  log_error('--- Stack trace: ---')
  log_fail(ex.backtrace.to_s)
end

output = builder.generated_files
log_fail('No output generated') if output.nil? || output.empty?

# For every project that produced an xcarchive plus UITest assemblies,
# export an IPA and submit each assembly to Xamarin Test Cloud.
any_uitest_built = false
output.each do |_, project_output|
  next if project_output[:xcarchive].nil? || project_output[:uitests].nil? || project_output[:uitests].empty?

  # NOTE(review): options[:export_options] is never populated by the option
  # parser, so export options are always generated — confirm intent.
  ipa_path = export_ios_xcarchive(project_output[:xcarchive], options[:export_options])
  log_fail('failed to export ipa') unless ipa_path

  dsym_path = export_dsym(project_output[:xcarchive])
  log_warning('failed to export dsym') unless dsym_path

  project_output[:uitests].each do |dll_path|
    any_uitest_built = true
    assembly_dir = File.dirname(dll_path)
    log_info("Uploading #{ipa_path} with #{dll_path}")

    #
    # Get test cloud path (latest test-cloud.exe restored by NuGet)
    test_cloud = Dir[File.join(@work_dir, '/**/packages/Xamarin.UITest.*/tools/test-cloud.exe')].last
    log_fail("Can't find test-cloud.exe") unless test_cloud

    #
    # Build Request
    request = [
      "mono \"#{test_cloud}\"",
      "submit \"#{ipa_path}\"",
      options[:api_key],
      "--assembly-dir \"#{assembly_dir}\"",
      "--nunit-xml \"#{@result_log_path}\"",
      "--user #{options[:user]}",
      "--devices \"#{options[:devices]}\""
    ]
    request << '--async-json' if options[:async] == 'yes'
    request << "--dsym \"#{dsym_path}\"" if dsym_path
    request << "--series \"#{options[:series]}\"" if options[:series]
    request << '--fixture-chunk' if options[:parallelization] == 'by_test_fixture'
    request << '--test-chunk' if options[:parallelization] == 'by_test_chunk'
    request << options[:other_parameters].to_s if options[:other_parameters]

    log_details(request.join(' '))
    puts

    #
    # Run Test Cloud Upload, streaming and capturing combined stdout/stderr.
    captured_stdout_err_lines = []
    success = Open3.popen2e(request.join(' ')) do |stdin, stdout_err, wait_thr|
      stdin.close
      while line = stdout_err.gets
        puts line
        captured_stdout_err_lines << line
      end
      wait_thr.value.success?
    end
    puts

    #
    # Process output: expose the NUnit XML (if written) to later steps.
    # NOTE(review): interpolating raw XML into a shell command is fragile —
    # quotes/backticks in the log would break or be interpreted by the shell.
    result_log = ''
    if File.exist? @result_log_path
      file = File.open(@result_log_path)
      result_log = file.read
      file.close
      system("envman add --key BITRISE_XAMARIN_TEST_FULL_RESULTS_TEXT --value \"#{result_log}\"") if result_log.to_s != ''
      log_details "Logs are available at path: #{@result_log_path}"
      puts
    end

    unless success
      puts
      puts result_log
      puts
      log_fail('Xamarin Test Cloud submit failed')
    end

    #
    # Set output envs: in async mode, scrape the JSON summary printed by
    # test-cloud.exe for the TestRunId and any error messages.
    if options[:async] == 'yes'
      captured_stdout_err = captured_stdout_err_lines.join('')

      # NOTE(review): greedy (?<id>.*) would over-match if more fields follow
      # on the same line — confirm against actual --async-json output.
      test_run_id_regexp = /"TestRunId":"(?<id>.*)",/
      test_run_id = ''
      match = captured_stdout_err.match(test_run_id_regexp)
      if match
        captures = match.captures
        test_run_id = captures[0] if captures && captures.length == 1
        if test_run_id.to_s != ''
          system("envman add --key BITRISE_XAMARIN_TEST_TO_RUN_ID --value \"#{test_run_id}\"")
          log_details "Found Test Run ID: #{test_run_id}"
        end
      end

      error_messages_regexp = /"ErrorMessages":\[(?<error>.*)\],/
      error_messages = ''
      match = captured_stdout_err.match(error_messages_regexp)
      if match
        captures = match.captures
        error_messages = captures[0] if captures && captures.length == 1
        if error_messages.to_s != ''
          log_fail("Xamarin Test Cloud submit failed, with error(s): #{error_messages}")
        end
      end
    end

    system('envman add --key BITRISE_XAMARIN_TEST_RESULT --value succeeded')
    log_done('Xamarin Test Cloud submit succeeded')
  end
end

unless any_uitest_built
  puts "generated_files: #{output}"
  log_fail 'No xcarchive or built UITest found in outputs'
end
log updates
require 'optparse'
require 'tmpdir'
require 'open3'
require 'json'
require_relative 'xamarin-builder/builder'
# -----------------------
# --- Constants
# -----------------------
# Bitrise workspace root and the NUnit result file the Test Cloud run writes.
@work_dir = ENV['BITRISE_SOURCE_DIR']
@result_log_path = File.join(@work_dir, 'TestResult.xml')
# -----------------------
# --- Functions
# -----------------------
# Print a message in blue, preceded by a blank line.
def log_info(message)
  puts "\n\e[34m#{message}\e[0m"
end
# Print an indented detail line.
def log_details(message)
  puts '  %s' % message
end
# Print an indented success line in green.
def log_done(message)
  puts "  \e[32m%s\e[0m" % message
end
# Print a warning line in yellow.
def log_warning(message)
  puts "\e[33m%s\e[0m" % message
end
# Print an error line in red (without terminating the step).
def log_error(message)
  puts "\e[31m%s\e[0m" % message
end
# Mark the step as failed in the Bitrise environment (via envman), print the
# message in red, and terminate the process with exit status 1.
def log_fail(message)
  system('envman add --key BITRISE_XAMARIN_TEST_RESULT --value failed')
  puts "\e[31m#{message}\e[0m"
  exit(1)
end
# Locate the app's .dSYM bundle inside an .xcarchive.
# Returns its path when exactly one is found, otherwise logs a warning and
# returns nil.
def export_dsym(archive_path)
  log_info "Exporting dSYM from archive at path #{archive_path}"

  dsym_dir = File.join(archive_path, 'dSYMs')
  dsym_paths = Dir[File.join(dsym_dir, '*.app.dSYM')]
  dsym_paths.each { |path| log_details "dSym found at path: #{path}" }

  case dsym_paths.count
  when 1
    dsym_paths.first
  when 0
    log_warning 'No dSym found'
    nil
  else
    log_warning 'Multiple dSyms found'
    nil
  end
end
# Exports an .ipa from an iOS .xcarchive via `xcodebuild -exportArchive`.
#
# archive_path   - path of the .xcarchive to export.
# export_options - optional path to an exportOptionsPlist; when nil, one is
#                  generated with the bundled export-options helper gem.
#
# Returns the path of the generated .ipa; aborts the step via log_fail on
# any failure.
def export_ios_xcarchive(archive_path, export_options)
  log_info("Exporting ios archive at path: #{archive_path}")
  # The export always needs a scratch directory for xcodebuild's -exportPath.
  # Previously this was created only inside the options-generation branch,
  # which raised a NameError whenever a plist was supplied by the caller.
  temp_dir = Dir.mktmpdir('_bitrise_')
  export_options_path = export_options
  unless export_options_path
    log_info('Generating export options')
    # Bundle install
    current_dir = File.expand_path(File.dirname(__FILE__))
    gemfile_path = File.join(current_dir, 'export-options', 'Gemfile')
    bundle_install_command = [
      "BUNDLE_GEMFILE=\"#{gemfile_path}\"",
      'bundle',
      'install'
    ]
    log_info(bundle_install_command.join(' '))
    success = system(bundle_install_command.join(' '))
    # This script defines log_fail, not fail_with_message; the old calls
    # would have raised NoMethodError instead of failing the step cleanly.
    log_fail('Failed to create export options (required gem install failed)') unless success
    # Bundle exec
    export_options_path = File.join(temp_dir, 'export_options.plist')
    export_options_generator = File.join(current_dir, 'export-options', 'generate_ios_export_options.rb')
    bundle_exec_command = [
      "BUNDLE_GEMFILE=\"#{gemfile_path}\"",
      'bundle',
      'exec',
      'ruby',
      export_options_generator,
      "-o \"#{export_options_path}\"",
      "-a \"#{archive_path}\""
    ]
    log_info(bundle_exec_command.join(' '))
    success = system(bundle_exec_command.join(' '))
    log_fail('Failed to create export options') unless success
  end
  # Export ipa
  export_command = [
    'xcodebuild',
    '-exportArchive',
    "-archivePath \"#{archive_path}\"",
    "-exportPath \"#{temp_dir}\"",
    "-exportOptionsPlist \"#{export_options_path}\""
  ]
  log_info(export_command.join(' '))
  success = system(export_command.join(' '))
  log_fail('Failed to export IPA') unless success
  temp_ipa_path = Dir[File.join(temp_dir, '*.ipa')].first
  log_fail('No generated IPA found') unless temp_ipa_path
  temp_ipa_path
end
# -----------------------
# --- Main
# -----------------------
#
# Parse options
options = {
project: nil,
configuration: nil,
platform: nil,
api_key: nil,
user: nil,
devices: nil,
async: 'yes',
series: 'master',
parallelization: nil,
other_parameters: nil
}
parser = OptionParser.new do |opts|
opts.banner = 'Usage: step.rb [options]'
opts.on('-s', '--solution path', 'Solution path') { |s| options[:project] = s unless s.to_s == '' }
opts.on('-c', '--configuration config', 'Configuration') { |c| options[:configuration] = c unless c.to_s == '' }
opts.on('-p', '--platform platform', 'Platform') { |p| options[:platform] = p unless p.to_s == '' }
opts.on('-a', '--api key', 'Api key') { |a| options[:api_key] = a unless a.to_s == '' }
opts.on('-u', '--user user', 'User') { |u| options[:user] = u unless u.to_s == '' }
opts.on('-d', '--devices devices', 'Devices') { |d| options[:devices] = d unless d.to_s == '' }
opts.on('-y', '--async async', 'Async') { |y| options[:async] = y unless y.to_s == '' }
opts.on('-r', '--series series', 'Series') { |r| options[:series] = r unless r.to_s == '' }
opts.on('-l', '--parallelization parallelization', 'Parallelization') { |l| options[:parallelization] = l unless l.to_s == '' }
opts.on('-g', '--sign parameters', 'Sign') { |g| options[:sign_parameters] = g unless g.to_s == '' }
opts.on('-m', '--other parameters', 'Other') { |m| options[:other_parameters] = m unless m.to_s == '' }
opts.on('-h', '--help', 'Displays Help') do
exit
end
end
parser.parse!
#
# Print options
log_info 'Configs:'
log_details("* solution: #{options[:project]}")
log_details("* configuration: #{options[:configuration]}")
log_details("* platform: #{options[:platform]}")
log_details('* api_key: ***')
log_details("* user: #{options[:user]}")
log_details("* devices: #{options[:devices]}")
log_details("* async: #{options[:async]}")
log_details("* series: #{options[:series]}")
log_details("* parallelization: #{options[:parallelization]}")
log_details("* other_parameters: #{options[:other_parameters]}")
#
# Validate options
# Every remaining input is mandatory; log_fail exports
# BITRISE_XAMARIN_TEST_RESULT=failed via envman and exits 1.
log_fail('no solution file found') unless options[:project] && File.exist?(options[:project])
log_fail('configuration not specified') unless options[:configuration]
log_fail('platform not specified') unless options[:platform]
log_fail('api_key not specified') unless options[:api_key]
log_fail('user not specified') unless options[:user]
log_fail('devices not specified') unless options[:devices]
#
# Main
# Build the solution and its UITest projects; abort the step on any error.
begin
  builder = Builder.new(options[:project], options[:configuration], options[:platform], [Api::IOS])
  builder.build
  builder.build_test
rescue => ex
  log_error(ex.inspect)
  log_error('--- Stack trace: ---')
  # Join the frames so the trace prints one per line instead of the
  # Array#to_s inspect form; backtrace can be nil for freshly-raised
  # exceptions, so guard it.
  log_fail((ex.backtrace || []).join("\n"))
end
output = builder.generated_files
log_fail('No output generated') if output.nil? || output.empty?
any_uitest_built = false
output.each do |_, project_output|
next if project_output[:xcarchive].nil? || project_output[:uitests].nil? || project_output[:uitests].empty?
ipa_path = export_ios_xcarchive(project_output[:xcarchive], options[:export_options])
log_fail('failed to export ipa') unless ipa_path
dsym_path = export_dsym(project_output[:xcarchive])
log_warning('failed to export dsym') unless dsym_path
project_output[:uitests].each do |dll_path|
any_uitest_built = true
assembly_dir = File.dirname(dll_path)
log_info("Uploading #{ipa_path} with #{dll_path}")
#
# Get test cloud path
test_cloud = Dir[File.join(@work_dir, '/**/packages/Xamarin.UITest.*/tools/test-cloud.exe')].last
log_fail("Can't find test-cloud.exe") unless test_cloud
#
# Build Request
request = [
"mono \"#{test_cloud}\"",
"submit \"#{ipa_path}\"",
options[:api_key],
"--assembly-dir \"#{assembly_dir}\"",
"--nunit-xml \"#{@result_log_path}\"",
"--user #{options[:user]}",
"--devices \"#{options[:devices]}\""
]
request << '--async-json' if options[:async] == 'yes'
request << "--dsym \"#{dsym_path}\"" if dsym_path
request << "--series \"#{options[:series]}\"" if options[:series]
request << '--fixture-chunk' if options[:parallelization] == 'by_test_fixture'
request << '--test-chunk' if options[:parallelization] == 'by_test_chunk'
request << options[:other_parameters].to_s if options[:other_parameters]
log_details(request.join(' '))
puts
#
# Run Test Cloud Upload
captured_stdout_err_lines = []
success = Open3.popen2e(request.join(' ')) do |stdin, stdout_err, wait_thr|
stdin.close
while line = stdout_err.gets
puts line
captured_stdout_err_lines << line
end
wait_thr.value.success?
end
puts
#
# Process output
result_log = ''
if File.exist? @result_log_path
file = File.open(@result_log_path)
result_log = file.read
file.close
system("envman add --key BITRISE_XAMARIN_TEST_FULL_RESULTS_TEXT --value \"#{result_log}\"") if result_log.to_s != ''
log_details "Logs are available at path: #{@result_log_path}"
puts
end
unless success
puts
puts result_log
puts
log_fail('Xamarin Test Cloud submit failed')
end
#
# Set output envs
if options[:async] == 'yes'
captured_stdout_err = captured_stdout_err_lines.join('')
test_run_id_regexp = /"TestRunId":"(?<id>.*)",/
test_run_id = ''
match = captured_stdout_err.match(test_run_id_regexp)
if match
captures = match.captures
test_run_id = captures[0] if captures && captures.length == 1
if test_run_id.to_s != ''
system("envman add --key BITRISE_XAMARIN_TEST_TO_RUN_ID --value \"#{test_run_id}\"")
log_details "Found Test Run ID: #{test_run_id}"
end
end
error_messages_regexp = /"ErrorMessages":\[(?<error>.*)\],/
error_messages = ''
match = captured_stdout_err.match(error_messages_regexp)
if match
captures = match.captures
error_messages = captures[0] if captures && captures.length == 1
if error_messages.to_s != ''
log_fail("Xamarin Test Cloud submit failed, with error(s): #{error_messages}")
end
end
end
system('envman add --key BITRISE_XAMARIN_TEST_RESULT --value succeeded')
log_done('Xamarin Test Cloud submit succeeded')
end
end
unless any_uitest_built
puts "generated_files: #{output}"
log_fail 'No xcarchive or built UITest found in outputs'
end
|
# encoding: utf-8
Gem::Specification.new do |specification|
specification.name = "gom-couchdb-adapter"
specification.version = "0.4.1"
specification.date = "2011-05-10"
specification.authors = [ "Philipp Brüll" ]
specification.email = "b.phifty@gmail.com"
specification.homepage = "http://github.com/phifty/gom-couchdb-adapter"
specification.rubyforge_project = "gom-couchdb-adapter"
specification.summary = "CouchDB storage adapter for the General Object Mapper."
specification.description = "CouchDB storage adapter for the General Object Mapper. Currently, version 1.0 of CouchDB is supported."
specification.has_rdoc = true
specification.files = [ "README.rdoc", "LICENSE", "Rakefile" ] + Dir["lib/**/*"] + Dir["spec/**/*"]
specification.extra_rdoc_files = [ "README.rdoc" ]
specification.require_path = "lib"
specification.test_files = Dir["spec/**/*_spec.rb"]
specification.add_dependency "gom", ">= 0.4.0"
specification.add_dependency "couchdb", ">= 0.1.2"
specification.add_development_dependency "rspec", ">= 2"
specification.add_development_dependency "reek", ">= 1.2"
end
bumped to version 0.4.1
# encoding: utf-8
# Gem packaging metadata for gom-couchdb-adapter — the CouchDB storage
# adapter for the General Object Mapper (GOM).
Gem::Specification.new do |specification|
  specification.name = "gom-couchdb-adapter"
  specification.version = "0.4.1"
  specification.date = "2011-05-22"
  specification.authors = [ "Philipp Brüll" ]
  specification.email = "b.phifty@gmail.com"
  specification.homepage = "http://github.com/phifty/gom-couchdb-adapter"
  specification.rubyforge_project = "gom-couchdb-adapter"
  specification.summary = "CouchDB storage adapter for the General Object Mapper."
  specification.description = "CouchDB storage adapter for the General Object Mapper. Currently, version 1.0 of CouchDB is supported."
  # NOTE(review): has_rdoc is deprecated and ignored by modern RubyGems.
  specification.has_rdoc = true
  # Ship docs, build file and everything under lib/ and spec/.
  specification.files = [ "README.rdoc", "LICENSE", "Rakefile" ] + Dir["lib/**/*"] + Dir["spec/**/*"]
  specification.extra_rdoc_files = [ "README.rdoc" ]
  specification.require_path = "lib"
  specification.test_files = Dir["spec/**/*_spec.rb"]
  specification.add_dependency "gom", ">= 0.4.0"
  specification.add_dependency "couchdb", ">= 0.1.3"
  specification.add_development_dependency "rspec", ">= 2"
  specification.add_development_dependency "reek", ">= 1.2"
end
|
module Celluloid
module Sync
unless defined? @@updated
@@gem_path ||= File.expand_path("../../", __FILE__)
$:.push( @@gem_path)
puts "Synchronizing Celluloid Culture //"
@@update = `cd #{@@gem_path}/culture; git pull`
@@updated = !@@update.include?("up-to-date")
@@required ||= [
"#{@@gem_path}/culture/sync.rb",
"#{@@gem_path}/culture/gems/loader"
]
end
class << self
def updated?
@@updated
end
if @@updated
def update!
if @@updated
puts "Celluloid Culture was updated."
@@required.each { |rb| load(rb) }
puts "Reloaded Culture::Sync itself:\n#{@@update}"
end
end
end
else
require(@@required.last)
GEM = Celluloid::Gems::SELF unless defined? GEM
LIB_PATH = File.expand_path("../../lib/#{GEM.split("-").join("/")}", __FILE__)
if File.exist?(version="#{LIB_PATH}/version.rb")
require(version)
end
class << self
def gems(loader)
case loader.class
when Gem::Specification
Gems.gemspec(loader)
when Bundler::Dsl
Gems.bundler(loader)
end
end
end
end
end
end
end
Celluloid::Sync.update! if Celluloid::Sync.updated?
fix ruby errors
module Celluloid
module Sync
unless defined? @@updated
@@gem_path ||= File.expand_path("../../", __FILE__)
$:.push( @@gem_path)
puts "Synchronizing Celluloid Culture //"
@@update = `cd #{@@gem_path}/culture; git pull`
@@updated = !@@update.include?("up-to-date")
@@required ||= [
"#{@@gem_path}/culture/sync.rb",
"#{@@gem_path}/culture/gems/loader"
]
end
class << self
def updated?
@@updated
end
if @@updated
def update!
if @@updated
puts "Celluloid Culture was updated."
@@required.each { |rb| load(rb) }
puts "Reloaded Culture::Sync itself:\n#{@@update}"
end
end
else
require(@@required.last)
GEM = Celluloid::Gems::SELF unless defined? GEM
LIB_PATH = File.expand_path("../../lib/#{GEM.split("-").join("/")}", __FILE__)
if File.exist?(version="#{LIB_PATH}/version.rb")
require(version)
end
def gems(loader)
case loader.class
when Gem::Specification
Gems.gemspec(loader)
when Bundler::Dsl
Gems.bundler(loader)
end
end
end
end
end
end
Celluloid::Sync.update! if Celluloid::Sync.updated? |
module Celluloid
module Sync
class << self
undef gem_path rescue nil
def gem_path
File.expand_path("../../", __FILE__)
end
undef gem_name rescue nil
def gem_name
Dir["#{File.expand_path('../../', __FILE__)}/*.gemspec"].first.gsub(".gemspec", "").split("/").last
end
undef gem_name? rescue nil
def gem_name?
!gem_name.nil?
end
undef lib_path rescue nil
def lib_path
File.expand_path("../../lib", __FILE__)
end
undef lib_gempath rescue nil
def lib_gempath
"#{lib_path}/#{gem_name.split('-').join('/')}"
end
end
fail "Missing gemspec." unless gem_name?
$LOAD_PATH.push(gem_path)
$LOAD_PATH.push(lib_path)
# TODO: This will likely need to be done differently if INSIDE a cut gem.
case File.basename($PROGRAM_NAME)
when "bundle"
if ARGV.first == 'update'
puts "Celluloid::Sync // Gem: #{gem_name}"
`cd #{gem_path}/culture; git pull origin master`
end
end
require("#{gem_path}/culture/gems/loader")
if File.exist?(version = "#{lib_gempath}/version.rb")
require(version)
end
end
end
make rubocop happy
module Celluloid
  # Bootstraps a Celluloid gem checkout: locates the gemspec, pushes the gem
  # root and lib directories onto $LOAD_PATH, refreshes the shared "culture"
  # submodule when invoked through `bundle update`, then loads the culture
  # gem loader and the gem's own version file. Runs its body at require time.
  module Sync
    class << self
      # Each reader is `undef`ed first so re-loading this file never trips
      # "method redefined" warnings; the rescue covers the first load, when
      # there is nothing to undef yet.
      undef gem_path rescue nil
      # Root of the gem checkout (two directories above this file).
      def gem_path
        File.expand_path("../../", __FILE__)
      end
      undef gem_name rescue nil
      # Gem name derived from the sole *.gemspec in the checkout root.
      # NOTE(review): assumes exactly one gemspec exists; Dir[...].first is
      # nil otherwise and the gsub raises NoMethodError — confirm intent.
      def gem_name
        Dir["#{File.expand_path('../../', __FILE__)}/*.gemspec"].first.gsub(".gemspec", "").split("/").last
      end
      undef gem_name? rescue nil
      # True when a gemspec name could be derived.
      def gem_name?
        !gem_name.nil?
      end
      undef lib_path rescue nil
      # The checkout's lib/ directory.
      def lib_path
        File.expand_path("../../lib", __FILE__)
      end
      undef lib_gempath rescue nil
      # lib/<gem parts> directory, e.g. "celluloid-io" -> lib/celluloid/io.
      def lib_gempath
        "#{lib_path}/#{gem_name.split('-').join('/')}"
      end
    end
    fail "Missing gemspec." unless gem_name?
    $LOAD_PATH.push(gem_path)
    $LOAD_PATH.push(lib_path)
    # TODO: This will likely need to be done differently if INSIDE a cut gem.
    # Only refresh the culture submodule when running under `bundle update`.
    case File.basename($PROGRAM_NAME)
    when "bundle"
      if ARGV.first == 'update'
        puts "Celluloid::Sync // Gem: #{gem_name}"
        `cd #{gem_path}/culture; git pull origin master`
      end
    end
    require("#{gem_path}/culture/gems/loader")
    if File.exist?(version = "#{lib_gempath}/version.rb")
      require(version)
    end
  end
end
|
# Copyright (C) 2014 Ruby-GNOME2 Project Team
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
class TestGtkStack < Test::Unit::TestCase
include GtkTestUtils
def setup
only_gtk_version(3, 10, 0)
@stack = Gtk::Stack.new
end
class TestAdd < self
def setup
super
@child = Gtk::EventBox.new
end
def test_return_value
assert_equal(@stack, @stack.add(@child))
end
def test_added
@stack.add(@child)
assert_equal([@child], @stack.children)
end
def test_name
widget_name = "set widget name"
@stack.add(@child, widget_name)
assert_equal(widget_name,
@stack.child_get_property(@child, "name"))
end
def test_name_add_title
widget_name = "set widget name"
widget_title = "set widget title"
@stack.add(@child, widget_name, widget_title)
assert_equal([
widget_name,
widget_title,
],
[
@stack.child_get_property(@child, "name"),
@stack.child_get_property(@child, "title"),
])
end
end
def test_homogeneous_accessors
@stack.homogeneous = false
assert_false(@stack.homogeneous?)
end
def test_transition_duration_accessors
duration = 500
@stack.transition_duration = duration
assert_equal(duration, @stack.transition_duration)
end
def test_transition_type_accessors
stack_transition_type = Gtk::Stack::TransitionType::SLIDE_UP
@stack.transition_type = stack_transition_type
assert_equal(stack_transition_type, @stack.transition_type)
end
class TestVisibleChild < self
def test_visible_child_accessors
visible_widget = Gtk::EventBox.new
visible_widget.show
@stack.add(visible_widget)
@stack.visible_child = visible_widget
assert_equal(visible_widget, @stack.visible_child)
end
end
class TestEnum < self
def test_transition_type
assert_const_defined(Gtk::Stack::TransitionType, :CROSSFADE)
end
end
end
gtk3 test: indent
# Copyright (C) 2014 Ruby-GNOME2 Project Team
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Exercises Gtk::Stack (introduced in GTK+ 3.10): child management with
# name/title child properties, the homogeneous/transition accessors, and
# visible-child selection.
class TestGtkStack < Test::Unit::TestCase
  include GtkTestUtils
  def setup
    # Gtk::Stack needs GTK+ >= 3.10; skip the whole case on older versions.
    only_gtk_version(3, 10, 0)
    @stack = Gtk::Stack.new
  end
  class TestAdd < self
    def setup
      super
      @child = Gtk::EventBox.new
    end
    # Stack#add should return the stack itself (chainable).
    def test_return_value
      assert_equal(@stack, @stack.add(@child))
    end
    def test_added
      @stack.add(@child)
      assert_equal([@child], @stack.children)
    end
    # add(child, name) stores the name as the "name" child property.
    def test_name
      widget_name = "set widget name"
      @stack.add(@child, widget_name)
      assert_equal(widget_name,
                   @stack.child_get_property(@child, "name"))
    end
    # add(child, name, title) additionally stores the "title" child property.
    def test_name_add_title
      widget_name = "set widget name"
      widget_title = "set widget title"
      @stack.add(@child, widget_name, widget_title)
      assert_equal([
                     widget_name,
                     widget_title,
                   ],
                   [
                     @stack.child_get_property(@child, "name"),
                     @stack.child_get_property(@child, "title"),
                   ])
    end
  end
  def test_homogeneous_accessors
    @stack.homogeneous = false
    assert_false(@stack.homogeneous?)
  end
  def test_transition_duration_accessors
    duration = 500
    @stack.transition_duration = duration
    assert_equal(duration, @stack.transition_duration)
  end
  def test_transition_type_accessors
    stack_transition_type = Gtk::Stack::TransitionType::SLIDE_UP
    @stack.transition_type = stack_transition_type
    assert_equal(stack_transition_type, @stack.transition_type)
  end
  class TestVisibleChild < self
    # Only a realized/shown child can become the visible child.
    def test_visible_child_accessors
      visible_widget = Gtk::EventBox.new
      visible_widget.show
      @stack.add(visible_widget)
      @stack.visible_child = visible_widget
      assert_equal(visible_widget, @stack.visible_child)
    end
  end
  class TestEnum < self
    def test_transition_type
      assert_const_defined(Gtk::Stack::TransitionType, :CROSSFADE)
    end
  end
end
|
require "bytestream"
foo=ByteStream.new("fooz")
p foo
foo^="123"
p foo
p foo^"fooz"
Get yes/no responses from the tests
# Smoke test for the ByteStream class: equality, XOR, and the guarantee
# that XOR-ing back does not strip the zero padding.
require "bytestream"

foo = ByteStream.new("fooz")

puts "Checking integrity..."
puts(foo == "fooz" ? "Ok" : "No")

puts "Checking XOR..."
foo ^= "123"
puts(foo == "W]\\z" ? "Ok" : "No")

puts "Checking that XOR won't de-pad..."
foo ^= "fooz"
puts(foo == "123\000" ? "Ok" : "No")
|
require './source'
require './provider'
require './consumer'
require './deliverer'
require './user_stream'
class SimpleConsumer < Chizuru::Consumer
def receive(data)
p data
end
end
class LoggingConsumer < Chizuru::Consumer
def initialize(log_file)
@log_file = log_file
end
def receive(data)
open(@log_file, 'a:utf-8') do |file|
file.puts(data.inspect)
end
end
end
provider = Chizuru::Provider.new
provider.add_consumer(SimpleConsumer.new)
provider.add_consumer(LoggingConsumer.new('./stream.log'))
source = Chizuru::UserStream.new(provider, 'credential.yaml', 'meitanbot', 'cert', 'meitanbot/2.0 (Chizuru/1.0)')
source.start
Modify test code for recent changes
require './provider'
require './consumer'
require './credential'
require './user_stream'
# Consumer that forwards every received payload straight on.
class SimpleConsumer < Chizuru::Consumer
  def receive(data)
    # deliver is provided by Chizuru::Consumer — presumably fans the data
    # out to the registered deliverers; confirm against consumer.rb.
    deliver(data)
  end
end
# Deliverer that echoes every payload to stdout in inspect form.
class EchoDeliverer
  # Prints data.inspect followed by a newline; returns data.
  def deliver(data)
    $stdout.puts(data.inspect)
    data
  end
end
# Appends each delivered payload, in inspect form, to a UTF-8 log file.
class LoggingDeliverer
  # log_file - path of the file to append to (created on first write).
  def initialize(log_file)
    @log_file = log_file
  end

  # Appends data.inspect plus a newline. Uses File.open explicitly instead
  # of Kernel#open, which would treat a path starting with "|" as a shell
  # command (RuboCop Security/Open). Opened per call so each line is
  # flushed immediately.
  def deliver(data)
    File.open(@log_file, 'a:utf-8') do |file|
      file.puts(data.inspect)
    end
  end
end
# Wiring: provider -> SimpleConsumer -> {Echo, Logging} deliverers, then
# start streaming the authenticated user's timeline.
provider = Chizuru::Provider.new
consumer = SimpleConsumer.new
consumer.add_deliverer(EchoDeliverer.new)
consumer.add_deliverer(LoggingDeliverer.new('./stream.log'))
provider.add_consumer(consumer)
credential = Chizuru::Credential.new('credential.yaml')
# Positional args appear to be: provider, credential, screen name, cert,
# user agent — NOTE(review): inferred from the call site; confirm in
# user_stream.rb.
source = Chizuru::UserStream.new(provider, credential, 'meitanbot', 'cert', 'meitanbot/2.0 (Chizuru/1.0)')
source.start
|
# git branch testing
# 创建分支 testing
# git checkout testing
# 切换到 testing 分支
# git checkout master
# 切换到 master 分支,指向 master 分支
git diff --cached test
# git branch testing
# 创建分支 testing
# git checkout testing
# 切换到 testing 分支
# git checkout master
# 切换到 master 分支,指向 master 分支
Why is this garbled? ("luan ma" / 乱码 — the Chinese comments above render as mojibake under some encodings.)
|
#!/usr/bin/env ruby
require 'open-uri'
require 'twitter'
require 'json'
require 'fileutils'
PIC_BASE='pics'
auth = JSON.parse IO.read('auth.json')
Twitter.configure do |config|
config.consumer_key = auth['consumer_key']
config.consumer_secret = auth['consumer_secret']
config.oauth_token = auth['token']
config.oauth_token_secret = auth['secret']
end
# def get_tweets user, count, offset
# first_page = if offset
# offset/100 + 1
# else
# 1
# end
# page_indizies = [first_page]
# page_count = count/100
# (page_count - 1 ).times do
# page_indizies.push(page_indizies.last+1)
# end
# tweets = []
# until page_indizies.empty? do
# idx = page_indizies.shift
# p [100, idx]
# # tweets << Twitter.user_timeline(user, count: 200, page: idx)
# end
# remaining = count - page_count*100
# if remaining > 0
# p [remaining, first_page + page_count ]
# # tweets << Twitter.user_timeline(user, count: remaining, page: first_page + page_count + 1 )
# end
# p "fetched tweets from ##{first_page*100} - ##{100*(page_count+first_page)}"
# tweets.flatten
# end
def collect_with_max_id(collection=[], count=200, max_id=nil, &block)
response = nil
if count > 200
response = yield max_id, 200
count -= 200
else
response = yield max_id, count
count = 0
end
collection += response
response.empty? ? collection.flatten : collect_with_max_id(collection, count, response.last.id - 1, &block)
end
def get_tweets(user, count=50, max_id=nil)
collect_with_max_id([], count, max_id) do |max_id|
options = {:count => 200, :exclude_replies => true}
options[:max_id] = max_id unless max_id.nil?
Twitter.user_timeline(user, options)
end
end
def extract_image_url tweet
tweet.media.map do |e|
# if e.sizes[:large]
# "#{e.media_url}:large"
# else
e.media_url
# end
end
end
def download path, url
path = "#{PIC_BASE}/#{path}"
return if File.exists?(path)
puts "downloading #{url} as #{path}"
open(path,'wb') {|f| f << open(url).read }
rescue => e
p e
return
end
def download_images user, images
images.select!{|e| !(e.nil? || e['url'].empty? )}
images.each do |img|
next if img['url'].nil?
img['timestamp'] = img['timestamp'].gsub(' ','0')
if img['url'].size == 1
download("#{user}/#{img['timestamp']}_#{img['id']}.jpg", img['url'].first)
else
img['url'].each.with_index do |url,index|
download("#{user}/#{img['timestamp']}_#{img['id']}(#{index}).jpg", url)
end
end
end
end
def run_cached user
download_images user, JSON.parse(IO.read("#{PIC_BASE}/#{user}/images.json"))
end
def run user, count: nil, max_id: nil
base = "#{PIC_BASE}/#{user}"
unless File.exists?(base) && Dir.exists?(base)
FileUtils.mkdir base
end
tweets = get_tweets user, count, max_id
images = tweets.map do |tweet|
urls = extract_image_url(tweet)
next if urls.nil? || urls.empty?
{ 'timestamp' => tweet.created_at.strftime('%Y%m%d+%H%M'),
'url' => urls,
'id' => tweet.id }
end
IO.write("#{base}/images.json", images.to_json)
p images.first['id']
download_images user, images
p images.last['id']
end
require "optparse"
options = {count: 200}
user = nil
ARGV.options do |opts|
opts.banner = "Usage: #{File.basename($PROGRAM_NAME)} [OPTIONS] TWITTER_HANDLE"
opts.separator ""
opts.separator "Specific Options:"
opts.on( "-c", "--count COUNT", Integer,
"Count of tweets to search for images" ) do |opt|
options[:count] = opt
end
opts.on( "-m", "--max-id MAXID", Integer,
"max_id of newest tweet -> only load older tweets" ) do |opt|
options[:max_id] = opt
end
opts.separator "Common Options:"
opts.on( "-h", "--help",
"Show this message." ) do
puts opts
exit
end
begin
opts.parse!
user = ARGV.first
rescue
puts opts
exit
end
end
run user, options
# case ARGV.size
# when 0
# puts 'no user given'
# exit 1
# when 1
# user = ARGV.first
# puts "getting picture in the last 200 tweets for user #{user}"
# run user
# when 2
# user = ARGV.first
# count = ARGV[1].to_i
# puts "getting picture in the last #{count} tweets for user #{user}"
# run user, count: count
# else
# p ARGV
# end
remove comment
#!/usr/bin/env ruby
require 'open-uri'
require 'twitter'
require 'json'
require 'fileutils'
PIC_BASE='pics'
auth = JSON.parse IO.read('auth.json')
Twitter.configure do |config|
config.consumer_key = auth['consumer_key']
config.consumer_secret = auth['consumer_secret']
config.oauth_token = auth['token']
config.oauth_token_secret = auth['secret']
end
# Pages backwards through a timeline by repeatedly yielding (max_id, batch)
# until either `count` tweets have been requested or an empty page comes
# back.
#
# collection - accumulator carried across recursive calls.
# count      - total number of tweets still wanted, requested in chunks of
#              at most 200 (the Twitter API page maximum).
# max_id     - fetch only tweets with ids <= max_id (nil on the first page).
# block      - receives (max_id, batch_size) and returns an array of items
#              responding to #id.
#
# Returns the flattened array of everything collected.
def collect_with_max_id(collection = [], count = 200, max_id = nil, &block)
  batch = count > 200 ? 200 : count
  response = yield(max_id, batch)
  count -= batch
  collection += response
  # Stop on an empty page OR once the requested count is exhausted; the old
  # version ignored `count` here and kept paging (with zero-sized requests)
  # until the API ran dry.
  if response.empty? || count <= 0
    collection.flatten
  else
    collect_with_max_id(collection, count, response.last.id - 1, &block)
  end
end
# Fetches up to `count` tweets from a user's timeline, paging backwards
# from max_id when given. Replies are excluded.
def get_tweets(user, count = 50, max_id = nil)
  collect_with_max_id([], count, max_id) do |page_max_id, batch|
    # Honour the batch size handed down by the pager; the old block dropped
    # the second argument and always requested 200, overshooting `count`.
    options = { :count => batch, :exclude_replies => true }
    options[:max_id] = page_max_id unless page_max_id.nil?
    Twitter.user_timeline(user, options)
  end
end
# Returns the media URLs attached to a tweet (one per media entity).
# NOTE(review): a ":large" size-suffix variant was commented out upstream;
# plain media_url is used for every entity.
def extract_image_url(tweet)
  tweet.media.map(&:media_url)
end
# Downloads url to PIC_BASE/path unless the file already exists.
# Any error (network, filesystem) is printed and swallowed so one failed
# image does not abort the whole batch.
def download path, url
  path = "#{PIC_BASE}/#{path}"
  return if File.exist?(path) # File.exists? is a deprecated alias
  puts "downloading #{url} as #{path}"
  # Be explicit about the local write: File.open cannot be tricked into
  # spawning a command the way Kernel#open can. The remote read keeps
  # Kernel#open so open-uri handles the URL.
  File.open(path, 'wb') { |f| f << open(url).read }
rescue => e
  p e
  return
end
# Downloads every image entry for `user` into PIC_BASE/<user>/.
#
# images - array of hashes with 'timestamp', 'url' (array of URLs) and
#          'id'. nil entries and entries with no URLs are dropped IN PLACE
#          (select! mutates the caller's array).
def download_images user, images
  images.select!{|e| !(e.nil? || e['url'].empty? )}
  images.each do |img|
    next if img['url'].nil?
    # Normalise embedded spaces to zeros so filenames sort correctly —
    # presumably older cached metadata used space-padded timestamps;
    # TODO confirm (the current strftime format has no spaces).
    img['timestamp'] = img['timestamp'].gsub(' ','0')
    if img['url'].size == 1
      download("#{user}/#{img['timestamp']}_#{img['id']}.jpg", img['url'].first)
    else
      # Several images in one tweet: suffix an index to keep names unique.
      img['url'].each.with_index do |url,index|
        download("#{user}/#{img['timestamp']}_#{img['id']}(#{index}).jpg", url)
      end
    end
  end
end
# Re-runs the download step from the cached images.json written by a
# previous `run`, without touching the Twitter API.
def run_cached user
  download_images user, JSON.parse(IO.read("#{PIC_BASE}/#{user}/images.json"))
end
# Fetches up to `count` tweets for `user`, writes the image metadata to
# PIC_BASE/<user>/images.json, and downloads every image.
#
# count  - maximum number of tweets to scan (forwarded to get_tweets).
# max_id - only scan tweets older than this id, when given.
def run user, count: nil, max_id: nil
  base = "#{PIC_BASE}/#{user}"
  # mkdir_p also creates PIC_BASE itself on a fresh checkout; the old
  # File.exists?/Dir.exists? predicates are deprecated aliases.
  FileUtils.mkdir_p(base) unless Dir.exist?(base)
  tweets = get_tweets user, count, max_id
  images = tweets.map do |tweet|
    urls = extract_image_url(tweet)
    next if urls.nil? || urls.empty?
    { 'timestamp' => tweet.created_at.strftime('%Y%m%d+%H%M'),
      'url' => urls,
      'id' => tweet.id }
  end
  # `next` inside map leaves nil entries behind; drop them so images.json
  # contains no nulls and first/last below cannot raise on nil.
  images.compact!
  IO.write("#{base}/images.json", images.to_json)
  if images.empty?
    puts 'no images found'
  else
    p images.first['id']
    download_images user, images
    p images.last['id']
  end
end
require "optparse"

# Command-line entry point: parse flags, then kick off the image run.
options = { count: 200 }
user = nil
ARGV.options do |opts|
  opts.banner = "Usage: #{File.basename($PROGRAM_NAME)} [OPTIONS] TWITTER_HANDLE"
  opts.separator ""
  opts.separator "Specific Options:"
  opts.on("-c", "--count COUNT", Integer,
          "Count of tweets to search for images") do |opt|
    options[:count] = opt
  end
  opts.on("-m", "--max-id MAXID", Integer,
          "max_id of newest tweet -> only load older tweets") do |opt|
    options[:max_id] = opt
  end
  opts.separator "Common Options:"
  opts.on("-h", "--help",
          "Show this message.") do
    puts opts
    exit
  end
  begin
    opts.parse!
    user = ARGV.first
    # Bail out with the usage text when no handle was given, instead of
    # crashing later inside `run`.
    raise OptionParser::MissingArgument, 'TWITTER_HANDLE' if user.nil?
  rescue
    puts opts
    exit
  end
end

# Splat the hash so count/max_id bind to run's keyword parameters on
# Ruby 3.x as well (a bare Hash argument stopped auto-converting there).
run user, **options
|
class Vips < Formula
desc "Image processing library"
homepage "https://github.com/jcupitt/libvips"
url "https://github.com/jcupitt/libvips/releases/download/v8.5.4/vips-8.5.4.tar.gz"
sha256 "fc641833a080319eb03d7e251708ebbcf87d2df507604eb4b32b19308000578e"
bottle do
sha256 "9dbd5f1936ae43c487564ba275af996f56f0778245062a93e93c3a05cbc060ba" => :sierra
sha256 "67f9e20785029b87356a830867ef79ab4bedccb89e1bab70f63dc53a53840c44" => :el_capitan
sha256 "659279ce7a150f1135468d50e8cb0d131bf7cbb92592d80ea3d84c24e5f19c0f" => :yosemite
sha256 "700df92f25710571bfe63fbcba55582dc81b59abd6c1a25c7b001ae248586450" => :x86_64_linux
end
option "without-test", "Disable build time checks (not recommended)"
deprecated_option "without-check" => "without-test"
depends_on "pkg-config" => :build
depends_on "fontconfig"
depends_on "gettext"
depends_on "glib"
depends_on "libpng" => :recommended
depends_on "jpeg" => :recommended
depends_on "orc" => :recommended
depends_on "libgsf" => :recommended
depends_on "libtiff" => :recommended
depends_on "fftw" => :recommended
depends_on "little-cms2" => :recommended
depends_on "pango" => :recommended
depends_on "libexif" => :recommended
depends_on "gobject-introspection" => :recommended
depends_on "pygobject3" => :recommended
depends_on "python" => :recommended
depends_on "poppler" => :recommended
depends_on "librsvg" => :recommended
depends_on "giflib" => :recommended
depends_on "openslide" => :optional
depends_on "imagemagick" => :optional
depends_on "graphicsmagick" => :optional
depends_on "openexr" => :optional
depends_on "cfitsio" => :optional
depends_on "webp" => :optional
depends_on "python3" => :optional
depends_on "libmatio" => :optional
depends_on "mozjpeg" => :optional
depends_on "jpeg-turbo" => :optional
def install
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
]
args.concat %w[--with-magick --with-magickpackage=GraphicsMagick] if build.with? "graphicsmagick"
system "./configure", *args
system "make", "check" if build.with? "check"
system "make", "install"
end
test do
system "#{bin}/vips", "-l"
system "#{bin}/vipsheader", test_fixtures("test.png")
end
end
vips 8.5.5
Closes #5715.
Signed-off-by: Michka Popoff <7b0496f66f66ee22a38826c310c38b415671b832@gmail.com>
# Homebrew formula for libvips 8.5.5.
class Vips < Formula
  desc "Image processing library"
  homepage "https://github.com/jcupitt/libvips"
  url "https://github.com/jcupitt/libvips/releases/download/v8.5.5/vips-8.5.5.tar.gz"
  sha256 "0891af4531d6f951a16ca6d03020b73796522d5fcf7c6247f2f04c896ecded28"
  # NOTE(review): these bottle checksums predate the 8.5.5 source bump and
  # will be regenerated by CI — confirm before shipping.
  bottle do
    sha256 "9dbd5f1936ae43c487564ba275af996f56f0778245062a93e93c3a05cbc060ba" => :sierra
    sha256 "67f9e20785029b87356a830867ef79ab4bedccb89e1bab70f63dc53a53840c44" => :el_capitan
    sha256 "659279ce7a150f1135468d50e8cb0d131bf7cbb92592d80ea3d84c24e5f19c0f" => :yosemite
    sha256 "700df92f25710571bfe63fbcba55582dc81b59abd6c1a25c7b001ae248586450" => :x86_64_linux
  end
  option "without-test", "Disable build time checks (not recommended)"
  deprecated_option "without-check" => "without-test"
  depends_on "pkg-config" => :build
  depends_on "fontconfig"
  depends_on "gettext"
  depends_on "glib"
  depends_on "libpng" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "orc" => :recommended
  depends_on "libgsf" => :recommended
  depends_on "libtiff" => :recommended
  depends_on "fftw" => :recommended
  depends_on "little-cms2" => :recommended
  depends_on "pango" => :recommended
  depends_on "libexif" => :recommended
  depends_on "gobject-introspection" => :recommended
  depends_on "pygobject3" => :recommended
  depends_on "python" => :recommended
  depends_on "poppler" => :recommended
  depends_on "librsvg" => :recommended
  depends_on "giflib" => :recommended
  depends_on "openslide" => :optional
  depends_on "imagemagick" => :optional
  depends_on "graphicsmagick" => :optional
  depends_on "openexr" => :optional
  depends_on "cfitsio" => :optional
  depends_on "webp" => :optional
  depends_on "python3" => :optional
  depends_on "libmatio" => :optional
  depends_on "mozjpeg" => :optional
  depends_on "jpeg-turbo" => :optional
  def install
    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    args.concat %w[--with-magick --with-magickpackage=GraphicsMagick] if build.with? "graphicsmagick"
    system "./configure", *args
    # The option was renamed without-check -> without-test above, so gate
    # the suite on "test"; checking the removed "check" option meant the
    # build-time tests never ran.
    system "make", "check" if build.with? "test"
    system "make", "install"
  end
  test do
    system "#{bin}/vips", "-l"
    system "#{bin}/vipsheader", test_fixtures("test.png")
  end
end
|
# Homebrew formula for XCDF, a bitpacking-based compressed data format.
class Xcdf < Formula
  desc "High performance bitpacking algorithm."
  homepage "https://github.com/jimbraun/XCDF"
  url "https://github.com/jimbraun/XCDF/archive/v2.09.00.tar.gz"
  sha256 "49a2357392008cf12dc956a2d43e4b0948f1d8c42e014fa04db7e8ac4d267567"
  head "https://github.com/jimbraun/XCDF.git"

  depends_on "cmake" => :build
  depends_on :python

  def install
    # Build out-of-source in a scratch directory, pointing cmake back at buildpath.
    mktemp do
      # Ask the active Python for its prefix and include dir so the
      # bindings compile against the right interpreter.
      pypref = `python -c 'import sys;print(sys.prefix)'`.strip
      pyinc = `python -c 'from distutils import sysconfig;print(sysconfig.get_python_inc(True))'`.strip
      # NOTE(review): PYTHON_LIBRARY hard-codes python 2.7 and a .dylib
      # suffix — verify for non-macOS / non-2.7 Pythons.
      args = %W[
        -DCMAKE_INSTALL_PREFIX=#{prefix}
        -DPYTHON_INCLUDE_DIR='#{pyinc}'
        -DPYTHON_LIBRARY='#{pypref}/lib/libpython2.7.dylib'
      ]
      system "cmake", buildpath, *(std_cmake_args + args)
      system "make"
      system "make", "install"
    end
  end

  # Run the self-test binaries the project installs into bin.
  test do
    system "#{bin}/xcdf-append-test"
    system "#{bin}/xcdf-buffer-fill-test"
    system "#{bin}/xcdf-concat-seek-test"
    system "#{bin}/xcdf-random-test"
    system "#{bin}/xcdf-seek-test"
    system "#{bin}/xcdf-simple-test"
    system "#{bin}/xcdf-speed-test"
  end
end
xcdf: add 2.09.00 bottle.
# Homebrew formula for XCDF, a bitpacking-based compressed data format.
# This revision adds prebuilt bottles for el_capitan/yosemite/mavericks.
class Xcdf < Formula
  desc "High performance bitpacking algorithm."
  homepage "https://github.com/jimbraun/XCDF"
  url "https://github.com/jimbraun/XCDF/archive/v2.09.00.tar.gz"
  sha256 "49a2357392008cf12dc956a2d43e4b0948f1d8c42e014fa04db7e8ac4d267567"
  head "https://github.com/jimbraun/XCDF.git"

  bottle do
    cellar :any
    sha256 "ad83615a6d90b1c6d8e9bf53ec615fad0e5803f055bf234e103356f6adc2f50a" => :el_capitan
    sha256 "84d224c1bc39bb28549ad8e4c08f0d457fbd3fce472132be95fe71e9fabc05ad" => :yosemite
    sha256 "223d55367d2891499e9f5b0deeb0b63f6ef9ec61d0a953912b8fc4388c205104" => :mavericks
  end

  depends_on "cmake" => :build
  depends_on :python

  def install
    # Build out-of-source in a scratch directory, pointing cmake back at buildpath.
    mktemp do
      # Ask the active Python for its prefix and include dir so the
      # bindings compile against the right interpreter.
      pypref = `python -c 'import sys;print(sys.prefix)'`.strip
      pyinc = `python -c 'from distutils import sysconfig;print(sysconfig.get_python_inc(True))'`.strip
      # NOTE(review): PYTHON_LIBRARY hard-codes python 2.7 and a .dylib
      # suffix — verify for non-macOS / non-2.7 Pythons.
      args = %W[
        -DCMAKE_INSTALL_PREFIX=#{prefix}
        -DPYTHON_INCLUDE_DIR='#{pyinc}'
        -DPYTHON_LIBRARY='#{pypref}/lib/libpython2.7.dylib'
      ]
      system "cmake", buildpath, *(std_cmake_args + args)
      system "make"
      system "make", "install"
    end
  end

  # Run the self-test binaries the project installs into bin.
  test do
    system "#{bin}/xcdf-append-test"
    system "#{bin}/xcdf-buffer-fill-test"
    system "#{bin}/xcdf-concat-seek-test"
    system "#{bin}/xcdf-random-test"
    system "#{bin}/xcdf-seek-test"
    system "#{bin}/xcdf-simple-test"
    system "#{bin}/xcdf-speed-test"
  end
end
|
# Homebrew formula for xfig, an interactive vector drawing tool for X11.
class Xfig < Formula
  desc "Interactive drawing tool for X"
  homepage "https://mcj.sourceforge.io"
  url "https://downloads.sourceforge.net/project/mcj/xfig-3.2.7a.tar.xz"
  sha256 "ca89986fc9ddb9f3c5a4f6f70e5423f98e2f33f5528a9d577fb05bbcc07ddf24"

  depends_on "fig2dev"
  depends_on "ghostscript"
  depends_on "jpeg"
  depends_on :x11

  def install
    system "./configure", "--prefix=#{prefix}",
           "--disable-dependency-tracking",
           "--disable-silent-rules"
    system "make", "install"
  end

  # The version banner is checked on combined stdout+stderr (2>&1).
  test do
    assert_match "Xfig #{version}", shell_output("#{bin}/xfig -v 2>&1")
  end
end
xfig: replace :x11 dep with Homebrew libxaw3d
# Homebrew formula for xfig, an interactive vector drawing tool for X11.
# revision 1: the generic :x11 dependency was replaced with Homebrew's
# own libxaw3d, hence the rebuild of the same upstream version.
class Xfig < Formula
  desc "Interactive drawing tool for X"
  homepage "https://mcj.sourceforge.io"
  url "https://downloads.sourceforge.net/project/mcj/xfig-3.2.7a.tar.xz"
  sha256 "ca89986fc9ddb9f3c5a4f6f70e5423f98e2f33f5528a9d577fb05bbcc07ddf24"
  revision 1

  depends_on "fig2dev"
  depends_on "ghostscript"
  depends_on "jpeg"
  depends_on "libxaw3d"

  def install
    system "./configure", "--prefix=#{prefix}",
           "--disable-dependency-tracking",
           "--disable-silent-rules"
    system "make", "install"
  end

  # The version banner is checked on combined stdout+stderr (2>&1).
  test do
    assert_match "Xfig #{version}", shell_output("#{bin}/xfig -v 2>&1")
  end
end
|
#! /usr/bin/env ruby
#
# skyline.rb
#
#
# DESCRIPTION:
# This handler sends graphite like metrics (sensu's default) to a Skyline server
# (https://github.com/etsy/skyline) via a UDP socket.
#
# OUTPUT:
# msgpack binary format [<metric name>, [<timestamp>, <value>]] over UDP
#
# PLATFORMS:
# Linux, Windows
#
# DEPENDENCIES:
# gem: sensu-handler
# gem: msgpack
#
# USAGE:
# example commands
# {
# "skyline": {
# "port": "2025",
# "server": "skyline.example.com"
# },
# {
# "handlers": {
# "skyline": {
# "type": "pipe",
# "command": "/etc/sensu/handlers/skyline.rb",
# "severities": [
# "ok",
# "warning",
# "critical",
# "unknown"
# ]
# }
# }
# }
# }
#
# NOTES:
# Skyline 'server' and 'port' must be specified in a config file in /etc/sensu/conf.d.
# See skyline.json for an example.
#
# LICENSE:
# Derek Tracy tracyde@gmail.com
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-handler'
require 'socket'
require 'msgpack'
# Relays graphite-style metric lines from the Sensu event output to a
# Skyline server as msgpack-encoded UDP datagrams.
class Skyline < Sensu::Handler
  # Metric handlers must see every event, so disable Sensu's filtering.
  def filter; end

  # Parse each "name value timestamp" line of the check output and send
  # it to the server/port configured under settings['skyline'].
  def handle
    config = settings['skyline']
    sock = UDPSocket.new
    sock.connect(config['server'], config['port'])
    @event['check']['output'].each_line do |line|
      fields = line.split
      next unless fields.count == 3
      name, raw_value, raw_time = fields
      # skyline needs ["metric_name", [timestamp, value]]
      payload = [name, [raw_time.to_i, raw_value.to_f]].to_msgpack
      sock.send payload, 0
    end
    sock.flush
    sock.close
  end
end
Update skyline.rb
removed trailing whitespace
#! /usr/bin/env ruby
#
# skyline.rb
#
#
# DESCRIPTION:
# This handler sends graphite like metrics (sensu's default) to a Skyline server
# (https://github.com/etsy/skyline) via a UDP socket.
#
# OUTPUT:
# msgpack binary format [<metric name>, [<timestamp>, <value>]] over UDP
#
# PLATFORMS:
# Linux, Windows
#
# DEPENDENCIES:
# gem: sensu-handler
# gem: msgpack
#
# USAGE:
# example commands
# {
# "skyline": {
# "port": "2025",
# "server": "skyline.example.com"
# },
# {
# "handlers": {
# "skyline": {
# "type": "pipe",
# "command": "/etc/sensu/handlers/skyline.rb",
# "severities": [
# "ok",
# "warning",
# "critical",
# "unknown"
# ]
# }
# }
# }
# }
#
# NOTES:
# Skyline 'server' and 'port' must be specified in a config file in /etc/sensu/conf.d.
# See skyline.json for an example.
#
# LICENSE:
# Derek Tracy tracyde@gmail.com
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-handler'
require 'socket'
require 'msgpack'
# Sensu handler: relays graphite-style metric lines from the event
# output to a Skyline server as msgpack-encoded UDP datagrams.
class Skyline < Sensu::Handler
  # override filters from Sensu::Handler. not appropriate for metric handlers
  def filter; end

  # Send each "name value timestamp" line of the check output to the
  # server/port configured under settings['skyline'].
  def handle
    server = settings['skyline']['server']
    port = settings['skyline']['port']
    sock = UDPSocket.new
    sock.connect(server, port)
    @event['check']['output'].each_line do |metric|
      m = metric.split
      # Skip malformed lines: exactly name, value, timestamp expected.
      next unless m.count == 3
      # skyline needs ["metric_name", [timestamp, value]]
      name = m[0]
      value = m[1].to_f
      time = m[2].to_i
      msg = [name, [time, value]].to_msgpack
      sock.send msg, 0
    end
    sock.flush
    sock.close
  end
end
|
# Homebrew formula for pyfa, the ship fitting tool for EVE Online.
# Still under construction (see caveats below).
class Pyfa < Formula
  desc "Ship fitting tool for EVE Online game"
  homepage "https://github.com/DarkFenX/Pyfa/wiki"
  url "https://github.com/thorr18/Pyfa/archive/v1.18.0.tar.gz"
  #version already tagged by repo
  # Checksum of the v1.18.0 tarball; the previous value no longer
  # matched the download.
  sha256 "32a598f336cca869180110b51e9149cc2007f0f760c4c5eb8b9e81f73f8abee3"

  bottle do
    cellar :any
    # No bottles have been built yet. (A `puts` here would print every
    # time the formula is loaded, so keep this block side-effect free.)
  end

  head "https://github.com/thorr18/Pyfa.git", :branch => "master"

  option "with-external", "use Python dependencies installed with Pip instead of bundling"
  deprecated_option "Wx3" => "noWx3"
  option "noWx3", "use Pyfa with wx 2.X"

  if MacOS.version <= :snow_leopard
    depends_on :python
  else
    depends_on :python => [:optional, "framework"]
  end

  if build.with? "external"
    # Use Python dependencies already installed via pip/Homebrew taps.
    depends_on "wxPython" => [:python, "framework"]
    depends_on "matplotlib" => [:python, :recommended]
    depends_on "numpy" => [:python, :recommended]
    depends_on "python-dateutil" => [:python, "dateutil"] if build.without? "matplotlib"
    depends_on "SQLAlchemy" => :python
    depends_on "requests" => :python
  else
    # Default: bundle the pure-Python dependencies into libexec.
    depends_on "wxPython" => "framework" if build.without? "noWx3"
    depends_on "homebrew/versions/wxPython2.8" => "framework" if build.with? "noWx3"
    depends_on "homebrew/python/matplotlib" => :recommended
    depends_on "homebrew/python/numpy" => :recommended

    # The trailing do-block must belong to `resource`, not to
    # `build.without?` (which the old `resource "x" if cond do` parse
    # attached it to), so guard the declaration with an if statement.
    if build.without? "matplotlib"
      resource "python-dateutil" do
        url "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz"
        sha256 "3e95445c1db500a344079a47b171c45ef18f57d188dffdb0e4165c71bea8eb3d"
      end
    end

    resource "SQLAlchemy" do
      url "https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-1.0.11.tar.gz"
      sha256 "0b24729787fa1455009770880ea32b1fa5554e75170763b1aef8b1eb470de8a3"
    end

    resource "requests" do
      url "https://pypi.python.org/packages/source/r/requests/requests-2.6.2.tar.gz"
      sha256 "0577249d4b6c4b11fd97c28037e98664bfaa0559022fee7bcef6b752a106e505"
    end
  end

  def install
    pyver = Language::Python.major_minor_version "python"
    pathsitetail = "lib/python"+pyver+"/site-packages"
    pathvendor = libexec+"vendor"
    pathvendorsite = pathvendor+pathsitetail
    pathsite = libexec+pathsitetail

    # Stage the vendored Python resources under libexec/vendor.
    ENV.prepend_create_path "PYTHONPATH", pathvendorsite
    resources.each do |r|
      r.stage do
        system "python", *Language::Python.setup_install_args(pathvendor)
      end
    end

    # Install pyfa itself and wrap its scripts with the built PYTHONPATH.
    ENV.prepend_create_path "PYTHONPATH", pathsite
    system "python", *Language::Python.setup_install_args(libexec)
    bin.install Dir[libexec/"bin/*"]
    bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
    ENV.prepend_create_path "PYTHONPATH", libexec

    # Copy the application data directories into libexec.
    # (%w does not strip quotes, so the old %w["somestuff" | ...] list
    # produced literal tokens like "\"somestuff\"" and matched nothing.)
    %w[somestuff otherstuff iforget].each do |d|
      libexec.install Dir[d]
    end
  end

  def caveats; <<-EOS.undent
    This formula is still under construction.
    EOS
  end

  test do
    Language::Python.each_python(build) do |python, _version|
      # Pass argv elements separately so the -c payload is not split by the shell.
      system python, "-c", "import wx; print wx.version()"
    end
    # "-d" and "-test" are two flags; as one string pyfa would receive
    # the literal argument "-d -test".
    system "#{bin}/Pyfa", "-d", "-test"
  end
end
update pyfa SHA to match the pyfa version bump
# Homebrew formula for pyfa, the ship fitting tool for EVE Online.
# Still under construction (see caveats below).
class Pyfa < Formula
  desc "Ship fitting tool for EVE Online game"
  homepage "https://github.com/DarkFenX/Pyfa/wiki"
  url "https://github.com/thorr18/Pyfa/archive/v1.18.0.tar.gz"
  #version already tagged by repo
  sha256 "32a598f336cca869180110b51e9149cc2007f0f760c4c5eb8b9e81f73f8abee3"
  bottle do
    cellar :any
    # NOTE(review): `puts` inside the formula runs every time the
    # formula file is loaded — replace with a comment.
    puts "bottle is empty"
  end
  head "https://github.com/thorr18/Pyfa.git", :branch => "master"
  option "with-external", "use Python dependencies installed with Pip instead of bundling"
  deprecated_option "Wx3" => "noWx3"
  option "noWx3", "use Pyfa with wx 2.X"
  if MacOS.version <= :snow_leopard
    depends_on :python
  else
    depends_on :python => [:optional, "framework"]
  end
  if build.with? "external"
    puts "with external option"
    depends_on "wxPython" => [:python, "framework"]
    depends_on "matplotlib" => [:python, :recommended]
    depends_on "numpy" => [:python, :recommended]
    depends_on "python-dateutil" => [:python, "dateutil"] if build.without? "matplotlib"
    depends_on "SQLAlchemy" => :python
    depends_on "requests" => :python
  else
    puts "default bundled option; not external"
    depends_on "wxPython" => "framework" if build.without? "noWx3"
    depends_on "homebrew/versions/wxPython2.8" => "framework" if build.with? "noWx3"
    depends_on "homebrew/python/matplotlib" => :recommended
    depends_on "homebrew/python/numpy" => :recommended
    # NOTE(review): the trailing do-block binds to `build.without?`, not
    # to `resource` — this resource is likely declared without its block.
    # Wrap the declaration in an explicit `if` statement instead.
    resource "python-dateutil" if build.without? "matplotlib" do
      url "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz"
      sha256 "3e95445c1db500a344079a47b171c45ef18f57d188dffdb0e4165c71bea8eb3d"
    end
    resource "SQLAlchemy" do
      url "https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-1.0.11.tar.gz"
      sha256 "0b24729787fa1455009770880ea32b1fa5554e75170763b1aef8b1eb470de8a3"
    end
    resource "requests" do
      url "https://pypi.python.org/packages/source/r/requests/requests-2.6.2.tar.gz"
      sha256 "0577249d4b6c4b11fd97c28037e98664bfaa0559022fee7bcef6b752a106e505"
    end
  end

  def install
    pyver = Language::Python.major_minor_version "python"
    pathsitetail = "lib/python"+pyver+"/site-packages"
    pathvendor = libexec+"vendor"
    pathvendorsite = pathvendor+pathsitetail
    pathsite = libexec+pathsitetail
    # Stage the vendored Python resources under libexec/vendor.
    ENV.prepend_create_path "PYTHONPATH", pathvendorsite
    resources.each do |r|
      r.stage do
        system "python", *Language::Python.setup_install_args(pathvendor)
      end
    end
    # Install pyfa itself and wrap its scripts with the built PYTHONPATH.
    ENV.prepend_create_path "PYTHONPATH", pathsite
    system "python", *Language::Python.setup_install_args(libexec)
    bin.install Dir[libexec/"bin/*"]
    bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
    ENV.prepend_create_path "PYTHONPATH", libexec
    # NOTE(review): %w does not strip quotes — these globs are the
    # literal tokens "\"somestuff\"", "|", … and will match nothing.
    %w["somestuff" | "otherstuff" | "iforget"].each do |d|
      libexec.install Dir[d]
    end
  end

  def caveats; <<-EOS.undent
    This formula is still under construction.
    EOS
  end

  test do
    Language::Python.each_python(build) do |python, _version|
      # NOTE(review): single-string form is shell-split, so "import" and
      # "wx;..." arrive as separate argv entries — use system(python, "-c", …).
      system "#{python} -c import wx; print wx.version()"
    end
    # NOTE(review): "-d -test" is passed as one literal argument.
    system "#{bin}/Pyfa", "-d -test"
  end
end
|
# Character classes ("clases") whose skill accessors are generated below.
pjs = %w(
  caminante brujo vengador hoplita beastslord bersérker
  hechicero invocador conjurador paladín clérigo sacerdote
  derviche arquero druida ladrón asesino nigromante
  matador falangista ingeniero
)
# Generic clase creator
# For each class this defines "<pj>s" (loads the class' skill YAML) and
# "<pj>(id)" (one entry of that YAML merged with its class tag).
pjs.each do |pj|
  define_method(pj + 's') do
    load_yaml("skills/#{pj}")
  end
  define_method(pj) do |id|
    send(pj + 's')[id].merge(char: pj)
  end
end
# Maestro de Armas
# Stored as a list of hashes, so entries are looked up by 'id'.
def maestrodearmas
  load_yaml('skills/maestrodearma')
end
def maestrodearma(id)
  maestrodearmas.find { |m| m['id'] == id }.merge('char' => 'maestrodearmas')
end
# Hall-of-fame ("salones") record for the given character.
def fama(personaje)
  load_yaml('salones')[personaje]
end
# Base skills for the character, wrapped in Hab value objects.
def habilidad_base(personaje)
  load_yaml('skills/base')[personaje].map{ |hb| Hab.new(hb)}
end
Fix syntax lints
# Character classes ("clases") whose skill accessors are generated below.
pjs = %w(
  caminante brujo vengador hoplita beastslord bersérker
  hechicero invocador conjurador paladín clérigo sacerdote
  derviche arquero druida ladrón asesino nigromante
  matador falangista ingeniero
)

# Generic clase creator: "<pj>s" returns every skill of the class,
# "<pj>(id)" returns one skill merged with its class tag.
pjs.each do |pj|
  plural = "#{pj}s"
  define_method(plural) { load_yaml("skills/#{pj}") }
  define_method(pj) { |id| send(plural)[id].merge(char: pj) }
end

# Maestro de Armas skills live in a list of hashes, so look up by 'id'.
def maestrodearmas
  load_yaml('skills/maestrodearma')
end

def maestrodearma(id)
  entry = maestrodearmas.find { |m| m['id'] == id }
  entry.merge('char' => 'maestrodearmas')
end

# Hall-of-fame ("salones") record for the given character.
def fama(personaje)
  load_yaml('salones')[personaje]
end

# Base skills for the character, wrapped in Hab value objects.
def habilidad_base(personaje)
  load_yaml('skills/base')[personaje].map { |base| Hab.new(base) }
end
|
require "bundler/setup"
require "hasu"
Hasu.load "ball.rb"
# Minimal Pong window built on Hasu (code-reloading wrapper around Gosu).
class Pong < Hasu::Window
  WIDTH = 768
  HEIGHT = 576

  def initialize
    super(WIDTH, HEIGHT, false)
  end

  # Hasu calls reset at startup and on code reload; rebuild game state.
  def reset
    @ball = Ball.new
  end

  # Render one frame.
  def draw
    @ball.draw(self)
  end

  # Advance the simulation one tick.
  def update
    @ball.move!
  end
end

Pong.run
Draw scores
require "bundler/setup"
require "hasu"
Hasu.load "ball.rb"
# Pong window: owns the ball plus both players' scores and renders them.
class Pong < Hasu::Window
  WIDTH = 768
  HEIGHT = 576

  def initialize
    super(WIDTH, HEIGHT, false)
  end

  # Hasu calls reset at startup and on code reload: start a fresh game.
  def reset
    @left_score = 0
    @right_score = 0
    @ball = Ball.new
    @score_font = Gosu::Font.new(self, "Arial", 30)
  end

  # Draw the ball and one score near each top corner.
  def draw
    @ball.draw(self)
    @score_font.draw(@left_score, 30, 30, 0)
    @score_font.draw(@right_score, WIDTH - 50, 30, 0)
  end

  # Advance the simulation one tick.
  def update
    @ball.move!
  end
end

Pong.run
|
require "formula"

# Homebrew formula for Radx, NCAR/RAL tools for radial radar data formats.
class Radx < Formula
  homepage "http://www.ral.ucar.edu/projects/titan/docs/radial_formats/radx.html"
  # Upstream archives superseded tarballs under previous_releases/; the
  # old top-level path no longer serves this version.
  url "ftp://ftp.rap.ucar.edu/pub/titan/radx/previous_releases/radx-20140417.src.tgz"
  mirror "http://science-annex.org/pub/radx/radx-20140417.src.tgz"
  version "20140417"
  sha1 "2959154e6c8aea4502dbb9fe98723c54fcd1bf39"

  depends_on "hdf5" => "enable-cxx"
  depends_on "udunits"
  depends_on "netcdf" => "enable-cxx-compat"
  depends_on "fftw"

  def install
    system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
    # Pass make arguments separately so the command is not shell-parsed.
    system "make", "install"
  end

  # `def test` is the obsolete formula API and is ignored by `brew test`;
  # the `test do` block is required.
  test do
    system "#{bin}/RadxPrint", "-h"
  end
end
radx: fix url, audit warnings
require "formula"

# Homebrew formula for Radx, NCAR/RAL tools for radial radar data formats.
class Radx < Formula
  homepage "http://www.ral.ucar.edu/projects/titan/docs/radial_formats/radx.html"
  # Upstream archives superseded tarballs under previous_releases/.
  url "ftp://ftp.rap.ucar.edu/pub/titan/radx/previous_releases/radx-20140417.src.tgz"
  mirror "http://science-annex.org/pub/radx/radx-20140417.src.tgz"
  version "20140417"
  sha1 "2959154e6c8aea4502dbb9fe98723c54fcd1bf39"

  depends_on "hdf5" => "enable-cxx"
  depends_on "udunits"
  depends_on "netcdf" => "enable-cxx-compat"
  depends_on "fftw"

  def install
    system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    system "#{bin}/RadxPrint", "-h"
  end
end
|
#!/usr/bin/ruby
# encoding: utf-8
begin
require_relative "lib/db.rb"
rescue Exception => e
$stderr.puts "Please fix your database access / creds in config.json"
exit 1
end
require_relative "lib/fetcher.rb"
require_relative "lib/logger.rb"
require_relative "lib/utils.rb"
# Exact YouTube messages that mean a video is permanently gone; matched
# (via Regexp.union) against youtube-dl error output. Frozen: these are
# read-only lookup tables.
$YT_DELETED_VIDEO_REASONS = [
  "This video does not exist.",
  "This video is not available.",
  "The YouTube account associated with this video has been terminated due to multiple third-party notifications of copyright infringement",
  "This video has been removed by the user",
  "This video has been removed for violating YouTube's Terms of Service.",
  "This video is no longer available because the YouTube account associated with this video has been terminated.",
  "This video is private",
  "This video is no longer available because the uploader has closed their YouTube account",
  "This video has been removed for violating YouTube's policy on nudity or sexual content.",
  "This video has been removed for violating YouTube's policy on violent or graphic content.",
  "This video has been removed for violating YouTube's policy on harassment and bullying."
].freeze
# Messages that mean the video is merely geo-blocked and may succeed
# through a proxy; such downloads are queued for retry.
$YT_COUNTRY_BLOCKED_MSG = [
  /blocked it in your country/,
  /not available on this country domain/,
  /This video contains content from .* who has blocked it on copyright grounds/,
].freeze
# Orchestrates the downloader: scrapes YouTube ids from configured
# sites, enriches them with video metadata, and downloads them with
# youtube-dl, coordinating everything through long-lived threads.
class Main
  require "fileutils"
  require "json"
  $: << File.join(File.dirname(__FILE__),"lib/taglib")
  require "taglib"

  # config_file: path to the JSON configuration loaded into global $CONF.
  # Creates the destination/tmp dirs and a stdout logger.
  def initialize(config_file)
    load_conf(config_file)
    @log = MyLogger.new()
    @log.add_logger(Logger.new(STDOUT))
    FileUtils.mkdir_p($CONF["download"]["destination_dir"])
    FileUtils.mkdir_p($CONF["download"]["tmp_dir"])
    @threads=[]
  end

  # Requires each site scraper listed in $CONF["sites"] from sites/;
  # scrapers that fail to load are skipped with a warning.
  def load_sites
    $CONF["sites"].each do |site|
      # NOTE(review): when the entry already ends in ".rb" the modifier
      # assignment leaves filename nil — verify sites are configured
      # without the extension.
      filename = site+".rb" unless site.end_with?(".rb")
      path = File.join("sites",filename)
      begin
        require_relative path
      rescue LoadError=>e
        @log.warn "Cannot load #{path}"
      end
    end
  end

  # Parses the JSON config file into $CONF; exits if the file is missing.
  # NOTE(review): the trailing "#" in the error message looks like a typo,
  # and `rescue Exception` is broader than needed (StandardError suffices).
  def load_conf(file)
    unless File.exist?(file)
      @log.err "Couldn't find config file #{file}."
      exit 1
    end
    begin
      $CONF = JSON.parse(File.read(file))
    rescue Exception => e
      @log.err "Problem opening config file #{file}#"
      raise e
    end
  end

  # Starts the thread that periodically polls every registered Fetcher
  # site for new YouTube ids and queues them in the database.
  # Returns the Thread.
  def start_fetcher_threads()
    load_sites()
    if Fetcher.sites.empty?
      @log.err "Didn't find any site to parse for youtube URL."
      @log.err "Add some in config.json, maybe?"
      exit 1
    end
    @fetcher_threads = []
    tick = 5 # Verify everything every tick
    t = Thread.new{
      while true
        now = Time.now().to_i
        # Retry when we've waited "wait" time + up to 10% of wait, to appear not too bot-y
        Fetcher.sites.select{|site| now - site.last_check > (site.wait*(1 + (rand() / 10))) }.each do |site|
          count = 0
          begin
            site.get_yids().each { |yid|
              @log.info "#{site} found #{yid}"
              DBUtils.add_yid(yid, site.name)
              count += 1
            }
            @log.info "#{site} found #{count} videos. Will retry in #{site.wait} seconds" unless site.wait < 30
          rescue SocketError => e
            # Internet is down, let's wait for a bit
            @log.err "Failed to fetch yids from #{site}. Internet or your proxy is down, let's retry later"
          rescue Exception => e
            # TODO don't break but send an email or something
            @log.err "Failed to fetch yids from #{site}"
          end
          site.last_check = now
        end
        sleep tick
      end
    }
    t.abort_on_exception = true
    return t
  end

  # Persists fetched metadata for one video; on failure records the
  # deletion reason reported by YouTube instead.
  def update_video_infos(infos)
    yid = infos["yid"]
    if infos["status"] == "ok"
      DBUtils.update_video_infos_from_hash(yid, infos["infos"])
      DBUtils.save_thumbs(yid, infos["thumbs"])
    else
      reason = YoutubeUtils.get_reason(yid)
      DBUtils.update_video_infos_from_hash(yid, {downloaded: reason, deletion: Time.now()})
    end
  end

  # Starts the thread that fills in metadata for queued ids — batched
  # through the API when a key is configured, slow scraping otherwise.
  # Videos outside the configured duration bounds are marked done so
  # they are never downloaded. Returns the Thread.
  def start_informer_threads()
    @informer = Thread.new {
      Thread.current[:name]="Informer"
      @log.info "Informer thread starts"
      while true
        count = 0
        if $CONF["youtube_key"] and $CONF["youtube_key"].size > 5
          DBUtils.get_all_yids_without_infos.each_slice(10).to_a.each do |yid_slice|
            YoutubeUtils.get_batch_infos_with_key(yid_slice, $CONF["youtube_key"]).each do |infos|
              yid = infos["yid"]
              if infos["infos"][:duration] < $CONF["download"]["minimum_duration"]
                @log.info("#{infos["infos"][:duration]} < #{$CONF["download"]["minimum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                DBUtils.set_downloaded(yid)
                infos["infos"][:bien] = false
              end
              if infos["infos"][:duration] > $CONF["download"]["maximum_duration"]
                @log.info("#{infos["infos"][:duration]} > #{$CONF["download"]["maximum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                DBUtils.set_downloaded(yid)
                infos["infos"][:bien] = false
              end
              update_video_infos(infos)
              count+=1
            end
          end
        else
          DBUtils.get_all_yids_without_infos.each do |yid|
            update_video_infos(YoutubeUtils.get_infos_without_key(yid))
            count+=1
            sleep 5 # We don't want to hit the youtube.com website too much and be seen too bot-y
          end
        end
        @log.info "Informer updated #{count} videos infos" unless count == 0
        sleep 5
      end
    }
    @informer.abort_on_exception = true
    return @informer
  end

  # Embeds image_data as cover art in an .mp4 file via taglib;
  # non-mp4 files are left untouched with a warning.
  def add_cover(fmp4,image_data)
    if not fmp4 =~ /\.mp4$/
      @log.warn "ERROR: file not MP4, not adding nice tags"
    else
      cover_art = TagLib::MP4::CoverArt.new(TagLib::MP4::CoverArt::JPEG, image_data)
      item = TagLib::MP4::Item.from_cover_art_list([cover_art])
      TagLib::MP4::File.open(fmp4) do |mp4|
        mp4.tag.item_list_map.insert('covr', item)
        mp4.save
      end
    end
  end

  # Marks a video as failed by youtube-dl itself and tells the operator
  # to update their youtube-dl installation.
  def ytdlfail(yid, errmsg)
    DBUtils.set_downloaded(yid, DBUtils::YTDLFAIL)
    @log.warn "The current version of youtube-dl failed to download #{yid} with error #{errmsg}."
    @log.warn "Please update your youtube-dl version."
    @log.warn "You can also re-run the last youtube-dl command with all the verbose flags to debugi"
  end

  # Classifies a youtube-dl error message for yid and records the outcome:
  # geo-blocks become RETRY entries (remembering which proxies were
  # tried), permanent YouTube removals become YTERROR, tool failures
  # become YTDLFAIL, and unknown messages raise so they get noticed.
  def do_error(error_message, yid, proxy_to_try, tried=false)
    @log.debug "Handling error #{error_message}"
    case error_message
    when /#{yid}: YouTube said: (.*)$/i
      yt_error = $1
      case yt_error
      when Regexp.union($YT_COUNTRY_BLOCKED_MSG)
        # Geo-block: queue for retry, tracking tried proxies as JSON.
        if tried
          DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
        else
          DBUtils.set_downloaded(yid, "RETRY: {}")
        end
      when /Playback on other websites has been disabled by the video owner./
        err_msg = "Youtube said '#{yt_error}'"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      # when /content too short/
      # let's just retry later
      # ytdlfail(yid, yt_error)
      when /Please sign in to view this video./
        _msg = ""
        if $CONF["youtube_username"]
          # WTF we are signed in
          _msg="#{DBUtils::YTDLFAIL} #{yt_error}"
        else
          _msg="#{DBUtils::YTDLFAIL} need credentials"
        end
        @log.warn _msg
        DBUtils.set_downloaded(yid, _msg)
      when Regexp.union($YT_DELETED_VIDEO_REASONS)
        # Unrecoverable error, videos sent to Youtube Limbo.
        err_msg = "Youtube said '#{yt_error}'"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      else
        raise Exception.new("Problem with download of #{yid} : Unknown YouTube error '#{yt_error}'")
      end
    when /The uploader has not made this video available in your country/
      if tried
        DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
      else
        DBUtils.set_downloaded(yid, "RETRY: {}")
      end
    when /Signature extraction failed/
      ytdlfail(yid, error_message)
      return
    when /would lead to an infinite loop/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /Connection reset by peer/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /content too short/i
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /This live stream recording is not available/
      ytdlfail(yid, error_message)
      return
    else
      DBUtils.set_downloaded(yid, "#{DBUtils::YTDLFAIL} #{error_message}")
      raise Exception.new("Problem with download of #{yid} : Unknown youtube-dl error '#{error_message}'")
    end
  end

  # Downloads one video into the tmp dir with youtube-dl (optionally
  # through a not-yet-tried proxy when the video is queued for retry),
  # then post-processes it: embeds a cover, moves the file to the
  # destination dir and records name/size in the database.
  # NOTE(review): chdir is process-global; concurrent downloader threads
  # would race on it. Currently only one downloader thread is started.
  def do_download(yid)
    video_file = nil
    Dir.chdir($CONF["download"]["tmp_dir"])
    available_proxies = $CONF["download"]["proxies"]
    proxy_cmd = ""
    tried = DBUtils.get_retried(yid)
    if tried
      @log.info "We need to retry with a proxy. Already tried: #{tried}"
      available_proxies = available_proxies.dup.delete_if {|k,_| tried.key?(k)}
      if available_proxies.empty?
        @log.warn "No more proxy to try =("
        # TODO mark the download accordingly
        return
      end
      # Pick one untried proxy at random: {name => url}.
      proxy_to_try = [available_proxies.to_a.sample()].to_h
      proxy_cmd = "--proxy #{proxy_to_try.first[1]}"
    end
    command = "#{@youtube_dl_cmd} #{proxy_cmd} https://www.youtube.com/watch?v=#{yid} 2>&1"
    @log.debug command
    DBUtils.set_downloaded(yid, msg=DBUtils::DLING)
    ytdl_msg = nil
    IO.popen(command) do |io|
      ytdl_msg = io.read.split("\n").join(" ")
    end
    case ytdl_msg
    when /error: (.+)$/i
      # proxy_to_try is nil here when no proxy retry was in progress.
      do_error(ytdl_msg, yid, proxy_to_try, tried)
      return nil
    when /WARNING: (.+)$/
      warn = $1
      if warn!=""
        @log.warn warn unless warn=~/Your copy of avconv is outdated, update avconv to version 10-0 or newer if you encounter any errors/
        if warn=~/unable to log in: .*password/i
          warn = "Use a webbrowser to connect to the YT Account, which was probabbly flagged as bot/spam"
          raise warn
        end
      end
    when /has already been downloaded and merged/
      # continue
    when ""
      # Continue
    else
      raise Exception.new("WTF #{ytdl_msg}")
    end
    @log.success "Downloading finished, now post processing"
    # Expect at most the video file plus an optional .jpg thumbnail.
    output_files = Dir.glob("*#{yid}*",File::FNM_DOTMATCH)
    if output_files.size > 2
      pp output_files
      raise "Too many output files in #{`pwd`}"
    end
    video_file = output_files.reject{ |f| f=~/\.jpg$/ }[0]
    jpg_file = output_files.select{|f| f=~/\.jpg$/}[0]
    if not jpg_file or not File.exist?(jpg_file)
      if @video_converter_cmd
        # Grab the first frame as a thumbnail when none was downloaded.
        `#{@video_converter_cmd} -i \"#{video_file}\" -vframes 1 -f image2 \"#{jpg_file}\"`
      end
    end
    # NOTE(review): jpg_file may still be nil here (no thumbnail and no
    # converter); File.exist?(nil) would raise TypeError — confirm.
    if File.exist?(jpg_file)
      add_cover(video_file, File.read(jpg_file))
      File.delete(jpg_file)
    end
    FileUtils.mv(video_file, $CONF["download"]["destination_dir"])
    video_file = File.join($CONF["download"]["destination_dir"], video_file)
    @log.success "PostProcessing #{yid} over."
    DBUtils.set_downloaded(yid)
    file_size = File.stat(video_file).size.to_i
    DBUtils.update_video_infos_from_hash(yid,{file: File.basename(video_file), size: file_size})
    return nil
  end

  # Validates the youtube-dl / video converter binaries and credentials
  # from $CONF, then starts the single downloader thread that pops queued
  # ids and calls do_download. Returns the Thread.
  def start_downloader_threads()
    @youtube_dl_cmd = $CONF["download"]["youtube_dl_cmd"] || `which youtube-dl`.strip()
    if @youtube_dl_cmd == ""
      @log.err "Please update \"youtube_dl_cmd\" in config.json to your local installation of youtube-dl, or remove that key altogether, to use the one in your PATH"
      exit 1
    else
      begin
        # youtube-dl versions look like dates: 20xx.xx.xx.
        res = `#{@youtube_dl_cmd} --version | egrep "^20[0-9.]+$"`
        raise unless res=~/^20[0-9.]+$/
      rescue Exception => e
        @log.err "'#{@youtube_dl_cmd}' is not a valid youtube-dl binary"
        exit
      end
    end
    #
    # TODO move to "download"?
    if not $CONF["youtube_username"]
      @log.warn "You have not set a Youtube username in config.json."
      @log.warn "You won't be able to download '18+' videos."
    end
    @video_converter_cmd = $CONF["download"]["video_converter_cmd"] || "avconv"
    begin
      res = `#{@video_converter_cmd} -version 2>&1 | egrep "Copyright"`
      raise unless res=~/developers/
    rescue Exception => e
      @log.warn "'#{@video_converter_cmd}' is not a valid video conversion command (use ffmpeg or avconv)"
      @video_converter_cmd = nil
    end
    if $CONF["download"]["youtube_dl_extra_args"]
      @youtube_dl_cmd << " " << $CONF["download"]["youtube_dl_extra_args"]
    end
    if $CONF["youtube_username"]
      @youtube_dl_cmd << " -u \"#{$CONF['youtube_username']}\""
      @youtube_dl_cmd << " -p \"#{$CONF['youtube_password']}\""
    end
    if not ($CONF["youtube_key"] and $CONF["youtube_key"].size > 5)
      @log.warn "You have not set a Youtube API key in config.json."
    end
    # TODO have more than 1 ?
    @downloader = Thread.new {
      while true
        yid = DBUtils.pop_yid_to_download(minimum_duration: $CONF["download"]["minimum_duration"],
                                          maximum_duration: $CONF["download"]["maximum_duration"])
        if yid
          # do_download chdirs into tmp_dir; restore the cwd afterwards.
          cur_dir=Dir.pwd()
          begin
            do_download(yid)
            nb_to_dl = DBUtils.get_nb_to_dl()
            @log.info "Still #{nb_to_dl} videos to download"
          rescue Exception => e
            @log.err "Exception when downloading #{yid}"
            raise e
          end
          Dir.chdir(cur_dir)
        else
          @log.info "nothing to download, sleeping"
          sleep 60
        end
      end
      sleep 1
    }
    @downloader.abort_on_exception = true
    return @downloader
  end

  # Placeholder thread for watching locally-added files (not implemented).
  def start_local_downloaded_threads()
    @local_downloader = Thread.new{
      #Inotify stuff
      while true
      end
      sleep 10
    }
    @local_downloader.abort_on_exception = true
    return @local_downloader
  end

  # Entry point: clean up stale download states, requeue old failures,
  # start the informer/fetcher/downloader threads and block on them.
  def go()
    DBUtils.clean_dl()
    failed_dl_vids = DBUtils.get_ytdlfail().size
    if failed_dl_vids > 0
      @log.warn "You have #{failed_dl_vids} videos that youtube-dl couldn't download."
    end
    DBUtils.retry_old_failed_videos()
    @threads << start_informer_threads()
    @threads << start_fetcher_threads()
    @threads << start_downloader_threads()
    @threads.each {|t| t.join()}
  end
end
# Entry point: build the orchestrator from the given config and run it.
def main(conf)
  Main.new(conf).go()
end

# Ctrl-C: exit immediately.
trap("INT") do
  # TODO
  # remove ytdl temps,
  exit
end

main(ARGV[0] || "config.json")
init optparse
#!/usr/bin/ruby
# encoding: utf-8
require "optparse"
begin
require_relative "lib/db.rb"
rescue Exception => e
$stderr.puts "Please fix your database access / creds in config.json"
exit 1
end
require_relative "lib/fetcher.rb"
require_relative "lib/logger.rb"
require_relative "lib/utils.rb"
# Exact YouTube messages that mean a video is permanently gone; matched
# (via Regexp.union) against youtube-dl error output. Frozen: these are
# read-only lookup tables.
$YT_DELETED_VIDEO_REASONS = [
  "This video does not exist.",
  "This video is not available.",
  "The YouTube account associated with this video has been terminated due to multiple third-party notifications of copyright infringement",
  "This video has been removed by the user",
  "This video has been removed for violating YouTube's Terms of Service.",
  "This video is no longer available because the YouTube account associated with this video has been terminated.",
  "This video is private",
  "This video is no longer available because the uploader has closed their YouTube account",
  "This video has been removed for violating YouTube's policy on nudity or sexual content.",
  "This video has been removed for violating YouTube's policy on violent or graphic content.",
  "This video has been removed for violating YouTube's policy on harassment and bullying."
].freeze
# Messages that mean the video is merely geo-blocked and may succeed
# through a proxy; such downloads are queued for retry.
$YT_COUNTRY_BLOCKED_MSG = [
  /blocked it in your country/,
  /not available on this country domain/,
  /This video contains content from .* who has blocked it on copyright grounds/,
].freeze
class Main
  require "fileutils"
  require "json"
  # Make the bundled taglib bindings available for MP4 cover-art tagging.
  $: << File.join(File.dirname(__FILE__),"lib/taglib")
  require "taglib"

  # config_file: path to the JSON config loaded into the global $CONF.
  # arguments:   parsed CLI options hash (only :logfile is read here).
  def initialize(config_file, arguments)
    load_conf(config_file)
    @arguments = arguments
    @log = MyLogger.new()
    @log.add_logger(Logger.new(STDOUT))
    if @arguments[:logfile]
      @log.add_logger(Logger.new(@arguments[:logfile]))
    end
    # Make sure both working directories exist before any thread starts.
    FileUtils.mkdir_p($CONF["download"]["destination_dir"])
    FileUtils.mkdir_p($CONF["download"]["tmp_dir"])
    @threads=[]
  end

  # Require every site plugin listed under $CONF["sites"] (from ./sites/).
  # A plugin that fails to load is skipped with a warning.
  def load_sites
    $CONF["sites"].each do |site|
      # FIXME(review): when `site` already ends in ".rb" this leaves
      # `filename` nil and File.join below raises TypeError — confirm intent.
      filename = site+".rb" unless site.end_with?(".rb")
      path = File.join("sites",filename)
      begin
        require_relative path
      rescue LoadError=>e
        @log.warn "Cannot load #{path}"
      end
    end
  end

  # Parse the JSON config file into the global $CONF; exits when missing.
  def load_conf(file)
    unless File.exist?(file)
      @log.err "Couldn't find config file #{file}."
      exit 1
    end
    begin
      $CONF = JSON.parse(File.read(file))
    # NOTE(review): rescuing Exception is overly broad; StandardError would
    # normally suffice (the error is re-raised either way).
    rescue Exception => e
      @log.err "Problem opening config file #{file}#"
      raise e
    end
  end

  # Start the polling thread that asks every registered site for new
  # YouTube ids and queues them in the DB. Returns the Thread.
  def start_fetcher_threads()
    load_sites()
    if Fetcher.sites.empty?
      @log.err "Didn't find any site to parse for youtube URL."
      @log.err "Add some in config.json, maybe?"
      exit 1
    end
    @fetcher_threads = []
    tick = 5 # Verify everything every tick
    t = Thread.new{
      while true
        now = Time.now().to_i
        # Retry when we've waited "wait" time + up to 10% of wait, to appear not too bot-y
        Fetcher.sites.select{|site| now - site.last_check > (site.wait*(1 + (rand() / 10))) }.each do |site|
          count = 0
          begin
            site.get_yids().each { |yid|
              @log.info "#{site} found #{yid}"
              DBUtils.add_yid(yid, site.name)
              count += 1
            }
            @log.info "#{site} found #{count} videos. Will retry in #{site.wait} seconds" unless site.wait < 30
          rescue SocketError => e
            # Internet is down, let's wait for a bit
            @log.err "Failed to fetch yids from #{site}. Internet or your proxy is down, let's retry later"
          rescue Exception => e
            # TODO don't break but send an email or something
            @log.err "Failed to fetch yids from #{site}"
          end
          # Mark the attempt time even on failure so we back off properly.
          site.last_check = now
        end
        sleep tick
      end
    }
    t.abort_on_exception = true
    return t
  end

  # Persist freshly fetched metadata for one video; when the fetch was not
  # "ok", store the Youtube-provided reason and stamp the deletion time.
  def update_video_infos(infos)
    yid = infos["yid"]
    if infos["status"] == "ok"
      DBUtils.update_video_infos_from_hash(yid, infos["infos"])
      DBUtils.save_thumbs(yid, infos["thumbs"])
    else
      reason = YoutubeUtils.get_reason(yid)
      DBUtils.update_video_infos_from_hash(yid, {downloaded: reason, deletion: Time.now()})
    end
  end

  # Start the thread that enriches queued videos with metadata: via the
  # Youtube API when a key is configured (batches of 10), otherwise by
  # scraping one video at a time. Returns the Thread.
  def start_informer_threads()
    @informer = Thread.new {
      Thread.current[:name]="Informer"
      @log.info "Informer thread starts"
      while true
        count = 0
        if $CONF["youtube_key"] and $CONF["youtube_key"].size > 5
          DBUtils.get_all_yids_without_infos.each_slice(10).to_a.each do |yid_slice|
            YoutubeUtils.get_batch_infos_with_key(yid_slice, $CONF["youtube_key"]).each do |infos|
              yid = infos["yid"]
              # Videos outside the configured duration window are marked as
              # done so they are never downloaded.
              if infos["infos"][:duration] < $CONF["download"]["minimum_duration"]
                @log.info("#{infos["infos"][:duration]} < #{$CONF["download"]["minimum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                DBUtils.set_downloaded(yid)
                infos["infos"][:bien] = false
              end
              if infos["infos"][:duration] > $CONF["download"]["maximum_duration"]
                @log.info("#{infos["infos"][:duration]} > #{$CONF["download"]["maximum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                DBUtils.set_downloaded(yid)
                infos["infos"][:bien] = false
              end
              update_video_infos(infos)
              count+=1
            end
          end
        else
          DBUtils.get_all_yids_without_infos.each do |yid|
            update_video_infos(YoutubeUtils.get_infos_without_key(yid))
            count+=1
            sleep 5 # We don't want to hit the youtube.com website too much and be seen too bot-y
          end
        end
        @log.info "Informer updated #{count} videos infos" unless count == 0
        sleep 5
      end
    }
    @informer.abort_on_exception = true
    return @informer
  end

  # Embed JPEG data as the MP4 'covr' cover-art atom; non-MP4 files are
  # left untouched (only the MP4 taglib API is used here).
  def add_cover(fmp4,image_data)
    if not fmp4 =~ /\.mp4$/
      @log.warn "ERROR: file not MP4, not adding nice tags"
    else
      cover_art = TagLib::MP4::CoverArt.new(TagLib::MP4::CoverArt::JPEG, image_data)
      item = TagLib::MP4::Item.from_cover_art_list([cover_art])
      TagLib::MP4::File.open(fmp4) do |mp4|
        mp4.tag.item_list_map.insert('covr', item)
        mp4.save
      end
    end
  end

  # Record a hard youtube-dl failure for yid and tell the operator to
  # upgrade/debug their youtube-dl install.
  def ytdlfail(yid, errmsg)
    DBUtils.set_downloaded(yid, DBUtils::YTDLFAIL)
    @log.warn "The current version of youtube-dl failed to download #{yid} with error #{errmsg}."
    @log.warn "Please update your youtube-dl version."
    @log.warn "You can also re-run the last youtube-dl command with all the verbose flags to debugi"
  end

  # Map a youtube-dl error message to a DB status for yid: geo-blocks become
  # "RETRY" records listing the proxies already tried, permanent Youtube
  # refusals become YTERROR, tool breakage becomes YTDLFAIL, transient
  # network issues become RETRYDL, and anything unknown raises.
  def do_error(error_message, yid, proxy_to_try, tried=false)
    @log.debug "Handling error #{error_message}"
    case error_message
    when /#{yid}: YouTube said: (.*)$/i
      yt_error = $1
      case yt_error
      when Regexp.union($YT_COUNTRY_BLOCKED_MSG)
        # Geo-blocked: remember which proxies were tried so the next retry
        # picks a different one.
        if tried
          DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
        else
          DBUtils.set_downloaded(yid, "RETRY: {}")
        end
      when /Playback on other websites has been disabled by the video owner./
        err_msg = "Youtube said '#{yt_error}'"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      # when /content too short/
      # let's just retry later
      # ytdlfail(yid, yt_error)
      when /Please sign in to view this video./
        _msg = ""
        if $CONF["youtube_username"]
          # WTF we are signed in
          _msg="#{DBUtils::YTDLFAIL} #{yt_error}"
        else
          _msg="#{DBUtils::YTDLFAIL} need credentials"
        end
        @log.warn _msg
        DBUtils.set_downloaded(yid, _msg)
      when Regexp.union($YT_DELETED_VIDEO_REASONS)
        # Unrecoverable error, videos sent to Youtube Limbo.
        err_msg = "Youtube said '#{yt_error}'"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      else
        raise Exception.new("Problem with download of #{yid} : Unknown YouTube error '#{yt_error}'")
      end
    when /The uploader has not made this video available in your country/
      if tried
        DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
      else
        DBUtils.set_downloaded(yid, "RETRY: {}")
      end
    when /Signature extraction failed/
      ytdlfail(yid, error_message)
      return
    when /would lead to an infinite loop/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /Connection reset by peer/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /content too short/i
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /This live stream recording is not available/
      ytdlfail(yid, error_message)
      return
    else
      DBUtils.set_downloaded(yid, "#{DBUtils::YTDLFAIL} #{error_message}")
      raise Exception.new("Problem with download of #{yid} : Unknown youtube-dl error '#{error_message}'")
    end
  end

  # Download one video into tmp_dir with youtube-dl (optionally through a
  # not-yet-tried proxy for RETRY records), then post-process: attach a
  # thumbnail, move the file to destination_dir and record its size.
  # Always returns nil. NOTE: chdirs into tmp_dir; the caller restores cwd.
  def do_download(yid)
    video_file = nil
    Dir.chdir($CONF["download"]["tmp_dir"])
    available_proxies = $CONF["download"]["proxies"]
    proxy_cmd = ""
    tried = DBUtils.get_retried(yid)
    if tried
      @log.info "We need to retry with a proxy. Already tried: #{tried}"
      available_proxies = available_proxies.dup.delete_if {|k,_| tried.key?(k)}
      if available_proxies.empty?
        @log.warn "No more proxy to try =("
        # TODO mark the download accordingly
        return
      end
      # Pick one random untried proxy, kept as a one-entry {name => url} hash.
      proxy_to_try = [available_proxies.to_a.sample()].to_h
      proxy_cmd = "--proxy #{proxy_to_try.first[1]}"
    end
    command = "#{@youtube_dl_cmd} #{proxy_cmd} https://www.youtube.com/watch?v=#{yid} 2>&1"
    @log.debug command
    DBUtils.set_downloaded(yid, msg=DBUtils::DLING)
    ytdl_msg = nil
    # Collapse youtube-dl's whole output onto one line for pattern matching.
    IO.popen(command) do |io|
      ytdl_msg = io.read.split("\n").join(" ")
    end
    case ytdl_msg
    when /error: (.+)$/i
      do_error(ytdl_msg, yid, proxy_to_try, tried)
      return nil
    when /WARNING: (.+)$/
      warn = $1
      if warn!=""
        @log.warn warn unless warn=~/Your copy of avconv is outdated, update avconv to version 10-0 or newer if you encounter any errors/
        if warn=~/unable to log in: .*password/i
          warn = "Use a webbrowser to connect to the YT Account, which was probabbly flagged as bot/spam"
          raise warn
        end
      end
    when /has already been downloaded and merged/
      # continue
    when ""
      # Continue
    else
      raise Exception.new("WTF #{ytdl_msg}")
    end
    @log.success "Downloading finished, now post processing"
    # Expect at most the video file and its .jpg thumbnail.
    output_files = Dir.glob("*#{yid}*",File::FNM_DOTMATCH)
    if output_files.size > 2
      pp output_files
      raise "Too many output files in #{`pwd`}"
    end
    video_file = output_files.reject{ |f| f=~/\.jpg$/ }[0]
    jpg_file = output_files.select{|f| f=~/\.jpg$/}[0]
    # FIXME(review): when no thumbnail was downloaded, jpg_file is nil — the
    # converter below writes to "" and File.exist?(nil) right after raises
    # TypeError; a nil-guard is needed here.
    if not jpg_file or not File.exist?(jpg_file)
      if @video_converter_cmd
        `#{@video_converter_cmd} -i \"#{video_file}\" -vframes 1 -f image2 \"#{jpg_file}\"`
      end
    end
    if File.exist?(jpg_file)
      add_cover(video_file, File.read(jpg_file))
      File.delete(jpg_file)
    end
    FileUtils.mv(video_file, $CONF["download"]["destination_dir"])
    video_file = File.join($CONF["download"]["destination_dir"], video_file)
    @log.success "PostProcessing #{yid} over."
    DBUtils.set_downloaded(yid)
    file_size = File.stat(video_file).size.to_i
    DBUtils.update_video_infos_from_hash(yid,{file: File.basename(video_file), size: file_size})
    return nil
  end

  # Validate the youtube-dl and video-converter binaries, assemble the
  # youtube-dl command line (extra args + credentials), then start the
  # single download worker thread. Returns the Thread.
  def start_downloader_threads()
    @youtube_dl_cmd = $CONF["download"]["youtube_dl_cmd"] || `which youtube-dl`.strip()
    if @youtube_dl_cmd == ""
      @log.err "Please update \"youtube_dl_cmd\" in config.json to your local installation of youtube-dl, or remove that key altogether, to use the one in your PATH"
      exit 1
    else
      begin
        # youtube-dl versions look like dates (20xx.xx.xx).
        res = `#{@youtube_dl_cmd} --version | egrep "^20[0-9.]+$"`
        raise unless res=~/^20[0-9.]+$/
      rescue Exception => e
        @log.err "'#{@youtube_dl_cmd}' is not a valid youtube-dl binary"
        exit
      end
    end
    #
    # TODO move to "download"?
    if not $CONF["youtube_username"]
      @log.warn "You have not set a Youtube username in config.json."
      @log.warn "You won't be able to download '18+' videos."
    end
    @video_converter_cmd = $CONF["download"]["video_converter_cmd"] || "avconv"
    begin
      res = `#{@video_converter_cmd} -version 2>&1 | egrep "Copyright"`
      raise unless res=~/developers/
    rescue Exception => e
      @log.warn "'#{@video_converter_cmd}' is not a valid video conversion command (use ffmpeg or avconv)"
      @video_converter_cmd = nil
    end
    if $CONF["download"]["youtube_dl_extra_args"]
      @youtube_dl_cmd << " " << $CONF["download"]["youtube_dl_extra_args"]
    end
    if $CONF["youtube_username"]
      @youtube_dl_cmd << " -u \"#{$CONF['youtube_username']}\""
      @youtube_dl_cmd << " -p \"#{$CONF['youtube_password']}\""
    end
    if not ($CONF["youtube_key"] and $CONF["youtube_key"].size > 5)
      @log.warn "You have not set a Youtube API key in config.json."
    end
    # TODO have more than 1 ?
    @downloader = Thread.new {
      while true
        yid = DBUtils.pop_yid_to_download(minimum_duration: $CONF["download"]["minimum_duration"],
        maximum_duration: $CONF["download"]["maximum_duration"])
        if yid
          cur_dir=Dir.pwd()
          begin
            do_download(yid)
            nb_to_dl = DBUtils.get_nb_to_dl()
            @log.info "Still #{nb_to_dl} videos to download"
          rescue Exception => e
            @log.err "Exception when downloading #{yid}"
            raise e
          end
          # do_download chdirs into tmp_dir; restore the previous cwd.
          Dir.chdir(cur_dir)
        else
          @log.info "nothing to download, sleeping"
          sleep 60
        end
      end
      # NOTE(review): unreachable — the loop above never exits.
      sleep 1
    }
    @downloader.abort_on_exception = true
    return @downloader
  end

  # Placeholder for picking up locally-dropped files (inotify); the body is
  # not implemented yet.
  # FIXME(review): `while true ... end` with an empty body busy-spins a CPU
  # core, and the sleep below it is unreachable.
  def start_local_downloaded_threads()
    @local_downloader = Thread.new{
      #Inotify stuff
      while true
      end
      sleep 10
    }
    @local_downloader.abort_on_exception = true
    return @local_downloader
  end

  # Clean stale download states, requeue old failures, then start the
  # informer/fetcher/downloader threads and block until they exit.
  def go()
    DBUtils.clean_dl()
    failed_dl_vids = DBUtils.get_ytdlfail().size
    if failed_dl_vids > 0
      @log.warn "You have #{failed_dl_vids} videos that youtube-dl couldn't download."
    end
    DBUtils.retry_old_failed_videos()
    @threads << start_informer_threads()
    @threads << start_fetcher_threads()
    @threads << start_downloader_threads()
    @threads.each {|t| t.join()}
  end
end
# Entry point: build the Main driver from the config path and parsed
# options, then run it until all worker threads finish.
def main(conf, options)
  Main.new(conf, options).go()
end
# Clean up on Ctrl-C.
trap("INT"){
  # TODO
  # remove ytdl temps,
  exit
}

# Default CLI options; each stage can be toggled off individually.
options = {
  download: true,
  fetch: true,
  inform: true,
}
# Bugfix: the parser was built but never run, so flags were silently
# ignored and would have been picked up as the config path. parse! also
# removes recognized flags from ARGV so ARGV[0] is really the config file.
OptionParser.new do |opts|
  opts.banner = "Usage: #{__FILE__}"
  opts.on("--[no-]download") {|v| options[:download] = v}
  opts.on("--[no-]fetch") {|v| options[:fetch] = v}
  opts.on("--[no-]inform") {|v| options[:inform] = v}
  opts.on("--logfile logfile") {|v| options[:logfile] = v}
end.parse!
main(ARGV[0] || "config.json", options)
|
#!/usr/bin/env ruby
ENV['TZ'] = 'Asia/Tokyo'
Dir.chdir __dir__
require 'bundler/setup'
require 'pp'
require 'yaml'
require 'time'
require 'uri'
require 'open-uri'
require 'nokogiri'
require 'fluent-logger'
require 'aws-sdk'
require 'json'
require 'fileutils'
require 'socket'
require 'tempfile'
# Fluentd client used for run notifications (tag prefix "recorder").
@logger = Fluent::Logger::FluentLogger.new("recorder", host: '127.0.0.1', port: 24224)

# Emit one event under the "aandg" tag; `message` becomes the record payload.
def tweet(message)
  payload = { message: message }
  @logger.post("aandg", payload)
end
# Currently-airing program information scraped from uniqueradio.jp.
# The source is a JavaScript snippet of `var Foo = "..."` assignments whose
# values are percent-encoded.
class Program
  # Download and parse the live program snippet.
  def self.acquire
    self.new open('http://www.uniqueradio.jp/aandg', &:read)
  end

  def initialize(js)
    if (m = js.match(/^\s*var Program_name = "(.+?)"/))
      @name = URI.decode_www_form_component(m[1])
    end
    if (m = js.match(/^\s*var Program_img = "(.+?)"/))
      @img = m[1]
    end
    if (m = js.match(/^\s*var Program_link = "(.+?)"/))
      @link = m[1]
    end
    if (m = js.match(/^\s*var Program_text = "(.+?)"/))
      @text = URI.decode_www_form_component(m[1])
    end
    if (m = js.match(/^\s*var Program_personality = "(.+?)"/))
      @personality = URI.decode_www_form_component(m[1])
    end
    # Music/artist are only kept when present and non-empty.
    m = js.match(/^\s*var Now_music = "(.+?)"/)
    @music = URI.decode_www_form_component(m[1]) if m && !m[1].empty?
    m = js.match(/^\s*var Now_artist = "(.+?)"/)
    @artist = URI.decode_www_form_component(m[1]) if m && !m[1].empty?
  end

  attr_reader :name, :img, :link, :text, :personality, :music, :artist

  def inspect
    "#<Program #{@name} / #{@personality}>"
  end
end
# The station's FMS (RTMP server) list, fetched and parsed from XML.
class FmsList
  # One streaming endpoint from the serverlist.
  class Server < Struct.new(:cryptography, :protocol, :server, :app, :stream)
    # Encrypted endpoints have a non-empty cryptography field.
    def encrypted?
      !cryptography.empty?
    end

    # Base RTMP URL: protocol plus the host with any path collapsed to "/".
    def rtmp
      host = server.sub(/\/.*$/,'/')
      "#{protocol}://#{host}"
    end

    alias app_orig app

    # rtmpdump --app value built from the path portion and the raw app name.
    def app
      tail = server.sub(/^.*\//,'')
      "?rtmp://#{tail}/#{app_orig}/"
    end

    def playpath
      stream
    end
  end

  # Download and parse the current server list.
  def self.acquire
    self.new Nokogiri::XML(open('http://www.uniqueradio.jp/agplayerf/getfmsListHD.php'))
  end

  def initialize(xml)
    fields = %w(cryptography protocol server app stream)
    @servers = xml.search('ag serverlist serverinfo').map do |serverinfo|
      Server.new(*fields.map { |f| serverinfo.at(f).text })
    end
  end

  # Only unencrypted endpoints are usable by rtmpdump here.
  def available_servers
    @servers.select { |server| !server.encrypted? }
  end

  attr_reader :servers
end
# Positional CLI arguments: recording name, duration in seconds, and an
# optional start time ("HHMM" or epoch seconds).
if ARGV.size < 2
  abort "usage: #{$0} name seconds [start]"
end
name, seconds, start = *ARGV
seconds = seconds.to_i

# Optional YAML config next to this script; ENV vars take precedence below.
config_path = "#{__dir__}/config.yml"
if File.exist?(config_path) # File.exists? was deprecated and removed in Ruby 3.2
  config = YAML.load_file(config_path)
else
  config = {}
end
# Runtime settings: ENV always wins, then config.yml, then a default.
RECORD_DIR = ENV['AGQR_RECORD_DIR'] || config['record_dir'] || "#{__dir__}/recorded"
S3_REGION = ENV['AGQR_S3_REGION'] || config['s3_region']
S3_BUCKET = ENV['AGQR_S3_BUCKET'] || config['s3_bucket']
# Strip a single trailing slash so key joins below don't double up.
S3_PREFIX = (ENV['AGQR_S3_PREFIX'] || config['s3_prefix'] || '').sub(/\/\z/,'')
S3_ACCESS_KEY_ID = config['aws_access_key_id']
S3_SECRET_ACCESS_KEY = config['aws_secret_access_key']
HTTP_BASE = ENV['AGQR_URL_BASE'] || config['http_base'] || "http://localhost"
# Seconds recorded before/after the scheduled window.
MARGIN_BEFORE = (ENV['AGQR_MARGIN_BEFORE'] || config['margin_before'] || 12).to_i
MARGIN_AFTER = (ENV['AGQR_MARGIN_AFTER'] || config['margin_after'] || 20).to_i
# rtmpdump exiting up to this many seconds early still counts as success.
ALLOW_EARLY_EXIT = (ENV['AGQR_EARLY_EXIT_ALLOWANCE'] || config['allow_early_exit'] || 10).to_i
HOSTNAME = (ENV['AGQR_HOSTNAME'] || Socket.gethostname)
TIMEOUT = (ENV['AGQR_TIMEOUT'] || config['timeout'] || 10).to_i
raise 'specify s3_bucket and s3_region' unless S3_BUCKET && S3_REGION
# Resolve the optional start argument: "HHMM" means the next occurrence of
# that wall-clock time (today or tomorrow); anything else is epoch seconds.
# Then sleep until MARGIN_BEFORE seconds before it.
if start
  if start.size == 4
    h,m = start[0,2].to_i, start[2,2].to_i
    now = Time.now
    time = Time.new(now.year, now.month, now.day, h, m, 0)
    # Already past today -> schedule for tomorrow.
    time += 86400 if time < now
  else
    time = Time.at(start.to_i)
  end
  waittime = time - MARGIN_BEFORE
  puts " * Sleep until #{waittime} "
  $stdout.flush
  sleep 1 until waittime <= Time.now
end
# `time` is nil when no start argument was given (branch parsed, not run).
pubdate = time || Time.now
pubdate_str = pubdate.strftime('%Y-%m-%d_%H%M%S')
# Normalize the program name into something filesystem/S3 safe:
# full-width punctuation -> ASCII, quotes stripped, spaces -> underscores.
safe_name = name.
  gsub("/", "/").
  tr("[](){}", "[](){}").
  gsub('"','').
  gsub("'", '').
  gsub(" ", " ").
  gsub(" ", "_")
# Per-run working directory, unique by timestamp + pid.
target_dir = File.join(RECORD_DIR, "#{pubdate_str}.#{$$}")
FileUtils.mkdir_p(target_dir) unless File.exist?(target_dir) # exists? removed in Ruby 3.2
prog = nil
# Fetch the program metadata a little after recording starts (the station
# updates the "now playing" info late), without blocking the recorder.
Thread.new { # acquire program information after few seconds
  if seconds < 10
    sleep 0
  elsif seconds < 70
    sleep 30
  else
    sleep 60
  end
  prog = Program.acquire
  puts " * #{prog.name}"
  puts " * #{prog.text.inspect}"
  tweet "agqr.#{name}.watching: #{prog.name} (#{pubdate})"
}
# Record the stream. 2 passes x each server x 3 attempts: keep re-invoking
# rtmpdump (it can drop mid-stream) until the remaining `stop` budget is
# spent or one run is judged complete.
servers = FmsList.acquire.available_servers
try = 0
if start
  stop = MARGIN_BEFORE+seconds+MARGIN_AFTER
else
  stop = seconds
end
flv_paths = []
2.times do
  servers.each do |server|
    3.times do |server_try|
      break if stop < 1
      flv_path = File.join(target_dir, "#{try}.flv")
      flv_paths << flv_path
      cmd = [
        'rtmpdump',
        # '--verbose',
        '--live',
        '-o', flv_path,
        '--stop', stop.to_i,
        '--timeout', TIMEOUT,
        '--rtmp', server.rtmp,
        '--app', server.app,
        '--playpath', server.playpath,
      ].map(&:to_s)
      record_start = Time.now
      puts "==> #{cmd.join(' ')}"
      tweet "agqr.#{name}.start: #{stop} seconds (try:#{try}, #{pubdate})"
      status = nil
      out = ""
      IO.popen([*cmd, err: [:child, :out]], 'r') do |io|
        # Tail rtmpdump's combined output line-by-line while it runs,
        # accumulating everything into `out` for the checks below.
        th = Thread.new {
          begin
            buf = ""
            until io.eof?
              str = io.read(10)
              buf << str; out << str
              lines = buf.split(/\r|\n/)
              if 1 < lines.size
                buf = lines.pop
                lines.each do |line|
                  puts line
                end
              end
            end
          rescue Exception => e
            p e
            puts e.backtrace
          end
        }
        # Bugfix: Process.waitpid returns only the pid, so `status` stayed
        # nil and failed runs were never detected; Process.wait2 returns
        # [pid, Process::Status].
        pid, status = Process.wait2(io.pid)
        th.kill if th && th.alive?
      end
      elapsed = (Time.now - record_start).to_i
      if status && !status.success?
        puts " * May be failed"
        tweet "agqr.#{name}.fail: #{pubdate.rfc2822}"
      elsif /^Download may be incomplete/ === out
        puts " * Download may be incomplete"
        tweet "agqr.#{name}.incomplete: #{pubdate.rfc2822}"
      elsif elapsed < (seconds-ALLOW_EARLY_EXIT)
        puts " * Exited earlier (#{elapsed} seconds elapsed, #{stop} seconds expected)"
        tweet "agqr.#{name}.early-exit: #{pubdate.rfc2822}; #{elapsed}s elapsed, #{stop}s expected"
      else
        puts " * Done!"
        if prog
          tweet "agqr.#{name}.watched: #{prog.name} (#{pubdate.to_i})"
        else
          tweet "agqr.#{name}.watched: #{pubdate.rfc2822}"
        end
        # `break nil` makes this 3.times evaluate to nil, which the
        # trailing `|| break` chains use to unwind the outer loops too.
        break nil
      end
      try += 1
      stop -= elapsed
      sleep 2
    end || break
  end || break
end
# Transcode every recorded FLV chunk to a 64k MP3; failed chunks are
# dropped from the list rather than aborting the run.
mp3_paths = flv_paths.map do |flv_path|
  mp3_path = flv_path.sub(/\.flv$/, '.mp3')
  cmd = ["ffmpeg", "-i", flv_path, "-b:a", "64k", mp3_path]
  puts "==> #{cmd.join(' ')}"
  status = system(*cmd)
  if status
    puts " * Done!"
    mp3_path
  else
    puts " * Failed ;("
    nil
  end
end.compact
# Join the per-attempt MP3 chunks into one file via ffmpeg's concat demuxer
# (the playlist temp file lists one `file '...'` entry per chunk).
puts "==> Concatenating MP3"
single_mp3_path = File.join(target_dir, 'all.mp3')
playlist = Tempfile.new("agqr-#{pubdate_str}-#{$$}-mp3.txt")
playlist.puts mp3_paths.map { |_| "file '#{_}'" }.join("\n")
playlist.flush
cmd = ["ffmpeg", "-f", "concat", "-i", playlist.path, "-c", "copy", single_mp3_path]
status = system(*cmd)
if status
  puts " * Done!"
else
  puts " * Failed ;("
  nil
end
# Also produce a single re-encoded MP4 from the raw FLV chunks.
puts "==> Concatenating FLV"
single_mp4_path = File.join(target_dir, 'all.mp4')
playlist = Tempfile.new("agqr-#{pubdate_str}-#{$$}-mp4.txt")
playlist.puts flv_paths.map { |_| "file '#{_}'" }.join("\n")
playlist.flush
cmd = ["ffmpeg", "-f", "concat", "-i", playlist.path, "-vcodec", "libx264", "-acodec", "libfaac", "-b:a", "64k", single_mp4_path]
status = system(*cmd)
if status
  puts " * Done!"
else
  puts " * Failed ;("
  nil
end
# Describe this run (host, attempt count, timestamps, produced files) so
# the consumer side can reassemble it.
puts "==> Generating metadata"
meta_path = File.join(target_dir, 'meta.json')
meta = {
  host: HOSTNAME,
  try: try,
  date: {
    unix: pubdate.to_i,
    str: pubdate_str,
    pubdate: pubdate.rfc2822,
  },
  flv_paths: flv_paths.map { |_| File.basename(_) },
  mp3_paths: mp3_paths.map { |_| File.basename(_) },
}
# The concatenated files are only referenced when ffmpeg produced them.
if File.exist?(single_mp3_path)
  meta[:single_mp3_path] = File.basename(single_mp3_path)
end
if File.exist?(single_mp4_path)
  meta[:single_mp4_path] = File.basename(single_mp4_path)
end
# prog may still be nil when the acquire thread failed or hasn't finished.
if prog
  meta.merge!(
    program: {
      title: prog.name,
      description: prog.text,
      link: prog.link,
      personality: prog.personality,
    }
  )
end
pp meta
File.write meta_path, "#{meta.to_json}\n"
# Upload every artifact (FLV chunks, MP3s, singles, metadata) under a key
# namespaced per program/run/host, then remove the local working dir.
puts "==> Uploading to S3"
if S3_BUCKET && S3_REGION
  # Explicit credentials from config.yml when present, otherwise fall back
  # to the default AWS credential chain.
  if S3_ACCESS_KEY_ID && S3_SECRET_ACCESS_KEY
    s3 = Aws::S3::Client.new(region: S3_REGION, credentials: Aws::Credentials.new(S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY))
  else
    s3 = Aws::S3::Client.new(region: S3_REGION)
  end
  # Per-host key prefix so concurrent recorders of the same show don't
  # clobber each other's uploads.
  s3_key_base = "#{S3_PREFIX}/#{safe_name}/work/#{pubdate_str}/#{HOSTNAME}"
  flv_paths.each do |_|
    open(_, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(_)}"
      puts " * #{_} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      p s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'video/x-flv',
      )
    end
  end
  mp3_paths.each do |_|
    open(_, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(_)}"
      puts " * #{_} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      p s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'audio/mpeg',
      )
    end
  end
  if File.exist?(single_mp3_path)
    open(single_mp3_path, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(single_mp3_path)}"
      puts " * #{single_mp3_path} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      p s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'audio/mpeg',
      )
    end
  end
  if File.exist?(single_mp4_path)
    open(single_mp4_path, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(single_mp4_path)}"
      puts " * #{single_mp4_path} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      p s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'video/mpeg',
      )
    end
  end
  open(meta_path, 'r') do |io|
    key = "#{s3_key_base}/#{File.basename(meta_path)}"
    puts " * #{meta_path} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
    p s3.put_object(
      bucket: S3_BUCKET,
      key: key,
      body: io,
      content_type: 'application/json',
    )
  end
  # Random vote marker — presumably lets the collector pick one host's
  # upload when several recorded the same show (confirm against collector).
  vote = rand(1000)
  puts " * Vote #{vote}"
  key = "#{s3_key_base}/vote.txt"
  p s3.put_object(
    bucket: S3_BUCKET,
    key: key,
    body: vote.to_s,
    content_type: 'text/plain',
  )
else
  puts " * Skipping"
end
FileUtils.remove_entry_secure(target_dir)
# Final notification once everything (record -> encode -> upload -> cleanup)
# has finished.
if prog
  tweet "agqr.#{name}.done: #{prog.name} (#{pubdate.to_i})"
else
  tweet "agqr.#{name}.done: #{pubdate.rfc2822}"
end
# Bugfix: a stray bare `logger` expression used to follow here — there is no
# such method or local in this script, so every run ended with a NameError
# (and a nonzero exit) after all the work had succeeded. Removed.
#!/usr/bin/env ruby
ENV['TZ'] = 'Asia/Tokyo'
Dir.chdir __dir__
require 'bundler/setup'
require 'pp'
require 'yaml'
require 'time'
require 'uri'
require 'open-uri'
require 'nokogiri'
require 'fluent-logger'
require 'aws-sdk'
require 'json'
require 'fileutils'
require 'socket'
require 'tempfile'
require 'logger'
# Log synchronously to stdout under the "rec" program name.
$stdout.sync = true
@logger = Logger.new($stdout)
@logger.progname = 'rec'
# Notification hook kept for interface parity with the Fluentd variant;
# this build intentionally does nothing with the message.
def tweet(message)
end
# Currently-airing program information scraped from uniqueradio.jp.
# The source is a JavaScript snippet of `var Foo = "..."` assignments whose
# values are percent-encoded.
class Program
  # Download and parse the live program snippet.
  def self.acquire
    self.new open('http://www.uniqueradio.jp/aandg', &:read)
  end

  def initialize(js)
    if (m = js.match(/^\s*var Program_name = "(.+?)"/))
      @name = URI.decode_www_form_component(m[1])
    end
    if (m = js.match(/^\s*var Program_img = "(.+?)"/))
      @img = m[1]
    end
    if (m = js.match(/^\s*var Program_link = "(.+?)"/))
      @link = m[1]
    end
    if (m = js.match(/^\s*var Program_text = "(.+?)"/))
      @text = URI.decode_www_form_component(m[1])
    end
    if (m = js.match(/^\s*var Program_personality = "(.+?)"/))
      @personality = URI.decode_www_form_component(m[1])
    end
    # Music/artist are only kept when present and non-empty.
    m = js.match(/^\s*var Now_music = "(.+?)"/)
    @music = URI.decode_www_form_component(m[1]) if m && !m[1].empty?
    m = js.match(/^\s*var Now_artist = "(.+?)"/)
    @artist = URI.decode_www_form_component(m[1]) if m && !m[1].empty?
  end

  attr_reader :name, :img, :link, :text, :personality, :music, :artist

  def inspect
    "#<Program #{@name} / #{@personality}>"
  end
end
# The station's FMS (RTMP server) list, fetched and parsed from XML.
class FmsList
  # One streaming endpoint from the serverlist.
  class Server < Struct.new(:cryptography, :protocol, :server, :app, :stream)
    # Encrypted endpoints have a non-empty cryptography field.
    def encrypted?
      !cryptography.empty?
    end

    # Base RTMP URL: protocol plus the host with any path collapsed to "/".
    def rtmp
      host = server.sub(/\/.*$/,'/')
      "#{protocol}://#{host}"
    end

    alias app_orig app

    # rtmpdump --app value built from the path portion and the raw app name.
    def app
      tail = server.sub(/^.*\//,'')
      "?rtmp://#{tail}/#{app_orig}/"
    end

    def playpath
      stream
    end
  end

  # Download and parse the current server list.
  def self.acquire
    self.new Nokogiri::XML(open('http://www.uniqueradio.jp/agplayerf/getfmsListHD.php'))
  end

  def initialize(xml)
    fields = %w(cryptography protocol server app stream)
    @servers = xml.search('ag serverlist serverinfo').map do |serverinfo|
      Server.new(*fields.map { |f| serverinfo.at(f).text })
    end
  end

  # Only unencrypted endpoints are usable by rtmpdump here.
  def available_servers
    @servers.select { |server| !server.encrypted? }
  end

  attr_reader :servers
end
# Positional CLI arguments: recording name, duration in seconds, and an
# optional start time ("HHMM" or epoch seconds).
if ARGV.size < 2
  abort "usage: #{$0} name seconds [start]"
end
name, seconds, start = *ARGV
seconds = seconds.to_i

# Optional YAML config next to this script; ENV vars take precedence below.
config_path = "#{__dir__}/config.yml"
if File.exist?(config_path) # File.exists? was deprecated and removed in Ruby 3.2
  config = YAML.load_file(config_path)
else
  config = {}
end

# Bugfix: Pathname is used below but was never required by this script;
# it only worked when another require pulled it in transitively.
require 'pathname'

# Runtime settings: ENV always wins, then config.yml, then a default.
RECORD_DIR = ENV['AGQR_RECORD_DIR'] || config['record_dir'] || "#{__dir__}/recorded"
# Log directory is created eagerly via tap(&:mkpath).
LOG_DIR = Pathname.new(ENV['AGQR_LOG_DIR'] || config['log'] || './log').tap(&:mkpath)
S3_REGION = ENV['AGQR_S3_REGION'] || config['s3_region']
S3_BUCKET = ENV['AGQR_S3_BUCKET'] || config['s3_bucket']
S3_PREFIX = (ENV['AGQR_S3_PREFIX'] || config['s3_prefix'] || '').sub(/\/\z/,'')
S3_ACCESS_KEY_ID = config['aws_access_key_id']
S3_SECRET_ACCESS_KEY = config['aws_secret_access_key']
HTTP_BASE = ENV['AGQR_URL_BASE'] || config['http_base'] || "http://localhost"
# Seconds recorded before/after the scheduled window.
MARGIN_BEFORE = (ENV['AGQR_MARGIN_BEFORE'] || config['margin_before'] || 12).to_i
MARGIN_AFTER = (ENV['AGQR_MARGIN_AFTER'] || config['margin_after'] || 20).to_i
# rtmpdump exiting up to this many seconds early still counts as success.
ALLOW_EARLY_EXIT = (ENV['AGQR_EARLY_EXIT_ALLOWANCE'] || config['allow_early_exit'] || 10).to_i
HOSTNAME = (ENV['AGQR_HOSTNAME'] || Socket.gethostname)
TIMEOUT = (ENV['AGQR_TIMEOUT'] || config['timeout'] || 10).to_i
raise 'specify s3_bucket and s3_region' unless S3_BUCKET && S3_REGION
# Resolve the optional start argument: "HHMM" means the next occurrence of
# that wall-clock time (today or tomorrow); anything else is epoch seconds.
# Then sleep until MARGIN_BEFORE seconds before it.
if start
  if start.size == 4
    h,m = start[0,2].to_i, start[2,2].to_i
    now = Time.now
    time = Time.new(now.year, now.month, now.day, h, m, 0)
    # Already past today -> schedule for tomorrow.
    time += 86400 if time < now
  else
    time = Time.at(start.to_i)
  end
  waittime = time - MARGIN_BEFORE
  @logger.info "Sleep until #{waittime} "
  sleep 1 until waittime <= Time.now
end
# `time` is nil when no start argument was given (branch parsed, not run).
pubdate = time || Time.now
pubdate_str = pubdate.strftime('%Y-%m-%d_%H%M%S')
# Normalize the program name into something filesystem/S3 safe:
# full-width punctuation -> ASCII, quotes stripped, spaces -> underscores.
safe_name = name.
  gsub("/", "/").
  tr("[](){}", "[](){}").
  gsub('"','').
  gsub("'", '').
  gsub(" ", " ").
  gsub(" ", "_")
# Per-run working directory, unique by timestamp + pid.
target_dir = File.join(RECORD_DIR, "#{pubdate_str}.#{$$}")
FileUtils.mkdir_p(target_dir) unless File.exist?(target_dir) # exists? removed in Ruby 3.2
prog = nil
# Fetch the program metadata a little after recording starts (the station
# updates the "now playing" info late), without blocking the recorder.
Thread.new { # acquire program information after few seconds
  if seconds < 10
    sleep 0
  elsif seconds < 70
    sleep 30
  else
    sleep 60
  end
  prog = Program.acquire
  @logger.info "=> program: #{prog.name}"
  @logger.info " #{prog.text.inspect}"
  tweet "agqr.#{name}.watching: #{prog.name} (#{pubdate})"
}
# Record the stream. 2 passes x each server x 3 attempts: keep re-invoking
# rtmpdump (it can drop mid-stream) until the remaining `stop` budget is
# spent or one run is judged complete. rtmpdump output goes to a log file.
servers = FmsList.acquire.available_servers
try = 0
if start
  stop = MARGIN_BEFORE+seconds+MARGIN_AFTER
else
  stop = seconds
end
flv_paths = []
2.times do
  servers.each do |server|
    3.times do |server_try|
      break if stop < 1
      flv_path = File.join(target_dir, "#{try}.flv")
      flv_paths << flv_path
      cmd = [
        'rtmpdump',
        # '--verbose',
        '--live',
        '-o', flv_path,
        '--stop', stop.to_i,
        '--timeout', TIMEOUT,
        '--rtmp', server.rtmp,
        '--app', server.app,
        '--playpath', server.playpath,
      ].map(&:to_s)
      record_start = Time.now
      @logger.info "==> #{cmd.join(' ')}"
      tweet "agqr.#{name}.start: #{stop} seconds (try:#{try}, #{pubdate})"
      status = nil
      log_path = LOG_DIR.join("rtmpdump.#{$$}.#{try}.log")
      open(log_path, 'w') do |log_io|
        log_io.puts "=> #{safe_name}: #{cmd.inspect}"
        log_io.flush
        pid = spawn(*cmd, out: log_io, err: log_io)
        # Bugfix: Process.waitpid returns only the pid, so `status` stayed
        # nil and failed runs were never detected; Process.wait2 returns
        # [pid, Process::Status].
        pid, status = Process.wait2(pid)
      end
      elapsed = (Time.now - record_start).to_i
      if status && !status.success?
        @logger.warn "May be failed"
        tweet "agqr.#{name}.fail: #{pubdate.rfc2822}"
      elsif /^Download may be incomplete/ === File.read(log_path)
        @logger.warn "Download may be incomplete"
        tweet "agqr.#{name}.incomplete: #{pubdate.rfc2822}"
      elsif elapsed < (seconds-ALLOW_EARLY_EXIT)
        @logger.warn "Exited earlier (#{elapsed} seconds elapsed, #{stop} seconds expected)"
        tweet "agqr.#{name}.early-exit: #{pubdate.rfc2822}; #{elapsed}s elapsed, #{stop}s expected"
      else
        @logger.info "ok"
        if prog
          tweet "agqr.#{name}.watched: #{prog.name} (#{pubdate.to_i})"
        else
          tweet "agqr.#{name}.watched: #{pubdate.rfc2822}"
        end
        # `break nil` makes this 3.times evaluate to nil, which the
        # trailing `|| break` chains use to unwind the outer loops too.
        break nil
      end
      try += 1
      stop -= elapsed
      sleep 2
    end || break
  end || break
end
# Pre-declare so the results of the logging block below stay visible to the
# metadata/upload sections that follow.
mp3_paths = nil
single_mp3_path =nil
single_mp4_path = nil
# All ffmpeg output for this run goes into a single log file.
open(LOG_DIR.join("ffmpeg.#{$$}.log"), 'w') do |log_io|
  # Per-chunk FLV -> 64k MP3; failed chunks are dropped, not fatal.
  mp3_paths = flv_paths.map do |flv_path|
    mp3_path = flv_path.sub(/\.flv$/, '.mp3')
    cmd = ["ffmpeg", "-i", flv_path, "-b:a", "64k", mp3_path]
    @logger.info "==> #{cmd.join(' ')}"
    log_io.puts "=> #{safe_name}: #{cmd.inspect}"
    log_io.flush
    status = system(*cmd, out: log_io, err: log_io)
    if status
      @logger.info "ok"
      mp3_path
    else
      @logger.error "mp3 encoding Failed ;("
      nil
    end
  end.compact
  # Join the MP3 chunks via ffmpeg's concat demuxer (stream copy).
  @logger.info "==> Concatenating MP3"
  single_mp3_path = File.join(target_dir, 'all.mp3')
  playlist = Tempfile.new("agqr-#{pubdate_str}-#{$$}-mp3.txt")
  playlist.puts mp3_paths.map { |_| "file '#{_}'" }.join("\n")
  playlist.flush
  cmd = ["ffmpeg", "-f", "concat", "-i", playlist.path, "-c", "copy", single_mp3_path]
  @logger.info "#{cmd.join(' ')}"
  log_io.puts "=> #{safe_name}: #{cmd.inspect}"
  log_io.flush
  status = system(*cmd, out: log_io, err: log_io)
  if status
    @logger.info "ok"
  else
    @logger.error "mp3 concat Failed ;("
    nil
  end
  # Also produce a single re-encoded MP4 from the raw FLV chunks.
  @logger.info "==> Concatenating FLV"
  single_mp4_path = File.join(target_dir, 'all.mp4')
  playlist = Tempfile.new("agqr-#{pubdate_str}-#{$$}-mp4.txt")
  playlist.puts flv_paths.map { |_| "file '#{_}'" }.join("\n")
  playlist.flush
  cmd = ["ffmpeg", "-f", "concat", "-i", playlist.path, "-vcodec", "libx264", "-acodec", "libfaac", "-b:a", "64k", single_mp4_path]
  @logger.info "#{cmd.join(' ')}"
  log_io.puts "=> #{safe_name}: #{cmd.inspect}"
  log_io.flush
  status = system(*cmd, out: log_io, err: log_io)
  if status
    @logger.info "ok"
  else
    @logger.error "FLV->MP4 Failed ;("
    nil
  end
end
# Describe this run (host, attempt count, timestamps, produced files) so
# the consumer side can reassemble it.
@logger.info "==> Generating metadata"
meta_path = File.join(target_dir, 'meta.json')
meta = {
  host: HOSTNAME,
  try: try,
  date: {
    unix: pubdate.to_i,
    str: pubdate_str,
    pubdate: pubdate.rfc2822,
  },
  flv_paths: flv_paths.map { |_| File.basename(_) },
  mp3_paths: mp3_paths.map { |_| File.basename(_) },
}
# The concatenated files are only referenced when ffmpeg produced them.
if File.exist?(single_mp3_path)
  meta[:single_mp3_path] = File.basename(single_mp3_path)
end
if File.exist?(single_mp4_path)
  meta[:single_mp4_path] = File.basename(single_mp4_path)
end
# prog may still be nil when the acquire thread failed or hasn't finished.
if prog
  meta.merge!(
    program: {
      title: prog.name,
      description: prog.text,
      link: prog.link,
      personality: prog.personality,
    }
  )
end
@logger.info meta.inspect
File.write meta_path, "#{meta.to_json}\n"
# Upload every artifact (FLV chunks, MP3s, singles, metadata) under a key
# namespaced per program/run/host.
@logger.info "==> Uploading to S3"
if S3_BUCKET && S3_REGION
  # Explicit credentials from config.yml when present, otherwise the default
  # AWS chain; the S3 client logs through our stdout logger.
  if S3_ACCESS_KEY_ID && S3_SECRET_ACCESS_KEY
    s3 = Aws::S3::Client.new(region: S3_REGION, credentials: Aws::Credentials.new(S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY), logger: @logger)
  else
    s3 = Aws::S3::Client.new(region: S3_REGION, logger: @logger)
  end
  # Per-host key prefix so concurrent recorders of the same show don't
  # clobber each other's uploads.
  s3_key_base = "#{S3_PREFIX}/#{safe_name}/work/#{pubdate_str}/#{HOSTNAME}"
  flv_paths.each do |_|
    open(_, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(_)}"
      @logger.info "#{_} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'video/x-flv',
      )
    end
  end
  mp3_paths.each do |_|
    open(_, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(_)}"
      @logger.info "#{_} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'audio/mpeg',
      )
    end
  end
  if File.exist?(single_mp3_path)
    open(single_mp3_path, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(single_mp3_path)}"
      @logger.info "#{single_mp3_path} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'audio/mpeg',
      )
    end
  end
  if File.exist?(single_mp4_path)
    open(single_mp4_path, 'r') do |io|
      key = "#{s3_key_base}/#{File.basename(single_mp4_path)}"
      @logger.info "#{single_mp4_path} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
      s3.put_object(
        bucket: S3_BUCKET,
        key: key,
        body: io,
        content_type: 'video/mpeg',
      )
    end
  end
  open(meta_path, 'r') do |io|
    key = "#{s3_key_base}/#{File.basename(meta_path)}"
    @logger.info "#{meta_path} => s3://#{S3_BUCKET}/#{key} @ #{S3_REGION}"
    s3.put_object(
      bucket: S3_BUCKET,
      key: key,
      body: io,
      content_type: 'application/json',
    )
  end
  # Random vote marker — presumably lets the collector pick one host's
  # upload when several recorded the same show (confirm against collector).
  vote = rand(1000)
  @logger.info " * Vote #{vote}"
  key = "#{s3_key_base}/vote.txt"
  s3.put_object(
    bucket: S3_BUCKET,
    key: key,
    body: vote.to_s,
    content_type: 'text/plain',
  )
else
  @logger.info "Skipping"
end
# Everything now lives in S3; remove the per-run working directory.
FileUtils.remove_entry_secure(target_dir)
# Final notification once record -> encode -> upload -> cleanup finished.
if prog
  tweet "agqr.#{name}.done: #{prog.name} (#{pubdate.to_i})"
else
  tweet "agqr.#{name}.done: #{pubdate.rfc2822}"
end
@logger.info "Done!"
|
class Ship
  attr_accessor :length

  # length: number of grid cells this ship occupies.
  def initialize length
    @length = length
  end

  # Return the [x, y] cells covered by a ship whose origin is (x, y).
  # horizontal == true lays it along the x axis, false along the y axis;
  # any other value returns nil.
  def place(x, y, horizontal)
    if horizontal == true
      #generate a range for the x component
      stop = x + length - 1
      x_range = (x..stop).to_a
      coordinates = x_range.map { |x| [x,y]}
    elsif horizontal == false
      #generate a range for the y component
      # Bugfix: the stop was computed from x (wrong axis), so vertical ships
      # got the wrong length whenever x != y.
      stop = y + length - 1
      y_range = (y..stop).to_a
      coordinates = y_range.map { |y| [x,y]}
    end
    return coordinates
  end
end
Add Ship.place and Ship.covers?
class Ship
  attr_accessor :length

  # length: number of grid cells this ship occupies.
  def initialize length
    @length = length
  end

  # Place the ship with its origin at (x, y); horizontal == true lays it
  # along the x axis, false along the y axis. Records and returns the
  # covered [x, y] cells (nil when horizontal is neither true nor false).
  def place(x, y, horizontal)
    if horizontal == true
      #generate a range for the x component
      stop = x + length - 1
      x_range = (x..stop).to_a
      @coordinates = x_range.map { |x| [x,y]}
    elsif horizontal == false
      #generate a range for the y component
      # Bugfix: the stop was computed from x (wrong axis), so vertical ships
      # got the wrong length whenever x != y.
      stop = y + length - 1
      y_range = (y..stop).to_a
      @coordinates = y_range.map { |y| [x,y]}
    end
    @coordinates
  end

  # True when the ship's most recent placement covers the cell (x, y).
  def covers?(x, y)
    return @coordinates.include?([x, y])
  end
end
|
sort method
# Demonstrate Array#sort (non-destructive): strings order by codepoint, so
# all capitalized names come before lowercase ones; integers sort ascending.
ary = %w[Anca Silviu Oana Corina mami tati matusa unchiu vara varu]
ary.sort
yra = [55, 2, 444, 95, 112, 1, 347, 98, 34, 434, 121, 467, 13, 226]
yra.sort
module Celluloid
  module Sync
    # One-shot culture sync at load time (guarded by @@updated being
    # undefined): pull the culture repo and remember whether it changed.
    unless defined? @@updated
      @@gem_path ||= File.expand_path("../../", __FILE__)
      $:.push( @@gem_path)
      puts "Synchronizing Celluloid Culture //"
      @@update = %x[cd #{@@gem_path}/culture; git pull]
      @@updated = !@@update.include?("up-to-date")
      @@required ||= [
        "#{@@gem_path}/culture/sync.rb",
        "#{@@gem_path}/culture/gems/loader"
      ]
      puts @@update
    end
    class << self
      # Reload the culture files when the git pull brought in changes.
      def update!
        if @@updated
          puts "Celluloid Culture was updated."
          @@required.each { |rb| load(rb) }
          puts "Reloaded Culture::Sync itself."
        end
      end
    end
    unless @@updated
      require(@@required.last)
      GEM = Celluloid::Gems::SELF unless defined? GEM
      LIB_PATH = File.expand_path("../../lib/#{GEM.split("-").join("/")}", __FILE__)
      if File.exist?(version="#{LIB_PATH}/version.rb")
        require(version)
      end
      class << self
        # Load gem metadata through the appropriate culture adapter.
        # Bugfix: this used `case loader.class`, but `when` compares with
        # `===`, and `Gem::Specification === loader.class` is never true for
        # an instance's class object — neither branch ever ran. Matching on
        # the loader itself restores the intended class dispatch.
        def gems(loader)
          case loader
          when Gem::Specification
            Gems.gemspec(loader)
          when Bundler::Dsl
            Gems.bundler(loader)
          end
        end
      end
    end
  end
end
Celluloid::Sync.update!
nix infinite loop
module Celluloid
  module Sync
    # One-shot culture sync at load time (guarded by @@updated being
    # undefined): pull the culture repo and remember whether it changed.
    unless defined? @@updated
      @@gem_path ||= File.expand_path("../../", __FILE__)
      $:.push( @@gem_path)
      puts "Synchronizing Celluloid Culture //"
      @@update = `cd #{@@gem_path}/culture; git pull`
      @@updated = !@@update.include?("up-to-date")
      @@required ||= [
        "#{@@gem_path}/culture/sync.rb",
        "#{@@gem_path}/culture/gems/loader"
      ]
    end
    class << self
      # True when the git pull above brought in changes.
      def updated?
        @@updated
      end
      # update! is only defined at all when an update actually happened.
      if @@updated
        def update!
          if @@updated
            puts "Celluloid Culture was updated."
            @@required.each { |rb| load(rb) }
            puts "Reloaded Culture::Sync itself:\n#{@@update}"
          end
        end
      end
    end
    # NOTE(review): as extracted, this `else` cannot pair with any construct
    # still open here (the `if @@updated` and `class << self` above are both
    # already closed), and the trailing `end` count is one higher than the
    # opened constructs — this revision looks garbled/unparseable; confirm
    # against the original commit before touching the logic.
    else
      require(@@required.last)
      GEM = Celluloid::Gems::SELF unless defined? GEM
      LIB_PATH = File.expand_path("../../lib/#{GEM.split("-").join("/")}", __FILE__)
      if File.exist?(version="#{LIB_PATH}/version.rb")
        require(version)
      end
      class << self
        # NOTE(review): `case loader.class` compares the class object via
        # ===, which never matches `when Gem::Specification` for an
        # instance's class — `case loader` is presumably what was meant.
        def gems(loader)
          case loader.class
          when Gem::Specification
            Gems.gemspec(loader)
          when Bundler::Dsl
            Gems.bundler(loader)
          end
        end
      end
    end
  end
end
end
Celluloid::Sync.update! if Celluloid::Sync.updated?
module Celluloid
  # Pulls the shared "culture" repository next to this gem and loads its gem
  # loader plus the gem's own version file. Everything here runs as a side
  # effect of requiring this file.
  module Sync
    GEM_PATH ||= File.expand_path("../../", __FILE__)
    $LOAD_PATH.push(GEM_PATH)
    # TODO: This will likely need to be done differently if INSIDE a cut gem.
    puts "Synchronizing Celluloid Culture //"
    `cd #{GEM_PATH}/culture; git pull`
    require("#{GEM_PATH}/culture/gems/loader")
    # Gem name, e.g. "celluloid-io" maps to lib path "lib/celluloid/io" below.
    GEM = Celluloid::Gems::SELF unless defined? GEM
    LIB_PATH = File.expand_path("../../lib", __FILE__)
    LIB_GEMPATH = "#{LIB_PATH}/#{GEM.split('-').join('/')}"
    $LOAD_PATH.push(LIB_PATH)
    # Load the gem's version constant if a version.rb is present.
    if File.exist?(version = "#{LIB_GEMPATH}/version.rb")
      require(version)
    end
  end
end
specify source and branch in sync
module Celluloid
  # Same load-time synchronization as before, but the pull now names the
  # remote and branch explicitly so checkouts without tracking info update.
  module Sync
    GEM_PATH ||= File.expand_path("../../", __FILE__)
    $LOAD_PATH.push(GEM_PATH)
    # TODO: This will likely need to be done differently if INSIDE a cut gem.
    puts "Synchronizing Celluloid Culture //"
    `cd #{GEM_PATH}/culture; git pull origin master`
    require("#{GEM_PATH}/culture/gems/loader")
    # Gem name, e.g. "celluloid-io" maps to lib path "lib/celluloid/io" below.
    GEM = Celluloid::Gems::SELF unless defined? GEM
    LIB_PATH = File.expand_path("../../lib", __FILE__)
    LIB_GEMPATH = "#{LIB_PATH}/#{GEM.split('-').join('/')}"
    $LOAD_PATH.push(LIB_PATH)
    # Load the gem's version constant if a version.rb is present.
    if File.exist?(version = "#{LIB_GEMPATH}/version.rb")
      require(version)
    end
  end
end
|
require "rubygems"
require "midilib"
require "ostruct"
require "json"
require "debugger"

# One lyric syllable; serializes its pulse timings as seconds (3 decimals).
class LyricSyllable < OpenStruct
  def as_json
    {
      "start" => self.seq.pulses_to_seconds(self.start.to_f).round(3),
      "duration" => self.seq.pulses_to_seconds(self.duration.to_f).round(3),
      "text" => self.text
    }
  end
end

seq = MIDI::Sequence.new()

if ARGV[0].nil?
  puts "usage #{$0} input.mid"
  exit
end

# Read the contents of a MIDI file into the sequence.
File.open(ARGV[0], "rb") do | file |
  seq.read(file)
end

lyrics_track = MIDI::Track.new(seq)
noteon_track = MIDI::Track.new(seq)

# Split track 1 into lyric meta events (skipping space-only text) and
# note-on events. NOTE(review): assumes the melody is in tracks[1] -- confirm
# for multi-track input files.
seq.tracks[1].each do | event |
  if event.kind_of?(MIDI::MetaEvent) && event.meta_type == MIDI::META_LYRIC
    text = event.data.collect{|x| x.chr(Encoding::UTF_8)}.join
    if text.gsub(" ", "") != ""
      lyrics_track.events << event
    end
  end
  if event.kind_of?(MIDI::NoteOn)
    noteon_track.events << event
  end
end

# Note length in pulses, keyed by absolute start time.
durations = {}
noteon_track.each do |event|
  durations[event.time_from_start] = event.off.time_from_start - event.time_from_start
end

# Pair each lyric with the duration of the note starting at the same pulse
# (duration is nil when no note starts exactly there).
lyrics_syllables = []
lyrics_track.each do |event|
  lyrics_syllables << LyricSyllable.new(
    :seq => seq,
    :start => event.time_from_start,
    :duration => durations[event.time_from_start],
    :text => event.data.collect{|x| x.chr(Encoding::UTF_8)}.join,
  )
end

puts lyrics_syllables.collect(&:as_json).to_json
Removes blank notes at start
require "rubygems"
require "midilib"
require "ostruct"
require "json"
require "debugger"

# One lyric syllable; serializes its pulse timings as seconds (3 decimals).
class LyricSyllable < OpenStruct
  def as_json
    {
      "start" => self.seq.pulses_to_seconds(self.start.to_f).round(3),
      "duration" => self.seq.pulses_to_seconds(self.duration.to_f).round(3),
      "text" => self.text
    }
  end
end

seq = MIDI::Sequence.new()

if ARGV[0].nil?
  puts "usage #{$0} input.mid"
  exit
end

# Read the contents of a MIDI file into the sequence.
File.open(ARGV[0], "rb") do | file |
  seq.read(file)
end

lyrics_track = MIDI::Track.new(seq)
noteon_track = MIDI::Track.new(seq)

# Split track 1 into lyric meta events (skipping space-only text) and
# note-on events. NOTE(review): assumes the melody is in tracks[1] -- confirm
# for multi-track input files.
seq.tracks[1].each do | event |
  if event.kind_of?(MIDI::MetaEvent) && event.meta_type == MIDI::META_LYRIC
    text = event.data.collect{|x| x.chr(Encoding::UTF_8)}.join
    if text.gsub(" ", "") != ""
      lyrics_track.events << event
    end
  end
  if event.kind_of?(MIDI::NoteOn)
    noteon_track.events << event
  end
end

# Note length in pulses, keyed by absolute start time.
durations = {}
noteon_track.each do |event|
  durations[event.time_from_start] = event.off.time_from_start - event.time_from_start
end

# Drop whitespace-only syllables from the start of the lyrics. Fix: guard
# against an empty event list -- previously `events.first` could be nil and
# raise NoMethodError when every lyric event had been filtered out.
while (head = lyrics_track.events.first) &&
      head.data.collect{|x| x.chr(Encoding::UTF_8)}.join.strip == ""
  lyrics_track.events.shift
end

# Pair each lyric with the duration of the note starting at the same pulse
# (duration is nil when no note starts exactly there).
lyrics_syllables = []
lyrics_track.each do |event|
  lyrics_syllables << LyricSyllable.new(
    :seq => seq,
    :start => event.time_from_start,
    :duration => durations[event.time_from_start],
    :text => event.data.collect{|x| x.chr(Encoding::UTF_8)}.join,
  )
end

puts lyrics_syllables.collect(&:as_json).to_json
|
#!/usr/bin/env ruby
require 'solareventcalculator'
class NewTime
  # A clock where daylight always spans 06:00-18:00 and night 18:00-06:00,
  # linearly rescaled from the actual sunrise/sunset at a given location.
  attr_accessor :hours, :minutes, :seconds, :fractional

  def initialize(hours, minutes, seconds, fractional)
    @hours, @minutes, @seconds, @fractional = hours, minutes, seconds, fractional
  end

  # Compute the NewTime for this instant at the given location/timezone.
  def self.current_time(latitude, longitude, tz)
    time = DateTime.now
    # Sun events for the surrounding days bracket the current moment.
    yesterday = SolarEventCalculator.new(time.to_date - 1, latitude, longitude)
    today = SolarEventCalculator.new(time.to_date, latitude, longitude)
    tomorrow = SolarEventCalculator.new(time.to_date + 1, latitude, longitude)
    sunset_yesterday = yesterday.compute_official_sunset(tz)
    sunrise_today = today.compute_official_sunrise(tz)
    sunset_today = today.compute_official_sunset(tz)
    sunrise_tomorrow = tomorrow.compute_official_sunrise(tz)
    # Pick the day/night interval containing `time`; each interval maps
    # onto exactly 12 "new" hours starting at start_hour.
    if time < sunrise_today
      start, finish = sunset_yesterday, sunrise_today
      start_hour = 18
    elsif time < sunset_today
      start, finish = sunrise_today, sunset_today
      start_hour = 6
    else
      start, finish = sunset_today, sunrise_tomorrow
      start_hour = 18
    end
    # Fraction of the interval elapsed, expressed as seconds-of-day.
    seconds = (start_hour + (time - start).to_f / (finish - start) * 12) * 60 * 60
    fractional = seconds - seconds.floor
    seconds = seconds.floor
    minutes = seconds / 60
    seconds -= minutes * 60
    hours = minutes / 60
    minutes -= hours * 60
    hours -= 24 if hours >= 24
    NewTime.new(hours, minutes, seconds, fractional)
  end

  # 12-hour rendering. NOTE(review): hour 12 renders as "12:xx am" and hour 0
  # as "0:xx am" -- likely wrong for noon/midnight; confirm intended format.
  def to_s
    if hours > 12
      "%i:%02i pm" % [hours - 12, minutes]
    else
      "%i:%02i am" % [hours, minutes]
    end
  end
end
# Hard-coded observer location (decimal degrees) and IANA timezone.
latitude = -33.714955
longitude = 150.311407
tz = "Australia/Sydney"
puts NewTime.current_time(latitude, longitude, tz)
Extract method
#!/usr/bin/env ruby
require 'solareventcalculator'
class NewTime
  # A clock where daylight always spans 06:00-18:00 and night 18:00-06:00,
  # linearly rescaled from the actual sunrise/sunset at a given location.
  attr_accessor :hours, :minutes, :seconds, :fractional

  def initialize(hours, minutes, seconds, fractional)
    @hours, @minutes, @seconds, @fractional = hours, minutes, seconds, fractional
  end

  # Compute the NewTime for this instant at the given location/timezone.
  def self.current_time(latitude, longitude, tz)
    convert(DateTime.now, latitude, longitude, tz)
  end

  # Convert an arbitrary DateTime into a NewTime at the given location.
  def self.convert(date_time, latitude, longitude, tz)
    # Sun events for the surrounding days bracket the given moment.
    yesterday = SolarEventCalculator.new(date_time.to_date - 1, latitude, longitude)
    today = SolarEventCalculator.new(date_time.to_date, latitude, longitude)
    tomorrow = SolarEventCalculator.new(date_time.to_date + 1, latitude, longitude)
    sunset_yesterday = yesterday.compute_official_sunset(tz)
    sunrise_today = today.compute_official_sunrise(tz)
    sunset_today = today.compute_official_sunset(tz)
    sunrise_tomorrow = tomorrow.compute_official_sunrise(tz)
    # Pick the day/night interval containing `date_time`; each interval maps
    # onto exactly 12 "new" hours starting at start_hour.
    if date_time < sunrise_today
      start, finish = sunset_yesterday, sunrise_today
      start_hour = 18
    elsif date_time < sunset_today
      start, finish = sunrise_today, sunset_today
      start_hour = 6
    else
      start, finish = sunset_today, sunrise_tomorrow
      start_hour = 18
    end
    # Fraction of the interval elapsed, expressed as seconds-of-day.
    seconds = (start_hour + (date_time - start).to_f / (finish - start) * 12) * 60 * 60
    fractional = seconds - seconds.floor
    seconds = seconds.floor
    minutes = seconds / 60
    seconds -= minutes * 60
    hours = minutes / 60
    minutes -= hours * 60
    hours -= 24 if hours >= 24
    NewTime.new(hours, minutes, seconds, fractional)
  end

  # 12-hour rendering. Fix: the noon hour (12:xx) now prints "12:xx pm" and
  # the midnight hour (0:xx) prints "12:xx am"; previously they printed
  # "12:xx am" and "0:xx am" respectively.
  def to_s
    if hours >= 12
      "%i:%02i pm" % [hours == 12 ? 12 : hours - 12, minutes]
    else
      "%i:%02i am" % [hours == 0 ? 12 : hours, minutes]
    end
  end
end
# Hard-coded observer location (decimal degrees) and IANA timezone.
latitude = -33.714955
longitude = 150.311407
tz = "Australia/Sydney"
puts NewTime.current_time(latitude, longitude, tz)
|
#!/usr/bin/env ruby
# Generates masscan configuration files (via `masscan --echo`) for one of
# several named port groups, writing a .conf per include file in data/.
require 'rake'
require 'date'

working_dir = "/home/scanner/brisket/"

# Port groups by service category.
remote_ports  = "22,23,3389,5900"
app_ports     = "21,69,53,389"
ms_ports      = "135,139,445"
mail_ports    = "25,110,995,993,465"
web_ports     = "80,443,8080"
db_ports      = "1433,1521,3306,5432"
# IRC, tor, tcp syslog, DNP3 (SCADA networks)
special_ports = "6667,9050,1514,20000"

rate = "2337" # restriction by the service provider is 4000/second
rate_cmd = "--rate " + rate
cmd = "/usr/local/sbin/masscan"
exclude_file = working_dir + "/masscan/data/exclude.conf" # NOTE(review): currently unused
results_dir = working_dir + "/results/"
data_dir = working_dir + "/data/"
include_file_cmd = " --includefile " + data_dir
conf_dir = working_dir + "/conf/"
dir_date = Date.today.year.to_s + "/" + Date.today.month.to_s + "/" + Date.today.day.to_s + "/"
results_dir_date = results_dir + dir_date
results_out = "-oX " + results_dir_date

# Selection name => ports to scan. Replaces the former 7-way duplicated
# if/elsif chain that repeated the identical Dir.foreach loop.
port_sets = {
  'remote'  => remote_ports,
  'apps'    => app_ports,
  'web'     => web_ports,
  'db'      => db_ports,
  'special' => special_ports,
  'ms'      => ms_ports,
  'mail'    => mail_ports,
}
# 'all' preserves the original ordering: remote,apps,web,db,special,ms,mail.
port_sets['all'] = [remote_ports, app_ports, web_ports, db_ports,
                    special_ports, ms_ports, mail_ports].join(",")

opt_sel_err = "[-] Usage: ./trim.rb <remote|apps|web|db|all>"

## Create the latest conf files
selection = ARGV[0]
ports = port_sets[selection]
if ports
  Dir.foreach(data_dir) do |item|
    next if item == '.' || item == '..'
    item_dir = conf_dir + item.gsub(/(.ip)/, '.conf')
    item_xml = item.gsub(/(.ip)/, '.xml')
    system(cmd + " -p" + ports + include_file_cmd + item + " " + rate_cmd + " " +
           results_out + item_xml + " --echo > " + item_dir)
  end
  # Fix: build the status line only after a valid selection. Previously it
  # was built eagerly from ARGV[0] and raised TypeError (nil concatenation)
  # before the usage text could ever print.
  puts "[+] Configuration files successfully generated for " + selection + " ports at " + Time.new.inspect + "."
else
  puts opt_sel_err
end
stupid
#!/usr/bin/env ruby
# Generates masscan configuration files (via `masscan --echo`) for one of
# several named port groups, writing a .conf per include file in data/.
require 'rake'
require 'date'

working_dir = "/home/scanner/brisket/"

# Port groups by service category.
remote_ports  = "22,23,3389,5900"
app_ports     = "21,69,53,389"
ms_ports      = "135,139,445"
mail_ports    = "25,110,995,993,465"
web_ports     = "80,443,8080"
db_ports      = "1433,1521,3306,5432"
# IRC, tor, tcp syslog, DNP3 (SCADA networks)
special_ports = "6667,9050,1514,20000"

rate = "2337" # restriction by the service provider is 4000/second
rate_cmd = "--rate " + rate
cmd = "/usr/local/sbin/masscan"
exclude_file = working_dir + "/masscan/data/exclude.conf" # NOTE(review): currently unused
results_dir = working_dir + "/results/"
data_dir = working_dir + "/data/"
include_file_cmd = " --includefile " + data_dir
conf_dir = working_dir + "/conf/"
dir_date = Date.today.year.to_s + "/" + Date.today.month.to_s + "/" + Date.today.day.to_s + "/"
results_dir_date = results_dir + dir_date
results_out = "-oX " + results_dir_date

# Selection name => ports to scan. Replaces the former 7-way duplicated
# if/elsif chain that repeated the identical Dir.foreach loop.
port_sets = {
  'remote'  => remote_ports,
  'apps'    => app_ports,
  'web'     => web_ports,
  'db'      => db_ports,
  'special' => special_ports,
  'ms'      => ms_ports,
  'mail'    => mail_ports,
}
# 'all' preserves the original ordering: remote,apps,web,db,special,ms,mail.
port_sets['all'] = [remote_ports, app_ports, web_ports, db_ports,
                    special_ports, ms_ports, mail_ports].join(",")

opt_sel_err = "[-] Usage: ./trim.rb <remote|apps|web|db|all>"

## Create the latest conf files
selection = ARGV[0]
ports = port_sets[selection]
if ports
  Dir.foreach(data_dir) do |item|
    next if item == '.' || item == '..'
    item_dir = conf_dir + item.gsub(/(.ip)/, '.conf')
    item_xml = item.gsub(/(.ip)/, '.xml')
    system(cmd + " -p" + ports + include_file_cmd + item + " " + rate_cmd + " " +
           results_out + item_xml + " --echo > " + item_dir)
  end
  # Fix: build the status line only after a valid selection. Previously it
  # was built eagerly from ARGV[0] and raised TypeError (nil concatenation)
  # before the usage text could ever print.
  puts "[+] Configuration files successfully generated for " + selection + " ports at " + Time.new.inspect + "."
else
  puts opt_sel_err
end
class User
include ActiveModel::Model
attr_accessor :name
validates :name, presence: true
# Additional methods can be defined here as usual.
Delete user.rb
|
# Homebrew formula for ViSP 3.0.1 (revision 3).
class Visp < Formula
  desc "Visual Servoing Platform library"
  homepage "https://visp.inria.fr"
  url "https://gforge.inria.fr/frs/download.php/latestfile/475/visp-3.0.1.tar.gz"
  sha256 "8aefd21f30dd4f6d210c59c28704f9e3adf874e3337571a3ae65a65946c94326"
  revision 3

  bottle do
    sha256 "318323c72d5828819c0c292bed90e5143ed19a5258ab7a29756e47a857011a39" => :sierra
    sha256 "f522ea60bad78c29b911bc883151d3709e19ceaf40fb883ee3408a9438bc5530" => :el_capitan
    sha256 "32e6b87d2d53231e9864810556d7128c7ff7a97a70ad5cb2fca13e9c37befe90" => :yosemite
  end

  option :cxx11

  depends_on "cmake" => :build
  depends_on "gsl" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "libdc1394" => :recommended
  depends_on "libpng" => :recommended
  depends_on "libxml2" => :recommended
  depends_on "opencv3" => :recommended
  depends_on "zbar" => :recommended
  depends_on :x11 => :recommended

  # Translate a with/without option into a CMake ON/OFF value.
  def arg_switch(opt)
    build.with?(opt) ? "ON" : "OFF"
  end

  def install
    ENV.cxx11 if build.cxx11?

    # Library-only build: demos, examples, tests and tutorials are disabled.
    args = std_cmake_args + %w[
      -DCMAKE_OSX_DEPLOYMENT_TARGET=
      -DBUILD_DEMOS=OFF
      -DBUILD_EXAMPLES=OFF
      -DBUILD_TESTS=OFF
      -DBUILD_TUTORIALS=OFF
    ]
    args << "-DUSE_CPP11=ON" if build.cxx11?
    # Each optional dependency maps to its matching CMake USE_* switch.
    args << "-DUSE_DC1394=" + arg_switch("libdc1394")
    args << "-DUSE_GSL=" + arg_switch("gsl")
    args << "-DUSE_JPEG=" + arg_switch("jpeg")
    args << "-DUSE_OPENCV=" + arg_switch("opencv3")
    args << "-DUSE_PNG=" + arg_switch("libpng")
    args << "-DUSE_X11=" + arg_switch("x11")
    args << "-DUSE_XML2=" + arg_switch("libxml2")
    args << "-DUSE_ZBAR=" + arg_switch("zbar")

    mkdir "macbuild" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"
    end
  end

  test do
    # Compile a minimal program against the installed library and verify
    # that it reports the formula's own version.
    (testpath/"test.cpp").write <<-EOS.undent
      #include <visp3/core/vpConfig.h>
      #include <iostream>
      int main()
      {
      std::cout << VISP_VERSION_MAJOR << "." << VISP_VERSION_MINOR <<
      "." << VISP_VERSION_PATCH << std::endl;
      return 0;
      }
    EOS
    system ENV.cxx, "test.cpp", "-I#{include}", "-L#{lib}", "-o", "test"
    assert_equal `./test`.strip, version.to_s
  end
end
visp: update 3.0.1_3 bottle for Linuxbrew.
# Homebrew formula for ViSP 3.0.1 (revision 3); adds a Linuxbrew bottle.
class Visp < Formula
  desc "Visual Servoing Platform library"
  homepage "https://visp.inria.fr"
  url "https://gforge.inria.fr/frs/download.php/latestfile/475/visp-3.0.1.tar.gz"
  sha256 "8aefd21f30dd4f6d210c59c28704f9e3adf874e3337571a3ae65a65946c94326"
  revision 3

  bottle do
    sha256 "318323c72d5828819c0c292bed90e5143ed19a5258ab7a29756e47a857011a39" => :sierra
    sha256 "f522ea60bad78c29b911bc883151d3709e19ceaf40fb883ee3408a9438bc5530" => :el_capitan
    sha256 "32e6b87d2d53231e9864810556d7128c7ff7a97a70ad5cb2fca13e9c37befe90" => :yosemite
    sha256 "f43f93b6192a21eec9c0212ef7e6df3a4ad3a76758d5e2c892e1f74157455f3e" => :x86_64_linux
  end

  option :cxx11

  depends_on "cmake" => :build
  depends_on "gsl" => :recommended
  depends_on "jpeg" => :recommended
  depends_on "libdc1394" => :recommended
  depends_on "libpng" => :recommended
  depends_on "libxml2" => :recommended
  depends_on "opencv3" => :recommended
  depends_on "zbar" => :recommended
  depends_on :x11 => :recommended

  # Translate a with/without option into a CMake ON/OFF value.
  def arg_switch(opt)
    build.with?(opt) ? "ON" : "OFF"
  end

  def install
    ENV.cxx11 if build.cxx11?

    # Library-only build: demos, examples, tests and tutorials are disabled.
    args = std_cmake_args + %w[
      -DCMAKE_OSX_DEPLOYMENT_TARGET=
      -DBUILD_DEMOS=OFF
      -DBUILD_EXAMPLES=OFF
      -DBUILD_TESTS=OFF
      -DBUILD_TUTORIALS=OFF
    ]
    args << "-DUSE_CPP11=ON" if build.cxx11?
    # Each optional dependency maps to its matching CMake USE_* switch.
    args << "-DUSE_DC1394=" + arg_switch("libdc1394")
    args << "-DUSE_GSL=" + arg_switch("gsl")
    args << "-DUSE_JPEG=" + arg_switch("jpeg")
    args << "-DUSE_OPENCV=" + arg_switch("opencv3")
    args << "-DUSE_PNG=" + arg_switch("libpng")
    args << "-DUSE_X11=" + arg_switch("x11")
    args << "-DUSE_XML2=" + arg_switch("libxml2")
    args << "-DUSE_ZBAR=" + arg_switch("zbar")

    mkdir "macbuild" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"
    end
  end

  test do
    # Compile a minimal program against the installed library and verify
    # that it reports the formula's own version.
    (testpath/"test.cpp").write <<-EOS.undent
      #include <visp3/core/vpConfig.h>
      #include <iostream>
      int main()
      {
      std::cout << VISP_VERSION_MAJOR << "." << VISP_VERSION_MINOR <<
      "." << VISP_VERSION_PATCH << std::endl;
      return 0;
      }
    EOS
    system ENV.cxx, "test.cpp", "-I#{include}", "-L#{lib}", "-o", "test"
    assert_equal `./test`.strip, version.to_s
  end
end
|
# Homebrew formula for xrmc 6.5.0 (revision 1).
class Xrmc < Formula
  homepage "https://github.com/golosio/xrmc"
  desc "Monte Carlo simulation of X-ray imaging and spectroscopy experiments"
  url "http://lvserver.ugent.be/xrmc/files/xrmc-6.5.0.tar.gz"
  mirror "https://xrmc.s3.amazonaws.com/xrmc-6.5.0.tar.gz"
  sha256 "4995eaaf3b4583d443d0cf2003d73d1855b443938e431a4f758a607f540e026a"
  revision 1

  bottle do
    sha256 "c65c774606b4f4828b9ecaa9da78fb294c943ff95496288b9f75640cb2b10f53" => :yosemite
    sha256 "a90b22ee5bb19e9c2aff0e342fae61f66323608334b932b8be23023e20201d40" => :mavericks
    sha256 "cc9fd9634165a26fcadfc8a7ec9632fea2122c5458db368f6bc111fe4e6ccaea" => :mountain_lion
  end

  depends_on "xraylib"
  depends_on "pkg-config" => :build
  needs :openmp
  depends_on "xmi-msim" => :optional

  option "with-check", "Run build-time tests (may take a long time)"

  fails_with :llvm do
    cause <<-EOS.undent
      llvm-gcc's OpenMP does not support the collapse statement,
      required to build xrmc
    EOS
  end

  def install
    args = %W[
      --disable-dependency-tracking
      --disable-silent-rules
      --prefix=#{prefix}
      --enable-openmp
    ]
    # xmi-msim support is opt-in via the optional dependency.
    args << ((build.with? "xmi-msim") ? "--enable-xmi-msim" : "--disable-xmi-msim")

    system "./configure", *args
    system "make"
    system "make", "check" if build.with? "check"
    system "make", "install"
  end

  test do
    # Run the bundled cylindrical-cell example end to end.
    cp Dir.glob("#{share}/examples/xrmc/cylind_cell/*"), "."
    system "#{bin}/xrmc", "input.dat"
  end
end
xrmc: rebuild against gcc6
Closes #4231.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula for xrmc 6.5.0 (revision 2, rebuilt against gcc6).
class Xrmc < Formula
  desc "Monte Carlo simulation of X-ray imaging and spectroscopy experiments"
  homepage "https://github.com/golosio/xrmc"
  url "http://lvserver.ugent.be/xrmc/files/xrmc-6.5.0.tar.gz"
  mirror "https://xrmc.s3.amazonaws.com/xrmc-6.5.0.tar.gz"
  sha256 "4995eaaf3b4583d443d0cf2003d73d1855b443938e431a4f758a607f540e026a"
  revision 2

  bottle do
    sha256 "c65c774606b4f4828b9ecaa9da78fb294c943ff95496288b9f75640cb2b10f53" => :yosemite
    sha256 "a90b22ee5bb19e9c2aff0e342fae61f66323608334b932b8be23023e20201d40" => :mavericks
    sha256 "cc9fd9634165a26fcadfc8a7ec9632fea2122c5458db368f6bc111fe4e6ccaea" => :mountain_lion
  end

  option "without-test", "Don't run build-time tests (may take a long time)"

  needs :openmp

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  depends_on "xmi-msim" => :optional

  # xmi-msim requires the Fortran-enabled xraylib build.
  if build.with? "xmi-msim"
    depends_on "xraylib" => "with-fortran"
  else
    depends_on "xraylib"
  end

  fails_with :llvm do
    cause <<-EOS.undent
      llvm-gcc's OpenMP does not support the collapse statement,
      required to build xrmc
    EOS
  end

  def install
    # Examples install under share/ without the extra "xrmc" path segment
    # (datarootdir already points at pkgshare below).
    inreplace Dir.glob("{examples,test}/*/Makefile.am"),
      "$(datadir)/examples/xrmc/", "$(datadir)/examples/"

    args = %W[
      --disable-dependency-tracking
      --disable-silent-rules
      --prefix=#{prefix}
      --enable-openmp
      --docdir=#{doc}
      --datarootdir=#{pkgshare}
    ]
    if build.with? "xmi-msim"
      args << "--enable-xmi-msim"
    else
      args << "--disable-xmi-msim"
    end

    # Regenerate the build system after the inreplace above.
    system "autoreconf", "-fiv"
    system "./configure", *args
    system "make"
    system "make", "check" if build.with? "test"
    system "make", "install"
  end

  test do
    # Run the bundled cylindrical-cell example end to end.
    cp_r (pkgshare/"examples/cylind_cell").children, testpath
    system bin/"xrmc", "input.dat"
  end
end
|
# -*- encoding: utf-8 -*-
# Gemspec for the lightning commandline framework.
require 'rubygems' unless Object.const_defined?(:Gem)
require File.dirname(__FILE__) + "/lib/lightning/version"

Gem::Specification.new do |s|
  s.name        = "lightning"
  s.version     = Lightning::VERSION
  s.authors     = ["Gabriel Horner"]
  s.email       = "gabriel.horner@gmail.com"
  s.homepage    = "http://tagaholic.me/lightning/"
  s.summary = "Lightning is a commandline framework that generates shell functions which wrap around commands to autocomplete and translate full paths by their basenames."
  s.description = "Lightning is a commandline framework that lets users wrap commands with shell functions that are able to refer to any filesystem path by its basename. To achieve this, a group of paths to be translated are defined with shell globs. These shell globs, known as a lightning _bolt_, are then applied to commands to produce functions. In addition to translating basenames to full paths, lightning _functions_ can autocomplete these basenames, resolve conflicts if they have the same name, leave any non-basename arguments untouched, and autocomplete directories above and below a basename. To make bolts shareable between users and functions easier to create, lightning has _generators_. A _generator_ generates filesystem-specific globs for a bolt. Lightning comes with some default generators. Users can make their own generators with generator plugins placed under ~/.lightning/generators/."
  s.required_rubygems_version = ">= 1.3.6"
  # NOTE(review): rubyforge_project and has_rdoc are deprecated attributes in
  # modern RubyGems; harmless, but removable.
  s.rubyforge_project = 'tagaholic'
  s.executables = %w{lightning lightning-complete lightning-translate}
  s.has_rdoc = 'yard'
  s.rdoc_options = ['--title', "Lightning #{Lightning::VERSION} Documentation"]
  # Test-only dependencies (bacon + rr stack).
  s.add_development_dependency 'bacon', '>= 1.1.0'
  s.add_development_dependency 'rr', '>= 1.0'
  s.add_development_dependency 'bacon-bits'
  s.add_development_dependency 'bacon-rr'
  # Gem contents: code, tests, docs, executables, extensions, manifests.
  s.files = Dir.glob(%w[{lib,test}/**/*.rb bin/* [A-Z]*.{txt,rdoc} ext/**/*.{rb,c} **/deps.rip]) + %w{Rakefile .gemspec}
  s.files += Dir.glob(['test/*.yml', 'man/*'])
  s.extra_rdoc_files = ["README.rdoc", "LICENSE.txt"]
  s.license = 'MIT'
end
oh rubyforge
# -*- encoding: utf-8 -*-
# Gemspec for the lightning commandline framework (rubyforge_project removed).
require 'rubygems' unless Object.const_defined?(:Gem)
require File.dirname(__FILE__) + "/lib/lightning/version"

Gem::Specification.new do |s|
  s.name        = "lightning"
  s.version     = Lightning::VERSION
  s.authors     = ["Gabriel Horner"]
  s.email       = "gabriel.horner@gmail.com"
  s.homepage    = "http://tagaholic.me/lightning/"
  s.summary = "Lightning is a commandline framework that generates shell functions which wrap around commands to autocomplete and translate full paths by their basenames."
  s.description = "Lightning is a commandline framework that lets users wrap commands with shell functions that are able to refer to any filesystem path by its basename. To achieve this, a group of paths to be translated are defined with shell globs. These shell globs, known as a lightning _bolt_, are then applied to commands to produce functions. In addition to translating basenames to full paths, lightning _functions_ can autocomplete these basenames, resolve conflicts if they have the same name, leave any non-basename arguments untouched, and autocomplete directories above and below a basename. To make bolts shareable between users and functions easier to create, lightning has _generators_. A _generator_ generates filesystem-specific globs for a bolt. Lightning comes with some default generators. Users can make their own generators with generator plugins placed under ~/.lightning/generators/."
  s.required_rubygems_version = ">= 1.3.6"
  s.executables = %w{lightning lightning-complete lightning-translate}
  # NOTE(review): has_rdoc is deprecated in modern RubyGems; removable.
  s.has_rdoc = 'yard'
  s.rdoc_options = ['--title', "Lightning #{Lightning::VERSION} Documentation"]
  # Test-only dependencies (bacon + rr stack).
  s.add_development_dependency 'bacon', '>= 1.1.0'
  s.add_development_dependency 'rr', '>= 1.0'
  s.add_development_dependency 'bacon-bits'
  s.add_development_dependency 'bacon-rr'
  # Gem contents: code, tests, docs, executables, extensions, manifests.
  s.files = Dir.glob(%w[{lib,test}/**/*.rb bin/* [A-Z]*.{txt,rdoc} ext/**/*.{rb,c} **/deps.rip]) + %w{Rakefile .gemspec}
  s.files += Dir.glob(['test/*.yml', 'man/*'])
  s.extra_rdoc_files = ["README.rdoc", "LICENSE.txt"]
  s.license = 'MIT'
end
|
Add gemspec file.
# encoding: utf-8
require 'yaml'

module DotRuby
  # Builds a Gem::Specification from a project's `.ruby` metadata file,
  # using a MANIFEST file or the SCM file listing to determine gem contents.
  class GemSpec
    # For which revision of .ruby is this gemspec intended?
    REVISION = 0 unless defined?(REVISION)

    # Glob patterns locating the conventional gem file groups.
    PATTERNS = {
      :bin_files  => 'bin/*',
      :lib_files  => 'lib/{**/}*.rb',
      :ext_files  => 'ext/{**/}extconf.rb',
      :doc_files  => '*.{txt,rdoc,md,markdown,tt,textile}',
      :test_files => '{test/{**/}*_test.rb,spec/{**/}*_spec.rb}'
    } unless defined?(PATTERNS)

    # Convenience: build and return the Gem::Specification in one call.
    def self.instance
      new.to_gemspec
    end

    # Parsed `.ruby` metadata (a Hash).
    attr :metadata
    # Path to the manifest file, if one exists.
    attr :manifest

    # Load `.ruby` and locate a manifest file (case-insensitive match).
    # Warns but continues when the metadata revision does not match.
    def initialize
      @metadata = YAML.load_file('.ruby')
      @manifest = Dir.glob('manifest{,.txt}', File::FNM_CASEFOLD).first

      if @metadata['revision'].to_i != REVISION
        warn "You have the wrong revision. Trying anyway..."
      end
    end

    # Detect the source-control system in use (currently only git).
    def scm
      @scm ||= \
        case
        when File.directory?('.git')
          :git
        end
    end

    # Resolve the gem's file list: manifest first, then `git ls-files`,
    # falling back to a recursive glob. Only regular files are kept.
    def files
      @files ||= \
        #glob_files[patterns[:files]]
        case
        when manifest
          File.readlines(manifest).
            map{ |line| line.strip }.
            reject{ |line| line.empty? || line[0,1] == '#' }
        when scm == :git
          `git ls-files -z`.split("\0")
        else
          Dir.glob('{**/}{.*,*}') # TODO: be more specific using standard locations ?
        end.select{ |path| File.file?(path) }
    end

    # Files matching +pattern+ that are also part of the gem's file list.
    def glob_files(pattern)
      Dir.glob(pattern).select { |path|
        File.file?(path) && files.include?(path)
      }
    end

    # Accessor for the glob pattern table.
    def patterns
      PATTERNS
    end

    # Executable names (basenames of files under bin/).
    def executables
      @executables ||= \
        glob_files(patterns[:bin_files]).map do |path|
          File.basename(path)
        end
    end

    # Extension script names (basenames of ext/**/extconf.rb files).
    def extensions
      @extensions ||= \
        glob_files(patterns[:ext_files]).map do |path|
          File.basename(path)
        end
    end

    # Gem name; falls back to a snake_cased version of the title.
    def name
      metadata['name'] || metadata['title'].downcase.gsub(/\W+/,'_')
    end

    # Translate the `.ruby` metadata into a Gem::Specification.
    def to_gemspec
      Gem::Specification.new do |gemspec|
        gemspec.name = name
        gemspec.version = metadata['version']
        gemspec.summary = metadata['summary']
        gemspec.description = metadata['description']

        # Collect author names and (optionally) emails.
        metadata['authors'].each do |author|
          gemspec.authors << author['name']

          if author.has_key?('email')
            if gemspec.email
              gemspec.email << author['email']
            else
              gemspec.email = [author['email']]
            end
          end
        end

        gemspec.licenses = metadata['copyrights'].map{ |c| c['license'] }.compact

        # Requirements use a version-suffix shorthand:
        #   "1.0+" -> ">= 1.0", "1.0-" -> "< 1.0", "1.0~" -> "~> 1.0"
        metadata['requirements'].each do |req|
          name    = req['name']
          version = req['version']
          groups  = req['groups'] || []

          case version
          when /^(.*?)\+$/
            version = ">= #{$1}"
          when /^(.*?)\-$/
            version = "< #{$1}"
          when /^(.*?)\~$/
            version = "~> #{$1}"
          end

          if groups.empty? or groups.include?('runtime')
            # populate runtime dependencies
            if gemspec.respond_to?(:add_runtime_dependency)
              gemspec.add_runtime_dependency(name,*version)
            else
              gemspec.add_dependency(name,*version)
            end
          else
            # populate development dependencies
            if gemspec.respond_to?(:add_development_dependency)
              gemspec.add_development_dependency(name,*version)
            else
              gemspec.add_dependency(name,*version)
            end
          end
        end

        # convert external dependencies into a requirements
        if metadata['external_dependencies']
          ##gemspec.requirements = [] unless metadata['external_dependencies'].empty?
          metadata['external_dependencies'].each do |req|
            gemspec.requirements << req.to_s
          end
        end

        # determine homepage from resources
        homepage = metadata['resources'].find{ |key, url| key =~ /^home/ }
        gemspec.homepage = homepage.last if homepage

        gemspec.require_paths = metadata['load_path'] || ['lib']
        gemspec.post_install_message = metadata['install_message']

        # RubyGems specific metadata
        gemspec.files = files
        gemspec.extensions = extensions
        gemspec.executables = executables

        # default_executable was removed from RubyGems 1.7+.
        if Gem::VERSION < '1.7.'
          gemspec.default_executable = gemspec.executables.first
        end

        gemspec.test_files = glob_files(patterns[:test_files])

        # Let a .document file control rdoc inputs when present.
        unless gemspec.files.include?('.document')
          gemspec.extra_rdoc_files = glob_files(patterns[:doc_files])
        end
      end
    end
  end #class GemSpec
end

DotRuby::GemSpec.instance
|
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# Gemspec for RDF.rb; version and release date come from the VERSION file.
GEMSPEC = Gem::Specification.new do |gem|
  gem.version            = File.read('VERSION').chomp
  gem.date               = File.mtime('VERSION').strftime('%Y-%m-%d')

  gem.name               = 'rdf'
  gem.homepage           = 'http://rdf.rubyforge.org/'
  gem.license            = 'Public Domain' if gem.respond_to?(:license=)
  gem.summary            = 'A Ruby library for working with Resource Description Framework (RDF) data.'
  gem.description        = 'RDF.rb is a pure-Ruby library for working with Resource Description Framework (RDF) data.'
  # NOTE(review): rubyforge_project, default_executable and has_rdoc are
  # deprecated attributes in modern RubyGems.
  gem.rubyforge_project  = 'rdf'

  gem.authors            = ['Arto Bendiken', 'Ben Lavender']
  gem.email              = 'arto.bendiken@gmail.com'

  gem.platform           = Gem::Platform::RUBY
  gem.files              = %w(AUTHORS README UNLICENSE VERSION bin/rdf etc/doap.nt) + Dir.glob('lib/**/*.rb')
  gem.bindir             = %q(bin)
  gem.executables        = %w(rdf)
  gem.default_executable = gem.executables.first
  gem.require_paths      = %w(lib)
  gem.extensions         = %w()
  gem.test_files         = %w()
  gem.has_rdoc           = false

  gem.required_ruby_version      = '>= 1.8.2'
  gem.requirements               = []
  gem.add_development_dependency 'rdf-spec', '>= 0.1.4'
  gem.add_development_dependency 'rspec',    '>= 1.3.0'
  gem.add_development_dependency 'yard' ,    '>= 0.5.3'
  gem.add_runtime_dependency     'addressable', '>= 2.1.1'
  gem.post_install_message       = nil
end
Bumped the RDF::Spec dependency.
#!/usr/bin/env ruby -rubygems
# -*- encoding: utf-8 -*-
# Gemspec for RDF.rb; bumps the rdf-spec development dependency to 0.1.6.
GEMSPEC = Gem::Specification.new do |gem|
  gem.version            = File.read('VERSION').chomp
  gem.date               = File.mtime('VERSION').strftime('%Y-%m-%d')

  gem.name               = 'rdf'
  gem.homepage           = 'http://rdf.rubyforge.org/'
  gem.license            = 'Public Domain' if gem.respond_to?(:license=)
  gem.summary            = 'A Ruby library for working with Resource Description Framework (RDF) data.'
  gem.description        = 'RDF.rb is a pure-Ruby library for working with Resource Description Framework (RDF) data.'
  # NOTE(review): rubyforge_project, default_executable and has_rdoc are
  # deprecated attributes in modern RubyGems.
  gem.rubyforge_project  = 'rdf'

  gem.authors            = ['Arto Bendiken', 'Ben Lavender']
  gem.email              = 'arto.bendiken@gmail.com'

  gem.platform           = Gem::Platform::RUBY
  gem.files              = %w(AUTHORS README UNLICENSE VERSION bin/rdf etc/doap.nt) + Dir.glob('lib/**/*.rb')
  gem.bindir             = %q(bin)
  gem.executables        = %w(rdf)
  gem.default_executable = gem.executables.first
  gem.require_paths      = %w(lib)
  gem.extensions         = %w()
  gem.test_files         = %w()
  gem.has_rdoc           = false

  gem.required_ruby_version      = '>= 1.8.2'
  gem.requirements               = []
  gem.add_development_dependency 'rdf-spec', '>= 0.1.6'
  gem.add_development_dependency 'rspec',    '>= 1.3.0'
  gem.add_development_dependency 'yard' ,    '>= 0.5.3'
  gem.add_runtime_dependency     'addressable', '>= 2.1.1'
  gem.post_install_message       = nil
end
|
#!/usr/bin/ruby
# encoding: utf-8
require "optparse"
require "pp"

begin
  require_relative "lib/db.rb"
rescue ScriptError, StandardError => e
  # Fix: the former `rescue Exception` also swallowed SignalException and
  # SystemExit. ScriptError still covers the LoadError a failed
  # require_relative raises; StandardError covers DB/credential errors.
  $stderr.puts "Please fix your database access / creds in config.json"
  exit 1
end
require_relative "lib/fetcher.rb"
require_relative "lib/logger.rb"
require_relative "lib/utils.rb"
# Error strings YouTube returns for videos that are permanently gone.
# NOTE(review): globals -- presumably also read by the sites/* or lib/*
# modules loaded above; confirm before refactoring into constants.
$YT_DELETED_VIDEO_REASONS = [
  "This video does not exist.",
  "This video is unavailable.",
  "This video is not available.",
  "The YouTube account associated with this video has been terminated due to multiple third-party notifications of copyright infringement",
  "This video has been removed by the user",
  "This video has been removed for violating YouTube's Terms of Service.",
  "due to a copyright claim by a third party",
  "This video is no longer available because the YouTube account associated with this video has been terminated.",
  "This video is private",
  "This video is no longer available because the uploader has closed their YouTube account",
  "This video has been removed for violating YouTube's policy on nudity or sexual content.",
  "This video has been removed for violating YouTube's policy on violent or graphic content.",
  "This video has been removed for violating YouTube's policy on harassment and bullying."
]

# Patterns indicating a video is geo-blocked rather than deleted.
$YT_COUNTRY_BLOCKED_MSG = [
  /blocked it in your country/,
  /not available on this country domain/,
  /This video is not available in your country/,
  /This video contains content from .* who has blocked it on copyright grounds/,
]
# Raised for youtube-dl failures that the downloader logs and recovers from.
# Subclasses StandardError (not Exception) so a bare `rescue` catches it and
# it does not mask SignalException / SystemExit.
class YTDLException < StandardError
end
# Pipeline orchestrator. Wires together three kinds of worker threads:
#  - fetchers: scrape the configured sites for YouTube video ids,
#  - informer: fill in video metadata (YouTube Data API key or scraping),
#  - downloader: run youtube-dl, tag the result and move it into place.
# State is shared through the DBUtils layer and the global $CONF hash.
class Main
  require "fileutils"
  require "json"
  # taglib bindings are vendored under lib/taglib.
  $: << File.join(File.dirname(__FILE__),"lib/taglib")
  require "taglib"

  # Builds the combined logger (stdout + optional logfile) and loads the
  # JSON configuration into the global $CONF.
  def initialize(options)
    @log = MyLogger.new()
    stdout_logger = Logger.new(STDOUT)
    stdout_logger.level = options[:debug] ? Logger::DEBUG : Logger::INFO
    @log.add_logger(stdout_logger)
    @arguments = options
    load_conf(@arguments[:config])
    # Command-line --logfile wins over the config file's "logfile" entry.
    logfile = @arguments[:logfile] || $CONF["logfile"]
    if logfile
      logger = Logger.new(logfile)
      logger.level = Logger::DEBUG
      @log.add_logger(logger)
    end
    @threads=[]
  end

  # Requires each site scraper listed under $CONF["sites"] from sites/.
  # Load failures only produce a warning.
  def load_sites
    $CONF["sites"].each do |site|
      # NOTE(review): `filename` stays nil when the entry already ends in
      # ".rb" (the assignment is guarded by `unless`), so File.join would
      # raise TypeError for such entries — confirm intended input format.
      filename = site+".rb" unless site.end_with?(".rb")
      path = File.join("sites",filename)
      begin
        require_relative path
      rescue LoadError=>e
        @log.warn "Cannot load #{path}"
      end
    end
  end

  # Parses the JSON config file into the global $CONF; exits when missing,
  # re-raises on parse/read errors.
  def load_conf(file)
    unless File.exist?(file)
      @log.err "Couldn't find config file #{file}."
      exit 1
    end
    begin
      $CONF = JSON.parse(File.read(file))
    rescue Exception => e
      @log.err "Problem opening config file #{file}#"
      raise e
    end
  end

  # Starts the single fetcher thread: every `tick` seconds it polls each
  # registered site whose per-site wait (jittered by up to +10% so polling
  # looks less bot-like) has elapsed, and records any found video ids via
  # DBUtils. Returns the Thread.
  def start_fetcher_threads()
    load_sites()
    if Fetcher.sites.empty?
      @log.err "Didn't find any site to parse for youtube URL."
      @log.err "Add some in config.json, maybe?"
      exit 1
    end
    @fetcher_threads = []
    tick = 5 # Verify everything every tick
    t = Thread.new{
      @log.info "Starting fetcher thread"
      while true
        now = Time.now().to_i
        # Retry when we've waited "wait" time + up to 10% of wait, to appear not too bot-y
        Fetcher.sites.select{|site| now - site.last_check > (site.wait*(1 + (rand() / 10))) }.each do |site|
          count = 0
          begin
            site.get_yids().each { |yid|
              #@log.info "#{site} found #{yid}"
              DBUtils.add_yid(yid, site.name)
              count += 1
            }
            @log.info "#{site} found #{count} videos. Will retry in #{site.wait} seconds" unless site.wait < 30
          rescue SocketError => e
            # Internet is down, let's wait for a bit
            @log.err "Failed to fetch yids from #{site}. Internet or your proxy is down, let's retry later"
          rescue Exception => e
            # TODO don't break but send an email or something
            @log.err "Failed to fetch yids from #{site}"
          end
          site.last_check = now
        end
        sleep tick
      end
    }
    t.abort_on_exception = true
    return t
  end

  # Persists one metadata record: on "ok" saves infos and thumbnails,
  # otherwise records the deletion reason and timestamp.
  def update_video_infos(infos)
    yid = infos["yid"]
    if infos["status"] == "ok"
      DBUtils.update_video_infos_from_hash(yid, infos["infos"])
      DBUtils.save_thumbs(yid, infos["thumbs"])
    else
      reason = YoutubeUtils.get_reason(yid)
      DBUtils.update_video_infos_from_hash(yid, {downloaded: reason, deletion: Time.now()})
    end
  end

  # Starts the informer thread. With an API key it batches lookups 10 ids
  # at a time; without one it scrapes one id every 5 seconds. Videos whose
  # duration falls outside the configured min/max are marked downloaded so
  # the downloader skips them. Returns the Thread.
  def start_informer_threads()
    @informer = Thread.new {
      @log.info "Starting informer thread"
      Thread.current[:name]="Informer"
      while true
        begin
          count = 0
          if $CONF["youtube_key"] and $CONF["youtube_key"].size > 5
            DBUtils.get_all_yids_without_infos.each_slice(10).to_a.each do |yid_slice|
              YoutubeUtils.get_batch_infos_with_key(yid_slice, $CONF["youtube_key"]).each do |infos|
                yid = infos["yid"]
                if infos["infos"][:duration] < $CONF["download"]["minimum_duration"]
                  # @log.info("#{infos["infos"][:duration]} < #{$CONF["download"]["minimum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                  DBUtils.set_downloaded(yid)
                  infos["infos"][:bien] = false
                end
                if infos["infos"][:duration] > $CONF["download"]["maximum_duration"]
                  # @log.info("#{infos["infos"][:duration]} > #{$CONF["download"]["maximum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                  DBUtils.set_downloaded(yid)
                  infos["infos"][:bien] = false
                end
                update_video_infos(infos)
                count+=1
              end
            end
          else
            DBUtils.get_all_yids_without_infos.each do |yid|
              update_video_infos(YoutubeUtils.get_infos_without_key(yid))
              count+=1
              sleep 5 # We don't want to hit the youtube.com website too much and be seen too bot-y
            end
          end
          @log.info "Informer updated #{count} videos infos" unless count == 0
        rescue Net::OpenTimeout, SocketError
          @log.warn("woops, youtube is slow today")
          sleep 10
        end
        sleep 5
      end
    }
    @informer.abort_on_exception = true
    return @informer
  end

  # Embeds `image_data` (JPEG bytes) as cover art into an MP4 file via
  # taglib; non-MP4 files are skipped with a warning.
  def add_cover(fmp4,image_data)
    if not fmp4 =~ /\.mp4$/
      @log.warn "ERROR: file not MP4, not adding nice tags"
    else
      cover_art = TagLib::MP4::CoverArt.new(TagLib::MP4::CoverArt::JPEG, image_data)
      item = TagLib::MP4::Item.from_cover_art_list([cover_art])
      TagLib::MP4::File.open(fmp4) do |mp4|
        mp4.tag.item_list_map.insert('covr', item)
        mp4.save
      end
    end
  end

  # Marks `yid` as a youtube-dl failure and tells the operator to update
  # their youtube-dl.
  def ytdlfail(yid, errmsg)
    DBUtils.set_downloaded(yid, DBUtils::YTDLFAIL)
    @log.warn "The current version of youtube-dl failed to download #{yid} with error #{errmsg}."
    @log.warn "Please update your youtube-dl version."
    @log.warn "You can also re-run the last youtube-dl command with all the verbose flags to debug"
  end

  # Classifies a youtube-dl error message and records the outcome:
  # geo-blocks are queued for proxy retry, permanent YouTube errors are
  # marked as such, transient errors marked for retry, anything unknown
  # raises YTDLException. `tried` is the hash of proxies already attempted
  # (or false on the first attempt).
  def do_error(error_message, yid, proxy_to_try, tried=false)
    @log.debug "Handling error #{error_message}"
    case error_message
    when /ERROR: (.*)$/i
      #when /#{yid}: YouTube said: (.*)$/i
      yt_error = $1
      case yt_error
      when Regexp.union($YT_COUNTRY_BLOCKED_MSG)
        # Geo-block: remember which proxy was tried so the next attempt
        # picks a different one.
        if tried
          DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
        else
          DBUtils.set_downloaded(yid, "RETRY: {}")
        end
      when /Playback on other websites has been disabled by the video owner./
        err_msg = "Youtube said '#{yt_error}'"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      # when /content too short/
      # let's just retry later
      # ytdlfail(yid, yt_error)
      when /Please sign in to view this video./
        _msg = ""
        if $CONF["youtube_username"]
          # WTF we are signed in
          _msg="#{DBUtils::YTDLFAIL} #{yt_error}"
        else
          _msg="#{DBUtils::YTDLFAIL} need credentials"
        end
        @log.warn _msg
        DBUtils.set_downloaded(yid, _msg)
      when Regexp.union($YT_DELETED_VIDEO_REASONS)
        # Unrecoverable error, videos sent to Youtube Limbo.
        err_msg = "Youtube said '#{yt_error}', deleting"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      when /The uploader has not made this video available in your country/
        if tried
          DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
        else
          DBUtils.set_downloaded(yid, "RETRY: {}")
        end
      else
        raise YTDLException.new("Unknown YouTube error '#{yt_error}'")
      end
    when /Signature extraction failed/
      ytdlfail(yid, error_message)
      return
    when /would lead to an infinite loop/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /Connection reset by peer/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /content too short/i
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /This live stream recording is not available/
      ytdlfail(yid, error_message)
      return
    else
      DBUtils.set_downloaded(yid, "#{DBUtils::YTDLFAIL} #{error_message}")
      raise YTDLException.new("Unknown youtube-dl error '#{error_message}'")
    end
  end

  # Downloads one video into the tmp dir (optionally through a not-yet-tried
  # proxy), then post-processes: extracts/embeds a cover image and moves the
  # file to the destination dir, recording its name and size. Returns nil.
  def do_download(yid)
    video_file = nil
    Dir.chdir($CONF["download"]["tmp_dir"])
    available_proxies = $CONF["download"]["proxies"]
    proxy_cmd = ""
    tried = DBUtils.get_retried(yid)
    if tried
      @log.info "We need to retry with a proxy. Already tried: #{tried}"
      available_proxies = available_proxies.dup.delete_if {|k,_| tried.key?(k)}
      if available_proxies.empty?
        @log.warn "No more proxy to try =("
        # TODO mark the download accordingly
        return
      end
      # Pick one random untried proxy; proxy_to_try stays nil when no
      # retry is needed (do_error tolerates that).
      proxy_to_try = [available_proxies.to_a.sample()].to_h
      proxy_cmd = "--proxy #{proxy_to_try.first[1]}"
    end
    command = "#{@youtube_dl_cmd} #{proxy_cmd} https://www.youtube.com/watch?v=#{yid} 2>&1"
    @log.debug command
    DBUtils.set_downloaded(yid, msg=DBUtils::DLING)
    ytdl_msg = nil
    IO.popen(command) do |io|
      ytdl_msg = io.read.split("\n").join(" ")
    end
    @log.debug ytdl_msg
    case ytdl_msg
    when /error: (.+)$/i
      do_error(ytdl_msg, yid, proxy_to_try, tried)
      return nil
    when /WARNING: (.+)$/
      warn = $1
      if warn!=""
        @log.warn warn unless warn=~/Your copy of avconv is outdated, update avconv to version/
        if warn=~/unable to log in: .*password/i
          warn = "Use a webbrowser to connect to the YT Account, which was probabbly flagged as bot/spam"
          raise warn
        end
      end
    when /has already been downloaded and merged/
      # continue
    when ""
      # Continue
    else
      raise Exception.new("WTF #{ytdl_msg}")
    end
    @log.success "Downloading finished, now post processing"
    output_files = Dir.glob("*#{yid}*",File::FNM_DOTMATCH)
    if output_files.size > 2
      pp output_files
      raise "Too many output files in #{`pwd`}"
    end
    video_file = output_files.reject{ |f| f=~/\.jpg$/ }[0]
    jpg_file = output_files.select{|f| f=~/\.jpg$/}[0]
    # NOTE(review): when no .jpg was produced AND @video_converter_cmd is
    # nil, jpg_file is still nil here and File.exist?(nil) below raises
    # TypeError — verify this path.
    if not jpg_file or not File.exist?(jpg_file)
      if @video_converter_cmd
        `#{@video_converter_cmd} -i \"#{video_file}\" -vframes 1 -f image2 \"#{jpg_file}\"`
      end
    end
    if File.exist?(jpg_file)
      add_cover(video_file, File.read(jpg_file))
      File.delete(jpg_file)
    end
    FileUtils.mv(video_file, $CONF["download"]["destination_dir"])
    video_file = File.join($CONF["download"]["destination_dir"], video_file)
    @log.success "PostProcessing #{yid} over."
    DBUtils.set_downloaded(yid)
    file_size = File.stat(video_file).size.to_i
    DBUtils.update_video_infos_from_hash(yid,{file: File.basename(video_file), size: file_size})
    return nil
  end

  # Validates the youtube-dl and video-converter binaries, assembles the
  # youtube-dl command line (extra args + optional credentials), then starts
  # the downloader thread which pops eligible ids from the DB and downloads
  # them one at a time. Returns the Thread.
  def start_downloader_threads()
    FileUtils.mkdir_p($CONF["download"]["destination_dir"])
    FileUtils.mkdir_p($CONF["download"]["tmp_dir"])
    @youtube_dl_cmd = $CONF["download"]["youtube_dl_cmd"] || `which youtube-dl`.strip()
    if @youtube_dl_cmd == ""
      @log.err "Please update \"youtube_dl_cmd\" in config.json to your local installation of youtube-dl, or remove that key altogether, to use the one in your PATH"
      exit 1
    else
      begin
        # youtube-dl versions look like dates (e.g. 2016.01.01).
        res = `#{@youtube_dl_cmd} --version | egrep "^20[0-9.]+$"`
        raise unless res=~/^20[0-9.]+$/
      rescue Exception => e
        @log.err "'#{@youtube_dl_cmd}' is not a valid youtube-dl binary"
        exit
      end
    end
    #
    # TODO move to "download"?
    if not $CONF["youtube_username"]
      @log.warn "You have not set a Youtube username in config.json."
      @log.warn "You won't be able to download '18+' videos."
    end
    @video_converter_cmd = $CONF["download"]["video_converter_cmd"] || "avconv"
    begin
      res = `#{@video_converter_cmd} -version 2>&1 | egrep "Copyright"`
      raise unless res=~/developers/
    rescue Exception => e
      @log.warn "'#{@video_converter_cmd}' is not a valid video conversion command (use ffmpeg or avconv)"
      @video_converter_cmd = nil
    end
    if $CONF["download"]["youtube_dl_extra_args"]
      @youtube_dl_cmd << " " << $CONF["download"]["youtube_dl_extra_args"]
    end
    if $CONF["youtube_username"]
      @youtube_dl_cmd << " -u \"#{$CONF['youtube_username']}\""
      @youtube_dl_cmd << " -p \"#{$CONF['youtube_password']}\""
    end
    if not ($CONF["youtube_key"] and $CONF["youtube_key"].size > 5)
      @log.warn "You have not set a Youtube API key in config.json."
    end
    # TODO have more than 1 ?
    @downloader = Thread.new {
      @log.info "Starting downloader thread"
      while true
        yid = DBUtils.pop_yid_to_download(minimum_duration: $CONF["download"]["minimum_duration"],
                                          maximum_duration: $CONF["download"]["maximum_duration"])
        if yid
          # do_download chdirs into the tmp dir; restore cwd afterwards.
          cur_dir=Dir.pwd()
          begin
            do_download(yid)
            nb_to_dl = DBUtils.get_nb_to_dl()
            @log.info "Still #{nb_to_dl} videos to download"
          rescue YTDLException => e
            @log.err "Exception when downloading #{yid}: #{e.message}"
          rescue Exception => e
            @log.err "Exception when downloading #{yid}"
            raise e
          end
          Dir.chdir(cur_dir)
        else
          @log.debug "Nothing worthy of downloading"
          sleep 60
        end
      end
      @log.debug "downloader thread ded =("
      sleep 1
    }
    @downloader.abort_on_exception = true
    return @downloader
  end

  # Placeholder for a local-file watcher thread.
  # NOTE(review): the inner `while true ... end` has an empty body, so this
  # thread busy-spins at 100% CPU and the `sleep 10` is unreachable.
  def start_local_downloaded_threads()
    @local_downloader = Thread.new{
      #Inotify stuff
      while true
      end
      sleep 10
    }
    @local_downloader.abort_on_exception = true
    return @local_downloader
  end

  # Entry point: cleans stale download states, re-queues old failures, then
  # starts whichever worker threads were requested and joins them forever.
  def go()
    DBUtils.clean_dl()
    failed_dl_vids = DBUtils.get_ytdlfail().size
    if failed_dl_vids > 0
      @log.warn "You have #{failed_dl_vids} videos that youtube-dl couldn't download."
    end
    DBUtils.retry_old_failed_videos()
    @threads << start_informer_threads() if @arguments[:inform]
    @threads << start_fetcher_threads() if @arguments[:fetch]
    @threads << start_downloader_threads() if @arguments[:download]
    @threads.each {|t| t.join()}
  end
end
# Entry point: build the orchestrator from the parsed options and run it.
def main(options)
  Main.new(options).go()
end
# Exit cleanly on Ctrl-C.
trap("INT"){
  # TODO
  # remove ytdl temps,
  exit
}

# Defaults: run all three roles with the local config.json.
options = {
  config: "config.json",
  download: true,
  fetch: true,
  inform: true
}

OptionParser.new do |opts|
  # Set when a --*-only switch is seen, to forbid mixing it with the
  # individual --[no-]* toggles below.
  used_only = false
  opts.banner = "Usage: #{__FILE__}"
  # Run exactly one role.
  opts.on("--download-only") {|v|
    options[:download] = true
    options[:fetch] = false
    options[:inform] = false
    used_only = true
  }
  opts.on("--fetch-only") {|v|
    options[:download] = false
    options[:fetch] = true
    options[:inform] = false
    used_only = true
  }
  opts.on("--inform-only") {|v|
    options[:download] = false
    options[:fetch] = false
    options[:inform] = true
    used_only = true
  }
  # Toggle individual roles.
  opts.on("--[no-]download") {|v|
    raise Exception.new("Can't use --[no-]download with a --*-only switch on ") if used_only
    options[:download] = v
  }
  opts.on("--[no-]fetch") {|v|
    raise Exception.new("Can't use --[no-]fetch with a --*-only switch on ") if used_only
    options[:fetch] = v
  }
  opts.on("--[no-]inform") {|v|
    raise Exception.new("Can't use --[no-]inform with a --*-only switch on ") if used_only
    options[:inform] = v
  }
  opts.on("--debug") {|v|
    options[:debug] = true
  }
  opts.on("--config config") {|v|
    options[:config] = v
  }
  opts.on("--logfile logfile") {|v|
    options[:logfile] = v
  }
end.parse!

main(options)
Check if taglib is installed on the system, otherwise load locally
#!/usr/bin/ruby
# encoding: utf-8
require "optparse"
require "pp"

# Load the DB layer first: requiring it validates DB access/credentials.
begin
  require_relative "lib/db.rb"
rescue ScriptError, StandardError => e
  # Rescue ScriptError explicitly (LoadError is not a StandardError), but
  # no longer a blanket `rescue Exception`, which would also swallow
  # SignalException and SystemExit.
  $stderr.puts "Please fix your database access / creds in config.json"
  $stderr.puts e.message
  exit 1
end
require_relative "lib/fetcher.rb"
require_relative "lib/logger.rb"
require_relative "lib/utils.rb"

# Messages YouTube shows for videos that are permanently gone; matching one
# of these marks the video as unrecoverable (see Main#do_error).
$YT_DELETED_VIDEO_REASONS = [
  "This video does not exist.",
  "This video is unavailable.",
  "This video is not available.",
  "The YouTube account associated with this video has been terminated due to multiple third-party notifications of copyright infringement",
  "This video has been removed by the user",
  "This video has been removed for violating YouTube's Terms of Service.",
  "due to a copyright claim by a third party",
  "This video is no longer available because the YouTube account associated with this video has been terminated.",
  "This video is private",
  "This video is no longer available because the uploader has closed their YouTube account",
  "This video has been removed for violating YouTube's policy on nudity or sexual content.",
  "This video has been removed for violating YouTube's policy on violent or graphic content.",
  "This video has been removed for violating YouTube's policy on harassment and bullying."
].freeze

# Patterns indicating a geo-block; such downloads are retried via proxies.
$YT_COUNTRY_BLOCKED_MSG = [
  /blocked it in your country/,
  /not available on this country domain/,
  /This video is not available in your country/,
  /This video contains content from .* who has blocked it on copyright grounds/,
].freeze
# Raised for youtube-dl failures that the downloader logs and recovers from.
# Subclasses StandardError (not Exception) so a bare `rescue` catches it and
# it does not mask SignalException / SystemExit.
class YTDLException < StandardError
end
# Pipeline orchestrator. Wires together three kinds of worker threads:
#  - fetchers: scrape the configured sites for YouTube video ids,
#  - informer: fill in video metadata (YouTube Data API key or scraping),
#  - downloader: run youtube-dl, tag the result and move it into place.
# State is shared through the DBUtils layer and the global $CONF hash.
class Main
  require "fileutils"
  require "json"
  # Prefer a system-wide taglib gem; fall back to the copy vendored under
  # lib/taglib when it is not installed.
  begin
    require "taglib"
  rescue Exception => e
    $: << File.join(File.dirname(__FILE__),"lib/taglib")
    require "taglib"
  end

  # Builds the combined logger (stdout + optional logfile) and loads the
  # JSON configuration into the global $CONF.
  def initialize(options)
    @log = MyLogger.new()
    stdout_logger = Logger.new(STDOUT)
    stdout_logger.level = options[:debug] ? Logger::DEBUG : Logger::INFO
    @log.add_logger(stdout_logger)
    @arguments = options
    load_conf(@arguments[:config])
    # Command-line --logfile wins over the config file's "logfile" entry.
    logfile = @arguments[:logfile] || $CONF["logfile"]
    if logfile
      logger = Logger.new(logfile)
      logger.level = Logger::DEBUG
      @log.add_logger(logger)
    end
    @threads=[]
  end

  # Requires each site scraper listed under $CONF["sites"] from sites/.
  # Load failures only produce a warning.
  def load_sites
    $CONF["sites"].each do |site|
      # NOTE(review): `filename` stays nil when the entry already ends in
      # ".rb" (the assignment is guarded by `unless`), so File.join would
      # raise TypeError for such entries — confirm intended input format.
      filename = site+".rb" unless site.end_with?(".rb")
      path = File.join("sites",filename)
      begin
        require_relative path
      rescue LoadError=>e
        @log.warn "Cannot load #{path}"
      end
    end
  end

  # Parses the JSON config file into the global $CONF; exits when missing,
  # re-raises on parse/read errors.
  def load_conf(file)
    unless File.exist?(file)
      @log.err "Couldn't find config file #{file}."
      exit 1
    end
    begin
      $CONF = JSON.parse(File.read(file))
    rescue Exception => e
      @log.err "Problem opening config file #{file}#"
      raise e
    end
  end

  # Starts the single fetcher thread: every `tick` seconds it polls each
  # registered site whose per-site wait (jittered by up to +10% so polling
  # looks less bot-like) has elapsed, and records any found video ids via
  # DBUtils. Returns the Thread.
  def start_fetcher_threads()
    load_sites()
    if Fetcher.sites.empty?
      @log.err "Didn't find any site to parse for youtube URL."
      @log.err "Add some in config.json, maybe?"
      exit 1
    end
    @fetcher_threads = []
    tick = 5 # Verify everything every tick
    t = Thread.new{
      @log.info "Starting fetcher thread"
      while true
        now = Time.now().to_i
        # Retry when we've waited "wait" time + up to 10% of wait, to appear not too bot-y
        Fetcher.sites.select{|site| now - site.last_check > (site.wait*(1 + (rand() / 10))) }.each do |site|
          count = 0
          begin
            site.get_yids().each { |yid|
              #@log.info "#{site} found #{yid}"
              DBUtils.add_yid(yid, site.name)
              count += 1
            }
            @log.info "#{site} found #{count} videos. Will retry in #{site.wait} seconds" unless site.wait < 30
          rescue SocketError => e
            # Internet is down, let's wait for a bit
            @log.err "Failed to fetch yids from #{site}. Internet or your proxy is down, let's retry later"
          rescue Exception => e
            # TODO don't break but send an email or something
            @log.err "Failed to fetch yids from #{site}"
          end
          site.last_check = now
        end
        sleep tick
      end
    }
    t.abort_on_exception = true
    return t
  end

  # Persists one metadata record: on "ok" saves infos and thumbnails,
  # otherwise records the deletion reason and timestamp.
  def update_video_infos(infos)
    yid = infos["yid"]
    if infos["status"] == "ok"
      DBUtils.update_video_infos_from_hash(yid, infos["infos"])
      DBUtils.save_thumbs(yid, infos["thumbs"])
    else
      reason = YoutubeUtils.get_reason(yid)
      DBUtils.update_video_infos_from_hash(yid, {downloaded: reason, deletion: Time.now()})
    end
  end

  # Starts the informer thread. With an API key it batches lookups 10 ids
  # at a time; without one it scrapes one id every 5 seconds. Videos whose
  # duration falls outside the configured min/max are marked downloaded so
  # the downloader skips them. Returns the Thread.
  def start_informer_threads()
    @informer = Thread.new {
      @log.info "Starting informer thread"
      Thread.current[:name]="Informer"
      while true
        begin
          count = 0
          if $CONF["youtube_key"] and $CONF["youtube_key"].size > 5
            DBUtils.get_all_yids_without_infos.each_slice(10).to_a.each do |yid_slice|
              YoutubeUtils.get_batch_infos_with_key(yid_slice, $CONF["youtube_key"]).each do |infos|
                yid = infos["yid"]
                if infos["infos"][:duration] < $CONF["download"]["minimum_duration"]
                  # @log.info("#{infos["infos"][:duration]} < #{$CONF["download"]["minimum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                  DBUtils.set_downloaded(yid)
                  infos["infos"][:bien] = false
                end
                if infos["infos"][:duration] > $CONF["download"]["maximum_duration"]
                  # @log.info("#{infos["infos"][:duration]} > #{$CONF["download"]["maximum_duration"]} setting downloaded to #{DBUtils::DLDONE}")
                  DBUtils.set_downloaded(yid)
                  infos["infos"][:bien] = false
                end
                update_video_infos(infos)
                count+=1
              end
            end
          else
            DBUtils.get_all_yids_without_infos.each do |yid|
              update_video_infos(YoutubeUtils.get_infos_without_key(yid))
              count+=1
              sleep 5 # We don't want to hit the youtube.com website too much and be seen too bot-y
            end
          end
          @log.info "Informer updated #{count} videos infos" unless count == 0
        rescue Net::OpenTimeout, SocketError
          @log.warn("woops, youtube is slow today")
          sleep 10
        end
        sleep 5
      end
    }
    @informer.abort_on_exception = true
    return @informer
  end

  # Embeds `image_data` (JPEG bytes) as cover art into an MP4 file via
  # taglib; non-MP4 files are skipped with a warning.
  def add_cover(fmp4,image_data)
    if not fmp4 =~ /\.mp4$/
      @log.warn "ERROR: file not MP4, not adding nice tags"
    else
      cover_art = TagLib::MP4::CoverArt.new(TagLib::MP4::CoverArt::JPEG, image_data)
      item = TagLib::MP4::Item.from_cover_art_list([cover_art])
      TagLib::MP4::File.open(fmp4) do |mp4|
        mp4.tag.item_list_map.insert('covr', item)
        mp4.save
      end
    end
  end

  # Marks `yid` as a youtube-dl failure and tells the operator to update
  # their youtube-dl.
  def ytdlfail(yid, errmsg)
    DBUtils.set_downloaded(yid, DBUtils::YTDLFAIL)
    @log.warn "The current version of youtube-dl failed to download #{yid} with error #{errmsg}."
    @log.warn "Please update your youtube-dl version."
    @log.warn "You can also re-run the last youtube-dl command with all the verbose flags to debug"
  end

  # Classifies a youtube-dl error message and records the outcome:
  # geo-blocks are queued for proxy retry, permanent YouTube errors are
  # marked as such, transient errors marked for retry, anything unknown
  # raises YTDLException. `tried` is the hash of proxies already attempted
  # (or false on the first attempt).
  def do_error(error_message, yid, proxy_to_try, tried=false)
    @log.debug "Handling error #{error_message}"
    case error_message
    when /ERROR: (.*)$/i
      #when /#{yid}: YouTube said: (.*)$/i
      yt_error = $1
      case yt_error
      when Regexp.union($YT_COUNTRY_BLOCKED_MSG)
        # Geo-block: remember which proxy was tried so the next attempt
        # picks a different one.
        if tried
          DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
        else
          DBUtils.set_downloaded(yid, "RETRY: {}")
        end
      when /Playback on other websites has been disabled by the video owner./
        err_msg = "Youtube said '#{yt_error}'"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      # when /content too short/
      # let's just retry later
      # ytdlfail(yid, yt_error)
      when /Please sign in to view this video./
        _msg = ""
        if $CONF["youtube_username"]
          # WTF we are signed in
          _msg="#{DBUtils::YTDLFAIL} #{yt_error}"
        else
          _msg="#{DBUtils::YTDLFAIL} need credentials"
        end
        @log.warn _msg
        DBUtils.set_downloaded(yid, _msg)
      when Regexp.union($YT_DELETED_VIDEO_REASONS)
        # Unrecoverable error, videos sent to Youtube Limbo.
        err_msg = "Youtube said '#{yt_error}', deleting"
        DBUtils.set_downloaded(yid, "#{DBUtils::YTERROR} #{yt_error}")
        @log.warn err_msg
      when /The uploader has not made this video available in your country/
        if tried
          DBUtils.set_downloaded(yid, "RETRY: "+JSON.generate(tried.merge(proxy_to_try)))
        else
          DBUtils.set_downloaded(yid, "RETRY: {}")
        end
      else
        raise YTDLException.new("Unknown YouTube error '#{yt_error}'")
      end
    when /Signature extraction failed/
      ytdlfail(yid, error_message)
      return
    when /would lead to an infinite loop/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /Connection reset by peer/
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /content too short/i
      DBUtils.set_downloaded(yid, DBUtils::RETRYDL)
    when /This live stream recording is not available/
      ytdlfail(yid, error_message)
      return
    else
      DBUtils.set_downloaded(yid, "#{DBUtils::YTDLFAIL} #{error_message}")
      raise YTDLException.new("Unknown youtube-dl error '#{error_message}'")
    end
  end

  # Downloads one video into the tmp dir (optionally through a not-yet-tried
  # proxy), then post-processes: extracts/embeds a cover image and moves the
  # file to the destination dir, recording its name and size. Returns nil.
  def do_download(yid)
    video_file = nil
    Dir.chdir($CONF["download"]["tmp_dir"])
    available_proxies = $CONF["download"]["proxies"]
    proxy_cmd = ""
    tried = DBUtils.get_retried(yid)
    if tried
      @log.info "We need to retry with a proxy. Already tried: #{tried}"
      available_proxies = available_proxies.dup.delete_if {|k,_| tried.key?(k)}
      if available_proxies.empty?
        @log.warn "No more proxy to try =("
        # TODO mark the download accordingly
        return
      end
      # Pick one random untried proxy; proxy_to_try stays nil when no
      # retry is needed (do_error tolerates that).
      proxy_to_try = [available_proxies.to_a.sample()].to_h
      proxy_cmd = "--proxy #{proxy_to_try.first[1]}"
    end
    command = "#{@youtube_dl_cmd} #{proxy_cmd} https://www.youtube.com/watch?v=#{yid} 2>&1"
    @log.debug command
    DBUtils.set_downloaded(yid, msg=DBUtils::DLING)
    ytdl_msg = nil
    IO.popen(command) do |io|
      ytdl_msg = io.read.split("\n").join(" ")
    end
    @log.debug ytdl_msg
    case ytdl_msg
    when /error: (.+)$/i
      do_error(ytdl_msg, yid, proxy_to_try, tried)
      return nil
    when /WARNING: (.+)$/
      warn = $1
      if warn!=""
        @log.warn warn unless warn=~/Your copy of avconv is outdated, update avconv to version/
        if warn=~/unable to log in: .*password/i
          warn = "Use a webbrowser to connect to the YT Account, which was probabbly flagged as bot/spam"
          raise warn
        end
      end
    when /has already been downloaded and merged/
      # continue
    when ""
      # Continue
    else
      raise Exception.new("WTF #{ytdl_msg}")
    end
    @log.success "Downloading finished, now post processing"
    output_files = Dir.glob("*#{yid}*",File::FNM_DOTMATCH)
    if output_files.size > 2
      pp output_files
      raise "Too many output files in #{`pwd`}"
    end
    video_file = output_files.reject{ |f| f=~/\.jpg$/ }[0]
    jpg_file = output_files.select{|f| f=~/\.jpg$/}[0]
    # NOTE(review): when no .jpg was produced AND @video_converter_cmd is
    # nil, jpg_file is still nil here and File.exist?(nil) below raises
    # TypeError — verify this path.
    if not jpg_file or not File.exist?(jpg_file)
      if @video_converter_cmd
        `#{@video_converter_cmd} -i \"#{video_file}\" -vframes 1 -f image2 \"#{jpg_file}\"`
      end
    end
    if File.exist?(jpg_file)
      add_cover(video_file, File.read(jpg_file))
      File.delete(jpg_file)
    end
    FileUtils.mv(video_file, $CONF["download"]["destination_dir"])
    video_file = File.join($CONF["download"]["destination_dir"], video_file)
    @log.success "PostProcessing #{yid} over."
    DBUtils.set_downloaded(yid)
    file_size = File.stat(video_file).size.to_i
    DBUtils.update_video_infos_from_hash(yid,{file: File.basename(video_file), size: file_size})
    return nil
  end

  # Validates the youtube-dl and video-converter binaries, assembles the
  # youtube-dl command line (extra args + optional credentials), then starts
  # the downloader thread which pops eligible ids from the DB and downloads
  # them one at a time. Returns the Thread.
  def start_downloader_threads()
    FileUtils.mkdir_p($CONF["download"]["destination_dir"])
    FileUtils.mkdir_p($CONF["download"]["tmp_dir"])
    @youtube_dl_cmd = $CONF["download"]["youtube_dl_cmd"] || `which youtube-dl`.strip()
    if @youtube_dl_cmd == ""
      @log.err "Please update \"youtube_dl_cmd\" in config.json to your local installation of youtube-dl, or remove that key altogether, to use the one in your PATH"
      exit 1
    else
      begin
        # youtube-dl versions look like dates (e.g. 2016.01.01).
        res = `#{@youtube_dl_cmd} --version | egrep "^20[0-9.]+$"`
        raise unless res=~/^20[0-9.]+$/
      rescue Exception => e
        @log.err "'#{@youtube_dl_cmd}' is not a valid youtube-dl binary"
        exit
      end
    end
    #
    # TODO move to "download"?
    if not $CONF["youtube_username"]
      @log.warn "You have not set a Youtube username in config.json."
      @log.warn "You won't be able to download '18+' videos."
    end
    @video_converter_cmd = $CONF["download"]["video_converter_cmd"] || "avconv"
    begin
      res = `#{@video_converter_cmd} -version 2>&1 | egrep "Copyright"`
      raise unless res=~/developers/
    rescue Exception => e
      @log.warn "'#{@video_converter_cmd}' is not a valid video conversion command (use ffmpeg or avconv)"
      @video_converter_cmd = nil
    end
    if $CONF["download"]["youtube_dl_extra_args"]
      @youtube_dl_cmd << " " << $CONF["download"]["youtube_dl_extra_args"]
    end
    if $CONF["youtube_username"]
      @youtube_dl_cmd << " -u \"#{$CONF['youtube_username']}\""
      @youtube_dl_cmd << " -p \"#{$CONF['youtube_password']}\""
    end
    if not ($CONF["youtube_key"] and $CONF["youtube_key"].size > 5)
      @log.warn "You have not set a Youtube API key in config.json."
    end
    # TODO have more than 1 ?
    @downloader = Thread.new {
      @log.info "Starting downloader thread"
      while true
        yid = DBUtils.pop_yid_to_download(minimum_duration: $CONF["download"]["minimum_duration"],
                                          maximum_duration: $CONF["download"]["maximum_duration"])
        if yid
          # do_download chdirs into the tmp dir; restore cwd afterwards.
          cur_dir=Dir.pwd()
          begin
            do_download(yid)
            nb_to_dl = DBUtils.get_nb_to_dl()
            @log.info "Still #{nb_to_dl} videos to download"
          rescue YTDLException => e
            @log.err "Exception when downloading #{yid}: #{e.message}"
          rescue Exception => e
            @log.err "Exception when downloading #{yid}"
            raise e
          end
          Dir.chdir(cur_dir)
        else
          @log.debug "Nothing worthy of downloading"
          sleep 60
        end
      end
      @log.debug "downloader thread ded =("
      sleep 1
    }
    @downloader.abort_on_exception = true
    return @downloader
  end

  # Placeholder for a local-file watcher thread.
  # NOTE(review): the inner `while true ... end` has an empty body, so this
  # thread busy-spins at 100% CPU and the `sleep 10` is unreachable.
  def start_local_downloaded_threads()
    @local_downloader = Thread.new{
      #Inotify stuff
      while true
      end
      sleep 10
    }
    @local_downloader.abort_on_exception = true
    return @local_downloader
  end

  # Entry point: cleans stale download states, re-queues old failures, then
  # starts whichever worker threads were requested and joins them forever.
  def go()
    DBUtils.clean_dl()
    failed_dl_vids = DBUtils.get_ytdlfail().size
    if failed_dl_vids > 0
      @log.warn "You have #{failed_dl_vids} videos that youtube-dl couldn't download."
    end
    DBUtils.retry_old_failed_videos()
    @threads << start_informer_threads() if @arguments[:inform]
    @threads << start_fetcher_threads() if @arguments[:fetch]
    @threads << start_downloader_threads() if @arguments[:download]
    @threads.each {|t| t.join()}
  end
end
# Entry point: build the orchestrator from the parsed options and run it.
def main(options)
  Main.new(options).go()
end
# Exit cleanly on Ctrl-C.
trap("INT"){
  # TODO
  # remove ytdl temps,
  exit
}

# Defaults: run all three roles with the local config.json.
options = {
  config: "config.json",
  download: true,
  fetch: true,
  inform: true
}

OptionParser.new do |opts|
  # Set when a --*-only switch is seen, to forbid mixing it with the
  # individual --[no-]* toggles below.
  used_only = false
  opts.banner = "Usage: #{__FILE__}"
  # Run exactly one role.
  opts.on("--download-only") {|v|
    options[:download] = true
    options[:fetch] = false
    options[:inform] = false
    used_only = true
  }
  opts.on("--fetch-only") {|v|
    options[:download] = false
    options[:fetch] = true
    options[:inform] = false
    used_only = true
  }
  opts.on("--inform-only") {|v|
    options[:download] = false
    options[:fetch] = false
    options[:inform] = true
    used_only = true
  }
  # Toggle individual roles.
  opts.on("--[no-]download") {|v|
    raise Exception.new("Can't use --[no-]download with a --*-only switch on ") if used_only
    options[:download] = v
  }
  opts.on("--[no-]fetch") {|v|
    raise Exception.new("Can't use --[no-]fetch with a --*-only switch on ") if used_only
    options[:fetch] = v
  }
  opts.on("--[no-]inform") {|v|
    raise Exception.new("Can't use --[no-]inform with a --*-only switch on ") if used_only
    options[:inform] = v
  }
  opts.on("--debug") {|v|
    options[:debug] = true
  }
  opts.on("--config config") {|v|
    options[:config] = v
  }
  opts.on("--logfile logfile") {|v|
    options[:logfile] = v
  }
end.parse!

main(options)
|
require 'formula'

# Homebrew formula for ROOT, CERN's data-analysis framework.
class Root < Formula
  homepage 'http://root.cern.ch'
  url 'ftp://root.cern.ch/root/root_v5.34.08.source.tar.gz'
  version '5.34.08'
  sha1 '23ca250f9c66797972f94bb8f20d04cf455d6c53'

  depends_on 'fftw' => :optional
  depends_on :x11

  def install
    # Build the 64-bit target when the host prefers it.
    if MacOS.prefer_64_bit?
      target = 'macosx64'
    else
      target = 'macosx'
    end
    # Passing --etcdir to configure is essential: without it ROOT will not
    # display any graphical components.
    # See http://root.cern.ch/phpBB3/viewtopic.php?f=3&t=15072
    configure_args = [
      "#{target}",
      "--all",
      "--enable-builtin-glew",
      "--prefix=#{prefix}",
      "--etcdir=#{prefix}/etc/root",
      "--mandir=#{man}",
    ]
    system "./configure", *configure_args
    system "make"
    system "make install"
    prefix.install 'test' # needed to run test suite
  end

  # Smoke test: build and run the bundled hsimple example.
  def test
    system "make -C #{prefix}/test/ hsimple"
    system "#{prefix}/test/hsimple"
  end

  def caveats; <<-EOS.undent
    Because ROOT depends on several installation-dependent
    environment variables to function properly, you should
    add the following commands to your shell initialization
    script (.bashrc/.profile/etc.), or call them directly
    before using ROOT.
    For csh/tcsh users:
    source `brew --prefix root`/bin/thisroot.csh
    For bash/zsh users:
    . $(brew --prefix root)/bin/thisroot.sh
    EOS
  end
end
ROOT: Add Python as a dependency to assure PyROOT bindings are correctly linked.
ROOT currently does not have a :python dependency, which causes ROOT to
link against system python instead of homebrew python. This commit adds
a :python dependency, which does not affect users who use system python
(since homebrew will still link them to system python), but which does
fix linking for users who use a brewed python. The logic behind making
this dependency non-optional is:
1) Speaking from first-hand experience, most LHC analyses use PyROOT
more than ROOT's built-in CINT interpreter, so it makes sense to have
this always-on, as users will expect it.
2) For users who don't have a brewed python, this doesn't add any extra
build dependencies.
3) The existing formula is compiled with the --all flag, and has no
option to disable python bindings, so this really doesn't change the
interface to the formula.
Closes #20704.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'

# Homebrew formula for ROOT, CERN's data-analysis framework.
class Root < Formula
  homepage 'http://root.cern.ch'
  url 'ftp://root.cern.ch/root/root_v5.34.08.source.tar.gz'
  version '5.34.08'
  sha1 '23ca250f9c66797972f94bb8f20d04cf455d6c53'

  depends_on 'fftw' => :optional
  depends_on :x11
  # Links PyROOT against a brewed python when present.
  depends_on :python

  def install
    # Build the 64-bit target when the host prefers it.
    if MacOS.prefer_64_bit?
      target = 'macosx64'
    else
      target = 'macosx'
    end
    # Passing --etcdir to configure is essential: without it ROOT will not
    # display any graphical components.
    # See http://root.cern.ch/phpBB3/viewtopic.php?f=3&t=15072
    configure_args = [
      "#{target}",
      "--all",
      "--enable-builtin-glew",
      "--prefix=#{prefix}",
      "--etcdir=#{prefix}/etc/root",
      "--mandir=#{man}",
    ]
    system "./configure", *configure_args
    system "make"
    system "make install"
    prefix.install 'test' # needed to run test suite
  end

  # Smoke test: build and run the bundled hsimple example.
  def test
    system "make -C #{prefix}/test/ hsimple"
    system "#{prefix}/test/hsimple"
  end

  def caveats; <<-EOS.undent
    Because ROOT depends on several installation-dependent
    environment variables to function properly, you should
    add the following commands to your shell initialization
    script (.bashrc/.profile/etc.), or call them directly
    before using ROOT.
    For csh/tcsh users:
    source `brew --prefix root`/bin/thisroot.csh
    For bash/zsh users:
    . $(brew --prefix root)/bin/thisroot.sh
    EOS
  end
end
|
Set the port to 10080.
#!/usr/bin/ruby
##require 'rubygems'
require 'sinatra'

## Configuration
# Application root; scan.sh is expected next to this file.
SCANWEB_HOME = File.dirname(__FILE__)
SCRIPT_FILE = SCANWEB_HOME + "/scan.sh"
# Directory where scan outputs (pdf, log, thumbnail zip) are written.
OUTPUT_DIR = '/export/work/scan'
#OUTPUT_DIR = 'c:/temp/scan'

# Serve the web UI on port 10080.
set :port, 10080
##
# Maps a scan basename to its three output paths under OUTPUT_DIR:
# the PDF, the log file, and the thumbnails zip.
def create_path_table(basename)
  stem = "#{OUTPUT_DIR}/#{basename}"
  {
    :pdf_path    => "#{stem}.pdf",
    :log_path    => "#{stem}.log",
    :thumbs_path => "#{stem}_thumbs.zip",
  }
end
# too ad-hoc implementation. sleep 1 if it conflicts...
def create_basename
time_str = Time.now.strftime("%Y%m%d-%H%M%S")
basename = "scan-#{time_str}"
path_table = create_path_table(basename)
if File.exists?(path_table[:pdf_path]) ||
File.exists?(path_table[:log_path]) ||
File.exists?(path_table[:thumbs_path]) then
sleep 1
return create_basename()
end
return basename
end
# View helpers: h() escapes HTML, u() URL-escapes.
helpers do
  include Rack::Utils
  alias_method :h, :escape_html
  alias_method :u, :escape
end
# Top page: renders the scan form.
get '/' do
  erb :index
end

# Shown when request parameters fail validation.
get '/invalid' do
  return 'パラメータの指定が不正です。'
end
# Runs scan.sh with the validated scanner parameters and renders the scan
# result page (@success/@message, plus the log text on failure).
post '/scan' do
  source = params[:source]
  mode = params[:mode]
  resolution = params[:resolution]
  # \A and \z anchor the WHOLE string. The previous ^/$ anchors only match
  # per-line, so a parameter like "300\n;evil" would have passed validation
  # and reached the shell interpolation below.
  unless source =~ /\A[a-zA-Z ]+\z/ &&
      mode =~ /\A[a-zA-Z]+\z/ &&
      resolution =~ /\A[0-9]+\z/ then
    redirect '/invalid'
  end
  basename = create_basename()
  path_table = create_path_table(basename)
  ##File.open(path_table[:log_path], "w") do |fp|
  ##  fp.puts("TEXT!")
  ##end
  # Parameters are safe to interpolate only because of the strict
  # whitelist validation above.
  system(%Q(#{SCRIPT_FILE} "#{path_table[:pdf_path]}" "#{path_table[:log_path]}" "#{path_table[:thumbs_path]}" "#{source}" "#{mode}" "#{resolution}"))
  # If output exists, treat as scanning is succeeded.
  # File.exist? replaces File.exists?, removed in Ruby 3.2.
  if File.exist?(path_table[:pdf_path]) then
    ##if true
    @success = true
    @message = 'スキャンが完了しました。'
    @basename = basename
  else
    @success = false
    @message = 'スキャンに失敗しました。'
    if File.exist?(path_table[:log_path]) then
      @log_text = File.read(path_table[:log_path])
    else
      @log_text = 'ログファイルが見つかりません。'
    end
  end
  erb :scan
end
# Serve a scanned image identified by basename.
# NOTE(review): the whitelist [a-zA-Z0-9]+ rejects the hyphens that
# create_basename always produces ("scan-YYYYmmdd-HHMMSS") — confirm
# what identifier callers actually pass before widening it.
get '/image/:basename' do
  basename = params[:basename]
  # \A/\z anchor the whole string; ^/$ only anchor a single line.
  unless basename =~ /\A[a-zA-Z0-9]+\z/ then
    redirect '/invalid'
  end
  return basename
end
|
#
require 'pathname'
# Step-under-test configuration, taken from the environment.
options = {
  step_repo: ENV['STEP_TESTER_STEP_REPO'],
  step_version: ENV['STEP_TESTER_STEP_VERSION_TAG'],
  step_test_branch: ENV['STEP_TESTER_STEP_TEST_BRANCH'],
  step_args_file: ENV['__INPUT_FILE__']
}
# Will be either the Tag (if specified) or the test-branch.
# BUG FIX: the original line ended with a comma, turning the next line
# into part of a multiple assignment and making this variable an Array.
git_checkout_option = ''
step_args_content = ''
# Which conventional Step files were found; filled in by the main block.
$summary_info = {
  is_clone_ok: false,
  is_step_sh_file_found: false,
  is_step_yml_file_found: false,
  is_readme_file_found: false,
  is_license_file_found: false
}
# Markdown report target; start from a clean file.
# (File.delete instead of shelling out `rm`, which also errored when
# the env var was unset.)
$formatted_output_file_path = ENV['BITRISE_STEP_FORMATTED_OUTPUT_FILE_PATH']
if $formatted_output_file_path && File.exist?($formatted_output_file_path)
  File.delete($formatted_output_file_path)
end
# Append +text+ as a line to the formatted-output markdown file;
# optionally echo it to stdout as well.
#
# FIX: use File.open instead of Kernel#open — Kernel#open executes a
# subprocess when the path starts with "|".
def puts_string_to_formatted_output(text, is_log_print=false)
  File.open($formatted_output_file_path, 'a') { |f|
    f.puts(text)
  }
  if is_log_print
    puts text
  end
end
# Append +section_text+ to the report padded with blank lines, and
# optionally echo the same three lines to stdout.
#
# FIX: File.open instead of Kernel#open (pipe-prefix command hazard).
def puts_section_to_formatted_output(section_text, is_log_print=false)
  File.open($formatted_output_file_path, 'a') { |f|
    f.puts
    f.puts(section_text)
    f.puts
  }
  if is_log_print
    puts
    puts section_text
    puts
  end
end
# Emit the repository-check section of the report and return true when
# every *required* Step file (repo clone + step.sh) was found.
def print_formatted_summary
  checks = [
    [:is_clone_ok,            :required,    '* Step Repository Clone [ok]', '* **Step Repository Clone [FAILED]**'],
    [:is_step_sh_file_found,  :required,    '* step.sh [found]',            '* **step.sh [NOT FOUND] (Required!)**'],
    [:is_step_yml_file_found, :recommended, '* step.yml [found]',           '* **step.yml [not found] (recommended)**'],
    [:is_readme_file_found,   :recommended, '* README.md [found]',          '* **README.md [not found] (recommended)**'],
    [:is_license_file_found,  :recommended, '* LICENSE [found]',            '* **LICENSE [not found] (recommended)**']
  ]
  missing = { required: false, recommended: false }
  puts_section_to_formatted_output("# Repository Check", true)
  checks.each do |key, severity, ok_line, missing_line|
    if $summary_info[key]
      puts_string_to_formatted_output(ok_line, true)
    else
      puts_string_to_formatted_output(missing_line, true)
      missing[severity] = true
    end
  end
  puts_section_to_formatted_output("## Summary")
  if missing[:required]
    puts_string_to_formatted_output("* **Required Step file(s) missing [FAILED]**", true)
  else
    puts_string_to_formatted_output("* Every required Step file found [ok]", true)
  end
  if missing[:recommended]
    puts_string_to_formatted_output("* **Recommended Step file(s) missing**", true)
  else
    puts_string_to_formatted_output("* Every recommended Step file found [awesome]", true)
  end
  puts_section_to_formatted_output("---------------------------------------", true)
  !missing[:required]
end
# Log a failure to stdout and to the report, then abort (exit 1).
def print_error(err_msg)
  puts " [!] Failed: #{err_msg}"
  ["# Failed", err_msg].each { |section| puts_section_to_formatted_output(section) }
  exit 1
end
# Log a non-fatal warning to stdout and to the report.
def print_warning(warning_msg)
  puts " (!) #{warning_msg}"
  ["# Warning", warning_msg].each { |section| puts_section_to_formatted_output(section) }
end
# --- Input validation ---------------------------------------------------
# A repo URL is mandatory. (print_error itself exits, so the original's
# trailing `exit 1` lines were dead code.)
print_error "Step Repository URL not defined" unless options[:step_repo]
if options[:step_version]
  git_checkout_option = options[:step_version]
else
  print_warning "Step Version (Tag) not defined"
  if options[:step_test_branch]
    print_warning "Will use the provided Test-Branch instead - this is only recommended for testing, for production you should always use Version Tags instead of branch!"
    git_checkout_option = options[:step_test_branch]
  else
    print_error "Neither a Version Tag nor a Test-Branch defined!"
  end
end
if options[:step_args_file]
  step_args_content = File.read(options[:step_args_file])
end
# BUG FIX: '' is truthy in Ruby, so `unless step_args_content` could
# never fire; check for emptiness instead.
if step_args_content.empty?
  print_warning "Step Args not defined - no Input will be passed to the Step"
end
# -----------------------
# --- MAIN
# Parse "KEY=VALUE" lines into [{key:, value:}, ...]. Only the first
# '=' separates key from value, so values may contain '='.
#
# BUG FIX: blank lines crashed the original (String#split("=") returns
# [] and nil.strip raised NoMethodError); they are now skipped. Lines
# with a missing key or value are warned about and skipped, as the
# original warning branch intended but never reached.
def collect_step_input_envs_from_string(str)
  step_envs = []
  str.each_line { |line|
    next if line.strip.empty?
    splits = line.split("=")
    env_key = splits.shift # first item is the key
    env_value = splits.join("=") # rest is the value
    if env_key && !(env_key = env_key.strip).empty? && !env_value.empty?
      step_envs << {
        key: env_key,
        value: env_value.chomp
      }
    else
      print_warning("Possibly invalid input (will be skipped), key or value missing: #{line}")
    end
  }
  return step_envs
end
# --- Main: clone, inspect, run, clean up --------------------------------
is_failed = false
step_base_dir = nil
begin
  unless system(%Q{git clone -b "#{git_checkout_option}" "#{options[:step_repo]}" ./stepdir})
    raise "Failed to clone the Step Repository"
  end
  step_base_dir = Pathname.new('./stepdir').realpath.to_s
  puts " (debug) step_base_dir: #{step_base_dir}"
  $summary_info[:is_clone_ok] = true
  # Record which conventional Step files are present.
  $summary_info[:is_step_sh_file_found] = true if File.file?(File.join(step_base_dir, "step.sh"))
  $summary_info[:is_step_yml_file_found] = true if File.file?(File.join(step_base_dir, "step.yml"))
  $summary_info[:is_license_file_found] = true if File.file?(File.join(step_base_dir, "LICENSE"))
  $summary_info[:is_readme_file_found] = true if File.file?(File.join(step_base_dir, "README.md"))
  unless print_formatted_summary()
    raise "A required Step file is missing!"
  end
  puts_section_to_formatted_output("# Running the Step", true)
  step_envs = collect_step_input_envs_from_string(step_args_content)
  puts " (debug) step_envs: #{step_envs}"
  # Clean up our own ENV so the Step doesn't see tester internals.
  ENV.delete('STEP_TESTER_STEP_REPO')
  ENV.delete('STEP_TESTER_STEP_VERSION_TAG')
  ENV.delete('STEP_TESTER_STEP_TEST_BRANCH')
  ENV.delete('__INPUT_FILE__')
  # Export the Step's declared inputs.
  if step_envs.length > 0
    puts_section_to_formatted_output("## Envs:", true)
    step_envs.each { |an_env|
      ENV[an_env[:key]] = an_env[:value]
      puts_string_to_formatted_output("* `#{an_env[:key]}` : `#{ENV[an_env[:key]]}`", true)
    }
  end
  puts_section_to_formatted_output("---------------------------------------", true)
  unless system(%Q{cd stepdir && bash step.sh})
    raise "Step Failed"
  end
rescue => ex
  print_error("#{ex}")
  is_failed = true
ensure
  # BUG FIX: guard against step_base_dir being nil when the clone
  # itself failed (the original ran `rm -rf ""`).
  system(%Q{rm -rf "#{step_base_dir}"}) if step_base_dir
end
exit(is_failed ? 1 : 0)
expand input env vars
#
require 'pathname'
# Step-under-test configuration, taken from the environment.
options = {
  step_repo: ENV['STEP_TESTER_STEP_REPO'],
  step_version: ENV['STEP_TESTER_STEP_VERSION_TAG'],
  step_test_branch: ENV['STEP_TESTER_STEP_TEST_BRANCH'],
  step_args_file: ENV['__INPUT_FILE__']
}
# Will be either the Tag (if specified) or the test-branch.
# BUG FIX: the original line ended with a comma, turning the next line
# into part of a multiple assignment and making this variable an Array.
git_checkout_option = ''
step_args_content = ''
# Which conventional Step files were found; filled in by the main block.
$summary_info = {
  is_clone_ok: false,
  is_step_sh_file_found: false,
  is_step_yml_file_found: false,
  is_readme_file_found: false,
  is_license_file_found: false
}
# Markdown report target; start from a clean file.
# (File.delete instead of shelling out `rm`, which also errored when
# the env var was unset.)
$formatted_output_file_path = ENV['BITRISE_STEP_FORMATTED_OUTPUT_FILE_PATH']
if $formatted_output_file_path && File.exist?($formatted_output_file_path)
  File.delete($formatted_output_file_path)
end
# Append +text+ as a line to the formatted-output markdown file;
# optionally echo it to stdout as well.
#
# FIX: use File.open instead of Kernel#open — Kernel#open executes a
# subprocess when the path starts with "|".
def puts_string_to_formatted_output(text, is_log_print=false)
  File.open($formatted_output_file_path, 'a') { |f|
    f.puts(text)
  }
  if is_log_print
    puts text
  end
end
# Append +section_text+ to the report padded with blank lines, and
# optionally echo the same three lines to stdout.
#
# FIX: File.open instead of Kernel#open (pipe-prefix command hazard).
def puts_section_to_formatted_output(section_text, is_log_print=false)
  File.open($formatted_output_file_path, 'a') { |f|
    f.puts
    f.puts(section_text)
    f.puts
  }
  if is_log_print
    puts
    puts section_text
    puts
  end
end
# Emit the repository-check section of the report and return true when
# every *required* Step file (repo clone + step.sh) was found.
def print_formatted_summary
  checks = [
    [:is_clone_ok,            :required,    '* Step Repository Clone [ok]', '* **Step Repository Clone [FAILED]**'],
    [:is_step_sh_file_found,  :required,    '* step.sh [found]',            '* **step.sh [NOT FOUND] (Required!)**'],
    [:is_step_yml_file_found, :recommended, '* step.yml [found]',           '* **step.yml [not found] (recommended)**'],
    [:is_readme_file_found,   :recommended, '* README.md [found]',          '* **README.md [not found] (recommended)**'],
    [:is_license_file_found,  :recommended, '* LICENSE [found]',            '* **LICENSE [not found] (recommended)**']
  ]
  missing = { required: false, recommended: false }
  puts_section_to_formatted_output("# Repository Check", true)
  checks.each do |key, severity, ok_line, missing_line|
    if $summary_info[key]
      puts_string_to_formatted_output(ok_line, true)
    else
      puts_string_to_formatted_output(missing_line, true)
      missing[severity] = true
    end
  end
  puts_section_to_formatted_output("## Summary")
  if missing[:required]
    puts_string_to_formatted_output("* **Required Step file(s) missing [FAILED]**", true)
  else
    puts_string_to_formatted_output("* Every required Step file found [ok]", true)
  end
  if missing[:recommended]
    puts_string_to_formatted_output("* **Recommended Step file(s) missing**", true)
  else
    puts_string_to_formatted_output("* Every recommended Step file found [awesome]", true)
  end
  puts_section_to_formatted_output("---------------------------------------", true)
  !missing[:required]
end
# Log a failure to stdout and to the report, then abort (exit 1).
def print_error(err_msg)
  puts " [!] Failed: #{err_msg}"
  ["# Failed", err_msg].each { |section| puts_section_to_formatted_output(section) }
  exit 1
end
# Log a non-fatal warning to stdout and to the report.
def print_warning(warning_msg)
  puts " (!) #{warning_msg}"
  ["# Warning", warning_msg].each { |section| puts_section_to_formatted_output(section) }
end
# --- Input validation ---------------------------------------------------
# A repo URL is mandatory. (print_error itself exits, so the original's
# trailing `exit 1` lines were dead code.)
print_error "Step Repository URL not defined" unless options[:step_repo]
if options[:step_version]
  git_checkout_option = options[:step_version]
else
  print_warning "Step Version (Tag) not defined"
  if options[:step_test_branch]
    print_warning "Will use the provided Test-Branch instead - this is only recommended for testing, for production you should always use Version Tags instead of branch!"
    git_checkout_option = options[:step_test_branch]
  else
    print_error "Neither a Version Tag nor a Test-Branch defined!"
  end
end
if options[:step_args_file]
  step_args_content = File.read(options[:step_args_file])
end
# BUG FIX: '' is truthy in Ruby, so `unless step_args_content` could
# never fire; check for emptiness instead.
if step_args_content.empty?
  print_warning "Step Args not defined - no Input will be passed to the Step"
end
# -----------------------
# --- MAIN
# Parse "KEY=VALUE" lines into [{key:, value:}, ...]. Only the first
# '=' separates key from value, so values may contain '='.
#
# BUG FIX: blank lines crashed the original (String#split("=") returns
# [] and nil.strip raised NoMethodError); they are now skipped. Lines
# with a missing key or value are warned about and skipped, as the
# original warning branch intended but never reached.
def collect_step_input_envs_from_string(str)
  step_envs = []
  str.each_line { |line|
    next if line.strip.empty?
    splits = line.split("=")
    env_key = splits.shift # first item is the key
    env_value = splits.join("=") # rest is the value
    if env_key && !(env_key = env_key.strip).empty? && !env_value.empty?
      step_envs << {
        key: env_key,
        value: env_value.chomp
      }
    else
      print_warning("Possibly invalid input (will be skipped), key or value missing: #{line}")
    end
  }
  return step_envs
end
# Expand shell-style variable references ($NAME and ${NAME}) in +str+
# using the current ENV. Unknown variables expand to the empty string.
#
# BUG FIX: the braced alternative used the greedy pattern (.+), so a
# string with two references ("${A}-${B}") captured "A}-${B" as a
# single name; [^}]+ stops at the first closing brace.
def expand_env_vars_in_string(str)
  return str.gsub(/\$([a-zA-Z_]+[a-zA-Z0-9_]*)|\$\{([^}]+)\}/) { ENV[$1 || $2] }
end
# --- Main: clone, inspect, run, clean up --------------------------------
is_failed = false
step_base_dir = nil
begin
  unless system(%Q{git clone -b "#{git_checkout_option}" "#{options[:step_repo]}" ./stepdir})
    raise "Failed to clone the Step Repository"
  end
  step_base_dir = Pathname.new('./stepdir').realpath.to_s
  puts " (debug) step_base_dir: #{step_base_dir}"
  $summary_info[:is_clone_ok] = true
  # Record which conventional Step files are present.
  $summary_info[:is_step_sh_file_found] = true if File.file?(File.join(step_base_dir, "step.sh"))
  $summary_info[:is_step_yml_file_found] = true if File.file?(File.join(step_base_dir, "step.yml"))
  $summary_info[:is_license_file_found] = true if File.file?(File.join(step_base_dir, "LICENSE"))
  $summary_info[:is_readme_file_found] = true if File.file?(File.join(step_base_dir, "README.md"))
  unless print_formatted_summary()
    raise "A required Step file is missing!"
  end
  puts_section_to_formatted_output("# Running the Step", true)
  step_envs = collect_step_input_envs_from_string(step_args_content)
  puts " (debug) step_envs: #{step_envs}"
  # Clean up our own ENV so the Step doesn't see tester internals.
  ENV.delete('STEP_TESTER_STEP_REPO')
  ENV.delete('STEP_TESTER_STEP_VERSION_TAG')
  ENV.delete('STEP_TESTER_STEP_TEST_BRANCH')
  ENV.delete('__INPUT_FILE__')
  # Export the Step's declared inputs, expanding $VAR / ${VAR}
  # references against the current ENV first.
  if step_envs.length > 0
    puts_section_to_formatted_output("## Envs:", true)
    step_envs.each { |an_env|
      an_original_value = an_env[:value]
      an_expanded_value = expand_env_vars_in_string(an_original_value)
      ENV[an_env[:key]] = an_expanded_value
      puts_string_to_formatted_output("* `#{an_env[:key]}` : `#{ENV[an_env[:key]]}`", true)
      puts_string_to_formatted_output(" * (not expanded / original input: `#{an_original_value}`)", true)
    }
  end
  puts_section_to_formatted_output("---------------------------------------", true)
  unless system(%Q{cd stepdir && bash step.sh})
    raise "Step Failed"
  end
rescue => ex
  print_error("#{ex}")
  is_failed = true
ensure
  # BUG FIX: guard against step_base_dir being nil when the clone
  # itself failed (the original ran `rm -rf ""`).
  system(%Q{rm -rf "#{step_base_dir}"}) if step_base_dir
end
exit(is_failed ? 1 : 0)
|
require 'rubygems'
require 'aws/s3'
require 'optparse'
# Upload +path+ to S3 under +remote_root+ when the remote copy is
# missing or strictly older than the local file. Honors
# options[:dry_run] by only printing what would be synced.
def sync_file(path, remote_root, bucket, options)
  remote_path = "#{remote_root}/#{path}"
  unless AWS::S3::S3Object.exists? remote_path, bucket.name
    puts "file #{path} does not exist on remote server - syncing"
    do_sync(remote_path, path, bucket) unless options[:dry_run]
    return
  end
  remote_file = AWS::S3::S3Object.find(remote_path, bucket.name)
  remote_mtime = DateTime.strptime(remote_file.about["last-modified"], '%a, %d %b %Y %X %Z')
  local_mtime = DateTime.parse(File.mtime(path).to_s)
  return unless local_mtime > remote_mtime
  puts "local file #{path} is newer - syncing"
  do_sync(remote_path, path, bucket) unless options[:dry_run]
end
# Recursively walk +dir+, syncing every regular file via sync_file.
# Paths stay relative (a bare filename when dir is "."), so the S3 key
# mirrors the local layout under +dest_dir+.
def sync_dir(dir, dest_dir, bucket, options)
  Dir.foreach(dir) do |entry|
    next if ['.', '..'].include?(entry)
    path = dir == '.' ? entry : "#{dir}/#{entry}"
    puts "Checking path #{path}"
    unless File.directory?(path)
      sync_file(path, dest_dir, bucket, options)
      next
    end
    puts "file #{path} is a directory - entering directory"
    sync_dir(path, dest_dir, bucket, options)
  end
end
# Upload the local file at +path+ to +remote_path+ in +bucket+.
#
# FIX: File.open with a block so the handle is closed after the upload;
# the original Kernel#open leaked the descriptor and would even spawn a
# subprocess for a path starting with "|".
def do_sync(remote_path, path, bucket)
  File.open(path) do |file|
    AWS::S3::S3Object.store(remote_path, file, bucket.name)
  end
end
# --- CLI ----------------------------------------------------------------
options = {}
optparse = OptionParser.new do |opts|
  opts.banner = "Usage: sync.rb [options] source dest_dir"
  options[:dry_run] = false
  opts.on( '-n', '--dry-run', 'Dry run' ) do
    options[:dry_run] = true
  end
end
optparse.parse!
if ARGV.length != 2
  puts optparse.banner
  exit
end
source, dest_dir = ARGV
puts "source: #{source}"
puts "dest_dir: #{dest_dir}"
puts "DRY RUN" if options[:dry_run]
# SECURITY FIX: AWS credentials were hard-coded in this file (and are
# therefore burned — rotate them!). Read them from the environment.
AWS::S3::Base.establish_connection!(
  :access_key_id => ENV.fetch('AWS_ACCESS_KEY_ID'),
  :secret_access_key => ENV.fetch('AWS_SECRET_ACCESS_KEY')
)
bucket = AWS::S3::Bucket.find('kevinthorley.com')
if File.directory?(source)
  sync_dir(source, dest_dir, bucket, options)
else
  sync_file(source, dest_dir, bucket, options)
end
Add verbose option
require 'rubygems'
require 'aws/s3'
require 'optparse'
# Upload +path+ to S3 under +remote_root+ when the remote copy is
# missing or strictly older than the local file. Honors
# options[:dry_run] by only printing what would be synced.
def sync_file(path, remote_root, bucket, options)
  remote_path = "#{remote_root}/#{path}"
  unless AWS::S3::S3Object.exists? remote_path, bucket.name
    puts "file #{path} does not exist on remote server - syncing"
    do_sync(remote_path, path, bucket) unless options[:dry_run]
    return
  end
  remote_file = AWS::S3::S3Object.find(remote_path, bucket.name)
  remote_mtime = DateTime.strptime(remote_file.about["last-modified"], '%a, %d %b %Y %X %Z')
  local_mtime = DateTime.parse(File.mtime(path).to_s)
  return unless local_mtime > remote_mtime
  puts "local file #{path} is newer - syncing"
  do_sync(remote_path, path, bucket) unless options[:dry_run]
end
# Recursively walk +dir+, syncing every regular file via sync_file.
# Progress messages are printed only with options[:verbose].
def sync_dir(dir, dest_dir, bucket, options)
  Dir.foreach(dir) do |entry|
    next if ['.', '..'].include?(entry)
    path = dir == '.' ? entry : "#{dir}/#{entry}"
    puts "Checking path #{path}" if options[:verbose]
    unless File.directory?(path)
      sync_file(path, dest_dir, bucket, options)
      next
    end
    puts "file #{path} is a directory - entering directory" if options[:verbose]
    sync_dir(path, dest_dir, bucket, options)
  end
end
# Upload the local file at +path+ to +remote_path+ in +bucket+.
#
# FIX: File.open with a block so the handle is closed after the upload;
# the original Kernel#open leaked the descriptor and would even spawn a
# subprocess for a path starting with "|".
def do_sync(remote_path, path, bucket)
  File.open(path) do |file|
    AWS::S3::S3Object.store(remote_path, file, bucket.name)
  end
end
# --- CLI ----------------------------------------------------------------
options = {}
optparse = OptionParser.new do |opts|
  opts.banner = "Usage: sync.rb [options] source dest_dir"
  options[:dry_run] = false
  opts.on( '-n', '--dry-run', 'Dry run' ) do
    options[:dry_run] = true
  end
  options[:verbose] = false
  opts.on( '-v', '--verbose', 'Verbose' ) do
    options[:verbose] = true
  end
end
optparse.parse!
if ARGV.length != 2
  puts optparse.banner
  exit
end
source, dest_dir = ARGV
puts "source: #{source}" if options[:verbose]
puts "dest_dir: #{dest_dir}" if options[:verbose]
puts "DRY RUN" if options[:dry_run]
# SECURITY FIX: AWS credentials were hard-coded in this file (and are
# therefore burned — rotate them!). Read them from the environment.
AWS::S3::Base.establish_connection!(
  :access_key_id => ENV.fetch('AWS_ACCESS_KEY_ID'),
  :secret_access_key => ENV.fetch('AWS_SECRET_ACCESS_KEY')
)
bucket = AWS::S3::Bucket.find('kevinthorley.com')
if File.directory?(source)
  sync_dir(source, dest_dir, bucket, options)
else
  sync_file(source, dest_dir, bucket, options)
end
|
module Jekyll
# A generated page listing every post carrying a given tag, rendered
# with the _layouts/tag.html layout at /tags/<tag>/index.html.
class TagPage < Page
def initialize(site, base, dir, tag)
@site = site
@base = base
@dir = dir
@name = 'index.html'
# process/read_yaml must run before data is filled in: read_yaml
# (re)initializes self.data from the layout's front matter.
self.process(name)
self.read_yaml(File.join(base, '_layouts'), 'tag.html')
self.data['title'] = "Tag:#{tag}"
self.data['posts'] = site.tags[tag]
# Japanese: "Posts tagged '<tag>'".
self.data['title_detail'] = 'タグ「' + tag + '」' + 'がつけられた記事'
end
end
# Registers one TagPage per tag under tags/<tag>/.
class TagPageGenerator < Generator
  safe true
  def generate(site)
    site.tags.keys.each do |tag_name|
      page = TagPage.new(site, site.source, File.join('tags', tag_name), tag_name)
      site.pages << page
    end
  end
end
# Liquid tag {% tag_cloud %}: renders a <ul> of all tags, most-used
# first, each entry linking to its tag page with a post count.
class TagCloud < Liquid::Tag
  def initialize(tag_name, text, tokens)
    super
  end
  def render(context)
    site = context.registers[:site]
    counted = site.tags.map { |name, pages| { :title => name, :count => pages.count } }
    counted = counted.sort_by { |entry| -entry[:count] }
    items = counted.map do |entry|
      "<li><a href='#{site.baseurl}/tags/#{entry[:title]}/index.html'>#{entry[:title]} (#{entry[:count]})</a></li>"
    end
    "<ul>" + items.join + "</ul>"
  end
end
# A generated page listing all tags, rendered with
# _layouts/tag_list.html.
class TagCloudPage < Page
def initialize(site, base, dir)
@site = site
@base = base
@dir = dir
@name = 'index.html'
# read_yaml initializes self.data, so it must precede the data writes.
self.process(name)
self.read_yaml(File.join(base, '_layouts'), 'tag_list.html')
# Japanese: "Tag list".
self.data['title'] = "タグ一覧"
self.data['posts'] = site.documents
end
end
# Adds the single tag-index page at /tag_list/.
class TagCloudPageGenerator < Generator
  safe true
  def generate(site)
    site.pages.push(TagCloudPage.new(site, site.source, 'tag_list'))
  end
end
end
# Make {% tag_cloud %} available to Liquid templates.
Liquid::Template.register_tag('tag_cloud', Jekyll::TagCloud)
encodingを追加。
# encoding: utf-8
#
module Jekyll
# A generated page listing every post carrying a given tag, rendered
# with the _layouts/tag.html layout at /tags/<tag>/index.html.
class TagPage < Page
def initialize(site, base, dir, tag)
@site = site
@base = base
@dir = dir
@name = 'index.html'
# process/read_yaml must run before data is filled in: read_yaml
# (re)initializes self.data from the layout's front matter.
self.process(name)
self.read_yaml(File.join(base, '_layouts'), 'tag.html')
self.data['title'] = "Tag:#{tag}"
self.data['posts'] = site.tags[tag]
# Japanese: "Posts tagged '<tag>'".
self.data['title_detail'] = 'タグ「' + tag + '」' + 'がつけられた記事'
end
end
# Registers one TagPage per tag under tags/<tag>/.
class TagPageGenerator < Generator
  safe true
  def generate(site)
    site.tags.keys.each do |tag_name|
      page = TagPage.new(site, site.source, File.join('tags', tag_name), tag_name)
      site.pages << page
    end
  end
end
# Liquid tag {% tag_cloud %}: renders a <ul> of all tags, most-used
# first, each entry linking to its tag page with a post count.
class TagCloud < Liquid::Tag
  def initialize(tag_name, text, tokens)
    super
  end
  def render(context)
    site = context.registers[:site]
    counted = site.tags.map { |name, pages| { :title => name, :count => pages.count } }
    counted = counted.sort_by { |entry| -entry[:count] }
    items = counted.map do |entry|
      "<li><a href='#{site.baseurl}/tags/#{entry[:title]}/index.html'>#{entry[:title]} (#{entry[:count]})</a></li>"
    end
    "<ul>" + items.join + "</ul>"
  end
end
# A generated page listing all tags, rendered with
# _layouts/tag_list.html.
class TagCloudPage < Page
def initialize(site, base, dir)
@site = site
@base = base
@dir = dir
@name = 'index.html'
# read_yaml initializes self.data, so it must precede the data writes.
self.process(name)
self.read_yaml(File.join(base, '_layouts'), 'tag_list.html')
# Japanese: "Tag list".
self.data['title'] = "タグ一覧"
self.data['posts'] = site.documents
end
end
# Adds the single tag-index page at /tag_list/.
class TagCloudPageGenerator < Generator
  safe true
  def generate(site)
    site.pages.push(TagCloudPage.new(site, site.source, 'tag_list'))
  end
end
end
# Make {% tag_cloud %} available to Liquid templates.
Liquid::Template.register_tag('tag_cloud', Jekyll::TagCloud)
|
load 'packet.rb'
# ---Globals, cause they're cool---
#queue for the window
#technically it's an array because you can't
#access queue elements using array.[num]
$window = Array.new
#socket for sending/receiving
$socket = UDPSocket.new
# $sIn = UDPSocket.new
# #socket for sending
# $sOut = UDPSocket.new
# NOTE(review): the bare references below do not declare anything —
# they just evaluate to nil. The globals are actually set in setup().
$windowSize
$port
$networkIP
#IP address of the other client
$clientIP
#generate the initial window
# Fill $window with +windowSize+ DATA packets (type 1) starting at
# sequence number +initNum+; return the next unused sequence number.
#
# BUG FIX: the original returned seqNum + 1, skipping one sequence
# number — after the loop, seqNum already is the next one to use.
def genWindow(initNum, windowSize, destIP)
  seqNum = initNum
  windowSize.to_i.times do
    $window.push(makePacket(destIP, 1, seqNum, 0))
    seqNum += 1
  end
  seqNum
end
#sends our entire current window
# Transmit every packet currently in $window, then wait for their ACKs
# via tx2. Returns tx2's result (number of packets acknowledged).
#
# BUG FIX: the original while loop never incremented its counter, so
# it resent $window[0] forever.
def tx1(socket, port, destIP, networkIP, currentSequenceNum, numPackets, windowSize)
  $window.each do |packet|
    sendPacket(socket, port, packet, networkIP)
  end
  tx2(windowSize, destIP, currentSequenceNum)
end
# Wait for up to one window's worth of in-order ACKs; for each expected
# ACK received, slide the window (drop the acked packet, append a fresh
# one with the next sequence number). Returns the number of packets
# acknowledged.
#
# BUG FIXES vs. the original: the loop counter is now advanced (it
# looped forever), new packets are pushed onto $window (not onto the
# integer global $windowSize), packets are read from $socket (the
# $recv global was never defined), the undefined `window` reference is
# gone, and the function now returns the acked count its callers rely
# on.
def tx2(windowSize, destIP, currentSequenceNum)
  acked = 0
  rounds = [windowSize.to_i, $window.size].min
  rounds.times do
    # We expect the ACK for the seqNum at the front of the queue.
    expectedAck = $window[0].seqNum
    packet = getPacket($socket)[0]
    # type 0 == ACK; only an in-order ACK slides the window.
    if packet.type == 0 && packet.ackNum == expectedAck
      $window.shift
      newPacket = makePacket(destIP, 1, currentSequenceNum, 0)
      puts "Pushing packet num #{currentSequenceNum} to the queue"
      currentSequenceNum += 1
      $window.push(newPacket)
      acked += 1
    end
  end
  acked
end
# Sliding-window send driver: build the initial window, keep calling
# tx1 until numPackets are acknowledged, then send an EOT packet
# (type 2).
# NOTE(review): tx1's per-call result is *assigned* to packetsSent,
# not accumulated, so this only terminates when a single tx1 call
# acks numPackets — confirm the intended semantics.
def transmit(socket, numPackets, windowSize, destIP, networkIP, port)
#numer of packets sent and successfully ack'd
packetsSent = 0
#used to generate new packets to put into window
#could be random if we want
initialSequenceNum = 0
currentSequenceNum = genWindow(initialSequenceNum, windowSize, destIP)
while ((packetsSent = tx1(socket, port, destIP, networkIP, currentSequenceNum, numPackets, windowSize)) < numPackets)
puts "ok"
end
#send eot
sendPacket(socket, port, makePacket(destIP, 2, 0, 0), networkIP)
end
#frame recv'd
# def rx1
# end
#check frame valid, send ACK
# def rx2
# end
# Receive loop: read packets until an EOT (type 2) arrives, ACKing
# each data packet's sequence number back to the sender.
#
# BUG FIXES vs. the original: the EOT test used assignment
# (packet.type = 2) instead of comparison, so the loop ended after the
# first packet; and packets are now read from the +socket+ parameter
# instead of the undefined $recv global.
def receive(recvIP, networkIP, socket, port)
  loop do
    packet = getPacket(socket)
    break if packet.type == 2
    # ACK (type 0) the received sequence number.
    sendPacket(socket, port, makePacket(recvIP, 0, 0, packet.seqNum), networkIP)
  end
  puts "EOT received, ending receive function"
end
# Interactively populate the globals ($windowSize, $port, $networkIP,
# $clientIP) from stdin, then bind the shared UDP socket locally and
# connect it to the network IP — the same port is used for both ends.
def setup
puts "Setup, please configure the application accordingly."
puts "Enter the window size:"
$windowSize = gets.chomp.to_i
puts "Enter a port:"
$port = gets.chomp.to_i
# puts "Enter the outgoing port #:"
# $portOut = gets.chomp.to_i
# puts "Enter the incomming port #:"
# $portIn = gets.chomp.to_i
puts "Please enter network IP:"
$networkIP = gets.chomp
puts "Please enter the client IP:"
$clientIP = gets.chomp
$socket.bind('', $port)
$socket.connect($networkIP, $port)
end
# --- Main loop ----------------------------------------------------------
# Repeatedly ask whether to act as sender (1) or receiver (2). All
# configuration lives in the globals populated by setup.
#
# BUG FIXES vs. the original: it read locals that were never defined
# (winSize, ip, networkIP, port, socket, recvIP, $recv) and compared
# the String from gets against the window size; it now uses the
# globals from setup and converts the count to Integer.
setup
run = 1
while run == 1
  puts "Enter program state, 1 for SEND or 2 for RECEIVE:"
  state = gets.chomp
  if state.to_i == 1
    num = 0
    # Require at least a full window's worth of packets.
    loop do
      puts "Enter the number of packets you want to send"
      num = gets.chomp.to_i
      break if num >= $windowSize
    end
    transmit($socket, num, $windowSize, $clientIP, $networkIP, $port)
  elsif state.to_i == 2
    receive($clientIP, $networkIP, $socket, $port)
  else
    next
  end
end
puts "done"
more typos
load 'packet.rb'
# ---Globals, cause they're cool---
#queue for the window
#technically it's an array because you can't
#access queue elements using array.[num]
$window = Array.new
#socket for sending/receiving
$socket = UDPSocket.new
# $sIn = UDPSocket.new
# #socket for sending
# $sOut = UDPSocket.new
# NOTE(review): the bare references below do not declare anything —
# they just evaluate to nil. The globals are actually set in setup().
$windowSize
$port
$networkIP
#IP address of the other client
$clientIP
#generate the initial window
# Fill $window with +windowSize+ DATA packets (type 1) starting at
# sequence number +initNum+; return the next unused sequence number.
#
# BUG FIX: the original returned seqNum + 1, skipping one sequence
# number — after the loop, seqNum already is the next one to use.
def genWindow(initNum, windowSize, destIP)
  seqNum = initNum
  windowSize.to_i.times do
    $window.push(makePacket(destIP, 1, seqNum, 0))
    seqNum += 1
  end
  seqNum
end
#sends our entire current window
# Transmit every packet currently in $window, then wait for their ACKs
# via tx2. Returns tx2's result (number of packets acknowledged).
#
# BUG FIX: the original while loop never incremented its counter, so
# it resent $window[0] forever.
def tx1(socket, port, destIP, networkIP, currentSequenceNum, numPackets, windowSize)
  $window.each do |packet|
    sendPacket(socket, port, packet, networkIP)
  end
  tx2(windowSize, destIP, currentSequenceNum)
end
# Wait for up to one window's worth of in-order ACKs; for each expected
# ACK received, slide the window (drop the acked packet, append a fresh
# one with the next sequence number). Returns the number of packets
# acknowledged.
#
# BUG FIXES vs. the original: the loop counter is now advanced (it
# looped forever), new packets are pushed onto $window (not onto the
# integer global $windowSize), packets are read from $socket (the
# $recv global was never defined), the undefined `window` reference is
# gone, and the function now returns the acked count its callers rely
# on.
def tx2(windowSize, destIP, currentSequenceNum)
  acked = 0
  rounds = [windowSize.to_i, $window.size].min
  rounds.times do
    # We expect the ACK for the seqNum at the front of the queue.
    expectedAck = $window[0].seqNum
    packet = getPacket($socket)[0]
    # type 0 == ACK; only an in-order ACK slides the window.
    if packet.type == 0 && packet.ackNum == expectedAck
      $window.shift
      newPacket = makePacket(destIP, 1, currentSequenceNum, 0)
      puts "Pushing packet num #{currentSequenceNum} to the queue"
      currentSequenceNum += 1
      $window.push(newPacket)
      acked += 1
    end
  end
  acked
end
# Sliding-window send driver: build the initial window, keep calling
# tx1 until numPackets are acknowledged, then send an EOT packet
# (type 2).
# NOTE(review): tx1's per-call result is *assigned* to packetsSent,
# not accumulated, so this only terminates when a single tx1 call
# acks numPackets — confirm the intended semantics.
def transmit(socket, numPackets, windowSize, destIP, networkIP, port)
#numer of packets sent and successfully ack'd
packetsSent = 0
#used to generate new packets to put into window
#could be random if we want
initialSequenceNum = 0
currentSequenceNum = genWindow(initialSequenceNum, windowSize, destIP)
while ((packetsSent = tx1(socket, port, destIP, networkIP, currentSequenceNum, numPackets, windowSize)) < numPackets)
puts "ok"
end
#send eot
sendPacket(socket, port, makePacket(destIP, 2, 0, 0), networkIP)
end
#frame recv'd
# def rx1
# end
#check frame valid, send ACK
# def rx2
# end
# Receive loop: read packets until an EOT (type 2) arrives, ACKing
# each data packet's sequence number back to the sender.
#
# BUG FIXES vs. the original: the EOT test used assignment
# (packet.type = 2) instead of comparison, so the loop ended after the
# first packet; and packets are now read from the +socket+ parameter
# instead of the undefined $recv global.
def receive(recvIP, networkIP, socket, port)
  loop do
    packet = getPacket(socket)
    break if packet.type == 2
    # ACK (type 0) the received sequence number.
    sendPacket(socket, port, makePacket(recvIP, 0, 0, packet.seqNum), networkIP)
  end
  puts "EOT received, ending receive function"
end
# Interactively populate the globals ($windowSize, $port, $networkIP,
# $clientIP) from stdin, then bind the shared UDP socket locally and
# connect it to the network IP — the same port is used for both ends.
def setup
puts "Setup, please configure the application accordingly."
puts "Enter the window size:"
$windowSize = gets.chomp.to_i
puts "Enter a port:"
$port = gets.chomp.to_i
# puts "Enter the outgoing port #:"
# $portOut = gets.chomp.to_i
# puts "Enter the incomming port #:"
# $portIn = gets.chomp.to_i
puts "Please enter network IP:"
$networkIP = gets.chomp
puts "Please enter the client IP:"
$clientIP = gets.chomp
$socket.bind('', $port)
$socket.connect($networkIP, $port)
end
# --- Main loop ----------------------------------------------------------
# Repeatedly ask whether to act as sender (1) or receiver (2). All
# configuration lives in the globals populated by setup.
#
# BUG FIXES vs. the original: it read locals that were never defined
# (winSize, ip, networkIP, port, socket), compared the String from
# gets against the window size, and made a stray zero-argument
# receive() call (ArgumentError) before the real one. It now uses the
# globals from setup and converts the count to Integer.
setup
run = 1
while run == 1
  puts "Enter program state, 1 for SEND or 2 for RECEIVE:"
  state = gets.chomp
  if state.to_i == 1
    num = 0
    # Require at least a full window's worth of packets.
    loop do
      puts "Enter the number of packets you want to send"
      num = gets.chomp.to_i
      break if num >= $windowSize
    end
    transmit($socket, num, $windowSize, $clientIP, $networkIP, $port)
  elsif state.to_i == 2
    receive($clientIP, $networkIP, $socket, $port)
  else
    next
  end
end
puts "done"
|
require 'rubygems'
require 'nokogiri'
require 'open-uri'
# Fetch the Yellow Pages search results for gyms in Vancouver and print
# the page <title>.
# FIX: removed an accidental duplicate assignment of url.
url = "http://www.yellowpages.ca/search/si/1/gym/Vancouver%2C%20BC"
doc = Nokogiri::HTML(open(url))
puts doc.at_css("title").text
Scraping Test Done
require 'rubygems'
require 'nokogiri'
require 'open-uri'
# Fetch the Yellow Pages search results for gyms in Vancouver and print
# the page <title>.
url = "http://www.yellowpages.ca/search/si/1/gym/Vancouver%2C%20BC"
doc = Nokogiri::HTML(open(url))
puts doc.at_css("title").text
require 'rubygems'
require 'lib/ur-product'
# Smoke test for the UR product search client: one result per page for
# the query 'Antarktis', filtered to TV programmes.
results = UR::Product.search({
:queries => 'Antarktis',
:filters => { :search_product_type => 'programtv' },
:page => 1,
:per_page => 1
})
if results.ok?
# Output labels are Swedish: search params, titles, prev/next page,
# hit count, then facets and details of the first product.
puts "Sökning: #{results.solr.params.inspect}\n\n"
results.products.map { |p| puts p.title }
puts "\nFöregående sida: #{results.previous_page}"
puts "Nästa sida: #{results.next_page}"
puts "\nAntal träffar: #{results.num_found}\n"
results.facets.each_pair do |name, items|
puts "\nFacett: #{name}"
items.each do |item|
puts " - #{item.value} => #{item.hits}"
end
end
p = results.products.first
puts "\n\nFörsta produkten: #{p.title} (#{p.ur_product_id})"
puts " -> Distribution: #{p.distribution_events.map { |e| e.platform }.join(', ')}" if p.has_distribution_events?
puts " -> Lagring: #{p.storages.map { |s| s.storage_format }.join(', ')}" if p.has_storages?
puts " -> Andra stora bilden: #{p.image_url(2, '_l')}" if p.has_image?(2, '_l')
end
Testing documents, url and related_product_ids
require 'rubygems'
require 'lib/ur-product'
# Smoke test for the UR product search client: one result per page for
# the query 'lärarhandledning', filtered to package series.
results = UR::Product.search({
:queries => 'lärarhandledning',
:filters => { :search_product_type => 'packageseries' },
:page => 1,
:per_page => 1
})
if results.ok?
# Output labels are Swedish: search params, titles, prev/next page,
# hit count, then facets and details of the first product.
puts "Sökning: #{results.solr.params.inspect}\n\n"
results.products.map { |p| puts p.title }
puts "\nFöregående sida: #{results.previous_page}"
puts "Nästa sida: #{results.next_page}"
puts "\nAntal träffar: #{results.num_found}\n"
results.facets.each_pair do |name, items|
puts "\nFacett: #{name}"
items.each do |item|
puts " - #{item.value} => #{item.hits}"
end
end
p = results.products.first
puts "\n\nFörsta produkten: #{p.title} (#{p.ur_product_id})"
puts " -> Distribution: #{p.distribution_events.map { |e| e.platform }.join(', ')}" if p.has_distribution_events?
puts " -> Lagring: #{p.storages.map { |s| s.storage_format }.join(', ')}" if p.has_storages?
puts " -> Andra stora bilden: #{p.image_url(2, '_l')}" if p.has_image?(2, '_l')
# Spot-check specific products: document storage location, canonical
# URLs and related product ids (ids are fixtures from the live index).
p = UR::Product.find(106485)
if p.has_documents?
puts p.documents.first.storages.first.location
end
p = UR::Product.find(143664)
puts p.url
p = UR::Product.find(140502)
puts p.url
puts p.related_product_ids
end
|
# Placeholder assignment used to exercise branch workflows.
test1 = true
test branches
# Placeholder assignment used to exercise branch workflows.
test1 = true
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.