repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/formatter.rb | lib/simplecov/formatter.rb | # frozen_string_literal: true
module SimpleCov
  # Namespace under which all report formatters (built-in and third-party)
  # are expected to live. A formatter only needs to respond to #format(result).
  # TODO: Documentation on how to build your own formatters
  module Formatter
  end
end
require_relative "formatter/simple_formatter"
require_relative "formatter/multi_formatter"
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/defaults.rb | lib/simplecov/defaults.rb | # frozen_string_literal: true
# Load default formatter gem
require "pathname"
require_relative "default_formatter"
require_relative "profiles/root_filter"
require_relative "profiles/test_frameworks"
require_relative "profiles/bundler_filter"
require_relative "profiles/hidden_filter"
require_relative "profiles/rails"
# Default configuration
# Default configuration applied before any user-supplied .simplecov is loaded.
SimpleCov.configure do
  # Wrap whatever formatters the environment selects in a MultiFormatter so
  # several reports can be produced from a single run.
  formatter SimpleCov::Formatter::MultiFormatter.new(
    SimpleCov::Formatter.from_env(ENV)
  )
  load_profile "bundler_filter"
  load_profile "hidden_filter"
  # Exclude files outside of SimpleCov.root
  load_profile "root_filter"
end
# Gotta stash this a-s-a-p, see the CommandGuesser class and i.e. #110 for further info
# ($PROGRAM_NAME / ARGV can be rewritten by test frameworks later in the run.)
SimpleCov::CommandGuesser.original_run_command = "#{$PROGRAM_NAME} #{ARGV.join(' ')}"

at_exit do
  # When an external at_exit hook manages result processing, do nothing here.
  next if SimpleCov.external_at_exit?

  SimpleCov.at_exit_behavior
end
# Autoload config from ~/.simplecov if present
require_relative "load_global_config"
# Autoload config from .simplecov if present
# Recurse upwards until we find .simplecov or reach the root directory
# Walk upwards from SimpleCov.root until a .simplecov file is found or the
# filesystem root is reached, and evaluate the first one encountered.
config_path = Pathname.new(SimpleCov.root)
loop do
  filename = config_path.join(".simplecov")
  if filename.exist?
    begin
      load filename
    rescue LoadError, StandardError
      # Bug fix: the message previously contained the literal text
      # "#(unknown)"; interpolate the path of the config file that failed.
      warn "Warning: Error occurred while trying to load #{filename}. " \
           "Error message: #{$!.message}"
    end
    break
  end
  # Pathname#split returns [dirname, basename]; keep only the parent dir.
  config_path, = config_path.split
  break if config_path.root?
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/load_global_config.rb | lib/simplecov/load_global_config.rb | # frozen_string_literal: true
require "etc"

# Resolve the current user's home directory, trying in order:
#   1. ~ expansion when $HOME is set,
#   2. the passwd entry for the current uid,
#   3. ~<user> expansion when $USER is set.
home_dir = (ENV.fetch("HOME", nil) && File.expand_path("~")) || Etc.getpwuid.dir || (ENV.fetch("USER", nil) && File.expand_path("~#{ENV.fetch('USER', nil)}"))

if home_dir
  global_config_path = File.join(home_dir, ".simplecov")
  # Evaluate the user's global SimpleCov config in this process if present.
  load global_config_path if File.exist?(global_config_path)
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/coverage_statistics.rb | lib/simplecov/coverage_statistics.rb | # frozen_string_literal: true
module SimpleCov
  # Holds the individual data of a coverage result.
  #
  # Uniform across coverage criteria (lines, branches, ...):
  #
  # * total    - number of coverable things (relevant loc/branches)
  # * covered  - how many coverables were hit
  # * missed   - how many coverables were not hit
  # * percent  - covered as a percentage of total
  # * strength - average hits per coverable (0.0 for one-shot lines format)
  class CoverageStatistics
    attr_reader :total, :covered, :missed, :strength, :percent

    # Aggregates a collection of per-file statistics into one object.
    def self.from(coverage_statistics)
      sum_covered = 0
      sum_missed = 0
      sum_strength = 0.0
      coverage_statistics.each do |file_stats|
        sum_covered += file_stats.covered
        sum_missed += file_stats.missed
        # Re-weight strength by loc: files differ in size, so their strength
        # must be multiplied back by their total before averaging globally.
        sum_strength += file_stats.strength * file_stats.total
      end
      new(covered: sum_covered, missed: sum_missed, total_strength: sum_strength)
    end

    # Only covered, missed and (optionally) total_strength are supplied;
    # total, percent and strength are derived here.
    def initialize(covered:, missed:, total_strength: 0.0)
      @covered  = covered
      @missed   = missed
      @total    = covered + missed
      @percent  = compute_percent(covered, missed, total)
      @strength = compute_strength(total_strength, total)
    end

    private

    # 100.0 when nothing was missed (including the empty case).
    def compute_percent(covered, missed, total)
      missed.zero? ? 100.0 : covered * 100.0 / total
    end

    # Average hits per coverable; 0.0 when there is nothing to cover.
    def compute_strength(total_strength, total)
      total.zero? ? 0.0 : total_strength.to_f / total
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/lines_classifier.rb | lib/simplecov/lines_classifier.rb | # frozen_string_literal: true
module SimpleCov
  # Classifies whether lines are relevant for code coverage analysis.
  # Comments & whitespace lines, and :nocov: token blocks, are considered not relevant.
  class LinesClassifier
    RELEVANT = 0
    NOT_RELEVANT = nil

    WHITESPACE_LINE = /^\s*$/.freeze
    COMMENT_LINE = /^\s*#/.freeze
    WHITESPACE_OR_COMMENT_LINE = Regexp.union(WHITESPACE_LINE, COMMENT_LINE)

    # Regexp matching a nocov toggle comment; /o interpolates the
    # configured nocov token only once.
    def self.no_cov_line
      /^(\s*)#(\s*)(:#{SimpleCov.nocov_token}:)/o
    end

    def self.no_cov_line?(line)
      no_cov_line.match?(line)
    rescue ArgumentError
      # E.g., line contains an invalid byte sequence in UTF-8
      false
    end

    def self.whitespace_line?(line)
      WHITESPACE_OR_COMMENT_LINE.match?(line)
    rescue ArgumentError
      # E.g., line contains an invalid byte sequence in UTF-8
      false
    end

    # Maps each source line to RELEVANT / NOT_RELEVANT. A nocov comment
    # toggles a skipping region; lines inside it are never relevant.
    def classify(lines)
      inside_nocov = false
      lines.map do |current_line|
        if self.class.no_cov_line?(current_line)
          inside_nocov = !inside_nocov
          NOT_RELEVANT
        elsif inside_nocov || self.class.whitespace_line?(current_line)
          NOT_RELEVANT
        else
          RELEVANT
        end
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/exit_codes/minimum_coverage_by_file_check.rb | lib/simplecov/exit_codes/minimum_coverage_by_file_check.rb | # frozen_string_literal: true
module SimpleCov
  module ExitCodes
    # Fails the run when any single file's coverage for a criterion drops
    # below the configured per-file minimum.
    class MinimumCoverageByFileCheck
      def initialize(result, minimum_coverage_by_file)
        @result = result
        @minimum_coverage_by_file = minimum_coverage_by_file
      end

      def failing?
        !minimum_violations.empty?
      end

      # Prints one line per violating file/criterion to stderr.
      def report
        minimum_violations.each do |violation|
          $stderr.printf(
            "%<criterion>s coverage by file (%<covered>.2f%%) is below the expected minimum coverage (%<minimum_coverage>.2f%%).\n",
            covered: SimpleCov.round_coverage(violation.fetch(:actual)),
            minimum_coverage: violation.fetch(:minimum_expected),
            criterion: violation.fetch(:criterion).capitalize
          )
        end
      end

      def exit_code
        SimpleCov::ExitCodes::MINIMUM_COVERAGE
      end

      private

      attr_reader :result, :minimum_coverage_by_file

      # Memoized list of rows whose rounded actual coverage falls short.
      def minimum_violations
        @minimum_violations ||=
          compute_minimum_coverage_data.reject do |row|
            row.fetch(:actual) >= row.fetch(:minimum_expected)
          end
      end

      # One row per (criterion, file) pair with expected and actual values.
      def compute_minimum_coverage_data
        minimum_coverage_by_file.flat_map do |criterion, expected_percent|
          result.coverage_statistics_by_file.fetch(criterion).map do |file_stats|
            {
              criterion: criterion,
              minimum_expected: expected_percent,
              actual: SimpleCov.round_coverage(file_stats.percent)
            }
          end
        end
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/exit_codes/exit_code_handling.rb | lib/simplecov/exit_codes/exit_code_handling.rb | # frozen_string_literal: true
module SimpleCov
  module ExitCodes
    # Chooses the process exit code by running all configured coverage
    # checks; the first failing check reports and supplies the code.
    module ExitCodeHandling
      module_function

      def call(result, coverage_limits:)
        failing = coverage_checks(result, coverage_limits).find(&:failing?)
        return SimpleCov::ExitCodes::SUCCESS unless failing

        failing.report
        failing.exit_code
      end

      # The checks, in priority order (overall minimum wins over per-file,
      # which wins over drop detection).
      def coverage_checks(result, coverage_limits)
        [
          MinimumOverallCoverageCheck.new(result, coverage_limits.minimum_coverage),
          MinimumCoverageByFileCheck.new(result, coverage_limits.minimum_coverage_by_file),
          MaximumCoverageDropCheck.new(result, coverage_limits.maximum_coverage_drop)
        ]
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/exit_codes/minimum_overall_coverage_check.rb | lib/simplecov/exit_codes/minimum_overall_coverage_check.rb | # frozen_string_literal: true
module SimpleCov
  module ExitCodes
    # Fails the run when the overall coverage for any criterion is below
    # the configured minimum.
    class MinimumOverallCoverageCheck
      def initialize(result, minimum_coverage)
        @result = result
        @minimum_coverage = minimum_coverage
      end

      def failing?
        !minimum_violations.empty?
      end

      # Prints one line per violated criterion to stderr.
      def report
        minimum_violations.each do |violation|
          $stderr.printf(
            "%<criterion>s coverage (%<covered>.2f%%) is below the expected minimum coverage (%<minimum_coverage>.2f%%).\n",
            covered: SimpleCov.round_coverage(violation.fetch(:actual)),
            minimum_coverage: violation.fetch(:minimum_expected),
            criterion: violation.fetch(:criterion).capitalize
          )
        end
      end

      def exit_code
        SimpleCov::ExitCodes::MINIMUM_COVERAGE
      end

      private

      attr_reader :result, :minimum_coverage

      def minimum_violations
        @minimum_violations ||= calculate_minimum_violations
      end

      # Collects every criterion whose achieved percentage is under target.
      def calculate_minimum_violations
        minimum_coverage.each_with_object([]) do |(criterion, expected), violations|
          actual = result.coverage_statistics.fetch(criterion).percent
          next if actual >= expected

          violations << {criterion: criterion, minimum_expected: expected, actual: actual}
        end
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/exit_codes/maximum_coverage_drop_check.rb | lib/simplecov/exit_codes/maximum_coverage_drop_check.rb | # frozen_string_literal: true
module SimpleCov
  module ExitCodes
    # Fails the run when coverage for any criterion dropped by more than the
    # configured maximum compared to the persisted last run.
    class MaximumCoverageDropCheck
      def initialize(result, maximum_coverage_drop)
        @result = result
        @maximum_coverage_drop = maximum_coverage_drop
      end

      # Can only fail when a drop limit is configured AND a previous run
      # was recorded on disk.
      def failing?
        return false unless maximum_coverage_drop && last_run

        coverage_drop_violations.any?
      end

      # Prints one line per criterion whose drop exceeds the allowed maximum.
      def report
        coverage_drop_violations.each do |violation|
          $stderr.printf(
            "%<criterion>s coverage has dropped by %<drop_percent>.2f%% since the last time (maximum allowed: %<max_drop>.2f%%).\n",
            criterion: violation[:criterion].capitalize,
            drop_percent: SimpleCov.round_coverage(violation[:drop_percent]),
            max_drop: violation[:max_drop]
          )
        end
      end

      def exit_code
        SimpleCov::ExitCodes::MAXIMUM_COVERAGE_DROP
      end

      private

      attr_reader :result, :maximum_coverage_drop

      # Memoizes even a nil read (hence defined? instead of ||=).
      def last_run
        return @last_run if defined?(@last_run)

        @last_run = SimpleCov::LastRun.read
      end

      def coverage_drop_violations
        @coverage_drop_violations ||=
          compute_coverage_drop_data.select do |achieved|
            achieved.fetch(:max_drop) < achieved.fetch(:drop_percent)
          end
      end

      # One row per configured criterion with its allowed and actual drop.
      def compute_coverage_drop_data
        maximum_coverage_drop.map do |criterion, percent|
          {
            criterion: criterion,
            max_drop: percent,
            drop_percent: drop_percent(criterion)
          }
        end
      end

      # if anyone says "max_coverage_drop 0.000000000000000001" I appologize. Please don't.
      MAX_DROP_ACCURACY = 10

      # Difference between last recorded coverage and current (rounded)
      # coverage, floored to avoid false positives from float noise.
      def drop_percent(criterion)
        drop = last_coverage(criterion) -
               SimpleCov.round_coverage(
                 result.coverage_statistics.fetch(criterion).percent
               )

        # floats, I tell ya.
        # irb(main):001:0* 80.01 - 80.0
        # => 0.010000000000005116
        drop.floor(MAX_DROP_ACCURACY)
      end

      # Reads the criterion's percentage from the last run; falls back to
      # the legacy :covered_percent key for :line, and to 0 when absent.
      def last_coverage(criterion)
        last_coverage_percent = last_run[:result][criterion]
        # fallback for old file format
        last_coverage_percent = last_run[:result][:covered_percent] if !last_coverage_percent && criterion == :line

        last_coverage_percent || 0
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/combine/lines_combiner.rb | lib/simplecov/combine/lines_combiner.rb | # frozen_string_literal: true
module SimpleCov
  module Combine
    #
    # Combine two different lines coverage results on same file
    #
    # Should be called through `SimpleCov.combine`.
    module LinesCombiner
      module_function

      # Merges two per-line hit-count arrays. NOTE: reuses (and mutates)
      # the longer of the two inputs as the result buffer, matching the
      # original implementation's behavior.
      def combine(coverage_a, coverage_b)
        merged = coverage_a.size > coverage_b.size ? coverage_a : coverage_b
        merged.each_index do |idx|
          merged[idx] = merge_line_coverage(coverage_a[idx], coverage_b[idx])
        end
        merged
      end

      # Merge rules for a single line's two counts:
      #
      #   nil + 0   = nil   (never relevant beats merely-missed)
      #   nil + nil = nil
      #   int + int = int
      #
      # @param [Integer || nil] first_val
      # @param [Integer || nil] second_val
      # @return [Integer || nil]
      def merge_line_coverage(first_val, second_val)
        total = first_val.to_i + second_val.to_i
        return total unless total.zero?

        # A zero sum is only a real 0 when neither side was nil.
        first_val.nil? || second_val.nil? ? nil : total
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/combine/files_combiner.rb | lib/simplecov/combine/files_combiner.rb | # frozen_string_literal: true
module SimpleCov
  module Combine
    #
    # Handle combining two coverage results for same file
    #
    # Should be called through `SimpleCov.combine`.
    module FilesCombiner
      module_function

      #
      # Combines the results for 2 coverages of a file: always the "lines"
      # data, plus "branches" when branch coverage is enabled.
      #
      # @return [Hash]
      #
      def combine(coverage_a, coverage_b)
        result = {"lines" => Combine.combine(LinesCombiner, coverage_a["lines"], coverage_b["lines"])}
        if SimpleCov.branch_coverage?
          result["branches"] = Combine.combine(BranchesCombiner, coverage_a["branches"], coverage_b["branches"]) || {}
        end
        result
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/combine/branches_combiner.rb | lib/simplecov/combine/branches_combiner.rb | # frozen_string_literal: true
module SimpleCov
  module Combine
    #
    # Combine different branch coverage results on single file.
    #
    # Should be called through `SimpleCov.combine`.
    module BranchesCombiner
      module_function

      #
      # Merge two branch coverage hashes for the same file.
      #
      # The branch structures of both inputs are identical when present —
      # only the hit counts differ. Shape: a hash keyed by condition, whose
      # value is a hash of branch => hit count, e.g.:
      #   { [:if, 3, 8, 6, 8, 36] => {[:then, 4, 8, 6, 8, 12] => 1,
      #                               [:else, 5, 8, 6, 8, 36] => 2}, ... }
      # Counts for matching branches are summed; branches present in only
      # one input are carried over unchanged.
      #
      # @return [Hash]
      #
      def combine(coverage_a, coverage_b)
        coverage_a.merge(coverage_b) do |_condition, branches_a, branches_b|
          branches_a.merge(branches_b) { |_branch, count_a, count_b| count_a + count_b }
        end
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/combine/results_combiner.rb | lib/simplecov/combine/results_combiner.rb | # frozen_string_literal: true
module SimpleCov
  module Combine
    # There might be reports from different kinds of tests,
    # e.g. RSpec and Cucumber. We need to combine their results
    # into unified one. This class does that by delegating the per-file
    # merge down to FilesCombiner (and through it Lines/BranchesCombiner).
    module ResultsCombiner
      module_function

      #
      # Combine process explanation
      # => ResultCombiner: define all present files between results and start combine on file level.
      # ==> FileCombiner: collect result of next combine levels lines and branches.
      # ===> LinesCombiner: combine lines results.
      # ===> BranchesCombiner: combine branches results.
      #
      # @return [Hash]
      #
      def combine(*results)
        results.inject({}) do |accumulated, next_result|
          combine_result_sets(accumulated, next_result)
        end
      end

      #
      # Manage combining results on files level: take the union of file
      # names from both sets and merge each file's coverage.
      #
      # @param [Hash] combined_results
      # @param [Hash] result
      #
      # @return [Hash]
      #
      def combine_result_sets(combined_results, result)
        all_files = combined_results.keys | result.keys

        combination = {}
        all_files.each do |file_name|
          combination[file_name] = combine_file_coverage(
            combined_results[file_name],
            result[file_name]
          )
        end
        combination
      end

      #
      # Combine two files coverage results
      #
      # @param [Hash] coverage_a
      # @param [Hash] coverage_b
      #
      # @return [Hash]
      #
      def combine_file_coverage(coverage_a, coverage_b)
        Combine.combine(Combine::FilesCombiner, coverage_a, coverage_b)
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/formatter/simple_formatter.rb | lib/simplecov/formatter/simple_formatter.rb | # frozen_string_literal: true
module SimpleCov
  module Formatter
    #
    # A ridiculously simple formatter for SimpleCov results.
    #
    class SimpleFormatter
      # Renders a SimpleCov::Result as a plain-text string: one section per
      # group, listing each file with its coverage percentage (truncated to
      # two decimal places).
      def format(result)
        result.groups.each_with_object(+"") do |(group_name, files), report|
          report << "Group: #{group_name}\n"
          report << ("=" * 40)
          report << "\n"
          files.each do |file|
            report << "#{file.filename} (coverage: #{file.covered_percent.floor(2)}%)\n"
          end
          report << "\n"
        end
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/formatter/multi_formatter.rb | lib/simplecov/formatter/multi_formatter.rb | # frozen_string_literal: true
module SimpleCov
  module Formatter
    # Wraps several formatters so a single result can be rendered by all of
    # them. `.new` returns an anonymous Class (not an instance) whose
    # instances run every wrapped formatter.
    class MultiFormatter
      module InstanceMethods
        # Runs each wrapped formatter against the result. A formatter that
        # raises is logged via warn and contributes nil to the returned array.
        def format(result)
          formatters.map do |formatter|
            begin
              formatter.new.format(result)
            rescue StandardError => e
              warn("Formatter #{formatter} failed with #{e.class}: #{e.message} (#{e.backtrace.first})")
              nil
            end
          end
        end
      end

      # Builds a formatter class closing over the given formatter list.
      def self.new(formatters = nil)
        Class.new do
          define_method :formatters do
            @formatters ||= Array(formatters)
          end

          include InstanceMethods
        end
      end

      # Deprecated bracket constructor; use ::new.
      def self.[](*args)
        warn "#{Kernel.caller.first}: [DEPRECATION] ::[] is deprecated. Use ::new instead."
        new(Array(args))
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/profiles/rails.rb | lib/simplecov/profiles/rails.rb | # frozen_string_literal: true
# Profile with sensible defaults for Ruby on Rails applications.
SimpleCov.profiles.define "rails" do
  load_profile "test_frameworks"

  # Configuration and migrations are not meaningfully coverable.
  add_filter %r{^/config/}
  add_filter %r{^/db/}

  add_group "Controllers", "app/controllers"
  add_group "Channels", "app/channels"
  add_group "Models", "app/models"
  add_group "Mailers", "app/mailers"
  add_group "Helpers", "app/helpers"
  add_group "Jobs", %w[app/jobs app/workers]
  add_group "Libraries", "lib/"

  # Report on all app/lib ruby files even if never loaded by the tests.
  track_files "{app,lib}/**/*.rb"
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/profiles/hidden_filter.rb | lib/simplecov/profiles/hidden_filter.rb | # frozen_string_literal: true
# Profile excluding hidden (dot-prefixed) files and directories at the root.
SimpleCov.profiles.define "hidden_filter" do
  add_filter %r{^/\..*}
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/profiles/bundler_filter.rb | lib/simplecov/profiles/bundler_filter.rb | # frozen_string_literal: true
# Profile excluding gems vendored by Bundler from coverage.
SimpleCov.profiles.define "bundler_filter" do
  add_filter "/vendor/bundle/"
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/profiles/root_filter.rb | lib/simplecov/profiles/root_filter.rb | # frozen_string_literal: true
# Profile restricting coverage to files inside SimpleCov.root.
SimpleCov.profiles.define "root_filter" do
  # Exclude all files outside of simplecov root
  root_filter = nil
  add_filter do |src|
    # Lazily built once (SimpleCov.root may not be final at define time)
    # and cached in the closure for subsequent calls.
    root_filter ||= /\A#{Regexp.escape(SimpleCov.root + File::SEPARATOR)}/io
    src.filename !~ root_filter
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/profiles/test_frameworks.rb | lib/simplecov/profiles/test_frameworks.rb | # frozen_string_literal: true
# Profile excluding the conventional test/spec directories of the common
# Ruby test frameworks from coverage.
SimpleCov.profiles.define "test_frameworks" do
  add_filter "/test/"
  add_filter "/features/"
  add_filter "/spec/"
  add_filter "/autotest/"
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/source_file/line.rb | lib/simplecov/source_file/line.rb | # frozen_string_literal: true
module SimpleCov
  class SourceFile
    # Representation of a single line in a source file including
    # this specific line's source code, line_number and code coverage,
    # with the coverage being either nil (coverage not applicable, e.g. comment
    # line), 0 (line not covered) or >=1 (the amount of times the line was
    # executed)
    class Line
      # src: the source text; line_number: 1-based position in the file;
      # coverage: nil / 0 / positive hit count; skipped: nocov flag.
      attr_reader :src, :line_number, :coverage, :skipped

      # Fancy aliases for convenience.
      alias source src
      alias line line_number
      alias number line_number

      def initialize(src, line_number, coverage)
        raise ArgumentError, "Only String accepted for source" unless src.is_a?(String)
        raise ArgumentError, "Only Integer accepted for line_number" unless line_number.is_a?(Integer)
        raise ArgumentError, "Only Integer and nil accepted for coverage" unless coverage.is_a?(Integer) || coverage.nil?

        @src = src
        @line_number = line_number
        @coverage = coverage
        @skipped = false
      end

      # True when the line was relevant and not skipped, but never executed.
      def missed?
        return false if skipped? || never?

        coverage.zero?
      end

      # True when the line was relevant, not skipped, and executed at least once.
      def covered?
        return false if skipped? || never?

        coverage.positive?
      end

      # True when coverage does not apply to this line (e.g. a comment).
      def never?
        !skipped? && coverage.nil?
      end

      # Flags this line as skipped.
      def skipped!
        @skipped = true
      end

      # True when the line sits inside a # :nocov: block.
      def skipped?
        skipped
      end

      # The status of this line - "covered", "missed", "skipped" or "never";
      # handy e.g. as a CSS class in report generation.
      def status
        if skipped?
          "skipped"
        elsif never?
          "never"
        elsif missed?
          "missed"
        elsif covered?
          "covered"
        end
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
simplecov-ruby/simplecov | https://github.com/simplecov-ruby/simplecov/blob/afcf15e42fde20abbdb6fe9591a3e5d98acc088e/lib/simplecov/source_file/branch.rb | lib/simplecov/source_file/branch.rb | # frozen_string_literal: true
module SimpleCov
  class SourceFile
    #
    # Representing single branch that has been detected in coverage report.
    # Give us support methods that handle needed calculations.
    class Branch
      attr_reader :start_line, :end_line, :coverage, :type

      # rubocop:disable Metrics/ParameterLists
      def initialize(start_line:, end_line:, coverage:, inline:, type:)
        @start_line = start_line
        @end_line = end_line
        @coverage = coverage
        @inline = inline
        @type = type
        @skipped = false
      end
      # rubocop:enable Metrics/ParameterLists

      # Whether this branch starts and ends on the same expression line.
      def inline?
        @inline
      end

      #
      # True when the branch was executed at least once (and not skipped).
      #
      # @return [Boolean]
      #
      def covered?
        return false if skipped?

        coverage.positive?
      end

      #
      # True when the branch was never executed (and not skipped).
      #
      # @return [Boolean]
      #
      def missed?
        return false if skipped?

        coverage.zero?
      end

      # The line on which we want to report the coverage
      #
      # Usually we choose the line above the start of the branch (so that it shows up
      # at if/else) because that
      # * highlights the condition
      # * makes it distinguishable if the first line of the branch is an inline branch
      #   (see the nested_branches fixture)
      #
      def report_line
        inline? ? start_line : start_line - 1
      end

      # Flags the branch as skipped
      def skipped!
        @skipped = true
      end

      # Returns true if the branch was marked skipped by virtue of nocov comments.
      def skipped?
        @skipped
      end

      # Whether this branch intersects the given line range at all.
      def overlaps_with?(line_range)
        start_line <= line_range.end && end_line >= line_range.begin
      end

      #
      # Return array with branch type and coverage count
      #
      # @return [Array]
      #
      def report
        [type, coverage]
      end
    end
  end
end
| ruby | MIT | afcf15e42fde20abbdb6fe9591a3e5d98acc088e | 2026-01-04T15:45:13.013465Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/certs_manager.rb | fs_overlay/opt/certs_manager/certs_manager.rb | Dir[File.dirname(__FILE__) + '/lib/*.rb'].each { |file| require file }
require_relative 'models/domain'
require 'fileutils'
# Orchestrates obtaining, renewing and installing TLS certificates for all
# configured domains, and drives the accompanying Nginx instance.
# Heavily side-effecting: touches the filesystem, crontab and Nginx, and
# talks to the ACME CA via the ACME/OpenSSL helpers.
class CertsManager
  include Commands

  attr_accessor :lock

  # Initial container start: full configuration with Nginx start/stop cycle.
  def setup
    setup_config(true)
  end

  # Runtime reconfiguration: same flow, but only reloads the running Nginx.
  def reconfig
    setup_config(false)
  end

  # Shared configuration flow. `initial` controls whether Nginx is
  # started/stopped (first boot) or merely reloaded (reconfig).
  def setup_config(initial)
    with_lock do
      ensure_dockerhost_in_hosts
      ensure_crontab

      NAConfig.domains.each do |domain|
        if NAConfig.debug_mode?
          domain.print_debug_info
        end

        domain.ensure_welcome_page
      end

      # Dummy cert/keys let Nginx start before real certs are signed.
      ensure_dummy_certificate

      OpenSSL.ensure_dhparam
      OpenSSL.ensure_account_key
      generate_ht_access(NAConfig.domains)
      ensure_keys_and_certs_exist(NAConfig.domains)
      config_domains(NAConfig.domains)

      Nginx.setup

      if initial
        Nginx.start
      else
        Nginx.reload
      end

      # Nginx must be up so the ACME HTTP-01 challenge can be answered.
      ensure_signed(NAConfig.domains_w_unique_names, true)

      if initial
        Nginx.stop
      else
        Nginx.reload
      end
    end

    if initial
      sleep 1 # Give Nginx some time to shutdown
    end
  end

  # Cron entry point: re-signs any domain whose certificate is near expiry.
  def renew
    puts "Renewing ..."
    NAConfig.domains.each(&:print_debug_info) if NAConfig.debug_mode?

    with_lock do
      NAConfig.domains_w_unique_names.each do |domain|
        if NAConfig.debug_mode?
          domain.print_debug_info
        end

        if OpenSSL.need_to_sign_or_renew? domain
          ACME.sign(domain) # CSR is preserved
          chain_certs(domain)
          Nginx.reload
          puts "Renewed certs for #{domain.name}"
        else
          puts "Renewal skipped for #{domain.name}, it expires at #{OpenSSL.expires_in_days(domain.signed_cert_path)} days from now."
        end
      end
    end
    puts "Renewal done."
  end

  private

  # Rewrites all per-domain Nginx vhost configs from scratch.
  def config_domains(domains)
    Dir['/etc/nginx/conf.d/*.conf'].each { |file| File.delete file }

    domains.each do |domain|
      Nginx.config_domain(domain)
    end
  end

  def ensure_keys_and_certs_exist(domains)
    # Just to make sure there is some sort of certificate existing,
    # whether being dummy or real,
    # so Nginx can start
    dummy_cert_path = File.join(NAConfig.portal_base_dir, "default_server/default_server.crt")
    dummy_key_path = File.join(NAConfig.portal_base_dir, "default_server/default_server.key")

    domains.each do |domain|
      mkdir(domain)
      if NAConfig.force_renew? || !OpenSSL.key_and_cert_exist?(domain)
        Logger.debug "copying dummy key and cert for #{domain.name}"
        FileUtils.cp(dummy_key_path, domain.key_path)
        FileUtils.cp(dummy_cert_path, domain.signed_cert_path)
        chain_certs(domain)
      end
    end
  end

  # Signs (or re-signs) every domain that needs it; when `exit_on_failure`
  # is true a signing failure shuts the whole container down.
  def ensure_signed(domains_w_unique_names, exit_on_failure = false)
    Logger.debug ("ensure_signed")
    domains_w_unique_names.each do |domain|
      if OpenSSL.need_to_sign_or_renew? domain
        mkdir(domain)
        OpenSSL.create_ongoing_domain_key(domain)
        OpenSSL.create_csr(domain)

        if ACME.sign(domain)
          chain_certs(domain)
          Nginx.reload || fail_and_shutdown
          puts "Signed certificate for #{domain.name}"
        else
          puts("Failed to obtain certs for #{domain.name}")
          fail_and_shutdown if exit_on_failure
        end
      else
        puts "Signing skipped for #{domain.name}, it expires at #{OpenSSL.expires_in_days(domain.signed_cert_path)} days from now."
      end
    end
  end

  # Serializes cert operations across processes via an exclusive flock on a
  # lock file (File::CREAT opens read-only but still allows flock).
  # NOTE(review): `yield(block)` passes the block itself as an argument to
  # itself; callers ignore it, so this is harmless — confirm before changing.
  def with_lock(&block)
    File.open('/tmp/https-portal.lock', File::CREAT) do |lock|
      lock.flock File::LOCK_EX
      yield(block)
    end
  end

  # Writes the renewal crontab from the template unless one already exists.
  def ensure_crontab
    crontab = '/etc/crontab'

    unless File.exist?(crontab)
      File.open(crontab, 'w') do |file|
        file.write compiled_crontab
      end
    end
  end

  # Renders the crontab ERB template with an empty binding.
  def compiled_crontab
    ERBBinding.new('/var/lib/crontab.erb', {}).compile
  end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/models/domain.rb | fs_overlay/opt/certs_manager/models/domain.rb | require 'fileutils'
class Domain
STAGES = %w(production staging local).freeze
attr_reader :descriptor
def initialize(descriptor)
@descriptor = descriptor
create_dir
end
def csr_path
File.join(dir, 'domain.csr')
end
def signed_cert_path
File.join(dir, 'signed.crt')
end
# For backward compatibility
def chained_cert_path
File.join(dir, 'chained.crt')
end
def ongoing_cert_path
File.join(dir, 'signed.ongoing.crt')
end
def ongoing_key_path
File.join(dir, 'domain.ongoing.key')
end
def key_path
File.join(dir, 'domain.key')
end
def htaccess_path
File.join(dir, 'htaccess')
end
def dir
File.join(NAConfig.portal_base_dir, name, stage)
end
def www_root
File.join("/var/www/vhosts/", name)
end
def ensure_welcome_page
return if upstreams.length > 0 || redirect_target_url
index_html = File.join(www_root, 'index.html')
unless File.exist?(index_html)
FileUtils.mkdir_p www_root
File.open(index_html, 'w') do |file|
file.write compiled_welcome_page
end
end
end
def ca
case stage
when 'production'
'https://acme-v02.api.letsencrypt.org/directory'
when 'local'
nil
when 'staging'
'https://acme-staging-v02.api.letsencrypt.org/directory'
end
end
def name
parsed_descriptor[:domain]
end
def port
parsed_descriptor[:port] || '443'
end
def env_format_name
name.upcase.tr('^A-Z0-9', '_')
end
def upstream_backend_name
"backend_" + parsed_descriptor[:domain]
end
def upstream_proto
mode = parsed_descriptor[:mode]
return unless ["->", "=>"].include? mode
default = mode == "->" ? "http://" : "https://"
parsed_descriptor[:upstream_proto] || default
end
def upstreams
upstreams = parsed_descriptor[:upstreams].to_s.split("|").delete_if { |v| v.empty? }
upstreams.map do |v|
match = v.match(/^(?<address>[^\[]+)(?:\[(?<parameters>.*)\])?$/)
raise "Invalid upstream: #{v}" unless match
match.named_captures.transform_keys(&:to_sym)
end
end
def multiple_upstreams?
upstreams.length > 1
end
def upstream
# For backward compatibility it is important to return nil for static site and redirect mode
return unless parsed_descriptor[:mode] == '->'
upstream = upstreams.first
return if upstream.nil?
return upstream_proto + upstream[:address]
end
def redirect_target_url
return unless parsed_descriptor[:mode] == '=>'
upstream = upstreams.first
return if upstream.nil?
raise "Parameters not supported on redirect-target" unless upstream[:parameters].nil?
upstream_proto + upstream[:address]
end
# Effective stage for this domain: the per-domain "#stage" suffix from the
# descriptor, falling back to the global NAConfig.stage. Returns nil (and
# logs to STDERR) when the value is not listed in STAGES.
def stage
val = parsed_descriptor[:stage].to_s.empty? ? NAConfig.stage : parsed_descriptor[:stage]
if STAGES.include?(val)
val
else
STDERR.puts "Error: Invalid stage #{val}"
nil
end
end
# Username for HTTP basic auth, given as "user:pass@domain" in the descriptor.
def basic_auth_username
parsed_descriptor[:user]
end
# Password for HTTP basic auth.
def basic_auth_password
parsed_descriptor[:pass]
end
# Basic auth is only active when both credentials were supplied.
def basic_auth_enabled?
basic_auth_username && basic_auth_password
end
# List of allowed client IPs/CIDRs from the leading "[...]" part of the
# descriptor, or nil when access is unrestricted. Memoised with a
# `defined?` guard so a legitimate nil result is cached too.
def access_restriction
if defined? @access_restriction
@access_restriction
else
if parsed_descriptor[:ips].nil?
@access_restriction = nil
else
@access_restriction = parsed_descriptor[:ips].split(' ')
end
end
end
# Dump the parsed configuration of this domain to stdout (debugging aid).
def print_debug_info
puts "----------- BEGIN DOMAIN CONFIG -------------"
puts "name: #{name}"
puts "port: #{port}"
puts "stage: #{stage}"
puts "upstream: #{upstream}"
puts "upstreams: #{upstreams.inspect}"
puts "upstream_proto: #{upstream_proto}"
puts "redirect_target_url: #{redirect_target_url}"
puts "basic_auth_username: #{basic_auth_username}"
puts "basic_auth_password: #{basic_auth_password}"
puts "access_restriction: #{access_restriction}"
# NOTE(review): the END banner ("-------- ---") does not mirror the BEGIN
# banner; looks like a typo, deliberately left untouched here.
puts "-------- --- END DOMAIN CONFIG -------------"
end
private
# Ensure the per-domain storage directory exists (called during setup).
def create_dir
FileUtils.mkdir_p dir
end
# Parse the raw descriptor string into its named components and memoise the
# result (the `defined?` guard caches a nil result for invalid descriptors).
#
# Descriptor grammar, roughly:
#   [ips] [user[:pass]@]domain[:port] [-> or => [proto://]upstreams] [#stage]
# Returns a hash of the symbolised capture names, or nil (after logging to
# STDERR) when the descriptor does not match.
def parsed_descriptor
if defined? @parsed_descriptor
@parsed_descriptor
else
regex = %r{
^
(?:\[(?<ips>[0-9.:\/, ]*)\]\s*)?
(?:(?<user>[^:@\[\]]+)(?::(?<pass>[^@]*))?@)?(?<domain>[a-z0-9._\-]+?)(?:\:(?<port>\d+))?
(?:
\s*(?<mode>[-=]>)\s*
(?<upstream_proto>https?:\/\/)?
(?<upstreams>[a-z0-9.:\/_|\[= \]\-]+?)
)?
(:?\s+\#(?<stage>[a-z]*))?
$
}xi
match = descriptor.strip.match(regex)
if match.nil?
STDERR.puts "Error: Invalid descriptor #{descriptor}"
@parsed_descriptor = nil
else
match = match.named_captures.transform_keys(&:to_sym)
@parsed_descriptor = match
end
end
end
# Render the default welcome page template, exposing this domain (and
# NAConfig) to the ERB template.
def compiled_welcome_page
binding_hash = {
domain: self,
NAConfig: NAConfig
}
ERBBinding.new('/var/www/default/index.html.erb', binding_hash).compile
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/logger.rb | fs_overlay/opt/certs_manager/lib/logger.rb | module Logger
# Print str to stdout with a [DEBUG] prefix, but only when DEBUG mode is on.
# NOTE(review): this Logger module shares its name with Ruby's stdlib
# Logger; requiring the stdlib logger alongside it would clash.
def self.debug(str)
if NAConfig.debug_mode?
puts "[DEBUG] #{str}"
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/erb_binding.rb | fs_overlay/opt/certs_manager/lib/erb_binding.rb | require 'erb'
# Renders an ERB template against an isolated binding that exposes only the
# entries of a given hash, keeping template evaluation free of caller-local
# state.
class ERBBinding
  # Minimal object whose singleton methods mirror the supplied hash, so a
  # template can call e.g. `domain` to read binding_hash[:domain].
  class CleanBinding
    def initialize(locals)
      locals.each do |method_name, value|
        singleton_class.send(:define_method, method_name) { value }
      end
    end

    # Expose this object's binding for ERB#result.
    def get
      binding
    end
  end

  def initialize(template_path, binding_hash)
    @template = File.read(template_path)
    @binding_hash = binding_hash
  end

  # Evaluate the template and return the rendered string.
  def compile
    scope = CleanBinding.new(@binding_hash)
    ERB.new(@template).result(scope.get)
  end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/open_ssl.rb | fs_overlay/opt/certs_manager/lib/open_ssl.rb | require 'date'
# Thin wrappers around the `openssl` CLI for key/CSR/certificate management.
# NOTE(review): this reopens the stdlib OpenSSL namespace; the singleton
# methods added here do not collide with stdlib names, but the overlap is
# worth knowing about.
module OpenSSL
# Create (or recreate) the shared ACME account key when missing or corrupt.
def self.ensure_account_key
path = "#{NAConfig.portal_base_dir}/account.key"
unless File.exist?(path) && system("openssl rsa --in #{path} --noout --check")
system "openssl genrsa 4096 > #{path}"
end
end
# Generate a fresh private key (RSA or EC, per CERTIFICATE_ALGORITHM) at the
# domain's "ongoing" key path.
def self.create_ongoing_domain_key(domain)
algo = NAConfig.certificate_algorithm
Logger.debug "create_ongoing_domain_key #{algo} for #{domain.name}"
if algo == "rsa"
system "openssl genrsa #{NAConfig.key_length} > #{domain.ongoing_key_path}"
else
system "openssl ecparam -genkey -name #{algo} -noout -out #{domain.ongoing_key_path}"
end
end
# Create a certificate signing request for the domain's ongoing key.
def self.create_csr(domain)
Logger.debug "create_csr for #{domain.name}"
system "openssl req -new -sha256 -key #{domain.ongoing_key_path} -subj '/CN=#{domain.name}' > #{domain.csr_path}"
end
def self.key_and_cert_exist?(domain)
File.exist?(domain.key_path) && File.exist?(domain.signed_cert_path)
end
# A (re)sign is needed unless a real (non-dummy) key+cert pair exists and is
# still valid for more than renew_margin_days. FORCE_RENEW overrides all.
def self.need_to_sign_or_renew?(domain)
return true if NAConfig.force_renew?
skip_conditions = File.exist?(domain.key_path) &&
File.exist?(domain.signed_cert_path) &&
!dummy?(domain.signed_cert_path) &&
expires_in_days(domain.signed_cert_path) > NAConfig.renew_margin_days
!skip_conditions
end
# Days until the certificate in pem expires (negative once expired).
def self.expires_in_days(pem)
(expires_at(pem) - Date.today).to_i
end
# Generate the shared DH parameters file when the existing one is absent or
# invalid.
def self.ensure_dhparam
unless dhparam_valid?(NAConfig.dhparam_path)
system "mkdir -p #{File.dirname(NAConfig.dhparam_path)} && openssl dhparam -out #{NAConfig.dhparam_path} 2048"
end
end
# Self-sign a 90-day certificate from the domain's CSR (used by the 'local'
# stage) and promote it to the live cert/key paths on success.
def self.self_sign(domain)
puts "Self-signing test certificate for #{domain.name}"
command = <<-EOC
openssl req -x509 \
-in #{domain.csr_path} \
-key #{domain.ongoing_key_path} \
-out #{domain.ongoing_cert_path} \
-days 90 \
-batch \
-addext "extendedKeyUsage = serverAuth" \
-addext "subjectAltName = DNS:#{domain.name}"
EOC
(system command) && ACME.rename_ongoing_cert_and_key(domain)
end
# Create the long-lived placeholder cert/key for the fallback default
# server; its well-known CN is also how dummy? recognises placeholder certs.
def self.generate_dummy_certificate(dir, out_path, keyout_path)
puts "Generating dummy certificate for default fallback server"
command = <<-EOC
mkdir -p #{dir} && \
openssl req -x509 -newkey \
rsa:#{NAConfig.key_length} -nodes \
-out #{out_path} \
-keyout #{keyout_path} \
-days 36500 \
-batch \
-subj "/CN=default-server.example.com"
EOC
system command
end
private
# NOTE(review): `private` has no effect on methods defined with `def self.`;
# the helpers below are effectively still public despite the marker.
# True when pem is the generated placeholder certificate (matched by issuer CN).
def self.dummy?(pem)
issuer = `openssl x509 -issuer -noout -in #{pem}`
issuer.include? "default-server.example.com"
end
# Expiry date parsed from the certificate's notAfter field.
def self.expires_at(pem)
date_str = `openssl x509 -enddate -noout -in #{pem}`.sub('notAfter=', '')
Date.parse date_str
end
def self.dhparam_valid?(path)
File.exist?(path) && system("openssl dhparam -check < #{path}")
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/commands.rb | fs_overlay/opt/certs_manager/lib/commands.rb | require 'open-uri'
require 'fileutils'
# Miscellaneous filesystem/system setup helpers used by the certs manager.
module Commands
# Ensure the legacy "chained" certificate path points at the signed cert.
def chain_certs(domain)
# Keeping it for backward compatibility
unless File.exist?(domain.chained_cert_path)
FileUtils.ln_s(domain.signed_cert_path, domain.chained_cert_path)
end
end
# Create the domain's storage directory.
def mkdir(domain)
system "mkdir -p #{domain.dir}"
end
# Add a "dockerhost" entry to /etc/hosts pointing at the default gateway
# (the Docker host), unless one is already present.
def ensure_dockerhost_in_hosts
unless File.foreach("/etc/hosts").grep(/dockerhost/).any?
docker_host_ip = `/sbin/ip route|awk '/default/ { print $3 }'`.strip
File.open('/etc/hosts', 'a') do |f|
f.puts "#{docker_host_ip}\tdockerhost"
end
end
end
# Write an htpasswd file for every domain that has basic auth configured.
# NOTE(review): the credentials are single-quoted but not escaped; a
# username/password containing a single quote would break this command.
def generate_ht_access(domains)
domains.each do |domain|
if domain.basic_auth_enabled?
system "htpasswd -bc '#{domain.htaccess_path}' '#{domain.basic_auth_username}' '#{domain.basic_auth_password}'"
end
end
end
# Create the fallback default-server cert/key pair once.
def ensure_dummy_certificate
base_dir = File.join(NAConfig.portal_base_dir, "default_server")
cert_path = File.join(NAConfig.portal_base_dir, "default_server/default_server.crt")
key_path = File.join(NAConfig.portal_base_dir, "default_server/default_server.key")
unless File.exist?(cert_path) && File.exist?(key_path)
OpenSSL.generate_dummy_certificate(
base_dir,
cert_path,
key_path
)
end
end
# Stop nginx and exit non-zero; used when startup cannot proceed.
def fail_and_shutdown
Logger.debug ("Fail and Shutdown")
Nginx.stop
exit(1)
end
module_function :fail_and_shutdown
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/acme.rb | fs_overlay/opt/certs_manager/lib/acme.rb | require 'timeout'
require 'fileutils'
# Certificate signing: self-signed for the 'local' stage, ACME (acme_tiny)
# for staging/production.
module ACME
class FailedToSignException < RuntimeError; end
# Sign (or self-sign) a certificate for domain. Returns false on known
# failures (signing error, timeout) instead of raising, so callers can
# continue with the remaining domains.
def self.sign(domain)
if domain.stage == 'local'
OpenSSL.self_sign(domain)
else
le_sign(domain)
end
rescue FailedToSignException, Timeout::Error => e
false
end
private
# NOTE(review): `private` does not apply to `def self.` methods, so the
# helpers below remain technically callable from outside.
# Run acme_tiny against the domain's CA with a two-minute overall timeout,
# writing the new certificate to the "ongoing" path and promoting it on
# success.
def self.le_sign(domain)
Timeout.timeout(120) do
puts "Signing certificates from #{domain.ca} ..."
command = <<-EOC
acme_tiny \
--account-key #{NAConfig.portal_base_dir}/account.key \
--csr #{domain.csr_path} \
--acme-dir /var/www/default/challenges/ \
--disable-check \
--directory-url #{domain.ca} > #{domain.ongoing_cert_path}
EOC
raise FailedToSignException unless system(command)
rename_ongoing_cert_and_key(domain)
end
rescue Exception => e
# NOTE(review): rescuing Exception is normally a red flag; here it only
# prints a hint and re-raises, so signals and exits still propagate.
puts <<-HERE
================================================================================
Failed to sign #{domain.name}.
Make sure your DNS is configured correctly and is propagated to this host
machine. Sometimes that takes a while.
================================================================================
HERE
raise e
end
# Promote the freshly signed cert/key from their "ongoing" paths to the
# live paths served by nginx.
def self.rename_ongoing_cert_and_key(domain)
FileUtils.mv(domain.ongoing_cert_path, domain.signed_cert_path, force: true)
FileUtils.mv(domain.ongoing_key_path, domain.key_path, force: true)
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/na_config.rb | fs_overlay/opt/certs_manager/lib/na_config.rb | module NAConfig
# Root directory for all persisted portal state (certs, keys, dhparam).
def self.portal_base_dir
"/var/lib/https-portal"
end
# All configured domains (env + docker auto-discovery), unique per name+port.
def self.domains
(env_domains + auto_discovered_domains).uniq {|d| [d.name, d.port] }
end
# Like .domains, but collapsing different ports of the same name.
def self.domains_w_unique_names
(env_domains + auto_discovered_domains).uniq(&:name)
end
# Global default stage: the STAGE env var, or (legacy) derived from the
# PRODUCTION flag.
def self.stage
if ENV['STAGE']
ENV['STAGE']
else # legacy
if production_key?
'production'
else
'staging'
end
end
end
# Legacy PRODUCTION=true flag (case-insensitive).
def self.production_key?
ENV['PRODUCTION'] && ENV['PRODUCTION'].casecmp('true').zero?
end
# FORCE_RENEW=true forces re-signing regardless of certificate age.
def self.force_renew?
ENV['FORCE_RENEW'] && ENV['FORCE_RENEW'].casecmp('true').zero?
end
def self.dhparam_path
"#{NAConfig.portal_base_dir}/dhparam.pem"
end
# Domains listed in the DOMAINS env var.
def self.env_domains
if ENV['DOMAINS']
parse ENV['DOMAINS']
else
[]
end
end
# Domains written to /var/run/domains by container auto-discovery.
def self.auto_discovered_domains
if File.exist? '/var/run/domains'
parse File.read('/var/run/domains')
else
[]
end
end
# Truthy when the DEBUG env var is set to any value.
def self.debug_mode?
ENV['DEBUG']
end
# Renewal safety margin in days (RENEW_MARGIN_DAYS), defaulting to 30.
def self.renew_margin_days
ENV['RENEW_MARGIN_DAYS'].to_i != 0 ? ENV['RENEW_MARGIN_DAYS'].to_i : 30
end
# Key algorithm for new certificates: 'prime256v1' (ECDSA P-256) when
# explicitly requested via CERTIFICATE_ALGORITHM, otherwise 'rsa'.
# The value ends up interpolated into an `openssl` shell command, so the
# whole string must match: \A/\z anchor the entire value (the previous ^/$
# anchors matched per line, letting a multi-line value through).
def self.certificate_algorithm
ENV['CERTIFICATE_ALGORITHM'] =~ /\Aprime256v1\z/ ? ENV['CERTIFICATE_ALGORITHM'] : 'rsa'
end
# RSA key size in bits: the NUMBITS env var when it is purely numeric,
# otherwise the Integer default 2048 (both forms are only ever interpolated
# into openssl commands). \A/\z anchor the whole value — the previous ^/$
# anchors matched per line, so a multi-line NUMBITS could smuggle extra text
# into the shell command.
def self.key_length
ENV['NUMBITS'] =~ /\A[0-9]+\z/ ? ENV['NUMBITS'] : 2048
end
private
# NOTE(review): `private` does not affect `def self.` methods; parse remains
# reachable from outside.
# Split a comma-separated descriptor list into Domain objects, ignoring
# blank entries.
def self.parse(domain_desc)
domain_desc.split(',').map(&:strip).delete_if { |s| s == '' }.map do |descriptor|
Domain.new(descriptor)
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/fs_overlay/opt/certs_manager/lib/nginx.rb | fs_overlay/opt/certs_manager/lib/nginx.rb | require_relative './commands'
# Generation and lifecycle management of the nginx configuration.
module Nginx
class NginxReloadException < RuntimeError; end
# Render the base nginx.conf from its ERB template.
def self.setup
compiled_basic_config = ERBBinding.new('/var/lib/nginx-conf/nginx.conf.erb', {}).compile
File.open('/etc/nginx/nginx.conf', 'w') do |f|
f.write compiled_basic_config
end
end
# Write the plain-HTTP vhost config for a domain.
def self.config_http(domain)
File.open("/etc/nginx/conf.d/#{domain.name}.conf", 'w') do |f|
f.write compiled_domain_config(domain, false)
end
end
# Write the SSL vhost config; non-443 ports carry the port in the filename
# so multiple listeners per domain can coexist.
def self.config_ssl(domain)
if domain.port == "443"
file_path = "/etc/nginx/conf.d/#{domain.name}.ssl.conf" # Backwards compatibility
else
file_path = "/etc/nginx/conf.d/#{domain.name}_#{domain.port}.ssl.conf"
end
File.open(file_path, 'w') do |f|
f.write compiled_domain_config(domain, true)
end
end
# Write both the HTTP and SSL configs for a domain.
def self.config_domain(domain)
config_http(domain)
config_ssl(domain)
end
# Start nginx (daemonised by default); shuts the container down on failure.
def self.start(daemon = true)
Logger.debug "Starting Nginx, daemon mode = #{daemon}"
if daemon
success = system 'nginx -q'
else
success = system 'nginx -q -g "daemon off;"'
end
unless success
puts "Nginx failed to start, exiting ..."
Commands.fail_and_shutdown
end
end
# Hot-reload the nginx config; optionally kill the master process when the
# reload fails (so the container supervisor can restart cleanly). Returns
# whether the reload succeeded.
def self.reload(kill_on_failure = false)
Logger.debug "Reloading Nginx, kill_on_failure = #{kill_on_failure}"
success = system 'nginx -s reload'
if (!success && kill_on_failure)
kill
end
success
end
def self.stop
system 'nginx -s stop'
end
def self.kill
system 'pkill -F /var/run/nginx.pid'
end
private
# NOTE(review): `private` has no effect on the `def self.` methods below.
# Render a domain's vhost config from the appropriate ERB template.
def self.compiled_domain_config(domain, ssl)
binding_hash = {
domain: domain,
acme_challenge_location: acme_challenge_location_snippet,
dhparam_path: NAConfig.dhparam_path
}
ERBBinding.new(template_path(domain, ssl), binding_hash).compile
end
# A per-domain template override beats the bundled default template.
def self.template_path(domain, ssl)
ssl_ext = ssl ? '.ssl' : ''
override = "/var/lib/nginx-conf/#{domain.name}#{ssl_ext}.conf.erb"
default = "/var/lib/nginx-conf/default#{ssl_ext}.conf.erb"
if File.exist? override
override
else
default
end
end
# Location block serving ACME HTTP-01 challenges; overridable via the
# ACME_CHALLENGE_BLOCK env var.
def self.acme_challenge_location_snippet
ENV['ACME_CHALLENGE_BLOCK'] || <<-SNIPPET
location /.well-known/acme-challenge/ {
allow all;
alias /var/www/default/challenges/;
try_files $uri =404;
}
SNIPPET
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/spec_helper.rb | spec/spec_helper.rb | # This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'pathname'
# Defaults for local runs; CI overrides these via the environment.
ENV['TEST_DOMAIN'] ||= 'test.nginx-acme.site'
ENV['FORCE_RENEW'] ||= 'false'
RootPath = Pathname(File.expand_path('../..', __FILE__))
CompositionsPath = RootPath.join('spec/compositions')
Dir[RootPath.join('spec/support/**/*.rb')].each { |f| require f }
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => 'be bigger than 2 and smaller than 4'
# ...rather than:
# # => 'be bigger than 2'
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = 'spec/examples.txt'
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Intentional configure RSpec to run ours specs in defined order.
# This is important since some example groups reuse existing containers
# created by previous example group.
config.order = :defined
config.include PortalHelpers
config.before :suite do
unless ENV['SKIP_BUILD']
puts "TEST_DOMAIN: #{ENV['TEST_DOMAIN']}"
# Ensure the build process have existing cached layers to reuse, by
# explicitly rebuild the docker image for spec.
puts 'Rebuilding docker image for spec...'
Dir.chdir CompositionsPath.children.first do
PortalHelpers.docker_compose :build
end
end
end
# Feature groups run against a fresh set of containers unless they opt in
# to container reuse via the :reuse_container metadata.
config.before :all, type: :feature do |example|
unless example.class.metadata[:reuse_container]
Dir.chdir CompositionsPath.join(example.class.metadata[:composition]) do
purge_existing_containers
end
end
end
# Every feature example runs from inside its composition's directory so
# docker-compose picks up the right docker-compose.yml.
config.around :example, type: :feature do |example|
Dir.chdir CompositionsPath.join(example.metadata[:composition]) do
example.run
end
end
config.after :example, type: :feature do
docker_compose :stop
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/support/portal_helpers.rb | spec/support/portal_helpers.rb | require 'open-uri'
require 'openssl'
# Helpers shared by the feature specs for driving docker-compose and probing
# the portal over HTTPS.
module PortalHelpers
  extend self

  # Run `docker-compose <command>` under the fixed "portalspec" project name;
  # :up is always detached so specs don't block on container logs.
  def docker_compose(command, env: {})
    case command.to_sym
    when :up
      command = 'up -d'
    end
    system(env, "docker-compose --project-name portalspec #{command}")
  end

  # Force-remove any containers (and their volumes) left over from earlier
  # runs of the spec project.
  def purge_existing_containers
    system 'docker rm --force --volumes $(docker-compose --project-name portalspec ps -q) &> /dev/null'
  end

  # GET https://TEST_DOMAIN/<path>, ignoring certificate validity (staging /
  # self-signed certs), retrying for up to ~10 minutes while the connection
  # is refused since the portal may still be starting up.
  def read_https_content(path = nil)
    tries = 60
    begin
      # URI.open (from open-uri) instead of Kernel#open: opening URLs via
      # Kernel#open was removed in Ruby 3.0.
      URI.open("https://#{ENV['TEST_DOMAIN']}/#{path}", ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE, &:read)
    rescue Errno::ECONNREFUSED
      # The explicit begin/end matters: with a def-level rescue, `retry`
      # restarts the whole method body, which would reset `tries` to 60 on
      # every attempt and make the retry loop unbounded.
      if (tries -= 1) > 0
        sleep 10
        retry
      end
    end
  end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/models/domain_spec.rb | spec/models/domain_spec.rb | require 'spec_helper'
require_relative '../../fs_overlay/opt/certs_manager/certs_manager'
RSpec.describe Domain do
before do
# Pin the global stage and keep Domain from touching the filesystem.
allow(NAConfig).to receive(:stage).and_return('local')
allow(FileUtils).to receive(:mkdir_p)
end
# Table-driven parsing test: each row is a raw descriptor followed by the
# expected value of every public accessor, zipped with `keys` below.
it 'returns correct names, upstream. redirect_target_url, stage etc.' do
keys = [:descriptor, :name, :env_format_name, :upstream_proto, :upstreams, :redirect_target_url, :stage, :basic_auth_username, :basic_auth_password, :access_restriction, :port]
domain_configs = [
['example.com', 'example.com', 'EXAMPLE_COM', nil, [], nil, 'local', nil, nil, nil, "443"],
['example.com:4443', 'example.com', 'EXAMPLE_COM', nil, [], nil, 'local', nil, nil, nil, "4443"],
['4example.com', '4example.com', '4EXAMPLE_COM', nil, [], nil, 'local', nil, nil, nil, "443"],
[' example.com ', 'example.com', 'EXAMPLE_COM', nil, [], nil, 'local', nil, nil, nil, "443"],
['example.com #staging', 'example.com', 'EXAMPLE_COM', nil, [], nil, 'staging', nil, nil, nil, "443"],
['example.com -> http://target ', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'local', nil, nil, nil, "443"],
["example.com \n-> http://target \n", 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'local', nil, nil, nil, "443"],
["example.com\n-> http://target ", 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'local', nil, nil, nil, "443"],
['example.com -> http://target:8000', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target:8000', :parameters => nil}], nil, 'local', nil, nil, nil, "443"],
['example.com -> target:8000', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target:8000', :parameters => nil}], nil, 'local', nil, nil, nil, "443"],
['example.com => http://target', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], 'http://target', 'local', nil, nil, nil, "443"],
['example.com => https://target', 'example.com', 'EXAMPLE_COM', 'https://', [{:address => 'target', :parameters => nil}], 'https://target', 'local', nil, nil, nil, "443"],
['example.com => target', 'example.com', 'EXAMPLE_COM', 'https://', [{:address => 'target', :parameters => nil}], 'https://target', 'local', nil, nil, nil, "443"],
['example.com=>http://target', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], 'http://target', 'local', nil, nil, nil, "443"],
['example.com -> http://target #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'staging', nil, nil, nil, "443"],
['example.com => http://target #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], 'http://target', 'staging', nil, nil, nil, "443"],
['example.com->http://target #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'staging', nil, nil, nil, "443"],
['exam-ple.com->http://tar-get #staging', 'exam-ple.com', 'EXAM_PLE_COM', 'http://', [{:address => 'tar-get', :parameters => nil}], nil, 'staging', nil, nil, nil, "443"],
['example_.com->http://target #staging', 'example_.com', 'EXAMPLE__COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'staging', nil, nil, nil, "443"],
['example.com->http://tar_get_ #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'tar_get_', :parameters => nil}], nil, 'staging', nil, nil, nil, "443"],
['username:password@example.com', 'example.com', 'EXAMPLE_COM', nil, [], nil, 'local', 'username', 'password', nil, "443"],
['username:password@example.com -> http://target #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'staging', 'username', 'password', nil, "443"],
['[1.2.3.4/24]username:password@example.com -> http://target #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'staging', 'username', 'password', %w(1.2.3.4/24), "443"],
[' [ 1.2.3.4 4.3.2.1/24 ] username:password@example.com -> http://target #staging', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target', :parameters => nil}], nil, 'staging', 'username', 'password', %w(1.2.3.4 4.3.2.1/24), "443"],
['example.com -> https://target1|target2:8000', 'example.com', 'EXAMPLE_COM', 'https://', [{:address => 'target1', :parameters => nil}, {:address => 'target2:8000', :parameters => nil}], nil, 'local', nil, nil, nil, "443"],
['example.com -> http://target1:8000|target2:8001[backup max_conns=100]', 'example.com', 'EXAMPLE_COM', 'http://', [{:address => 'target1:8000', :parameters => nil}, {:address => 'target2:8001', :parameters => 'backup max_conns=100'}], nil, 'local', nil, nil, nil, "443"],
]
domain_configs.map { |config|
Hash[keys.zip(config)]
}.each do |config|
domain = Domain.new(config[:descriptor])
expect(domain.name).to eq(config[:name]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :name, expected #{config[:name].inspect}, got #{domain.name.inspect}" }
expect(domain.env_format_name).to eq(config[:env_format_name]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :env_format_name, expected #{config[:env_format_name].inspect}, got #{domain.env_format_name.inspect}" }
expect(domain.upstream_proto).to eq(config[:upstream_proto]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :upstream_proto, expected #{config[:upstream_proto].inspect}, got #{domain.upstream_proto.inspect}" }
expect(domain.upstreams).to eq(config[:upstreams]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :upstreams, expected #{config[:upstreams].inspect}, got #{domain.upstreams.inspect}" }
expect(domain.redirect_target_url).to eq(config[:redirect_target_url]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :redirect_target_url, expected #{config[:redirect_target_url].inspect}, got #{domain.redirect_target_url.inspect}" }
expect(domain.stage).to eq(config[:stage]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :stage, expected #{config[:stage].inspect}, got #{domain.stage.inspect}" }
expect(domain.basic_auth_username).to eq(config[:basic_auth_username]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :basic_auth_username, expected #{config[:basic_auth_username].inspect}, got #{domain.basic_auth_username.inspect}" }
expect(domain.basic_auth_password).to eq(config[:basic_auth_password]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :basic_auth_password, expected #{config[:basic_auth_password].inspect}, got #{domain.basic_auth_password.inspect}" }
expect(domain.access_restriction).to eq(config[:access_restriction]), lambda { "Parsing failed on #{config[:descriptor].inspect} method :access_restriction, expected #{config[:access_restriction].inspect}, got #{domain.access_restriction.inspect}" }
expect(domain.port).to eq(config[:port]), lambda { "Parsing failed on #{config[:port].inspect} method :port, expected #{config[:port].inspect}, got #{domain.port.inspect}" }
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/features/local_stage_spec.rb | spec/features/local_stage_spec.rb | require 'spec_helper'
# Smoke test for STAGE=local (self-signed certificates, no ACME traffic).
RSpec.describe 'Local stage', composition: 'local', type: :feature do
context 'when no certificates are stored' do
it 'should serve a welcome page' do
docker_compose :up, env: { 'FORCE_RENEW' => 'true', 'STAGE' => 'local' }
page = read_https_content
expect(page).to include 'Welcome to HTTPS-PORTAL!'
end
end
context 'when certificates are stored in a data volume' do
# Runs after the group above; the data volume still holds its certificates.
it 'should serve a welcome page' do
docker_compose :up, env: { 'STAGE' => 'local' }
page = read_https_content
expect(page).to include 'Welcome to HTTPS-PORTAL!'
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/features/auto_discovery_spec.rb | spec/features/auto_discovery_spec.rb | require 'spec_helper'
# Verifies upstream auto-discovery by proxying to a WordPress container.
RSpec.describe 'Auto discovery', composition: 'auto-discovery', type: :feature do
context 'when no certificates are stored' do
it 'should forward request to auto discovered WordPress container' do
docker_compose :up, env: { 'FORCE_RENEW' => 'true' }
page = read_https_content
expect(page).to include 'WordPress'
end
end
context 'when certificates are stored in a data volume' do
# Runs after the group above; the data volume still holds its certificates.
it 'should forward request to auto discovered WordPress container' do
docker_compose :up
page = read_https_content
expect(page).to include 'WordPress'
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/features/renewal_spec.rb | spec/features/renewal_spec.rb | require 'spec_helper'
# This spec intentionally reuse containers created by previous example group.
# Since we don't retry the docker command here, to ensure it success, an
# already initialized https-portal instance is required.
# Exercises the weekly cron renewal job inside an already-running portal.
RSpec.describe 'Renewal', :reuse_container, composition: 'minimal-setup', type: :feature do
# Run the weekly cron parts inside the container (which includes the
# renewal job); skipped when anacron owns scheduling.
let(:docker_command) do
'docker exec portalspec_https-portal_1 bash -c ' \
"'test -x /usr/sbin/anacron || ( cd / && run-parts --report /etc/cron.weekly )'"
end
context 'when certs already signed and no FORCE_RENEW specified' do
it 'should not renew certs' do
docker_compose :up
read_https_content
output = `#{docker_command}`
expect(output).to include "No need to renew certs for #{ENV['TEST_DOMAIN']}"
end
end
context 'when certs already signed and FORCE_RENEW specified' do
it 'should force renew the certs' do
docker_compose :up, env: { 'FORCE_RENEW' => 'true' }
read_https_content
output = `#{docker_command}`
expect(output).to include "Renewed certs for #{ENV['TEST_DOMAIN']}"
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/features/minimal_setup_spec.rb | spec/features/minimal_setup_spec.rb | require 'spec_helper'
# Smoke test for the smallest possible configuration (welcome page only).
RSpec.describe 'Minimal setup', composition: 'minimal-setup', type: :feature do
context 'when no certificates are stored' do
it 'should serve a welcome page' do
docker_compose :up, env: { 'FORCE_RENEW' => 'true' }
page = read_https_content
expect(page).to include 'Welcome to HTTPS-PORTAL!'
end
end
context 'when certificates are stored in a data volume' do
# Runs after the group above; the data volume still holds its certificates.
it 'should serve a welcome page' do
docker_compose :up
page = read_https_content
expect(page).to include 'Welcome to HTTPS-PORTAL!'
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/features/linked_containers_spec.rb | spec/features/linked_containers_spec.rb | require 'spec_helper'
# Verifies proxying to an explicitly linked WordPress container.
RSpec.describe 'Linked containers', composition: 'linked-containers', type: :feature do
context 'when no certificates are stored' do
it 'should forward request to linked WordPress container' do
docker_compose :up, env: { 'FORCE_RENEW' => 'true' }
page = read_https_content
expect(page).to include 'WordPress'
end
end
context 'when certificates are stored in a data volume' do
# Runs after the group above; the data volume still holds its certificates.
it 'should forward request to linked WordPress container' do
docker_compose :up
page = read_https_content
expect(page).to include 'WordPress'
end
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
SteveLTN/https-portal | https://github.com/SteveLTN/https-portal/blob/f666dc997dc62150df530f978c7b66cf0e0e516c/spec/features/static_site_spec.rb | spec/features/static_site_spec.rb | require 'spec_helper'
# Static-site mode: files are copied onto the docker-machine host's data
# volume and served directly by nginx.
RSpec.describe 'Serving static site', composition: 'static-site', type: :feature do
before :all do
# Start from an empty data volume on the docker-machine host.
system 'docker-machine ssh $DOCKER_MACHINE_NAME rm -rf /data/https-portal'
end
it 'should serve a custom index page' do
docker_compose :up
system "docker-machine scp index.html $DOCKER_MACHINE_NAME:/data/https-portal/vhosts/#{ENV['TEST_DOMAIN']}/"
page = read_https_content
expect(page).to include 'Welcome to my awesome static site powered by HTTPS-PORTAL!'
end
it 'should serve a custom page' do
docker_compose :up
system "docker-machine scp index.html $DOCKER_MACHINE_NAME:/data/https-portal/vhosts/#{ENV['TEST_DOMAIN']}/welcome"
page = read_https_content('welcome')
expect(page).to include 'Welcome to my awesome static site powered by HTTPS-PORTAL!'
end
end
| ruby | MIT | f666dc997dc62150df530f978c7b66cf0e0e516c | 2026-01-04T15:45:51.233749Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen.rb | lib/shenzhen.rb | require 'shenzhen/version'
require 'shenzhen/agvtool'
require 'shenzhen/xcodebuild'
require 'shenzhen/plistbuddy'
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plistbuddy.rb | lib/shenzhen/plistbuddy.rb | module Shenzhen::PlistBuddy
class << self
# Reads the value for +key+ from the plist +file+ via PlistBuddy.
# Returns the stripped value string, or nil when the key does not exist,
# the tool produced no output, or PlistBuddy reported "Does Not Exist".
def print(file, key)
  value = `/usr/libexec/PlistBuddy -c "Print :#{key}" "#{file}" 2> /dev/null`
  return nil if !value || value.empty? || /Does Not Exist/ === value
  value.strip
end
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/version.rb | lib/shenzhen/version.rb | module Shenzhen
# Gem version string. Frozen so the constant cannot be mutated in place.
VERSION = '0.14.3'.freeze
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/xcodebuild.rb | lib/shenzhen/xcodebuild.rb | require 'ostruct'
module Shenzhen::XcodeBuild
# Open-ended value object holding parsed `xcodebuild -list` output
# (project name plus grouped entries such as schemes/configurations).
class Info < OpenStruct; end
# Per-target build settings parsed from `xcodebuild -showBuildSettings`.
# Each target name becomes an OpenStruct attribute whose value is that
# target's settings hash; `targets` lists all known target names.
class Settings < OpenStruct
  include Enumerable

  # hash: { "TargetName" => { "SETTING" => "value", ... }, ... }
  def initialize(hash = {})
    super
    self.targets = hash.keys
  end

  # Known target names, in insertion order.
  def members
    targets
  end

  # Yields [target_name, settings_hash] for each target; returns self.
  def each
    members.each do |name|
      yield name, send(name)
    end
    self
  end
end
# Base error for xcodebuild invocation failures.
class Error < StandardError; end
# Error subclass reserved for missing/empty xcodebuild output.
class NilOutputError < Error; end
class << self
# Runs `xcrun xcodebuild -list` and parses its output into an Info struct.
# Positional args are passed through verbatim; a trailing Hash is turned
# into "-key 'value'" flags. Raises Error when xcodebuild reports an
# error; returns nil when the output is blank.
def info(*args)
options = args.last.is_a?(Hash) ? args.pop : {}
output = `xcrun xcodebuild -list #{(args + args_from_options(options)).join(" ")} 2>&1`
raise Error.new $1 if /^xcodebuild\: error\: (.+)$/ === output
return nil unless /\S/ === output
lines = output.split(/\n/)
info, group = {}, nil
# First line looks like `Information about project "Name":` — capture the name.
info[:project] = lines.shift.match(/\"(.+)\"\:/)[1] rescue nil
lines.each do |line|
# A line ending in ":" starts a new group (e.g. "Schemes:" -> "schemes").
if /\:$/ === line
group = line.strip[0...-1].downcase.gsub(/\s+/, '_')
info[group] = []
next
end
# Other lines are entries of the current group; sentence-like lines
# (ending in ".") are skipped.
unless group.nil? or /\.$/ === line
info[group] << line.strip
end
end
# Drop blank entries and duplicates from every grouped list.
info.each do |group, values|
next unless Array === values
values.delete("") and values.uniq!
end
Info.new(info)
end
# Runs `xcrun xcodebuild ... -showBuildSettings` and parses the output
# into a Settings struct keyed by target name. Returns nil when the
# output is blank; raises Error when xcodebuild reports an error.
def settings(*args)
options = args.last.is_a?(Hash) ? args.pop : {}
output = `xcrun xcodebuild #{(args + args_from_options(options)).join(" ")} -showBuildSettings 2> /dev/null`
return nil unless /\S/ === output
raise Error.new $1 if /^xcodebuild\: error\: (.+)$/ === output
lines = output.split(/\n/)
settings, target = {}, nil
lines.each do |line|
case line
# Header line announces which target the following settings belong to.
when /Build settings for action build and target \"?([^":]+)/
target = $1
settings[target] = {}
else
# Settings lines have the form "    KEY = value".
key, value = line.split(/\=/).collect(&:strip)
settings[target][key] = value if target
end
end
Settings.new(settings)
end
# Returns the Xcode version number (e.g. "6.4") parsed from
# `xcrun xcodebuild -version`, or nil when no number can be found.
def version
  banner = `xcrun xcodebuild -version`
  banner[/[\d+\.?]+/] rescue nil
end
private
# Converts an options hash into xcodebuild-style CLI flags, one
# "-key 'value'" string per entry, skipping nil values.
def args_from_options(options = {})
  options.each_with_object([]) do |(key, value), flags|
    flags << "-#{key} '#{value}'" unless value.nil?
  end
end
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/agvtool.rb | lib/shenzhen/agvtool.rb | module Shenzhen::Agvtool
class << self
# Returns the build number printed by `agvtool what-version -terse`,
# or nil when agvtool prints nothing.
def what_version
  result = `agvtool what-version -terse`
  result.empty? ? nil : result
end
alias :vers :what_version
# Extracts the marketing version from `agvtool what-marketing-version
# -terse` output (the text after "=" on the first matching line), or nil.
def what_marketing_version
  `agvtool what-marketing-version -terse`.scan(/\=(.+)$/).flatten.first
end
alias :mvers :what_marketing_version
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/commands.rb | lib/shenzhen/commands.rb | $:.push File.expand_path('../', __FILE__)
require 'plugins/rivierabuild'
require 'plugins/hockeyapp'
require 'plugins/testfairy'
require 'plugins/deploygate'
require 'plugins/itunesconnect'
require 'plugins/ftp'
require 'plugins/s3'
require 'plugins/crashlytics'
require 'plugins/fir'
require 'plugins/pgyer'
require 'commands/build'
require 'commands/distribute'
require 'commands/info'
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/commands/build.rb | lib/shenzhen/commands/build.rb | require 'fileutils'
# ipa build — compile the app with xcodebuild, then package the built .app
# into an .ipa via `xcrun PackageApplication`, copying a zipped .dSYM next
# to it. Swift runtime libraries and WatchKit support files are appended
# to the .ipa when present.
command :build do |c|
c.syntax = 'ipa build [options]'
c.summary = 'Create a new .ipa file for your app'
c.description = ''
c.option '-w', '--workspace WORKSPACE', 'Workspace (.xcworkspace) file to use to build app (automatically detected in current directory)'
c.option '-p', '--project PROJECT', 'Project (.xcodeproj) file to use to build app (automatically detected in current directory, overridden by --workspace option, if passed)'
c.option '-c', '--configuration CONFIGURATION', 'Configuration used to build'
c.option '-s', '--scheme SCHEME', 'Scheme used to build app'
c.option '--xcconfig XCCONFIG', 'use an extra XCCONFIG file to build the app'
c.option '--xcargs XCARGS', 'pass additional arguments to xcodebuild when building the app. Be sure to quote multiple args.'
c.option '--[no-]clean', 'Clean project before building'
c.option '--[no-]archive', 'Archive project after building'
c.option '-d', '--destination DESTINATION', 'Destination. Defaults to current directory'
c.option '-m', '--embed PROVISION', 'Sign .ipa file with .mobileprovision'
c.option '-i', '--identity IDENTITY', 'Identity to be used along with --embed'
c.option '--sdk SDK', 'use SDK as the name or path of the base SDK when building the project'
c.option '--ipa IPA', 'specify the name of the .ipa file to generate (including file extension)'
c.action do |args, options|
validate_xcode_version!
# Collect inputs from flags, falling back to auto-detection/prompts below.
@workspace = options.workspace
@project = options.project unless @workspace
@xcodebuild_info = Shenzhen::XcodeBuild.info(:workspace => @workspace, :project => @project)
@scheme = options.scheme
@sdk = options.sdk || 'iphoneos'
@configuration = options.configuration
@xcconfig = options.xcconfig
@xcargs = options.xcargs
@destination = options.destination || Dir.pwd
@ipa_name_override = options.ipa
FileUtils.mkdir_p(@destination) unless File.directory?(@destination)
determine_workspace_or_project! unless @workspace || @project
if @project
determine_configuration! unless @configuration
say_error "Configuration #{@configuration} not found" and abort unless (@xcodebuild_info.build_configurations.include?(@configuration) rescue false)
end
determine_scheme! unless @scheme
say_error "Scheme #{@scheme} not found" and abort unless (@xcodebuild_info.schemes.include?(@scheme) rescue false)
# NOTE(review): this re-assigns @configuration from the raw flag and
# discards any value determine_configuration! chose above; the nil case
# is later recovered from the xcodebuild settings — verify intentional.
@configuration = options.configuration
# Assemble the xcodebuild flag list shared by the build and settings calls.
flags = []
flags << %{-sdk #{@sdk}}
flags << %{-workspace "#{@workspace}"} if @workspace
flags << %{-project "#{@project}"} if @project
flags << %{-scheme "#{@scheme}"} if @scheme
flags << %{-configuration "#{@configuration}"} if @configuration
flags << %{-xcconfig "#{@xcconfig}"} if @xcconfig
flags << @xcargs if @xcargs
# Pick the first target whose product is an .app bundle.
@target, @xcodebuild_settings = Shenzhen::XcodeBuild.settings(*flags).detect{|target, settings| settings['WRAPPER_EXTENSION'] == "app"}
say_error "App settings could not be found." and abort unless @xcodebuild_settings
if !@configuration
@configuration = @xcodebuild_settings['CONFIGURATION']
flags << "-configuration '#{@configuration}'"
end
say_warning "Building \"#{@workspace || @project}\" with Scheme \"#{@scheme}\" and Configuration \"#{@configuration}\"\n" if $verbose
log "xcodebuild", (@workspace || @project)
xcode = `xcode-select --print-path`.strip
# Assemble build actions; clean/archive can be disabled via --no-clean/--no-archive.
actions = []
actions << :clean unless options.clean == false
actions << :build
actions << :archive unless options.archive == false
ENV['CC'] = nil # Fix for RVM
command = %{xcodebuild #{flags.join(' ')} #{actions.join(' ')} #{'1> /dev/null' unless $verbose}}
puts command if $verbose
abort unless system command
# Re-read settings after the build to locate the produced artifacts.
@target, @xcodebuild_settings = Shenzhen::XcodeBuild.settings(*flags).detect{|target, settings| settings['WRAPPER_EXTENSION'] == "app"}
say_error "App settings could not be found." and abort unless @xcodebuild_settings
@app_path = File.join(@xcodebuild_settings['BUILT_PRODUCTS_DIR'], @xcodebuild_settings['WRAPPER_NAME'])
@dsym_path = @app_path + ".dSYM"
@dsym_filename = File.expand_path("#{@xcodebuild_settings['WRAPPER_NAME']}.dSYM", @destination)
@ipa_name = @ipa_name_override || @xcodebuild_settings['WRAPPER_NAME'].gsub(@xcodebuild_settings['WRAPPER_SUFFIX'], "") + ".ipa"
@ipa_path = File.expand_path(@ipa_name, @destination)
log "xcrun", "PackageApplication"
command = %{xcrun -sdk #{@sdk} PackageApplication -v "#{@app_path}" -o "#{@ipa_path}" --embed "#{options.embed || @dsym_path}" #{"-s \"#{options.identity}\"" if options.identity} #{'--verbose' if $verbose} #{'1> /dev/null' unless $verbose}}
puts command if $verbose
abort unless system command
# Determine whether this is a Swift project and, eventually, the list of libraries to copy from
# Xcode's toolchain directory since there's no "xcodebuild" target to do just that (it is done
# post-build when exporting an archived build from the "Organizer").
@ipa_swift_frameworks = Dir["#{@app_path}/Frameworks/libswift*"]
if not @ipa_swift_frameworks.empty?
# NOTE(review): Dir.mktmpdir needs 'tmpdir'; this file only requires
# 'fileutils' — presumably loaded elsewhere. TODO confirm.
Dir.mktmpdir do |tmpdir|
# Copy all necessary Swift libraries to a temporary "SwiftSupport" directory so that we can
# easily add it to the .ipa later.
swift_support = File.join(tmpdir, "SwiftSupport")
Dir.mkdir(swift_support)
@ipa_swift_frameworks.each do |path|
framework = File.basename(path)
FileUtils.copy_file("#{xcode}/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift/#{@sdk}/#{framework}", File.join(swift_support, framework))
end
# Add "SwiftSupport" to the .ipa archive
Dir.chdir(tmpdir) do
abort unless system %{zip --recurse-paths "#{@ipa_path}" "SwiftSupport" #{'> /dev/null' unless $verbose}}
end
end
end
if watchkit_present?
log "Adding WatchKit support files", "#{xcode}/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/Library/Application Support/WatchKit/WK"
Dir.mktmpdir do |tmpdir|
# Make watchkit support directory
watchkit_support = File.join(tmpdir, "WatchKitSupport")
Dir.mkdir(watchkit_support)
# Copy WK from Xcode into WatchKitSupport
FileUtils.copy_file("#{xcode}/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/Library/Application Support/WatchKit/WK", File.join(watchkit_support, "WK"))
# Add "WatchKitSupport" to the .ipa archive
Dir.chdir(tmpdir) do
abort unless system %{zip --recurse-paths "#{@ipa_path}" "WatchKitSupport" #{'> /dev/null' unless $verbose}}
end
end
end
# Copy the .dSYM next to the .ipa, zip it, then remove the unzipped copy.
log "zip", @dsym_filename
abort unless system %{cp -r "#{@dsym_path}" "#{@destination}" && pushd "#{File.dirname(@dsym_filename)}" && zip -r "#{@dsym_filename}.zip" "#{File.basename(@dsym_filename)}" #{'> /dev/null' unless $verbose} && popd && rm -rf "#{@dsym_filename}"}
say_ok "Successfully built:"
say_ok @ipa_path
end
private
# Aborts with an error message unless the detected Xcode major
# version is at least 4.
def validate_xcode_version!
  version = Shenzhen::XcodeBuild.version
  return if version.split('.')[0].to_i >= 4
  say_error "Shenzhen requires Xcode 4 (found #{version}). Please install or switch to the latest Xcode." and abort
end
# Auto-detects the workspace/project to build from the current directory.
# Prefers workspaces over projects; auto-selects a sole candidate and
# prompts when several exist. Aborts when nothing is found.
def determine_workspace_or_project!
  workspaces = Dir["*.xcworkspace"]
  projects = Dir["*.xcodeproj"]

  if workspaces.empty?
    say_error "No Xcode projects or workspaces found in current directory" and abort if projects.empty?
    @project = projects.length == 1 ? projects.first : choose("Select a project:", *projects)
  else
    @workspace = workspaces.length == 1 ? workspaces.first : choose("Select a workspace:", *workspaces)
  end
end
# Picks the scheme to build: auto-selects a sole scheme, otherwise
# prompts the user. Aborts when the project/workspace reports none.
def determine_scheme!
  schemes = @xcodebuild_info.schemes
  say_error "No schemes found in Xcode project or workspace" and abort unless schemes
  @scheme = schemes.length == 1 ? schemes.first : choose("Select a scheme:", *schemes)
end
# Chooses a build configuration when none was passed: prefers "Debug"
# when available (or when none are listed), auto-selects a sole
# configuration, and otherwise prompts the user.
def determine_configuration!
  configurations = @xcodebuild_info.build_configurations rescue []
  configurations = [] if configurations.nil?

  @configuration =
    if configurations.empty? || configurations.include?("Debug")
      "Debug"
    elsif configurations.length == 1
      configurations.first
    end

  if @configuration
    say_warning "Configuration was not passed, defaulting to #{@configuration}"
  else
    @configuration = choose "Select a configuration:", *configurations
  end
end
# True when any plist inside the built .app sets WKWatchKitApp to true,
# i.e. the app embeds a WatchKit app (checked via PlistBuddy).
def watchkit_present?
Dir["#{@app_path}/**/*.plist"].any? do |plist_path|
`/usr/libexec/PlistBuddy -c 'Print WKWatchKitApp' '#{plist_path}' 2>&1`.strip == 'true'
end
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/commands/info.rb | lib/shenzhen/commands/info.rb | require 'plist'
require 'tempfile'
require 'zip'
require 'zip/filesystem'
# ipa info — extract the embedded.mobileprovision from an .ipa, decode it
# with `security cms`, and print its fields (plus codesign status) as a
# table. Requires the macOS `security` and `codesign` tools.
command :info do |c|
c.syntax = 'ipa info [options]'
c.summary = 'Show mobile provisioning information about an .ipa file'
c.description = ''
c.action do |args, options|
say_error "`security` command not found in $PATH" and abort if `which security` == ""
say_error "`codesign` command not found in $PATH" and abort if `which codesign` == ""
determine_file! unless @file = args.pop
say_error "Missing or unspecified .ipa file" and abort unless @file and ::File.exist?(@file)
Zip::File.open(@file) do |zipfile|
# First guess: the .app inside Payload/ is named after the .ipa file.
app_entry = zipfile.find_entry("Payload/#{File.basename(@file, File.extname(@file))}.app")
provisioning_profile_entry = zipfile.find_entry("#{app_entry.name}embedded.mobileprovision") if app_entry
# Fallback: scan Payload/ for the first *.app directory entry.
if (!provisioning_profile_entry)
zipfile.dir.entries("Payload").each do |dir_entry|
if dir_entry =~ /.app$/
say "Using .app: #{dir_entry}"
app_entry = zipfile.find_entry("Payload/#{dir_entry}")
provisioning_profile_entry = zipfile.find_entry("#{app_entry.name}embedded.mobileprovision") if app_entry
break
end
end
end
say_error "Embedded mobile provisioning file not found in #{@file}" and abort unless provisioning_profile_entry
# Extract the whole archive into a temp dir so the external tools can
# read real files; always cleaned up in the ensure below.
tempdir = ::File.new(Dir.mktmpdir)
begin
zipfile.each do |zip_entry|
temp_entry_path = ::File.join(tempdir.path, zip_entry.name)
FileUtils.mkdir_p(::File.dirname(temp_entry_path))
zipfile.extract(zip_entry, temp_entry_path) unless ::File.exist?(temp_entry_path)
end
temp_provisioning_profile = ::File.new(::File.join(tempdir.path, provisioning_profile_entry.name))
temp_app_directory = ::File.new(::File.join(tempdir.path, app_entry.name))
# Decode the CMS-wrapped profile to XML, then parse it as a plist.
plist = Plist::parse_xml(`security cms -D -i "#{temp_provisioning_profile.path}"`)
codesign = `codesign -dv "#{temp_app_directory.path}" 2>&1`
codesigned = /Signed Time/ === codesign
table = Terminal::Table.new do |t|
plist.each do |key, value|
# The certificate blobs are binary and not useful in a table.
next if key == "DeveloperCertificates"
columns = []
columns << key
columns << case value
when Hash
value.collect{|k, v| "#{k}: #{v}"}.join("\n")
when Array
value.join("\n")
else
value.to_s
end
t << columns
end
t << ["Codesigned", codesigned.to_s.capitalize]
end
puts table
rescue => e
say_error e.message
ensure
FileUtils.remove_entry_secure tempdir
end
end
end
private
# Locates an .ipa in the working directory when none was passed:
# nil when none exist, the file when exactly one, otherwise prompt.
def determine_file!
  candidates = Dir['*.ipa']
  @file ||= case candidates.length
            when 0 then nil
            when 1 then candidates.first
            else @file = choose "Select an .ipa File:", *candidates
            end
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/commands/distribute.rb | lib/shenzhen/commands/distribute.rb | private
# Falls back to an .ipa from the working directory when none was given:
# nil when none exist, the sole file when one, otherwise prompt the user.
def determine_file!
  ipas = Dir['*.ipa']
  @file ||= if ipas.empty?
              nil
            elsif ipas.length == 1
              ipas.first
            else
              @file = choose "Select an .ipa File:", *ipas
            end
end
# Falls back to a .dSYM.zip from the working directory when none was
# given. With several candidates, prefers the one whose basename matches
# the selected .ipa, otherwise prompts the user.
def determine_dsym!
  candidates = Dir['*.dSYM.zip']
  @dsym ||= case candidates.length
            when 0 then nil
            when 1 then candidates.first
            else
              match = candidates.detect do |dsym|
                File.basename(dsym, ".app.dSYM.zip") == File.basename(@file, ".ipa")
              end
              match or choose "Select a .dSYM.zip file:", *candidates
            end
end
# Collects release notes via the user's editor; @notes stays nil when
# the placeholder text comes back unchanged.
def determine_notes!
  placeholder = %{What's new in this release: }
  edited = ask_editor placeholder
  @notes = edited == placeholder ? nil : edited
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/crashlytics.rb | lib/shenzhen/plugins/crashlytics.rb | require 'pathname'
module Shenzhen::Plugins
module Crashlytics
# Thin wrapper around the Crashlytics.framework `submit` binary.
class Client
# Validates that <crashlytics_path>/submit exists and stores credentials.
def initialize(crashlytics_path, api_token, build_secret)
@api_token, @build_secret = api_token, build_secret
@crashlytics_path = Pathname.new("#{crashlytics_path}/submit").cleanpath.to_s
say_error "Path to Crashlytics.framework/submit is invalid" and abort unless File.exist?(@crashlytics_path)
end
# Invokes `submit` with the given options; returns system()'s result
# (true on success, false/nil otherwise).
# NOTE(review): the ipa argument is unused — the .ipa path comes from
# options[:file]; also emails/groups are interpolated unquoted into a
# shell command.
def upload_build(ipa, options)
command = "#{@crashlytics_path} #{@api_token} #{@build_secret} -ipaPath '#{options[:file]}'"
command += " -notesPath '#{options[:notes]}'" if options[:notes]
command += " -emails #{options[:emails]}" if options[:emails]
command += " -groupAliases #{options[:groups]}" if options[:groups]
command += " -notifications #{options[:notifications] ? 'YES' : 'NO'}"
system command
end
end
end
end
# ipa distribute:crashlytics — hand a build to the Crashlytics `submit`
# tool. Inputs resolve from flags, then ENV, then interactive prompts.
command :'distribute:crashlytics' do |c|
c.syntax = "ipa distribute:crashlytics [options]"
c.summary = "Distribute an .ipa file over Crashlytics"
c.description = ""
c.option '-c', '--crashlytics_path PATH', "/path/to/Crashlytics.framework/"
c.option '-f', '--file FILE', ".ipa file for the build"
c.option '-a', '--api_token TOKEN', "API Token. Available at https://www.crashlytics.com/settings/organizations"
c.option '-s', '--build_secret SECRET', "Build Secret. Available at https://www.crashlytics.com/settings/organizations"
c.option '-m', '--notes PATH', "Path to release notes file"
c.option '-e', '--emails EMAIL1,EMAIL2', "Emails of users for access"
c.option '-g', '--groups GROUPS', "Groups for users for access"
c.option '-n', '--notifications [YES | NO]', "Should send notification email to testers?"
c.action do |args, options|
# Resolve required inputs (flag -> ENV -> prompt) and abort when absent.
determine_file! unless @file = options.file
say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)
determine_crashlytics_path! unless @crashlytics_path = options.crashlytics_path || ENV['CRASHLYTICS_FRAMEWORK_PATH']
say_error "Missing path to Crashlytics.framework" and abort unless @crashlytics_path
determine_crashlytics_api_token! unless @api_token = options.api_token || ENV['CRASHLYTICS_API_TOKEN']
say_error "Missing API Token" and abort unless @api_token
determine_crashlytics_build_secret! unless @build_secret = options.build_secret || ENV['CRASHLYTICS_BUILD_SECRET']
say_error "Missing Build Secret" and abort unless @build_secret
parameters = {}
parameters[:file] = @file
parameters[:notes] = options.notes if options.notes
parameters[:emails] = options.emails if options.emails
parameters[:groups] = options.groups if options.groups
parameters[:notifications] = options.notifications == 'YES' if options.notifications
client = Shenzhen::Plugins::Crashlytics::Client.new(@crashlytics_path, @api_token, @build_secret)
if client.upload_build(@file, parameters)
say_ok "Build successfully uploaded to Crashlytics"
else
say_error "Error uploading to Crashlytics" and abort
end
end
private
# Interactive fallbacks: prompt for any value not supplied via flags or ENV.
def determine_crashlytics_path!
@crashlytics_path ||= ask "Path to Crashlytics.framework:"
end
def determine_crashlytics_api_token!
@api_token ||= ask "API Token:"
end
def determine_crashlytics_build_secret!
@build_secret ||= ask "Build Secret:"
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/hockeyapp.rb | lib/shenzhen/plugins/hockeyapp.rb | require 'json'
require 'openssl'
require 'faraday'
require 'faraday_middleware'
module Shenzhen::Plugins
module HockeyApp
# HTTP client for HockeyApp's upload API (multipart over Faraday).
class Client
HOSTNAME = 'upload.hockeyapp.net'
def initialize(api_token)
@api_token = api_token
@connection = Faraday.new(:url => "https://#{HOSTNAME}") do |builder|
builder.request :multipart
builder.request :url_encoded
builder.response :json, :content_type => /\bjson$/
builder.use FaradayMiddleware::FollowRedirects
builder.adapter :net_http
end
end
# Uploads the .ipa (and optional zipped dSYM) with the given form fields.
# Targets the generic upload endpoint, or the app-specific one when
# :public_identifier is set. Yields (status, body) when a block is given
# and returns the Faraday response.
def upload_build(ipa, options)
options[:ipa] = Faraday::UploadIO.new(ipa, 'application/octet-stream') if ipa and File.exist?(ipa)
if dsym_filename = options.delete(:dsym_filename)
options[:dsym] = Faraday::UploadIO.new(dsym_filename, 'application/octet-stream')
end
@connection.post do |req|
if options[:public_identifier].nil?
req.url("/api/2/apps/upload")
else
req.url("/api/2/apps/#{options.delete(:public_identifier)}/app_versions/upload")
end
req.headers['X-HockeyAppToken'] = @api_token
req.body = options
end.on_complete do |env|
yield env[:status], env[:body] if block_given?
end
end
end
end
end
# ipa distribute:hockeyapp — upload a build (and optional dSYM) to
# HockeyApp, mapping CLI flags onto the API's numeric form fields.
command :'distribute:hockeyapp' do |c|
c.syntax = "ipa distribute:hockeyapp [options]"
c.summary = "Distribute an .ipa file over HockeyApp"
c.description = ""
c.option '-f', '--file FILE', ".ipa file for the build"
c.option '-d', '--dsym FILE', "zipped .dsym package for the build"
c.option '-a', '--token TOKEN', "API Token. Available at https://rink.hockeyapp.net/manage/auth_tokens"
c.option '-i', '--identifier PUBLIC_IDENTIFIER', "Public identifier of the app you are targeting, if not specified HockeyApp will use the bundle identifier to choose the right"
c.option '-m', '--notes NOTES', "Release notes for the build (Default: Textile)"
c.option '-r', '--release RELEASE', [:beta, :store, :alpha, :enterprise], "Release type: 0 - Beta, 1 - Store, 2 - Alpha , 3 - Enterprise"
c.option '--markdown', 'Notes are written with Markdown'
c.option '--tags TAGS', "Comma separated list of tags which will receive access to the build"
c.option '--teams TEAMS', "Comma separated list of team ID numbers to which this build will be restricted"
c.option '--users USERS', "Comma separated list of user ID numbers to which this build will be restricted"
c.option '--notify', "Notify permitted teammates to install the build"
c.option '--downloadOff', "Upload but don't allow download of this version just yet"
c.option '--mandatory', "Make this update mandatory"
c.option '--commit-sha SHA', "The Git commit SHA for this build"
c.option '--build-server-url URL', "The URL of the build job on your build server"
c.option '--repository-url URL', "The URL of your source repository"
c.action do |args, options|
# Missing file/dSYM only warns here; the API call itself decides success.
determine_file! unless @file = options.file
say_warning "Missing or unspecified .ipa file" unless @file and File.exist?(@file)
determine_dsym! unless @dsym = options.dsym
say_warning "Specified dSYM.zip file doesn't exist" if @dsym and !File.exist?(@dsym)
determine_hockeyapp_api_token! unless @api_token = options.token || ENV['HOCKEYAPP_API_TOKEN']
say_error "Missing API Token" and abort unless @api_token
determine_notes! unless @notes = options.notes
say_error "Missing release notes" and abort unless @notes
# Translate flags into HockeyApp's string-encoded form parameters.
parameters = {}
parameters[:public_identifier] = options.identifier if options.identifier
parameters[:notes] = @notes
parameters[:notes_type] = options.markdown ? "1" : "0"
parameters[:notify] = "1" if options.notify && !options.downloadOff
parameters[:status] = options.downloadOff ? "1" : "2"
parameters[:tags] = options.tags if options.tags
parameters[:teams] = options.teams if options.teams
parameters[:users] = options.users if options.users
parameters[:dsym_filename] = @dsym if @dsym
parameters[:mandatory] = "1" if options.mandatory
parameters[:release_type] = case options.release
when :beta
"0"
when :store
"1"
when :alpha
"2"
when :enterprise
"3"
end
parameters[:commit_sha] = options.commit_sha if options.commit_sha
parameters[:build_server_url] = options.build_server_url if options.build_server_url
parameters[:repository_url] = options.repository_url if options.repository_url
client = Shenzhen::Plugins::HockeyApp::Client.new(@api_token)
response = client.upload_build(@file, parameters)
case response.status
when 200...300
say_ok "Build successfully uploaded to HockeyApp"
else
say_error "Error uploading to HockeyApp: #{response.body}"
end
end
private
# Interactive fallback: prompt when no token came via flag or ENV.
def determine_hockeyapp_api_token!
@api_token ||= ask "API Token:"
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/itunesconnect.rb | lib/shenzhen/plugins/itunesconnect.rb | require 'security'
require 'fileutils'
require 'digest/md5'
require 'shellwords'
module Shenzhen::Plugins
module ITunesConnect
ITUNES_CONNECT_SERVER = 'Xcode:itunesconnect.apple.com'
# Builds an iTMSTransporter package (Package.itmsp) around the .ipa and
# uploads it to iTunes Connect via Apple's iTMSTransporter tool.
class Client
attr_reader :ipa, :sdk, :params
def initialize(ipa, apple_id, sdk, account, password, params = [])
@ipa = ipa
@apple_id = apple_id
@sdk = sdk
@account = account
@password = password
@params = params
# iTMSTransporter chokes on spaces in filenames, so replace them.
@filename = File.basename(@ipa).tr(" ", "_")
end
# Assembles Package.itmsp (ipa + metadata.xml), runs the transporter,
# and aborts with a generic message on any failure. The package dir is
# always removed afterwards.
def upload_build!
size = File.size(@ipa)
checksum = Digest::MD5.file(@ipa)
begin
FileUtils.mkdir_p("Package.itmsp")
FileUtils.copy_entry(@ipa, "Package.itmsp/#{@filename}")
File.write("Package.itmsp/metadata.xml", metadata(@apple_id, checksum, size))
# Transporter prints errors to stdout; treat any error/fail text as failure.
raise if /(error)|(fail)/i === transport
rescue
say_error "An error occurred when trying to upload the build to iTunesConnect.\nRun with --verbose for more info." and abort
ensure
FileUtils.rm_rf("Package.itmsp", :secure => true)
end
end
private
# Locates iTMSTransporter inside the active Xcode install and runs it in
# upload mode; returns its stdout. Raises when the exit status is non-zero.
# NOTE(review): File.exist? is called on the backslash-escaped path, so
# the first-location check can never match a real path — verify.
def transport
xcode = `xcode-select --print-path`.strip
tool = File.join(File.dirname(xcode), "Applications/Application Loader.app/Contents/MacOS/itms/bin/iTMSTransporter").gsub(/\s/, '\ ')
tool = File.join(File.dirname(xcode), "Applications/Application Loader.app/Contents/itms/bin/iTMSTransporter").gsub(/\s/, '\ ') if !File.exist?(tool)
escaped_password = Shellwords.escape(@password)
args = [tool, "-m upload", "-f Package.itmsp", "-u #{Shellwords.escape(@account)}", "-p #{escaped_password}"]
command = args.join(' ')
# Mask the password when echoing the command in verbose mode.
puts command.sub("-p #{escaped_password}", "-p ******") if $verbose
output = `#{command} 2> /dev/null`
puts output.chomp if $verbose
raise "Failed to upload package to iTunes Connect" unless $?.exitstatus == 0
output
end
# Renders the metadata.xml the transporter expects for a software bundle.
def metadata(apple_id, checksum, size)
%{<?xml version="1.0" encoding="UTF-8"?>
<package version="software4.7" xmlns="http://apple.com/itunes/importer">
<software_assets apple_id="#{apple_id}">
<asset type="bundle">
<data_file>
<file_name>#{@filename}</file_name>
<checksum type="md5">#{checksum}</checksum>
<size>#{size}</size>
</data_file>
</asset>
</software_assets>
</package>
}
end
end
end
end
# ipa distribute:itunesconnect — upload a build to iTunes Connect via
# iTMSTransporter, with optional keychain storage of the password.
command :'distribute:itunesconnect' do |c|
c.syntax = "ipa distribute:itunesconnect [options]"
c.summary = "Upload an .ipa file to iTunes Connect"
c.description = "Upload an .ipa file directly to iTunes Connect for review. Requires that the app is in the 'Waiting for upload' state and the --upload flag to be set."
c.option '-f', '--file FILE', ".ipa file for the build"
c.option '-a', '--account ACCOUNT', "Apple ID used to log into https://itunesconnect.apple.com"
c.option '-p', '--password PASSWORD', "Password for the account unless already stored in the keychain"
c.option '-u', '--upload', "Actually attempt to upload the build to iTunes Connect"
c.option '-w', '--warnings', "Check for warnings when validating the ipa"
c.option '-e', '--errors', "Check for errors when validating the ipa"
c.option '-i', '--apple-id STRING', "Apple ID from iTunes Connect"
c.option '--sdk SDK', "SDK to use when validating the ipa. Defaults to 'iphoneos'"
c.option '--save-keychain', "Save the provided account in the keychain for future use"
c.action do |args, options|
options.default :upload => false, :sdk => 'iphoneos', :save_keychain => true
determine_file! unless @file = options.file
say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)
determine_itunes_connect_account! unless @account = options.account || ENV['ITUNES_CONNECT_ACCOUNT']
say_error "Missing iTunes Connect account" and abort unless @account
apple_id = options.apple_id
say_error "Missing Apple ID" and abort unless apple_id
# Password resolution: flag/ENV first, then the macOS keychain, then a
# prompt (optionally persisting the value back to the keychain).
@password = options.password || ENV['ITUNES_CONNECT_PASSWORD']
if @password.nil? && @password = Security::GenericPassword.find(:s => Shenzhen::Plugins::ITunesConnect::ITUNES_CONNECT_SERVER, :a => @account)
@password = @password.password
say_ok "Found password in keychain for account: #{@account}" if options.verbose
else
determine_itunes_connect_password! unless @password
say_error "Missing iTunes Connect password" and abort unless @password
Security::GenericPassword.add(Shenzhen::Plugins::ITunesConnect::ITUNES_CONNECT_SERVER, @account, @password, {:U => nil}) if options.save_keychain
end
parameters = []
parameters << :warnings if options.warnings
parameters << :errors if options.errors
client = Shenzhen::Plugins::ITunesConnect::Client.new(@file, apple_id, options.sdk, @account, @password, parameters)
client.upload_build!
say_ok "Upload complete."
say_warning "You may want to double check iTunes Connect to make sure it was received correctly."
end
private
# Interactive fallbacks: prompt for credentials not supplied via flags,
# ENV, or the keychain.
def determine_itunes_connect_account!
@account ||= ask "iTunes Connect account:"
end
def determine_itunes_connect_password!
@password ||= password "iTunes Connect password:"
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/fir.rb | lib/shenzhen/plugins/fir.rb | require 'json'
require 'openssl'
require 'faraday'
require 'faraday_middleware'
module Shenzhen::Plugins
module Fir
# HTTP client for fir.im: obtains an upload ticket for a bundle id, then
# pushes the .ipa to the storage endpoint named in the ticket.
class Client
HOSTNAME = 'api.fir.im'
def initialize(user_token)
@user_token = user_token
@connection = Faraday.new(:url => "http://#{HOSTNAME}") do |builder|
builder.request :url_encoded
builder.response :json
builder.use FaradayMiddleware::FollowRedirects
builder.adapter :net_http
end
end
# POSTs /apps to obtain upload credentials for the given bundle id.
# Yields (status, body) when a block is given; returns the response.
def get_upload_ticket(bundle_id)
options = {
:type => 'ios',
:bundle_id => bundle_id,
:api_token => @user_token
}
response = @connection.post('/apps', options) do |env|
yield env[:status], env[:body] if block_given?
end
rescue Faraday::Error::TimeoutError
say_error "Timed out while geting upload ticket." and abort
end
# Multipart-uploads the .ipa with the ticket's key/token and the x:*
# metadata fields. Yields (status, body) when a block is given.
# NOTE(review): the connection base is options['url'] while the POST
# targets options['upload_url'] — presumably the latter is relative to
# the former; confirm against the ticket payload.
def upload_build(ipa, options)
connection = Faraday.new(:url => options['url'], :request => { :timeout => 360 }) do |builder|
builder.request :multipart
builder.response :json
builder.use FaradayMiddleware::FollowRedirects
builder.adapter :net_http
end
upload_options = {
'key' => options['key'],
'token' => options['token'],
'file' => Faraday::UploadIO.new(ipa, 'application/octet-stream'),
"x:name" => options['name'],
"x:version" => options['version'],
"x:build" => options['build'],
"x:release_type" => options["release_type"],
"x:changelog" => options["changelog"]
}
connection.post(options['upload_url'], upload_options).on_complete do |env|
yield env[:status], env[:body] if block_given?
end
rescue Errno::EPIPE
say_error "Upload failed. Check internet connection is ok." and abort
rescue Faraday::Error::TimeoutError
say_error "Timed out while uploading build. Check https://fir.im// to see if the upload was completed." and abort
end
end
end
end
# ipa distribute:fir — upload a build to fir.im. Flow: resolve inputs
# (flags, then ENV, then interactive prompts), request an upload ticket
# for the bundle id, then push the .ipa with the ticket's credentials.
command :'distribute:fir' do |c|
  c.syntax = "ipa distribute:fir [options]"
  c.summary = "Distribute an .ipa file over fir.im"
  c.description = ""
  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-u', '--user_token TOKEN', "User Token. Available at http://fir.im/user/info"
  c.option '-a', '--app_id APPID', "App Id (iOS Bundle identifier)"
  c.option '-n', '--notes NOTES', "Release notes for the build"
  c.option '-N', '--app_name APP_NAME', "Name for app"
  # BUGFIX: this switch was misspelled '--relase_type', so commander never
  # populated options.release_type and the value read below was always
  # nil — every upload silently fell back to "adhoc".
  c.option '-R', '--release_type RELEASE_TYPE', "Release type for app, default is adhoc"
  c.option '-V', '--app_version VERSION', "App Version"
  c.option '-S', '--short_version SHORT', "App Short Version"
  c.action do |args, options|
    # Resolve every input, preferring CLI flags, then ENV, then a prompt.
    determine_file! unless @file = options.file
    say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)
    determine_fir_user_token! unless @user_token = options.user_token || ENV['FIR_USER_TOKEN']
    say_error "Missing User Token" and abort unless @user_token
    determine_fir_app_name! unless @app_name = options.app_name || ENV['FIR_APP_NAME']
    say_error "Missing App Name" and abort unless @app_name
    determine_fir_app_id! unless @app_id = options.app_id || ENV['FIR_APP_ID']
    say_error "Missing App Id" and abort unless @app_id
    determine_notes! unless @notes = options.notes
    say_error "Missing release notes" and abort unless @notes
    determine_app_version! unless @app_version = options.app_version
    determine_short_version! unless @short_version = options.short_version
    client = Shenzhen::Plugins::Fir::Client.new(@user_token)
    app_response = client.get_upload_ticket(@app_id)
    if app_response.status.to_s =~ /20\d/
      # The ticket carries storage credentials under cert.binary and the
      # app's public short-URL slug under 'short'.
      upload_app_options = app_response.body['cert']['binary']
      app_short_uri = app_response.body['short']
      upload_app_options['name'] = @app_name
      upload_app_options['release_type'] = options.release_type || "adhoc"
      upload_app_options['version'] = @app_version
      upload_app_options['build'] = @short_version
      upload_app_options['changelog'] = @notes
      upload_response = client.upload_build(@file, upload_app_options)
      if upload_response.status.to_s =~ /20\d/
        say_ok "Build successfully uploaded to Fir, visit url: http://fir.im/#{app_short_uri}"
      else
        say_error "Error uploading to Fir: #{upload_response.body[:error]}" and abort
      end
    else
      say_error "Error getting app information: #{app_response.body[:error]}"
    end
  end
private
# Interactive fallbacks for the fir command: each prompts only when the
# corresponding instance variable is still unset, and memoizes the answer.

def determine_fir_user_token!
  @user_token ||= ask("User Token:")
end

def determine_fir_app_id!
  @app_id ||= ask("App Id:")
end

def determine_fir_app_name!
  @app_name ||= ask("App Name:")
end

def determine_app_version!
  @app_version ||= ask("App Version:")
end

def determine_short_version!
  @short_version ||= ask("Short Version:")
end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/pgyer.rb | lib/shenzhen/plugins/pgyer.rb | require 'json'
require 'openssl'
require 'faraday'
require 'faraday_middleware'
module Shenzhen::Plugins
module Pgyer
  # HTTP client for the Pgyer app-distribution API.
  class Client
    HOSTNAME = 'www.pgyer.com'

    # user_key / api_key -- Pgyer credentials sent with every request.
    def initialize(user_key, api_key)
      @user_key, @api_key = user_key, api_key
      @connection = Faraday.new(:url => "http://#{HOSTNAME}", :request => { :timeout => 120 }) do |builder|
        builder.request :multipart
        builder.request :json
        builder.response :json, :content_type => /\bjson$/
        builder.use FaradayMiddleware::FollowRedirects
        builder.adapter :net_http
      end
    end

    # POSTs the .ipa to /apiv1/app/upload. Mutates `options` into the request
    # payload. Yields HTTP status and parsed body when a block is given;
    # returns the Faraday response otherwise.
    def upload_build(ipa, options)
      options.update({
        :uKey => @user_key,
        :_api_key => @api_key,
        :file => Faraday::UploadIO.new(ipa, 'application/octet-stream')
      })

      @connection.post("/apiv1/app/upload", options).on_complete do |env|
        yield env[:status], env[:body] if block_given?
      end
    rescue Faraday::Error::TimeoutError
      say_error "Timed out while uploading build. Check http://www.pgyer.com/my to see if the upload was completed." and abort
    end

    # POSTs updated app metadata to /apiv1/app/update, using a url-encoded
    # (non-multipart) connection. Yields HTTP status and parsed body when a
    # block is given.
    def update_app_info(options)
      connection = Faraday.new(:url => "http://#{HOSTNAME}", :request => { :timeout => 120 }) do |builder|
        builder.request :url_encoded
        builder.request :json
        # Removed leftover `builder.response :logger` debug middleware, which
        # dumped every request/response to stdout.
        builder.response :json, :content_type => /\bjson$/
        builder.use FaradayMiddleware::FollowRedirects
        builder.adapter :net_http
      end

      options.update({
        :uKey => @user_key,
        :_api_key => @api_key,
      })

      # Fixed: the block was previously passed straight to `post`, where
      # Faraday yields the request being built (not a completed response env),
      # so `yield env[:status], env[:body]` could never work. Use on_complete,
      # matching upload_build above.
      connection.post("/apiv1/app/update", options).on_complete do |env|
        yield env[:status], env[:body] if block_given?
      end
    rescue Faraday::Error::TimeoutError
      say_error "Timed out while uploading build. Check http://www.pgyer.com/my to see if the upload was completed." and abort
    end
  end
end
end
# `ipa distribute:pgyer` -- upload a build to Pgyer, then push its metadata.
command :'distribute:pgyer' do |c|
  c.syntax = "ipa distribute:pgyer [options]"
  c.summary = "Distribute an .ipa file over Pgyer"
  c.description = ""

  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-a', '--api_key KEY', "API KEY. Available at http://www.pgyer.com/doc/api#uploadApp"
  c.option '-u', '--user_key KEY', "USER KEY. Available at http://www.pgyer.com/doc/api#uploadApp/"
  c.option '--range RANGE', "Publish range. e.g. 1 (default), 2, 3"
  c.option '--[no-]public', "Allow build app on public to download. it is not public default."
  c.option '--password PASSWORD', "Set password to allow visit app web page."

  c.action do |args, options|
    determine_file! unless @file = options.file
    say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)

    determine_pgyer_user_key! unless @user_key = options.user_key || ENV['PGYER_USER_KEY']
    say_error "Missing User Key" and abort unless @user_key

    determine_pgyer_api_key! unless @api_key = options.api_key || ENV['PGYER_API_KEY']
    say_error "Missing API Key" and abort unless @api_key

    determine_publish_range! unless @publish_range = options.range
    say_error "Missing Publish Range" and abort unless @publish_range

    determine_is_public! unless @is_public = !!options.public
    # Pgyer encodes visibility numerically: 1 = public, 2 = private.
    @is_public = @is_public ? 1 : 2

    parameters = {}
    parameters[:publishRange] = @publish_range
    parameters[:isPublishToPublic] = @is_public
    parameters[:password] = options.password.chomp if options.password

    client = Shenzhen::Plugins::Pgyer::Client.new(@user_key, @api_key)
    response = client.upload_build(@file, parameters)
    case response.status
    when 200...300
      app_id = response.body['data']['appKey']
      app_short_uri = response.body['data']['appShortcutUrl']
      # NOTE(review): @notes is never assigned anywhere in this command, so
      # appUpdateDescription is always nil -- confirm whether a notes option
      # (and a determine_notes! call) is missing here.
      app_response = client.update_app_info({
        :aKey => app_id,
        :appUpdateDescription => @notes
      })
      if app_response.status == 200
        say_ok "Build successfully uploaded to Pgyer, visit url: http://www.pgyer.com/#{app_short_uri}"
      else
        say_error "Error update build information: #{response.body}" and abort
      end
    else
      say_error "Error uploading to Pgyer: #{response.body}" and abort
    end
  end

  private

  # Interactive/default fallbacks; each only fills in a still-unset ivar.
  def determine_pgyer_api_key!
    @api_key ||= ask "API Key:"
  end

  def determine_pgyer_user_key!
    @user_key ||= ask "User Key:"
  end

  def determine_publish_range!
    @publish_range ||= "1"
  end

  def determine_is_public!
    @is_public ||= false
  end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/testfairy.rb | lib/shenzhen/plugins/testfairy.rb | require 'json'
require 'openssl'
require 'faraday'
require 'faraday_middleware'
module Shenzhen::Plugins
module TestFairy
  # Thin HTTP client for the TestFairy upload API.
  class Client
    HOSTNAME = 'app.testfairy.com'

    # api_key -- account API key; callers put it into the upload options.
    def initialize(api_key)
      @api_key = api_key
      @connection = Faraday.new(:url => "https://#{HOSTNAME}") do |faraday|
        faraday.request :multipart
        faraday.request :url_encoded
        faraday.response :json, :content_type => /\bjson$/
        faraday.use FaradayMiddleware::FollowRedirects
        faraday.adapter :net_http
      end
    end

    # POSTs the build (and optional symbols file) to /api/upload/.
    # The .ipa is only attached when it exists on disk. Yields HTTP status
    # and parsed body when a block is given.
    def upload_build(ipa, options)
      options[:file] = Faraday::UploadIO.new(ipa, 'application/octet-stream') if ipa and File.exist?(ipa)

      symbols_file = options.delete(:symbols_file)
      options[:symbols_file] = Faraday::UploadIO.new(symbols_file, 'application/octet-stream') if symbols_file

      request = @connection.post do |req|
        req.url("/api/upload/")
        req.body = options
      end
      request.on_complete do |env|
        yield env[:status], env[:body] if block_given?
      end
    end
  end
end
end
# `ipa distribute:testfairy` -- upload a build (and optional dSYM) to TestFairy.
command :'distribute:testfairy' do |c|
  c.syntax = "ipa distribute:testfairy [options]"
  c.summary = "Distribute an .ipa file over TestFairy"
  c.description = ""

  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-d', '--dsym FILE', "zipped .dsym package for the build"
  c.option '-a', '--key KEY', "API Key. Available at https://app.testfairy.com/settings for details."
  c.option '-c', '--comment COMMENT', "Comment for the build"
  c.option '--testers-groups GROUPS', 'Comma-separated list of tester groups to be notified on the new build. Or "all" to notify all testers.'
  c.option '--metrics METRICS', "Comma-separated list of metrics to record"
  c.option '--max-duration DURATION', 'Maximum session recording length, eg 20m or 1h. Default is "10m". Maximum 24h.'
  c.option '--video ACTIVE', 'Video recording settings "on", "off" or "wifi" for recording video only when wifi is available. Default is "on".'
  c.option '--video-quality QUALITY', 'Video quality settings, "high", "medium" or "low". Default is "high".'
  c.option '--video-rate RATE', 'Video rate recording in frames per second, default is "1.0".'
  c.option '--icon-watermark ADD', 'Add a small watermark to app icon. Default is "off".'

  c.action do |args, options|
    # A missing .ipa only warns (no abort) here; the client skips attaching
    # a nonexistent file.
    determine_file! unless @file = options.file
    say_warning "Missing or unspecified .ipa file" unless @file and File.exist?(@file)

    determine_dsym! unless @dsym = options.dsym
    say_warning "Specified dSYM.zip file doesn't exist" if @dsym and !File.exist?(@dsym)

    determine_testfairy_api_key! unless @api_key = options.key || ENV['TESTFAIRY_API_KEY']
    say_error "Missing API Key" and abort unless @api_key

    determine_notes! unless @comment = options.comment
    say_error "Missing release comment" and abort unless @comment

    parameters = {}

    # Required
    parameters[:api_key] = @api_key

    # Optional
    parameters[:comment] = @comment
    parameters[:symbols_file] = @dsym if @dsym
    parameters[:testers_groups] = options.testers_groups if options.testers_groups
    parameters[:'max-duration'] = options.max_duration if options.max_duration
    parameters[:video] = options.video if options.video
    parameters[:'video-quality'] = options.video_quality if options.video_quality
    parameters[:'video-rate'] = options.video_rate if options.video_rate
    parameters[:'icon-watermark'] = options.icon_watermark if options.icon_watermark
    parameters[:metrics] = options.metrics if options.metrics

    client = Shenzhen::Plugins::TestFairy::Client.new(@api_key)
    response = client.upload_build(@file, parameters)
    case response.status
    when 200...300
      say_ok "Build successfully uploaded to TestFairy"
    else
      say_error "Error uploading to TestFairy: #{response.body}"
    end
  end

  private

  # Prompts for the API key only when it was not supplied via option/env.
  def determine_testfairy_api_key!
    @api_key ||= ask "API Key:"
  end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/ftp.rb | lib/shenzhen/plugins/ftp.rb | require 'net/ftp'
require 'net/sftp'
module Shenzhen::Plugins
module FTP
  # Uploads builds over plain FTP. The remote path may embed Info.plist keys
  # (e.g. "{CFBundleVersion}") which are substituted from the .ipa's plist.
  class Client
    def initialize(host, port, user, password)
      @host, @port, @user, @password = host, port, user, password
    end

    # Uploads the .ipa (and optional dSYM) to the remote path.
    # options: :path (remote dir), :mkdir (create intermediate dirs), :dsym.
    def upload(ipa, options = {})
      connection = Net::FTP.new
      connection.passive = true
      connection.connect(@host, @port)

      path = expand_path_with_substitutions_from_ipa_plist(ipa, options[:path])

      begin
        connection.login(@user, @password) rescue raise "Login authentication failed"

        if options[:mkdir]
          # Create each path component in turn; "File exists" errors are
          # expected for components that already exist.
          components, pwd = path.split(/\//).reject(&:empty?), nil
          components.each do |component|
            pwd = File.join(*[pwd, component].compact)

            begin
              connection.mkdir pwd
            rescue => exception
              raise exception unless /File exists/ === exception.message
            end
          end
        end

        connection.chdir path unless path.empty?
        connection.putbinaryfile ipa, File.basename(ipa)
        connection.putbinaryfile(options[:dsym], File.basename(options[:dsym])) if options[:dsym]
      ensure
        connection.close
      end
    end

    private

    # Replaces "{CFBundleXxx}" placeholders in `path` with the matching
    # values from the .ipa's Info.plist. Returns `path` unchanged (without
    # unzipping) when it contains no placeholders.
    def expand_path_with_substitutions_from_ipa_plist(ipa, path)
      substitutions = path.scan(/\{CFBundle[^}]+\}/)
      return path if substitutions.empty?

      Dir.mktmpdir do |dir|
        # Fixed: shell-escape both paths (consistent with the S3 plugin) so
        # IPA or temp-dir paths containing spaces/metacharacters don't break
        # the unzip or inject into the shell.
        system "unzip -q #{Shellwords.escape(ipa)} -d #{Shellwords.escape(dir)} 2> /dev/null"
        plist = Dir["#{dir}/**/*.app/Info.plist"].last

        substitutions.uniq.each do |substitution|
          key = substitution[1...-1]
          value = Shenzhen::PlistBuddy.print(plist, key)

          path.gsub!(Regexp.new(substitution), value) if value
        end
      end

      return path
    end
  end
end
module SFTP
  # SFTP variant of the uploader; inherits the Info.plist path-substitution
  # helper from the FTP client.
  class Client < Shenzhen::Plugins::FTP::Client
    # Uploads the .ipa (and optional dSYM) over SFTP.
    # options: :path (remote dir), :mkdir (create it if absent), :dsym.
    def upload(ipa, options = {})
      ssh = Net::SSH.start(@host, @user, :password => @password, :port => @port)
      sftp = Net::SFTP::Session.new(ssh).connect!

      remote_path = expand_path_with_substitutions_from_ipa_plist(ipa, options[:path])

      begin
        # stat! both probes the remote directory and drives the uploads once
        # its response arrives.
        sftp.stat!(remote_path) do |response|
          sftp.mkdir! remote_path if options[:mkdir] and not response.ok?
          sftp.upload! ipa, determine_file_path(File.basename(ipa), remote_path)
          sftp.upload! options[:dsym], determine_file_path(File.basename(options[:dsym]), remote_path) if options[:dsym]
        end
      ensure
        sftp.close_channel
        ssh.shutdown!
      end
    end

    # Joins file_name onto path, or returns file_name alone when path is empty.
    def determine_file_path(file_name, path)
      path.empty? ? file_name : "#{path}/#{file_name}"
    end
  end
end
end
# `ipa distribute:ftp` -- upload a build over FTP or SFTP (see the alias
# `distribute:sftp` below, which forces --protocol sftp).
command :'distribute:ftp' do |c|
  c.syntax = "ipa distribute:ftp [options]"
  c.summary = "Distribute an .ipa file over FTP"
  c.description = ""

  c.example '', '$ ipa distribute:ftp --host 127.0.0.1 -f ./file.ipa -u username --path "/path/to/folder/{CFBundleVersion}/" --mkdir'

  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-d', '--dsym FILE', "zipped .dsym package for the build"
  c.option '-h', '--host HOST', "FTP host"
  c.option '-u', '--user USER', "FTP user"
  c.option '-p', '--password PASS', "FTP password"
  c.option '-P', '--path PATH', "FTP path. Values from Info.plist will be substituted for keys wrapped in {} \n\t\t e.g. \"/path/to/folder/{CFBundleVersion}/\" would be evaluated as \"/path/to/folder/1.0.0/\""
  c.option '--port PORT', "FTP port"
  c.option '--protocol [PROTOCOL]', [:ftp, :sftp], "Protocol to use (ftp, sftp)"
  c.option '--[no-]mkdir', "Create directories on FTP if they don't already exist"

  c.action do |args, options|
    options.default :mkdir => true

    determine_file! unless @file = options.file
    say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)

    determine_dsym! unless @dsym = options.dsym
    say_warning "Specified dSYM.zip file doesn't exist" unless @dsym and File.exist?(@dsym)

    determine_host! unless @host = options.host
    say_error "Missing FTP host" and abort unless @host

    # Defaults the port from the chosen protocol when not given explicitly.
    determine_port!(options.protocol) unless @port = options.port

    determine_user! unless @user = options.user
    say_error "Missing FTP user" and abort unless @user

    # SFTP may authenticate via SSH keys, so a password is only required
    # (and prompted for) for plain FTP.
    @password = options.password
    if !@password && options.protocol != :sftp
      determine_password!
      say_error "Missing FTP password" and abort unless @password
    end

    @path = options.path || ""

    client = case options.protocol
             when :sftp
               Shenzhen::Plugins::SFTP::Client.new(@host, @port, @user, @password)
             else
               Shenzhen::Plugins::FTP::Client.new(@host, @port, @user, @password)
             end

    begin
      client.upload @file, {:path => @path, :dsym => @dsym, :mkdir => !!options.mkdir}
      say_ok "Build successfully uploaded to FTP"
    rescue => exception
      say_error "Error while uploading to FTP: #{exception}"
      raise if options.trace
    end
  end

  private

  def determine_host!
    @host ||= ask "FTP Host:"
  end

  # Picks the protocol's well-known default port.
  def determine_port!(protocol)
    @port = case protocol
            when :sftp
              Net::SSH::Transport::Session::DEFAULT_PORT
            else
              Net::FTP::FTP_PORT
            end
  end

  def determine_user!
    @user ||= ask "Username:"
  end

  def determine_password!
    @password ||= password "Password:"
  end
end

alias_command :'distribute:sftp', :'distribute:ftp', '--protocol', 'sftp'
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/s3.rb | lib/shenzhen/plugins/s3.rb | require 'aws-sdk'
require 'shellwords'
module Shenzhen::Plugins
module S3
  # Uploads builds to Amazon S3 via aws-sdk v1. Remote keys may embed
  # Info.plist placeholders (e.g. "{CFBundleVersion}") in the configured path.
  class Client
    def initialize(access_key_id, secret_access_key, region)
      @s3 = AWS::S3.new(:access_key_id => access_key_id,
                        :secret_access_key => secret_access_key,
                        :region => region)
    end

    # Uploads the .ipa (and optional dSYM) into options[:bucket], creating
    # the bucket first when options[:create] is set. Returns the public URLs
    # of the uploaded objects.
    def upload_build(ipa, options)
      path = expand_path_with_substitutions_from_ipa_plist(ipa, options[:path]) if options[:path]

      @s3.buckets.create(options[:bucket]) if options[:create]
      bucket = @s3.buckets[options[:bucket]]

      artefacts = [ipa]
      artefacts << options[:dsym] if options[:dsym]

      artefacts.map do |artefact|
        basename = File.basename(artefact)
        key = path ? File.join(path, basename) : basename
        File.open(artefact) do |io|
          bucket.objects.create(key, io, :acl => options[:acl]).public_url.to_s
        end
      end
    end

    private

    # Replaces "{CFBundleXxx}" placeholders in `path` with values read from
    # the .ipa's Info.plist; returns `path` unchanged when none are present.
    def expand_path_with_substitutions_from_ipa_plist(ipa, path)
      substitutions = path.scan(/\{CFBundle[^}]+\}/)
      return path if substitutions.empty?

      Dir.mktmpdir do |dir|
        system "unzip -q #{Shellwords.escape(ipa)} -d #{Shellwords.escape(dir)} 2> /dev/null"
        plist = Dir["#{dir}/**/*.app/Info.plist"].last

        substitutions.uniq.each do |substitution|
          key = substitution[1...-1]
          value = Shenzhen::PlistBuddy.print(plist, key)
          path.gsub!(Regexp.new(substitution), value) if value
        end
      end

      return path
    end
  end
end
end
# `ipa distribute:s3` -- upload a build (and optional dSYM) to Amazon S3.
command :'distribute:s3' do |c|
  c.syntax = "ipa distribute:s3 [options]"
  c.summary = "Distribute an .ipa file over Amazon S3"
  c.description = ""

  c.example '', '$ ipa distribute:s3 -f ./file.ipa -a accesskeyid --bucket bucket-name'

  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-d', '--dsym FILE', "zipped .dsym package for the build"
  c.option '-a', '--access-key-id ACCESS_KEY_ID', "AWS Access Key ID"
  c.option '-s', '--secret-access-key SECRET_ACCESS_KEY', "AWS Secret Access Key"
  c.option '-b', '--bucket BUCKET', "S3 bucket"
  c.option '--[no-]create', "Create bucket if it doesn't already exist"
  c.option '-r', '--region REGION', "Optional AWS region (for bucket creation)"
  c.option '--acl ACL', "Uploaded object permissions e.g public_read (default), private, public_read_write, authenticated_read"
  c.option '--source-dir SOURCE', "Optional source directory e.g. ./build"
  c.option '-P', '--path PATH', "S3 'path'. Values from Info.plist will be substituded for keys wrapped in {} \n\t\t eg. \"/path/to/folder/{CFBundleVersion}/\" could be evaluated as \"/path/to/folder/1.0.0/\""

  c.action do |args, options|
    # All subsequent relative lookups happen from the source dir if given.
    Dir.chdir(options.source_dir) if options.source_dir

    determine_file! unless @file = options.file
    say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)

    determine_dsym! unless @dsym = options.dsym
    # NOTE(review): this only prints an error without aborting -- confirm
    # whether a missing dSYM should be fatal here.
    say_error "Specified dSYM.zip file doesn't exist" if @dsym and !File.exist?(@dsym)

    determine_access_key_id! unless @access_key_id = options.access_key_id
    say_error "Missing AWS Access Key ID" and abort unless @access_key_id

    determine_secret_access_key! unless @secret_access_key = options.secret_access_key
    say_error "Missing AWS Secret Access Key" and abort unless @secret_access_key

    determine_bucket! unless @bucket = options.bucket
    say_error "Missing bucket" and abort unless @bucket

    determine_region! unless @region = options.region

    determine_acl! unless @acl = options.acl
    say_error "Missing ACL" and abort unless @acl

    @path = options.path

    client = Shenzhen::Plugins::S3::Client.new(@access_key_id, @secret_access_key, @region)

    begin
      urls = client.upload_build @file, {:bucket => @bucket, :create => !!options.create, :acl => @acl, :dsym => @dsym, :path => @path}
      urls.each { |url| say_ok url}
      say_ok "Build successfully uploaded to S3"
    rescue => exception
      say_error "Error while uploading to S3: #{exception}"
    end
  end

  private

  # Fallbacks: environment variable first, then interactive prompt.
  def determine_access_key_id!
    @access_key_id ||= ENV['AWS_ACCESS_KEY_ID']
    @access_key_id ||= ask "Access Key ID:"
  end

  def determine_secret_access_key!
    @secret_access_key ||= ENV['AWS_SECRET_ACCESS_KEY']
    @secret_access_key ||= ask "Secret Access Key:"
  end

  def determine_bucket!
    @bucket ||= ENV['S3_BUCKET']
    @bucket ||= ask "S3 Bucket:"
  end

  def determine_region!
    @region ||= ENV['AWS_REGION']
  end

  def determine_acl!
    @acl ||= "public_read"
  end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/deploygate.rb | lib/shenzhen/plugins/deploygate.rb | require 'json'
require 'openssl'
require 'faraday'
require 'faraday_middleware'
module Shenzhen::Plugins
module DeployGate
  # Minimal HTTP client for the DeployGate upload API.
  class Client
    HOSTNAME = 'deploygate.com'

    def initialize(api_token, user_name)
      @api_token, @user_name = api_token, user_name
      @connection = Faraday.new(:url => "https://#{HOSTNAME}", :request => { :timeout => 120 }) do |faraday|
        faraday.request :multipart
        faraday.request :json
        faraday.response :json, :content_type => /\bjson$/
        faraday.use FaradayMiddleware::FollowRedirects
        faraday.adapter :net_http
      end
    end

    # POSTs the build to the user's apps endpoint. Mutates `options` into the
    # request payload (token, file upload, defaulted message); yields HTTP
    # status and parsed body when a block is given.
    def upload_build(ipa, options)
      options[:token]   = @api_token
      options[:file]    = Faraday::UploadIO.new(ipa, 'application/octet-stream')
      options[:message] = options[:message] || ''

      @connection.post("/api/users/#{@user_name}/apps", options).on_complete do |env|
        yield env[:status], env[:body] if block_given?
      end
    rescue Faraday::Error::TimeoutError
      say_error "Timed out while uploading build. Check https://deploygate.com/ to see if the upload was completed." and abort
    end
  end
end
end
# `ipa distribute:deploygate` -- upload a build to DeployGate.
command :'distribute:deploygate' do |c|
  c.syntax = "ipa distribute:deploygate [options]"
  c.summary = "Distribute an .ipa file over deploygate"
  c.description = ""

  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-a', '--api_token TOKEN', "API Token. Available at https://deploygate.com/settings"
  c.option '-u', '--user_name USER_NAME', "User Name. Available at https://deploygate.com/settings"
  c.option '-m', '--message MESSAGE', "Release message for the build"
  c.option '-d', '--distribution_key DISTRIBUTION_KEY', "distribution key for distribution page"
  c.option '-n', '--disable_notify', "disable notification"
  c.option '-r', '--release_note RELEASE_NOTE', "release note for distribution page"
  c.option '-v', '--visibility (private|public)', "privacy setting ( require public for personal free account)"
  # Added: `options.replace` was read below but no such option was declared,
  # so the replace parameter could never be sent.
  c.option '--replace', "replace the existing build of the same version"

  c.action do |args, options|
    determine_file! unless @file = options.file
    say_error "Missing or unspecified .ipa file" and abort unless @file and File.exist?(@file)

    determine_deploygate_api_token! unless @api_token = options.api_token || ENV['DEPLOYGATE_API_TOKEN']
    say_error "Missing API Token" and abort unless @api_token

    determine_deploygate_user_name! unless @user_name = options.user_name || ENV['DEPLOYGATE_USER_NAME']
    # Fixed: this guard previously re-checked @api_token (copy-paste), so a
    # missing user name was never caught here.
    say_error "Missing User Name" and abort unless @user_name

    @message = options.message
    # NOTE(review): the env var name keeps the historical "DESTRIBUTION"
    # spelling -- renaming it would break existing setups.
    @distribution_key = options.distribution_key || ENV['DEPLOYGATE_DESTRIBUTION_KEY']
    @release_note = options.release_note
    # DeployGate expects the literal string "yes" (or the field absent).
    @disable_notify = ! options.disable_notify.nil? ? "yes" : nil
    @visibility = options.visibility

    parameters = {}
    parameters[:file] = @file
    parameters[:message] = @message
    parameters[:distribution_key] = @distribution_key if @distribution_key
    parameters[:release_note] = @release_note if @release_note
    parameters[:disable_notify] = @disable_notify if @disable_notify
    parameters[:visibility] = @visibility if @visibility
    parameters[:replace] = "true" if options.replace

    client = Shenzhen::Plugins::DeployGate::Client.new(@api_token, @user_name)
    response = client.upload_build(@file, parameters)
    if (200...300) === response.status and not response.body["error"]
      say_ok "Build successfully uploaded to DeployGate"
    else
      say_error "Error uploading to DeployGate: #{response.body["error"] || "(Unknown Error)"}" and abort
    end
  end

  private

  def determine_deploygate_api_token!
    @api_token ||= ask "API Token:"
  end

  def determine_deploygate_user_name!
    @user_name ||= ask "User Name:"
  end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
nomad-cli/shenzhen | https://github.com/nomad-cli/shenzhen/blob/b745dc21a234f2003d929c88d57fafd96bfe8f97/lib/shenzhen/plugins/rivierabuild.rb | lib/shenzhen/plugins/rivierabuild.rb | require 'json'
require 'openssl'
require 'faraday'
require 'faraday_middleware'
module Shenzhen::Plugins
module RivieraBuild
  # HTTP client for the RivieraBuild upload API.
  class Client
    HOSTNAME = 'apps.rivierabuild.com'

    # api_token -- RivieraBuild API key; callers also put it into the options.
    def initialize(api_token)
      @api_token = api_token
      @connection = Faraday.new(:url => "https://#{HOSTNAME}", :request => { :timeout => 120 }) do |builder|
        builder.request :multipart
        builder.request :url_encoded
        builder.response :json, :content_type => /\bjson$/
        builder.use FaradayMiddleware::FollowRedirects
        builder.adapter :net_http
      end
    end

    # POSTs the build to /api/upload. The file is attached only when `ipa`
    # points at an existing file. Yields HTTP status and parsed body when a
    # block is given; returns the Faraday response otherwise.
    def upload_build(ipa, options)
      options[:file] = Faraday::UploadIO.new(ipa, 'application/octet-stream') if ipa and File.exist?(ipa)

      @connection.post do |req|
        req.url("/api/upload")
        req.body = options
      end.on_complete do |env|
        yield env[:status], env[:body] if block_given?
      end
    end
  end
end
end
# `ipa distribute:rivierabuild` -- upload a build to RivieraBuild.
command :'distribute:rivierabuild' do |c|
  c.syntax = "ipa distribute:rivierabuild [options]"
  c.summary = "Distribute an .ipa file over RivieraBuild"
  c.description = ""

  c.option '-f', '--file FILE', ".ipa file for the build"
  c.option '-k', '--key KEY', "API KEY. Available at https://apps.rivierabuild.com/settings"
  c.option '-a', '--availability AVAILABILITY', "For how long the build will be available? More info: http://api.rivierabuild.com"
  c.option '-p', '--passcode PASSCODE', "Optional passcode required to install the build on a device"
  c.option '-n', '--note NOTE', "Release notes for the build, Markdown"
  c.option '--commit-sha SHA', "The Git commit SHA for this build"
  # Fixed: the option had no argument placeholder, so commander treated
  # --app-id as a boolean switch and `options.app_id` was `true` rather than
  # the supplied ID (the API then received app_id=true).
  c.option '--app-id APP_ID', "Riviera Build Application ID"

  c.action do |args, options|
    # A missing .ipa only warns; the client skips attaching a missing file.
    determine_file! unless @file = options.file
    say_warning "Missing or unspecified .ipa file" unless @file and File.exist?(@file)

    determine_rivierabuild_api_token! unless @api_token = options.key || ENV['RIVIERA_API_KEY']
    say_error "Missing API Token" and abort unless @api_token

    determine_availability! unless @availability = options.availability
    say_error "Missing availability" and abort unless @availability

    parameters = {}
    parameters[:api_key] = @api_token
    parameters[:availability] = @availability
    parameters[:passcode] = options.passcode if options.passcode
    parameters[:app_id] = options.app_id if options.app_id
    parameters[:note] = options.note if options.note
    parameters[:commit_sha] = options.commit_sha if options.commit_sha

    client = Shenzhen::Plugins::RivieraBuild::Client.new(@api_token)
    response = client.upload_build(@file, parameters)
    case response.status
    when 200...300
      say_ok "Build successfully uploaded to RivieraBuild: #{response.body['file_url']}"
    else
      say_error "Error uploading to RivieraBuild: #{response.body}"
    end
  end

  private

  def determine_rivierabuild_api_token!
    @api_token ||= ask "API Key:"
  end
end
| ruby | MIT | b745dc21a234f2003d929c88d57fafd96bfe8f97 | 2026-01-04T15:45:52.367764Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/helper_spec.rb | spec/helper_spec.rb | require "_helpers"
# Unit tests for standalone helper functions pulled in via wraith/cli.
describe "Wraith helpers classes and functions" do
  describe "the convert_to_absolute function" do
    it "should return false if no filepath provided" do
      # Note: the helper returns the *string* 'false', not the boolean.
      expect(convert_to_absolute(nil)).to eq 'false'
    end

    it "should convert a relative path to absolute" do
      relative = 'my/filepath'
      absolute = convert_to_absolute relative
      # An absolute path starts at the root and must be longer than the input.
      expect(absolute[0]).to eq '/'
      expect(absolute.length).to be > relative.length
      expect(absolute).to match(/\/(.+)\/(.+)\/my\/filepath/)
    end

    it "should leave an absolute path unchanged" do
      relative = '/my/filepath'
      absolute = convert_to_absolute relative
      expect(absolute).to eq relative
    end

    it "should leave a Windows-flavoured absolute path unchanged" do
      # Drive-letter paths must not be prefixed with the current directory.
      relative = 'C:/Code/Wraith/javascript/global.js'
      absolute = convert_to_absolute relative
      expect(absolute).to eq relative
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/resize_reload_spec.rb | spec/resize_reload_spec.rb | require "_helpers"
# Tests for the resize-vs-reload screenshot strategies: "resize" captures all
# widths in one browser job, "reload" spawns one job per width.
describe "wraith config" do
  let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--phantom.yaml" }
  let(:saving) { Wraith::SaveImages.new(config_name) }

  describe "saving images" do
    it "should pass the width plainly to the CLI when running in inefficient mode" do
      prepared_width = saving.prepare_widths_for_cli 432
      expect(prepared_width).to eq 432
    end

    it "should pass an array of widths to CLI when running in efficient mode" do
      # Widths are joined into a single comma-separated CLI argument.
      prepared_width = saving.prepare_widths_for_cli [432, 21, 100]
      expect(prepared_width).to eq "432,21,100"
    end

    it "should create fewer jobs when in efficient mode" do
      # Shared YAML config; each variant below appends its resize_or_reload
      # strategy before parsing.
      base_config = '
domains:
  test: http://www.bbc.com
paths:
  test: /mypage
directory:
  test
screen_widths:
  - 320
  - 464
  - 624
'
      efficient_config = YAML.load(base_config + "
resize_or_reload: resize
")
      efficient_saving = Wraith::SaveImages.new(efficient_config, false, true)

      inefficient_config = YAML.load(base_config + "
resize_or_reload: reload
")
      inefficient_saving = Wraith::SaveImages.new(inefficient_config, false, true)

      efficient_jobs = efficient_saving.define_jobs
      inefficient_jobs = inefficient_saving.define_jobs

      expect(efficient_jobs.length).to be 1
      expect(inefficient_jobs.length).to be 3 # 1 for each screen width

      # [["test", "/mypage", "320,464,624", "http://www.bbc.com/mypage", "test/MULTI__test.png", " ", "false", "false"]]
      expect(efficient_jobs[0][2]).to eq "320,464,624"
      # [["test", "/mypage", 320, "http://www.bbc.com/mypage", "test/320__test.png", " ", "false", "false"], ["test", "/mypage", 464, "http://www.bbc.com/mypage", "/test/464__test.png", " ", "false", "false"], ["test", "/mypage", 624, "http://www.bbc.com/mypage", "/test/624__test.png", " ", "false", "false"]]
      expect(inefficient_jobs[0][2]).to eq 320
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/construct_command_spec.rb | spec/construct_command_spec.rb | require "_helpers"
# Tests that SaveImages#construct_command turns config values into the exact
# phantomjs/casperjs shell command line.
describe "Wraith config to CLI argument mapping" do
  describe "passing variables to construct_command" do
    # set default variables we can override if necessary
    let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--phantom.yaml" }
    let(:saving) { Wraith::SaveImages.new(config_name) }
    let(:width) { 320 }
    let(:url) { "http://example.com/my-page" }
    let(:file_name) { "wraith/my-page/320_phantomjs_latest.png" }
    let(:selector) { ".my_selector" }
    let(:global_bc) { "javascript/before_capture.js" }
    let(:path_bc) { false }

    it "should take a load of variables and construct a command" do
      expected = "phantomjs '#{Dir.pwd}/lib/wraith/javascript/phantom.js' 'http://example.com/my-page' '320' 'wraith/my-page/320_phantomjs_latest.png' '.my_selector' '#{Dir.pwd}/javascript/before_capture.js' 'false'"
      actual = saving.construct_command(width, url, file_name, selector, global_bc, path_bc)
      expect(actual).to eq expected
    end

    it "should allow hashtags in selectors" do
      # '#' must be escaped so the shell doesn't treat it as a comment.
      selector = '#some-id'
      expected = "phantomjs '#{Dir.pwd}/lib/wraith/javascript/phantom.js' 'http://example.com/my-page' '320' 'wraith/my-page/320_phantomjs_latest.png' '\\#some-id' '#{Dir.pwd}/javascript/before_capture.js' 'false'"
      actual = saving.construct_command(width, url, file_name, selector, global_bc, path_bc)
      expect(actual).to eq expected
    end

    it "should be able to pass multiple widths at once" do
      width = [320, 624, 976]
      expected = "phantomjs '#{Dir.pwd}/lib/wraith/javascript/phantom.js' 'http://example.com/my-page' '320,624,976' 'wraith/my-page/320_phantomjs_latest.png' '.my_selector' '#{Dir.pwd}/javascript/before_capture.js' 'false'"
      actual = saving.construct_command(width, url, file_name, selector, global_bc, path_bc)
      expect(actual).to eq expected
    end

    it "should call casperjs when the config says so" do
      # The casper config points at a custom snap file instead of phantom.js.
      config_name = get_path_relative_to(__FILE__, "./configs/test_config--casper.yaml")
      saving = Wraith::SaveImages.new(config_name)
      expected = "casperjs '#{Dir.pwd}/spec/js/custom_snap_file.js' 'http://example.com/my-page' '320' 'wraith/my-page/320_phantomjs_latest.png' '.my_selector' '#{Dir.pwd}/javascript/before_capture.js' 'false'"
      actual = saving.construct_command(width, url, file_name, selector, global_bc, path_bc)
      expect(actual).to eq expected
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/_helpers.rb | spec/_helpers.rb | require "rspec"
require "./lib/wraith/cli"
# Captures the two baseline screenshots used by the diffing specs.
# Relies on RSpec `let` bindings in the calling example (saving, test_url1/2,
# test_image1/2, selector); shells out to run the generated capture command.
def create_diff_image
  capture_image = saving.construct_command(320, test_url1, test_image1, selector, false, false)
  `#{capture_image}`

  capture_image = saving.construct_command(320, test_url2, test_image2, selector, false, false)
  `#{capture_image}`
end
# Crops the captured screenshots to matching dimensions (expects `config_name`
# to be defined by the calling spec).
def crop_images
  Wraith::CropImages.new(config_name).crop_images
end
# Diffs the two captured screenshots, writing the diff image and the diff
# data file (expects `config_name`, `test_image1/2`, `diff_image`, `data_txt`
# from the calling spec).
def compare_images
  Wraith::CompareImages.new(config_name).compare_task(test_image1, test_image2, diff_image, data_txt)
end
# Resolves +file_to_find+ relative to the directory containing
# +current_file+ and returns the absolute, normalised path.
def get_path_relative_to(current_file, file_to_find)
  base_dir = File.dirname(current_file)
  File.expand_path(File.join(base_dir, file_to_find))
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/save_images_spec.rb | spec/save_images_spec.rb | require "_helpers"
require "image_size"
# Integration specs for Wraith's capture/compare/thumbnail pipeline.
# NOTE(review): these hit live bbc.com pages, so they need network access
# plus phantomjs and headless Chrome available on the PATH.
describe Wraith do
  let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--phantom.yaml" }
  let(:config_chrome) { get_path_relative_to __FILE__, "./configs/test_config--chrome.yaml" }
  let(:test_url1) { "http://www.bbc.com/afrique" }
  let(:test_url2) { "http://www.bbc.com/russian" }
  let(:test_image1) { "shots/test/test1.png" }
  let(:test_image_chrome) { "shots_chrome/test/test_chrome.png" }
  let(:test_image_chrome_selector) { "shots_chrome/test/test_chrome_selector.png" }
  let(:test_image2) { "shots/test/test(2).png" }
  let(:diff_image) { "shots/test/test_diff.png" }
  let(:data_txt) { "shots/test/test.txt" }
  let(:selector) { "" }
  let(:saving) { Wraith::SaveImages.new(config_name) }
  let(:saving_chrome) { Wraith::SaveImages.new(config_chrome) }
  let(:wraith) { Wraith::Wraith.new(config_name) }

  before(:each) do
    # Start every example from empty shot directories.
    Wraith::FolderManager.new(config_name).clear_shots_folder
    Wraith::FolderManager.new(config_chrome).clear_shots_folder
    Dir.mkdir("shots/test")
    Dir.mkdir("shots_chrome/test")
  end

  describe "When capturing an image" do
    let(:image_size) { ImageSize.path(test_image1).size }

    it "saves image" do
      capture_image = saving.construct_command(320, test_url1, test_image1, selector, false, false)
      `#{capture_image}`
      expect(image_size[0]).to eq 320
    end

    it "saves image chrome" do
      # capture_image_selenium drives the browser directly — no shell command,
      # so the previously-assigned (and unused) command variable was removed.
      saving_chrome.capture_image_selenium("1080x600", test_url1, test_image_chrome, selector, false, false)
      image_size_chrome = ImageSize.path(test_image_chrome).size
      expect(image_size_chrome[0]).to eq 1080
    end

    it "crops around a selector" do
      selector = "#orb-nav-more"
      saving_chrome.capture_image_selenium(1440, test_url1, test_image_chrome_selector, selector, false, false)
      image_size_chrome_selector = ImageSize.path(test_image_chrome_selector).size
      expect(image_size_chrome_selector[0]).to eq 673
      expect(image_size_chrome_selector[1]).to eq 40
    end
  end

  describe "When comparing images" do
    it "should compare" do
      create_diff_image
      crop_images
      compare_images
      diff = ImageSize.path(diff_image).size
      expect(diff[0]).to eq 320
    end
  end

  describe "When generating thumbnails" do
    it "produce thumbnails" do
      create_diff_image
      crop_images
      Wraith::CompareImages.new(config_name).compare_task(test_image1, test_image2, diff_image, data_txt)
      Wraith::Thumbnails.new(config_name).generate_thumbnails
      expect(File).to exist("shots/thumbnails/test/test1.png")
      expect(File).to exist("shots/thumbnails/test/test(2).png")
      expect(File).to exist("shots/thumbnails/test/test_diff.png")
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/validate_spec.rb | spec/validate_spec.rb | require "_helpers"
describe "Wraith config validator" do
let(:config) do
YAML.load('
domains:
test: http://www.bbc.com
browser: "casperjs"
directory: some/dir
')
end
describe "universal, basic validation for all modes" do
it "should validate a basic config" do
Wraith::Validate.new(config, { yaml_passed: true }).validate
end
it "should complain if the `domains` property is missing" do
config["domains"] = nil
expect { Wraith::Validate.new(config, { yaml_passed: true }).validate }.to raise_error MissingRequiredPropertyError
end
it "should complain if the `browser` property is missing" do
config["browser"] = nil
expect { Wraith::Validate.new(config, { yaml_passed: true }).validate }.to raise_error MissingRequiredPropertyError
end
it "should complain if the config file doesn't exist" do
expect { Wraith::Wraith.new('configs/some_made_up_config.yml') }.to raise_error ConfigFileDoesNotExistError
end
end
describe "validation specific to capture mode" do
it "should complain if fewer than two domains are specified" do
expect { Wraith::Validate.new(config, { yaml_passed: true }).validate("capture") }.to raise_error InvalidDomainsError
end
it "should complain if more than two domains are specified" do
config["domains"] = YAML.load('
test: http://something.bbc.com
stage: http://something-else.bbc.com
live: http://www.bbc.com
')
expect { Wraith::Validate.new(config, { yaml_passed: true }).validate("capture") }.to raise_error InvalidDomainsError
end
it "should be happy if exactly two domains are specified" do
config["domains"] = YAML.load('
test: http://something.bbc.com
live: http://www.bbc.com
')
Wraith::Validate.new(config, { yaml_passed: true }).validate("capture")
end
it "should fail if no directory is specified" do
config["domains"] = YAML.load('
test: http://something.bbc.com
live: http://www.bbc.com
')
config["directory"] = nil
expect { Wraith::Validate.new(config, { yaml_passed: true }).validate("capture") }.to raise_error MissingRequiredPropertyError
end
end
describe "validations specific to history mode" do
let(:history_conf) do
config.merge(YAML.load('
history_dir: "history_shots"
'))
end
it "should complain if more than one domain is specified" do
history_conf["domains"] = YAML.load('
test: http://something.bbc.com
live: http://www.bbc.com
')
expect { Wraith::Validate.new(history_conf, { yaml_passed: true }).validate("history") }.to raise_error InvalidDomainsError
end
it "should complain if no history_dir is specified" do
history_conf["history_dir"] = nil
expect { Wraith::Validate.new(history_conf, { yaml_passed: true }).validate("history") }.to raise_error MissingRequiredPropertyError
end
it "should be happy if a history_dir and one domain is specified" do
Wraith::Validate.new(history_conf, { yaml_passed: true }).validate("history")
end
end
describe "validations specific to spider mode" do
let(:spider_conf) do
YAML.load('
domains:
test: http://www.bbc.com
browser: "casperjs"
directory: some/dir
imports: "spider_paths.yml"
')
end
it "should complain if imports is empty" do
spider_conf.delete 'imports'
expect { Wraith::Validate.new(spider_conf, { yaml_passed: true, imports_must_resolve: false }).validate("spider") }.to raise_error MissingRequiredPropertyError
end
# @TODO - would be good to get this passing. Right now we get a false positive if you've run `wraith spider` once already - thereby 'paths' being set, and this error being raised.
# it "should complain if paths is set" do
# spider_conf.merge!(YAML.load('
# paths:
# home: /
# '))
# expect { Wraith::Validate.new(spider_conf, { yaml_passed: true, imports_must_resolve: false }).validate("spider") }.to raise_error PropertyOutOfContextError
# end
end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/gallery_spec.rb | spec/gallery_spec.rb | require "_helpers"
# Specs for Wraith::GalleryGenerator's directory parsing. Filenames are
# split on separators to recover the variant label, so a `-` inside the
# base name is an easy thing to break.
describe Wraith do
let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--phantom.yaml" }
let(:gallery) { Wraith::GalleryGenerator.new(config_name, false) }
describe "When generating gallery" do
it "should not break when there is a `-` in the filename" do
# Parses the checked-in fixture tree under spec/thumbnails.
dirs = gallery.parse_directories "spec/thumbnails"
images = [
{
:filename => "home/test_image-afrique.png",
:thumb => "thumbnails/home/test_image-afrique.png"
},
{
:filename => "home/test_image-russian.png",
:thumb => "thumbnails/home/test_image-russian.png"
}
]
# Both captured variants should keep their full (dash-containing) names.
dirs["home"][0][:variants].each_with_index do |image, i|
expect(image[:filename]).to eq images[i][:filename]
expect(image[:thumb]).to eq images[i][:thumb]
end
expect(dirs["home"][0][:diff][:filename]).to eq "home/test_image-diff.png"
expect(dirs["home"][0][:diff][:thumb]).to eq "thumbnails/home/test_image-diff.png"
end
end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/config_spec.rb | spec/config_spec.rb | require "_helpers"
describe "wraith config" do
let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--phantom.yaml" }
let(:wraith) { Wraith::Wraith.new(config_name) }
describe "Config" do
it "returns a Wraith class" do
expect(wraith).is_a? Wraith::Wraith
end
it "when config is loaded" do
expect(wraith).to respond_to :config
end
it "contains shot options" do
expect(wraith.config).to include "directory" => "shots"
end
it 'returns default values for threads' do
expect(wraith.threads).to eq 8
end
it 'returns default values for settle' do
expect(wraith.settle).to eq 10
end
context 'non-standard config values' do
let(:config) { YAML.load "browser: phantomjs\nthreads: 2\nsettle: 5"}
let(:non_standard_wraith) { Wraith::Wraith.new( config, { yaml_passed: true }) }
it 'returns overridden value when threads is specified in config' do
expect(non_standard_wraith.threads).to eq 2
end
it 'returns overridden value when settle is specified in config' do
expect(non_standard_wraith.settle).to eq 5
end
end
it "should be able to import other configs" do
config_name = get_path_relative_to __FILE__, "./configs/test_config--imports.yaml"
wraith = Wraith::Wraith.new(config_name)
# retain the imported config settings
expect(wraith.paths).to eq("home" => "/", "uk_index" => "/uk")
# ...but override the imported config in places
expect(wraith.widths).to eq [1337]
end
end
describe "When creating a wraith worker" do
it "should have a browser engine defined" do
expect(wraith.engine).to be_a String
end
it "should have a directory defined" do
expect(wraith.directory).to be_a String
end
it "should have domains defined" do
expect(wraith.domains).to be_a Hash
end
it "should have screen widths defined" do
expect(wraith.widths).to be_a Array
end
it "should have paths defined" do
expect(wraith.paths).to be_a Hash
end
it "should have fuzz defined" do
expect(wraith.fuzz).to be_a String
end
it "should have widths" do
expect(wraith.widths).to eq [320, 600, 1280]
end
it "include base domain" do
expect(wraith.base_domain).to eq "http://www.bbc.com/afrique"
end
it "include compare domain" do
expect(wraith.comp_domain).to eq "http://www.bbc.com/russian"
end
it "include base label" do
expect(wraith.base_domain_label).to eq "afrique"
end
it "include compare label" do
expect(wraith.comp_domain_label).to eq "russian"
end
it "include compare label" do
expect(wraith.paths).to eq("home" => "/", "uk_index" => "/uk")
end
end
describe "different ways of initialising browser engine" do
it "should let us directly specify the engine" do
config = YAML.load "browser: phantomjs"
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.engine).to eq "phantomjs"
end
it "should be backwards compatible with the old way" do
config = YAML.load '
browser:
phantomjs: "casperjs"
'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.engine).to eq "casperjs"
end
end
describe "different ways of determining the snap file" do
it "should calculate the snap file from the engine" do
config = YAML.load "browser: phantomjs"
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.snap_file).to include "lib/wraith/javascript/phantom.js"
config = YAML.load "browser: casperjs"
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.snap_file).to include "lib/wraith/javascript/casper.js"
end
it "should calculate the snap file in a backwards-compatible way" do
config = YAML.load '
browser:
phantomjs: "casperjs"
'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.snap_file).to include "lib/wraith/javascript/casper.js"
end
it "should allow users to specify the relative path to their own snap file" do
config = YAML.load '
browser: casperjs
snap_file: path/to/snap.js
'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
# not sure about having code IN the test, but we want to get this right.
expect(wraith.snap_file).to eq(Dir.pwd + "/path/to/snap.js")
end
it "should allow users to specify the absolute path to their own snap file" do
config = YAML.load '
browser: casperjs
snap_file: /Users/my_username/Sites/bbc/wraith/path/to/snap.js
'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.snap_file).to eq("/Users/my_username/Sites/bbc/wraith/path/to/snap.js")
end
end
describe "different modes of efficiency (resize or reload)" do
it "should trigger efficient mode if resize was specified" do
config = YAML.load 'resize_or_reload: "resize"'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.resize)
end
it "should fall back to slow mode if reload was specified" do
config = YAML.load 'resize_or_reload: "reload"'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.resize).to eq false
end
end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/spec/before_capture_spec.rb | spec/before_capture_spec.rb | require "_helpers"
# Captures a page via the CLI engine after running the configured
# before-capture JS, then asserts the screenshot is pixel-identical
# (diff score "0.0") to config[:output_should_look_like].
def run_js_then_capture(config)
  snapshot = "shots/test/temporary_jsified_image.png"
  command = Wraith::SaveImages.new(config_name).construct_command(320, "http://www.bbc.com/afrique", snapshot, selector, config[:global_js], config[:path_js])
  `#{command}`
  comparer = Wraith::CompareImages.new(config_name)
  comparer.compare_task(snapshot, config[:output_should_look_like], "shots/test/test_diff.png", "shots/test/test.txt")
  diff_score = File.open("shots/test/test.txt", "rb").read
  expect(diff_score).to eq "0.0"
end
# Chrome/selenium variant of run_js_then_capture: captures directly through
# the driver rather than shelling out, then compares against the reference.
def run_js_then_capture_chrome(config)
  snapshot = "shots_chrome/test/temporary_jsified_image.png"
  Wraith::SaveImages.new(config_chrome).capture_image_selenium('320', 'http://www.bbc.com/afrique', snapshot, selector, config[:global_js], config[:path_js])
  comparer = Wraith::CompareImages.new(config_chrome)
  comparer.compare_task(snapshot, config[:output_should_look_like], "shots/test/test_diff.png", "shots/test/test.txt")
  diff_score = File.open("shots/test/test.txt", "rb").read
  expect(diff_score).to eq "0.0"
end
# Specs for the before_capture hooks: path resolution of the hook file, and
# (for Chrome) that global and path-level JS run, in that order, before the
# screenshot is taken.
describe Wraith do
let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--casper.yaml" }
let(:config_chrome) { get_path_relative_to __FILE__, "./configs/test_config--chrome.yaml" }
let(:wraith) { Wraith::Wraith.new(config_name) }
let(:selector) { "body" }
let(:before_suite_js) { "spec/js/global.js" }
let(:before_capture_js) { "spec/js/path.js" }
let(:before_suite_js_chrome) { "spec/js/global--chrome.js" }
let(:before_capture_js_chrome) { "spec/js/path--chrome.js" }
before(:each) do
# Start every example from empty shot directories.
Wraith::FolderManager.new(config_name).clear_shots_folder
Wraith::FolderManager.new(config_chrome).clear_shots_folder
Dir.mkdir("shots/test")
Dir.mkdir("shots_chrome/test")
end
describe "different ways of determining the before_capture file" do
it "should allow users to specify the relative path to the before_capture file" do
config = YAML.load '
browser: casperjs
before_capture: javascript/do_something.js
'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
# not sure about having code IN the test, but we want to get this right.
expect(wraith.before_capture).to eq(Dir.pwd + "/javascript/do_something.js")
end
it "should allow users to specify the absolute path to the before_capture file" do
config = YAML.load '
browser: casperjs
before_capture: /Users/some_user/wraith/javascript/do_something.js
'
wraith = Wraith::Wraith.new(config, { yaml_passed: true })
expect(wraith.before_capture).to eq("/Users/some_user/wraith/javascript/do_something.js")
end
end
# Each hook script paints the page a distinctive colour, so comparing the
# capture against a reference PNG proves which scripts ran and in what order.
describe "When hooking into before_capture (Chrome)" do
it "Executes the global JS before capturing" do
run_js_then_capture_chrome(
:global_js => before_suite_js_chrome,
:path_js => false,
:output_should_look_like => "spec/base/global.png",
:engine => "chrome"
)
end
it "Executes the path-level JS before capturing" do
run_js_then_capture_chrome(
:global_js => false,
:path_js => before_capture_js_chrome,
:output_should_look_like => "spec/base/path.png",
:engine => "chrome"
)
end
it "Executes the global JS before the path-level JS" do
run_js_then_capture_chrome(
:global_js => before_suite_js_chrome,
:path_js => before_capture_js_chrome,
:output_should_look_like => "spec/base/path.png",
:engine => "chrome"
)
end
end
# @TODO - we need tests determining the path to "path-level before_capture hooks"
# @TODO - uncomment and figure out why broken OR deprecate
# describe "When hooking into before_capture (CasperJS)" do
# it "Executes the global JS before capturing" do
# run_js_then_capture(
# :global_js => before_suite_js,
# :path_js => false,
# :output_should_look_like => "spec/base/global.png",
# :engine => "casperjs"
# )
# end
# it "Executes the path-level JS before capturing" do
# run_js_then_capture(
# :global_js => false,
# :path_js => before_capture_js,
# :output_should_look_like => "spec/base/path.png",
# :engine => "casperjs"
# )
# end
# it "Executes the global JS before the path-level JS" do
# run_js_then_capture(
# :global_js => before_suite_js,
# :path_js => before_capture_js,
# :output_should_look_like => "spec/base/path.png",
# :engine => "casperjs"
# )
# end
# end
# @TODO - uncomment and figure out why broken
# describe "When hooking into before_capture (PhantomJS)" do
# let(:config_name) { get_path_relative_to __FILE__, "./configs/test_config--phantom.yaml" }
# let(:saving) { Wraith::SaveImages.new(config_name) }
# let(:wraith) { Wraith::Wraith.new(config_name) }
# let(:selector) { "body" }
# let(:before_suite_js) { "../../spec/js/global.js" }
# let(:before_capture_js) { "../../spec/js/path.js" }
# it "Executes the global JS before capturing" do
# run_js_then_capture(
# global_js: before_suite_js,
# path_js: 'false',
# output_should_look_like: 'spec/base/global.png',
# engine: 'phantomjs'
# )
# end
# it "Executes the path-level JS before capturing" do
# run_js_then_capture(
# global_js: 'false',
# path_js: before_capture_js,
# output_should_look_like: 'spec/base/path.png',
# engine: 'phantomjs'
# )
# end
# it "Executes the global JS before the path-level JS" do
# run_js_then_capture(
# global_js: before_suite_js,
# path_js: before_capture_js,
# output_should_look_like: 'spec/base/path.png',
# engine: 'phantomjs'
# )
# end
# end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith.rb | lib/wraith.rb | require "wraith/version"
# Top-level namespace for the Wraith gem. The CLI is autoloaded so that
# a bare `require "wraith"` stays cheap for library consumers.
module Wraith
autoload :CLI, "wraith/cli"
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/wraith.rb | lib/wraith/wraith.rb | require "yaml"
require "wraith/helpers/logger"
require "wraith/helpers/utilities"
# Loads and exposes a Wraith YAML configuration. Accepts either a path/name
# of a config file (resolved against several conventional locations) or an
# already-parsed Hash when `yaml_passed: true`. Supports one level of
# `imports:` to merge a base config underneath this one.
class Wraith::Wraith
include Logging
attr_accessor :config
# config  - config file path/name, or a Hash when options[:yaml_passed].
# options - :yaml_passed (Boolean, default false) and
#           :imports_must_resolve (Boolean, default true — fail if the
#           imported config file cannot be found).
def initialize(config, options = {})
options = {
yaml_passed: false,
imports_must_resolve: true,
}.merge(options)
if options[:yaml_passed]
@config = config
else
filepath = determine_config_path config
@config = YAML.load_file filepath
if !@config
fail InvalidYamlError, "could not parse \"#{config}\" as YAML"
end
end
if @config['imports']
@config = apply_imported_config(@config['imports'], @config, options[:imports_must_resolve])
end
logger.level = verbose ? Logger::DEBUG : Logger::INFO
end
# Tries the name as given, then with .yml/.yaml extensions, then inside a
# `configs/` directory. Returns the absolute path of the first match and
# remembers its directory (used later to resolve `imports:`).
def determine_config_path(config_name)
possible_filenames = [
config_name,
"#{config_name}.yml",
"#{config_name}.yaml",
"configs/#{config_name}.yml",
"configs/#{config_name}.yaml"
]
possible_filenames.each do |filepath|
if File.exist?(filepath)
@calculated_config_dir = absolute_path_of_dir(convert_to_absolute filepath)
return convert_to_absolute filepath
end
end
fail ConfigFileDoesNotExistError, "unable to find config \"#{config_name}\""
end
# Directory the loaded config file lives in (nil when a Hash was passed).
def config_dir
@calculated_config_dir
end
# Merges the current config ON TOP of the imported one, so local settings
# win over imported defaults.
def apply_imported_config(config_to_import, config, imports_must_resolve)
path_to_config = "#{config_dir}/#{config_to_import}"
if File.exist?(path_to_config)
yaml = YAML.load_file path_to_config
return yaml.merge(config)
end
if imports_must_resolve
fail ConfigFileDoesNotExistError, "unable to find referenced imported config \"#{config_to_import}\""
else
config # return original config
end
end
def directory
# Legacy support for those using array configs
@config["directory"].is_a?(Array) ? @config["directory"].first : @config["directory"]
end
# Baseline-shots directory for history mode; false when not configured.
def history_dir
@config.fetch('history_dir', false)
end
def engine
engine = @config["browser"]
# Legacy support for those using the old style "browser: \n phantomjs: 'casperjs'" configs
engine = engine.values.first if engine.is_a? Hash
engine
end
# Path to the JS capture script: an explicit `snap_file` (made absolute)
# or the bundled script for the configured engine.
def snap_file
@config["snap_file"] ? convert_to_absolute(@config["snap_file"]) : snap_file_from_engine(engine)
end
def snap_file_from_engine(engine)
path_to_js_templates = File.dirname(__FILE__) + "/javascript"
case engine
when "phantomjs"
path_to_js_templates + "/phantom.js"
when "casperjs"
path_to_js_templates + "/casper.js"
# @TODO - add a SlimerJS option
else
logger.error "Wraith does not recognise the browser engine '#{engine}'"
end
end
# Absolute path to the before_capture hook script, or false when unset.
def before_capture
@config["before_capture"] ? convert_to_absolute(@config["before_capture"]) : false
end
def widths
@config["screen_widths"]
end
def resize
# @TODO make this default to true, once it's been tested a bit more thoroughly
@config.fetch('resize_or_reload', 'reload') == "resize"
end
def domains
@config["domains"]
end
def base_domain
domains[base_domain_label]
end
def comp_domain
domains[comp_domain_label]
end
# Domain labels come from YAML hash ordering: first entry is the base,
# second is the comparison.
def base_domain_label
domains.keys[0]
end
def comp_domain_label
domains.keys[1]
end
# Seconds to let a page settle before capture (default 10).
def settle
@config.fetch('settle', 10)
end
# Number of parallel capture threads (default 8).
def threads
@config.fetch('threads', '8').to_i
end
def spider_file
@config.fetch('spider_file', 'spider.txt')
end
def spider_days
@config["spider_days"]
end
def sitemap
@config["sitemap"]
end
def spider_skips
@config["spider_skips"]
end
def paths
@config["paths"]
end
def fuzz
@config["fuzz"]
end
def highlight_color
@config.fetch('highlight_color', 'blue')
end
# Gallery ordering mode; anything unrecognised falls back to alphanumeric.
def mode
if %w(diffs_only diffs_first alphanumeric).include?(@config["mode"])
@config["mode"]
else
"alphanumeric"
end
end
def threshold
@config.fetch('threshold', 0)
end
def gallery_template
@config.fetch('gallery', {}).fetch('template', 'basic_template')
end
def thumb_height
@config.fetch('gallery', {}).fetch('thumb_height', 200)
end
def thumb_width
@config.fetch('gallery', {}).fetch('thumb_width', 200)
end
def phantomjs_options
@config["phantomjs_options"]
end
def imports
@config.fetch('imports', false)
end
def verbose
# @TODO - also add a `--verbose` CLI flag which overrides whatever you have set in the config
@config.fetch('verbose', false)
end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/version.rb | lib/wraith/version.rb | module Wraith
VERSION = "4.2.4"
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/save_images.rb | lib/wraith/save_images.rb | require "parallel"
require "shellwords"
require "wraith"
require "wraith/helpers/capture_options"
require "wraith/helpers/logger"
require "wraith/helpers/save_metadata"
require "wraith/helpers/utilities"
require "selenium-webdriver"
require 'mini_magick'
# Captures screenshots for every configured path/width combination, either by
# shelling out to a PhantomJS/CasperJS snap script or by driving headless
# Chrome through selenium-webdriver.
class Wraith::SaveImages
  include Logging
  attr_reader :wraith, :history, :meta

  # config      - path to a wraith YAML config (or a Hash when yaml_passed).
  # history     - truthy when capturing baseline ("history") shots.
  # yaml_passed - set when +config+ is an already-parsed Hash.
  def initialize(config, history = false, yaml_passed = false)
    @wraith = Wraith::Wraith.new(config, { yaml_passed: yaml_passed })
    @history = history
    @meta = SaveMetadata.new(@wraith, history)
  end

  # Returns the configured paths hash, falling back to the spider output file.
  def check_paths
    if !wraith.paths
      path = File.read(wraith.spider_file)
      # NOTE(review): the spider file is eval'd verbatim — only ever point
      # this at trusted, wraith-generated output.
      eval(path)
    else
      wraith.paths
    end
  end

  def save_images
    jobs = define_jobs
    parallel_task(jobs)
  end

  # Builds the full list of capture jobs: one job covering all widths when the
  # engine can resize within a session, otherwise one job per width.
  def define_jobs
    jobs = []
    check_paths.each do |label, options|
      settings = CaptureOptions.new(options, wraith)
      if settings.resize
        jobs += define_individual_job(label, settings, wraith.widths)
      else
        wraith.widths.each do |width|
          jobs += define_individual_job(label, settings, width)
        end
      end
    end
    jobs
  end

  # A job is a positional array; the final element names the fallback image
  # used when capture fails (see parallel_task).
  def define_individual_job(label, settings, width)
    base_file_name = meta.file_names(width, label, meta.base_label)
    compare_file_name = meta.file_names(width, label, meta.compare_label)

    jobs = []
    jobs << [label, settings.path, prepare_widths_for_cli(width), settings.base_url, base_file_name, settings.selector, wraith.before_capture, settings.before_capture, 'invalid1.jpg']
    jobs << [label, settings.path, prepare_widths_for_cli(width), settings.compare_url, compare_file_name, settings.selector, wraith.before_capture, settings.before_capture, 'invalid2.jpg'] unless settings.compare_url.nil?
    jobs
  end

  # prepare for the command line. [30,40,50] => "30,40,50"
  def prepare_widths_for_cli(width)
    width = width.join(",") if width.is_a? Array
    width
  end

  # Runs a shell command, streaming its stdout to the log and returning the
  # captured lines. Single quotes are stripped because IO.popen does not go
  # through a shell that would interpret them.
  def run_command(command)
    output = []
    command.gsub!(/'/, '')
    IO.popen(command).each do |line|
      logger.info line
      output << line.chomp!
    end.close
    output
  end

  def parallel_task(jobs)
    # BUGFIX: jobs carry nine elements; the invalid-image name was previously
    # not bound by the block, so the rescue branch raised NameError instead of
    # writing the fallback image.
    Parallel.each(jobs, :in_threads => wraith.threads) do |_label, _path, width, url, filename, selector, global_before_capture, path_before_capture, invalid_image_name|
      begin
        if meta.engine == "chrome"
          capture_image_selenium(width, url, filename, selector, global_before_capture, path_before_capture)
        else
          command = construct_command(width, url, filename, selector, global_before_capture, path_before_capture)
          attempt_image_capture(command, filename)
        end
      rescue => e
        logger.error "#{e}\n URL = #{url}"
        create_invalid_image(filename, width, invalid_image_name)
      end
    end
  end

  # currently only chrome headless at 1x scaling
  def get_driver
    case meta.engine
    when "chrome"
      options = Selenium::WebDriver::Chrome::Options.new
      [
        'disable-gpu',
        'headless',
        'no-sandbox',
        'device-scale-factor=1',
        'force-device-scale-factor',
        'window-size=1200,1500',
        'hide-scrollbars',
        'ignore-certificate-errors'
      ].each { |arg| options.add_argument("--#{arg}") }
      Selenium::WebDriver.for :chrome, options: options
    end
  end

  # resize to fit entire page
  def resize_to_fit_page(driver)
    width = driver.execute_script("return Math.max(document.body.scrollWidth, document.body.offsetWidth, document.documentElement.clientWidth, document.documentElement.scrollWidth, document.documentElement.offsetWidth);")
    height = driver.execute_script("return Math.max(document.body.scrollHeight, document.body.offsetHeight, document.documentElement.clientHeight, document.documentElement.scrollHeight, document.documentElement.offsetHeight);")
    driver.manage.window.resize_to(width, height)
  end

  # crop an image around the coordinates of an element
  def crop_selector(driver, selector, image_location)
    el = driver.find_element(:css, selector)
    image = MiniMagick::Image.open(image_location)
    image.crop "#{el.rect.width}x#{el.rect.height}+#{el.rect.x}+#{el.rect.y}"
    image.write(image_location)
  end

  # Captures one screenshot per entry in +screen_sizes+ (comma-separated
  # "WIDTH" or "WIDTHxHEIGHT" values), retrying each up to three times on
  # read timeouts. The driver is always quit, even on failure.
  def capture_image_selenium(screen_sizes, url, file_name, selector, global_before_capture, path_before_capture)
    driver = get_driver
    begin
      driver.manage.timeouts.implicit_wait = 10
      screen_sizes.to_s.split(",").each do |screen_size|
        (1..3).each do |attempt|
          begin
            width, height = screen_size.split("x")
            new_file_name = file_name.sub('MULTI', screen_size)
            driver.manage.window.resize_to(width, height || 1500)
            driver.navigate.to url
            driver.manage.timeouts.implicit_wait = wraith.settle
            driver.execute_script(File.read(global_before_capture)) if global_before_capture
            driver.execute_script(File.read(path_before_capture)) if path_before_capture
            resize_to_fit_page(driver) unless height
            driver.save_screenshot(new_file_name)
            crop_selector(driver, selector, new_file_name) if selector && selector.length > 0
            break
          rescue Net::ReadTimeout => e
            # BUGFIX: previously interpolated the undefined local `screensize`,
            # raising NameError while handling the timeout.
            logger.error "Got #{e} on attempt #{attempt} at screen size #{screen_size}. URL = #{url}"
          end
        end
      end
    ensure
      # BUGFIX: quit the browser even when an error escapes, so failed
      # captures no longer leak a headless Chrome process.
      driver.quit
    end
  end

  def construct_command(width, url, file_name, selector, global_before_capture, path_before_capture)
    width = prepare_widths_for_cli(width)
    selector = selector.gsub '#', '\#' # make sure id selectors aren't escaped in the CLI
    global_before_capture = convert_to_absolute global_before_capture
    path_before_capture = convert_to_absolute path_before_capture

    command_to_run = "#{meta.engine} #{wraith.phantomjs_options} '#{wraith.snap_file}' '#{url}' '#{width}' '#{file_name}' '#{selector}' '#{global_before_capture}' '#{path_before_capture}'"
    logger.debug command_to_run
    command_to_run
  end

  # Runs the capture command up to five times, until the target file exists.
  def attempt_image_capture(capture_page_image, filename)
    max_attempts = 5
    max_attempts.times do |i|
      run_command capture_page_image
      return true if image_was_created filename
      # BUGFIX: restore the filename interpolation that had been lost from
      # these messages.
      logger.warn "Failed to capture image #{filename} on attempt number #{i + 1} of #{max_attempts}"
    end

    fail "Unable to capture image #{filename} after #{max_attempts} attempt(s)" unless image_was_created filename
  end

  def image_was_created(filename)
    # @TODO - need to check if the image was generated even if in resize mode
    wraith.resize or File.exist? filename
  end

  # Copies the bundled placeholder image over +filename+ and stretches it to
  # the requested width so galleries still line up.
  def create_invalid_image(filename, width, invalid_image_name)
    logger.warn "Using fallback image instead"
    invalid = File.expand_path("../../assets/#{invalid_image_name}", File.dirname(__FILE__))
    FileUtils.cp invalid, filename

    set_image_width(filename, width)
  end

  def set_image_width(image, width)
    `convert #{image.shellescape} -background none -extent #{width}x0 #{image.shellescape}`
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/thumbnails.rb | lib/wraith/thumbnails.rb | require "wraith"
require "parallel"
require "fileutils"
require "shellwords"
# Generates cropped thumbnails for every captured screenshot, mirroring the
# shots directory layout under "<shots dir>/thumbnails".
class Wraith::Thumbnails
  attr_reader :wraith

  def initialize(config)
    @wraith = Wraith::Wraith.new(config)
  end

  # Thumbnails every PNG under the shots directory, one worker process per
  # CPU core.
  def generate_thumbnails
    shots = Dir.glob("#{wraith.directory}/*/*.png")
    Parallel.each(shots, :in_processes => Parallel.processor_count) do |source_path|
      destination = source_path.gsub(/^#{wraith.directory}/, "#{wraith.directory}/thumbnails")
      thumbnail_image(source_path, destination)
    end
  end

  # Shrinks +png_path+ to a 200px-wide thumbnail, crops it to the configured
  # thumbnail dimensions, and writes the result to +output_path+ (creating
  # intermediate directories as needed).
  def thumbnail_image(png_path, output_path)
    output_dir = File.dirname(output_path)
    FileUtils.mkdir_p(output_dir) unless File.directory?(output_dir)
    `convert #{png_path.shellescape} -thumbnail 200 -crop #{wraith.thumb_width}x#{wraith.thumb_height}+0+0 #{output_path.shellescape}`
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/validate.rb | lib/wraith/validate.rb | require "wraith/wraith"
require "wraith/helpers/logger"
require "wraith/helpers/utilities"
# Validates a Wraith config before a run: universal requirements first
# (engine, domains, directory), then mode-specific rules. Raises a
# Missing/Invalid*Error on hard failures; softer issues are only logged.
class Wraith::Validate
include Logging
attr_reader :wraith
def initialize(config, options = {})
@wraith = Wraith::Wraith.new(config, options)
end
# mode - "capture", "history", "latest" or "spider"; false skips mode checks.
# Returns a human-readable success message if nothing raised.
def validate(mode = false)
list_debug_information if wraith.verbose
validate_basic_properties
validate_mode_properties(mode) if mode
# if we get this far, we've only had warnings at worst, not errors.
"Config validated. No serious issues found."
end
def validate_basic_properties
fail MissingRequiredPropertyError, "You must specify a browser engine! #{docs_prompt}" if wraith.engine.nil?
fail MissingRequiredPropertyError, "You must specify at least one domain for Wraith to do anything! #{docs_prompt}" unless wraith.domains
fail MissingRequiredPropertyError, "You must specify a directory for capture! #{docs_prompt}" if wraith.directory.nil?
# @TODO validate fuzz is not nil, etc
end
def validate_mode_properties(mode)
case mode
when "capture"
validate_capture_mode
when "history"
validate_history_mode
when "latest"
# "latest" reuses the history rules and additionally needs baseline shots.
validate_history_mode
validate_base_shots_exist
when "spider"
validate_spider_mode
else
logger.warn "Wraith doesn't know how to validate mode '#{mode}'. Continuing..."
end
end
# Capture mode diffs exactly two domains against each other.
def validate_capture_mode
fail InvalidDomainsError, "`wraith capture` requires exactly two domains. #{docs_prompt}" if wraith.domains.length != 2
logger.warn "You have specified a `history_dir` in your config, but this is"\
" used in `history` mode, NOT `capture` mode. #{docs_prompt}" if wraith.history_dir
end
# History mode tracks a single domain over time against `history_dir`.
def validate_history_mode
fail MissingRequiredPropertyError, "You must specify a `history_dir` to run"\
" Wraith in history mode. #{docs_prompt}" unless wraith.history_dir
fail InvalidDomainsError, "History mode requires exactly one domain. #{docs_prompt}" if wraith.domains.length != 1
end
# Spider mode writes the discovered paths into the `imports` YML.
def validate_spider_mode
fail MissingRequiredPropertyError, "You must specify an `imports` YML"\
" before running `wraith spider`. #{docs_prompt}" unless wraith.imports
#fail PropertyOutOfContextError, "Tried running `wraith spider` but you have already"\
# " specified paths in your YML. #{docs_prompt}" if wraith.paths
end
# Non-fatal: logs an error (does not raise) when baseline shots are missing.
def validate_base_shots_exist
unless File.directory?(wraith.history_dir)
logger.error "You need to run `wraith history` at least once before you can run `wraith latest`!"
end
end
def docs_prompt
"See the docs at http://bbc-news.github.io/wraith/"
end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/gallery.rb | lib/wraith/gallery.rb | require "erb"
require "pp"
require "fileutils"
require "wraith/wraith"
require "wraith/helpers/logger"
# Builds the static HTML gallery presenting base/compare/diff shots,
# their thumbnails, per-path diff percentages and failure reporting.
class Wraith::GalleryGenerator
  include Logging

  attr_reader :wraith

  # Shot filenames look like "<size>_<group>.<ext>", e.g. "1024_base.png".
  MATCH_FILENAME = /(\S+)_(\S+)\.\S+/

  def initialize(config, multi)
    @wraith = Wraith::Wraith.new(config)
    @location = wraith.directory
    @multi = multi
    @folder_manager = Wraith::FolderManager.new(config)
  end

  # Scans the shots directory and builds {category => {size => info}},
  # skipping ".", ".." and the generated thumbnails folder.
  def parse_directories(dirname)
    @dirs = {}
    categories = Dir.foreach(dirname).select do |category|
      if [".", "..", "thumbnails"].include? category
        false
      elsif File.directory? "#{dirname}/#{category}"
        true
      else
        false
      end
    end
    match(categories, dirname)
  end

  # Registers every shot file per category, tidies the shots folder
  # (diffs_only mode) and records whether any shot breached the threshold.
  def match(categories, dirname)
    categories.each do |category|
      @dirs[category] = {}
      Dir.foreach("#{dirname}/#{category}") do |filename|
        match = MATCH_FILENAME.match(filename)
        matcher(match, filename, dirname, category) unless match.nil?
      end
    end
    @folder_manager.tidy_shots_folder(@dirs)
    @failed_shots = @folder_manager.threshold_rate(@dirs)
    sorting_dirs(@dirs)
  end

  # Files one shot under its category/size slot.
  def matcher(match, filename, dirname, category)
    @size = match[1].to_i
    @group = get_group_from_match match
    @filepath = category + "/" + filename
    # FIX: the thumbnail path must point at this shot's own thumbnail, as
    # written by Wraith::Thumbnails (directory/thumbnails/<category>/<file>);
    # the interpolation here was broken and produced a literal, dead path.
    @thumbnail = "thumbnails/#{category}/#{filename}"
    @url = figure_out_url @group, category
    @dirs[category][@size] = { :variants => [] } if @dirs[category][@size].nil?
    size_dict = @dirs[category][@size]
    data_group(@group, size_dict, dirname, @filepath)
  end

  # Reconstructs the captured URL from the domain for `group` plus the
  # configured path for `category`; "" when the group has no domain.
  def figure_out_url(group, category)
    root = wraith.domains[group]
    return "" if root.nil?
    path = get_path(category)
    url = root + path
    url
  end

  # Paths may be plain strings or hashes with a "path" key.
  def get_path(category)
    wraith.paths[category]["path"] || wraith.paths[category]
  end

  # The group is the text after the last dash (e.g. "base", "diff"),
  # or the whole second capture when there is no dash.
  def get_group_from_match(match)
    group = match[2]
    dash = match[2].rindex("-")
    group = match[2][dash + 1..-1] unless dash.nil?
    group
  end

  # Routes a shot into the right slot: diff image, diff-percentage data
  # file, or a domain variant screenshot.
  def data_group(group, size_dict, dirname, filepath)
    case group
    when "diff"
      diff_check(size_dict, filepath)
    when "data"
      data_check(size_dict, dirname, filepath)
    else
      variant_check(size_dict, group)
    end
  end

  # Adds a domain screenshot to the size's variants, kept sorted by name.
  def variant_check(size_dict, group)
    size_dict[:variants] << {
      :name => group,
      :filename => @filepath,
      :thumb => @thumbnail,
      :url => @url
    }
    size_dict[:variants].sort! { |a, b| a[:name] <=> b[:name] }
  end

  def diff_check(size_dict, filepath)
    size_dict[:diff] = {
      :filename => filepath, :thumb => @thumbnail
    }
  end

  # data.txt contains the diff percentage written by CompareImages.
  def data_check(size_dict, dirname, filepath)
    size_dict[:data] = File.read("#{dirname}/#{filepath}").to_f
  end

  # Orders the gallery: biggest diffs first in diffs_only/diffs_first
  # modes, alphabetical otherwise.
  def sorting_dirs(dirs)
    if %w(diffs_only diffs_first).include?(wraith.mode)
      @sorted = sort_by_diffs dirs
    else
      @sorted = sort_alphabetically dirs
    end
    Hash[@sorted]
  end

  def sort_by_diffs(dirs)
    dirs.sort_by do |_category, sizes|
      size = select_size_with_biggest_diff sizes
      -1 * size[1][:data]
    end
  end

  # Raises MissingImageError when a size has no :data (missing data.txt).
  def select_size_with_biggest_diff(sizes)
    begin
      sizes.max_by { |_size, dict| dict[:data] }
    rescue
      fail MissingImageError
    end
  end

  def sort_alphabetically(dirs)
    dirs.sort_by { |category, _sizes| category }
  end

  # Entry point: renders gallery.html into the shots directory and
  # reports the overall pass/fail status.
  def generate_gallery(with_path = "")
    dest = "#{@location}/gallery.html"
    directories = parse_directories(@location)
    template = File.expand_path("gallery_template/#{wraith.gallery_template}.erb", File.dirname(__FILE__))
    generate_html(@location, directories, template, dest, with_path)
    report_gallery_status dest
  end

  def generate_html(location, directories, template, destination, path)
    template = File.read(template)
    locals = {
      :location => location,
      :directories => directories,
      :path => path,
      :threshold => wraith.threshold
    }
    html = ERB.new(template).result(ErbBinding.new(locals).get_binding)
    File.open(destination, "w") do |outf|
      outf.write(html)
    end
  end

  # Exits non-zero when failures were detected (useful for CI).
  def report_gallery_status(dest)
    logger.info "Gallery generated"
    failed = check_failed_shots
    prompt_user_to_open_gallery dest
    exit 1 if failed
  end

  # Logs each failing path/size; returns true when failures should fail
  # the run (never in multi mode).
  def check_failed_shots
    if @multi
      return false
    elsif @failed_shots == false
      logger.warn "Failures detected:"
      @dirs.each do |dir, sizes|
        sizes.to_a.sort.each do |size, files|
          file = dir.gsub("__", "/")
          if !files.include?(:diff)
            logger.warn "\t Unable to create a diff image for #{file}"
          elsif files[:data] > wraith.threshold
            logger.warn "\t #{file} failed at a resolution of #{size} (#{files[:data]}% diff)"
          end
        end
      end
      return true
    else
      false
    end
  end

  def prompt_user_to_open_gallery(dest)
    logger.info "\nView the gallery in your browser:"
    logger.info "\t file://" + Dir.pwd + "/" + dest
  end

  # Exposes a hash of locals as methods inside the ERB template.
  # NOTE(review): relies on OpenStruct being loaded by another require —
  # this file itself does not `require "ostruct"`; confirm load order.
  class ErbBinding < OpenStruct
    def get_binding
      binding
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/cli.rb | lib/wraith/cli.rb | require "thor"
require "wraith"
require "wraith/save_images"
require "wraith/crop"
require "wraith/spider"
require "wraith/folder"
require "wraith/thumbnails"
require "wraith/compare_images"
require "wraith/gallery"
require "wraith/validate"
require "wraith/version"
require "wraith/helpers/logger"
require "wraith/helpers/utilities"
# Thor-based command-line interface: each public method is a `wraith`
# subcommand. Commands are thin wrappers that hand the config file name
# to a worker class, wrapped in within_acceptable_limits (which logs
# Wraith's CustomErrors instead of crashing with a stack trace).
class Wraith::CLI < Thor
  include Thor::Actions
  include Logging

  attr_accessor :config_name

  # Root directory Thor copies templates from (used by `setup`).
  def self.source_root
    File.expand_path("../../../", __FILE__)
  end

  desc "validate [config_name]", "checks your configuration and validates that all required properties exist"
  def validate(config_name)
    within_acceptable_limits do
      logger.info Wraith::Validate.new(config_name).validate
    end
  end

  # Copies the bundled example configs and before-capture JS hooks into
  # the current directory.
  desc "setup", "creates config folder and default config"
  def setup
    within_acceptable_limits do
      directory("templates/configs", "configs")
      directory("templates/javascript", "javascript")
    end
  end

  # imports_must_resolve is disabled because the spider's whole job is to
  # create the imports file in the first place.
  desc "spider [config_name]", "crawls a site for paths and stores them to YML file"
  def spider(config)
    within_acceptable_limits do
      logger.info Wraith::Validate.new(config, { imports_must_resolve: false }).validate("spider")
      spider = Wraith::Spider.new(config)
      spider.crawl
    end
  end

  desc "reset_shots [config_name]", "removes all the files in the shots folder"
  def reset_shots(config_name)
    within_acceptable_limits do
      reset = Wraith::FolderManager.new(config_name)
      reset.clear_shots_folder
    end
  end

  desc "setup_folders [config_name]", "create folders for images"
  def setup_folders(config_name)
    within_acceptable_limits do
      create = Wraith::FolderManager.new(config_name)
      create.create_folders
    end
  end

  desc "copy_base_images [config_name]", "copies the required base images over for comparison with latest images"
  def copy_base_images(config_name)
    within_acceptable_limits do
      copy = Wraith::FolderManager.new(config_name)
      copy.copy_base_images
    end
  end

  # history - when true, shots are saved with the "_latest" suffix.
  desc "save_images [config_name]", "captures screenshots"
  def save_images(config_name, history = false)
    within_acceptable_limits do
      logger.info "SAVING IMAGES"
      save_images = Wraith::SaveImages.new(config_name, history)
      save_images.save_images
    end
  end

  desc "crop_images [config_name]", "crops images to the same height"
  def crop_images(config_name)
    within_acceptable_limits do
      logger.info "CROPPING IMAGES"
      crop = Wraith::CropImages.new(config_name)
      crop.crop_images
    end
  end

  desc "compare_images [config_name]", "compares images to generate diffs"
  def compare_images(config_name)
    within_acceptable_limits do
      logger.info "COMPARING IMAGES"
      compare = Wraith::CompareImages.new(config_name)
      compare.compare_images
    end
  end

  desc "generate_thumbnails [config_name]", "create thumbnails for gallery"
  def generate_thumbnails(config_name)
    within_acceptable_limits do
      logger.info "GENERATING THUMBNAILS"
      thumbs = Wraith::Thumbnails.new(config_name)
      thumbs.generate_thumbnails
    end
  end

  # multi - suppresses the CI exit-1 on failures (see GalleryGenerator).
  desc "generate_gallery [config_name]", "create page for viewing images"
  def generate_gallery(config_name, multi = false)
    within_acceptable_limits do
      logger.info "GENERATING GALLERY"
      gallery = Wraith::GalleryGenerator.new(config_name, multi)
      gallery.generate_gallery
    end
  end

  # Full two-domain pipeline: capture -> crop -> compare -> gallery.
  desc "capture [config_name]", "Capture paths against two domains, compare them, generate gallery"
  def capture(config, multi = false)
    within_acceptable_limits do
      logger.info Wraith::Validate.new(config).validate("capture")
      reset_shots(config)
      setup_folders(config)
      save_images(config)
      crop_images(config)
      compare_images(config)
      generate_thumbnails(config)
      generate_gallery(config, multi)
    end
  end

  # Runs `capture` once per config listed (one per line) in filelist.
  desc "multi_capture [filelist]", "A Batch of Wraith Jobs"
  def multi_capture(filelist)
    within_acceptable_limits do
      config_array = IO.readlines(filelist)
      config_array.each do |config|
        capture(config.chomp, true)
      end
    end
  end

  # Captures a baseline set of shots and stores them in history_dir.
  desc "history [config_name]", "Setup a baseline set of shots"
  def history(config)
    within_acceptable_limits do
      logger.info Wraith::Validate.new(config).validate("history")
      reset_shots(config)
      setup_folders(config)
      save_images(config)
      Wraith::FolderManager.new(config).copy_old_shots
    end
  end

  # Captures fresh shots and diffs them against the stored baseline.
  desc "latest [config_name]", "Capture new shots to compare with baseline"
  def latest(config)
    within_acceptable_limits do
      logger.info Wraith::Validate.new(config).validate("latest")
      reset_shots(config)
      setup_folders(config)
      save_images(config, true)
      copy_base_images(config)
      crop_images(config)
      compare_images(config)
      generate_thumbnails(config)
      generate_gallery(config)
    end
  end

  desc "info", "Show various info about your system"
  def info
    list_debug_information
  end

  desc "version", "Show the version of Wraith"
  map ["--version", "-version", "-v"] => "version"
  def version
    logger.info Wraith::VERSION
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/folder.rb | lib/wraith/folder.rb | require "wraith"
require "wraith/helpers/logger"
# Owns the on-disk layout of the shots directory: creation, clearing,
# and moving shots between the live directory and the history baseline.
class Wraith::FolderManager
  include Logging

  attr_reader :wraith

  def initialize(config)
    @wraith = Wraith::Wraith.new(config)
  end

  # Output directory for the current run's screenshots.
  def dir
    wraith.directory
  end

  # Baseline directory used by history/latest modes.
  def history_dir
    wraith.history_dir
  end

  def paths
    wraith.paths
  end

  # Paths from the YAML config, falling back to the spider output file.
  # NOTE(review): `eval` executes the spider file's contents as Ruby.
  # This is only safe because the file is generated by Wraith::Spider
  # itself — never point `spider_file` at untrusted input.
  def spider_paths
    if !paths
      paths = File.read(wraith.spider_file)
      eval(paths)
    else
      wraith.paths
    end
  end

  def clear_shots_folder
    FileUtils.rm_rf("./#{dir}")
    FileUtils.mkdir_p("#{dir}")
  end

  # Promotes the current run to the new baseline: moves everything from
  # the live directory into history_dir (dropping thumbnails) and
  # renames "latest.png" shots to "base.png".
  def copy_old_shots
    if history_dir.nil?
      logger.error "no `history_dir` attribute found in config. Cannot copy files."
    else
      FileUtils.mkdir_p("#{history_dir}")
      FileUtils.cp_r("#{dir}/.", "#{history_dir}/")
      FileUtils.rm_rf("#{history_dir}/thumbnails") # thumbnails aren't generated until the gallery stage anyway
      FileUtils.rm_rf("#{dir}") # get rid of the live folder
      Dir["#{history_dir}/**/*.png"].each do |filepath|
        new_name = filepath.gsub("latest.png", "base.png")
        File.rename(filepath, new_name)
      end
    end
  end

  # Copies each path's baseline shots back into the live directory so
  # `wraith latest` can diff against them.
  def copy_base_images
    logger.info "COPYING BASE IMAGES"
    wraith.paths.each do |path|
      # each entry is [label, value]; only the label (folder name) is used
      path = path[0]
      logger.info "Copying #{history_dir}/#{path} to #{dir}"
      FileUtils.cp_r(Dir.glob("#{history_dir}/#{path}"), dir)
    end
  end

  # Creates "<dir>/<label>" and "<dir>/thumbnails/<label>" per path.
  # Plain-string entries derive the label from the path ("/" -> "__").
  def create_folders
    spider_paths.each do |folder_label, path|
      unless path
        path = folder_label
        folder_label = path.gsub("/", "__")
      end
      FileUtils.mkdir_p("#{dir}/thumbnails/#{folder_label}")
      FileUtils.mkdir_p("#{dir}/#{folder_label}")
    end
    logger.info "Creating Folders"
  end

  # In diffs_only mode, deletes (from disk and from `dirs`) any path
  # whose shots all show a 0% difference.
  def tidy_shots_folder(dirs)
    if wraith.mode == "diffs_only"
      dirs.each do |folder_name, shot_info|
        if shot_info.none? { |_k, v| v[:data] > 0 }
          FileUtils.rm_rf("#{wraith.directory}/#{folder_name}")
          dirs.delete(folder_name)
        end
      end
    end
  end

  # Returns false as soon as any shot lacks a diff image or exceeds the
  # configured diff threshold; returns true when the data could not be
  # read (treated as a failure by the gallery); otherwise returns the
  # (truthy) dirs hash.
  def threshold_rate(dirs)
    dirs.each do |_folder_name, shot_info|
      shot_info.each do |_k, v|
        begin
          return false unless v.include?(:diff)
          return false if v[:data] > wraith.threshold
        rescue
          return true
        end
      end
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/compare_images.rb | lib/wraith/compare_images.rb | require "wraith"
require "wraith/helpers/logger"
require "image_size"
require "open3"
require "parallel"
require "shellwords"
# Diffs each base/compare screenshot pair with ImageMagick `compare`,
# writing a diff image and a data.txt containing the diff percentage.
class Wraith::CompareImages
  include Logging

  attr_reader :wraith

  def initialize(config)
    @wraith = Wraith::Wraith.new(config)
  end

  # Pairs up the sorted shot files (base, compare) per path/size and
  # diffs each pair in parallel worker processes.
  def compare_images
    files = Dir.glob("#{wraith.directory}/*/*.png").sort
    Parallel.each(files.each_slice(2), :in_processes => Parallel.processor_count) do |base, compare|
      # derive sibling filenames from the base shot; the dot before
      # "png" is now escaped so the regex only matches a real extension
      diff = base.gsub(/([a-zA-Z0-9]+)\.png$/, "diff.png")
      info = base.gsub(/([a-zA-Z0-9]+)\.png$/, "data.txt")
      logger.info "Comparing #{base} and #{compare}"
      compare_task(base, compare, diff, info)
      logger.info "Saved diff"
    end
  end

  # Writes the diff percentage (changed pixels / total pixels * 100,
  # rounded to 2 dp) into the data file for the gallery to read.
  def percentage(img_size, px_value, info)
    pixel_count = (px_value / img_size) * 100
    rounded = pixel_count.round(2)
    File.open(info, "w") { |file| file.write(rounded) }
  end

  # Runs ImageMagick `compare` (the AE metric prints the changed-pixel
  # count on stderr). FIX: all three paths are now shell-escaped —
  # previously only `compare` was, so paths containing spaces or shell
  # metacharacters broke (or could inject into) the command line.
  def compare_task(base, compare, output, info)
    cmdline = "compare -fuzz #{wraith.fuzz} -metric AE -highlight-color #{wraith.highlight_color} #{base.shellescape} #{compare.shellescape} #{output.shellescape}"
    px_value = Open3.popen3(cmdline) { |_stdin, _stdout, stderr, _wait_thr| stderr.read }.to_f
    begin
      img_size = ImageSize.path(output).size.inject(:*)
      percentage(img_size, px_value, info)
    rescue
      # `compare` failed to produce a usable diff image; record "invalid"
      # so the gallery can flag the path instead of crashing.
      File.open(info, "w") { |file| file.write("invalid") } unless File.exist?(output)
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/crop.rb | lib/wraith/crop.rb | require "wraith"
require "wraith/helpers/logger"
require "image_size"
require "parallel"
require "shellwords"
# Pads base/compare screenshot pairs to a common canvas size so that
# ImageMagick `compare` sees equally sized inputs.
class Wraith::CropImages
  include Logging

  attr_reader :wraith

  def initialize(config)
    @wraith = Wraith::Wraith.new(config)
  end

  # Pairs up the sorted shots (base, compare) and processes each pair in
  # parallel worker processes.
  def crop_images
    files = Dir.glob("#{wraith.directory}/*/*.png").sort
    Parallel.each(files.each_slice(2), :in_processes => Parallel.processor_count) do |base, compare|
      crop_if_necessary base, compare
    end
  end

  # Extends both images to the larger of the two sizes, unless they
  # already match exactly (returns true in that case).
  def crop_if_necessary(base, compare)
    # read each file once instead of once per dimension (was 4 reads)
    base_width, base_height = image_dimensions(base)
    compare_width, compare_height = image_dimensions(compare)

    if base_height == compare_height && base_width == compare_width
      logger.debug "Both images are exactly #{base_width}x#{base_height} - no cropping required. (#{base}, #{compare})"
      return true
    end

    max_width = [base_width, compare_width].max
    max_height = [base_height, compare_height].max
    logger.debug "Cropping both images to #{max_width}x#{max_height}. (#{base}, #{compare})"
    [base, compare].each do |image_to_crop|
      run_crop_task(image_to_crop, max_height, max_width)
    end
  end

  # Pads `crop` in place to width x height with a transparent background.
  def run_crop_task(crop, height, width)
    `convert #{crop.shellescape} -background none -extent #{width}x#{height} #{crop.shellescape}`
  end

  # [width, height] of the image. FIX: the file is opened with a block so
  # the handle is closed promptly (the old code leaked an open File per
  # call until GC finalised it).
  def image_dimensions(image)
    File.open(image, "rb") { |f| ImageSize.new(f.read).size }
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/spider.rb | lib/wraith/spider.rb | require "wraith"
require "wraith/helpers/logger"
require "yaml"
require "anemone"
require "uri"
# Crawls the configured base domain with Anemone and writes every
# discovered path to the configured imports YML file.
class Wraith::Spider
  include Logging

  # File extensions to skip while crawling (assets, media, documents).
  # NOTE(review): "asx" appears twice in this list — harmless, but could
  # be deduplicated.
  EXT = %w(flv swf png jpg gif asx zip rar tar 7z \
    gz jar js css dtd xsd ico raw mp3 mp4 m4a \
    wav wmv ape aac ac3 wma aiff mpg mpeg \
    avi mov ogg mkv mka asx asf mp2 m1v \
    m3u f4v pdf doc xls ppt pps bin exe rss xml)

  attr_reader :wraith

  # imports_must_resolve is off because this command exists to create
  # the imports file in the first place.
  def initialize(config)
    @wraith = Wraith::Wraith.new(config, { imports_must_resolve: false })
    @paths = {}
  end

  # Visits every crawlable page under base_domain (skipping asset
  # extensions and user-configured skips), records each path, then
  # writes the result to disk.
  def crawl
    logger.info "Crawling #{wraith.base_domain}"
    Anemone.crawl(wraith.base_domain) do |anemone|
      anemone.skip_links_like(/\.(#{EXT.join('|')})$/)
      # Add user specified skips
      anemone.skip_links_like(wraith.spider_skips)
      anemone.on_every_page do |page|
        logger.info " #{page.url.path}"
        add_path(page.url.path)
      end
    end
    logger.info "Crawl complete."
    write_file
  end

  # "/" is stored under the key "home"; other paths get a key derived
  # from the path with "/" turned into "__".
  def add_path(path)
    @paths[path == "/" ? "home" : path.gsub("/", "__").chomp("__").downcase] = path.downcase
  end

  # Serialises {"paths" => {...}} as YAML into the imports file inside
  # the config directory.
  def write_file
    logger.info "Writing to YML file..."
    config = {}
    config['paths'] = @paths
    File.open("#{wraith.config_dir}/#{wraith.imports}", "w+") do |file|
      file.write(config.to_yaml)
      logger.info "Spider paths written to #{wraith.imports}"
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/helpers/capture_options.rb | lib/wraith/helpers/capture_options.rb | require "wraith"
require "wraith/helpers/utilities"
# Wraps a single path entry from the config, normalising the hash form
# ({"path" => ..., "selector" => ...}) and the plain-string form into
# one interface for the capture step.
class CaptureOptions
  attr_reader :options, :wraith

  def initialize(options, wraith)
    @options = options
    @wraith = wraith
  end

  # The path to capture (unwraps the hash form via casper?).
  def path
    casper?(options)
  end

  # CSS selector to screenshot; defaults to the whole body.
  def selector
    options["selector"] || "body"
  end

  # Whether to resize (true) or reload (false) between widths; a
  # path-level `resize_or_reload` value overrides the YAML-file-level
  # setting on the wraith object.
  def resize
    choice = options["resize_or_reload"]
    return wraith.resize unless choice
    choice == "resize"
  end

  # Absolute path of the per-path before_capture hook, or false if the
  # path has none.
  def before_capture
    hook = options["before_capture"]
    hook ? convert_to_absolute(hook) : false
  end

  def base_url
    base_urls(path)
  end

  def compare_url
    compare_urls(path)
  end

  # nil when no base domain is configured.
  def base_urls(path)
    wraith.base_domain + path unless wraith.base_domain.nil?
  end

  # nil when no comparison domain is configured.
  def compare_urls(path)
    wraith.comp_domain + path unless wraith.comp_domain.nil?
  end

  # Hash entries carry the path under "path"; plain strings *are* the path.
  def casper?(options)
    options["path"] || options
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/helpers/logger.rb | lib/wraith/helpers/logger.rb | # Logging Module, credit: http://stackoverflow.com/a/6768164
require "logger"
# Mixin exposing a single, process-wide Logger that writes to STDOUT.
# (Credit: http://stackoverflow.com/a/6768164)
module Logging
  # Instance-level accessor available to every including class.
  def logger
    Logging.logger
  end

  # Lazily built, memoized module-level logger. INFO messages are
  # printed bare; every other severity is prefixed with its name.
  def self.logger
    @logger ||= Logger.new(STDOUT).tap do |log|
      log.formatter = proc do |severity, _datetime, _progname, msg|
        severity == "INFO" ? "#{msg}\n" : "#{severity}: #{msg}\n"
      end
    end
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/helpers/utilities.rb | lib/wraith/helpers/utilities.rb | require "wraith/helpers/custom_exceptions"
# Runs the given block, rescuing Wraith's own CustomError subclasses and
# logging their message instead of crashing with a stack trace.
# Non-custom errors deliberately propagate — for genuine bugs we want
# the full trace.
def within_acceptable_limits
  yield
rescue CustomError => e
  logger.error e.message
  # other errors, such as SystemError, will not be caught nicely and will give a stack trace (which we'd need)
end
# Returns the directory portion of a "/"-separated file path, i.e.
# everything before the final segment ("" when there is no directory).
def absolute_path_of_dir(filepath)
  segments = filepath.split('/')
  segments.first(segments.size - 1).join('/')
end
# Normalises a script path to an absolute one:
#   nil/false            -> the string "false" (consumed by the JS layer)
#   absolute unix path   -> unchanged
#   absolute Windows path (e.g. C:/Code/Wraith/javascript/global.js)
#                        -> unchanged
#   anything else        -> anchored to the current working directory
def convert_to_absolute(filepath)
  return "false" unless filepath
  return filepath if filepath[0] == "/"
  return filepath if filepath.match(/^[A-Za-z]:\/(.+)$/)
  "#{Dir.pwd}/#{filepath}"
end
# Logs version information for Wraith and each external tool it may
# shell out to; "<tool> not installed" is shown when a probe command
# fails. Used by `wraith info` and verbose validation.
def list_debug_information
  wraith_version      = Wraith::VERSION
  command_run         = ARGV.join ' '
  ruby_version        = run_command_safely("ruby -v") || "Ruby not installed"
  phantomjs_version   = run_command_safely("phantomjs --version") || "PhantomJS not installed"
  chromedriver_version = run_command_safely("chromedriver --version") || "chromedriver not installed"
  casperjs_version    = run_command_safely("casperjs --version") || "CasperJS not installed"
  imagemagick_version = run_command_safely("convert -version") || "ImageMagick not installed"

  logger.debug "#################################################"
  logger.debug "  Command run: #{command_run}"
  logger.debug "  Wraith version: #{wraith_version}"
  logger.debug "  Ruby version: #{ruby_version}"
  logger.debug "  ImageMagick: #{imagemagick_version}"
  logger.debug "  PhantomJS version: #{phantomjs_version}"
  logger.debug "  chromedriver version: #{chromedriver_version}"
  logger.debug "  CasperJS version: #{casperjs_version}"
  # @TODO - add a SlimerJS equivalent
  logger.debug "#################################################"
end
# Runs a shell command and returns the first line of its output without
# the trailing newline; returns false when the command cannot be run or
# produced no output.
def run_command_safely(command)
  `#{command}`.lines.first.chomp
rescue StandardError
  false
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/helpers/custom_exceptions.rb | lib/wraith/helpers/custom_exceptions.rb | class CustomError < StandardError
end
# Specific, gracefully reported failure modes — all rescued and logged
# by within_acceptable_limits (CustomError is defined just above).
class InvalidDomainsError < CustomError; end
class MissingRequiredPropertyError < CustomError; end
class ConfigFileDoesNotExistError < CustomError; end
class PropertyOutOfContextError < CustomError; end
class InvalidYamlError < CustomError; end

# Raised when expected screenshot artefacts (data.txt, diff, base,
# latest) are missing from the output directory; carries a helpful
# default message.
class MissingImageError < CustomError
  def initialize(msg = false)
    msg ||= "Something went wrong! It looks like you're missing some images. Check your output directory and make sure that each path has four files for every screen size (data.txt, diff, base, latest). If in doubt, delete your output directory and run Wraith again."
    super(msg)
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
bbc/wraith | https://github.com/bbc/wraith/blob/4f6eb0b7625d468bb9e3061fd2628f48df4c4e37/lib/wraith/helpers/save_metadata.rb | lib/wraith/helpers/save_metadata.rb | require "wraith"
require "fileutils"
# Small value object that knows how screenshot files are named on disk
# and which label suffix to use when re-capturing in history mode.
class SaveMetadata
  attr_reader :wraith, :history

  def initialize(config, history)
    @wraith = config
    @history = history
  end

  # Re-captured ("latest") runs get a suffix so they can sit alongside
  # the baseline shots in the same folder.
  def history_label
    if history
      "_latest"
    else
      ""
    end
  end

  # Full path for one shot file, creating its folder on demand.
  # Multi-width captures collapse the width component to "MULTI".
  def file_names(width, label, domain_label)
    width = "MULTI" if width.is_a?(Array)
    shot_dir = "#{wraith.directory}/#{label}"
    FileUtils.mkdir_p shot_dir # ensure the directory exists
    "#{shot_dir}/#{width}_#{engine}_#{domain_label}.png"
  end

  # Label for shots of the base domain (suffixed in history mode).
  def base_label
    "#{wraith.base_domain_label}#{history_label}"
  end

  # Label for shots of the comparison domain (suffixed in history mode).
  def compare_label
    "#{wraith.comp_domain_label}#{history_label}"
  end

  def engine
    wraith.engine
  end
end
| ruby | Apache-2.0 | 4f6eb0b7625d468bb9e3061fd2628f48df4c4e37 | 2026-01-04T15:45:27.176979Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/helpers/blazer/base_helper.rb | app/helpers/blazer/base_helper.rb | module Blazer
# View helpers shared by Blazer's pages (inside the Blazer namespace).
module BaseHelper
  # With an argument, stores the page title via content_for; without,
  # returns the stored title (or nil when none was captured).
  def blazer_title(title = nil)
    if title
      content_for(:title) { title }
    else
      content_for?(:title) ? content_for(:title) : nil
    end
  end

  # Anchored with \A..\z so the whole value must be a URL, not just
  # contain one.
  BLAZER_URL_REGEX = /\Ahttps?:\/\/[\S]+\z/
  BLAZER_IMAGE_EXT = %w[png jpg jpeg gif]

  # Renders one result-cell value:
  # - numbers get thousands delimiters, except id-like columns
  # - URL strings become links, or inline images (referrer suppressed)
  #   when Blazer.images is enabled and the value looks like an image
  # - everything else is returned as-is
  def blazer_format_value(key, value)
    if value.is_a?(Numeric) && !key.to_s.end_with?("id") && !key.to_s.start_with?("id")
      number_with_delimiter(value)
    elsif value.is_a?(String) && value =~ BLAZER_URL_REGEX
      # see if image or link
      if Blazer.images && (key.include?("image") || BLAZER_IMAGE_EXT.include?(value.split(".").last.split("?").first.try(:downcase)))
        link_to value, target: "_blank" do
          image_tag value, referrerpolicy: "no-referrer"
        end
      else
        link_to value, value, target: "_blank"
      end
    else
      value
    end
  end

  # Serialises a Ruby value into a JS `var` statement, JSON-escaped so
  # it can be embedded safely inside a <script> tag.
  def blazer_js_var(name, value)
    "var #{name} = #{json_escape(value.to_json(root: false))};".html_safe
  end

  # Chart series label; nil becomes the literal string "null".
  def blazer_series_name(k)
    k.nil? ? "null" : k.to_s
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/controllers/blazer/queries_controller.rb | app/controllers/blazer/queries_controller.rb | module Blazer
class QueriesController < BaseController
before_action :set_query, only: [:show, :edit, :update, :destroy, :refresh]
before_action :set_data_source, only: [:tables, :docs, :schema, :cancel]
# Landing page: loads up to 1,000 queries, plus (unless filtering) all
# dashboards, serialised into lightweight hashes for the view.
def home
  set_queries(1000)

  if params[:filter]
    @dashboards = [] # TODO show my dashboards
  else
    @dashboards = Blazer::Dashboard.order(:name)
    @dashboards = @dashboards.includes(:creator) if Blazer.user_class
  end

  @dashboards =
    @dashboards.map do |d|
      {
        id: d.id,
        name: d.name,
        creator: blazer_user && d.try(:creator) == blazer_user ? "You" : d.try(:creator).try(Blazer.user_name),
        to_param: d.to_param,
        dashboard: true
      }
    end
end
# HTML requests are bounced to the home page; JSON returns the
# serialised query list built by set_queries.
def index
  respond_to do |format|
    format.html do
      redirect_to root_path
    end
    format.json do
      set_queries
      render json: @queries
    end
  end
end
# New-query form. The statement can be pre-filled from an existing query
# (fork_query_id) or from an uploaded table (upload_id).
def new
  @query = Blazer::Query.new(
    data_source: params[:data_source],
    name: params[:name]
  )
  if params[:fork_query_id]
    @query.statement ||= Blazer::Query.find(params[:fork_query_id]).try(:statement)
  end
  if params[:upload_id]
    upload = Blazer::Upload.find(params[:upload_id])
    upload_settings = Blazer.settings["uploads"]
    @query.data_source ||= upload_settings["data_source"]
    @query.statement ||= "SELECT * FROM #{upload.table_name} LIMIT 10"
  end
end
# Creates a query, attributing it to the current user and marking it
# active when the model supports those columns.
def create
  @query = Blazer::Query.new(query_params)
  @query.creator = blazer_user if @query.respond_to?(:creator)
  @query.status = "active" if @query.respond_to?(:status)

  if @query.save
    redirect_to query_path(@query, params: variable_params(@query))
  else
    render_errors @query
  end
end
# Query page: parses the statement's bind/smart variables, re-activates
# archived queries on view, and prepares the data needed to kick off a
# run from the client.
def show
  @statement = @query.statement_object
  @success = process_vars(@statement)

  @smart_vars = {}
  @sql_errors = []
  @bind_vars.each do |var|
    smart_var, error = parse_smart_variables(var, @statement.data_source)
    @smart_vars[var] = smart_var if smart_var
    @sql_errors << error if error
  end

  # viewing an archived query brings it back to life
  @query.update!(status: "active") if @query.respond_to?(:status) && @query.status.in?(["archived", nil])

  add_cohort_analysis_vars if @query.cohort_analysis?

  if @success
    @run_data = {statement: @query.statement, query_id: @query.id, data_source: @query.data_source, variables: variable_params(@query)}
    @run_data[:forecast] = "t" if params[:forecast]
    @run_data[:cohort_period] = params[:cohort_period] if params[:cohort_period]
  end
end
# Edit form only; @query is loaded by the set_query before_action.
def edit
end
def run
@query = Query.find_by(id: params[:query_id]) if params[:query_id]
# use query data source when present
data_source = @query.data_source if @query && @query.data_source
data_source ||= params[:data_source]
@data_source = Blazer.data_sources[data_source]
@statement = Blazer::Statement.new(params[:statement], @data_source)
# before process_vars
@cohort_analysis = @statement.cohort_analysis?
# fallback for now for users with open tabs
# TODO remove fallback in future version
@var_params = request.request_parameters["variables"] || request.request_parameters
@success = process_vars(@statement, @var_params)
@only_chart = params[:only_chart]
@run_id = blazer_params[:run_id]
run_cohort_analysis if @cohort_analysis
query_running = !@run_id.nil?
if query_running
@timestamp = blazer_params[:timestamp].to_i
@result = @data_source.run_results(@run_id)
@success = !@result.nil?
if @success
@data_source.delete_results(@run_id)
@columns = @result.columns
@rows = @result.rows
@error = @result.error
@just_cached = !@result.error && @result.cached_at.present?
@cached_at = nil
params[:data_source] = nil
render_run
elsif Time.now > Time.at(@timestamp + (@data_source.timeout || 600).to_i + 5)
# query lost
Rails.logger.info "[blazer lost query] #{@run_id}"
@error = "We lost your query :("
@rows = []
@columns = []
render_run
else
continue_run
end
elsif @success
@run_id = blazer_run_id
async = Blazer.async
options = {user: blazer_user, query: @query, refresh_cache: params[:check], run_id: @run_id, async: async}
if async && request.format.symbol != :csv
Blazer::RunStatementJob.perform_later(@data_source.id, @statement.statement, options.merge(values: @statement.values))
wait_start = Blazer.monotonic_time
loop do
sleep(0.1)
@result = @data_source.run_results(@run_id)
break if @result || Blazer.monotonic_time - wait_start > 3
end
else
@result = Blazer::RunStatement.new.perform(@statement, options)
end
if @result
@data_source.delete_results(@run_id) if @run_id && async
@columns = @result.columns
@rows = @result.rows
@error = @result.error
@cached_at = @result.cached_at
@just_cached = @result.just_cached
@forecast = @query && @result.forecastable? && params[:forecast]
if @forecast
@result.forecast
@forecast_error = @result.forecast_error
@forecast = @forecast_error.nil?
end
render_run
else
@timestamp = Time.now.to_i
continue_run
end
else
render layout: false
end
end
# Invalidates the cached result for @query, then redirects back to it.
def refresh
  refresh_query(@query)
  redirect_to query_path(@query, params: variable_params(@query))
end
# Updates a query — or, when the form was submitted with "Fork", saves
# the changes as a brand-new query owned by the current user. Requires
# edit permission on the target query.
def update
  if params[:commit] == "Fork"
    @query = Blazer::Query.new
    @query.creator = blazer_user if @query.respond_to?(:creator)
  end
  @query.status = "active" if @query.respond_to?(:status)
  unless @query.editable?(blazer_user)
    @query.errors.add(:base, "Sorry, permission denied")
  end
  if @query.errors.empty? && @query.update(query_params)
    redirect_to query_path(@query, params: variable_params(@query))
  else
    render_errors @query
  end
end
# Deletes the query when the current user may edit it; always redirects
# home (silently skipping the delete otherwise).
def destroy
  @query.destroy if @query.editable?(blazer_user)
  redirect_to root_path
end
# JSON list of the data source's tables (used by the editor sidebar).
def tables
  render json: @data_source.tables
end
# Documentation page for a data source's configured smart variables,
# linked columns and smart columns.
def docs
  @smart_variables = @data_source.smart_variables
  @linked_columns = @data_source.linked_columns
  @smart_columns = @data_source.smart_columns
end
# Schema browser page for the data source.
def schema
  @schema = @data_source.schema
end
# Cancels the in-flight run identified by the client's run id.
def cancel
  @data_source.cancel(blazer_run_id)
  head :ok
end
private
# before_action: resolves params[:data_source]; renders 404 for unknown
# names (re-raising any other Blazer::Error).
def set_data_source
  @data_source = Blazer.data_sources[params[:data_source]]
rescue Blazer::Error => e
  raise unless e.message.start_with?("Unknown data source:")
  render plain: "Unknown data source", status: :not_found
end
# 202 Accepted with the run id and timestamp the client posts back to
# poll for results.
def continue_run
  render json: {run_id: @run_id, timestamp: @timestamp}, status: :accepted
end
# Prepares presentation data for a finished run (column types, map
# markers, cohort analysis) and renders the results partial as HTML or
# the rows as a CSV attachment.
def render_run
  @checks = @query ? @query.checks.order(:id) : []

  @first_row = @rows.first || []
  @column_types = []
  if @rows.any?
    # infer each column's sort type from the first row's value
    @columns.each_with_index do |_, i|
      @column_types << (
        case @first_row[i]
        when Integer
          "int"
        when Float, BigDecimal
          "float"
        else
          "string-ins"
        end
      )
    end
  end

  # columns that should get a minimum display width in the table
  @min_width_types = @columns.each_with_index.select { |c, i| @first_row[i].is_a?(Time) || @first_row[i].is_a?(String) || @data_source.smart_columns[c] }.map(&:last)

  @smart_values = @result.smart_values if @result

  @linked_columns = @data_source.linked_columns

  @markers = []
  @geojson = []
  set_map_data if Blazer.maps?

  render_cohort_analysis if @cohort_analysis && !@error

  respond_to do |format|
    format.html do
      render layout: false
    end
    format.csv do
      # not ideal, but useful for testing
      raise Error, @error if @error && Rails.env.test?

      data = csv_data(@columns, @rows, @data_source)
      filename = "#{@query.try(:name).try(:parameterize).presence || 'query'}.csv"
      send_data data, type: "text/csv; charset=utf-8", disposition: "attachment", filename: filename
    end
  end
end
# Builds map overlays from the result set: point markers when a
# recognised lat/lon column pair is present (first pair with data wins),
# otherwise GeoJSON features from a "geojson" column whose values parse
# to JSON objects.
def set_map_data
  [["latitude", "longitude"], ["lat", "lon"], ["lat", "lng"]].each do |keys|
    lat_index = @columns.index(keys.first)
    lon_index = @columns.index(keys.last)
    if lat_index && lon_index
      @markers =
        @rows.select do |r|
          r[lat_index] && r[lon_index]
        end.map do |r|
          {
            tooltip: map_tooltip(r.each_with_index.reject { |v, i| i == lat_index || i == lon_index }),
            latitude: r[lat_index],
            longitude: r[lon_index]
          }
        end

      return if @markers.any?
    end
  end

  geo_index = @columns.index("geojson")
  if geo_index
    @geojson =
      @rows.filter_map do |r|
        if r[geo_index].is_a?(String) && (geometry = (JSON.parse(r[geo_index]) rescue nil)) && geometry.is_a?(Hash)
          {
            tooltip: map_tooltip(r.each_with_index.reject { |v, i| i == geo_index }),
            geometry: geometry
          }
        end
      end
  end
end
# Builds the HTML tooltip for a map marker. r is a list of
# [value, column_index] pairs (coordinate columns already removed).
# Each pair becomes an escaped "<strong>column:</strong> value" line;
# lines are joined with <br> and capped at 140 chars on a word boundary.
def map_tooltip(r)
  lines = r.map do |value, index|
    label = ERB::Util.html_escape(@columns[index])
    "<strong>#{label}:</strong> #{ERB::Util.html_escape(value)}"
  end
  lines.join("<br>").truncate(140, separator: " ")
end
# Builds the list of queries for the home/search views as an array of
# lightweight hashes. limit applies only to the default (unfiltered)
# listing; the "mine" and "viewed" filters use their own orderings.
def set_queries(limit = nil)
  @queries = Blazer::Query.named.select(:id, :name, :creator_id, :statement)
  @queries = @queries.includes(:creator) if Blazer.user_class
  if blazer_user && params[:filter] == "mine"
    @queries = @queries.where(creator_id: blazer_user.id).reorder(updated_at: :desc)
  elsif blazer_user && params[:filter] == "viewed" && Blazer.audit
    # most recently viewed first, based on the audit log
    @queries = queries_by_ids(Blazer::Audit.where(user_id: blazer_user.id).order(created_at: :desc).limit(500).pluck(:query_id).uniq)
  else
    @queries = @queries.limit(limit) if limit
    @queries = @queries.active.order(:name)
  end
  @queries = @queries.to_a
  # if we filled the page, assume there are more
  @more = limit && @queries.size >= limit
  # names starting with "#" are personal queries: visible to creator only
  @queries = @queries.select { |q| !q.name.to_s.start_with?("#") || q.try(:creator).try(:id) == blazer_user.try(:id) }
  @queries =
    @queries.map do |q|
      {
        id: q.id,
        name: q.name,
        creator: blazer_user && q.try(:creator) == blazer_user ? "You" : q.try(:creator).try(Blazer.user_name),
        vars: q.variables.join(", "),
        to_param: q.to_param
      }
    end
end
# Loads the named, active queries for the given ids, preserving the
# order of favorite_query_ids. Ids that no longer resolve to a query
# are dropped.
def queries_by_ids(favorite_query_ids)
  queries = Blazer::Query.active.named.where(id: favorite_query_ids)
  queries = queries.includes(:creator) if Blazer.user_class
  queries = queries.index_by(&:id)
  # filter_map keeps the requested order while skipping missing ids
  # (same idiom already used in set_map_data)
  favorite_query_ids.filter_map { |query_id| queries[query_id] }
end
# before_action: find the query from the numeric id prefix of the
# slug param (to_param is "<id>-<name>").
def set_query
  @query = Blazer::Query.find(params[:id].to_s.split("-").first)
end
# Plain-text 403 response used when access to a query is denied.
def render_forbidden
  render plain: "Access denied", status: :forbidden
end
# Strong parameters for creating/updating a query.
def query_params
  params.require(:query).permit(:name, :description, :statement, :data_source)
end
# Params nested under :blazer (empty hash when absent).
def blazer_params
  params[:blazer] || {}
end
# Serializes a result set to a CSV string: header row first, then one
# line per result row. Time values are converted via blazer_time_value
# so they render in the configured time zone; other values pass through.
def csv_data(columns, rows, data_source)
  CSV.generate do |csv|
    csv << columns
    rows.each do |row|
      converted = row.each_with_index.map do |value, i|
        if value.is_a?(Time)
          blazer_time_value(data_source, columns[i], value)
        else
          value
        end
      end
      csv << converted
    end
  end
end
# Formats a Time for CSV/display: columns whose name ends with one of
# the data source's local_time_suffix entries are shown as stored
# (with the " UTC" marker stripped); all others are converted to
# Blazer.time_zone.
def blazer_time_value(data_source, k, v)
  data_source.local_time_suffix.any? { |s| k.ends_with?(s) } ? v.to_s.sub(" UTC", "") : v.in_time_zone(Blazer.time_zone)
end
helper_method :blazer_time_value
# Sanitizes the client-supplied run id: only letters, digits, and
# hyphens survive (keeps it safe to use as a cache key / identifier).
def blazer_run_id
  raw = params[:run_id].to_s
  raw.gsub(/[^a-z0-9\-]/i, "")
end
# Prepares a cohort-analysis run. If the data source can't do cohort
# analysis, records the error; otherwise rewrites the statement —
# unless we're just showing raw rows (no query_id, or an error).
def run_cohort_analysis
  unless @statement.data_source.supports_cohort_analysis?
    @cohort_error = "This data source does not support cohort analysis"
  end
  @show_cohort_rows = !params[:query_id] || @cohort_error
  cohort_analysis_statement(@statement) unless @show_cohort_rows
end
# Post-processes a cohort-analysis result.
# - When showing raw rows, validates the expected columns/types and
#   records any problem in @cohort_error.
# - Otherwise pivots the (cohort_date, bucket, count) rows into the
#   cohort table: one row per cohort date, one column per period.
def render_cohort_analysis
  if @show_cohort_rows
    @cohort_analysis = false
    @row_limit = 1000
    # check results
    unless @cohort_error
      # check names
      expected_columns = ["user_id", "conversion_time"]
      missing_columns = expected_columns - @result.columns
      if missing_columns.any?
        @cohort_error = "Expected user_id and conversion_time columns"
      end
      # check types (user_id can be any type)
      unless @cohort_error
        column_types = @result.columns.zip(@result.column_types).to_h
        if !column_types["cohort_time"].in?(["time", nil])
          @cohort_error = "cohort_time must be time column"
        elsif !column_types["conversion_time"].in?(["time", nil])
          @cohort_error = "conversion_time must be time column"
        end
      end
    end
  else
    @today = Blazer.time_zone.today
    @min_cohort_date, @max_cohort_date = @result.rows.map { |r| r[0] }.minmax
    # counts keyed by [cohort_date, period_index]
    @buckets = {}
    @rows.each do |r|
      @buckets[[r[0], r[1]]] = r[2]
    end
    # build the list of cohort dates, newest first, stepping back by
    # the selected period
    @cohort_dates = []
    current_date = @max_cohort_date
    while current_date && current_date >= @min_cohort_date
      @cohort_dates << current_date
      current_date =
        case @cohort_period
        when "day"
          current_date - 1
        when "week"
          current_date - 7
        else
          current_date.prev_month
        end
    end
    num_cols = @cohort_dates.size
    @columns = ["Cohort", "Users"] + num_cols.times.map { |i| "#{@conversion_period.titleize} #{i + 1}" }
    rows = []
    date_format = @cohort_period == "month" ? "%b %Y" : "%b %-e, %Y"
    @cohort_dates.each do |date|
      # first cell: cohort label; second: cohort size (bucket 0)
      row = [date.strftime(date_format), @buckets[[date, 0]] || 0]
      num_cols.times do |i|
        # only include periods that have started as of today
        if @today >= date + (@cohort_days * i)
          row << (@buckets[[date, i + 1]] || 0)
        end
      end
      rows << row
    end
    @rows = rows
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/controllers/blazer/checks_controller.rb | app/controllers/blazer/checks_controller.rb | module Blazer
class ChecksController < BaseController
  before_action :set_check, only: [:edit, :update, :destroy, :run]
  # Lists checks ordered worst-state-first (new/disabled/error/timed
  # out/failing before passing), optionally filtered by a
  # case-insensitive search over the query name and emails.
  def index
    state_order = [nil, "disabled", "error", "timed out", "failing", "passing"]
    @checks = Blazer::Check.joins(:query).includes(:query).order("blazer_queries.name, blazer_checks.id").to_a.sort_by { |q| state_order.index(q.state) || 99 }
    # downcase the search term as well as the haystack; previously a
    # term containing uppercase letters could never match
    @checks.select! { |c| "#{c.query.name} #{c.emails}".downcase.include?(params[:q].to_s.downcase) } if params[:q]
  end
  def new
    @check = Blazer::Check.new(query_id: params[:query_id])
  end
  def create
    @check = Blazer::Check.new(check_params)
    # use creator_id instead of creator
    # since we setup association without checking if column exists
    @check.creator = blazer_user if @check.respond_to?(:creator_id=) && blazer_user
    if @check.save
      redirect_to query_path(@check.query)
    else
      render_errors @check
    end
  end
  def update
    if @check.update(check_params)
      redirect_to query_path(@check.query)
    else
      render_errors @check
    end
  end
  def destroy
    @check.destroy
    redirect_to checks_path
  end
  # "Run now" just redirects to the underlying query page.
  def run
    @query = @check.query
    redirect_to query_path(@query)
  end
  private
  def check_params
    params.require(:check).permit(:query_id, :emails, :slack_channels, :invert, :check_type, :schedule)
  end
  def set_check
    @check = Blazer::Check.find(params[:id])
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/controllers/blazer/dashboards_controller.rb | app/controllers/blazer/dashboards_controller.rb | module Blazer
class DashboardsController < BaseController
  before_action :set_dashboard, only: [:show, :edit, :update, :destroy, :refresh]
  def new
    @dashboard = Blazer::Dashboard.new
  end
  # Creates a dashboard plus its ordered query list (see
  # update_dashboard, which handles both create and update).
  def create
    @dashboard = Blazer::Dashboard.new
    # use creator_id instead of creator
    # since we setup association without checking if column exists
    @dashboard.creator = blazer_user if @dashboard.respond_to?(:creator_id=) && blazer_user
    if update_dashboard(@dashboard)
      redirect_to dashboard_path(@dashboard)
    else
      render_errors @dashboard
    end
  end
  # Renders the dashboard: resolves variables across all of its
  # queries and collects smart-variable options from each data source.
  def show
    @queries = @dashboard.dashboard_queries.order(:position).preload(:query).map(&:query)
    @queries.each do |query|
      # process_vars accumulates into @bind_vars; @success ends up
      # reflecting the last query's runnability
      @success = process_vars(query.statement_object)
    end
    @bind_vars ||= []
    @smart_vars = {}
    @sql_errors = []
    @data_sources = @queries.map { |q| Blazer.data_sources[q.data_source] }.uniq
    @bind_vars.each do |var|
      @data_sources.each do |data_source|
        smart_var, error = parse_smart_variables(var, data_source)
        # merge options from every data source, dropping duplicates
        ((@smart_vars[var] ||= []).concat(smart_var)).uniq! if smart_var
        @sql_errors << error if error
      end
    end
    add_cohort_analysis_vars if @queries.any?(&:cohort_analysis?)
  end
  def edit
  end
  def update
    if update_dashboard(@dashboard)
      redirect_to dashboard_path(@dashboard, params: variable_params(@dashboard))
    else
      render_errors @dashboard
    end
  end
  def destroy
    @dashboard.destroy
    redirect_to root_path
  end
  # Clears the cached results of every query on the dashboard.
  def refresh
    @dashboard.queries.each do |query|
      refresh_query(query)
    end
    redirect_to dashboard_path(@dashboard, params: variable_params(@dashboard))
  end
  private
  def dashboard_params
    params.require(:dashboard).permit(:name)
  end
  def set_dashboard
    @dashboard = Blazer::Dashboard.find(params[:id])
  end
  # Saves name + ordered query list atomically. Returns truthy on
  # success, falsy (rolling the transaction back) otherwise.
  def update_dashboard(dashboard)
    dashboard.assign_attributes(dashboard_params)
    Blazer::Dashboard.transaction do
      if params[:query_ids].is_a?(Array)
        # preserve the submitted ordering
        query_ids = params[:query_ids].map(&:to_i)
        @queries = Blazer::Query.find(query_ids).sort_by { |q| query_ids.index(q.id) }
      end
      if dashboard.save
        if @queries
          @queries.each_with_index do |query, i|
            dashboard_query = dashboard.dashboard_queries.where(query_id: query.id).first_or_initialize
            dashboard_query.position = i
            dashboard_query.save!
          end
          if dashboard.persisted?
            # remove queries no longer on the dashboard
            dashboard.dashboard_queries.where.not(query_id: query_ids).destroy_all
          end
        end
        true
      end
    end
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/controllers/blazer/base_controller.rb | app/controllers/blazer/base_controller.rb | module Blazer
class BaseController < ApplicationController
  # Blazer runs inside the host app's ApplicationController but opts
  # out of its callbacks and helpers so they don't interfere.
  # skip filters
  filters = _process_action_callbacks.map(&:filter) - [:activate_authlogic]
  skip_before_action(*filters, raise: false)
  skip_after_action(*filters, raise: false)
  skip_around_action(*filters, raise: false)
  clear_helpers
  # optional HTTP basic auth via environment variables
  if ENV["BLAZER_PASSWORD"]
    http_basic_authenticate_with name: ENV["BLAZER_USERNAME"], password: ENV["BLAZER_PASSWORD"]
  end
  if Blazer.settings["before_action"]
    raise Blazer::Error, "The docs for protecting Blazer with a custom before_action had an incorrect example from August 2017 to June 2018. The example method had a boolean return value. However, you must render or redirect if a user is unauthorized rather than return a falsy value. Double check that your before_action works correctly for unauthorized users (if it worked when added, there should be no issue). Then, change before_action to before_action_method in config/blazer.yml."
  end
  if Blazer.before_action
    before_action Blazer.before_action.to_sym
  end
  protect_from_forgery with: :exception
  # optionally relax the host app's Content Security Policy
  if Blazer.override_csp
    after_action do
      response.headers['Content-Security-Policy'] = "default-src 'self' https: 'unsafe-inline' 'unsafe-eval' data: blob:"
    end
  end
  layout "blazer/application"
  default_form_builder ::ActionView::Helpers::FormBuilder
  private
  # Fills in defaults for the statement's variables, records them in
  # @bind_vars, and binds values when every variable has one.
  # Returns true if the statement is runnable (all variables present).
  def process_vars(statement, var_params = nil)
    var_params ||= request.query_parameters
    (@bind_vars ||= []).concat(statement.variables).uniq!
    # update in-place so populated in view and consistent across queries on dashboard
    @bind_vars.each do |var|
      if !var_params[var]
        default = statement.data_source.variable_defaults[var]
        # only add if default exists
        var_params[var] = default if default
      end
    end
    runnable = @bind_vars.all? { |v| var_params[v] }
    statement.add_values(var_params) if runnable
    runnable
  end
  # Clears the cached result for a query (used by refresh actions).
  def refresh_query(query)
    statement = query.statement_object
    runnable = process_vars(statement)
    cohort_analysis_statement(statement) if statement.cohort_analysis?
    statement.clear_cache if runnable
  end
  # Exposes the cohort_period pseudo-variable for cohort queries.
  def add_cohort_analysis_vars
    @bind_vars << "cohort_period" unless @bind_vars.include?("cohort_period")
    @smart_vars["cohort_period"] = ["day", "week", "month"] if @smart_vars
    # TODO create var_params method
    request.query_parameters["cohort_period"] ||= "week"
  end
  # Resolves a smart-variable definition into [choices, error], where
  # choices is an array of [label, value] pairs. A definition may be a
  # hash (value => label), an array of values, or a SQL query to run.
  # The definition may be inherited from other data sources.
  def parse_smart_variables(var, data_source)
    smart_var_data_source =
      ([data_source] + Array(data_source.settings["inherit_smart_settings"]).map { |ds| Blazer.data_sources[ds] }).find { |ds| ds.smart_variables[var] }
    if smart_var_data_source
      query = smart_var_data_source.smart_variables[var]
      if query.is_a?(Hash)
        smart_var = query.map { |k, v| [v, k] }
      elsif query.is_a?(Array)
        smart_var = query.map { |v| [v, v] }
      elsif query
        result = smart_var_data_source.run_statement(query)
        smart_var = result.rows.map { |v| v.reverse }
        error = result.error if result.error
      end
    end
    [smart_var, error]
  end
  # Applies the cohort-analysis rewrite to the statement based on the
  # requested period (day/week/month; anything else falls back to week).
  def cohort_analysis_statement(statement)
    @cohort_period = params["cohort_period"] || "week"
    @cohort_period = "week" unless ["day", "week", "month"].include?(@cohort_period)
    # for now
    @conversion_period = @cohort_period
    @cohort_days =
      case @cohort_period
      when "day"
        1
      when "week"
        7
      when "month"
        30
      end
    statement.apply_cohort_analysis(period: @cohort_period, days: @cohort_days)
  end
  # Slices request params down to the resource's declared variables
  # (used to round-trip variable values through redirects).
  def variable_params(resource, var_params = nil)
    permitted_keys = resource.variables
    var_params ||= request.query_parameters
    var_params.slice(*permitted_keys)
  end
  helper_method :variable_params
  def nested_variable_params(resource)
    variable_params(resource, request.query_parameters["variables"] || {})
  end
  helper_method :nested_variable_params
  # The current user, via the configured user_method (nil if unset or
  # the host controller doesn't respond to it).
  def blazer_user
    send(Blazer.user_method) if Blazer.user_method && respond_to?(Blazer.user_method, true)
  end
  helper_method :blazer_user
  # Re-renders the appropriate form (new/edit) with a 422 status.
  def render_errors(resource)
    @errors = resource.errors
    action = resource.persisted? ? :edit : :new
    render action, status: :unprocessable_entity
  end
  # do not inherit from ApplicationController - #120
  def default_url_options
    {}
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/controllers/blazer/uploads_controller.rb | app/controllers/blazer/uploads_controller.rb | module Blazer
class UploadsController < BaseController
  before_action :ensure_uploads
  before_action :set_upload, only: [:show, :edit, :update, :destroy]
  def index
    @uploads = Blazer::Upload.order(:table)
  end
  def new
    @upload = Blazer::Upload.new
  end
  # Creates the upload record and its backing table from the CSV file
  # in one transaction; a bad file rolls the record back.
  def create
    @upload = Blazer::Upload.new(upload_params)
    # use creator_id instead of creator
    # since we setup association without checking if column exists
    @upload.creator = blazer_user if @upload.respond_to?(:creator_id=) && blazer_user
    success = params.require(:upload).key?(:file)
    if success
      Blazer::Upload.transaction do
        success = @upload.save
        if success
          begin
            update_file(@upload)
          rescue CSV::MalformedCSVError, Blazer::UploadError => e
            @upload.errors.add(:base, e.message)
            success = false
            raise ActiveRecord::Rollback
          end
        end
      end
    else
      @upload.errors.add(:base, "File can't be blank")
    end
    if success
      redirect_to upload_path(@upload)
    else
      render_errors @upload
    end
  end
  def show
    redirect_to new_query_path(upload_id: @upload.id)
  end
  def edit
  end
  # Updates metadata and, when a new file or table name is supplied,
  # recreates or renames the backing table transactionally.
  def update
    original_table = @upload.table
    @upload.assign_attributes(upload_params)
    success = nil
    Blazer::Upload.transaction do
      success = @upload.save
      if success
        if params.require(:upload).key?(:file)
          begin
            # new file: drop the old table and recreate from the CSV
            update_file(@upload, drop: original_table)
          rescue CSV::MalformedCSVError, Blazer::UploadError => e
            @upload.errors.add(:base, e.message)
            success = false
            raise ActiveRecord::Rollback
          end
        elsif @upload.table != original_table
          # table renamed without uploading a new file
          Blazer.uploads_connection.execute("ALTER TABLE #{Blazer.uploads_table_name(original_table)} RENAME TO #{Blazer.uploads_connection.quote_table_name(@upload.table)}")
        end
      end
    end
    if success
      redirect_to upload_path(@upload)
    else
      render_errors @upload
    end
  end
  def destroy
    Blazer.uploads_connection.execute("DROP TABLE IF EXISTS #{@upload.table_name}")
    @upload.destroy
    redirect_to uploads_path
  end
  private
  # Parses the uploaded CSV, infers a column type per column, creates
  # the table, and bulk-loads the data via Postgres COPY. Raises
  # Blazer::UploadError (or CSV::MalformedCSVError) on bad input.
  # drop: name of an existing table to drop first (on re-upload).
  def update_file(upload, drop: nil)
    file = params.require(:upload).fetch(:file)
    raise Blazer::UploadError, "File is not a CSV" if file.content_type != "text/csv"
    raise Blazer::UploadError, "File is too large (maximum is 100MB)" if file.size > 100.megabytes
    contents = file.read
    rows = CSV.parse(contents, converters: %i[numeric date date_time])
    # friendly column names
    columns = rows.shift.map { |v| v.to_s.encode("UTF-8").gsub("%", " pct ").parameterize.gsub("-", "_") }
    duplicate_column = columns.find { |c| columns.count(c) > 1 }
    raise Blazer::UploadError, "Duplicate column name: #{duplicate_column}" if duplicate_column
    # pick the narrowest type that fits every non-nil value in a column
    column_types =
      columns.size.times.map do |i|
        values = rows.map { |r| r[i] }.uniq.compact
        if values.all? { |v| v.is_a?(Integer) && v >= -9223372036854775808 && v <= 9223372036854775807 }
          "bigint"
        elsif values.all? { |v| v.is_a?(Numeric) }
          "decimal"
        elsif values.all? { |v| v.is_a?(DateTime) }
          "timestamptz"
        elsif values.all? { |v| v.is_a?(Date) }
          "date"
        else
          "text"
        end
      end
    begin
      # maybe SET LOCAL statement_timeout = '30s'
      # maybe regenerate CSV in Ruby to ensure consistent parsing
      Blazer.uploads_connection.transaction do
        Blazer.uploads_connection.execute("DROP TABLE IF EXISTS #{Blazer.uploads_table_name(drop)}") if drop
        Blazer.uploads_connection.execute("CREATE TABLE #{upload.table_name} (#{columns.map.with_index { |c, i| "#{Blazer.uploads_connection.quote_column_name(c)} #{column_types[i]}" }.join(", ")})")
        # stream the raw CSV straight into the database
        Blazer.uploads_connection.raw_connection.copy_data("COPY #{upload.table_name} FROM STDIN CSV HEADER") do
          Blazer.uploads_connection.raw_connection.put_copy_data(contents)
        end
      end
    rescue ActiveRecord::StatementInvalid => e
      raise Blazer::UploadError, "Table already exists" if e.message.include?("PG::DuplicateTable")
      raise e
    end
  end
  def upload_params
    params.require(:upload).except(:file).permit(:table, :description)
  end
  def set_upload
    @upload = Blazer::Upload.find(params[:id])
  end
  # routes aren't added, but also check here
  def ensure_uploads
    render plain: "Uploads not enabled" unless Blazer.uploads?
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/record.rb | app/models/blazer/record.rb | module Blazer
# Abstract base class for Blazer's own models; not backed by a table.
class Record < ActiveRecord::Base
  self.abstract_class = true
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/dashboard.rb | app/models/blazer/dashboard.rb | module Blazer
# A named, ordered collection of queries.
class Dashboard < Record
  belongs_to :creator, optional: true, class_name: Blazer.user_class.to_s if Blazer.user_class
  has_many :dashboard_queries, dependent: :destroy
  has_many :queries, through: :dashboard_queries
  validates :name, presence: true

  # All variable names used by this dashboard's queries, deduplicated.
  def variables
    # Symbol#to_proc is equivalent to the block form used before
    queries.flat_map(&:variables).uniq
  end

  # URL slug: "<id>-<parameterized-name>" (apostrophes stripped first
  # so they don't become hyphens).
  def to_param
    [id, name.gsub("'", "").parameterize].join("-")
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/query.rb | app/models/blazer/query.rb | module Blazer
class Query < Record
  belongs_to :creator, optional: true, class_name: Blazer.user_class.to_s if Blazer.user_class
  has_many :checks, dependent: :destroy
  has_many :dashboard_queries, dependent: :destroy
  has_many :dashboards, through: :dashboard_queries
  has_many :audits
  validates :statement, presence: true
  # active: status is "active" or unset (older schemas lack the column)
  scope :active, -> { column_names.include?("status") ? where(status: ["active", nil]) : all }
  scope :named, -> { where.not(name: "") }
  # URL slug: "<id>-<name>", parameterized, apostrophes stripped.
  def to_param
    [id, name].compact.join("-").gsub("'", "").parameterize
  end
  # Name without the leading # or * marker and without [bracketed] tags.
  def friendly_name
    name.to_s.sub(/\A[#\*]/, "").gsub(/\[.+\]/, "").strip
  end
  # Editable unless it's a *-prefixed or #-prefixed query owned by
  # someone else; unsaved queries are always editable.
  def editable?(user)
    !persisted? || (name.present? && name.first != "*" && name.first != "#") || user == try(:creator)
  end
  # Variable names referenced by the statement (plus the implicit
  # cohort_period for cohort-analysis queries).
  def variables
    # don't require data_source to be loaded
    variables = Statement.new(statement).variables
    variables += ["cohort_period"] if cohort_analysis?
    variables
  end
  def cohort_analysis?
    # don't require data_source to be loaded
    Statement.new(statement).cohort_analysis?
  end
  # Statement bound to this query's data source, ready to execute.
  def statement_object
    Statement.new(statement, data_source)
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/dashboard_query.rb | app/models/blazer/dashboard_query.rb | module Blazer
# Join model linking dashboards to queries, with an ordering position.
class DashboardQuery < Record
  belongs_to :dashboard
  belongs_to :query
  validates :dashboard_id, presence: true
  validates :query_id, presence: true
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/connection.rb | app/models/blazer/connection.rb | module Blazer
# Abstract ActiveRecord class; not backed by a table.
# NOTE(review): appears to serve as a base for per-data-source
# connections — confirm against the data-source code.
class Connection < ActiveRecord::Base
  self.abstract_class = true
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/uploads_connection.rb | app/models/blazer/uploads_connection.rb | module Blazer
# Abstract connection class for the uploads database; connects to the
# configured uploads URL only when the uploads feature is enabled.
class UploadsConnection < ActiveRecord::Base
  self.abstract_class = true
  establish_connection Blazer.settings["uploads"]["url"] if Blazer.uploads?
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/audit.rb | app/models/blazer/audit.rb | module Blazer
# Audit-log entry tying a user to a query (both associations optional).
class Audit < Record
  belongs_to :user, optional: true, class_name: Blazer.user_class.to_s
  belongs_to :query, optional: true
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/check.rb | app/models/blazer/check.rb | module Blazer
class Check < Record
  belongs_to :creator, optional: true, class_name: Blazer.user_class.to_s if Blazer.user_class
  belongs_to :query
  validates :query_id, presence: true
  validate :validate_emails
  validate :validate_variables, if: -> { query_id_changed? }
  before_validation :set_state
  before_validation :fix_emails
  # Notification email addresses as a normalized array.
  def split_emails
    emails.to_s.downcase.split(",").map(&:strip)
  end
  # Slack channels as a normalized array (empty when Slack is off).
  def split_slack_channels
    if Blazer.slack?
      slack_channels.to_s.downcase.split(",").map(&:strip)
    else
      []
    end
  end
  # Transitions the check's state based on a query result, sends email
  # and Slack notifications on state changes, and persists the record
  # if anything changed. respond_to? guards keep this working across
  # schema versions that lack newer columns.
  def update_state(result)
    check_type =
      if respond_to?(:check_type)
        self.check_type
      elsif respond_to?(:invert)
        # legacy schema: invert meant "alert when no rows"
        invert ? "missing_data" : "bad_data"
      else
        "bad_data"
      end
    message = result.error
    self.state =
      if result.timed_out?
        "timed out"
      elsif result.error
        "error"
      elsif check_type == "anomaly"
        anomaly, message = result.detect_anomaly
        if anomaly.nil?
          "error"
        elsif anomaly
          "failing"
        else
          "passing"
        end
      elsif result.rows.any?
        check_type == "missing_data" ? "passing" : "failing"
      else
        check_type == "missing_data" ? "failing" : "passing"
      end
    self.last_run_at = Time.now if respond_to?(:last_run_at=)
    self.message = message if respond_to?(:message=)
    if respond_to?(:timeouts=)
      # disable the check after 3 consecutive timeouts
      if result.timed_out?
        self.timeouts += 1
        self.state = "disabled" if timeouts >= 3
      else
        self.timeouts = 0
      end
    end
    # do not notify on creation, except when not passing
    if (state_was != "new" || state != "passing") && state != state_was
      Blazer::CheckMailer.state_change(self, state, state_was, result.rows.size, message, result.columns, result.rows.first(10).as_json, result.column_types, check_type).deliver_now if emails.present?
      Blazer::SlackNotifier.state_change(self, state, state_was, result.rows.size, message, check_type)
    end
    save! if changed?
  end
  private
  def set_state
    self.state ||= "new"
  end
  def fix_emails
    # some people like doing ; instead of ,
    # but we know what they mean, so let's fix it
    # also, some people like to use whitespace
    if emails.present?
      self.emails = emails.strip.gsub(/[;\s]/, ",").gsub(/,+/, ", ")
    end
  end
  def validate_emails
    unless split_emails.all? { |e| e =~ /\A\S+@\S+\.\S+\z/ }
      errors.add(:base, "Invalid emails")
    end
  end
  # Checks run unattended, so the query can't require variables.
  def validate_variables
    if query.variables.any?
      errors.add(:base, "Query can't have variables")
    end
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/app/models/blazer/upload.rb | app/models/blazer/upload.rb | module Blazer
class Upload < Record
  belongs_to :creator, optional: true, class_name: Blazer.user_class.to_s if Blazer.user_class
  # table names must be safe to interpolate into DDL (hence the strict
  # format) and fit the 63-character Postgres identifier limit
  validates :table, presence: true, uniqueness: true, format: {with: /\A[a-z0-9_]+\z/, message: "can only contain lowercase letters, numbers, and underscores"}, length: {maximum: 63}
  # Table name as used in SQL, via Blazer.uploads_table_name.
  def table_name
    Blazer.uploads_table_name(table)
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.