repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/dangerfile_dsl.rb | lib/danger/danger_core/dangerfile_dsl.rb | # frozen_string_literal: true
module Danger
  class Dangerfile
    # The public DSL surface mixed into every Dangerfile. Anything defined in
    # this module is considered public API, and in the future documentation
    # will be generated from it via rdoc.
    module DSL
      # @!group Danger Zone
      # Exposes the raw Travis/Circle/Buildkite/GitHub objects, which you can
      # use to pull out extra bits of information. _Warning_: the interfaces
      # of those underlying objects are **not** part of the Dangerfile public
      # API, and are liable to change occasionally on the whims of developers.
      # @return [EnvironmentManager]
      attr_reader :env

      private

      # Eagerly loads project-local plugins when the Dangerfile is built.
      def initialize
        load_default_plugins
      end

      # Requires every Ruby file under ./danger_plugins in the project root,
      # making local plugins available to the Dangerfile.
      def load_default_plugins
        Dir.glob("./danger_plugins/*.rb").each do |plugin_path|
          require File.expand_path(plugin_path)
        end
      end
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/dangerfile.rb | lib/danger/danger_core/dangerfile.rb | # frozen_string_literal: true
# So much was ripped direct from CocoaPods-Core - thanks!
require "danger/danger_core/dangerfile_dsl"
require "danger/danger_core/standard_error"
require "danger/danger_core/message_aggregator"
require "danger/danger_core/plugins/dangerfile_messaging_plugin"
require "danger/danger_core/plugins/dangerfile_danger_plugin"
require "danger/danger_core/plugins/dangerfile_git_plugin"
require "danger/danger_core/plugins/dangerfile_github_plugin"
require "danger/danger_core/plugins/dangerfile_gitlab_plugin"
require "danger/danger_core/plugins/dangerfile_bitbucket_server_plugin"
require "danger/danger_core/plugins/dangerfile_bitbucket_cloud_plugin"
require "danger/danger_core/plugins/dangerfile_vsts_plugin"
require "danger/danger_core/plugins/dangerfile_local_only_plugin"
module Danger
  # The context object a user's Dangerfile is evaluated inside. Wires up
  # plugins, forwards DSL calls to them via method_missing, and drives the
  # setup -> parse -> post-results lifecycle.
  class Dangerfile
    include Danger::Dangerfile::DSL
    # env: the EnvironmentManager for this run.
    # verbose: when truthy, #parse prints the DSL attribute table first.
    # plugins: Hash of plugin class => plugin instance (filled by #refresh_plugins).
    # ui: the Cork::Board used for all terminal output.
    attr_accessor :env, :verbose, :plugins, :ui
    # @return [Pathname] the path where the Dangerfile was loaded from. It is nil
    # if the Dangerfile was generated programmatically.
    #
    attr_accessor :defined_in_file
    # @return [String] a string useful to represent the Dangerfile in a message
    # presented to the user.
    #
    def to_s
      "Dangerfile"
    end
    # These are the classes that are allowed to also use method_missing
    # in order to provide broader plugin support
    def self.core_plugin_classes
      [DangerfileMessagingPlugin]
    end
    # The ones that everything would break without
    def self.essential_plugin_classes
      [DangerfileMessagingPlugin, DangerfileGitPlugin, DangerfileDangerPlugin, DangerfileGitHubPlugin, DangerfileGitLabPlugin, DangerfileBitbucketServerPlugin, DangerfileBitbucketCloudPlugin, DangerfileVSTSPlugin, DangerfileLocalOnlyPlugin]
    end
    # Both of these methods exist on all objects
    # http://ruby-doc.org/core-2.2.3/Kernel.html#method-i-warn
    # http://ruby-doc.org/core-2.2.3/Kernel.html#method-i-fail
    # However, as we're using using them in the DSL, they won't
    # get method_missing called correctly without overriding them.
    def warn(*args, **kargs, &blk)
      method_missing(:warn, *args, **kargs, &blk)
    end
    def fail(*args, **kargs, &blk)
      method_missing(:fail, *args, **kargs, &blk)
    end
    # When an undefined method is called, we check to see if it's something
    # that the core DSLs have, then starts looking at plugins support.
    # Keyword arguments are only forwarded when actually present, so plugin
    # methods that accept no kwargs are still callable.
    def method_missing(method_sym, *arguments, **keyword_arguments, &_block)
      @core_plugins.each do |plugin|
        if plugin.public_methods(false).include?(method_sym)
          if keyword_arguments.empty?
            return plugin.send(method_sym, *arguments)
          else
            return plugin.send(method_sym, *arguments, **keyword_arguments)
          end
        end
      end
      super
    end
    # cork_board not being set comes from plugins #585
    def initialize(env_manager, cork_board = nil)
      @plugins = {}
      @core_plugins = []
      @ui = cork_board || Cork::Board.new(silent: false, verbose: false)
      # Triggers the core plugins
      @env = env_manager
      # Triggers local plugins from the root of a project
      Dir["./danger_plugins/*.rb"].each do |file|
        require File.expand_path(file)
      end
      refresh_plugins if env_manager.pr?
    end
    # Iterate through available plugin classes and initialize them with
    # a reference to this Dangerfile
    def refresh_plugins
      plugins = Plugin.all_plugins
      plugins.each do |klass|
        next if klass.respond_to?(:singleton_class?) && klass.singleton_class?
        # Plugins can return nil from .new to opt out of this run
        # (e.g. a GitHub plugin when the request source is GitLab).
        plugin = klass.new(self)
        next if plugin.nil? || @plugins[klass]
        name = plugin.class.instance_name
        # Exposes the plugin instance on the Dangerfile, e.g. `github`, `git`.
        self.class.send(:attr_reader, name)
        instance_variable_set("@#{name}", plugin)
        @plugins[klass] = plugin
        @core_plugins << plugin if self.class.core_plugin_classes.include? klass
      end
    end
    alias init_plugins refresh_plugins
    # Plugin/method pairs for the core plugins only.
    def core_dsl_attributes
      @core_plugins.map { |plugin| { plugin: plugin, methods: plugin.public_methods(false) } }
    end
    # Plugin/method pairs for every registered non-core plugin.
    def external_dsl_attributes
      plugins.values.reject { |plugin| @core_plugins.include? plugin }.map { |plugin| { plugin: plugin, methods: plugin.public_methods(false) } }
    end
    # Evaluates each plugin's zero-arity public methods and returns
    # [name, value] rows for the info table; large values (API clients,
    # raw JSON, diffs) are replaced with short placeholders.
    def method_values_for_plugin_hashes(plugin_hashes)
      plugin_hashes.flat_map do |plugin_hash|
        plugin = plugin_hash[:plugin]
        methods = plugin_hash[:methods].select { |name| plugin.method(name).parameters.empty? }
        methods.map do |method|
          case method
          when :api
            value = "Octokit::Client"
          when :pr_json, :mr_json
            value = "[Skipped JSON]"
          when :pr_diff, :mr_diff
            value = "[Skipped Diff]"
          else
            value = plugin.send(method)
            value = wrap_text(value.encode("utf-8")) if value.kind_of?(String)
            # So that we either have one value per row
            # or we have [] for an empty array
            value = value.join("\n") if value.kind_of?(Array) && value.count > 0
          end
          [method.to_s, value]
        end
      end
    end
    # Iterates through the DSL's attributes, and table's the output
    def print_known_info
      rows = []
      rows += method_values_for_plugin_hashes(core_dsl_attributes)
      rows << ["---", "---"]
      rows += method_values_for_plugin_hashes(external_dsl_attributes)
      rows << ["---", "---"]
      rows << ["SCM", env.scm.class]
      rows << ["Source", env.ci_source.class]
      rows << ["Requests", env.request_source.class]
      rows << ["Base Commit", env.meta_info_for_base]
      rows << ["Head Commit", env.meta_info_for_head]
      params = {}
      params[:rows] = rows.each { |current| current[0] = current[0].yellow }
      params[:title] = "Danger v#{Danger::VERSION}\nDSL Attributes".green
      ui.section("Info:") do
        ui.puts
        table = Terminal::Table.new(params)
        table.align_column(0, :right)
        ui.puts table
        ui.puts
      end
    end
    # Parses the file at a path, optionally takes the content of the file for DI
    #
    def parse(path, contents = nil)
      print_known_info if verbose
      contents ||= File.open(path, "r:utf-8", &:read)
      # Work around for Rubinius incomplete encoding in 1.9 mode
      if contents.respond_to?(:encoding) && contents.encoding.name != "UTF-8"
        contents.encode!("UTF-8")
      end
      # Smart quotes (often introduced by TextEdit) would break eval; replace
      # them with plain ASCII quotes before evaluating the Dangerfile.
      sanitised_contents = contents.tr("“”‘’‛", %(""'''))
      if sanitised_contents != contents
        contents = sanitised_contents
        ui.puts "Your #{path.basename} has had smart quotes sanitised. " \
          "To avoid issues in the future, you should not use " \
          "TextEdit for editing it. If you are not using TextEdit, " \
          "you should turn off smart quotes in your editor of choice.".red
      end
      if contents.include?("puts")
        ui.puts "You used `puts` in your Dangerfile. To print out text to GitHub use `message` instead"
      end
      self.defined_in_file = path
      instance_eval do
        # rubocop:disable Lint/RescueException
        eval_file(contents, path)
      rescue Exception => e
        # Any failure inside the user's Dangerfile is wrapped in a DSLError
        # so the user sees which file and line were at fault.
        message = "Invalid `#{path.basename}` file: #{e.message}"
        raise DSLError.new(message, path, e.backtrace, contents)
        # rubocop:enable Lint/RescueException
      end
    end
    # Prints collected errors/warnings/messages and markdowns to the terminal.
    # Does nothing when there is nothing to report.
    def print_results
      status = status_report
      violations = violation_report
      return if (violations[:errors] + violations[:warnings] + violations[:messages] + status[:markdowns]).count.zero?
      ui.section("Results:") do
        %i(errors warnings messages).each do |key|
          formatted = "#{key.to_s.capitalize}:"
          title = case key
                  when :errors
                    formatted.red
                  when :warnings
                    formatted.yellow
                  else
                    formatted
                  end
          rows = violations[key].uniq
          print_list(title, rows)
        end
        if status[:markdowns].count > 0
          ui.title("Markdown:") do
            status[:markdowns].each do |current_markdown|
              ui.puts "#{current_markdown.file}\#L#{current_markdown.line}" if current_markdown.file && current_markdown.line
              ui.puts current_markdown.message
            end
          end
        end
      end
    end
    # @return [Boolean] whether any errors were reported during this run.
    def failed?
      violation_report[:errors].count > 0
    end
    # Pushes the collected report back to the review site. Uses the
    # line-by-line API when the request source supports it and the
    # DANGER_MESSAGE_AGGREGATION env var is set; otherwise posts a
    # single aggregated comment.
    def post_results(danger_id, new_comment, remove_previous_comments)
      violations = violation_report
      report = {
        warnings: violations[:warnings].uniq,
        errors: violations[:errors].uniq,
        messages: violations[:messages].uniq,
        markdowns: status_report[:markdowns].uniq,
        danger_id: danger_id
      }
      if env.request_source.respond_to?(:update_pr_by_line!) && ENV["DANGER_MESSAGE_AGGREGATION"]
        env.request_source.update_pr_by_line!(message_groups: MessageAggregator.aggregate(**report),
                                              new_comment: new_comment,
                                              remove_previous_comments: remove_previous_comments,
                                              danger_id: report[:danger_id])
      else
        env.request_source.update_pull_request!(
          **report,
          new_comment: new_comment,
          remove_previous_comments: remove_previous_comments
        )
      end
    end
    # Ensures the Danger-specific git branches exist, then computes the
    # diff between base and head for the current folder.
    def setup_for_running(base_branch, head_branch)
      env.ensure_danger_branches_are_setup
      env.scm.diff_for_folder(".", from: base_branch, to: head_branch, lookup_top_level: true)
    end
    # Runs the full lifecycle: plugin init, git setup, Dangerfile parse,
    # posting results (when report_results), and printing to the terminal.
    # Danger-created git branches are cleaned up even on failure.
    # @return [Boolean] true when the run produced errors (see #failed?).
    def run(base_branch, head_branch, dangerfile_path, danger_id, new_comment, remove_previous_comments, report_results = true)
      # Setup internal state
      init_plugins
      env.fill_environment_vars
      begin
        # Sets up the git environment
        setup_for_running(base_branch, head_branch)
        # Parse the local Dangerfile
        parse(Pathname.new(dangerfile_path))
        # Push results to the API
        # Pass along the details of the run to the request source
        # to send back to the code review site.
        post_results(danger_id, new_comment, remove_previous_comments) if report_results
        # Print results in the terminal
        print_results
      rescue DSLError => e
        # Push exception to the API and re-raise
        post_exception(e, danger_id, new_comment) unless danger_id.nil?
        raise
      ensure
        # Makes sure that Danger specific git branches are cleaned
        env.clean_up
      end
      failed?
    end
    private
    # Evaluates the Dangerfile source with the original path as the file
    # name, so backtraces point at the user's Dangerfile.
    def eval_file(contents, path)
      eval(contents, nil, path.to_s) # rubocop:disable Security/Eval
    end
    # Prints one titled checklist of violations; inline violations are
    # prefixed with their file#Lline location.
    def print_list(title, rows)
      unless rows.empty?
        ui.title(title) do
          rows.each do |row|
            if row.file && row.line
              path = "#{row.file}\#L#{row.line}: "
            else
              path = ""
            end
            ui.puts("- [ ] #{path}#{row.message}")
          end
        end
      end
    end
    # Hard-wraps text into chunks of at most `width` characters, one per line.
    def wrap_text(text, width = 80)
      text.gsub(/.{,#{width}}/) do |line|
        line.strip!
        "#{line}\n"
      end
    end
    # Posts a Dangerfile evaluation error back to the PR as markdown,
    # unless explicitly disabled via DANGER_DO_NOT_POST_INVALID_DANGERFILE_ERROR.
    def post_exception(ex, danger_id, new_comment)
      return if ENV["DANGER_DO_NOT_POST_INVALID_DANGERFILE_ERROR"]
      return if danger_id.nil?
      env.request_source.update_pull_request!(
        danger_id: danger_id,
        new_comment: new_comment,
        markdowns: [ex.to_markdown]
      )
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/executor.rb | lib/danger/danger_core/executor.rb | # frozen_string_literal: true
module Danger
  # Top-level driver for a Danger run: validates that the current environment
  # is a CI run on a pull/merge request, builds the collaborators, and
  # executes the Dangerfile.
  class Executor
    def initialize(system_env)
      @system_env = system_env
    end

    # Runs a full Danger pass. Collaborators (env, dm, cork) can be injected
    # for testing; otherwise they are constructed here.
    def run(env: nil,
            dm: nil,
            cork: nil,
            base: nil,
            head: nil,
            dangerfile_path: nil,
            danger_id: nil,
            new_comment: nil,
            fail_on_errors: nil,
            fail_if_no_pr: nil,
            remove_previous_comments: nil)
      # Build a Cork board when none was injected, as it's likely a test.
      cork ||= Cork::Board.new(silent: false, verbose: false)
      # Bail out early unless we are on CI and inside a PR.
      validate!(cork, fail_if_no_pr: fail_if_no_pr)
      # OK, we now know that Danger can run in this environment.
      env ||= EnvironmentManager.new(system_env, cork, danger_id)
      dm ||= Dangerfile.new(env, cork)
      danger_failed = dm.run(
        base_branch(base),
        head_branch(head),
        dangerfile_path,
        danger_id,
        new_comment,
        remove_previous_comments
      )
      # By default Danger reports failure through the status API and lets the
      # build continue; fail_on_errors opts into hard-failing the process.
      exit(1) if fail_on_errors && danger_failed
    end

    def validate!(cork, fail_if_no_pr: false)
      validate_ci!
      validate_pr!(cork, fail_if_no_pr)
    end

    private

    attr_reader :system_env

    # Could we find a CI source at all?
    def validate_ci!
      return if EnvironmentManager.local_ci_source(system_env)
      abort("Could not find the type of CI for Danger to run on.".red)
    end

    # Could we determine that the CI source is inside a PR?
    def validate_pr!(cork, fail_if_no_pr)
      return if EnvironmentManager.pr?(system_env)
      ci_name = EnvironmentManager.local_ci_source(system_env).name.split("::").last
      msg = "Not a #{ci_name} #{commit_request(ci_name)} - skipping `danger` run. "
      # circle won't run danger properly if the commit is pushed and build runs before the PR exists
      # https://danger.systems/guides/troubleshooting.html#circle-ci-doesnt-run-my-build-consistently
      # the best solution is to enable `fail_if_no_pr`, and then re-run the job once the PR is up
      if ci_name == "CircleCI"
        msg += "If you only created the PR recently, try re-running your workflow."
      end
      cork.puts msg.strip.yellow
      exit(fail_if_no_pr ? 1 : 0)
    end

    def base_branch(user_specified_base_branch)
      user_specified_base_branch || EnvironmentManager.danger_base_branch
    end

    def head_branch(user_specified_head_branch)
      user_specified_head_branch || EnvironmentManager.danger_head_branch
    end

    # GitLab calls them Merge Requests; everything else says Pull Request.
    def commit_request(ci_name)
      ci_name == "GitLabCI" ? "Merge Request" : "Pull Request"
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/messages/base.rb | lib/danger/danger_core/messages/base.rb | # frozen_string_literal: true
module Danger
  # Shared base class for everything Danger can emit (violations and
  # markdowns). Holds the message text plus an optional file/line anchor.
  class BaseMessage
    attr_accessor :message, :file, :line, :type

    # @param type [Symbol] the kind of message (e.g. :warning, :error, :markdown)
    # @param message [String] the message body
    # @param file [String, nil] path the message is anchored to, when inline
    # @param line [Integer, nil] line number the message is anchored to, when inline
    def initialize(type:, message:, file: nil, line: nil)
      @type = type
      @message = message
      @file = file
      @line = line
    end

    # Orders two messages by file first, then line; nils sort earlier.
    # @return [Integer] -1, 0 or 1
    def compare_by_file_and_line(other)
      order = cmp_nils(file, other.file)
      return order unless order.nil?
      order = file <=> other.file
      return order unless order.zero?
      order = cmp_nils(line, other.line)
      return order unless order.nil?
      line <=> other.line
    end

    # compares a and b based entirely on whether one or the other is nil
    # arguments are in the same order as `a <=> b`
    # nil is sorted earlier - so cmp_nils(nil, 1) => -1
    #
    # If neither are nil, rather than returning `a <=> b` which would seem
    # like the obvious shortcut, `nil` is returned.
    # This allows us to distinguish between cmp_nils returning 0 for a
    # comparison of filenames, which means "a comparison on the lines is
    # meaningless - you cannot have a line number for a nil file - so they
    # should be sorted the same", and a <=> b returning 0, which means "the
    # files are the same, so compare on the lines"
    #
    # @return 0, 1, -1, or nil
    def cmp_nils(a, b)
      if a.nil? && b.nil?
        0
      elsif a.nil?
        -1
      elsif b.nil?
        1
      end
    end

    # Delegates to #==, which subclasses define with value semantics.
    def eql?(other)
      return self == other
    end

    # @return [Boolean] returns true if anchored to a file or line, false otherwise
    def inline?
      # Fix: previously returned the raw `file || line` value (a String,
      # Integer, or nil) despite the documented Boolean contract; coerce to a
      # real boolean. Truthiness is unchanged for all callers.
      !(file.nil? && line.nil?)
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/messages/markdown.rb | lib/danger/danger_core/messages/markdown.rb | # frozen_string_literal: true
require "danger/danger_core/messages/base"
module Danger
  # A free-form markdown message rendered in Danger's comment, optionally
  # anchored to a file and line.
  class Markdown < BaseMessage
    # @param message [String] the markdown body
    # @param file [String, nil] optional file anchor
    # @param line [Integer, nil] optional line anchor
    def initialize(message, file = nil, line = nil)
      super(type: :markdown, message: message, file: file, line: line)
    end

    # Value equality on message, file and line.
    def ==(other)
      return false if other.nil?
      return false unless other.kind_of? self.class
      other.message == message &&
        other.file == file &&
        other.line == line
    end

    # Hash consistent with #== so Markdowns de-duplicate in Hashes/uniq.
    def hash
      h = 1
      h = h * 31 + message.hash
      h = h * 17 + file.hash
      h * 17 + line.hash
    end

    def to_s
      extra = []
      # Fix: these conditions were inverted (`unless file` / `unless line`),
      # so the file/line details were printed exactly when they were missing
      # (yielding "file: " for nil) and omitted when present. Now matches the
      # `if`-based form used by Violation#to_s.
      extra << "file: #{file}" if file
      extra << "line: #{line}" if line
      "Markdown #{message} { #{extra.join ', '} }"
    end

    # Markdown sorts after every non-markdown message type; otherwise
    # falls back to file/line ordering.
    def <=>(other)
      return 1 if other.type != :markdown
      compare_by_file_and_line(other)
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/messages/violation.rb | lib/danger/danger_core/messages/violation.rb | # frozen_string_literal: true
require "danger/danger_core/messages/base"
module Danger
  # A user-visible message of type :error, :warning or :message, which can be
  # "sticky" (kept across runs) and optionally anchored to a file/line.
  class Violation < BaseMessage
    VALID_TYPES = %I[error warning message].freeze

    attr_accessor :sticky

    # @raise [ArgumentError] when type is not one of VALID_TYPES
    def initialize(message, sticky, file = nil, line = nil, type: :warning)
      raise ArgumentError unless VALID_TYPES.include?(type)
      super(type: type, message: message, file: file, line: line)
      self.sticky = sticky
    end

    # Value equality across message, stickiness, file and line.
    def ==(other)
      return false if other.nil?
      return false unless other.kind_of? self.class
      [message, sticky, file, line] == [other.message, other.sticky, other.file, other.line]
    end

    # Hash consistent with #== so Violations de-duplicate in Hashes/uniq.
    def hash
      [[31, message], [13, sticky], [17, file], [17, line]].reduce(1) do |acc, (prime, part)|
        acc * prime + part.hash
      end
    end

    # Orders by type severity (error < warning < message < markdown), then
    # by file and line.
    def <=>(other)
      ordering = VALID_TYPES + [:markdown]
      by_type = ordering.index(type) <=> ordering.index(other.type)
      by_type.zero? ? compare_by_file_and_line(other) : by_type
    end

    def to_s
      details = ["sticky: #{sticky}"]
      details << "file: #{file}" if file
      details << "line: #{line}" if line
      details << "type: #{type}"
      "Violation #{message} { #{details.join ', '} }"
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_github_plugin.rb | lib/danger/danger_core/plugins/dangerfile_github_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
module Danger
# Handles interacting with GitHub inside a Dangerfile. Provides a few functions which wrap `pr_json` and also
# through a few standard functions to simplify your code.
#
# @example Warn when a PR is classed as work in progress
#
# warn "PR is classed as Work in Progress" if github.pr_title.include? "[WIP]"
#
# @example Declare a PR to be simple to avoid specific Danger rules
#
# declared_trivial = (github.pr_title + github.pr_body).include?("#trivial")
#
# @example Ensure that labels have been used on the PR
#
# failure "Please add labels to this PR" if github.pr_labels.empty?
#
# @example Check if a user is in a specific GitHub org, and message them if so
#
# unless github.api.organization_member?('danger', github.pr_author)
# message "@#{github.pr_author} is not a contributor yet, would you like to join the Danger org?"
# end
#
# @example Ensure there is a summary for a PR
#
# failure "Please provide a summary in the Pull Request description" if github.pr_body.length < 5
#
# @example Only accept PRs to the develop branch
#
# failure "Please re-submit this PR to develop, we may have already fixed your issue." if github.branch_for_base != "develop"
#
# @example Note when PRs don't reference a milestone, which goes away when it does
#
# has_milestone = github.pr_json["milestone"] != nil
# warn("This PR does not refer to an existing milestone", sticky: false) unless has_milestone
#
# @example Note when a PR cannot be manually merged, which goes away when you can
#
# can_merge = github.pr_json["mergeable"]
# warn("This PR cannot be merged yet.", sticky: false) unless can_merge
#
# @example Highlight when a celebrity makes a pull request
#
# message "Welcome, Danger." if github.pr_author == "dangermcshane"
#
# @example Ensure that all PRs have an assignee
#
# warn "This PR does not have any assignees yet." unless github.pr_json["assignee"]
#
# @example Send a message with links to a collection of specific files
#
# if git.modified_files.include? "config/*.js"
# config_files = git.modified_files.select { |path| path.include? "config/" }
# message "This PR changes #{ github.html_link(config_files) }"
# end
#
# @example Highlight with a clickable link if a Package.json is changed
#
# warn "#{github.html_link("Package.json")} was edited." if git.modified_files.include? "Package.json"
#
# @example Note an issue with a particular line on a file using the #L[num] syntax, e.g. `#L23`
#
# linter_json = `my_linter lint "file"`
# results = JSON.parse linter_json
# unless results.empty?
# file, line, warning = result.first
# warn "#{github.html_link("#{file}#L#{line}")} has linter issue: #{warning}."
# end
#
#
# @see danger/danger
# @tags core, github
#
class DangerfileGitHubPlugin < Plugin
  # So that this init can fail.
  # Returns nil (instead of an instance) when the run's request source is not
  # GitHub, which tells the plugin registry to skip this plugin.
  def self.new(dangerfile)
    return nil if dangerfile.env.request_source.class != Danger::RequestSources::GitHub
    super
  end

  def initialize(dangerfile)
    super(dangerfile)
    @github = dangerfile.env.request_source
  end

  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "github"
  end

  # @!group PR Review
  #
  # In Beta. Provides access to creating a GitHub Review instead of a typical GitHub comment.
  #
  # To use you announce the start of your review, and the end via the `start` and `submit` functions,
  # for example:
  #
  # github.review.start
  # github.review.fail(message)
  # github.review.warn(message)
  # github.review.message(message)
  # github.review.markdown(message)
  # github.review.submit
  #
  # @return [ReviewDSL]
  def review
    @github.review
  end

  # @!group PR Metadata
  # The title of the Pull Request.
  # @return [String]
  #
  def pr_title
    # Consistency fix: go through the pr_json reader like the other pr_*
    # accessors (pr_body, pr_author, ...) rather than reaching into @github.
    pr_json["title"].to_s
  end

  # @!group PR Metadata
  # The body text of the Pull Request.
  # @return [String]
  #
  def pr_body
    pr_json["body"].to_s
  end

  # @!group PR Metadata
  # The username of the author of the Pull Request.
  # @return [String]
  #
  def pr_author
    pr_json["user"]["login"].to_s
  end

  # @!group PR Metadata
  # The labels assigned to the Pull Request.
  # @return [String]
  #
  def pr_labels
    # Labels live on the issue JSON, not the PR JSON.
    @github.issue_json["labels"].map { |l| l[:name] }
  end

  # @!group PR Metadata
  # Whether the PR is a Draft.
  # @return [Boolean]
  #
  def pr_draft?
    pr_json["draft"] == true
  end

  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged into.
  # @return [String]
  #
  def branch_for_base
    pr_json["base"]["ref"]
  end

  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged from.
  # @return [String]
  #
  def branch_for_head
    pr_json["head"]["ref"]
  end

  # @!group PR Commit Metadata
  # The base commit to which the PR is going to be merged as a parent.
  # @return [String]
  #
  def base_commit
    pr_json["base"]["sha"]
  end

  # @!group PR Commit Metadata
  # The head commit to which the PR is requesting to be merged from.
  # @return [String]
  #
  def head_commit
    pr_json["head"]["sha"]
  end

  # @!group GitHub Misc
  # The hash that represents the PR's JSON. For an example of what this looks like
  # see the [Danger Fixture'd one](https://raw.githubusercontent.com/danger/danger/master/spec/fixtures/github_api/pr_response.json).
  # @return [Hash]
  #
  def pr_json
    @github.pr_json
  end

  # @!group GitHub Misc
  # Provides access to the GitHub API client used inside Danger. Making
  # it easy to use the GitHub API inside a Dangerfile.
  # @return [Octokit::Client]
  def api
    @github.client
  end

  # @!group PR Content
  # The unified diff produced by Github for this PR
  # see [Unified diff](https://en.wikipedia.org/wiki/Diff_utility#Unified_format)
  # @return [String]
  def pr_diff
    @github.pr_diff
  end

  # @!group GitHub Misc
  # Returns a list of HTML anchors for a file, or files in the head repository. An example would be:
  # `<a href='https://github.com/artsy/eigen/blob/561827e46167077b5e53515b4b7349b8ae04610b/file.txt'>file.txt</a>`. It returns a string of multiple anchors if passed an array.
  # @param [String or Array<String>] paths
  # A list of strings to convert to github anchors
  # @param [Bool] full_path
  # Shows the full path as the link's text, defaults to `true`.
  #
  # @return [String]
  def html_link(paths, full_path: true)
    paths = [paths] unless paths.kind_of?(Array)
    commit = head_commit
    repo = pr_json["head"]["repo"]["html_url"]
    paths = paths.map do |path|
      url_path = path.start_with?("/") ? path : "/#{path}"
      text = full_path ? path : File.basename(path)
      create_link("#{repo}/blob/#{commit}#{url_path}", text)
    end
    # Single link as-is; multiple links joined as "a, b & c".
    return paths.first if paths.count < 2
    "#{paths.first(paths.count - 1).join(', ')} & #{paths.last}"
  end

  # @!group GitHub Misc
  # Use to ignore inline messages which lay outside a diff's range, thereby not posting them in the main comment.
  # You can set hash to change behavior per each kinds. (ex. `{warning: true, error: false}`)
  # @param [Bool or Hash<Symbol, Bool>] dismiss
  # Ignore out of range inline messages, defaults to `true`
  #
  # @return [void]
  def dismiss_out_of_range_messages(dismiss = true)
    if dismiss.kind_of?(Hash)
      @github.dismiss_out_of_range_messages = dismiss
    elsif dismiss.kind_of?(TrueClass)
      @github.dismiss_out_of_range_messages = true
    elsif dismiss.kind_of?(FalseClass)
      @github.dismiss_out_of_range_messages = false
    end
  end

  # Provide mr_* aliases so Dangerfiles written against GitLab terminology
  # also work here.
  %i(title body author labels json).each do |suffix|
    alias_method "mr_#{suffix}".to_sym, "pr_#{suffix}".to_sym
  end

  private

  def create_link(href, text)
    "<a href='#{href}'>#{text}</a>"
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_git_plugin.rb | lib/danger/danger_core/plugins/dangerfile_git_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
require "danger/core_ext/file_list"
# Danger
module Danger
# Handles interacting with git inside a Dangerfile. Providing access to files that have changed, and useful statistics. Also provides
# access to the commits in the form of [Git::Log](https://github.com/schacon/ruby-git/blob/master/lib/git/log.rb) objects.
#
# @example Do something to all new and edited markdown files
#
# markdowns = (git.added_files + git.modified_files)
# do_something markdowns.select{ |file| file.end_with? "md" }
#
# @example Don't allow a file to be deleted
#
# deleted = git.deleted_files.include? "my/favourite.file"
# failure "Don't delete my precious" if deleted
#
# @example Fail really big diffs
#
# failure "We cannot handle the scale of this PR" if git.lines_of_code > 50_000
#
# @example Warn when there are merge commits in the diff
#
# if git.commits.any? { |c| c.parents.count > 1 }
# warn 'Please rebase to get rid of the merge commits in this PR'
# end
#
# @example Warn when somebody tries to add nokogiri to the project
#
# diff = git.diff_for_file("Gemfile.lock")
# if diff && diff.patch =~ "nokogiri"
# warn 'Please do not add nokogiri to the project. Thank you.'
# end
#
# @see danger/danger
# @tags core, git
class DangerfileGitPlugin < Plugin
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "git"
  end

  def initialize(dangerfile)
    super(dangerfile)
    raise unless dangerfile.env.scm.class == Danger::GitRepo # rubocop:disable Style/ClassEqualityComparison
    @git = dangerfile.env.scm
  end

  # @!group Git Files
  # Paths for files that were added during the diff
  # @return [FileList<String>] an [Array] subclass
  #
  def added_files
    file_list_for_change_type("new")
  end

  # @!group Git Files
  # Paths for files that were removed during the diff
  # @return [FileList<String>] an [Array] subclass
  #
  def deleted_files
    file_list_for_change_type("deleted")
  end

  # @!group Git Files
  # Paths for files that changed during the diff
  # @return [FileList<String>] an [Array] subclass
  #
  def modified_files
    file_list_for_change_type("modified")
  end

  # @!group Git Metadata
  # List of renamed files
  # @return [Array<Hash>] with keys `:before` and `:after`
  #
  def renamed_files
    @git.renamed_files
  end

  # @!group Git Metadata
  # Whole diff
  # @return [Git::Diff] from the gem `git`
  #
  def diff
    @git.diff
  end

  # @!group Git Metadata
  # The overall lines of code added/removed in the diff
  # @return [Fixnum]
  #
  def lines_of_code
    @git.diff.lines
  end

  # @!group Git Metadata
  # The overall lines of code removed in the diff
  # @return [Fixnum]
  #
  def deletions
    @git.diff.deletions
  end

  # @!group Git Metadata
  # The overall lines of code added in the diff
  # @return [Fixnum]
  #
  def insertions
    @git.diff.insertions
  end

  # @!group Git Metadata
  # The log of commits inside the diff
  # @return [Git::Log] from the gem `git`
  #
  def commits
    @git.log.to_a
  end

  # @!group Git Metadata
  # Details for a specific file in this diff
  # @return [Git::Diff::DiffFile] from the gem `git`, or nil when the file is
  # not part of this diff
  #
  def diff_for_file(file)
    return nil unless touched_in_diff?(file)
    @git.diff[file]
  end

  # @!group Git Metadata
  # Statistics for a specific file in this diff
  # @return [Hash] with keys `:insertions`, `:deletions` giving line counts, and `:before`, `:after` giving file contents, or nil if the file has no changes or does not exist
  #
  def info_for_file(file)
    return nil unless touched_in_diff?(file)
    stats = @git.diff.stats[:files][file]
    file_diff = @git.diff[file]
    # Contents only make sense for files that existed before and after.
    contents_available = !(added_files.include?(file) || deleted_files.include?(file))
    {
      insertions: stats[:insertions],
      deletions: stats[:deletions],
      before: contents_available ? file_diff.blob(:src).contents : nil,
      after: contents_available ? file_diff.blob(:dst).contents : nil
    }
  end

  # @!group Git Metadata
  # List of remote tags
  # @return [Enumerator<String>] the lines of `git tag` output
  #
  def tags
    @git.tags.each_line
  end

  private

  # Collects the paths of all diff entries with the given change type
  # ("new", "deleted" or "modified") as a FileList.
  def file_list_for_change_type(change_type)
    Danger::FileList.new(@git.diff.select { |entry| entry.type == change_type }.map(&:path))
  end

  # Whether the given path appears anywhere in this diff.
  def touched_in_diff?(file)
    (added_files + modified_files + deleted_files).include?(file)
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_gitlab_plugin.rb | lib/danger/danger_core/plugins/dangerfile_gitlab_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
module Danger
# Handles interacting with GitLab inside a Dangerfile. Provides a few functions which wrap `mr_json` and also
# through a few standard functions to simplify your code.
#
# @example Warn when an MR is classed as work in progress.
#
# warn "MR is classed as Work in Progress" if gitlab.mr_title.include? "[WIP]"
#
# @example Declare a MR to be simple to avoid specific Danger rules.
#
# declared_trivial = (gitlab.mr_title + gitlab.mr_body).include?("#trivial")
#
# @example Ensure that labels have been applied to the MR.
#
# failure "Please add labels to this MR" if gitlab.mr_labels.empty?
#
# @example Ensure that all MRs have an assignee.
#
# warn "This MR does not have any assignees yet." unless gitlab.mr_json["assignee"]
#
# @example Ensure there is a summary for a MR.
#
# failure "Please provide a summary in the Merge Request description" if gitlab.mr_body.length < 5
#
# @example Only accept MRs to the develop branch.
#
# failure "Please re-submit this MR to develop, we may have already fixed your issue." if gitlab.branch_for_merge != "develop"
#
# @example Note when MRs don't reference a milestone, make the warning stick around on subsequent runs
#
# has_milestone = gitlab.mr_json["milestone"] != nil
# warn("This MR does not refer to an existing milestone", sticky: true) unless has_milestone
#
# @example Note when a MR cannot be manually merged
#
# can_merge = gitlab.mr_json["mergeable"]
# warn("This MR cannot be merged yet.") unless can_merge
#
# @example Highlight when a celebrity makes a merge request.
#
# message "Welcome, Danger." if gitlab.mr_author == "dangermcshane"
#
# @example Send a message with links to a collection of specific files.
#
# if git.modified_files.include? "config/*.js"
# config_files = git.modified_files.select { |path| path.include? "config/" }
# message "This MR changes #{ gitlab.html_link(config_files) }"
# end
#
# @example Highlight with a clickable link if a Package.json is changed.
#
# warn "#{gitlab.html_link("Package.json")} was edited." if git.modified_files.include? "Package.json"
#
# @example Select a random group member as assignee if no assignee is selected
#
# if gitlab.mr_json["assignee"].nil?
# reviewer = gitlab.api.group_members(gitlab.api.merge_request_approvals(project_id, mr_id).to_hash["approver_groups"].first["group"]["id"]).sample
# if gitlab.api.group_members(gitlab.api.merge_request_approvals(project_id, mr_id).to_hash["approver_groups"].first["group"]["id"]).length > 1
# while reviewer.to_hash["id"] == gitlab.mr_json["author"]["id"] do
# reviewer = gitlab.api.group_members(gitlab.api.merge_request_approvals(project_id, mr_id).to_hash["approver_groups"].first["group"]["id"]).sample
# end
# end
# message "Reviewer roulete rolled for: #{reviewer.to_hash['name']} (@#{reviewer.to_hash['username']})"
# gitlab.api.update_merge_request(project_id, mr_id, { assignee_id: reviewer.to_hash["id"] })
# end
#
#
# @see danger/danger
# @tags core, gitlab
#
class DangerfileGitLabPlugin < Plugin
  # Factory override so that construction can "fail": returns nil (instead of
  # an instance) when the active request source is not GitLab, which keeps
  # this plugin out of non-GitLab runs entirely.
  def self.new(dangerfile)
    return nil if dangerfile.env.request_source.class != Danger::RequestSources::GitLab
    super
  end
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "gitlab"
  end
  def initialize(dangerfile)
    super(dangerfile)
    # Keep a handle on the GitLab request source; every reader below delegates to it.
    @gitlab = dangerfile.env.request_source
  end
  # @!group MR Metadata
  # The title of the Merge Request
  # @return [String]
  #
  def mr_title
    @gitlab.mr_json.title.to_s
  end
  # @!group MR Metadata
  # The body text of the Merge Request
  # @return [String]
  #
  def mr_body
    @gitlab.mr_json.description.to_s
  end
  # @!group MR Metadata
  # The username of the author of the Merge Request
  # @return [String]
  #
  def mr_author
    @gitlab.mr_json.author.username.to_s
  end
  # @!group MR Metadata
  # The labels assigned to the Merge Request
  # @return [Array<String>]
  #
  def mr_labels
    @gitlab.mr_json.labels
  end
  # @!group MR Content
  # The unified diff produced by GitLab for this MR
  # see [Unified diff](https://en.wikipedia.org/wiki/Diff_utility#Unified_format)
  # @return [String]
  #
  def mr_diff
    @gitlab.mr_diff
  end
  # @!group MR Changes
  # The array of changes
  # @return [Array<Gitlab::ObjectifiedHash>]
  #
  def mr_changes
    @gitlab.mr_changes.changes
  end
  # @!group MR Closes issues
  # The array of issues that this MR closes
  # @return [Array<Gitlab::ObjectifiedHash>]
  #
  def mr_closes_issues
    @gitlab.mr_closes_issues
  end
  # @!group MR Commit Metadata
  # The branch to which the MR is going to be merged into
  # @deprecated Please use {#branch_for_base} instead
  # @return [String]
  #
  def branch_for_merge
    branch_for_base
  end
  # @!group MR Commit Metadata
  # The branch to which the MR is going to be merged into.
  # @return [String]
  #
  def branch_for_base
    @gitlab.mr_json.target_branch
  end
  # @!group MR Commit Metadata
  # The branch to which the MR is going to be merged from.
  # @return [String]
  #
  def branch_for_head
    @gitlab.mr_json.source_branch
  end
  # @!group MR Commit Metadata
  # The base commit to which the MR is going to be merged as a parent
  # @return [String]
  #
  def base_commit
    @gitlab.mr_json.diff_refs.base_sha
  end
  # @!group MR Commit Metadata
  # The head commit to which the MR is requesting to be merged from
  # @return [String]
  #
  def head_commit
    @gitlab.mr_json.diff_refs.head_sha
  end
  # @!group GitLab Misc
  # The hash that represents the MR's JSON. See documentation for the
  # structure [here](http://docs.gitlab.com/ce/api/merge_requests.html#get-single-mr)
  # @return [Hash]
  #
  def mr_json
    @gitlab.mr_json.to_hash
  end
  # @!group GitLab Misc
  # Provides access to the GitLab API client used inside Danger. Making
  # it easy to use the GitLab API inside a Dangerfile. See the gitlab
  # gem's [documentation](http://www.rubydoc.info/gems/gitlab/Gitlab/Client)
  # for accessible methods.
  # @return [GitLab::Client]
  #
  def api
    @gitlab.client
  end
  # @!group GitLab Misc
  # Returns the web_url of the source project.
  # Memoized, so the project lookup hits the GitLab API at most once per run.
  # @return [String]
  #
  def repository_web_url
    @repository_web_url ||= begin
      project = api.project(mr_json["source_project_id"])
      project.web_url
    end
  end
  # @!group GitLab Misc
  # Returns a list of HTML anchors for a file, or files in the head repository. An example would be:
  # `<a href='https://gitlab.com/artsy/eigen/blob/561827e46167077b5e53515b4b7349b8ae04610b/file.txt'>file.txt</a>`. It returns a string of multiple anchors if passed an array.
  # @param [String or Array<String>] paths
  #        A list of strings to convert to gitlab anchors
  # @param [Bool] full_path
  #        Shows the full path as the link's text, defaults to `true`.
  #
  # @return [String]
  #
  def html_link(paths, full_path: true)
    paths = [paths] unless paths.kind_of?(Array)
    commit = head_commit
    paths = paths.map do |path|
      url_path = path.start_with?("/") ? path : "/#{path}"
      text = full_path ? path : File.basename(path)
      create_link("#{repository_web_url}/blob/#{commit}#{url_path}", text)
    end
    # A single link is returned as-is; multiple links are joined as "a, b & c".
    return paths.first if paths.count < 2
    "#{paths.first(paths.count - 1).join(', ')} & #{paths.last}"
  end
  # @!group Gitlab Misc
  # Use to ignore inline messages which lay outside a diff's range, thereby not posting the comment.
  # You can set hash to change behavior per each kinds. (ex. `{warning: true, error: false}`)
  # @param [Bool or Hash<Symbol, Bool>] dismiss
  #        Ignore out of range inline messages, defaults to `true`
  #
  # @return [void]
  def dismiss_out_of_range_messages(dismiss = true)
    # Any argument other than a Hash or an exact true/false is silently ignored.
    if dismiss.kind_of?(Hash)
      @gitlab.dismiss_out_of_range_messages = dismiss
    elsif dismiss.kind_of?(TrueClass)
      @gitlab.dismiss_out_of_range_messages = true
    elsif dismiss.kind_of?(FalseClass)
      @gitlab.dismiss_out_of_range_messages = false
    end
  end
  # Provide pr_-prefixed aliases (pr_title, pr_body, pr_author, pr_labels,
  # pr_json, pr_diff) so Dangerfiles written against the GitHub plugin's API
  # also work against GitLab.
  %i(title body author labels json diff).each do |suffix|
    alias_method "pr_#{suffix}".to_sym, "mr_#{suffix}".to_sym
  end
  private
  # Renders a single HTML anchor tag for `html_link`.
  def create_link(href, text)
    "<a href='#{href}'>#{text}</a>"
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_local_only_plugin.rb | lib/danger/danger_core/plugins/dangerfile_local_only_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
# Danger
module Danger
# Handles interacting with local only plugin inside a Dangerfile.
# It is support plugin for dry_run command and does not expose any methods.
# But you can still use other plugins like git
#
# @example Check that added lines contains agreed form of words
#
# git.diff.each do |chunk|
# chunk.patch.lines.grep(/^\+/).each do |added_line|
# if added_line.gsub!(/(?<cancel>cancel)(?<rest>[^l[[:space:]][[:punct:]]]+)/i, '>>\k<cancel>-l-\k<rest><<')
# fail "Single 'L' for cancellation-alike words in '#{added_line}'"
# end
# end
# end
#
# @see danger/danger
# @tags core, local_only
#
class DangerfileLocalOnlyPlugin < Plugin
  # Construction is refused (nil comes back from `new`) unless the run is
  # backed by the LocalOnly request source, i.e. the `dry_run` command.
  def self.new(dangerfile)
    source_class = dangerfile.env.request_source.class
    return nil if source_class != Danger::RequestSources::LocalOnly
    super
  end
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "local_repo"
  end
  # This plugin exposes no methods of its own; it just records the request
  # source so that dry_run works alongside other plugins such as `git`.
  def initialize(dangerfile)
    super(dangerfile)
    @local_repo = dangerfile.env.request_source
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_danger_plugin.rb | lib/danger/danger_core/plugins/dangerfile_danger_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
module Danger
# A way to interact with Danger herself. Offering APIs to import plugins,
# and Dangerfiles from multiple sources.
#
# @example Import a plugin available over HTTP
#
# device_grid = "https://raw.githubusercontent.com/fastlane/fastlane/master/danger-device_grid/lib/device_grid/plugin.rb"
# danger.import_plugin(device_grid)
#
# @example Import from a local file reference
#
# danger.import_plugin("danger/plugins/watch_plugin.rb")
#
# @example Import all files inside a folder
#
# danger.import_plugin("danger/plugins/*.rb")
#
# @example Run a Dangerfile from inside a sub-folder
#
# danger.import_dangerfile(path: "path/to/Dangerfile")
#
# @example Run a Dangerfile from inside a gem
#
# danger.import_dangerfile(gem: "ruby-grape-danger")
#
# @example Run a Dangerfile from inside a repo
#
# danger.import_dangerfile(gitlab_project_id: 1345)
#
# @example Run a Dangerfile from inside a repo branch and path
#
# danger.import_dangerfile(github: "ruby-grape/danger", branch: "custom", path: "path/to/Dangerfile")
#
# @example Import a plugin available over HTTP
#
# custom_url = "https://custom.bitbucket.com/project-name/Dangerfile?raw"
# danger.import_dangerfile(url: custom_url)
#
# @see danger/danger
# @tags core, plugins
class DangerfileDangerPlugin < Plugin
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "danger"
  end
  # @!group Danger
  # Download a local or remote plugin and make it usable inside the Dangerfile.
  #
  # @param [String] path_or_url
  #        a local path or a https URL to the Ruby file to import
  #        a danger plugin from.
  # @return [void]
  #
  def import_plugin(path_or_url)
    raise "`import_plugin` requires a string" unless path_or_url.kind_of?(String)
    # Anything that doesn't look like an HTTP(S) URL is treated as a local
    # path, which may also be a glob pattern (see #import_local).
    if path_or_url.start_with?("http")
      import_url(path_or_url)
    else
      import_local(path_or_url)
    end
  end
  # @!group Danger
  # Import a Dangerfile.
  #
  # @param [Hash] opts
  # @option opts [String] :github GitHub repo
  # @option opts [String] :gitlab GitLab repo
  # @option opts [String] :gem Gem name
  # @option opts [String] :ref The name of branch, tag or commit
  # @option opts [String] :branch Alias of :ref
  # @option opts [String] :path Path to Dangerfile
  # @return [void]
  def import_dangerfile(opts)
    if opts.kind_of?(String)
      # Legacy call style: `import_dangerfile "org/repo"`. Still honoured,
      # but nudges the user toward the Hash form.
      warn "Use `import_dangerfile(github: '#{opts}')` instead of `import_dangerfile '#{opts}'`."
      import_dangerfile_from_github(opts)
    elsif opts.kind_of?(Hash)
      # :ref takes precedence over its alias :branch when both are given.
      if opts.key?(:github)
        import_dangerfile_from_github(opts[:github], opts[:ref] || opts[:branch], opts[:path])
      elsif opts.key?(:gitlab)
        import_dangerfile_from_gitlab(opts[:gitlab], opts[:ref] || opts[:branch], opts[:path])
      elsif opts.key?(:path)
        import_dangerfile_from_path(opts[:path])
      elsif opts.key?(:gem)
        import_dangerfile_from_gem(opts[:gem])
      elsif opts.key?(:url)
        import_dangerfile_from_url(opts[:url])
      else
        raise "`import` requires a Hash with either :github, :gitlab, :gem, :path or :url"
      end
    else
      raise "`import` requires a Hash"
    end
  end
  # @!group Danger
  # Returns the name of the current SCM Provider being used.
  # @return [Symbol] The name of the SCM Provider used for the active repository.
  def scm_provider
    return :unknown unless env.request_source
    case env.request_source
    when Danger::RequestSources::GitHub
      :github
    when Danger::RequestSources::GitLab
      :gitlab
    when Danger::RequestSources::BitbucketServer
      :bitbucket_server
    when Danger::RequestSources::BitbucketCloud
      :bitbucket_cloud
    when Danger::RequestSources::VSTS
      :vsts
    else
      :unknown
    end
  end
  private
  # @!group Danger
  # Read and execute a local Dangerfile.
  #
  # @param [String] path
  #        A path to a Dangerfile, or to a directory containing one.
  # @return [void]
  #
  def import_dangerfile_from_path(path)
    raise "`import_dangerfile_from_path` requires a string" unless path.kind_of?(String)
    # A directory path is resolved to the "Dangerfile" inside it.
    local_path = File.file?(path) ? path : File.join(path, "Dangerfile")
    @dangerfile.parse(Pathname.new(local_path))
  end
  # @!group Danger
  # Read and execute a Dangerfile from a gem.
  #
  # @param [String] name
  #        The name of the gem that contains a Dangerfile.
  # @return [void]
  #
  def import_dangerfile_from_gem(name)
    raise "`import_dangerfile_from_gem` requires a string" unless name.kind_of?(String)
    spec = Gem::Specification.find_by_name(name)
    import_dangerfile_from_path(spec.gem_dir)
  rescue Gem::MissingSpecError
    raise "`import_dangerfile_from_gem` tried to load `#{name}` and failed, did you forget to include it in your Gemfile?"
  end
  # @!group Danger
  # Download and execute a remote Dangerfile.
  #
  # @param [String] slug
  #        A slug ("org/repo") that represents the repo where the Dangerfile is.
  # @param [String] branch
  #        A branch from repo where the Dangerfile is.
  # @param [String] path
  #        The path at the repo where Dangerfile is.
  # @return [void]
  #
  def import_dangerfile_from_github(slug, branch = nil, path = nil)
    raise "`import_dangerfile_from_github` requires a string" unless slug.kind_of?(String)
    org, repo = slug.split("/")
    download_url = env.request_source.file_url(organisation: org, repository: repo, branch: branch, path: path || "Dangerfile")
    local_path = download(download_url)
    @dangerfile.parse(Pathname.new(local_path))
  end
  # @!group Danger
  # Download and execute a remote Dangerfile.
  #
  # @param [String, Integer] slug_or_project_id
  #        The slug or numeric id of the repo where the Dangerfile is.
  # @param [String] branch
  #        A branch from repo where the Dangerfile is.
  # @param [String] path
  #        The path at the repo where Dangerfile is.
  # @return [void]
  #
  def import_dangerfile_from_gitlab(slug_or_project_id, branch = nil, path = nil)
    download_url = env.request_source.file_url(repository: slug_or_project_id, branch: branch, path: path || "Dangerfile")
    local_path = download(download_url)
    @dangerfile.parse(Pathname.new(local_path))
  end
  # @!group Danger
  # Download and execute a remote Dangerfile.
  #
  # @param [String] url
  #        A https url where the Dangerfile is.
  # @return [void]
  #
  def import_dangerfile_from_url(url)
    raise "`import_dangerfile_from_url` requires a string" unless url.kind_of?(String)
    local_path = download(url)
    @dangerfile.parse(Pathname.new(local_path))
  end
  # @!group Plugins
  # Download a local or remote plugin or Dangerfile.
  # This method will not import the file for you, use plugin.import instead
  #
  # @param [String] path_or_url
  #        a local path or a https URL to the Ruby file to import
  #        a danger plugin from. Plain-http URLs are rejected for security.
  # @return [String] The path to the downloaded Ruby file
  #
  def download(path_or_url)
    raise "`download` requires a string" unless path_or_url.kind_of?(String)
    raise "URL is not https, for security reasons `danger` only supports encrypted requests" if URI.parse(path_or_url).scheme != "https"
    require "tmpdir"
    require "faraday"
    # One Faraday client is lazily built and reused across downloads.
    @http_client ||= Faraday.new do |b|
      b.adapter :net_http
    end
    content = @http_client.get(path_or_url)
    # NOTE: the temp file is not deleted here; it lives until the OS cleans
    # the tmpdir.
    path = File.join(Dir.mktmpdir, "temporary_danger.rb")
    File.write(path, content.body)
    return path
  end
  # @!group Plugins
  # Download a remote plugin and use it locally.
  #
  # @param [String] url
  #        https URL to the Ruby file to use
  # @return [void]
  def import_url(url)
    path = download(url)
    import_local(path)
  end
  # @!group Plugins
  # Import one or more local plugins.
  #
  # @param [String] path
  #        The path to the file to import
  #        Can also be a pattern (./**/*plugin.rb)
  # @return [void]
  def import_local(path)
    Dir[path].each do |file|
      validate_file_contains_plugin!(file) do
        # Without the expand_path it would fail if the path doesn't start with ./
        require File.expand_path(file)
      end
      refresh_plugins
    end
  end
  # Raises an error when the given block does not register a plugin.
  # Detection works by comparing the global plugin count before and after
  # the block runs.
  def validate_file_contains_plugin!(file)
    plugin_count_was = Danger::Plugin.all_plugins.length
    yield
    if Danger::Plugin.all_plugins.length == plugin_count_was
      raise("#{file} doesn't contain any valid danger plugins.")
    end
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_vsts_plugin.rb | lib/danger/danger_core/plugins/dangerfile_vsts_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
module Danger
# Handles interacting with VSTS inside a Dangerfile. Provides a few functions which wrap `pr_json` and also
# through a few standard functions to simplify your code.
#
# @example Warn when a PR is classed as work in progress
#
# warn "PR is classed as Work in Progress" if vsts.pr_title.include? "[WIP]"
#
# @example Declare a PR to be simple to avoid specific Danger rules
#
# declared_trivial = (vsts.pr_title + vsts.pr_body).include?("#trivial")
#
# @example Ensure there is a summary for a PR
#
# failure "Please provide a summary in the Pull Request description" if vsts.pr_body.length < 5
#
# @example Only accept PRs to the develop branch
#
# failure "Please re-submit this PR to develop, we may have already fixed your issue." if vsts.branch_for_base != "develop"
#
# @example Highlight when a celebrity makes a pull request
#
# message "Welcome, Danger." if vsts.pr_author == "dangermcshane"
#
# @example Ensure that all PRs have an assignee
#
#          warn "This PR does not have any assignees yet." if vsts.pr_json["reviewers"].length == 0
#
# @example Send a message with links to a collection of specific files
#
# if git.modified_files.include? "config/*.js"
# config_files = git.modified_files.select { |path| path.include? "config/" }
# message "This PR changes #{ vsts.markdown_link(config_files) }"
# end
#
# @example Highlight with a clickable link if a Package.json is changed
#
# warn "#{vsts.markdown_link("Package.json")} was edited." if git.modified_files.include? "Package.json"
#
# @example Note an issue with a particular line on a file using the #L[num] syntax, e.g. `#L23`
#
# linter_json = `my_linter lint "file"`
# results = JSON.parse linter_json
# unless results.empty?
# file, line, warning = result.first
# warn "#{vsts.markdown_link("#{file}#L#{line}")} has linter issue: #{warning}."
# end
#
#
# @see danger/danger
# @tags core, vsts
#
class DangerfileVSTSPlugin < Plugin
  # Factory override so that construction can "fail": returns nil when the
  # active request source is not VSTS, keeping this plugin out of other runs.
  def self.new(dangerfile)
    return nil if dangerfile.env.request_source.class != Danger::RequestSources::VSTS
    super
  end
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "vsts"
  end
  def initialize(dangerfile)
    super(dangerfile)
    # The VSTS request source; all readers below delegate to it.
    @source = dangerfile.env.request_source
  end
  # @!group VSTS Misc
  # The hash that represents the PR's JSON. For an example of what this looks like
  # see the [Danger Fixture'd one](https://raw.githubusercontent.com/danger/danger/master/spec/fixtures/vsts_api/pr_response.json).
  # @return [Hash]
  def pr_json
    @source.pr_json
  end
  # @!group PR Metadata
  # The title of the Pull Request.
  # @return [String]
  #
  def pr_title
    @source.pr_json[:title].to_s
  end
  # @!group PR Metadata
  # The body text of the Pull Request.
  # @return [String]
  #
  def pr_description
    @source.pr_json[:description].to_s
  end
  alias pr_body pr_description
  # @!group PR Metadata
  # The display name of the author of the Pull Request.
  # @return [String]
  #
  def pr_author
    @source.pr_json[:createdBy][:displayName].to_s
  end
  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged into.
  # @return [String]
  #
  def branch_for_base
    branch_name(:targetRefName)
  end
  # @!group PR Commit Metadata
  # A href that represents the current PR
  # @return [String]
  #
  def pr_link
    repo_path = @source.pr_json[:repository][:remoteUrl].to_s
    pull_request_id = @source.pr_json[:pullRequestId].to_s
    "#{repo_path}/pullRequest/#{pull_request_id}"
  end
  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged from.
  # @return [String]
  #
  def branch_for_head
    branch_name(:sourceRefName)
  end
  # @!group PR Commit Metadata
  # The base commit to which the PR is going to be merged as a parent.
  # @return [String]
  #
  def base_commit
    @source.pr_json[:lastMergeTargetCommit][:commitId].to_s
  end
  # @!group PR Commit Metadata
  # The head commit to which the PR is requesting to be merged from.
  # @return [String]
  #
  def head_commit
    @source.pr_json[:lastMergeSourceCommit][:commitId].to_s
  end
  # @!group VSTS Misc
  # Returns a list of Markdown links for a file, or files in the head repository.
  # It returns a string of multiple links if passed an array.
  # @param [String or Array<String>] paths
  #        A list of strings to convert to Markdown links.
  #        A "#L<num>" suffix (e.g. "file.rb#L23") becomes a line anchor.
  # @param [Bool] full_path
  #        Shows the full path as the link's text, defaults to `true`.
  #
  # @return [String]
  #
  def markdown_link(paths, full_path: true)
    paths = [paths] unless paths.kind_of?(Array)
    commit = head_commit
    repo = pr_json[:repository][:remoteUrl].to_s
    paths = paths.map do |path|
      path, line = path.split("#L")
      url_path = path.start_with?("/") ? path : "/#{path}"
      text = full_path ? path : File.basename(path)
      url_path = url_path.gsub(" ", "%20")
      line_ref = line ? "&line=#{line}" : ""
      create_markdown_link("#{repo}/commit/#{commit}?path=#{url_path}&_a=contents#{line_ref}", text)
    end
    # A single link is returned as-is; multiple links are joined as "a, b & c".
    return paths.first if paths.count < 2
    "#{paths.first(paths.count - 1).join(', ')} & #{paths.last}"
  end
  private
  # Renders one Markdown-style link.
  def create_markdown_link(href, text)
    "[#{text}](#{href})"
  end
  # Extracts "branch" from a "refs/heads/branch" ref stored under `key`;
  # returns nil when the ref does not match that shape.
  def branch_name(key)
    repo_matches = @source.pr_json[key].to_s.match(%r{refs/heads/(.*)})
    repo_matches[1] unless repo_matches.nil?
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_bitbucket_cloud_plugin.rb | lib/danger/danger_core/plugins/dangerfile_bitbucket_cloud_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
module Danger
# Handles interacting with Bitbucket Cloud inside a Dangerfile. Provides a few functions which wrap `pr_json` and also
# through a few standard functions to simplify your code.
#
# @example Warn when a PR is classed as work in progress
#
# warn "PR is classed as Work in Progress" if bitbucket_cloud.pr_title.include? "[WIP]"
#
# @example Declare a PR to be simple to avoid specific Danger rules
#
# declared_trivial = (bitbucket_cloud.pr_title + bitbucket_cloud.pr_body).include?("#trivial")
#
# @example Ensure that labels have been used on the PR
#
# failure "Please add labels to this PR" if bitbucket_cloud.pr_labels.empty?
#
# @example Ensure there is a summary for a PR
#
# failure "Please provide a summary in the Pull Request description" if bitbucket_cloud.pr_body.length < 5
#
# @example Only accept PRs to the develop branch
#
# failure "Please re-submit this PR to develop, we may have already fixed your issue." if bitbucket_cloud.branch_for_base != "develop"
#
# @example Highlight when a celebrity makes a pull request
#
# message "Welcome, Danger." if bitbucket_cloud.pr_author == "dangermcshane"
#
# @example Ensure that all PRs have an assignee
#
# warn "This PR does not have any assignees yet." if bitbucket_cloud.pr_json[:reviewers].length == 0
#
# @example Send a message with links to a collection of specific files
#
# if git.modified_files.include? "config/*.js"
# config_files = git.modified_files.select { |path| path.include? "config/" }
# message "This PR changes #{ bitbucket_cloud.html_link(config_files) }"
# end
#
# @example Highlight with a clickable link if a Package.json is changed
#
# warn "#{bitbucket_cloud.html_link("Package.json")} was edited." if git.modified_files.include? "Package.json"
#
# @see danger/danger
# @tags core, bitbucket_cloud
#
class DangerfileBitbucketCloudPlugin < Plugin
  # Construction is refused (nil comes back from `new`) unless the run is
  # backed by Bitbucket Cloud, so the plugin never appears elsewhere.
  def self.new(dangerfile)
    source_class = dangerfile.env.request_source.class
    return nil if source_class != Danger::RequestSources::BitbucketCloud
    super
  end
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "bitbucket_cloud"
  end
  def initialize(dangerfile)
    super(dangerfile)
    @bs = dangerfile.env.request_source
  end
  # @!group Bitbucket Cloud Misc
  # The hash that represents the PR's JSON. For an example of what this looks like
  # see the [Danger Fixture'd one](https://raw.githubusercontent.com/danger/danger/master/spec/fixtures/bitbucket_cloud_api/pr_response.json).
  # @return [Hash]
  def pr_json
    @bs.pr_json
  end
  # @!group PR Metadata
  # The title of the Pull Request.
  # @return [String]
  #
  def pr_title
    pr_json[:title].to_s
  end
  # @!group PR Metadata
  # The body text of the Pull Request.
  # @return [String]
  #
  def pr_description
    pr_json[:description].to_s
  end
  alias pr_body pr_description
  # @!group PR Metadata
  # The nickname of the author of the Pull Request.
  # @return [String]
  #
  def pr_author
    author = pr_json[:author]
    author[:nickname]
  end
  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged into.
  # @return [String]
  #
  def branch_for_base
    destination = pr_json[:destination]
    destination[:branch][:name]
  end
  # @!group PR Commit Metadata
  # A href that represents the current PR
  # @return [String]
  #
  def pr_link
    links = pr_json[:links]
    links[:self][:href]
  end
  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged from.
  # @return [String]
  #
  def branch_for_head
    source = pr_json[:source]
    source[:branch][:name]
  end
  # @!group PR Commit Metadata
  # The base commit to which the PR is going to be merged as a parent.
  # @return [String]
  #
  def base_commit
    destination = pr_json[:destination]
    destination[:commit][:hash]
  end
  # @!group PR Commit Metadata
  # The head commit to which the PR is requesting to be merged from.
  # @return [String]
  #
  def head_commit
    source = pr_json[:source]
    source[:commit][:hash]
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_bitbucket_server_plugin.rb | lib/danger/danger_core/plugins/dangerfile_bitbucket_server_plugin.rb | # frozen_string_literal: true
require "danger/plugin_support/plugin"
module Danger
# Handles interacting with Bitbucket Server inside a Dangerfile. Provides a few functions which wrap `pr_json` and also
# through a few standard functions to simplify your code.
#
# @example Warn when a PR is classed as work in progress
#
# warn "PR is classed as Work in Progress" if bitbucket_server.pr_title.include? "[WIP]"
#
# @example Declare a PR to be simple to avoid specific Danger rules
#
# declared_trivial = (bitbucket_server.pr_title + bitbucket_server.pr_body).include?("#trivial")
#
# @example Ensure that labels have been used on the PR
#
# failure "Please add labels to this PR" if bitbucket_server.pr_labels.empty?
#
# @example Ensure there is a summary for a PR
#
# failure "Please provide a summary in the Pull Request description" if bitbucket_server.pr_body.length < 5
#
# @example Only accept PRs to the develop branch
#
# failure "Please re-submit this PR to develop, we may have already fixed your issue." if bitbucket_server.branch_for_base != "develop"
#
# @example Highlight when a celebrity makes a pull request
#
# message "Welcome, Danger." if bitbucket_server.pr_author == "dangermcshane"
#
# @example Ensure that all PRs have an assignee
#
# warn "This PR does not have any assignees yet." if bitbucket_server.pr_json[:reviewers].length == 0
#
# @example Send a message with links to a collection of specific files
#
# if git.modified_files.include? "config/*.js"
# config_files = git.modified_files.select { |path| path.include? "config/" }
# message "This PR changes #{ bitbucket_server.html_link(config_files) }"
# end
#
# @example Highlight with a clickable link if a Package.json is changed
#
# warn "#{bitbucket_server.html_link("Package.json")} was edited." if git.modified_files.include? "Package.json"
#
# @see danger/danger
# @tags core, bitbucket_server
#
class DangerfileBitbucketServerPlugin < Plugin
  # Factory override so that construction can "fail": returns nil when the
  # active request source is not Bitbucket Server.
  def self.new(dangerfile)
    return nil if dangerfile.env.request_source.class != Danger::RequestSources::BitbucketServer
    super
  end
  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "bitbucket_server"
  end
  def initialize(dangerfile)
    super(dangerfile)
    # The Bitbucket Server request source; all readers below delegate to it.
    @bs = dangerfile.env.request_source
  end
  # @!group Bitbucket Server Misc
  # The hash that represents the PR's JSON. For an example of what this looks like
  # see the [Danger Fixture'd one](https://raw.githubusercontent.com/danger/danger/master/spec/fixtures/bitbucket_server_api/pr_response.json).
  # @return [Hash]
  def pr_json
    @bs.pr_json
  end
  # @!group PR Metadata
  # The title of the Pull Request.
  # @return [String]
  #
  def pr_title
    @bs.pr_json[:title].to_s
  end
  # @!group PR Metadata
  # The body text of the Pull Request.
  # @return [String]
  #
  def pr_description
    @bs.pr_json[:description].to_s
  end
  alias pr_body pr_description
  # @!group PR Metadata
  # The username (slug) of the author of the Pull Request.
  # @return [String]
  #
  def pr_author
    @bs.pr_json[:author][:user][:slug].to_s
  end
  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged into.
  # @return [String]
  #
  def branch_for_base
    @bs.pr_json[:toRef][:displayId].to_s
  end
  # @!group PR Commit Metadata
  # A href that represents the current PR
  # @return [String]
  #
  def pr_link
    @bs.pr_json[:links][:self].flat_map { |l| l[:href] }.first.to_s
  end
  # @!group PR Commit Metadata
  # The branch to which the PR is going to be merged from.
  # @return [String]
  #
  def branch_for_head
    @bs.pr_json[:fromRef][:displayId].to_s
  end
  # @!group PR Commit Metadata
  # The base commit to which the PR is going to be merged as a parent.
  # @return [String]
  #
  def base_commit
    @bs.pr_json[:toRef][:latestCommit].to_s
  end
  # @!group PR Commit Metadata
  # The head commit to which the PR is requesting to be merged from.
  # @return [String]
  #
  def head_commit
    @bs.pr_json[:fromRef][:latestCommit].to_s
  end
  # @!group Bitbucket Server Misc
  # Returns a list of Markdown links for a file, or files in the head repository.
  # It returns a string of multiple anchors if passed an array.
  # @note Atlassian [disabled inline HTML support](https://jira.atlassian.com/browse/BSERV-7147),
  #       so this simply delegates to {#markdown_link}. The method is left
  #       for backward compatibility.
  # @param [String or Array<String>] paths
  #        A list of strings to convert to github anchors
  # @param [Bool] full_path
  #        Shows the full path as the link's text, defaults to `true`.
  #
  # @return [String]
  #
  def html_link(paths, full_path: true)
    markdown_link(paths, full_path: full_path)
  end
  # @!group Bitbucket Server Misc
  # Returns a list of Markdown links for a file, or files in the head repository.
  # It returns a string of multiple links if passed an array.
  # @param [String or Array<String>] paths
  #        A list of strings to convert to Markdown links
  # @param [Bool] full_path
  #        Shows the full path as the link's text, defaults to `true`.
  #
  # @return [String]
  #
  def markdown_link(paths, full_path: true)
    create_link(paths, full_path) { |href, text| create_markdown_link(href, text) }
  end
  # @!group Bitbucket Server Misc
  # Updates the PR with build status and build server job link.
  # @param [String] status
  #        SUCCESSFUL, FAILED and INPROGRESS
  # @param [String] build_job_link
  #        Build server job link
  # @param [String] description
  #        Build status description
  # @return [String]
  #
  def update_pr_build_status(status, build_job_link, description)
    @bs.update_pr_build_status(status, build_job_link, description)
  end
  private
  # Shared link-building logic: resolves each path to a URL at the head
  # commit in the source repo, yields (href, text) for rendering, and joins
  # multiple results as "a, b & c". A "#<anchor>" suffix on a path is kept
  # as a URL fragment.
  def create_link(paths, full_path)
    paths = [paths] unless paths.kind_of?(Array)
    commit = head_commit
    repo = pr_json[:fromRef][:repository][:links][:self].flat_map { |l| l[:href] }.first
    paths = paths.map do |path|
      path, line = path.split("#")
      url_path = path.start_with?("/") ? path : "/#{path}"
      text = full_path ? path : File.basename(path)
      url_path = url_path.gsub(" ", "%20")
      line_ref = line ? "##{line}" : ""
      yield("#{repo}#{url_path}?at=#{commit}#{line_ref}", text)
    end
    return paths.first if paths.count < 2
    "#{paths.first(paths.count - 1).join(', ')} & #{paths.last}"
  end
  # Renders one Markdown-style link.
  def create_markdown_link(href, text)
    "[#{text}](#{href})"
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/danger_core/plugins/dangerfile_messaging_plugin.rb | lib/danger/danger_core/plugins/dangerfile_messaging_plugin.rb | # frozen_string_literal: true
require "danger/danger_core/messages/violation"
require "danger/danger_core/messages/markdown"
require "danger/plugin_support/plugin"
module Danger
# Provides the feedback mechanism for Danger. Danger can keep track of
# messages, warnings, failure and post arbitrary markdown into a comment.
#
# The message within which Danger communicates back is amended on each run in a session.
#
# Each of `message`, `warn` and `fail` have a `sticky` flag, `false` by default, which
# when `true` means that the message will be crossed out instead of being removed.
# If it's not called again on subsequent runs.
#
# Each of `message`, `warn`, `fail` and `markdown` support multiple passed arguments
#
# @example Multiple passed arguments
#
# message 'Hello', 'World', file: "Dangerfile", line: 1
# warn ['This', 'is', 'warning'], file: "Dangerfile", line: 1
# failure 'Ooops', 'bad bad error', sticky: false
# markdown '# And', '# Even', '# Markdown', file: "Dangerfile", line: 1
#
# By default, using `failure` would fail the corresponding build. Either via an API call, or
# via the return value for the danger command. Older code examples use `fail` which is an alias
# of `failure`, but the default Rubocop settings would have an issue with it.
#
# You can optionally add `file` and `line` to provide inline feedback on a PR in GitHub, note that
# only feedback inside the PR's diff will show up inline. Others will appear inside the main comment.
#
# It is possible to have Danger ignore specific warnings or errors by writing `Danger: Ignore "[warning/error text]"`.
#
# Sidenote: Messaging is the only plugin which adds functions to the root of the Dangerfile.
#
# @example Failing a build
#
# failure "This build didn't pass tests"
# failure "Ooops!", "Something bad happened"
# failure ["This is example", "with array"]
#
# @example Failing a build, and note that on subsequent runs
#
# failure("This build didn't pass tests", sticky: true)
#
# @example Passing a warning
#
# warn "This build didn't pass linting"
# warn "Hm...", "This is not really good"
# warn ["Multiple warnings", "via array"]
#
# @example Displaying a markdown table
#
# message = "### Proselint found issues\n\n"
# message << "Line | Message | Severity |\n"
# message << "| --- | ----- | ----- |\n"
# message << "20 | No documentation | Error \n"
# markdown message
#
# markdown "### First issue", "### Second issue"
# markdown ["### First issue", "### Second issue"]
#
# @example Adding an inline warning to a file
#
# warn("You shouldn't use puts in your Dangerfile", file: "Dangerfile", line: 10)
#
#
# @see danger/danger
# @tags core, messaging
#
class DangerfileMessagingPlugin < Plugin
  # Sets up empty buckets for each feedback kind; these accumulate over
  # the lifetime of a Dangerfile run and are read back via
  # `status_report` / `violation_report`.
  def initialize(dangerfile)
    super(dangerfile)

    @warnings = []
    @errors = []
    @messages = []
    @markdowns = []
  end

  # The instance name used in the Dangerfile
  # @return [String]
  #
  def self.instance_name
    "messaging"
  end

  # @!group Core
  # Print markdown to below the table
  #
  # @param [Hash] options
  # @option [String, Array<String>] markdowns
  #         The markdown based message to be printed below the table
  # @option [String] file
  #         Optional. Path to the file that the message is for.
  # @option [String] line
  #         Optional. The line in the file to present the message in.
  # @return [void]
  #
  def markdown(*markdowns, **options)
    file = options.fetch(:file, nil)
    line = options.fetch(:line, nil)

    # Flatten so both `markdown "a", "b"` and `markdown ["a", "b"]` work.
    markdowns.flatten.each do |markdown|
      @markdowns << Markdown.new(markdown, file, line)
    end
  end

  # @!group Core
  # Print out a generate message on the PR
  #
  # @param [String, Array<String>] messages
  #         The message to present to the user
  # @param [Hash] options
  # @option [Boolean] sticky
  #         Whether the message should be kept after it was fixed,
  #         defaults to `false`.
  # @option [String] file
  #         Optional. Path to the file that the message is for.
  # @option [String] line
  #         Optional. The line in the file to present the message in.
  # @return [void]
  #
  def message(*messages, **options)
    sticky = options.fetch(:sticky, false)
    file = options.fetch(:file, nil)
    line = options.fetch(:line, nil)

    # Note: plain messages do not consult the ignore list (unlike warn/fail).
    messages.flatten.each do |message|
      @messages << Violation.new(message, sticky, file, line, type: :message) if message
    end
  end

  # @!group Core
  # Specifies a problem, but not critical
  #
  # @param [String, Array<String>] warnings
  #         The message to present to the user
  # @param options
  # @option [Boolean] sticky
  #         Whether the message should be kept after it was fixed,
  #         defaults to `false`.
  # @option [String] file
  #         Optional. Path to the file that the message is for.
  # @option [String] line
  #         Optional. The line in the file to present the message in.
  # @return [void]
  #
  def warn(*warnings, **options)
    sticky = options.fetch(:sticky, false)
    file = options.fetch(:file, nil)
    line = options.fetch(:line, nil)

    warnings.flatten.each do |warning|
      # Honour `Danger: Ignore "..."` annotations on the PR body.
      next if should_ignore_violation(warning)

      @warnings << Violation.new(warning, sticky, file, line, type: :warning) if warning
    end
  end

  # @!group Core
  # Declares a CI blocking error
  #
  # @param [String, Array<String>] failures
  #         The message to present to the user
  # @param options
  # @option [Boolean] sticky
  #         Whether the message should be kept after it was fixed,
  #         defaults to `false`.
  # @option [String] file
  #         Optional. Path to the file that the message is for.
  # @option [String] line
  #         Optional. The line in the file to present the message in.
  # @return [void]
  #
  def fail(*failures, **options)
    sticky = options.fetch(:sticky, false)
    file = options.fetch(:file, nil)
    line = options.fetch(:line, nil)

    failures.flatten.each do |failure|
      # Honour `Danger: Ignore "..."` annotations on the PR body.
      next if should_ignore_violation(failure)

      @errors << Violation.new(failure, sticky, file, line, type: :error) if failure
    end
  end
  # `failure` is the RuboCop-friendly spelling of `fail`.
  alias failure fail

  # @!group Reporting
  # A list of all messages passed to Danger, including
  # the markdowns.
  #
  # @visibility hidden
  # @return [Hash]
  def status_report
    # Frozen copies of the message strings, so callers can't mutate
    # Danger's internal state through this report.
    {
      errors: @errors.map(&:message).clone.freeze,
      warnings: @warnings.map(&:message).clone.freeze,
      messages: @messages.map(&:message).clone.freeze,
      markdowns: @markdowns.clone.freeze
    }
  end

  # @!group Reporting
  # A list of all violations passed to Danger, we don't
  # anticipate users of Danger needing to use this.
  #
  # @visibility hidden
  # @return [Hash]
  def violation_report
    {
      errors: @errors.clone.freeze,
      warnings: @warnings.clone.freeze,
      messages: @messages.clone.freeze
    }
  end

  private

  # True when the request source has the given message text on its
  # ignore list (`Danger: Ignore "[text]"` on the PR).
  def should_ignore_violation(message)
    env.request_source.ignored_violations.include? message
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/clients/rubygems_client.rb | lib/danger/clients/rubygems_client.rb | # frozen_string_literal: true
module Danger
# Tiny client for asking rubygems.org which danger version is newest.
# Any failure (network trouble, Faraday unavailable, malformed JSON)
# falls back to DUMMY_VERSION rather than raising.
class RubyGemsClient
  API_URL = "https://rubygems.org/api/v1/versions/danger/latest.json"
  DUMMY_VERSION = "0.0.0"

  # @return [String] the latest released danger version, or
  #   DUMMY_VERSION when the lookup fails for any reason.
  def self.latest_danger_version
    require "json"

    response = Faraday.get(API_URL)
    parsed = JSON.parse(response.body)
    parsed.fetch("version") { DUMMY_VERSION }
  rescue StandardError
    DUMMY_VERSION
  end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/local_only.rb | lib/danger/request_sources/local_only.rb | # frozen_string_literal: true
require "danger/helpers/comments_helper"
require "danger/helpers/comment"
module Danger
module RequestSources
# Request source for runs that only use the local git repository —
# it never talks to a hosting API, so every validation trivially passes.
class LocalOnly < RequestSource
  include Danger::Helpers::CommentsHelper
  attr_accessor :mr_json, :commits_json

  def self.env_vars
    %w(DANGER_LOCAL_ONLY)
  end

  def initialize(ci_source, _environment)
    self.ci_source = ci_source
  end

  # Always valid: there is no CI service to cross-check.
  def validates_as_ci?
    true
  end

  # Always valid: no credentials are needed for a local run.
  def validates_as_api_source?
    true
  end

  def scm
    @scm ||= GitRepo.new
  end

  def setup_danger_branches
    # Fail fast when either discovered commit is unknown to the local repo.
    [ci_source.base_commit, ci_source.head_commit].each do |commit|
      found = scm.exec("rev-parse --quiet --verify #{commit}")
      raise "Specified commit '#{commit}' not found" if found.empty?
    end

    scm.exec("branch #{EnvironmentManager.danger_base_branch} #{ci_source.base_commit}")
    scm.exec("branch #{EnvironmentManager.danger_head_branch} #{ci_source.head_commit}")
  end

  def fetch_details; end

  def update_pull_request!(_hash_needed); end

  # @return [String] The organisation name, is nil if it can't be detected
  def organisation
    nil
  end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/vsts.rb | lib/danger/request_sources/vsts.rb | # frozen_string_literal: true
require "danger/helpers/comments_helper"
require "danger/request_sources/vsts_api"
module Danger
module RequestSources
# Request source for Visual Studio Team Services / Azure DevOps pull
# requests. All HTTP traffic goes through the VSTSAPI client (@api).
class VSTS < RequestSource
  include Danger::Helpers::CommentsHelper
  # Raw PR payload as fetched from the VSTS API (symbol keys).
  attr_accessor :pr_json

  def self.env_vars
    [
      "DANGER_VSTS_API_TOKEN",
      "DANGER_VSTS_HOST"
    ]
  end

  def self.optional_env_vars
    [
      "DANGER_VSTS_API_VERSION"
    ]
  end

  def initialize(ci_source, environment)
    self.ci_source = ci_source
    # Presence of the host env var is what marks this run as VSTS-driven.
    @is_vsts_ci = environment.key? "DANGER_VSTS_HOST"
    @api = VSTSAPI.new(ci_source.repo_slug, ci_source.pull_request_id, environment)
  end

  def validates_as_ci?
    @is_vsts_ci
  end

  def validates_as_api_source?
    @api.credentials_given?
  end

  def scm
    @scm ||= GitRepo.new
  end

  def client
    @api
  end

  def host
    @host ||= @api.host
  end

  def fetch_details
    self.pr_json = @api.fetch_pr_json
  end

  # Creates the local danger_base/danger_head branches pointing at the
  # PR's merge-target and merge-source commits respectively.
  def setup_danger_branches
    base_branch = self.pr_json[:targetRefName].sub("refs/heads/", "")
    base_commit = self.pr_json[:lastMergeTargetCommit][:commitId]
    head_branch = self.pr_json[:sourceRefName].sub("refs/heads/", "")
    head_commit = self.pr_json[:lastMergeSourceCommit][:commitId]

    # Next, we want to ensure that we have a version of the current branch at a known location
    scm.ensure_commitish_exists_on_branch! base_branch, base_commit
    self.scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_commit}"

    # OK, so we want to ensure that we have a known head branch, this will always represent
    # the head of the PR ( e.g. the most recent commit that will be merged. )
    scm.ensure_commitish_exists_on_branch! head_branch, head_commit
    self.scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_commit}"
  end

  # @return [String] The organisation name, is nil if it can't be detected
  def organisation
    nil
  end

  # Publishes Danger's feedback on the PR: inline violations are posted
  # per file/line where possible; everything that can't go inline is
  # merged into the single main summary comment.
  def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
    unless @api.supports_comments?
      return
    end

    regular_violations = regular_violations_group(
      warnings: warnings,
      errors: errors,
      messages: messages,
      markdowns: markdowns
    )

    inline_violations = inline_violations_group(
      warnings: warnings,
      errors: errors,
      messages: messages,
      markdowns: markdowns
    )

    # Whatever submit_inline_comments! hands back still needs a place in
    # the main comment below.
    rest_inline_violations = submit_inline_comments!(**{
      danger_id: danger_id,
      previous_violations: {}
    }.merge(inline_violations))

    main_violations = merge_violations(
      regular_violations, rest_inline_violations
    )

    comment = generate_description(warnings: main_violations[:warnings], errors: main_violations[:errors])
    comment += "\n\n"
    comment += generate_comment(**{
      previous_violations: {},
      danger_id: danger_id,
      template: "vsts"
    }.merge(main_violations))

    if new_comment || remove_previous_comments
      post_new_comment(comment)
    else
      update_old_comment(comment, danger_id: danger_id)
    end
  end

  def post_new_comment(comment)
    @api.post_comment(comment)
  end

  # Rewrites the previous Danger-authored, non-inline comment thread in
  # place; posts a fresh comment when none is found.
  def update_old_comment(new_comment, danger_id: "danger")
    comment_updated = false
    @api.fetch_last_comments.each do |c|
      thread_id = c[:id]
      comment = c[:comments].first
      comment_id = comment[:id]
      comment_content = comment[:content].nil? ? "" : comment[:content]
      # Skip the comment if it wasn't posted by danger
      next unless comment_content.include?("generated_by_#{danger_id}")
      # Skip the comment if it's an inline comment
      next unless c[:threadContext].nil?

      # Updated the danger posted comment
      @api.update_comment(thread_id, comment_id, new_comment)
      comment_updated = true
    end
    # If no comment was updated, post a new one
    post_new_comment(new_comment) unless comment_updated
  end

  # Posts file/line-anchored comments and strikes out Danger threads
  # whose sticky violation no longer applies. Returns the violations
  # that still need to appear in the main comment (those without a
  # file/line, plus freshly-posted inline ones — see the "Not reject"
  # note inside submit_inline_comments_for_kind!).
  def submit_inline_comments!(warnings: [], errors: [], messages: [], markdowns: [], previous_violations: [], danger_id: "danger")
    # Avoid doing any fetches if there's no inline comments
    return {} if (warnings + errors + messages + markdowns).select(&:inline?).empty?

    pr_threads = @api.fetch_last_comments
    danger_threads = pr_threads.select do |thread|
      comment = thread[:comments].first
      comment_content = comment[:content].nil? ? "" : comment[:content]

      next comment_content.include?("generated_by_#{danger_id}")
    end
    warnings = submit_inline_comments_for_kind!(:warning, warnings, danger_threads, previous_violations["warning"], danger_id: danger_id)
    errors = submit_inline_comments_for_kind!(:error, errors, danger_threads, previous_violations["error"], danger_id: danger_id)
    messages = submit_inline_comments_for_kind!(:message, messages, danger_threads, previous_violations["message"], danger_id: danger_id)
    markdowns = submit_inline_comments_for_kind!(:markdown, markdowns, danger_threads, [], danger_id: danger_id)

    # submit removes from the array all comments that are still in force
    # so we strike out all remaining ones
    danger_threads.each do |thread|
      violation = violations_from_table(thread[:comments].first[:content]).first
      if !violation.nil? && violation.sticky
        body = generate_inline_comment_body("white_check_mark", violation, danger_id: danger_id, resolved: true, template: "github")
        @api.update_comment(thread[:id], thread[:comments].first[:id], body)
      end
    end

    {
      warnings: warnings,
      errors: errors,
      messages: messages,
      markdowns: markdowns
    }
  end

  # Message equality that ignores the commit SHA embedded in blob URLs,
  # so an existing comment survives new pushes to the PR.
  def messages_are_equivalent(m1, m2)
    blob_regexp = %r{blob/[0-9a-z]+/}
    m1.file == m2.file && m1.line == m2.line &&
      m1.message.sub(blob_regexp, "") == m2.message.sub(blob_regexp, "")
  end

  # Handles one violation kind: updates a matching existing thread in
  # place, or posts a new inline comment. The returned array is the
  # input minus the violations that were fully handled inline.
  def submit_inline_comments_for_kind!(kind, messages, danger_threads, previous_violations, danger_id: "danger")
    previous_violations ||= []
    is_markdown_content = kind == :markdown
    emoji = { warning: "warning", error: "no_entry_sign", message: "book" }[kind]

    messages.reject do |m| # rubocop:todo Metrics/BlockLength
      next false unless m.file && m.line

      # Once we know we're gonna submit it, we format it
      if is_markdown_content
        body = generate_inline_markdown_body(m, danger_id: danger_id, template: "vsts")
      else
        # Hide the inline link behind a span
        m.message = m.message.gsub("\n", "<br />")
        m = process_markdown(m, true)
        body = generate_inline_comment_body(emoji, m, danger_id: danger_id, template: "vsts")
        # A comment might be in previous_violations because only now it's part of the unified diff
        # We remove from the array since it won't have a place in the table anymore
        previous_violations.reject! { |v| messages_are_equivalent(v, m) }
      end

      # A thread matches when it is anchored at the same file/line and
      # carries an equivalent message (parsed from its table when possible).
      matching_threads = danger_threads.select do |comment_data|
        if comment_data.key?(:threadContext) && !comment_data[:threadContext].nil? &&
           comment_data[:threadContext][:filePath] == m.file &&
           comment_data[:threadContext].key?(:rightFileStart) &&
           comment_data[:threadContext][:rightFileStart][:line] == m.line
          # Parse it to avoid problems with strikethrough
          violation = violations_from_table(comment_data[:comments].first[:content]).first
          if violation
            messages_are_equivalent(violation, m)
          else
            blob_regexp = %r{blob/[0-9a-z]+/}
            comment_data[:comments].first[:content].sub(blob_regexp, "") == body.sub(blob_regexp, "")
          end
        else
          false
        end
      end

      if matching_threads.empty?
        @api.post_inline_comment(body, m.file, m.line)

        # Not reject because this comment has not completed
        next false
      else
        # Remove the surviving comment so we don't strike it out
        danger_threads.reject! { |c| matching_threads.include? c }

        # Update the comment to remove the strikethrough if present
        thread = matching_threads.first
        @api.update_comment(thread[:id], thread[:comments].first[:id], body)
      end

      # Remove this element from the array
      next true
    end
  end

  private

  # Violations that have no file/line anchor and so belong in the main comment.
  def regular_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
    {
      warnings: warnings.reject(&:inline?),
      errors: errors.reject(&:inline?),
      messages: messages.reject(&:inline?),
      markdowns: markdowns.reject(&:inline?)
    }
  end

  # File/line-anchored violations, sorted by file then line; anchored
  # entries sort after unanchored ones.
  def inline_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
    cmp = proc do |a, b|
      next -1 unless a.file && a.line
      next 1 unless b.file && b.line

      next a.line <=> b.line if a.file == b.file

      next a.file <=> b.file
    end

    # Sort to group inline comments by file
    {
      warnings: warnings.select(&:inline?).sort(&cmp),
      errors: errors.select(&:inline?).sort(&cmp),
      messages: messages.select(&:inline?).sort(&cmp),
      markdowns: markdowns.select(&:inline?).sort(&cmp)
    }
  end

  # Merges violation hashes, concatenating arrays that share a key.
  def merge_violations(*violation_groups)
    violation_groups.inject({}) do |accumulator, group|
      accumulator.merge(group) { |_, old, fresh| old + fresh }
    end
  end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/bitbucket_cloud_api.rb | lib/danger/request_sources/bitbucket_cloud_api.rb | # frozen_string_literal: true
require "danger/helpers/comments_helper"
module Danger
module RequestSources
# Minimal client for the Bitbucket Cloud 2.0 REST API, scoped to the
# pull-request endpoints Danger needs (fetch PR + comments, post and
# delete comments). Authenticates with either username + app password
# or a bearer token (repo access token or OAuth2 client credentials).
class BitbucketCloudAPI
  attr_accessor :host, :project, :slug, :access_token, :pull_request_id
  attr_reader :my_uuid

  def initialize(repo_slug, pull_request_id, branch_name, environment)
    initialize_my_uuid(environment["DANGER_BITBUCKETCLOUD_UUID"])
    @username = environment["DANGER_BITBUCKETCLOUD_USERNAME"]
    @password = environment["DANGER_BITBUCKETCLOUD_PASSWORD"]
    self.project, self.slug = repo_slug.split("/")
    self.access_token = fetch_access_token(environment)
    # When no PR id is supplied, resolve it from the branch name via the API.
    self.pull_request_id = pull_request_id || fetch_pr_from_branch(branch_name)
    self.host = "https://bitbucket.org/"
  end

  # Normalises the configured UUID to the `{...}`-wrapped form; an empty
  # string is kept as-is (credentials_given? treats it as "not set").
  def initialize_my_uuid(uuid)
    return if uuid.nil?
    return @my_uuid = uuid if uuid.empty?

    if uuid.start_with?("{") && uuid.end_with?("}")
      @my_uuid = uuid
    else
      @my_uuid = "{#{uuid}}"
    end
  end

  # Masks the password and access token so secrets can't leak into
  # logs or exception output.
  def inspect
    inspected = super

    inspected = inspected.gsub(@password, "********") if @password
    inspected = inspected.gsub(@access_token, "********") if @access_token

    inspected
  end

  # True when either a bearer token or the full uuid/username/password
  # triple is available.
  def credentials_given?
    return true if @access_token

    @my_uuid && !@my_uuid.empty? &&
      @username && !@username.empty? &&
      @password && !@password.empty?
  end

  def pull_request(*)
    fetch_pr_json
  end

  def fetch_pr_json
    uri = URI(pr_api_endpoint)
    fetch_json(uri)
  end

  # Fetches all non-deleted comments authored by the configured user,
  # following the API's `next` links for pagination.
  def fetch_comments
    values = []
    # TODO: use a url parts encoder to encode the query
    corrected_uuid = @my_uuid[1...-1] unless @my_uuid.nil? # Endpoint doesnt support curly brackets for this, so remove them for this
    uri = "#{pr_api_endpoint}/comments?pagelen=100&q=deleted+%7E+false+AND+user.uuid+%7E+%22#{corrected_uuid}%22"

    while uri
      json = fetch_json(URI(uri))
      values += json[:values]
      uri = json[:next]
    end

    values
  end

  def delete_comment(id)
    uri = URI("#{pr_api_endpoint}/comments/#{id}")
    delete(uri)
  end

  # Posts a PR comment; passing both file and line makes it inline.
  def post_comment(text, file: nil, line: nil)
    uri = URI("#{pr_api_endpoint}/comments")
    body = {
      content: {
        raw: text
      }
    }
    body.merge!(inline: { path: file, to: line }) if file && line
    post(uri, body.to_json)
  end

  private

  def base_url(version)
    "https://api.bitbucket.org/#{version}.0/repositories/#{project}/#{slug}/pullrequests"
  end

  def pr_api_endpoint
    "#{base_url(2)}/#{pull_request_id}"
  end

  def prs_api_url(branch_name)
    encoded_branch_name = URI.encode_www_form_component(branch_name)
    "#{base_url(2)}?q=source.branch.name=\"#{encoded_branch_name}\""
  end

  # Resolves the id of the first PR whose source branch matches.
  def fetch_pr_from_branch(branch_name)
    uri = URI(prs_api_url(branch_name))
    fetch_json(uri)[:values][0][:id]
  end

  # Resolves a bearer token: a repo access token from the environment
  # wins; otherwise falls back to the OAuth2 client-credentials flow
  # when key + secret are configured. Returns nil when neither is set.
  def fetch_access_token(environment)
    access_token = environment["DANGER_BITBUCKETCLOUD_REPO_ACCESSTOKEN"]
    if access_token
      @access_token = access_token
      return access_token
    end

    oauth_key = environment["DANGER_BITBUCKETCLOUD_OAUTH_KEY"]
    oauth_secret = environment["DANGER_BITBUCKETCLOUD_OAUTH_SECRET"]
    return nil if oauth_key.nil?
    return nil if oauth_secret.nil?

    uri = URI.parse("https://bitbucket.org/site/oauth2/access_token")
    req = Net::HTTP::Post.new(uri.request_uri, { "Content-Type" => "application/json" })
    req.basic_auth oauth_key, oauth_secret
    req.set_form_data({ "grant_type" => "client_credentials" })
    res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
      http.request(req)
    end
    JSON.parse(res.body, symbolize_names: true)[:access_token]
  end

  # GETs `uri` and parses the JSON body with symbol keys. Raises on
  # missing credentials or any non-200 response.
  def fetch_json(uri)
    raise credentials_not_available unless credentials_given?

    req = Net::HTTP::Get.new(uri.request_uri, { "Content-Type" => "application/json" })
    if access_token.nil?
      req.basic_auth @username, @password
    else
      req["Authorization"] = "Bearer #{access_token}"
    end
    res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
      http.request(req)
    end
    raise error_fetching_json(uri.to_s, res.code) unless res.code == "200"

    JSON.parse(res.body, symbolize_names: true)
  end

  # POSTs a pre-serialised JSON body. The response is not checked.
  def post(uri, body)
    raise credentials_not_available unless credentials_given?

    req = Net::HTTP::Post.new(uri.request_uri, { "Content-Type" => "application/json" })
    if access_token.nil?
      req.basic_auth @username, @password
    else
      req["Authorization"] = "Bearer #{access_token}"
    end
    req.body = body
    Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
      http.request(req)
    end
  end

  # Issues a DELETE. The response is not checked.
  def delete(uri)
    raise credentials_not_available unless credentials_given?

    req = Net::HTTP::Delete.new(uri.request_uri, { "Content-Type" => "application/json" })
    if access_token.nil?
      req.basic_auth @username, @password
    else
      req["Authorization"] = "Bearer #{access_token}"
    end
    Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
      http.request(req)
    end
  end

  # Returns the message string; `raise credentials_not_available` above
  # wraps it in a RuntimeError.
  def credentials_not_available
    "Credentials not available. Provide DANGER_BITBUCKETCLOUD_USERNAME, " \
      "DANGER_BITBUCKETCLOUD_UUID, and DANGER_BITBUCKETCLOUD_PASSWORD " \
      "as environment variables."
  end

  def error_fetching_json(url, status_code)
    "Error fetching json for: #{url}, status code: #{status_code}"
  end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/gitlab.rb | lib/danger/request_sources/gitlab.rb | # frozen_string_literal: true
require "uri"
require "danger/helpers/comments_helper"
require "danger/helpers/comment"
require "danger/request_sources/support/get_ignored_violation"
module Danger
module RequestSources
class GitLab < RequestSource
include Danger::Helpers::CommentsHelper
attr_accessor :mr_json, :commits_json, :dismiss_out_of_range_messages, :endpoint, :host
FIRST_GITLAB_GEM_WITH_VERSION_CHECK = Gem::Version.new("4.6.0")
FIRST_VERSION_WITH_INLINE_COMMENTS = Gem::Version.new("10.8.0")
def self.env_vars
["DANGER_GITLAB_API_TOKEN"]
end
def self.optional_env_vars
["DANGER_GITLAB_HOST", "DANGER_GITLAB_API_BASE_URL"]
end
def initialize(ci_source, environment)
self.ci_source = ci_source
self.dismiss_out_of_range_messages = false
@endpoint = environment["DANGER_GITLAB_API_BASE_URL"] || environment.fetch("CI_API_V4_URL", "https://gitlab.com/api/v4")
@host = environment.fetch("DANGER_GITLAB_HOST", URI.parse(endpoint).host) || "gitlab.com"
@token = environment["DANGER_GITLAB_API_TOKEN"]
end
def client
raise "No API token given, please provide one using `DANGER_GITLAB_API_TOKEN`" unless @token
# The require happens inline so that it won't cause exceptions when just using the `danger` gem.
require "gitlab"
@client ||= Gitlab.client(endpoint: endpoint, private_token: @token)
rescue LoadError => e
if e.path == "gitlab"
puts "The GitLab gem was not installed, you will need to change your Gem from `danger` to `danger-gitlab`.".red
puts "\n - See https://github.com/danger/danger/blob/master/CHANGELOG.md#400"
else
puts "Error: #{e}".red
end
abort
end
def validates_as_ci?
includes_port = host.include? ":"
raise "Port number included in `DANGER_GITLAB_HOST`, this will fail with GitLab CI Runners" if includes_port
# We don't call super because in some cases the Git remote doesn't match the GitLab instance host.
# In Danger::EnvironmentManager#initialize we still check that the request source is #validates_as_api_source?
# so that should be sufficient to validate GitLab as request source.
# See https://github.com/danger/danger/issues/1231 and https://gitlab.com/gitlab-com/gl-infra/infrastructure/-/issues/10069.
true
end
def validates_as_api_source?
@token && !@token.empty?
end
def scm
@scm ||= GitRepo.new
end
def base_commit
@base_commit ||= self.mr_json.diff_refs.base_sha
end
def mr_comments
# @raw_comments contains what we got back from the server.
# @comments contains Comment objects (that have less information)
if supports_inline_comments
@raw_comments = mr_discussions
.auto_paginate
.flat_map { |discussion| discussion.notes.map { |note| note.to_h.merge({ "discussion_id" => discussion.id }) } }
else
@raw_comments = client.merge_request_comments(ci_source.repo_slug, ci_source.pull_request_id, per_page: 100)
.auto_paginate
end
@comments ||= @raw_comments
.map { |comment| Comment.from_gitlab(comment) }
end
def mr_discussions
@mr_discussions ||= client.merge_request_discussions(ci_source.repo_slug, ci_source.pull_request_id)
end
def mr_diff
@mr_diff ||= begin
diffs = mr_changes.changes.map do |change|
diff = change["diff"]
if diff.start_with?("--- a/")
diff
else
"--- a/#{change['old_path']}\n+++ b/#{change['new_path']}\n#{diff}"
end
end
diffs.join("\n")
end
end
def mr_changed_paths
@mr_changed_paths ||= mr_changes
.changes.map { |change| change["new_path"] }
@mr_changed_paths
end
def mr_changes
@mr_changes ||= client.merge_request_changes(ci_source.repo_slug, ci_source.pull_request_id)
end
def mr_closes_issues
@mr_closes_issues ||= client.merge_request_closes_issues(ci_source.repo_slug, ci_source.pull_request_id)
end
def setup_danger_branches
# we can use a GitLab specific feature here:
base_branch = self.mr_json.source_branch
base_commit = self.mr_json.diff_refs.base_sha
head_branch = self.mr_json.target_branch
head_commit = self.mr_json.diff_refs.head_sha
# Next, we want to ensure that we have a version of the current branch at a known location
scm.ensure_commitish_exists_on_branch! base_branch, base_commit
self.scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_commit}"
# OK, so we want to ensure that we have a known head branch, this will always represent
# the head of the PR ( e.g. the most recent commit that will be merged. )
scm.ensure_commitish_exists_on_branch! head_branch, head_commit
self.scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_commit}"
end
def fetch_details
self.mr_json = client.merge_request(ci_source.repo_slug, self.ci_source.pull_request_id)
self.ignored_violations = ignored_violations_from_pr
end
def ignored_violations_from_pr
GetIgnoredViolation.new(self.mr_json.description).call
end
def supports_inline_comments
# If we can't check GitLab's version, we assume we don't support inline comments
@supports_inline_comments ||= if Gem.loaded_specs["gitlab"].version < FIRST_GITLAB_GEM_WITH_VERSION_CHECK
false
else
current_version = Gem::Version.new(client.version.version)
current_version >= FIRST_VERSION_WITH_INLINE_COMMENTS
end
end
def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
if supports_inline_comments
update_pull_request_with_inline_comments!(warnings: warnings, errors: errors, messages: messages, markdowns: markdowns, danger_id: danger_id, new_comment: new_comment, remove_previous_comments: remove_previous_comments)
else
update_pull_request_without_inline_comments!(warnings: warnings, errors: errors, messages: messages, markdowns: markdowns, danger_id: danger_id, new_comment: new_comment, remove_previous_comments: remove_previous_comments)
end
end
def update_pull_request_with_inline_comments!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
editable_regular_comments = mr_comments
.select { |comment| comment.generated_by_danger?(danger_id) }
.reject(&:inline?)
last_comment = editable_regular_comments.last
should_create_new_comment = new_comment || last_comment.nil? || remove_previous_comments
previous_violations =
if should_create_new_comment
{}
else
parse_comment(last_comment.body)
end
regular_violations = regular_violations_group(
warnings: warnings,
errors: errors,
messages: messages,
markdowns: markdowns
)
inline_violations = inline_violations_group(
warnings: warnings,
errors: errors,
messages: messages,
markdowns: markdowns
)
rest_inline_violations = submit_inline_comments!(**{
danger_id: danger_id,
previous_violations: previous_violations
}.merge(inline_violations))
main_violations = merge_violations(
regular_violations, rest_inline_violations
)
main_violations_sum = main_violations.values.inject(:+)
if (previous_violations.empty? && main_violations_sum.empty?) || remove_previous_comments
# Just remove the comment, if there's nothing to say or --remove-previous-comments CLI was set.
delete_old_comments!(danger_id: danger_id)
end
# If there are still violations to show
if main_violations_sum.any?
body = generate_comment(**{
template: "gitlab",
danger_id: danger_id,
previous_violations: previous_violations
}.merge(main_violations))
if should_create_new_comment
client.create_merge_request_note(ci_source.repo_slug, ci_source.pull_request_id, body)
else
client.edit_merge_request_note(ci_source.repo_slug, ci_source.pull_request_id, last_comment.id, body)
end
end
end
def update_pull_request_without_inline_comments!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
editable_comments = mr_comments.select { |comment| comment.generated_by_danger?(danger_id) }
should_create_new_comment = new_comment || editable_comments.empty? || remove_previous_comments
if should_create_new_comment
previous_violations = {}
else
comment = editable_comments.first.body
previous_violations = parse_comment(comment)
end
if (previous_violations.empty? && (warnings + errors + messages + markdowns).empty?) || remove_previous_comments
# Just remove the comment, if there's nothing to say or --remove-previous-comments CLI was set.
delete_old_comments!(danger_id: danger_id)
else
body = generate_comment(warnings: warnings,
errors: errors,
messages: messages,
markdowns: markdowns,
previous_violations: previous_violations,
danger_id: danger_id,
template: "gitlab")
if editable_comments.empty? or should_create_new_comment
client.create_merge_request_comment(
ci_source.repo_slug, ci_source.pull_request_id, body
)
else
original_id = editable_comments.first.id
client.edit_merge_request_comment(
ci_source.repo_slug,
ci_source.pull_request_id,
original_id,
{ body: body }
)
end
end
end
def delete_old_comments!(except: nil, danger_id: "danger")
@raw_comments.each do |raw_comment|
comment = Comment.from_gitlab(raw_comment)
next unless comment.generated_by_danger?(danger_id)
next if comment.id == except
next unless raw_comment.kind_of?(Hash) && raw_comment["position"].nil?
begin
client.delete_merge_request_comment(
ci_source.repo_slug,
ci_source.pull_request_id,
comment.id
)
rescue StandardError
end
end
end
def markdown_link_to_message(message, _)
"#{message.file}#L#{message.line}: "
end
# @return [String] The organisation name, is nil if it can't be detected
def organisation
nil # TODO: Implement this
end
def dismiss_out_of_range_messages_for(kind)
if self.dismiss_out_of_range_messages.kind_of?(Hash) && self.dismiss_out_of_range_messages[kind]
self.dismiss_out_of_range_messages[kind]
elsif self.dismiss_out_of_range_messages == true
self.dismiss_out_of_range_messages
else
false
end
end
# @return [String] A URL to the specific file, ready to be downloaded
def file_url(organisation: nil, repository: nil, ref: nil, branch: nil, path: nil)
ref ||= branch || "master"
# According to GitLab Repositories API docs path and id(slug) should be encoded.
path = URI.encode_www_form_component(path)
repository = URI.encode_www_form_component(repository)
"#{endpoint}/projects/#{repository}/repository/files/#{path}/raw?ref=#{ref}&private_token=#{@token}"
end
# Keeps only the violations that are NOT inline (no file/line attached),
# grouped by kind, for the main summary comment.
def regular_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
  {
    warnings: warnings,
    errors: errors,
    messages: messages,
    markdowns: markdowns
  }.transform_values { |violations| violations.reject(&:inline?) }
end
# Selects the inline violations (those carrying file/line info), grouped by
# kind and sorted by file then line so comments land grouped per file.
def inline_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
  order = proc do |left, right|
    next -1 unless left.file && left.line
    next 1 unless right.file && right.line
    next left.line <=> right.line if left.file == right.file
    next left.file <=> right.file
  end
  {
    warnings: warnings,
    errors: errors,
    messages: messages,
    markdowns: markdowns
  }.transform_values { |violations| violations.select(&:inline?).sort(&order) }
end
# Combines several violation hashes into one, concatenating the values of
# keys that appear in more than one group.
def merge_violations(*violation_groups)
  violation_groups.each_with_object({}) do |group, merged|
    group.each do |key, violations|
      merged[key] = merged.key?(key) ? merged[key] + violations : violations
    end
  end
end
# Posts/updates inline (per-line) MR comments for each violation kind,
# strikes through sticky violations that were resolved, and deletes stale
# non-sticky ones. Returns the violations that could NOT be placed inline
# so they can be included in the main summary comment instead.
def submit_inline_comments!(warnings: [], errors: [], messages: [], markdowns: [], previous_violations: [], danger_id: "danger")
  # Collect every existing inline note across all MR discussions, tagging
  # each note with its discussion id (needed to update/delete it later).
  comments = mr_discussions
    .auto_paginate
    .flat_map { |discussion| discussion.notes.map { |note| note.to_h.merge({ "discussion_id" => discussion.id }) } }
    .select { |comment| Comment.from_gitlab(comment).inline? }
  danger_comments = comments.select { |comment| Comment.from_gitlab(comment).generated_by_danger?(danger_id) }
  non_danger_comments = comments - danger_comments
  diff_lines = []
  # Each call mutates danger_comments (removing notes that are still valid)
  # and returns the violations it could not submit inline.
  warnings = submit_inline_comments_for_kind!(:warning, warnings, diff_lines, danger_comments, previous_violations["warning"], danger_id: danger_id)
  errors = submit_inline_comments_for_kind!(:error, errors, diff_lines, danger_comments, previous_violations["error"], danger_id: danger_id)
  messages = submit_inline_comments_for_kind!(:message, messages, diff_lines, danger_comments, previous_violations["message"], danger_id: danger_id)
  markdowns = submit_inline_comments_for_kind!(:markdown, markdowns, diff_lines, danger_comments, [], danger_id: danger_id)
  # submit removes from the array all comments that are still in force
  # so we strike out all remaining ones
  danger_comments.each do |comment|
    violation = violations_from_table(comment["body"]).first
    if !violation.nil? && violation.sticky
      # Sticky violations are kept, but re-rendered as resolved.
      body = generate_inline_comment_body("white_check_mark", violation, danger_id: danger_id, resolved: true, template: "gitlab")
      client.update_merge_request_discussion_note(ci_source.repo_slug, ci_source.pull_request_id, comment["discussion_id"], comment["id"], body: body)
    else
      # We remove non-sticky violations that have no replies
      # Since there's no direct concept of a reply in GH, we simply consider
      # the existence of non-danger comments in that line as replies
      replies = non_danger_comments.select do |potential|
        potential["path"] == comment["path"] &&
          potential["position"] == comment["position"] &&
          potential["commit_id"] == comment["commit_id"]
      end
      client.delete_merge_request_comment(ci_source.repo_slug, ci_source.pull_request_id, comment["id"]) if replies.empty?
    end
  end
  # Leftovers go back into the main summary comment.
  {
    warnings: warnings,
    errors: errors,
    messages: messages,
    markdowns: markdowns
  }
end
# Submits inline discussions for one violation kind. Returns the subset of
# +messages+ that could not be placed inline (no file/line, or the target
# line is not part of the diff). Side effects: removes surviving notes from
# +danger_comments+ and matched entries from +previous_violations+.
def submit_inline_comments_for_kind!(kind, messages, _diff_lines, danger_comments, previous_violations, danger_id: "danger")
  previous_violations ||= []
  is_markdown_content = kind == :markdown
  emoji = { warning: "warning", error: "no_entry_sign", message: "book" }[kind]
  # reject keeps messages that could NOT be submitted inline; each branch
  # below ends in `next true` (submitted / dismissed) or `next false` (keep).
  messages.reject do |m| # rubocop:todo Metrics/BlockLength
    next false unless m.file && m.line
    # Reject if it's out of range and in dismiss mode
    next true if dismiss_out_of_range_messages_for(kind) && is_out_of_range(mr_changes.changes, m)
    # Once we know we're gonna submit it, we format it
    if is_markdown_content
      body = generate_inline_markdown_body(m, danger_id: danger_id, template: "gitlab")
    else
      # Hide the inline link behind a span
      m = process_markdown(m, true)
      body = generate_inline_comment_body(emoji, m, danger_id: danger_id, template: "gitlab")
      # A comment might be in previous_violations because only now it's part of the unified diff
      # We remove from the array since it won't have a place in the table anymore
      previous_violations.reject! { |v| messages_are_equivalent(v, m) }
    end
    # Existing Danger note already sitting on this exact file/line?
    matching_comments = danger_comments.select do |comment_data|
      position = comment_data["position"]
      if position.nil?
        false
      else
        position["new_path"] == m.file && position["new_line"] == m.line
      end
    end
    if matching_comments.empty?
      # No existing note: create a new discussion, anchored via old/new
      # positions so GitLab can pin it to the diff.
      old_position = find_old_position_in_diff mr_changes.changes, m
      next false if old_position.nil?
      params = {
        body: body,
        position: {
          position_type: "text",
          new_path: m.file,
          new_line: m.line,
          old_path: old_position[:path],
          old_line: old_position[:line],
          base_sha: self.mr_json.diff_refs.base_sha,
          start_sha: self.mr_json.diff_refs.start_sha,
          head_sha: self.mr_json.diff_refs.head_sha
        }
      }
      begin
        client.create_merge_request_discussion(ci_source.repo_slug, ci_source.pull_request_id, params)
      rescue Gitlab::Error::Error => e
        # Log and fall through: the violation stays in the main comment.
        message = [e, "body: #{body}", "position: #{params[:position].inspect}"].join("\n")
        puts message
        next false
      end
    else
      # Remove the surviving comment so we don't strike it out
      danger_comments.reject! { |c| matching_comments.include? c }
      # Update the comment to remove the strikethrough if present
      comment = matching_comments.first
      begin
        client.update_merge_request_discussion_note(ci_source.repo_slug, ci_source.pull_request_id, comment["discussion_id"], comment["id"], body: body)
      rescue Gitlab::Error::Error => e
        message = [e, "body: #{body}"].join("\n")
        puts message
        next false
      end
    end
    # Remove this element from the array
    next true
  end
end
# Walks the unified diff for +message.file+ to compute the pre-change
# position matching +message.line+ (a new-file line number).
#
# @param changes [Array<Hash>] GitLab MR "changes" entries
# @param message [#file, #line] the violation to locate
# @return [Hash, nil] { path:, line: } where +line+ is nil for lines that
#   only exist on the new side (added line / new file); nil when the file
#   has no diff or was deleted.
def find_old_position_in_diff(changes, message)
  range_header_regexp = /@@ -(?<old>[0-9]+)(,([0-9]+))? \+(?<new>[0-9]+)(,([0-9]+))? @@.*/
  change = changes.find { |c| c["new_path"] == message.file }
  # If there is no changes or rename only or deleted, return nil.
  return nil if change.nil? || change["diff"].empty? || change["deleted_file"]
  modified_position = {
    path: change["old_path"],
    line: nil
  }
  # If the file is new one, old line number must be nil.
  return modified_position if change["new_file"]
  # Track parallel line counters for the old and new sides of the diff.
  current_old_line = 0
  current_new_line = 0
  change["diff"].each_line do |line|
    match = line.match range_header_regexp
    if match
      # If the message line is at before next diffs, break from loop.
      break if message.line.to_i < match[:new].to_i
      # The match [:old] line does not appear yet at the header position, so reduce line number.
      current_old_line = match[:old].to_i - 1
      current_new_line = match[:new].to_i - 1
      next
    end
    if line.start_with?("-")
      current_old_line += 1
    elsif line.start_with?("+")
      current_new_line += 1
      # If the message line starts with '+', old line number must be nil.
      return modified_position if current_new_line == message.line.to_i
    elsif !line.eql?("\\ No newline at end of file\n")
      # Context line: advances both sides.
      current_old_line += 1
      current_new_line += 1
      # If the message line doesn't start with '+', old line number must be specified.
      break if current_new_line == message.line.to_i
    end
  end
  # Project the new-side line number back onto the old side using the
  # offset accumulated while walking the hunks.
  {
    path: change["old_path"],
    line: current_old_line - current_new_line + message.line.to_i
  }
end
# A message is "out of range" when its file/line is not an added line of
# the MR diff, so no inline comment can be attached to it.
def is_out_of_range(changes, message)
  change = changes.find { |candidate| candidate["new_path"] == message.file }
  # No diff entry, empty diff (rename only) or deleted file: always out of range.
  return true if change.nil? || change["diff"].empty? || change["deleted_file"]
  # Every line of a brand-new file is in range.
  return false if change["new_file"]

  !generate_addition_lines(change["diff"]).include?(message.line.to_i)
end
# Returns the new-side line numbers of every added ("+") line in a unified
# diff string.
#
# Fixes over the previous version:
# * the hunk start line is taken from the regexp's named capture instead of
#   splitting the raw header on "+"/" " — the old approach broke when the
#   trailing hunk context contained a "+" (e.g. "@@ -1,3 +1,4 @@ def a+b").
# * "\ No newline at end of file" markers no longer advance the counter
#   (mirrors the handling in find_old_position_in_diff).
#
# @param diff [String] unified diff for a single file
# @return [Array<Integer>] 1-based line numbers (new side) of added lines
def generate_addition_lines(diff)
  range_header_regexp = /@@ -(?<old>[0-9]+)(,([0-9]+))? \+(?<new>[0-9]+)(,([0-9]+))? @@.*/
  addition_lines = []
  line_number = 0
  diff.each_line do |line|
    match = line.match(range_header_regexp)
    if match
      # Position just before the hunk's first new-side line; the trailing
      # increment below brings us onto it.
      line_number = match[:new].to_i - 1
    elsif line.start_with?("+")
      addition_lines.push(line_number)
    elsif line.start_with?("-") || line.start_with?("\\")
      # Removed lines and "\ No newline" markers don't consume a new-side
      # line number; cancel out the unconditional increment.
      line_number -= 1
    end
    line_number += 1
  end
  addition_lines
end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/bitbucket_server_api.rb | lib/danger/request_sources/bitbucket_server_api.rb | # frozen_string_literal: true
require "openssl"
require "danger/helpers/comments_helper"
module Danger
  module RequestSources
    # Minimal HTTP client for the Bitbucket Server (Stash) REST API, scoped
    # to one pull request. Used by the BitbucketServer request source to
    # read PR data and manage Danger's comments and build statuses.
    class BitbucketServerAPI
      attr_accessor :host, :verify_ssl, :pr_api_endpoint, :key, :project

      # @param project [String] Bitbucket project key
      # @param slug [String] repository slug within the project
      # @param pull_request_id [String, Integer] the PR to operate on
      # @param environment [Hash] usually ENV; reads DANGER_BITBUCKETSERVER_* keys
      def initialize(project, slug, pull_request_id, environment)
        @username = environment["DANGER_BITBUCKETSERVER_USERNAME"]
        @password = environment["DANGER_BITBUCKETSERVER_PASSWORD"]
        self.host = environment["DANGER_BITBUCKETSERVER_HOST"]
        # SSL verification stays on unless explicitly set to the string "false".
        self.verify_ssl = environment["DANGER_BITBUCKETSERVER_VERIFY_SSL"] != "false"
        # Default to https:// when the host was configured without a scheme.
        if self.host && !(self.host.include? "http://") && !(self.host.include? "https://")
          self.host = "https://#{self.host}"
        end
        self.key = slug
        self.project = project
        self.pr_api_endpoint = "#{host}/rest/api/1.0/projects/#{project}/repos/#{slug}/pull-requests/#{pull_request_id}"
      end

      # Redacts the password so it can't leak through logs via #inspect.
      def inspect
        inspected = super
        inspected = inspected.gsub(@password, "********") if @password
        inspected
      end

      # Truthy when both username and password are present and non-empty.
      def credentials_given?
        @username && !@username.empty? && @password && !@password.empty?
      end

      def pull_request(*)
        fetch_pr_json
      end

      # GETs the pull request resource itself.
      def fetch_pr_json
        uri = URI(pr_api_endpoint)
        fetch_json(uri)
      end

      # GETs the PR diff, without inline comments mixed in.
      def fetch_pr_diff
        uri = URI("#{pr_api_endpoint}/diff?withComments=false")
        fetch_json(uri)
      end

      # Returns comment objects from the PR activity stream
      # (capped at 1000 activity entries per request).
      def fetch_last_comments
        uri = URI("#{pr_api_endpoint}/activities?limit=1000")
        fetch_json(uri)[:values].select { |v| v[:action] == "COMMENTED" }.map { |v| v[:comment] }
      end

      # Deletes a comment; Bitbucket requires the comment's current version.
      def delete_comment(id, version)
        uri = URI("#{pr_api_endpoint}/comments/#{id}?version=#{version}")
        delete(uri)
      end

      def post_comment(text)
        uri = URI("#{pr_api_endpoint}/comments")
        body = { text: text }.to_json
        post(uri, body)
      end

      # Publishes a build status for the given commit (changeset).
      def update_pr_build_status(status, changeset, build_job_link, description)
        uri = URI("#{self.host}/rest/build-status/1.0/commits/#{changeset}")
        body = build_status_body(status, build_job_link, description)
        post(uri, body)
      end

      private

      def use_ssl
        return self.pr_api_endpoint.include? "https://"
      end

      # GET +uri+ and parse the JSON body (keys symbolized).
      def fetch_json(uri)
        req = Net::HTTP::Get.new(uri.request_uri, { "Content-Type" => "application/json" })
        req.basic_auth @username, @password
        res = http(uri).start do |http|
          http.request(req)
        end
        JSON.parse(res.body, symbolize_names: true)
      end

      # POST +body+ to +uri+; aborts the process on an HTTP 4xx/5xx response.
      def post(uri, body)
        req = Net::HTTP::Post.new(uri.request_uri, { "Content-Type" => "application/json" })
        req.basic_auth @username, @password
        req.body = body
        res = http(uri).start do |http|
          http.request(req)
        end
        # show error to the user when Bitbucket Server returned an error
        case res
        when Net::HTTPClientError, Net::HTTPServerError
          # HTTP 4xx - 5xx
          abort "\nError posting comment to Bitbucket Server: #{res.code} (#{res.message}) - #{res.body}\n\n"
        end
      end

      # DELETE +uri+; response errors are not checked (best effort).
      def delete(uri)
        req = Net::HTTP::Delete.new(uri.request_uri, { "Content-Type" => "application/json" })
        req.basic_auth @username, @password
        http(uri).start do |http|
          http.request(req)
        end
      end

      # Builds a client configured for this endpoint's scheme and the
      # configured SSL verification mode.
      def http(uri)
        http = Net::HTTP.new(uri.hostname, uri.port)
        http.use_ssl = use_ssl
        http.verify_mode = verify_ssl ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE
        http
      end

      # JSON payload for the build-status endpoint; description is optional.
      def build_status_body(status, build_job_link, description)
        body = {}
        body["state"] = status
        body["key"] = self.key
        body["url"] = build_job_link
        body["description"] = description if description
        return body.to_json
      end
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/bitbucket_server.rb | lib/danger/request_sources/bitbucket_server.rb | # frozen_string_literal: true
require "danger/helpers/comments_helper"
require "danger/request_sources/bitbucket_server_api"
require "danger/request_sources/code_insights_api"
require_relative "request_source"
module Danger
  module RequestSources
    # Request source for Bitbucket Server (self-hosted). Posts Danger's
    # results as a PR comment and, when configured, publishes a Code
    # Insights report with inline annotations.
    class BitbucketServer < RequestSource
      include Danger::Helpers::CommentsHelper
      attr_accessor :pr_json, :dismiss_out_of_range_messages

      # Environment variables required to authenticate against the server.
      def self.env_vars
        [
          "DANGER_BITBUCKETSERVER_USERNAME",
          "DANGER_BITBUCKETSERVER_PASSWORD",
          "DANGER_BITBUCKETSERVER_HOST"
        ]
      end

      # Optional settings: Code Insights reporting, SSL verification and
      # out-of-range message dismissal.
      def self.optional_env_vars
        [
          "DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_KEY",
          "DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_TITLE",
          "DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_DESCRIPTION",
          "DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_LOGO_URL",
          "DANGER_BITBUCKETSERVER_VERIFY_SSL",
          "DANGER_BITBUCKETSERVER_DISMISS_OUT_OF_RANGE_MESSAGES"
        ]
      end

      def initialize(ci_source, environment)
        self.ci_source = ci_source
        self.dismiss_out_of_range_messages = environment["DANGER_BITBUCKETSERVER_DISMISS_OUT_OF_RANGE_MESSAGES"] == "true"
        # repo_slug arrives as "PROJECT/slug".
        project, slug = ci_source.repo_slug.split("/")
        @api = BitbucketServerAPI.new(project, slug, ci_source.pull_request_id, environment)
        @code_insights = CodeInsightsAPI.new(project, slug, environment)
      end

      def validates_as_ci?
        # TODO: ???
        true
      end

      def validates_as_api_source?
        @api.credentials_given?
      end

      def scm
        @scm ||= GitRepo.new
      end

      def host
        @host ||= @api.host
      end

      def fetch_details
        self.pr_json = @api.fetch_pr_json
      end

      def pr_diff
        @pr_diff ||= @api.fetch_pr_diff
      end

      # Creates the danger_base/danger_head local branches at the PR's
      # target and source commits so diffing works from any checkout state.
      def setup_danger_branches
        base_branch = self.pr_json[:toRef][:id].sub("refs/heads/", "")
        base_commit = self.pr_json[:toRef][:latestCommit]
        # Support for older versions of Bitbucket Server.
        # FIX: the guard must check :toRef here — it previously checked
        # :fromRef, so the base-commit fallback keyed off the wrong side.
        base_commit = self.pr_json[:toRef][:latestChangeset] if self.pr_json[:toRef].key? :latestChangeset
        head_branch = self.pr_json[:fromRef][:id].sub("refs/heads/", "")
        head_commit = self.pr_json[:fromRef][:latestCommit]
        # Support for older versions of Bitbucket Server
        head_commit = self.pr_json[:fromRef][:latestChangeset] if self.pr_json[:fromRef].key? :latestChangeset

        # Next, we want to ensure that we have a version of the current branch at a known location
        scm.ensure_commitish_exists_on_branch! base_branch, base_commit
        self.scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_commit}"

        # OK, so we want to ensure that we have a known head branch, this will always represent
        # the head of the PR ( e.g. the most recent commit that will be merged. )
        scm.ensure_commitish_exists_on_branch! head_branch, head_commit
        self.scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_commit}"
      end

      def organisation
        nil
      end

      # Publishes Danger's results: optionally a Code Insights report with
      # inline annotations, then a single summary comment (old ones are
      # removed first unless new_comment is requested without cleanup).
      def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
        delete_old_comments(danger_id: danger_id) if !new_comment || remove_previous_comments

        # If configured, send a Code Insights API to provide the PR with a quality report
        # which includes inline code violations found by Danger as Annotations.
        # If no inline violations occurred, an empty, successful (green) report will be sent.
        if @code_insights.ready?
          inline_violations = inline_violations_group(warnings: warnings, errors: errors, messages: messages)
          inline_warnings = inline_violations[:warnings] || []
          inline_errors = inline_violations[:errors] || []
          inline_messages = inline_violations[:messages] || []

          head_commit = self.pr_json[:fromRef][:latestCommit]
          @code_insights.send_report(head_commit,
                                     inline_warnings,
                                     inline_errors,
                                     inline_messages)
        end

        # If we're sending inline comments separately via Code Insights,
        # the main body comment should contain only generic, non-file specific messages.
        if @code_insights.ready?
          main_violations = main_violations_group(warnings: warnings, errors: errors, messages: messages)
          warnings = main_violations[:warnings] || []
          errors = main_violations[:errors] || []
          messages = main_violations[:messages] || []
          markdowns = main_violations[:markdowns] || []
        end

        has_comments = warnings.count > 0 || errors.count > 0 || messages.count > 0 || markdowns.count > 0
        if has_comments
          comment = generate_description(warnings: warnings,
                                         errors: errors)
          comment += "\n\n"
          comment += generate_comment(warnings: warnings,
                                      errors: errors,
                                      messages: messages,
                                      markdowns: markdowns,
                                      previous_violations: {},
                                      danger_id: danger_id,
                                      template: "bitbucket_server")

          @api.post_comment(comment)
        end
      end

      # Removes comments from previous Danger runs (matched by danger_id).
      def delete_old_comments(danger_id: "danger")
        @api.fetch_last_comments.each do |c|
          @api.delete_comment(c[:id], c[:version]) if c[:text] =~ /generated_by_#{danger_id}/
        end
      end

      # Violations destined for the main summary comment. With dismissal
      # enabled this is simply the non-inline ones; otherwise it also keeps
      # inline violations whose line is not part of the diff.
      def main_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
        if dismiss_out_of_range_messages
          {
            warnings: warnings.reject(&:inline?),
            errors: errors.reject(&:inline?),
            messages: messages.reject(&:inline?),
            markdowns: markdowns.reject(&:inline?)
          }
        else
          in_diff = proc { |a| find_position_in_diff?(a.file, a.line) }
          {
            warnings: warnings.reject(&in_diff),
            errors: errors.reject(&in_diff),
            messages: messages.reject(&in_diff),
            markdowns: markdowns.reject(&in_diff)
          }
        end
      end

      # Inline violations, sorted by file then line so annotations group
      # per file.
      def inline_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
        cmp = proc do |a, b|
          next -1 unless a.file && a.line
          next 1 unless b.file && b.line

          next a.line <=> b.line if a.file == b.file
          next a.file <=> b.file
        end

        # Sort to group inline comments by file
        {
          warnings: warnings.select(&:inline?).sort(&cmp),
          errors: errors.select(&:inline?).sort(&cmp),
          messages: messages.select(&:inline?).sort(&cmp),
          markdowns: markdowns.select(&:inline?).sort(&cmp)
        }
      end

      # Publishes a build status against the PR's head commit.
      def update_pr_build_status(status, build_job_link, description)
        changeset = self.pr_json[:fromRef][:latestCommit]
        # Support for older versions of Bitbucket Server
        changeset = self.pr_json[:fromRef][:latestChangeset] if self.pr_json[:fromRef].key? :latestChangeset
        puts "Changeset: #{changeset}"
        puts self.pr_json.to_json
        @api.update_pr_build_status(status, changeset, build_job_link, description)
      end

      # Truthy when (file, line) corresponds to an added line in the PR diff.
      def find_position_in_diff?(file, line)
        return nil if file.nil? || line.nil?
        return nil if file.empty?

        added_lines(file).include?(line)
      end

      # The diff entry for +file+, or an empty-hunks stub when absent.
      def file_diff(file)
        self.pr_diff[:diffs].find { |diff| diff[:destination] && diff[:destination][:toString] == file } || { hunks: [] }
      end

      # Memoized list of new-side line numbers added for +file+.
      def added_lines(file)
        @added_lines ||= {}
        @added_lines[file] ||= file_diff(file)[:hunks].map do |hunk|
          hunk[:segments].select { |segment| segment[:type] == "ADDED" }.map do |segment|
            segment[:lines].map do |line|
              line[:destination]
            end
          end
        end.flatten
      end
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/bitbucket_cloud.rb | lib/danger/request_sources/bitbucket_cloud.rb | # frozen_string_literal: true
require "danger/helpers/comments_helper"
require "danger/request_sources/bitbucket_cloud_api"
require "danger/danger_core/message_group"
module Danger
  module RequestSources
    # Request source for bitbucket.org. Posts each inline violation as its
    # own PR comment plus a single summary comment.
    class BitbucketCloud < RequestSource
      include Danger::Helpers::CommentsHelper
      attr_accessor :pr_json

      def self.env_vars
        ["DANGER_BITBUCKETCLOUD_UUID"]
      end

      # While it says "optional", one of these is required to run Danger on Bitbucket Cloud.
      #
      # - Both `DANGER_BITBUCKETCLOUD_OAUTH_KEY` and `DANGER_BITBUCKETCLOUD_OAUTH_SECRET`
      # - Both `DANGER_BITBUCKETCLOUD_USERNAME` and `DANGER_BITBUCKETCLOUD_PASSWORD`
      # - `DANGER_BITBUCKETCLOUD_REPO_ACCESSTOKEN`
      def self.optional_env_vars
        [
          "DANGER_BITBUCKETCLOUD_OAUTH_KEY",
          "DANGER_BITBUCKETCLOUD_OAUTH_SECRET",
          "DANGER_BITBUCKETCLOUD_REPO_ACCESSTOKEN",
          "DANGER_BITBUCKETCLOUD_USERNAME",
          "DANGER_BITBUCKETCLOUD_PASSWORD"
        ]
      end

      def initialize(ci_source, environment)
        self.ci_source = ci_source
        @api = BitbucketCloudAPI.new(ci_source.repo_slug, ci_source.pull_request_id, nil, environment)
      end

      def validates_as_ci?
        # TODO: ???
        true
      end

      def validates_as_api_source?
        @api.credentials_given?
      end

      def scm
        @scm ||= GitRepo.new
      end

      def host
        @host ||= @api.host
      end

      def fetch_details
        self.pr_json = @api.fetch_pr_json
      end

      # Creates the danger_base/danger_head local branches pointing at the
      # PR's destination and source commits.
      def setup_danger_branches
        base_branch = self.pr_json[:destination][:branch][:name]
        base_commit = self.pr_json[:destination][:commit][:hash]
        head_branch = self.pr_json[:source][:branch][:name]
        head_commit = self.pr_json[:source][:commit][:hash]

        # Next, we want to ensure that we have a version of the current branch at a known location
        scm.ensure_commitish_exists_on_branch! base_branch, base_commit
        self.scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_commit}"

        # OK, so we want to ensure that we have a known head branch, this will always represent
        # the head of the PR ( e.g. the most recent commit that will be merged. )
        scm.ensure_commitish_exists_on_branch! head_branch, head_commit
        self.scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_commit}"
      end

      def organisation
        nil
      end

      # Posts inline comments for file/line violations, then one summary
      # comment for whatever could not be attached inline.
      def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
        delete_old_comments(danger_id: danger_id) if !new_comment || remove_previous_comments

        # Each call strips out the violations it posted inline.
        warnings = update_inline_comments_for_kind!(:warnings, warnings, danger_id: danger_id)
        errors = update_inline_comments_for_kind!(:errors, errors, danger_id: danger_id)
        messages = update_inline_comments_for_kind!(:messages, messages, danger_id: danger_id)
        markdowns = update_inline_comments_for_kind!(:markdowns, markdowns, danger_id: danger_id)

        has_comments = warnings.count.positive? || errors.count.positive? || messages.count.positive? || markdowns.count.positive?
        if has_comments
          comment = generate_description(warnings: warnings, errors: errors, template: "bitbucket_server")
          comment += "\n\n"
          comment += generate_comment(warnings: warnings,
                                      errors: errors,
                                      messages: messages,
                                      markdowns: markdowns,
                                      previous_violations: {},
                                      danger_id: danger_id,
                                      template: "bitbucket_server")

          @api.post_comment(comment)
        end
      end

      # message_group-based variant of update_pull_request!: one comment per
      # (file, line) group, plus a summary comment for the nil/nil group.
      def update_pr_by_line!(message_groups:,
                             danger_id: "danger",
                             new_comment: false,
                             remove_previous_comments: false)
        if !new_comment || remove_previous_comments
          delete_old_comments(danger_id: danger_id)
        end

        summary_body = generate_description(warnings: message_groups.fake_warnings_array,
                                            errors: message_groups.fake_errors_array,
                                            template: "bitbucket_server")
        summary_body += "\n\n"

        # this isn't the most elegant thing in the world, but we need the group
        # with file: nil, line: nil so we can combine its info in with the
        # summary_body
        summary_group = message_groups.first
        if summary_group && summary_group.file.nil? && summary_group.line.nil?
          # remove summary_group from message_groups so it doesn't get a
          # duplicate comment posted in the message_groups loop below
          message_groups.shift
        else
          summary_group = MessageGroup.new(file: nil, line: nil)
        end

        summary_body += generate_message_group_comment(
          message_group: summary_group,
          danger_id: danger_id,
          template: "bitbucket_server_message_group"
        )
        @api.post_comment(summary_body)

        message_groups.each do |message_group|
          body = generate_message_group_comment(message_group: message_group,
                                                danger_id: danger_id,
                                                template: "bitbucket_server_message_group")
          @api.post_comment(body,
                            file: message_group.file,
                            line: message_group.line)
        end
      end

      # Posts each file/line violation of one kind as an inline comment and
      # returns the violations that had no file/line (left for the summary).
      def update_inline_comments_for_kind!(kind, messages, danger_id: "danger")
        emoji = { warnings: "warning", errors: "no_entry_sign", messages: "book" }[kind]

        messages.reject do |message|
          next false unless message.file && message.line

          body = ""
          # FIX: callers pass plural kind symbols (:warnings, :errors,
          # :messages, :markdowns) — this previously compared against the
          # singular :markdown, so markdown violations were never rendered
          # with the markdown template.
          if kind == :markdowns
            body = generate_inline_markdown_body(message,
                                                 danger_id: danger_id,
                                                 template: "bitbucket_server")
          else
            body = generate_inline_comment_body(emoji, message,
                                                danger_id: danger_id,
                                                template: "bitbucket_server")
          end

          @api.post_comment(body, file: message.file, line: message.line)
          true
        end
      end

      # Removes this Danger run id's previous comments (only those authored
      # by the configured account UUID).
      def delete_old_comments(danger_id: "danger")
        @api.fetch_comments.each do |c|
          next if c[:user][:uuid] != @api.my_uuid

          @api.delete_comment(c[:id]) if c[:content][:raw] =~ /generated_by_#{danger_id}/
        end
      end
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/code_insights_api.rb | lib/danger/request_sources/code_insights_api.rb | # frozen_string_literal: true
module Danger
  module RequestSources
    #
    # Provides ability for Danger to interact with Atlassian's Code Insights API in order to provide code quality
    # reports along with inline comments for specific lines in specific files.
    # See https://developer.atlassian.com/server/bitbucket/how-tos/code-insights/ for more details.
    #
    # Currently this functionality is implemented only for Bitbucket Server request source.
    class CodeInsightsAPI
      attr_accessor :username, :password, :host, :report_key, :report_title, :report_description, :logo_url

      # Reads credentials and report settings from DANGER_BITBUCKETSERVER_*
      # environment variables; missing values default to "" (see #ready?).
      def initialize(project, slug, environment)
        @username = environment["DANGER_BITBUCKETSERVER_USERNAME"] || ""
        @password = environment["DANGER_BITBUCKETSERVER_PASSWORD"] || ""
        @host = environment["DANGER_BITBUCKETSERVER_HOST"] || ""
        @report_key = environment["DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_KEY"] || ""
        @report_title = environment["DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_TITLE"] || ""
        @report_description = environment["DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_DESCRIPTION"] || ""
        @logo_url = environment["DANGER_BITBUCKETSERVER_CODE_INSIGHTS_REPORT_LOGO_URL"] || ""
        @project = project
        @slug = slug
      end

      # Redacts the password so it cannot leak through logs via #inspect.
      def inspect
        inspected = super
        inspected = inspected.gsub(@password, "********") if @password
        inspected
      end

      # Code Insights is considered configured only when credentials, host
      # and all report identity fields are present.
      def ready?
        !(@report_key.empty? || @report_title.empty? || @report_description.empty? || @username.empty? || @password.empty? || @host.empty?)
      end

      # Removes the existing report for +commit+ (and its annotations).
      # Aborts the process on an HTTP 4xx/5xx response.
      def delete_report(commit)
        uri = URI(report_endpoint_at_commit(commit))
        request = Net::HTTP::Delete.new(uri.request_uri, { "Content-Type" => "application/json" })
        request.basic_auth @username, @password
        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl) do |http|
          http.request(request)
        end

        # show failure when server returns an error
        case response
        when Net::HTTPClientError, Net::HTTPServerError
          # HTTP 4xx - 5xx
          abort "\nError deleting report from Code Insights API: #{response.code} (#{response.message}) - #{response.body}\n\n"
        end
      end

      # Replaces the report on +commit+: deletes the old one, PUTs a fresh
      # report (FAIL iff there are inline errors), then posts annotations
      # when any inline violations exist.
      def send_report(commit, inline_warnings, inline_errors, inline_messages)
        delete_report(commit)
        put_report(commit, inline_errors.count)
        should_post_annotations = !(inline_warnings + inline_errors + inline_messages).empty?
        if should_post_annotations
          post_annotations(commit, inline_warnings, inline_errors, inline_messages)
        end
      end

      # Creates/updates the report resource itself (no annotations).
      # Aborts the process on an HTTP 4xx/5xx response.
      def put_report(commit, inline_errors_count)
        uri = URI(report_endpoint_at_commit(commit))
        request = Net::HTTP::Put.new(uri.request_uri, { "Content-Type" => "application/json" })
        request.basic_auth @username, @password
        request.body = { "title": @report_title,
                         "details": @report_description,
                         "result": inline_errors_count > 0 ? "FAIL" : "PASS",
                         "reporter": @username,
                         "link": "https://github.com/danger/danger",
                         "logoURL": @logo_url }.to_json

        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl) do |http|
          http.request(request)
        end

        # show failure when server returns an error
        case response
        when Net::HTTPClientError, Net::HTTPServerError
          # HTTP 4xx - 5xx
          abort "\nError putting report to Code Insights API: #{response.code} (#{response.message}) - #{response.body}\n\n"
        end
      end

      # Posts all violations as annotations on the report, mapping Danger
      # kinds to Code Insights severities (message=LOW, warning=MEDIUM,
      # error=HIGH). Aborts the process on an HTTP 4xx/5xx response.
      def post_annotations(commit, inline_warnings, inline_errors, inline_messages)
        uri = URI(annotation_endpoint_at_commit(commit))
        annotations = []

        inline_messages.each do |violation|
          annotations << violation_hash_with_severity(violation, "LOW")
        end

        inline_warnings.each do |violation|
          annotations << violation_hash_with_severity(violation, "MEDIUM")
        end

        inline_errors.each do |violation|
          annotations << violation_hash_with_severity(violation, "HIGH")
        end

        body = { annotations: annotations }.to_json
        request = Net::HTTP::Post.new(uri.request_uri, { "Content-Type" => "application/json" })
        request.basic_auth @username, @password
        request.body = body

        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl) do |http|
          http.request(request)
        end

        # show failure when server returns an error
        case response
        when Net::HTTPClientError, Net::HTTPServerError
          # HTTP 4xx - 5xx
          abort "\nError posting comment to Code Insights API: #{response.code} (#{response.message}) - #{response.body}\n\n"
        end
      end

      # Converts a violation into the annotation payload expected by the API.
      def violation_hash_with_severity(violation, severity)
        annotation = {}
        annotation["message"] = violation.message
        annotation["severity"] = severity
        annotation["path"] = violation.file
        annotation["line"] = violation.line.to_i
        return annotation
      end

      def report_endpoint_at_commit(commit)
        "#{@host}/rest/insights/1.0/projects/#{@project}/repos/#{@slug}/commits/#{commit}/reports/#{@report_key}"
      end

      def annotation_endpoint_at_commit(commit)
        "#{report_endpoint_at_commit(commit)}/annotations"
      end

      def use_ssl
        @host.include? "https://"
      end
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/vsts_api.rb | lib/danger/request_sources/vsts_api.rb | # frozen_string_literal: true
require "base64"
require "danger/helpers/comments_helper"
module Danger
module RequestSources
class VSTSAPI
attr_accessor :host, :pr_api_endpoint, :min_api_version_for_comments
def initialize(slug, pull_request_id, environment)
self.min_api_version_for_comments = "3.0"
user_name = ""
personal_access_token = environment["DANGER_VSTS_API_TOKEN"]
@token = Base64.strict_encode64("#{user_name}:#{personal_access_token}")
@api_version = environment["DANGER_VSTS_API_VERSION"] ||= self.min_api_version_for_comments
self.host = environment["DANGER_VSTS_HOST"]
if self.host && !(self.host.include? "http://") && !(self.host.include? "https://")
self.host = "https://#{self.host}"
end
self.pr_api_endpoint = "#{host}/_apis/git/repositories/#{slug}/pullRequests/#{pull_request_id}"
end
def supports_comments?
major_version = @api_version.split(".").first.to_i
minimum_version_for_comments = self.min_api_version_for_comments.split(".").first.to_i
major_version >= minimum_version_for_comments
end
def inspect
inspected = super
inspected = inspected.gsub(@token, "********") if @token
inspected
end
def credentials_given?
@token && !@token.empty?
end
def pull_request(*)
fetch_pr_json
end
def fetch_pr_json
uri = URI("#{pr_api_endpoint}?api-version=#{@api_version}")
fetch_json(uri)
end
def fetch_last_comments
uri = URI("#{pr_api_endpoint}/threads?api-version=#{@api_version}")
fetch_json(uri)[:value]
end
def post_comment(text)
uri = URI("#{pr_api_endpoint}/threads?api-version=#{@api_version}")
body = {
"comments" => [
{
"parentCommentId" => 0,
"content" => text,
"commentType" => 1
}
],
"properties" => {
"Microsoft.TeamFoundation.Discussion.SupportsMarkdown" => {
"type" => "System.Int32",
"value" => 1
}
},
"status" => 1
}.to_json
post(uri, body)
end
def post_inline_comment(text, file, line)
uri = URI("#{pr_api_endpoint}/threads?api-version=#{@api_version}")
body = {
"comments" => [
{
"parentCommentId" => 0,
"content" => text,
"commentType" => 1
}
],
"properties" => {
"Microsoft.TeamFoundation.Discussion.SupportsMarkdown" => {
"type" => "System.Int32",
"value" => 1
}
},
"status" => 1,
"threadContext" => {
"filePath" => file,
"rightFileEnd" => {
"line" => line + 1,
"offset" => 1
},
"rightFileStart" => {
"line" => line,
"offset" => 1
}
}
}.to_json
post(uri, body)
end
def update_comment(thread, id, new_comment)
uri = URI("#{pr_api_endpoint}/threads/#{thread}/comments/#{id}?api-version=#{@api_version}")
body = {
"content" => new_comment
}.to_json
patch(uri, body)
end
private
def use_ssl
return self.pr_api_endpoint.include? "https://"
end
def fetch_json(uri)
req = Net::HTTP::Get.new(uri.request_uri, { "Content-Type" => "application/json", "Authorization" => "Basic #{@token}" })
res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl) do |http|
http.request(req)
end
JSON.parse(res.body, symbolize_names: true)
end
# POSTs a JSON body with Basic auth; aborts the process with a readable
# message if the server answers 4xx/5xx.
def post(uri, body)
req = Net::HTTP::Post.new(uri.request_uri, { "Content-Type" => "application/json", "Authorization" => "Basic #{@token}" })
req.body = body
res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl) do |http|
http.request(req)
end
# show error to the user when VSTS returned an error
case res
when Net::HTTPClientError, Net::HTTPServerError
# HTTP 4xx - 5xx
abort "\nError posting comment to VSTS: #{res.code} (#{res.message})\n\n"
end
end
# PATCHes a JSON body with Basic auth; aborts the process with a readable
# message if the server answers 4xx/5xx.
#
# FIX: removed two leftover debug `puts uri` / `puts body` calls that dumped
# the request URI and full comment payload to stdout on every update.
def patch(uri, body)
  req = Net::HTTP::Patch.new(uri.request_uri, { "Content-Type" => "application/json", "Authorization" => "Basic #{@token}" })
  req.body = body
  res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl) do |http|
    http.request(req)
  end
  # show error to the user when VSTS returned an error
  case res
  when Net::HTTPClientError, Net::HTTPServerError
    # HTTP 4xx - 5xx
    abort "\nError updating comment on VSTS: #{res.code} (#{res.message})\n\n"
  end
end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/request_source.rb | lib/danger/request_sources/request_source.rb | # frozen_string_literal: true
module Danger
module RequestSources
# Abstract base class for all request sources (GitHub, GitLab, BitBucket, …).
# Subclasses register themselves automatically via the `inherited` hook and
# must override the `raise`-ing stub methods below.
class RequestSource
DANGER_REPO_NAME = "danger"
attr_accessor :ci_source, :scm, :host, :ignored_violations
# ENV vars required for the source to authenticate. Abstract.
def self.env_vars
raise "Subclass and overwrite self.env_vars"
end
# ENV vars that tweak behaviour but are not required.
def self.optional_env_vars
[]
end
# Auto-registers every concrete subclass in `available_request_sources`.
def self.inherited(child_class)
available_request_sources.add child_class
super
end
def self.available_request_sources
@available_request_sources ||= Set.new
end
# Human-readable name, e.g. "GitHub" (namespace prefix stripped).
def self.source_name
to_s.sub("Danger::RequestSources::", "")
end
# One formatted line per registered source, listing its required ENV vars.
def self.available_source_names_and_envs
available_request_sources.map do |klass|
" - #{klass.source_name}: #{klass.env_vars.join(', ').yellow}"
end
end
def initialize(_ci_source, _environment)
raise "Subclass and overwrite initialize"
end
# Redacts any of the known credential ivars from #inspect output.
def inspect
inspected = super
inspected = inspected.gsub(@token, "********") if @token
inspected = inspected.gsub(@access_token, "********") if @access_token
inspected = inspected.gsub(@bearer_token, "********") if @bearer_token
inspected
end
# @return [Boolean] whether scm.origins is a valid git repository or not
def validates_as_ci?
!!self.scm.origins.match(%r{#{Regexp.escape self.host}(:|/)(.+/.+?)(?:\.git)?$})
end
def validates_as_api_source?
raise "Subclass and overwrite validates_as_api_source?"
end
def scm
@scm ||= nil
end
def host
@host ||= nil
end
def ignored_violations
@ignored_violations ||= []
end
def update_pull_request!(_warnings: [], _errors: [], _messages: [], _markdowns: [])
raise "Subclass and overwrite update_pull_request!"
end
def setup_danger_branches
raise "Subclass and overwrite setup_danger_branches"
end
def fetch_details
# FIX: this stub previously raised "Subclass and overwrite initialize",
# a copy-paste error that pointed developers at the wrong method.
raise "Subclass and overwrite fetch_details"
end
def organisation
raise "Subclass and overwrite organisation"
end
def file_url(_organisation: nil, _repository: nil, _ref: nil, _branch: nil, _path: nil)
raise "Subclass and overwrite file_url"
end
def update_build_status(_status)
raise "Subclass and overwrite update_build_status"
end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/support/get_ignored_violation.rb | lib/danger/request_sources/support/get_ignored_violation.rb | # frozen_string_literal: true
# Extracts the violation messages a PR author asked Danger to skip.
#
# Markers are case-insensitive, may be inside a Markdown blockquote, and
# look like: danger: ignore "Some message"
class GetIgnoredViolation
  IGNORE_REGEXP = />*\s*danger\s*:\s*ignore\s*"(?<error>[^"]*)"/i.freeze

  def initialize(body)
    @body = body
  end

  # @return [Array<String>] every ignored message found, or [] for a nil body.
  def call
    body.to_s.chomp.scan(IGNORE_REGEXP).flatten
  end

  private

  attr_reader :body
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/github/github_review.rb | lib/danger/request_sources/github/github_review.rb | # frozen_string_literal: true
require "octokit"
require "danger/ci_source/ci_source"
require "danger/request_sources/github/github_review_resolver"
require "danger/danger_core/messages/violation"
require "danger/danger_core/messages/markdown"
require "danger/helpers/comments_helper"
require "danger/helpers/comment"
module Danger
module RequestSources
module GitHubSource
# Wraps a single GitHub pull-request review: collects violations while the
# Dangerfile runs, then submits them as one review via the GitHub API.
class Review
include Danger::Helpers::CommentsHelper
# @see https://developer.github.com/v3/pulls/reviews/ for all possible events
EVENT_APPROVE = "APPROVE"
EVENT_REQUEST_CHANGES = "REQUEST_CHANGES"
EVENT_COMMENT = "COMMENT"
# Current review status, if the review has not been submitted yet -> STATUS_PENDING
STATUS_APPROVED = "APPROVED"
STATUS_REQUESTED_CHANGES = "CHANGES_REQUESTED"
STATUS_COMMENTED = "COMMENTED"
STATUS_PENDING = "PENDING"
attr_reader :id, :body, :status, :review_json
# @param client [Octokit::Client] authenticated API client
# @param review_json [Hash, nil] an existing review's JSON; nil for a new one
def initialize(client, ci_source, review_json = nil)
@ci_source = ci_source
@client = client
@review_json = review_json
end
# @return [Integer, nil] the review id, or nil before first submission.
def id
return nil unless self.review_json
self.review_json["id"]
end
# @return [String] the review body, "" before first submission.
def body
return "" unless self.review_json
self.review_json["body"]
end
# @return [String] one of the STATUS_* constants.
def status
return STATUS_PENDING if self.review_json.nil?
return self.review_json["state"]
end
# Starts the new review process
def start
@warnings = []
@errors = []
@messages = []
@markdowns = []
end
# Submits the prepared review
def submit
general_violations = generate_general_violations
submission_body = generate_body
# If the review resolver says that there is nothing to submit we skip submission
return unless ReviewResolver.should_submit?(self, submission_body)
@review_json = @client.create_pull_request_review(@ci_source.repo_slug, @ci_source.pull_request_id, event: generate_event(general_violations), body: submission_body)
end
# NOTE(review): reads review_json["body"] directly, so this raises on a
# review that has never been submitted (review_json nil) — confirm callers
# only invoke it on fetched reviews.
def generated_by_danger?(danger_id = "danger")
self.review_json["body"].include?("generated_by_#{danger_id}")
end
def message(message, sticky = true, file = nil, line = nil)
@messages << Violation.new(message, sticky, file, line)
end
def warn(message, sticky = true, file = nil, line = nil)
@warnings << Violation.new(message, sticky, file, line)
end
def fail(message, sticky = true, file = nil, line = nil)
@errors << Violation.new(message, sticky, file, line)
end
def markdown(message, file = nil, line = nil)
@markdowns << Markdown.new(message, file, line)
end
private
# The only reason to request changes for the PR is to have errors from Danger
# otherwise let's just notify user and we're done
def generate_event(violations)
violations[:errors].empty? ? EVENT_APPROVE : EVENT_REQUEST_CHANGES
end
# Renders the review body from the collected general (non-inline) violations,
# merging in violations parsed from the previous review body.
def generate_body(danger_id: "danger")
previous_violations = parse_comment(body)
general_violations = generate_general_violations
new_body = generate_comment(warnings: general_violations[:warnings],
errors: general_violations[:errors],
messages: general_violations[:messages],
markdowns: general_violations[:markdowns],
previous_violations: previous_violations,
danger_id: danger_id,
template: "github")
return new_body
end
# Partitions the collected violations down to the non-inline ones.
def generate_general_violations
general_warnings = @warnings.reject(&:inline?)
general_errors = @errors.reject(&:inline?)
general_messages = @messages.reject(&:inline?)
general_markdowns = @markdowns.reject(&:inline?)
{
warnings: general_warnings,
markdowns: general_markdowns,
errors: general_errors,
messages: general_messages
}
end
end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/github/github_review_resolver.rb | lib/danger/request_sources/github/github_review_resolver.rb | # frozen_string_literal: true
require "danger/request_sources/github/github_review"
module Danger
  module RequestSources
    module GitHubSource
      # Decides whether a freshly generated review body is worth submitting.
      class ReviewResolver
        # Submit only when the new body differs from the review's current one.
        def self.should_submit?(review, body)
          !same_body?(body, review.body)
        end

        # Bodies are "the same" only when both are present and equal.
        def self.same_body?(body1, body2)
          !body1.nil? && !body2.nil? && body1 == body2
        end
      end
    end
  end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/github/github.rb | lib/danger/request_sources/github/github.rb | # frozen_string_literal: true
# rubocop:disable Metrics/ClassLength
require "octokit"
require "danger/helpers/comments_helper"
require "danger/helpers/comment"
require "danger/request_sources/github/github_review"
require "danger/request_sources/github/github_review_unsupported"
require "danger/request_sources/support/get_ignored_violation"
module Danger
module RequestSources
class GitHub < RequestSource
include Danger::Helpers::CommentsHelper
attr_accessor :pr_json, :issue_json, :use_local_git, :support_tokenless_auth, :dismiss_out_of_range_messages, :host, :api_url, :verify_ssl
# ENV vars, one of which must be present for GitHub to authenticate.
def self.env_vars
  %w[DANGER_GITHUB_API_TOKEN DANGER_GITHUB_BEARER_TOKEN]
end
# ENV vars that tweak behaviour (enterprise host, API base URL, SSL checks).
def self.optional_env_vars
  %w[DANGER_GITHUB_HOST DANGER_GITHUB_API_BASE_URL DANGER_OCTOKIT_VERIFY_SSL]
end
# Reads all GitHub-related configuration out of the environment.
# Tokens are kept in ivars (not accessors) so #inspect can redact them.
def initialize(ci_source, environment)
self.ci_source = ci_source
self.use_local_git = environment["DANGER_USE_LOCAL_GIT"]
self.support_tokenless_auth = false
self.dismiss_out_of_range_messages = false
self.host = environment.fetch("DANGER_GITHUB_HOST", "github.com")
# `DANGER_GITHUB_API_HOST` is the old name kept for legacy reasons and
# backwards compatibility. `DANGER_GITHUB_API_BASE_URL` is the new
# correctly named variable.
self.api_url = environment.fetch("DANGER_GITHUB_API_HOST") do
environment.fetch("DANGER_GITHUB_API_BASE_URL", "https://api.github.com/")
end
# Any value other than the literal string "false" keeps SSL verification on.
self.verify_ssl = environment["DANGER_OCTOKIT_VERIFY_SSL"] != "false"
@access_token = environment["DANGER_GITHUB_API_TOKEN"]
@bearer_token = environment["DANGER_GITHUB_BEARER_TOKEN"]
end
# Finds the number of the first open PR whose head is `owner:branch_name`,
# or nil when no such PR exists.
def get_pr_from_branch(repo_name, branch_name, owner)
  matching_prs = client.pull_requests(repo_name, head: "#{owner}:#{branch_name}")
  matching_prs.first&.number
end
# GitHub PRs are always considered a valid CI context.
def validates_as_ci?
true
end
# Usable as an API source with either token, or with local-git mode.
def validates_as_api_source?
valid_bearer_token? || valid_access_token? || use_local_git
end
# Lazily created local git repo wrapper.
def scm
@scm ||= GitRepo.new
end
# Memoised Octokit client; bearer token wins over access token.
# Raises unless a token is present or tokenless auth was explicitly enabled.
def client
raise "No API token given, please provide one using `DANGER_GITHUB_API_TOKEN` or `DANGER_GITHUB_BEARER_TOKEN`" if !valid_access_token? && !valid_bearer_token? && !support_tokenless_auth
@client ||= begin
Octokit.configure do |config|
config.connection_options[:ssl] = { verify: verify_ssl }
end
if valid_bearer_token?
Octokit::Client.new(bearer_token: @bearer_token, auto_paginate: true, api_endpoint: api_url)
elsif valid_access_token?
Octokit::Client.new(access_token: @access_token, auto_paginate: true, api_endpoint: api_url)
end
end
end
# Builds and memoises the PR's unified diff.
#
# The GitHub files API returns per-file patches without the
# `diff --git`/`---`/`+++` headers, so each patch is re-prefixed here to
# produce something parse-diff compatible.
#
# FIX: the `client.pull_request_files` network call previously ran on every
# invocation even though the joined result was memoised with `@pr_diff ||=`;
# the fetch now happens only once.
def pr_diff
  @pr_diff ||= begin
    # This is a hack to get the file patch into a format that parse-diff accepts
    # as the GitHub API for listing pull request files is missing file names in the patch.
    prefixed_patch = lambda do |file:|
      <<~PATCH
        diff --git a/#{file['filename']} b/#{file['filename']}
        --- a/#{file['filename']}
        +++ b/#{file['filename']}
        #{file['patch']}
      PATCH
    end

    files = client.pull_request_files(
      ci_source.repo_slug,
      ci_source.pull_request_id,
      accept: "application/vnd.github.v3.diff"
    )

    files.map { |file| prefixed_patch.call(file: file) }.join("\n")
  end
end
# Returns (memoised) the last Danger-generated review on the PR, a fresh
# pending Review when none exists, or a ReviewUnsupported stub when the
# server's API lacks review support (404).
def review
return @review unless @review.nil?
begin
@review = client.pull_request_reviews(ci_source.repo_slug, ci_source.pull_request_id)
.map { |review_json| Danger::RequestSources::GitHubSource::Review.new(client, ci_source, review_json) }
.select(&:generated_by_danger?)
.last
@review ||= Danger::RequestSources::GitHubSource::Review.new(client, ci_source)
@review
rescue Octokit::NotFound
@review = Danger::RequestSources::GitHubSource::ReviewUnsupported.new
@review
end
end
# Creates the local danger_base/danger_head branches pointing at the PR's
# base and head commits, so diffs can be computed locally.
def setup_danger_branches
# we can use a github specific feature here:
base_branch = self.pr_json["base"]["ref"]
base_commit = self.pr_json["base"]["sha"]
head_branch = self.pr_json["head"]["ref"]
head_commit = self.pr_json["head"]["sha"]
# Next, we want to ensure that we have a version of the current branch at a known location
scm.ensure_commitish_exists_on_branch! base_branch, base_commit
self.scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_commit}"
# OK, so we want to ensure that we have a known head branch, this will always represent
# the head of the PR ( e.g. the most recent commit that will be merged. )
scm.ensure_commitish_exists_on_branch! head_branch, head_commit
self.scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_commit}"
end
# Loads the PR and its linked issue JSON, plus the user's ignore list.
def fetch_details
self.pr_json = client.pull_request(ci_source.repo_slug, ci_source.pull_request_id)
if self.pr_json["message"] == "Moved Permanently"
raise "Repo moved or renamed, make sure to update the git remote".red
end
fetch_issue_details(self.pr_json)
self.ignored_violations = ignored_violations_from_pr
end
# Violations the author asked to ignore via `danger: ignore "..."` markers.
def ignored_violations_from_pr
GetIgnoredViolation.new(self.pr_json["body"]).call
end
# Follows the PR's issue link to fetch the underlying issue resource.
def fetch_issue_details(pr_json)
href = pr_json["_links"]["issue"]["href"]
self.issue_json = client.get(href)
end
# Memoised issue comments wrapped in Danger's Comment helper.
# NOTE(review): memo ivar is @comments, not @issue_comments.
def issue_comments
@comments ||= client.issue_comments(ci_source.repo_slug, ci_source.pull_request_id)
.map { |comment| Comment.from_github(comment) }
end
# Sending data to GitHub
# Entry point for publishing results to GitHub: posts/updates the main
# comment, submits inline comments, prunes stale ones, then sets the commit
# status (which may abort the process on errors).
def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
comment_result = {}
editable_comments = issue_comments.select { |comment| comment.generated_by_danger?(danger_id) }
last_comment = editable_comments.last
should_create_new_comment = new_comment || last_comment.nil? || remove_previous_comments
# Violations still present in the last Danger comment; used to strike out
# ones that were fixed. Empty when starting a fresh comment.
previous_violations =
if should_create_new_comment
{}
else
parse_comment(last_comment.body)
end
regular_violations = regular_violations_group(
warnings: warnings,
errors: errors,
messages: messages,
markdowns: markdowns
)
inline_violations = inline_violations_group(
warnings: warnings,
errors: errors,
messages: messages,
markdowns: markdowns
)
# Inline submission returns whatever could NOT be placed in the diff;
# those fall back into the main comment below.
rest_inline_violations = submit_inline_comments!(**{
danger_id: danger_id,
previous_violations: previous_violations
}.merge(inline_violations))
main_violations = merge_violations(
regular_violations, rest_inline_violations
)
main_violations_sum = main_violations.values.inject(:+)
if (previous_violations.empty? && main_violations_sum.empty?) || remove_previous_comments
# Just remove the comment, if there's nothing to say or --remove-previous-comments CLI was set.
delete_old_comments!(danger_id: danger_id)
end
# If there are still violations to show
if main_violations_sum.any?
body = generate_comment(**{
template: "github",
danger_id: danger_id,
previous_violations: previous_violations
}.merge(main_violations))
comment_result =
if should_create_new_comment
client.add_comment(ci_source.repo_slug, ci_source.pull_request_id, body)
else
client.update_comment(ci_source.repo_slug, last_comment.id, body)
end
end
# Now, set the pull request status.
# Note: this can terminate the entire process.
submit_pull_request_status!(
warnings: warnings,
errors: errors,
details_url: comment_result["html_url"],
danger_id: danger_id
)
end
# Sets the GitHub commit status ("success"/"failure") on the PR head commit.
# Falls back to stdout messaging — or aborts the build when there are
# errors — if the token has no write access to set statuses.
#
# FIX: the nil/empty guard previously called `.empty?` before checking nil
# (`ref.empty? || ref.nil?`), so a nil sha raised NoMethodError instead of
# the intended error message. The checks are now ordered nil-first.
def submit_pull_request_status!(warnings: [], errors: [], details_url: [], danger_id: "danger")
  status = (errors.count.zero? ? "success" : "failure")
  message = generate_description(warnings: warnings, errors: errors)
  latest_pr_commit_ref = self.pr_json["head"]["sha"]

  if latest_pr_commit_ref.nil? || latest_pr_commit_ref.empty?
    raise "Couldn't find a commit to update its status".red
  end

  begin
    client.create_status(ci_source.repo_slug, latest_pr_commit_ref, status, {
      description: message,
      context: "danger/#{danger_id}",
      target_url: details_url
    })
  rescue StandardError
    # This usually means the user has no commit access to this repo
    # That's always the case for open source projects where you can only
    # use a read-only GitHub account
    if errors.count > 0
      # We need to fail the actual build here
      is_private = pr_json["base"]["repo"]["private"]
      if is_private
        abort("\nDanger has failed this build. \nFound #{'error'.danger_pluralize(errors.count)} and I don't have write access to the PR to set a PR status.")
      else
        abort("\nDanger has failed this build. \nFound #{'error'.danger_pluralize(errors.count)}.")
      end
    else
      puts message
      puts "\nDanger does not have write access to the PR to set a PR status.".yellow
    end
  end
end
# Get rid of the previously posted comment, to only have the latest one
def delete_old_comments!(except: nil, danger_id: "danger")
issue_comments.each do |comment|
next unless comment.generated_by_danger?(danger_id)
next if comment.id == except
client.delete_comment(ci_source.repo_slug, comment.id)
end
end
# Places inline comments for every violation that maps onto the diff.
# @return [Hash] the violations that could NOT be placed inline (grouped by
#   kind) so the caller can show them in the main comment instead.
def submit_inline_comments!(warnings: [], errors: [], messages: [], markdowns: [], previous_violations: [], danger_id: "danger")
pr_comments = client.pull_request_comments(ci_source.repo_slug, ci_source.pull_request_id)
danger_comments = pr_comments.select { |comment| Comment.from_github(comment).generated_by_danger?(danger_id) }
non_danger_comments = pr_comments - danger_comments
# No inline content at all: just clean up whatever Danger posted before.
if (warnings + errors + messages + markdowns).select(&:inline?).empty?
delete_old_inline_violations(danger_comments: danger_comments, non_danger_comments: non_danger_comments)
return {}
end
diff_lines = self.pr_diff.lines
warnings = submit_inline_comments_for_kind!(:warning, warnings, diff_lines, danger_comments, previous_violations["warning"], danger_id: danger_id)
errors = submit_inline_comments_for_kind!(:error, errors, diff_lines, danger_comments, previous_violations["error"], danger_id: danger_id)
messages = submit_inline_comments_for_kind!(:message, messages, diff_lines, danger_comments, previous_violations["message"], danger_id: danger_id)
markdowns = submit_inline_comments_for_kind!(:markdown, markdowns, diff_lines, danger_comments, [], danger_id: danger_id)
# submit removes from the array all comments that are still in force
# so we strike out all remaining ones
delete_old_inline_violations(danger_comments: danger_comments, non_danger_comments: non_danger_comments)
{
warnings: warnings,
errors: errors,
messages: messages,
markdowns: markdowns
}
end
# Strikes out or deletes Danger's leftover inline comments that no longer
# correspond to an active violation: sticky violations get rewritten with a
# resolved (white_check_mark) body; non-sticky ones are deleted unless a
# non-Danger comment exists at the same location (treated as a reply worth
# preserving).
#
# FIX: `danger_id` was an undefined name inside this method (the class has
# no `danger_id` reader), raising NameError whenever a sticky violation had
# to be struck out. It is now an explicit keyword argument with the same
# default used everywhere else, so existing callers keep working.
def delete_old_inline_violations(danger_comments: [], non_danger_comments: [], danger_id: "danger")
  danger_comments.each do |comment|
    violation = violations_from_table(comment["body"]).first
    if !violation.nil? && violation.sticky
      body = generate_inline_comment_body("white_check_mark", violation, danger_id: danger_id, resolved: true, template: "github")
      client.update_pull_request_comment(ci_source.repo_slug, comment["id"], body)
    else
      # We remove non-sticky violations that have no replies
      # Since there's no direct concept of a reply in GH, we simply consider
      # the existence of non-danger comments in that line as replies
      replies = non_danger_comments.select do |potential|
        potential["path"] == comment["path"] &&
          potential["position"] == comment["position"] &&
          potential["commit_id"] == comment["commit_id"]
      end
      client.delete_pull_request_comment(ci_source.repo_slug, comment["id"]) if replies.empty?
    end
  end
end
# Two violations carry the same message when they point at the same
# file/line and their text matches once any `blob/<sha>/` URL segment
# (which changes with every new head commit) is stripped.
def messages_are_equivalent(m1, m2)
  return false unless m1.file == m2.file
  return false unless m1.line == m2.line

  blob_regexp = %r{blob/[0-9a-z]+/}
  m1.message.sub(blob_regexp, "") == m2.message.sub(blob_regexp, "")
end
# Submits inline comments for one violation kind, reusing any matching
# existing Danger comment instead of posting a duplicate.
# Returns the violations that could NOT be placed (their line is not in the
# diff), unless dismiss mode drops them.
def submit_inline_comments_for_kind!(kind, messages, diff_lines, danger_comments, previous_violations, danger_id: "danger")
head_ref = pr_json["head"]["sha"]
previous_violations ||= []
is_markdown_content = kind == :markdown
emoji = { warning: "warning", error: "no_entry_sign", message: "book" }[kind]
messages.reject do |m| # rubocop:todo Metrics/BlockLength
# Violations without file/line can never be inline; keep them.
next false unless m.file && m.line
position = find_position_in_diff diff_lines, m, kind
# Keep the change if it's line is not in the diff and not in dismiss mode
next dismiss_out_of_range_messages_for(kind) if position.nil?
# Once we know we're gonna submit it, we format it
if is_markdown_content
body = generate_inline_markdown_body(m, danger_id: danger_id, template: "github")
else
# Hide the inline link behind a span
m = process_markdown(m, true)
body = generate_inline_comment_body(emoji, m, danger_id: danger_id, template: "github")
# A comment might be in previous_violations because only now it's part of the unified diff
# We remove from the array since it won't have a place in the table anymore
previous_violations.reject! { |v| messages_are_equivalent(v, m) }
end
# Look for an already-posted Danger comment at the same diff position
# carrying the same message (modulo the commit sha in blob links).
matching_comments = danger_comments.select do |comment_data|
if comment_data["path"] == m.file && comment_data["position"] == position
# Parse it to avoid problems with strikethrough
violation = violations_from_table(comment_data["body"]).first
if violation
messages_are_equivalent(violation, m)
else
blob_regexp = %r{blob/[0-9a-z]+/}
comment_data["body"].sub(blob_regexp, "") == body.sub(blob_regexp, "")
end
else
false
end
end
if matching_comments.empty?
begin
# Since Octokit v8, the signature of create_pull_request_comment has been changed.
# See https://github.com/danger/danger/issues/1475 for detailed information.
client.create_pull_request_comment(ci_source.repo_slug, ci_source.pull_request_id,
body, head_ref, m.file, (Octokit::MAJOR >= 8 ? m.line : position))
rescue Octokit::UnprocessableEntity => e
# Show more detail for UnprocessableEntity error
message = [e, "body: #{body}", "head_ref: #{head_ref}", "filename: #{m.file}", "position: #{position}"].join("\n")
puts message
# Not reject because this comment has not completed
next false
end
else
# Remove the surviving comment so we don't strike it out
danger_comments.reject! { |c| matching_comments.include? c }
# Update the comment to remove the strikethrough if present
comment = matching_comments.first
client.update_pull_request_comment(ci_source.repo_slug, comment["id"], body)
end
# Remove this element from the array
next true
end
end
# Translates a violation's (file, line) into a GitHub diff "position" — the
# line offset within the file's patch that the review-comment API expects.
# Returns nil when the file or line is not part of the diff.
def find_position_in_diff(diff_lines, message, kind)
range_header_regexp = /@@ -([0-9]+)(,([0-9]+))? \+(?<start>[0-9]+)(,(?<end>[0-9]+))? @@.*/
file_header_regexp = %r{^diff --git a/.*}
pattern = "+++ b/#{message.file}\n"
file_start = diff_lines.index(pattern)
# Files containing spaces sometimes have a trailing tab
if file_start.nil?
pattern = "+++ b/#{message.file}\t\n"
file_start = diff_lines.index(pattern)
end
return nil if file_start.nil?
position = -1
file_line = nil
diff_lines.drop(file_start).each do |line|
# If the line has `No newline` annotation, position need increment
if line.eql?("\\ No newline at end of file\n")
position += 1
next
end
# If we found the start of another file diff, we went too far
break if line.match file_header_regexp
match = line.match range_header_regexp
# file_line is set once we find the hunk the line is in
# we need to count how many lines in new file we have
# so we do it one by one ignoring the deleted lines
if !file_line.nil? && !line.start_with?("-")
if file_line == message.line
# In dismiss mode, only added ("+") lines count as commentable.
file_line = nil if dismiss_out_of_range_messages_for(kind) && !line.start_with?("+")
break
end
file_line += 1
end
# We need to count how many diff lines are between us and
# the line we're looking for
position += 1
next unless match
range_start = match[:start].to_i
if match[:end]
range_end = match[:end].to_i + range_start
else
range_end = range_start
end
# We are past the line position, just abort
break if message.line.to_i < range_start
next unless message.line.to_i >= range_start && message.line.to_i < range_end
file_line = range_start
end
# nil file_line means the target line was never located in a hunk.
position unless file_line.nil?
end
# See the tests for examples of data coming in looks like
# Parses one table row of a Danger comment back into a Violation,
# recovering the file/line from the embedded blob link when present.
def parse_message_from_row(row)
message_regexp = %r{(<(a |span data-)href="https://#{host}/#{ci_source.repo_slug}/blob/[0-9a-z]+/(?<file>[^#]+)#L(?<line>[0-9]+)"(>[^<]*</a> - |/>))?(?<message>.*?)}im
match = message_regexp.match(row)
if match[:line]
line = match[:line].to_i
else
line = nil
end
Violation.new(row, true, match[:file], line)
end
# Renders a link to the violation's file@line on the PR head commit;
# `hide_link` emits an invisible span (used for inline comments) instead of
# a visible markdown link.
def markdown_link_to_message(message, hide_link)
url = "https://#{host}/#{ci_source.repo_slug}/blob/#{pr_json['head']['sha']}/#{message.file}#L#{message.line}"
if hide_link
"<span data-href=\"#{url}\"/>"
else
"[#{message.file}#L#{message.line}](#{url}) - "
end
end
# @return [String] The organisation name, is nil if it can't be detected
def organisation
matched = self.issue_json["repository_url"].match(%r{repos/(.*)/})
return matched[1] if matched && matched[1]
rescue StandardError
nil
end
# Whether out-of-diff messages of the given kind (:warning, :error,
# :message, :markdown) should be dropped. The setting may be a global
# boolean or a per-kind Hash.
def dismiss_out_of_range_messages_for(kind)
  config = self.dismiss_out_of_range_messages
  if config.kind_of?(Hash)
    config[kind] || false
  else
    config == true
  end
end
# @return [String] A URL to the specific file, ready to be downloaded
def file_url(organisation: nil, repository: nil, ref: nil, branch: nil, path: nil)
organisation ||= self.organisation
ref ||= branch
begin
# Retrieve the download URL (default ref on nil param)
contents = client.contents("#{organisation}/#{repository}", path: path, ref: ref)
@download_url = contents["download_url"]
rescue Octokit::ClientError
# Fallback to github.com
ref ||= "master"
@download_url = "https://raw.githubusercontent.com/#{organisation}/#{repository}/#{ref}/#{path}"
end
end
private
# True when a non-empty personal access token was supplied.
def valid_access_token?
@access_token && !@access_token.empty?
end
# True when a non-empty bearer token was supplied.
def valid_bearer_token?
@bearer_token && !@bearer_token.empty?
end
# Filters each violation list down to its non-inline (main-comment) entries.
def regular_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
  {
    warnings: warnings,
    errors: errors,
    messages: messages,
    markdowns: markdowns
  }.transform_values { |violations| violations.reject(&:inline?) }
end
# Selects only the inline violations of each kind, sorted so comments are
# grouped by file and ordered by line; entries lacking file/line sort first
# (via the -1/1 short-circuits in the comparator).
def inline_violations_group(warnings: [], errors: [], messages: [], markdowns: [])
cmp = proc do |a, b|
next -1 unless a.file && a.line
next 1 unless b.file && b.line
next a.line <=> b.line if a.file == b.file
next a.file <=> b.file
end
# Sort to group inline comments by file
{
warnings: warnings.select(&:inline?).sort(&cmp),
errors: errors.select(&:inline?).sort(&cmp),
messages: messages.select(&:inline?).sort(&cmp),
markdowns: markdowns.select(&:inline?).sort(&cmp)
}
end
# Merges hashes of violation groups, concatenating the arrays stored under
# any key that appears in more than one group.
def merge_violations(*violation_groups)
  violation_groups.reduce({}) do |merged, group|
    merged.merge(group) { |_key, existing, incoming| existing + incoming }
  end
end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
danger/danger | https://github.com/danger/danger/blob/20f1243775da2ce53f55ad5ba65112de5da6e118/lib/danger/request_sources/github/github_review_unsupported.rb | lib/danger/request_sources/github/github_review_unsupported.rb | # frozen_string_literal: true
module Danger
module RequestSources
module GitHubSource
class ReviewUnsupported
attr_reader :id, :body, :status, :review_json
def initialize; end
def start; end
def submit; end
def message(message, sticky = true, file = nil, line = nil); end
def warn(message, sticky = true, file = nil, line = nil); end
def fail(message, sticky = true, file = nil, line = nil); end
def markdown(message, file = nil, line = nil); end
end
end
end
end
| ruby | MIT | 20f1243775da2ce53f55ad5ba65112de5da6e118 | 2026-01-04T15:42:56.145797Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/docs/.mdl_ruleset.rb | docs/.mdl_ruleset.rb | # typed: true
# frozen_string_literal: true
# Custom Homebrew markdownlint (mdl) rules, numbered HB… to avoid clashing
# with the stock MD… rules.
# Flags bare `http(s)://` URLs appearing in text elements; links should use
# Markdown link syntax instead.
rule "HB034", "Bare unstyled URL used" do
tags :links, :url
aliases "no-bare-unstyled-urls"
check do |doc|
doc.matching_text_element_lines(%r{(?<=\s)https?://})
end
end
# Flags absolute docs.brew.sh links; internal pages should be linked
# relatively so they work on forks and local previews.
rule "HB100", "Full URL for internal link used" do
tags :links, :url
aliases "no-full-urls-for-internal-links"
check do |doc|
doc.matching_lines(%r{\]\(https://docs.brew.sh/.+?\)})
end
end
# Flags internal links that omit the file extension (anchors and
# scheme-prefixed URLs are exempt).
rule "HB101", "File extension missing from internal link" do
tags :links, :url
aliases "file-extension-required-for-internal-links"
check do |doc|
doc.matching_lines(/\]\((?!#|\w+:)(?>[^#.)]+)(?!\.\w+)/)
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/docs/.mdl_style.rb | docs/.mdl_style.rb | # typed: true
# frozen_string_literal: true
# Markdownlint (mdl) style configuration for the Homebrew docs:
# enable every default rule, then tune or disable the ones below.
all
rule "MD007", indent: 2 # Unordered list indentation
rule "MD026", punctuation: ",;:" # Trailing punctuation in header
exclude_rule "MD013" # Line length
exclude_rule "MD029" # Ordered list item prefix
exclude_rule "MD033" # Inline HTML
exclude_rule "MD034" # Bare URL used (replaced by HB034)
exclude_rule "MD046" # Code block style
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/hardware.rb | Library/Homebrew/hardware.rb | # typed: strict
# frozen_string_literal: true
require "utils/popen"
# Helper module for querying hardware information.
module Hardware
# Helper module for querying CPU information.
class CPU
# x86 architecture symbols, split by word size.
INTEL_32BIT_ARCHS = [:i386].freeze
INTEL_64BIT_ARCHS = [:x86_64].freeze
INTEL_ARCHS = T.let((INTEL_32BIT_ARCHS + INTEL_64BIT_ARCHS).freeze, T::Array[Symbol])
# PowerPC variants; note :ppc970 appears in both lists (it can run in
# either mode), so PPC_ARCHS contains it twice.
PPC_32BIT_ARCHS = [:ppc, :ppc32, :ppc7400, :ppc7450, :ppc970].freeze
PPC_64BIT_ARCHS = [:ppc64, :ppc64le, :ppc970].freeze
PPC_ARCHS = T.let((PPC_32BIT_ARCHS + PPC_64BIT_ARCHS).freeze, T::Array[Symbol])
# Only 64-bit ARM is recognised.
ARM_64BIT_ARCHS = [:arm64, :aarch64].freeze
ARM_ARCHS = ARM_64BIT_ARCHS
ALL_ARCHS = T.let([
*INTEL_ARCHS,
*PPC_ARCHS,
*ARM_ARCHS,
].freeze, T::Array[Symbol])
# Oldest Intel microarchitecture targeted for 64-bit builds.
INTEL_64BIT_OLDEST_CPU = :core2
class << self
sig { returns(T::Hash[Symbol, String]) }
# Memoised map from CPU-family symbol to the compiler optimisation flag
# used when building for that family ("" means no flag).
def optimization_flags
@optimization_flags ||= T.let({
dunno: "",
native: arch_flag("native"),
ivybridge: "-march=ivybridge",
sandybridge: "-march=sandybridge",
westmere: "-march=westmere",
nehalem: "-march=nehalem",
core2: "-march=core2",
core: "-march=prescott",
arm_vortex_tempest: "", # TODO: -mcpu=apple-m1 when we've patched all our GCCs to support it
armv6: "-march=armv6",
armv8: "-march=armv8-a",
ppc64: "-mcpu=powerpc64",
ppc64le: "-mcpu=powerpc64le",
}.freeze, T.nilable(T::Hash[Symbol, String]))
end
sig { returns(Symbol) }
# 32-bit architecture symbol for the current CPU type (:dunno if unknown).
def arch_32_bit
if arm?
:arm
elsif intel?
:i386
elsif ppc32?
:ppc32
else
:dunno
end
end
sig { returns(Symbol) }
# 64-bit architecture symbol for the current CPU type (:dunno if unknown).
def arch_64_bit
if arm?
:arm64
elsif intel?
:x86_64
elsif ppc64le?
:ppc64le
elsif ppc64?
:ppc64
else
:dunno
end
end
sig { returns(Symbol) }
# Architecture symbol matching the current word size.
def arch
case bits
when 32
arch_32_bit
when 64
arch_64_bit
else
:dunno
end
end
sig { returns(Symbol) }
# CPU vendor family (:intel, :arm, :ppc or :dunno), from RUBY_PLATFORM.
def type
case RUBY_PLATFORM
when /x86_64/, /i\d86/ then :intel
when /arm/, /aarch64/ then :arm
when /ppc|powerpc/ then :ppc
else :dunno
end
end
sig { returns(Symbol) }
# Microarchitecture family; this generic implementation cannot tell,
# OS-specific subclasses (extend/os/hardware) refine it.
def family
:dunno
end
sig { returns(Integer) }
# Number of online processors via `getconf`; falls back to 1 on failure.
# NOTE(review): relies on $CHILD_STATUS, i.e. `require "English"` being
# loaded elsewhere in Homebrew.
def cores
return @cores if @cores
@cores = Utils.popen_read("getconf", "_NPROCESSORS_ONLN").chomp.to_i
@cores = T.let(1, T.nilable(Integer)) unless $CHILD_STATUS.success?
@cores
end
sig { returns(T.nilable(Integer)) }
# Word size (32/64) inferred from RUBY_PLATFORM; nil when unrecognised.
def bits
@bits ||= T.let(case RUBY_PLATFORM
when /x86_64/, /ppc64|powerpc64/, /aarch64|arm64/ then 64
when /i\d86/, /ppc/, /arm/ then 32
end, T.nilable(Integer))
end
sig { returns(T::Boolean) }
# SSE4 assumed available on any x86_64 build of Ruby.
def sse4?
RUBY_PLATFORM.to_s.include?("x86_64")
end
sig { returns(T::Boolean) }
def is_32_bit?
bits == 32
end
sig { returns(T::Boolean) }
def is_64_bit?
bits == 64
end
sig { returns(T::Boolean) }
def intel?
type == :intel
end
sig { returns(T::Boolean) }
def ppc?
type == :ppc
end
sig { returns(T::Boolean) }
def ppc32?
ppc? && is_32_bit?
end
sig { returns(T::Boolean) }
def ppc64le?
ppc? && is_64_bit? && little_endian?
end
sig { returns(T::Boolean) }
def ppc64?
ppc? && is_64_bit? && big_endian?
end
# Check whether the CPU architecture is ARM.
#
# @api internal
sig { returns(T::Boolean) }
def arm?
type == :arm
end
# Check whether the CPU architecture is 64-bit ARM.
sig { returns(T::Boolean) }
def arm64?
arm? && is_64_bit?
end
sig { returns(T::Boolean) }
def little_endian?
!big_endian?
end
sig { returns(T::Boolean) }
# Endianness probe: pack 1 as native-endian vs big-endian and compare.
def big_endian?
[1].pack("I") == [1].pack("N")
end
sig { returns(T::Boolean) }
# Overridden by OS-specific subclasses that can detect virtualisation.
def virtualized?
false
end
sig { returns(T::Array[Symbol]) }
# CPU feature flags; empty in this generic implementation.
def features
[]
end
sig { params(name: Symbol).returns(T::Boolean) }
def feature?(name)
features.include?(name)
end
sig { params(arch: T.any(String, Symbol)).returns(String) }
# Compiler flag selecting a target arch: PPC uses -mcpu, others -march.
def arch_flag(arch)
return "-mcpu=#{arch}" if ppc?
"-march=#{arch}"
end
sig { returns(T::Boolean) }
# Overridden on macOS to detect running under Rosetta 2 translation.
def in_rosetta2?
false
end
end
end
class << self
sig { returns(String) }
# Human-friendly word for the core count ("quad", "octa", …); falls back
# to the plain number for counts without a conventional name.
def cores_as_words
  names = {
    1  => "single",
    2  => "dual",
    4  => "quad",
    6  => "hexa",
    8  => "octa",
    10 => "deca",
    12 => "dodeca",
  }
  count = Hardware::CPU.cores
  names.fetch(count) { count.to_s }
end
sig { params(_version: T.nilable(MacOSVersion)).returns(Symbol) }
# The oldest CPU model Homebrew targets for the host's vendor family and
# word size. The version parameter is unused in the generic implementation
# (OS-specific overrides may consult it).
def oldest_cpu(_version = nil)
  cpu = Hardware::CPU
  if cpu.intel?
    cpu.is_64_bit? ? Hardware::CPU::INTEL_64BIT_OLDEST_CPU : :core
  elsif cpu.arm?
    cpu.is_64_bit? ? :armv8 : :armv6
  elsif cpu.ppc? && cpu.is_64_bit?
    cpu.little_endian? ? :ppc64le : :ppc64
  else
    cpu.family
  end
end
# Returns a Rust flag to set the target CPU if necessary.
# Defaults to nil.
sig { params(arch: Symbol).returns(T.nilable(String)) }
def rustflags_target_cpu(arch)
  # Rust already defaults to the oldest supported cpu for each target-triplet
  # so it's safe to ignore generic archs such as :armv6 here.
  # Rust defaults to apple-m1 since Rust 1.71 for aarch64-apple-darwin.
  # NOTE(review): `||=` memoizes the first arch seen, so later calls with a
  # different arch reuse the first result — presumably this is only ever
  # called with one arch per process; confirm.
  @target_cpu ||= T.let(case arch
  when :core
    :prescott
  when :native, :ivybridge, :sandybridge, :westmere, :nehalem, :core2
    arch
  end, T.nilable(Symbol))
  return if @target_cpu.blank?

  "--codegen target-cpu=#{@target_cpu}"
end
end
end
require "extend/os/hardware"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/formula_info.rb | Library/Homebrew/formula_info.rb | # typed: strict
# frozen_string_literal: true
# Formula information drawn from an external `brew info --json` call.
class FormulaInfo
  # The whole info structure parsed from the JSON.
  sig { returns(T::Hash[String, T.untyped]) }
  attr_accessor :info

  sig { params(info: T::Hash[String, T.untyped]).void }
  def initialize(info)
    @info = info
  end

  # Looks up formula on disk and reads its info.
  # Returns nil if formula is absent or if there was an error reading it.
  # Shells out to a fresh `brew info --json=v1` process rather than loading
  # the formula in-process.
  sig { params(name: Pathname).returns(T.nilable(FormulaInfo)) }
  def self.lookup(name)
    json = Utils.popen_read(
      *HOMEBREW_RUBY_EXEC_ARGS,
      HOMEBREW_LIBRARY_PATH/"brew.rb",
      "info",
      "--json=v1",
      name,
    )
    # $CHILD_STATUS reflects the `brew info` subprocess exit status.
    return unless $CHILD_STATUS.success?

    force_utf8!(json)
    FormulaInfo.new(JSON.parse(json)[0])
  end

  # Bottle tags (e.g. platform identifiers) available for the stable spec;
  # empty when the formula has no stable bottle block.
  sig { returns(T::Array[String]) }
  def bottle_tags
    return [] unless info["bottle"]["stable"]

    info["bottle"]["stable"]["files"].keys
  end

  # URL and sha256 of the stable bottle for the given tag, or nil when no
  # stable bottle (or no bottle for that tag) exists.
  sig {
    params(my_bottle_tag: T.any(Utils::Bottles::Tag, T.nilable(String))).returns(T.nilable(T::Hash[String, String]))
  }
  def bottle_info(my_bottle_tag = Utils::Bottles.tag)
    tag_s = my_bottle_tag.to_s
    return unless info["bottle"]["stable"]

    btl_info = info["bottle"]["stable"]["files"][tag_s]
    return unless btl_info

    { "url" => btl_info["url"], "sha256" => btl_info["sha256"] }
  end

  # Bottle info for whichever tag #any_bottle_tag picks.
  sig { returns(T.nilable(T::Hash[String, String])) }
  def bottle_info_any
    bottle_info(any_bottle_tag)
  end

  sig { returns(T.nilable(String)) }
  def any_bottle_tag
    tag = Utils::Bottles.tag.to_s
    # Prefer native bottles as a convenience for download caching
    bottle_tags.include?(tag) ? tag : bottle_tags.first
  end

  # Version for the given spec (:stable, :head, ...), read from the JSON.
  sig { params(spec_type: Symbol).returns(Version) }
  def version(spec_type)
    version_str = info["versions"][spec_type.to_s]
    Version.new(version_str)
  end

  # Version plus formula revision, as a PkgVersion.
  sig { params(spec_type: Symbol).returns(PkgVersion) }
  def pkg_version(spec_type = :stable)
    PkgVersion.new(version(spec_type), revision)
  end

  sig { returns(Integer) }
  def revision
    info["revision"]
  end

  # Force UTF-8 on strings read from the subprocess pipe (no-op on objects
  # without #force_encoding).
  sig { params(str: String).void }
  def self.force_utf8!(str)
    str.force_encoding("UTF-8") if str.respond_to?(:force_encoding)
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/test.rb | Library/Homebrew/test.rb | # typed: strict
# frozen_string_literal: true
raise "#{__FILE__} must not be loaded via `require`." if $PROGRAM_NAME != __FILE__
old_trap = trap("INT") { exit! 130 }
require_relative "global"
require "extend/ENV"
require "timeout"
require "formula_assertions"
require "formula_free_port"
require "fcntl"
require "utils/socket"
require "cli/parser"
require "dev-cmd/test"
require "json/add/exception"
require "extend/pathname/write_mkpath_extension"
DEFAULT_TEST_TIMEOUT_SECONDS = T.let(5 * 60, Integer)
begin
# Undocumented opt-out for internal use.
# We need to allow formulae from paths here due to how we pass them through.
ENV["HOMEBREW_INTERNAL_ALLOW_PACKAGES_FROM_PATHS"] = "1"
args = Homebrew::DevCmd::Test.new.args
Context.current = args.context
error_pipe = Utils::UNIXSocketExt.open(ENV.fetch("HOMEBREW_ERROR_PIPE"), &:recv_io)
error_pipe.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
trap("INT", old_trap)
if Homebrew::EnvConfig.developer? || ENV["CI"].present?
raise "Cannot find child processes without `pgrep`, please install!" unless which("pgrep")
raise "Cannot kill child processes without `pkill`, please install!" unless which("pkill")
end
formula = args.named.to_resolved_formulae.fetch(0)
formula.extend(Homebrew::Assertions)
formula.extend(Homebrew::FreePort)
if args.debug? && !Homebrew::EnvConfig.disable_debrew?
require "debrew"
formula.extend(Debrew::Formula)
end
ENV.extend(Stdenv)
ENV.setup_build_environment(formula:, testing_formula: true)
Pathname.activate_extensions!
# tests can also return false to indicate failure
run_test = proc do |_|
# TODO: Replace proc usage with direct `formula.run_test` when removing this.
# Also update formula.rb 'TODO: replace `returns(BasicObject)` with `void`'
if formula.run_test(keep_tmp: args.keep_tmp?) == false
require "utils/output"
Utils::Output.odeprecated "`return false` in test", "`raise \"<reason for failure>\"`"
raise "test returned false"
end
end
if args.debug? # --debug is interactive
run_test.call(nil)
else
# HOMEBREW_TEST_TIMEOUT_SECS is private API and subject to change.
timeout = ENV["HOMEBREW_TEST_TIMEOUT_SECS"]&.to_i || DEFAULT_TEST_TIMEOUT_SECONDS
Timeout.timeout(timeout, &run_test)
end
# Any exceptions during the test run are reported.
rescue Exception => e # rubocop:disable Lint/RescueException
error_pipe&.puts e.to_json
error_pipe&.close
ensure
pid = Process.pid.to_s
pkill = "/usr/bin/pkill"
pgrep = "/usr/bin/pgrep"
if File.executable?(pkill) && File.executable?(pgrep) && system(pgrep, "-P", pid, out: File::NULL)
$stderr.puts "Killing child processes..."
system pkill, "-P", pid
sleep 1
system pkill, "-9", "-P", pid
end
exit! 1 if e
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/brew.rb | Library/Homebrew/brew.rb | # typed: strict
# frozen_string_literal: true
# `HOMEBREW_STACKPROF` should be set via `brew prof --stackprof`, not manually.
if ENV["HOMEBREW_STACKPROF"]
require "rubygems"
require "stackprof"
StackProf.start(mode: :wall, raw: true)
end
raise "HOMEBREW_BREW_FILE was not exported! Please call bin/brew directly!" unless ENV["HOMEBREW_BREW_FILE"]
if $PROGRAM_NAME != __FILE__ && !$PROGRAM_NAME.end_with?("/bin/ruby-prof")
raise "#{__FILE__} must not be loaded via `require`."
end
std_trap = trap("INT") { exit! 130 } # no backtrace thanks
require_relative "global"
require "utils/output"
begin
trap("INT", std_trap) # restore default CTRL-C handler
if ENV["CI"]
$stdout.sync = true
$stderr.sync = true
end
empty_argv = ARGV.empty?
help_flag_list = %w[-h --help --usage -?]
help_flag = !ENV["HOMEBREW_HELP"].nil?
help_cmd_index = T.let(nil, T.nilable(Integer))
cmd = T.let(nil, T.nilable(String))
ARGV.each_with_index do |arg, i|
break if help_flag && cmd
if arg == "help" && !cmd
# Command-style help: `help <cmd>` is fine, but `<cmd> help` is not.
help_flag = true
help_cmd_index = i
elsif !cmd && help_flag_list.exclude?(arg)
require "commands"
cmd = ARGV.delete_at(i)
cmd = Commands::HOMEBREW_INTERNAL_COMMAND_ALIASES.fetch(cmd, cmd)
end
end
ARGV.delete_at(help_cmd_index) if help_cmd_index
require "cli/parser"
args = Homebrew::CLI::Parser.new(Homebrew::Cmd::Brew).parse(ARGV.dup.freeze, ignore_invalid_options: true)
Context.current = args.context
path = PATH.new(ENV.fetch("PATH"))
homebrew_path = PATH.new(ENV.fetch("HOMEBREW_PATH"))
# Add shared wrappers.
path.prepend(HOMEBREW_SHIMS_PATH/"shared")
homebrew_path.prepend(HOMEBREW_SHIMS_PATH/"shared")
ENV["PATH"] = path.to_s
require "commands"
require "warnings"
internal_cmd = Commands.valid_internal_cmd?(cmd) || Commands.valid_internal_dev_cmd?(cmd) if cmd
unless internal_cmd
# Add contributed commands to PATH before checking.
homebrew_path.append(Commands.tap_cmd_directories)
# External commands expect a normal PATH
ENV["PATH"] = homebrew_path.to_s
end
# Usage instructions should be displayed if and only if one of:
# - a help flag is passed AND a command is matched
# - a help flag is passed AND there is no command specified
# - no arguments are passed
if empty_argv || help_flag
require "help"
Homebrew::Help.help cmd, remaining_args: args.remaining, empty_argv:
# `Homebrew::Help.help` never returns, except for unknown commands.
end
if cmd.nil?
raise UsageError, "Unknown command: brew #{ARGV.join(" ")}"
elsif internal_cmd || Commands.external_ruby_v2_cmd_path(cmd)
cmd_class = Homebrew::AbstractCommand.command(cmd)
Homebrew.running_command = cmd
if cmd_class
if !Homebrew::EnvConfig.no_install_from_api? && Homebrew::EnvConfig.download_concurrency > 1
require "api"
Homebrew::API.fetch_api_files!
end
command_instance = cmd_class.new
require "utils/analytics"
Utils::Analytics.report_command_run(command_instance)
command_instance.run
else
begin
Homebrew.public_send Commands.method_name(cmd)
rescue NoMethodError => e
converted_cmd = cmd.downcase.tr("-", "_")
case_error = "undefined method `#{converted_cmd}' for module Homebrew"
private_method_error = "private method `#{converted_cmd}' called for module Homebrew"
Utils::Output.odie "Unknown command: brew #{cmd}" if [case_error, private_method_error].include?(e.message)
raise
end
end
elsif (path = Commands.external_ruby_cmd_path(cmd))
Homebrew.running_command = cmd
Homebrew.require?(path)
exit Homebrew.failed? ? 1 : 0
elsif Commands.external_cmd_path(cmd)
%w[CACHE LIBRARY_PATH].each do |env|
ENV["HOMEBREW_#{env}"] = Object.const_get(:"HOMEBREW_#{env}").to_s
end
exec "brew-#{cmd}", *ARGV
else
raise UsageError, "Unknown command: brew #{cmd}"
end
rescue UsageError => e
require "help"
Homebrew::Help.help cmd, remaining_args: args&.remaining || [], usage_error: e.message
rescue SystemExit => e
Utils::Output.onoe "Kernel.exit" if args&.debug? && !e.success?
if args&.debug? || ARGV.include?("--debug")
require "utils/backtrace"
$stderr.puts Utils::Backtrace.clean(e)
end
raise
rescue Interrupt
$stderr.puts # seemingly a newline is typical
exit 130
rescue BuildError => e
Utils::Analytics.report_build_error(e)
e.dump(verbose: args&.verbose? || false)
if OS.not_tier_one_configuration?
$stderr.puts <<~EOS
This build failure was expected, as this is not a Tier 1 configuration:
#{Formatter.url("https://docs.brew.sh/Support-Tiers")}
#{Formatter.bold("Do not report any issues to Homebrew/* repositories!")}
Read the above document instead before opening any issues or PRs.
EOS
elsif e.formula.head? || e.formula.deprecated? || e.formula.disabled?
reason = if e.formula.head?
"was built from an unstable upstream --HEAD"
elsif e.formula.deprecated?
"is deprecated"
elsif e.formula.disabled?
"is disabled"
end
$stderr.puts <<~EOS
#{e.formula.name}'s formula #{reason}.
This build failure is expected behaviour.
EOS
end
exit 1
rescue RuntimeError, SystemCallError => e
raise if e.message.empty?
Utils::Output.onoe e
if args&.debug? || ARGV.include?("--debug")
require "utils/backtrace"
$stderr.puts Utils::Backtrace.clean(e)
end
exit 1
# Catch any other types of exceptions.
rescue Exception => e # rubocop:disable Lint/RescueException
Utils::Output.onoe e
method_deprecated_error = e.is_a?(MethodDeprecatedError)
require "utils/backtrace"
$stderr.puts Utils::Backtrace.clean(e) if args&.debug? || ARGV.include?("--debug") || !method_deprecated_error
if OS.not_tier_one_configuration?
$stderr.puts <<~EOS
This error was expected, as this is not a Tier 1 configuration:
#{Formatter.url("https://docs.brew.sh/Support-Tiers")}
#{Formatter.bold("Do not report any issues to Homebrew/* repositories!")}
Read the above document instead before opening any issues or PRs.
EOS
elsif Homebrew::EnvConfig.no_auto_update? &&
(fetch_head = HOMEBREW_REPOSITORY/".git/FETCH_HEAD") &&
(!fetch_head.exist? || (fetch_head.mtime.to_date < Date.today))
$stderr.puts "#{Tty.bold}You have disabled automatic updates and have not updated today.#{Tty.reset}"
$stderr.puts "#{Tty.bold}Do not report this issue until you've run `brew update` and tried again.#{Tty.reset}"
elsif (issues_url = (method_deprecated_error && e.issues_url) || Utils::Backtrace.tap_error_url(e))
$stderr.puts "If reporting this issue please do so at (not Homebrew/* repositories):"
$stderr.puts " #{Formatter.url(issues_url)}"
elsif internal_cmd && !method_deprecated_error
$stderr.puts "#{Tty.bold}Please report this issue:#{Tty.reset}"
$stderr.puts " #{Formatter.url(OS::ISSUES_URL)}"
end
exit 1
else
exit 1 if Homebrew.failed?
ensure
if ENV["HOMEBREW_STACKPROF"]
StackProf.stop
StackProf.results("prof/stackprof.dump")
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/resource.rb | Library/Homebrew/resource.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "downloadable"
require "mktemp"
require "livecheck"
require "on_system"
require "utils/output"
# Resource is the fundamental representation of an external resource. The
# primary formula download, along with other declared resources, are instances
# of this class.
class Resource
include Downloadable
include FileUtils
include OnSystem::MacOSAndLinux
include Utils::Output::Mixin
attr_reader :source_modified_time, :patches, :owner
attr_writer :checksum
attr_accessor :download_strategy
# Formula name must be set after the DSL, as we have no access to the
# formula name before initialization of the formula.
attr_accessor :name
sig { params(name: T.nilable(String), block: T.nilable(T.proc.bind(Resource).void)).void }
def initialize(name = nil, &block)
super()
# Generally ensure this is synced with `initialize_dup` and `freeze`
# (excluding simple objects like integers & booleans, weak refs like `owner` or permafrozen objects)
@name = name
@source_modified_time = nil
@patches = []
@owner = nil
@livecheck = Livecheck.new(self)
@livecheck_defined = false
@insecure = false
instance_eval(&block) if block
end
sig { override.params(other: T.any(Resource, Downloadable)).void }
def initialize_dup(other)
super
@name = @name.dup
@patches = @patches.dup
@livecheck = @livecheck.dup
end
def freeze
@name.freeze
@patches.freeze
@livecheck.freeze
super
end
def owner=(owner)
@owner = owner
patches.each { |p| p.owner = owner }
end
sig { override.returns(String) }
def download_queue_type = "Resource"
# Verifies download and unpacks it.
# The block may call `|resource, staging| staging.retain!` to retain the staging
# directory. Subclasses that override stage should implement the tmp
# dir using {Mktemp} so that works with all subtypes.
#
# @api public
def stage(target = nil, debug_symbols: false, &block)
raise ArgumentError, "Target directory or block is required" if !target && !block_given?
prepare_patches
fetch_patches(skip_downloaded: true)
fetch unless downloaded?
unpack(target, debug_symbols:, &block)
end
def prepare_patches
patches.grep(DATAPatch) { |p| p.path = owner.owner.path }
end
def fetch_patches(skip_downloaded: false)
external_patches = patches.select(&:external?)
external_patches.reject!(&:downloaded?) if skip_downloaded
external_patches.each(&:fetch)
end
def apply_patches
return if patches.empty?
ohai "Patching #{name}"
patches.each(&:apply)
end
# If a target is given, unpack there; else unpack to a temp folder.
# If block is given, yield to that block with `|stage|`, where stage
# is a {ResourceStageContext}.
# A target or a block must be given, but not both.
def unpack(target = nil, debug_symbols: false)
  # Capture the cwd up-front: downloader.stage presumably changes directory
  # into the staging area, so relative targets must resolve against the
  # caller's original directory — TODO confirm.
  current_working_directory = Pathname.pwd
  stage_resource(download_name, debug_symbols:) do |staging|
    downloader.stage do
      @source_modified_time = downloader.source_modified_time.freeze
      apply_patches
      if block_given?
        yield ResourceStageContext.new(self, staging)
      elsif target
        target = Pathname(target)
        target = current_working_directory/target if target.relative?
        # Move everything unpacked in the staging dir into the target.
        target.install Pathname.pwd.children
      end
    end
  end
end
Partial = Struct.new(:resource, :files)
def files(*files)
Partial.new(self, files)
end
sig {
override
.params(
verify_download_integrity: T::Boolean,
timeout: T.nilable(T.any(Integer, Float)),
quiet: T::Boolean,
skip_patches: T::Boolean,
).returns(Pathname)
}
def fetch(verify_download_integrity: true, timeout: nil, quiet: false, skip_patches: false)
fetch_patches unless skip_patches
super(verify_download_integrity:, timeout:, quiet:)
end
# {Livecheck} can be used to check for newer versions of the software.
# This method evaluates the DSL specified in the `livecheck` block of the
# {Resource} (if it exists) and sets the instance variables of a {Livecheck}
# object accordingly. This is used by `brew livecheck` to check for newer
# versions of the software.
#
# ### Example
#
# ```ruby
# livecheck do
# url "https://example.com/foo/releases"
# regex /foo-(\d+(?:\.\d+)+)\.tar/
# end
# ```
def livecheck(&block)
return @livecheck unless block
@livecheck_defined = true
@livecheck.instance_eval(&block)
end
# Whether a livecheck specification is defined or not.
#
# It returns `true` when a `livecheck` block is present in the {Resource}
# and `false` otherwise.
sig { returns(T::Boolean) }
def livecheck_defined?
@livecheck_defined == true
end
def sha256(val)
@checksum = Checksum.new(val)
end
sig { override.params(val: T.nilable(String), specs: T.anything).returns(T.nilable(String)) }
def url(val = nil, **specs)
return @url&.to_s if val.nil?
specs = specs.dup
# Don't allow this to be set.
specs.delete(:insecure)
specs[:insecure] = true if @insecure
@url = URL.new(val, specs)
@downloader = nil
@download_strategy = @url.download_strategy
@url.to_s
end
sig { override.params(val: T.nilable(T.any(String, Version))).returns(T.nilable(Version)) }
# DSL getter/setter: with no argument, returns the stored version; with an
# argument, stores it (blank strings become Version::NULL).
def version(val = nil)
  return super() if val.nil?

  @version = case val
  when Version
    val
  when String
    val.blank? ? Version::NULL : Version.new(val)
  end
end
def mirror(val)
mirrors << val
end
def patch(strip = :p1, src = nil, &block)
p = ::Patch.create(strip, src, &block)
patches << p
end
def using
@url&.using
end
def specs
@url&.specs || {}.freeze
end
protected
def stage_resource(prefix, debug_symbols: false, &block)
Mktemp.new(prefix, retain_in_cache: debug_symbols).run(&block)
end
private
sig { override.returns(String) }
# Cache-file basename for this resource: the owner's name for the default
# resource, otherwise "<owner>--<name>" (or just the name when unowned).
def download_name
  return owner.name if name.nil?

  # Removes /s from resource names; this allows Go package names
  # to be used as resource names without confusing software that
  # interacts with {download_name}, e.g. `github.com/foo/bar`.
  sanitized = name.tr("/", "-")
  owner.nil? ? sanitized : "#{owner.name}--#{sanitized}"
end
# Computes the full mirror list for this resource's URL: configured
# glibc-bootstrap mirrors (artifact domain and/or custom bottle domain),
# a configured PyPI index mirror, and then the mirrors from super —
# de-duplicated, in that priority order.
def determine_url_mirrors
  extra_urls = []
  url = T.must(self.url)

  # glibc-bootstrap
  if url.start_with?("https://github.com/Homebrew/glibc-bootstrap/releases/download")
    if (artifact_domain = Homebrew::EnvConfig.artifact_domain.presence)
      artifact_url = url.sub("https://github.com", artifact_domain)
      # With no-fallback, the artifact domain is the only allowed source.
      return [artifact_url] if Homebrew::EnvConfig.artifact_domain_no_fallback?

      extra_urls << artifact_url
    end

    if Homebrew::EnvConfig.bottle_domain != HOMEBREW_BOTTLE_DEFAULT_DOMAIN
      tag, filename = url.split("/").last(2)
      # FIX: interpolate the release filename. The previous code emitted the
      # literal garbage "#(unknown)" here (and left `filename` unused),
      # producing a broken mirror URL.
      extra_urls << "#{Homebrew::EnvConfig.bottle_domain}/glibc-bootstrap/#{tag}/#{filename}"
    end
  end

  # PyPI packages: PEP 503 – Simple Repository API <https://peps.python.org/pep-0503>
  if (pip_index_url = Homebrew::EnvConfig.pip_index_url.presence)
    pip_index_base_url = pip_index_url.chomp("/").chomp("/simple")
    %w[https://files.pythonhosted.org https://pypi.org].each do |base_url|
      extra_urls << url.sub(base_url, pip_index_base_url) if url.start_with?("#{base_url}/packages")
    end
  end

  [*extra_urls, *super].uniq
end
# A local resource that doesn't need to be downloaded.
class Local < Resource
def initialize(path)
super(File.basename(path))
@downloader = LocalBottleDownloadStrategy.new(path)
end
end
# A resource for a formula.
class Formula < Resource
sig { override.returns(String) }
def download_queue_type = "Formula"
sig { override.returns(String) }
def download_queue_name = "#{T.must(owner).name} (#{version})"
end
# A resource containing a Go package.
class Go < Resource
def stage(target, &block)
super(target/name, &block)
end
end
# A resource for a bottle manifest.
class BottleManifest < Resource
class Error < RuntimeError; end
attr_reader :bottle
def initialize(bottle)
super("#{bottle.name}_bottle_manifest")
@bottle = bottle
@manifest_annotations = nil
end
def verify_download_integrity(_filename)
# We don't have a checksum, but we can at least try parsing it.
tab
end
def tab
tab = manifest_annotations["sh.brew.tab"]
raise Error, "Couldn't find tab from manifest." if tab.blank?
begin
JSON.parse(tab)
rescue JSON::ParserError
raise Error, "Couldn't parse tab JSON."
end
end
sig { returns(T.nilable(Integer)) }
def bottle_size
manifest_annotations["sh.brew.bottle.size"]&.to_i
end
sig { returns(T.nilable(Integer)) }
def installed_size
manifest_annotations["sh.brew.bottle.installed_size"]&.to_i
end
sig { override.returns(String) }
def download_queue_type = "Bottle Manifest"
sig { override.returns(String) }
def download_queue_name = "#{bottle.name} (#{bottle.resource.version})"
private
def manifest_annotations
return @manifest_annotations unless @manifest_annotations.nil?
json = begin
JSON.parse(cached_download.read)
rescue JSON::ParserError
raise Error, "The downloaded GitHub Packages manifest was corrupted or modified (it is not valid JSON): " \
"\n#{cached_download}"
end
manifests = json["manifests"]
raise Error, "Missing 'manifests' section." if manifests.blank?
manifests_annotations = manifests.filter_map { |m| m["annotations"] }
raise Error, "Missing 'annotations' section." if manifests_annotations.blank?
bottle_digest = bottle.resource.checksum.hexdigest
image_ref = GitHubPackages.version_rebuild(bottle.resource.version, bottle.rebuild, bottle.tag.to_s)
manifest_annotations = manifests_annotations.find do |m|
next if m["sh.brew.bottle.digest"] != bottle_digest
m["org.opencontainers.image.ref.name"] == image_ref
end
raise Error, "Couldn't find manifest matching bottle checksum." if manifest_annotations.blank?
@manifest_annotations = manifest_annotations
end
end
# A resource containing a patch.
class Patch < Resource
attr_reader :patch_files
def initialize(&block)
@patch_files = []
@directory = nil
super "patch", &block
end
def apply(*paths)
paths.flatten!
@patch_files.concat(paths)
@patch_files.uniq!
end
def directory(val = nil)
return @directory if val.nil?
@directory = val
end
sig { override.returns(String) }
def download_queue_type = "Patch"
sig { override.returns(String) }
def download_queue_name
if (last_url_component = url.to_s.split("/").last)
return last_url_component
end
super
end
end
end
# The context in which a {Resource#stage} occurs. Supports access to both
# the {Resource} and associated {Mktemp} in a single block argument. The interface
# is back-compatible with {Resource} itself as used in that context.
class ResourceStageContext
extend Forwardable
# The {Resource} that is being staged.
attr_reader :resource
# The {Mktemp} in which {#resource} is staged.
attr_reader :staging
def_delegators :@resource, :version, :url, :mirrors, :specs, :using, :source_modified_time
def_delegators :@staging, :retain!
def initialize(resource, staging)
@resource = resource
@staging = staging
end
sig { returns(String) }
def to_s
"<#{self.class}: resource=#{resource} staging=#{staging}>"
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/readall.rb | Library/Homebrew/readall.rb | # typed: strict
# frozen_string_literal: true
require "formula"
require "cask/cask_loader"
require "system_command"
require "utils/output"
# Helper module for validating syntax in taps.
module Readall
extend Cachable
extend SystemCommand::Mixin
extend Utils::Output::Mixin
# TODO: remove this once the `MacOS` module is undefined on Linux
MACOS_MODULE_REGEX = /\b(MacOS|OS::Mac)(\.|::)\b/
private_constant :MACOS_MODULE_REGEX
private_class_method :cache
sig { params(ruby_files: T::Array[Pathname]).returns(T::Boolean) }
# True when none of the given files has syntax errors or warnings.
# Every file is checked (diagnostics are printed to $stderr as a side
# effect) — no short-circuiting on the first failure.
def self.valid_ruby_syntax?(ruby_files)
  results = ruby_files.map { |ruby_file| syntax_errors_or_warnings?(ruby_file) }
  results.none?
end
sig { params(alias_dir: Pathname, formula_dir: Pathname).returns(T::Boolean) }
# Validates a tap's alias directory: every entry must be a symlink that
# resolves to a file, and no formula may share a name with an alias.
# All problems are reported (via onoe) before returning.
def self.valid_aliases?(alias_dir, formula_dir)
  return true unless alias_dir.directory?

  problems = 0
  alias_dir.each_child do |child|
    if !child.symlink?
      onoe "Non-symlink alias: #{child}"
      problems += 1
    elsif !child.file?
      # A symlink that is not a file is a broken/dangling link.
      onoe "Non-file alias: #{child}"
      problems += 1
    end

    if formula_dir.glob("**/#{child.basename}.rb").any?(&:exist?)
      onoe "Formula duplicating alias: #{child}"
      problems += 1
    end
  end
  problems.zero?
end
sig { params(tap: Tap, bottle_tag: T.nilable(Utils::Bottles::Tag)).returns(T::Boolean) }
def self.valid_formulae?(tap, bottle_tag: nil)
cache[:valid_formulae] ||= {}
success = T.let(true, T::Boolean)
tap.formula_files.each do |file|
valid = cache[:valid_formulae][file]
next if valid == true || valid&.include?(bottle_tag)
formula_name = file.basename(".rb").to_s
formula_contents = file.read.force_encoding("UTF-8")
readall_namespace = "ReadallNamespace"
readall_formula_class = Formulary.load_formula(formula_name, file, formula_contents, readall_namespace,
flags: [], ignore_errors: false)
readall_formula = readall_formula_class.new(formula_name, file, :stable, tap:)
readall_formula.to_hash
# TODO: Remove check for MACOS_MODULE_REGEX once the `MacOS` module is undefined on Linux
cache[:valid_formulae][file] = if readall_formula.on_system_blocks_exist? ||
formula_contents.match?(MACOS_MODULE_REGEX)
[bottle_tag, *cache[:valid_formulae][file]]
else
true
end
rescue Interrupt
raise
# Handle all possible exceptions reading formulae.
rescue Exception => e # rubocop:disable Lint/RescueException
onoe "Invalid formula (#{bottle_tag}): #{file}"
$stderr.puts e
success = false
end
success
end
# Generic stub: cask validation is a no-op here. Presumably overridden by
# OS-specific code (see `require "extend/os/readall"` at the bottom of this
# file) — TODO confirm.
sig { params(_tap: Tap, os_name: T.nilable(Symbol), arch: T.nilable(Symbol)).returns(T::Boolean) }
def self.valid_casks?(_tap, os_name: nil, arch: nil)
  true
end
sig {
params(
tap: Tap, aliases: T::Boolean, no_simulate: T::Boolean, os_arch_combinations: T::Array[[Symbol, Symbol]],
).returns(T::Boolean)
}
def self.valid_tap?(tap, aliases: false, no_simulate: false,
os_arch_combinations: OnSystem::ALL_OS_ARCH_COMBINATIONS)
success = true
if aliases
valid_aliases = valid_aliases?(tap.alias_dir, tap.formula_dir)
success = false unless valid_aliases
end
if no_simulate
success = false unless valid_formulae?(tap)
success = false unless valid_casks?(tap)
else
os_arch_combinations.each do |os, arch|
bottle_tag = Utils::Bottles::Tag.new(system: os, arch:)
next unless bottle_tag.valid_combination?
Homebrew::SimulateSystem.with(os:, arch:) do
success = false unless valid_formulae?(tap, bottle_tag:)
success = false unless valid_casks?(tap, os_name: os, arch:)
end
end
end
success
end
sig { params(filename: Pathname).returns(T::Boolean) }
private_class_method def self.syntax_errors_or_warnings?(filename)
# Retrieve messages about syntax errors/warnings printed to `$stderr`.
_, err, status = system_command(RUBY_PATH, args: ["-c", "-w", filename], print_stderr: false).to_a
# Ignore unnecessary warning about named capture conflicts.
# See https://bugs.ruby-lang.org/issues/12359.
messages = err.lines
.grep_v(/named capture conflicts a local variable/)
.join
$stderr.print messages
# Only syntax errors result in a non-zero status code. To detect syntax
# warnings we also need to inspect the output to `$stderr`.
!status.success? || !messages.chomp.empty?
end
end
require "extend/os/readall"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/unpack_strategy.rb | Library/Homebrew/unpack_strategy.rb | # typed: strict
# frozen_string_literal: true
require "mktemp"
require "system_command"
require "utils/output"
# Module containing all available strategies for unpacking archives.
module UnpackStrategy
extend T::Helpers
extend Utils::Output::Mixin
include SystemCommand::Mixin
include Utils::Output::Mixin
abstract!
requires_ancestor { Kernel }
UnpackStrategyType = T.type_alias { T.all(T::Class[UnpackStrategy], UnpackStrategy::ClassMethods) }
module ClassMethods
extend T::Helpers
abstract!
sig { abstract.returns(T::Array[String]) }
def extensions; end
sig { abstract.params(path: Pathname).returns(T::Boolean) }
def can_extract?(path); end
end
mixes_in_class_methods(ClassMethods)
sig { returns(T::Array[UnpackStrategyType]) }
def self.strategies
@strategies ||= T.let([
Tar, # Needs to be before Bzip2/Gzip/Xz/Lzma/Zstd.
Pax,
Gzip,
Dmg, # Needs to be before Bzip2/Xz/Lzma.
Lzma,
Xz,
Zstd,
Lzip,
Air, # Needs to be before `Zip`.
Jar, # Needs to be before `Zip`.
LuaRock, # Needs to be before `Zip`.
MicrosoftOfficeXml, # Needs to be before `Zip`.
Zip,
Pkg, # Needs to be before `Xar`.
Xar,
Ttf,
Otf,
Git,
Mercurial,
Subversion,
Cvs,
SelfExtractingExecutable, # Needs to be before `Cab`.
Cab,
Executable,
Bzip2,
Fossil,
Bazaar,
Compress,
P7Zip,
Sit,
Rar,
Lha,
].freeze, T.nilable(T::Array[UnpackStrategyType]))
end
private_class_method :strategies
sig { params(type: Symbol).returns(T.nilable(UnpackStrategyType)) }
def self.from_type(type)
type = {
naked: :uncompressed,
nounzip: :uncompressed,
seven_zip: :p7zip,
}.fetch(type, type)
begin
const_get(type.to_s.split("_").map(&:capitalize).join.gsub(/\d+[a-z]/, &:upcase))
rescue NameError
nil
end
end
sig { params(extension: String).returns(T.nilable(UnpackStrategyType)) }
# Picks the strategy whose registered extension matches, preferring
# strategies with longer extensions (e.g. ".tar.gz" beats ".gz").
def self.from_extension(extension)
  by_length = strategies.sort_by { |strategy| strategy.extensions.map(&:length).max || 0 }
  by_length.reverse_each.find { |strategy| extension.end_with?(*strategy.extensions) }
end
sig { params(path: Pathname).returns(T.nilable(UnpackStrategyType)) }
# First strategy (in registration order) whose content sniffing recognises
# the file; nil if none match.
def self.from_magic(path)
  strategies.each do |strategy|
    return strategy if strategy.can_extract?(path)
  end
  nil
end
sig {
  params(path: Pathname, prioritize_extension: T::Boolean, type: T.nilable(Symbol), ref_type: T.nilable(Symbol),
  ref: T.nilable(String), merge_xattrs: T::Boolean).returns(UnpackStrategy)
}
# Chooses and instantiates an unpack strategy for `path`.
# Resolution order: explicit `type` first; then, with prioritize_extension
# and a non-empty extname, extension match before a restricted magic match
# (directory-like strategies and Fossil only); otherwise magic match before
# extension match. Falls back to Uncompressed when nothing matches.
def self.detect(path, prioritize_extension: false, type: nil, ref_type: nil, ref: nil, merge_xattrs: false)
  strategy = from_type(type) if type

  if prioritize_extension && path.extname.present?
    strategy ||= from_extension(path.extname)
    strategy ||= strategies.find { |s| (s < Directory || s == Fossil) && s.can_extract?(path) }
  else
    strategy ||= from_magic(path)
    strategy ||= from_extension(path.extname)
  end

  strategy ||= Uncompressed
  strategy.new(path, ref_type:, ref:, merge_xattrs:)
end
# The path of the archive being unpacked.
sig { returns(Pathname) }
attr_reader :path

# Whether to merge extended attributes when extracting.
sig { returns(T::Boolean) }
attr_reader :merge_xattrs

sig {
  params(path: T.any(String, Pathname), ref_type: T.nilable(Symbol), ref: T.nilable(String),
         merge_xattrs: T::Boolean).void
}
def initialize(path, ref_type: nil, ref: nil, merge_xattrs: false)
  @path = T.let(Pathname(path).expand_path, Pathname)
  # ref_type/ref are used by VCS strategies to check out a specific revision.
  @ref_type = T.let(ref_type, T.nilable(Symbol))
  @ref = T.let(ref, T.nilable(String))
  @merge_xattrs = merge_xattrs
end

# Extract the archive into +unpack_dir+; implemented by each concrete strategy.
sig { abstract.params(unpack_dir: Pathname, basename: Pathname, verbose: T::Boolean).void }
def extract_to_dir(unpack_dir, basename:, verbose:); end
private :extract_to_dir
sig {
  params(
    to: T.nilable(Pathname), basename: T.nilable(T.any(String, Pathname)), verbose: T::Boolean,
  ).void
}
# Extract the archive into +to+ (default: the current working directory),
# creating the destination directory when needed.
def extract(to: nil, basename: nil, verbose: false)
  target_basename = Pathname(basename || path.basename)
  destination = Pathname(to || Dir.pwd).expand_path
  destination.mkpath
  extract_to_dir(destination, basename: target_basename, verbose:)
end
sig {
  params(
    to: T.nilable(Pathname),
    basename: T.nilable(T.any(String, Pathname)),
    verbose: T::Boolean,
    prioritize_extension: T::Boolean,
  ).void
}
# Recursively extract nested archives (e.g. a .tar inside a .gz) by unpacking
# into a temporary directory and, while the result is a single extractable
# file, re-detecting a strategy for it and recursing.
def extract_nestedly(to: nil, basename: nil, verbose: false, prioritize_extension: false)
  Mktemp.new("homebrew-unpack").run(chdir: false) do |unpack_dir|
    tmp_unpack_dir = T.must(unpack_dir.tmpdir)

    extract(to: tmp_unpack_dir, basename:, verbose:)

    children = tmp_unpack_dir.children

    # A single non-directory child may itself be another archive: recurse.
    if children.size == 1 && !children.fetch(0).directory?
      first_child = children.first
      next if first_child.nil?

      s = UnpackStrategy.detect(first_child, prioritize_extension:)

      s.extract_nestedly(to:, verbose:, prioritize_extension:)

      next
    end

    # Ensure all extracted directories are writable.
    each_directory(tmp_unpack_dir) do |path|
      next if path.writable?

      FileUtils.chmod "u+w", path, verbose:
    end

    # Move the fully-unpacked tree to its final destination.
    Directory.new(tmp_unpack_dir, move: true).extract(to:, verbose:)
  end
end
# Extra formulae or casks required to unpack this archive type;
# none by default (subclasses override this).
sig { returns(T.any(T::Array[Cask::Cask], T::Array[Formula])) }
def dependencies
  []
end
# Helper method for iterating over directory trees.
sig {
  params(
    pathname: Pathname,
    _block: T.proc.params(path: Pathname).void,
  ).void
}
def each_directory(pathname, &_block)
  # Walk the whole tree rooted at +pathname+, yielding only directories.
  pathname.find do |entry|
    yield entry if entry.directory?
  end
end
end
require "unpack_strategy/air"
require "unpack_strategy/bazaar"
require "unpack_strategy/bzip2"
require "unpack_strategy/cab"
require "unpack_strategy/compress"
require "unpack_strategy/cvs"
require "unpack_strategy/directory"
require "unpack_strategy/dmg"
require "unpack_strategy/executable"
require "unpack_strategy/fossil"
require "unpack_strategy/generic_unar"
require "unpack_strategy/git"
require "unpack_strategy/gzip"
require "unpack_strategy/jar"
require "unpack_strategy/lha"
require "unpack_strategy/lua_rock"
require "unpack_strategy/lzip"
require "unpack_strategy/lzma"
require "unpack_strategy/mercurial"
require "unpack_strategy/microsoft_office_xml"
require "unpack_strategy/otf"
require "unpack_strategy/p7zip"
require "unpack_strategy/pax"
require "unpack_strategy/pkg"
require "unpack_strategy/rar"
require "unpack_strategy/self_extracting_executable"
require "unpack_strategy/sit"
require "unpack_strategy/subversion"
require "unpack_strategy/tar"
require "unpack_strategy/ttf"
require "unpack_strategy/uncompressed"
require "unpack_strategy/xar"
require "unpack_strategy/xz"
require "unpack_strategy/zip"
require "unpack_strategy/zstd"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/formula_pin.rb | Library/Homebrew/formula_pin.rb | # typed: strict
# frozen_string_literal: true
require "keg"
# Helper functions for pinning a formula.
class FormulaPin
  sig { params(formula: Formula).void }
  def initialize(formula)
    @formula = formula
  end

  # Symlink under HOMEBREW_PINNED_KEGS that represents this formula's pin.
  sig { returns(Pathname) }
  def path
    HOMEBREW_PINNED_KEGS/@formula.name
  end

  # Pin the formula to a specific installed version. Does nothing if the
  # formula is already pinned or that version is not installed.
  sig { params(version: PkgVersion).void }
  def pin_at(version)
    HOMEBREW_PINNED_KEGS.mkpath
    keg_path = @formula.rack/version.to_s
    return if pinned? || !keg_path.exist?

    path.make_relative_symlink(keg_path)
  end

  # Pin the formula at its newest installed keg, if any.
  sig { void }
  def pin
    newest_keg = @formula.installed_kegs.max_by(&:scheme_and_version)
    pin_at(newest_keg.version) unless newest_keg.nil?
  end

  # Remove the pin; the pins directory is removed once it becomes empty.
  sig { void }
  def unpin
    path.unlink if pinned?
    HOMEBREW_PINNED_KEGS.rmdir_if_possible
  end

  sig { returns(T::Boolean) }
  def pinned?
    path.symlink?
  end

  # A formula can only be pinned when at least one version is installed.
  sig { returns(T::Boolean) }
  def pinnable?
    !@formula.installed_prefixes.empty?
  end

  # The version currently pinned, or nil when unpinned.
  sig { returns(T.nilable(PkgVersion)) }
  def pinned_version
    return unless pinned?

    Keg.new(path.resolved_path).version
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/executables_db.rb | Library/Homebrew/executables_db.rb | # typed: strict
# frozen_string_literal: true
# License: MIT
# The license text can be found in Library/Homebrew/command-not-found/LICENSE
require "formula"
require "formulary"
require "tap"
require "utils/output"
module Homebrew
  # ExecutablesDB represents a DB associating formulae to the binaries they
  # provide.
  class ExecutablesDB
    include Utils::Output::Mixin

    # Mapping of formula full name => FormulaEntry (version + binary names).
    sig { returns(T::Hash[String, FormulaEntry]) }
    attr_accessor :exes

    # Changes recorded since the DB was loaded; used for commit messages.
    sig { returns(Changes) }
    attr_reader :changes

    # Directory containing the DB file.
    sig { returns(Pathname) }
    attr_reader :root

    # Each DB line has the form "name(version):exe1 exe2 ...".
    DB_LINE_REGEX = /^(?<name>.*?)(?:\((?<version>.*)\)):(?<exes_line>.*)?$/

    # One formula's recorded package version and sorted binary names.
    class FormulaEntry < T::Struct
      const :version, String
      const :binaries, T::Array[String]
    end

    # Tracks which formulae were added/removed/updated/version-bumped
    # between DB load and save.
    class Changes
      TYPES = [:add, :remove, :update, :version_bump].freeze

      sig { returns(T::Set[String]) }
      attr_accessor :add, :remove, :update, :version_bump

      sig { void }
      def initialize
        @add = T.let(Set.new, T::Set[String])
        @remove = T.let(Set.new, T::Set[String])
        @update = T.let(Set.new, T::Set[String])
        @version_bump = T.let(Set.new, T::Set[String])
      end

      sig { returns(T::Boolean) }
      def changed?
        add.any? || remove.any? || update.any? || version_bump.any?
      end
    end

    # initialize a new DB with the given filename. The file will be used to
    # populate the DB if it exists. It'll be created or overridden when saving the
    # DB.
    # @see #save!
    sig { params(filename: String).void }
    def initialize(filename)
      @filename = filename
      @root = T.let(Pathname.new(@filename).parent, Pathname)
      @exes = T.let({}, T::Hash[String, FormulaEntry])
      # keeps track of things that changed in the DB between its creation and
      # each {#save!} call. This is used to generate commit messages
      @changes = T.let(Changes.new, Changes)
      return unless File.file? @filename

      File.new(@filename).each do |line|
        matches = line.match DB_LINE_REGEX
        next unless matches

        name = T.must(matches[:name])
        version = T.must(matches[:version])
        binaries = matches[:exes_line]&.split || []
        # ||= keeps the first entry when a name appears more than once.
        @exes[name] ||= FormulaEntry.new(version:, binaries:)
      end
    end

    sig { returns(T::Array[String]) }
    def formula_names
      @exes.keys
    end

    sig { returns(T::Boolean) }
    def changed?
      @changes.changed?
    end

    # update the DB with the installed formulae
    # @see #save!
    sig {
      params(
        update_existing: T::Boolean,
        install_missing: T::Boolean,
        max_downloads: T.nilable(Integer),
        eval_all: T::Boolean,
      ).void
    }
    def update!(update_existing: false, install_missing: false, max_downloads: nil, eval_all: false)
      downloads = 0
      disabled_formulae = []
      # Evaluate only the core tap by default.
      taps = eval_all ? Tap.each.to_a : [CoreTap.instance]
      taps.each do |tap|
        tap.formula_files_by_name.each_key do |name|
          f = Formulary.factory("#{tap}/#{name}")
          # NOTE(review): this only breaks out of the current tap's inner loop
          # once the budget is exceeded; subsequent taps are still iterated.
          break if max_downloads.present? && downloads > max_downloads.to_i

          name = f.full_name
          if f.disabled?
            disabled_formulae << name
            next
          end

          update_formula = missing_formula?(f) || (update_existing && outdated_formula?(f))
          # Install unbottled formulae if they should be added/updated
          if !f.bottled? && install_missing && update_formula
            downloads += 1
            ohai "Installing #{f}"
            system HOMEBREW_BREW_FILE, "install", "--formula", f.to_s
          end

          # We don't need to worry about updating outdated versions unless update_existing is true
          if f.latest_version_installed?
            update_installed_formula f
          elsif f.bottled? && update_formula
            downloads += 1
            update_bottled_formula f
          end

          # renamed formulae
          f.oldnames.each do |oldname|
            mv oldname, name if @exes[oldname]
          end

          # aliased formulae
          f.aliases.each do |a|
            mv a, name if @exes[a]
          end
        end
      end

      # Drop entries for formulae that no longer exist or are now disabled.
      removed = (@exes.keys - Formula.full_names) | disabled_formulae
      removed.each do |name|
        next unless @exes.key?(name)

        @exes.delete name
        @changes.remove << name
      end
      nil
    end

    # save the DB in the underlying file
    sig { void }
    def save!
      # Lines are sorted so the DB file diffs cleanly between runs.
      ordered_db = @exes.map do |formula, entry|
        version_string = "(#{entry.version})"
        "#{formula}#{version_string}:#{entry.binaries.join(" ")}\n"
      end.sort

      File.open(@filename, "w") do |f|
        ordered_db.each do |line|
          f.write(line)
        end
      end
    end

    private

    # Rename a DB entry from +old+ to +new+ (used for renames and aliases).
    sig { params(old: String, new: String).void }
    def mv(old, new)
      return unless (old_entry = @exes[old])

      unless @exes[new]
        @exes[new] = old_entry
        @changes.add << new
      end
      @exes.delete old
      @changes.remove << old
      puts "Moving #{old} => #{new}"
    end

    sig { params(formula: Formula).returns(T::Boolean) }
    def missing_formula?(formula)
      !@exes.key? formula.full_name
    end

    sig { params(formula: Formula).returns(T::Boolean) }
    def outdated_formula?(formula)
      return true unless (entry = @exes[formula.full_name])

      formula.pkg_version.to_s != entry.version
    end

    # Collect executable names from an installed prefix's bin/sbin directories.
    sig { params(formula: Formula, prefix: Pathname).void }
    def update_formula_binaries_from_prefix(formula, prefix = T.unsafe(nil))
      prefix ||= formula.prefix
      binaries = Set.new
      Dir["#{prefix}/{bin,sbin}/*"].each do |file|
        binaries << File.basename(file).to_s if File.executable? file
      end
      update_formula_binaries(formula, binaries)
    end

    # Record the binaries for a formula, noting the kind of change made
    # (add, content update, or version bump with identical binaries).
    sig { params(formula: Formula, binaries: T::Set[String]).void }
    def update_formula_binaries(formula, binaries)
      name = formula.full_name
      version = formula.pkg_version.to_s
      binaries = binaries.to_a.sort
      if missing_formula? formula
        @changes.add << name
      elsif (formula_entry = @exes[name]) && formula_entry.binaries != binaries
        @changes.update << name
      elsif outdated_formula? formula
        @changes.version_bump << name
      end
      @exes[name] = FormulaEntry.new(version:, binaries:)
    end

    # update the binaries of {formula}, assuming it's installed
    sig { params(formula: Formula).void }
    def update_installed_formula(formula)
      update_formula_binaries_from_prefix formula
    end

    # Add a formula's binaries from its bottle
    sig { params(formula: Formula).void }
    def update_bottled_formula(formula)
      return unless (formula_bottle = formula.bottle)

      formula_bottle.fetch
      path = formula_bottle.resource.cached_download.to_s
      # List bin/sbin entries from the bottle tarball without unpacking it.
      content = Utils.popen_read("tar", "tzvf", path, "*/bin/*", "*/sbin/*")
      binaries = Set.new
      prefix = formula.prefix.relative_path_from(HOMEBREW_CELLAR).to_s
      binpath_re = %r{^#{prefix}/s?bin/}
      content.each_line do |line|
        # skip directories and non-executable files
        # 'l' = symlink, '-' = regular file
        next unless /^[l-]r.x/.match?(line)

        # ignore symlink targets
        line = line.chomp.sub(/\s+->.+$/, "")
        path = line.split(/\s+/).last
        next unless binpath_re.match?(path)

        binaries << Pathname.new(path).basename.to_s
      end
      update_formula_binaries formula, binaries
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/metafiles.rb | Library/Homebrew/metafiles.rb | # typed: strict
# frozen_string_literal: true
# Helper for checking if a file is considered a metadata file.
module Metafiles
  LICENSES = T.let(Set.new(%w[copying copyright license licence]).freeze, T::Set[String])
  # {https://github.com/github/markup#markups}
  EXTENSIONS = T.let(Set.new(%w[
    .adoc .asc .asciidoc .creole .html .markdown .md .mdown .mediawiki .mkdn
    .org .pod .rdoc .rst .rtf .textile .txt .wiki
  ]).freeze, T::Set[String])
  BASENAMES = T.let(Set.new(%w[
    about authors changelog changes history news notes notice readme todo
  ]).freeze, T::Set[String])

  module_function

  # Should this file be listed? Excludes macOS cruft, install receipts and
  # anything that is copied as metadata instead.
  sig { params(file: String).returns(T::Boolean) }
  def list?(file)
    excluded = %w[.DS_Store INSTALL_RECEIPT.json].include?(file)
    !excluded && !copy?(file)
  end

  # Is this a metadata file (license/readme/changelog/etc.)?
  # Matching is case-insensitive.
  sig { params(file: String).returns(T::Boolean) }
  def copy?(file)
    name = file.downcase
    # A license match only needs the part before the first "." or "-"
    # (e.g. "license-apache.txt" -> "license").
    prefix = name.split(/\.|-/).first
    return false if prefix.nil?
    return true if LICENSES.include?(prefix)

    # Strip a known markup extension before comparing against basenames.
    extension = File.extname(name)
    name = File.basename(name, extension) if EXTENSIONS.include?(extension)
    BASENAMES.include?(name)
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/formula_name_cask_token_auditor.rb | Library/Homebrew/formula_name_cask_token_auditor.rb | # typed: strict
# frozen_string_literal: true
module Homebrew
  # Audits a formula name or cask token for characters and patterns that
  # Homebrew's naming rules forbid.
  class FormulaNameCaskTokenAuditor
    sig { returns(String) }
    attr_reader :token

    sig { params(token: String).void }
    def initialize(token)
      @token = token
    end

    # Returns a human-readable description for every violation found;
    # empty when the token is valid.
    sig { returns(T::Array[String]) }
    def errors
      found = []
      found << "uppercase letters" if /[A-Z]/.match?(token)
      found << "whitespace" if /\s/.match?(token)
      found << "non-ASCII characters" unless token.ascii_only?
      found << "double hyphens" if token.include?("--")
      found << "a leading @" if token.start_with?("@")
      found << "a trailing @" if token.end_with?("@")
      found << "a leading hyphen" if token.start_with?("-")
      found << "a trailing hyphen" if token.end_with?("-")
      found << "multiple @ symbols" if token.count("@") > 1
      found << "a hyphen followed by an @" if token.include?("-@")
      found << "an @ followed by a hyphen" if token.include?("@-")
      found
    end
  end
end
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/macos_version.rb | Library/Homebrew/macos_version.rb | # typed: strong
# frozen_string_literal: true
require "version"
# A macOS version.
class MacOSVersion < Version
  # Raised when a macOS version is unsupported.
  class Error < RuntimeError
    sig { returns(T.nilable(T.any(String, Symbol))) }
    attr_reader :version

    sig { params(version: T.nilable(T.any(String, Symbol))).void }
    def initialize(version)
      @version = version
      super "unknown or unsupported macOS version: #{version.inspect}"
    end
  end

  # NOTE: When removing symbols here, ensure that they are added
  # to `DEPRECATED_MACOS_VERSIONS` in `MacOSRequirement`.
  # NOTE: Changes to this list must match `macos_version_name` in `cmd/update.sh`.
  SYMBOLS = T.let({
    tahoe: "26",
    sequoia: "15",
    sonoma: "14",
    ventura: "13",
    monterey: "12",
    # odisabled: remove support for Big Sur and macOS x86_64 September (or later) 2027
    big_sur: "11",
    # odisabled: remove support for Catalina September (or later) 2026
    catalina: "10.15",
  }.freeze, T::Hash[Symbol, String])

  # Map a macOS version to its Darwin kernel major version.
  sig { params(macos_version: MacOSVersion).returns(Version) }
  def self.kernel_major_version(macos_version)
    version_major = macos_version.major.to_i
    if version_major >= 26
      # Year-based releases (macOS 26+): kernel major is one less.
      Version.new((version_major - 1).to_s)
    elsif version_major > 10
      # macOS 11-15: kernel major is macOS major + 9 (e.g. 11 -> 20).
      Version.new((version_major + 9).to_s)
    else
      # macOS 10.x: kernel major is minor + 4 (e.g. 10.15 -> 19).
      version_minor = macos_version.minor.to_i
      Version.new((version_minor + 4).to_s)
    end
  end

  # Build a version from a known symbol (e.g. :sonoma); raises Error otherwise.
  sig { params(version: Symbol).returns(T.attached_class) }
  def self.from_symbol(version)
    str = SYMBOLS.fetch(version) { raise MacOSVersion::Error, version }
    new(str)
  end

  sig { params(version: T.nilable(String)).void }
  def initialize(version)
    # Accept only "NN", "NN.N" or "NN.N.N" with a 2+-digit major version.
    raise MacOSVersion::Error, version unless /\A\d{2,}(?:\.\d+){0,2}\z/.match?(version)

    super(T.must(version))

    @comparison_cache = T.let({}, T::Hash[T.untyped, T.nilable(Integer)])
    @pretty_name = T.let(nil, T.nilable(String))
    @sym = T.let(nil, T.nilable(Symbol))
  end

  # Compare against another version or a version symbol (e.g. :sonoma),
  # memoizing per right-hand side (unless frozen, when the cache can't grow).
  sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
  def <=>(other)
    return @comparison_cache[other] if @comparison_cache.key?(other)

    result = case other
    when Symbol
      if SYMBOLS.key?(other) && to_sym == other
        0
      else
        # Unknown symbols fall back to their string form for comparison.
        v = SYMBOLS.fetch(other) { other.to_s }
        super(v)
      end
    else
      super
    end

    @comparison_cache[other] = result unless frozen?

    result
  end

  # Drop the patch component, e.g. "14.2.1" -> "14" and "10.15.7" -> "10.15".
  sig { returns(T.self_type) }
  def strip_patch
    return self if null?

    # Big Sur is 11.x but Catalina is 10.15.x.
    if T.must(major) >= 11
      self.class.new(major.to_s)
    else
      major_minor
    end
  end

  # The release name as a symbol, or :dunno when unrecognized.
  sig { returns(Symbol) }
  def to_sym
    return @sym if @sym

    sym = SYMBOLS.invert.fetch(strip_patch.to_s, :dunno)
    @sym = sym unless frozen?
    sym
  end

  # Human-readable release name, e.g. "Big Sur".
  sig { returns(String) }
  def pretty_name
    return @pretty_name if @pretty_name

    pretty_name = to_sym.to_s.split("_").map(&:capitalize).join(" ").freeze
    @pretty_name = pretty_name unless frozen?
    pretty_name
  end

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}: #{to_s.inspect}>"
  end

  # Older than the oldest Homebrew-supported macOS release?
  sig { returns(T::Boolean) }
  def outdated_release?
    self < HOMEBREW_MACOS_OLDEST_SUPPORTED
  end

  # Newer than the newest Homebrew-supported macOS release?
  sig { returns(T::Boolean) }
  def prerelease?
    self >= HOMEBREW_MACOS_NEWEST_UNSUPPORTED
  end

  sig { returns(T::Boolean) }
  def unsupported_release?
    outdated_release? || prerelease?
  end

  # Whether this release requires a Nehalem-or-newer Intel CPU.
  # Raises on non-Intel architectures.
  sig { returns(T::Boolean) }
  def requires_nehalem_cpu?
    return false if null?

    require "hardware"

    return Hardware.oldest_cpu(self) == :nehalem if Hardware::CPU.intel?

    raise ArgumentError, "Unexpected architecture: #{Hardware::CPU.arch}. This only works with Intel architecture."
  end
  # https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)
  alias requires_sse4? requires_nehalem_cpu?
  alias requires_sse41? requires_nehalem_cpu?
  alias requires_sse42? requires_nehalem_cpu?
  alias requires_popcnt? requires_nehalem_cpu?

  # Represents the absence of a version.
  #
  # NOTE: Constructor needs to called with an arbitrary macOS-like version which is then set to `nil`.
  NULL = T.let(MacOSVersion.new("10.0").tap do |v|
    T.let(v, MacOSVersion).instance_variable_set(:@version, nil)
  end.freeze, MacOSVersion)
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/sandbox.rb | Library/Homebrew/sandbox.rb | # typed: strict
# frozen_string_literal: true
require "erb"
require "io/console"
require "pty"
require "tempfile"
require "utils/fork"
require "utils/output"
# Helper class for running a sub-process inside of a sandboxed environment.
class Sandbox
  include Utils::Output::Mixin

  SANDBOX_EXEC = "/usr/bin/sandbox-exec"

  # This is defined in the macOS SDK but Ruby unfortunately does not expose it.
  # This value can be found by compiling a C program that prints TIOCSCTTY.
  # The value is different on Linux but that's not a problem as we only support macOS in this file.
  TIOCSCTTY = 0x20007461
  private_constant :TIOCSCTTY

  # Generic implementation returns false; platform support is layered on
  # via extend/os (required at the bottom of this file).
  sig { returns(T::Boolean) }
  def self.available?
    false
  end

  sig { void }
  def initialize
    @profile = T.let(SandboxProfile.new, SandboxProfile)
    @failed = T.let(false, T::Boolean)
  end

  # Record sandbox violation logs to +file+ after the run.
  sig { params(file: T.any(String, Pathname)).void }
  def record_log(file)
    @logfile = T.let(file, T.nilable(T.any(String, Pathname)))
  end

  # Append a single allow/deny rule to the sandbox profile.
  sig { params(allow: T::Boolean, operation: String, filter: T.nilable(String), modifier: T.nilable(String)).void }
  def add_rule(allow:, operation:, filter: nil, modifier: nil)
    rule = SandboxRule.new(allow:, operation:, filter:, modifier:)
    @profile.add_rule(rule)
  end

  # Allow writes (including mode/setugid changes) to +path+.
  sig { params(path: T.any(String, Pathname), type: Symbol).void }
  def allow_write(path:, type: :literal)
    add_rule allow: true, operation: "file-write*", filter: path_filter(path, type)
    add_rule allow: true, operation: "file-write-setugid", filter: path_filter(path, type)
    add_rule allow: true, operation: "file-write-mode", filter: path_filter(path, type)
  end

  sig { params(path: T.any(String, Pathname), type: Symbol).void }
  def deny_write(path:, type: :literal)
    add_rule allow: false, operation: "file-write*", filter: path_filter(path, type)
  end

  # Convenience: allow writes anywhere under +path+.
  sig { params(path: T.any(String, Pathname)).void }
  def allow_write_path(path)
    allow_write path:, type: :subpath
  end

  sig { params(path: T.any(String, Pathname)).void }
  def deny_write_path(path)
    deny_write path:, type: :subpath
  end

  sig { void }
  def allow_write_temp_and_cache
    allow_write_path "/private/tmp"
    allow_write_path "/private/var/tmp"
    allow_write path: "^/private/var/folders/[^/]+/[^/]+/[C,T]/", type: :regex
    allow_write_path HOMEBREW_TEMP
    allow_write_path HOMEBREW_CACHE
  end

  sig { void }
  def allow_cvs
    allow_write_path "#{Dir.home(ENV.fetch("USER"))}/.cvspass"
  end

  sig { void }
  def allow_fossil
    allow_write_path "#{Dir.home(ENV.fetch("USER"))}/.fossil"
    allow_write_path "#{Dir.home(ENV.fetch("USER"))}/.fossil-journal"
  end

  # Allow writes to a formula's keg rack, etc and var directories.
  sig { params(formula: Formula).void }
  def allow_write_cellar(formula)
    allow_write_path formula.rack
    allow_write_path formula.etc
    allow_write_path formula.var
  end

  # Xcode projects expect access to certain cache/archive dirs.
  sig { void }
  def allow_write_xcode
    allow_write_path "#{Dir.home(ENV.fetch("USER"))}/Library/Developer"
    allow_write_path "#{Dir.home(ENV.fetch("USER"))}/Library/Caches/org.swift.swiftpm"
  end

  sig { params(formula: Formula).void }
  def allow_write_log(formula)
    allow_write_path formula.logs
  end

  # Protect the Homebrew installation itself from the sandboxed process.
  sig { void }
  def deny_write_homebrew_repository
    deny_write path: HOMEBREW_ORIGINAL_BREW_FILE
    if HOMEBREW_PREFIX.to_s == HOMEBREW_REPOSITORY.to_s
      deny_write_path HOMEBREW_LIBRARY
      deny_write_path HOMEBREW_REPOSITORY/".git"
    else
      deny_write_path HOMEBREW_REPOSITORY
    end
  end

  sig { params(path: T.any(String, Pathname), type: Symbol).void }
  def allow_network(path:, type: :literal)
    add_rule allow: true, operation: "network*", filter: path_filter(path, type)
  end

  sig { params(path: T.any(String, Pathname), type: Symbol).void }
  def deny_network(path:, type: :literal)
    add_rule allow: false, operation: "network*", filter: path_filter(path, type)
  end

  sig { void }
  def allow_all_network
    add_rule allow: true, operation: "network*"
  end

  sig { void }
  def deny_all_network
    add_rule allow: false, operation: "network*"
  end

  # Run +args+ under sandbox-exec inside a pseudoterminal, forwarding
  # stdin/stdout, and report any sandbox denials from the system log.
  sig { params(args: T.any(String, Pathname)).void }
  def run(*args)
    Dir.mktmpdir("homebrew-sandbox", HOMEBREW_TEMP) do |tmpdir|
      allow_network path: File.join(tmpdir, "socket"), type: :literal # Make sure we have access to the error pipe.

      seatbelt = File.new(File.join(tmpdir, "homebrew.sb"), "wx")
      seatbelt.write(@profile.dump)
      seatbelt.close
      @start = T.let(Time.now, T.nilable(Time))

      begin
        command = [SANDBOX_EXEC, "-f", seatbelt.path, *args]
        # Start sandbox in a pseudoterminal to prevent access of the parent terminal.
        PTY.open do |controller, worker|
          # Set the PTY's window size to match the parent terminal.
          # Some formula tests are sensitive to the terminal size and fail if this is not set.
          winch = proc do |_sig|
            controller.winsize = if $stdout.tty?
              # We can only use IO#winsize if the IO object is a TTY.
              $stdout.winsize
            else
              # Otherwise, default to tput, if available.
              # This relies on ncurses rather than the system's ioctl.
              [Utils.popen_read("tput", "lines").to_i, Utils.popen_read("tput", "cols").to_i]
            end
          end

          write_to_pty = proc do
            # Don't hang if stdin is not able to be used - throw EIO instead.
            old_ttin = trap(:TTIN, "IGNORE")

            # Update the window size whenever the parent terminal's window size changes.
            old_winch = trap(:WINCH, &winch)
            winch.call(nil)

            stdin_thread = Thread.new do
              IO.copy_stream($stdin, controller)
            rescue Errno::EIO
              # stdin is unavailable - move on.
            end

            stdout_thread = Thread.new do
              controller.each_char { |c| print(c) }
            end

            Utils.safe_fork(directory: tmpdir, yield_parent: true) do |error_pipe|
              if error_pipe
                # Child side
                Process.setsid
                controller.close
                worker.ioctl(TIOCSCTTY, 0) # Make this the controlling terminal.
                File.open("/dev/tty", Fcntl::O_WRONLY).close # Workaround for https://developer.apple.com/forums/thread/663632
                worker.close_on_exec = true
                exec(*command, in: worker, out: worker, err: worker) # And map everything to the PTY.
              else
                # Parent side
                worker.close
              end
            end
          rescue ChildProcessError => e
            raise ErrorDuringExecution.new(command, status: e.status)
          ensure
            stdin_thread&.kill
            stdout_thread&.kill
            trap(:TTIN, old_ttin)
            trap(:WINCH, old_winch)
          end

          if $stdin.tty?
            # If stdin is a TTY, use io.raw to set stdin to a raw, passthrough
            # mode while we copy the input/output of the process spawned in the
            # PTY. After we've finished copying to/from the PTY process, io.raw
            # will restore the stdin TTY to its original state.
            begin
              # Ignore SIGTTOU as setting raw mode will hang if the process is in the background.
              old_ttou = trap(:TTOU, "IGNORE")
              $stdin.raw(&write_to_pty)
            ensure
              trap(:TTOU, old_ttou)
            end
          else
            write_to_pty.call
          end
        end
      rescue
        @failed = true
        raise
      ensure
        sleep 0.1 # wait for a bit to let syslog catch up the latest events.

        # Collect sandbox denial messages logged since @start by the kernel
        # or sandboxd.
        syslog_args = [
          "-F", "$((Time)(local)) $(Sender)[$(PID)]: $(Message)",
          "-k", "Time", "ge", @start.to_i.to_s,
          "-k", "Message", "S", "deny",
          "-k", "Sender", "kernel",
          "-o",
          "-k", "Time", "ge", @start.to_i.to_s,
          "-k", "Message", "S", "deny",
          "-k", "Sender", "sandboxd"
        ]
        logs = Utils.popen_read("syslog", *syslog_args)

        # These messages are confusing and non-fatal, so don't report them.
        logs = logs.lines.grep_v(/^.*Python\(\d+\) deny file-write.*pyc$/).join

        unless logs.empty?
          if @logfile
            File.open(@logfile, "w") do |log|
              log.write logs
              log.write "\nWe use time to filter sandbox log. Therefore, unrelated logs may be recorded.\n"
            end
          end

          if @failed && Homebrew::EnvConfig.verbose?
            ohai "Sandbox Log", logs
            $stdout.flush # without it, brew test-bot would fail to catch the log
          end
        end
      end
    end
  end

  # @api private
  # Build a sandbox profile filter expression for +path+; rejects characters
  # that would break out of the profile's quoting.
  sig { params(path: T.any(String, Pathname), type: Symbol).returns(String) }
  def path_filter(path, type)
    invalid_char = ['"', "'", "(", ")", "\n", "\\"].find do |c|
      path.to_s.include?(c)
    end
    raise ArgumentError, "Invalid character '#{invalid_char}' in path: #{path}" if invalid_char

    case type
    when :regex then "regex #\"#{path}\""
    when :subpath then "subpath \"#{expand_realpath(Pathname.new(path))}\""
    when :literal then "literal \"#{expand_realpath(Pathname.new(path))}\""
    else raise ArgumentError, "Invalid path filter type: #{type}"
    end
  end

  private

  # Resolve symlinks in the deepest existing ancestor of +path+ and rejoin
  # the non-existent remainder.
  sig { params(path: Pathname).returns(Pathname) }
  def expand_realpath(path)
    raise unless path.absolute?

    path.exist? ? path.realpath : expand_realpath(path.parent)/path.basename
  end

  # A single (allow|deny operation (filter) (with modifier)) profile rule.
  class SandboxRule
    sig { returns(T::Boolean) }
    attr_reader :allow

    sig { returns(String) }
    attr_reader :operation

    sig { returns(T.nilable(String)) }
    attr_reader :filter

    sig { returns(T.nilable(String)) }
    attr_reader :modifier

    sig { params(allow: T::Boolean, operation: String, filter: T.nilable(String), modifier: T.nilable(String)).void }
    def initialize(allow:, operation:, filter:, modifier:)
      @allow = allow
      @operation = operation
      @filter = filter
      @modifier = modifier
    end
  end
  private_constant :SandboxRule

  # Configuration profile for a sandbox.
  class SandboxProfile
    SEATBELT_ERB = <<~ERB
      (version 1)
      (debug deny) ; log all denied operations to /var/log/system.log
      <%= rules.join("\n") %>
      (allow file-write*
          (literal "/dev/ptmx")
          (literal "/dev/dtracehelper")
          (literal "/dev/null")
          (literal "/dev/random")
          (literal "/dev/zero")
          (regex #"^/dev/fd/[0-9]+$")
          (regex #"^/dev/tty[a-z0-9]*$")
          )
      (deny file-write*) ; deny non-allowlist file write operations
      (deny file-write-setugid) ; deny non-allowlist file write SUID/SGID operations
      (deny file-write-mode) ; deny non-allowlist file write mode operations
      (allow process-exec
          (literal "/bin/ps")
          (with no-sandbox)
          ) ; allow certain processes running without sandbox
      (allow default) ; allow everything else
    ERB

    sig { returns(T::Array[String]) }
    attr_reader :rules

    sig { void }
    def initialize
      @rules = T.let([], T::Array[String])
    end

    # Serialize a SandboxRule into profile syntax and store it.
    sig { params(rule: SandboxRule).void }
    def add_rule(rule)
      s = +"("
      s << (rule.allow ? "allow" : "deny")
      s << " #{rule.operation}"
      s << " (#{rule.filter})" if rule.filter
      s << " (with #{rule.modifier})" if rule.modifier
      s << ")"
      @rules << s.freeze
    end

    # Render the final seatbelt profile text.
    sig { returns(String) }
    def dump
      ERB.new(SEATBELT_ERB).result(binding)
    end
  end
  private_constant :SandboxProfile
end
require "extend/os/sandbox"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/download_queue.rb | Library/Homebrew/download_queue.rb | # typed: strict
# frozen_string_literal: true
require "downloadable"
require "concurrent/promises"
require "concurrent/executors"
require "retryable_download"
require "resource"
require "utils/output"
module Homebrew
class DownloadQueue
include Utils::Output::Mixin
# Build a queue only when the user has enabled concurrent downloads;
# returns nil otherwise so callers can fall back to serial fetching.
sig { params(retries: Integer, force: T::Boolean, pour: T::Boolean).returns(T.nilable(DownloadQueue)) }
def self.new_if_concurrency_enabled(retries: 1, force: false, pour: false)
  return unless Homebrew::EnvConfig.download_concurrency > 1

  new(retries:, force:, pour:)
end
sig { params(retries: Integer, force: T::Boolean, pour: T::Boolean).void }
def initialize(retries: 1, force: false, pour: false)
  @concurrency = T.let(EnvConfig.download_concurrency, Integer)
  # Individual download output is suppressed when running concurrently.
  @quiet = T.let(@concurrency > 1, T::Boolean)
  # Total attempts = retries + 1 initial attempt.
  @tries = T.let(retries + 1, Integer)
  @force = force
  @pour = pour
  @pool = T.let(Concurrent::FixedThreadPool.new(concurrency), Concurrent::FixedThreadPool)
  @tty = T.let($stdout.tty?, T::Boolean)
  @spinner = T.let(nil, T.nilable(Spinner))
end
sig {
  params(
    downloadable: Downloadable,
    check_attestation: T::Boolean,
  ).void
}
# Queue a download to run on the thread pool. Enqueuing the same
# Downloadable twice reuses the existing future (||=).
def enqueue(downloadable, check_attestation: false)
  downloads[downloadable] ||= Concurrent::Promises.future_on(
    pool, RetryableDownload.new(downloadable, tries:, pour:),
    force, quiet, check_attestation
  ) do |download, force, quiet, check_attestation|
    download.clear_cache if force
    download.fetch(quiet:)
    # Optionally verify the bottle's GitHub attestation after fetching.
    if check_attestation && downloadable.is_a?(Bottle)
      Utils::Attestation.check_attestation(downloadable, quiet: true)
    end
  end
end
sig { void }
# Wait for all enqueued downloads, rendering per-download status lines
# (with live progress redraw on a TTY) and reporting failures.
def fetch
  return if downloads.empty?

  # Snapshot the output context so threads can't leak quiet/verbose changes.
  context_before_fetch = Context.current

  if concurrency == 1
    # Serial mode: just wait on each future in order.
    downloads.each do |downloadable, promise|
      promise.wait!
    rescue ChecksumMismatchError => e
      ofail "#{downloadable.download_queue_type} reports different checksum: #{e.expected}"
    rescue => e
      # Bottle manifest failures are ignorable here; re-raise anything else.
      raise e unless bottle_manifest_error?(downloadable, e)
    end
  else
    message_length_max = downloads.keys.map { |download| download.download_queue_message.length }.max || 0

    remaining_downloads = downloads.dup.to_a
    previous_pending_line_count = 0

    begin
      stdout_print_and_flush_if_tty Tty.hide_cursor

      # Renders one download's status line; returns the number of terminal
      # lines it consumed (for cursor bookkeeping).
      output_message = lambda do |downloadable, future, last|
        status = status_from_future(future)
        exception = future.reason if future.rejected?
        next 1 if bottle_manifest_error?(downloadable, exception)

        message = downloadable.download_queue_message
        if tty
          message = message_with_progress(downloadable, future, message, message_length_max)
          stdout_print_and_flush "#{status} #{message}#{"\n" unless last}"
        elsif status
          $stderr.puts "#{status} #{message}"
        end

        if future.rejected?
          if exception.is_a?(ChecksumMismatchError)
            actual = Digest::SHA256.file(downloadable.cached_download).hexdigest
            actual_message, expected_message = align_checksum_mismatch_message(downloadable.download_queue_type)
            ofail "#{actual_message} #{exception.expected}"
            puts "#{expected_message} #{actual}"
            next 2
          elsif exception.is_a?(CannotInstallFormulaError)
            # Remove the bad download so a retry starts clean, then abort.
            cached_download = downloadable.cached_download
            cached_download.unlink if cached_download&.exist?
            raise exception
          else
            message = future.reason.to_s
            ofail message
            next message.count("\n")
          end
        end

        1
      end

      # Poll until every download has finished, redrawing the pending list.
      until remaining_downloads.empty?
        begin
          finished_states = [:fulfilled, :rejected]

          finished_downloads, remaining_downloads = remaining_downloads.partition do |_, future|
            finished_states.include?(future.state)
          end

          finished_downloads.each do |downloadable, future|
            previous_pending_line_count -= 1
            stdout_print_and_flush_if_tty Tty.clear_to_end
            output_message.call(downloadable, future, false)
          end

          previous_pending_line_count = 0
          max_lines = [concurrency, Tty.height].min
          remaining_downloads.each_with_index do |(downloadable, future), i|
            break if previous_pending_line_count >= max_lines

            stdout_print_and_flush_if_tty Tty.clear_to_end
            last = i == max_lines - 1 || i == remaining_downloads.count - 1
            previous_pending_line_count += output_message.call(downloadable, future, last)
          end

          # Move the cursor back to the top of the pending block for redraw.
          if previous_pending_line_count.positive?
            if (previous_pending_line_count - 1).zero?
              stdout_print_and_flush_if_tty Tty.move_cursor_beginning
            else
              stdout_print_and_flush_if_tty Tty.move_cursor_up_beginning(previous_pending_line_count - 1)
            end
          end

          sleep 0.05
        # We want to catch all exceptions to ensure we can cancel any
        # running downloads and flush the TTY.
        rescue Exception # rubocop:disable Lint/RescueException
          remaining_downloads.each do |_, future|
            # FIXME: Implement cancellation of running downloads.
          end

          cancel

          if previous_pending_line_count.positive?
            stdout_print_and_flush_if_tty Tty.move_cursor_down(previous_pending_line_count - 1)
          end

          raise
        end
      end
    ensure
      stdout_print_and_flush_if_tty Tty.show_cursor
    end
  end

  # Restore the pre-parallel fetch context to avoid e.g. quiet state bleeding out from threads.
  Context.current = context_before_fetch

  downloads.clear
end
# Emit +message+ only when stdout is attached to a terminal, so
# progress control sequences never pollute piped output.
sig { params(message: String).void }
def stdout_print_and_flush_if_tty(message)
  return unless $stdout.tty?

  stdout_print_and_flush(message)
end
# Write +message+ to stdout and flush immediately so progress output
# appears without buffering delay.
sig { params(message: String).void }
def stdout_print_and_flush(message)
  $stdout.tap { |io| io.print(message) }.flush
end
# Stop accepting new download jobs and block until all queued work on
# the pool has finished.
sig { void }
def shutdown
  pool.tap(&:shutdown).wait_for_termination
end
private
# True when a failure relates to a bottle manifest: either the
# downloadable itself is a manifest, or the raised error is a manifest
# error. Always false when there was no exception.
sig { params(downloadable: Downloadable, exception: T.nilable(Exception)).returns(T::Boolean) }
def bottle_manifest_error?(downloadable, exception)
  return false unless exception

  manifest_download = downloadable.is_a?(Resource::BottleManifest)
  manifest_failure = exception.is_a?(Resource::BottleManifest::Error)
  manifest_download || manifest_failure
end
# Abort the queue by force; queued work is discarded and workers are
# stopped abruptly.
sig { void }
def cancel
  # FIXME: Implement graceful cancellation of running downloads based on
  # https://ruby-concurrency.github.io/concurrent-ruby/master/Concurrent/Cancellation.html
  # instead of killing the whole thread pool.
  pool.kill
end
# Thread pool on which download jobs execute.
sig { returns(Concurrent::FixedThreadPool) }
attr_reader :pool

# Maximum number of downloads processed at once (also caps how many
# pending rows are rendered).
sig { returns(Integer) }
attr_reader :concurrency

# Number of attempts per download. (presumably a retry budget — TODO confirm)
sig { returns(Integer) }
attr_reader :tries

# Whether to re-download even when a cached copy exists. (assumed from
# name — confirm against callers)
sig { returns(T::Boolean) }
attr_reader :force

# Suppress per-download progress output when true. (assumed from name —
# confirm against callers)
sig { returns(T::Boolean) }
attr_reader :quiet

# Whether fetched bottles should also be poured. (assumed from name —
# confirm against callers)
sig { returns(T::Boolean) }
attr_reader :pour

# Whether output targets an interactive terminal; gates the coloured
# glyphs and spinner in `status_from_future`.
sig { returns(T::Boolean) }
attr_reader :tty
# Lazily-initialised map from each Downloadable to the Future tracking
# its in-flight fetch.
sig { returns(T::Hash[Downloadable, Concurrent::Promises::Future]) }
def downloads
  @downloads ||= T.let({}, T.nilable(T::Hash[Downloadable, Concurrent::Promises::Future]))
end
# Render a one-character status glyph for a download future: a check
# mark on success, a cross on failure, a spinner while pending.
# Colour is applied only on a TTY; pending downloads render nothing
# (nil) when output is not a terminal.
sig { params(future: Concurrent::Promises::Future).returns(T.nilable(String)) }
def status_from_future(future)
  state = future.state

  case state
  when :fulfilled
    tty ? "#{Tty.green}✔︎#{Tty.reset}" : "✔︎"
  when :rejected
    tty ? "#{Tty.red}✘#{Tty.reset}" : "✘"
  when :pending, :processing
    "#{Tty.blue}#{spinner}#{Tty.reset}" if tty
  else
    # Unknown state: fail loudly rather than rendering nothing.
    raise state.to_s
  end
end
# Build the two lines of a checksum-mismatch message, padded to equal
# width so the checksums that follow them line up vertically.
sig { params(downloadable_type: String).returns(T::Array[String]) }
def align_checksum_mismatch_message(downloadable_type)
  reported = "#{downloadable_type} reports different checksum:"
  downloaded = "SHA-256 checksum of downloaded file:"
  # `.max` is nilable for Sorbet; fall back to 0 to satisfy the typecheck.
  width = [reported.size, downloaded.size].max || 0
  # The second line is indented 7 characters to sit under the "Error: "
  # prefix that `ofail` prepends to the first line.
  [reported.ljust(width), "#{" " * 7}#{downloaded.ljust(width)}"]
end
# Shared spinner instance reused across all pending download rows.
sig { returns(Spinner) }
def spinner
  @spinner ||= Spinner.new
end
# Render a single status row combining +message+ with a phase label,
# fetched/total sizes and — while downloading — an ASCII progress bar,
# truncated and padded to fit the terminal width.
# +message_length_max+ is the widest message in the batch, used so the
# bars of all rows share the same width.
sig { params(downloadable: Downloadable, future: Concurrent::Promises::Future, message: String, message_length_max: Integer).returns(String) }
def message_with_progress(downloadable, future, message, message_length_max)
  tty_width = Tty.width
  return message unless tty_width.positive?

  # Keep a 2-column margin from the terminal edge.
  available_width = tty_width - 2

  # Without a fetched size there is nothing to report: just truncate.
  fetched_size = downloadable.fetched_size
  return message[0, available_width].to_s if fetched_size.blank?

  # Fixed-width "123.4MB"-style formatting: 5 chars of number, 2 of unit.
  precision = 1
  size_length = 5
  unit_length = 2
  size_formatting_string = "%<size>#{size_length}.#{precision}f%<unit>#{unit_length}s"
  size, unit = disk_usage_readable_size_unit(fetched_size, precision:)
  formatted_fetched_size = format(size_formatting_string, size:, unit:)
  formatted_total_size = if future.fulfilled?
    # Completed: total equals what was fetched.
    formatted_fetched_size
  elsif (total_size = downloadable.total_size)
    size, unit = disk_usage_readable_size_unit(total_size, precision:)
    format(size_formatting_string, size:, unit:)
  else
    # fill in the missing spaces for the size if we don't have it yet.
    "-" * (size_length + unit_length)
  end

  max_phase_length = 11
  phase = format("%-<phase>#{max_phase_length}s", phase: downloadable.phase.to_s.capitalize)
  progress = " #{phase} #{formatted_fetched_size}/#{formatted_total_size}"
  # Bar gets whatever width is left after message and progress text,
  # but never less than 4 columns.
  bar_length = [4, available_width - progress.length - message_length_max - 1].max

  if downloadable.phase == :downloading
    percent = if (total_size = downloadable.total_size)
      # Guard against a zero total; clamp in case fetched overshoots.
      (fetched_size.to_f / [1, total_size].max).clamp(0.0, 1.0)
    else
      0.0
    end

    bar_used = (percent * bar_length).round
    bar_completed = "#" * bar_used
    bar_pending = "-" * (bar_length - bar_used)
    progress = " #{bar_completed}#{bar_pending}#{progress}"
  end

  message_length = available_width - progress.length
  # If the progress segment alone fills the row, fall back to message only.
  return message[0, available_width].to_s unless message_length.positive?

  "#{message[0, message_length].to_s.ljust(message_length)}#{progress}"
end
# Braille-dot spinner whose frame advances at most once every 0.1s of
# wall-clock time, no matter how frequently it is rendered.
class Spinner
  FRAMES = %w[⠋ ⠙ ⠚ ⠞ ⠖ ⠦ ⠴ ⠲ ⠳ ⠓].freeze

  sig { void }
  def initialize
    @start = T.let(Time.now, Time)
    @i = T.let(0, Integer)
  end

  # Current frame; advances to the next frame when at least 0.1s has
  # elapsed since the last advance.
  sig { returns(String) }
  def to_s
    current_time = Time.now
    if current_time > @start + 0.1
      @start = current_time
      @i = (@i + 1) % FRAMES.count
    end
    FRAMES.fetch(@i)
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/formula_versions.rb | Library/Homebrew/formula_versions.rb | # typed: strict
# frozen_string_literal: true
require "formula"
require "utils/output"
# Helper class for traversing a formula's previous versions.
#
# @api internal
class FormulaVersions
  include Context
  include Utils::Output::Mixin

  # Exceptions that only mean "this old revision can no longer be
  # evaluated"; they are logged and skipped while walking history.
  IGNORED_EXCEPTIONS = [
    ArgumentError, NameError, SyntaxError, TypeError,
    FormulaSpecificationError, FormulaValidationError,
    ErrorDuringExecution, LoadError, MethodDeprecatedError
  ].freeze

  sig { params(formula: Formula).void }
  def initialize(formula)
    @name = T.let(formula.name, String)
    @path = T.let(formula.tap_path, Pathname)
    @repository = T.let(T.must(formula.tap).path, Pathname)
    @relative_path = T.let(@path.relative_path_from(repository).to_s, String)
    # Also look at e.g. older homebrew-core paths before sharding.
    if (match = @relative_path.match(%r{^(HomebrewFormula|Formula)/([a-z]|lib)/(.+)}))
      @old_relative_path = T.let("#{match[1]}/#{match[3]}", T.nilable(String))
    end
    # Per-revision cache of already-instantiated formulae.
    @formula_at_revision = T.let({}, T::Hash[String, Formula])
  end

  # Yields each abbreviated commit hash on +branch+ that touched the
  # formula file, together with the relative path it was found under
  # (the current path first, then the pre-sharding path if any).
  sig { params(branch: String, _block: T.proc.params(revision: String, path: String).void).void }
  def rev_list(branch, &_block)
    repository.cd do
      rev_list_cmd = ["git", "rev-list", "--abbrev-commit", "--remove-empty"]
      [relative_path, old_relative_path].compact.each do |entry|
        Utils.popen_read(*rev_list_cmd, branch, "--", entry) do |io|
          yield io.readline.chomp, entry until io.eof?
        end
      end
    end
  end

  # Instantiates (and memoises) the formula as it existed at +revision+
  # and yields it to the block; returns nil when that revision cannot be
  # loaded (see IGNORED_EXCEPTIONS).
  sig {
    type_parameters(:U)
      .params(
        revision: String,
        formula_relative_path: String,
        _block: T.proc.params(arg0: Formula).returns(T.type_parameter(:U)),
      ).returns(T.nilable(T.type_parameter(:U)))
  }
  def formula_at_revision(revision, formula_relative_path = relative_path, &_block)
    # Surface deprecations as exceptions so old API usage is skipped too.
    Homebrew.raise_deprecation_exceptions = true

    yield @formula_at_revision[revision] ||= begin
      contents = file_contents_at_revision(revision, formula_relative_path)
      nostdout { Formulary.from_contents(name, path, contents, ignore_errors: true) }
    end
  rescue *IGNORED_EXCEPTIONS => e
    require "utils/backtrace"
    # We rescue these so that we can skip bad versions and
    # continue walking the history
    odebug "#{e} in #{name} at revision #{revision}", Utils::Backtrace.clean(e)
    nil
  rescue FormulaUnavailableError
    nil
  ensure
    Homebrew.raise_deprecation_exceptions = false
  end

  private

  sig { returns(String) }
  attr_reader :name, :relative_path

  sig { returns(T.nilable(String)) }
  attr_reader :old_relative_path

  sig { returns(Pathname) }
  attr_reader :path, :repository

  # Raw blob contents of the formula file at +revision+.
  sig { params(revision: String, relative_path: String).returns(String) }
  def file_contents_at_revision(revision, relative_path)
    repository.cd { Utils.popen_read("git", "cat-file", "blob", "#{revision}:#{relative_path}") }
  end

  # Silence stdout while evaluating old formulae, unless running verbose.
  sig {
    type_parameters(:U)
      .params(block: T.proc.returns(T.type_parameter(:U)))
      .returns(T.type_parameter(:U))
  }
  def nostdout(&block)
    if verbose?
      yield
    else
      redirect_stdout(File::NULL, &block)
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/test_runner_formula.rb | Library/Homebrew/test_runner_formula.rb | # typed: strict
# frozen_string_literal: true
require "formula"
class TestRunnerFormula
  sig { returns(String) }
  attr_reader :name

  sig { returns(Formula) }
  attr_reader :formula

  # Whether dependents are looked up across all formulae rather than
  # only installed ones.
  sig { returns(T::Boolean) }
  attr_reader :eval_all

  sig { params(formula: Formula, eval_all: T::Boolean).void }
  def initialize(formula, eval_all: Homebrew::EnvConfig.eval_all?)
    Formulary.enable_factory_cache!
    @formula = formula
    @name = T.let(formula.name, String)
    @dependent_hash = T.let({}, T::Hash[Symbol, T::Array[TestRunnerFormula]])
    @eval_all = eval_all
    # NOTE: `freeze` is shallow — ivars cannot be reassigned, but the
    # @dependent_hash Hash itself can still be mutated for memoisation.
    freeze
  end

  # Requires macOS without pinning a specific version.
  sig { returns(T::Boolean) }
  def macos_only?
    formula.requirements.any? { |r| r.is_a?(MacOSRequirement) && !r.version_specified? }
  end

  sig { returns(T::Boolean) }
  def macos_compatible?
    !linux_only?
  end

  sig { returns(T::Boolean) }
  def linux_only?
    formula.requirements.any?(LinuxRequirement)
  end

  sig { returns(T::Boolean) }
  def linux_compatible?
    !macos_only?
  end

  sig { returns(T::Boolean) }
  def x86_64_only?
    formula.requirements.any? { |r| r.is_a?(ArchRequirement) && (r.arch == :x86_64) }
  end

  sig { returns(T::Boolean) }
  def x86_64_compatible?
    !arm64_only?
  end

  sig { returns(T::Boolean) }
  def arm64_only?
    formula.requirements.any? { |r| r.is_a?(ArchRequirement) && (r.arch == :arm64) }
  end

  sig { returns(T::Boolean) }
  def arm64_compatible?
    !x86_64_only?
  end

  # The formula's macOS requirement, but only when it pins a version.
  sig { returns(T.nilable(MacOSRequirement)) }
  def versioned_macos_requirement
    formula.requirements.find { |r| r.is_a?(MacOSRequirement) && r.version_specified? }
  end

  # Whether +macos_version+ satisfies the formula's versioned macOS
  # requirement; trivially true when there is none.
  sig { params(macos_version: MacOSVersion).returns(T::Boolean) }
  def compatible_with?(macos_version)
    # Assign to a variable to assist type-checking.
    requirement = versioned_macos_requirement
    return true if requirement.blank?

    macos_version.public_send(requirement.comparator, requirement.version)
  end

  # Formulae that directly depend on this one, evaluated while
  # simulating the given platform/arch/macOS version. Results are
  # memoised per (platform, arch, macos_version) triple.
  sig {
    params(
      platform: Symbol,
      arch: Symbol,
      macos_version: T.nilable(Symbol),
    ).returns(T::Array[TestRunnerFormula])
  }
  def dependents(platform:, arch:, macos_version:)
    cache_key = :"#{platform}_#{arch}_#{macos_version}"
    @dependent_hash[cache_key] ||= begin
      formula_selector, eval_all_env = if eval_all
        [:all, "1"]
      else
        [:installed, nil]
      end

      with_env(HOMEBREW_EVAL_ALL: eval_all_env) do
        os = macos_version || platform
        arch = Homebrew::SimulateSystem.arch_symbols.fetch(arch)
        Homebrew::SimulateSystem.with(os:, arch:) do
          Formula.public_send(formula_selector)
                 .select { |candidate_f| candidate_f.deps.map(&:name).include?(name) }
                 .map { |f| TestRunnerFormula.new(f, eval_all:) }
                 .freeze
        end
      end
    end

    @dependent_hash.fetch(cache_key)
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/bundle.rb | Library/Homebrew/bundle.rb | # typed: strict
# frozen_string_literal: true
require "English"
module Homebrew
  # Helpers shared by `brew bundle`: command wrappers, optional-tool
  # detection and memoised state about the current installation.
  module Bundle
    class << self
      # Accepts the comma-separated `--upgrade-formulae` option value.
      sig { params(args_upgrade_formula: T.nilable(String)).void }
      def upgrade_formulae=(args_upgrade_formula)
        @upgrade_formulae = args_upgrade_formula.to_s.split(",")
      end

      sig { returns(T::Array[String]) }
      def upgrade_formulae
        @upgrade_formulae || []
      end

      # Run +cmd+ silently, buffering combined stdout/stderr and printing
      # it only when the command fails; with +verbose+ output streams
      # through normally.
      sig { params(cmd: T.any(String, Pathname), args: T.anything, verbose: T::Boolean).returns(T::Boolean) }
      def system(cmd, *args, verbose: false)
        return super cmd, *args if verbose

        logs = []
        success = T.let(false, T::Boolean)
        IO.popen([cmd, *args], err: [:child, :out]) do |pipe|
          while (buf = pipe.gets)
            logs << buf
          end
          Process.wait(pipe.pid)
          success = $CHILD_STATUS.success?
          pipe.close
        end
        puts logs.join unless success
        success
      end

      # Run `brew` itself with the given arguments.
      sig { params(args: T.anything, verbose: T::Boolean).returns(T::Boolean) }
      def brew(*args, verbose: false)
        system(HOMEBREW_BREW_FILE, *args, verbose:)
      end

      # NOTE(review): the `||=` memoisations below re-evaluate while the
      # cached value is `false`, so the check re-runs until the tool is
      # found. `reset!` clears them explicitly anyway — confirm intended.
      sig { returns(T::Boolean) }
      def mas_installed?
        @mas_installed ||= which_formula?("mas")
      end

      sig { returns(T::Boolean) }
      def vscode_installed?
        @vscode_installed ||= which_vscode.present?
      end

      # First VS Code-compatible CLI found on the original PATH, in
      # preference order: code, codium, cursor, code-insiders.
      sig { returns(T.nilable(Pathname)) }
      def which_vscode
        @which_vscode ||= which("code", ORIGINAL_PATHS)
        @which_vscode ||= which("codium", ORIGINAL_PATHS)
        @which_vscode ||= which("cursor", ORIGINAL_PATHS)
        @which_vscode ||= which("code-insiders", ORIGINAL_PATHS)
      end

      sig { returns(T.nilable(Pathname)) }
      def which_go
        @which_go ||= which("go", ORIGINAL_PATHS)
      end

      sig { returns(T::Boolean) }
      def go_installed?
        @go_installed ||= which_go.present?
      end

      sig { returns(T.nilable(Pathname)) }
      def which_cargo
        @which_cargo ||= which("cargo", ORIGINAL_PATHS)
      end

      sig { returns(T::Boolean) }
      def cargo_installed?
        @cargo_installed ||= which_cargo.present?
      end

      sig { returns(T.nilable(Pathname)) }
      def which_flatpak
        @which_flatpak ||= which("flatpak", ORIGINAL_PATHS)
      end

      sig { returns(T::Boolean) }
      def flatpak_installed?
        @flatpak_installed ||= which_flatpak.present?
      end

      # Casks are usable when a Caskroom exists and either the cask tap
      # is present or installing from the API is allowed.
      sig { returns(T::Boolean) }
      def cask_installed?
        @cask_installed ||= File.directory?("#{HOMEBREW_PREFIX}/Caskroom") &&
                            (File.directory?("#{HOMEBREW_LIBRARY}/Taps/homebrew/homebrew-cask") ||
                            !Homebrew::EnvConfig.no_install_from_api?)
      end

      # Whether an executable called +name+ is findable, preferring an
      # installed formula's opt bin.
      # NOTE(review): mutates ENV["PATH"] for the rest of the process.
      sig { params(name: String).returns(T::Boolean) }
      def which_formula?(name)
        formula = Formulary.factory(name)
        ENV["PATH"] = "#{formula.opt_bin}:#{ENV.fetch("PATH", nil)}" if formula.any_version_installed?
        which(name).present?
      end

      # When effective and real UIDs differ (e.g. under sudo), run the
      # block as the real user (including their HOME), restoring the
      # original effective UID afterwards.
      sig { params(block: T.proc.returns(T.anything)).returns(T.untyped) }
      def exchange_uid_if_needed!(&block)
        euid = Process.euid
        uid = Process.uid
        return yield if euid == uid

        old_euid = euid
        process_reexchangeable = Process::UID.re_exchangeable?
        if process_reexchangeable
          Process::UID.re_exchange
        else
          Process::Sys.seteuid(uid)
        end

        home = T.must(Etc.getpwuid(Process.uid)).dir
        return_value = with_env("HOME" => home, &block)

        if process_reexchangeable
          Process::UID.re_exchange
        else
          Process::Sys.seteuid(old_euid)
        end

        return_value
      end

      # Pinned version for +formula_name+ from
      # HOMEBREW_BUNDLE_FORMULA_VERSION_* environment variables. The
      # variables are consumed (deleted from ENV) on first call.
      sig { params(formula_name: String).returns(T.nilable(String)) }
      def formula_versions_from_env(formula_name)
        @formula_versions_from_env ||= begin
          formula_versions = {}

          ENV.each do |key, value|
            match = key.match(/^HOMEBREW_BUNDLE_FORMULA_VERSION_(.+)$/)
            next if match.blank?

            env_formula_name = match[1]
            next if env_formula_name.blank?

            ENV.delete(key)
            formula_versions[env_formula_name] = value
          end

          formula_versions
        end

        # Fix up formula name for a valid environment variable name.
        formula_env_name = formula_name.upcase
                                       .gsub("@", "AT")
                                       .tr("+", "X")
                                       .tr("-", "_")

        @formula_versions_from_env[formula_env_name]
      end

      # No-op here; presumably overridden by the OS-specific extension
      # required at the bottom of this file — confirm in extend/os/bundle.
      sig { void }
      def prepend_pkgconf_path_if_needed!; end

      # Clear all memoised state.
      sig { void }
      def reset!
        @mas_installed = T.let(nil, T.nilable(T::Boolean))
        @vscode_installed = T.let(nil, T.nilable(T::Boolean))
        @which_vscode = T.let(nil, T.nilable(Pathname))
        @which_go = T.let(nil, T.nilable(Pathname))
        @go_installed = T.let(nil, T.nilable(T::Boolean))
        @which_cargo = T.let(nil, T.nilable(Pathname))
        @cargo_installed = T.let(nil, T.nilable(T::Boolean))
        @which_flatpak = T.let(nil, T.nilable(Pathname))
        @flatpak_installed = T.let(nil, T.nilable(T::Boolean))
        @cask_installed = T.let(nil, T.nilable(T::Boolean))
        @formula_versions_from_env = T.let(nil, T.nilable(T::Hash[String, String]))
        @upgrade_formulae = T.let(nil, T.nilable(T::Array[String]))
      end

      # Marks Brewfile formulae as installed_on_request to prevent autoremove
      # from removing them when their dependents are uninstalled.
      sig { params(entries: T::Array[Dsl::Entry]).void }
      def mark_as_installed_on_request!(entries)
        return if entries.empty?

        require "tab"

        installed_formulae = Formula.installed_formula_names
        return if installed_formulae.empty?

        entries.each do |entry|
          next if entry.type != :brew

          name = entry.name
          next if installed_formulae.exclude?(name)

          tab = Tab.for_name(name)
          # Skip formulae without a readable tab file.
          next if tab.tabfile.blank? || !tab.tabfile.exist?
          next if tab.installed_on_request

          tab.installed_on_request = true
          tab.write
        end
      end
    end
  end
end
require "extend/os/bundle/bundle"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/cache_store.rb | Library/Homebrew/cache_store.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "json"
#
# {CacheStoreDatabase} acts as an interface to a persistent storage mechanism
# residing in the `HOMEBREW_CACHE`.
#
class CacheStoreDatabase
  # Yields the cache store database for +type+, reference-counting
  # nested calls so the backing JSON file is written out (if dirty) only
  # when the outermost call completes.
  #
  # @param [Symbol] type
  # @yield [CacheStoreDatabase] self
  def self.use(type)
    references = (@db_type_reference_hash ||= {})
    type_ref = (references[type] ||= {})
    type_ref[:count] = type_ref.fetch(:count, 0) + 1
    database = (type_ref[:db] ||= CacheStoreDatabase.new(type))

    result = yield(database)

    count = type_ref.fetch(:count)
    type_ref[:count] = count.positive? ? count - 1 : 0
    if type_ref[:count].zero?
      type_ref.fetch(:db).write_if_dirty!
      type_ref.delete(:db)
    end
    result
  end

  # Creates a CacheStoreDatabase for the given cache +type+.
  #
  # @param [Symbol] type
  # @return [nil]
  def initialize(type)
    @type = type
    @dirty = false
  end

  # Stores +value+ under +key+, marking the database as needing a write.
  def set(key, value)
    dirty!
    db[key] = value
  end

  # Reads the value for +key+; nil unless the cache file already exists.
  def get(key)
    db[key] if created?
  end

  # Removes +key+; a no-op unless the cache file already exists.
  def delete(key)
    return unless created?

    dirty!
    db.delete(key)
  end

  # Empties the database; a no-op unless the cache file already exists.
  def clear!
    if created?
      dirty!
      db.clear
    end
  end

  # Persists the database as JSON, but only when something changed.
  def write_if_dirty!
    return unless dirty?

    path = cache_path
    path.dirname.mkpath
    path.atomic_write(JSON.dump(@db))
  end

  # Whether the cache file for this `@type` exists on disk.
  #
  # @return [Boolean]
  def created?
    cache_path.exist?
  end

  # Modification time of the cache file, or nil if it does not exist.
  #
  # @return [Time]
  def mtime
    cache_path.mtime if created?
  end

  # Delegates `select` to the underlying hash.
  #
  # @return [Array]
  def select(&block)
    db.select(&block)
  end

  # Whether the underlying hash holds no entries.
  #
  # @return [Boolean]
  def empty?
    db.empty?
  end

  # Delegates `each_key` to the underlying hash.
  #
  # @return [Array]
  def each_key(&block)
    db.each_key(&block)
  end

  private

  # Lazily-loaded backing hash: parsed from the cache file when present
  # and valid JSON, otherwise a fresh empty hash.
  #
  # @return [Hash] db
  def db
    @db ||= load_db_from_disk || {}
  end

  # Parses the cache file, tolerating a missing or corrupt file.
  def load_db_from_disk
    return unless created?

    JSON.parse(cache_path.read)
  rescue JSON::ParserError
    nil
  end

  # Location of this cache's JSON file inside `HOMEBREW_CACHE`.
  #
  # @return [String]
  def cache_path
    HOMEBREW_CACHE/"#{@type}.json"
  end

  # Records that the cache must be flushed to disk.
  def dirty!
    @dirty = true
  end

  # Whether the cache must be flushed to disk.
  #
  # @return [Boolean]
  def dirty?
    @dirty
  end
end
#
# {CacheStore} provides methods to mutate and fetch data from a persistent
# storage mechanism.
#
class CacheStore
  # Abstract mutate/fetch API over a {CacheStoreDatabase}.
  #
  # @param [CacheStoreDatabase] database
  # @return [nil]
  def initialize(database)
    @database = database
  end

  # Each abstract operation must be provided by a concrete subclass:
  # - +update!+ inserts or refreshes cached values in persistent storage.
  # - +fetch+ retrieves cached values for the stored data type.
  # - +delete!+ removes entries matching a subclass-defined condition.
  %i[update! fetch delete!].each do |abstract_method|
    define_method(abstract_method) do |*|
      raise NotImplementedError
    end
  end

  protected

  # @return [CacheStoreDatabase]
  attr_reader :database
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/system_command.rb | Library/Homebrew/system_command.rb | # typed: strict
# frozen_string_literal: true
require "plist"
require "shellwords"
require "uri"
require "context"
require "readline_nonblock"
require "utils/timer"
require "utils/output"
# Class for running sub-processes and capturing their output and exit status.
#
# @api internal
class SystemCommand
# Helper functions for calling {SystemCommand.run}.
#
# @api internal
module Mixin
  # Run a fallible system command.
  # Returns a Result even on failure (unless `must_succeed:` is passed).
  #
  # @api internal
  sig {
    params(
      executable: T.any(String, Pathname),
      args: T::Array[T.any(String, Integer, Float, Pathname, URI::Generic)],
      sudo: T::Boolean,
      sudo_as_root: T::Boolean,
      env: T::Hash[String, T.nilable(T.any(String, T::Boolean))],
      input: T.any(String, T::Array[String]),
      must_succeed: T::Boolean,
      print_stdout: T.any(T::Boolean, Symbol),
      print_stderr: T.any(T::Boolean, Symbol),
      debug: T.nilable(T::Boolean),
      verbose: T.nilable(T::Boolean),
      secrets: T.any(String, T::Array[String]),
      chdir: T.any(String, Pathname),
      reset_uid: T::Boolean,
      timeout: T.nilable(T.any(Integer, Float)),
    ).returns(SystemCommand::Result)
  }
  def system_command(executable, args: [], sudo: false, sudo_as_root: false, env: {}, input: [],
                     must_succeed: false, print_stdout: false, print_stderr: true, debug: nil, verbose: nil,
                     secrets: [], chdir: T.unsafe(nil), reset_uid: false, timeout: nil)
    SystemCommand.run(executable, args:, sudo:, sudo_as_root:, env:, input:, must_succeed:, print_stdout:,
                      print_stderr:, debug:, verbose:, secrets:, chdir:, reset_uid:, timeout:)
  end

  # Run an infallible system command.
  # Raises on failure: delegates to SystemCommand.run!, where
  # `must_succeed` defaults to true.
  #
  # @api internal
  sig {
    params(
      executable: T.any(String, Pathname),
      args: T::Array[T.any(String, Integer, Float, Pathname, URI::Generic)],
      sudo: T::Boolean,
      sudo_as_root: T::Boolean,
      env: T::Hash[String, T.nilable(T.any(String, T::Boolean))],
      input: T.any(String, T::Array[String]),
      print_stdout: T.any(T::Boolean, Symbol),
      print_stderr: T.any(T::Boolean, Symbol),
      debug: T.nilable(T::Boolean),
      verbose: T.nilable(T::Boolean),
      secrets: T.any(String, T::Array[String]),
      chdir: T.any(String, Pathname),
      reset_uid: T::Boolean,
      timeout: T.nilable(T.any(Integer, Float)),
    ).returns(SystemCommand::Result)
  }
  def system_command!(executable, args: [], sudo: false, sudo_as_root: false, env: {}, input: [],
                      print_stdout: false, print_stderr: true, debug: nil, verbose: nil, secrets: [],
                      chdir: T.unsafe(nil), reset_uid: false, timeout: nil)
    SystemCommand.run!(executable, args:, sudo:, sudo_as_root:, env:, input:, print_stdout:,
                       print_stderr:, debug:, verbose:, secrets:, chdir:, reset_uid:, timeout:)
  end
end
include Context
# Construct and execute a {SystemCommand}; failures return a Result
# unless `must_succeed:` is set.
sig {
  params(
    executable: T.any(String, Pathname),
    args: T::Array[T.any(String, Integer, Float, Pathname, URI::Generic)],
    sudo: T::Boolean,
    sudo_as_root: T::Boolean,
    env: T::Hash[String, T.nilable(T.any(String, T::Boolean))],
    input: T.any(String, T::Array[String]),
    must_succeed: T::Boolean,
    print_stdout: T.any(T::Boolean, Symbol),
    print_stderr: T.any(T::Boolean, Symbol),
    debug: T.nilable(T::Boolean),
    verbose: T.nilable(T::Boolean),
    secrets: T.any(String, T::Array[String]),
    chdir: T.nilable(T.any(String, Pathname)),
    reset_uid: T::Boolean,
    timeout: T.nilable(T.any(Integer, Float)),
  ).returns(SystemCommand::Result)
}
def self.run(executable, args: [], sudo: false, sudo_as_root: false, env: {}, input: [], must_succeed: false,
             print_stdout: false, print_stderr: true, debug: nil, verbose: nil, secrets: [], chdir: nil,
             reset_uid: false, timeout: nil)
  new(executable, args:, sudo:, sudo_as_root:, env:, input:, must_succeed:, print_stdout:, print_stderr:, debug:,
      verbose:, secrets:, chdir:, reset_uid:, timeout:).run!
end
# Same as {.run} but `must_succeed` defaults to true, so a failing
# command raises ErrorDuringExecution.
sig {
  params(
    executable: T.any(String, Pathname),
    args: T::Array[T.any(String, Integer, Float, Pathname, URI::Generic)],
    sudo: T::Boolean,
    sudo_as_root: T::Boolean,
    env: T::Hash[String, T.nilable(T.any(String, T::Boolean))],
    input: T.any(String, T::Array[String]),
    must_succeed: T::Boolean,
    print_stdout: T.any(T::Boolean, Symbol),
    print_stderr: T.any(T::Boolean, Symbol),
    debug: T.nilable(T::Boolean),
    verbose: T.nilable(T::Boolean),
    secrets: T.any(String, T::Array[String]),
    chdir: T.nilable(T.any(String, Pathname)),
    reset_uid: T::Boolean,
    timeout: T.nilable(T.any(Integer, Float)),
  ).returns(SystemCommand::Result)
}
def self.run!(executable, args: [], sudo: false, sudo_as_root: false, env: {}, input: [], must_succeed: true,
              print_stdout: false, print_stderr: true, debug: nil, verbose: nil, secrets: [], chdir: nil,
              reset_uid: false, timeout: nil)
  run(executable, args:, sudo:, sudo_as_root:, env:, input:, must_succeed:, print_stdout:, print_stderr:,
      debug:, verbose:, secrets:, chdir:, reset_uid:, timeout:)
end
# Execute the command, streaming output according to the
# print_stdout/print_stderr modes while collecting every line, then
# build a {Result}. Raises via `assert_success!` when `must_succeed`
# was requested.
sig { returns(SystemCommand::Result) }
def run!
  # Echo the (secret-redacted) command line when both verbose and debug.
  $stderr.puts redact_secrets(command.shelljoin.gsub('\=', "="), @secrets) if verbose? && debug?

  @output = T.let([], T.nilable(T::Array[[Symbol, String]]))
  @output = T.must(@output)

  each_output_line do |type, line|
    case type
    when :stdout
      # true => echo to stdout; :debug => echo to stderr in debug mode.
      case @print_stdout
      when true
        $stdout << redact_secrets(line, @secrets)
      when :debug
        $stderr << redact_secrets(line, @secrets) if debug?
      end
      @output << [:stdout, line]
    when :stderr
      case @print_stderr
      when true
        $stderr << redact_secrets(line, @secrets)
      when :debug
        $stderr << redact_secrets(line, @secrets) if debug?
      end
      @output << [:stderr, line]
    end
  end

  result = Result.new(command, @output, T.must(@status), secrets: @secrets)
  result.assert_success! if must_succeed?
  result
end
sig {
  params(
    executable: T.any(String, Pathname),
    args: T::Array[T.any(String, Integer, Float, Pathname, URI::Generic)],
    sudo: T::Boolean,
    sudo_as_root: T::Boolean,
    env: T::Hash[String, T.nilable(T.any(String, T::Boolean))],
    input: T.any(String, T::Array[String]),
    must_succeed: T::Boolean,
    print_stdout: T.any(T::Boolean, Symbol),
    print_stderr: T.any(T::Boolean, Symbol),
    debug: T.nilable(T::Boolean),
    verbose: T.nilable(T::Boolean),
    secrets: T.any(String, T::Array[String]),
    chdir: T.nilable(T.any(String, Pathname)),
    reset_uid: T::Boolean,
    timeout: T.nilable(T.any(Integer, Float)),
  ).void
}
def initialize(executable, args: [], sudo: false, sudo_as_root: false, env: {}, input: [], must_succeed: false,
               print_stdout: false, print_stderr: true, debug: nil, verbose: nil, secrets: [], chdir: nil,
               reset_uid: false, timeout: nil)
  require "extend/ENV"
  @executable = executable
  @args = args

  # `sudo_as_root` only makes sense when running under sudo at all.
  raise ArgumentError, "`sudo_as_root` cannot be set if sudo is false" if !sudo && sudo_as_root

  # print_stdout/print_stderr accept true/false or :debug
  # (print only while debugging).
  if print_stdout.is_a?(Symbol) && print_stdout != :debug
    raise ArgumentError, "`print_stdout` is not a valid symbol"
  end

  if print_stderr.is_a?(Symbol) && print_stderr != :debug
    raise ArgumentError, "`print_stderr` is not a valid symbol"
  end

  @sudo = sudo
  @sudo_as_root = sudo_as_root
  # Restrict env overrides to identifier-like names (no leading digit).
  env.each_key do |name|
    next if /^[\w&&\D]\w*$/.match?(name)

    raise ArgumentError, "Invalid variable name: #{name}"
  end
  @env = env
  @input = T.let(Array(input), T::Array[String])
  @must_succeed = must_succeed
  @print_stdout = print_stdout
  @print_stderr = print_stderr
  @debug = debug
  @verbose = verbose
  # Redact explicitly-passed secrets plus any sensitive env values.
  @secrets = T.let((Array(secrets) + ENV.sensitive_environment.values).uniq, T::Array[String])
  @chdir = chdir
  @reset_uid = reset_uid
  @timeout = timeout
end
# Full argv for the child process: env/sudo prefix, the executable,
# then the stringified arguments.
sig { returns(T::Array[String]) }
def command
  command_prefix + [executable.to_s] + expanded_args
end
private
sig { returns(T.any(Pathname, String)) }
attr_reader :executable

sig { returns(T::Array[T.any(String, Integer, Float, Pathname, URI::Generic)]) }
attr_reader :args

# Chunks written to the child's stdin before it is closed.
sig { returns(T::Array[String]) }
attr_reader :input

# Working directory for the child, if any.
sig { returns(T.nilable(T.any(String, Pathname))) }
attr_reader :chdir

# Environment overrides passed through /usr/bin/env or sudo -E.
sig { returns(T::Hash[String, T.nilable(T.any(String, T::Boolean))]) }
attr_reader :env
sig { returns(T::Boolean) }
def must_succeed? = @must_succeed

sig { returns(T::Boolean) }
def reset_uid? = @reset_uid

sig { returns(T::Boolean) }
def sudo? = @sudo

sig { returns(T::Boolean) }
def sudo_as_root? = @sudo_as_root

# Explicit `debug:`/`verbose:` arguments override the global Context
# flags (reached via `super`); nil means "inherit".
sig { returns(T::Boolean) }
def debug?
  return super if @debug.nil?

  @debug
end

sig { returns(T::Boolean) }
def verbose?
  return super if @verbose.nil?

  @verbose
end
# Render the environment overrides as shell-escaped NAME=value words,
# suitable for /usr/bin/env or `sudo -E`. Nil values are dropped.
sig { returns(T::Array[String]) }
def env_args
  env.compact.map do |name, value|
    "#{Shellwords.escape(name)}=#{Shellwords.escape(value)}"
  end
end
# Intermediate user for `sudo`-through-`sudo` invocations, if set.
sig { returns(T.nilable(String)) }
def homebrew_sudo_user
  ENV["HOMEBREW_SUDO_USER"]
end
# Build the `/usr/bin/sudo` invocation prefix, optionally routing
# through an intermediate user (`$HOMEBREW_SUDO_THROUGH_SUDO_USER`
# mode) before a nested sudo, and optionally forcing `-u root`.
sig { returns(T::Array[String]) }
def sudo_prefix
  # `-A` makes sudo use the askpass program when one is configured.
  askpass_flags = ENV.key?("SUDO_ASKPASS") ? ["-A"] : []
  user_flags = []
  if Homebrew::EnvConfig.sudo_through_sudo_user?
    if homebrew_sudo_user.blank?
      # NOTE(review): the check reads `$HOMEBREW_SUDO_USER` but the
      # message says `$SUDO_USER` — confirm which variable is meant.
      raise ArgumentError, "`$HOMEBREW_SUDO_THROUGH_SUDO_USER` set but `$SUDO_USER` unset!"
    end

    user_flags += ["--prompt", "Password for %p:", "-u", homebrew_sudo_user,
                   *askpass_flags,
                   "-E", *env_args,
                   "--", "/usr/bin/sudo"]
  end
  user_flags += ["-u", "root"] if sudo_as_root?
  ["/usr/bin/sudo", *user_flags, *askpass_flags, "-E", *env_args, "--"]
end
# `/usr/bin/env` prefix carrying the environment overrides.
sig { returns(T::Array[String]) }
def env_prefix
  ["/usr/bin/env"].concat(env_args)
end
# Either the sudo or the plain env prefix, depending on how the
# command was requested.
sig { returns(T::Array[String]) }
def command_prefix
  if sudo?
    sudo_prefix
  else
    env_prefix
  end
end
# Memoised argument list with Pathnames expanded to absolute paths and
# everything else stringified.
sig { returns(T::Array[String]) }
def expanded_args
  @expanded_args ||= T.let(
    args.map { |arg| arg.is_a?(Pathname) ? File.absolute_path(arg) : arg.to_s },
    T.nilable(T::Array[String]),
  )
end
# Internal interrupt raised into the output-reader thread once the
# child process has terminated, so reading can drain and stop.
class ProcessTerminatedInterrupt < StandardError; end
private_constant :ProcessTerminatedInterrupt
# Spawn the command and yield each chunk of output tagged :stdout or
# :stderr. A dedicated reader thread pumps the pipes while this thread
# waits on the child (with optional timeout) and handles Ctrl-C.
sig { params(block: T.proc.params(type: Symbol, line: String).void).void }
def each_output_line(&block)
  executable, *args = command
  options = {
    # Create a new process group so that we can send `SIGINT` from
    # parent to child rather than the child receiving `SIGINT` directly.
    pgroup: sudo? ? nil : true,
  }
  options[:chdir] = chdir if chdir

  raw_stdin, raw_stdout, raw_stderr, raw_wait_thr = exec3(env, executable, *args, **options)

  write_input_to(raw_stdin)
  raw_stdin.close_write

  thread_context = Context.current
  thread_ready_queue = Queue.new
  thread_done_queue = Queue.new
  line_thread = Thread.new do
    # Ensure the new thread inherits the current context.
    Thread.current[:context] = thread_context

    # Defer the terminate interrupt while reading; it is delivered at
    # the blocking select inside `each_line_from` instead.
    Thread.handle_interrupt(ProcessTerminatedInterrupt => :never) do
      thread_ready_queue << true
      each_line_from [raw_stdout, raw_stderr], &block
    end
    thread_done_queue.pop
  rescue ProcessTerminatedInterrupt
    nil
  end

  end_time = Time.now + @timeout if @timeout
  raise Timeout::Error if raw_wait_thr.join(Utils::Timer.remaining(end_time)).nil?

  @status = T.let(raw_wait_thr.value, T.nilable(Process::Status))
rescue Interrupt
  Process.kill("INT", raw_wait_thr.pid) if raw_wait_thr && !sudo?
  raise Interrupt
ensure
  if line_thread
    # Wake the reader thread, then wait for it to finish draining.
    thread_ready_queue.pop
    line_thread.raise ProcessTerminatedInterrupt.new
    thread_done_queue << true
    line_thread.join
  end
  raw_stdin&.close
  raw_stdout&.close
  raw_stderr&.close
end
# Fork/exec the command with three fresh pipes, returning
# [stdin_writer, stdout_reader, stderr_reader, wait_thread].
# The child's pipe ends are always closed in the parent (ensure); the
# parent's ends are closed if setup fails (rescue).
sig {
  params(
    env: T::Hash[String, T.nilable(String)],
    executable: String,
    args: String,
    options: T.untyped,
  ).returns([IO, IO, IO, Thread])
}
def exec3(env, executable, *args, **options)
  in_r, in_w = IO.pipe
  options[:in] = in_r
  in_w.sync = true

  out_r, out_w = IO.pipe
  options[:out] = out_w

  err_r, err_w = IO.pipe
  options[:err] = err_w

  pid = fork do
    # When requested, make the child's real UID match the current
    # effective UID before exec.
    Process::UID.change_privilege(Process.euid) if reset_uid? && Process.euid != Process.uid
    exec(
      env.merge({ "COLUMNS" => Tty.width.to_s }),
      [executable, executable],
      *args,
      **options,
    )
  rescue SystemCallError => e
    # Mirror the shell's "command not found" exit code.
    $stderr.puts(e.message)
    exit!(127)
  end
  wait_thr = Process.detach(pid)

  [in_w, out_r, err_r, wait_thr]
rescue
  in_w&.close
  out_r&.close
  err_r&.close
  raise
ensure
  in_r&.close
  out_w&.close
  err_w&.close
end
# Feed every configured input chunk to the child's stdin.
sig { params(raw_stdin: IO).void }
def write_input_to(raw_stdin)
  input.each do |chunk|
    raw_stdin.write(chunk)
  end
end
# Multiplex reads from stdout/stderr with IO.select, yielding each
# available chunk tagged with its stream, until both streams reach EOF
# or the process-terminated interrupt arrives (then drain once more and
# stop).
sig { params(sources: T::Array[IO], _block: T.proc.params(type: Symbol, line: String).void).void }
def each_line_from(sources, &_block)
  sources = {
    sources[0] => :stdout,
    sources[1] => :stderr,
  }

  pending_interrupt = T.let(false, T::Boolean)

  until pending_interrupt || sources.empty?
    readable_sources = T.let([], T::Array[IO])
    begin
      # The interrupt is only accepted while blocked in select, never
      # mid-read (see each_output_line).
      Thread.handle_interrupt(ProcessTerminatedInterrupt => :on_blocking) do
        readable_sources = T.must(IO.select(sources.keys)).fetch(0)
      end
    rescue ProcessTerminatedInterrupt
      # Child exited: perform one final drain of both streams.
      readable_sources = sources.keys
      pending_interrupt = true
    end

    readable_sources.each do |source|
      loop do
        line = ReadlineNonblock.read(source)
        yield(sources.fetch(source), line)
      end
    rescue EOFError
      source.close_read
      sources.delete(source)
    rescue IO::WaitReadable
      # We've got all the data that was ready, but the other end of the stream isn't finished yet
    end
  end

  sources.each_key(&:close_read)
end
# Result containing the output and exit status of a finished sub-process.
class Result
  include Context
  include Utils::Output::Mixin

  # The executed command line, as an argv array.
  sig { returns(T::Array[String]) }
  attr_accessor :command

  # The raw `Process::Status` of the finished process.
  sig { returns(Process::Status) }
  attr_accessor :status

  # The numeric exit status, or `nil` if the process did not exit normally.
  sig { returns(T.nilable(Integer)) }
  attr_accessor :exit_status

  sig {
    params(
      command: T::Array[String],
      output: T::Array[[T.any(String, Symbol), String]],
      status: Process::Status,
      secrets: T::Array[String],
    ).void
  }
  def initialize(command, output, status, secrets:)
    @command = command
    # `output` is an ordered list of `[stream_type, line]` pairs.
    @output = output
    @status = status
    @exit_status = T.let(status.exitstatus, T.nilable(Integer))
    # Strings to redact when the output is embedded in an error.
    @secrets = secrets
  end

  # Raise `ErrorDuringExecution` unless the process exited successfully.
  sig { void }
  def assert_success!
    return if @status.success?
    raise ErrorDuringExecution.new(command, status: @status, output: @output, secrets: @secrets)
  end

  # All captured stdout lines joined into one (memoized) string.
  sig { returns(String) }
  def stdout
    @stdout ||= T.let(@output.select { |type,| type == :stdout }
                             .map { |_, line| line }
                             .join, T.nilable(String))
  end

  # All captured stderr lines joined into one (memoized) string.
  sig { returns(String) }
  def stderr
    @stderr ||= T.let(@output.select { |type,| type == :stderr }
                             .map { |_, line| line }
                             .join, T.nilable(String))
  end

  # stdout and stderr interleaved in the order the lines were captured.
  sig { returns(String) }
  def merged_output
    @merged_output ||= T.let(@output.map { |_, line| line }.join, T.nilable(String))
  end

  # Whether the process exited normally with status 0.
  sig { returns(T::Boolean) }
  def success?
    return false if @exit_status.nil?
    @exit_status.zero?
  end

  # Allow destructuring: `stdout, stderr, status = result`.
  sig { returns([String, String, Process::Status]) }
  def to_ary
    [stdout, stderr, status]
  end
  alias to_a to_ary

  # Parse stdout as an XML property list, tolerating (and warning about)
  # garbage before the `<?xml` prologue or after the closing `</plist>`.
  sig { returns(T.untyped) }
  def plist
    @plist ||= T.let(begin
      output = stdout
      # Strip any leading noise before the XML prologue.
      output = output.sub(/\A(.*?)(\s*<\?\s*xml)/m) do
        warn_plist_garbage(T.must(Regexp.last_match(1)))
        Regexp.last_match(2)
      end
      # Strip any trailing noise after the closing plist tag.
      output = output.sub(%r{(<\s*/\s*plist\s*>\s*)(.*?)\Z}m) do
        warn_plist_garbage(T.must(Regexp.last_match(2)))
        Regexp.last_match(1)
      end
      Plist.parse_xml(output, marshal: false)
    end, T.untyped)
  end

  # Report non-XML text found around the plist (verbose mode only).
  sig { params(garbage: String).void }
  def warn_plist_garbage(garbage)
    return unless verbose?
    return unless garbage.match?(/\S/)
    opoo "Received non-XML output from #{Formatter.identifier(command.first)}:"
    $stderr.puts garbage.strip
  end
  private :warn_plist_garbage
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/bump_version_parser.rb | Library/Homebrew/bump_version_parser.rb | # typed: strict
# frozen_string_literal: true
module Homebrew
  # Holds the versions given to a bump command, either as one general
  # version or as separate ARM/Intel versions.
  class BumpVersionParser
    VERSION_SYMBOLS = [:general, :arm, :intel].freeze

    sig { returns(T.nilable(T.any(Version, Cask::DSL::Version))) }
    attr_reader :arm, :general, :intel

    sig {
      params(general: T.nilable(T.any(Version, String)),
             arm: T.nilable(T.any(Version, String)),
             intel: T.nilable(T.any(Version, String))).void
    }
    def initialize(general: nil, arm: nil, intel: nil)
      @general = T.let(parse_version(general), T.nilable(T.any(Version, Cask::DSL::Version))) if general.present?
      @arm = T.let(parse_version(arm), T.nilable(T.any(Version, Cask::DSL::Version))) if arm.present?
      @intel = T.let(parse_version(intel), T.nilable(T.any(Version, Cask::DSL::Version))) if intel.present?

      # A general version covers every architecture; without one, both
      # architecture-specific versions are required.
      return if @general.present?
      raise UsageError, "`--version` must not be empty." if arm.blank? && intel.blank?
      raise UsageError, "`--version-arm` must not be empty." if arm.blank?
      raise UsageError, "`--version-intel` must not be empty." if intel.blank?
    end

    # Normalise a user-supplied version: pass `Version` through untouched,
    # convert strings to cask versions, return `nil` for anything else.
    sig {
      params(version: T.any(Version, String))
        .returns(T.nilable(T.any(Version, Cask::DSL::Version)))
    }
    def parse_version(version)
      case version
      when Version then version
      when String then parse_cask_version(version)
      end
    end

    # Build a cask version from a string, mapping the literal "latest"
    # to the special `:latest` marker.
    sig { params(version: String).returns(T.nilable(Cask::DSL::Version)) }
    def parse_cask_version(version)
      return Cask::DSL::Version.new(:latest) if version == "latest"

      Cask::DSL::Version.new(version)
    end

    # True when no version at all was supplied.
    sig { returns(T::Boolean) }
    def blank?
      [@general, @arm, @intel].all?(&:blank?)
    end

    # Two parsers are equal when all three version slots match.
    sig { params(other: T.anything).returns(T::Boolean) }
    def ==(other)
      return false unless other.is_a?(BumpVersionParser)

      (general == other.general) && (arm == other.arm) && (intel == other.intel)
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/unversioned_cask_checker.rb | Library/Homebrew/unversioned_cask_checker.rb | # typed: strict
# frozen_string_literal: true
require "bundle_version"
require "cask/cask"
require "cask/installer"
require "system_command"
require "utils/output"
module Homebrew
# Check unversioned casks for updates by extracting their
# contents and guessing the version from contained files.
class UnversionedCaskChecker
include SystemCommand::Mixin
include Utils::Output::Mixin
sig { returns(Cask::Cask) }
attr_reader :cask
sig { params(cask: Cask::Cask).void }
def initialize(cask)
@cask = cask
end
sig { returns(Cask::Installer) }
def installer
@installer ||= T.let(Cask::Installer.new(cask, verify_download_integrity: false), T.nilable(Cask::Installer))
end
sig { returns(T::Array[Cask::Artifact::App]) }
def apps
@apps ||= T.let(@cask.artifacts.grep(Cask::Artifact::App), T.nilable(T::Array[Cask::Artifact::App]))
end
sig { returns(T::Array[Cask::Artifact::KeyboardLayout]) }
def keyboard_layouts
@keyboard_layouts ||= T.let(@cask.artifacts.grep(Cask::Artifact::KeyboardLayout),
T.nilable(T::Array[Cask::Artifact::KeyboardLayout]))
end
sig { returns(T::Array[Cask::Artifact::Qlplugin]) }
def qlplugins
@qlplugins ||= T.let(@cask.artifacts.grep(Cask::Artifact::Qlplugin),
T.nilable(T::Array[Cask::Artifact::Qlplugin]))
end
sig { returns(T::Array[Cask::Artifact::Dictionary]) }
def dictionaries
@dictionaries ||= T.let(@cask.artifacts.grep(Cask::Artifact::Dictionary),
T.nilable(T::Array[Cask::Artifact::Dictionary]))
end
sig { returns(T::Array[Cask::Artifact::ScreenSaver]) }
def screen_savers
@screen_savers ||= T.let(@cask.artifacts.grep(Cask::Artifact::ScreenSaver),
T.nilable(T::Array[Cask::Artifact::ScreenSaver]))
end
sig { returns(T::Array[Cask::Artifact::Colorpicker]) }
def colorpickers
@colorpickers ||= T.let(@cask.artifacts.grep(Cask::Artifact::Colorpicker),
T.nilable(T::Array[Cask::Artifact::Colorpicker]))
end
sig { returns(T::Array[Cask::Artifact::Mdimporter]) }
def mdimporters
@mdimporters ||= T.let(@cask.artifacts.grep(Cask::Artifact::Mdimporter),
T.nilable(T::Array[Cask::Artifact::Mdimporter]))
end
sig { returns(T::Array[Cask::Artifact::Installer]) }
def installers
@installers ||= T.let(@cask.artifacts.grep(Cask::Artifact::Installer),
T.nilable(T::Array[Cask::Artifact::Installer]))
end
sig { returns(T::Array[Cask::Artifact::Pkg]) }
def pkgs
@pkgs ||= T.let(@cask.artifacts.grep(Cask::Artifact::Pkg), T.nilable(T::Array[Cask::Artifact::Pkg]))
end
sig { returns(T::Boolean) }
def single_app_cask?
apps.one?
end
sig { returns(T::Boolean) }
def single_qlplugin_cask?
qlplugins.one?
end
sig { returns(T::Boolean) }
def single_pkg_cask?
pkgs.one?
end
# Filter paths to `Info.plist` files so that ones belonging
# to e.g. nested `.app`s are ignored.
sig { params(paths: T::Array[Pathname]).returns(T::Array[Pathname]) }
def top_level_info_plists(paths)
  # Go from `./Contents/Info.plist` to `./`.
  top_level_paths = paths.map { |path| path.parent.parent }
  paths.reject do |path|
    # Skip the plist itself, its `Contents` directory and its own bundle
    # root (the first 3 `ascend` entries); reject the plist if any higher
    # ancestor is itself a bundle root, i.e. this bundle is nested inside
    # another. (The previous outer `top_level_paths.any?` loop ignored its
    # block parameter, re-running this same check once per top-level path
    # with an identical result.)
    path.ascend.drop(3).any? { |parent_path| top_level_paths.include?(parent_path) }
  end
end
# Extract the cask's primary container into a temporary directory and
# collect `CFBundleIdentifier => BundleVersion` for every top-level bundle
# found in its artifacts (and inside any PKG payloads).
sig { returns(T::Hash[String, BundleVersion]) }
def all_versions
  versions = {}
  # Shared helper: read one Info.plist (converted to XML via `plutil`) and
  # record its bundle identifier/version pair.
  parse_info_plist = proc do |info_plist_path|
    plist = system_command!("plutil", args: ["-convert", "xml1", "-o", "-", info_plist_path]).plist
    id = plist["CFBundleIdentifier"]
    version = BundleVersion.from_info_plist_content(plist)
    versions[id] = version if id && version
  end
  Dir.mktmpdir("cask-checker", HOMEBREW_TEMP) do |dir|
    dir = Pathname(dir)
    installer.extract_primary_container(to: dir)
    installer.process_rename_operations(target_dir: dir)
    # Gather candidate Info.plist paths from every bundle-like artifact type.
    info_plist_paths = [
      *apps,
      *keyboard_layouts,
      *mdimporters,
      *colorpickers,
      *dictionaries,
      *qlplugins,
      *installers,
      *screen_savers,
    ].flat_map do |artifact|
      sources = if artifact.is_a?(Cask::Artifact::Installer)
        # Installers are sometimes contained within an `.app`, so try both.
        installer_path = artifact.path
        installer_path.ascend
                      .select { |path| path == installer_path || path.extname == ".app" }
                      .sort
      else
        [artifact.source.basename]
      end
      sources.flat_map do |source|
        top_level_info_plists(Pathname.glob(dir/"**"/source/"Contents"/"Info.plist")).sort
      end
    end
    info_plist_paths.each(&parse_info_plist)
    # Locate PKG payloads: prefer artifact-declared names, otherwise any `*.pkg`.
    pkg_paths = pkgs.flat_map { |pkg| Pathname.glob(dir/"**"/pkg.path.basename).sort }
    pkg_paths = Pathname.glob(dir/"**"/"*.pkg").sort if pkg_paths.empty?
    pkg_paths.each do |pkg_path|
      Dir.mktmpdir("cask-checker", HOMEBREW_TEMP) do |extract_dir|
        extract_dir = Pathname(extract_dir)
        # `pkgutil --expand-full` wants the target directory not to exist yet.
        FileUtils.rmdir extract_dir
        system_command! "pkgutil", args: ["--expand-full", pkg_path, extract_dir]
        top_level_info_plist_paths = top_level_info_plists(Pathname.glob(extract_dir/"**/Contents/Info.plist"))
        top_level_info_plist_paths.each(&parse_info_plist)
      ensure
        # Expanded PKGs may contain entries we lack permission to delete;
        # force-remove, then recreate the empty dir so mktmpdir's own
        # cleanup succeeds.
        extract_dir = Pathname(extract_dir)
        Cask::Utils.gain_permissions_remove(extract_dir)
        extract_dir.mkpath
      end
    end
    nil
  end
  versions
end
# Try to guess a single version string for the cask by extracting it and
# inspecting app bundles first, then PKG payloads. Returns `nil` when no
# version could be determined.
sig { returns(T.nilable(String)) }
def guess_cask_version
  if apps.empty? && pkgs.empty? && qlplugins.empty?
    opoo "Cask #{cask} does not contain any apps, qlplugins or PKG installers."
    return
  end
  Dir.mktmpdir("cask-checker", HOMEBREW_TEMP) do |dir|
    dir = Pathname(dir)
    installer.then do |i|
      i.extract_primary_container(to: dir)
    rescue ErrorDuringExecution => e
      # Extraction failed (e.g. bad download): report and give up.
      onoe e
      return nil
    end
    # First preference: a version from an app bundle's Info.plist.
    info_plist_paths = apps.flat_map do |app|
      top_level_info_plists(Pathname.glob(dir/"**"/app.source.basename/"Contents"/"Info.plist")).sort
    end
    info_plist_paths.each do |info_plist_path|
      if (version = BundleVersion.from_info_plist(info_plist_path))
        return version.nice_version
      end
    end
    # Second preference: versions found inside PKG payloads.
    pkg_paths = pkgs.flat_map do |pkg|
      Pathname.glob(dir/"**"/pkg.path.basename).sort
    end
    pkg_paths.each do |pkg_path|
      # List the package identifiers contained in this PKG.
      packages =
        system_command!("installer", args: ["-plist", "-pkginfo", "-pkg", pkg_path])
        .plist
        .map { |package| package.fetch("Package") }
      Dir.mktmpdir("cask-checker", HOMEBREW_TEMP) do |extract_dir|
        extract_dir = Pathname(extract_dir)
        # `pkgutil --expand-full` wants the target directory not to exist yet.
        FileUtils.rmdir extract_dir
        begin
          system_command! "pkgutil", args: ["--expand-full", pkg_path, extract_dir]
        rescue ErrorDuringExecution => e
          onoe "Failed to extract #{pkg_path.basename}: #{e}"
          next
        end
        # Use the contained bundles' Info.plist versions if they all agree.
        top_level_info_plist_paths = top_level_info_plists(Pathname.glob(extract_dir/"**/Contents/Info.plist"))
        unique_info_plist_versions =
          top_level_info_plist_paths.filter_map { |i| BundleVersion.from_info_plist(i)&.nice_version }
                                    .uniq
        return unique_info_plist_versions.first if unique_info_plist_versions.one?
        # Fall back to the PKG's own metadata file.
        package_info_path = extract_dir/"PackageInfo"
        if package_info_path.exist?
          if (version = BundleVersion.from_package_info(package_info_path))
            return version.nice_version
          end
        elsif packages.one?
          onoe "#{pkg_path.basename} does not contain a `PackageInfo` file."
        end
        # Or a product version from the Distribution XML, if present.
        distribution_path = extract_dir/"Distribution"
        if distribution_path.exist?
          require "rexml/document"
          xml = REXML::Document.new(distribution_path.read)
          product = xml.get_elements("//installer-gui-script//product").first
          product_version = product["version"] if product
          return product_version if product_version.present?
        end
        # Nothing found: print the bundle-like paths to aid manual inspection.
        opoo "#{pkg_path.basename} contains multiple packages: #{packages}" if packages.count != 1
        $stderr.puts Pathname.glob(extract_dir/"**/*")
                             .map { |path|
                               regex = %r{\A(.*?\.(app|qlgenerator|saver|plugin|kext|bundle|osax))/.*\Z}
                               path.to_s.sub(regex, '\1')
                             }.uniq
      ensure
        # Force-remove the expansion dir (it may contain entries we lack
        # permission to delete), then recreate it so mktmpdir's own cleanup
        # succeeds.
        extract_dir = Pathname(extract_dir)
        Cask::Utils.gain_permissions_remove(extract_dir)
        extract_dir.mkpath
      end
    end
    nil
  end
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/startup.rb | Library/Homebrew/startup.rb | # typed: strict
# frozen_string_literal: true
# This file should be the first `require` in all entrypoints of `brew`.
# Bootsnap should be loaded as early as possible.
require_relative "standalone/init"
require_relative "startup/bootsnap"
require_relative "startup/ruby_path"
require "startup/config"
require_relative "standalone/sorbet"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/version.rb | Library/Homebrew/version.rb | # typed: strict
# frozen_string_literal: true
require "version/parser"
# A formula's version.
class Version
include Comparable
sig { params(name: T.any(String, Symbol), full: T::Boolean).returns(Regexp) }
def self.formula_optionally_versioned_regex(name, full: true)
/#{"^" if full}#{Regexp.escape(name)}(@\d[\d.]*)?#{"$" if full}/
end
# A part of a {Version}.
class Token
extend T::Helpers
abstract!
include Comparable
sig { params(val: String).returns(Token) }
def self.create(val)
case val
when /\A#{AlphaToken::PATTERN}\z/o then AlphaToken
when /\A#{BetaToken::PATTERN}\z/o then BetaToken
when /\A#{RCToken::PATTERN}\z/o then RCToken
when /\A#{PreToken::PATTERN}\z/o then PreToken
when /\A#{PatchToken::PATTERN}\z/o then PatchToken
when /\A#{PostToken::PATTERN}\z/o then PostToken
when /\A#{NumericToken::PATTERN}\z/o then NumericToken
when /\A#{StringToken::PATTERN}\z/o then StringToken
else raise "Cannot find a matching token pattern"
end.new(val)
end
sig { params(val: T.untyped).returns(T.nilable(Token)) }
def self.from(val)
return NULL_TOKEN if val.nil? || (val.respond_to?(:null?) && val.null?)
case val
when Token then val
when String then Token.create(val)
when Integer then Token.create(val.to_s)
end
end
sig { returns(T.nilable(T.any(String, Integer))) }
attr_reader :value
sig { params(value: T.nilable(T.any(String, Integer))).void }
def initialize(value)
@value = T.let(value, T.untyped)
end
sig { abstract.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other); end
sig { returns(String) }
def inspect
"#<#{self.class.name} #{value.inspect}>"
end
sig { returns(Integer) }
def hash
value.hash
end
sig { returns(Float) }
def to_f
value.to_f
end
sig { returns(Integer) }
def to_i
value.to_i
end
sig { returns(String) }
def to_str
value.to_s
end
sig { returns(String) }
def to_s = to_str
sig { returns(T::Boolean) }
def numeric?
false
end
sig { returns(T::Boolean) }
def null?
false
end
sig { returns(T::Boolean) }
def blank? = null?
end
# A pseudo-token representing the absence of a token.
class NullToken < Token
sig { override.returns(NilClass) }
attr_reader :value
sig { void }
def initialize
super(nil)
end
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when NullToken
0
when NumericToken
other.value.zero? ? 0 : -1
when AlphaToken, BetaToken, PreToken, RCToken
1
else
-1
end
end
sig { override.returns(T::Boolean) }
def null?
true
end
sig { returns(T::Boolean) }
def blank? = true
sig { returns(String) }
def inspect
"#<#{self.class.name}>"
end
end
private_constant :NullToken
# Represents the absence of a token.
NULL_TOKEN = T.let(NullToken.new.freeze, NullToken)
# A token string.
class StringToken < Token
PATTERN = /[a-z]+/i
sig { override.returns(String) }
attr_reader :value
sig { params(value: String).void }
def initialize(value)
super(value.to_s)
end
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when StringToken
value <=> other.value
when NumericToken, NullToken
-T.must(other <=> self)
end
end
end
# A token consisting of only numbers.
class NumericToken < Token
PATTERN = /[0-9]+/i
sig { override.returns(Integer) }
attr_reader :value
sig { params(value: T.any(String, Integer)).void }
def initialize(value)
super(value.to_i)
end
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when NumericToken
value <=> other.value
when StringToken
1
when NullToken
-T.must(other <=> self)
end
end
sig { override.returns(T::Boolean) }
def numeric?
true
end
end
# A token consisting of an alphabetic and a numeric part.
class CompositeToken < StringToken
sig { returns(Integer) }
def rev
value[/[0-9]+/].to_i
end
end
# A token representing the part of a version designating it as an alpha release.
class AlphaToken < CompositeToken
PATTERN = /alpha[0-9]*|a[0-9]+/i
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when AlphaToken
rev <=> other.rev
when BetaToken, RCToken, PreToken, PatchToken, PostToken
-1
else
super
end
end
end
# A token representing the part of a version designating it as a beta release.
class BetaToken < CompositeToken
PATTERN = /beta[0-9]*|b[0-9]+/i
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when BetaToken
rev <=> other.rev
when AlphaToken
1
when PreToken, RCToken, PatchToken, PostToken
-1
else
super
end
end
end
# A token representing the part of a version designating it as a pre-release.
class PreToken < CompositeToken
PATTERN = /pre[0-9]*/i
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when PreToken
rev <=> other.rev
when AlphaToken, BetaToken
1
when RCToken, PatchToken, PostToken
-1
else
super
end
end
end
# A token representing the part of a version designating it as a release candidate.
class RCToken < CompositeToken
PATTERN = /rc[0-9]*/i
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when RCToken
rev <=> other.rev
when AlphaToken, BetaToken, PreToken
1
when PatchToken, PostToken
-1
else
super
end
end
end
# A token representing the part of a version designating it as a patch release.
class PatchToken < CompositeToken
PATTERN = /p[0-9]*/i
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when PatchToken
rev <=> other.rev
when AlphaToken, BetaToken, RCToken, PreToken
1
else
super
end
end
end
# A token representing the part of a version designating it as a post release.
class PostToken < CompositeToken
PATTERN = /.post[0-9]+/i
sig { override.params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
return unless (other = Token.from(other))
case other
when PostToken
rev <=> other.rev
when AlphaToken, BetaToken, RCToken, PreToken
1
else
super
end
end
end
SCAN_PATTERN = T.let(Regexp.union(
AlphaToken::PATTERN,
BetaToken::PATTERN,
PreToken::PATTERN,
RCToken::PATTERN,
PatchToken::PATTERN,
PostToken::PATTERN,
NumericToken::PATTERN,
StringToken::PATTERN,
).freeze, Regexp)
private_constant :SCAN_PATTERN
sig { params(url: T.any(String, Pathname), specs: T.untyped).returns(Version) }
def self.detect(url, **specs)
parse(specs.fetch(:tag, url), detected_from_url: true)
end
sig { params(spec: T.any(String, Pathname), detected_from_url: T::Boolean).returns(Version) }
def self.parse(spec, detected_from_url: false)
# This type of full-URL decoding is not technically correct but we only need a rough decode for version parsing.
spec = URI.decode_www_form_component(spec.to_s) if detected_from_url
spec = Pathname(spec)
VERSION_PARSERS.each do |parser|
version = parser.parse(spec)
return new(version, detected_from_url:) if version.present?
end
NULL
end
NUMERIC_WITH_OPTIONAL_DOTS = T.let(/(?:\d+(?:\.\d+)*)/.source.freeze, String)
private_constant :NUMERIC_WITH_OPTIONAL_DOTS
NUMERIC_WITH_DOTS = T.let(/(?:\d+(?:\.\d+)+)/.source.freeze, String)
private_constant :NUMERIC_WITH_DOTS
MINOR_OR_PATCH = T.let(/(?:\d+(?:\.\d+){1,2})/.source.freeze, String)
private_constant :MINOR_OR_PATCH
CONTENT_SUFFIX = T.let(/(?:[._-](?i:bin|dist|stable|src|sources?|final|full))/.source.freeze, String)
private_constant :CONTENT_SUFFIX
PRERELEASE_SUFFIX = T.let(/(?:[._-]?(?i:alpha|beta|pre|rc)\.?\d{,2})/.source.freeze, String)
private_constant :PRERELEASE_SUFFIX
VERSION_PARSERS = T.let([
# date-based versioning
# e.g. `2023-09-28.tar.gz`
# e.g. `ltopers-v2017-04-14.tar.gz`
StemParser.new(/(?:^|[._-]?)v?(\d{4}-\d{2}-\d{2})/),
# GitHub tarballs
# e.g. `https://github.com/foo/bar/tarball/v1.2.3`
# e.g. `https://github.com/sam-github/libnet/tarball/libnet-1.1.4`
# e.g. `https://github.com/isaacs/npm/tarball/v0.2.5-1`
# e.g. `https://github.com/petdance/ack/tarball/1.93_02`
UrlParser.new(%r{github\.com/.+/(?:zip|tar)ball/(?:v|\w+-)?((?:\d+[._-])+\d*)$}),
# e.g. `https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)`
UrlParser.new(/[_-]([Rr]\d+[AaBb]\d*(?:-\d+)?)/),
# e.g. `boost_1_39_0`
StemParser.new(/((?:\d+_)+\d+)$/) { |s| s.tr("_", ".") },
# e.g. `foobar-4.5.1-1`
# e.g. `unrtf_0.20.4-1`
# e.g. `ruby-1.9.1-p243`
StemParser.new(/[_-](#{NUMERIC_WITH_DOTS}-(?:p|P|rc|RC)?\d+)#{CONTENT_SUFFIX}?$/),
# Hyphenated versions without software-name prefix (e.g. brew-)
# e.g. `v0.0.8-12.tar.gz`
# e.g. `3.3.04-1.tar.gz`
# e.g. `v2.1-20210510.tar.gz`
# e.g. `2020.11.11-3.tar.gz`
# e.g. `v3.6.6-0.2`
StemParser.new(/^v?(#{NUMERIC_WITH_DOTS}(?:-#{NUMERIC_WITH_OPTIONAL_DOTS})+)/),
# URL with no extension
# e.g. `https://waf.io/waf-1.8.12`
# e.g. `https://codeload.github.com/gsamokovarov/jump/tar.gz/v0.7.1`
UrlParser.new(/[-v](#{NUMERIC_WITH_OPTIONAL_DOTS})$/),
# e.g. `lame-398-1`
StemParser.new(/-(\d+-\d+)/),
# e.g. `foobar-4.5.1`
StemParser.new(/-(#{NUMERIC_WITH_OPTIONAL_DOTS})$/),
# e.g. `foobar-4.5.1.post1`
StemParser.new(/-(#{NUMERIC_WITH_OPTIONAL_DOTS}(.post\d+)?)$/),
# e.g. `foobar-4.5.1b`
StemParser.new(/-(#{NUMERIC_WITH_OPTIONAL_DOTS}(?:[abc]|rc|RC)\d*)$/),
# e.g. `foobar-4.5.0-alpha5, foobar-4.5.0-beta1, or foobar-4.50-beta`
StemParser.new(/-(#{NUMERIC_WITH_OPTIONAL_DOTS}-(?:alpha|beta|rc)\d*)$/),
# e.g. `https://ftpmirror.gnu.org/libidn/libidn-1.29-win64.zip`
# e.g. `https://ftpmirror.gnu.org/libmicrohttpd/libmicrohttpd-0.9.17-w32.zip`
StemParser.new(/-(#{MINOR_OR_PATCH})-w(?:in)?(?:32|64)$/),
# Opam packages
# e.g. `https://opam.ocaml.org/archives/sha.1.9+opam.tar.gz`
# e.g. `https://opam.ocaml.org/archives/lablgtk.2.18.3+opam.tar.gz`
# e.g. `https://opam.ocaml.org/archives/easy-format.1.0.2+opam.tar.gz`
StemParser.new(/\.(#{MINOR_OR_PATCH})\+opam$/),
# e.g. `https://ftpmirror.gnu.org/mtools/mtools-4.0.18-1.i686.rpm`
# e.g. `https://ftpmirror.gnu.org/autogen/autogen-5.5.7-5.i386.rpm`
# e.g. `https://ftpmirror.gnu.org/libtasn1/libtasn1-2.8-x86.zip`
# e.g. `https://ftpmirror.gnu.org/libtasn1/libtasn1-2.8-x64.zip`
# e.g. `https://ftpmirror.gnu.org/mtools/mtools_4.0.18_i386.deb`
StemParser.new(/[_-](#{MINOR_OR_PATCH}(?:-\d+)?)[._-](?:i[36]86|x86|x64(?:[_-](?:32|64))?)$/),
# e.g. `https://registry.npmjs.org/@angular/cli/-/cli-1.3.0-beta.1.tgz`
# e.g. `https://github.com/dlang/dmd/archive/v2.074.0-beta1.tar.gz`
# e.g. `https://github.com/dlang/dmd/archive/v2.074.0-rc1.tar.gz`
# e.g. `https://github.com/premake/premake-core/releases/download/v5.0.0-alpha10/premake-5.0.0-alpha10-src.zip`
StemParser.new(/[-.vV]?(#{NUMERIC_WITH_DOTS}#{PRERELEASE_SUFFIX})/),
# e.g. `foobar4.5.1`
StemParser.new(/(#{NUMERIC_WITH_OPTIONAL_DOTS})$/),
# e.g. `foobar-4.5.0-bin`
StemParser.new(/[-vV](#{NUMERIC_WITH_DOTS}[abc]?)#{CONTENT_SUFFIX}$/),
# dash version style
# e.g. `http://www.antlr.org/download/antlr-3.4-complete.jar`
# e.g. `https://cdn.nuxeo.com/nuxeo-9.2/nuxeo-server-9.2-tomcat.zip`
# e.g. `https://search.maven.org/remotecontent?filepath=com/facebook/presto/presto-cli/0.181/presto-cli-0.181-executable.jar`
# e.g. `https://search.maven.org/remotecontent?filepath=org/fusesource/fuse-extra/fusemq-apollo-mqtt/1.3/fusemq-apollo-mqtt-1.3-uber.jar`
# e.g. `https://search.maven.org/remotecontent?filepath=org/apache/orc/orc-tools/1.2.3/orc-tools-1.2.3-uber.jar`
StemParser.new(/-(#{NUMERIC_WITH_DOTS})-/),
# Debian style
# e.g. `dash_0.5.5.1.orig.tar.gz`
# e.g. `lcrack_20040914.orig.tar.gz`
# e.g. `mkcue_1.orig.tar.gz`
StemParser.new(/_(#{NUMERIC_WITH_OPTIONAL_DOTS}[abc]?)\.orig$/),
# e.g. `https://www.openssl.org/source/openssl-0.9.8s.tar.gz`
StemParser.new(/-v?(\d[^-]+)/),
# e.g. `astyle_1.23_macosx.tar.gz`
StemParser.new(/_v?(\d[^_]+)/),
# e.g. `http://mirrors.jenkins-ci.org/war/1.486/jenkins.war`
# e.g. `https://github.com/foo/bar/releases/download/0.10.11/bar.phar`
# e.g. `https://github.com/clojure/clojurescript/releases/download/r1.9.293/cljs.jar`
# e.g. `https://github.com/fibjs/fibjs/releases/download/v0.6.1/fullsrc.zip`
# e.g. `https://wwwlehre.dhbw-stuttgart.de/~sschulz/WORK/E_DOWNLOAD/V_1.9/E.tgz`
# e.g. `https://github.com/JustArchi/ArchiSteamFarm/releases/download/2.3.2.0/ASF.zip`
# e.g. `https://people.gnome.org/~newren/eg/download/1.7.5.2/eg`
UrlParser.new(%r{/(?:[rvV]_?)?(\d+\.\d+(?:\.\d+){,2})}),
# e.g. `https://www.ijg.org/files/jpegsrc.v8d.tar.gz`
StemParser.new(/\.v(\d+[a-z]?)/),
# e.g. `https://secure.php.net/get/php-7.1.10.tar.bz2/from/this/mirror`
UrlParser.new(/[-.vV]?(#{NUMERIC_WITH_DOTS}#{PRERELEASE_SUFFIX}?)/),
].freeze, T::Array[Version::Parser])
private_constant :VERSION_PARSERS
sig { params(val: T.any(String, Version), detected_from_url: T::Boolean).void }
def initialize(val, detected_from_url: false)
version = val.to_str
raise ArgumentError, "Version must not be empty" if version.blank?
@version = T.let(version, String)
@detected_from_url = detected_from_url
end
sig { returns(T::Boolean) }
def detected_from_url?
@detected_from_url
end
HEAD_VERSION_REGEX = /\AHEAD(?:-(?<commit>.*))?\Z/
private_constant :HEAD_VERSION_REGEX
# Check if this is a HEAD version.
#
# @api public
sig { returns(T::Boolean) }
def head?
version&.match?(HEAD_VERSION_REGEX) || false
end
# Return the commit for a HEAD version.
#
# @api public
sig { returns(T.nilable(String)) }
def commit
version&.match(HEAD_VERSION_REGEX)&.[](:commit)
end
# Update the commit for a HEAD version.
sig { params(commit: T.nilable(String)).void }
def update_commit(commit)
raise ArgumentError, "Cannot update commit for non-HEAD version." unless head?
@version = if commit
"HEAD-#{commit}"
else
"HEAD"
end
end
sig { returns(T::Boolean) }
def null?
version.nil?
end
sig { params(comparator: String, other: Version).returns(T::Boolean) }
def compare(comparator, other)
case comparator
when ">=" then self >= other
when ">" then self > other
when "<" then self < other
when "<=" then self <= other
when "==" then self == other
when "!=" then self != other
else raise ArgumentError, "Unknown comparator: #{comparator}"
end
end
sig { params(other: T.untyped).returns(T.nilable(Integer)) }
def <=>(other)
other = case other
when String
if other.blank?
# Cannot compare `NULL` to empty string.
return if null?
return 1
end
# Needed to retain API compatibility with older string comparisons for compiler versions, etc.
Version.new(other)
when Integer
# Used by the `*_build_version` comparisons, which formerly returned an integer.
Version.new(other.to_s)
when Token
if other.null?
# Cannot compare `NULL` to `NULL`.
return if null?
return 1
end
Version.new(other.to_s)
when Version
if other.null?
# Cannot compare `NULL` to `NULL`.
return if null?
return 1
end
other
when nil
return 1
else
return
end
# All `other.null?` cases are handled at this point.
return -1 if null?
return 0 if version == other.version
return 1 if head? && !other.head?
return -1 if !head? && other.head?
return 0 if head? && other.head?
ltokens = tokens
rtokens = other.tokens
max = max(ltokens.length, rtokens.length)
l = r = 0
while l < max
a = ltokens[l] || NULL_TOKEN
b = rtokens[r] || NULL_TOKEN
if a == b
l += 1
r += 1
next
elsif a.numeric? && !b.numeric?
return 1 if a > NULL_TOKEN
l += 1
elsif !a.numeric? && b.numeric?
return -1 if b > NULL_TOKEN
r += 1
else
return a <=> b
end
end
0
end
sig { override.params(other: T.anything).returns(T::Boolean) }
def ==(other)
# Makes sure that the same instance of Version::NULL
# will never equal itself; normally Comparable#==
# will return true for this regardless of the return
# value of #<=>
return false if null?
super
end
alias eql? ==
# The major version.
#
# @api public
sig { returns(T.nilable(Token)) }
def major
return NULL_TOKEN if null?
tokens.first
end
# The minor version.
#
# @api public
sig { returns(T.nilable(Token)) }
def minor
return NULL_TOKEN if null?
tokens.second
end
# The patch version.
#
# @api public
sig { returns(T.nilable(Token)) }
def patch
return NULL_TOKEN if null?
tokens.third
end
# The major and minor version.
#
# @api public
sig { returns(T.self_type) }
def major_minor
return self if null?
major_minor = T.must(tokens[0..1])
major_minor.empty? ? NULL : self.class.new(major_minor.join("."))
end
# The major, minor and patch version.
#
# @api public
sig { returns(T.self_type) }
def major_minor_patch
return self if null?
major_minor_patch = T.must(tokens[0..2])
major_minor_patch.empty? ? NULL : self.class.new(major_minor_patch.join("."))
end
sig { returns(Integer) }
def hash
version.hash
end
# Convert the version to a floating-point number.
#
# @api public
sig { returns(Float) }
def to_f
return Float::NAN if null?
version.to_f
end
# Convert the version to an integer.
#
# @api public
sig { returns(Integer) }
def to_i
version.to_i
end
# @api public
sig { returns(String) }
def to_str
raise NoMethodError, "undefined method `to_str` for #{self.class}:NULL" if null?
T.must(version).to_str
end
# @api public
sig { returns(String) }
def to_s = version.to_s
sig { params(options: T.untyped).returns(String) }
def to_json(*options) = version.to_json(*options)
sig { params(method: T.any(Symbol, String), include_all: T::Boolean).returns(T::Boolean) }
def respond_to?(method, include_all = false)
return !null? if ["to_str", :to_str].include?(method)
super
end
sig { returns(String) }
def inspect
return "#<Version::NULL>" if null?
"#<Version #{self}>"
end
sig { returns(T.self_type) }
def freeze
tokens # Determine and store tokens before freezing
super
end
protected
sig { returns(T.nilable(String)) }
attr_reader :version
sig { returns(T::Array[Token]) }
def tokens
@tokens ||= T.let(
version&.scan(SCAN_PATTERN)&.map { |token| Token.create(T.cast(token, String)) } || [],
T.nilable(T::Array[Token]),
)
end
# Represents the absence of a version.
#
# NOTE: Constructor needs to called with an arbitrary non-empty version which is then set to `nil`.
NULL = T.let(Version.new("NULL").tap { |v| v.instance_variable_set(:@version, nil) }.freeze, Version)
private
# Return the larger of the two integers without allocating an array.
sig { params(first: Integer, second: Integer).returns(Integer) }
def max(first, second)
  (first >= second) ? first : second
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/mcp_server.rb | Library/Homebrew/mcp_server.rb | # typed: strict
# frozen_string_literal: true
# This is a standalone Ruby script as MCP servers need a faster startup time
# than a normal Homebrew Ruby command allows.
require_relative "standalone"
require "json"
require "stringio"
module Homebrew
# Provides a Model Context Protocol (MCP) server for Homebrew.
# See https://modelcontextprotocol.io/introduction for more information.
#
# https://modelcontextprotocol.io/docs/tools/inspector is useful for testing.
class McpServer
HOMEBREW_BREW_FILE = T.let(ENV.fetch("HOMEBREW_BREW_FILE").freeze, String)
HOMEBREW_VERSION = T.let(ENV.fetch("HOMEBREW_VERSION").freeze, String)
JSON_RPC_VERSION = T.let("2.0", String)
MCP_PROTOCOL_VERSION = T.let("2025-03-26", String)
ERROR_CODE = T.let(-32601, Integer)
SERVER_INFO = T.let({
name: "brew-mcp-server",
version: HOMEBREW_VERSION,
}.freeze, T::Hash[Symbol, String])
FORMULA_OR_CASK_PROPERTIES = T.let({
formula_or_cask: {
type: "string",
description: "Formula or cask name",
},
}.freeze, T::Hash[Symbol, T.anything])
# NOTE: Cursor (as of June 2025) will only query/use a maximum of 40 tools.
TOOLS = T.let({
search: {
name: "search",
description: "Perform a substring search of cask tokens and formula names for <text>. " \
"If <text> is flanked by slashes, it is interpreted as a regular expression.",
command: "brew search",
inputSchema: {
type: "object",
properties: {
text_or_regex: {
type: "string",
description: "Text or regex to search for",
},
},
},
required: ["text_or_regex"],
},
info: {
name: "info",
description: "Display brief statistics for your Homebrew installation. " \
"If a <formula> or <cask> is provided, show summary of information about it.",
command: "brew info",
inputSchema: { type: "object", properties: FORMULA_OR_CASK_PROPERTIES },
},
install: {
name: "install",
description: "Install a <formula> or <cask>.",
command: "brew install",
inputSchema: { type: "object", properties: FORMULA_OR_CASK_PROPERTIES },
required: ["formula_or_cask"],
},
update: {
name: "update",
description: "Fetch the newest version of Homebrew and all formulae from GitHub using `git` and " \
"perform any necessary migrations.",
command: "brew update",
inputSchema: { type: "object", properties: {} },
},
upgrade: {
name: "upgrade",
description: "Upgrade outdated casks and outdated, unpinned formulae using the same options they were " \
"originally installed with, plus any appended brew formula options. If <cask> or <formula> " \
"are specified, upgrade only the given <cask> or <formula> kegs (unless they are pinned).",
command: "brew upgrade",
inputSchema: { type: "object", properties: FORMULA_OR_CASK_PROPERTIES },
},
uninstall: {
name: "uninstall",
description: "Uninstall a <formula> or <cask>.",
command: "brew uninstall",
inputSchema: { type: "object", properties: FORMULA_OR_CASK_PROPERTIES },
required: ["formula_or_cask"],
},
list: {
name: "list",
description: "List all installed formulae and casks. " \
"If <formula> is provided, summarise the paths within its current keg. " \
"If <cask> is provided, list its artifacts.",
command: "brew list",
inputSchema: { type: "object", properties: FORMULA_OR_CASK_PROPERTIES },
},
config: {
name: "config",
description: "Show Homebrew and system configuration info useful for debugging. " \
"If you file a bug report, you will be required to provide this information.",
command: "brew config",
inputSchema: { type: "object", properties: {} },
},
doctor: {
name: "doctor",
description: "Check your system for potential problems. Will exit with a non-zero status " \
"if any potential problems are found. " \
"Please note that these warnings are just used to help the Homebrew maintainers " \
"with debugging if you file an issue. If everything you use Homebrew for " \
"is working fine: please don't worry or file an issue; just ignore this.",
command: "brew doctor",
inputSchema: { type: "object", properties: {} },
},
typecheck: {
name: "typecheck",
description: "Check for typechecking errors using Sorbet.",
command: "brew typecheck",
inputSchema: { type: "object", properties: {} },
},
style: {
name: "style",
description: "Check formulae or files for conformance to Homebrew style guidelines.",
command: "brew style",
inputSchema: {
type: "object",
properties: {
fix: {
type: "boolean",
description: "Fix style violations automatically using RuboCop's auto-correct feature",
},
files: {
type: "string",
description: "Specific files to check (space-separated)",
},
changed: {
type: "boolean",
description: "Only check files that were changed from the `main` branch",
},
},
},
},
tests: {
name: "tests",
description: "Run Homebrew's unit and integration tests.",
command: "brew tests",
inputSchema: {
type: "object",
properties: {
only: {
type: "string",
description: "Specific tests to run (comma-seperated) e.g. for `<file>_spec.rb` pass `<file>`. " \
"Appending `:<line_number>` will start at a specific line",
},
fail_fast: {
type: "boolean",
description: "Exit early on the first failing test",
},
changed: {
type: "boolean",
description: "Only runs tests on files that were changed from the `main` branch",
},
online: {
type: "boolean",
description: "Run online tests",
},
},
},
},
commands: {
name: "commands",
description: "Show lists of built-in and external commands.",
command: "brew commands",
inputSchema: { type: "object", properties: {} },
},
help: {
name: "help",
description: "Outputs the usage instructions for `brew` <command>.",
command: "brew help",
inputSchema: {
type: "object",
properties: {
command: {
type: "string",
description: "Command to get help for",
},
},
},
},
}.freeze, T::Hash[Symbol, T::Hash[Symbol, T.anything]])
sig { params(stdin: T.any(IO, StringIO), stdout: T.any(IO, StringIO), stderr: T.any(IO, StringIO)).void }
def initialize(stdin: $stdin, stdout: $stdout, stderr: $stderr)
  # `--debug`/`-d` turns on request/response logging to stderr;
  # `--ping` makes `run` issue a single synthetic ping request and exit.
  @debug_logging = T.let(ARGV.include?("--debug") || ARGV.include?("-d"), T::Boolean)
  @ping_switch = T.let(ARGV.include?("--ping"), T::Boolean)
  # Streams are injectable for testing; by default MCP is spoken over stdio.
  @stdin = T.let(stdin, T.any(IO, StringIO))
  @stdout = T.let(stdout, T.any(IO, StringIO))
  @stderr = T.let(stderr, T.any(IO, StringIO))
end
# Whether verbose request/response logging is enabled.
sig { returns(T::Boolean) }
def debug_logging?
  @debug_logging
end

# Whether the server runs in single-shot `--ping` health-check mode.
sig { returns(T::Boolean) }
def ping_switch?
  @ping_switch
end
# Main serve loop: reads newline-delimited JSON-RPC requests from stdin,
# dispatches them via `handle_request` and writes responses to stdout.
sig { void }
def run
  @stderr.puts "==> Started Homebrew MCP server..."
  loop do
    # In `--ping` mode a single synthetic ping request is handled instead of
    # reading from stdin; the loop exits after the response is written.
    input = if ping_switch?
      { jsonrpc: JSON_RPC_VERSION, id: 1, method: "ping" }.to_json
    else
      break if @stdin.eof?
      @stdin.gets
    end
    next if input.nil? || input.strip.empty?
    request = JSON.parse(input)
    debug("Request: #{JSON.pretty_generate(request)}")
    response = handle_request(request)
    # A nil response means the request carried no id (a notification):
    # per JSON-RPC nothing is written back.
    if response.nil?
      debug("Response: nil")
      next
    end
    debug("Response: #{JSON.pretty_generate(response)}")
    output = JSON.dump(response).strip
    @stdout.puts(output)
    @stdout.flush
    break if ping_switch?
  end
rescue Interrupt
  exit 0 # Ctrl-C is a clean shutdown.
rescue => e
  log("Error: #{e.message}")
  exit 1
end
# Logs to stderr only when debug logging is enabled.
sig { params(text: String).void }
def debug(text)
  log(text) if debug_logging?
end

# Unconditionally logs to stderr and flushes so output is visible immediately.
sig { params(text: String).void }
def log(text)
  @stderr.puts(text)
  @stderr.flush
end
# Dispatches a parsed JSON-RPC request to the matching MCP handler and returns
# the response hash, or `nil` when no response should be written.
sig { params(request: T::Hash[String, T.untyped]).returns(T.nilable(T::Hash[Symbol, T.anything])) }
def handle_request(request)
  id = request["id"]
  # Requests without an id are notifications: dropped with no response.
  # NOTE(review): this means the "notifications/..." branch below is only
  # reached if a client (non-standardly) attaches an id — confirm intent.
  return if id.nil?
  case request["method"]
  when "initialize"
    respond_result(id, {
      protocolVersion: MCP_PROTOCOL_VERSION,
      capabilities: {
        tools: { listChanged: false },
        prompts: {},
        resources: {},
        logging: {},
        roots: {},
      },
      serverInfo: SERVER_INFO,
    })
  when "resources/list"
    respond_result(id, { resources: [] })
  when "resources/templates/list"
    respond_result(id, { resourceTemplates: [] })
  when "prompts/list"
    respond_result(id, { prompts: [] })
  when "ping"
    respond_result(id)
  when "get_server_info"
    respond_result(id, SERVER_INFO)
  when "logging/setLevel"
    # Only the "debug" level enables debug logging; any other level disables it.
    @debug_logging = request["params"]["level"] == "debug"
    respond_result(id)
  when "notifications/initialized", "notifications/cancelled"
    respond_result
  when "tools/list"
    respond_result(id, { tools: TOOLS.values })
  when "tools/call"
    respond_to_tools_call(id, request)
  else
    respond_error(id, "Method not found")
  end
end
# Runs the `brew` subcommand behind the named tool and returns its combined
# stdout/stderr as the tool-call result. While the command runs, optional
# `notifications/progress` messages are emitted (once per second, only when new
# output has arrived) if the client supplied a `progressToken`.
sig { params(id: Integer, request: T::Hash[String, T.untyped]).returns(T.nilable(T::Hash[Symbol, T.anything])) }
def respond_to_tools_call(id, request)
  tool_name = request["params"]["name"].to_sym
  tool = TOOLS.fetch tool_name do
    return respond_error(id, "Unknown tool")
  end
  require "open3"
  command_args = tool_command_arguments(tool_name, request["params"]["arguments"])
  progress_token = request["params"]["_meta"]&.fetch("progressToken", nil)
  # TOOLS stores e.g. "brew install"; the brew executable is invoked directly,
  # so only the subcommand name is passed through.
  brew_command = T.cast(tool.fetch(:command), String)
                  .delete_prefix("brew ")
  buffer_size = 4096 # 4KB
  progress = T.let(0, Integer)
  done = T.let(false, T::Boolean)
  new_output = T.let(false, T::Boolean)
  output = +""
  text = Open3.popen2e(HOMEBREW_BREW_FILE, brew_command, *command_args) do |stdin, io, _wait|
    stdin.close
    # Background thread drains the pipe so the command never blocks on a full
    # buffer; `progress` counts read chunks and `new_output` flags fresh data.
    reader = Thread.new do
      loop do
        output << io.readpartial(buffer_size)
        progress += 1
        new_output = true
      end
    rescue EOFError
      nil # Command finished and closed its end of the pipe.
    ensure
      done = true
    end
    # Foreground: emit progress notifications while the command is running.
    # Without a progress token there is nothing to notify, so just wait.
    until done
      break unless progress_token
      sleep 1
      next unless new_output
      response = {
        jsonrpc: JSON_RPC_VERSION,
        method: "notifications/progress",
        params: { progressToken: progress_token, progress: },
      }
      progress_output = JSON.dump(response).strip
      @stdout.puts(progress_output)
      @stdout.flush
      new_output = false
    end
    reader.join
    output
  end
  respond_result(id, { content: [{ type: "text", text: }] })
end
# Maps a tool's JSON arguments hash onto the argv for its `brew` subcommand.
# Empty and nil values are dropped.
# NOTE(review): `Shellwords.escape` is applied even though the args are passed
# to `Open3.popen2e` as separate argv entries (no shell word-splitting), so
# special characters arrive with literal backslashes — confirm this is intended.
sig { params(tool_name: Symbol, arguments: T::Hash[String, T.untyped]).returns(T::Array[String]) }
def tool_command_arguments(tool_name, arguments)
  require "shellwords"
  case tool_name
  when :style
    style_args = []
    style_args << "--fix" if arguments["fix"]
    style_args << "--changed" if arguments["changed"]
    # "files" is a single space-separated string; split into individual paths.
    file_arguments = arguments.fetch("files", "").strip.split
    style_args.concat(file_arguments) unless file_arguments.empty?
    style_args
  when :tests
    tests_args = []
    only_arguments = arguments.fetch("only", "").strip
    tests_args << "--only=#{only_arguments}" unless only_arguments.empty?
    tests_args << "--fail-fast" if arguments["fail_fast"]
    tests_args << "--changed" if arguments["changed"]
    tests_args << "--online" if arguments["online"]
    tests_args
  when :search
    [arguments["text_or_regex"]]
  when :help
    [arguments["command"]]
  else
    # Remaining tools take at most a single formula/cask name.
    [arguments["formula_or_cask"]]
  end.compact
     .reject(&:empty?)
     .map { |arg| Shellwords.escape(arg) }
end
# Builds a JSON-RPC success response; notifications (nil id) get no response.
sig {
  params(id: T.nilable(Integer),
         result: T::Hash[Symbol, T.anything]).returns(T.nilable(T::Hash[Symbol, T.anything]))
}
def respond_result(id = nil, result = {})
  { jsonrpc: JSON_RPC_VERSION, id:, result: } unless id.nil?
end

# Builds a JSON-RPC error response with the server's fixed error code.
sig { params(id: T.nilable(Integer), message: String).returns(T::Hash[Symbol, T.anything]) }
def respond_error(id, message)
  {
    jsonrpc: JSON_RPC_VERSION,
    id:,
    error: { code: ERROR_CODE, message: },
  }
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/deprecate_disable.rb | Library/Homebrew/deprecate_disable.rb | # typed: strict
# frozen_string_literal: true
# Helper module for handling `disable!` and `deprecate!`.
# @api internal
module DeprecateDisable
module_function
FORMULA_DEPRECATE_DISABLE_REASONS = T.let({
does_not_build: "does not build",
no_license: "has no license",
repo_archived: "has an archived upstream repository",
repo_removed: "has a removed upstream repository",
unmaintained: "is not maintained upstream",
unsupported: "is not supported upstream",
deprecated_upstream: "is deprecated upstream",
versioned_formula: "is a versioned formula",
checksum_mismatch: "was built with an initially released source file that had " \
"a different checksum than the current one. " \
"Upstream's repository might have been compromised. " \
"We can re-package this once upstream has confirmed that they retagged their release",
}.freeze, T::Hash[Symbol, String])
CASK_DEPRECATE_DISABLE_REASONS = T.let({
discontinued: "is discontinued upstream",
moved_to_mas: "is now exclusively distributed on the Mac App Store",
no_longer_available: "is no longer available upstream",
no_longer_meets_criteria: "no longer meets the criteria for acceptable casks",
unmaintained: "is not maintained upstream",
fails_gatekeeper_check: "does not pass the macOS Gatekeeper check",
unreachable: "is no longer reliably reachable upstream",
}.freeze, T::Hash[Symbol, String])
# One year when << or >> to Date.today.
REMOVE_DISABLED_TIME_WINDOW = 12
REMOVE_DISABLED_BEFORE = T.let((Date.today << REMOVE_DISABLED_TIME_WINDOW).freeze, Date)
# Classifies a formula or cask as `:deprecated`, `:disabled` or neither (nil).
# Deprecation takes precedence when both flags are set.
sig { params(formula_or_cask: T.any(Formula, Cask::Cask)).returns(T.nilable(Symbol)) }
def type(formula_or_cask)
  if formula_or_cask.deprecated?
    :deprecated
  elsif formula_or_cask.disabled?
    :disabled
  end
end
# Formats a replacement for display in a `brew install` hint. When the formula
# and cask replacements are the same name, no type flag is needed; otherwise
# the appropriate `--formula`/`--cask` flag is prefixed. Returns nil if neither
# replacement is set.
sig {
  params(
    formula: T.nilable(String),
    cask: T.nilable(String),
  ).returns(T.nilable(String))
}
def replacement_with_type(formula, cask)
  return formula if formula && formula == cask
  return "--formula #{formula}" if formula
  "--cask #{cask}" if cask
end
# Builds the user-facing deprecation/disable message for a formula or cask:
# the reason, the (actual or projected) disable date and a replacement hint.
# Returns nil when the item is neither deprecated nor disabled.
sig { params(formula_or_cask: T.any(Formula, Cask::Cask)).returns(T.nilable(String)) }
def message(formula_or_cask)
  return if type(formula_or_cask).blank?
  reason = if formula_or_cask.deprecated?
    formula_or_cask.deprecation_reason
  elsif formula_or_cask.disabled?
    formula_or_cask.disable_reason
  end
  # Symbolic reasons are expanded to their human-readable text; free-form
  # string reasons pass through unchanged.
  reason = if formula_or_cask.is_a?(Formula) && FORMULA_DEPRECATE_DISABLE_REASONS.key?(reason)
    FORMULA_DEPRECATE_DISABLE_REASONS[reason]
  elsif formula_or_cask.is_a?(Cask::Cask) && CASK_DEPRECATE_DISABLE_REASONS.key?(reason)
    CASK_DEPRECATE_DISABLE_REASONS[reason]
  else
    reason
  end
  message = if reason.present?
    "#{type(formula_or_cask)} because it #{reason}!"
  else
    "#{type(formula_or_cask)}!"
  end
  # With no explicit disable date, project one: deprecation date plus the
  # standard removal window (12 months).
  disable_date = formula_or_cask.disable_date
  if !disable_date && formula_or_cask.deprecation_date
    disable_date = formula_or_cask.deprecation_date >> REMOVE_DISABLED_TIME_WINDOW
  end
  if disable_date
    message += if disable_date < Date.today
      " It was disabled on #{disable_date}."
    else
      " It will be disabled on #{disable_date}."
    end
  end
  replacement = if formula_or_cask.disabled?
    replacement_with_type(
      formula_or_cask.disable_replacement_formula,
      formula_or_cask.disable_replacement_cask,
    )
  elsif formula_or_cask.deprecated?
    replacement_with_type(
      formula_or_cask.deprecation_replacement_formula,
      formula_or_cask.deprecation_replacement_cask,
    )
  end
  if replacement.present?
    message << "\n"
    message << <<~EOS
      Replacement:
        brew install #{replacement}
    EOS
  end
  message
end
# Converts a reason string to its canonical symbol when it names a known
# deprecate/disable reason for the given type (`:formula` or `:cask`);
# otherwise the original string is returned (nil stays nil).
sig { params(string: T.nilable(String), type: Symbol).returns(T.nilable(T.any(String, Symbol))) }
def to_reason_string_or_symbol(string, type:)
  return if string.nil?

  known_reasons = case type
  when :formula then FORMULA_DEPRECATE_DISABLE_REASONS
  when :cask then CASK_DEPRECATE_DISABLE_REASONS
  else {}
  end
  symbol = string.to_sym
  known_reasons.key?(symbol) ? symbol : string
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/bottle_specification.rb | Library/Homebrew/bottle_specification.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
class BottleSpecification
RELOCATABLE_CELLARS = [:any, :any_skip_relocation].freeze
sig { returns(T.nilable(Tap)) }
attr_accessor :tap
attr_reader :collector
sig { returns(T::Hash[Symbol, T.untyped]) }
attr_reader :root_url_specs
sig { returns(String) }
attr_reader :repository
sig { void }
def initialize
  # Rebuild number 0 means "first build" and is omitted from bottle filenames
  # elsewhere; the collector accumulates per-tag checksum/cellar specs.
  @rebuild = T.let(0, Integer)
  @repository = T.let(Homebrew::DEFAULT_REPOSITORY, String)
  @collector = T.let(Utils::Bottles::Collector.new, Utils::Bottles::Collector)
  @root_url_specs = T.let({}, T::Hash[Symbol, T.untyped])
  @root_url = T.let(nil, T.nilable(String))
end
# Combined getter/setter used by the bottle DSL: called with no argument it
# returns the current rebuild number, with an argument it sets it.
# (`T.unsafe(nil)` works around the sig declaring `val` as Integer.)
sig { params(val: Integer).returns(Integer) }
def rebuild(val = T.unsafe(nil))
  val.nil? ? @rebuild : @rebuild = val
end
# Combined getter/setter for the bottle root URL (bottle DSL).
# Getter (no argument): lazily defaults to the GitHub Packages form of
# `HOMEBREW_BOTTLE_DOMAIN` when that domain matches, else the raw domain.
# Setter: applies the same GitHub Packages normalisation to `var` and merges
# any extra `specs` (e.g. credentials) into `root_url_specs`.
def root_url(var = nil, specs = {})
  if var.nil?
    @root_url ||= if (github_packages_url = GitHubPackages.root_url_if_match(Homebrew::EnvConfig.bottle_domain))
      github_packages_url
    else
      Homebrew::EnvConfig.bottle_domain
    end
  else
    @root_url = if (github_packages_url = GitHubPackages.root_url_if_match(var))
      github_packages_url
    else
      var
    end
    @root_url_specs.merge!(specs)
  end
end
# Two bottle specifications are equal when every DSL-settable attribute matches.
def ==(other)
  return false unless self.class == other.class

  rebuild == other.rebuild &&
    collector == other.collector &&
    root_url == other.root_url &&
    root_url_specs == other.root_url_specs &&
    tap == other.tap
end
alias eql? ==
# Cellar declared for `tag`, falling back to the tag's default cellar when no
# specification has been collected for it.
sig { params(tag: Utils::Bottles::Tag).returns(T.any(Symbol, String)) }
def tag_to_cellar(tag = Utils::Bottles.tag)
  spec = collector.specification_for(tag)
  spec.present? ? spec.cellar : tag.default_cellar
end
# Whether the bottle for `tag` can be poured into this installation's cellar
# and prefix. Relocatable cellars (:any/:any_skip_relocation) always fit.
# Otherwise both paths must match exactly, or — when
# `HOMEBREW_RELOCATE_BUILD_PREFIX` is set — be no shorter than the local paths
# so that in-binary path rewriting can succeed.
sig { params(tag: Utils::Bottles::Tag).returns(T::Boolean) }
def compatible_locations?(tag: Utils::Bottles.tag)
  cellar = tag_to_cellar(tag)
  return true if RELOCATABLE_CELLARS.include?(cellar)

  prefix = Pathname(cellar.to_s).parent.to_s
  # Hoisted: previously this ENV lookup was evaluated twice per call.
  relocate_build_prefix = ENV["HOMEBREW_RELOCATE_BUILD_PREFIX"].present?
  cellar_relocatable = cellar.size >= HOMEBREW_CELLAR.to_s.size && relocate_build_prefix
  prefix_relocatable = prefix.size >= HOMEBREW_PREFIX.to_s.size && relocate_build_prefix
  compatible_cellar = cellar == HOMEBREW_CELLAR.to_s || cellar_relocatable
  compatible_prefix = prefix == HOMEBREW_PREFIX.to_s || prefix_relocatable
  compatible_cellar && compatible_prefix
end
# Does the {Bottle} this {BottleSpecification} belongs to need to be relocated?
sig { params(tag: Utils::Bottles::Tag).returns(T::Boolean) }
def skip_relocation?(tag: Utils::Bottles.tag)
  collector.specification_for(tag)&.cellar == :any_skip_relocation
end

# Whether a bottle exists for `tag` (optionally also accepting older versions).
sig { params(tag: T.any(Symbol, Utils::Bottles::Tag), no_older_versions: T::Boolean).returns(T::Boolean) }
def tag?(tag, no_older_versions: false)
  collector.tag?(tag, no_older_versions:)
end
# Checksum methods in the DSL's bottle block take
# a Hash, which indicates the platform the checksum applies on.
# Example bottle block syntax:
# bottle do
#   sha256 cellar: :any_skip_relocation, big_sur: "69489ae397e4645..."
#   sha256 cellar: :any, catalina: "449de5ea35d0e94..."
# end
def sha256(hash)
  sha256_regex = /^[a-f0-9]{64}$/i
  # find new `sha256 big_sur: "69489ae397e4645..."` format
  tag, digest = hash.find do |key, value|
    key.is_a?(Symbol) && value.is_a?(String) && value.match?(sha256_regex)
  end
  # :cellar is only honoured when a tag/digest pair was found.
  # NOTE(review): if no entry matches, `tag` is nil and is still passed to
  # `Tag.from_symbol` — presumably that raises; confirm expected behaviour.
  cellar = hash[:cellar] if digest && tag
  tag = Utils::Bottles::Tag.from_symbol(tag)
  cellar ||= tag.default_cellar
  collector.add(tag, checksum: Checksum.new(digest), cellar:)
end
# The collected checksum/cellar specification for `tag`, or nil when the
# bottle has no matching tag (optionally also accepting older macOS versions).
sig {
  params(tag: Utils::Bottles::Tag, no_older_versions: T::Boolean)
    .returns(T.nilable(Utils::Bottles::TagSpecification))
}
def tag_specification_for(tag, no_older_versions: false)
  collector.specification_for(tag, no_older_versions:)
end
# All collected checksums as `{ "tag" =>, "digest" =>, "cellar" => }` hashes.
# Sorted (after the reverse) newest-macOS first, with arm64 tags ahead of
# other architectures and non-macOS tags last.
def checksums
  tags = collector.tags.sort_by do |tag|
    version = tag.to_macos_version
    # Give `arm64` bottles a higher priority so they are first.
    priority = (tag.arch == :arm64) ? 3 : 2
    "#{priority}.#{version}_#{tag}"
  rescue MacOSVersion::Error
    # Sort non-macOS tags below macOS tags, and arm64 tags before other tags.
    priority = (tag.arch == :arm64) ? 1 : 0
    "#{priority}.#{tag}"
  end
  tags.reverse.map do |tag|
    spec = collector.specification_for(tag)
    {
      "tag" => spec.tag.to_sym,
      "digest" => spec.checksum,
      "cellar" => spec.cellar,
    }
  end
end
end
require "extend/os/bottle_specification"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/exceptions.rb | Library/Homebrew/exceptions.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "utils/output"
# Raised when a command is used wrong.
#
# @api internal
class UsageError < RuntimeError
  # Optional human-readable explanation of what was wrong.
  attr_reader :reason

  def initialize(reason = nil)
    super
    @reason = reason
  end

  sig { returns(String) }
  def to_s
    reason ? "Invalid usage: #{reason}" : "Invalid usage"
  end
end
# Raised when a command expects a formula and none was specified.
class FormulaUnspecifiedError < UsageError
def initialize
super "this command requires a formula argument"
end
end
# Raised when a command expects a formula or cask and none was specified.
class FormulaOrCaskUnspecifiedError < UsageError
def initialize
super "this command requires a formula or cask argument"
end
end
# Raised when a command expects a keg and none was specified.
class KegUnspecifiedError < UsageError
def initialize
super "this command requires a keg argument"
end
end
class UnsupportedInstallationMethod < RuntimeError; end
class MultipleVersionsInstalledError < RuntimeError; end
# Raised when a path is not a keg.
#
# @api internal
class NotAKegError < RuntimeError; end
# Raised when a keg doesn't exist.
class NoSuchKegError < RuntimeError
  attr_reader :name, :tap

  def initialize(name, tap: nil)
    @name = name
    @tap = tap
    msg = +"No such keg: #{HOMEBREW_CELLAR}/#{name}"
    msg << " from tap #{tap}" if tap
    super msg
  end
end
# Raised when an invalid attribute is used in a formula.
class FormulaValidationError < StandardError
  attr_reader :attr, :formula

  def initialize(formula, attr, value)
    @attr = attr
    @formula = formula
    detail = "#{attr} (#{value.inspect})"
    super "invalid attribute for formula '#{formula}': #{detail}"
  end
end
class FormulaSpecificationError < StandardError; end
# Raised when a deprecated method is used.
class MethodDeprecatedError < StandardError
attr_accessor :issues_url
end
# Raised when neither a formula nor a cask with the given name is available.
class FormulaOrCaskUnavailableError < RuntimeError
  # The (possibly tap-qualified) name that could not be resolved.
  attr_reader :name

  def initialize(name)
    super()
    @name = name
    # Store the state of these envs at the time the exception is thrown.
    # This is so we do the fuzzy search for "did you mean" etc under that same mode,
    # in case the list of formulae are different.
    @without_api = Homebrew::EnvConfig.no_install_from_api?
    @auto_without_api = Homebrew::EnvConfig.automatically_set_no_install_from_api?
  end

  # Suggestion of similarly-named formulae, searched under the same API mode
  # that was active when the error was raised; empty string when none are close.
  sig { returns(String) }
  def did_you_mean
    require "formula"
    similar_formula_names = Homebrew.with_no_api_env_if_needed(@without_api) { Formula.fuzzy_search(name) }
    return "" if similar_formula_names.blank?
    "Did you mean #{similar_formula_names.to_sentence two_words_connector: " or ", last_word_connector: " or "}?"
  end

  sig { returns(String) }
  def to_s
    s = "No available formula or cask with the name \"#{name}\". #{did_you_mean}".strip
    if @auto_without_api && !CoreTap.instance.installed?
      s += "\nA full git tap clone is required to use this command on core packages."
    end
    s
  end
end
# Raised when a formula or cask in a specific tap is not available.
class TapFormulaOrCaskUnavailableError < FormulaOrCaskUnavailableError
attr_reader :tap
def initialize(tap, name)
super "#{tap}/#{name}"
@tap = tap
end
sig { returns(String) }
def to_s
s = super
s += "\nPlease tap it and then try again: brew tap #{tap}" unless tap.installed?
s
end
end
# Raised when a formula is not available.
#
# @api internal
class FormulaUnavailableError < FormulaOrCaskUnavailableError
  # Name of the formula that required this one, if any; used to enrich the message.
  attr_accessor :dependent

  sig { returns(T.nilable(String)) }
  def dependent_s
    return unless dependent
    return if dependent == name

    " (dependency of #{dependent})"
  end

  sig { returns(String) }
  def to_s
    "No available formula with the name \"#{name}\"#{dependent_s}. #{did_you_mean}".strip
  end
end
# Shared methods for formula class errors.
module FormulaClassUnavailableErrorModule
attr_reader :path, :class_name, :class_list
sig { returns(String) }
def to_s
s = super
s += "\nIn formula file: #{path}"
s += "\nExpected to find class #{class_name}, but #{class_list_s}."
s
end
private
sig { returns(String) }
def class_list_s
formula_class_list = class_list.select { |klass| klass < Formula }
if class_list.empty?
"found no classes"
elsif formula_class_list.empty?
"only found: #{format_list(class_list)} (not derived from Formula!)"
else
"only found: #{format_list(formula_class_list)}"
end
end
def format_list(class_list)
class_list.map { |klass| klass.name.split("::").last }.join(", ")
end
end
# Raised when a formula does not contain a formula class.
class FormulaClassUnavailableError < FormulaUnavailableError
include FormulaClassUnavailableErrorModule
def initialize(name, path, class_name, class_list)
@path = path
@class_name = class_name
@class_list = class_list
super name
end
end
# Shared methods for formula unreadable errors.
module FormulaUnreadableErrorModule
extend T::Helpers
requires_ancestor { FormulaOrCaskUnavailableError }
attr_reader :formula_error
sig { returns(String) }
def to_s
"#{name}: " + formula_error.to_s
end
end
# Raised when a formula is unreadable.
class FormulaUnreadableError < FormulaUnavailableError
include FormulaUnreadableErrorModule
def initialize(name, error)
super(name)
@formula_error = error
set_backtrace(error.backtrace)
end
end
# Raised when a formula in a specific tap is unavailable.
class TapFormulaUnavailableError < FormulaUnavailableError
attr_reader :tap, :user, :repository
def initialize(tap, name)
@tap = tap
@user = tap.user
@repository = tap.repository
super "#{tap}/#{name}"
end
sig { returns(String) }
def to_s
s = super
s += "\nPlease tap it and then try again: brew tap #{tap}" unless tap.installed?
s
end
end
# Raised when a formula in a specific tap does not contain a formula class.
class TapFormulaClassUnavailableError < TapFormulaUnavailableError
include FormulaClassUnavailableErrorModule
attr_reader :tap
def initialize(tap, name, path, class_name, class_list)
@path = path
@class_name = class_name
@class_list = class_list
super tap, name
end
end
# Raised when a formula in a specific tap is unreadable.
class TapFormulaUnreadableError < TapFormulaUnavailableError
include FormulaUnreadableErrorModule
def initialize(tap, name, error)
super(tap, name)
@formula_error = error
set_backtrace(error.backtrace)
end
end
# Raised when a formula with the same name is found in multiple taps.
class TapFormulaAmbiguityError < RuntimeError
attr_reader :name, :taps, :loaders
def initialize(name, loaders)
@name = name
@loaders = loaders
@taps = loaders.map(&:tap)
formulae = taps.map { |tap| "#{tap}/#{name}" }
formula_list = formulae.map { |f| "\n * #{f}" }.join
super <<~EOS
Formulae found in multiple taps:#{formula_list}
Please use the fully-qualified name (e.g. #{formulae.first}) to refer to a specific formula.
EOS
end
end
# Raised when a tap is unavailable.
class TapUnavailableError < RuntimeError
attr_reader :name
def initialize(name)
@name = name
message = "No available tap #{name}.\n"
if [CoreTap.instance.name, CoreCaskTap.instance.name].include?(name)
command = "brew tap --force #{name}"
message += <<~EOS
Run #{Formatter.identifier(command)} to tap #{name}!
EOS
else
command = "brew tap-new #{name}"
message += <<~EOS
Run #{Formatter.identifier(command)} to create a new #{name} tap!
EOS
end
super message.freeze
end
end
# Raised when a tap's remote does not match the actual remote.
class TapRemoteMismatchError < RuntimeError
attr_reader :name, :expected_remote, :actual_remote
def initialize(name, expected_remote, actual_remote)
@name = name
@expected_remote = expected_remote
@actual_remote = actual_remote
super message
end
def message
<<~EOS
Tap #{name} remote mismatch.
#{expected_remote} != #{actual_remote}
EOS
end
end
# Raised when the remote of homebrew/core does not match HOMEBREW_CORE_GIT_REMOTE.
class TapCoreRemoteMismatchError < TapRemoteMismatchError
def message
<<~EOS
Tap #{name} remote does not match `$HOMEBREW_CORE_GIT_REMOTE`.
#{expected_remote} != #{actual_remote}
Please set `HOMEBREW_CORE_GIT_REMOTE="#{actual_remote}"` and run `brew update` instead.
EOS
end
end
# Raised when a tap is already installed.
class TapAlreadyTappedError < RuntimeError
attr_reader :name
def initialize(name)
@name = name
super <<~EOS
Tap #{name} already tapped.
EOS
end
end
# Raised when run `brew tap --custom-remote` without a remote URL.
class TapNoCustomRemoteError < RuntimeError
attr_reader :name
def initialize(name)
@name = name
super <<~EOS
Tap #{name} with option `--custom-remote` but without a remote URL.
EOS
end
end
# Raised when another Homebrew operation is already in progress.
class OperationInProgressError < RuntimeError
sig { params(locked_path: Pathname).void }
def initialize(locked_path)
full_command = Homebrew.running_command_with_args.presence || "brew"
lock_context = if (env_lock_context = Homebrew::EnvConfig.lock_context.presence)
"\n#{env_lock_context}"
end
message = <<~EOS
A `#{full_command}` process has already locked #{locked_path}.#{lock_context}
Please wait for it to finish or terminate it to continue.
EOS
super message
end
end
class CannotInstallFormulaError < RuntimeError; end
# Raised when a formula installation was already attempted.
class FormulaInstallationAlreadyAttemptedError < RuntimeError
def initialize(formula)
super "Formula installation already attempted: #{formula.full_name}"
end
end
# Raised when there are unsatisfied requirements.
class UnsatisfiedRequirements < RuntimeError
def initialize(reqs)
if reqs.length == 1
super "An unsatisfied requirement failed this build."
else
super "Unsatisfied requirements failed this build."
end
end
end
# Raised when a formula conflicts with another one.
class FormulaConflictError < RuntimeError
attr_reader :formula, :conflicts
def initialize(formula, conflicts)
@formula = formula
@conflicts = conflicts
super message
end
def conflict_message(conflict)
message = []
message << " #{conflict.name}"
message << ": because #{conflict.reason}" if conflict.reason
message.join
end
sig { returns(String) }
def message
message = []
message << "Cannot install #{formula.full_name} because conflicting formulae are installed."
message.concat conflicts.map { |c| conflict_message(c) } << ""
message << <<~EOS
Please `brew unlink #{conflicts.map(&:name) * " "}` before continuing.
Unlinking removes a formula's symlinks from #{HOMEBREW_PREFIX}. You can
link the formula again after the install finishes. You can `--force` this
install, but the build may fail or cause obscure side effects in the
resulting software.
EOS
message.join("\n")
end
end
# Raise when the Python version cannot be detected automatically.
class FormulaUnknownPythonError < RuntimeError
def initialize(formula)
super <<~EOS
The version of Python to use with the virtualenv in the `#{formula.full_name}` formula
cannot be guessed automatically because a recognised Python dependency could not be found.
If you are using a non-standard Python dependency, please add `:using => "python@x.y"`
to 'virtualenv_install_with_resources' to resolve the issue manually.
EOS
end
end
# Raise when two Python versions are detected simultaneously.
class FormulaAmbiguousPythonError < RuntimeError
def initialize(formula)
super <<~EOS
The version of Python to use with the virtualenv in the `#{formula.full_name}` formula
cannot be guessed automatically.
If the simultaneous use of multiple Pythons is intentional, please add `:using => "python@x.y"`
to 'virtualenv_install_with_resources' to resolve the ambiguity manually.
EOS
end
end
# Raised when an error occurs during a formula build.
class BuildError < RuntimeError
include Utils::Output::Mixin
attr_reader :cmd, :args, :env
attr_accessor :formula, :options
sig {
  params(
    formula: T.nilable(Formula),
    cmd: T.any(String, Pathname),
    args: T::Array[T.any(String, Integer, Pathname, Symbol)],
    env: T::Hash[String, T.untyped],
  ).void
}
def initialize(formula, cmd, args, env)
  @formula = formula
  @cmd = cmd
  @args = args
  @env = env
  # Backslash-escape spaces and backslashes so the failed command line can be
  # copy-pasted into a shell.
  pretty_args = Array(args).map { |arg| arg.to_s.gsub(/[\\ ]/, "\\\\\\0") }.join(" ")
  super "Failed executing: #{cmd} #{pretty_args}".strip
end
# Memoised list of open GitHub issues possibly related to this build failure.
sig { returns(T::Array[T.untyped]) }
def issues
  @issues ||= fetch_issues
end
# Queries GitHub for open issues on the formula's tap; returns an empty list
# when the lookup is disabled via env or the API call fails (with a warning).
sig { returns(T::Array[T.untyped]) }
def fetch_issues
  return [] if ENV["HOMEBREW_NO_BUILD_ERROR_ISSUES"].present?
  GitHub.issues_for_formula(formula.name, tap: formula.tap, state: "open", type: "issue")
rescue GitHub::API::Error => e
  opoo "Unable to query GitHub for recent issues on the tap\n#{e.message}"
  []
end
# Prints a post-mortem for the failed build: (optionally) configuration and
# environment, log locations, where to report the issue, possibly-related open
# issues and the output of the diagnostic build-error checks.
sig { params(verbose: T::Boolean).void }
def dump(verbose: false)
  puts
  if verbose
    require "system_config"
    require "build_environment"
    ohai "Formula"
    puts "Tap: #{formula.tap}" if formula.tap?
    puts "Path: #{formula.path}"
    ohai "Configuration"
    SystemConfig.dump_verbose_config
    ohai "ENV"
    BuildEnvironment.dump env
    puts
    onoe "#{formula.full_name} #{formula.version} did not build"
    unless (logs = Dir["#{formula.logs}/*"]).empty?
      puts "Logs:"
      puts logs.map { |fn| " #{fn}" }.join("\n")
    end
  end
  if formula.tap
    if OS.not_tier_one_configuration?
      # Fix: this heredoc was previously built and discarded without printing.
      puts <<~EOS
        This is not a Tier 1 configuration:
          #{Formatter.url("https://docs.brew.sh/Support-Tiers")}
        #{Formatter.bold("Do not report any issues to Homebrew/* repositories!")}
        Read the above document instead before opening any issues or PRs.
      EOS
    elsif formula.tap.official?
      puts Formatter.error(Formatter.url(OS::ISSUES_URL), label: "READ THIS")
    elsif (issues_url = formula.tap.issues_url)
      puts <<~EOS
        If reporting this issue please do so at (not Homebrew/* repositories):
          #{Formatter.url(issues_url)}
      EOS
    else
      puts <<~EOS
        If reporting this issue please do so to (not Homebrew/* repositories):
          #{formula.tap}
      EOS
    end
  else
    # Fix: likewise, this fallback guidance was never actually printed.
    puts <<~EOS
      We cannot detect the correct tap to report this issue to.
      Do not report this issue to Homebrew/* repositories!
    EOS
  end
  puts
  if issues.present?
    puts "These open issues may also help:"
    puts issues.map { |i| "#{i["title"]} #{i["html_url"]}" }.join("\n")
  end
  require "diagnostic"
  checks = Homebrew::Diagnostic::Checks.new
  checks.build_error_checks.each do |check|
    out = checks.send(check)
    next if out.nil?
    puts
    ofail out
  end
end
end
# Raised if the formula or its dependencies are not bottled and are being
# installed in a situation where a bottle is required.
class UnbottledError < RuntimeError
  def initialize(formulae)
    require "utils"

    formula_word = Utils.pluralize("formula", formulae.count)
    bottle_word = Utils.pluralize("bottle", formulae.count)
    msg = <<~EOS
      The following #{formula_word} cannot be installed from #{bottle_word} and must be
      built from source.
      #{formulae.to_sentence}
    EOS
    msg += "#{DevelopmentTools.installation_instructions}\n" unless DevelopmentTools.installed?
    super(msg.freeze)
  end
end
# Raised by `Homebrew.install`, `Homebrew.reinstall` and `Homebrew.upgrade`
# if the user passes any flags/environment that would case a bottle-only
# installation on a system without build tools to fail.
class BuildFlagsError < RuntimeError
  def initialize(flags, bottled: true)
    plural = flags.length > 1
    flag_text = plural ? "flags" : "flag"
    require_text = plural ? "require" : "requires"

    bottle_text = if bottled
      <<~EOS
        Alternatively, remove the #{flag_text} to attempt bottle installation.
      EOS
    end

    super <<~EOS
      The following #{flag_text}:
      #{flags.join(", ")}
      #{require_text} building tools, but none are installed.
      #{DevelopmentTools.installation_instructions} #{bottle_text}
    EOS
  end
end
# Raised by {CompilerSelector} if the formula fails with all of
# the compilers available on the user's system.
class CompilerSelectionError < RuntimeError
  def initialize(formula)
    message = <<~EOS
      #{formula.full_name} cannot be built with any available compilers.
      #{DevelopmentTools.custom_installation_instructions}
    EOS
    super(message)
  end
end
# Raised in {Downloadable#fetch} when a resource download fails;
# wraps the underlying exception and adopts its backtrace.
class DownloadError < RuntimeError
  attr_reader :cause

  def initialize(downloadable, cause)
    @cause = cause
    message = <<~EOS
      Failed to download resource #{downloadable.download_queue_name.inspect}
      #{cause.message}
    EOS
    super(message)
    set_backtrace(cause.backtrace)
  end
end
# Raised in {CurlDownloadStrategy#fetch}; distinguishes missing local
# `file://` sources from ordinary download failures.
class CurlDownloadStrategyError < RuntimeError
  def initialize(url)
    message = case url
    when %r{^file://(.+)}
      "File does not exist: #{Regexp.last_match(1)}"
    else
      "Download failed: #{url}"
    end
    super(message)
  end
end
# Raised in {HomebrewCurlDownloadStrategy#fetch}.
# NOTE(review): the string passed to `super` flows through the parent's
# `initialize(url)`, so the final message gains a "Download failed: " prefix —
# preserved here; confirm this is intentional upstream.
class HomebrewCurlDownloadStrategyError < CurlDownloadStrategyError
  def initialize(url)
    detail = "Homebrew-installed `curl` is not installed for: #{url}"
    super(detail)
  end
end
# Raised by {Kernel#safe_system} in `utils.rb` when a subprocess fails.
class ErrorDuringExecution < RuntimeError
  # +cmd+ is the argv array, +status+ the exit information, +output+ an
  # optional array of [type, line] pairs captured from the command.
  attr_reader :cmd, :status, :output

  def initialize(cmd, status:, output: nil, secrets: [])
    @cmd = cmd
    @status = status
    @output = output

    raise ArgumentError, "Status cannot be nil." if status.nil?

    # `status` may be a raw Integer exit code, a serialised Hash, or an
    # object with #exitstatus/#termsig (e.g. Process::Status).
    exitstatus = case status
    when Integer
      status
    when Hash
      status["exitstatus"]
    else
      status.exitstatus
    end

    termsig = case status
    when Integer
      nil
    when Hash
      status["termsig"]
    else
      status.termsig
    end

    # Strip any secrets (e.g. tokens) out of the command line before it is
    # included in the message.
    redacted_cmd = redact_secrets(cmd.shelljoin.gsub('\=', "="), secrets)

    reason = if exitstatus
      "exited with #{exitstatus}"
    elsif termsig
      "was terminated by uncaught signal #{Signal.signame(termsig)}"
    else
      raise ArgumentError, "Status neither has `exitstatus` nor `termsig`."
    end

    s = "Failure while executing; `#{redacted_cmd}` #{reason}."

    if Array(output).present?
      # stderr lines are highlighted via Formatter.error; stdout passes through.
      format_output_line = lambda do |type_line|
        type, line = *type_line
        if type == :stderr
          Formatter.error(line)
        else
          line
        end
      end

      s << " Here's the output:\n"
      s << output.map(&format_output_line).join
      s << "\n" unless s.end_with?("\n")
    end

    super s.freeze
  end

  # Joins just the captured stderr lines into a single string.
  sig { returns(String) }
  def stderr
    Array(output).select { |type,| type == :stderr }.map(&:last).join
  end
end
# Raised by {Pathname#verify_checksum} when "expected" is nil or empty,
# i.e. when there is no checksum to verify against.
class ChecksumMissingError < ArgumentError; end
# Raised by {Pathname#verify_checksum} when the computed SHA-256 does not
# match the expected one.
class ChecksumMismatchError < RuntimeError
  attr_reader :expected

  def initialize(path, expected, actual)
    @expected = expected
    message = <<~EOS
      SHA-256 mismatch
      Expected: #{Formatter.success(expected.to_s)}
      Actual: #{Formatter.error(actual.to_s)}
      File: #{path}
      To retry an incomplete download, remove the file above.
    EOS
    super(message)
  end
end
# Raised when a formula does not define the requested resource.
class ResourceMissingError < ArgumentError
  def initialize(formula, resource)
    super(format("%s does not define resource %s", formula.full_name, resource.inspect))
  end
end
# Raised when the same resource is declared more than once.
class DuplicateResourceError < ArgumentError
  def initialize(resource)
    super(format("Resource %s is defined more than once", resource.inspect))
  end
end
# Raised when a single patch file is not found and apply hasn't been
# specified, so there is no way to know what to apply.
class MissingApplyError < RuntimeError; end
# Raised when a bottle archive does not contain the expected formula file.
class BottleFormulaUnavailableError < RuntimeError
  def initialize(bottle_path, formula_path)
    message = <<~EOS
      This bottle does not contain the formula file:
      #{bottle_path}
      #{formula_path}
    EOS
    super(message)
  end
end
# Raised when a `Utils.safe_fork` child exits with a non-zero code;
# the failing status is kept for inspection.
class ChildProcessError < RuntimeError
  attr_reader :status

  def initialize(status)
    @status = status
    super("Forked child process failed: #{status}")
  end
end
# Raised when `detected_perl_shebang` and friends cannot detect a shebang.
class ShebangDetectionError < RuntimeError
  def initialize(type, reason)
    super(format("Cannot detect %s shebang: %s.", type, reason))
  end
end
# Raised when one or more formulae have cyclic dependencies.
class CyclicDependencyError < RuntimeError
  def initialize(strongly_connected_components)
    # Components of size > 1 are the actual dependency cycles.
    cycles = strongly_connected_components.select { |packages| packages.count > 1 }
    super <<~EOS
      The following packages contain cyclic dependencies:
      #{cycles.map(&:to_sentence).join("\n  ")}
    EOS
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/requirement.rb | Library/Homebrew/requirement.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "dependable"
require "dependency"
require "dependencies"
require "build_environment"
require "utils/output"
# A base class for non-formula requirements needed by formulae.
# A fatal requirement is one that will fail the build if it is not present.
# By default, requirements are non-fatal.
# A base class for non-formula requirements needed by formulae.
# A fatal requirement is one that will fail the build if it is not present.
# By default, requirements are non-fatal.
class Requirement
  include Dependable
  include Utils::Output::Mixin
  extend Cachable
  extend T::Helpers

  # This base class enforces no constraints on its own.
  # Individual subclasses use the `satisfy` DSL to define those constraints.
  abstract!

  attr_reader :name, :cask, :download

  def initialize(tags = [])
    # Class-level DSL values serve as defaults; hash tags can fill in gaps.
    @cask = self.class.cask
    @download = self.class.download
    tags.each do |tag|
      next unless tag.is_a? Hash

      @cask ||= tag[:cask]
      @download ||= tag[:download]
    end
    @tags = tags
    @tags << :build if self.class.build
    @name ||= infer_name
  end

  def option_names
    [name]
  end

  # The message to show when the requirement is not met.
  sig { returns(String) }
  def message
    _, _, class_name = self.class.to_s.rpartition "::"
    s = "#{class_name} unsatisfied!\n"

    if cask
      s += <<~EOS
        You can install the necessary cask with:
        brew install --cask #{cask}
      EOS
    end

    if download
      s += <<~EOS
        You can download from:
        #{Formatter.url(download)}
      EOS
    end
    s
  end

  # Overriding {#satisfied?} is unsupported.
  # Pass a block or boolean to the satisfy DSL method instead.
  sig {
    params(
      env:          T.nilable(String),
      cc:           T.nilable(String),
      build_bottle: T::Boolean,
      bottle_arch:  T.nilable(String),
    ).returns(T::Boolean)
  }
  def satisfied?(env: nil, cc: nil, build_bottle: false, bottle_arch: nil)
    satisfy = self.class.satisfy
    return true unless satisfy

    # Keep the raw result: modify_build_environment later inspects it for
    # a Pathname so the found executable can be kept on PATH.
    @satisfied_result =
      satisfy.yielder(env:, cc:, build_bottle:, bottle_arch:) do |p|
        instance_eval(&p)
      end
    return false unless @satisfied_result

    true
  end

  # Overriding {#fatal?} is unsupported.
  # Pass a boolean to the fatal DSL method instead.
  sig { returns(T::Boolean) }
  def fatal?
    self.class.fatal || false
  end

  # If the satisfy block returned a Pathname living in the Cellar, map its
  # parent to the corresponding stable opt path so it survives upgrades.
  def satisfied_result_parent
    return unless @satisfied_result.is_a?(Pathname)

    parent = @satisfied_result.resolved_path.parent
    if parent.to_s =~ %r{^#{Regexp.escape(HOMEBREW_CELLAR)}/([\w+-.@]+)/[^/]+/(s?bin)/?$}o
      parent = HOMEBREW_PREFIX/"opt/#{Regexp.last_match(1)}/#{Regexp.last_match(2)}"
    end
    parent
  end

  # Pass a block to the env DSL method instead of overriding.
  sig(:final) {
    params(
      env:          T.nilable(String),
      cc:           T.nilable(String),
      build_bottle: T::Boolean,
      bottle_arch:  T.nilable(String),
    ).void
  }
  def modify_build_environment(env: nil, cc: nil, build_bottle: false, bottle_arch: nil)
    satisfied?(env:, cc:, build_bottle:, bottle_arch:)
    instance_eval(&env_proc) if env_proc

    # XXX If the satisfy block returns a Pathname, then make sure that it
    # remains available on the PATH. This makes requirements like
    #   satisfy { which("executable") }
    # work, even under superenv where "executable" wouldn't normally be on the
    # PATH.
    parent = satisfied_result_parent
    return unless parent
    # FIX: the array previously contained "#{HOMEBREW_PREFIX}/bin" twice;
    # the second entry should be sbin (matching the `(s?bin)` capture in
    # satisfied_result_parent) so sbin-resolved results are skipped too.
    return if ["#{HOMEBREW_PREFIX}/bin", "#{HOMEBREW_PREFIX}/sbin"].include?(parent.to_s)
    return if PATH.new(ENV.fetch("PATH")).include?(parent.to_s)

    ENV.prepend_path("PATH", parent)
  end

  def env
    self.class.env
  end

  def env_proc
    self.class.env_proc
  end

  # Requirements are equal when they share class, name and tags.
  def ==(other)
    instance_of?(other.class) && name == other.name && tags == other.tags
  end
  alias eql? ==

  def hash
    [self.class, name, tags].hash
  end

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}: #{tags.inspect}>"
  end

  def display_s
    name.capitalize
  end

  def mktemp(&block)
    Mktemp.new(name).run(&block)
  end

  private

  # Derives a default requirement name from the class name ("FooRequirement"
  # -> "foo"), falling back to the cask name, then an empty string.
  def infer_name
    klass = self.class.name
    klass = klass&.sub(/(Dependency|Requirement)$/, "")
            &.sub(/^(\w+::)*/, "")
    return klass.downcase if klass.present?
    return @cask if @cask.present?

    ""
  end

  # Searches the user's original PATH rather than the filtered build PATH.
  def which(cmd)
    super(cmd, PATH.new(ORIGINAL_PATHS))
  end

  class << self
    include BuildEnvironment::DSL

    attr_reader :env_proc, :build

    # The DSL getters below double as setters when given an argument.
    sig { params(val: String).returns(T.nilable(String)) }
    def cask(val = T.unsafe(nil))
      val.nil? ? @cask : @cask = val
    end

    sig { params(val: String).returns(T.nilable(String)) }
    def download(val = T.unsafe(nil))
      val.nil? ? @download : @download = val
    end

    sig { params(val: T::Boolean).returns(T.nilable(T::Boolean)) }
    def fatal(val = T.unsafe(nil))
      val.nil? ? @fatal : @fatal = val
    end

    def satisfy(options = nil, &block)
      return @satisfied if options.nil? && !block

      options = {} if options.nil?
      @satisfied = Satisfier.new(options, &block)
    end

    def env(*settings, &block)
      if block
        @env_proc = block
      else
        super
      end
    end
  end

  # Helper class for evaluating whether a requirement is satisfied.
  class Satisfier
    def initialize(options, &block)
      case options
      when Hash
        @options = { build_env: true }
        @options.merge!(options)
      else
        # A non-Hash value (e.g. true/false) is the literal answer.
        @satisfied = options
      end
      @proc = block
    end

    def yielder(env: nil, cc: nil, build_bottle: false, bottle_arch: nil)
      if instance_variable_defined?(:@satisfied)
        @satisfied
      elsif @options[:build_env]
        require "extend/ENV"
        ENV.with_build_environment(
          env:, cc:, build_bottle:, bottle_arch:,
        ) do
          yield @proc
        end
      else
        yield @proc
      end
    end
  end
  private_constant :Satisfier

  class << self
    # Expand the requirements of dependent recursively, optionally yielding
    # `[dependent, req]` pairs to allow callers to apply arbitrary filters to
    # the list.
    # The default filter, which is applied when a block is not given, omits
    # optionals and recommends based on what the dependent has asked for.
    def expand(dependent, cache_key: nil, &block)
      if cache_key.present?
        cache[cache_key] ||= {}
        return cache[cache_key][cache_id dependent].dup if cache[cache_key][cache_id dependent]
      end

      reqs = Requirements.new

      formulae = dependent.recursive_dependencies.map(&:to_formula)
      formulae.unshift(dependent)

      formulae.each do |f|
        f.requirements.each do |req|
          next if prune?(f, req, &block)

          reqs << req
        end
      end

      if cache_key.present?
        # Even though we setup the cache above
        # 'dependent.recursive_dependencies.map(&:to_formula)'
        # is invalidating the singleton cache
        cache[cache_key] ||= {}
        cache[cache_key][cache_id dependent] = reqs.dup
      end
      reqs
    end

    def prune?(dependent, req, &block)
      catch(:prune) do
        if block
          yield dependent, req
        elsif req.optional? || req.recommended?
          prune unless dependent.build.with?(req)
        end
      end
    end

    # Used to prune requirements when calling expand with a block.
    sig { void }
    def prune
      throw(:prune, true)
    end

    private

    def cache_id(dependent)
      "#{dependent.full_name}_#{dependent.class}"
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/attestation.rb | Library/Homebrew/attestation.rb | # typed: strict
# frozen_string_literal: true
require "date"
require "json"
require "utils/popen"
require "utils/github/api"
require "exceptions"
require "system_command"
require "utils/output"
module Homebrew
  # Verification of GitHub build-provenance attestations for bottles,
  # delegated to the `gh attestation verify` subcommand.
  module Attestation
    extend SystemCommand::Mixin
    extend Utils::Output::Mixin

    # @api private
    HOMEBREW_CORE_REPO = "Homebrew/homebrew-core"

    # @api private
    BACKFILL_REPO = "trailofbits/homebrew-brew-verify"

    # No backfill attestations after this date are considered valid.
    #
    # This date is shortly after the backfill operation for homebrew-core
    # completed, as can be seen here: <https://github.com/trailofbits/homebrew-brew-verify/attestations>.
    #
    # In effect, this means that, even if an attacker is able to compromise the backfill
    # signing workflow, they will be unable to convince a verifier to accept their newer,
    # malicious backfilled signatures.
    #
    # @api private
    BACKFILL_CUTOFF = T.let(DateTime.new(2024, 3, 14).freeze, DateTime)

    # Raised when the attestation was not found.
    #
    # @api private
    class MissingAttestationError < RuntimeError; end

    # Raised when attestation verification fails.
    #
    # @api private
    class InvalidAttestationError < RuntimeError; end

    # Raised if attestation verification cannot continue due to missing
    # credentials.
    #
    # @api private
    class GhAuthNeeded < RuntimeError; end

    # Raised if attestation verification cannot continue due to invalid
    # credentials.
    #
    # @api private
    class GhAuthInvalid < RuntimeError; end

    # Raised if attestation verification cannot continue due to `gh`
    # being incompatible with attestations, typically because it's too old.
    #
    # @api private
    class GhIncompatible < RuntimeError; end

    # Returns whether attestation verification is enabled.
    # The opt-out environment variable wins over the opt-in one.
    #
    # @api private
    sig { returns(T::Boolean) }
    def self.enabled?
      return false if Homebrew::EnvConfig.no_verify_attestations?

      Homebrew::EnvConfig.verify_attestations?
    end

    # Returns a path to a suitable `gh` executable for attestation verification.
    # The lookup is memoized for the lifetime of the process.
    #
    # @api private
    sig { returns(Pathname) }
    def self.gh_executable
      @gh_executable ||= T.let(nil, T.nilable(Pathname))
      return @gh_executable if @gh_executable.present?

      # NOTE: We set HOMEBREW_NO_VERIFY_ATTESTATIONS when installing `gh` itself,
      # to prevent a cycle during bootstrapping. This can eventually be resolved
      # by vendoring a pure-Ruby Sigstore verifier client.
      @gh_executable = with_env(HOMEBREW_NO_VERIFY_ATTESTATIONS: "1") do
        ensure_executable!("gh", reason: "verifying attestations", latest: true)
      end
    end

    # Prioritize installing `gh` first if it's in the formula list
    # or check for the existence of the `gh` executable elsewhere.
    #
    # This ensures that a valid version of `gh` is installed before
    # we use it to check the attestations of any other formulae we
    # want to install.
    #
    # @api private
    sig { params(formulae: T::Array[Formula]).returns(T::Array[Formula]) }
    def self.sort_formulae_for_install(formulae)
      if (gh = formulae.find { |f| f.full_name == "gh" })
        # Array#| dedupes while keeping `gh` first.
        [gh] | formulae
      else
        Homebrew::Attestation.gh_executable
        formulae
      end
    end

    # Verifies the given bottle against a cryptographic attestation of build provenance.
    #
    # The provenance is verified as originating from `signing_repository`, which is a `String`
    # that should be formatted as a GitHub `owner/repository`.
    #
    # Callers may additionally pass in `signing_workflow`, which will scope the attestation
    # down to an exact GitHub Actions workflow, in
    # `https://github/OWNER/REPO/.github/workflows/WORKFLOW.yml@REF` format.
    #
    # @return [Hash] the JSON-decoded response.
    # @raise [GhAuthNeeded] on any authentication failures
    # @raise [InvalidAttestationError] on any verification failures
    #
    # @api private
    sig {
      params(bottle: Bottle, signing_repo: String,
             signing_workflow: T.nilable(String), subject: T.nilable(String)).returns(T::Hash[T.untyped, T.untyped])
    }
    def self.check_attestation(bottle, signing_repo, signing_workflow = nil, subject = nil)
      cmd = ["attestation", "verify", bottle.cached_download, "--repo", signing_repo, "--format",
             "json"]

      cmd += ["--cert-identity", signing_workflow] if signing_workflow.present?

      # Fail early if we have no credentials. The command below invariably
      # fails without them, so this saves us an unnecessary subshell.
      credentials = GitHub::API.credentials
      raise GhAuthNeeded, "missing credentials" if credentials.blank?

      begin
        result = system_command!(gh_executable, args: cmd,
                                 env: { "GH_TOKEN" => credentials, "GH_HOST" => "github.com" },
                                 secrets: [credentials], print_stderr: false, chdir: HOMEBREW_TEMP)
      rescue ErrorDuringExecution => e
        # Map `gh`'s failure modes onto our own exception hierarchy.
        if e.status.exitstatus == 1 && e.stderr.include?("unknown command")
          raise GhIncompatible, "gh CLI is incompatible with attestations"
        end

        # Even if we have credentials, they may be invalid or malformed.
        if e.status.exitstatus == 4 || e.stderr.include?("HTTP 401: Bad credentials")
          raise GhAuthInvalid, "invalid credentials"
        end

        raise MissingAttestationError, "attestation not found: #{e}" if e.stderr.include?("HTTP 404: Not Found")

        raise InvalidAttestationError, "attestation verification failed: #{e}"
      end

      begin
        attestations = JSON.parse(result.stdout)
      rescue JSON::ParserError
        raise InvalidAttestationError, "attestation verification returned malformed JSON"
      end

      # `gh attestation verify` returns a JSON array of one or more results,
      # for all attestations that match the input's digest. We want to additionally
      # filter these down to just the attestation whose subject(s) contain the bottle's name.
      # As of 2024-12-04 GitHub's Artifact Attestation feature can put multiple subjects
      # in a single attestation, so we check every subject in each attestation
      # and select the first attestation with a matching subject.
      # In particular, this happens with v2.0.0 and later of the
      # `actions/attest-build-provenance` action.
      subject = bottle.filename.to_s if subject.blank?

      attestation = if bottle.tag.to_sym == :all
        # :all-tagged bottles are created by `brew bottle --merge`, and are not directly
        # bound to their own filename (since they're created by deduplicating other filenames).
        # To verify these, we parse each attestation subject and look for one with a matching
        # formula (name, version), but not an exact tag match.
        # This is sound insofar as the signature has already been verified. However,
        # longer term, we should also directly attest to `:all`-tagged bottles.
        attestations.find do |a|
          candidate_subjects = a.dig("verificationResult", "statement", "subject")
          candidate_subjects.any? do |candidate|
            candidate["name"].start_with? "#{bottle.filename.name}--#{bottle.filename.version}"
          end
        end
      else
        attestations.find do |a|
          candidate_subjects = a.dig("verificationResult", "statement", "subject")
          candidate_subjects.any? { |candidate| candidate["name"] == subject }
        end
      end

      raise InvalidAttestationError, "no attestation matches subject: #{subject}" if attestation.blank?

      attestation
    end

    # Maximum number of retries on InvalidAttestationError before giving up.
    ATTESTATION_MAX_RETRIES = 5

    # Verifies the given bottle against a cryptographic attestation of build provenance
    # from homebrew-core's CI, falling back on a "backfill" attestation for older bottles.
    #
    # This is a specialization of `check_attestation` for homebrew-core.
    #
    # @return [Hash] the JSON-decoded response
    # @raise [GhAuthNeeded] on any authentication failures
    # @raise [InvalidAttestationError] on any verification failures
    #
    # @api private
    sig { params(bottle: Bottle).returns(T::Hash[T.untyped, T.untyped]) }
    def self.check_core_attestation(bottle)
      begin
        # Ideally, we would also constrain the signing workflow here, but homebrew-core
        # currently uses multiple signing workflows to produce bottles
        # (e.g. `dispatch-build-bottle.yml`, `dispatch-rebottle.yml`, etc.).
        #
        # We could check each of these (1) explicitly (slow), (2) by generating a pattern
        # to pass into `--cert-identity-regex` (requires us to build up a Go-style regex),
        # or (3) by checking the resulting JSON for the expected signing workflow.
        #
        # Long term, we should probably either do (3) *or* switch to a single reusable
        # workflow, which would then be our sole identity. However, GitHub's
        # attestations currently do not include reusable workflow state by default.
        attestation = check_attestation bottle, HOMEBREW_CORE_REPO
        return attestation
      rescue MissingAttestationError
        odebug "falling back on backfilled attestation for #{bottle.filename}"

        # Our backfilled attestation is a little unique: the subject is not just the bottle
        # filename, but also has the bottle's hosted URL hash prepended to it.
        # This was originally unintentional, but has a virtuous side effect of further
        # limiting domain separation on the backfilled signatures (by committing them to
        # their original bottle URLs).
        url_sha256 = if EnvConfig.bottle_domain == HOMEBREW_BOTTLE_DEFAULT_DOMAIN
          Digest::SHA256.hexdigest(bottle.url)
        else
          # If our bottle is coming from a mirror, we need to recompute the expected
          # non-mirror URL to make the hash match.
          path, = Utils::Bottles.path_resolved_basename HOMEBREW_BOTTLE_DEFAULT_DOMAIN, bottle.name,
                                                        bottle.resource.checksum, bottle.filename
          url = "#{HOMEBREW_BOTTLE_DEFAULT_DOMAIN}/#{path}"

          Digest::SHA256.hexdigest(url)
        end
        subject = "#{url_sha256}--#{bottle.filename}"

        # We don't pass in a signing workflow for backfill signatures because
        # some backfilled bottle signatures were signed from the 'backfill'
        # branch, and others from 'main' of trailofbits/homebrew-brew-verify
        # so the signing workflow is slightly different which causes some bottles to incorrectly
        # fail when checking their attestation. This shouldn't meaningfully affect security
        # because if somehow someone could generate false backfill attestations
        # from a different workflow we will still catch it because the
        # attestation would have been generated after our cutoff date.
        backfill_attestation = check_attestation bottle, BACKFILL_REPO, nil, subject
        timestamp = backfill_attestation.dig("verificationResult", "verifiedTimestamps",
                                             0, "timestamp")

        raise InvalidAttestationError, "backfill attestation is missing verified timestamp" if timestamp.nil?

        if DateTime.parse(timestamp) > BACKFILL_CUTOFF
          raise InvalidAttestationError, "backfill attestation post-dates cutoff"
        end
      end

      backfill_attestation
    rescue InvalidAttestationError
      # Retry with exponential backoff (3^k seconds), at most
      # ATTESTATION_MAX_RETRIES times per bottle.
      @attestation_retry_count ||= T.let(Hash.new(0), T.nilable(T::Hash[Bottle, Integer]))
      raise if @attestation_retry_count[bottle] >= ATTESTATION_MAX_RETRIES

      sleep_time = 3 ** @attestation_retry_count[bottle]
      opoo "Failed to verify attestation. Retrying in #{sleep_time}s..."
      sleep sleep_time if ENV["HOMEBREW_TESTS"].blank?
      @attestation_retry_count[bottle] += 1
      retry
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/checksum.rb | Library/Homebrew/checksum.rb | # typed: strict
# frozen_string_literal: true
# A formula's checksum, stored as a lowercased hex digest and comparable
# against both plain strings and other {Checksum} instances.
class Checksum
  extend Forwardable

  sig { returns(String) }
  attr_reader :hexdigest

  sig { params(hexdigest: String).void }
  def initialize(hexdigest)
    @hexdigest = T.let(hexdigest.downcase, String)
  end

  sig { returns(String) }
  def inspect
    "#<Checksum #{hexdigest}>"
  end

  delegate [:empty?, :to_s, :length, :[]] => :@hexdigest

  # Case-insensitive comparison against strings; digest comparison
  # against other checksums; false for anything else.
  sig { params(other: T.anything).returns(T::Boolean) }
  def ==(other)
    if other.is_a?(String)
      to_s == other.downcase
    elsif other.is_a?(Checksum)
      hexdigest == other.hexdigest
    else
      false
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/keg_relocate.rb | Library/Homebrew/keg_relocate.rb | # typed: strict
# frozen_string_literal: true
class Keg
PREFIX_PLACEHOLDER = T.let("@@HOMEBREW_PREFIX@@", String)
CELLAR_PLACEHOLDER = T.let("@@HOMEBREW_CELLAR@@", String)
REPOSITORY_PLACEHOLDER = T.let("@@HOMEBREW_REPOSITORY@@", String)
LIBRARY_PLACEHOLDER = T.let("@@HOMEBREW_LIBRARY@@", String)
PERL_PLACEHOLDER = T.let("@@HOMEBREW_PERL@@", String)
JAVA_PLACEHOLDER = T.let("@@HOMEBREW_JAVA@@", String)
NULL_BYTE = T.let("\x00", String)
NULL_BYTE_STRING = T.let("\\x00", String)
class Relocation
RELOCATABLE_PATH_REGEX_PREFIX = T.let(/(?:(?<=-F|-I|-L|-isystem)|(?<![a-zA-Z0-9]))/, Regexp)
sig { void }
def initialize
@replacement_map = T.let({}, T::Hash[Symbol, [T.any(String, Regexp), String]])
end
sig { returns(Relocation) }
def freeze
@replacement_map.freeze
super
end
sig { params(key: Symbol, old_value: T.any(String, Regexp), new_value: String, path: T::Boolean).void }
def add_replacement_pair(key, old_value, new_value, path: false)
old_value = self.class.path_to_regex(old_value) if path
@replacement_map[key] = [old_value, new_value]
end
sig { params(key: Symbol).returns([T.any(String, Regexp), String]) }
def replacement_pair_for(key)
@replacement_map.fetch(key)
end
sig { params(text: String).returns(T::Boolean) }
def replace_text!(text)
replacements = @replacement_map.values.to_h
sorted_keys = replacements.keys.sort_by do |key|
key.is_a?(String) ? key.length : 999
end.reverse
any_changed = T.let(nil, T.nilable(String))
sorted_keys.each do |key|
changed = text.gsub!(key, replacements.fetch(key))
any_changed ||= changed
end
!any_changed.nil?
end
sig { params(path: T.any(String, Regexp)).returns(Regexp) }
def self.path_to_regex(path)
path = case path
when String
Regexp.escape(path)
when Regexp
path.source
end
Regexp.new(RELOCATABLE_PATH_REGEX_PREFIX.source + path)
end
end
sig { void }
def fix_dynamic_linkage
symlink_files.each do |file|
link = file.readlink
# Don't fix relative symlinks
next unless link.absolute?
link_starts_cellar = link.to_s.start_with?(HOMEBREW_CELLAR.to_s)
link_starts_prefix = link.to_s.start_with?(HOMEBREW_PREFIX.to_s)
next if !link_starts_cellar && !link_starts_prefix
new_src = link.relative_path_from(file.parent)
file.unlink
FileUtils.ln_s(new_src, file)
end
end
sig { params(_relocation: Relocation, skip_protodesc_cold: T::Boolean).void }
def relocate_dynamic_linkage(_relocation, skip_protodesc_cold: false); end
JAVA_REGEX = %r{#{HOMEBREW_PREFIX}/opt/openjdk(@\d+(\.\d+)*)?/libexec(/openjdk\.jdk/Contents/Home)?}
sig { returns(T::Hash[Symbol, T::Hash[Symbol, String]]) }
def new_usr_local_replacement_pairs
{
prefix: {
old: "/usr/local/opt",
new: "#{PREFIX_PLACEHOLDER}/opt",
},
caskroom: {
old: "/usr/local/Caskroom",
new: "#{PREFIX_PLACEHOLDER}/Caskroom",
},
etc_name: {
old: "/usr/local/etc/#{name}",
new: "#{PREFIX_PLACEHOLDER}/etc/#{name}",
},
var_homebrew: {
old: "/usr/local/var/homebrew",
new: "#{PREFIX_PLACEHOLDER}/var/homebrew",
},
var_www: {
old: "/usr/local/var/www",
new: "#{PREFIX_PLACEHOLDER}/var/www",
},
var_name: {
old: "/usr/local/var/#{name}",
new: "#{PREFIX_PLACEHOLDER}/var/#{name}",
},
var_log_name: {
old: "/usr/local/var/log/#{name}",
new: "#{PREFIX_PLACEHOLDER}/var/log/#{name}",
},
var_lib_name: {
old: "/usr/local/var/lib/#{name}",
new: "#{PREFIX_PLACEHOLDER}/var/lib/#{name}",
},
var_run_name: {
old: "/usr/local/var/run/#{name}",
new: "#{PREFIX_PLACEHOLDER}/var/run/#{name}",
},
var_db_name: {
old: "/usr/local/var/db/#{name}",
new: "#{PREFIX_PLACEHOLDER}/var/db/#{name}",
},
share_name: {
old: "/usr/local/share/#{name}",
new: "#{PREFIX_PLACEHOLDER}/share/#{name}",
},
}
end
sig { params(new_usr_local_relocation: T::Boolean).returns(Relocation) }
def prepare_relocation_to_placeholders(new_usr_local_relocation: new_usr_local_relocation?)
relocation = Relocation.new
# Use selective HOMEBREW_PREFIX replacement when HOMEBREW_PREFIX=/usr/local
# This avoids overzealous replacement of system paths when a script refers to e.g. /usr/local/bin
if new_usr_local_relocation
new_usr_local_replacement_pairs.each do |key, value|
relocation.add_replacement_pair(key, value.fetch(:old), value.fetch(:new), path: true)
end
else
relocation.add_replacement_pair(:prefix, HOMEBREW_PREFIX.to_s, PREFIX_PLACEHOLDER, path: true)
end
relocation.add_replacement_pair(:cellar, HOMEBREW_CELLAR.to_s, CELLAR_PLACEHOLDER, path: true)
# when HOMEBREW_PREFIX == HOMEBREW_REPOSITORY we should use HOMEBREW_PREFIX for all relocations to avoid
# being unable to differentiate between them.
if HOMEBREW_PREFIX != HOMEBREW_REPOSITORY
relocation.add_replacement_pair(:repository, HOMEBREW_REPOSITORY.to_s, REPOSITORY_PLACEHOLDER, path: true)
end
relocation.add_replacement_pair(:library, HOMEBREW_LIBRARY.to_s, LIBRARY_PLACEHOLDER, path: true)
relocation.add_replacement_pair(:perl,
%r{\A#![ \t]*(?:/usr/bin/perl\d\.\d+|#{HOMEBREW_PREFIX}/opt/perl/bin/perl)( |$)}o,
"#!#{PERL_PLACEHOLDER}\\1")
relocation.add_replacement_pair(:java, JAVA_REGEX, JAVA_PLACEHOLDER)
relocation
end
sig { returns(T::Array[Pathname]) }
def replace_locations_with_placeholders
relocation = prepare_relocation_to_placeholders.freeze
relocate_dynamic_linkage(relocation, skip_protodesc_cold: true)
replace_text_in_files(relocation)
end
sig { returns(Relocation) }
def prepare_relocation_to_locations
relocation = Relocation.new
relocation.add_replacement_pair(:prefix, PREFIX_PLACEHOLDER, HOMEBREW_PREFIX.to_s)
relocation.add_replacement_pair(:cellar, CELLAR_PLACEHOLDER, HOMEBREW_CELLAR.to_s)
relocation.add_replacement_pair(:repository, REPOSITORY_PLACEHOLDER, HOMEBREW_REPOSITORY.to_s)
relocation.add_replacement_pair(:library, LIBRARY_PLACEHOLDER, HOMEBREW_LIBRARY.to_s)
relocation.add_replacement_pair(:perl, PERL_PLACEHOLDER, "#{HOMEBREW_PREFIX}/opt/perl/bin/perl")
if (openjdk = openjdk_dep_name_if_applicable)
relocation.add_replacement_pair(:java, JAVA_PLACEHOLDER, "#{HOMEBREW_PREFIX}/opt/#{openjdk}/libexec")
end
relocation
end
sig { params(files: T.nilable(T::Array[Pathname]), skip_linkage: T::Boolean).void }
def replace_placeholders_with_locations(files, skip_linkage: false)
relocation = prepare_relocation_to_locations.freeze
relocate_dynamic_linkage(relocation) unless skip_linkage
replace_text_in_files(relocation, files:)
end
sig { returns(T.nilable(String)) }
def openjdk_dep_name_if_applicable
deps = runtime_dependencies
return if deps.blank?
dep_names = deps.map { |d| d["full_name"] }
dep_names.find { |d| d.match? Version.formula_optionally_versioned_regex(:openjdk) }
end
sig { params(file: Pathname).returns(T::Boolean) }
# True for service-related files Homebrew itself generated, which are named
# `homebrew.*` and end in .plist/.service/.timer.
def homebrew_created_file?(file)
  file.basename.to_s.start_with?("homebrew.") &&
    %w[.plist .service .timer].include?(file.extname)
end
sig { params(relocation: Relocation, files: T.nilable(T::Array[Pathname])).returns(T::Array[Pathname]) }
# Apply `relocation`'s text substitutions to `files` (default: all text and
# libtool files in the keg). Files are grouped by inode so each hardlink
# cluster is rewritten exactly once; links are re-created afterwards.
# @return the keg-relative paths of files that actually changed.
def replace_text_in_files(relocation, files: nil)
  files ||= text_files | libtool_files
  changed_files = T.let([], T::Array[Pathname])
  files.map { path.join(it) }.group_by { |f| f.stat.ino }.each_value do |first, *rest|
    first = T.must(first)
    s = first.open("rb", &:read)
    # Use full prefix replacement for Homebrew-created files when using selective relocation
    file_relocation = if new_usr_local_relocation? && homebrew_created_file?(first)
      prepare_relocation_to_placeholders(new_usr_local_relocation: false)
    else
      relocation
    end
    # Skip files where no substitution matched (replace_text! mutates s).
    next unless file_relocation.replace_text!(s)

    changed_files += [first, *rest].map { |file| file.relative_path_from(path) }
    begin
      first.atomic_write(s)
    rescue SystemCallError
      # atomic_write may fail (e.g. on permissions); fall back to rewriting
      # the file in place, which also preserves the existing hardlinks.
      first.ensure_writable do
        first.open("wb") { |f| f.write(s) }
      end
    else
      # atomic_write replaced the inode, so the hardlinks must be re-created.
      rest.each { |file| FileUtils.ln(first, file, force: true) }
    end
  end
  changed_files
end
sig { params(keg: Keg, old_prefix: T.any(String, Pathname), new_prefix: T.any(String, Pathname)).void }
# Rewrite occurrences of `old_prefix` inside binary files with `new_prefix`,
# null-padding each patched string so the file size (and thus all offsets)
# stays identical. Requires new_prefix to be no longer than old_prefix.
def relocate_build_prefix(keg, old_prefix, new_prefix)
  each_unique_file_matching(old_prefix) do |file|
    # Skip files which are not binary, as they do not need null padding.
    next unless keg.binary_file?(file)
    # Skip sharballs, which appear to break if patched.
    next if file.text_executable?

    # Split binary by null characters into array and substitute new prefix for old prefix.
    # Null padding is added if the new string is too short.
    file.ensure_writable do
      binary = File.binread file
      odebug "Replacing build prefix in: #{file}"
      binary_strings = binary.split(/#{NULL_BYTE}/o, -1)
      match_indices = binary_strings.each_index.select { |i| binary_strings.fetch(i).include?(old_prefix.to_s) }
      # Only perform substitution on strings which match prefix regex.
      match_indices.each do |i|
        s = binary_strings.fetch(i)
        # ljust pads with null bytes so the patched string keeps its length.
        binary_strings[i] = s.gsub(old_prefix.to_s, new_prefix.to_s)
                            .ljust(s.size, NULL_BYTE)
      end
      # Rejoin strings by null bytes.
      patched_binary = binary_strings.join(NULL_BYTE)
      # A size mismatch would corrupt the binary's internal offsets: abort.
      if patched_binary.size != binary.size
        raise <<~EOS
          Patching failed! Original and patched binary sizes do not match.
          Original size: #{binary.size}
          Patched size: #{patched_binary.size}
        EOS
      end
      file.atomic_write patched_binary
    end
    # Patched Mach-O binaries must be re-signed to stay executable.
    codesign_patched_binary(file.to_s)
  end
end
sig { params(_options: T::Hash[Symbol, T::Boolean]).returns(T::Array[Symbol]) }
# Generic stub: C++ stdlib detection is platform-specific (see
# extend/os/keg_relocate, required at the bottom of this file); the default
# reports no stdlibs.
def detect_cxx_stdlibs(_options = {})
  []
end
sig { returns(String) }
# Flags for a recursive, filenames-only `fgrep` search.
def recursive_fgrep_args
  # for GNU grep; overridden for BSD grep on OS X
  "-lr"
end
sig { returns([String, T::Array[String]]) }
# The grep executable and argument list used by binary_file?: report only
# matching filenames, use PCRE, and treat binary files as text.
def egrep_args
  args = %w[
    --files-with-matches
    --perl-regexp
    --binary-files=text
  ]
  ["grep", args]
end
sig { params(string: T.any(String, Pathname), _block: T.proc.params(arg0: Pathname).void).void }
# Yield each regular file under the keg whose contents contain `string`,
# skipping symlinks and yielding only one file per inode (so hardlinked
# duplicates are visited once).
def each_unique_file_matching(string, &_block)
  Utils.popen_read("fgrep", recursive_fgrep_args, string, to_s) do |io|
    hardlinks = Set.new
    until io.eof?
      file = Pathname.new(io.readline.chomp)
      # Don't return symbolic links.
      next if file.symlink?

      # To avoid returning hardlinks, only return files with unique inodes.
      # Hardlinks will have the same inode as the file they point to.
      yield file if hardlinks.add? file.stat.ino
    end
  end
end
sig { params(file: Pathname).returns(T::Boolean) }
# True when `file` contains a null byte, i.e. grep finds a match for the
# escaped pattern and prints the filename.
def binary_file?(file)
  grep_bin, grep_args = egrep_args
  # We need to pass NULL_BYTE_STRING, the literal string "\x00", to grep
  # rather than NULL_BYTE, a literal null byte, because grep will internally
  # convert the literal string "\x00" to a null byte.
  Utils.popen_read(grep_bin, *grep_args, NULL_BYTE_STRING, file).present?
end
sig { returns(Pathname) }
# The keg's `lib` directory.
def lib
  path/"lib"
end
sig { returns(Pathname) }
# The keg's `libexec` directory.
def libexec
  path/"libexec"
end
sig { returns(T::Array[Pathname]) }
# Collect all text files in the keg: text executables found directly, plus
# any remaining regular files that `file(1)` classifies as text. Returns an
# empty list when `file`/`xargs` are unavailable.
def text_files
  text_files = []
  return text_files if !which("file") || !which("xargs")

  # file has known issues with reading files on other locales. Has
  # been fixed upstream for some time, but a sufficiently new enough
  # file with that fix is only available in macOS Sierra.
  # https://bugs.gw.com/view.php?id=292
  # TODO: remove custom logic as we're now not supporting pre-Sierra.
  with_custom_locale("C") do
    # First pass: walk the keg, pulling out text executables directly and
    # filtering everything that cannot be a text file.
    files = Set.new path.find.reject { |pn|
      next true if pn.symlink?
      next true if pn.directory?
      next false if pn.basename.to_s == "orig-prefix.txt" # for python virtualenvs
      next true if pn == self/".brew/#{name}.rb"

      require "metafiles"
      next true if Metafiles::EXTENSIONS.include?(pn.extname)

      if pn.text_executable?
        text_files << pn
        next true
      end
      false
    }
    # Second pass: classify the remaining candidates in one batched
    # null-delimited `file` invocation.
    output, _status = Open3.capture2("xargs -0 file --no-dereference --print0",
                                     stdin_data: files.to_a.join("\0"))
    # `file` output sometimes contains data from the file, which may include
    # invalid UTF-8 entities, so tell Ruby this is just a bytestring
    output.force_encoding(Encoding::ASCII_8BIT)
    output.each_line do |line|
      path, info = line.split("\0", 2)
      # `file` sometimes prints more than one line of output per file;
      # subsequent lines do not contain a null-byte separator, so `info`
      # will be `nil` for those lines
      next unless info
      next unless info.include?("text")

      path = Pathname.new(path)
      next unless files.include?(path)

      text_files << path
    end
  end
  text_files
end
sig { returns(T::Array[Pathname]) }
# All regular (non-symlink) libtool files in the keg, identified by extension.
def libtool_files
  path.find.select do |pn|
    !pn.symlink? && !pn.directory? && Keg::LIBTOOL_EXTENSIONS.include?(pn.extname)
  end
end
sig { returns(T::Array[Pathname]) }
# Every symbolic link underneath the keg.
def symlink_files
  path.find.select(&:symlink?)
end
sig {
  params(file: Pathname, string: String, ignores: T::Array[Regexp], linked_libraries: T::Array[Pathname],
         formula_and_runtime_deps_names: T.nilable(T::Array[String])).returns(T::Array[[String, String]])
}
# Scan `file` with strings(1) for occurrences of `string`, returning
# [matched_text, hex_offset] pairs. Colon-separated path lists are checked
# item by item; matches covered by `ignores`, already found via otool
# (`linked_libraries`), pointing at nonexistent files, or belonging to kegs
# outside the formula's runtime deps are skipped.
def self.text_matches_in_file(file, string, ignores, linked_libraries, formula_and_runtime_deps_names)
  text_matches = []
  path_regex = Relocation.path_to_regex(string)
  # `strings -t x` prefixes each printable string with its hex file offset.
  Utils.popen_read("strings", "-t", "x", "-", file.to_s) do |io|
    until io.eof?
      str = io.readline.chomp
      next if ignores.any? { |i| str.match?(i) }
      next unless str.match? path_regex

      offset, match = str.split(" ", 2)

      # Some binaries contain strings with lists of files
      # e.g. `/usr/local/lib/foo:/usr/local/share/foo:/usr/lib/foo`
      # Each item in the list should be checked separately
      match.split(":").each do |sub_match|
        # Not all items in the list may be matches
        next unless sub_match.match? path_regex
        next if linked_libraries.include? sub_match # Don't bother reporting a string if it was found by otool

        # Do not report matches to files that do not exist.
        next unless File.exist? sub_match

        # Do not report matches to build dependencies.
        if formula_and_runtime_deps_names.present?
          begin
            keg_name = Keg.for(Pathname.new(sub_match)).name
            next unless formula_and_runtime_deps_names.include? keg_name
          rescue NotAKegError
            # Paths outside any keg are still reported below.
            nil
          end
        end

        # Deduplicate: report each file offset at most once.
        text_matches << [match, offset] unless text_matches.any? { |text| text.last == offset }
      end
    end
  end
  text_matches
end
sig { params(_file: Pathname, _string: String).returns(T::Array[Pathname]) }
# Generic stub: linked-library inspection is platform-specific and overridden
# in extend/os/keg_relocate; the default reports none.
def self.file_linked_libraries(_file, _string)
  []
end
private
sig { returns(T::Boolean) }
# Whether the newer selective /usr/local relocation scheme applies to this
# keg. Only relevant when the prefix is exactly /usr/local; unknown formulae
# and tapless formulae default to the new behaviour.
def new_usr_local_relocation?
  return false unless HOMEBREW_PREFIX.to_s == "/usr/local"

  formula = begin
    Formula[name]
  rescue FormulaUnavailableError
    nil
  end
  tap = formula&.tap
  return true if tap.nil?

  !tap.disabled_new_usr_local_relocation_formulae.include?(name)
end
end
require "extend/os/keg_relocate"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/resource_auditor.rb | Library/Homebrew/resource_auditor.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "utils/svn"
module Homebrew
# Auditor for checking common violations in {Resource}s.
class ResourceAuditor
include Utils::Curl
attr_reader :name, :version, :checksum, :url, :mirrors, :using, :specs, :owner, :spec_name, :problems
# Capture the resource's attributes plus audit configuration.
# @param resource the Resource under audit
# @param spec_name :stable or :head
# @param options audit flags (:online, :strict, :only, :except, :core_tap,
#   :use_homebrew_curl)
def initialize(resource, spec_name, options = {})
  @name = resource.name
  @version = resource.version
  @checksum = resource.checksum
  @url = resource.url
  @mirrors = resource.mirrors
  @using = resource.using
  @specs = resource.specs
  @owner = resource.owner
  @spec_name = spec_name
  @online = options[:online]
  @strict = options[:strict]
  @only = options[:only]
  @except = options[:except]
  @core_tap = options[:core_tap]
  @use_homebrew_curl = options[:use_homebrew_curl]
  @problems = []
end
# Run every `audit_*` method on this auditor, honouring the only/except
# filters (which match the audit name without its `audit_` prefix).
# Returns self so callers can chain `.problems`.
def audit
  audit_method_names = methods.map(&:to_s).grep(/^audit_/)
  audit_method_names.each do |method_name|
    audit_name = method_name.delete_prefix("audit_")
    next if @only&.exclude?(audit_name)
    next if @except&.include?(audit_name)

    send(method_name)
  end
  self
end
# Check the resource's version: it must exist, must be a valid OCI tag for
# formulae bottled via GitHub Packages, and must not redundantly restate the
# version already derivable from the URL.
def audit_version
  if version.nil?
    problem "Missing version"
  elsif owner.is_a?(Formula) && !version.to_s.match?(GitHubPackages::VALID_OCI_TAG_REGEX) &&
        (owner.core_formula? ||
        (owner.bottle_defined? && GitHubPackages::URL_REGEX.match?(owner.bottle_specification.root_url)))
    problem "`version #{version}` does not match #{GitHubPackages::VALID_OCI_TAG_REGEX.source}"
  elsif !version.detected_from_url?
    version_text = version
    version_url = Version.detect(url, **specs)
    # An explicit `version` equal to what URL detection yields is redundant.
    if version_url.to_s == version_text.to_s && version.instance_of?(Version)
      problem "`version #{version_text}` is redundant with version scanned from URL"
    end
  end
end
# Check the resource's download strategy: git tags need a pinned revision,
# CVS URLs should not duplicate the module name, and an explicit `using:`
# matching what the URL already implies is redundant.
def audit_download_strategy
  url_strategy = DownloadStrategyDetector.detect(url)

  if (using == :git || url_strategy == GitDownloadStrategy) && specs[:tag] && !specs[:revision]
    problem "Git should specify `revision:` when a `tag:` is specified."
  end

  return unless using

  if using == :cvs
    mod = specs[:module]

    problem "Redundant `module:` value in URL" if mod == name

    # A CVS module may also be appended after the final colon of the URL.
    if url.match?(%r{:[^/]+$})
      mod = url.split(":").last

      if mod == name
        problem "Redundant CVS module appended to URL"
      else
        problem "Specify CVS module as `module: \"#{mod}\"` instead of appending it to the URL"
      end
    end
  end

  return if url_strategy != DownloadStrategyDetector.detect("", using)

  problem "Redundant `using:` value in URL"
end
# Require a checksum for non-head resources fetched via curl.
def audit_checksum
  return if spec_name == :head
  # This condition is non-invertible.
  # rubocop:disable Style/InvertibleUnlessCondition
  return unless DownloadStrategyDetector.detect(url, using) <= CurlDownloadStrategy
  # rubocop:enable Style/InvertibleUnlessCondition

  problem "Checksum is missing" if checksum.blank?
end
# Memoized list of "curl" plus the names of all of curl's recursive
# dependencies; empty when the curl formula is unavailable.
def self.curl_deps
  @curl_deps ||= begin
    ["curl"] + Formula["curl"].recursive_dependencies.map(&:name).uniq
  rescue FormulaUnavailableError
    []
  end
end
# For resources hosted on PyPI, check that the `resource` block's name
# matches the package name embedded in the download URL (wheel or sdist),
# comparing case-insensitively with `_`/`.` normalised to `-`.
def audit_resource_name_matches_pypi_package_name_in_url
  return unless url.match?(%r{^https?://files\.pythonhosted\.org/packages/})
  return if name == owner.name # Skip the top-level package name as we only care about `resource "foo"` blocks.

  if url.end_with? ".whl"
    # Wheel filenames are `<name>-<version>-...`; take the leading segment.
    path = URI(url).path
    return unless path.present?

    pypi_package_name, = File.basename(path).split("-", 2)
  else
    # sdist URLs end in `/<name>-<version>.tar.gz` or similar.
    url =~ %r{/(?<package_name>[^/]+)-}
    pypi_package_name = Regexp.last_match(:package_name).to_s
  end
  # PyPI treats `_`, `.` and `-` as equivalent separators.
  T.must(pypi_package_name).gsub!(/[_.]/, "-")

  return if name.casecmp(pypi_package_name).zero?

  problem "`resource` name should be '#{pypi_package_name}' to match the PyPI package name"
end
# Audit the resource URL and its mirrors: require an HTTP mirror for curl's
# own dependency chain, and (online only) verify each URL is reachable for
# its strategy (curl content check, git remote, or svn remote).
def audit_urls
  urls = [url] + mirrors

  curl_dep = self.class.curl_deps.include?(owner.name)
  # Ideally `ca-certificates` would not be excluded here, but sourcing a HTTP mirror was tricky.
  # Instead, we have logic elsewhere to pass `--insecure` to curl when downloading the certs.
  # TODO: try remove the OS/env conditional
  if Homebrew::SimulateSystem.simulating_or_running_on_macos? && spec_name == :stable &&
     owner.name != "ca-certificates" && curl_dep && !urls.find { |u| u.start_with?("http://") }
    problem "Should always include at least one HTTP mirror"
  end

  return unless @online

  urls.each do |url|
    # Mirrors are only checked in strict mode.
    next if !@strict && mirrors.include?(url)

    strategy = DownloadStrategyDetector.detect(url, using)
    if strategy <= CurlDownloadStrategy && !url.start_with?("file")
      raise HomebrewCurlDownloadStrategyError, url if
        strategy <= HomebrewCurlDownloadStrategy && !Formula["curl"].any_version_installed?

      # Skip ftp.gnu.org audit, upstream has asked us to reduce load.
      # See issue: https://github.com/Homebrew/brew/issues/20456
      next if url.match?(%r{^https?://ftp\.gnu\.org/.+})

      # Skip https audit for curl dependencies
      if !curl_dep && (http_content_problem = curl_check_http_content(
        url,
        "source URL",
        specs:,
        use_homebrew_curl: @use_homebrew_curl,
      ))
        problem http_content_problem
      end
    elsif strategy <= GitDownloadStrategy
      # Retry flaky git remotes up to HOMEBREW_CURL_RETRIES times.
      attempts = 0
      remote_exists = T.let(false, T::Boolean)
      while !remote_exists && attempts < Homebrew::EnvConfig.curl_retries.to_i
        remote_exists = Utils::Git.remote_exists?(url)
        attempts += 1
      end
      problem "The URL #{url} is not a valid Git URL" unless remote_exists
    elsif strategy <= SubversionDownloadStrategy
      next unless Utils::Svn.available?

      problem "The URL #{url} is not a valid SVN URL" unless Utils::Svn.remote_exists? url
    end
  end
end
# (Online only.) For head specs pointing at git URLs: require an explicit
# branch, and in the core tap require that a non-default branch is
# allowlisted rather than set ad hoc.
def audit_head_branch
  return unless @online
  return if spec_name != :head
  return if specs[:tag].present?
  return if specs[:revision].present?
  # Skip `resource` URLs as they use SHAs instead of branch specifiers.
  return if name != owner.name
  return unless url.end_with?(".git")
  return unless Utils::Git.remote_exists?(url)

  # Resolve the remote's default branch from its symbolic HEAD ref.
  detected_branch = Utils.popen_read("git", "ls-remote", "--symref", url, "HEAD")
                         .match(%r{ref: refs/heads/(.*?)\s+HEAD})&.to_a&.second
  if specs[:branch].blank?
    problem "Git `head` URL must specify a branch name"
    return
  end

  return unless @core_tap
  return if specs[:branch] == detected_branch

  problem "To use a non-default HEAD branch, add the formula to `head_non_default_branch_allowlist.json`."
end
# Record one audit problem message.
def problem(text)
  @problems << text
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/reinstall.rb | Library/Homebrew/reinstall.rb | # typed: strict
# frozen_string_literal: true
require "development_tools"
require "messages"
require "utils/output"
# Needed to handle circular require dependency.
# rubocop:disable Lint/EmptyClass
class FormulaInstaller; end
# rubocop:enable Lint/EmptyClass
module Homebrew
module Reinstall
extend Utils::Output::Mixin
# Value object bundling everything needed to reinstall one formula.
class InstallationContext < T::Struct
  const :formula_installer, ::FormulaInstaller
  # nil when the formula's opt prefix did not exist (nothing to back up).
  const :keg, T.nilable(Keg)
  const :formula, Formula
  const :options, Options
end
class << self
sig {
  params(
    formula: Formula, flags: T::Array[String], force_bottle: T::Boolean,
    build_from_source_formulae: T::Array[String], interactive: T::Boolean, keep_tmp: T::Boolean,
    debug_symbols: T::Boolean, force: T::Boolean, debug: T::Boolean, quiet: T::Boolean,
    verbose: T::Boolean, git: T::Boolean
  ).returns(InstallationContext)
}
# Prepare a reinstall: read the existing keg's install receipt (tab) so its
# link/dependency state can be preserved, move the keg aside as a backup,
# and construct the FormulaInstaller. NOTE: this has the side effect of
# unlinking and renaming the installed keg (see #backup).
def build_install_context(
  formula,
  flags:,
  force_bottle: false,
  build_from_source_formulae: [],
  interactive: false,
  keep_tmp: false,
  debug_symbols: false,
  force: false,
  debug: false,
  quiet: false,
  verbose: false,
  git: false
)
  if formula.opt_prefix.directory?
    keg = Keg.new(formula.opt_prefix.resolved_path)
    tab = keg.tab
    link_keg = keg.linked?
    installed_as_dependency = tab.installed_as_dependency == true
    installed_on_request = tab.installed_on_request == true
    build_bottle = tab.built_bottle?
    backup keg
  else
    # Nothing installed yet: behave like a fresh, user-requested install.
    link_keg = nil
    installed_as_dependency = false
    installed_on_request = true
    build_bottle = false
  end
  # Merge CLI flags with the previously-used options, constrained to the
  # options the formula actually declares.
  build_options = BuildOptions.new(Options.create(flags), formula.options)
  options = build_options.used_options
  options |= formula.build.used_options
  options &= formula.options

  formula_installer = FormulaInstaller.new(
    formula,
    **{
      options:,
      link_keg:,
      installed_as_dependency:,
      installed_on_request:,
      build_bottle:,
      force_bottle:,
      build_from_source_formulae:,
      git:,
      interactive:,
      keep_tmp:,
      debug_symbols:,
      force:,
      debug:,
      quiet:,
      verbose:,
    }.compact,
  )
  InstallationContext.new(formula_installer:, keg:, formula:, options:)
end
sig { params(install_context: InstallationContext).void }
# Perform the reinstall prepared by build_install_context. On success the
# backup keg is deleted; on any failure the backup is restored and the
# error re-raised.
def reinstall_formula(install_context)
  formula_installer = install_context.formula_installer
  keg = install_context.keg
  formula = install_context.formula
  options = install_context.options
  link_keg = keg&.linked?
  verbose = formula_installer.verbose?

  oh1 "Reinstalling #{Formatter.identifier(formula.full_name)} #{options.to_a.join " "}"

  formula_installer.install
  formula_installer.finish
rescue FormulaInstallationAlreadyAttemptedError
  # The installer already reported this; nothing further to do.
  nil
# Any other exceptions we want to restore the previous keg and report the error.
rescue Exception # rubocop:disable Lint/RescueException
  # Deliberately broad: even fatal errors must not lose the user's old keg.
  ignore_interrupts { restore_backup(keg, link_keg, verbose:) if keg }
  raise
else
  if keg
    backup_keg = backup_path(keg)
    begin
      FileUtils.rm_r(backup_keg) if backup_keg.exist?
    rescue Errno::EACCES, Errno::ENOTEMPTY
      odie <<~EOS
        Could not remove #{backup_keg.parent.basename} backup keg! Do so manually:
          sudo rm -rf #{backup_keg}
      EOS
    end
  end
end
sig { params(dry_run: T::Boolean).void }
# No-op on generic platforms; overridden by extend/os/reinstall (required at
# the bottom of this file) where pkgconf handling is needed.
def reinstall_pkgconf_if_needed!(dry_run: false)
  nil
end
private
sig { params(keg: Keg).void }
# Unlink the keg and move it aside to backup_path so a failed reinstall can
# be rolled back. Aborts with advice if the rename is not permitted.
def backup(keg)
  keg.unlink
  begin
    keg.rename backup_path(keg)
  rescue Errno::EACCES, Errno::ENOTEMPTY
    odie <<~EOS
      Could not rename #{keg.name} keg! Check/fix its permissions:
        sudo chown -R #{ENV.fetch("USER", "$(whoami)")} #{keg}
    EOS
  end
end
sig { params(keg: Keg, keg_was_linked: T::Boolean, verbose: T::Boolean).void }
# Move the backup keg back into place (removing any partial install first)
# and re-link it if it was linked before the reinstall started.
def restore_backup(keg, keg_was_linked, verbose:)
  path = backup_path(keg)

  return unless path.directory?

  FileUtils.rm_r(Pathname.new(keg)) if keg.exist?
  path.rename keg.to_s
  keg.link(verbose:) if keg_was_linked
end
sig { params(keg: Keg).returns(Pathname) }
# Location the keg is parked at while a reinstall is in flight.
def backup_path(keg)
  Pathname.new("#{keg}.reinstall")
end
end
end
end
require "extend/os/reinstall"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/autobump_constants.rb | Library/Homebrew/autobump_constants.rb | # typed: strict
# frozen_string_literal: true
# Autobump-disabling reasons set internally by Homebrew itself; these are not
# meant to be passed to `no_autobump!` in formula/cask definitions.
NO_AUTOBUMP_REASONS_INTERNAL = T.let({
  extract_plist: "livecheck uses `:extract_plist` strategy",
  latest_version: "`version` is set to `:latest`",
}.freeze, T::Hash[Symbol, String])

# The valid symbols for passing to `no_autobump!` in a `Formula` or `Cask`.
# @api public
NO_AUTOBUMP_REASONS_LIST = T.let({
  incompatible_version_format: "the package has a version format that can only be updated manually",
  bumped_by_upstream: "updates to the package are handled by the upstream developers",
  requires_manual_review: "a manual review of this package is required for inclusion in autobump",
}.merge(NO_AUTOBUMP_REASONS_INTERNAL).freeze, T::Hash[Symbol, String])
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/cachable.rb | Library/Homebrew/cachable.rb | # typed: strict
# frozen_string_literal: true
# Mixin providing a memoized per-receiver Hash usable as a general-purpose
# cache (typically extended onto a class or included into instances).
module Cachable
  sig { returns(T::Hash[T.untyped, T.untyped]) }
  def cache
    @cache ||= T.let({}, T.nilable(T::Hash[T.untyped, T.untyped]))
  end

  # Empty the cache in place; the same Hash object is reused afterwards.
  sig { void }
  def clear_cache
    cache.clear
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/completions.rb | Library/Homebrew/completions.rb | # typed: strict
# frozen_string_literal: true
require "utils/link"
require "settings"
require "erb"
module Homebrew
# Helper functions for generating shell completions.
module Completions
extend Utils::Output::Mixin
Variables = Struct.new(
:aliases,
:builtin_command_descriptions,
:completion_functions,
:function_mappings,
keyword_init: true,
)
COMPLETIONS_DIR = T.let((HOMEBREW_REPOSITORY/"completions").freeze, Pathname)
TEMPLATE_DIR = T.let((HOMEBREW_LIBRARY_PATH/"completions").freeze, Pathname)
SHELLS = %w[bash fish zsh].freeze
COMPLETIONS_EXCLUSION_LIST = %w[
instal
uninstal
update-report
].freeze
BASH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING = T.let({
formula: "__brew_complete_formulae",
installed_formula: "__brew_complete_installed_formulae",
outdated_formula: "__brew_complete_outdated_formulae",
cask: "__brew_complete_casks",
installed_cask: "__brew_complete_installed_casks",
outdated_cask: "__brew_complete_outdated_casks",
tap: "__brew_complete_tapped",
installed_tap: "__brew_complete_tapped",
command: "__brew_complete_commands",
diagnostic_check: '__brewcomp "${__HOMEBREW_DOCTOR_CHECKS=$(brew doctor --list-checks)}"',
file: "__brew_complete_files",
}.freeze, T::Hash[Symbol, String])
ZSH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING = T.let({
formula: "__brew_formulae",
installed_formula: "__brew_installed_formulae",
outdated_formula: "__brew_outdated_formulae",
cask: "__brew_casks",
installed_cask: "__brew_installed_casks",
outdated_cask: "__brew_outdated_casks",
tap: "__brew_any_tap",
installed_tap: "__brew_installed_taps",
command: "__brew_commands",
diagnostic_check: "__brew_diagnostic_checks",
file: "__brew_formulae_or_ruby_files",
}.freeze, T::Hash[Symbol, String])
FISH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING = T.let({
formula: "__fish_brew_suggest_formulae_all",
installed_formula: "__fish_brew_suggest_formulae_installed",
outdated_formula: "__fish_brew_suggest_formulae_outdated",
cask: "__fish_brew_suggest_casks_all",
installed_cask: "__fish_brew_suggest_casks_installed",
outdated_cask: "__fish_brew_suggest_casks_outdated",
tap: "__fish_brew_suggest_taps_installed",
installed_tap: "__fish_brew_suggest_taps_installed",
command: "__fish_brew_suggest_commands",
diagnostic_check: "__fish_brew_suggest_diagnostic_checks",
}.freeze, T::Hash[Symbol, String])
sig { void }
# Persist the opt-in setting and link completion files from every installed
# tap into place.
def self.link!
  Settings.write :linkcompletions, true
  Tap.installed.each do |tap|
    Utils::Link.link_completions tap.path, "brew completions link"
  end
end
sig { void }
# Persist the opt-out setting and unlink completions from non-official taps
# (official taps' completions stay linked).
def self.unlink!
  Settings.write :linkcompletions, false
  Tap.installed.each do |tap|
    next if tap.official?

    Utils::Link.unlink_completions tap.path
  end
end
sig { returns(T::Boolean) }
# Whether the user opted in to linking external tap completions.
def self.link_completions?
  Settings.read(:linkcompletions) == "true"
end
sig { returns(T::Boolean) }
# Whether any non-official installed tap ships completion files for any
# supported shell.
def self.completions_to_link?
  Tap.installed.any? do |tap|
    next false if tap.official?

    SHELLS.any? { |shell| (tap.path/"completions/#{shell}").exist? }
  end
end
sig { void }
# One-time notice that external tap completions exist but are unlinked;
# recorded in settings so it is never shown again.
def self.show_completions_message_if_needed
  return if Settings.read(:completionsmessageshown) == "true"
  return unless completions_to_link?

  ohai "Homebrew completions for external commands are unlinked by default!"
  puts <<~EOS
    To opt-in to automatically linking external tap shell completion files, run:
      brew completions link
    Then, follow the directions at #{Formatter.url("https://docs.brew.sh/Shell-Completion")}
  EOS

  Settings.write :completionsmessageshown, true
end
sig { void }
# Regenerate the bash/zsh/fish completion files for all built-in commands
# (and aliases) into COMPLETIONS_DIR.
def self.update_shell_completions!
  commands = Commands.commands(external: false, aliases: true).sort

  puts "Writing completions to #{COMPLETIONS_DIR}"

  (COMPLETIONS_DIR/"bash/brew").atomic_write generate_bash_completion_file(commands)
  (COMPLETIONS_DIR/"zsh/_brew").atomic_write generate_zsh_completion_file(commands)
  (COMPLETIONS_DIR/"fish/brew.fish").atomic_write generate_fish_completion_file(commands)
end
sig { params(command: String).returns(T::Boolean) }
# True when the command declares at least one completable option.
def self.command_gets_completions?(command)
  !command_options(command).empty?
end
sig { params(description: String, fish: T::Boolean).returns(String) }
# Escape single quotes for the target shell and strip angle-bracket markup,
# newlines, and a trailing period from an option/command description.
def self.format_description(description, fish: false)
  escaped = if fish
    description.gsub("'", "\\\\'")
  else
    description.gsub("'", "'\\\\''")
  end

  escaped.gsub(/[<>]/, "").tr("\n", " ").chomp(".")
end
sig { params(command: String).returns(T::Hash[String, String]) }
# Map of option flag => description for `command`, expanding the
# `--[no-]foo` shorthand into both `--foo` and `--no-foo`.
def self.command_options(command)
  Commands.command_options(command).to_a.each_with_object({}) do |option, opts|
    next if option.blank?

    flag = option.first
    desc = option.second
    if flag.start_with? "--[no-]"
      opts[flag.gsub("[no-]", "")] = desc
      opts[flag.sub("[no-]", "no-")] = desc
    else
      opts[flag] = desc
    end
  end
end
sig { params(command: String).returns(T.nilable(String)) }
# Render the bash completion function for one command: its option flags,
# plus named-argument completion helpers and literal subcommand words.
# Returns nil for commands with nothing to complete.
def self.generate_bash_subcommand_completion(command)
  return unless command_gets_completions? command

  named_completion_string = ""
  if (types = Commands.named_args_type(command))
    # Named-arg types are either literal strings (subcommands) or symbols
    # mapped to a bash helper function.
    named_args_strings, named_args_types = types.partition { |type| type.is_a? String }

    T.cast(named_args_types, T::Array[Symbol]).each do |type|
      next unless BASH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING.key? type

      named_completion_string += "\n  #{BASH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING[type]}"
    end

    named_completion_string += "\n  __brewcomp \"#{named_args_strings.join(" ")}\"" if named_args_strings.any?
  end

  <<~COMPLETION
    _brew_#{Commands.method_name command}() {
      local cur="${COMP_WORDS[COMP_CWORD]}"
      case "${cur}" in
        -*)
          __brewcomp "
          #{command_options(command).keys.sort.join("\n      ")}
          "
          return
          ;;
        *) ;;
      esac#{named_completion_string}
    }
  COMPLETION
end
sig { params(commands: T::Array[String]).returns(String) }
def self.generate_bash_completion_file(commands)
variables = Variables.new(
completion_functions: commands.filter_map do |command|
generate_bash_subcommand_completion command
end,
function_mappings: commands.filter_map do |command|
next unless command_gets_completions? command
"#{command}) _brew_#{Commands.method_name command} ;;"
end,
)
ERB.new((TEMPLATE_DIR/"bash.erb").read, trim_mode: ">").result(variables.instance_eval { binding })
end
sig { params(command: String).returns(T.nilable(String)) }
def self.generate_zsh_subcommand_completion(command)
return unless command_gets_completions? command
options = command_options(command)
args_options = []
if (types = Commands.named_args_type(command))
named_args_strings, named_args_types = types.partition { |type| type.is_a? String }
T.cast(named_args_types, T::Array[Symbol]).each do |type|
next unless ZSH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING.key? type
args_options << "- #{type}"
opt = "--#{type.to_s.gsub(/(installed|outdated)_/, "")}"
if options.key?(opt)
desc = options[opt]
if desc.blank?
args_options << opt
else
conflicts = generate_zsh_option_exclusions(command, opt)
args_options << "#{conflicts}#{opt}[#{format_description desc}]"
end
options.delete(opt)
end
args_options << "*:#{type}:#{ZSH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING[type]}"
end
if named_args_strings.any?
args_options << "- subcommand"
args_options << "*:subcommand:(#{named_args_strings.join(" ")})"
end
end
options = options.sort.map do |opt, desc|
next opt if desc.blank?
conflicts = generate_zsh_option_exclusions(command, opt)
"#{conflicts}#{opt}[#{format_description desc}]"
end
options += args_options
<<~COMPLETION
# brew #{command}
_brew_#{Commands.method_name command}() {
_arguments \\
#{options.map! { |opt| opt.start_with?("- ") ? opt : "'#{opt}'" }.join(" \\\n ")}
}
COMPLETION
end
sig { params(command: String, option: String).returns(String) }
# zsh exclusion group, e.g. "(-a --bar)", listing options that conflict with
# `option` for this command; empty string when there are no conflicts.
def self.generate_zsh_option_exclusions(command, option)
  bare_option = option.gsub(/^--?/, "")
  conflicts = Commands.option_conflicts(command, bare_option)
  return "" if conflicts.blank?

  flags = conflicts.map do |conflict|
    dashes = conflict.size > 1 ? "--" : "-"
    "#{dashes}#{conflict}"
  end
  "(#{flags.join(" ")})"
end
sig { params(commands: T::Array[String]).returns(String) }
def self.generate_zsh_completion_file(commands)
variables = Variables.new(
aliases: Commands::HOMEBREW_INTERNAL_COMMAND_ALIASES.filter_map do |alias_cmd, command|
alias_cmd = "'#{alias_cmd}'" if alias_cmd.start_with? "-"
command = "'#{command}'" if command.start_with? "-"
"#{alias_cmd} #{command}"
end,
builtin_command_descriptions: commands.filter_map do |command|
next if Commands::HOMEBREW_INTERNAL_COMMAND_ALIASES.key? command
description = Commands.command_description(command, short: true)
next if description.blank?
description = format_description description
"'#{command}:#{description}'"
end,
completion_functions: commands.filter_map do |command|
generate_zsh_subcommand_completion command
end,
)
ERB.new((TEMPLATE_DIR/"zsh.erb").read, trim_mode: ">").result(variables.instance_eval { binding })
end
sig { params(command: String).returns(T.nilable(String)) }
def self.generate_fish_subcommand_completion(command)
return unless command_gets_completions? command
command_description = format_description Commands.command_description(command, short: true).to_s, fish: true
lines = if COMPLETIONS_EXCLUSION_LIST.include?(command)
[]
else
["__fish_brew_complete_cmd '#{command}' '#{command_description}'"]
end
options = command_options(command).sort.filter_map do |opt, desc|
arg_line = "__fish_brew_complete_arg '#{command}' -l #{opt.sub(/^-+/, "")}"
arg_line += " -d '#{format_description desc, fish: true}'" if desc.present?
arg_line
end
subcommands = []
named_args = []
if (types = Commands.named_args_type(command))
named_args_strings, named_args_types = types.partition { |type| type.is_a? String }
T.cast(named_args_types, T::Array[Symbol]).each do |type|
next unless FISH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING.key? type
named_arg_function = FISH_NAMED_ARGS_COMPLETION_FUNCTION_MAPPING[type]
named_arg_prefix = "__fish_brew_complete_arg '#{command}; and not __fish_seen_argument"
formula_option = command_options(command).key?("--formula")
cask_option = command_options(command).key?("--cask")
named_args << if formula_option && cask_option && type.to_s.end_with?("formula")
"#{named_arg_prefix} -l cask -l casks' -a '(#{named_arg_function})'"
elsif formula_option && cask_option && type.to_s.end_with?("cask")
"#{named_arg_prefix} -l formula -l formulae' -a '(#{named_arg_function})'"
else
"__fish_brew_complete_arg '#{command}' -a '(#{named_arg_function})'"
end
end
named_args_strings.each do |subcommand|
subcommands << "__fish_brew_complete_sub_cmd '#{command}' '#{subcommand}'"
end
end
lines += subcommands + options + named_args
<<~COMPLETION
#{lines.join("\n").chomp}
COMPLETION
end
sig { params(commands: T::Array[String]).returns(String) }
def self.generate_fish_completion_file(commands)
variables = Variables.new(
completion_functions: commands.filter_map do |command|
generate_fish_subcommand_completion command
end,
)
ERB.new((TEMPLATE_DIR/"fish.erb").read, trim_mode: ">").result(variables.instance_eval { binding })
end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/ast_constants.rb | Library/Homebrew/ast_constants.rb | # typed: strict
# frozen_string_literal: true
require "macos_version"
FORMULA_COMPONENT_PRECEDENCE_LIST = T.let([
[{ name: :include, type: :method_call }],
[{ name: :desc, type: :method_call }],
[{ name: :homepage, type: :method_call }],
[{ name: :url, type: :method_call }],
[{ name: :mirror, type: :method_call }],
[{ name: :version, type: :method_call }],
[{ name: :sha256, type: :method_call }],
[{ name: :license, type: :method_call }],
[{ name: :revision, type: :method_call }],
[{ name: :version_scheme, type: :method_call }],
[{ name: :head, type: :method_call }],
[{ name: :stable, type: :block_call }],
[{ name: :livecheck, type: :block_call }],
[{ name: :no_autobump!, type: :method_call }],
[{ name: :bottle, type: :block_call }],
[{ name: :pour_bottle?, type: :block_call }],
[{ name: :head, type: :block_call }],
[{ name: :bottle, type: :method_call }],
[{ name: :keg_only, type: :method_call }],
[{ name: :option, type: :method_call }],
[{ name: :deprecated_option, type: :method_call }],
[{ name: :deprecate!, type: :method_call }],
[{ name: :disable!, type: :method_call }],
[{ name: :depends_on, type: :method_call }],
[{ name: :uses_from_macos, type: :method_call }],
[{ name: :on_macos, type: :block_call }],
*MacOSVersion::SYMBOLS.keys.map do |os_name|
[{ name: :"on_#{os_name}", type: :block_call }]
end,
[{ name: :on_system, type: :block_call }],
[{ name: :on_linux, type: :block_call }],
[{ name: :on_arm, type: :block_call }],
[{ name: :on_intel, type: :block_call }],
[{ name: :conflicts_with, type: :method_call }],
[{ name: :preserve_rpath, type: :method_call }],
[{ name: :skip_clean, type: :method_call }],
[{ name: :cxxstdlib_check, type: :method_call }],
[{ name: :link_overwrite, type: :method_call }],
[{ name: :fails_with, type: :method_call }, { name: :fails_with, type: :block_call }],
[{ name: :pypi_packages, type: :method_call }],
[{ name: :resource, type: :block_call }],
[{ name: :patch, type: :method_call }, { name: :patch, type: :block_call }],
[{ name: :needs, type: :method_call }],
[{ name: :allow_network_access!, type: :method_call }],
[{ name: :deny_network_access!, type: :method_call }],
[{ name: :install, type: :method_definition }],
[{ name: :post_install, type: :method_definition }],
[{ name: :caveats, type: :method_definition }],
[{ name: :plist_options, type: :method_call }, { name: :plist, type: :method_definition }],
[{ name: :test, type: :block_call }],
].freeze, T::Array[T::Array[{ name: Symbol, type: Symbol }]])
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/build.rb | Library/Homebrew/build.rb | # typed: strict
# frozen_string_literal: true
# This script is loaded by formula_installer as a separate instance.
# Thrown exceptions are propagated back to the parent process over a pipe
raise "#{__FILE__} must not be loaded via `require`." if $PROGRAM_NAME != __FILE__
old_trap = trap("INT") { exit! 130 }
require_relative "global"
require "build_options"
require "keg"
require "extend/ENV"
require "fcntl"
require "utils/socket"
require "cmd/install"
require "json/add/exception"
require "utils/output"
require "extend/pathname/write_mkpath_extension"
# A formula build.
class Build
include Utils::Output::Mixin
# The formula being built.
sig { returns(Formula) }
attr_reader :formula
# Expanded build-relevant dependencies (empty when --ignore-dependencies).
sig { returns(T::Array[Dependency]) }
attr_reader :deps
# Expanded build-relevant requirements (empty when --ignore-dependencies).
sig { returns(Requirements) }
attr_reader :reqs
# Parsed `brew install` arguments passed through from the parent process.
sig { returns(Homebrew::Cmd::InstallCmd::Args) }
attr_reader :args
sig { params(formula: Formula, options: Options, args: Homebrew::Cmd::InstallCmd::Args).void }
def initialize(formula, options, args:)
  @args = args
  @formula = formula
  @formula.build = BuildOptions.new(options, formula.options)
  # Start with empty dependency/requirement sets; they stay empty when
  # dependency resolution is being skipped.
  @deps = T.let([], T::Array[Dependency])
  @reqs = T.let(Requirements.new, Requirements)
  unless args.ignore_dependencies?
    @deps = expand_deps
    @reqs = expand_reqs
  end
end
sig { params(dependent: Formula).returns(BuildOptions) }
# Options in effect for a dependent: the union of the options requested for
# this run and those recorded in the dependent's install receipt (tab).
def effective_build_options_for(dependent)
  used = dependent.build.used_options | Tab.for_formula(dependent).used_options
  BuildOptions.new(used, dependent.options)
end
sig { returns(Requirements) }
# Expands the formula's recursive requirements, dropping those disabled by
# options, build-only requirements of other formulae and test requirements.
def expand_reqs
  formula.recursive_requirements do |dependent, req|
    opts = effective_build_options_for(dependent)
    prune = req.prune_from_option?(opts) ||
            req.prune_if_build_and_not_dependent?(dependent, formula) ||
            req.test?
    Requirement.prune if prune
  end
end
sig { returns(T::Array[Dependency]) }
# Expands the formula's recursive dependencies, pruning ones that don't
# apply to this build.
def expand_deps
  formula.recursive_dependencies do |dependent, dep|
    opts = effective_build_options_for(dependent)
    drop = dep.prune_from_option?(opts) ||
           dep.prune_if_build_and_not_dependent?(dependent, formula) ||
           (dep.test? && !dep.build?) ||
           dep.implicit?
    if drop
      Dependency.prune
    elsif dep.build?
      # Direct build-time dependencies are kept but not recursed into.
      Dependency.keep_but_prune_recursive_deps
    end
  end
end
sig { void }
# Runs the formula build: repairs dependency opt links, prepares the build
# environment (superenv or standard env), stages and patches the source,
# then either drops into an interactive shell or runs Formula#install.
def install
  formula_deps = deps.map(&:to_formula)
  keg_only_deps = formula_deps.select(&:keg_only?)
  run_time_deps = deps.reject(&:build?).map(&:to_formula)

  # Recreate missing opt symlinks before the build relies on them.
  formula_deps.each do |dep|
    fixopt(dep) unless dep.opt_prefix.directory?
  end

  ENV.activate_extensions!(env: args.env)

  if superenv?(args.env)
    superenv = ENV
    # superenv needs the dependency lists before the environment is set up.
    superenv.keg_only_deps = keg_only_deps
    superenv.deps = formula_deps
    superenv.run_time_deps = run_time_deps
    ENV.setup_build_environment(
      formula:,
      cc: args.cc,
      build_bottle: args.build_bottle?,
      bottle_arch: args.bottle_arch,
      debug_symbols: args.debug_symbols?,
    )
    reqs.each do |req|
      req.modify_build_environment(
        env: args.env, cc: args.cc, build_bottle: args.build_bottle?, bottle_arch: args.bottle_arch,
      )
    end
  else
    ENV.setup_build_environment(
      formula:,
      cc: args.cc,
      build_bottle: args.build_bottle?,
      bottle_arch: args.bottle_arch,
      debug_symbols: args.debug_symbols?,
    )
    reqs.each do |req|
      req.modify_build_environment(
        env: args.env, cc: args.cc, build_bottle: args.build_bottle?, bottle_arch: args.bottle_arch,
      )
    end
    # Without superenv, keg-only dependencies must be added to the search
    # paths by hand.
    keg_only_deps.each do |dep|
      ENV.prepend_path "PATH", dep.opt_bin.to_s
      ENV.prepend_path "PKG_CONFIG_PATH", "#{dep.opt_lib}/pkgconfig"
      ENV.prepend_path "PKG_CONFIG_PATH", "#{dep.opt_share}/pkgconfig"
      ENV.prepend_path "ACLOCAL_PATH", "#{dep.opt_share}/aclocal"
      ENV.prepend_path "CMAKE_PREFIX_PATH", dep.opt_prefix.to_s
      ENV.prepend "LDFLAGS", "-L#{dep.opt_lib}" if dep.opt_lib.directory?
      ENV.prepend "CPPFLAGS", "-I#{dep.opt_include}" if dep.opt_include.directory?
    end
  end

  # Force temporary files into Homebrew's own temp directory.
  new_env = {
    "TMPDIR" => HOMEBREW_TEMP.to_s,
    "TEMP" => HOMEBREW_TEMP.to_s,
    "TMP" => HOMEBREW_TEMP.to_s,
  }

  with_env(new_env) do
    if args.debug? && !Homebrew::EnvConfig.disable_debrew?
      require "debrew"
      formula.extend(Debrew::Formula)
    end

    formula.update_head_version

    formula.brew(
      fetch: false,
      keep_tmp: args.keep_tmp?,
      debug_symbols: args.debug_symbols?,
      interactive: args.interactive?,
    ) do
      with_env(
        # For head builds, HOMEBREW_FORMULA_PREFIX should include the commit,
        # which is not known until after the formula has been staged.
        HOMEBREW_FORMULA_PREFIX: formula.prefix,
        # https://reproducible-builds.org/docs/build-path/
        HOMEBREW_FORMULA_BUILDPATH: formula.buildpath,
        # https://reproducible-builds.org/docs/source-date-epoch/
        SOURCE_DATE_EPOCH: formula.source_modified_time.to_i.to_s,
        # Avoid make getting confused about timestamps.
        # https://github.com/Homebrew/homebrew-core/pull/87470
        TZ: "UTC0",
      ) do
        if args.git?
          # Apply non-data patches, snapshot into git, then data patches, so
          # `git diff` later only shows the user's own changes.
          formula.selective_patch(is_data: false)
          system "git", "init"
          system "git", "add", "-A"
          formula.selective_patch(is_data: true)
        else
          formula.patch
        end

        if args.interactive?
          ohai "Entering interactive mode..."
          puts <<~EOS
            Type `exit` to return and finalize the installation.
            Install to this prefix: #{formula.prefix}
          EOS

          if args.git?
            puts <<~EOS
              This directory is now a Git repository. Make your changes and then use:
              git diff | pbcopy
              to copy the diff to the clipboard.
            EOS
          end

          interactive_shell(formula)
        else
          formula.prefix.mkpath
          formula.logs.mkpath

          # Record the options used so they are visible in the build logs.
          (formula.logs/"00.options.out").write \
            "#{formula.full_name} #{formula.build.used_options.sort.join(" ")}".strip

          Pathname.activate_extensions!
          formula.install

          stdlibs = detect_stdlibs
          tab = Tab.create(formula, ENV.compiler, stdlibs.first)
          tab.write

          # Find and link metafiles
          formula.prefix.install_metafiles T.must(formula.buildpath)
          if formula.libexec.exist?
            require "metafiles"
            no_metafiles = formula.prefix.children.none? { |p| p.file? && Metafiles.copy?(p.basename.to_s) }
            formula.prefix.install_metafiles formula.libexec if no_metafiles
          end

          normalize_pod2man_outputs!(formula)
        end
      end
    end
  end
end
sig { returns(T::Array[Symbol]) }
def detect_stdlibs
  # The stdlib recorded in the install receipt is used during dependency
  # compatibility checks, so only the stdlib that libraries link against
  # matters — executables are skipped.
  Keg.new(formula.prefix).detect_cxx_stdlibs(skip_executables: true)
end
sig { params(formula: Formula).void }
# Recreates a missing opt symlink for an installed dependency.
def fixopt(formula)
  linked = formula.linked_keg
  keg_path = if linked.directory? && linked.symlink?
    linked.resolved_path
  elsif formula.prefix.directory?
    formula.prefix
  elsif (children = formula.rack.children.presence) && children.size == 1 &&
        (only_child = children.first.presence) && only_child.directory?
    only_child
  else
    raise
  end
  Keg.new(keg_path).optlink(verbose: args.verbose?)
rescue
  # Any failure above means the dependency's keg is unusable.
  raise "#{formula.opt_prefix} not present or broken\nPlease reinstall #{formula.full_name}. Sorry :("
end
sig { params(formula: Formula).void }
# Normalizes pod2man-generated manpages in the freshly-installed keg.
def normalize_pod2man_outputs!(formula)
  Keg.new(formula.prefix).normalize_pod2man_outputs!
end
end
begin
  # Undocumented opt-out for internal use.
  # We need to allow formulae from paths here due to how we pass them through.
  ENV["HOMEBREW_INTERNAL_ALLOW_PACKAGES_FROM_PATHS"] = "1"
  args = Homebrew::Cmd::InstallCmd.new.args
  Context.current = args.context

  # The parent process hands us one end of a socket via HOMEBREW_ERROR_PIPE;
  # exceptions are serialized back through it in the rescue below.
  error_pipe = Utils::UNIXSocketExt.open(ENV.fetch("HOMEBREW_ERROR_PIPE"), &:recv_io)
  error_pipe.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)

  # Restore the INT handler replaced at the top of this script.
  trap("INT", old_trap)

  formula = args.named.to_formulae.fetch(0)
  options = Options.create(args.flags_only)
  build = Build.new(formula, options, args:)
  build.install
# Any exception means the build did not complete.
# The `case` for what to do per-exception class is further down.
rescue Exception => e # rubocop:disable Lint/RescueException
  error_hash = JSON.parse e.to_json

  # Special case: need to recreate BuildErrors in full
  # for proper analytics reporting and error messages.
  # BuildErrors are specific to build processes and not other
  # children, which is why we create the necessary state here
  # and not in Utils.safe_fork.
  case e
  when BuildError
    error_hash["cmd"] = e.cmd
    error_hash["args"] = e.args
    error_hash["env"] = e.env
  when ErrorDuringExecution
    error_hash["cmd"] = e.cmd
    # Process::Status cannot be serialized directly; keep only the fields
    # the parent needs.
    error_hash["status"] = if e.status.is_a?(Process::Status)
      {
        exitstatus: e.status.exitstatus,
        termsig: e.status.termsig,
      }
    else
      e.status
    end
    error_hash["output"] = e.output
  end

  error_pipe&.puts error_hash.to_json
  error_pipe&.close

  exit! 1
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/description_cache_store.rb | Library/Homebrew/description_cache_store.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "cache_store"
#
# {DescriptionCacheStore} provides methods to fetch and mutate formula descriptions used
# by the `brew desc` and `brew search` commands.
#
class DescriptionCacheStore < CacheStore
  # Insert or overwrite the cached description for a formula.
  #
  # @param name [String] the formula name used as the cache key
  # @param description [String] the description text to store
  # @return [nil]
  def update!(name, description)
    database.set(name, description)
  end

  # Drop a formula's description from the cache.
  #
  # @param name [String] the formula name to remove
  # @return [nil]
  def delete!(name)
    database.delete(name)
  end

  # Fill the cache with every known formula when it is currently empty.
  #
  # @return [nil]
  def populate_if_empty!(eval_all: Homebrew::EnvConfig.eval_all?)
    return if !eval_all || !database.empty?

    Formula.all(eval_all:).each do |formula|
      update!(formula.full_name, formula.desc)
    end
  end

  # Apply an update report to the cache.
  #
  # @param report [Report] an update report generated by cmd/update.rb
  # @return [nil]
  def update_from_report!(report)
    unless Homebrew::EnvConfig.eval_all?
      database.clear!
      return
    end
    return populate_if_empty! if database.empty?
    return if report.empty?

    renamed = report.select_formula_or_cask(:R)
    added_or_modified = report.select_formula_or_cask(:A) +
                        report.select_formula_or_cask(:M) +
                        renamed.map(&:last)
    removed = report.select_formula_or_cask(:D) + renamed.map(&:first)

    update_from_formula_names!(added_or_modified)
    delete_from_formula_names!(removed)
  end

  # Refresh the cached descriptions for the given formula names.
  #
  # @param formula_names [Array] the formulae to update
  # @return [nil]
  def update_from_formula_names!(formula_names)
    unless Homebrew::EnvConfig.eval_all?
      database.clear!
      return
    end
    return populate_if_empty! if database.empty?

    formula_names.each do |name|
      update!(name, Formula[name].desc)
    rescue FormulaUnavailableError, *FormulaVersions::IGNORED_EXCEPTIONS
      # A formula that can no longer be loaded should not stay cached.
      delete!(name)
    end
  end

  # Remove the given formula names from the cache.
  #
  # @param formula_names [Array] the formulae to delete
  # @return [nil]
  def delete_from_formula_names!(formula_names)
    return if database.empty?

    formula_names.each { |name| delete!(name) }
  end
  alias delete_from_cask_tokens! delete_from_formula_names!

  # `select` from the underlying database.
  def select(&block)
    database.select(&block)
  end
end
#
# {CaskDescriptionCacheStore} provides methods to fetch and mutate cask descriptions used
# by the `brew desc` and `brew search` commands.
#
class CaskDescriptionCacheStore < DescriptionCacheStore
  # Fill the cache with every known cask when it is currently empty.
  #
  # @return [nil]
  def populate_if_empty!(eval_all: Homebrew::EnvConfig.eval_all?)
    return if !eval_all || !database.empty?

    Cask::Cask.all(eval_all:).each do |cask|
      update!(cask.full_name, [cask.name.join(", "), cask.desc.presence])
    end
  end

  # Apply an update report to the cache.
  #
  # @param report [Report] an update report generated by cmd/update.rb
  # @return [nil]
  def update_from_report!(report)
    unless Homebrew::EnvConfig.eval_all?
      database.clear!
      return
    end
    return populate_if_empty! if database.empty?
    return if report.empty?

    added_or_modified = report.select_formula_or_cask(:AC) +
                        report.select_formula_or_cask(:MC)
    update_from_cask_tokens!(added_or_modified)
    delete_from_cask_tokens!(report.select_formula_or_cask(:DC))
  end

  # Refresh the cached name/description pairs for the given cask tokens.
  #
  # @param cask_tokens [Array] the casks to update
  # @return [nil]
  def update_from_cask_tokens!(cask_tokens)
    unless Homebrew::EnvConfig.eval_all?
      database.clear!
      return
    end
    return populate_if_empty! if database.empty?

    cask_tokens.each do |token|
      c = Cask::CaskLoader.load(token)
      update!(c.full_name, [c.name.join(", "), c.desc.presence])
    rescue Cask::CaskUnavailableError, *FormulaVersions::IGNORED_EXCEPTIONS
      # `c` is nil when the token itself failed to load; there is then no
      # known full name to delete.
      delete!(c.full_name) if c.present?
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/cleanup.rb | Library/Homebrew/cleanup.rb | # typed: strict
# frozen_string_literal: true
require "utils/bottles"
require "utils/output"
require "formula"
require "cask/cask_loader"
module Homebrew
# Helper class for cleaning up the Homebrew cache.
class Cleanup
extend Utils::Output::Mixin
include Utils::Output::Mixin
# Number of days after which a periodic (full) cleanup is due again.
CLEANUP_DEFAULT_DAYS = T.let(Homebrew::EnvConfig.cleanup_periodic_full_days.to_i.freeze, Integer)
# Cached GitHub Actions artifacts are pruned after this many days.
GH_ACTIONS_ARTIFACT_CLEANUP_DAYS = 3
private_constant :CLEANUP_DEFAULT_DAYS, :GH_ACTIONS_ARTIFACT_CLEANUP_DAYS
class << self
sig { params(pathname: Pathname).returns(T::Boolean) }
# An interrupted download leaves a ".incomplete" suffix behind.
def incomplete?(pathname) = pathname.extname.end_with?(".incomplete")
sig { params(pathname: Pathname).returns(T::Boolean) }
# Per-ecosystem build caches (cargo, go, npm, ...) are nested directories
# that are always safe to remove wholesale.
def nested_cache?(pathname)
  return false unless pathname.directory?

  %w[
    cargo_cache
    go_cache
    go_mod_cache
    glide_home
    java_cache
    npm_cache
    pip_cache
    gclient_cache
  ].include?(pathname.basename.to_s)
end
sig { params(pathname: Pathname).returns(T::Boolean) }
def go_cache_directory?(pathname)
  # Go makes its cache contents read-only to ensure cache integrity,
  # which makes sense but is something we need to undo for cleanup.
  return false unless pathname.directory?

  ["go_cache", "go_mod_cache"].include?(pathname.basename.to_s)
end
sig { params(pathname: Pathname, days: T.nilable(Integer)).returns(T::Boolean) }
# Whether a path is old enough (by both mtime and ctime) to prune, or is a
# broken symlink. `days: nil` disables pruning; `days: 0` prunes everything.
def prune?(pathname, days)
  return false if days.nil?
  return true if days.zero?
  # Broken symlinks are always pruned.
  return true if pathname.symlink? && !pathname.exist?

  cutoff = (DateTime.now - days).to_time
  pathname.mtime < cutoff && pathname.ctime < cutoff
end
sig { params(entry: { path: Pathname, type: T.nilable(Symbol) }, scrub: T::Boolean).returns(T::Boolean) }
# Whether a cache entry no longer corresponds to anything we would keep;
# dispatches to the type-specific staleness check.
def stale?(entry, scrub: false)
  path = entry[:path]
  return false unless path.resolved_path.file?

  case entry[:type]
  when :api_source then stale_api_source?(path, scrub)
  when :cask then stale_cask?(path, scrub)
  when :gh_actions_artifact then stale_gh_actions_artifact?(path, scrub)
  else stale_formula?(path, scrub)
  end
end
private
sig { params(pathname: Pathname, scrub: T::Boolean).returns(T::Boolean) }
# Artifacts are transient: removed immediately when scrubbing, otherwise
# once they exceed GH_ACTIONS_ARTIFACT_CLEANUP_DAYS.
def stale_gh_actions_artifact?(pathname, scrub) = scrub || prune?(pathname, GH_ACTIONS_ARTIFACT_CLEANUP_DAYS)
sig { params(pathname: Pathname, scrub: T::Boolean).returns(T::Boolean) }
# A cached API source file is stale when its package no longer loads or the
# tap has moved past the git head it was cached from.
def stale_api_source?(pathname, scrub)
  return true if scrub

  # Cache layout: `<org>/<repo>/<git_head>/<type>/<token>.rb`.
  org, repo, cached_head, kind, file = pathname.each_filename.to_a.last(5)
  name = "#{org}/#{repo}/#{File.basename(T.must(file), ".rb")}"

  package = if kind == "Cask"
    begin
      Cask::CaskLoader.load(name)
    rescue Cask::CaskError
      nil
    end
  else
    begin
      Formulary.factory(name)
    rescue FormulaUnavailableError
      nil
    end
  end
  return true if package.nil?

  package.tap_git_head != cached_head
end
sig { params(formula: Formula).returns(T::Set[String]) }
# Versions of `formula` whose kegs must be kept, memoized per formula name.
def excluded_versions_from_cleanup(formula)
  @excluded_versions_from_cleanup ||= T.let({}, T.nilable(T::Hash[String, T::Set[String]]))
  @excluded_versions_from_cleanup[formula.name] ||= begin
    removable = formula.eligible_kegs_for_cleanup(quiet: true)
    kept_kegs = formula.installed_kegs - removable
    Set.new(kept_kegs.map { |keg| keg.version.to_s })
  end
end
sig { params(pathname: Pathname, scrub: T::Boolean).returns(T::Boolean) }
# Whether a cached formula download, bottle or bottle manifest no longer
# matches any version we would still want to keep.
def stale_formula?(pathname, scrub)
  return false unless HOMEBREW_CELLAR.directory?

  # FIX: this previously called bare `to_s` (i.e. on the Cleanup class, a
  # leftover from when this was a Pathname refinement), which can never
  # match the bottle regex — so bottle versions were never resolved here
  # and only the basename regexes below were used. Match the pathname.
  version = if HOMEBREW_BOTTLES_EXTNAME_REGEX.match?(pathname.to_s)
    begin
      Utils::Bottles.resolve_version(pathname).to_s
    rescue
      nil
    end
  end

  basename_str = pathname.basename.to_s

  # Fall back to parsing the version out of `<name>--<version><ext>`.
  version ||= basename_str[/\A.*(?:--.*?)*--(.*?)#{Regexp.escape(pathname.extname)}\Z/, 1]
  version ||= basename_str[/\A.*--?(.*?)#{Regexp.escape(pathname.extname)}\Z/, 1]

  return false if version.blank?

  version = Version.new(version)

  unless (formula_name = basename_str[/\A(.*?)(?:--.*?)*--?(?:#{Regexp.escape(version.to_s)})/, 1])
    return false
  end

  formula = begin
    Formulary.from_rack(HOMEBREW_CELLAR/formula_name)
  rescue FormulaUnavailableError, TapFormulaAmbiguityError
    nil
  end

  formula_excluded_versions_from_cleanup = nil

  # Bottle manifests are cached as `<formula>_bottle_manifest--<version>`.
  if formula.blank? && formula_name.delete_suffix!("_bottle_manifest")
    formula = begin
      Formulary.from_rack(HOMEBREW_CELLAR/formula_name)
    rescue FormulaUnavailableError, TapFormulaAmbiguityError
      nil
    end
    return false if formula.blank?

    formula_excluded_versions_from_cleanup = excluded_versions_from_cleanup(formula)
    return false if formula_excluded_versions_from_cleanup.include?(version.to_s)

    if pathname.to_s.include?("_bottle_manifest")
      # Manifest versions may carry a trailing rebuild number.
      excluded_version = version.to_s
      excluded_version.sub!(/-\d+$/, "")
      return false if formula_excluded_versions_from_cleanup.include?(excluded_version)
    end

    # We can't determine an installed rebuild and parsing manifest version cannot be reliably done.
    return false unless formula.latest_version_installed?
    return true if (bottle = formula.bottle).blank?

    return version != GitHubPackages.version_rebuild(bottle.resource.version, bottle.rebuild)
  end

  return false if formula.blank?

  resource_name = basename_str[/\A.*?--(.*?)--?(?:#{Regexp.escape(version.to_s)})/, 1]
  stable = formula.stable

  if resource_name == "patch"
    # For patches the "version" is actually the external patch's checksum.
    patch_hashes = stable&.patches&.filter_map { T.cast(it, ExternalPatch).resource.version if it.external? }
    return true unless patch_hashes&.include?(Checksum.new(version.to_s))
  elsif resource_name && stable && (resource_version = stable.resources[resource_name]&.version)
    return true if resource_version != version
  elsif (formula_excluded_versions_from_cleanup ||= excluded_versions_from_cleanup(formula).presence) &&
        formula_excluded_versions_from_cleanup.include?(version.to_s)
    return false
  elsif (formula.latest_version_installed? && formula.pkg_version.to_s != version) ||
        formula.pkg_version.to_s > version
    return true
  end

  return true if scrub && !formula.latest_version_installed?

  return true if Utils::Bottles.file_outdated?(formula, pathname)

  false
end
sig { params(pathname: Pathname, scrub: T::Boolean).returns(T::Boolean) }
# Whether a cached cask download no longer matches the cask's current version.
def stale_cask?(pathname, scrub)
  basename = pathname.basename
  token = basename.to_s[/\A(.*?)--/, 1]
  return false if token.nil?

  cask = begin
    Cask::CaskLoader.load(token, warn: false)
  rescue Cask::CaskError
    nil
  end
  return false if cask.blank?

  # The filename must embed the cask's current version; otherwise the
  # download belongs to an outdated version.
  return true unless basename.to_s.match?(/\A#{Regexp.escape(token)}--#{Regexp.escape(cask.version)}\b/)
  return true if scrub && cask.installed_version != cask.version

  if cask.version.latest?
    # ":latest" casks have no comparable version, so fall back to file age.
    cutoff = (DateTime.now - CLEANUP_DEFAULT_DAYS).to_time
    return pathname.mtime < cutoff && pathname.ctime < cutoff
  end

  false
end
end
# Stamp file whose mtime records when the last periodic cleanup ran.
PERIODIC_CLEAN_FILE = T.let((HOMEBREW_CACHE/".cleaned").freeze, Pathname)
# Named formulae/casks to clean (empty means "clean everything").
sig { returns(T::Array[String]) }
attr_reader :args
# Age threshold (in days) used for pruning.
sig { returns(Integer) }
attr_reader :days
# Root of the download cache being cleaned.
sig { returns(Pathname) }
attr_reader :cache
# Total bytes identified for removal so far.
sig { returns(Integer) }
attr_reader :disk_cleanup_size
sig {
  params(args: String, dry_run: T::Boolean, scrub: T::Boolean, days: T.nilable(Integer), cache: Pathname).void
}
def initialize(*args, dry_run: false, scrub: false, days: nil, cache: HOMEBREW_CACHE)
  @args = args
  @cache = cache
  @dry_run = dry_run
  @scrub = scrub
  # An explicit `days` switches on prune-only mode; otherwise fall back to
  # the configured maximum cache age.
  @prune = T.let(days.present?, T::Boolean)
  @days = T.let(days || Homebrew::EnvConfig.cleanup_max_age_days.to_i, Integer)
  @disk_cleanup_size = T.let(0, Integer)
  @cleaned_up_paths = T.let(Set.new, T::Set[Pathname])
end
sig { returns(T::Boolean) }
# Whether paths are only reported rather than removed.
def dry_run?
  @dry_run
end

sig { returns(T::Boolean) }
# Whether only age-based pruning was requested (an explicit `days`).
def prune?
  @prune
end

sig { returns(T::Boolean) }
# Whether caches for uninstalled/latest-version software are also removed.
def scrub?
  @scrub
end
sig { params(formula: Formula, dry_run: T::Boolean).void }
# Runs a post-install cleanup for a single formula unless disabled or the
# formula is explicitly excluded.
def self.install_formula_clean!(formula, dry_run: false)
  return if Homebrew::EnvConfig.no_install_cleanup?
  return if !formula.latest_version_installed? || skip_clean_formula?(formula)

  message = if dry_run
    "Would run `brew cleanup #{formula}`"
  else
    "Running `brew cleanup #{formula}`..."
  end
  ohai message
  puts_no_install_cleanup_disable_message_if_not_already!
  return if dry_run

  Cleanup.new.cleanup_formula(formula)
end
sig { void }
# Prints hints on how to disable automatic cleanup (unless hints are off).
def self.puts_no_install_cleanup_disable_message
  return if Homebrew::EnvConfig.no_env_hints? || Homebrew::EnvConfig.no_install_cleanup?

  puts "Disable this behaviour by setting `HOMEBREW_NO_INSTALL_CLEANUP=1`."
  puts "Hide these hints with `HOMEBREW_NO_ENV_HINTS=1` (see `man brew`)."
end
sig { void }
# Prints the disable hint at most once per process.
def self.puts_no_install_cleanup_disable_message_if_not_already!
  return if @puts_no_install_cleanup_disable_message_if_not_already

  puts_no_install_cleanup_disable_message
  @puts_no_install_cleanup_disable_message_if_not_already = T.let(true, T.nilable(TrueClass))
end
sig { params(formula: Formula).returns(T::Boolean) }
# Whether HOMEBREW_NO_CLEANUP_FORMULAE lists this formula, by name or alias.
def self.skip_clean_formula?(formula)
  configured = Homebrew::EnvConfig.no_cleanup_formulae
  return false if configured.blank?

  @skip_clean_formulae ||= T.let(configured.split(","), T.nilable(T::Array[String]))
  @skip_clean_formulae.include?(formula.name) || @skip_clean_formulae.intersect?(formula.aliases)
end
sig { returns(T::Boolean) }
# Whether enough days have passed since the stamp file was last touched.
def self.periodic_clean_due?
  return false if Homebrew::EnvConfig.no_install_cleanup?

  if PERIODIC_CLEAN_FILE.exist?
    due_after = (DateTime.now - CLEANUP_DEFAULT_DAYS).to_time
    PERIODIC_CLEAN_FILE.mtime < due_after
  else
    # First run: create the stamp so future checks have a reference point.
    HOMEBREW_CACHE.mkpath
    FileUtils.touch PERIODIC_CLEAN_FILE
    false
  end
end
sig { params(dry_run: T::Boolean).void }
# Runs a full cleanup when one hasn't happened within CLEANUP_DEFAULT_DAYS.
def self.periodic_clean!(dry_run: false)
  return if Homebrew::EnvConfig.no_install_cleanup? || !periodic_clean_due?

  if dry_run
    oh1 "Would run `brew cleanup` which has not been run in the last #{CLEANUP_DEFAULT_DAYS} days"
  else
    oh1 "`brew cleanup` has not been run in the last #{CLEANUP_DEFAULT_DAYS} days, running now..."
  end
  puts_no_install_cleanup_disable_message
  return if dry_run

  Cleanup.new.clean!(quiet: true, periodic: true)
end
sig { params(quiet: T::Boolean, periodic: T::Boolean).void }
# Main entry point: with no named args, cleans everything (formulae,
# autoremovable deps, caches, logs, lockfiles, broken links); with args,
# cleans just the named formulae/casks.
def clean!(quiet: false, periodic: false)
  if args.empty?
    Formula.installed
           .sort_by(&:name)
           .reject { |f| Cleanup.skip_clean_formula?(f) }
           .each do |formula|
      cleanup_formula(formula, quiet:, ds_store: false, cache_db: false)
    end

    if ENV["HOMEBREW_AUTOREMOVE"].present?
      opoo "`$HOMEBREW_AUTOREMOVE` is now a no-op as it is the default behaviour. " \
           "Set `HOMEBREW_NO_AUTOREMOVE=1` to disable it."
    end
    Cleanup.autoremove(dry_run: dry_run?) unless Homebrew::EnvConfig.no_autoremove?

    cleanup_cache
    cleanup_empty_api_source_directories
    cleanup_bootsnap
    cleanup_logs
    cleanup_temp_cellar
    cleanup_lockfiles
    cleanup_python_site_packages
    prune_prefix_symlinks_and_directories

    # Destructive bookkeeping (cache DB, .DS_Store, stamp file) only happens
    # on real runs.
    unless dry_run?
      cleanup_cache_db
      rm_ds_store
      HOMEBREW_CACHE.mkpath
      FileUtils.touch PERIODIC_CLEAN_FILE
    end

    # Cleaning up Ruby needs to be done last to avoid requiring additional
    # files afterwards. Additionally, don't allow it on periodic cleans to
    # avoid having to try to do a `brew install` when we've just deleted
    # the running Ruby process...
    return if periodic

    cleanup_portable_ruby
  else
    args.each do |arg|
      # An argument may name a formula, a cask, or both.
      formula = begin
        Formulary.resolve(arg)
      rescue FormulaUnavailableError, TapFormulaAmbiguityError
        nil
      end
      cask = begin
        Cask::CaskLoader.load(arg)
      rescue Cask::CaskError
        nil
      end
      if formula && Cleanup.skip_clean_formula?(formula)
        onoe "Refusing to clean #{formula} because it is listed in " \
             "#{Tty.bold}HOMEBREW_NO_CLEANUP_FORMULAE#{Tty.reset}!"
      elsif formula
        cleanup_formula(formula)
      end
      cleanup_cask(cask) if cask
    end
  end
end
sig { returns(T::Array[Keg]) }
# Kegs that could not be removed (e.g. due to permission errors), memoized.
def unremovable_kegs
  @unremovable_kegs ||= T.let([], T.nilable(T::Array[Keg]))
end
sig { params(formula: Formula, quiet: T::Boolean, ds_store: T::Boolean, cache_db: T::Boolean).void }
# Removes outdated kegs, cached downloads, lockfiles and (optionally)
# .DS_Store files and linkage-cache entries for one formula.
def cleanup_formula(formula, quiet: false, ds_store: true, cache_db: true)
  formula.eligible_kegs_for_cleanup(quiet:).each do |keg|
    cleanup_keg(keg)
  end

  cache_entries = Pathname.glob(cache/"#{formula.name}{_bottle_manifest,}--*")
                          .map { |path| { path:, type: nil } }
  cleanup_cache(cache_entries)

  rm_ds_store([formula.rack]) if ds_store
  cleanup_cache_db(formula.rack) if cache_db
  cleanup_lockfiles(FormulaLock.new(formula.name).path)
end
sig { params(cask: Cask::Cask, ds_store: T::Boolean).void }
# Removes cached downloads, lockfiles and (optionally) .DS_Store files for
# one cask.
def cleanup_cask(cask, ds_store: true)
  cache_entries = Pathname.glob(cache/"Cask/#{cask.token}--*")
                          .map { |path| { path:, type: :cask } }
  cleanup_cache(cache_entries)

  rm_ds_store([cask.caskroom_path]) if ds_store
  cleanup_lockfiles(CaskLock.new(cask.token).path)
end
sig { params(keg: Keg).void }
# Uninstalls one keg, recording it as unremovable on filesystem errors.
def cleanup_keg(keg)
  cleanup_path(Pathname.new(keg)) { keg.uninstall(raise_failures: true) }
rescue Errno::EACCES, Errno::ENOTEMPTY => e
  # Warn but keep going: one stubborn keg shouldn't abort the whole cleanup.
  opoo e.message
  unremovable_kegs.push(keg)
end
sig { void }
# Removes per-formula log directories older than the (capped) age limit.
def cleanup_logs
  return unless HOMEBREW_LOGS.directory?

  # Logs are never kept longer than the periodic-cleanup interval.
  age_limit = [days, CLEANUP_DEFAULT_DAYS].min
  HOMEBREW_LOGS.subdirs.each do |dir|
    next unless self.class.prune?(dir, age_limit)

    cleanup_path(dir) { FileUtils.rm_r(dir) }
  end
end
sig { void }
# Clears leftovers from interrupted installs out of the temporary cellar.
def cleanup_temp_cellar
  return unless HOMEBREW_TEMP_CELLAR.directory?

  HOMEBREW_TEMP_CELLAR.each_child do |entry|
    cleanup_path(entry) { FileUtils.rm_r(entry) }
  end
end
sig { returns(T::Array[{ path: Pathname, type: T.nilable(Symbol) }]) }
# All cache entries, each tagged with the kind of staleness check it needs.
def cache_files
  children_of = lambda do |dir|
    dir.directory? ? dir.children : []
  end

  entries = children_of.call(cache).map { |path| { path:, type: nil } }
  entries += children_of.call(cache/"Cask").map { |path| { path:, type: :cask } }
  # API sources live at `<org>/<repo>/<git_head>/<type>/<token>.rb`.
  entries += (cache/"api-source").glob("*/*/*/*/*").map { |path| { path:, type: :api_source } }
  entries += children_of.call(cache/"gh-actions-artifact").map { |path| { path:, type: :gh_actions_artifact } }
  entries
end
sig { params(directory: Pathname).void }
# Depth-first removal of empty directories left behind under api-source.
def cleanup_empty_api_source_directories(directory = cache/"api-source")
  return if dry_run? || !directory.directory?

  directory.each_child do |child|
    next unless child.directory?

    # Recurse first so a directory that only held empty directories becomes
    # empty itself before it is checked.
    cleanup_empty_api_source_directories(child)
    child.rmdir if child.empty?
  end
end
sig { void }
# Deletes files in downloads/ that no cache symlink references any more.
def cleanup_unreferenced_downloads
  return if dry_run?

  downloads_dir = cache/"downloads"
  return unless downloads_dir.directory?

  referenced = cache_files.map { |entry| entry[:path] }
                          .select(&:symlink?)
                          .map(&:resolved_path)

  (downloads_dir.children - referenced).each do |download|
    if self.class.incomplete?(download)
      begin
        DownloadLock.new(download).with_lock { download.unlink }
      rescue OperationInProgressError
        # Skip incomplete downloads which are still in progress.
        next
      end
    elsif download.directory?
      FileUtils.rm_rf download
    else
      download.unlink
    end
  end
end
sig { params(entries: T.nilable(T::Array[{ path: Pathname, type: T.nilable(Symbol) }])).void }
# Removes incomplete, nested, pruned or stale cache entries, then any
# downloads they referenced.
def cleanup_cache(entries = nil)
  entries ||= cache_files
  entries.each do |entry|
    path = entry[:path]
    next if path == PERIODIC_CLEAN_FILE

    # Go cache contents are read-only; restore write access before removal.
    FileUtils.chmod_R 0755, path if self.class.go_cache_directory?(path) && !dry_run?
    next cleanup_path(path) { path.unlink } if self.class.incomplete?(path)
    next cleanup_path(path) { FileUtils.rm_rf path } if self.class.nested_cache?(path)

    if self.class.prune?(path, days)
      if path.file? || path.symlink?
        cleanup_path(path) { path.unlink }
      elsif path.directory? && path.to_s.include?("--")
        cleanup_path(path) { FileUtils.rm_rf path }
      end
      next
    end

    # If we've specified --prune don't do the (expensive) .stale? check.
    cleanup_path(path) { path.unlink } if !prune? && self.class.stale?(entry, scrub: scrub?)
  end

  cleanup_unreferenced_downloads
end
sig { params(path: Pathname, _block: T.proc.void).void }
# Reports a path and, unless this is a dry run, yields to actually remove
# it. Each path is only ever handled (and its size counted) once.
def cleanup_path(path, &_block)
  return if !path.exist? || !@cleaned_up_paths.add?(path)

  @disk_cleanup_size += path.disk_usage
  if dry_run?
    puts "Would remove: #{path} (#{path.abv})"
  else
    puts "Removing: #{path}... (#{path.abv})"
    yield
  end
end
sig { params(lockfiles: Pathname).void }
# Deletes lockfiles that no other process currently holds.
#
# FIX: this previously opened each file twice — the locked descriptor was
# never closed (leaked until GC) and `LOCK_UN` was issued on a *fresh*
# descriptor, which does not release a `flock` held on a different open
# file description. Use one handle so lock, unlock and close all happen on
# the same descriptor.
def cleanup_lockfiles(*lockfiles)
  return if dry_run?

  lockfiles = HOMEBREW_LOCKS.children.select(&:file?) if lockfiles.empty? && HOMEBREW_LOCKS.directory?
  lockfiles.each do |file|
    next unless file.readable?

    file.open(File::RDWR) do |f|
      # Skip lockfiles another process still holds.
      next unless f.flock(File::LOCK_EX | File::LOCK_NB)

      begin
        file.unlink
      ensure
        f.flock(File::LOCK_UN)
      end
    end
  end
end
sig { void }
# Removes vendored portable-ruby installations (and matching vendored
# bundler gem directories) other than the currently-required version.
def cleanup_portable_ruby
  vendor_dir = HOMEBREW_LIBRARY/"Homebrew/vendor"
  portable_ruby_latest_version = (vendor_dir/"portable-ruby-version").read.chomp

  portable_rubies_to_remove = []
  Pathname.glob(vendor_dir/"portable-ruby/*.*").select(&:directory?).each do |path|
    # Keep the pinned portable-ruby version unless the system Ruby is used.
    next if !use_system_ruby? && portable_ruby_latest_version == path.basename.to_s

    portable_rubies_to_remove << path
  end
  return if portable_rubies_to_remove.empty?

  bundler_paths = (vendor_dir/"bundle/ruby").children.select do |child|
    basename = child.basename.to_s
    next false if basename == ".homebrew_gem_groups"
    next true unless child.directory?

    # Keep gem dirs matching the pinned portable-ruby ABI or the running Ruby.
    [
      "#{Version.new(portable_ruby_latest_version).major_minor}.0",
      RbConfig::CONFIG["ruby_version"],
    ].uniq.exclude?(basename)
  end

  bundler_paths.each do |bundler_path|
    # `git clean -nx` only lists what would be removed; `-ffqx` removes it.
    if dry_run?
      puts Utils.popen_read("git", "-C", HOMEBREW_REPOSITORY, "clean", "-nx", bundler_path).chomp
    else
      puts Utils.popen_read("git", "-C", HOMEBREW_REPOSITORY, "clean", "-ffqx", bundler_path).chomp
    end
  end

  portable_rubies_to_remove.each do |portable_ruby|
    cleanup_path(portable_ruby) { FileUtils.rm_r(portable_ruby) }
  end
end
sig { returns(T::Boolean) }
# Overridden on platforms that can use the system Ruby (see extend/os).
def use_system_ruby? = false
sig { void }
# Drops bootsnap cache directories belonging to other Homebrew/Ruby setups.
def cleanup_bootsnap
  bootsnap = cache/"bootsnap"
  return unless bootsnap.directory?

  current_key = Homebrew::Bootsnap.key
  bootsnap.each_child do |subdir|
    next if subdir.basename.to_s == current_key

    cleanup_path(subdir) { FileUtils.rm_r(subdir) }
  end
end
sig { params(rack: T.nilable(Pathname)).void }
# Removes legacy cache-database files and prunes linkage entries for kegs
# that no longer exist (optionally restricted to one rack).
def cleanup_cache_db(rack = nil)
  stale_db_files = [
    cache/"desc_cache.json",
    cache/"linkage.db",
    cache/"linkage.db.db",
  ]
  FileUtils.rm_rf stale_db_files

  CacheStoreDatabase.use(:linkage) do |db|
    break unless db.created?

    rack_prefix = "#{rack}/" if rack.present?
    db.each_key do |keg|
      next if rack_prefix && !keg.start_with?(rack_prefix)
      next if File.directory?(keg)

      LinkageCacheStore.new(keg, db).delete!
    end
  end
end
sig { params(dirs: T.nilable(T::Array[Pathname])).void }
# Deletes Finder's .DS_Store droppings below the given (or default) roots.
def rm_ds_store(dirs = nil)
  dirs ||= Keg.must_exist_directories + [HOMEBREW_PREFIX/"Caskroom"]

  dirs.select(&:directory?).each do |root|
    Pathname.glob("#{root}/**/.DS_Store").each do |ds_store|
      ds_store.unlink
    rescue Errno::EACCES
      # don't care if we can't delete a .DS_Store
      nil
    end
  end
end
sig { void }
# Removes orphaned Python byte-code: aged *.pyc files in top-level
# __pycache__ directories, plus directories containing nothing but *.pyc.
def cleanup_python_site_packages
  # Per-key block default so each directory gets its own array.
  pyc_files = Hash.new { |h, k| h[k] = [] }
  seen_non_pyc_file = Hash.new { |h, k| h[k] = false }
  unused_pyc_files = []

  HOMEBREW_PREFIX.glob("lib/python*/site-packages").each do |site_packages|
    site_packages.each_child do |child|
      next unless child.directory?
      # TODO: Work out a sensible way to clean up `pip`'s, `setuptools`' and `wheel`'s
      # `{dist,site}-info` directories. Alternatively, consider always removing
      # all `-info` directories, because we may not be making use of them.
      next if child.basename.to_s.end_with?("-info")

      # Clean up old *.pyc files in the top-level __pycache__.
      if child.basename.to_s == "__pycache__"
        child.find do |path|
          next if path.extname != ".pyc"
          next unless self.class.prune?(path, days)

          unused_pyc_files << path
        end

        next
      end

      # Look for directories that contain only *.pyc files.
      child.find do |path|
        next if path.directory?

        if path.extname == ".pyc"
          pyc_files[child] << path
        else
          # One non-pyc file is enough to keep the whole directory.
          seen_non_pyc_file[child] = true
          break
        end
      end
    end
  end

  unused_pyc_files += pyc_files.reject { |k,| seen_non_pyc_file[k] }
                               .values
                               .flatten
  return if unused_pyc_files.blank?

  unused_pyc_files.each do |pyc|
    cleanup_path(pyc) { pyc.unlink }
  end
end
sig { void }
# Removes broken symlinks under the keg-link target directories and the
# Caskroom, then any directories left empty, and reports totals.
def prune_prefix_symlinks_and_directories
  ObserverPathnameExtension.reset_counts!

  dirs = []
  children_count = {}

  Keg.must_exist_subdirectories.each do |dir|
    next unless dir.directory?

    dir.find do |path|
      path.extend(ObserverPathnameExtension)
      if path.symlink?
        unless path.resolved_path_exists?
          path.uninstall_info if path.to_s.match?(Keg::INFOFILE_RX) && !dry_run?

          if dry_run?
            puts "Would remove (broken link): #{path}"
            children_count[path.dirname] -= 1 if children_count.key?(path.dirname)
          else
            path.unlink
          end
        end
      elsif path.directory? && Keg.must_exist_subdirectories.exclude?(path)
        dirs << path
        # Dry runs only simulate removals, so track child counts to predict
        # which directories would become empty.
        children_count[path] = path.children.length if dry_run?
      end
    end
  end

  # Deepest directories first so parents can empty out as children go.
  dirs.reverse_each do |d|
    if !dry_run?
      d.rmdir_if_possible
    elsif children_count[d].zero?
      puts "Would remove (empty directory): #{d}"
      children_count[d.dirname] -= 1 if children_count.key?(d.dirname)
    end
  end

  require "cask/caskroom"

  if Cask::Caskroom.path.directory?
    Cask::Caskroom.path.each_child do |path|
      path.extend(ObserverPathnameExtension)
      next if !path.symlink? || path.resolved_path_exists?

      if dry_run?
        puts "Would remove (broken link): #{path}"
      else
        path.unlink
      end
    end
  end

  return if dry_run?
  return if ObserverPathnameExtension.total.zero?

  n, d = ObserverPathnameExtension.counts
  print "Pruned #{n} symbolic links "
  print "and #{d} directories " if d.positive?
  puts "from #{HOMEBREW_PREFIX}"
end
sig { params(dry_run: T::Boolean).void }
# Uninstalls formulae that were only installed as dependencies and are no
# longer needed by any installed formula or cask.
def self.autoremove(dry_run: false)
  require "utils/autoremove"
  require "cask/caskroom"

  # If this runs after install, uninstall, reinstall or upgrade,
  # the cache of installed formulae may no longer be valid.
  Formula.clear_cache unless dry_run

  formulae = Formula.installed
  # Remove formulae listed in HOMEBREW_NO_CLEANUP_FORMULAE and their dependencies.
  if Homebrew::EnvConfig.no_cleanup_formulae.present?
    formulae -= formulae.select { skip_clean_formula?(it) }
                        .flat_map { |f| [f, *f.installed_runtime_formula_dependencies] }
  end
  casks = Cask::Caskroom.casks

  removable_formulae = Utils::Autoremove.removable_formulae(formulae, casks)
  return if removable_formulae.blank?

  formulae_names = removable_formulae.map(&:full_name).sort

  verb = dry_run ? "Would autoremove" : "Autoremoving"
  oh1 "#{verb} #{formulae_names.count} unneeded #{Utils.pluralize("formula", formulae_names.count)}:"
  puts formulae_names.join("\n")
  return if dry_run

  require "uninstall"

  kegs_by_rack = removable_formulae.filter_map(&:any_installed_keg).group_by(&:rack)
  Uninstall.uninstall_kegs(kegs_by_rack)

  # The installed formula cache will be invalid after uninstalling.
  Formula.clear_cache
end
end
end
require "extend/os/cleanup"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/options.rb | Library/Homebrew/options.rb | # typed: strict
# frozen_string_literal: true
# A formula option.
#
# Immutable value object identified by its name; the flag form (`--name`)
# is derived once at construction time.
class Option
  sig { returns(String) }
  attr_reader :name

  sig { returns(String) }
  attr_reader :description, :flag

  sig { params(name: String, description: String).void }
  def initialize(name, description = "")
    @name = name
    @flag = T.let("--#{name}", String)
    @description = description
  end

  # An option renders as its command-line flag.
  sig { returns(String) }
  def to_s
    flag
  end

  # Options are ordered by name; anything that isn't an Option is
  # incomparable (returns `nil`).
  sig { params(other: T.anything).returns(T.nilable(Integer)) }
  def <=>(other)
    return unless other.is_a?(Option)

    name <=> other.name
  end

  # Two options are equal when they are instances of exactly the same class
  # and share the same name.
  sig { params(other: T.anything).returns(T::Boolean) }
  def ==(other)
    other.is_a?(Option) && instance_of?(other.class) && name == other.name
  end
  alias eql? ==

  # Hash on the name so equal options collide, as required by `eql?`.
  sig { returns(Integer) }
  def hash
    name.hash
  end

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}: #{flag.inspect}>"
  end
end
# A deprecated formula option.
#
# Records the mapping from an old option name to its current replacement.
class DeprecatedOption
  sig { returns(String) }
  attr_reader :old, :current

  sig { params(old: String, current: String).void }
  def initialize(old, current)
    @old = old
    @current = current
  end

  # The flag form (`--foo`) of the deprecated option name.
  sig { returns(String) }
  def old_flag
    "--#{old}"
  end

  # The flag form (`--foo`) of the replacement option name.
  sig { returns(String) }
  def current_flag
    "--#{current}"
  end

  # Equal when the other object is of exactly the same class and maps the
  # same old name to the same current name.
  sig { params(other: T.anything).returns(T::Boolean) }
  def ==(other)
    other.is_a?(DeprecatedOption) && instance_of?(other.class) &&
      old == other.old && current == other.current
  end
  alias eql? ==
end
# A collection of formula options.
#
# Wraps a `Set` of {Option} instances and provides set-style operators that
# each return a new {Options} collection.
class Options
  include Enumerable
  extend T::Generic

  Elem = type_member(:out) { { fixed: Option } }

  # Build a collection from an array of flag strings (e.g. `--with-foo`),
  # stripping the leading `--` and any `=value` suffix from each entry.
  sig { params(array: T.nilable(T::Array[String])).returns(Options) }
  def self.create(array)
    new Array(array).map { |e| Option.new(e[/^--([^=]+=?)(.+)?$/, 1] || e) }
  end

  sig { params(options: T.nilable(T::Enumerable[Option])).void }
  def initialize(options = nil)
    # Ensure this is synced with `initialize_dup` and `freeze` (excluding simple objects like integers and booleans)
    @options = T.let(Set.new(options), T::Set[Option])
  end

  sig { params(other: Options).void }
  def initialize_dup(other)
    super
    @options = @options.dup
  end

  sig { returns(T.self_type) }
  def freeze
    # Fix: this previously called `@options.dup` and discarded the result,
    # leaving the underlying set mutable after `freeze`. Freeze the set so
    # the collection is deeply frozen, mirroring `initialize_dup` above.
    @options.freeze
    super
  end

  sig { override.params(block: T.proc.params(arg0: Option).returns(BasicObject)).returns(T.self_type) }
  def each(&block)
    @options.each(&block)
    self
  end

  # Add an option to this collection (mutates the receiver).
  sig { params(other: Option).returns(T.self_type) }
  def <<(other)
    @options << other
    self
  end

  sig { params(other: T::Enumerable[Option]).returns(T.self_type) }
  def +(other)
    self.class.new(@options + other)
  end

  sig { params(other: T::Enumerable[Option]).returns(T.self_type) }
  def -(other)
    self.class.new(@options - other)
  end

  sig { params(other: T::Enumerable[Option]).returns(T.self_type) }
  def &(other)
    self.class.new(@options & other)
  end

  sig { params(other: T::Enumerable[Option]).returns(T.self_type) }
  def |(other)
    self.class.new(@options | other)
  end

  # Join the options with a separator string, like `Array#*`.
  sig { params(other: String).returns(String) }
  def *(other)
    @options.to_a * other
  end

  sig { params(other: T.anything).returns(T::Boolean) }
  def ==(other)
    case other
    when Options
      instance_of?(other.class) && to_a == other.to_a
    else
      false
    end
  end
  alias eql? ==

  sig { returns(T::Boolean) }
  def empty?
    @options.empty?
  end

  # All options rendered as their `--flag` strings.
  sig { returns(T::Array[String]) }
  def as_flags
    map(&:flag)
  end

  # Membership test that accepts an {Option}, a bare name or a `--flag`.
  sig { params(option: T.any(Option, String)).returns(T::Boolean) }
  def include?(option)
    any? { |opt| opt == option || opt.name == option || opt.flag == option }
  end

  alias to_ary to_a

  sig { returns(String) }
  def to_s
    @options.map(&:to_s).join(" ")
  end

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}: #{to_a.inspect}>"
  end

  # Print a formula's options (and `--HEAD` if it has a head spec),
  # used by `brew options`.
  sig { params(formula: Formula).void }
  def self.dump_for_formula(formula)
    formula.options.sort_by(&:flag).each do |opt|
      puts "#{opt.flag}\n\t#{opt.description}"
    end
    puts "--HEAD\n\tInstall HEAD version" if formula.head
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/missing_formula.rb | Library/Homebrew/missing_formula.rb | # typed: strict
# frozen_string_literal: true
require "formulary"
require "utils/output"
module Homebrew
  # Helper module for checking if there is a reason a formula is missing.
  module MissingFormula
    extend Utils::Output::Mixin

    class << self
      # Returns the first known explanation for why `name` is not available
      # as a formula (cask-only, deliberately disallowed, migrated to another
      # tap, or recently deleted), or `nil` when there is none.
      sig { params(name: String, silent: T::Boolean, show_info: T::Boolean).returns(T.nilable(String)) }
      def reason(name, silent: false, show_info: false)
        cask_reason(name, silent:, show_info:) || disallowed_reason(name) ||
          tap_migration_reason(name) || deleted_reason(name, silent:)
      end

      # Hard-coded messages for names we deliberately do not ship as
      # formulae, usually pointing at the formula or cask to use instead.
      sig { params(name: String).returns(T.nilable(String)) }
      def disallowed_reason(name)
        case name.downcase
        when "gem", /^rubygems?$/ then <<~EOS
          macOS provides gem as part of Ruby. To install a newer version:
            brew install ruby
        EOS
        when "pip" then <<~EOS
          pip is part of the python formula:
            brew install python
        EOS
        when "pil" then <<~EOS
          Instead of PIL, consider pillow:
            brew install pillow
        EOS
        when "macruby" then <<~EOS
          MacRuby has been discontinued. Consider RubyMotion:
            brew install --cask rubymotion
        EOS
        when /(lib)?lzma/ then <<~EOS
          lzma is now part of the xz formula:
            brew install xz
        EOS
        when "gsutil" then <<~EOS
          gsutil is available through pip:
            pip3 install gsutil
        EOS
        when "gfortran" then <<~EOS
          GNU Fortran is part of the GCC formula:
            brew install gcc
        EOS
        when "play" then <<~EOS
          Play 2.3 replaces the play command with activator:
            brew install typesafe-activator
          You can read more about this change at:
            #{Formatter.url("https://www.playframework.com/documentation/2.3.x/Migration23")}
            #{Formatter.url("https://www.playframework.com/documentation/2.3.x/Highlights23")}
        EOS
        when "haskell-platform" then <<~EOS
          The components of the Haskell Platform are available separately.
          Glasgow Haskell Compiler:
            brew install ghc
          Cabal build system:
            brew install cabal-install
          Haskell Stack tool:
            brew install haskell-stack
        EOS
        when "mysqldump-secure" then <<~EOS
          The creator of mysqldump-secure tried to game our popularity metrics.
        EOS
        when "ngrok" then <<~EOS
          Upstream sunsetted 1.x in March 2016 and 2.x is not open-source.
          If you wish to use the 2.x release you can install it with:
            brew install --cask ngrok
        EOS
        when "cargo" then <<~EOS
          cargo is part of the rust formula:
            brew install rust
        EOS
        when "cargo-completion" then <<~EOS
          cargo-completion is part of the rust formula:
            brew install rust
        EOS
        when "uconv" then <<~EOS
          uconv is part of the icu4c formula:
            brew install icu4c
        EOS
        when "postgresql", "postgres" then <<~EOS
          postgresql breaks existing databases on upgrade without human intervention.
          See a more specific version to install with:
            brew formulae | grep postgresql@
        EOS
        end
      end

      # If any installed tap declares a migration for `name`, explain where
      # it moved and how to install it from its new home. No install
      # instructions are added when it moved to homebrew/core, since it is
      # then directly installable.
      sig { params(name: String).returns(T.nilable(String)) }
      def tap_migration_reason(name)
        message = T.let(nil, T.nilable(String))

        Tap.each do |old_tap|
          new_tap = old_tap.tap_migrations[name]
          next unless new_tap

          # Migration targets may be "user/repo" or "user/repo/new-name".
          new_tap_user, new_tap_repo, new_tap_new_name = new_tap.split("/")
          new_tap_name = "#{new_tap_user}/#{new_tap_repo}"
          message = <<~EOS
            It was migrated from #{old_tap} to #{new_tap}.
          EOS
          break if new_tap_name == CoreTap.instance.name

          install_cmd = if new_tap_name.start_with?("homebrew/cask")
            "install --cask"
          else
            "install"
          end
          new_tap_new_name ||= name

          message += <<~EOS
            You can access it again by running:
              brew tap #{new_tap_name}
            And then you can install it by running:
              brew #{install_cmd} #{new_tap_new_name}
          EOS
          break
        end

        message
      end

      # Search the owning tap's git history (last month only) for a deleted
      # formula file and, if found, explain when and why it was removed.
      # Returns `nil` when the formula still exists or no deletion is found.
      sig { params(name: String, silent: T::Boolean).returns(T.nilable(String)) }
      def deleted_reason(name, silent: false)
        path = Formulary.path name
        return if File.exist? path

        tap = Tap.from_path(path)
        return if tap.nil? || !File.exist?(tap.path)

        relative_path = path.relative_path_from tap.path
        tap.path.cd do
          unless silent
            ohai "Searching for a previously deleted formula (in the last month)..."
            if (tap.path/".git/shallow").exist?
              opoo <<~EOS
                #{tap} is shallow clone. To get its complete history, run:
                  git -C "$(brew --repo #{tap})" fetch --unshallow
              EOS
            end
          end

          # Optimization for the core tap which has many monthly commits
          if tap.core_tap?
            # Check if the formula has been deleted in the last month.
            diff_command = ["git", "diff", "--diff-filter=D", "--name-only",
                            "@{'1 month ago'}", "--", relative_path]
            deleted_formula = Utils.popen_read(*diff_command)
            if deleted_formula.blank?
              ofail "No previously deleted formula found." unless silent
              return
            end
          end

          # Find commit where formula was deleted in the last month.
          # The format string emits hash, short hash and full message,
          # separated by literal "\n" markers that are unescaped below.
          log_command = "git log --since='1 month ago' --diff-filter=D " \
                        "--name-only --max-count=1 " \
                        "--format=%H\\\\n%h\\\\n%B -- #{relative_path}"
          hash, short_hash, *commit_message, relative_path_string =
            Utils.popen_read(log_command).gsub("\\n", "\n").lines.map(&:chomp)

          if hash.blank? || short_hash.blank? || relative_path_string.blank?
            ofail "No previously deleted formula found." unless silent
            return
          end

          commit_message = commit_message.reject(&:empty?).join("\n ")

          # Rewrite issue references into full URLs for the tap.
          commit_message.sub!(/ \(#(\d+)\)$/, " (#{tap.issues_url}/\\1)")
          commit_message.gsub!(/(Closes|Fixes) #(\d+)/, "\\1 #{tap.issues_url}/\\2")

          <<~EOS
            #{name} was deleted from #{tap.name} in commit #{short_hash}:
              #{commit_message}
            To show the formula before removal, run:
              git -C "$(brew --repo #{tap})" show #{short_hash}^:#{relative_path_string}
            If you still use this formula, consider creating your own tap:
              #{Formatter.url("https://docs.brew.sh/How-to-Create-and-Maintain-a-Tap")}
          EOS
        end
      end

      # Stub; overridden by OS-specific extensions (see the `require` below)
      # to suggest a matching cask where one exists.
      sig { params(name: String, silent: T::Boolean, show_info: T::Boolean).returns(T.nilable(String)) }
      def cask_reason(name, silent: false, show_info: false); end

      # Stub; overridden by OS-specific extensions to suggest alternatives
      # for a missing command.
      sig { params(name: String, command: String).returns(T.nilable(String)) }
      def suggest_command(name, command); end

      require "extend/os/missing_formula"
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/caveats.rb | Library/Homebrew/caveats.rb | # typed: strict
# frozen_string_literal: true
require "language/python"
require "utils/service"
# A formula's caveats.
#
# Collects the formula's own `caveats` block output together with generated
# notes about keg-only status, background services, shell completions and
# Emacs Lisp files.
class Caveats
  extend Forwardable

  sig { returns(Formula) }
  attr_reader :formula

  sig { params(formula: Formula).void }
  def initialize(formula)
    @formula = formula
    # Memoization slots for #caveats and #completions_and_elisp.
    @caveats = T.let(nil, T.nilable(String))
    @completions_and_elisp = T.let(nil, T.nilable(T::Array[String]))
  end

  # The full caveats text: the formula's `caveats` block (evaluated against
  # the installed tab's build options), plus keg-only and service notes.
  sig { returns(String) }
  def caveats
    @caveats ||= begin
      caveats = []
      build = formula.build
      begin
        # Temporarily swap in the tab from the installed keg so the
        # formula's `caveats` block sees the options it was built with.
        formula.build = Tab.for_formula(formula)
        string = formula.caveats.to_s
        caveats << "#{string.chomp}\n" unless string.empty?
      ensure
        formula.build = build
      end
      caveats << keg_only_text
      caveats << service_caveats
      caveats.compact.join("\n")
    end
  end

  sig { returns(T::Boolean) }
  def empty?
    caveats.blank? && completions_and_elisp.blank?
  end

  delegate [:to_s] => :caveats

  # Notes about installed shell completions/functions and Emacs Lisp files.
  # Limited to the user's current shell when it can be detected; otherwise
  # all supported shells are reported.
  sig { returns(T::Array[String]) }
  def completions_and_elisp
    @completions_and_elisp ||= begin
      valid_shells = [:bash, :zsh, :fish, :pwsh].freeze
      current_shell = Utils::Shell.preferred || Utils::Shell.parent
      shells = if current_shell.present? &&
                  (shell_sym = current_shell.to_sym) &&
                  valid_shells.include?(shell_sym)
        [shell_sym]
      else
        valid_shells
      end
      completions_and_elisp = shells.map do |shell|
        function_completion_caveats(shell)
      end
      completions_and_elisp << elisp_caveats
      completions_and_elisp.compact
    end
  end

  # Explain why a keg-only formula isn't symlinked into the prefix and how
  # to use it anyway (PATH, LDFLAGS/CPPFLAGS, pkgconf and cmake hints).
  # Returns `nil` for formulae that are not keg-only.
  sig { params(skip_reason: T::Boolean).returns(T.nilable(String)) }
  def keg_only_text(skip_reason: false)
    return unless formula.keg_only?

    s = if skip_reason
      ""
    else
      <<~EOS
        #{formula.name} is keg-only, which means it was not symlinked into #{HOMEBREW_PREFIX},
        because #{formula.keg_only_reason.to_s.chomp}.
      EOS
    end.dup

    if formula.bin.directory? || formula.sbin.directory?
      s << <<~EOS

        If you need to have #{formula.name} first in your PATH, run:
      EOS
      s << "  #{Utils::Shell.prepend_path_in_profile(formula.opt_bin.to_s)}\n" if formula.bin.directory?
      s << "  #{Utils::Shell.prepend_path_in_profile(formula.opt_sbin.to_s)}\n" if formula.sbin.directory?
    end

    if formula.lib.directory? || formula.include.directory?
      s << <<~EOS

        For compilers to find #{formula.name} you may need to set:
      EOS
      s << "  #{Utils::Shell.export_value("LDFLAGS", "-L#{formula.opt_lib}")}\n" if formula.lib.directory?
      s << "  #{Utils::Shell.export_value("CPPFLAGS", "-I#{formula.opt_include}")}\n" if formula.include.directory?

      # Only suggest pkgconf/cmake hints when the relevant tool is present.
      if which("pkgconf", ORIGINAL_PATHS) &&
         ((formula.lib/"pkgconfig").directory? || (formula.share/"pkgconfig").directory?)
        s << <<~EOS

          For pkgconf to find #{formula.name} you may need to set:
        EOS
        if (formula.lib/"pkgconfig").directory?
          s << "  #{Utils::Shell.export_value("PKG_CONFIG_PATH", "#{formula.opt_lib}/pkgconfig")}\n"
        end
        if (formula.share/"pkgconfig").directory?
          s << "  #{Utils::Shell.export_value("PKG_CONFIG_PATH", "#{formula.opt_share}/pkgconfig")}\n"
        end
      end

      if which("cmake", ORIGINAL_PATHS) &&
         ((formula.lib/"cmake").directory? || (formula.share/"cmake").directory?)
        s << <<~EOS

          For cmake to find #{formula.name} you may need to set:
            #{Utils::Shell.export_value("CMAKE_PREFIX_PATH", formula.opt_prefix.to_s)}
        EOS
      end
    end

    s << "\n" unless s.end_with?("\n")
    s
  end

  private

  # The first resolvable keg among the formula's prefix, opt prefix and
  # linked keg, or `nil` when none exists.
  sig { returns(T.nilable(Keg)) }
  def keg
    @keg ||= T.let([formula.prefix, formula.opt_prefix, formula.linked_keg].filter_map do |d|
      Keg.new(d.resolved_path)
    rescue
      nil
    end.first, T.nilable(Keg))
  end

  # Note where completions/functions for the given shell were installed,
  # or `nil` when the shell is absent or nothing was installed.
  sig { params(shell: Symbol).returns(T.nilable(String)) }
  def function_completion_caveats(shell)
    return unless (keg = self.keg)
    return unless which(shell.to_s, ORIGINAL_PATHS)

    completion_installed = keg.completion_installed?(shell)
    functions_installed = keg.functions_installed?(shell)
    return if !completion_installed && !functions_installed

    installed = []
    installed << "completions" if completion_installed
    installed << "functions" if functions_installed

    # Keg-only formulae install completions under their opt prefix rather
    # than the global prefix.
    root_dir = formula.keg_only? ? formula.opt_prefix : HOMEBREW_PREFIX

    case shell
    when :bash
      <<~EOS
        Bash completion has been installed to:
          #{root_dir}/etc/bash_completion.d
      EOS
    when :fish
      fish_caveats = "fish #{installed.join(" and ")} have been installed to:"
      fish_caveats << "\n  #{root_dir}/share/fish/vendor_completions.d" if completion_installed
      fish_caveats << "\n  #{root_dir}/share/fish/vendor_functions.d" if functions_installed
      fish_caveats.freeze
    when :zsh
      <<~EOS
        zsh #{installed.join(" and ")} have been installed to:
          #{root_dir}/share/zsh/site-functions
      EOS
    when :pwsh
      <<~EOS
        PowerShell completion has been installed to:
          #{root_dir}/share/pwsh/completions
      EOS
    end
  end

  # Note where Emacs Lisp files were installed (non-keg-only formulae only).
  sig { returns(T.nilable(String)) }
  def elisp_caveats
    return if formula.keg_only?
    return unless (keg = self.keg)
    return unless keg.elisp_installed?

    <<~EOS
      Emacs Lisp files have been installed to:
        #{HOMEBREW_PREFIX}/share/emacs/site-lisp/#{formula.name}
    EOS
  end

  # Instructions for managing the formula's background service via
  # `brew services`, or running it manually where services are unsupported.
  sig { returns(T.nilable(String)) }
  def service_caveats
    return if !formula.service? && !Utils::Service.installed?(formula) && !keg&.plist_installed?
    return if formula.service? && !formula.service.command? && !Utils::Service.installed?(formula)

    s = []

    # Brew services only works with these two tools
    return <<~EOS if !Utils::Service.systemctl? && !Utils::Service.launchctl? && formula.service.command?
      #{Formatter.warning("Warning:")} #{formula.name} provides a service which can only be used on macOS or systemd!
      You can manually execute the service instead with:
        #{formula.service.manual_command}
    EOS

    # Services that require root must be started with sudo and run at boot
    # rather than at login.
    startup = formula.service.requires_root?
    if Utils::Service.running?(formula)
      s << "To restart #{formula.full_name} after an upgrade:"
      s << "  #{"sudo " if startup}brew services restart #{formula.full_name}"
    elsif startup
      s << "To start #{formula.full_name} now and restart at startup:"
      s << "  sudo brew services start #{formula.full_name}"
    else
      s << "To start #{formula.full_name} now and restart at login:"
      s << "  brew services start #{formula.full_name}"
    end

    if formula.service.command?
      s << "Or, if you don't want/need a background service you can just run:"
      s << "  #{formula.service.manual_command}"
    end

    # pbpaste is the system clipboard tool on macOS and fails with `tmux` by default
    # check if this is being run under `tmux` to avoid failing
    if ENV["HOMEBREW_TMUX"] && !quiet_system("/usr/bin/pbpaste")
      s << "" << "WARNING: brew services will fail when run under tmux."
    end

    "#{s.join("\n")}\n" unless s.empty?
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/linkage_cache_store.rb | Library/Homebrew/linkage_cache_store.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "cache_store"
#
# {LinkageCacheStore} provides methods to fetch and mutate linkage-specific data used
# by the `brew linkage` command.
#
class LinkageCacheStore < CacheStore
  # @param keg_path [String]
  # @param database [CacheStoreDatabase]
  # @return [nil]
  def initialize(keg_path, database)
    @keg_path = keg_path
    super(database)
  end

  # Returns `true` if the database has any value for the current `keg_path`.
  #
  # @return [Boolean]
  def keg_exists?
    stored = database.get(@keg_path)
    !stored.nil?
  end

  # Inserts dylib-related information into the cache if it does not exist or
  # updates data into the linkage cache if it does exist.
  #
  # @param hash_values [Hash] hash containing KVPs of { :type => Hash }
  # @return [nil]
  def update!(hash_values)
    unknown_types = hash_values.keys.reject { |type| HASH_LINKAGE_TYPES.include?(type) }
    unless unknown_types.empty?
      raise TypeError, <<~EOS
        Can't update types that are not defined for the linkage store
      EOS
    end

    database.set @keg_path, hash_values
  end

  # @param type [Symbol] the type to fetch from the {LinkageCacheStore}
  # @raise [TypeError] error if the type is not in `HASH_LINKAGE_TYPES`
  # @return [Hash]
  def fetch(type)
    if HASH_LINKAGE_TYPES.include?(type)
      keg_exists? ? fetch_hash_values(type) : {}
    else
      raise TypeError, <<~EOS
        Can't fetch types that are not defined for the linkage store
      EOS
    end
  end

  # Delete the keg from the {LinkageCacheStore}.
  #
  # @return [nil]
  def delete!
    database.delete(@keg_path)
  end

  private

  HASH_LINKAGE_TYPES = [:keg_files_dylibs].freeze
  private_constant :HASH_LINKAGE_TYPES

  # @param type [Symbol]
  # @return [Hash]
  def fetch_hash_values(type)
    cached = database.get(@keg_path)
    if cached
      cached[type.to_s]
    else
      {}
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/livecheck.rb | Library/Homebrew/livecheck.rb | # typed: strict
# frozen_string_literal: true
require "livecheck/constants"
require "livecheck/options"
require "cask/cask"
# The {Livecheck} class implements the DSL methods used in a formula's, cask's
# or resource's `livecheck` block and stores related instance variables. Most
# of these methods also return the related instance variable when no argument
# is provided.
#
# This information is used by the `brew livecheck` command to control its
# behavior. Example `livecheck` blocks can be found in the
# [`brew livecheck` documentation](https://docs.brew.sh/Brew-Livecheck).
class Livecheck
  extend Forwardable

  # Options to modify livecheck's behavior.
  sig { returns(Homebrew::Livecheck::Options) }
  attr_reader :options

  # A very brief description of why the formula/cask/resource is skipped (e.g.
  # `No longer developed or maintained`).
  sig { returns(T.nilable(String)) }
  attr_reader :skip_msg

  # A block used by strategies to identify version information.
  sig { returns(T.nilable(Proc)) }
  attr_reader :strategy_block

  sig { params(package_or_resource: T.any(Cask::Cask, T.class_of(Formula), Resource)).void }
  def initialize(package_or_resource)
    @package_or_resource = package_or_resource
    # All DSL state starts unset; the accessors below act as both getters
    # (no argument) and setters (argument provided).
    @options = T.let(Homebrew::Livecheck::Options.new, Homebrew::Livecheck::Options)
    @referenced_cask_name = T.let(nil, T.nilable(String))
    @referenced_formula_name = T.let(nil, T.nilable(String))
    @regex = T.let(nil, T.nilable(Regexp))
    @skip = T.let(false, T::Boolean)
    @skip_msg = T.let(nil, T.nilable(String))
    @strategy = T.let(nil, T.nilable(Symbol))
    @strategy_block = T.let(nil, T.nilable(Proc))
    @throttle = T.let(nil, T.nilable(Integer))
    @url = T.let(nil, T.nilable(T.any(String, Symbol)))
  end

  # Sets the `@referenced_cask_name` instance variable to the provided `String`
  # or returns the `@referenced_cask_name` instance variable when no argument
  # is provided. Inherited livecheck values from the referenced cask
  # (e.g. regex) can be overridden in the `livecheck` block.
  sig {
    params(
      # Name of cask to inherit livecheck info from.
      cask_name: String,
    ).returns(T.nilable(String))
  }
  def cask(cask_name = T.unsafe(nil))
    case cask_name
    when nil
      @referenced_cask_name
    when String
      @referenced_cask_name = cask_name
    end
  end

  # Sets the `@referenced_formula_name` instance variable to the provided
  # `String`/`Symbol` or returns the `@referenced_formula_name` instance
  # variable when no argument is provided. Inherited livecheck values from the
  # referenced formula (e.g. regex) can be overridden in the `livecheck` block.
  sig {
    params(
      # Name of formula to inherit livecheck info from.
      formula_name: T.any(String, Symbol),
    ).returns(T.nilable(T.any(String, Symbol)))
  }
  def formula(formula_name = T.unsafe(nil))
    # `:parent` is the only symbol accepted besides a formula name string.
    case formula_name
    when nil
      @referenced_formula_name
    when String, :parent
      @referenced_formula_name = formula_name
    end
  end

  # Sets the `@regex` instance variable to the provided `Regexp` or returns the
  # `@regex` instance variable when no argument is provided.
  sig {
    params(
      # Regex to use for matching versions in content.
      pattern: Regexp,
    ).returns(T.nilable(Regexp))
  }
  def regex(pattern = T.unsafe(nil))
    case pattern
    when nil
      @regex
    when Regexp
      @regex = pattern
    end
  end

  # Sets the `@skip` instance variable to `true` and sets the `@skip_msg`
  # instance variable if a `String` is provided. `@skip` is used to indicate
  # that the formula/cask/resource should be skipped and the `skip_msg` very
  # briefly describes why it is skipped (e.g. "No longer developed or
  # maintained").
  sig {
    params(
      # String describing why the formula/cask is skipped.
      skip_msg: String,
    ).returns(T::Boolean)
  }
  def skip(skip_msg = T.unsafe(nil))
    @skip_msg = skip_msg if skip_msg.is_a?(String)
    # Always returns `true` (the result of this assignment).
    @skip = true
  end

  # Should `livecheck` skip this formula/cask/resource?
  sig { returns(T::Boolean) }
  def skip?
    @skip
  end

  # Sets the `@strategy` instance variable to the provided `Symbol` or returns
  # the `@strategy` instance variable when no argument is provided. The strategy
  # symbols use snake case (e.g. `:page_match`) and correspond to the strategy
  # file name.
  sig {
    params(
      # Symbol for the desired strategy.
      symbol: Symbol,
      block: T.nilable(Proc),
    ).returns(T.nilable(Symbol))
  }
  def strategy(symbol = T.unsafe(nil), &block)
    # A block may be given with or without a strategy symbol.
    @strategy_block = block if block

    case symbol
    when nil
      @strategy
    when Symbol
      @strategy = symbol
    end
  end

  # Sets the `@throttle` instance variable to the provided `Integer` or returns
  # the `@throttle` instance variable when no argument is provided.
  sig {
    params(
      # Throttle rate of version patch number to use for bumpable versions.
      rate: Integer,
    ).returns(T.nilable(Integer))
  }
  def throttle(rate = T.unsafe(nil))
    case rate
    when nil
      @throttle
    when Integer
      @throttle = rate
    end
  end

  # Sets the `@url` instance variable to the provided argument or returns the
  # `@url` instance variable when no argument is provided. The argument can be
  # a `String` (a URL) or a supported `Symbol` corresponding to a URL in the
  # formula/cask/resource (e.g. `:stable`, `:homepage`, `:head`, `:url`).
  # Any options provided to the method are passed through to `Strategy` methods
  # (`page_headers`, `page_content`).
  sig {
    params(
      # URL to check for version information.
      url: T.any(String, Symbol),
      cookies: T.nilable(T::Hash[String, String]),
      header: T.nilable(T.any(String, T::Array[String])),
      homebrew_curl: T.nilable(T::Boolean),
      post_form: T.nilable(T::Hash[Symbol, String]),
      post_json: T.nilable(T::Hash[Symbol, T.anything]),
      referer: T.nilable(String),
      user_agent: T.nilable(T.any(String, Symbol)),
    ).returns(T.nilable(T.any(String, Symbol)))
  }
  def url(
    url = T.unsafe(nil),
    cookies: nil,
    header: nil,
    homebrew_curl: nil,
    post_form: nil,
    post_json: nil,
    referer: nil,
    user_agent: nil
  )
    # The two POST payload styles are mutually exclusive.
    raise ArgumentError, "Only use `post_form` or `post_json`, not both" if post_form && post_json

    @options.cookies = cookies unless cookies.nil?
    @options.header = header unless header.nil?
    @options.homebrew_curl = homebrew_curl unless homebrew_curl.nil?
    @options.post_form = post_form unless post_form.nil?
    @options.post_json = post_json unless post_json.nil?
    @options.referer = referer unless referer.nil?
    @options.user_agent = user_agent unless user_agent.nil?

    case url
    when nil
      @url
    when String, :head, :homepage, :stable, :url
      @url = url
    when Symbol
      # Any other symbol is rejected to catch typos in livecheck blocks.
      raise ArgumentError, "#{url.inspect} is not a valid URL shorthand"
    end
  end

  delegate url_options: :@options

  delegate arch: :@package_or_resource
  delegate os: :@package_or_resource
  delegate version: :@package_or_resource
  private :arch, :os, :version

  # Returns a `Hash` of all instance variable values.
  # @return [Hash]
  sig { returns(T::Hash[String, T.untyped]) }
  def to_hash
    {
      "options"  => @options.to_hash,
      "cask"     => @referenced_cask_name,
      "formula"  => @referenced_formula_name,
      "regex"    => @regex,
      "skip"     => @skip,
      "skip_msg" => @skip_msg,
      "strategy" => @strategy,
      "throttle" => @throttle,
      "url"      => @url,
    }
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/uninstall.rb | Library/Homebrew/uninstall.rb | # typed: strict
# frozen_string_literal: true
require "dependents_message"
require "installed_dependents"
require "utils/output"
module Homebrew
  # Helper module for uninstalling kegs.
  module Uninstall
    extend ::Utils::Output::Mixin

    # Uninstall the given kegs (grouped by rack). Unless `force` is set,
    # pinned formulae are refused and remaining versions/config files are
    # reported. Marks Homebrew as failed (without uninstalling) when other
    # installed formulae/casks still depend on the kegs, unless
    # `ignore_dependencies` is set.
    sig {
      params(
        kegs_by_rack:        T::Hash[Pathname, T::Array[Keg]],
        casks:               T::Array[Cask::Cask],
        force:               T::Boolean,
        ignore_dependencies: T::Boolean,
        named_args:          T::Array[String],
      ).void
    }
    def self.uninstall_kegs(kegs_by_rack, casks: [], force: false, ignore_dependencies: false, named_args: [])
      handle_unsatisfied_dependents(kegs_by_rack,
                                    casks:,
                                    ignore_dependencies:,
                                    named_args:)
      return if Homebrew.failed?

      kegs_by_rack.each do |rack, kegs|
        if force
          # `--force` removes every version in the rack, pin included,
          # without the per-keg pin check below.
          name = rack.basename

          if rack.directory?
            puts "Uninstalling #{name}... (#{rack.abv})"
            kegs.each do |keg|
              keg.unlink
              keg.uninstall
            end
          end

          rm_pin rack
        else
          kegs.each do |keg|
            begin
              f = Formulary.from_rack(rack)
              if f.pinned?
                onoe "#{f.full_name} is pinned. You must unpin it to uninstall."
                break # exit keg loop and move on to next rack
              end
            rescue
              nil
            end

            keg.lock do
              puts "Uninstalling #{keg}... (#{keg.abv})"
              keg.unlink
              keg.uninstall
              rack = keg.rack
              rm_pin rack

              # Other versions of the formula may remain in the rack.
              if rack.directory?
                versions = rack.subdirs.map(&:basename)
                puts <<~EOS
                  #{keg.name} #{versions.to_sentence} #{versions.one? ? "is" : "are"} still installed.
                  To remove all versions, run:
                    brew uninstall --force #{keg.name}
                EOS
              end

              next unless f

              # Config files under etc are deliberately left in place;
              # tell the user about them instead of deleting.
              paths = f.pkgetc.find.map(&:to_s) if f.pkgetc.exist?
              if paths.present?
                puts
                opoo <<~EOS
                  The following #{f.name} configuration files have not been removed!
                  If desired, remove them manually with `rm -rf`:
                    #{paths.sort.uniq.join("\n ")}
                EOS
              end

              # Also look for config files matching the unversioned name,
              # excluding anything that belongs to another known formula.
              unversioned_name = f.name.gsub(/@.+$/, "")
              maybe_paths = Dir.glob("#{f.etc}/#{unversioned_name}*")
              excluded_names = if Homebrew::EnvConfig.no_install_from_api?
                Formula.names
              else
                Homebrew::API.formula_names
              end
              maybe_paths = maybe_paths.reject do |path|
                # Remove extension only if a file
                # (e.g. directory with name "openssl@1.1" will be trimmed to "openssl@1")
                basename = if File.directory?(path)
                  File.basename(path)
                else
                  File.basename(path, ".*")
                end

                excluded_names.include?(basename)
              end
              maybe_paths -= paths if paths.present?
              if maybe_paths.present?
                puts
                opoo <<~EOS
                  The following may be #{f.name} configuration files and have not been removed!
                  If desired, remove them manually with `rm -rf`:
                    #{maybe_paths.sort.uniq.join("\n ")}
                EOS
              end
            end
          end
        end
      end
    rescue MultipleVersionsInstalledError => e
      ofail e
    ensure
      # If we delete Cellar/newname, then Cellar/oldname symlink
      # can become broken and we have to remove it.
      if HOMEBREW_CELLAR.directory?
        HOMEBREW_CELLAR.children.each do |rack|
          rack.unlink if rack.symlink? && !rack.resolved_path_exists?
        end
      end
    end

    # Report (via check_for_dependents!) when uninstalling would break
    # installed dependents; no-op when `ignore_dependencies` is set.
    sig {
      params(
        kegs_by_rack:        T::Hash[Pathname, T::Array[Keg]],
        casks:               T::Array[Cask::Cask],
        ignore_dependencies: T::Boolean,
        named_args:          T::Array[String],
      ).void
    }
    def self.handle_unsatisfied_dependents(kegs_by_rack, casks: [], ignore_dependencies: false, named_args: [])
      return if ignore_dependencies

      all_kegs = kegs_by_rack.values.flatten(1)
      check_for_dependents!(all_kegs, casks:, named_args:)
    rescue MethodDeprecatedError
      # Silently ignore deprecations when uninstalling.
      nil
    end

    # Returns `true` (after printing/recording the failure message) when any
    # installed formula or cask depends on the given kegs/casks.
    sig { params(kegs: T::Array[Keg], casks: T::Array[Cask::Cask], named_args: T::Array[String]).returns(T::Boolean) }
    def self.check_for_dependents!(kegs, casks: [], named_args: [])
      return false unless (result = InstalledDependents.find_some_installed_dependents(kegs, casks:))

      DependentsMessage.new(*result, named_args:).output
      true
    end

    # Best-effort unpin of a rack; ignores racks with no resolvable formula.
    sig { params(rack: Pathname).void }
    def self.rm_pin(rack)
      Formulary.from_rack(rack).unpin
    rescue
      nil
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/descriptions.rb | Library/Homebrew/descriptions.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "formula"
require "formula_versions"
require "search"
# Helper class for printing and searching descriptions.
class Descriptions
  # Given a regex, find all formulae whose specified fields contain a match.
  # `field` selects what is matched: `:name`, `:desc` or `:either`.
  def self.search(string_or_regex, field, cache_store,
                  eval_all = Homebrew::EnvConfig.eval_all?, cache_store_hash: false)
    cache_store.populate_if_empty!(eval_all:) unless cache_store_hash

    results = if field == :name
      Homebrew::Search.search(cache_store, string_or_regex) { |name, _| name }
    elsif field == :desc
      Homebrew::Search.search(cache_store, string_or_regex) { |_, desc| desc }
    elsif field == :either
      Homebrew::Search.search(cache_store, string_or_regex)
    end

    new(results)
  end

  # Create an actual instance.
  def initialize(descriptions)
    @descriptions = descriptions
  end

  # Take search results -- a hash mapping formula names to descriptions -- and
  # print them. Short names are used when unambiguous; otherwise the full
  # tap-qualified name is shown.
  def print
    placeholder = Formatter.warning("[no description]")

    @descriptions.keys.sort.each do |full_name|
      short = short_names[full_name]
      shown_name = short_name_counts[short] == 1 ? short : full_name
      entry = @descriptions[full_name] || placeholder

      case entry
      when Array
        # Cask-style entries carry [names, description].
        names, desc = entry
        puts "#{Tty.bold}#{shown_name}:#{Tty.reset} (#{names}) #{desc || placeholder}"
      else
        puts "#{Tty.bold}#{shown_name}:#{Tty.reset} #{entry}"
      end
    end
  end

  private

  # Maps each full name to its final path component (the short name).
  def short_names
    @short_names ||= @descriptions.keys.each_with_object({}) do |full_name, mapping|
      mapping[full_name] = full_name.split("/").last
    end
  end

  # Counts how many full names share each short name (default 0).
  def short_name_counts
    @short_name_counts ||= short_names.values
                                      .each_with_object(Hash.new(0)) { |name, tallies| tallies[name] += 1 }
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/utils.rb | Library/Homebrew/utils.rb | # typed: strict
# frozen_string_literal: true
require "context"
module Homebrew
  extend Context

  # Attempt to `require` the given path, returning whether it could be loaded.
  # Returns false (instead of raising) when `path` is nil or missing.
  sig { params(path: T.nilable(T.any(String, Pathname))).returns(T::Boolean) }
  def self.require?(path)
    return false if path.nil?

    if defined?(Warnings)
      # Work around require warning when done repeatedly:
      # https://bugs.ruby-lang.org/issues/21091
      Warnings.ignore(/already initialized constant/, /previous definition of/) do
        require path.to_s
      end
    else
      require path.to_s
    end
    true
  rescue LoadError
    false
  end

  # Need to keep this naming as-is for backwards compatibility.
  # rubocop:disable Naming/PredicateMethod
  sig {
    params(
      cmd: T.nilable(T.any(Pathname, String, [String, String], T::Hash[String, T.nilable(String)])),
      argv0: T.nilable(T.any(Pathname, String, [String, String])),
      args: T.any(Pathname, String),
      options: T.untyped,
      _block: T.nilable(T.proc.void),
    ).returns(T::Boolean)
  }
  # Fork, optionally run the given block in the child (e.g. to tweak its
  # environment), then `exec` the command. Returns whether the child exited
  # successfully.
  def self._system(cmd, argv0 = nil, *args, **options, &_block)
    pid = fork do
      yield if block_given?
      args.map!(&:to_s)
      begin
        if argv0
          exec(cmd, argv0, *args, **options)
        else
          exec(cmd, *args, **options)
        end
      rescue
        nil
      end
      # `exec` replaces the child process, so reaching this line means it failed.
      exit! 1 # never gets here unless exec failed
    end
    Process.wait(pid)
    # $CHILD_STATUS is the `English` library's alias for `$?`.
    $CHILD_STATUS.success?
  end
  # TODO: make private_class_method when possible
  # private_class_method :_system
  # rubocop:enable Naming/PredicateMethod

  sig {
    params(
      cmd: T.any(Pathname, String, [String, String], T::Hash[String, T.nilable(String)]),
      argv0: T.nilable(T.any(Pathname, String, [String, String])),
      args: T.any(Pathname, String),
      options: T.untyped,
    ).returns(T::Boolean)
  }
  # Like `Kernel#system` but, when verbose, echoes the command first with
  # RUBY_PATH and the load path shortened for readability.
  def self.system(cmd, argv0 = nil, *args, **options)
    if verbose?
      out = (options[:out] == :err) ? $stderr : $stdout
      out.puts "#{cmd} #{args * " "}".gsub(RUBY_PATH, "ruby")
                                     .gsub($LOAD_PATH.join(File::PATH_SEPARATOR).to_s, "$LOAD_PATH")
    end
    _system(cmd, argv0, *args, **options)
  end

  # `Module` and `Regexp` are global variables used as types here so they don't need to be imported
  # rubocop:disable Style/GlobalVars
  sig { params(the_module: T::Module[T.anything], pattern: Regexp).void }
  # Wrap every instance method of `the_module` whose name matches `pattern`
  # so that its cumulative wall-clock time is accumulated in $times and
  # printed (sorted, slowest last) when the process exits.
  def self.inject_dump_stats!(the_module, pattern)
    @injected_dump_stat_modules ||= T.let({}, T.nilable(T::Hash[T::Module[T.anything], T::Array[String]]))
    @injected_dump_stat_modules[the_module] ||= []
    injected_methods = @injected_dump_stat_modules.fetch(the_module)
    # NOTE(review): nothing ever appends to `injected_methods`, so a repeat
    # call with an overlapping pattern would re-wrap methods — confirm intended.
    the_module.module_eval do
      instance_methods.grep(pattern).each do |name|
        next if injected_methods.include? name

        method = instance_method(name)
        define_method(name) do |*args, &block|
          require "time"
          time = Time.now
          begin
            method.bind_call(self, *args, &block)
          ensure
            # Accumulate even when the wrapped method raises.
            $times[name] ||= 0
            $times[name] += Time.now - time
          end
        end
      end
    end

    # Install the at_exit reporter only once (the first time this is called).
    return unless $times.nil?

    $times = {}
    at_exit do
      col_width = [$times.keys.map(&:size).max.to_i + 2, 15].max
      $times.sort_by { |_k, v| v }.each do |method, time|
        puts format("%<method>-#{col_width}s %<time>0.4f sec", method: "#{method}:", time:)
      end
    end
  end
  # rubocop:enable Style/GlobalVars
end
module Utils
  # Strips the rightmost segment from a constant expression:
  #
  #   deconstantize('Net::HTTP')   # => "Net"
  #   deconstantize('::Net::HTTP') # => "::Net"
  #   deconstantize('String')      # => ""
  #   deconstantize('::String')    # => ""
  #   deconstantize('')            # => ""
  #
  # See also #demodulize.
  # @see https://github.com/rails/rails/blob/b0dd7c7/activesupport/lib/active_support/inflector/methods.rb#L247-L258
  #   `ActiveSupport::Inflector.deconstantize`
  sig { params(path: String).returns(String) }
  def self.deconstantize(path)
    last_separator = path.rindex("::")
    # Everything before the final "::"; the empty string when there is none.
    T.must(path[0, last_separator || 0])
  end

  # Strips the module part from a constant expression:
  #
  #   demodulize('ActiveSupport::Inflector::Inflections') # => "Inflections"
  #   demodulize('Inflections')                           # => "Inflections"
  #   demodulize('::Inflections')                         # => "Inflections"
  #   demodulize('')                                      # => ""
  #
  # See also #deconstantize.
  # @see https://github.com/rails/rails/blob/b0dd7c7/activesupport/lib/active_support/inflector/methods.rb#L230-L245
  #   `ActiveSupport::Inflector.demodulize`
  # @raise [ArgumentError] if the provided path is nil
  sig { params(path: T.nilable(String)).returns(String) }
  def self.demodulize(path)
    raise ArgumentError, "No constant path provided" if path.nil?

    last_separator = path.rindex("::")
    return path unless last_separator

    T.must(path[(last_separator + 2)..])
  end

  # Lightweight alternative to `ActiveSupport::Inflector.pluralize`:
  # combines `stem` with the `singular` or `plural` suffix depending on
  # `count`, optionally prefixing the count itself.
  sig {
    params(stem: String, count: Integer, plural: String, singular: String, include_count: T::Boolean).returns(String)
  }
  def self.pluralize(stem, count, plural: "s", singular: "", include_count: false)
    # Irregular nouns Homebrew cares about.
    if stem == "formula"
      plural = "e"
    elsif ["dependency", "try"].include?(stem)
      stem = stem.delete_suffix("y")
      plural = "ies"
      singular = "y"
    end

    suffix = (count == 1) ? singular : plural
    word = "#{stem}#{suffix}"
    include_count ? "#{count} #{word}" : word
  end

  # Splits an author string of the form "Name <email>" into its parts.
  sig { params(author: String).returns({ email: String, name: String }) }
  def self.parse_author!(author)
    if (match = /^(?<name>[^<]+?)[ \t]*<(?<email>[^>]+?)>$/.match(author))
      name = match[:name]
      email = match[:email]
    end
    raise UsageError, "Unable to parse name and email." if name.blank? && email.blank?

    { name: T.must(name), email: T.must(email) }
  end

  # Makes an underscored, lowercase form from the expression in the string,
  # converting '::' namespaces to '/' paths:
  #
  #   underscore('ActiveModel')         # => "active_model"
  #   underscore('ActiveModel::Errors') # => "active_model/errors"
  #
  # @see https://github.com/rails/rails/blob/v6.1.7.2/activesupport/lib/active_support/inflector/methods.rb#L81-L100
  #   `ActiveSupport::Inflector.underscore`
  sig { params(camel_cased_word: T.any(String, Symbol)).returns(String) }
  def self.underscore(camel_cased_word)
    return camel_cased_word.to_s unless /[A-Z-]|::/.match?(camel_cased_word)

    word = camel_cased_word.to_s.gsub("::", "/")
    # Insert "_" at lower/upper boundaries and inside runs of capitals that
    # are followed by a lowercase letter.
    word.gsub!(/([A-Z])(?=[A-Z][a-z])|([a-z\d])(?=[A-Z])/) do
      T.must(::Regexp.last_match(1) || ::Regexp.last_match(2)) << "_"
    end
    word.tr!("-", "_")
    word.downcase!
    word
  end

  # Control characters and path separators: anything unsafe inside a single
  # path component.
  SAFE_FILENAME_REGEX = /[[:cntrl:]#{Regexp.escape("#{File::SEPARATOR}#{File::ALT_SEPARATOR}")}]/o
  private_constant :SAFE_FILENAME_REGEX

  sig { params(basename: String).returns(T::Boolean) }
  def self.safe_filename?(basename)
    !basename.match?(SAFE_FILENAME_REGEX)
  end

  sig { params(basename: String).returns(String) }
  def self.safe_filename(basename)
    basename.gsub(SAFE_FILENAME_REGEX, "")
  end

  # Converts a string starting with `:` to a symbol; returns any other
  # string unchanged.
  #
  #   convert_to_string_or_symbol(":example") # => :example
  #   convert_to_string_or_symbol("example")  # => "example"
  sig { params(string: String).returns(T.any(String, Symbol)) }
  def self.convert_to_string_or_symbol(string)
    if string.start_with?(":")
      T.must(string[1..]).to_sym
    else
      string
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/linux_runner_spec.rb | Library/Homebrew/linux_runner_spec.rb | # typed: strict
# frozen_string_literal: true
class LinuxRunnerSpec < T::Struct
  # Display name of the CI job.
  const :name, String
  # Label of the GitHub Actions runner the job is scheduled on.
  const :runner, String
  # Container configuration (e.g. image) the job runs inside.
  const :container, T::Hash[Symbol, String]
  # Working directory for the job.
  const :workdir, String
  # Timeout for the job (units defined by the consuming workflow).
  const :timeout, Integer
  # Whether cleanup should run for this job.
  const :cleanup, T::Boolean
  # Formulae this runner will test; mutable, defaults to none.
  prop :testing_formulae, T::Array[String], default: []

  sig {
    returns({
      name: String,
      runner: String,
      container: T::Hash[Symbol, String],
      workdir: String,
      timeout: Integer,
      cleanup: T::Boolean,
      testing_formulae: String,
    })
  }
  # Hash representation of the spec; note that `testing_formulae` is
  # flattened into a comma-separated string for serialization.
  def to_h
    {
      name:,
      runner:,
      container:,
      workdir:,
      timeout:,
      cleanup:,
      testing_formulae: testing_formulae.join(","),
    }
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/tap_auditor.rb | Library/Homebrew/tap_auditor.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
module Homebrew
# Auditor for checking common violations in {Tap}s.
class TapAuditor
  attr_reader :name, :path, :formula_names, :formula_aliases, :formula_renames, :cask_tokens, :cask_renames,
              :tap_audit_exceptions, :tap_style_exceptions, :problems

  sig { params(tap: Tap, strict: T.nilable(T::Boolean)).void }
  def initialize(tap, strict:)
    # NOTE(review): `strict` is accepted but not referenced in this body.
    # Audit against the tap's on-disk contents rather than API-loaded data.
    Homebrew.with_no_api_env do
      tap.clear_cache if Homebrew::EnvConfig.automatically_set_no_install_from_api?
      @name = tap.name
      @path = tap.path
      @tap_audit_exceptions = tap.audit_exceptions
      @tap_style_exceptions = tap.style_exceptions
      @tap_synced_versions_formulae = tap.synced_versions_formulae
      @tap_autobump = tap.autobump
      @tap_official = tap.official?
      @problems = []
      # Strip any tap qualifier ("user/repo/token") down to the bare token.
      @cask_tokens = tap.cask_tokens.map do |cask_token|
        cask_token.split("/").last
      end
      @formula_aliases = tap.aliases.map do |formula_alias|
        formula_alias.split("/").last
      end
      @formula_renames = tap.formula_renames
      @cask_renames = tap.cask_renames
      @formula_names = tap.formula_names.map do |formula_name|
        formula_name.split("/").last
      end
    end
  end

  # Run all audits; findings are appended to #problems.
  sig { void }
  def audit
    audit_json_files
    audit_tap_formula_lists
    audit_aliases_renames_duplicates
  end

  # Every known tap JSON file must parse as valid JSON.
  sig { void }
  def audit_json_files
    json_patterns = Tap::HOMEBREW_TAP_JSON_FILES.map { |pattern| @path/pattern }
    Pathname.glob(json_patterns).each do |file|
      JSON.parse file.read
    rescue JSON::ParserError
      problem "#{file.to_s.delete_prefix("#{@path}/")} contains invalid JSON"
    end
  end

  # The tap's formula/cask lists may only reference things that exist in it.
  sig { void }
  def audit_tap_formula_lists
    check_formula_list_directory "audit_exceptions", @tap_audit_exceptions
    check_formula_list_directory "style_exceptions", @tap_style_exceptions
    check_renames "formula_renames.json", @formula_renames, @formula_names, @formula_aliases
    check_renames "cask_renames.json", @cask_renames, @cask_tokens
    # Official taps are exempt from the autobump list check.
    check_formula_list ".github/autobump.txt", @tap_autobump unless @tap_official
    check_formula_list "synced_versions_formulae", @tap_synced_versions_formulae.flatten
  end

  # A name must not be both an alias and a rename source.
  sig { void }
  def audit_aliases_renames_duplicates
    duplicates = formula_aliases & formula_renames.keys
    return if duplicates.none?

    problem "The following should either be an alias or a rename, not both: #{duplicates.to_sentence}"
  end

  # Record an audit finding.
  sig { params(message: String).void }
  def problem(message)
    @problems << ({ message:, location: nil, corrected: false })
  end

  private

  # Validate that `list` (a JSON array of names or a hash keyed by name)
  # only references formulae/casks present in this tap.
  sig { params(list_file: String, list: T.untyped).void }
  def check_formula_list(list_file, list)
    list_file += ".json" if File.extname(list_file).empty?
    unless [Hash, Array].include? list.class
      problem <<~EOS
        #{list_file} should contain a JSON array
        of formula names or a JSON object mapping formula names to values
      EOS
      return
    end

    list = list.keys if list.is_a? Hash
    invalid_formulae_casks = list.select do |formula_or_cask_name|
      formula_names.exclude?(formula_or_cask_name) &&
        formula_aliases.exclude?(formula_or_cask_name) &&
        cask_tokens.exclude?(formula_or_cask_name)
    end

    return if invalid_formulae_casks.empty?

    problem <<~EOS
      #{list_file} references
      formulae or casks that are not found in the #{@name} tap.
      Invalid formulae or casks: #{invalid_formulae_casks.join(", ")}
    EOS
  end

  # Apply #check_formula_list to every list in a directory-shaped hash.
  sig { params(directory_name: String, lists: Hash).void }
  def check_formula_list_directory(directory_name, lists)
    lists.each do |list_name, list|
      check_formula_list "#{directory_name}/#{list_name}", list
    end
  end

  # Validate a renames mapping (old name => new name): no ".rb" extensions,
  # targets must exist, chained renames must be collapsed, and old names
  # must not still exist in the tap.
  sig {
    params(list_file: String, renames_hash: T::Hash[String, String], valid_tokens: T::Array[String],
           valid_aliases: T::Array[String]).void
  }
  def check_renames(list_file, renames_hash, valid_tokens, valid_aliases = [])
    item_type = list_file.include?("cask") ? "casks" : "formulae"

    # Collect all validation issues in a single pass
    invalid_format_entries = []
    invalid_targets = []
    chained_rename_suggestions = []
    conflicts = []

    renames_hash.each do |old_name, new_name|
      # Check for .rb extensions
      if old_name.end_with?(".rb") || new_name.end_with?(".rb")
        invalid_format_entries << "\"#{old_name}\": \"#{new_name}\""
      end

      # Check that new name exists
      if valid_tokens.exclude?(new_name) && valid_aliases.exclude?(new_name) && !renames_hash.key?(new_name)
        invalid_targets << new_name
      end

      # Check for chained renames and follow to final target
      if renames_hash.key?(new_name)
        final = new_name
        # `seen` guards against cycles in the rename chain.
        seen = Set.new([old_name, new_name])
        while renames_hash.key?(final)
          next_name = renames_hash[final]
          break if next_name.nil? || seen.include?(next_name)

          final = next_name
          seen << final
        end
        chained_rename_suggestions << " \"#{old_name}\": \"#{final}\" (instead of chained rename)"
      end

      # Check for conflicts
      conflicts << old_name if valid_tokens.include?(old_name)
    end

    if invalid_format_entries.any?
      problem <<~EOS
        #{list_file} contains entries with '.rb' file extensions.
        Rename entries should use formula/cask names only, without '.rb' extensions.
        Invalid entries: #{invalid_format_entries.join(", ")}
      EOS
    end

    if invalid_targets.any?
      problem <<~EOS
        #{list_file} contains renames to #{item_type} that do not exist in the #{@name} tap.
        Invalid targets: #{invalid_targets.join(", ")}
      EOS
    end

    if chained_rename_suggestions.any?
      problem <<~EOS
        #{list_file} contains chained renames that should be collapsed.
        Chained renames don't work automatically; each old name should point directly to the final target:
        #{chained_rename_suggestions.join("\n")}
      EOS
    end

    return if conflicts.none?

    problem <<~EOS
      #{list_file} contains old names that conflict with existing #{item_type} in the #{@name} tap.
      Renames only work after the old #{item_type} are deleted. Conflicting names: #{conflicts.join(", ")}
    EOS
  end
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/postinstall.rb | Library/Homebrew/postinstall.rb | # typed: strict
# frozen_string_literal: true
raise "#{__FILE__} must not be loaded via `require`." if $PROGRAM_NAME != __FILE__

# Exit quietly (status 130) on Ctrl-C until the real handler is restored below.
old_trap = trap("INT") { exit! 130 }

require_relative "global"
require "fcntl"
require "utils/socket"
require "cli/parser"
require "cmd/postinstall"
require "json/add/exception"
require "extend/pathname/write_mkpath_extension"

begin
  # Undocumented opt-out for internal use.
  # We need to allow formulae from paths here due to how we pass them through.
  ENV["HOMEBREW_INTERNAL_ALLOW_PACKAGES_FROM_PATHS"] = "1"
  args = Homebrew::Cmd::Postinstall.new.args
  # Receive the error-reporting IO over the UNIX socket named by
  # HOMEBREW_ERROR_PIPE and keep it from leaking into exec'd children.
  error_pipe = Utils::UNIXSocketExt.open(ENV.fetch("HOMEBREW_ERROR_PIPE"), &:recv_io)
  error_pipe.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)

  trap("INT", old_trap)

  formula = args.named.to_resolved_formulae.fetch(0)
  if args.debug? && !Homebrew::EnvConfig.disable_debrew?
    require "debrew"
    formula.extend(Debrew::Formula)
  end
  Pathname.activate_extensions!
  formula.run_post_install
# Handle all possible exceptions.
rescue Exception => e # rubocop:disable Lint/RescueException
  # Serialize the failure back over the error pipe (JSON via
  # json/add/exception) before exiting with failure.
  error_pipe&.puts e.to_json
  error_pipe&.close
  exit! 1
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/os.rb | Library/Homebrew/os.rb | # typed: strict
# frozen_string_literal: true
require "version"
# Helper functions for querying operating system information.
module OS
  # Check whether the operating system is macOS.
  #
  # @api public
  sig { returns(T::Boolean) }
  def self.mac?
    # HOMEBREW_TEST_GENERIC_OS forces generic (neither macOS nor Linux) behaviour.
    return false if ENV["HOMEBREW_TEST_GENERIC_OS"]

    RbConfig::CONFIG["host_os"].include? "darwin"
  end

  # Check whether the operating system is Linux.
  #
  # @api public
  sig { returns(T::Boolean) }
  def self.linux?
    return false if ENV["HOMEBREW_TEST_GENERIC_OS"]

    RbConfig::CONFIG["host_os"].include? "linux"
  end

  # Get the kernel version (memoized; the `uname` release field).
  #
  # @api public
  sig { returns(Version) }
  def self.kernel_version
    require "etc"
    @kernel_version ||= T.let(Version.new(Etc.uname.fetch(:release)), T.nilable(Version))
  end

  # Get the kernel name (memoized; the `uname` sysname field).
  #
  # @api public
  sig { returns(String) }
  def self.kernel_name
    require "etc"
    @kernel_name ||= T.let(Etc.uname.fetch(:sysname), T.nilable(String))
  end

  # Global OS version string; HOMEBREW_OS_VERSION must be set in the environment.
  ::OS_VERSION = T.let(ENV.fetch("HOMEBREW_OS_VERSION").freeze, String)

  # See Linux-CI.md
  LINUX_CI_OS_VERSION = "Ubuntu 22.04"
  LINUX_GLIBC_CI_VERSION = "2.35"
  LINUX_GLIBC_NEXT_CI_VERSION = "2.39"
  LINUX_GCC_CI_VERSION = "12" # https://packages.ubuntu.com/jammy/gcc-12
  LINUX_LIBSTDCXX_CI_VERSION = "6.0.30" # https://packages.ubuntu.com/jammy/libstdc++6
  LINUX_PREFERRED_GCC_COMPILER_FORMULA = T.let("gcc@#{LINUX_GCC_CI_VERSION}".freeze, String)
  LINUX_PREFERRED_GCC_RUNTIME_FORMULA = "gcc"

  if OS.mac?
    require "os/mac"
    require "hardware"
    # Don't tell people to report issues on non-Tier 1 configurations.
    if !OS::Mac.version.prerelease? &&
       !OS::Mac.version.outdated_release? &&
       ARGV.none? { |v| v.start_with?("--cc=") } &&
       (HOMEBREW_PREFIX.to_s == HOMEBREW_DEFAULT_PREFIX ||
       (HOMEBREW_PREFIX.to_s == HOMEBREW_MACOS_ARM_DEFAULT_PREFIX && Hardware::CPU.arm?))
      ISSUES_URL = "https://docs.brew.sh/Troubleshooting"
    end
    PATH_OPEN = "/usr/bin/open"
  elsif OS.linux?
    require "os/linux"
    ISSUES_URL = "https://docs.brew.sh/Troubleshooting"
    # Prefer `wslview` under Windows Subsystem for Linux, `xdg-open` otherwise.
    PATH_OPEN = if OS::Linux.wsl? && (wslview = which("wslview").presence)
      wslview.to_s
    else
      "xdg-open"
    end.freeze
  end

  # True when ISSUES_URL was not defined above, i.e. this is not a
  # configuration users should report issues from.
  sig { returns(T::Boolean) }
  def self.not_tier_one_configuration?
    !defined?(OS::ISSUES_URL)
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/warnings.rb | Library/Homebrew/warnings.rb | # typed: strict
# frozen_string_literal: true
require "warning"
# Helper module for handling warnings.
module Warnings
  # Named groups of warning patterns that are commonly silenced together.
  COMMON_WARNINGS = T.let({
    parser_syntax: [
      %r{warning: parser/current is loading parser/ruby\d+, which recognizes},
      /warning: \d+\.\d+\.\d+-compliant syntax, but you are running \d+\.\d+\.\d+\./,
      %r{warning: please see https://github\.com/whitequark/parser#compatibility-with-ruby-mri\.},
    ],
  }.freeze, T::Hash[Symbol, T::Array[Regexp]])

  # Silence the given warnings (regexps, or symbols naming COMMON_WARNINGS
  # groups). With a block: run it with the warnings silenced, then reset.
  sig { params(warnings: T.any(Symbol, Regexp), _block: T.nilable(T.proc.void)).void }
  def self.ignore(*warnings, &_block)
    # Expand symbolic shorthands into their regexp lists.
    patterns = warnings.flat_map do |warning|
      if warning.is_a?(Symbol) && COMMON_WARNINGS.key?(warning)
        COMMON_WARNINGS.fetch(warning)
      else
        [warning]
      end
    end
    patterns.each { |pattern| Warning.ignore(pattern) }

    return unless block_given?

    yield
    Warning.clear
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/sbom.rb | Library/Homebrew/sbom.rb | # typed: strict
# frozen_string_literal: true
require "cxxstdlib"
require "json"
require "development_tools"
require "cachable"
require "utils/curl"
require "utils/output"
# Rather than calling `new` directly, use one of the class methods like {SBOM.create}.
class SBOM
  include Utils::Output::Mixin

  FILENAME = "sbom.spdx.json"
  SCHEMA_FILE = T.let((HOMEBREW_LIBRARY_PATH/"data/schemas/sbom.json").freeze, Pathname)

  # Frozen snapshot of where/how the formula's source was obtained.
  class Source < T::Struct
    const :path, String
    const :tap_name, T.nilable(String)
    const :tap_git_head, T.nilable(String)
    const :spec, Symbol
    const :patches, T::Array[T.any(EmbeddedPatch, ExternalPatch)]
    const :bottle, T::Hash[String, T.untyped]
    const :version, T.nilable(Version)
    const :url, T.nilable(String)
    const :checksum, T.nilable(Checksum)
  end

  # Instantiates a {SBOM} for a new installation of a formula.
  sig { params(formula: Formula, tab: Tab).returns(T.attached_class) }
  def self.create(formula, tab)
    active_spec = if formula.stable?
      T.must(formula.stable)
    else
      T.must(formula.head)
    end
    active_spec_sym = formula.active_spec_sym

    new(
      name: formula.name,
      homebrew_version: HOMEBREW_VERSION,
      spdxfile: SBOM.spdxfile(formula),
      time: tab.time || Time.now,
      source_modified_time: tab.source_modified_time.to_i,
      compiler: tab.compiler,
      stdlib: tab.stdlib,
      runtime_dependencies: SBOM.runtime_deps_hash(Array(tab.runtime_dependencies)),
      license: SPDX.license_expression_to_string(formula.license),
      built_on: DevelopmentTools.build_system_info,
      source: Source.new(
        path: formula.specified_path.to_s,
        tap_name: formula.tap&.name,
        # We can only get `tap_git_head` if the tap is installed locally
        tap_git_head: (T.must(formula.tap).git_head if formula.tap&.installed?),
        spec: active_spec_sym,
        patches: active_spec.patches,
        bottle: formula.bottle_hash,
        version: active_spec.version,
        url: active_spec.url,
        checksum: active_spec.checksum,
      ),
    )
  end

  # Path of the SPDX document inside the formula's keg.
  sig { params(formula: Formula).returns(Pathname) }
  def self.spdxfile(formula)
    formula.prefix/FILENAME
  end

  # Enrich Tab-style runtime dependency hashes with name/licence/bottle details.
  sig { params(deps: T::Array[T::Hash[String, T.untyped]]).returns(T::Array[T::Hash[String, T.anything]]) }
  def self.runtime_deps_hash(deps)
    deps.map do |dep|
      full_name = dep.fetch("full_name")
      dep_formula = Formula[full_name]
      {
        "full_name" => full_name,
        "pkg_version" => dep.fetch("pkg_version"),
        "name" => dep_formula.name,
        "license" => SPDX.license_expression_to_string(dep_formula.license),
        "bottle" => dep_formula.bottle_hash,
        "formula_pkg_version" => dep_formula.pkg_version.to_s,
      }
    end
  end

  # Whether an SBOM document already exists for this formula.
  sig { params(formula: Formula).returns(T::Boolean) }
  def self.exist?(formula)
    spdxfile(formula).exist?
  end

  # Parsed (and memoized) JSON schema used for validation.
  sig { returns(T::Hash[String, T.anything]) }
  def self.schema
    @schema ||= T.let(JSON.parse(SCHEMA_FILE.read, freeze: true), T.nilable(T::Hash[String, T.untyped]))
  end

  # Validate the generated document against the SPDX JSON schema. Returns the
  # list of error strings (empty when valid, or when json_schemer is not
  # available and enforcement is off).
  sig { params(bottling: T::Boolean).returns(T::Array[String]) }
  def schema_validation_errors(bottling: false)
    unless Homebrew.require? "json_schemer"
      error_message = "Need json_schemer to validate SBOM, run `brew install-bundler-gems --add-groups=bottle`!"
      odie error_message if ENV["HOMEBREW_ENFORCE_SBOM"]
      return []
    end
    schemer = JSONSchemer.schema(SBOM.schema)
    data = to_spdx_sbom(bottling:)
    schemer.validate(data).map { |error| error["error"] }
  end

  # Whether the generated document passes schema validation; prints the
  # errors (and optionally aborts) when it does not.
  sig { params(bottling: T::Boolean).returns(T::Boolean) }
  def valid?(bottling: false)
    validation_errors = schema_validation_errors(bottling:)
    return true if validation_errors.empty?

    opoo "SBOM validation errors:"
    # Fixed: was `validation_errors.each(&:puts)`, which invokes the private
    # `Kernel#puts` *on* each error string with no arguments (raising
    # NoMethodError via Symbol#to_proc) instead of printing the error itself.
    validation_errors.each { |error| puts error }
    odie "Failed to validate SBOM against JSON schema!" if ENV["HOMEBREW_ENFORCE_SBOM"]
    false
  end

  # Validate (unless disabled) and atomically write the document to the keg.
  sig { params(validate: T::Boolean, bottling: T::Boolean).void }
  def write(validate: true, bottling: false)
    # If this is a new installation, the cache of installed formulae
    # will no longer be valid.
    Formula.clear_cache unless spdxfile.exist?

    if validate && !valid?(bottling:)
      opoo "SBOM is not valid, not writing to disk!"
      return
    end

    spdxfile.atomic_write(JSON.pretty_generate(to_spdx_sbom(bottling:)))
  end

  private

  sig { returns(String) }
  attr_reader :name, :homebrew_version

  sig { returns(T.any(Integer, Time)) }
  attr_reader :time

  sig { returns(T.nilable(T.any(String, Symbol))) }
  attr_reader :stdlib

  sig { returns(Source) }
  attr_reader :source

  sig { returns(T::Hash[String, T.nilable(String)]) }
  attr_reader :built_on

  sig { returns(T.nilable(String)) }
  attr_reader :license

  sig { returns(Pathname) }
  attr_accessor :spdxfile

  sig {
    params(
      name: String,
      homebrew_version: String,
      spdxfile: Pathname,
      time: T.any(Integer, Time),
      source_modified_time: Integer,
      compiler: T.any(String, Symbol),
      stdlib: T.nilable(T.any(String, Symbol)),
      runtime_dependencies: T::Array[T::Hash[String, T.untyped]],
      license: T.nilable(String),
      built_on: T::Hash[String, T.nilable(String)],
      source: Source,
    ).void
  }
  def initialize(name:, homebrew_version:, spdxfile:, time:, source_modified_time:,
                 compiler:, stdlib:, runtime_dependencies:, license:, built_on:, source:)
    @name = name
    @homebrew_version = homebrew_version
    @spdxfile = spdxfile
    @time = time
    @source_modified_time = source_modified_time
    @compiler = compiler
    @stdlib = stdlib
    @runtime_dependencies = runtime_dependencies
    @license = license
    @built_on = built_on
    @source = source
  end

  sig {
    params(
      runtime_dependency_declaration: T::Array[T::Hash[Symbol, T.untyped]],
      compiler_declaration: T::Hash[String, T.untyped],
      bottling: T::Boolean,
    ).returns(T::Array[T::Hash[Symbol, T.untyped]])
  }
  # Build the SPDX `relationships` array linking runtime dependencies,
  # patches, compiler and stdlib to the formula's packages.
  def generate_relations_json(runtime_dependency_declaration, compiler_declaration, bottling:)
    runtime = runtime_dependency_declaration.map do |dependency|
      {
        spdxElementId: dependency[:SPDXID],
        relationshipType: "RUNTIME_DEPENDENCY_OF",
        relatedSpdxElement: "SPDXRef-Bottle-#{name}",
      }
    end
    patches = source.patches.each_with_index.map do |_patch, index|
      {
        spdxElementId: "SPDXRef-Patch-#{name}-#{index}",
        relationshipType: "PATCH_APPLIED",
        relatedSpdxElement: "SPDXRef-Archive-#{name}-src",
      }
    end

    base = T.let([{
      spdxElementId: "SPDXRef-File-#{name}",
      relationshipType: "PACKAGE_OF",
      relatedSpdxElement: "SPDXRef-Archive-#{name}-src",
    }], T::Array[T::Hash[Symbol, T.untyped]])

    # Compiler/stdlib relationships only apply to local builds, not when
    # producing a reproducible bottle.
    unless bottling
      base << {
        spdxElementId: "SPDXRef-Compiler",
        relationshipType: "BUILD_TOOL_OF",
        relatedSpdxElement: "SPDXRef-Package-#{name}-src",
      }
      if compiler_declaration["SPDXRef-Stdlib"].present?
        base << {
          spdxElementId: "SPDXRef-Stdlib",
          relationshipType: "DEPENDENCY_OF",
          relatedSpdxElement: "SPDXRef-Bottle-#{name}",
        }
      end
    end

    runtime + patches + base
  end

  sig {
    params(
      runtime_dependency_declaration: T::Array[T::Hash[Symbol, T.anything]],
      compiler_declaration: T::Hash[String, T::Hash[Symbol, T.anything]],
      bottling: T::Boolean,
    ).returns(T::Array[T::Hash[Symbol, T.untyped]])
  }
  # Build the SPDX `packages` array: source archive, runtime dependencies,
  # compiler/stdlib (local builds only) and the bottle (when known).
  def generate_packages_json(runtime_dependency_declaration, compiler_declaration, bottling:)
    bottle = []
    if !bottling && (bottle_info = get_bottle_info(source.bottle)) &&
       spec_symbol == :stable && (stable_version = source.version)
      bottle << {
        SPDXID: "SPDXRef-Bottle-#{name}",
        name: name.to_s,
        versionInfo: stable_version.to_s,
        filesAnalyzed: false,
        licenseDeclared: assert_value(nil),
        builtDate: source_modified_time.to_s,
        licenseConcluded: assert_value(license),
        downloadLocation: bottle_info.fetch("url"),
        copyrightText: assert_value(nil),
        externalRefs: [
          {
            referenceCategory: "PACKAGE-MANAGER",
            referenceLocator: "pkg:brew/#{tap}/#{name}@#{stable_version}",
            referenceType: "purl",
          },
        ],
        checksums: [
          {
            algorithm: "SHA256",
            checksumValue: bottle_info.fetch("sha256"),
          },
        ],
      }
    end

    compiler_declarations = if bottling
      []
    else
      compiler_declaration.values
    end

    [
      {
        SPDXID: "SPDXRef-Archive-#{name}-src",
        name: name.to_s,
        versionInfo: spec_version.to_s,
        filesAnalyzed: false,
        licenseDeclared: assert_value(nil),
        builtDate: source_modified_time.to_s,
        licenseConcluded: assert_value(license),
        downloadLocation: source.url,
        copyrightText: assert_value(nil),
        externalRefs: [],
        checksums: [
          {
            algorithm: "SHA256",
            checksumValue: source.checksum.to_s,
          },
        ],
      },
    ] + runtime_dependency_declaration + compiler_declarations + bottle
  end

  sig {
    params(bottling: T::Boolean)
      .returns(T::Array[T::Hash[Symbol, T.any(T::Boolean, String, T::Array[T::Hash[Symbol, String]])]])
  }
  # SPDX package entries for each runtime dependency with a usable bottle
  # (empty when bottling, for reproducibility).
  def full_spdx_runtime_dependencies(bottling:)
    return [] if bottling || @runtime_dependencies.blank?

    @runtime_dependencies.compact.filter_map do |dependency|
      next unless dependency.present?

      bottle_info = get_bottle_info(dependency["bottle"])
      next unless bottle_info.present?

      # Only set bottle URL if the dependency is the same version as the formula/bottle.
      bottle_url = bottle_info["url"] if dependency["pkg_version"] == dependency["formula_pkg_version"]
      dependency_json = {
        # NOTE(review): the doubled "SPDXRef-Package-SPDXRef-" prefix is kept
        # as-is since existing documents use this identifier format.
        SPDXID: "SPDXRef-Package-SPDXRef-#{dependency["name"].tr("/", "-")}-#{dependency["pkg_version"]}",
        name: dependency["name"],
        versionInfo: dependency["pkg_version"],
        filesAnalyzed: false,
        licenseDeclared: assert_value(nil),
        licenseConcluded: assert_value(dependency["license"]),
        downloadLocation: assert_value(bottle_url),
        copyrightText: assert_value(nil),
        checksums: [
          {
            algorithm: "SHA256",
            checksumValue: assert_value(bottle_info["sha256"]),
          },
        ],
        externalRefs: [
          {
            referenceCategory: "PACKAGE-MANAGER",
            referenceLocator: "pkg:brew/#{dependency["full_name"]}@#{dependency["pkg_version"]}",
            referenceType: "purl",
          },
        ],
      }
      dependency_json
    end
  end

  # Assemble the complete SPDX 2.3 document as a hash.
  sig { params(bottling: T::Boolean).returns(T::Hash[Symbol, T.anything]) }
  def to_spdx_sbom(bottling:)
    runtime_full = full_spdx_runtime_dependencies(bottling:)

    compiler_info = {
      "SPDXRef-Compiler" => {
        SPDXID: "SPDXRef-Compiler",
        name: compiler.to_s,
        versionInfo: assert_value(built_on["xcode"]),
        filesAnalyzed: false,
        licenseDeclared: assert_value(nil),
        licenseConcluded: assert_value(nil),
        copyrightText: assert_value(nil),
        downloadLocation: assert_value(nil),
        checksums: [],
        externalRefs: [],
      },
    }

    if stdlib.present?
      compiler_info["SPDXRef-Stdlib"] = {
        SPDXID: "SPDXRef-Stdlib",
        name: stdlib.to_s,
        versionInfo: stdlib.to_s,
        filesAnalyzed: false,
        licenseDeclared: assert_value(nil),
        licenseConcluded: assert_value(nil),
        copyrightText: assert_value(nil),
        downloadLocation: assert_value(nil),
        checksums: [],
        externalRefs: [],
      }
    end

    # Improve reproducibility when bottling.
    if bottling
      created = source_modified_time.iso8601
      creators = ["Tool: https://github.com/Homebrew/brew"]
    else
      created = Time.at(time).utc.iso8601
      creators = ["Tool: https://github.com/Homebrew/brew@#{homebrew_version}"]
    end

    packages = generate_packages_json(runtime_full, compiler_info, bottling:)
    {
      SPDXID: "SPDXRef-DOCUMENT",
      spdxVersion: "SPDX-2.3",
      name: "SBOM-SPDX-#{name}-#{spec_version}",
      creationInfo: { created:, creators: },
      dataLicense: "CC0-1.0",
      documentNamespace: "https://formulae.brew.sh/spdx/#{name}-#{spec_version}.json",
      documentDescribes: packages.map { |dependency| dependency[:SPDXID] },
      files: [],
      packages:,
      relationships: generate_relations_json(runtime_full, compiler_info, bottling:),
    }
  end

  # Extract the bottle file entry for the current platform (or :all) from a
  # `bottle_hash`-shaped hash; nil when unavailable.
  sig { params(base: T.nilable(T::Hash[String, T.untyped])).returns(T.nilable(T::Hash[String, String])) }
  def get_bottle_info(base)
    return unless base.present?

    files = base["files"].presence
    return unless files

    files[Utils::Bottles.tag.to_sym] || files[:all]
  end

  # Compiler recorded in the tab, falling back to the default compiler.
  sig { returns(Symbol) }
  def compiler
    @compiler.presence&.to_sym || DevelopmentTools.default_compiler
  end

  sig { returns(T.nilable(Tap)) }
  def tap
    tap_name = source.tap_name
    Tap.fetch(tap_name) if tap_name
  end

  sig { returns(Symbol) }
  def spec_symbol
    source.spec
  end

  sig { returns(T.nilable(Version)) }
  def spec_version
    source.version
  end

  sig { returns(Time) }
  def source_modified_time
    Time.at(@source_modified_time).utc
  end

  # SPDX requires "NOASSERTION" for unknown values.
  sig { params(val: T.untyped).returns(T.any(String, Symbol)) }
  def assert_value(val)
    return :NOASSERTION.to_s unless val.present?

    val
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/cleaner.rb | Library/Homebrew/cleaner.rb | # typed: strict
# frozen_string_literal: true
require "utils/output"
# Cleans a newly installed keg.
# By default:
#
# * removes `.la` files
# * removes `.tbd` files
# * removes `perllocal.pod` files
# * removes `.packlist` files
# * removes empty directories
# * sets permissions on executables
# * removes unresolved symlinks
class Cleaner
include Context
include Utils::Output::Mixin
# Create a cleaner for the given formula.
sig { params(formula: Formula).void }
def initialize(formula)
  # The formula whose installed keg will be cleaned.
  @formula = formula
end
# Clean the keg of the formula.
sig { void }
def clean
  # Start removal bookkeeping from zero for this keg.
  ObserverPathnameExtension.reset_counts!

  # Many formulae include `lib/charset.alias`, but it is not strictly needed
  # and will conflict if more than one formula provides it.
  observe_file_removal @formula.lib/"charset.alias"

  [@formula.bin, @formula.sbin, @formula.lib].each { |dir| clean_dir(dir) if dir.exist? }

  # Get rid of any info `dir` files, so they don't conflict at the link stage.
  #
  # The `dir` files come in at least 3 locations:
  #
  # 1. `info/dir`
  # 2. `info/#{name}/dir`
  # 3. `info/#{arch}/dir`
  #
  # Of these 3 only `info/#{name}/dir` is safe to keep since the rest will
  # conflict with other formulae because they use a shared location.
  #
  # See
  # [cleaner: recursively delete info `dir`s][1],
  # [emacs 28.1 bottle does not contain `dir` file][2] and
  # [Keep `info/#{f.name}/dir` files in cleaner][3]
  # for more info.
  #
  # [1]: https://github.com/Homebrew/brew/pull/11597
  # [2]: https://github.com/Homebrew/homebrew-core/issues/100190
  # [3]: https://github.com/Homebrew/brew/pull/13215
  @formula.info.glob("**/dir").each do |info_dir_file|
    next unless info_dir_file.file?
    # Keep the formula-specific dir file and anything protected by skip_clean.
    next if info_dir_file == @formula.info/@formula.name/"dir"
    next if @formula.skip_clean?(info_dir_file)

    observe_file_removal info_dir_file
  end

  rewrite_shebangs
  clean_python_metadata

  prune
end
private
sig { params(path: Pathname).void }
def observe_file_removal(path)
path.extend(ObserverPathnameExtension).unlink if path.exist?
end
# Removes any empty directories in the formula's prefix subtree
# Keeps any empty directories protected by skip_clean
# Removes any unresolved symlinks
sig { void }
def prune
dirs = []
symlinks = []
@formula.prefix.find do |path|
if path == @formula.libexec || @formula.skip_clean?(path)
Find.prune
elsif path.symlink?
symlinks << path
elsif path.directory?
dirs << path
end
end
# Remove directories opposite from traversal, so that a subtree with no
# actual files gets removed correctly.
dirs.reverse_each do |d|
if d.children.empty?
puts "rmdir: #{d} (empty)" if verbose?
d.rmdir
end
end
# Remove unresolved symlinks
symlinks.reverse_each do |s|
s.unlink unless s.resolved_path_exists?
end
end
sig { params(path: Pathname).returns(T::Boolean) }
def executable_path?(path)
path.text_executable? || path.executable?
end
# Both these files are completely unnecessary to package and cause
# pointless conflicts with other formulae. They are removed by Debian,
# Arch & MacPorts amongst other packagers as well. The files are
# created as part of installing any Perl module.
PERL_BASENAMES = T.let(Set.new(%w[perllocal.pod .packlist]).freeze, T::Set[String])
private_constant :PERL_BASENAMES
# Clean a top-level (`bin`, `sbin`, `lib`) directory, recursively, by fixing file
# permissions and removing .la files, unless the files (or parent
# directories) are protected by skip_clean.
#
# `bin` and `sbin` should not have any subdirectories; if either do that is
# caught as an audit warning.
#
# `lib` may have a large directory tree (see Erlang for instance) and
# clean_dir applies cleaning rules to the entire tree.
sig { params(directory: Pathname).void }
def clean_dir(directory)
directory.find do |path|
path.extend(ObserverPathnameExtension)
Find.prune if @formula.skip_clean? path
next if path.directory?
if path.extname == ".la" || path.extname == ".tbd" || PERL_BASENAMES.include?(path.basename.to_s)
path.unlink
elsif path.symlink?
# Skip it.
else
# Set permissions for executables and non-executables.
perms = if executable_path?(path)
0555
else
0444
end
if debug?
old_perms = path.stat.mode & 0777
odebug "Fixing #{path} permissions from #{old_perms.to_s(8)} to #{perms.to_s(8)}" if perms != old_perms
end
path.chmod perms
end
end
end
sig { void }
def rewrite_shebangs
require "language/node"
require "language/perl"
require "utils/shebang"
rewrites = [Language::Node::Shebang.method(:detected_node_shebang),
Language::Perl::Shebang.method(:detected_perl_shebang)].filter_map do |detector|
detector.call(@formula)
rescue ShebangDetectionError
nil
end
return if rewrites.empty?
basepath = @formula.prefix.realpath
basepath.find do |path|
Find.prune if @formula.skip_clean? path
next if path.directory? || path.symlink?
rewrites.each { |rw| Utils::Shebang.rewrite_shebang rw, path }
end
end
# Remove non-reproducible pip direct_url.json which records the /tmp build directory.
# Remove RECORD files to prevent changes to the installed Python package.
# Modify INSTALLER to provide information that files are managed by brew.
#
# @see https://packaging.python.org/en/latest/specifications/recording-installed-packages/
sig { void }
def clean_python_metadata
basepath = @formula.prefix.realpath
basepath.find do |path|
Find.prune if @formula.skip_clean?(path)
next if path.directory? || path.symlink?
next if path.parent.extname != ".dist-info"
case path.basename.to_s
when "direct_url.json", "RECORD"
observe_file_removal path
when "INSTALLER"
odebug "Modifying #{path} contents from #{path.read.chomp} to brew"
path.atomic_write("brew\n")
end
end
end
end
require "extend/os/cleaner"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/on_system.rb | Library/Homebrew/on_system.rb | # typed: strict
# frozen_string_literal: true
require "simulate_system"
module OnSystem
  ARCH_OPTIONS = [:intel, :arm].freeze
  BASE_OS_OPTIONS = [:macos, :linux].freeze
  ALL_OS_OPTIONS = T.let([*MacOSVersion::SYMBOLS.keys, :linux].freeze, T::Array[Symbol])
  ALL_OS_ARCH_COMBINATIONS = T.let(ALL_OS_OPTIONS.product(ARCH_OPTIONS).freeze, T::Array[[Symbol, Symbol]])

  # Every valid bottle tag for the supported OS/architecture pairings.
  VALID_OS_ARCH_TAGS = T.let(ALL_OS_ARCH_COMBINATIONS.filter_map do |os, arch|
    tag = Utils::Bottles::Tag.new(system: os, arch:)
    next unless tag.valid_combination?

    tag
  end.freeze, T::Array[Utils::Bottles::Tag])

  # Whether the current (possibly simulated) architecture matches `arch`.
  # Raises ArgumentError for anything outside ARCH_OPTIONS.
  sig { params(arch: Symbol).returns(T::Boolean) }
  def self.arch_condition_met?(arch)
    raise ArgumentError, "Invalid arch condition: #{arch.inspect}" if ARCH_OPTIONS.exclude?(arch)

    arch == Homebrew::SimulateSystem.current_arch
  end

  # Whether the current (possibly simulated) OS matches `os_name`.
  # `os_name` may be a base OS (:macos/:linux) or a macOS version symbol;
  # `or_condition` may be :or_newer/:or_older to match a version range.
  sig { params(os_name: Symbol, or_condition: T.nilable(Symbol)).returns(T::Boolean) }
  def self.os_condition_met?(os_name, or_condition = nil)
    return Homebrew::SimulateSystem.send(:"simulating_or_running_on_#{os_name}?") if BASE_OS_OPTIONS.include?(os_name)

    raise ArgumentError, "Invalid OS condition: #{os_name.inspect}" unless MacOSVersion::SYMBOLS.key?(os_name)

    if or_condition.present? && [:or_newer, :or_older].exclude?(or_condition)
      raise ArgumentError, "Invalid OS `or_*` condition: #{or_condition.inspect}"
    end

    # A macOS version condition can never match when on (or simulating) Linux.
    return false if Homebrew::SimulateSystem.simulating_or_running_on_linux?

    base_os = MacOSVersion.from_symbol(os_name)
    current_os = if Homebrew::SimulateSystem.current_os == :macos
      # Assume the oldest macOS version when simulating a generic macOS version
      # Version::NULL is always treated as less than any other version.
      Version::NULL
    else
      MacOSVersion.from_symbol(Homebrew::SimulateSystem.current_os)
    end

    return current_os >= base_os if or_condition == :or_newer
    return current_os <= base_os if or_condition == :or_older

    current_os == base_os
  end

  # Derive the condition symbol from a DSL method name, e.g. :on_arm -> :arm.
  sig { params(method_name: Symbol).returns(Symbol) }
  def self.condition_from_method_name(method_name)
    method_name.to_s.sub(/^on_/, "").to_sym
  end

  # Define `on_arm`/`on_intel` and `on_arch_conditional` on `base`.
  # Each defined method records that on-system blocks exist and only runs
  # its block when the architecture condition is met.
  sig { params(base: T::Class[T.anything]).void }
  def self.setup_arch_methods(base)
    ARCH_OPTIONS.each do |arch|
      base.define_method(:"on_#{arch}") do |&block|
        @on_system_blocks_exist = T.let(true, T.nilable(TrueClass))
        return unless OnSystem.arch_condition_met? OnSystem.condition_from_method_name(T.must(__method__))

        @called_in_on_system_block = true
        result = block.call
        @called_in_on_system_block = false

        result
      end
    end

    base.define_method(:on_arch_conditional) do |arm: nil, intel: nil|
      @on_system_blocks_exist = T.let(true, T.nilable(TrueClass))

      if OnSystem.arch_condition_met? :arm
        arm
      elsif OnSystem.arch_condition_met? :intel
        intel
      end
    end
  end

  # Define `on_macos`/`on_linux`, `on_system` and `on_system_conditional`
  # on `base`.
  sig { params(base: T::Class[T.anything]).void }
  def self.setup_base_os_methods(base)
    BASE_OS_OPTIONS.each do |base_os|
      base.define_method(:"on_#{base_os}") do |&block|
        @on_system_blocks_exist = T.let(true, T.nilable(TrueClass))
        return unless OnSystem.os_condition_met? OnSystem.condition_from_method_name(T.must(__method__))

        @called_in_on_system_block = true
        result = block.call
        @called_in_on_system_block = false

        result
      end
    end

    base.define_method(:on_system) do |linux, macos:, &block|
      @on_system_blocks_exist = T.let(true, T.nilable(TrueClass))

      raise ArgumentError, "The first argument to `on_system` must be `:linux`" if linux != :linux

      # `macos:` accepts e.g. :monterey or :monterey_or_newer; split off the
      # `or_*` qualifier when present.
      os_version, or_condition = if macos.to_s.include?("_or_")
        macos.to_s.split(/_(?=or_)/).map(&:to_sym)
      else
        [macos.to_sym, nil]
      end
      return if !OnSystem.os_condition_met?(os_version, or_condition) && !OnSystem.os_condition_met?(:linux)

      @called_in_on_system_block = true
      result = block.call
      @called_in_on_system_block = false

      result
    end

    base.define_method(:on_system_conditional) do |macos: nil, linux: nil|
      @on_system_blocks_exist = T.let(true, T.nilable(TrueClass))

      if OnSystem.os_condition_met?(:macos) && macos.present?
        macos
      elsif OnSystem.os_condition_met?(:linux) && linux.present?
        linux
      end
    end
  end

  # Define an `on_<version>` method for every known macOS version symbol on
  # `base`, each accepting an optional :or_newer/:or_older qualifier.
  sig { params(base: T::Class[T.anything]).void }
  def self.setup_macos_methods(base)
    MacOSVersion::SYMBOLS.each_key do |os_name|
      base.define_method(:"on_#{os_name}") do |or_condition = nil, &block|
        @on_system_blocks_exist = T.let(true, T.nilable(TrueClass))

        os_condition = OnSystem.condition_from_method_name T.must(__method__)
        return unless OnSystem.os_condition_met? os_condition, or_condition

        # Record the minimum macOS version this block applies to (used by
        # callers of these DSL methods).
        @on_system_block_min_os = T.let(
          if or_condition == :or_older
            @called_in_on_system_block ? @on_system_block_min_os : MacOSVersion.new(HOMEBREW_MACOS_OLDEST_ALLOWED)
          else
            MacOSVersion.from_symbol(os_condition)
          end,
          T.nilable(MacOSVersion),
        )
        @called_in_on_system_block = T.let(true, T.nilable(T::Boolean))
        result = block.call
        @called_in_on_system_block = false

        result
      end
    end
  end

  # Guard against including this module directly: use one of the mixins below.
  sig { params(_base: T::Class[T.anything]).void }
  def self.included(_base)
    raise "Do not include `OnSystem` directly. Instead, include `OnSystem::MacOSAndLinux` or `OnSystem::MacOSOnly`"
  end

  # Mixin providing the full on-system DSL: arch, base OS and macOS versions.
  module MacOSAndLinux
    sig { params(base: T::Class[T.anything]).void }
    def self.included(base)
      OnSystem.setup_arch_methods(base)
      OnSystem.setup_base_os_methods(base)
      OnSystem.setup_macos_methods(base)
    end
  end

  # Mixin providing the on-system DSL without the base OS methods.
  module MacOSOnly
    sig { params(base: T::Class[T.anything]).void }
    def self.included(base)
      OnSystem.setup_arch_methods(base)
      OnSystem.setup_macos_methods(base)
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/formula_assertions.rb | Library/Homebrew/formula_assertions.rb | # typed: strict
# frozen_string_literal: true
require "utils/output"
module Homebrew
  # Helper functions available in formula `test` blocks.
  module Assertions
    include Context
    include ::Utils::Output::Mixin

    extend T::Helpers

    requires_ancestor { Kernel }

    require "minitest"
    require "minitest/assertions"
    include ::Minitest::Assertions

    # Minitest::Assertions requires an assertion counter on the including
    # object; provide one that lazily starts at zero.
    sig { params(assertions: Integer).returns(Integer) }
    attr_writer :assertions

    sig { returns(Integer) }
    def assertions
      @assertions ||= T.let(0, T.nilable(Integer))
    end

    # Route `assert_equal(nil, ...)` to `assert_nil`, which Minitest prefers.
    sig { params(exp: Object, act: Object, msg: T.nilable(String)).returns(TrueClass) }
    def assert_equal(exp, act, msg = nil)
      # odeprecated "assert_equal(nil, ...)", "assert_nil(...)"
      exp.nil? ? assert_nil(act, msg) : super
    end

    # Returns the output of running cmd and asserts the exit status.
    #
    # @api public
    sig { params(cmd: T.any(Pathname, String), result: Integer).returns(String) }
    def shell_output(cmd, result = 0)
      ohai cmd.to_s
      assert_path_exists cmd, "Pathname '#{cmd}' does not exist!" if cmd.is_a?(Pathname)
      output = `#{cmd}`
      assert_equal result, $CHILD_STATUS.exitstatus
      output
    rescue Minitest::Assertion
      # NOTE(review): if the path assertion above fails, `output` is still nil
      # here, so nothing useful is printed — confirm this is intended.
      puts output if verbose?
      raise
    end

    # Returns the output of running the cmd with the optional input and
    # optionally asserts the exit status.
    #
    # @api public
    sig { params(cmd: T.any(String, Pathname), input: T.nilable(String), result: T.nilable(Integer)).returns(String) }
    def pipe_output(cmd, input = nil, result = nil)
      ohai cmd.to_s
      assert_path_exists cmd, "Pathname '#{cmd}' does not exist!" if cmd.is_a?(Pathname)
      output = IO.popen(cmd, "w+") do |pipe|
        pipe.write(input) unless input.nil?
        pipe.close_write
        pipe.read
      end
      assert_equal result, $CHILD_STATUS.exitstatus unless result.nil?
      output
    rescue Minitest::Assertion
      puts output if verbose?
      raise
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/tab.rb | Library/Homebrew/tab.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "cxxstdlib"
require "options"
require "json"
require "development_tools"
require "cachable"
# Rather than calling `new` directly, use one of the class methods like {Tab.create}.
class AbstractTab
  extend Cachable
  extend T::Helpers

  abstract!

  FILENAME = "INSTALL_RECEIPT.json"

  # Check whether the formula or cask was installed as a dependency.
  #
  # @api internal
  sig { returns(T::Boolean) }
  attr_accessor :installed_as_dependency

  # Check whether the formula or cask was installed on request.
  #
  # @api internal
  sig { returns(T::Boolean) }
  attr_accessor :installed_on_request

  sig { returns(T.nilable(String)) }
  attr_accessor :homebrew_version

  attr_accessor :tabfile, :loaded_from_api, :time, :arch, :source, :built_on

  # Returns the formula or cask runtime dependencies.
  #
  # @api internal
  attr_accessor :runtime_dependencies

  # Build a tab from receipt attributes (string or symbol keys).
  #
  # TODO: Update attributes to only accept symbol keys (kwargs style).
  sig { params(attributes: T.any(T::Hash[String, T.untyped], T::Hash[Symbol, T.untyped])).void }
  def initialize(attributes = {})
    @installed_as_dependency = T.let(false, T::Boolean)
    @installed_on_request = T.let(false, T::Boolean)
    # Track whether these keys were explicitly present in the receipt (see
    # Tab#installed_as_dependency_present? etc.).
    @installed_as_dependency_present = T.let(false, T::Boolean)
    @installed_on_request_present = T.let(false, T::Boolean)
    @homebrew_version = T.let(nil, T.nilable(String))
    @tabfile = T.let(nil, T.nilable(Pathname))
    @loaded_from_api = T.let(nil, T.nilable(T::Boolean))
    @time = T.let(nil, T.nilable(Integer))
    @arch = T.let(nil, T.nilable(String))
    @source = T.let(nil, T.nilable(T::Hash[String, T.untyped]))
    @built_on = T.let(nil, T.nilable(T::Hash[String, T.untyped]))
    @runtime_dependencies = T.let(nil, T.nilable(T::Array[T.untyped]))

    attributes.each do |key, value|
      case key.to_sym
      when :installed_as_dependency
        @installed_as_dependency = value.nil? ? false : value
        @installed_as_dependency_present = true
      when :installed_on_request
        @installed_on_request = value.nil? ? false : value
        @installed_on_request_present = true
      when :changed_files
        @changed_files = value&.map { |f| Pathname(f) }
      else
        # Any other key is stored verbatim as an instance variable.
        instance_variable_set(:"@#{key}", value)
      end
    end
  end

  # Instantiates a {Tab} for a new installation of a formula or cask.
  sig { params(formula_or_cask: T.any(Formula, Cask::Cask)).returns(T.attached_class) }
  def self.create(formula_or_cask)
    attributes = {
      "homebrew_version" => HOMEBREW_VERSION,
      "installed_as_dependency" => false,
      "installed_on_request" => false,
      "loaded_from_api" => formula_or_cask.loaded_from_api?,
      "time" => Time.now.to_i,
      "arch" => Hardware::CPU.arch,
      "source" => {
        "tap" => formula_or_cask.tap&.name,
        "tap_git_head" => formula_or_cask.tap_git_head,
      },
      "built_on" => DevelopmentTools.build_system_info,
    }

    new(attributes)
  end

  # Returns the {Tab} for a formula or cask install receipt at `path`.
  #
  # NOTE: Results are cached.
  sig { params(path: T.any(Pathname, String)).returns(T.attached_class) }
  def self.from_file(path)
    cache.fetch(path) do |p|
      content = File.read(p)
      # An empty receipt yields an (uncached) empty tab.
      return empty if content.blank?

      cache[p] = from_file_content(content, p)
    end
  end

  # Like {from_file}, but bypass the cache.
  sig { params(content: String, path: T.any(Pathname, String)).returns(T.attached_class) }
  def self.from_file_content(content, path)
    attributes = begin
      JSON.parse(content)
    rescue JSON::ParserError => e
      raise e, "Cannot parse #{path}: #{e}", e.backtrace
    end
    attributes["tabfile"] = path

    new(attributes)
  end

  # A fake tab for formulae/casks without a receipt.
  sig { returns(T.attached_class) }
  def self.empty
    attributes = {
      "homebrew_version" => HOMEBREW_VERSION,
      "installed_as_dependency" => false,
      "installed_on_request" => false,
      "loaded_from_api" => false,
      "time" => nil,
      "runtime_dependencies" => nil,
      "arch" => nil,
      "source" => {
        "path" => nil,
        "tap" => nil,
        "tap_git_head" => nil,
      },
      "built_on" => DevelopmentTools.build_system_info,
    }

    new(attributes)
  end

  # Serialise a dependency formula into the hash stored under
  # `runtime_dependencies`; nil values are dropped.
  def self.formula_to_dep_hash(formula, declared_deps)
    {
      "full_name" => formula.full_name,
      "version" => formula.version.to_s,
      "revision" => formula.revision,
      "bottle_rebuild" => formula.bottle&.rebuild,
      "pkg_version" => formula.pkg_version.to_s,
      "declared_directly" => declared_deps.include?(formula.full_name),
      "compatibility_version" => formula.compatibility_version,
    }.compact
  end
  private_class_method :formula_to_dep_hash

  # The Homebrew version that wrote the receipt, as a comparable Version
  # (Version::NULL when the receipt lacks one).
  sig { returns(Version) }
  def parsed_homebrew_version
    homebrew_version = self.homebrew_version
    return Version::NULL if homebrew_version.nil?

    Version.new(homebrew_version)
  end

  sig { returns(T.nilable(Tap)) }
  def tap
    tap_name = source["tap"]
    Tap.fetch(tap_name) if tap_name
  end

  sig { params(tap: T.nilable(T.any(Tap, String))).void }
  def tap=(tap)
    tap_name = tap.is_a?(Tap) ? tap.name : tap
    source["tap"] = tap_name
  end

  # Write the receipt to `tabfile` and refresh the cache entry.
  sig { void }
  def write
    self.class.cache[tabfile] = self
    tabfile.atomic_write(to_json)
  end
end
class Tab < AbstractTab
  # Check whether the formula was poured from a bottle.
  #
  # @api internal
  attr_accessor :poured_from_bottle

  attr_accessor :built_as_bottle, :stdlib, :aliases
  attr_writer :used_options, :unused_options, :compiler, :source_modified_time
  attr_reader :tapped_from

  sig { returns(T.nilable(T::Array[Pathname])) }
  attr_accessor :changed_files

  sig { params(attributes: T.any(T::Hash[String, T.untyped], T::Hash[Symbol, T.untyped])).void }
  def initialize(attributes = {})
    @poured_from_bottle = T.let(nil, T.nilable(T::Boolean))
    @built_as_bottle = T.let(nil, T.nilable(T::Boolean))
    @changed_files = nil
    @stdlib = T.let(nil, T.nilable(String))
    @aliases = T.let(nil, T.nilable(T::Array[String]))
    @used_options = T.let(nil, T.nilable(T::Array[String]))
    @unused_options = T.let(nil, T.nilable(T::Array[String]))
    @compiler = T.let(nil, T.nilable(String))
    @source_modified_time = T.let(nil, T.nilable(Integer))
    @tapped_from = T.let(nil, T.nilable(String))

    # AbstractTab#initialize assigns the receipt attributes.
    super
  end

  # Instantiates a {Tab} for a new installation of a formula.
  sig {
    override.params(formula_or_cask: T.any(Formula, Cask::Cask), compiler: T.any(Symbol, String),
                    stdlib: T.nilable(T.any(String, Symbol))).returns(T.attached_class)
  }
  def self.create(formula_or_cask, compiler = DevelopmentTools.default_compiler, stdlib = nil)
    formula = T.cast(formula_or_cask, Formula)
    tab = super(formula)
    build = formula.build
    runtime_deps = formula.runtime_dependencies(undeclared: false)

    tab.used_options = build.used_options.as_flags
    tab.unused_options = build.unused_options.as_flags
    tab.tabfile = formula.prefix/FILENAME
    tab.built_as_bottle = build.bottle?
    tab.poured_from_bottle = false
    tab.source_modified_time = formula.source_modified_time.to_i
    tab.compiler = compiler
    tab.stdlib = stdlib
    tab.aliases = formula.aliases
    tab.runtime_dependencies = Tab.runtime_deps_hash(formula, runtime_deps)
    tab.source["spec"] = formula.active_spec_sym.to_s
    tab.source["path"] = formula.specified_path.to_s
    tab.source["versions"] = {
      "stable" => formula.stable&.version&.to_s,
      "head" => formula.head&.version&.to_s,
      "version_scheme" => formula.version_scheme,
      "compatibility_version" => formula.compatibility_version,
    }

    tab
  end

  # Like {from_file}, but bypass the cache.
  # Also migrates legacy receipt fields to their current form.
  sig { params(content: String, path: T.any(Pathname, String)).returns(T.attached_class) }
  def self.from_file_content(content, path)
    tab = super
    tab.source ||= {}

    # Migrate the legacy `tapped_from` field and renamed core tap names.
    tab.tap = tab.tapped_from if !tab.tapped_from.nil? && tab.tapped_from != "path or URL"
    tab.tap = "homebrew/core" if ["mxcl/master", "Homebrew/homebrew"].include?(tab.tap)

    # Derive the spec from the keg directory name when the receipt lacks it.
    if tab.source["spec"].nil?
      version = PkgVersion.parse(File.basename(File.dirname(path)))
      tab.source["spec"] = if version.head?
        "head"
      else
        "stable"
      end
    end

    tab.source["versions"] ||= empty_source_versions

    # Tabs created with Homebrew 1.5.13 through 4.0.17 inclusive created empty string versions in some cases.
    ["stable", "head"].each do |spec|
      tab.source["versions"][spec] = tab.source["versions"][spec].presence
    end

    tab
  end

  # Get the {Tab} for the given {Keg},
  # or a fake one if the formula is not installed.
  #
  # @api internal
  sig { params(keg: T.any(Keg, Pathname)).returns(T.attached_class) }
  def self.for_keg(keg)
    path = keg/FILENAME

    tab = if path.exist?
      from_file(path)
    else
      empty
    end

    tab.tabfile = path
    tab
  end

  # Returns a {Tab} for the named formula's installation,
  # or a fake one if the formula is not installed.
  sig { params(name: String).returns(T.attached_class) }
  def self.for_name(name)
    rack = HOMEBREW_CELLAR/name

    if (keg = Keg.from_rack(rack))
      for_keg(keg)
    else
      for_formula(Formulary.from_rack(rack, keg:))
    end
  end

  # Replace each deprecated option in `options` with its current equivalent,
  # preserving the original description.
  def self.remap_deprecated_options(deprecated_options, options)
    deprecated_options.each do |deprecated_option|
      option = options.find { |o| o.name == deprecated_option.old }
      next unless option

      options -= [option]
      options << Option.new(deprecated_option.current, option.description)
    end
    options
  end

  # Returns a {Tab} for an already installed formula,
  # or a fake one if the formula is not installed.
  sig { params(formula: Formula).returns(T.attached_class) }
  def self.for_formula(formula)
    # Candidate keg locations, most specific first.
    paths = []

    paths << formula.opt_prefix.resolved_path if formula.opt_prefix.symlink? && formula.opt_prefix.directory?
    paths << formula.linked_keg.resolved_path if formula.linked_keg.symlink? && formula.linked_keg.directory?

    if (dirs = formula.installed_prefixes).length == 1
      paths << dirs.first
    end

    paths << formula.latest_installed_prefix

    path = paths.map { |pathname| pathname/FILENAME }.find(&:file?)

    if path
      tab = from_file(path)
      used_options = remap_deprecated_options(formula.deprecated_options, tab.used_options)
      tab.used_options = used_options.as_flags
    else
      # Formula is not installed. Return a fake tab.
      tab = empty
      tab.unused_options = formula.options.as_flags
      tab.source = {
        "path" => formula.specified_path.to_s,
        "tap" => formula.tap&.name,
        "tap_git_head" => formula.tap_git_head,
        "spec" => formula.active_spec_sym.to_s,
        "versions" => {
          "stable" => formula.stable&.version&.to_s,
          "head" => formula.head&.version&.to_s,
          "version_scheme" => formula.version_scheme,
        },
      }
    end

    tab
  end

  # A fake tab with sensible defaults for an uninstalled formula.
  sig { returns(T.attached_class) }
  def self.empty
    tab = super
    tab.used_options = []
    tab.unused_options = []
    tab.built_as_bottle = false
    tab.poured_from_bottle = false
    tab.source_modified_time = 0
    tab.stdlib = nil
    tab.compiler = DevelopmentTools.default_compiler
    tab.aliases = []
    tab.source["spec"] = "stable"
    tab.source["versions"] = empty_source_versions

    tab
  end

  sig { returns(T::Hash[String, T.untyped]) }
  def self.empty_source_versions
    {
      "stable" => nil,
      "head" => nil,
      "version_scheme" => 0,
      "compatibility_version" => nil,
    }
  end
  private_class_method :empty_source_versions

  # Serialise the formula's runtime dependencies for storage in the receipt.
  def self.runtime_deps_hash(formula, deps)
    deps.map do |dep|
      formula_to_dep_hash(dep.to_formula, formula.deps.map(&:name))
    end
  end

  sig { returns(T::Boolean) }
  def any_args_or_options?
    !used_options.empty? || !unused_options.empty?
  end

  # Whether the formula was built with the given option. `val` may be an
  # option name or anything responding to `option_names`.
  def with?(val)
    option_names = val.respond_to?(:option_names) ? val.option_names : [val]

    option_names.any? do |name|
      include?("with-#{name}") || unused_options.include?("without-#{name}")
    end
  end

  def without?(val)
    !with?(val)
  end

  sig { params(opt: String).returns(T::Boolean) }
  def include?(opt)
    used_options.include? opt
  end

  sig { returns(T::Boolean) }
  def head?
    spec == :head
  end

  sig { returns(T::Boolean) }
  def stable?
    spec == :stable
  end

  # The options used to install the formula.
  #
  # @api internal
  sig { returns(Options) }
  def used_options
    Options.create(@used_options)
  end

  sig { returns(Options) }
  def unused_options
    Options.create(@unused_options)
  end

  sig { returns(T.any(String, Symbol)) }
  def compiler
    @compiler || DevelopmentTools.default_compiler
  end

  def runtime_dependencies
    # Homebrew versions prior to 1.1.6 generated incorrect runtime dependency
    # lists.
    @runtime_dependencies if parsed_homebrew_version >= "1.1.6"
  end

  sig { returns(CxxStdlib) }
  def cxxstdlib
    # Older tabs won't have these values, so provide sensible defaults
    lib = stdlib.to_sym if stdlib
    CxxStdlib.create(lib, compiler.to_sym)
  end

  # Whether this keg was built from source as a bottle (not poured from one).
  sig { returns(T::Boolean) }
  def built_bottle?
    built_as_bottle && !poured_from_bottle
  end

  sig { returns(T::Boolean) }
  def bottle?
    built_as_bottle
  end

  sig { returns(Symbol) }
  def spec
    source["spec"].to_sym
  end

  sig { returns(T::Hash[String, T.untyped]) }
  def versions
    source["versions"]
  end

  sig { returns(T.nilable(Version)) }
  def stable_version
    versions["stable"]&.then { Version.new(it) }
  end

  sig { returns(T.nilable(Version)) }
  def head_version
    versions["head"]&.then { Version.new(it) }
  end

  sig { returns(Integer) }
  def version_scheme
    versions["version_scheme"] || 0
  end

  sig { returns(Time) }
  def source_modified_time
    Time.at(@source_modified_time || 0)
  end

  # Serialise the receipt to pretty-printed JSON.
  sig { params(options: T.nilable(T::Hash[String, T.untyped])).returns(String) }
  def to_json(options = nil)
    attributes = {
      "homebrew_version" => homebrew_version,
      "used_options" => used_options.as_flags,
      "unused_options" => unused_options.as_flags,
      "built_as_bottle" => built_as_bottle,
      "poured_from_bottle" => poured_from_bottle,
      "loaded_from_api" => loaded_from_api,
      "installed_as_dependency" => installed_as_dependency,
      "installed_on_request" => installed_on_request,
      "changed_files" => changed_files&.map(&:to_s),
      "time" => time,
      "source_modified_time" => source_modified_time.to_i,
      "stdlib" => stdlib&.to_s,
      "compiler" => compiler.to_s,
      "aliases" => aliases,
      "runtime_dependencies" => runtime_dependencies,
      "source" => source,
      "arch" => arch,
      "built_on" => built_on,
    }
    attributes.delete("stdlib") if attributes["stdlib"].blank?

    JSON.pretty_generate(attributes, options)
  end

  # A subset of to_json that we care about for bottles.
  sig { returns(T::Hash[String, T.untyped]) }
  def to_bottle_hash
    attributes = {
      "homebrew_version" => homebrew_version,
      "changed_files" => changed_files&.map(&:to_s),
      "source_modified_time" => source_modified_time.to_i,
      "stdlib" => stdlib&.to_s,
      "compiler" => compiler.to_s,
      "runtime_dependencies" => runtime_dependencies,
      "arch" => arch,
      "built_on" => built_on,
    }
    attributes.delete("stdlib") if attributes["stdlib"].blank?
    attributes
  end

  sig { void }
  def write
    # If this is a new installation, the cache of installed formulae
    # will no longer be valid.
    Formula.clear_cache unless tabfile.exist?

    super
  end

  # Human-readable installation summary, e.g.
  # "Poured from bottle on 2024-01-01 at 00:00:00".
  sig { returns(String) }
  def to_s
    s = []
    s << if poured_from_bottle
      "Poured from bottle"
    else
      "Built from source"
    end

    s << "using the formulae.brew.sh API" if loaded_from_api
    s << Time.at(time).strftime("on %Y-%m-%d at %H:%M:%S") if time

    unless used_options.empty?
      s << "with:"
      s << used_options.to_a.join(" ")
    end
    s.join(" ")
  end

  # Whether the receipt explicitly contained `installed_on_request`.
  sig { returns(T::Boolean) }
  def installed_on_request_present? = @installed_on_request_present

  # Whether the receipt explicitly contained `installed_as_dependency`.
  sig { returns(T::Boolean) }
  def installed_as_dependency_present? = @installed_as_dependency_present
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/github_packages.rb | Library/Homebrew/github_packages.rb | # typed: strict
# frozen_string_literal: true
require "utils/curl"
require "utils/gzip"
require "utils/output"
require "json"
require "zlib"
require "extend/hash/keys"
require "system_command"
# GitHub Packages client.
class GitHubPackages
  include Context
  include SystemCommand::Mixin
  include Utils::Output::Mixin

  URL_DOMAIN = "ghcr.io"
  URL_PREFIX = T.let("https://#{URL_DOMAIN}/v2/".freeze, String)
  DOCKER_PREFIX = T.let("docker://#{URL_DOMAIN}/".freeze, String)
  public_constant :URL_DOMAIN
  private_constant :URL_PREFIX
  private_constant :DOCKER_PREFIX

  # Matches either URL form above and captures the organisation and repository.
  URL_REGEX = %r{(?:#{Regexp.escape(URL_PREFIX)}|#{Regexp.escape(DOCKER_PREFIX)})([\w-]+)/([\w-]+)}

  # Valid OCI tag characters
  # https://github.com/opencontainers/distribution-spec/blob/main/spec.md#workflow-categories
  VALID_OCI_TAG_REGEX = /^[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}$/
  INVALID_OCI_TAG_CHARS_REGEX = /[^a-zA-Z0-9._-]/

  # Translate Homebrew tab.arch to OCI platform.architecture
  TAB_ARCH_TO_PLATFORM_ARCHITECTURE = T.let(
    {
      "arm64" => "arm64",
      "x86_64" => "amd64",
    }.freeze,
    T::Hash[String, String],
  )

  # Translate Homebrew built_on.os to OCI platform.os
  BUILT_ON_OS_TO_PLATFORM_OS = T.let(
    {
      "Linux" => "linux",
      "Macintosh" => "darwin",
    }.freeze,
    T::Hash[String, String],
  )
# Upload the bottles described by `bottles_hash` to GitHub Packages.
# All bottles are validated in a first pass so an error cannot strike in
# the middle of uploading.
sig {
  params(
    bottles_hash: T::Hash[String, T.untyped],
    keep_old: T::Boolean,
    dry_run: T::Boolean,
    warn_on_error: T::Boolean,
  ).void
}
def upload_bottles(bottles_hash, keep_old:, dry_run:, warn_on_error:)
  user = Homebrew::EnvConfig.github_packages_user
  token = Homebrew::EnvConfig.github_packages_token

  raise UsageError, "HOMEBREW_GITHUB_PACKAGES_USER is unset." if user.blank?
  raise UsageError, "HOMEBREW_GITHUB_PACKAGES_TOKEN is unset." if token.blank?

  skopeo = ensure_executable!("skopeo", reason: "upload")

  require "json_schemer"

  load_schemas!

  bottles_hash.each do |formula_full_name, bottle_hash|
    # First, check that we won't encounter an error in the middle of uploading bottles.
    preupload_check(user, token, skopeo, formula_full_name, bottle_hash,
                    keep_old:, dry_run:, warn_on_error:)
  end

  # We intentionally iterate over `bottles_hash` twice to
  # avoid erroring out in the middle of uploading bottles.
  # rubocop:disable Style/CombinableLoops
  bottles_hash.each do |formula_full_name, bottle_hash|
    # Next, upload the bottles after checking them all.
    upload_bottle(user, token, skopeo, formula_full_name, bottle_hash,
                  keep_old:, dry_run:, warn_on_error:)
  end
  # rubocop:enable Style/CombinableLoops
end
# Build the version portion of an OCI tag: the version, optionally a bottle
# tag, and a nonzero rebuild number. The rebuild separator is "." when a
# bottle tag is present and "-" otherwise.
sig { params(version: Version, rebuild: Integer, bottle_tag: T.nilable(String)).returns(String) }
def self.version_rebuild(version, rebuild, bottle_tag = nil)
  tag_part = bottle_tag.present? ? ".#{bottle_tag}" : nil
  rebuild_part = if rebuild.positive?
    tag_part ? ".#{rebuild}" : "-#{rebuild}"
  end

  "#{version}#{tag_part}#{rebuild_part}"
end
# Remove the redundant "homebrew-" repository prefix for a shorter name.
sig { params(repo: String).returns(String) }
def self.repo_without_prefix(repo) = repo.delete_prefix("homebrew-")
# The root registry URL for an organisation/repository pair.
sig { params(org: String, repo: String, prefix: String).returns(String) }
def self.root_url(org, repo, prefix = URL_PREFIX)
  # `docker`/`skopeo` insist on a lowercase organisation ("repository name").
  "#{prefix}#{org.downcase}/#{repo_without_prefix(repo)}"
end
# The root registry URL derived from `url`, or nil when `url` is blank or
# does not match a recognised GitHub Packages URL.
sig { params(url: T.nilable(String)).returns(T.nilable(String)) }
def self.root_url_if_match(url)
  return if url.blank?

  match = url.to_s.match(URL_REGEX)
  org = match && match[1]
  repo = match && match[2]
  return if org.blank? || repo.blank?

  root_url(org, repo)
end
# Map characters that are invalid in docker image names:
# - `@` becomes `/` (we already use `/` to separate repository/formula).
# - `+` becomes `x` (as `Formulary` already does).
sig { params(formula_name: String).returns(String) }
def self.image_formula_name(formula_name)
  formula_name.tr("@+", "/x")
end
# Validate that `version_rebuild` is a legal OCI tag and return it unchanged.
# Raises ArgumentError otherwise.
sig { params(version_rebuild: String).returns(String) }
def self.image_version_rebuild(version_rebuild)
  return version_rebuild if version_rebuild.match?(VALID_OCI_TAG_REGEX)

  raise ArgumentError, "GitHub Packages versions must match #{VALID_OCI_TAG_REGEX.source}!"
end
private

# Registration URIs for the OCI image-spec JSON schemas loaded below.
IMAGE_CONFIG_SCHEMA_URI = "https://opencontainers.org/schema/image/config"
IMAGE_INDEX_SCHEMA_URI = "https://opencontainers.org/schema/image/index"
IMAGE_LAYOUT_SCHEMA_URI = "https://opencontainers.org/schema/image/layout"
IMAGE_MANIFEST_SCHEMA_URI = "https://opencontainers.org/schema/image/manifest"
# GitHub Packages package type recorded for Homebrew bottles.
GITHUB_PACKAGE_TYPE = "homebrew_bottle"

private_constant :IMAGE_CONFIG_SCHEMA_URI, :IMAGE_INDEX_SCHEMA_URI, :IMAGE_LAYOUT_SCHEMA_URI,
                 :IMAGE_MANIFEST_SCHEMA_URI, :GITHUB_PACKAGE_TYPE
# Preload every OCI image-spec JSON schema used for manifest validation,
# registering each fetched document under the URI(s) it is referenced by.
sig { void }
def load_schemas!
  schema_uri("content-descriptor",
             "https://opencontainers.org/schema/image/content-descriptor.json")
  schema_uri("defs", %w[
    https://opencontainers.org/schema/defs.json
    https://opencontainers.org/schema/descriptor/defs.json
    https://opencontainers.org/schema/image/defs.json
    https://opencontainers.org/schema/image/descriptor/defs.json
    https://opencontainers.org/schema/image/index/defs.json
    https://opencontainers.org/schema/image/manifest/defs.json
  ])
  schema_uri("defs-descriptor", %w[
    https://opencontainers.org/schema/descriptor.json
    https://opencontainers.org/schema/defs-descriptor.json
    https://opencontainers.org/schema/descriptor/defs-descriptor.json
    https://opencontainers.org/schema/image/defs-descriptor.json
    https://opencontainers.org/schema/image/descriptor/defs-descriptor.json
    https://opencontainers.org/schema/image/index/defs-descriptor.json
    https://opencontainers.org/schema/image/manifest/defs-descriptor.json
    https://opencontainers.org/schema/index/defs-descriptor.json
  ])
  schema_uri("config-schema", IMAGE_CONFIG_SCHEMA_URI)
  schema_uri("image-index-schema", IMAGE_INDEX_SCHEMA_URI)
  schema_uri("image-layout-schema", IMAGE_LAYOUT_SCHEMA_URI)
  schema_uri("image-manifest-schema", IMAGE_MANIFEST_SCHEMA_URI)
end
# Fetch the schema file `basename` from the pinned image-spec revision and
# cache the parsed JSON under every URI in `uris`.
#
# @param basename schema file name (without ".json") in the image-spec repo
# @param uris one or more URIs the schema should resolve from
sig { params(basename: String, uris: T.any(String, T::Array[String])).void }
def schema_uri(basename, uris)
  # The current `main` version has an invalid JSON schema.
  # Going forward, this should probably be pinned to tags.
  # We currently use features newer than the last one (v1.0.2).
  url = "https://raw.githubusercontent.com/opencontainers/image-spec/170393e57ed656f7f81c3070bfa8c3346eaa0a5a/schema/#{basename}.json"
  out = Utils::Curl.curl_output(url).stdout
  json = JSON.parse(out)

  # Lazily initialise the URI -> parsed-schema cache shared with schema_resolver.
  @schema_json ||= T.let({}, T.nilable(T::Hash[String, T::Hash[String, T.untyped]]))
  Array(uris).each do |uri|
    @schema_json[uri] = json
  end
end
# `$ref` resolver for JSONSchemer: look up a previously-cached schema by URI,
# stripping any "#fragment" suffix first.
sig { params(uri: URI::Generic).returns(T.nilable(T::Hash[String, T.untyped])) }
def schema_resolver(uri)
  @schema_json&.fetch(uri.to_s.gsub(/#.*/, ""))
end
# Validate `json` against the cached schema registered for `schema_uri`.
# On failure, prints the validation errors plus the offending JSON and
# terminates the process (exit 1).
sig { params(schema_uri: String, json: T::Hash[String, T.untyped]).void }
def validate_schema!(schema_uri, json)
  schema = JSONSchemer.schema(@schema_json&.fetch(schema_uri), ref_resolver: method(:schema_resolver))
  # Schemas address properties by string keys; our hashes use symbols.
  json = json.deep_stringify_keys
  return if schema.valid?(json)

  puts
  ofail "#{Formatter.url(schema_uri)} JSON schema validation failed!"
  oh1 "Errors"
  puts schema.validate(json).to_a.inspect
  oh1 "JSON"
  puts json.inspect
  exit 1
end
# Copy an existing image (all architectures) from the registry into a local
# OCI layout at `root` using skopeo. In dry-run mode only prints the command,
# substituting an env-var placeholder so the token is never echoed.
sig { params(user: String, token: String, skopeo: Pathname, image_uri: String, root: Pathname, dry_run: T::Boolean).void }
def download(user, token, skopeo, image_uri, root, dry_run:)
  puts
  args = ["copy", "--all", image_uri.to_s, "oci:#{root}"]
  if dry_run
    puts "#{skopeo} #{args.join(" ")} --src-creds=#{user}:$HOMEBREW_GITHUB_PACKAGES_TOKEN"
  else
    # Credentials are appended only for the real invocation, not the printed one.
    args << "--src-creds=#{user}:#{token}"
    system_command!(skopeo, verbose: true, print_stdout: true, args:)
  end
end
# Resolve the target image coordinates from `bottle_hash` and verify that an
# upload is permissible: inspects the remote tag and either skips (with a
# warning), aborts, or adjusts `keep_old` depending on whether the tag
# already exists. Returns nil when the upload should be skipped.
sig {
  params(
    user: String, token: String, skopeo: Pathname, _formula_full_name: String,
    bottle_hash: T::Hash[String, T.untyped], keep_old: T::Boolean, dry_run: T::Boolean, warn_on_error: T::Boolean
  ).returns(
    T.nilable([String, String, String, Version, Integer, String, String, String, T::Boolean]),
  )
}
def preupload_check(user, token, skopeo, _formula_full_name, bottle_hash, keep_old:, dry_run:, warn_on_error:)
  formula_name = bottle_hash["formula"]["name"]

  _, org, repo, = *bottle_hash["bottle"]["root_url"].match(URL_REGEX)
  # Registry paths omit the tap prefix; restore it for GitHub repository URLs.
  repo = "homebrew-#{repo}" unless repo.start_with?("homebrew-")

  version = Version.new(bottle_hash["formula"]["pkg_version"])
  rebuild = bottle_hash["bottle"]["rebuild"].to_i
  version_rebuild = GitHubPackages.version_rebuild(version, rebuild)

  image_name = GitHubPackages.image_formula_name(formula_name)
  image_tag = GitHubPackages.image_version_rebuild(version_rebuild)
  image_uri = "#{GitHubPackages.root_url(org, repo, DOCKER_PREFIX)}/#{image_name}:#{image_tag}"

  puts
  inspect_args = ["inspect", "--raw", image_uri.to_s]
  if dry_run
    puts "#{skopeo} #{inspect_args.join(" ")} --creds=#{user}:$HOMEBREW_GITHUB_PACKAGES_TOKEN"
  else
    inspect_args << "--creds=#{user}:#{token}"
    inspect_result = system_command(skopeo, print_stderr: false, args: inspect_args)

    # Order here is important.
    if !inspect_result.status.success? && !inspect_result.stderr.match?(/(name|manifest) unknown/)
      # We got an error and it was not about the tag or package being unknown.
      if warn_on_error
        opoo "#{image_uri} inspection returned an error, skipping upload!\n#{inspect_result.stderr}"
        return
      else
        odie "#{image_uri} inspection returned an error!\n#{inspect_result.stderr}"
      end
    elsif keep_old
      # If the tag doesn't exist, ignore `--keep-old`.
      keep_old = false unless inspect_result.status.success?
      # Otherwise, do nothing - the tag already existing is expected behaviour for --keep-old.
    elsif inspect_result.status.success?
      # The tag already exists and we are not passing `--keep-old`.
      if warn_on_error
        opoo "#{image_uri} already exists, skipping upload!"
        return
      else
        odie "#{image_uri} already exists!"
      end
    end
  end

  [formula_name, org, repo, version, rebuild, version_rebuild, image_name, image_uri, keep_old]
end
# Assemble an OCI image layout for every bottle tag in `bottle_hash` and push
# it to GitHub Packages via skopeo. With `keep_old` the existing remote image
# index is downloaded and new platform manifests are appended to it.
sig {
  params(
    user: String, token: String, skopeo: Pathname, formula_full_name: String,
    bottle_hash: T::Hash[String, T.untyped], keep_old: T::Boolean, dry_run: T::Boolean, warn_on_error: T::Boolean
  ).void
}
def upload_bottle(user, token, skopeo, formula_full_name, bottle_hash, keep_old:, dry_run:, warn_on_error:)
  # We run the preupload check twice to prevent TOCTOU bugs.
  result = preupload_check(user, token, skopeo, formula_full_name, bottle_hash,
                           keep_old:, dry_run:, warn_on_error:)
  # Skip upload if preupload check returned early.
  return if result.nil?

  formula_name, org, repo, version, rebuild, version_rebuild, image_name, image_uri, keep_old = *result

  # Build the OCI layout in a scratch directory named after the formula+version.
  root = Pathname("#{formula_name}--#{version_rebuild}")
  FileUtils.rm_rf root
  root.mkpath

  if keep_old
    # Seed the layout with the existing remote image so we can append to it.
    download(user, token, skopeo, image_uri, root, dry_run:)
  else
    write_image_layout(root)
  end

  blobs = root/"blobs/sha256"
  blobs.mkpath

  git_path = bottle_hash["formula"]["tap_git_path"]
  git_revision = bottle_hash["formula"]["tap_git_revision"]
  source_org_repo = "#{org}/#{repo}"
  source = "https://github.com/#{source_org_repo}/blob/#{git_revision.presence || "HEAD"}/#{git_path}"

  formula_core_tap = formula_full_name.exclude?("/")
  documentation = if formula_core_tap
    "https://formulae.brew.sh/formula/#{formula_name}"
  elsif (remote = bottle_hash["formula"]["tap_git_remote"]) && remote.start_with?("https://github.com/")
    remote
  end

  license = bottle_hash["formula"]["license"].to_s
  created_date = bottle_hash["bottle"]["date"]
  if keep_old
    # Reuse annotations/manifests from the downloaded index; the old index
    # blob is removed since a replacement will be written later.
    index = JSON.parse((root/"index.json").read)
    image_index_sha256 = index["manifests"].first["digest"].delete_prefix("sha256:")
    image_index = JSON.parse((blobs/image_index_sha256).read)
    (blobs/image_index_sha256).unlink

    formula_annotations_hash = image_index["annotations"]
    manifests = image_index["manifests"]
  else
    # OCI annotation values for licenses are limited in length.
    image_license = if license.length <= 256
      license
    else
      # TODO: Consider generating a truncated license when over the limit
      require "utils/spdx"
      SPDX.license_expression_to_string(:cannot_represent)
    end
    formula_annotations_hash = {
      "com.github.package.type"                => GITHUB_PACKAGE_TYPE,
      "org.opencontainers.image.created"       => created_date,
      "org.opencontainers.image.description"   => bottle_hash["formula"]["desc"],
      "org.opencontainers.image.documentation" => documentation,
      "org.opencontainers.image.licenses"      => image_license,
      "org.opencontainers.image.ref.name"      => version_rebuild,
      "org.opencontainers.image.revision"      => git_revision,
      "org.opencontainers.image.source"        => source,
      "org.opencontainers.image.title"         => formula_full_name,
      "org.opencontainers.image.url"           => bottle_hash["formula"]["homepage"],
      "org.opencontainers.image.vendor"        => org,
      "org.opencontainers.image.version"       => version.to_s, # Schema accepts strings for version
    }.compact_blank
    manifests = []
  end

  # Track which per-platform tags already exist to reject duplicates.
  processed_image_refs = Set.new
  manifests.each do |manifest|
    processed_image_refs << manifest["annotations"]["org.opencontainers.image.ref.name"]
  end

  # One OCI manifest per bottle platform tag.
  manifests += bottle_hash["bottle"]["tags"].map do |bottle_tag, tag_hash|
    bottle_tag = Utils::Bottles::Tag.from_symbol(bottle_tag.to_sym)

    tag = GitHubPackages.version_rebuild(version, rebuild, bottle_tag.to_s)

    if processed_image_refs.include?(tag)
      puts
      odie "A bottle JSON for #{bottle_tag} is present, but it is already in the image index!"
    else
      processed_image_refs << tag
    end

    local_file = tag_hash["local_filename"]
    odebug "Uploading #{local_file}"

    # Content-address the bottle tarball into the blobs directory.
    tar_gz_sha256 = write_tar_gz(local_file, blobs)

    tab = tag_hash["tab"]
    architecture = TAB_ARCH_TO_PLATFORM_ARCHITECTURE[tab["arch"].presence || bottle_tag.standardized_arch.to_s]
    raise TypeError, "unknown tab['arch']: #{tab["arch"]}" if architecture.blank?

    os = if tab["built_on"].present? && tab["built_on"]["os"].present?
      BUILT_ON_OS_TO_PLATFORM_OS[tab["built_on"]["os"]]
    elsif bottle_tag.linux?
      "linux"
    else
      "darwin"
    end
    raise TypeError, "unknown tab['built_on']['os']: #{tab["built_on"]["os"]}" if os.blank?

    os_version = tab["built_on"]["os_version"].presence if tab["built_on"].present?
    case os
    when "darwin"
      os_version ||= "macOS #{bottle_tag.to_macos_version}"
    when "linux"
      os_version&.delete_suffix!(" LTS")
      os_version ||= OS::LINUX_CI_OS_VERSION
      glibc_version = tab["built_on"]["glibc_version"].presence if tab["built_on"].present?
      glibc_version ||= OS::LINUX_GLIBC_CI_VERSION
      cpu_variant = tab.dig("built_on", "oldest_cpu_family") || Hardware::CPU::INTEL_64BIT_OLDEST_CPU.to_s
    end

    platform_hash = {
      architecture:,
      os:,
      "os.version" => os_version,
    }.compact_blank

    # Uncompressed-layer digest (diff_id) for the image config.
    tar_sha256 = Digest::SHA256.new
    Zlib::GzipReader.open(local_file) do |gz|
      while (data = gz.read(Utils::Gzip::GZIP_BUFFER_SIZE))
        tar_sha256 << data
      end
    end

    config_json_sha256, config_json_size = write_image_config(platform_hash, tar_sha256.hexdigest, blobs)

    documentation = "https://formulae.brew.sh/formula/#{formula_name}" if formula_core_tap

    local_file_size = File.size(local_file)

    path_exec_files_string = if (path_exec_files = tag_hash["path_exec_files"].presence)
      path_exec_files.join(",")
    end

    descriptor_annotations_hash = {
      "org.opencontainers.image.ref.name" => tag,
      "sh.brew.bottle.cpu.variant"        => cpu_variant,
      "sh.brew.bottle.digest"             => tar_gz_sha256,
      "sh.brew.bottle.glibc.version"      => glibc_version,
      "sh.brew.bottle.size"               => local_file_size.to_s,
      "sh.brew.bottle.installed_size"     => tag_hash["installed_size"].to_s,
      "sh.brew.license"                   => license,
      "sh.brew.tab"                       => tab.to_json,
      "sh.brew.path_exec_files"           => path_exec_files_string,
    }.compact_blank

    # TODO: upload/add tag_hash["all_files"] somewhere.

    annotations_hash = formula_annotations_hash.merge(descriptor_annotations_hash).merge(
      {
        "org.opencontainers.image.created"       => created_date,
        "org.opencontainers.image.documentation" => documentation,
        "org.opencontainers.image.title"         => "#{formula_full_name} #{tag}",
      },
    ).compact_blank.sort.to_h

    image_manifest = {
      schemaVersion: 2,
      config:        {
        mediaType: "application/vnd.oci.image.config.v1+json",
        digest:    "sha256:#{config_json_sha256}",
        size:      config_json_size,
      },
      layers:        [{
        mediaType:   "application/vnd.oci.image.layer.v1.tar+gzip",
        digest:      "sha256:#{tar_gz_sha256}",
        size:        File.size(local_file),
        annotations: {
          "org.opencontainers.image.title" => local_file,
        },
      }],
      annotations:   annotations_hash,
    }
    validate_schema!(IMAGE_MANIFEST_SCHEMA_URI, image_manifest)
    manifest_json_sha256, manifest_json_size = write_hash(blobs, image_manifest)

    {
      mediaType:   "application/vnd.oci.image.manifest.v1+json",
      digest:      "sha256:#{manifest_json_sha256}",
      size:        manifest_json_size,
      platform:    platform_hash,
      annotations: descriptor_annotations_hash,
    }
  end

  index_json_sha256, index_json_size = write_image_index(manifests, blobs, formula_annotations_hash)
  raise "Image index too large!" if index_json_size >= 4 * 1024 * 1024 # GitHub will error 500 if too large

  write_index_json(index_json_sha256, index_json_size, root,
                   "org.opencontainers.image.ref.name" => version_rebuild)

  puts
  args = ["copy", "--retry-times=3", "--format=oci", "--all", "oci:#{root}", image_uri.to_s]
  if dry_run
    puts "#{skopeo} #{args.join(" ")} --dest-creds=#{user}:$HOMEBREW_GITHUB_PACKAGES_TOKEN"
  else
    args << "--dest-creds=#{user}:#{token}"
    # Retry with exponential backoff on top of skopeo's own retries.
    retry_count = 0
    begin
      system_command!(skopeo, verbose: true, print_stdout: true, args:)
    rescue ErrorDuringExecution
      retry_count += 1
      odie "Cannot perform an upload to registry after retrying multiple times!" if retry_count >= 10
      sleep 2 ** retry_count
      retry
    end

    package_name = "#{GitHubPackages.repo_without_prefix(repo)}/#{image_name}"
    ohai "Uploaded to https://github.com/orgs/#{org}/packages/container/package/#{package_name}"
  end
end
# Write the OCI `oci-layout` marker file at the layout root.
# Returns [sha256, byte length] as produced by write_hash.
sig { params(root: Pathname).returns([String, Integer]) }
def write_image_layout(root)
  image_layout = { imageLayoutVersion: "1.0.0" }
  validate_schema!(IMAGE_LAYOUT_SCHEMA_URI, image_layout)
  write_hash(root, image_layout, "oci-layout")
end
# Content-address the bottle tarball: hard-link it into the blobs directory
# under its SHA-256 digest and return that digest.
sig { params(local_file: String, blobs: Pathname).returns(String) }
def write_tar_gz(local_file, blobs)
  digest = Digest::SHA256.file(local_file).hexdigest
  FileUtils.ln local_file, blobs/digest, force: true
  digest
end
# Write the OCI image config blob: the platform description plus a rootfs
# whose single diff_id is the uncompressed-layer digest.
sig { params(platform_hash: T::Hash[String, T.untyped], tar_sha256: String, blobs: Pathname).returns([String, Integer]) }
def write_image_config(platform_hash, tar_sha256, blobs)
  image_config = platform_hash.merge({
    rootfs: {
      type:     "layers",
      diff_ids: ["sha256:#{tar_sha256}"],
    },
  })
  validate_schema!(IMAGE_CONFIG_SCHEMA_URI, image_config)
  write_hash(blobs, image_config)
end
# Write the multi-platform image index blob referencing all per-platform
# manifests, carrying the formula-level annotations.
sig { params(manifests: T::Array[T::Hash[String, T.untyped]], blobs: Pathname, annotations: T::Hash[String, String]).returns([String, Integer]) }
def write_image_index(manifests, blobs, annotations)
  image_index = {
    schemaVersion: 2,
    manifests:,
    annotations:,
  }
  validate_schema!(IMAGE_INDEX_SCHEMA_URI, image_index)
  write_hash(blobs, image_index)
end
# Write the top-level `index.json` of the OCI layout, pointing at the image
# index blob written by write_image_index.
sig { params(index_json_sha256: String, index_json_size: Integer, root: Pathname, annotations: T::Hash[String, String]).void }
def write_index_json(index_json_sha256, index_json_size, root, annotations)
  index_json = {
    schemaVersion: 2,
    manifests:     [{
      mediaType: "application/vnd.oci.image.index.v1+json",
      digest:    "sha256:#{index_json_sha256}",
      size:      index_json_size,
      annotations:,
    }],
  }
  validate_schema!(IMAGE_INDEX_SCHEMA_URI, index_json)
  write_hash(root, index_json, "index.json")
end
# Serialise `hash` as pretty JSON, write it to `directory` (content-addressed
# by its SHA-256 unless `filename` is given) and return [sha256, byte size].
sig { params(directory: Pathname, hash: T::Hash[String, T.untyped], filename: T.nilable(String)).returns([String, Integer]) }
def write_hash(directory, hash, filename = nil)
  json = JSON.pretty_generate(hash)
  sha256 = Digest::SHA256.hexdigest(json)
  filename ||= sha256
  path = directory/filename
  path.unlink if path.exist?
  path.write(json)

  # OCI descriptors record the blob size in *bytes*. `String#size` counts
  # characters, which diverges from the written file size (and from what
  # registries verify) whenever the JSON contains multibyte UTF-8, e.g. in
  # formula descriptions. Use bytesize to match the on-disk blob.
  [sha256, json.bytesize]
end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/patch.rb | Library/Homebrew/patch.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "resource"
require "erb"
require "utils/output"
# Helper module for creating patches.
module Patch
  # Build the patch object appropriate for the given strip level and source:
  # - strip == :DATA           -> DATAPatch at -p1
  # - strip is a String        -> StringPatch at -p1 (strip *is* the patch body)
  # - strip is another Symbol  -> strip level; src selects DATA/String/External
  def self.create(strip, src, &block)
    raise ArgumentError, "nil value for strip" if strip.nil?

    if strip == :DATA
      DATAPatch.new(:p1)
    elsif strip.is_a?(String)
      StringPatch.new(:p1, strip)
    elsif strip.is_a?(Symbol)
      if src == :DATA
        DATAPatch.new(strip)
      elsif src.is_a?(String)
        StringPatch.new(strip, src)
      else
        ExternalPatch.new(strip, &block)
      end
    else
      raise ArgumentError, "Unexpected value for strip: #{strip.inspect}"
    end
  end
end
# An abstract class representing a patch embedded into a formula.
class EmbeddedPatch
  include Utils::Output::Mixin

  # Formula/resource owner (set by the DSL after construction).
  attr_writer :owner
  # Strip level symbol passed to patch(1), e.g. :p1.
  attr_reader :strip

  def initialize(strip)
    @strip = strip
  end

  # Embedded patches are stored inline, never fetched from a URL.
  sig { returns(T::Boolean) }
  def external?
    false
  end

  # Subclasses return the patch body as a String.
  def contents; end

  # Substitute the prefix placeholder and pipe the body into patch(1).
  def apply
    data = contents.gsub("@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX)
    # Legacy bare placeholder: still substituted (gsub! mutates and returns
    # truthy only when a replacement happened), but deprecated.
    if data.gsub!("HOMEBREW_PREFIX", HOMEBREW_PREFIX)
      odeprecated "patch with HOMEBREW_PREFIX placeholder",
                  "patch with @@HOMEBREW_PREFIX@@ placeholder"
    end
    args = %W[-g 0 -f -#{strip}]
    Utils.safe_popen_write("patch", *args) { |p| p.write(data) }
  end

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}: #{strip.inspect}>"
  end
end
# A patch at the `__END__` of a formula file.
class DATAPatch < EmbeddedPatch
  attr_accessor :path

  def initialize(strip)
    super
    @path = nil
  end

  # Read the patch body: everything after the `__END__` marker in the file.
  sig { returns(String) }
  def contents
    data = +""
    path.open("rb") do |f|
      # Advance past the `__END__` marker (or to EOF if absent).
      loop { break if (line = f.gets).nil? || /^__END__$/.match?(line) }
      # The remainder of the file is the patch itself.
      f.each_line { |line| data << line }
    end
    data.freeze
  end
end
# A string containing a patch.
class StringPatch < EmbeddedPatch
  def initialize(strip, body)
    super(strip)
    @str = body
  end

  # The patch body supplied at construction time.
  def contents
    @str
  end
end
# A file containing a patch.
class ExternalPatch
  include Utils::Output::Mixin
  extend Forwardable

  # The backing download (a Resource::Patch) and the strip level for patch(1).
  attr_reader :resource, :strip

  # Download-related behaviour is delegated straight to the resource.
  def_delegators :resource,
                 :url, :fetch, :patch_files, :verify_download_integrity,
                 :cached_download, :downloaded?, :clear_cache

  def initialize(strip, &block)
    @strip = strip
    @resource = Resource::Patch.new(&block)
  end

  # External patches are fetched from a URL.
  sig { returns(T::Boolean) }
  def external?
    true
  end

  def owner=(owner)
    resource.owner = owner
    # Version by checksum when available, otherwise by the (encoded) URL.
    resource.version(resource.checksum&.hexdigest || ERB::Util.url_encode(resource.url))
  end

  # Unpack the downloaded patch and apply each patch file with patch(1),
  # substituting the prefix placeholder line by line. Build failures are
  # re-raised as BuildError against the owning formula.
  def apply
    base_dir = Pathname.pwd
    resource.unpack do
      patch_dir = Pathname.pwd
      if patch_files.empty?
        children = patch_dir.children
        # Without explicit `apply` calls we expect exactly one file.
        if children.length != 1 || !children.fetch(0).file?
          raise MissingApplyError, <<~EOS
            There should be exactly one patch file in the staging directory unless
            the "apply" method was used one or more times in the patch-do block.
          EOS
        end
        patch_files << children.fetch(0).basename
      end
      dir = base_dir
      dir /= resource.directory if resource.directory.present?
      dir.cd do
        patch_files.each do |patch_file|
          ohai "Applying #{patch_file}"
          patch_file = patch_dir/patch_file
          Utils.safe_popen_write("patch", "-g", "0", "-f", "-#{strip}") do |p|
            File.foreach(patch_file) do |line|
              data = line.gsub("@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX)
              p.write(data)
            end
          end
        end
      end
    end
  rescue ErrorDuringExecution => e
    onoe e
    # resource.owner is a SoftwareSpec; its owner is the formula.
    f = resource.owner.owner
    cmd, *args = e.cmd
    raise BuildError.new(f, cmd, args, ENV.to_hash)
  end

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}: #{strip.inspect} #{url.inspect}>"
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/tap_constants.rb | Library/Homebrew/tap_constants.rb | # typed: strict
# frozen_string_literal: true
# Match a formula name.
HOMEBREW_TAP_FORMULA_NAME_REGEX = T.let(/(?<name>[\w+\-.@]+)/, Regexp)
# Match taps' formulae, e.g. `someuser/sometap/someformula`.
HOMEBREW_TAP_FORMULA_REGEX = T.let(
  %r{\A(?<user>[^/]+)/(?<repository>[^/]+)/#{HOMEBREW_TAP_FORMULA_NAME_REGEX.source}\Z},
  Regexp,
)
# Match default formula taps' formulae, e.g. `homebrew/core/someformula` or `someformula`.
HOMEBREW_DEFAULT_TAP_FORMULA_REGEX = T.let(
  %r{\A(?:[Hh]omebrew/(?:homebrew-)?core/)?(?<name>#{HOMEBREW_TAP_FORMULA_NAME_REGEX.source})\Z},
  Regexp,
)
# Match taps' remote repository, e.g. `someuser/somerepo` (with or without a
# trailing `.git` or slashes).
HOMEBREW_TAP_REPOSITORY_REGEX = T.let(
  %r{\A.+[/:](?<remote_repository>[^/:]+/[^/:]+?(?=\.git/*\Z|/*\Z))},
  Regexp,
)
# Match a cask token.
HOMEBREW_TAP_CASK_TOKEN_REGEX = T.let(/(?<token>[\w+\-.@]+)/, Regexp)
# Match taps' casks, e.g. `someuser/sometap/somecask`.
HOMEBREW_TAP_CASK_REGEX = T.let(
  %r{\A(?<user>[^/]+)/(?<repository>[^/]+)/#{HOMEBREW_TAP_CASK_TOKEN_REGEX.source}\Z},
  Regexp,
)
# Match default cask taps' casks, e.g. `homebrew/cask/somecask` or `somecask`.
HOMEBREW_DEFAULT_TAP_CASK_REGEX = T.let(
  %r{\A(?:[Hh]omebrew/(?:homebrew-)?cask/)?#{HOMEBREW_TAP_CASK_TOKEN_REGEX.source}\Z},
  Regexp,
)
# Match taps' directory paths, e.g. `HOMEBREW_LIBRARY/Taps/someuser/sometap`.
HOMEBREW_TAP_DIR_REGEX = T.let(
  %r{#{Regexp.escape(HOMEBREW_LIBRARY.to_s)}/Taps/(?<user>[^/]+)/(?<repository>[^/]+)},
  Regexp,
)
# Match taps' formula paths, e.g. `HOMEBREW_LIBRARY/Taps/someuser/sometap/someformula`.
HOMEBREW_TAP_PATH_REGEX = T.let(Regexp.new(HOMEBREW_TAP_DIR_REGEX.source + %r{(?:/.*)?\Z}.source).freeze, Regexp)
# Match official cask taps, e.g `homebrew/cask`.
HOMEBREW_CASK_TAP_REGEX = T.let(
  %r{(?:([Cc]askroom)/(cask)|([Hh]omebrew)/(?:homebrew-)?(cask|cask-[\w-]+))},
  Regexp,
)
# Match official taps' casks, e.g. `homebrew/cask/somecask`.
HOMEBREW_CASK_TAP_CASK_REGEX = T.let(
  %r{\A#{HOMEBREW_CASK_TAP_REGEX.source}/#{HOMEBREW_TAP_CASK_TOKEN_REGEX.source}\Z},
  Regexp,
)
# Match the `homebrew-`/`linuxbrew-` prefix of official repository names.
HOMEBREW_OFFICIAL_REPO_PREFIXES_REGEX = T.let(/\A(home|linux)brew-/, Regexp)
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/keg_only_reason.rb | Library/Homebrew/keg_only_reason.rb | # typed: strict
# frozen_string_literal: true
# Used to annotate formulae that duplicate macOS-provided software
# or cause conflicts when linked in.
class KegOnlyReason
  sig { returns(T.any(Symbol, String)) }
  attr_reader :reason

  sig { params(reason: T.any(Symbol, String), explanation: String).void }
  def initialize(reason, explanation)
    @reason = reason
    @explanation = explanation
  end

  sig { returns(T::Boolean) }
  def versioned_formula?
    @reason == :versioned_formula
  end

  sig { returns(T::Boolean) }
  def provided_by_macos?
    @reason == :provided_by_macos
  end

  sig { returns(T::Boolean) }
  def shadowed_by_macos?
    @reason == :shadowed_by_macos
  end

  sig { returns(T::Boolean) }
  def by_macos?
    provided_by_macos? || shadowed_by_macos?
  end

  sig { returns(T::Boolean) }
  def applicable?
    # macOS reasons aren't applicable on other OSs
    # (see extend/os/mac/keg_only_reason for override on macOS)
    !by_macos?
  end

  # Human-readable explanation; a custom explanation wins over the built-in
  # text for the well-known reason symbols.
  sig { returns(String) }
  def to_s
    return @explanation unless @explanation.empty?

    message = if versioned_formula?
      "this is an alternate version of another formula"
    elsif provided_by_macos?
      "macOS already provides this software and installing another version in\n" \
        "parallel can cause all kinds of trouble"
    elsif shadowed_by_macos?
      "macOS provides similar software and installing this software in\n" \
        "parallel can cause all kinds of trouble"
    else
      @reason.to_s
    end
    message.strip
  end

  # JSON-friendly representation; symbol reasons keep their leading colon.
  sig { returns(T::Hash[String, String]) }
  def to_hash
    {
      "reason"      => @reason.is_a?(Symbol) ? @reason.inspect : @reason.to_s,
      "explanation" => @explanation,
    }
  end
end
require "extend/os/keg_only_reason"
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/download_strategy.rb | Library/Homebrew/download_strategy.rb | # typed: strict
# frozen_string_literal: true
require "json"
require "time"
require "unpack_strategy"
require "lazy_object"
require "lock_file"
require "system_command"
require "utils/output"
# Need to define this before requiring Mechanize to avoid:
# uninitialized constant Mechanize
# rubocop:disable Lint/EmptyClass
class Mechanize; end
require "vendor/gems/mechanize/lib/mechanize/http/content_disposition_parser"
# rubocop:enable Lint/EmptyClass
require "utils/curl"
require "utils/github"
require "utils/timer"
require "github_packages"
# @abstract Abstract superclass for all download strategies.
class AbstractDownloadStrategy
  extend T::Helpers
  include FileUtils
  include Context
  include SystemCommand::Mixin
  include Utils::Output::Mixin

  abstract!

  # The download URL.
  #
  # @api public
  sig { returns(String) }
  attr_reader :url

  # Directory downloads are cached into (defaults to HOMEBREW_CACHE).
  sig { returns(Pathname) }
  attr_reader :cache

  # Extra strategy-specific options passed at construction.
  sig { returns(T::Hash[Symbol, T.untyped]) }
  attr_reader :meta

  sig { returns(String) }
  attr_reader :name

  sig { returns(T.nilable(T.any(String, Version))) }
  attr_reader :version

  private :meta, :name, :version

  sig { params(url: String, name: String, version: T.nilable(T.any(String, Version)), meta: T.untyped).void }
  def initialize(url, name, version, **meta)
    @cached_location = T.let(nil, T.nilable(Pathname))
    @ref_type = T.let(nil, T.nilable(Symbol))
    @ref = T.let(nil, T.untyped)
    @url = url
    @name = name
    @version = version
    @cache = T.let(meta.fetch(:cache, HOMEBREW_CACHE), Pathname)
    @meta = T.let(meta, T::Hash[Symbol, T.untyped])
    @quiet = T.let(false, T.nilable(T::Boolean))
  end

  # Download and cache the resource at {#cached_location}.
  #
  # @api public
  sig { overridable.params(timeout: T.nilable(T.any(Float, Integer))).void }
  def fetch(timeout: nil); end

  # Total bytes downloaded if available.
  sig { overridable.returns(T.nilable(Integer)) }
  def fetched_size; end

  # Total download size if available.
  sig { overridable.returns(T.nilable(Integer)) }
  def total_size; end

  # Location of the cached download.
  #
  # @api public
  sig { abstract.returns(Pathname) }
  def cached_location; end

  # Disable any output during downloading.
  #
  # @api public
  sig { void }
  def quiet!
    @quiet = T.let(true, T.nilable(T::Boolean))
  end

  # Whether output is suppressed (either per-strategy or globally via Context).
  sig { returns(T::Boolean) }
  def quiet?
    Context.current.quiet? || @quiet || false
  end

  # Unpack {#cached_location} into the current working directory.
  #
  # Additionally, if a block is given, the working directory was previously empty
  # and a single directory is extracted from the archive, the block will be called
  # with the working directory changed to that directory. Otherwise this method
  # will return, or the block will be called, without changing the current working
  # directory.
  #
  # @api public
  sig { overridable.params(block: T.untyped).void }
  def stage(&block)
    UnpackStrategy.detect(cached_location,
                          prioritize_extension: true,
                          ref_type: @ref_type, ref: @ref)
                  .extract_nestedly(basename:,
                                    prioritize_extension: true,
                                    verbose:              verbose? && !quiet?)
    chdir(&block) if block
  end

  # Run the block inside the single extracted directory (if there is exactly
  # one); otherwise run it in place.
  sig { params(block: T.untyped).void }
  def chdir(&block)
    entries = Dir["*"]
    raise "Empty archive" if entries.empty?

    if entries.length != 1
      yield
      return
    end

    if File.directory? entries.fetch(0)
      Dir.chdir(entries.fetch(0), &block)
    else
      yield
    end
  end
  private :chdir

  # Returns the most recent modified time for all files in the current working directory after stage.
  #
  # @api public
  sig { overridable.returns(Time) }
  def source_modified_time
    Pathname.pwd.to_enum(:find).select(&:file?).map(&:mtime).max
  end

  # Remove {#cached_location} and any other files associated with the resource
  # from the cache.
  #
  # @api public
  sig { overridable.void }
  def clear_cache
    rm_rf(cached_location)
  end

  sig { returns(Pathname) }
  def basename
    cached_location.basename
  end

  # Suppress ohai output when quiet.
  sig { override.params(title: T.any(String, Exception), sput: T.anything).void }
  def ohai(title, *sput)
    super unless quiet?
  end

  private

  # Suppress puts output when quiet.
  sig { params(args: T.anything).void }
  def puts(*args)
    super unless quiet?
  end

  # Run a command without echoing stderr.
  sig { params(args: String, options: T.untyped).returns(SystemCommand::Result) }
  def silent_command(*args, **options)
    system_command(*args, print_stderr: false, env:, **options)
  end

  # Run a command, raising on failure, honouring quiet/verbose settings.
  sig { params(args: String, options: T.untyped).returns(SystemCommand::Result) }
  def command!(*args, **options)
    system_command!(
      *args,
      env: env.merge(options.fetch(:env, {})),
      **command_output_options,
      **options,
    )
  end

  sig { returns(T::Hash[Symbol, T::Boolean]) }
  def command_output_options
    {
      print_stdout: !quiet?,
      print_stderr: !quiet?,
      verbose:      verbose? && !quiet?,
    }
  end

  # Extra environment for spawned commands; subclasses may override.
  sig { overridable.returns(T::Hash[String, String]) }
  def env
    {}
  end
end
# @abstract Abstract superclass for all download strategies downloading from a version control system.
class VCSDownloadStrategy < AbstractDownloadStrategy
  abstract!

  sig { override.returns(Pathname) }
  attr_reader :cached_location

  # Recognised ref specifiers, in priority order.
  REF_TYPES = [:tag, :branch, :revisions, :revision].freeze

  sig { params(url: String, name: String, version: T.nilable(T.any(String, Version)), meta: T.untyped).void }
  def initialize(url, name, version, **meta)
    super
    extracted_ref = extract_ref(meta)
    @ref_type = T.let(extracted_ref.fetch(0), T.nilable(Symbol))
    @ref = T.let(extracted_ref.fetch(1), T.untyped)
    @revision = T.let(meta[:revision], T.nilable(String))
    @cached_location = T.let(@cache/Utils.safe_filename("#{name}--#{cache_tag}"), Pathname)
  end

  # Download and cache the repository at {#cached_location}.
  #
  # @api public
  sig { override.params(timeout: T.nilable(T.any(Float, Integer))).void }
  def fetch(timeout: nil)
    end_time = Time.now + timeout if timeout

    ohai "Cloning #{url}"

    # Update an existing valid clone, otherwise (re-)clone from scratch.
    if cached_location.exist? && repo_valid?
      puts "Updating #{cached_location}"
      update(timeout: end_time)
    elsif cached_location.exist?
      puts "Removing invalid repository from cache"
      clear_cache
      clone_repo(timeout: end_time)
    else
      clone_repo(timeout: end_time)
    end

    v = version
    v.update_commit(last_commit) if v.is_a?(Version) && head?

    # If a tag was pinned to an expected revision, verify it still matches.
    return if @ref_type != :tag || @revision.blank? || current_revision.blank? || current_revision == @revision

    raise <<~EOS
      #{@ref} tag should be #{@revision}
      but is actually #{current_revision}
    EOS
  end

  sig { returns(String) }
  def fetch_last_commit
    fetch
    last_commit
  end

  # Whether `commit` differs from the freshly-fetched upstream HEAD.
  sig { overridable.params(commit: T.nilable(String)).returns(T::Boolean) }
  def commit_outdated?(commit)
    @last_commit ||= T.let(fetch_last_commit, T.nilable(String))
    commit != @last_commit
  end

  sig { returns(T::Boolean) }
  def head?
    v = version
    v.is_a?(Version) ? v.head? : false
  end

  # Return the most recent modified timestamp.
  #
  # @api public
  sig { overridable.returns(String) }
  def last_commit
    source_modified_time.to_i.to_s
  end

  private

  # Cache-directory suffix distinguishing VCS strategies; subclasses provide it.
  sig { abstract.returns(String) }
  def cache_tag; end

  # Whether the cached clone is a usable repository of the right kind.
  sig { abstract.returns(T::Boolean) }
  def repo_valid?; end

  sig { abstract.params(timeout: T.nilable(Time)).void }
  def clone_repo(timeout: nil); end

  sig { abstract.params(timeout: T.nilable(Time)).void }
  def update(timeout: nil); end

  sig { overridable.returns(T.nilable(String)) }
  def current_revision; end

  # Pick the first ref specifier present in `specs` (per REF_TYPES priority).
  sig { params(specs: T::Hash[T.nilable(Symbol), T.untyped]).returns([T.nilable(Symbol), T.untyped]) }
  def extract_ref(specs)
    key = REF_TYPES.find { |type| specs.key?(type) }
    [key, specs[key]]
  end
end
# @abstract Abstract superclass for all download strategies downloading a single file.
class AbstractFileDownloadStrategy < AbstractDownloadStrategy
  abstract!

  # Path for storing an incomplete download while the download is still in progress.
  #
  # @api public
  sig { returns(Pathname) }
  def temporary_path
    @temporary_path ||= T.let(Pathname.new("#{cached_location}.incomplete"), T.nilable(Pathname))
  end

  # Path of the symlink (whose name includes the resource name, version and extension)
  # pointing to {#cached_location}.
  #
  # @api public
  sig { returns(Pathname) }
  def symlink_location
    return T.must(@symlink_location) if defined?(@symlink_location)

    ext = Pathname(parse_basename(url)).extname
    @symlink_location = T.let(@cache/Utils.safe_filename("#{name}--#{version}#{ext}"), T.nilable(Pathname))
    T.must(@symlink_location)
  end

  # Path for storing the completed download.
  #
  # @api public
  sig { override.returns(Pathname) }
  def cached_location
    return @cached_location if @cached_location

    url_sha256 = Digest::SHA256.hexdigest(url)
    # Reuse an existing cached download for this URL regardless of basename,
    # as long as exactly one (complete) candidate exists.
    downloads = Pathname.glob(HOMEBREW_CACHE/"downloads/#{url_sha256}--*")
                        .reject { |path| path.extname.end_with?(".incomplete") }

    @cached_location = T.let(
      if downloads.one?
        downloads.fetch(0)
      else
        HOMEBREW_CACHE/"downloads/#{url_sha256}--#{Utils.safe_filename(resolved_basename)}"
      end, T.nilable(Pathname)
    )
    T.must(@cached_location)
  end

  sig { override.returns(T.nilable(Integer)) }
  def fetched_size
    File.size?(temporary_path) || File.size?(cached_location)
  end

  # Basename with the "<sha256>--" cache prefix stripped.
  sig { returns(Pathname) }
  def basename
    cached_location.basename.sub(/^[\da-f]{64}--/, "")
  end

  private

  sig { returns(String) }
  def resolved_url
    resolved_url, = resolved_url_and_basename
    resolved_url
  end

  sig { returns(String) }
  def resolved_basename
    _, resolved_basename = resolved_url_and_basename
    resolved_basename
  end

  # Memoised [url, basename]; subclasses (e.g. curl) override to follow redirects.
  sig { returns([String, String]) }
  def resolved_url_and_basename
    return T.must(@resolved_url_and_basename) if defined?(@resolved_url_and_basename)

    T.must(@resolved_url_and_basename = T.let([url, parse_basename(url)], T.nilable([String, String])))
  end

  # Derive a sensible filename from a URL, looking at both the path and
  # (optionally) query parameters such as response-content-disposition.
  sig { params(url: String, search_query: T::Boolean).returns(String) }
  def parse_basename(url, search_query: true)
    components = { path: T.let([], T::Array[String]), query: T.let([], T::Array[String]) }

    if url.match?(URI::RFC2396_PARSER.make_regexp)
      uri = URI(url)

      if (uri_query = uri.query.presence)
        URI.decode_www_form(uri_query).each do |key, param|
          components[:query] << param if search_query
          next if key != "response-content-disposition"

          # A Content-Disposition filename in the query wins outright.
          query_basename = param[/attachment;\s*filename=(["']?)(.+)\1/i, 2]
          return File.basename(query_basename) if query_basename
        end
      end

      if (uri_path = uri.path.presence)
        components[:path] = uri_path.split("/").filter_map do |part|
          URI::RFC2396_PARSER.unescape(part).presence
        end
      end
    else
      components[:path] = [url]
    end

    # We need a Pathname because we've monkeypatched extname to support double
    # extensions (e.g. tar.gz).
    # Given a URL like https://example.com/download.php?file=foo-1.0.tar.gz
    # the basename we want is "foo-1.0.tar.gz", not "download.php".
    [*components[:path], *components[:query]].reverse_each do |path|
      path = Pathname(path)
      return path.basename.to_s if path.extname.present?
    end

    filename = components[:path].last
    return "" if filename.blank?

    File.basename(filename)
  end
end
# Strategy for downloading files using `curl`.
#
# @api public
class CurlDownloadStrategy < AbstractFileDownloadStrategy
  include Utils::Curl

  # url, basename, time, file_size, content_type, is_redirection
  URLMetadata = T.type_alias { [String, String, T.nilable(Time), T.nilable(Integer), T.nilable(String), T::Boolean] }

  sig { returns(T::Array[String]) }
  attr_reader :mirrors

  sig { params(url: String, name: String, version: T.nilable(T.any(String, Version)), meta: T.untyped).void }
  def initialize(url, name, version, **meta)
    @try_partial = T.let(true, T::Boolean)
    @mirrors = T.let(meta.fetch(:mirrors, []), T::Array[String])
    @file_size = T.let(nil, T.nilable(Integer))

    # Merge `:header` with `:headers`.
    if (header = meta.delete(:header))
      meta[:headers] ||= []
      meta[:headers] << header
    end

    super
  end

  # Download and cache the file at {#cached_location}.
  #
  # Resolves headers first to decide whether the cached copy is still fresh,
  # falls back to mirrors on failure, and holds a download lock throughout.
  #
  # @api public
  sig { override.params(timeout: T.nilable(T.any(Float, Integer))).void }
  def fetch(timeout: nil)
    end_time = Time.now + timeout if timeout

    download_lock = DownloadLock.new(temporary_path)
    begin
      download_lock.lock

      urls = [url, *mirrors]

      begin
        # Shift one candidate URL per attempt; `retry` below moves to the next.
        url = T.must(urls.shift)

        if (domain = Homebrew::EnvConfig.artifact_domain)
          url = url.sub(%r{^https?://#{GitHubPackages::URL_DOMAIN}/}o, "#{domain.chomp("/")}/")
          urls = [] if Homebrew::EnvConfig.artifact_domain_no_fallback?
        end

        ohai "Downloading #{url}"

        cached_location_valid = cached_location.exist?

        resolved_url, _, last_modified, @file_size, content_type, is_redirection = begin
          resolve_url_basename_time_file_size(url, timeout: Utils::Timer.remaining!(end_time))
        rescue ErrorDuringExecution
          # Header resolution failure is tolerable when a cached copy exists.
          raise unless cached_location_valid
        end

        # Authorization is no longer valid after redirects
        meta[:headers]&.delete_if { |header| header.start_with?("Authorization") } if is_redirection

        # The cached location is no longer fresh if either:
        # - Last-Modified value is newer than the file's timestamp
        # - Content-Length value is different than the file's size
        if cached_location_valid && (!content_type.is_a?(String) || !content_type.start_with?("text/"))
          if last_modified && last_modified > cached_location.mtime
            ohai "Ignoring #{cached_location}",
                 "Cached modified time #{cached_location.mtime.iso8601} is before " \
                 "Last-Modified header: #{last_modified.iso8601}"
            cached_location_valid = false
          end
          if @file_size&.nonzero? && @file_size != cached_location.size
            ohai "Ignoring #{cached_location}",
                 "Cached size #{cached_location.size} differs from " \
                 "Content-Length header: #{@file_size}"
            cached_location_valid = false
          end
        end

        if cached_location_valid
          puts "Already downloaded: #{cached_location}"
        else
          begin
            _fetch(url:, resolved_url: T.must(resolved_url), timeout: Utils::Timer.remaining!(end_time))
          rescue ErrorDuringExecution
            raise CurlDownloadStrategyError, url
          end
          # Only promote the temporary file to the cache once fully downloaded.
          cached_location.dirname.mkpath
          temporary_path.rename(cached_location.to_s)
        end

        symlink_location.dirname.mkpath
        FileUtils.ln_s cached_location.relative_path_from(symlink_location.dirname), symlink_location, force: true
      rescue CurlDownloadStrategyError
        raise if urls.empty?

        puts "Trying a mirror..."
        retry
      rescue Timeout::Error => e
        raise Timeout::Error, "Timed out downloading #{self.url}: #{e}"
      end
    ensure
      download_lock.unlock(unlink: true)
    end
  end

  # Total expected size in bytes (from the Content-Length header), if known.
  sig { override.returns(T.nilable(Integer)) }
  def total_size
    @file_size
  end

  sig { override.void }
  def clear_cache
    super
    rm_rf(temporary_path)
  end

  # Resolve just the Last-Modified time and Content-Length for the URL.
  sig { params(timeout: T.nilable(T.any(Float, Integer))).returns([T.nilable(Time), Integer]) }
  def resolved_time_file_size(timeout: nil)
    _, _, time, file_size, = resolve_url_basename_time_file_size(url, timeout:)
    [time, T.must(file_size)]
  end

  private

  sig { params(timeout: T.nilable(T.any(Float, Integer))).returns([String, String]) }
  def resolved_url_and_basename(timeout: nil)
    resolved_url, basename, = resolve_url_basename_time_file_size(url, timeout: nil)
    [resolved_url, basename]
  end

  # Issue a HEAD-style request to follow redirects and collect the final URL,
  # basename, Last-Modified, Content-Length and Content-Type. Results are
  # memoized per URL in `@resolved_info_cache`.
  sig { overridable.params(url: String, timeout: T.nilable(T.any(Float, Integer))).returns(URLMetadata) }
  def resolve_url_basename_time_file_size(url, timeout: nil)
    @resolved_info_cache ||= T.let({}, T.nilable(T::Hash[String, URLMetadata]))
    return @resolved_info_cache.fetch(url) if @resolved_info_cache.include?(url)

    begin
      parsed_output = curl_headers(url.to_s, wanted_headers: ["content-disposition"], timeout:)
    rescue ErrorDuringExecution
      # Could not reach the server: fall back to metadata derived from the URL.
      return [url, parse_basename(url), nil, nil, nil, false]
    end

    parsed_headers = parsed_output.fetch(:responses).map { |r| r.fetch(:headers) }

    final_url = curl_response_follow_redirections(parsed_output.fetch(:responses), url)

    content_disposition_parser = Mechanize::HTTP::ContentDispositionParser.new

    parse_content_disposition = lambda do |line|
      next unless (content_disposition = content_disposition_parser.parse(line.sub(/; *$/, ""), true))

      filename = nil

      if (filename_with_encoding = content_disposition.parameters["filename*"])
        encoding, encoded_filename = filename_with_encoding.split("''", 2)
        # If the `filename*` has incorrectly added double quotes, e.g.
        # content-disposition: attachment; filename="myapp-1.2.3.pkg"; filename*=UTF-8''"myapp-1.2.3.pkg"
        # Then the encoded_filename will come back as the empty string, in which case we should fall back to the
        # `filename` parameter.
        if encoding.present? && encoded_filename.present?
          filename = URI.decode_www_form_component(encoded_filename).encode(encoding)
        end
      end

      filename = content_disposition.filename if filename.blank?
      next if filename.blank?

      # Servers may include '/' in their Content-Disposition filename header. Take only the basename of this, because:
      # - Unpacking code assumes this is a single file - not something living in a subdirectory.
      # - Directory traversal attacks are possible without limiting this to just the basename.
      File.basename(filename)
    end

    filenames = parsed_headers.flat_map do |headers|
      next [] unless (header = headers["content-disposition"])

      [*parse_content_disposition.call("Content-Disposition: #{header}")]
    end

    time = parsed_headers
           .flat_map { |headers| [*headers["last-modified"]] }
           .filter_map do |t|
             t.match?(/^\d+$/) ? Time.at(t.to_i) : Time.parse(t)
           rescue ArgumentError # When `Time.parse` gets a badly formatted date.
             nil
           end

    file_size = parsed_headers
                .flat_map { |headers| [*headers["content-length"]&.to_i] }
                .last

    content_type = parsed_headers
                   .flat_map { |headers| [*headers["content-type"]] }
                   .last

    is_redirection = url != final_url
    # Don't search the query string for a basename after redirects.
    basename = filenames.last || parse_basename(final_url, search_query: !is_redirection)

    @resolved_info_cache[url] = [final_url, basename, time.last, file_size, content_type, is_redirection]
  end

  sig {
    overridable.params(url: String, resolved_url: String, timeout: T.nilable(T.any(Float, Integer)))
      .returns(T.nilable(SystemCommand::Result))
  }
  def _fetch(url:, resolved_url:, timeout:)
    ohai "Downloading from #{resolved_url}" if url != resolved_url

    if Homebrew::EnvConfig.no_insecure_redirect? &&
       url.start_with?("https://") && !resolved_url.start_with?("https://")
      $stderr.puts "HTTPS to HTTP redirect detected and `$HOMEBREW_NO_INSECURE_REDIRECT` is set."
      raise CurlDownloadStrategyError, url
    end

    _curl_download resolved_url, temporary_path, timeout
  end

  sig {
    params(resolved_url: String, to: T.any(Pathname, String), timeout: T.nilable(T.any(Float, Integer)))
      .returns(T.nilable(SystemCommand::Result))
  }
  def _curl_download(resolved_url, to, timeout)
    curl_download resolved_url, to:, try_partial: @try_partial, timeout:
  end

  # Curl options to be always passed to curl,
  # with raw head calls (`curl --head`) or with actual `fetch`.
  sig { returns(T::Array[String]) }
  def _curl_args
    args = []

    args += ["-b", meta.fetch(:cookies).map { |k, v| "#{k}=#{v}" }.join(";")] if meta.key?(:cookies)

    args += ["-e", meta.fetch(:referer)] if meta.key?(:referer)

    args += ["--user", meta.fetch(:user)] if meta.key?(:user)

    args += meta.fetch(:headers, []).flat_map { |h| ["--header", h.strip] }

    args
  end

  sig { returns(T::Hash[Symbol, T.any(String, Symbol)]) }
  def _curl_opts
    meta.slice(:user_agent)
  end

  sig { override.params(args: String, options: T.untyped).returns(SystemCommand::Result) }
  def curl_output(*args, **options)
    super(*_curl_args, *args, **_curl_opts, **options)
  end

  sig {
    override.params(args: String, print_stdout: T.any(T::Boolean, Symbol), options: T.untyped)
      .returns(SystemCommand::Result)
  }
  def curl(*args, print_stdout: true, **options)
    # Fail fast on a slow connection when mirrors are available to try next.
    options[:connect_timeout] = 15 unless mirrors.empty?
    super(*_curl_args, *args, **_curl_opts, **command_output_options, **options)
  end
end
# Strategy for downloading a file using Homebrew's `curl`.
#
# Behaves like {CurlDownloadStrategy} but forces the Homebrew-installed
# `curl` formula for every request, raising when it is not installed.
#
# @api public
class HomebrewCurlDownloadStrategy < CurlDownloadStrategy
  private

  sig {
    params(resolved_url: String, to: T.any(Pathname, String), timeout: T.nilable(T.any(Float, Integer)))
      .returns(T.nilable(SystemCommand::Result))
  }
  def _curl_download(resolved_url, to, timeout)
    ensure_homebrew_curl!
    curl_download resolved_url, to:, try_partial: @try_partial, timeout:, use_homebrew_curl: true
  end

  sig { override.params(args: String, options: T.untyped).returns(SystemCommand::Result) }
  def curl_output(*args, **options)
    ensure_homebrew_curl!
    super(*args, **options.merge(use_homebrew_curl: true))
  end

  # Raise unless the `curl` formula has at least one installed version.
  sig { void }
  def ensure_homebrew_curl!
    raise HomebrewCurlDownloadStrategyError, url unless Formula["curl"].any_version_installed?
  end
end
# Strategy for downloading a file from a GitHub Packages URL.
#
# @api public
class CurlGitHubPackagesDownloadStrategy < CurlDownloadStrategy
  # Pre-set the basename so no network round trip is needed to resolve it.
  sig { params(resolved_basename: String).returns(T.nilable(String)) }
  attr_writer :resolved_basename

  sig { params(url: String, name: String, version: T.nilable(T.any(String, Version)), meta: T.untyped).void }
  def initialize(url, name, version, **meta)
    meta[:headers] ||= []
    # GitHub Packages authorization header.
    # HOMEBREW_GITHUB_PACKAGES_AUTH set in brew.sh
    # If using a private GHCR mirror with no Authentication set or HOMEBREW_GITHUB_PACKAGES_AUTH is empty
    # then do not add the header. In all other cases add it.
    if HOMEBREW_GITHUB_PACKAGES_AUTH.presence && (
      !Homebrew::EnvConfig.artifact_domain.presence ||
      Homebrew::EnvConfig.docker_registry_basic_auth_token.presence ||
      Homebrew::EnvConfig.docker_registry_token.presence
    )
      meta[:headers] << "Authorization: #{HOMEBREW_GITHUB_PACKAGES_AUTH}"
    end

    super
  end

  private

  # Skip header resolution entirely when the basename was supplied up front
  # via {#resolved_basename=}; otherwise defer to the curl-based resolution.
  sig { override.params(url: String, timeout: T.nilable(T.any(Float, Integer))).returns(URLMetadata) }
  def resolve_url_basename_time_file_size(url, timeout: nil)
    return super if @resolved_basename.blank?

    [url, @resolved_basename, nil, nil, nil, false]
  end
end
# Strategy for downloading a file from an Apache Mirror URL.
#
# @api public
class CurlApacheMirrorDownloadStrategy < CurlDownloadStrategy
  sig { returns(T::Array[String]) }
  def mirrors
    combined_mirrors
  end

  private

  # User-specified mirrors plus Apache's backup mirrors. Projects that have
  # moved to the attic have no backup mirrors, so none are appended then.
  sig { returns(T::Array[String]) }
  def combined_mirrors
    return T.must(@combined_mirrors) if defined?(@combined_mirrors)

    backup_mirrors = unless apache_mirrors["in_attic"]
      apache_mirrors.fetch("backup", [])
                    .map { |mirror| "#{mirror}#{apache_mirrors["path_info"]}" }
    end

    T.must(@combined_mirrors = T.let([*@mirrors, *backup_mirrors], T.nilable(T::Array[String])))
  end

  sig { override.params(url: String, timeout: T.nilable(T.any(Float, Integer))).returns(URLMetadata) }
  def resolve_url_basename_time_file_size(url, timeout: nil)
    if url == self.url
      # Rewrite the original URL to the preferred mirror (or the archive
      # server for retired projects). Mirror URLs are resolved as-is.
      preferred = if apache_mirrors["in_attic"]
        "https://archive.apache.org/dist/"
      else
        apache_mirrors["preferred"]
      end

      super("#{preferred}#{apache_mirrors["path_info"]}", timeout:)
    else
      super
    end
  end

  # Mirror metadata fetched (and memoized) from the Apache mirror CGI by
  # appending `&asjson=1` to the URL.
  sig { returns(T::Hash[String, T.untyped]) }
  def apache_mirrors
    return T.must(@apache_mirrors) if defined?(@apache_mirrors)

    json = curl_output("--silent", "--location", "#{url}&asjson=1").stdout
    T.must(@apache_mirrors = T.let(JSON.parse(json), T.nilable(T::Hash[String, T.untyped])))
  rescue JSON::ParserError
    raise CurlDownloadStrategyError, "Couldn't determine mirror, try again later."
  end
end
# Strategy for downloading via an HTTP POST request using `curl`.
# Query parameters on the URL are converted into POST parameters.
#
# @api public
class CurlPostDownloadStrategy < CurlDownloadStrategy
  private

  sig {
    override.params(url: String, resolved_url: String, timeout: T.nilable(T.any(Float, Integer)))
      .returns(T.nilable(SystemCommand::Result))
  }
  def _fetch(url:, resolved_url:, timeout:)
    curl_args =
      if meta.key?(:data)
        # Explicit POST data supplied via `meta[:data]`: form-encode each datum.
        post_data = meta[:data].flat_map { |datum| ["-d", URI.encode_www_form([datum])] }
        [url, *post_data]
      else
        # Otherwise turn the URL's query string into the POST body.
        base, query = url.split("?", 2)
        if query.nil?
          [base, "-X", "POST"]
        else
          [base, "-d", query]
        end
      end

    curl_download(*curl_args, to: temporary_path, try_partial: @try_partial, timeout:)
  end
end
# Strategy for downloading archives without automatically extracting them.
# (Useful for downloading `.jar` files.)
#
# @api public
class NoUnzipCurlDownloadStrategy < CurlDownloadStrategy
  # Copy the download into the staging directory as-is, then run the block.
  sig { override.params(_block: T.untyped).void }
  def stage(&_block)
    unpacker = UnpackStrategy::Uncompressed.new(cached_location)
    unpacker.extract(basename:, verbose: verbose? && !quiet?)
    yield if block_given?
  end
end
# Strategy for extracting local binary packages.
class LocalBottleDownloadStrategy < AbstractFileDownloadStrategy
  # TODO: Call `super` here
  # rubocop:disable Lint/MissingSuper
  sig { params(path: Pathname).void }
  def initialize(path)
    # The bottle already exists on disk; use its path directly as the cache.
    @cached_location = T.let(path, Pathname)
  end
  # rubocop:enable Lint/MissingSuper

  sig { override.void }
  def clear_cache
    # Path is used directly and not cached.
  end
end
# Strategy for downloading a Subversion repository.
#
# @api public
class SubversionDownloadStrategy < VCSDownloadStrategy
  sig { params(url: String, name: String, version: T.nilable(T.any(String, Version)), meta: T.untyped).void }
  def initialize(url, name, version, **meta)
    super
    # Strip the artificial `svn+http://` scheme back to plain `http://`.
    @url = @url.sub("svn+http://", "")
  end

  # Download and cache the repository at {#cached_location}.
  #
  # @api public
  sig { override.params(timeout: T.nilable(T.any(Float, Integer))).void }
  def fetch(timeout: nil)
    # Start from scratch if the cached working copy points at a different URL
    # and cannot be `svn switch`ed to the requested one.
    if @url.chomp("/") != repo_url || !silent_command("svn", args: ["switch", @url, cached_location]).success?
      clear_cache
    end
    super
  end

  # Returns the most recent modified time for all files in the current working directory after stage.
  #
  # @api public
  sig { override.returns(Time) }
  def source_modified_time
    require "utils/svn"

    # `--show-item` is only available from Subversion 1.9 onwards.
    time = if Version.new(T.must(Utils::Svn.version)) >= Version.new("1.9")
      silent_command("svn", args: ["info", "--show-item", "last-changed-date"], chdir: cached_location).stdout
    else
      silent_command("svn", args: ["info"], chdir: cached_location).stdout[/^Last Changed Date: (.+)$/, 1]
    end
    Time.parse T.must(time)
  end

  # Return last commit's unique identifier for the repository.
  #
  # @api public
  sig { override.returns(String) }
  def last_commit
    silent_command("svn", args: ["info", "--show-item", "revision"], chdir: cached_location).stdout.strip
  end

  private

  # URL the cached working copy was checked out from, or nil if unknown.
  sig { returns(T.nilable(String)) }
  def repo_url
    silent_command("svn", args: ["info"], chdir: cached_location).stdout.strip[/^URL: (.+)$/, 1]
  end

  # Yield each (name, URL) pair from the repository's `svn:externals` property.
  sig { params(_block: T.proc.params(arg0: String, arg1: String).void).void }
  def externals(&_block)
    out = silent_command("svn", args: ["propget", "svn:externals", @url]).stdout
    out.chomp.split("\n").each do |line|
      name, url = line.split(/\s+/)
      yield T.must(name), T.must(url)
    end
  end

  sig {
    params(target: Pathname, url: String, revision: T.nilable(String), ignore_externals: T::Boolean,
           timeout: T.nilable(Time)).void
  }
  def fetch_repo(target, url, revision = nil, ignore_externals: false, timeout: nil)
    # Use "svn update" when the repository already exists locally.
    # This saves on bandwidth and will have a similar effect to verifying the
    # cache as it will make any changes to get the right revision.
    args = []
    args << "--quiet" unless verbose?

    if revision
      ohai "Checking out #{@ref}"
      args << "-r" << revision
    end

    args << "--ignore-externals" if ignore_externals

    require "utils/svn"
    args.concat Utils::Svn.invalid_cert_flags if meta[:trust_cert] == true

    if target.directory?
      command! "svn", args: ["update", *args], chdir: target.to_s, timeout: Utils::Timer.remaining(timeout)
    else
      command! "svn", args: ["checkout", url, target, *args], timeout: Utils::Timer.remaining(timeout)
    end
  end

  sig { override.returns(String) }
  def cache_tag
    head? ? "svn-HEAD" : "svn"
  end

  sig { override.returns(T::Boolean) }
  def repo_valid?
    (cached_location/".svn").directory?
  end

  sig { override.params(timeout: T.nilable(Time)).void }
  def clone_repo(timeout: nil)
    case @ref_type
    when :revision
      fetch_repo cached_location, @url, @ref, timeout:
    when :revisions
      # nil is OK for main_revision, as fetch_repo will then get latest
      main_revision = @ref[:trunk]
      fetch_repo(cached_location, @url, main_revision, ignore_externals: true, timeout:)

      # Each external is fetched separately so per-external revisions apply.
      externals do |external_name, external_url|
        fetch_repo cached_location/external_name, external_url, @ref[external_name], ignore_externals: true,
                                                                                     timeout:
      end
    else
      fetch_repo cached_location, @url, timeout:
    end
  end
  alias update clone_repo
end
# Strategy for downloading a Git repository.
#
# @api public
class GitDownloadStrategy < VCSDownloadStrategy
MINIMUM_COMMIT_HASH_LENGTH = 7
sig { params(url: String, name: String, version: T.nilable(T.any(String, Version)), meta: T.untyped).void }
def initialize(url, name, version, **meta)
# Needs to be before the call to `super`, as the VCSDownloadStrategy's
# constructor calls `cache_tag` and sets the cache path.
@only_path = meta[:only_path]
if @only_path.present?
# "Cone" mode of sparse checkout requires patterns to be directories
@only_path = T.let("/#{@only_path}", String) unless @only_path.start_with?("/")
@only_path = T.let("#{@only_path}/", String) unless @only_path.end_with?("/")
end
super
@ref_type ||= T.let(:branch, T.nilable(Symbol))
@ref ||= T.let("master", T.untyped)
end
# Returns the most recent modified time for all files in the current working directory after stage.
#
# @api public
sig { override.returns(Time) }
def source_modified_time
Time.parse(silent_command("git", args: ["--git-dir", git_dir, "show", "-s", "--format=%cD"]).stdout)
end
# Return last commit's unique identifier for the repository if fetched locally.
#
# @api public
sig { override.returns(String) }
def last_commit
args = ["--git-dir", git_dir, "rev-parse", "--short=#{MINIMUM_COMMIT_HASH_LENGTH}", "HEAD"]
@last_commit ||= silent_command("git", args:).stdout.chomp.presence
@last_commit || ""
end
private
sig { override.returns(String) }
def cache_tag
if partial_clone_sparse_checkout?
"git-sparse"
else
"git"
end
end
sig { returns(Integer) }
def cache_version
0
end
sig { override.params(timeout: T.nilable(Time)).void }
def update(timeout: nil)
config_repo
update_repo(timeout:)
checkout(timeout:)
reset
update_submodules(timeout:) if submodules?
end
sig { returns(T::Boolean) }
def shallow_dir?
(git_dir/"shallow").exist?
end
sig { returns(Pathname) }
def git_dir
cached_location/".git"
end
sig { returns(T::Boolean) }
def ref?
silent_command("git",
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | true |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/api_hashable.rb | Library/Homebrew/api_hashable.rb | # typed: strict
# frozen_string_literal: true
# Used to substitute common paths with generic placeholders when generating JSON for the API.
module APIHashable
  # Apply the API-generation monkeypatches: swap `HOMEBREW_PREFIX` and
  # `HOMEBREW_CELLAR` for placeholder constants and point `HOME`/
  # `GIT_CONFIG_GLOBAL` at placeholder locations. No-op when already active.
  sig { void }
  def generating_hash!
    return if generating_hash?

    # Remember the real values so `generated_hash!` can restore them.
    @old_homebrew_prefix = T.let(HOMEBREW_PREFIX, T.nilable(Pathname))
    @old_homebrew_cellar = T.let(HOMEBREW_CELLAR, T.nilable(Pathname))
    @old_home = T.let(Dir.home, T.nilable(String))
    @old_git_config_global = T.let(ENV.fetch("GIT_CONFIG_GLOBAL", nil), T.nilable(String))
    Object.send(:remove_const, :HOMEBREW_PREFIX)
    # Previously only HOMEBREW_PREFIX was monkeypatched even though the old
    # cellar was saved; patch HOMEBREW_CELLAR symmetrically so cellar paths
    # are also substituted with their placeholder.
    Object.send(:remove_const, :HOMEBREW_CELLAR)
    Object.const_set(:HOMEBREW_PREFIX, Pathname.new(HOMEBREW_PREFIX_PLACEHOLDER))
    Object.const_set(:HOMEBREW_CELLAR, Pathname.new(HOMEBREW_CELLAR_PLACEHOLDER))
    ENV["HOME"] = HOMEBREW_HOME_PLACEHOLDER
    ENV["GIT_CONFIG_GLOBAL"] = File.join(@old_home, ".gitconfig")
    @generating_hash = T.let(true, T.nilable(T::Boolean))
  end

  # Revert the monkeypatches applied by {#generating_hash!}, restoring the
  # real constants and environment. No-op when not active.
  sig { void }
  def generated_hash!
    return unless generating_hash?

    # Revert monkeypatches for API generation
    Object.send(:remove_const, :HOMEBREW_PREFIX)
    Object.send(:remove_const, :HOMEBREW_CELLAR)
    Object.const_set(:HOMEBREW_PREFIX, @old_homebrew_prefix)
    Object.const_set(:HOMEBREW_CELLAR, @old_homebrew_cellar)
    ENV["HOME"] = @old_home
    ENV["GIT_CONFIG_GLOBAL"] = @old_git_config_global
    @generating_hash = false
  end

  # Whether the API-generation monkeypatches are currently active.
  sig { returns(T::Boolean) }
  def generating_hash?
    @generating_hash ||= false
    @generating_hash == true
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/search.rb | Library/Homebrew/search.rb | # typed: true # rubocop:todo Sorbet/StrictSigil
# frozen_string_literal: true
require "description_cache_store"
require "utils/output"
module Homebrew
  # Helper module for searching formulae or casks.
  module Search
    extend Utils::Output::Mixin

    # Convert a slash-delimited query (`/foo/`) into a Regexp; any other
    # string is returned unchanged.
    # @raise [RuntimeError] when the slash-delimited pattern is not valid.
    def self.query_regexp(query)
      if (m = query.match(%r{^/(.*)/$}))
        Regexp.new(m[1])
      else
        query
      end
    rescue RegexpError
      raise "#{query} is not a valid regex."
    end

    # Search formula and/or cask descriptions for `string_or_regex` and print
    # the matches. With `--eval-all`, the on-disk description caches are used;
    # otherwise the (official-taps-only) API data is searched.
    def self.search_descriptions(string_or_regex, args, search_type: :desc)
      both = !args.formula? && !args.cask?
      eval_all = args.eval_all? || Homebrew::EnvConfig.eval_all?

      if args.formula? || both
        ohai "Formulae"
        if eval_all
          CacheStoreDatabase.use(:descriptions) do |db|
            cache_store = DescriptionCacheStore.new(db)
            Descriptions.search(string_or_regex, search_type, cache_store, eval_all).print
          end
        else
          # Warn about third-party formulae that are skipped without `--eval-all`.
          unofficial = Tap.all.sum { |tap| tap.official? ? 0 : tap.formula_files.size }
          if unofficial.positive?
            opoo "Use `--eval-all` to search #{unofficial} additional " \
                 "#{Utils.pluralize("formula", unofficial)} in third party taps."
          end
          descriptions = Homebrew::API::Formula.all_formulae.transform_values { |data| data["desc"] }
          Descriptions.search(string_or_regex, search_type, descriptions, eval_all, cache_store_hash: true).print
        end
      end
      return if !args.cask? && !both

      # Blank line between the two sections when both are printed.
      puts if both

      ohai "Casks"
      if eval_all
        CacheStoreDatabase.use(:cask_descriptions) do |db|
          cache_store = CaskDescriptionCacheStore.new(db)
          Descriptions.search(string_or_regex, search_type, cache_store, eval_all).print
        end
      else
        unofficial = Tap.all.sum { |tap| tap.official? ? 0 : tap.cask_files.size }
        if unofficial.positive?
          opoo "Use `--eval-all` to search #{unofficial} additional " \
               "#{Utils.pluralize("cask", unofficial)} in third party taps."
        end
        descriptions = Homebrew::API::Cask.all_casks.transform_values { |c| [c["name"].join(", "), c["desc"]] }
        Descriptions.search(string_or_regex, search_type, descriptions, eval_all, cache_store_hash: true).print
      end
    end

    # Search formula names (including aliases and fuzzy matches) for
    # `string_or_regex`. Installed formulae are decorated in the output.
    def self.search_formulae(string_or_regex)
      # A fully-qualified tap/name query is looked up directly, not searched.
      if string_or_regex.is_a?(String) && string_or_regex.match?(HOMEBREW_TAP_FORMULA_REGEX)
        return begin
          [Formulary.factory(string_or_regex).name]
        rescue FormulaUnavailableError
          []
        end
      end

      aliases = Formula.alias_full_names
      results = search(Formula.full_names + aliases, string_or_regex).sort
      if string_or_regex.is_a?(String)
        results |= Formula.fuzzy_search(string_or_regex).map do |n|
          Formulary.factory(n).full_name
        end
      end

      results.filter_map do |name|
        formula, canonical_full_name = begin
          f = Formulary.factory(name)
          [f, f.full_name]
        rescue
          [nil, name]
        end

        # Ignore aliases from results when the full name was also found
        next if aliases.include?(name) && results.include?(canonical_full_name)

        if formula&.any_version_installed?
          pretty_installed(name)
        elsif formula.nil? || formula.valid_platform?
          name
        end
      end
    end

    # Search cask tokens (with spelling correction) for `string_or_regex`.
    # Installed casks are decorated in the output.
    def self.search_casks(string_or_regex)
      # A fully-qualified tap/token query is looked up directly, not searched.
      if string_or_regex.is_a?(String) && string_or_regex.match?(HOMEBREW_TAP_CASK_REGEX)
        return begin
          [Cask::CaskLoader.load(string_or_regex).token]
        rescue Cask::CaskUnavailableError
          []
        end
      end

      cask_tokens = Tap.each_with_object([]) do |tap, array|
        # We can exclude the core cask tap because `CoreCaskTap#cask_tokens` returns short names by default.
        if tap.official? && !tap.core_cask_tap?
          tap.cask_tokens.each { |token| array << token.sub(%r{^homebrew/cask.*/}, "") }
        else
          tap.cask_tokens.each { |token| array << token }
        end
      end.uniq

      results = search(cask_tokens, string_or_regex)
      results += DidYouMean::SpellChecker.new(dictionary: cask_tokens)
                                         .correct(string_or_regex)

      results.sort.map do |name|
        cask = Cask::CaskLoader.load(name)
        if cask.installed?
          pretty_installed(cask.full_name)
        else
          cask.full_name
        end
      end.uniq
    end

    # Search both formula and cask names, honouring `--formula`/`--cask`.
    # Returns a pair: [matching formula names, matching cask tokens].
    def self.search_names(string_or_regex, args)
      both = !args.formula? && !args.cask?

      all_formulae = if args.formula? || both
        search_formulae(string_or_regex)
      else
        []
      end

      all_casks = if args.cask? || both
        search_casks(string_or_regex)
      else
        []
      end

      [all_formulae, all_casks]
    end

    # Dispatch to regex or substring search based on the query's type.
    def self.search(selectable, string_or_regex, &block)
      case string_or_regex
      when Regexp
        search_regex(selectable, string_or_regex, &block)
      else
        search_string(selectable, string_or_regex.to_str, &block)
      end
    end

    # Normalize a string for fuzzy comparison: lowercase, keeping only
    # alphanumerics, `@` and `+`.
    def self.simplify_string(string)
      string.downcase.gsub(/[^a-z\d@+]/i, "")
    end

    # Select elements (optionally mapped through the block) matching `regex`.
    def self.search_regex(selectable, regex)
      selectable.select do |*args|
        args = yield(*args) if block_given?
        args = Array(args).flatten.compact
        args.any? { |arg| arg.match?(regex) }
      end
    end

    # Select elements (optionally mapped through the block) whose simplified
    # form contains the simplified `string`.
    def self.search_string(selectable, string)
      simplified_string = simplify_string(string)
      selectable.select do |*args|
        args = yield(*args) if block_given?
        args = Array(args).flatten.compact
        args.any? { |arg| simplify_string(arg).include?(simplified_string) }
      end
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/source_location.rb | Library/Homebrew/source_location.rb | # typed: strict
# frozen_string_literal: true
module Homebrew
  # A location in source code.
  class SourceLocation
    # 1-based line number.
    sig { returns(Integer) }
    attr_reader :line

    # Optional column within the line.
    sig { returns(T.nilable(Integer)) }
    attr_reader :column

    sig { params(line: Integer, column: T.nilable(Integer)).void }
    def initialize(line, column = nil)
      @line = line
      @column = column
    end

    # Render as "line" or "line:column" when a column is known.
    sig { returns(String) }
    def to_s
      [line, column].compact.join(":")
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Homebrew/brew | https://github.com/Homebrew/brew/blob/fe0a384e3a04605192726c149570fbe33a8996b0/Library/Homebrew/install_renamed.rb | Library/Homebrew/install_renamed.rb | # typed: strict
# frozen_string_literal: true
# Helper module for installing default files.
#
# Extended onto `Pathname`-like destinations: when a destination file already
# exists with different contents, the incoming file is installed alongside it
# with a `.default` suffix instead of overwriting the user's copy.
module InstallRenamed
  sig {
    params(src: T.any(String, Pathname), new_basename: String,
           _block: T.nilable(T.proc.params(src: Pathname, dst: Pathname).returns(T.nilable(Pathname)))).void
  }
  def install_p(src, new_basename, &_block)
    super do |src, dst|
      if src.directory?
        # Install a directory's children directly; no renaming applies.
        dst.install(src.children)
        next
      else
        append_default_if_different(src, dst)
      end
    end
  end

  sig {
    params(pattern: T.any(Pathname, String, Regexp), replacement: T.any(Pathname, String),
           _block: T.nilable(T.proc.params(src: Pathname, dst: Pathname).returns(Pathname))).void
  }
  def cp_path_sub(pattern, replacement, &_block)
    super do |src, dst|
      append_default_if_different(src, dst)
    end
  end

  # Propagate this module onto derived paths so the `.default` behaviour
  # survives `path + "child"`.
  sig { params(other: T.any(String, Pathname)).returns(Pathname) }
  def +(other)
    super.extend(InstallRenamed)
  end

  # Propagate this module onto derived paths so the `.default` behaviour
  # survives `path/"child"`.
  sig { params(other: T.any(String, Pathname)).returns(Pathname) }
  def /(other)
    super.extend(InstallRenamed)
  end

  private

  # Redirect `dst` to "#{dst}.default" when a different file already exists
  # at `dst`; otherwise return `dst` unchanged.
  sig { params(src: Pathname, dst: Pathname).returns(Pathname) }
  def append_default_if_different(src, dst)
    if dst.file? && !FileUtils.identical?(src, dst)
      Pathname.new("#{dst}.default")
    else
      dst
    end
  end
end
| ruby | BSD-2-Clause | fe0a384e3a04605192726c149570fbe33a8996b0 | 2026-01-04T15:37:27.366412Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.