# CombinedText stringlengths 4 3.42M
# ---
# coding: utf-8
require "danger/helpers/comments_helper"
require "danger/request_sources/vsts_api"
module Danger
  module RequestSources
    # Request source for Visual Studio Team Services (Azure DevOps) pull
    # requests.  All HTTP traffic goes through the VSTSAPI wrapper; this class
    # wires the CI environment into it and owns the Danger comment lifecycle.
    class VSTS < RequestSource
      include Danger::Helpers::CommentsHelper
      attr_accessor :pr_json

      # Environment variables required for this request source to work.
      def self.env_vars
        [
          "DANGER_VSTS_API_TOKEN",
          "DANGER_VSTS_HOST"
        ]
      end

      # Environment variables honoured but not required.
      def self.optional_env_vars
        [
          "DANGER_VSTS_API_VERSION"
        ]
      end

      def initialize(ci_source, environment)
        self.ci_source = ci_source
        self.environment = environment
        @is_vsts_git = environment["BUILD_REPOSITORY_PROVIDER"] == "TfsGit"
        project, slug = ci_source.repo_slug.split("/")
        @api = VSTSAPI.new(project, slug, ci_source.pull_request_id, environment)
      end

      # True only when the build repository is hosted in TFS/VSTS git.
      def validates_as_ci?
        @is_vsts_git
      end

      def validates_as_api_source?
        @api.credentials_given?
      end

      def scm
        @scm ||= GitRepo.new
      end

      def host
        @host ||= @api.host
      end

      # Loads the PR metadata from the API into pr_json.
      def fetch_details
        self.pr_json = @api.fetch_pr_json
      end

      # Creates local branches pinned at the PR's base and head commits so the
      # diff danger analyses is deterministic.
      def setup_danger_branches
        base_branch = self.pr_json[:targetRefName].sub("refs/heads/", "")
        base_commit = self.pr_json[:lastMergeTargetCommit][:commitId]
        head_branch = self.pr_json[:sourceRefName].sub("refs/heads/", "")
        head_commit = self.pr_json[:lastMergeSourceCommit][:commitId]

        # Next, we want to ensure that we have a version of the current branch at a known location
        scm.ensure_commitish_exists_on_branch! base_branch, base_commit
        self.scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_commit}"

        # OK, so we want to ensure that we have a known head branch, this will always represent
        # the head of the PR ( e.g. the most recent commit that will be merged. )
        scm.ensure_commitish_exists_on_branch! head_branch, head_commit
        self.scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_commit}"
      end

      def organisation
        nil
      end

      # Posts or edits the danger comment on the PR.  No-op when the API
      # wrapper cannot comment (e.g. missing credentials/permissions).
      def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
        unless @api.supports_comments?
          return
        end

        comment = generate_description(warnings: warnings, errors: errors)
        comment += "\n\n"
        comment += generate_comment(warnings: warnings,
                                    errors: errors,
                                    messages: messages,
                                    markdowns: markdowns,
                                    previous_violations: {},
                                    danger_id: danger_id,
                                    template: "vsts")

        # BUGFIX: `remove_previous_comments` was accepted but ignored, so
        # callers requesting a fresh comment still had the old one edited in
        # place.  Treat it like `new_comment` and post a new thread.
        if new_comment || remove_previous_comments
          post_new_comment(comment)
        else
          update_old_comment(comment, danger_id: danger_id)
        end
      end

      def post_new_comment(comment)
        @api.post_comment(comment)
      end

      # Edits every danger-authored comment thread in place; posts a new
      # comment if none was found.
      def update_old_comment(new_comment, danger_id: "danger")
        comment_updated = false
        @api.fetch_last_comments.each do |c|
          thread_id = c[:id]
          comment = c[:comments].first
          comment_id = comment[:id]
          comment_content = comment[:content].nil? ? "" : comment[:content]
          # Skip the comment if it wasn't posted by danger
          next unless comment_content.include?("generated_by_#{danger_id}")
          # Update the comment danger posted previously
          @api.update_comment(thread_id, comment_id, new_comment)
          comment_updated = true
        end
        # If no comment was updated, post a new one
        post_new_comment(new_comment) unless comment_updated
      end
    end
  end
end
# Make rubocop happy
# coding: utf-8
require "danger/helpers/comments_helper"
require "danger/request_sources/vsts_api"
module Danger
  module RequestSources
    # Request source backing Visual Studio Team Services / Azure DevOps.
    # PR metadata and comment management are delegated to the VSTSAPI wrapper.
    class VSTS < RequestSource
      include Danger::Helpers::CommentsHelper
      attr_accessor :pr_json

      # Environment required for this source to authenticate.
      def self.env_vars
        %w(DANGER_VSTS_API_TOKEN DANGER_VSTS_HOST)
      end

      # Recognised but not required.
      def self.optional_env_vars
        ["DANGER_VSTS_API_VERSION"]
      end

      def initialize(ci_source, environment)
        self.ci_source = ci_source
        self.environment = environment
        @is_vsts_git = environment["BUILD_REPOSITORY_PROVIDER"] == "TfsGit"
        team_project, repo = ci_source.repo_slug.split("/")
        @api = VSTSAPI.new(team_project, repo, ci_source.pull_request_id, environment)
      end

      # Only TfsGit-hosted repositories validate as this CI's request source.
      def validates_as_ci?
        @is_vsts_git
      end

      def validates_as_api_source?
        @api.credentials_given?
      end

      def scm
        @scm ||= GitRepo.new
      end

      def host
        @host ||= @api.host
      end

      def fetch_details
        self.pr_json = @api.fetch_pr_json
      end

      # Pins local branches at the PR's merge base and head so diffs are
      # computed against known commits.
      def setup_danger_branches
        base_name = pr_json[:targetRefName].sub("refs/heads/", "")
        base_sha = pr_json[:lastMergeTargetCommit][:commitId]
        # Make sure the base commit is reachable, then branch from it.
        scm.ensure_commitish_exists_on_branch! base_name, base_sha
        scm.exec "branch #{EnvironmentManager.danger_base_branch} #{base_sha}"

        head_name = pr_json[:sourceRefName].sub("refs/heads/", "")
        head_sha = pr_json[:lastMergeSourceCommit][:commitId]
        # Same for the PR head (the most recent commit that will be merged).
        scm.ensure_commitish_exists_on_branch! head_name, head_sha
        scm.exec "branch #{EnvironmentManager.danger_head_branch} #{head_sha}"
      end

      def organisation
        nil
      end

      # Publishes the danger report: a brand-new comment when asked (or when
      # previous comments are being discarded), otherwise edits in place.
      def update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false, remove_previous_comments: false)
        return unless @api.supports_comments?

        body = [
          generate_description(warnings: warnings, errors: errors),
          generate_comment(warnings: warnings,
                           errors: errors,
                           messages: messages,
                           markdowns: markdowns,
                           previous_violations: {},
                           danger_id: danger_id,
                           template: "vsts")
        ].join("\n\n")

        if new_comment || remove_previous_comments
          post_new_comment(body)
        else
          update_old_comment(body, danger_id: danger_id)
        end
      end

      def post_new_comment(comment)
        @api.post_comment(comment)
      end

      # Rewrites every danger-authored thread; falls back to a fresh comment
      # when none exists yet.
      def update_old_comment(new_comment, danger_id: "danger")
        updated_any = false
        @api.fetch_last_comments.each do |thread|
          first_comment = thread[:comments].first
          text = first_comment[:content].nil? ? "" : first_comment[:content]
          # Only touch threads danger itself created.
          next unless text.include?("generated_by_#{danger_id}")

          @api.update_comment(thread[:id], first_comment[:id], new_comment)
          updated_any = true
        end
        post_new_comment(new_comment) unless updated_any
      end
    end
  end
end
#
#
# Copyright (C) 2020 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require 'set'
module DataFixup::Auditors
module Migrate
DEFAULT_BATCH_SIZE = 100
# Mixin with the shared machinery for copying one account-week of auditor
# events from cassandra into postgres.  Includers provide the type-specific
# hooks: auditor_type, perform_migration, perform_repair, perform_audit and
# filter_dead_foreign_keys.
module AuditorWorker
  def initialize(account_id, date, operation_type: :backfill)
    @account_id = account_id
    @date = date
    @_operation = operation_type
  end

  # :backfill (default), :repair, or :audit-ish passes dispatch on this.
  def operation
    @_operation ||= :backfill
  end

  def account
    @_account ||= Account.find(@account_id)
  end

  # Start of the week containing @date (cassandra partitions are weekly).
  def previous_sunday
    date_time - date_time.wday.days
  end

  def next_sunday
    previous_sunday + 7.days
  end

  def date_time
    @_dt ||= CanvasTime.try_parse("#{@date.strftime('%Y-%m-%d')} 00:00:00 -0000")
  end

  def cassandra_query_options
    # auditors cassandra partitions span one week.
    # querying a week at a time is more efficient
    # than separately for each day.
    # this query will usually span 2 partitions
    # because the alignment of the partitions is
    # to number of second from epoch % seconds_in_week
    {
      oldest: previous_sunday,
      newest: next_sunday,
      fetch_strategy: :serial
    }
  end

  # Paginates the cassandra collection, retrying with exponential backoff
  # (1.4**n seconds) on timeouts, up to 10 retries before giving up.
  # rubocop:disable Lint/NoSleep
  def get_cassandra_records_resiliantly(collection, page_args)
    retries = 0
    max_retries = 10
    begin
      recs = collection.paginate(page_args)
      return recs
    rescue CassandraCQL::Thrift::TimedOutException
      raise if retries >= max_retries
      sleep 1.4 ** retries
      retries += 1
      retry
    end
  end
  # rubocop:enable Lint/NoSleep

  # Drops attribute rows whose uuid is already present in postgres.
  def filter_for_idempotency(ar_attributes_list, auditor_ar_type)
    # we might have inserted some of these already, try again with only new recs
    uuids = ar_attributes_list.map { |h| h['uuid'] }
    existing_uuids = auditor_ar_type.where(uuid: uuids).pluck(:uuid)
    ar_attributes_list.reject { |h| existing_uuids.include?(h['uuid']) }
  end

  # Groups attribute hashes by their target partition table and bulk inserts
  # each group in its own transaction.
  def bulk_insert_auditor_recs(auditor_ar_type, attrs_lists)
    partition_groups = attrs_lists.group_by { |a| auditor_ar_type.infer_partition_table_name(a) }
    partition_groups.each do |partition_name, partition_attrs|
      uuids = partition_attrs.map { |h| h['uuid'] }
      Rails.logger.info("INSERTING INTO #{partition_name} #{uuids.size} IDs (#{uuids.join(',')})")
      auditor_ar_type.transaction do
        auditor_ar_type.connection.bulk_insert(partition_name, partition_attrs)
      end
    end
  end

  # Walks the cassandra collection page by page, converting each event-stream
  # record to AR attributes and bulk inserting.  On uniqueness/FK violations
  # the batch is filtered (both remedies) and retried once.
  def migrate_in_pages(collection, auditor_ar_type, batch_size = DEFAULT_BATCH_SIZE)
    next_page = 1
    until next_page.nil?
      page_args = { page: next_page, per_page: batch_size }
      auditor_recs = get_cassandra_records_resiliantly(collection, page_args)
      ar_attributes_list = auditor_recs.map do |rec|
        auditor_ar_type.ar_attributes_from_event_stream(rec)
      end
      begin
        bulk_insert_auditor_recs(auditor_ar_type, ar_attributes_list)
      rescue ActiveRecord::RecordNotUnique, ActiveRecord::InvalidForeignKey
        # this gets messy if we act specifically; let's just apply both remedies
        new_attrs_list = filter_for_idempotency(ar_attributes_list, auditor_ar_type)
        new_attrs_list = filter_dead_foreign_keys(new_attrs_list)
        bulk_insert_auditor_recs(auditor_ar_type, new_attrs_list) unless new_attrs_list.empty?
      end
      next_page = auditor_recs.next_page
    end
  end

  # repairing is run after a scheduling pass. In most cases this means some records
  # made it over and then the scheduled migration job failed, usually due to
  # repeated cassandra timeouts. For this reason, we don't wish to load ALL
  # scanned records from cassandra, only those that are not yet in the database.
  # therefore "repair" is much more careful. It scans each batch of IDs from the cassandra
  # index, sees which ones aren't currently in postgres, and then only loads attributes for
  # that subset to insert. This makes it much faster for traversing a large dataset
  # when some or most of the records are filled in already. Obviously it would be somewhat
  # slower than the migrate pass if there were NO records migrated.
  def repair_in_pages(ids_collection, stream_type, auditor_ar_type, batch_size = DEFAULT_BATCH_SIZE)
    next_page = 1
    until next_page.nil?
      page_args = { page: next_page, per_page: batch_size }
      auditor_id_recs = get_cassandra_records_resiliantly(ids_collection, page_args)
      proposed_ids = auditor_id_recs.map { |rec| rec['id'] }
      existing_ids = auditor_ar_type.where(uuid: proposed_ids).pluck(:uuid)
      insertable_ids = proposed_ids - existing_ids
      unless insertable_ids.empty?
        auditor_recs = stream_type.fetch(insertable_ids, strategy: :serial)
        ar_attributes_list = auditor_recs.map do |rec|
          auditor_ar_type.ar_attributes_from_event_stream(rec)
        end
        begin
          bulk_insert_auditor_recs(auditor_ar_type, ar_attributes_list)
        rescue ActiveRecord::RecordNotUnique, ActiveRecord::InvalidForeignKey
          # this gets messy if we act specifically; let's just apply both remedies
          new_attrs_list = filter_for_idempotency(ar_attributes_list, auditor_ar_type)
          new_attrs_list = filter_dead_foreign_keys(new_attrs_list)
          bulk_insert_auditor_recs(auditor_ar_type, new_attrs_list) unless new_attrs_list.empty?
        end
      end
      next_page = auditor_id_recs.next_page
    end
  end

  # Read-only pass: counts cassandra ids for the week and records which ones
  # never made it into postgres, accumulating into @audit_results.
  def audit_in_pages(ids_collection, auditor_ar_type, batch_size = DEFAULT_BATCH_SIZE)
    @audit_results ||= {
      'uuid_count' => 0,
      'failure_count' => 0,
      'missed_ids' => []
    }
    audit_failure_uuids = []
    audit_uuid_count = 0
    next_page = 1
    until next_page.nil?
      page_args = { page: next_page, per_page: batch_size }
      auditor_id_recs = get_cassandra_records_resiliantly(ids_collection, page_args)
      uuids = auditor_id_recs.map { |rec| rec['id'] }
      audit_uuid_count += uuids.size
      existing_uuids = auditor_ar_type.where(uuid: uuids).pluck(:uuid)
      audit_failure_uuids += (uuids - existing_uuids)
      next_page = auditor_id_recs.next_page
    end
    @audit_results['uuid_count'] += audit_uuid_count
    @audit_results['failure_count'] += audit_failure_uuids.size
    @audit_results['missed_ids'] += audit_failure_uuids
  end

  # Identifying attributes of the migration-state cell for one account-day.
  def cell_attributes(target_date: nil)
    target_date = @date if target_date.nil?
    {
      auditor_type: auditor_type,
      account_id: account.id,
      year: target_date.year,
      month: target_date.month,
      day: target_date.day
    }
  end

  def find_migration_cell(attributes: nil)
    attributes = cell_attributes if attributes.nil?
    ::Auditors::ActiveRecord::MigrationCell.find_by(attributes)
  end

  def migration_cell
    @_cell ||= find_migration_cell
  end

  # Creates the cell, tolerating a concurrent creation racing us (two jobs
  # from the same week can run simultaneously).
  def create_cell!(attributes: nil)
    attributes = cell_attributes if attributes.nil?
    ::Auditors::ActiveRecord::MigrationCell.create!(attributes.merge({ completed: false, repaired: false }))
  rescue ActiveRecord::RecordNotUnique, PG::UniqueViolation
    created_cell = find_migration_cell(attributes: attributes)
    raise "unresolvable auditors migration state #{attributes}" if created_cell.nil?
    created_cell
  end

  def reset_cell!
    migration_cell&.destroy
    @_cell = nil
  end

  # Flags every cell in this job's week as queued so concurrent scheduling
  # passes don't enqueue duplicate jobs for the same slots.
  def mark_cell_queued!(delayed_job_id: nil)
    (@_cell = create_cell!) if migration_cell.nil?
    migration_cell.update_attribute(:failed, false) if migration_cell.failed
    # queueing will take care of a week, so let's make sure we don't get
    # other jobs vying for the same slot
    current_date = previous_sunday
    while current_date < next_sunday
      cur_cell_attrs = cell_attributes(target_date: current_date)
      current_cell = find_migration_cell(attributes: cur_cell_attrs)
      current_cell = create_cell!(attributes: cur_cell_attrs) if current_cell.nil?
      current_cell.update(completed: false, failed: false, job_id: delayed_job_id, queued: true)
      current_date += 1.day
    end
    @_cell.reload
  end

  # Cassandra connection factory honouring the configurable backfill timeout.
  def auditors_cassandra_db_lambda
    lambda do
      timeout_value = Setting.get("auditors_backfill_cassandra_timeout", 360).to_i
      opts = { override_options: { 'timeout' => timeout_value } }
      Canvas::Cassandra::DatabaseBuilder.from_config(:auditors, opts)
    end
  end

  def already_complete?
    migration_cell&.completed
  end

  # Whether a scheduling pass should (re)enqueue a job for this cell.
  def currently_queueable?
    return true if migration_cell.nil?
    return true if migration_cell.failed
    if operation == :repair
      return false if migration_cell.repaired
    else
      return false if migration_cell.completed
    end
    return true unless migration_cell.queued
    # this cell is currently in the queue (maybe)
    # If that update happened more than a few
    # days ago, it's likely dead, and should
    # get rescheduled. Worst case
    # it scans and fails to find anything to do,
    # and marks the cell complete.
    return migration_cell.updated_at < 3.days.ago
  end

  def mark_week_complete!
    current_date = previous_sunday
    while current_date < next_sunday
      cur_cell_attrs = cell_attributes(target_date: current_date)
      current_cell = find_migration_cell(attributes: cur_cell_attrs)
      current_cell = create_cell!(attributes: cur_cell_attrs) if current_cell.nil?
      repaired = (operation == :repair)
      current_cell.update(completed: true, failed: false, repaired: repaired)
      current_date += 1.day
    end
  end

  def mark_week_audited!(results)
    current_date = previous_sunday
    failed_count = results['failure_count']
    while current_date < next_sunday
      cur_cell_attrs = cell_attributes(target_date: current_date)
      current_cell = find_migration_cell(attributes: cur_cell_attrs)
      current_cell = create_cell!(attributes: cur_cell_attrs) if current_cell.nil?
      current_cell.update(audited: true, missing_count: failed_count)
      current_date += 1.day
    end
  end

  # Entry point for the delayed job: migrates (or repairs) a whole week of
  # data for one account and marks the week's cells complete on success.
  def perform
    extend_cassandra_stream_timeout!
    cell = migration_cell
    return if cell&.completed
    cell = create_cell! if cell.nil?
    if account.root_account.created_at > @date + 2.days
      # this account wasn't active on this day, don't
      # waste time migrating
      return cell.update_attribute(:completed, true)
    end
    if operation == :repair
      perform_repair
    elsif operation == :backfill
      perform_migration
    else
      raise "Unknown Auditor Backfill Operation: #{operation}"
    end
    # the reason this works is the rescheduling plan.
    # if the job passes, the whole week gets marked "complete".
    # If it fails, the target cell for this one job will get rescheduled
    # later in the reconciliation pass.
    # at that time it will again run for this whole week.
    # any failed day results in a job spanning a week.
    # If a job for another day in the SAME week runs,
    # and this one is done already, it will quickly short circuit because this
    # day is marked complete.
    # If two jobs from the same week happened to run at the same time,
    # they would contend over Uniqueness violations, which we catch and handle.
    mark_week_complete!
  ensure
    # BUGFIX: guard against `cell` being nil here.  If the very first
    # statement raised (e.g. cassandra config failure) the cell was never
    # looked up, and the old unguarded calls turned the real error into a
    # NoMethodError on nil.
    if cell
      cell.update_attribute(:failed, true) unless cell.reload.completed
      cell.update_attribute(:queued, false)
    end
    clear_cassandra_stream_timeout!
  end

  # Audit-only entry point: reports counts/ids, writes no auditor rows.
  def audit
    extend_cassandra_stream_timeout!
    @audit_results = {
      'uuid_count' => 0,
      'failure_count' => 0,
      'missed_ids' => []
    }
    perform_audit
    mark_week_audited!(@audit_results)
    return @audit_results
  ensure
    clear_cassandra_stream_timeout!
  end

  # Swaps the stream's cassandra connection for one with a longer timeout,
  # caching the original factory so it can be restored.
  def extend_cassandra_stream_timeout!
    Canvas::Cassandra::DatabaseBuilder.reset_connections!
    @_stream_db_proc = auditor_cassandra_stream.attr_config_values[:database]
    auditor_cassandra_stream.database(auditors_cassandra_db_lambda)
  end

  def clear_cassandra_stream_timeout!
    # BUGFIX: `raise RuntimeError("...")` attempted to *call* RuntimeError as
    # a method and died with NoMethodError; use the class + message form.
    raise RuntimeError, "stream db never cached!" unless @_stream_db_proc
    Canvas::Cassandra::DatabaseBuilder.reset_connections!
    auditor_cassandra_stream.database(@_stream_db_proc)
  end

  def auditor_cassandra_stream
    stream_map = {
      authentication: Auditors::Authentication::Stream,
      course: Auditors::Course::Stream,
      grade_change: Auditors::GradeChange::Stream
    }
    stream_map[auditor_type]
  end

  # --- hooks each including worker must implement ---

  def auditor_type
    raise "NOT IMPLEMENTED"
  end

  def perform_migration
    raise "NOT IMPLEMENTED"
  end

  def perform_repair
    raise "NOT IMPLEMENTED"
  end

  def perform_audit
    raise "NOT IMPLEMENTED"
  end

  def filter_dead_foreign_keys(_attrs_list)
    raise "NOT IMPLEMENTED"
  end
end
# account = Account.find(account_id)
# date = Date.civil(2020, 4, 21)
# cass_class = Auditors::Authentication
# ar_class = Auditors::ActiveRecord::AuthenticationRecord
# worker = AuthenticationWorker.new(account, date)
# Delayed::Job.enqueue(worker)
# Migrates authentication auditor events (stored per root account).
class AuthenticationWorker
  include AuditorWorker

  def auditor_type
    :authentication
  end

  def cassandra_collection
    Auditors::Authentication.for_account(account, cassandra_query_options)
  end

  def cassandra_id_collection
    Auditors::Authentication::Stream.ids_for_account(account, cassandra_query_options)
  end

  def perform_migration
    migrate_in_pages(cassandra_collection, Auditors::ActiveRecord::AuthenticationRecord)
  end

  def perform_repair
    repair_in_pages(cassandra_id_collection, Auditors::Authentication::Stream, Auditors::ActiveRecord::AuthenticationRecord)
  end

  def perform_audit
    audit_in_pages(cassandra_id_collection, Auditors::ActiveRecord::AuthenticationRecord)
  end

  # Drops rows referencing users or pseudonyms that no longer exist, so the
  # bulk insert does not trip foreign key constraints.
  def filter_dead_foreign_keys(attrs_list)
    referenced_user_ids = attrs_list.map { |a| a['user_id'] }
    referenced_pseudonym_ids = attrs_list.map { |a| a['pseudonym_id'] }
    dead_user_ids = referenced_user_ids - User.where(id: referenced_user_ids).pluck(:id)
    dead_pseudonym_ids = referenced_pseudonym_ids - Pseudonym.where(id: referenced_pseudonym_ids).pluck(:id)
    attrs_list.reject do |h|
      dead_user_ids.include?(h['user_id']) || dead_pseudonym_ids.include?(h['pseudonym_id'])
    end
  end
end
# Migrates course auditor events (stored per account).
class CourseWorker
  include AuditorWorker

  def auditor_type
    :course
  end

  def cassandra_collection
    Auditors::Course.for_account(account, cassandra_query_options)
  end

  def cassandra_id_collection
    Auditors::Course::Stream.ids_for_account(account, cassandra_query_options)
  end

  def perform_migration
    migrate_in_pages(cassandra_collection, Auditors::ActiveRecord::CourseRecord)
  end

  def perform_repair
    repair_in_pages(cassandra_id_collection, Auditors::Course::Stream, Auditors::ActiveRecord::CourseRecord)
  end

  def perform_audit
    audit_in_pages(cassandra_id_collection, Auditors::ActiveRecord::CourseRecord)
  end

  # Drops rows whose user no longer exists (would violate the FK on insert).
  def filter_dead_foreign_keys(attrs_list)
    referenced_ids = attrs_list.map { |a| a['user_id'] }
    dead_ids = referenced_ids - User.where(id: referenced_ids).pluck(:id)
    attrs_list.reject { |h| dead_ids.include?(h['user_id']) }
  end
end
# Migrates grade change auditor events (stored per course, so each pass
# iterates over the account's migrateable courses).
class GradeChangeWorker
  include AuditorWorker

  def auditor_type
    :grade_change
  end

  def cassandra_collection_for(course)
    Auditors::GradeChange.for_course(course, cassandra_query_options)
  end

  def cassandra_id_collection_for(course)
    Auditors::GradeChange::Stream.ids_for_course(course, cassandra_query_options)
  end

  # Courses worth scanning: active, created by this date, with submission
  # activity in the trailing week.
  def migrateable_course_ids
    recent_submissions = Submission.where("course_id=courses.id").where("updated_at > ?", @date - 7.days)
    account.courses.active.where(
      "EXISTS (?)", recent_submissions).where(
        "courses.created_at <= ?", @date + 2.days).pluck(:id)
  end

  def perform_migration
    each_migrateable_course do |course|
      migrate_in_pages(cassandra_collection_for(course), Auditors::ActiveRecord::GradeChangeRecord)
    end
  end

  def perform_repair
    each_migrateable_course do |course|
      repair_in_pages(cassandra_id_collection_for(course), Auditors::GradeChange::Stream, Auditors::ActiveRecord::GradeChangeRecord)
    end
  end

  def perform_audit
    each_migrateable_course do |course|
      audit_in_pages(cassandra_id_collection_for(course), Auditors::ActiveRecord::GradeChangeRecord)
    end
  end

  # Drops rows referencing students/graders or submissions that no longer
  # exist, so the bulk insert does not trip foreign key constraints.
  def filter_dead_foreign_keys(attrs_list)
    referenced_user_ids = (attrs_list.map { |a| a['student_id'] } + attrs_list.map { |a| a['grader_id'] }).uniq
    dead_user_ids = referenced_user_ids - User.where(id: referenced_user_ids).pluck(:id)
    with_live_users = attrs_list.reject do |h|
      dead_user_ids.include?(h['student_id']) || dead_user_ids.include?(h['grader_id'])
    end
    submission_ids = with_live_users.map { |a| a['submission_id'] }
    dead_submission_ids = submission_ids - Submission.where(id: submission_ids).pluck(:id)
    with_live_users.reject { |h| dead_submission_ids.include?(h['submission_id']) }
  end

  private

  # Shared iteration for the three passes: loads courses in id-batches of
  # 1000 and yields each course.
  def each_migrateable_course(&blk)
    migrateable_course_ids.to_a.in_groups_of(1000) do |course_ids|
      Course.where(id: course_ids).each(&blk)
    end
  end
end
# sets up ALL the backfill jobs for the current shard
# given some date range
# remember we START with the most recent becuase
# they're typically most valuable, and walk backwards,
# so start_date should be > end_date.
#
# This job tries to be nice to the db by scheduling a day at a time
# and if the queue is over the set threshold it will schedule itself to
# run again in 5 minutes and see if it can schedule in any more.
# This should keep the queue from growing out of control.
#
# Setup is something like:
# start_date = Date.today
# end_date = start - 10.months
# worker = DataFixup::Auditors::Migrate::BackfillEngine.new(start_date, end_date)
# Delayed::Job.enqueue(worker)
#
# It will take care of re-scheduling itself until that backfill window is covered.
class BackfillEngine
DEFAULT_DEPTH_THRESHOLD = 100000
DEFAULT_SCHEDULING_INTERVAL = 150
# these jobs are all low-priority,
# so high-ish parallelism is ok
# (they mostly run in a few minutes or less).
# we'll wind it down on clusters that are
# in trouble if necessary. For clusters
# taking a long time, grades parallelism
# could actually be increased very substantially overnight
# as they will not try to overwrite each other.
DEFAULT_PARALLELISM_GRADES = 20
DEFAULT_PARALLELISM_COURSES = 10
DEFAULT_PARALLELISM_AUTHS = 5
LOG_PREFIX = "Auditors PG Backfill - ".freeze
SCHEDULAR_TAG = "DataFixup::Auditors::Migrate::BackfillEngine#perform"
WORKER_TAGS = [
"DataFixup::Auditors::Migrate::CourseWorker#perform".freeze,
"DataFixup::Auditors::Migrate::GradeChangeWorker#perform".freeze,
"DataFixup::Auditors::Migrate::AuthenticationWorker#perform".freeze
].freeze
class << self
def non_future_queue
Delayed::Job.where("run_at <= ?", Time.zone.now)
end
def queue_depth
non_future_queue.count
end
def queue_tag_counts
non_future_queue.group(:tag).count
end
def running_tag_counts
non_future_queue.where('locked_by IS NOT NULL').group(:tag).count
end
def backfill_jobs
non_future_queue.where("tag IN ('#{WORKER_TAGS.join("','")}')")
end
def other_jobs
non_future_queue.where("tag NOT IN ('#{WORKER_TAGS.join("','")}')")
end
def schedular_jobs
Delayed::Job.where(tag: SCHEDULAR_TAG)
end
def failed_jobs
Delayed::Job::Failed.where("tag IN ('#{WORKER_TAGS.join("','")}')")
end
def failed_schedulars
Delayed::Job::Failed.where(tag: SCHEDULAR_TAG)
end
def running_jobs
backfill_jobs.where("locked_by IS NOT NULL")
end
def completed_cells
Auditors::ActiveRecord::MigrationCell.where(completed: true)
end
def failed_cells
Auditors::ActiveRecord::MigrationCell.where(failed: true, completed: false)
end
def jobs_id
shard = Shard.current
(shard.respond_to?(:delayed_jobs_shard_id) ? shard.delayed_jobs_shard_id : "NONE")
end
def queue_setting_key
"auditors_backfill_queue_threshold_jobs#{jobs_id}"
end
def backfill_key
"auditors_backfill_interval_seconds_jobs#{jobs_id}"
end
def queue_threshold
Setting.get(queue_setting_key, DEFAULT_DEPTH_THRESHOLD).to_i
end
def backfill_interval
Setting.get(backfill_key, DEFAULT_SCHEDULING_INTERVAL).to_i.seconds
end
def cluster_name
Shard.current.database_server.id
end
def parallelism_key(auditor_type)
"auditors_migration_num_strands"
end
def check_parallelism
{
grade_changes: Setting.get(parallelism_key("grade_changes"), 1),
courses: Setting.get(parallelism_key("courses"), 1),
authentications: Setting.get(parallelism_key("authentications"), 1)
}
end
def longest_running(on_shard: false)
longest_scope = running_jobs
if on_shard
longest_scope = longest_scope.where(shard_id: Shard.current.id)
end
longest = longest_scope.order(:locked_at).first
return {} if longest.blank?
{
id: longest.id,
elapsed_seconds: (Time.now.utc - longest.locked_at),
locked_by: longest.locked_by
}
end
def update_parallelism!(hash)
hash.each do |auditor_type, parallelism_value|
Setting.set(parallelism_key(auditor_type), parallelism_value)
end
p_settings = check_parallelism
gc_tag = 'DataFixup::Auditors::Migrate::GradeChangeWorker#perform'
Delayed::Job.where(tag: gc_tag, locked_by: nil).update_all(max_concurrent: p_settings[:grade_changes])
course_tag = 'DataFixup::Auditors::Migrate::CourseWorker#perform'
Delayed::Job.where(tag: course_tag, locked_by: nil).update_all(max_concurrent: p_settings[:courses])
auth_tag = 'DataFixup::Auditors::Migrate::AuthenticationWorker#perform'
Delayed::Job.where(tag: auth_tag, locked_by: nil).update_all(max_concurrent: p_settings[:authentications])
end
# only set parallelism if it is not currently set at all.
# If it's already been set (either from previous preset or
# by manual action) it will have a > 0 value and this will
# just exit after checking each setting
def preset_parallelism!
if Setting.get(parallelism_key("grade_changes"), -1).to_i < 0
Setting.set(parallelism_key("grade_changes"), DEFAULT_PARALLELISM_GRADES)
end
if Setting.get(parallelism_key("courses"), -1).to_i < 0
Setting.set(parallelism_key("courses"), DEFAULT_PARALLELISM_COURSES)
end
if Setting.get(parallelism_key("authentications"), -1).to_i < 0
Setting.set(parallelism_key("authentications"), DEFAULT_PARALLELISM_AUTHS)
end
end
def working_dates(current_jobs_scope)
current_jobs_scope.pluck(:handler).map{|h| YAML.unsafe_load(h).instance_variable_get(:@date) }.uniq
end
def shard_summary
{
'total_depth': queue_depth,
'backfill': backfill_jobs.where(shard_id: Shard.current.id).count,
'others': other_jobs.where(shard_id: Shard.current.id).count,
'failed': failed_jobs.where(shard_id: Shard.current.id).count,
'currently_running': running_jobs.where(shard_id: Shard.current.id).count,
'completed_cells': completed_cells.count,
'dates_being_worked': working_dates(running_jobs.where(shard_id: Shard.current.id)),
'config': {
'threshold': "#{queue_threshold} jobs",
'interval': "#{backfill_interval} seconds",
'parallelism': check_parallelism
},
'longest_runner': longest_running(on_shard: true),
'schedular_count': schedular_jobs.where(shard_id: Shard.current.id).count,
'schedular_job_ids': schedular_jobs.where(shard_id: Shard.current.id).limit(10).map(&:id)
}
end
def summary
{
'total_depth': queue_depth,
'backfill': backfill_jobs.count,
'others': other_jobs.count,
'failed': failed_jobs.count,
'currently_running': running_jobs.count,
'completed_cells': completed_cells.count,
# it does not work to check these with jobs from other shards
# because deserializing them fails to find accounts
'dates_being_worked': working_dates(running_jobs.where(shard_id: Shard.current.id)),
'config': {
'threshold': "#{queue_threshold} jobs",
'interval': "#{backfill_interval} seconds",
'parallelism': check_parallelism
},
'longest_runner': longest_running,
'schedular_count': schedular_jobs.count,
'schedular_job_ids': schedular_jobs.limit(10).map(&:id)
}
end
def date_summaries(start_date, end_date)
cur_date = start_date
output = {}
while cur_date <= end_date
cells = completed_cells.where(year: cur_date.year, month: cur_date.month, day: cur_date.day)
output[cur_date.to_s] = cells.count
cur_date += 1.day
end
output
end
def scan_for_holes(start_date, end_date)
summaries = date_summaries(start_date, end_date)
max_count = summaries.values.max
{
'max_value': max_count,
'holes': summaries.keep_if{|_,v| v < max_count}
}
end
def log(message)
Rails.logger.info("#{LOG_PREFIX} #{message}")
end
def force_run_schedulars(id)
d_worker = Delayed::Worker.new
sched_job = Delayed::Job.find(id)
sched_job.update(locked_by: 'force_run', locked_at: Time.now.utc)
d_worker.perform(sched_job)
end
def total_reset_frd!
conn = Auditors::ActiveRecord::GradeChangeRecord.connection
conn.execute("set role dba")
conn.truncate(Auditors::ActiveRecord::GradeChangeRecord.table_name)
conn.truncate(Auditors::ActiveRecord::CourseRecord.table_name)
conn.truncate(Auditors::ActiveRecord::AuthenticationRecord.table_name)
conn.truncate(Auditors::ActiveRecord::MigrationCell.table_name)
end
end
def initialize(start_date, end_date, operation_type: :schedule)
if start_date < end_date
raise "You probably didn't read the comment on this job..."
end
@start_date = start_date
@end_date = end_date
@_operation = operation_type
end
def operation
@_operation ||= :schedule
end
def log(message)
self.class.log(message)
end
def queue_threshold
self.class.queue_threshold
end
def backfill_interval
self.class.backfill_interval
end
def queue_depth
self.class.queue_depth
end
def slim_accounts
return @_accounts if @_accounts
root_account_ids = Account.root_accounts.active.pluck(:id)
@_accounts = Account.active.where(
"root_account_id IS NULL OR root_account_id IN (?)", root_account_ids
).select(:id, :root_account_id)
end
def cluster_name
self.class.cluster_name
end
def conditionally_enqueue_worker(worker, n_strand)
if worker.currently_queueable?
job = Delayed::Job.enqueue(worker, n_strand: n_strand, priority: Delayed::LOW_PRIORITY)
worker.mark_cell_queued!(delayed_job_id: job.id)
end
end
def generate_worker(worker_type, account, current_date)
worker_operation = (operation == :repair) ? :repair : :backfill
worker_type.new(account.id, current_date, operation_type: worker_operation)
end
def enqueue_one_day_for_account(account, current_date)
if account.root_account?
# auth records are stored at the root account level,
# we only need to enqueue these jobs for root accounts
auth_worker = generate_worker(AuthenticationWorker, account, current_date)
conditionally_enqueue_worker(auth_worker, "auditors_migration")
end
course_worker = generate_worker(CourseWorker, account, current_date)
conditionally_enqueue_worker(course_worker, "auditors_migration")
grade_change_worker = generate_worker(GradeChangeWorker, account, current_date)
conditionally_enqueue_worker(grade_change_worker, "auditors_migration")
end
def enqueue_one_day(current_date)
slim_accounts.each do |account|
enqueue_one_day_for_account(account, current_date)
end
end
def schedular_strand_tag
"AuditorsBackfillEngine::Job_Shard_#{self.class.jobs_id}"
end
# Next date the scheduling loop should process: a week back in the initial
# pass, a single day back in a repair pass.
# each job spans a week, so when we're scheduling
# the initial job we can schedule one week at a time.
# In a repair pass, we want to make sure we hit every
# cell that failed, or that never got queued (just in case),
# so we actually line up jobs for each day that is
# missing/failed. It's possible to schedule multiple jobs
# for the same week. If one completes before the next one starts,
# they will bail immediately. If two from the same week are running
# at the same time the uniqueness-constraint-and-conflict-handling
# prevents them from creating duplicates
def next_schedule_date(current_date)
  case operation
  when :schedule
    current_date - 7.days
  when :repair
    current_date - 1.day
  else
    raise "Unknown backfill operation: #{operation}"
  end
end
# Walks backwards from @start_date (one week per iteration in a scheduling
# pass, one day in a repair pass), enqueueing a day's workers each time.
# If the job queue gets too deep, it stops early and re-enqueues itself to
# resume from where it left off.
def perform
  self.class.preset_parallelism!
  log("Scheduling Auditors Backfill!")
  current_date = @start_date
  while current_date >= @end_date
    if queue_depth >= queue_threshold
      log("Queue too deep (#{queue_depth}) for threshold (#{queue_threshold}), throttling...")
      break
    end
    enqueue_one_day(current_date)
    log("Scheduled Backfill for #{current_date} on #{Shard.current.id}")
    # jobs span a week now, we can schedule them at week intervals arbitrarily
    current_date = next_schedule_date(current_date)
  end
  if current_date >= @end_date
    # Only reachable via the throttling break above.
    # BUG FIX: propagate operation_type so a :repair pass does not silently
    # revert to :schedule when the engine reschedules itself.
    schedule_worker = BackfillEngine.new(current_date, @end_date, operation_type: operation)
    next_time = Time.now.utc + backfill_interval
    log("More work to do. Scheduling another job for #{next_time}")
    Delayed::Job.enqueue(schedule_worker, run_at: next_time, priority: Delayed::LOW_PRIORITY, n_strand: schedular_strand_tag, max_attempts: 5)
  else
    log("WE DID IT. Shard #{Shard.current.id} has auditors migrated (probably, check the migration cell records to be sure)")
  end
end
end
# useful for generating cassandra records in test environments
# to make migration practice more real.
# Probably should never run in production. Ever.
class DataFixtures
  # pulled from one day on FFT
  # as a sample size
  AUTH_VOLUME = 275000
  COURSE_VOLUME = 8000
  GRADE_CHANGE_VOLUME = 175000

  # Writes ~AUTH_VOLUME synthetic login events against a sample of pseudonyms.
  # NOTE(review): unlike the other generators this one writes unconditionally,
  # with no Auditors.write_to_cassandra? guard — confirm that's intended.
  def generate_authentications
    puts("generating auth records...")
    pseudonyms = Pseudonym.active.limit(2000)
    event_count = 0
    while event_count < AUTH_VOLUME
      event_record = Auditors::Authentication::Record.generate(pseudonyms.sample, 'login')
      Auditors::Authentication::Stream.insert(event_record, {backend_strategy: :cassandra})
      event_count += 1
      puts("...#{event_count}") if event_count % 1000 == 0
    end
  end

  # Writes ~COURSE_VOLUME synthetic "published" course events.
  def generate_courses
    puts("generating course event records...")
    courses = Course.active.limit(1000)
    users = User.active.limit(1000)
    event_count = 0
    while event_count < COURSE_VOLUME
      event_record = Auditors::Course::Record.generate(courses.sample, users.sample, 'published', {}, {})
      Auditors::Course::Stream.insert(event_record, {backend_strategy: :cassandra}) if Auditors.write_to_cassandra?
      event_count += 1
      puts("...#{event_count}") if event_count % 1000 == 0
    end
  end

  # Writes ~GRADE_CHANGE_VOLUME synthetic "graded" events.
  # NOTE(review): the volume check only runs between assignments, so the inner
  # submissions loop can overshoot the target by one assignment's submissions.
  def generate_grade_changes
    puts("generating grade change records...")
    assignments = Assignment.active.limit(10000)
    event_count = 0
    while event_count < GRADE_CHANGE_VOLUME
      assignment = assignments.sample
      assignment.submissions.each do |sub|
        event_record = Auditors::GradeChange::Record.generate(sub, 'graded')
        Auditors::GradeChange::Stream.insert(event_record, {backend_strategy: :cassandra}) if Auditors.write_to_cassandra?
        event_count += 1
        puts("...#{event_count}") if event_count % 1000 == 0
      end
    end
  end

  # Runs all three generators.
  def generate
    generate_authentications
    generate_courses
    generate_grade_changes
  end
end
end
end
re-query on missing records
refs CNVS-48876
flag = none
TEST PLAN:
1) run backfill
2) re-queries should happen
3) evidence should appear in logs
Change-Id: Ieb423f527151de08483ac1c330e046e4c886f8d4
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/238837
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
Reviewed-by: Simon Williams <088e16a1019277b15d58faf0541e11910eb756f6@instructure.com>
QA-Review: Simon Williams <088e16a1019277b15d58faf0541e11910eb756f6@instructure.com>
Product-Review: Ethan Vizitei <73eeb0b2f65d05a906adf3b21ee1f9f5a2aa3c1c@instructure.com>
#
# Copyright (C) 2020 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require 'set'
module DataFixup::Auditors
module Migrate
DEFAULT_BATCH_SIZE = 100
DEFAULT_REPAIR_BATCH_SIZE = 1000
module AuditorWorker
# A worker covers one (account, date) cell of the migration matrix.
# operation_type selects :backfill (full copy) vs :repair (fill gaps only).
def initialize(account_id, date, operation_type: :backfill)
  @account_id = account_id
  @date = date
  @_operation = operation_type
end

# Defaults to :backfill for worker jobs serialized before @_operation existed.
def operation
  @_operation ||= :backfill
end
def account
  @_account ||= Account.find(@account_id)
end

# Start of the week containing @date. Weeks run Sunday-to-Sunday because
# the auditors cassandra partitions span one week (see cassandra_query_options).
def previous_sunday
  date_time - date_time.wday.days
end

def next_sunday
  previous_sunday + 7.days
end

# @date at midnight UTC, memoized.
def date_time
  @_dt ||= CanvasTime.try_parse("#{@date.strftime('%Y-%m-%d')} 00:00:00 -0000")
end
# Pagination window handed to the cassandra index queries: this worker's
# whole week, fetched serially.
def cassandra_query_options
  # auditors cassandra partitions span one week.
  # querying a week at a time is more efficient
  # than separately for each day.
  # this query will usually span 2 partitions
  # because the alignment of the partitions is
  # to number of second from epoch % seconds_in_week
  {
    oldest: previous_sunday,
    newest: next_sunday,
    fetch_strategy: :serial
  }
end
# rubocop:disable Lint/NoSleep
# Paginates the cassandra id index, retrying timeouts up to 10 times with
# exponential backoff (1.4**n seconds).
def get_cassandra_records_resiliantly(collection, page_args)
  retries = 0
  max_retries = 10
  begin
    recs = collection.paginate(page_args)
    return recs
  rescue CassandraCQL::Thrift::TimedOutException
    raise if retries >= max_retries
    sleep 1.4 ** retries
    retries += 1
    retry
  end
end

# Loads full attribute records for +ids+. If cassandra returns fewer records
# than requested, we raise a RuntimeError that the rescue below deliberately
# catches so the whole fetch is RE-QUERIED — missing records are often only
# transiently invisible, and silently dropping them would lose data.
def fetch_attributes_resiliantly(stream_type, ids)
  retries = 0
  max_retries = 10
  begin
    recs = stream_type.fetch(ids, strategy: :serial)
    if recs.size != ids.size
      found_ids = recs.map(&:id)
      missing_ids = ids - found_ids
      raise RuntimeError, "NOT FOUND: #{missing_ids.join(',')}"
    end
    return recs
  rescue CassandraCQL::Thrift::TimedOutException, RuntimeError
    raise if retries >= max_retries
    sleep 1.4 ** retries
    retries += 1
    retry
  end
end
# rubocop:enable Lint/NoSleep
# Drops attribute hashes whose uuid is already in postgres, so a retried
# bulk insert only attempts genuinely-new rows (keeps the migration idempotent).
def filter_for_idempotency(ar_attributes_list, auditor_ar_type)
  # we might have inserted some of these already, try again with only new recs
  uuids = ar_attributes_list.map{|h| h['uuid']}
  existing_uuids = auditor_ar_type.where(uuid: uuids).pluck(:uuid)
  ar_attributes_list.reject{|h| existing_uuids.include?(h['uuid']) }
end

# Bulk-inserts attribute hashes, grouped by the partition table each row
# belongs to (auditors AR tables are date-partitioned).
def bulk_insert_auditor_recs(auditor_ar_type, attrs_lists)
  partition_groups = attrs_lists.group_by{|a| auditor_ar_type.infer_partition_table_name(a) }
  partition_groups.each do |partition_name, partition_attrs|
    uuids = partition_attrs.map{|h| h['uuid']}
    # NOTE(review): this logs every uuid in the batch — noisy for large batches.
    Rails.logger.info("INSERTING INTO #{partition_name} #{uuids.size} IDs (#{uuids.join(',')})")
    auditor_ar_type.transaction do
      auditor_ar_type.connection.bulk_insert(partition_name, partition_attrs)
    end
  end
end
# Copies one cassandra index's worth of auditor records into postgres, page
# by page: read a page of ids, fetch full attributes, bulk insert. On
# uniqueness/FK violations the batch is filtered down to new, valid rows and
# retried once, keeping the pass idempotent across re-runs.
def migrate_in_pages(ids_collection, stream_type, auditor_ar_type, batch_size=DEFAULT_BATCH_SIZE)
  page = 1
  while page
    id_recs = get_cassandra_records_resiliantly(ids_collection, { page: page, per_page: batch_size })
    proposed_ids = id_recs.map { |rec| rec['id'] }
    attrs_list = fetch_attributes_resiliantly(stream_type, proposed_ids).map do |rec|
      auditor_ar_type.ar_attributes_from_event_stream(rec)
    end
    begin
      bulk_insert_auditor_recs(auditor_ar_type, attrs_list)
    rescue ActiveRecord::RecordNotUnique, ActiveRecord::InvalidForeignKey
      # this gets messy if we act specifically; let's just apply both remedies
      retriable = filter_dead_foreign_keys(filter_for_idempotency(attrs_list, auditor_ar_type))
      bulk_insert_auditor_recs(auditor_ar_type, retriable) if retriable.size > 0
    end
    page = id_recs.next_page
  end
end
# repairing is run after a scheduling pass. In most cases this means some records
# made it over and then the scheduled migration job failed, usually due to
# repeated cassandra timeouts. For this reason, we don't wish to load ALL
# scanned records from cassandra, only those that are not yet in the database.
# therefore "repair" is much more careful. It scans each batch of IDs from the cassandra
# index, sees which ones aren't currently in postgres, and then only loads attributes for
# that subset to insert. This makes it much faster for traversing a large dataset
# when some or most of the records are filled in already. Obviously it would be somewhat
# slower than the migrate pass if there were NO records migrated.
# Like migrate_in_pages, but checks postgres FIRST and only fetches/inserts
# records whose uuids are not already present — far cheaper when most of the
# window was already migrated (see the comment block above).
def repair_in_pages(ids_collection, stream_type, auditor_ar_type, batch_size=DEFAULT_REPAIR_BATCH_SIZE)
  next_page = 1
  until next_page.nil?
    page_args = { page: next_page, per_page: batch_size}
    auditor_id_recs = get_cassandra_records_resiliantly(ids_collection, page_args)
    collect_propsed_ids = auditor_id_recs.map{|rec| rec['id']}
    existing_ids = auditor_ar_type.where(uuid: collect_propsed_ids).pluck(:uuid)
    insertable_ids = collect_propsed_ids - existing_ids
    if insertable_ids.size > 0
      auditor_recs = fetch_attributes_resiliantly(stream_type, insertable_ids)
      ar_attributes_list = auditor_recs.map do |rec|
        auditor_ar_type.ar_attributes_from_event_stream(rec)
      end
      begin
        bulk_insert_auditor_recs(auditor_ar_type, ar_attributes_list)
      rescue ActiveRecord::RecordNotUnique, ActiveRecord::InvalidForeignKey
        # this gets messy if we act specifically; let's just apply both remedies
        new_attrs_list = filter_for_idempotency(ar_attributes_list, auditor_ar_type)
        new_attrs_list = filter_dead_foreign_keys(new_attrs_list)
        bulk_insert_auditor_recs(auditor_ar_type, new_attrs_list) if new_attrs_list.size > 0
      end
    end
    next_page = auditor_id_recs.next_page
  end
end
# Read-only pass: counts cassandra ids in this window and records which uuids
# are missing from postgres. Accumulates into @audit_results so multiple
# invocations (e.g. per-course for grade changes) sum into one result set.
def audit_in_pages(ids_collection, auditor_ar_type, batch_size=DEFAULT_BATCH_SIZE)
  @audit_results ||= {
    'uuid_count' => 0,
    'failure_count' => 0,
    'missed_ids' => []
  }
  audit_failure_uuids = []
  audit_uuid_count = 0
  next_page = 1
  until next_page.nil?
    page_args = { page: next_page, per_page: batch_size}
    auditor_id_recs = get_cassandra_records_resiliantly(ids_collection, page_args)
    uuids = auditor_id_recs.map{|rec| rec['id']}
    audit_uuid_count += uuids.size
    existing_uuids = auditor_ar_type.where(uuid: uuids).pluck(:uuid)
    audit_failure_uuids += (uuids - existing_uuids)
    next_page = auditor_id_recs.next_page
  end
  @audit_results['uuid_count'] += audit_uuid_count
  @audit_results['failure_count'] += audit_failure_uuids.size
  @audit_results['missed_ids'] += audit_failure_uuids
end
# Attribute hash identifying one migration cell: auditor type + account + day.
def cell_attributes(target_date: nil)
  target_date = @date if target_date.nil?
  {
    auditor_type: auditor_type,
    account_id: account.id,
    year: target_date.year,
    month: target_date.month,
    day: target_date.day
  }
end

# NOTE(review): the keyword default already evaluates cell_attributes; the
# nil-check inside only matters when a caller passes attributes: nil explicitly.
def find_migration_cell(attributes: cell_attributes)
  attributes = cell_attributes if attributes.nil?
  ::Auditors::ActiveRecord::MigrationCell.find_by(attributes)
end
def migration_cell
  @_cell ||= find_migration_cell
end

# Creates the progress-tracking cell row, tolerating a concurrent insert from
# another job: on a uniqueness violation the existing row is fetched instead.
def create_cell!(attributes: nil)
  attributes = cell_attributes if attributes.nil?
  ::Auditors::ActiveRecord::MigrationCell.create!(attributes.merge({ completed: false, repaired: false }))
rescue ActiveRecord::RecordNotUnique, PG::UniqueViolation
  created_cell = find_migration_cell(attributes: attributes)
  raise "unresolvable auditors migration state #{attributes}" if created_cell.nil?
  created_cell
end

def reset_cell!
  migration_cell&.destroy
  @_cell = nil
end
# Marks this cell — and every other day-cell in the same week, since one job
# covers a whole week — as queued, storing the delayed job id so liveness
# checks (currently_queueable?) can detect dead jobs.
def mark_cell_queued!(delayed_job_id: nil)
  (@_cell = create_cell!) if migration_cell.nil?
  migration_cell.update_attribute(:failed, false) if migration_cell.failed
  # queueing will take care of a week, so let's make sure we don't get
  # other jobs vying for the same slot
  current_date = previous_sunday
  while current_date < next_sunday do
    cur_cell_attrs = cell_attributes(target_date: current_date)
    current_cell = find_migration_cell(attributes: cur_cell_attrs)
    current_cell = create_cell!(attributes: cur_cell_attrs) if current_cell.nil?
    current_cell.update(completed: false, failed: false, job_id: delayed_job_id, queued: true)
    current_date += 1.day
  end
  @_cell.reload
end
# Lazily-evaluated lambda that builds an auditors cassandra handle with an
# extended, Setting-tunable timeout (default 360s) for long backfill scans.
def auditors_cassandra_db_lambda
  lambda do
    timeout_value = Setting.get("auditors_backfill_cassandra_timeout", 360).to_i
    opts = { override_options: { 'timeout' => timeout_value } }
    Canvas::Cassandra::DatabaseBuilder.from_config(:auditors, opts)
  end
end
def already_complete?
  migration_cell&.completed
end

# Whether this worker should be (re)enqueued for its cell right now, based on
# the cell's completed/repaired/failed/queued flags and the current operation.
def currently_queueable?
  return true if migration_cell.nil?
  return true if migration_cell.failed
  if operation == :repair
    return false if migration_cell.repaired
  else
    return false if migration_cell.completed
  end
  return true unless migration_cell.queued
  # this cell is currently in the queue (maybe)
  # If that update happened more than a few
  # days ago, it's likely dead, and should
  # get rescheduled. Worst case
  # it scans and fails to find anything to do,
  # and marks the cell complete.
  return migration_cell.updated_at < 3.days.ago
end
# One job covers a whole week, so success is stamped onto every day-cell of
# that week (creating cells that don't exist yet).
def mark_week_complete!
  current_date = previous_sunday
  while current_date < next_sunday do
    cur_cell_attrs = cell_attributes(target_date: current_date)
    current_cell = find_migration_cell(attributes: cur_cell_attrs)
    current_cell = create_cell!(attributes: cur_cell_attrs) if current_cell.nil?
    repaired = (operation == :repair)
    current_cell.update(completed: true, failed: false, repaired: repaired)
    current_date += 1.day
  end
end

# Stamps audit results (the week's missing-record count) onto every day-cell.
def mark_week_audited!(results)
  current_date = previous_sunday
  failed_count = results['failure_count']
  while current_date < next_sunday do
    cur_cell_attrs = cell_attributes(target_date: current_date)
    current_cell = find_migration_cell(attributes: cur_cell_attrs)
    current_cell = create_cell!(attributes: cur_cell_attrs) if current_cell.nil?
    current_cell.update(audited: true, missing_count: failed_count)
    current_date += 1.day
  end
end
# Delayed-job entry point: migrates (or repairs) this worker's week.
# The ensure block flags the cell failed when the run didn't complete, and
# always clears the queued flag and the extended cassandra timeout.
# NOTE(review): if extend_cassandra_stream_timeout! itself raises, `cell` is
# still nil and the ensure block will raise NoMethodError — confirm acceptable.
def perform
  extend_cassandra_stream_timeout!
  cell = migration_cell
  return if cell&.completed
  cell = create_cell! if cell.nil?
  if account.root_account.created_at > @date + 2.days
    # this account wasn't active on this day, don't
    # waste time migrating
    return cell.update_attribute(:completed, true)
  end
  if operation == :repair
    perform_repair
  elsif operation == :backfill
    perform_migration
  else
    raise "Unknown Auditor Backfill Operation: #{operation}"
  end
  # the reason this works is the rescheduling plan.
  # if the job passes, the whole week gets marked "complete".
  # If it fails, the target cell for this one job will get rescheduled
  # later in the reconciliation pass.
  # at that time it will again run for this whole week.
  # any failed day results in a job spanning a week.
  # If a job for another day in the SAME week runs,
  # and this one is done already, it will quickly short circuit because this
  # day is marked complete.
  # If two jobs from the same week happened to run at the same time,
  # they would contend over Uniqueness violations, which we catch and handle.
  mark_week_complete!
ensure
  cell.update_attribute(:failed, true) unless cell.reload.completed
  cell.update_attribute(:queued, false)
  clear_cassandra_stream_timeout!
end
# Read-only audit over this worker's week; returns counts plus the uuids
# that exist in cassandra but are missing from postgres.
def audit
  extend_cassandra_stream_timeout!
  @audit_results = {
    'uuid_count' => 0,
    'failure_count' => 0,
    'missed_ids' => []
  }
  perform_audit
  mark_week_audited!(@audit_results)
  return @audit_results
ensure
  clear_cassandra_stream_timeout!
end
# Swaps the auditors cassandra stream onto a connection with an extended
# timeout (auditors_cassandra_db_lambda), caching the original database proc
# so clear_cassandra_stream_timeout! can restore it afterwards.
def extend_cassandra_stream_timeout!
  Canvas::Cassandra::DatabaseBuilder.reset_connections!
  @_stream_db_proc = auditor_cassandra_stream.attr_config_values[:database]
  auditor_cassandra_stream.database(auditors_cassandra_db_lambda)
end

# Restores the stream's original database proc.
# BUG FIX: the original code called `RuntimeError("...")` — a nonexistent
# method, so the guard raised NoMethodError instead of the intended
# RuntimeError. A bare `raise` with a String produces a RuntimeError.
def clear_cassandra_stream_timeout!
  raise "stream db never cached!" unless @_stream_db_proc
  Canvas::Cassandra::DatabaseBuilder.reset_connections!
  auditor_cassandra_stream.database(@_stream_db_proc)
end
# Maps this worker's auditor_type to its cassandra event stream.
def auditor_cassandra_stream
  stream_map = {
    authentication: Auditors::Authentication::Stream,
    course: Auditors::Course::Stream,
    grade_change: Auditors::GradeChange::Stream
  }
  stream_map[auditor_type]
end

# Abstract interface each including worker class must implement.
# (Plain RuntimeErrors are raised rather than NotImplementedError so the
# surrounding StandardError handling still catches them.)
def auditor_type
  raise "NOT IMPLEMENTED"
end

def perform_migration
  raise "NOT IMPLEMENTED"
end

def perform_repair
  raise "NOT IMPLEMENTED"
end

def perform_audit
  raise "NOT IMPLEMENTED"
end

def filter_dead_foreign_keys(_attrs_list)
  raise "NOT IMPLEMENTED"
end
end
# account = Account.find(account_id)
# date = Date.civil(2020, 4, 21)
# cass_class = Auditors::Authentication
# ar_class = Auditors::ActiveRecord::AuthenticationRecord
# worker = AuthenticationWorker.new(account, date)
# Delayed::Job.enqueue(worker)
# Migrates Auditors::Authentication events for one (root) account/week.
class AuthenticationWorker
  include AuditorWorker
  def auditor_type
    :authentication
  end

  def cassandra_id_collection
    Auditors::Authentication::Stream.ids_for_account(account, cassandra_query_options)
  end

  def perform_migration
    migrate_in_pages(cassandra_id_collection, Auditors::Authentication::Stream, Auditors::ActiveRecord::AuthenticationRecord)
  end

  def perform_repair
    repair_in_pages(cassandra_id_collection, Auditors::Authentication::Stream, Auditors::ActiveRecord::AuthenticationRecord)
  end

  def perform_audit
    audit_in_pages(cassandra_id_collection, Auditors::ActiveRecord::AuthenticationRecord)
  end

  # Drops rows referencing users or pseudonyms that no longer exist, so a
  # retried bulk insert won't hit the same FK violations again.
  def filter_dead_foreign_keys(attrs_list)
    user_ids = attrs_list.map{|a| a['user_id'] }
    pseudonym_ids = attrs_list.map{|a| a['pseudonym_id'] }
    existing_user_ids = User.where(id: user_ids).pluck(:id)
    existing_pseud_ids = Pseudonym.where(id: pseudonym_ids).pluck(:id)
    missing_uids = user_ids - existing_user_ids
    missing_pids = pseudonym_ids - existing_pseud_ids
    new_attrs_list = attrs_list.reject{|h| missing_uids.include?(h['user_id']) }
    new_attrs_list.reject{|h| missing_pids.include?(h['pseudonym_id'])}
  end
end
# Migrates Auditors::Course events for one account/week.
class CourseWorker
  include AuditorWorker
  def auditor_type
    :course
  end

  def cassandra_id_collection
    Auditors::Course::Stream.ids_for_account(account, cassandra_query_options)
  end

  def perform_migration
    migrate_in_pages(cassandra_id_collection, Auditors::Course::Stream, Auditors::ActiveRecord::CourseRecord)
  end

  def perform_repair
    repair_in_pages(cassandra_id_collection, Auditors::Course::Stream, Auditors::ActiveRecord::CourseRecord)
  end

  def perform_audit
    audit_in_pages(cassandra_id_collection, Auditors::ActiveRecord::CourseRecord)
  end

  # Drops rows referencing users that no longer exist (FK-violation remedy).
  def filter_dead_foreign_keys(attrs_list)
    user_ids = attrs_list.map{|a| a['user_id'] }
    existing_user_ids = User.where(id: user_ids).pluck(:id)
    missing_uids = user_ids - existing_user_ids
    attrs_list.reject {|h| missing_uids.include?(h['user_id']) }
  end
end
# Migrates Auditors::GradeChange events. The cassandra index is per-course,
# so work fans out over the account's candidate courses in batches of 1000.
class GradeChangeWorker
  include AuditorWorker
  def auditor_type
    :grade_change
  end

  def cassandra_id_collection_for(course)
    Auditors::GradeChange::Stream.ids_for_course(course, cassandra_query_options)
  end

  # Courses that could have produced grade changes for this window: created by
  # (@date + 2 days) with at least one submission touched in the prior week.
  def migrateable_course_ids
    s_scope = Submission.where("course_id=courses.id").where("updated_at > ?", @date - 7.days)
    account.courses.active.where(
      "EXISTS (?)", s_scope).where(
      "courses.created_at <= ?", @date + 2.days).pluck(:id)
  end

  def perform_migration
    all_course_ids = migrateable_course_ids.to_a
    all_course_ids.in_groups_of(1000) do |course_ids|
      Course.where(id: course_ids).each do |course|
        migrate_in_pages(cassandra_id_collection_for(course), Auditors::GradeChange::Stream, Auditors::ActiveRecord::GradeChangeRecord)
      end
    end
  end

  def perform_repair
    all_course_ids = migrateable_course_ids.to_a
    all_course_ids.in_groups_of(1000) do |course_ids|
      Course.where(id: course_ids).each do |course|
        repair_in_pages(cassandra_id_collection_for(course), Auditors::GradeChange::Stream, Auditors::ActiveRecord::GradeChangeRecord)
      end
    end
  end

  def perform_audit
    all_course_ids = migrateable_course_ids.to_a
    all_course_ids.in_groups_of(1000) do |course_ids|
      Course.where(id: course_ids).each do |course|
        audit_in_pages(cassandra_id_collection_for(course), Auditors::ActiveRecord::GradeChangeRecord)
      end
    end
  end

  # Drops rows whose student/grader user or submission no longer exists.
  def filter_dead_foreign_keys(attrs_list)
    student_ids = attrs_list.map{|a| a['student_id'] }
    grader_ids = attrs_list.map{|a| a['grader_id'] }
    user_ids = (student_ids + grader_ids).uniq
    existing_user_ids = User.where(id: user_ids).pluck(:id)
    missing_uids = user_ids - existing_user_ids
    filtered_attrs_list = attrs_list.reject do |h|
      missing_uids.include?(h['student_id']) || missing_uids.include?(h['grader_id'])
    end
    submission_ids = filtered_attrs_list.map{|a| a['submission_id'] }
    existing_submission_ids = Submission.where(id: submission_ids).pluck(:id)
    missing_sids = submission_ids - existing_submission_ids
    filtered_attrs_list.reject {|h| missing_sids.include?(h['submission_id']) }
  end
end
# sets up ALL the backfill jobs for the current shard
# given some date range
# remember we START with the most recent because
# they're typically most valuable, and walk backwards,
# so start_date should be > end_date.
#
# This job tries to be nice to the db by scheduling a day at a time
# and if the queue is over the set threshold it will schedule itself to
# run again in 5 minutes and see if it can schedule in any more.
# This should keep the queue from growing out of control.
#
# Setup is something like:
# start_date = Date.today
# end_date = start - 10.months
# worker = DataFixup::Auditors::Migrate::BackfillEngine.new(start_date, end_date)
# Delayed::Job.enqueue(worker)
#
# It will take care of re-scheduling itself until that backfill window is covered.
class BackfillEngine
# Queue-depth above which the engine throttles itself, and the default
# reschedule interval (seconds); both overridable per jobs-shard via Setting.
DEFAULT_DEPTH_THRESHOLD = 100000
DEFAULT_SCHEDULING_INTERVAL = 150
# these jobs are all low-priority,
# so high-ish parallelism is ok
# (they mostly run in a few minutes or less).
# we'll wind it down on clusters that are
# in trouble if necessary. For clusters
# taking a long time, grades parallelism
# could actually be increased very substantially overnight
# as they will not try to overwrite each other.
DEFAULT_PARALLELISM_GRADES = 20
DEFAULT_PARALLELISM_COURSES = 10
DEFAULT_PARALLELISM_AUTHS = 5
LOG_PREFIX = "Auditors PG Backfill - ".freeze
# Tag of the engine's own #perform jobs ("schedular" spelling is historical).
SCHEDULAR_TAG = "DataFixup::Auditors::Migrate::BackfillEngine#perform"
# Tags of the per-day worker jobs this engine enqueues.
WORKER_TAGS = [
  "DataFixup::Auditors::Migrate::CourseWorker#perform".freeze,
  "DataFixup::Auditors::Migrate::GradeChangeWorker#perform".freeze,
  "DataFixup::Auditors::Migrate::AuthenticationWorker#perform".freeze
].freeze
class << self
# Monitoring/ops scopes over the delayed-jobs tables. The string-interpolated
# IN clauses are built from the WORKER_TAGS constant (not user input).
def non_future_queue
  Delayed::Job.where("run_at <= ?", Time.zone.now)
end

def queue_depth
  non_future_queue.count
end

def queue_tag_counts
  non_future_queue.group(:tag).count
end

def running_tag_counts
  non_future_queue.where('locked_by IS NOT NULL').group(:tag).count
end

def backfill_jobs
  non_future_queue.where("tag IN ('#{WORKER_TAGS.join("','")}')")
end

def other_jobs
  non_future_queue.where("tag NOT IN ('#{WORKER_TAGS.join("','")}')")
end

def schedular_jobs
  Delayed::Job.where(tag: SCHEDULAR_TAG)
end

def failed_jobs
  Delayed::Job::Failed.where("tag IN ('#{WORKER_TAGS.join("','")}')")
end

def failed_schedulars
  Delayed::Job::Failed.where(tag: SCHEDULAR_TAG)
end

def running_jobs
  backfill_jobs.where("locked_by IS NOT NULL")
end

def completed_cells
  Auditors::ActiveRecord::MigrationCell.where(completed: true)
end

def failed_cells
  Auditors::ActiveRecord::MigrationCell.where(failed: true, completed: false)
end

# Identifier of the delayed-jobs shard backing the current shard, used to
# namespace settings and strands per jobs-cluster.
def jobs_id
  shard = Shard.current
  (shard.respond_to?(:delayed_jobs_shard_id) ? shard.delayed_jobs_shard_id : "NONE")
end
# Setting keys are namespaced per jobs-shard so each cluster can be tuned.
def queue_setting_key
  "auditors_backfill_queue_threshold_jobs#{jobs_id}"
end

def backfill_key
  "auditors_backfill_interval_seconds_jobs#{jobs_id}"
end

def queue_threshold
  Setting.get(queue_setting_key, DEFAULT_DEPTH_THRESHOLD).to_i
end

def backfill_interval
  Setting.get(backfill_key, DEFAULT_SCHEDULING_INTERVAL).to_i.seconds
end

def cluster_name
  Shard.current.database_server.id
end

# NOTE(review): the auditor_type argument is IGNORED, so grade_changes,
# courses and authentications all share one Setting. Consequently
# check_parallelism reports the same value three times, and
# preset_parallelism! effectively applies only the first default it sets
# (DEFAULT_PARALLELISM_GRADES). Confirm whether collapsing the three knobs
# into one key was intentional before "fixing" — changing the key would
# orphan values already set in production.
def parallelism_key(auditor_type)
  "auditors_migration_num_strands"
end

def check_parallelism
  {
    grade_changes: Setting.get(parallelism_key("grade_changes"), 1),
    courses: Setting.get(parallelism_key("courses"), 1),
    authentications: Setting.get(parallelism_key("authentications"), 1)
  }
end
# Details on the longest-currently-locked backfill job (ops visibility).
def longest_running(on_shard: false)
  longest_scope = running_jobs
  if on_shard
    longest_scope = longest_scope.where(shard_id: Shard.current.id)
  end
  longest = longest_scope.order(:locked_at).first
  return {} if longest.blank?
  {
    id: longest.id,
    elapsed_seconds: (Time.now.utc - longest.locked_at),
    locked_by: longest.locked_by
  }
end

# Persists new parallelism settings, then pushes the resulting max_concurrent
# values onto queued-but-unlocked worker jobs of each type.
def update_parallelism!(hash)
  hash.each do |auditor_type, parallelism_value|
    Setting.set(parallelism_key(auditor_type), parallelism_value)
  end
  p_settings = check_parallelism
  gc_tag = 'DataFixup::Auditors::Migrate::GradeChangeWorker#perform'
  Delayed::Job.where(tag: gc_tag, locked_by: nil).update_all(max_concurrent: p_settings[:grade_changes])
  course_tag = 'DataFixup::Auditors::Migrate::CourseWorker#perform'
  Delayed::Job.where(tag: course_tag, locked_by: nil).update_all(max_concurrent: p_settings[:courses])
  auth_tag = 'DataFixup::Auditors::Migrate::AuthenticationWorker#perform'
  Delayed::Job.where(tag: auth_tag, locked_by: nil).update_all(max_concurrent: p_settings[:authentications])
end
# only set parallelism if it is not currently set at all.
# If it's already been set (either from previous preset or
# by manual action) it will have a > 0 value and this will
# just exit after checking each setting
def preset_parallelism!
  if Setting.get(parallelism_key("grade_changes"), -1).to_i < 0
    Setting.set(parallelism_key("grade_changes"), DEFAULT_PARALLELISM_GRADES)
  end
  if Setting.get(parallelism_key("courses"), -1).to_i < 0
    Setting.set(parallelism_key("courses"), DEFAULT_PARALLELISM_COURSES)
  end
  if Setting.get(parallelism_key("authentications"), -1).to_i < 0
    Setting.set(parallelism_key("authentications"), DEFAULT_PARALLELISM_AUTHS)
  end
end

# Dates currently being worked, recovered by deserializing job handlers.
# NOTE(review): YAML.unsafe_load is tolerable only because handlers are
# written by our own job system; never point this at untrusted input.
def working_dates(current_jobs_scope)
  current_jobs_scope.pluck(:handler).map{|h| YAML.unsafe_load(h).instance_variable_get(:@date) }.uniq
end
# Queue/progress snapshot restricted to the current shard.
def shard_summary
  {
    'total_depth': queue_depth,
    'backfill': backfill_jobs.where(shard_id: Shard.current.id).count,
    'others': other_jobs.where(shard_id: Shard.current.id).count,
    'failed': failed_jobs.where(shard_id: Shard.current.id).count,
    'currently_running': running_jobs.where(shard_id: Shard.current.id).count,
    'completed_cells': completed_cells.count,
    'dates_being_worked': working_dates(running_jobs.where(shard_id: Shard.current.id)),
    'config': {
      'threshold': "#{queue_threshold} jobs",
      'interval': "#{backfill_interval} seconds",
      'parallelism': check_parallelism
    },
    'longest_runner': longest_running(on_shard: true),
    'schedular_count': schedular_jobs.where(shard_id: Shard.current.id).count,
    'schedular_job_ids': schedular_jobs.where(shard_id: Shard.current.id).limit(10).map(&:id)
  }
end

# Cross-shard queue/progress snapshot.
def summary
  {
    'total_depth': queue_depth,
    'backfill': backfill_jobs.count,
    'others': other_jobs.count,
    'failed': failed_jobs.count,
    'currently_running': running_jobs.count,
    'completed_cells': completed_cells.count,
    # it does not work to check these with jobs from other shards
    # because deserializing them fails to find accounts
    'dates_being_worked': working_dates(running_jobs.where(shard_id: Shard.current.id)),
    'config': {
      'threshold': "#{queue_threshold} jobs",
      'interval': "#{backfill_interval} seconds",
      'parallelism': check_parallelism
    },
    'longest_runner': longest_running,
    'schedular_count': schedular_jobs.count,
    'schedular_job_ids': schedular_jobs.limit(10).map(&:id)
  }
end
# Count of completed cells per day across the window (inclusive bounds).
def date_summaries(start_date, end_date)
  cur_date = start_date
  output = {}
  while cur_date <= end_date
    cells = completed_cells.where(year: cur_date.year, month: cur_date.month, day: cur_date.day)
    output[cur_date.to_s] = cells.count
    cur_date += 1.day
  end
  output
end

# Days whose completed-cell count falls below the window's max are "holes"
# worth covering with a :repair pass.
def scan_for_holes(start_date, end_date)
  summaries = date_summaries(start_date, end_date)
  max_count = summaries.values.max
  {
    'max_value': max_count,
    'holes': summaries.keep_if{|_,v| v < max_count}
  }
end
def log(message)
  Rails.logger.info("#{LOG_PREFIX} #{message}")
end

# Support tool: runs one scheduler job inline on the console by faking a lock.
def force_run_schedulars(id)
  d_worker = Delayed::Worker.new
  sched_job = Delayed::Job.find(id)
  sched_job.update(locked_by: 'force_run', locked_at: Time.now.utc)
  d_worker.perform(sched_job)
end

# DANGER: truncates every migrated auditors table AND the progress cells,
# restarting the shard's backfill from scratch. Requires dba role.
def total_reset_frd!
  conn = Auditors::ActiveRecord::GradeChangeRecord.connection
  conn.execute("set role dba")
  conn.truncate(Auditors::ActiveRecord::GradeChangeRecord.table_name)
  conn.truncate(Auditors::ActiveRecord::CourseRecord.table_name)
  conn.truncate(Auditors::ActiveRecord::AuthenticationRecord.table_name)
  conn.truncate(Auditors::ActiveRecord::MigrationCell.table_name)
end
end
# Engine runs walk the window BACKWARDS (start_date >= end_date), scheduling
# per-day worker jobs; operation_type is :schedule for a first pass or
# :repair to re-cover failed/missing cells.
def initialize(start_date, end_date, operation_type: :schedule)
  if start_date < end_date
    raise "You probably didn't read the comment on this job..."
  end
  @start_date = start_date
  @end_date = end_date
  @_operation = operation_type
end

# Defaults to :schedule for engine jobs serialized before @_operation existed.
def operation
  @_operation ||= :schedule
end
# Instance-level conveniences delegating to the class-level helpers
# (jobs run as instances, but config/monitoring lives on the class).
def log(message)
  self.class.log(message)
end

def queue_threshold
  self.class.queue_threshold
end

def backfill_interval
  self.class.backfill_interval
end

def queue_depth
  self.class.queue_depth
end

# Accounts to schedule work for: root accounts (root_account_id IS NULL)
# plus children of active root accounts, loaded with only the two columns
# the scheduler needs. Memoized for the life of the job.
def slim_accounts
  return @_accounts if @_accounts
  root_account_ids = Account.root_accounts.active.pluck(:id)
  @_accounts = Account.active.where(
    "root_account_id IS NULL OR root_account_id IN (?)", root_account_ids
  ).select(:id, :root_account_id)
end

def cluster_name
  self.class.cluster_name
end
# Enqueues the worker only when its migration cell still needs work, and
# records the delayed job id on the cell so stuck jobs can be detected.
def conditionally_enqueue_worker(worker, n_strand)
  if worker.currently_queueable?
    job = Delayed::Job.enqueue(worker, n_strand: n_strand, priority: Delayed::LOW_PRIORITY)
    worker.mark_cell_queued!(delayed_job_id: job.id)
  end
end

# Builds one worker for an account/day pair; a :repair engine pass produces
# :repair workers, any other mode becomes :backfill.
def generate_worker(worker_type, account, current_date)
  worker_operation = (operation == :repair) ? :repair : :backfill
  worker_type.new(account.id, current_date, operation_type: worker_operation)
end
# Enqueues the (up to) three worker types for one account/day pair.
def enqueue_one_day_for_account(account, current_date)
  if account.root_account?
    # auth records are stored at the root account level,
    # we only need to enqueue these jobs for root accounts
    auth_worker = generate_worker(AuthenticationWorker, account, current_date)
    conditionally_enqueue_worker(auth_worker, "auditors_migration")
  end
  course_worker = generate_worker(CourseWorker, account, current_date)
  conditionally_enqueue_worker(course_worker, "auditors_migration")
  grade_change_worker = generate_worker(GradeChangeWorker, account, current_date)
  conditionally_enqueue_worker(grade_change_worker, "auditors_migration")
end

# Fans one day's workers out across every slim account.
def enqueue_one_day(current_date)
  slim_accounts.each do |account|
    enqueue_one_day_for_account(account, current_date)
  end
end

# Strand serializing scheduler jobs per jobs-shard.
# NOTE(review): "schedular" is a long-standing misspelling kept for strand
# continuity with in-flight jobs.
def schedular_strand_tag
  "AuditorsBackfillEngine::Job_Shard_#{self.class.jobs_id}"
end
# Next date the scheduling loop should process: a week back for the initial
# pass, a single day back for repair.
def next_schedule_date(current_date)
  # each job spans a week, so when we're scheduling
  # the initial job we can schedule one week at a time.
  # In a repair pass, we want to make sure we hit every
  # cell that failed, or that never got queued (just in case),
  # so we actually line up jobs for each day that is
  # missing/failed. It's possible to schedule multiple jobs
  # for the same week. If one completes before the next one starts,
  # they will bail immediately. If two from the same week are running
  # at the same time the uniqueness-constraint-and-conflict-handling
  # prevents them from creating duplicates
  if operation == :schedule
    current_date - 7.days
  elsif operation == :repair
    current_date - 1.day
  else
    raise "Unknown backfill operation: #{operation}"
  end
end
# Walks backwards from @start_date (one week per iteration in a scheduling
# pass, one day in a repair pass), enqueueing a day's workers each time.
# If the job queue gets too deep, it stops early and re-enqueues itself to
# resume from where it left off.
def perform
  self.class.preset_parallelism!
  log("Scheduling Auditors Backfill!")
  current_date = @start_date
  while current_date >= @end_date
    if queue_depth >= queue_threshold
      log("Queue too deep (#{queue_depth}) for threshold (#{queue_threshold}), throttling...")
      break
    end
    enqueue_one_day(current_date)
    log("Scheduled Backfill for #{current_date} on #{Shard.current.id}")
    # jobs span a week now, we can schedule them at week intervals arbitrarily
    current_date = next_schedule_date(current_date)
  end
  if current_date >= @end_date
    # Only reachable via the throttling break above.
    # BUG FIX: propagate operation_type so a :repair pass does not silently
    # revert to :schedule when the engine reschedules itself.
    schedule_worker = BackfillEngine.new(current_date, @end_date, operation_type: operation)
    next_time = Time.now.utc + backfill_interval
    log("More work to do. Scheduling another job for #{next_time}")
    Delayed::Job.enqueue(schedule_worker, run_at: next_time, priority: Delayed::LOW_PRIORITY, n_strand: schedular_strand_tag, max_attempts: 5)
  else
    log("WE DID IT. Shard #{Shard.current.id} has auditors migrated (probably, check the migration cell records to be sure)")
  end
end
end
# useful for generating cassandra records in test environments
# to make migration practice more real.
# Probably should never run in production. Ever.
class DataFixtures
  # Daily event volumes, pulled from one day on FFT as a sample size.
  AUTH_VOLUME = 275000
  COURSE_VOLUME = 8000
  GRADE_CHANGE_VOLUME = 175000

  # Inserts AUTH_VOLUME synthetic 'login' events, each attributed to a
  # randomly sampled active pseudonym.
  # NOTE(review): this insert is unconditional, while the course and
  # grade-change generators are guarded by Auditors.write_to_cassandra? --
  # confirm the asymmetry is intentional.
  def generate_authentications
    puts("generating auth records...")
    pseudonyms = Pseudonym.active.limit(2000)
    event_count = 0
    while event_count < AUTH_VOLUME
      event_record = Auditors::Authentication::Record.generate(pseudonyms.sample, 'login')
      Auditors::Authentication::Stream.insert(event_record, {backend_strategy: :cassandra})
      event_count += 1
      puts("...#{event_count}") if event_count % 1000 == 0 # progress heartbeat
    end
  end

  # Inserts COURSE_VOLUME synthetic 'published' course events for randomly
  # sampled course/user pairs.
  def generate_courses
    puts("generating course event records...")
    courses = Course.active.limit(1000)
    users = User.active.limit(1000)
    event_count = 0
    while event_count < COURSE_VOLUME
      event_record = Auditors::Course::Record.generate(courses.sample, users.sample, 'published', {}, {})
      Auditors::Course::Stream.insert(event_record, {backend_strategy: :cassandra}) if Auditors.write_to_cassandra?
      event_count += 1
      puts("...#{event_count}") if event_count % 1000 == 0 # progress heartbeat
    end
  end

  # Inserts roughly GRADE_CHANGE_VOLUME synthetic 'graded' events, one per
  # submission of each randomly sampled assignment.
  # NOTE(review): the volume check only happens once per assignment, so the
  # inner submissions loop can overshoot GRADE_CHANGE_VOLUME -- presumably
  # acceptable for fixture data.
  def generate_grade_changes
    puts("generating grade change records...")
    assignments = Assignment.active.limit(10000)
    event_count = 0
    while event_count < GRADE_CHANGE_VOLUME
      assignment = assignments.sample
      assignment.submissions.each do |sub|
        event_record = Auditors::GradeChange::Record.generate(sub, 'graded')
        Auditors::GradeChange::Stream.insert(event_record, {backend_strategy: :cassandra}) if Auditors.write_to_cassandra?
        event_count += 1
        puts("...#{event_count}") if event_count % 1000 == 0 # progress heartbeat
      end
    end
  end

  # Runs all three generators in sequence.
  def generate
    generate_authentications
    generate_courses
    generate_grade_changes
  end
end
end
end |
# Gem version constant for the date_time_precision gem.
module DateTimePrecision
  VERSION = "0.5.1"
end
Version bump [ci skip]
# Gem version constant for the date_time_precision gem (post version bump).
module DateTimePrecision
  VERSION = "0.5.2"
end
|
# Notify Honeybadger of errors in DelayedJob workers
# in /config/initializers/
#
# modified
module Delayed
  module Plugins
    # Delayed::Job plugin that reports job failures to Honeybadger.
    class Honeybadger < Plugin
      # Mixed into each job's payload object so that its #error callback
      # notifies Honeybadger before delegating to any pre-existing handler.
      module Notify
        def error(job, error)
          ::Honeybadger.notify_or_ignore(
            :error_class => error.class.name,
            :error_message => "#{ error.class.name }: #{ error.message }",
            :context => {
              :failed_job => job.inspect,
            }
          )
          # preserve any #error behavior the payload already defined
          super if defined?(super)
        end
      end

      callbacks do |lifecycle|
        lifecycle.before(:invoke_job) do |job|
          payload = job.payload_object
          # PerformableMethod wraps the real target; unwrap it so Notify
          # lands on the object whose #error callback will actually run
          payload = payload.object if payload.is_a? Delayed::PerformableMethod
          payload.extend Notify
        end
      end
    end
  end
end
Remove some comment cruft
module Delayed
  module Plugins
    # Delayed::Job plugin that reports job failures to Honeybadger.
    class Honeybadger < Plugin
      # Mixed into each job's payload object so that its #error callback
      # notifies Honeybadger before delegating to any pre-existing handler.
      module Notify
        def error(job, error)
          ::Honeybadger.notify_or_ignore(
            :error_class => error.class.name,
            :error_message => "#{ error.class.name }: #{ error.message }",
            :context => {
              :failed_job => job.inspect,
            }
          )
          # preserve any #error behavior the payload already defined
          super if defined?(super)
        end
      end

      callbacks do |lifecycle|
        lifecycle.before(:invoke_job) do |job|
          payload = job.payload_object
          # PerformableMethod wraps the real target; unwrap it so Notify
          # lands on the object whose #error callback will actually run
          payload = payload.object if payload.is_a? Delayed::PerformableMethod
          payload.extend Notify
        end
      end
    end
  end
end
|
# Homebrew formula for GNU a2ps, the any-to-PostScript filter.
class A2ps < Formula
  desc "Any-to-PostScript filter"
  homepage "https://www.gnu.org/software/a2ps/"
  url "http://ftpmirror.gnu.org/a2ps/a2ps-4.14.tar.gz"
  mirror "https://ftp.gnu.org/gnu/a2ps/a2ps-4.14.tar.gz"
  sha256 "f3ae8d3d4564a41b6e2a21f237d2f2b104f48108591e8b83497500182a3ab3a4"

  depends_on "homebrew/dupes/gperf" unless OS.mac?

  bottle do
    cellar :any_skip_relocation
    revision 2
    sha256 "95e6cd96cc753d9d632ac8aa1b9d5099d5507c5fb8fc085544803fd85a4bd7c8" => :el_capitan
    sha256 "c89521bb6b3df6a8277564f264006bde650b7d214b288f4805da856a76ec3b69" => :yosemite
    sha256 "d10db3452567e6d4a6be65f15728c40b4a62bcc374e04ff7f5d3608c294c74f4" => :mavericks
  end

  # Software was last updated in 2007.
  # https://svn.macports.org/ticket/20867
  # https://trac.macports.org/ticket/18255
  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/patches/0ae366e6/a2ps/patch-contrib_sample_Makefile.in"
    sha256 "5a34c101feb00cf52199a28b1ea1bca83608cf0a1cb123e6af2d3d8992c6011f"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/patches/0ae366e6/a2ps/patch-lib__xstrrpl.c"
    sha256 "89fa3c95c329ec326e2e76493471a7a974c673792725059ef121e6f9efb05bf4"
  end

  def install
    system "./configure", "--disable-debug", "--disable-dependency-tracking",
                          "--prefix=#{prefix}", "--sysconfdir=#{etc}",
                          "--with-lispdir=#{share}/emacs/site-lisp/#{name}"
    system "make", "install"
  end

  test do
    (testpath/"test.txt").write("Hello World!\n")
    system "#{bin}/a2ps", "test.txt", "-o", "test.ps"
    # FIX: previously asserted start_with?(""), which is vacuously true for
    # any string. Check the PostScript "%!" magic so the assertion actually
    # verifies a PostScript document was produced.
    assert File.read("test.ps").start_with?("%!")
  end
end
a2ps: Fix for Linuxbrew
a2ps had a patch which made the linker argument order incorrect.
Closes Linuxbrew/homebrew-core#181.
Signed-off-by: Bob W. Hogg <c772a964fd55352a3510e5d535dd9ccc9ac30168@linux.com>
# Homebrew formula for GNU a2ps, the any-to-PostScript filter
# (Linuxbrew revision: the Makefile patch is macOS-only).
class A2ps < Formula
  desc "Any-to-PostScript filter"
  homepage "https://www.gnu.org/software/a2ps/"
  url "http://ftpmirror.gnu.org/a2ps/a2ps-4.14.tar.gz"
  mirror "https://ftp.gnu.org/gnu/a2ps/a2ps-4.14.tar.gz"
  sha256 "f3ae8d3d4564a41b6e2a21f237d2f2b104f48108591e8b83497500182a3ab3a4"

  depends_on "homebrew/dupes/gperf" unless OS.mac?

  bottle do
    cellar :any_skip_relocation
    revision 2
    sha256 "95e6cd96cc753d9d632ac8aa1b9d5099d5507c5fb8fc085544803fd85a4bd7c8" => :el_capitan
    sha256 "c89521bb6b3df6a8277564f264006bde650b7d214b288f4805da856a76ec3b69" => :yosemite
    sha256 "d10db3452567e6d4a6be65f15728c40b4a62bcc374e04ff7f5d3608c294c74f4" => :mavericks
  end

  # Software was last updated in 2007.
  # https://svn.macports.org/ticket/20867
  # https://trac.macports.org/ticket/18255
  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/patches/0ae366e6/a2ps/patch-contrib_sample_Makefile.in"
    sha256 "5a34c101feb00cf52199a28b1ea1bca83608cf0a1cb123e6af2d3d8992c6011f"
  end if OS.mac?

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/patches/0ae366e6/a2ps/patch-lib__xstrrpl.c"
    sha256 "89fa3c95c329ec326e2e76493471a7a974c673792725059ef121e6f9efb05bf4"
  end

  def install
    system "./configure", "--disable-debug", "--disable-dependency-tracking",
                          "--prefix=#{prefix}", "--sysconfdir=#{etc}",
                          "--with-lispdir=#{share}/emacs/site-lisp/#{name}"
    system "make", "install"
  end

  test do
    (testpath/"test.txt").write("Hello World!\n")
    system "#{bin}/a2ps", "test.txt", "-o", "test.ps"
    # FIX: previously asserted start_with?(""), which is vacuously true for
    # any string. Check the PostScript "%!" magic so the assertion actually
    # verifies a PostScript document was produced.
    assert File.read("test.ps").start_with?("%!")
  end
end
|
require "language/haskell"
# Homebrew formula for the Agda dependently typed programming language.
class Agda < Formula
  include Language::Haskell::Cabal

  desc "Dependently typed functional programming language"
  homepage "https://wiki.portal.chalmers.se/agda/"
  # FIX: `revision if OS.mac? 1 : 2` is not valid Ruby (a ternary needs
  # `cond ? a : b`); revision 1 on macOS, 2 elsewhere.
  revision OS.mac? ? 1 : 2

  stable do
    url "https://hackage.haskell.org/package/Agda-2.6.0.1/Agda-2.6.0.1.tar.gz"
    sha256 "7bb88a9cd4a556259907ccc71d54e2acc9d3e9ce05486ffdc83f721c7c06c0e8"

    resource "stdlib" do
      url "https://github.com/agda/agda-stdlib.git",
          :tag => "v1.1",
          :revision => "dffb8023a63e7e66a90a8664752245971a915e66"
    end
  end

  bottle do
    sha256 "2baa8f12e01c319b627c0638fb507ab17e413836f8baf0eb8fc97f9fd6093e32" => :mojave
    sha256 "9cd4769e7bb29ff52854efcdbba60a52efc69ac97c938667ae0aa424f11ea4e6" => :high_sierra
    sha256 "9504f8bc0bf5fa728f97411307458945c8b29a6927e998794bcab8ca4506be1c" => :sierra
  end

  head do
    url "https://github.com/agda/agda.git"

    resource "stdlib" do
      url "https://github.com/agda/agda-stdlib.git"
    end
  end

  depends_on "cabal-install" => [:build, :test]
  depends_on "emacs"
  depends_on "ghc"
  uses_from_macos "zlib"

  def install
    # install Agda core
    install_cabal_package :using => ["alex", "happy", "cpphs"]

    resource("stdlib").stage lib/"agda"

    # generate the standard library's bytecode
    cd lib/"agda" do
      cabal_sandbox :home => buildpath, :keep_lib => true do
        cabal_install "--only-dependencies"
        cabal_install
        system "GenerateEverything"
      end
    end

    # generate the standard library's documentation and vim highlighting files
    cd lib/"agda" do
      system bin/"agda", "-i", ".", "-i", "src", "--html", "--vim", "README.agda"
    end

    # compile the included Emacs mode
    system bin/"agda-mode", "compile"
    elisp.install_symlink Dir["#{share}/*/Agda-#{version}/emacs-mode/*"]
  end

  def caveats; <<~EOS
    To use the Agda standard library by default:
      mkdir -p ~/.agda
      echo #{HOMEBREW_PREFIX}/lib/agda/standard-library.agda-lib >>~/.agda/libraries
      echo standard-library >>~/.agda/defaults
  EOS
  end

  test do
    simpletest = testpath/"SimpleTest.agda"
    simpletest.write <<~EOS
      module SimpleTest where

      data ℕ : Set where
        zero : ℕ
        suc : ℕ → ℕ

      infixl 6 _+_
      _+_ : ℕ → ℕ → ℕ
      zero + n = n
      suc m + n = suc (m + n)

      infix 4 _≡_
      data _≡_ {A : Set} (x : A) : A → Set where
        refl : x ≡ x

      cong : ∀ {A B : Set} (f : A → B) {x y} → x ≡ y → f x ≡ f y
      cong f refl = refl

      +-assoc : ∀ m n o → (m + n) + o ≡ m + (n + o)
      +-assoc zero _ _ = refl
      +-assoc (suc m) n o = cong suc (+-assoc m n o)
    EOS

    stdlibtest = testpath/"StdlibTest.agda"
    stdlibtest.write <<~EOS
      module StdlibTest where

      open import Data.Nat
      open import Relation.Binary.PropositionalEquality

      +-assoc : ∀ m n o → (m + n) + o ≡ m + (n + o)
      +-assoc zero _ _ = refl
      +-assoc (suc m) n o = cong suc (+-assoc m n o)
    EOS

    iotest = testpath/"IOTest.agda"
    iotest.write <<~EOS
      module IOTest where

      open import Agda.Builtin.IO
      open import Agda.Builtin.Unit

      postulate
        return : ∀ {A : Set} → A → IO A

      {-# COMPILE GHC return = \\_ -> return #-}

      main : _
      main = return tt
    EOS

    stdlibiotest = testpath/"StdlibIOTest.agda"
    stdlibiotest.write <<~EOS
      module StdlibIOTest where

      open import IO

      main : _
      main = run (putStr "Hello, world!")
    EOS

    # typecheck a simple module
    system bin/"agda", simpletest

    # typecheck a module that uses the standard library
    system bin/"agda", "-i", lib/"agda"/"src", stdlibtest

    # compile a simple module using the JS backend
    system bin/"agda", "--js", simpletest

    # test the GHC backend
    cabal_sandbox do
      cabal_install "text", "ieee754"
      dbpath = Dir["#{testpath}/.cabal-sandbox/*-packages.conf.d"].first
      dbopt = "--ghc-flag=-package-db=#{dbpath}"

      # compile and run a simple program
      system bin/"agda", "-c", dbopt, iotest
      assert_equal "", shell_output(testpath/"IOTest")

      # compile and run a program that uses the standard library
      system bin/"agda", "-c", "-i", lib/"agda"/"src", dbopt, stdlibiotest
      assert_equal "Hello, world!", shell_output(testpath/"StdlibIOTest")
    end
  end
end
agda: Fix for Linuxbrew
require "language/haskell"
# Homebrew formula for the Agda dependently typed programming language.
class Agda < Formula
  include Language::Haskell::Cabal

  desc "Dependently typed functional programming language"
  homepage "https://wiki.portal.chalmers.se/agda/"
  # revision 1 on macOS, 2 elsewhere (Linuxbrew rebuild)
  revision OS.mac? ? 1 : 2

  stable do
    url "https://hackage.haskell.org/package/Agda-2.6.0.1/Agda-2.6.0.1.tar.gz"
    sha256 "7bb88a9cd4a556259907ccc71d54e2acc9d3e9ce05486ffdc83f721c7c06c0e8"

    resource "stdlib" do
      url "https://github.com/agda/agda-stdlib.git",
          :tag => "v1.1",
          :revision => "dffb8023a63e7e66a90a8664752245971a915e66"
    end
  end

  bottle do
    sha256 "2baa8f12e01c319b627c0638fb507ab17e413836f8baf0eb8fc97f9fd6093e32" => :mojave
    sha256 "9cd4769e7bb29ff52854efcdbba60a52efc69ac97c938667ae0aa424f11ea4e6" => :high_sierra
    sha256 "9504f8bc0bf5fa728f97411307458945c8b29a6927e998794bcab8ca4506be1c" => :sierra
  end

  head do
    url "https://github.com/agda/agda.git"

    resource "stdlib" do
      url "https://github.com/agda/agda-stdlib.git"
    end
  end

  depends_on "cabal-install" => [:build, :test]
  depends_on "emacs"
  depends_on "ghc"
  uses_from_macos "zlib"

  def install
    # install Agda core
    install_cabal_package :using => ["alex", "happy", "cpphs"]

    resource("stdlib").stage lib/"agda"

    # generate the standard library's bytecode
    cd lib/"agda" do
      cabal_sandbox :home => buildpath, :keep_lib => true do
        cabal_install "--only-dependencies"
        cabal_install
        system "GenerateEverything"
      end
    end

    # generate the standard library's documentation and vim highlighting files
    cd lib/"agda" do
      system bin/"agda", "-i", ".", "-i", "src", "--html", "--vim", "README.agda"
    end

    # compile the included Emacs mode
    system bin/"agda-mode", "compile"
    elisp.install_symlink Dir["#{share}/*/Agda-#{version}/emacs-mode/*"]
  end

  def caveats; <<~EOS
    To use the Agda standard library by default:
      mkdir -p ~/.agda
      echo #{HOMEBREW_PREFIX}/lib/agda/standard-library.agda-lib >>~/.agda/libraries
      echo standard-library >>~/.agda/defaults
  EOS
  end

  test do
    simpletest = testpath/"SimpleTest.agda"
    simpletest.write <<~EOS
      module SimpleTest where

      data ℕ : Set where
        zero : ℕ
        suc : ℕ → ℕ

      infixl 6 _+_
      _+_ : ℕ → ℕ → ℕ
      zero + n = n
      suc m + n = suc (m + n)

      infix 4 _≡_
      data _≡_ {A : Set} (x : A) : A → Set where
        refl : x ≡ x

      cong : ∀ {A B : Set} (f : A → B) {x y} → x ≡ y → f x ≡ f y
      cong f refl = refl

      +-assoc : ∀ m n o → (m + n) + o ≡ m + (n + o)
      +-assoc zero _ _ = refl
      +-assoc (suc m) n o = cong suc (+-assoc m n o)
    EOS

    stdlibtest = testpath/"StdlibTest.agda"
    stdlibtest.write <<~EOS
      module StdlibTest where

      open import Data.Nat
      open import Relation.Binary.PropositionalEquality

      +-assoc : ∀ m n o → (m + n) + o ≡ m + (n + o)
      +-assoc zero _ _ = refl
      +-assoc (suc m) n o = cong suc (+-assoc m n o)
    EOS

    iotest = testpath/"IOTest.agda"
    iotest.write <<~EOS
      module IOTest where

      open import Agda.Builtin.IO
      open import Agda.Builtin.Unit

      postulate
        return : ∀ {A : Set} → A → IO A

      {-# COMPILE GHC return = \\_ -> return #-}

      main : _
      main = return tt
    EOS

    stdlibiotest = testpath/"StdlibIOTest.agda"
    stdlibiotest.write <<~EOS
      module StdlibIOTest where

      open import IO

      main : _
      main = run (putStr "Hello, world!")
    EOS

    # typecheck a simple module
    system bin/"agda", simpletest

    # typecheck a module that uses the standard library
    system bin/"agda", "-i", lib/"agda"/"src", stdlibtest

    # compile a simple module using the JS backend
    system bin/"agda", "--js", simpletest

    # test the GHC backend
    cabal_sandbox do
      cabal_install "text", "ieee754"
      dbpath = Dir["#{testpath}/.cabal-sandbox/*-packages.conf.d"].first
      dbopt = "--ghc-flag=-package-db=#{dbpath}"

      # compile and run a simple program
      system bin/"agda", "-c", dbopt, iotest
      assert_equal "", shell_output(testpath/"IOTest")

      # compile and run a program that uses the standard library
      system bin/"agda", "-c", "-i", lib/"agda"/"src", dbopt, stdlibiotest
      assert_equal "Hello, world!", shell_output(testpath/"StdlibIOTest")
    end
  end
end
|
# Homebrew formula for aget, a multithreaded HTTP download accelerator.
class Aget < Formula
  desc "Multithreaded HTTP download accelerator"
  homepage "http://www.enderunix.org/aget/"
  url "http://www.enderunix.org/aget/aget-0.4.1.tar.gz"
  sha256 "d17393c7f44aab38028ae71f14b572ba1839b6e085fb2092b6ebe68bc931df4d"
  head "https://github.com/EnderUNIX/Aget.git"

  bottle do
    cellar :any
    sha256 "50eeae036e0d440673b98a1952992d10d8d7f67fca0ed7424b295606b86d33de" => :yosemite
    sha256 "ec1c185478a302af5644b494dd82cf162947b3f389e1125dcaae25b00b2259c3" => :mavericks
    sha256 "e13906227621a18d8c3ea3bfa3fd164ab82a398f3d556557d786c7a7899d36c2" => :mountain_lion
  end

  def install
    # ENV replaces the MacPorts patch that ensured compile on OS X.
    # https://github.com/EnderUNIX/Aget/issues/3
    ENV.append_to_cflags "-D_DARWIN_C_SOURCE"
    # pass Homebrew's compiler and flags through to the upstream Makefile
    system "make", "CC=#{ENV.cc}",
                   "CFLAGS=#{ENV.cflags}",
                   "LDFLAGS=#{ENV.ldflags}"
    bin.install "aget"
    man1.install "aget.1"
  end
end
aget: update 0.4.1 bottle.
# Homebrew formula for aget, a multithreaded HTTP download accelerator
# (bottle update adding El Capitan).
class Aget < Formula
  desc "Multithreaded HTTP download accelerator"
  homepage "http://www.enderunix.org/aget/"
  url "http://www.enderunix.org/aget/aget-0.4.1.tar.gz"
  sha256 "d17393c7f44aab38028ae71f14b572ba1839b6e085fb2092b6ebe68bc931df4d"
  head "https://github.com/EnderUNIX/Aget.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "9e4c47e799f36de6502a569d0ed75ee28070986e701ff814b353988a57c9fa1e" => :el_capitan
    sha256 "50eeae036e0d440673b98a1952992d10d8d7f67fca0ed7424b295606b86d33de" => :yosemite
    sha256 "ec1c185478a302af5644b494dd82cf162947b3f389e1125dcaae25b00b2259c3" => :mavericks
    sha256 "e13906227621a18d8c3ea3bfa3fd164ab82a398f3d556557d786c7a7899d36c2" => :mountain_lion
  end

  def install
    # ENV replaces the MacPorts patch that ensured compile on OS X.
    # https://github.com/EnderUNIX/Aget/issues/3
    ENV.append_to_cflags "-D_DARWIN_C_SOURCE"
    # pass Homebrew's compiler and flags through to the upstream Makefile
    system "make", "CC=#{ENV.cc}",
                   "CFLAGS=#{ENV.cflags}",
                   "LDFLAGS=#{ENV.ldflags}"
    bin.install "aget"
    man1.install "aget.1"
  end
end
|
# Homebrew formula for the Argo Workflows CLI (v2.12.9), built from source.
class Argo < Formula
  desc "Get stuff done with container-native workflows for Kubernetes"
  homepage "https://argoproj.io"
  url "https://github.com/argoproj/argo.git",
      tag: "v2.12.9",
      revision: "737905345d70ba1ebd566ce1230e4f971993dfd0"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, big_sur: "373e16ebb731b43932863afc2115019f8c44f26e86698cc2aed6ecdb273ad669"
    sha256 cellar: :any_skip_relocation, catalina: "1faf7ac893b790e60262b7664fa87a68a9187fab3a24580529816a03f84e4413"
    sha256 cellar: :any_skip_relocation, mojave: "bcc2f077aeaa397afee18593fcc672d70c477990698023a621ee3026a22183fc"
  end

  depends_on "go" => :build
  depends_on "node@14" => :build
  depends_on "yarn" => :build

  def install
    # this needs to be removed to prevent multiple 'operation not permitted' errors
    inreplace "Makefile", "CGO_ENABLED=0", ""
    system "make", "dist/argo"
    bin.install "dist/argo"

    # install shell completions generated by the argo binary itself
    output = Utils.safe_popen_read("#{bin}/argo", "completion", "bash")
    (bash_completion/"argo").write output

    output = Utils.safe_popen_read("#{bin}/argo", "completion", "zsh")
    (zsh_completion/"_argo").write output
  end

  test do
    assert_match "argo:",
      shell_output("#{bin}/argo version")

    # argo consumes the Kubernetes configuration with the `--kubeconfig` flag
    # Since it is an empty file we expect it to be invalid
    touch testpath/"kubeconfig"
    assert_match "invalid configuration",
      shell_output("#{bin}/argo lint --kubeconfig ./kubeconfig 2>&1", 1)
  end
end
argo 2.12.10
Closes #72737.
Signed-off-by: Carlo Cabrera <3ffc397d0e4bded29cb84b56167de54c01e3a55b@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for the Argo Workflows CLI (v2.12.10), built from source.
class Argo < Formula
  desc "Get stuff done with container-native workflows for Kubernetes"
  homepage "https://argoproj.io"
  url "https://github.com/argoproj/argo.git",
      tag: "v2.12.10",
      revision: "f1e0c6174b48af69d6e8ecd235a2d709f44f8095"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, big_sur: "373e16ebb731b43932863afc2115019f8c44f26e86698cc2aed6ecdb273ad669"
    sha256 cellar: :any_skip_relocation, catalina: "1faf7ac893b790e60262b7664fa87a68a9187fab3a24580529816a03f84e4413"
    sha256 cellar: :any_skip_relocation, mojave: "bcc2f077aeaa397afee18593fcc672d70c477990698023a621ee3026a22183fc"
  end

  depends_on "go" => :build
  depends_on "node@14" => :build
  depends_on "yarn" => :build

  def install
    # this needs to be removed to prevent multiple 'operation not permitted' errors
    inreplace "Makefile", "CGO_ENABLED=0", ""
    system "make", "dist/argo"
    bin.install "dist/argo"

    # install shell completions generated by the argo binary itself
    output = Utils.safe_popen_read("#{bin}/argo", "completion", "bash")
    (bash_completion/"argo").write output

    output = Utils.safe_popen_read("#{bin}/argo", "completion", "zsh")
    (zsh_completion/"_argo").write output
  end

  test do
    assert_match "argo:",
      shell_output("#{bin}/argo version")

    # argo consumes the Kubernetes configuration with the `--kubeconfig` flag
    # Since it is an empty file we expect it to be invalid
    touch testpath/"kubeconfig"
    assert_match "invalid configuration",
      shell_output("#{bin}/argo lint --kubeconfig ./kubeconfig 2>&1", 1)
  end
end
|
# Homebrew formula for the Argo Workflows CLI (v3.1.8), built from source.
class Argo < Formula
  desc "Get stuff done with container-native workflows for Kubernetes"
  homepage "https://argoproj.io"
  url "https://github.com/argoproj/argo-workflows.git",
      tag: "v3.1.8",
      revision: "0df0f3a98fac4e2aa5bc02213fb0a2ccce9a682a"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "1f8b04699e665a8252fd500761064b36bb1e2fbbf73160b388ba3256c029aed5"
    sha256 cellar: :any_skip_relocation, big_sur: "c9346a5334135ac40dafef1661ce1164b1f2dbf176c45c21241af79825c2c607"
    sha256 cellar: :any_skip_relocation, catalina: "b67cda110eb285caa8c1474614234c80c55c6eb5c57c20da8000da099a36cb68"
    sha256 cellar: :any_skip_relocation, mojave: "b02666cee25d2195ee17c17d20ea5e15d22eed985018569c3f584cc7e52390d8"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e68dcff03fee6597f6f1dec9c103f3cc90c404ed74667d05ff38021c24c69c4e"
  end

  depends_on "go" => :build
  depends_on "node@14" => :build
  depends_on "yarn" => :build

  def install
    # this needs to be removed to prevent multiple 'operation not permitted' errors
    inreplace "Makefile", "CGO_ENABLED=0", ""
    system "make", "dist/argo"
    bin.install "dist/argo"

    # install shell completions generated by the argo binary itself
    output = Utils.safe_popen_read("#{bin}/argo", "completion", "bash")
    (bash_completion/"argo").write output

    output = Utils.safe_popen_read("#{bin}/argo", "completion", "zsh")
    (zsh_completion/"_argo").write output
  end

  test do
    assert_match "argo:",
      shell_output("#{bin}/argo version")

    # argo consumes the Kubernetes configuration with the `--kubeconfig` flag
    # Since it is an empty file we expect it to be invalid
    # NOTE(review): "./kubeconfig" appears both as the --kubeconfig value and
    # as the positional file to lint -- confirm the duplication is intended.
    touch testpath/"kubeconfig"
    assert_match "invalid configuration",
      shell_output("#{bin}/argo lint --kubeconfig ./kubeconfig ./kubeconfig 2>&1", 1)
  end
end
argo 3.1.9
Closes #84632.
Signed-off-by: Branch Vincent <0e6296586cbd330121a33cee359d4396296e2ead@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for the Argo Workflows CLI (v3.1.9), built from source.
class Argo < Formula
  desc "Get stuff done with container-native workflows for Kubernetes"
  homepage "https://argoproj.io"
  url "https://github.com/argoproj/argo-workflows.git",
      tag: "v3.1.9",
      revision: "e4f6bcb02f10bea5c76f2f91ff223b8a380b4557"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "1f8b04699e665a8252fd500761064b36bb1e2fbbf73160b388ba3256c029aed5"
    sha256 cellar: :any_skip_relocation, big_sur: "c9346a5334135ac40dafef1661ce1164b1f2dbf176c45c21241af79825c2c607"
    sha256 cellar: :any_skip_relocation, catalina: "b67cda110eb285caa8c1474614234c80c55c6eb5c57c20da8000da099a36cb68"
    sha256 cellar: :any_skip_relocation, mojave: "b02666cee25d2195ee17c17d20ea5e15d22eed985018569c3f584cc7e52390d8"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e68dcff03fee6597f6f1dec9c103f3cc90c404ed74667d05ff38021c24c69c4e"
  end

  depends_on "go" => :build
  depends_on "node@14" => :build
  depends_on "yarn" => :build

  def install
    # this needs to be removed to prevent multiple 'operation not permitted' errors
    inreplace "Makefile", "CGO_ENABLED=0", ""
    system "make", "dist/argo"
    bin.install "dist/argo"

    # install shell completions generated by the argo binary itself
    output = Utils.safe_popen_read("#{bin}/argo", "completion", "bash")
    (bash_completion/"argo").write output

    output = Utils.safe_popen_read("#{bin}/argo", "completion", "zsh")
    (zsh_completion/"_argo").write output
  end

  test do
    assert_match "argo:",
      shell_output("#{bin}/argo version")

    # argo consumes the Kubernetes configuration with the `--kubeconfig` flag
    # Since it is an empty file we expect it to be invalid
    # NOTE(review): "./kubeconfig" appears both as the --kubeconfig value and
    # as the positional file to lint -- confirm the duplication is intended.
    touch testpath/"kubeconfig"
    assert_match "invalid configuration",
      shell_output("#{bin}/argo lint --kubeconfig ./kubeconfig ./kubeconfig 2>&1", 1)
  end
end
|
# Homebrew formula for ARSS, the Analysis & Resynthesis Sound Spectrograph.
# NOTE(review): this revision has no `test do` block; a later revision adds
# one.
class Arss < Formula
  desc "Analyze a sound file into a spectrogram"
  homepage "https://arss.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/arss/arss/0.2.3/arss-0.2.3-src.tar.gz"
  sha256 "e2faca8b8a3902226353c4053cd9ab71595eec6ead657b5b44c14b4bef52b2b2"

  bottle do
    cellar :any
    sha256 "b848efa3abde7c5fffd18289c1ab51a842cd93e0e97d6af32329acf869909d38" => :high_sierra
    sha256 "2311c31ae2e80905dfc41c8adb9639314664103352540b198f24c54e0c102550" => :sierra
    sha256 "5da45934b19d0cab02c809932fb8c5da3fd76d2f781bc9e2e7a98fa1825989eb" => :el_capitan
    sha256 "268225389842f4952424b17c7b94759b7a3d3009053b50718f1e4155b7eace86" => :yosemite
    sha256 "7159b6b56ad3878bc84b9fdf9d708f0828637db64ae12ef96f39820c2f22d061" => :mavericks
  end

  depends_on "cmake" => :build
  depends_on "fftw"

  def install
    # the CMake project lives under src/, not the tarball root
    cd "src" do
      system "cmake", ".", *std_cmake_args
      system "make", "install"
    end
  end
end
arss: add test
# Homebrew formula for ARSS, the Analysis & Resynthesis Sound Spectrograph
# (revision adding a smoke test).
class Arss < Formula
  desc "Analyze a sound file into a spectrogram"
  homepage "https://arss.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/arss/arss/0.2.3/arss-0.2.3-src.tar.gz"
  sha256 "e2faca8b8a3902226353c4053cd9ab71595eec6ead657b5b44c14b4bef52b2b2"

  bottle do
    cellar :any
    sha256 "b848efa3abde7c5fffd18289c1ab51a842cd93e0e97d6af32329acf869909d38" => :high_sierra
    sha256 "2311c31ae2e80905dfc41c8adb9639314664103352540b198f24c54e0c102550" => :sierra
    sha256 "5da45934b19d0cab02c809932fb8c5da3fd76d2f781bc9e2e7a98fa1825989eb" => :el_capitan
    sha256 "268225389842f4952424b17c7b94759b7a3d3009053b50718f1e4155b7eace86" => :yosemite
    sha256 "7159b6b56ad3878bc84b9fdf9d708f0828637db64ae12ef96f39820c2f22d061" => :mavericks
  end

  depends_on "cmake" => :build
  depends_on "fftw"

  def install
    # the CMake project lives under src/, not the tarball root
    cd "src" do
      system "cmake", ".", *std_cmake_args
      system "make", "install"
    end
  end

  test do
    # smoke test: the binary runs and reports its version
    system "#{bin}/arss", "--version"
  end
end
|
asm6 1.6
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# Homebrew formula for the ASM6 6502/NES assembler.
# NOTE(review): this is a legacy-DSL formula (`require 'formula'`, `md5`
# checksum, no desc); modern Homebrew expects sha256 -- left as-is for this
# historical revision.
class Asm6 < Formula
  url 'http://home.comcast.net/~olimar/NES/asm6.zip'
  homepage 'http://home.comcast.net/~olimar/NES/'
  md5 '224943d091179a700cccbda5a047b6ef'
  version '1.6'

  def install
    # single-file C source; compile directly with the selected compiler
    system "#{ENV.cc} -o asm6 asm6.c"
    bin.install "asm6"
  end
end
|
# Homebrew formula for axel, a light UNIX download accelerator.
class Axel < Formula
  desc "Light UNIX download accelerator"
  homepage "https://github.com/eribertomota/axel"
  url "https://github.com/axel-download-accelerator/axel/releases/download/v2.17.7/axel-2.17.7.tar.gz"
  sha256 "b05e828fac19acb3bddb7d5e5af69617f29f34aea78dd2045cf30edc834cb4d5"
  head "https://github.com/eribertomota/axel.git"

  bottle do
    cellar :any
    sha256 "acf292cb3e442dba912aa49f6ca641c2863099f649fc52293ea288a6f8dd8f89" => :catalina
    sha256 "07593f73d611d773ee8155c1f26a165ba06f2a716d15a243a4add77983cf3253" => :mojave
    sha256 "c6564d7d46b9201beae836dc36fa6d3b465b51d4c1f7283d498c22145472d6bb" => :high_sierra
  end

  depends_on "autoconf" => :build
  depends_on "autoconf-archive" => :build
  depends_on "automake" => :build
  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "openssl@1.1"

  def install
    # Fixes the macOS build by ensuring some _POSIX_C_SOURCE
    # features are available:
    # https://github.com/axel-download-accelerator/axel/pull/196
    ENV.append_to_cflags "-D_DARWIN_C_SOURCE"
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--sysconfdir=#{etc}"
    system "make", "install"
  end

  test do
    # download the formula's own tarball and verify its checksum
    filename = (testpath/"axel.tar.gz")
    system bin/"axel", "-o", "axel.tar.gz", stable.url
    filename.verify_checksum stable.checksum
    assert_predicate testpath/"axel.tar.gz", :exist?
  end
end
axel: update 2.17.7 bottle.
# Homebrew formula for axel, a light UNIX download accelerator
# (bottle update revision).
class Axel < Formula
  desc "Light UNIX download accelerator"
  homepage "https://github.com/eribertomota/axel"
  url "https://github.com/axel-download-accelerator/axel/releases/download/v2.17.7/axel-2.17.7.tar.gz"
  sha256 "b05e828fac19acb3bddb7d5e5af69617f29f34aea78dd2045cf30edc834cb4d5"
  head "https://github.com/eribertomota/axel.git"

  bottle do
    cellar :any
    sha256 "f21813f4113f1c50d6c9c241210435f5db49e63136ada0beb4e6b13b71922b7e" => :catalina
    sha256 "ea45d5812240f3969a9ae75adedd25a37a133e38002d33a655ca1c46118f5153" => :mojave
    sha256 "8cdedff82eae38b9e34842d3546b48b5ad4729fa949bcbcdc6e194afa78fbcfe" => :high_sierra
  end

  depends_on "autoconf" => :build
  depends_on "autoconf-archive" => :build
  depends_on "automake" => :build
  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "openssl@1.1"

  def install
    # Fixes the macOS build by ensuring some _POSIX_C_SOURCE
    # features are available:
    # https://github.com/axel-download-accelerator/axel/pull/196
    ENV.append_to_cflags "-D_DARWIN_C_SOURCE"
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--sysconfdir=#{etc}"
    system "make", "install"
  end

  test do
    # download the formula's own tarball and verify its checksum
    filename = (testpath/"axel.tar.gz")
    system bin/"axel", "-o", "axel.tar.gz", stable.url
    filename.verify_checksum stable.checksum
    assert_predicate testpath/"axel.tar.gz", :exist?
  end
end
|
# Homebrew formula for Bear (v2.3.12), which records compiler invocations to
# generate a compile_commands.json compilation database for clang tooling.
class Bear < Formula
  desc "Generate compilation database for clang tooling"
  homepage "https://github.com/rizsotto/Bear"
  url "https://github.com/rizsotto/Bear/archive/2.3.12.tar.gz"
  sha256 "25ef998b94138f586cd627c7cb6187259cd9623f3e9f6695623e91f2aec80c86"
  head "https://github.com/rizsotto/Bear.git"

  bottle do
    cellar :any
    sha256 "a569b9ebe198df8e6c2b505e3ad6739b577cc0aaf53c5f47686b9a7df3e6e5ed" => :mojave
    sha256 "5afbef9c21e06bfac32e075ab932d012cf9a7ea54fd4b277f2968d96b59b6e56" => :high_sierra
    sha256 "8164fb4aaa557d17d786f0bcbc516b96a80a3b962215c1343e72f1496564a566" => :sierra
    sha256 "96e86ed2419baca6c3ac3a899c0c75ea5f5afba62b8c08c2fc975fc257ec04e1" => :el_capitan
  end

  depends_on "python@2"
  depends_on "cmake" => :build

  def install
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # running any command under bear should produce a compilation database
    system "#{bin}/bear", "true"
    assert_predicate testpath/"compile_commands.json", :exist?
  end
end
bear 2.3.13
Closes #31671.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
# Homebrew formula for Bear (v2.3.13), which records compiler invocations to
# generate a compile_commands.json compilation database for clang tooling.
class Bear < Formula
  desc "Generate compilation database for clang tooling"
  homepage "https://github.com/rizsotto/Bear"
  url "https://github.com/rizsotto/Bear/archive/2.3.13.tar.gz"
  sha256 "dc14c28bfbe0beef5ec93b4614a00bd419d5a793c8a678ba3b5544bd1dd580b6"
  head "https://github.com/rizsotto/Bear.git"

  bottle do
    cellar :any
    sha256 "a569b9ebe198df8e6c2b505e3ad6739b577cc0aaf53c5f47686b9a7df3e6e5ed" => :mojave
    sha256 "5afbef9c21e06bfac32e075ab932d012cf9a7ea54fd4b277f2968d96b59b6e56" => :high_sierra
    sha256 "8164fb4aaa557d17d786f0bcbc516b96a80a3b962215c1343e72f1496564a566" => :sierra
    sha256 "96e86ed2419baca6c3ac3a899c0c75ea5f5afba62b8c08c2fc975fc257ec04e1" => :el_capitan
  end

  depends_on "cmake" => :build
  depends_on "python@2"

  def install
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # running any command under bear should produce a compilation database
    system "#{bin}/bear", "true"
    assert_predicate testpath/"compile_commands.json", :exist?
  end
end
|
# Homebrew formula for ISC BIND 9.18.8, the reference DNS implementation.
class Bind < Formula
  desc "Implementation of the DNS protocols"
  homepage "https://www.isc.org/bind/"

  # BIND releases with even minor version numbers (9.14.x, 9.16.x, etc) are
  # stable. Odd-numbered minor versions are for testing, and can be unstable
  # or buggy. They are not suitable for general deployment. We have to use
  # "version_scheme" because someone upgraded to 9.15.0, and required a
  # downgrade.
  url "https://downloads.isc.org/isc/bind9/9.18.8/bind-9.18.8.tar.xz"
  sha256 "0e3c3ab9378db84ba0f37073d67ba125ae4f2ff8daf366c9db287e3f1b2c35f0"
  license "MPL-2.0"
  version_scheme 1
  head "https://gitlab.isc.org/isc-projects/bind9.git", branch: "main"

  # BIND indicates stable releases with an even-numbered minor (e.g., x.2.x)
  # and the regex below only matches these versions.
  livecheck do
    url "https://www.isc.org/download/"
    regex(/href=.*?bind[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
  end

  bottle do
    sha256 arm64_ventura: "31c535e832a2f4dbe73df6e003354a5f8b4b0f9d28b0090f54378723d1a6f287"
    sha256 arm64_monterey: "83e591050160d6f7990f4e7866b359d7b1debaf1867c7b5cabdddb3592aefaf9"
    sha256 arm64_big_sur: "7defcc05b6173c1b8248c93557cee07467ec9bbfe3c1630495adac35dbb499f4"
    sha256 ventura: "764ff0b0491d01fb75c372366d3a2fdc9769862f6b6c0fdd1bf98374cbef2e36"
    sha256 monterey: "dc0572b8a3121e858a017f0240a21b4d1640a6adbadaf4f839ff1b5f00a73370"
    sha256 big_sur: "27fe4d293bd6ad85d5ccacd081bb4fba906b974fb67b250bc2bb9c93471e53d6"
    sha256 catalina: "d327649427cec425208790551383a838a0d9444debd26acb8f85694c157d54f0"
    sha256 x86_64_linux: "33b9eb56079e6046654929e0c553ccda3c936d9627ffdab76ac3748695f72951"
  end

  depends_on "pkg-config" => :build
  depends_on "json-c"
  depends_on "libidn2"
  depends_on "libnghttp2"
  depends_on "libuv"
  depends_on "openssl@3"

  def install
    args = [
      "--prefix=#{prefix}",
      "--sysconfdir=#{pkgetc}",
      "--localstatedir=#{var}",
      "--with-json-c",
      "--with-libidn2=#{Formula["libidn2"].opt_prefix}",
      "--with-openssl=#{Formula["openssl@3"].opt_prefix}",
      "--without-lmdb",
    ]
    args << "--disable-linux-caps" if OS.linux?
    system "./configure", *args
    system "make"
    system "make", "install"

    # install a default named.conf plus a generated rndc control key
    (buildpath/"named.conf").write named_conf
    system "#{sbin}/rndc-confgen", "-a", "-c", "#{buildpath}/rndc.key"
    pkgetc.install "named.conf", "rndc.key"
  end

  def post_install
    # named expects its log and working directories to already exist
    (var/"log/named").mkpath
    (var/"named").mkpath
  end

  # Minimal default configuration: log to var and serve from var/named.
  def named_conf
    <<~EOS
      logging {
        category default {
          _default_log;
        };
        channel _default_log {
          file "#{var}/log/named/named.log" versions 10 size 1m;
          severity info;
          print-time yes;
        };
      };

      options {
        directory "#{var}/named";
      };
    EOS
  end

  plist_options startup: true

  service do
    run [opt_sbin/"named", "-f", "-L", var/"log/named/named.log"]
  end

  test do
    system bin/"dig", "-v"
    system bin/"dig", "brew.sh"
    # exercise IDN handling via libidn2
    system bin/"dig", "ü.cl"
  end
end
bind 9.18.9
Closes #116140.
Signed-off-by: Caleb Xu <fd0d85c7cac738649914e8b8b51cf1f9dc80ad9f@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for BIND 9.18.9 — ISC's DNS server (named) and
# client tools (dig, rndc, ...).
class Bind < Formula
  desc "Implementation of the DNS protocols"
  homepage "https://www.isc.org/bind/"
  # BIND releases with even minor version numbers (9.14.x, 9.16.x, etc) are
  # stable. Odd-numbered minor versions are for testing, and can be unstable
  # or buggy. They are not suitable for general deployment. We have to use
  # "version_scheme" because someone upgraded to 9.15.0, and required a
  # downgrade.
  url "https://downloads.isc.org/isc/bind9/9.18.9/bind-9.18.9.tar.xz"
  sha256 "6a9665998d568604460df0918fc8ccfad7d29388d4d842560c056cc211cbb243"
  license "MPL-2.0"
  version_scheme 1
  head "https://gitlab.isc.org/isc-projects/bind9.git", branch: "main"

  # BIND indicates stable releases with an even-numbered minor (e.g., x.2.x)
  # and the regex below only matches these versions.
  livecheck do
    url "https://www.isc.org/download/"
    regex(/href=.*?bind[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
  end

  bottle do
    sha256 arm64_ventura: "31c535e832a2f4dbe73df6e003354a5f8b4b0f9d28b0090f54378723d1a6f287"
    sha256 arm64_monterey: "83e591050160d6f7990f4e7866b359d7b1debaf1867c7b5cabdddb3592aefaf9"
    sha256 arm64_big_sur: "7defcc05b6173c1b8248c93557cee07467ec9bbfe3c1630495adac35dbb499f4"
    sha256 ventura: "764ff0b0491d01fb75c372366d3a2fdc9769862f6b6c0fdd1bf98374cbef2e36"
    sha256 monterey: "dc0572b8a3121e858a017f0240a21b4d1640a6adbadaf4f839ff1b5f00a73370"
    sha256 big_sur: "27fe4d293bd6ad85d5ccacd081bb4fba906b974fb67b250bc2bb9c93471e53d6"
    sha256 catalina: "d327649427cec425208790551383a838a0d9444debd26acb8f85694c157d54f0"
    sha256 x86_64_linux: "33b9eb56079e6046654929e0c553ccda3c936d9627ffdab76ac3748695f72951"
  end

  depends_on "pkg-config" => :build
  depends_on "json-c"
  depends_on "libidn2"
  depends_on "libnghttp2"
  depends_on "libuv"
  depends_on "openssl@3"

  # Build against Homebrew libraries, then install a default named.conf
  # and a generated rndc key into pkgetc.
  def install
    args = [
      "--prefix=#{prefix}",
      "--sysconfdir=#{pkgetc}",
      "--localstatedir=#{var}",
      "--with-json-c",
      "--with-libidn2=#{Formula["libidn2"].opt_prefix}",
      "--with-openssl=#{Formula["openssl@3"].opt_prefix}",
      "--without-lmdb",
    ]
    args << "--disable-linux-caps" if OS.linux?
    system "./configure", *args
    system "make"
    system "make", "install"
    (buildpath/"named.conf").write named_conf
    system "#{sbin}/rndc-confgen", "-a", "-c", "#{buildpath}/rndc.key"
    pkgetc.install "named.conf", "rndc.key"
  end

  # Create the log and zone-data directories under var.
  def post_install
    (var/"log/named").mkpath
    (var/"named").mkpath
  end

  # Default configuration text: rotating file log (10 versions, 1m each)
  # and a working directory of #{var}/named.
  def named_conf
    <<~EOS
      logging {
      category default {
      _default_log;
      };
      channel _default_log {
      file "#{var}/log/named/named.log" versions 10 size 1m;
      severity info;
      print-time yes;
      };
      };
      options {
      directory "#{var}/named";
      };
    EOS
  end

  plist_options startup: true

  # Run named in the foreground, logging to the file configured above.
  service do
    run [opt_sbin/"named", "-f", "-L", var/"log/named/named.log"]
  end

  # Smoke-test dig, including an IDN lookup (ü.cl).
  test do
    system bin/"dig", "-v"
    system bin/"dig", "brew.sh"
    system bin/"dig", "ü.cl"
  end
end
|
# Homebrew formula for BIND 9.16.16 (9.16 series: still ships the Python
# tooling, built against openssl@1.1).
class Bind < Formula
  desc "Implementation of the DNS protocols"
  homepage "https://www.isc.org/bind/"
  # BIND releases with even minor version numbers (9.14.x, 9.16.x, etc) are
  # stable. Odd-numbered minor versions are for testing, and can be unstable
  # or buggy. They are not suitable for general deployment. We have to use
  # "version_scheme" because someone upgraded to 9.15.0, and required a
  # downgrade.
  url "https://downloads.isc.org/isc/bind9/9.16.16/bind-9.16.16.tar.xz"
  sha256 "6c913902adf878e7dc5e229cea94faefc9d40f44775a30213edd08860f761d7b"
  license "MPL-2.0"
  version_scheme 1
  head "https://gitlab.isc.org/isc-projects/bind9.git"

  # BIND indicates stable releases with an even-numbered minor (e.g., x.2.x)
  # and the regex below only matches these versions.
  livecheck do
    url "https://www.isc.org/download/"
    regex(/href=.*?bind[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
  end

  bottle do
    sha256 arm64_big_sur: "1760d6ca4c7828dba00ee1e08d8e5aa29c45e8b6989909e376a6c5addec1cb49"
    sha256 big_sur: "d461b3f29beff84605e9de44c3f28bdc5c3623e532c8123c36120c8ea042cf5b"
    sha256 catalina: "c92e452d281ea1e8007c398f705c403b186ea2d855250282dd0d7dc43586db35"
    sha256 mojave: "574b9afb50b52e8530968ddb03958c156692138943491de472354605c4dd4142"
  end

  depends_on "pkg-config" => :build
  depends_on "json-c"
  depends_on "libidn2"
  depends_on "libuv"
  depends_on "openssl@1.1"
  depends_on "python@3.9"

  # Vendored Python dependency, installed into libexec below.
  resource "ply" do
    url "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz"
    sha256 "00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"
  end

  # Install vendored Python resources into a private site-packages under
  # libexec, then configure/build BIND and ship default config files.
  def install
    xy = Language::Python.major_minor_version Formula["python@3.9"].opt_bin/"python3"
    vendor_site_packages = libexec/"vendor/lib/python#{xy}/site-packages"
    ENV.prepend_create_path "PYTHONPATH", vendor_site_packages
    resources.each do |r|
      r.stage do
        system Formula["python@3.9"].opt_bin/"python3", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end
    # Fix "configure: error: xml2-config returns badness"
    ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version <= :sierra
    args = [
      "--prefix=#{prefix}",
      "--sysconfdir=#{pkgetc}",
      "--with-json-c",
      "--with-openssl=#{Formula["openssl@1.1"].opt_prefix}",
      "--with-libjson=#{Formula["json-c"].opt_prefix}",
      "--with-python-install-dir=#{vendor_site_packages}",
      "--with-python=#{Formula["python@3.9"].opt_bin}/python3",
      "--without-lmdb",
      "--with-libidn2=#{Formula["libidn2"].opt_prefix}",
    ]
    on_linux do
      args << "--disable-linux-caps"
    end
    system "./configure", *args
    system "make"
    system "make", "install"
    (buildpath/"named.conf").write named_conf
    system "#{sbin}/rndc-confgen", "-a", "-c", "#{buildpath}/rndc.key"
    pkgetc.install "named.conf", "rndc.key"
  end

  # Create the log and zone-data directories under var.
  def post_install
    (var/"log/named").mkpath
    (var/"named").mkpath
  end

  # Default configuration text (runtime string — keep byte-exact).
  def named_conf
    <<~EOS
      logging {
      category default {
      _default_log;
      };
      channel _default_log {
      file "#{var}/log/named/named.log" versions 10 size 1m;
      severity info;
      print-time yes;
      };
      };
      options {
      directory "#{var}/named";
      };
    EOS
  end

  plist_options startup: true

  # launchd plist: run named in the foreground, logging to a file.
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
      <key>EnableTransactions</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>RunAtLoad</key>
      <true/>
      <key>ProgramArguments</key>
      <array>
      <string>#{opt_sbin}/named</string>
      <string>-f</string>
      <string>-L</string>
      <string>#{var}/log/named/named.log</string>
      </array>
      <key>ServiceIPC</key>
      <false/>
      </dict>
      </plist>
    EOS
  end

  # Smoke-test dig, including an IDN lookup (ü.cl).
  test do
    system bin/"dig", "-v"
    system bin/"dig", "brew.sh"
    system bin/"dig", "ü.cl"
  end
end
bind 9.16.18
Closes #79570.
Signed-off-by: Carlo Cabrera <3ffc397d0e4bded29cb84b56167de54c01e3a55b@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for BIND 9.16.18 (bottle checksums not yet rebuilt
# for this upload — they still match the previous revision).
class Bind < Formula
  desc "Implementation of the DNS protocols"
  homepage "https://www.isc.org/bind/"
  # BIND releases with even minor version numbers (9.14.x, 9.16.x, etc) are
  # stable. Odd-numbered minor versions are for testing, and can be unstable
  # or buggy. They are not suitable for general deployment. We have to use
  # "version_scheme" because someone upgraded to 9.15.0, and required a
  # downgrade.
  url "https://downloads.isc.org/isc/bind9/9.16.18/bind-9.16.18.tar.xz"
  sha256 "3c6263a4364eb5dce233f9f22b90acfa1ec2488d534f91d21663d0ac25ce5e65"
  license "MPL-2.0"
  version_scheme 1
  head "https://gitlab.isc.org/isc-projects/bind9.git"

  # BIND indicates stable releases with an even-numbered minor (e.g., x.2.x)
  # and the regex below only matches these versions.
  livecheck do
    url "https://www.isc.org/download/"
    regex(/href=.*?bind[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
  end

  bottle do
    sha256 arm64_big_sur: "1760d6ca4c7828dba00ee1e08d8e5aa29c45e8b6989909e376a6c5addec1cb49"
    sha256 big_sur: "d461b3f29beff84605e9de44c3f28bdc5c3623e532c8123c36120c8ea042cf5b"
    sha256 catalina: "c92e452d281ea1e8007c398f705c403b186ea2d855250282dd0d7dc43586db35"
    sha256 mojave: "574b9afb50b52e8530968ddb03958c156692138943491de472354605c4dd4142"
  end

  depends_on "pkg-config" => :build
  depends_on "json-c"
  depends_on "libidn2"
  depends_on "libuv"
  depends_on "openssl@1.1"
  depends_on "python@3.9"

  # Vendored Python dependency, installed into libexec below.
  resource "ply" do
    url "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz"
    sha256 "00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"
  end

  # Install vendored Python resources into a private site-packages under
  # libexec, then configure/build BIND and ship default config files.
  def install
    xy = Language::Python.major_minor_version Formula["python@3.9"].opt_bin/"python3"
    vendor_site_packages = libexec/"vendor/lib/python#{xy}/site-packages"
    ENV.prepend_create_path "PYTHONPATH", vendor_site_packages
    resources.each do |r|
      r.stage do
        system Formula["python@3.9"].opt_bin/"python3", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end
    # Fix "configure: error: xml2-config returns badness"
    ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version <= :sierra
    args = [
      "--prefix=#{prefix}",
      "--sysconfdir=#{pkgetc}",
      "--with-json-c",
      "--with-openssl=#{Formula["openssl@1.1"].opt_prefix}",
      "--with-libjson=#{Formula["json-c"].opt_prefix}",
      "--with-python-install-dir=#{vendor_site_packages}",
      "--with-python=#{Formula["python@3.9"].opt_bin}/python3",
      "--without-lmdb",
      "--with-libidn2=#{Formula["libidn2"].opt_prefix}",
    ]
    on_linux do
      args << "--disable-linux-caps"
    end
    system "./configure", *args
    system "make"
    system "make", "install"
    (buildpath/"named.conf").write named_conf
    system "#{sbin}/rndc-confgen", "-a", "-c", "#{buildpath}/rndc.key"
    pkgetc.install "named.conf", "rndc.key"
  end

  # Create the log and zone-data directories under var.
  def post_install
    (var/"log/named").mkpath
    (var/"named").mkpath
  end

  # Default configuration text (runtime string — keep byte-exact).
  def named_conf
    <<~EOS
      logging {
      category default {
      _default_log;
      };
      channel _default_log {
      file "#{var}/log/named/named.log" versions 10 size 1m;
      severity info;
      print-time yes;
      };
      };
      options {
      directory "#{var}/named";
      };
    EOS
  end

  plist_options startup: true

  # launchd plist: run named in the foreground, logging to a file.
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
      <key>EnableTransactions</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>RunAtLoad</key>
      <true/>
      <key>ProgramArguments</key>
      <array>
      <string>#{opt_sbin}/named</string>
      <string>-f</string>
      <string>-L</string>
      <string>#{var}/log/named/named.log</string>
      </array>
      <key>ServiceIPC</key>
      <false/>
      </dict>
      </plist>
    EOS
  end

  # Smoke-test dig, including an IDN lookup (ü.cl).
  test do
    system bin/"dig", "-v"
    system bin/"dig", "brew.sh"
    system bin/"dig", "ü.cl"
  end
end
|
# Homebrew formula for BIND 9.16.11 — older revision that installs config
# into etc directly and seeds caching-only zone files (mirroring the old
# Apple system install).
class Bind < Formula
  desc "Implementation of the DNS protocols"
  homepage "https://www.isc.org/downloads/bind/"
  # BIND releases with even minor version numbers (9.14.x, 9.16.x, etc) are
  # stable. Odd-numbered minor versions are for testing, and can be unstable
  # or buggy. They are not suitable for general deployment. We have to use
  # "version_scheme" because someone upgraded to 9.15.0, and required a
  # downgrade.
  url "https://downloads.isc.org/isc/bind9/9.16.11/bind-9.16.11.tar.xz"
  sha256 "0111f64dd7d8f515cfa129e181cce96ff82070d1b27f11a21f6856110d0699c1"
  license "MPL-2.0"
  version_scheme 1
  head "https://gitlab.isc.org/isc-projects/bind9.git"

  # BIND indicates stable releases with an even-numbered minor (e.g., x.2.x)
  # and the regex below only matches these versions.
  livecheck do
    url "https://www.isc.org/downloads/"
    regex(/href=.*?bind[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
  end

  bottle do
    sha256 arm64_big_sur: "6c4b3a416ce53c1b458d39072c90ef8d2aa779faefbc1301dba8899d442b2037"
    sha256 big_sur: "3d8446c1e6ed36ce2865befd33976f82a4afbef7ab78618a22e05acf8a75ec93"
    sha256 catalina: "2761e4f6c37938561718b170b556b0c3fb275cbdc7a9609efe192ff8eb81809f"
    sha256 mojave: "8dc492d9884999ea04a43c784d160b7afc71a3bd5071fa75370c8cab59265f5a"
  end

  depends_on "pkg-config" => :build
  depends_on "json-c"
  depends_on "libidn2"
  depends_on "libuv"
  depends_on "openssl@1.1"
  depends_on "python@3.9"

  # Vendored Python dependency, installed into libexec below.
  resource "ply" do
    url "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz"
    sha256 "00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"
  end

  # Install vendored Python resources, configure/build BIND, then place
  # named.conf and the rndc key into etc.
  def install
    xy = Language::Python.major_minor_version Formula["python@3.9"].opt_bin/"python3"
    vendor_site_packages = libexec/"vendor/lib/python#{xy}/site-packages"
    ENV.prepend_create_path "PYTHONPATH", vendor_site_packages
    resources.each do |r|
      r.stage do
        system Formula["python@3.9"].opt_bin/"python3", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end
    # Fix "configure: error: xml2-config returns badness"
    ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version <= :sierra
    args = [
      "--prefix=#{prefix}",
      "--with-json-c",
      "--with-openssl=#{Formula["openssl@1.1"].opt_prefix}",
      "--with-libjson=#{Formula["json-c"].opt_prefix}",
      "--with-python-install-dir=#{vendor_site_packages}",
      "--with-python=#{Formula["python@3.9"].opt_bin}/python3",
      "--without-lmdb",
      "--with-libidn2=#{Formula["libidn2"].opt_prefix}",
    ]
    on_linux do
      args << "--disable-linux-caps"
    end
    system "./configure", *args
    system "make"
    system "make", "install"
    (buildpath/"named.conf").write named_conf
    system "#{sbin}/rndc-confgen", "-a", "-c", "#{buildpath}/rndc.key"
    etc.install "named.conf", "rndc.key"
  end

  def post_install
    (var/"log/named").mkpath
    # Create initial configuration/zone/ca files.
    # (Mirrors Apple system install from 10.8)
    unless (var/"named").exist?
      (var/"named").mkpath
      (var/"named/localhost.zone").write localhost_zone
      (var/"named/named.local").write named_local
    end
  end

  # Caching-only nameserver configuration (runtime string — keep byte-exact).
  def named_conf
    <<~EOS
      //
      // Include keys file
      //
      include "#{etc}/rndc.key";
      // Declares control channels to be used by the rndc utility.
      //
      // It is recommended that 127.0.0.1 be the only address used.
      // This also allows non-privileged users on the local host to manage
      // your name server.
      //
      // Default controls
      //
      controls {
      inet 127.0.0.1 port 54 allow { any; }
      keys { "rndc-key"; };
      };
      options {
      directory "#{var}/named";
      /*
      * If there is a firewall between you and nameservers you want
      * to talk to, you might need to uncomment the query-source
      * directive below. Previous versions of BIND always asked
      * questions using port 53, but BIND 8.1 uses an unprivileged
      * port by default.
      */
      // query-source address * port 53;
      };
      //
      // a caching only nameserver config
      //
      zone "localhost" IN {
      type master;
      file "localhost.zone";
      allow-update { none; };
      };
      zone "0.0.127.in-addr.arpa" IN {
      type master;
      file "named.local";
      allow-update { none; };
      };
      logging {
      category default {
      _default_log;
      };
      channel _default_log {
      file "#{var}/log/named/named.log";
      severity info;
      print-time yes;
      };
      };
    EOS
  end

  # Zone file for "localhost." (runtime string — keep byte-exact).
  def localhost_zone
    <<~EOS
      $TTL 86400
      $ORIGIN localhost.
      @ 1D IN SOA @ root (
      42 ; serial (d. adams)
      3H ; refresh
      15M ; retry
      1W ; expiry
      1D ) ; minimum
      1D IN NS @
      1D IN A 127.0.0.1
    EOS
  end

  # Reverse zone file for 127.0.0.x (runtime string — keep byte-exact).
  def named_local
    <<~EOS
      $TTL 86400
      @ IN SOA localhost. root.localhost. (
      1997022700 ; Serial
      28800 ; Refresh
      14400 ; Retry
      3600000 ; Expire
      86400 ) ; Minimum
      IN NS localhost.
      1 IN PTR localhost.
    EOS
  end

  plist_options startup: true

  # launchd plist: run named in the foreground with the etc config file.
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
      <key>EnableTransactions</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>RunAtLoad</key>
      <true/>
      <key>ProgramArguments</key>
      <array>
      <string>#{opt_sbin}/named</string>
      <string>-f</string>
      <string>-c</string>
      <string>#{etc}/named.conf</string>
      </array>
      <key>ServiceIPC</key>
      <false/>
      </dict>
      </plist>
    EOS
  end

  # Smoke-test dig, including an IDN lookup (ü.cl).
  test do
    system bin/"dig", "-v"
    system bin/"dig", "brew.sh"
    system bin/"dig", "ü.cl"
  end
end
bind 9.16.12
Closes #71406.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for BIND 9.16.12 — same structure as 9.16.11 with the
# source url/sha256 bumped; bottle checksums still match the previous build.
class Bind < Formula
  desc "Implementation of the DNS protocols"
  homepage "https://www.isc.org/downloads/bind/"
  # BIND releases with even minor version numbers (9.14.x, 9.16.x, etc) are
  # stable. Odd-numbered minor versions are for testing, and can be unstable
  # or buggy. They are not suitable for general deployment. We have to use
  # "version_scheme" because someone upgraded to 9.15.0, and required a
  # downgrade.
  url "https://downloads.isc.org/isc/bind9/9.16.12/bind-9.16.12.tar.xz"
  sha256 "9914af9311fd349cab441097898d94fb28d0bfd9bf6ed04fe1f97f042644da7f"
  license "MPL-2.0"
  version_scheme 1
  head "https://gitlab.isc.org/isc-projects/bind9.git"

  # BIND indicates stable releases with an even-numbered minor (e.g., x.2.x)
  # and the regex below only matches these versions.
  livecheck do
    url "https://www.isc.org/downloads/"
    regex(/href=.*?bind[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
  end

  bottle do
    sha256 arm64_big_sur: "6c4b3a416ce53c1b458d39072c90ef8d2aa779faefbc1301dba8899d442b2037"
    sha256 big_sur: "3d8446c1e6ed36ce2865befd33976f82a4afbef7ab78618a22e05acf8a75ec93"
    sha256 catalina: "2761e4f6c37938561718b170b556b0c3fb275cbdc7a9609efe192ff8eb81809f"
    sha256 mojave: "8dc492d9884999ea04a43c784d160b7afc71a3bd5071fa75370c8cab59265f5a"
  end

  depends_on "pkg-config" => :build
  depends_on "json-c"
  depends_on "libidn2"
  depends_on "libuv"
  depends_on "openssl@1.1"
  depends_on "python@3.9"

  # Vendored Python dependency, installed into libexec below.
  resource "ply" do
    url "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz"
    sha256 "00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"
  end

  # Install vendored Python resources, configure/build BIND, then place
  # named.conf and the rndc key into etc.
  def install
    xy = Language::Python.major_minor_version Formula["python@3.9"].opt_bin/"python3"
    vendor_site_packages = libexec/"vendor/lib/python#{xy}/site-packages"
    ENV.prepend_create_path "PYTHONPATH", vendor_site_packages
    resources.each do |r|
      r.stage do
        system Formula["python@3.9"].opt_bin/"python3", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end
    # Fix "configure: error: xml2-config returns badness"
    ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version <= :sierra
    args = [
      "--prefix=#{prefix}",
      "--with-json-c",
      "--with-openssl=#{Formula["openssl@1.1"].opt_prefix}",
      "--with-libjson=#{Formula["json-c"].opt_prefix}",
      "--with-python-install-dir=#{vendor_site_packages}",
      "--with-python=#{Formula["python@3.9"].opt_bin}/python3",
      "--without-lmdb",
      "--with-libidn2=#{Formula["libidn2"].opt_prefix}",
    ]
    on_linux do
      args << "--disable-linux-caps"
    end
    system "./configure", *args
    system "make"
    system "make", "install"
    (buildpath/"named.conf").write named_conf
    system "#{sbin}/rndc-confgen", "-a", "-c", "#{buildpath}/rndc.key"
    etc.install "named.conf", "rndc.key"
  end

  def post_install
    (var/"log/named").mkpath
    # Create initial configuration/zone/ca files.
    # (Mirrors Apple system install from 10.8)
    unless (var/"named").exist?
      (var/"named").mkpath
      (var/"named/localhost.zone").write localhost_zone
      (var/"named/named.local").write named_local
    end
  end

  # Caching-only nameserver configuration (runtime string — keep byte-exact).
  def named_conf
    <<~EOS
      //
      // Include keys file
      //
      include "#{etc}/rndc.key";
      // Declares control channels to be used by the rndc utility.
      //
      // It is recommended that 127.0.0.1 be the only address used.
      // This also allows non-privileged users on the local host to manage
      // your name server.
      //
      // Default controls
      //
      controls {
      inet 127.0.0.1 port 54 allow { any; }
      keys { "rndc-key"; };
      };
      options {
      directory "#{var}/named";
      /*
      * If there is a firewall between you and nameservers you want
      * to talk to, you might need to uncomment the query-source
      * directive below. Previous versions of BIND always asked
      * questions using port 53, but BIND 8.1 uses an unprivileged
      * port by default.
      */
      // query-source address * port 53;
      };
      //
      // a caching only nameserver config
      //
      zone "localhost" IN {
      type master;
      file "localhost.zone";
      allow-update { none; };
      };
      zone "0.0.127.in-addr.arpa" IN {
      type master;
      file "named.local";
      allow-update { none; };
      };
      logging {
      category default {
      _default_log;
      };
      channel _default_log {
      file "#{var}/log/named/named.log";
      severity info;
      print-time yes;
      };
      };
    EOS
  end

  # Zone file for "localhost." (runtime string — keep byte-exact).
  def localhost_zone
    <<~EOS
      $TTL 86400
      $ORIGIN localhost.
      @ 1D IN SOA @ root (
      42 ; serial (d. adams)
      3H ; refresh
      15M ; retry
      1W ; expiry
      1D ) ; minimum
      1D IN NS @
      1D IN A 127.0.0.1
    EOS
  end

  # Reverse zone file for 127.0.0.x (runtime string — keep byte-exact).
  def named_local
    <<~EOS
      $TTL 86400
      @ IN SOA localhost. root.localhost. (
      1997022700 ; Serial
      28800 ; Refresh
      14400 ; Retry
      3600000 ; Expire
      86400 ) ; Minimum
      IN NS localhost.
      1 IN PTR localhost.
    EOS
  end

  plist_options startup: true

  # launchd plist: run named in the foreground with the etc config file.
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
      <key>EnableTransactions</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>RunAtLoad</key>
      <true/>
      <key>ProgramArguments</key>
      <array>
      <string>#{opt_sbin}/named</string>
      <string>-f</string>
      <string>-c</string>
      <string>#{etc}/named.conf</string>
      </array>
      <key>ServiceIPC</key>
      <false/>
      </dict>
      </plist>
    EOS
  end

  # Smoke-test dig, including an IDN lookup (ü.cl).
  test do
    system bin/"dig", "-v"
    system bin/"dig", "brew.sh"
    system bin/"dig", "ü.cl"
  end
end
|
# Homebrew formula for BNFC, which generates lexers/parsers for several
# backends; the test block exercises the C, C++, Haskell, and Java ones.
class Bnfc < Formula
  desc "BNF Converter"
  homepage "https://bnfc.digitalgrammars.com/"
  url "https://github.com/BNFC/bnfc/archive/v2.9.1.tar.gz"
  sha256 "d79125168636fdcf0acd64a9b83ea620c311b16dcada0848d8a577aa8aeddec4"
  license "BSD-3-Clause"
  head "https://github.com/BNFC/bnfc.git"

  bottle do
    sha256 cellar: :any_skip_relocation, big_sur: "173f30b8a4c701eefc9e6b5c702f55237e0104a2b33120004aa1661c276483a2"
    sha256 cellar: :any_skip_relocation, catalina: "6733cd26b5cdb5c8bc5ed05e1ef648eab9e56d8e484e680166894ca195dd9390"
    sha256 cellar: :any_skip_relocation, mojave: "df909d3e8bfa179f607fd208beaff741e9ff02e3dc5f75e3890c5707685e4424"
  end

  depends_on "cabal-install" => [:build, :test]
  depends_on "ghc" => [:build, :test]
  depends_on "sphinx-doc" => :build
  depends_on "antlr" => :test
  depends_on "openjdk" => :test
  uses_from_macos "bison" => :test
  uses_from_macos "flex" => :test

  # Build the Haskell sources with cabal and the docs with Sphinx.
  def install
    cd "source" do
      system "cabal", "v2-update"
      system "cabal", "v2-install", *std_cabal_v2_args
      doc.install "CHANGELOG.md"
      doc.install "src/BNFC.cf" => "BNFC.cf"
    end
    cd "docs" do
      system "make", "text", "man", "SPHINXBUILD=#{Formula["sphinx-doc"].bin/"sphinx-build"}"
      cd "_build" do
        doc.install "text" => "manual"
        man1.install "man/1/bnfc.1" => "bnfc.1"
      end
    end
    doc.install %w[README.md examples]
  end

  # Generate a small calculator grammar with each backend and compare the
  # parser output against the expected text byte-for-byte.
  test do
    ENV.prepend_create_path "PATH", testpath/"tools-bin"
    (testpath/"calc.cf").write <<~EOS
      EAdd. Exp ::= Exp "+" Exp1 ;
      ESub. Exp ::= Exp "-" Exp1 ;
      EMul. Exp1 ::= Exp1 "*" Exp2 ;
      EDiv. Exp1 ::= Exp1 "/" Exp2 ;
      EInt. Exp2 ::= Integer ;
      coercions Exp 2 ;
      entrypoints Exp ;
      comment "(#" "#)" ;
    EOS
    (testpath/"test.calc").write "14 * (# Parsing is fun! #) (3 + 2 / 5 - 8)"
    # HEAD builds emit an extra trailing space on the linearized tree line.
    treespace = if build.head?
      " "
    else
      ""
    end
    # NOTE: trailing whitespace and interpolated spaces in the expected
    # outputs below are significant — do not "clean them up".
    check_out = <<~EOS
      Parse Successful!
      [Abstract Syntax]
      (EMul (EInt 14) (ESub (EAdd (EInt 3) (EDiv (EInt 2) (EInt 5))) (EInt 8)))
      [Linearized Tree]
      14 * (3 + 2 / 5 - 8) #{treespace}
    EOS
    mktemp "c-test" do
      system bin/"bnfc", "-m", "-o.", "--c", testpath/"calc.cf"
      system "make", "CC=#{ENV.cc}", "CCFLAGS=#{ENV.cflags}"
      test_out = shell_output("./Testcalc #{testpath}/test.calc")
      assert_equal check_out, test_out
    end
    mktemp "cxx-test" do
      system bin/"bnfc", "-m", "-o.", "--cpp", testpath/"calc.cf"
      system "make", "CC=#{ENV.cxx}", "CCFLAGS=#{ENV.cxxflags}"
      test_out = shell_output("./Testcalc #{testpath}/test.calc")
      assert_equal check_out, test_out
    end
    mktemp "haskell-test" do
      system "cabal", "v2-update"
      system "cabal", "v2-install",
        "--jobs=#{ENV.make_jobs}", "--max-backjumps=100000",
        "--install-method=copy", "--installdir=#{testpath/"tools-bin"}",
        "alex", "happy"
      system bin/"bnfc", "-m", "-o.", "--haskell", "--ghc", "-d", testpath/"calc.cf"
      system "make"
      test_out = shell_output("./Calc/Test #{testpath/"test.calc"}")
      check_out_hs = <<~EOS
        #{testpath/"test.calc"}
        Parse Successful!
        [Abstract Syntax]
        EMul (EInt 14) (ESub (EAdd (EInt 3) (EDiv (EInt 2) (EInt 5))) (EInt 8))
        [Linearized tree]
        14 * (3 + 2 / 5 - 8)
      EOS
      assert_equal check_out_hs, test_out
    end
    mktemp "java-test" do
      ENV.deparallelize # only the Java test needs this
      jdk_dir = Formula["openjdk"].bin
      antlr_bin = Formula["antlr"].bin/"antlr"
      antlr_jar = Dir[Formula["antlr"].prefix/"antlr-*-complete.jar"][0]
      ENV["CLASSPATH"] = ".:#{antlr_jar}"
      system bin/"bnfc", "-m", "-o.", "--java", "--antlr4", testpath/"calc.cf"
      system "make", "JAVAC=#{jdk_dir/"javac"}", "JAVA=#{jdk_dir/"java"}", "LEXER=#{antlr_bin}", "PARSER=#{antlr_bin}"
      test_out = shell_output("#{jdk_dir}/java calc.Test #{testpath}/test.calc")
      space = " "
      # "Succesful" (sic) — presumably matches the Java backend's actual
      # output; verify against BNFC before "fixing" the spelling.
      check_out_j = <<~EOS
        Parse Succesful!
        [Abstract Syntax]
        (EMul (EInt 14) (ESub (EAdd (EInt 3) (EDiv (EInt 2) (EInt 5))) (EInt 8)))#{space}
        [Linearized Tree]
        14 * (3 + 2 / 5 - 8)
      EOS
      assert_equal check_out_j, test_out
    end
  end
end
bnfc: update 2.9.1 bottle.
# Homebrew formula for BNFC, rebuild 1 of the 2.9.1 bottle (new bottle
# checksums; everything else unchanged from the previous revision).
class Bnfc < Formula
  desc "BNF Converter"
  homepage "https://bnfc.digitalgrammars.com/"
  url "https://github.com/BNFC/bnfc/archive/v2.9.1.tar.gz"
  sha256 "d79125168636fdcf0acd64a9b83ea620c311b16dcada0848d8a577aa8aeddec4"
  license "BSD-3-Clause"
  head "https://github.com/BNFC/bnfc.git"

  bottle do
    rebuild 1
    sha256 cellar: :any_skip_relocation, big_sur: "196fbc36c44e6627bf8c39d852f5f75defbff9c344cfa65f04a89e04c72b6b4a"
    sha256 cellar: :any_skip_relocation, catalina: "62c0fb84159e25aab115a74ef6b0b24d728528ce7a7ad7ebc92601d25f3ae96f"
    sha256 cellar: :any_skip_relocation, mojave: "b5cc8d548edc6e6a48a3f150f6c20937f9466d6bfec4b64e04a1f56a2c418979"
  end

  depends_on "cabal-install" => [:build, :test]
  depends_on "ghc" => [:build, :test]
  depends_on "sphinx-doc" => :build
  depends_on "antlr" => :test
  depends_on "openjdk" => :test
  uses_from_macos "bison" => :test
  uses_from_macos "flex" => :test

  # Build the Haskell sources with cabal and the docs with Sphinx.
  def install
    cd "source" do
      system "cabal", "v2-update"
      system "cabal", "v2-install", *std_cabal_v2_args
      doc.install "CHANGELOG.md"
      doc.install "src/BNFC.cf" => "BNFC.cf"
    end
    cd "docs" do
      system "make", "text", "man", "SPHINXBUILD=#{Formula["sphinx-doc"].bin/"sphinx-build"}"
      cd "_build" do
        doc.install "text" => "manual"
        man1.install "man/1/bnfc.1" => "bnfc.1"
      end
    end
    doc.install %w[README.md examples]
  end

  # Generate a small calculator grammar with each backend (C, C++, Haskell,
  # Java) and compare the parser output against the expected text.
  test do
    ENV.prepend_create_path "PATH", testpath/"tools-bin"
    (testpath/"calc.cf").write <<~EOS
      EAdd. Exp ::= Exp "+" Exp1 ;
      ESub. Exp ::= Exp "-" Exp1 ;
      EMul. Exp1 ::= Exp1 "*" Exp2 ;
      EDiv. Exp1 ::= Exp1 "/" Exp2 ;
      EInt. Exp2 ::= Integer ;
      coercions Exp 2 ;
      entrypoints Exp ;
      comment "(#" "#)" ;
    EOS
    (testpath/"test.calc").write "14 * (# Parsing is fun! #) (3 + 2 / 5 - 8)"
    # HEAD builds emit an extra trailing space on the linearized tree line.
    treespace = if build.head?
      " "
    else
      ""
    end
    # NOTE: trailing whitespace and interpolated spaces in the expected
    # outputs below are significant — do not "clean them up".
    check_out = <<~EOS
      Parse Successful!
      [Abstract Syntax]
      (EMul (EInt 14) (ESub (EAdd (EInt 3) (EDiv (EInt 2) (EInt 5))) (EInt 8)))
      [Linearized Tree]
      14 * (3 + 2 / 5 - 8) #{treespace}
    EOS
    mktemp "c-test" do
      system bin/"bnfc", "-m", "-o.", "--c", testpath/"calc.cf"
      system "make", "CC=#{ENV.cc}", "CCFLAGS=#{ENV.cflags}"
      test_out = shell_output("./Testcalc #{testpath}/test.calc")
      assert_equal check_out, test_out
    end
    mktemp "cxx-test" do
      system bin/"bnfc", "-m", "-o.", "--cpp", testpath/"calc.cf"
      system "make", "CC=#{ENV.cxx}", "CCFLAGS=#{ENV.cxxflags}"
      test_out = shell_output("./Testcalc #{testpath}/test.calc")
      assert_equal check_out, test_out
    end
    mktemp "haskell-test" do
      system "cabal", "v2-update"
      system "cabal", "v2-install",
        "--jobs=#{ENV.make_jobs}", "--max-backjumps=100000",
        "--install-method=copy", "--installdir=#{testpath/"tools-bin"}",
        "alex", "happy"
      system bin/"bnfc", "-m", "-o.", "--haskell", "--ghc", "-d", testpath/"calc.cf"
      system "make"
      test_out = shell_output("./Calc/Test #{testpath/"test.calc"}")
      check_out_hs = <<~EOS
        #{testpath/"test.calc"}
        Parse Successful!
        [Abstract Syntax]
        EMul (EInt 14) (ESub (EAdd (EInt 3) (EDiv (EInt 2) (EInt 5))) (EInt 8))
        [Linearized tree]
        14 * (3 + 2 / 5 - 8)
      EOS
      assert_equal check_out_hs, test_out
    end
    mktemp "java-test" do
      ENV.deparallelize # only the Java test needs this
      jdk_dir = Formula["openjdk"].bin
      antlr_bin = Formula["antlr"].bin/"antlr"
      antlr_jar = Dir[Formula["antlr"].prefix/"antlr-*-complete.jar"][0]
      ENV["CLASSPATH"] = ".:#{antlr_jar}"
      system bin/"bnfc", "-m", "-o.", "--java", "--antlr4", testpath/"calc.cf"
      system "make", "JAVAC=#{jdk_dir/"javac"}", "JAVA=#{jdk_dir/"java"}", "LEXER=#{antlr_bin}", "PARSER=#{antlr_bin}"
      test_out = shell_output("#{jdk_dir}/java calc.Test #{testpath}/test.calc")
      space = " "
      # "Succesful" (sic) — presumably matches the Java backend's actual
      # output; verify against BNFC before "fixing" the spelling.
      check_out_j = <<~EOS
        Parse Succesful!
        [Abstract Syntax]
        (EMul (EInt 14) (ESub (EAdd (EInt 3) (EDiv (EInt 2) (EInt 5))) (EInt 8)))#{space}
        [Linearized Tree]
        14 * (3 + 2 / 5 - 8)
      EOS
      assert_equal check_out_j, test_out
    end
  end
end
|
module Dragonfly
  module Imaginary
    # Gem version string. Frozen so the shared constant cannot be
    # mutated in place (RuboCop Style/MutableConstant).
    VERSION = "0.0.1".freeze
  end
end
Bump version to 0.0.2
module Dragonfly
  module Imaginary
    # Gem version string. Frozen so the shared constant cannot be
    # mutated in place (RuboCop Style/MutableConstant).
    VERSION = "0.0.2".freeze
  end
end
|
# Homebrew formula for buku 2.5 (revision 1), installed into a Python 3
# virtualenv under libexec with its dependencies vendored as resources.
class Buku < Formula
  include Language::Python::Virtualenv

  desc "Command-line bookmark manager"
  homepage "https://github.com/jarun/Buku"
  url "https://github.com/jarun/Buku/archive/v2.5.tar.gz"
  sha256 "27dd770837110db8348446436aca3c7ed16b2884b4064aad0deb58d4ad4a69d4"
  revision 1

  bottle do
    cellar :any_skip_relocation
    sha256 "7dff85c7485c4d5024f87b3886c64568b893c21e597cd493bd09b3d04bf5f8fa" => :sierra
    sha256 "7dff85c7485c4d5024f87b3886c64568b893c21e597cd493bd09b3d04bf5f8fa" => :el_capitan
    sha256 "7dff85c7485c4d5024f87b3886c64568b893c21e597cd493bd09b3d04bf5f8fa" => :yosemite
  end

  depends_on :python3
  depends_on "openssl"

  # beautifulsoup4
  resource "beautifulsoup4" do
    url "https://files.pythonhosted.org/packages/86/ea/8e9fbce5c8405b9614f1fd304f7109d9169a3516a493ce4f7f77c39435b7/beautifulsoup4-4.5.1.tar.gz"
    sha256 "3c9474036afda9136aac6463def733f81017bf9ef3510d25634f335b0c87f5e1"
  end

  # cryptography
  resource "cffi" do
    url "https://files.pythonhosted.org/packages/0a/f3/686af8873b70028fccf67b15c78fd4e4667a3da995007afc71e786d61b0a/cffi-1.8.3.tar.gz"
    sha256 "c321bd46faa7847261b89c0469569530cad5a41976bb6dba8202c0159f476568"
  end

  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/03/1a/60984cb85cc38c4ebdfca27b32a6df6f1914959d8790f5a349608c78be61/cryptography-1.5.2.tar.gz"
    sha256 "eb8875736734e8e870b09be43b17f40472dc189b1c422a952fa8580768204832"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/fb/84/8c27516fbaa8147acd2e431086b473c453c428e24e8fb99a1d89ce381851/idna-2.1.tar.gz"
    sha256 "ed36f281aebf3cd0797f163bb165d84c31507cedd15928b095b1675e2d04c676"
  end

  resource "pyasn1" do
    url "https://files.pythonhosted.org/packages/f7/83/377e3dd2e95f9020dbd0dfd3c47aaa7deebe3c68d3857a4e51917146ae8b/pyasn1-0.1.9.tar.gz"
    sha256 "853cacd96d1f701ddd67aa03ecc05f51890135b7262e922710112f12a2ed2a7f"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/eb/83/00c55ff5cb773a78e9e47476ac1a0cd2f0fb71b34cb6e178572eaec22984/pycparser-2.16.tar.gz"
    sha256 "108f9ff23869ae2f8b38e481e7b4b4d4de1e32be968f29bbe303d629c34a6260"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
    sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
  end

  # Install the Python resources into a libexec virtualenv and point the
  # buku script's shebang at that interpreter.
  def install
    venv = virtualenv_create(libexec, "python3")
    venv.pip_install resources
    # Replace shebang with virtualenv python
    inreplace "buku", "#!/usr/bin/env python3", "#!#{libexec}/bin/python"
    bin.install "buku"
    man1.install "buku.1"
    bash_completion.install "auto-completion/bash/buku-completion.bash"
    fish_completion.install "auto-completion/fish/buku.fish"
    zsh_completion.install "auto-completion/zsh/_buku"
  end

  # Import a Firefox-style bookmarks export, then drive the lock/unlock
  # crypto workflow through expect, and finally search the imported entry.
  test do
    ENV["XDG_DATA_HOME"] = "#{testpath}/.local/share"
    # Firefox exported bookmarks file
    (testpath/"bookmarks.html").write <<-EOS.undent
      <!DOCTYPE NETSCAPE-Bookmark-file-1>
      <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">
      <TITLE>Bookmarks</TITLE>
      <H1>Bookmarks Menu</H1>
      <DL><p>
      <HR> <DT><H3 ADD_DATE="1464091987" LAST_MODIFIED="1477369518" PERSONAL_TOOLBAR_FOLDER="true">Bookmarks Toolbar</H3>
      <DD>Add bookmarks to this folder to see them displayed on the Bookmarks Toolbar
      <DL><p>
      <DT><A HREF="https://github.com/Homebrew/brew" ADD_DATE="1477369518" LAST_MODIFIED="1477369529">Homebrew</A>
      </DL><p>
      </DL>
    EOS
    assert_match "https://github.com/Homebrew/brew", shell_output("#{bin}/buku --import bookmarks.html")
    # Test crypto functionality
    (testpath/"crypto-test").write <<-EOS.undent
      # Lock bookmark database
      spawn buku -l
      expect "Password: "
      send "password\r"
      expect "Password: "
      send "password\r"
      expect {
      -re ".*ERROR.*" { exit 1 }
      "File encrypted"
      }
      # Unlock bookmark database
      spawn buku -k
      expect "Password: "
      send "password\r"
      expect {
      -re ".*ERROR.*" { exit 1 }
      "File decrypted"
      }
    EOS
    system "/usr/bin/expect", "-f", "crypto-test"
    assert_match "https://github.com/Homebrew/brew", shell_output("#{bin}/buku --noprompt -s github")
  end
end
buku: update 2.5_1 bottle.
# Homebrew formula for buku, a command-line bookmark manager, installed
# into a self-contained Python 3 virtualenv with its pinned dependencies.
class Buku < Formula
include Language::Python::Virtualenv
desc "Command-line bookmark manager"
homepage "https://github.com/jarun/Buku"
url "https://github.com/jarun/Buku/archive/v2.5.tar.gz"
sha256 "27dd770837110db8348446436aca3c7ed16b2884b4064aad0deb58d4ad4a69d4"
revision 1
bottle do
sha256 "824e2a0f75cce19b6a78b613dc5063ecf587fa79296b91ae00b357296d368896" => :sierra
sha256 "ccc88b0b37cda748f65bdbf0b8706082bab820c0d63748e6de10f1667131222a" => :el_capitan
sha256 "ed6b89816df8066f51b20322b9e5dfc80c1524c47224ed2eccc03e1309f17a62" => :yosemite
end
depends_on :python3
depends_on "openssl"
# Pinned PyPI resources installed into the virtualenv below.
# beautifulsoup4
resource "beautifulsoup4" do
url "https://files.pythonhosted.org/packages/86/ea/8e9fbce5c8405b9614f1fd304f7109d9169a3516a493ce4f7f77c39435b7/beautifulsoup4-4.5.1.tar.gz"
sha256 "3c9474036afda9136aac6463def733f81017bf9ef3510d25634f335b0c87f5e1"
end
# cryptography
resource "cffi" do
url "https://files.pythonhosted.org/packages/0a/f3/686af8873b70028fccf67b15c78fd4e4667a3da995007afc71e786d61b0a/cffi-1.8.3.tar.gz"
sha256 "c321bd46faa7847261b89c0469569530cad5a41976bb6dba8202c0159f476568"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/03/1a/60984cb85cc38c4ebdfca27b32a6df6f1914959d8790f5a349608c78be61/cryptography-1.5.2.tar.gz"
sha256 "eb8875736734e8e870b09be43b17f40472dc189b1c422a952fa8580768204832"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/fb/84/8c27516fbaa8147acd2e431086b473c453c428e24e8fb99a1d89ce381851/idna-2.1.tar.gz"
sha256 "ed36f281aebf3cd0797f163bb165d84c31507cedd15928b095b1675e2d04c676"
end
resource "pyasn1" do
url "https://files.pythonhosted.org/packages/f7/83/377e3dd2e95f9020dbd0dfd3c47aaa7deebe3c68d3857a4e51917146ae8b/pyasn1-0.1.9.tar.gz"
sha256 "853cacd96d1f701ddd67aa03ecc05f51890135b7262e922710112f12a2ed2a7f"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/eb/83/00c55ff5cb773a78e9e47476ac1a0cd2f0fb71b34cb6e178572eaec22984/pycparser-2.16.tar.gz"
sha256 "108f9ff23869ae2f8b38e481e7b4b4d4de1e32be968f29bbe303d629c34a6260"
end
resource "six" do
url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
# Build everything inside a libexec virtualenv, then expose the CLI,
# man page and shell completions from there.
def install
venv = virtualenv_create(libexec, "python3")
venv.pip_install resources
# Replace shebang with virtualenv python
inreplace "buku", "#!/usr/bin/env python3", "#!#{libexec}/bin/python"
bin.install "buku"
man1.install "buku.1"
bash_completion.install "auto-completion/bash/buku-completion.bash"
fish_completion.install "auto-completion/fish/buku.fish"
zsh_completion.install "auto-completion/zsh/_buku"
end
# Imports a Firefox-format bookmarks export, runs an expect(1) script to
# verify database encryption/decryption, then searches for the bookmark.
test do
ENV["XDG_DATA_HOME"] = "#{testpath}/.local/share"
# Firefox exported bookmarks file
(testpath/"bookmarks.html").write <<-EOS.undent
<!DOCTYPE NETSCAPE-Bookmark-file-1>
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">
<TITLE>Bookmarks</TITLE>
<H1>Bookmarks Menu</H1>
<DL><p>
<HR> <DT><H3 ADD_DATE="1464091987" LAST_MODIFIED="1477369518" PERSONAL_TOOLBAR_FOLDER="true">Bookmarks Toolbar</H3>
<DD>Add bookmarks to this folder to see them displayed on the Bookmarks Toolbar
<DL><p>
<DT><A HREF="https://github.com/Homebrew/brew" ADD_DATE="1477369518" LAST_MODIFIED="1477369529">Homebrew</A>
</DL><p>
</DL>
EOS
assert_match "https://github.com/Homebrew/brew", shell_output("#{bin}/buku --import bookmarks.html")
# Test crypto functionality
(testpath/"crypto-test").write <<-EOS.undent
# Lock bookmark database
spawn buku -l
expect "Password: "
send "password\r"
expect "Password: "
send "password\r"
expect {
-re ".*ERROR.*" { exit 1 }
"File encrypted"
}
# Unlock bookmark database
spawn buku -k
expect "Password: "
send "password\r"
expect {
-re ".*ERROR.*" { exit 1 }
"File decrypted"
}
EOS
system "/usr/bin/expect", "-f", "crypto-test"
assert_match "https://github.com/Homebrew/brew", shell_output("#{bin}/buku --noprompt -s github")
end
end
|
# Namespace for the dry_haml_handlebars gem.
module DryHamlHandlebars
  # Current gem release version.
  VERSION = '0.0.10'
end
bumpity bump
# Namespace for the dry_haml_handlebars gem.
module DryHamlHandlebars
  # Current gem release version.
  VERSION = '0.0.11'
end
|
# Homebrew formula for Cask, a project/dependency manager for Emacs.
class Cask < Formula
desc "Emacs dependency management"
homepage "https://cask.readthedocs.org/"
url "https://github.com/cask/cask/archive/v0.7.2.tar.gz"
sha256 "5c8804933dd395ec79e957c96179bf6ac20af24066928685a713e54f44107a2c"
head "https://github.com/cask/cask.git"
bottle do
cellar :any
sha256 "68b9e9f496dabaf85bdbef1414679bb5cbd5531383db02ab625d7bab454b6a78" => :yosemite
sha256 "b8bb1e95119383cb7fa3e22eea1d73cafd77cadcc8fff32b22414115b24faabc" => :mavericks
sha256 "2a9c3376bc81daa443d7b9a10043e871f7439eb8d11ae2523b18ca0cf11e3832" => :mountain_lion
end
depends_on :emacs => "23"
# Installs the launcher script and Emacs Lisp sources, symlinking the
# Lisp entry points into the shared site-lisp load path.
def install
zsh_completion.install "etc/cask_completion.zsh"
bin.install "bin/cask"
prefix.install Dir["*.el"]
prefix.install "templates"
(share/"emacs/site-lisp").install_symlink "#{prefix}/cask-bootstrap.el"
(share/"emacs/site-lisp").install_symlink "#{prefix}/cask.el"
# Stop cask performing self-upgrades.
touch prefix/".no-upgrade"
end
# Loads cask.el in batch Emacs to verify the package is requirable.
test do
(testpath/"test.el").write <<-EOS.undent
(add-to-list 'load-path "#{share}/emacs/site-lisp")
(require 'cask)
(print (minibuffer-prompt-width))
EOS
assert_equal "0", shell_output("emacs -batch -l #{testpath}/test.el").strip
end
end
cask: formula-specific directory in site-lisp
# Homebrew formula for Cask, a project/dependency manager for Emacs.
# Lisp entry points are symlinked into a formula-specific site-lisp/cask
# directory rather than site-lisp itself.
class Cask < Formula
desc "Emacs dependency management"
homepage "https://cask.readthedocs.org/"
url "https://github.com/cask/cask/archive/v0.7.2.tar.gz"
sha256 "5c8804933dd395ec79e957c96179bf6ac20af24066928685a713e54f44107a2c"
head "https://github.com/cask/cask.git"
bottle do
cellar :any
sha256 "68b9e9f496dabaf85bdbef1414679bb5cbd5531383db02ab625d7bab454b6a78" => :yosemite
sha256 "b8bb1e95119383cb7fa3e22eea1d73cafd77cadcc8fff32b22414115b24faabc" => :mavericks
sha256 "2a9c3376bc81daa443d7b9a10043e871f7439eb8d11ae2523b18ca0cf11e3832" => :mountain_lion
end
depends_on :emacs => "24"
def install
bin.install "bin/cask"
prefix.install "templates"
# Lisp files must stay here: https://github.com/cask/cask/issues/305
prefix.install Dir["*.el"]
(share/"emacs/site-lisp/cask").install_symlink "#{prefix}/cask.el"
(share/"emacs/site-lisp/cask").install_symlink "#{prefix}/cask-bootstrap.el"
zsh_completion.install "etc/cask_completion.zsh"
# Stop cask performing self-upgrades.
touch prefix/".no-upgrade"
end
# Loads cask.el from the formula-specific load path in batch Emacs.
test do
(testpath/"test.el").write <<-EOS.undent
(add-to-list 'load-path "#{share}/emacs/site-lisp/cask")
(require 'cask)
(print (minibuffer-prompt-width))
EOS
assert_equal "0", shell_output("emacs -Q --batch -l #{testpath}/test.el").strip
end
end
|
# Homebrew formula for Node.js. npm is deliberately NOT installed
# (./configure --without-npm); see caveats.
class Node < Formula
  desc "Platform built on V8 to build network applications"
  homepage "https://nodejs.org/"
  url "https://nodejs.org/dist/v10.7.0/node-v10.7.0.tar.xz"
  sha256 "34ee6946ca67151f35c23115818f0b78233c21b7dff210648d4d6dbb5a1be962"
  head "https://github.com/nodejs/node.git"
  bottle do
    root_url "https://homebrew.bintray.com/bottles"
    sha256 "91abd24ab9aeceb24d6f5a7b298b35f86f2087da8a8edfe8e3e224217dbbe6f5" => :high_sierra
    sha256 "93d61ad8bb29e6be9150ad040c1b5086aedcb4cd4ad357a32f3b3a378bcdbc3c" => :sierra
    sha256 "c98990aca7283a19f4b748a60560efd04f7bd258f551a827ac176f728f3d7cde" => :el_capitan
  end
  option "with-debug", "Build with debugger hooks"
  option "with-openssl", "Build against Homebrew's OpenSSL instead of the bundled OpenSSL"
  # Fix: install, caveats and test all branch on `build.with? "full-icu"`,
  # but the option was never declared, so `--with-full-icu` was rejected
  # as invalid and those branches were unreachable.
  option "with-full-icu", "Build with full-icu (all locales) instead of small-icu (English only)"
  deprecated_option "enable-debug" => "with-debug"
  depends_on :python => :build if MacOS.version <= :snow_leopard
  depends_on "pkg-config" => :build
  depends_on "icu4c" => :recommended
  depends_on "openssl" => :optional
  conflicts_with "node@4", :because => "Differing versions of the same formulae."
  # Per upstream - "Need g++ 4.8 or clang++ 3.4".
  fails_with :clang if MacOS.version <= :snow_leopard
  fails_with :gcc_4_0
  fails_with :gcc
  ("4.3".."4.7").each do |n|
    fails_with :gcc => n
  end
  # ICU sources staged into deps/icu when building --with-full-icu
  resource "icu4c" do
    url "https://ssl.icu-project.org/files/icu4c/58.1/icu4c-58_1-src.tgz"
    mirror "https://nuxi.nl/distfiles/third_party/icu4c-58_1-src.tgz"
    version "58.1"
    sha256 "0eb46ba3746a9c2092c8ad347a29b1a1b4941144772d13a88667a7b11ea30309"
  end
  def install
    # Never install the bundled "npm", always prefer our
    # installation from tarball for better packaging control.
    args = %W[--prefix=#{prefix} --without-npm]
    args << "--debug" if build.with? "debug"
    args << "--shared-openssl" if build.with? "openssl"
    args << "--tag=head" if build.head?
    if build.with? "full-icu"
      resource("icu4c").stage buildpath/"deps/icu"
      args << "--with-intl=full-icu"
    end
    system "./configure", *args
    system "make", "install"
  end
  def post_install
    # Remove npm-related leftovers; npm is not installed by this formula
    # (presumably written out by `make install` regardless — TODO confirm)
    rm_rf "#{prefix}/etc"
    rm_rf "#{etc}/bash_completion.d/npm"
    rm_rf "#{prefix}/libexec"
  end
  def caveats
    s = ""
    s += <<-EOS.undent
      Homebrew did NOT install npm. Please install and configure npm manually.
    EOS
    if build.without? "full-icu"
      s += <<-EOS.undent
        Please note by default only English locale support is provided. If you need
        full locale support you should either rebuild with full icu:
          `brew reinstall node --with-full-icu`
        or add full icu data at runtime following:
          https://github.com/nodejs/node/wiki/Intl#using-and-customizing-the-small-icu-build
      EOS
    end
    s
  end
  # Runs a trivial script and checks Intl number formatting (small-icu
  # covers en; de is only checked when built --with-full-icu).
  test do
    path = testpath/"test.js"
    path.write "console.log('hello');"
    output = shell_output("#{bin}/node #{path}").strip
    assert_equal "hello", output
    output = shell_output("#{bin}/node -e 'console.log(new Intl.NumberFormat(\"en-EN\").format(1234.56))'").strip
    assert_equal "1,234.56", output
    if build.with? "full-icu"
      output = shell_output("#{bin}/node -e 'console.log(new Intl.NumberFormat(\"de-DE\").format(1234.56))'").strip
      assert_equal "1.234,56", output
    end
  end
end
update node.rb formula to v10.8.
# Homebrew formula for Node.js. npm is deliberately NOT installed
# (./configure --without-npm); see caveats.
class Node < Formula
  desc "Platform built on V8 to build network applications"
  homepage "https://nodejs.org/"
  url "https://nodejs.org/dist/v10.8.0/node-v10.8.0.tar.xz"
  sha256 "97bb21718228fd801c8355c842e764eefda888d3a87de8eb04315c74f546b9bc"
  head "https://github.com/nodejs/node.git"
  bottle do
    root_url "https://homebrew.bintray.com/bottles"
    sha256 "5f9325b4556d4874fb8b917f1e5a9b7f6cdc224b8d683387065577dc41a6020e" => :high_sierra
    sha256 "17d515210e284aaed53d97765a1932026dbe967304f3101f3e2d2d4b896d5ac2" => :sierra
    sha256 "f3f9dd0a91bbd765ae4ed88d72dbedf88b3cffec4e4978e2143f7d7b20f874dd" => :el_capitan
  end
  option "with-debug", "Build with debugger hooks"
  option "with-openssl", "Build against Homebrew's OpenSSL instead of the bundled OpenSSL"
  # Fix: install, caveats and test all branch on `build.with? "full-icu"`,
  # but the option was never declared, so `--with-full-icu` was rejected
  # as invalid and those branches were unreachable.
  option "with-full-icu", "Build with full-icu (all locales) instead of small-icu (English only)"
  deprecated_option "enable-debug" => "with-debug"
  depends_on :python => :build if MacOS.version <= :snow_leopard
  depends_on "pkg-config" => :build
  depends_on "icu4c" => :recommended
  depends_on "openssl" => :optional
  conflicts_with "node@4", :because => "Differing versions of the same formulae."
  # Per upstream - "Need g++ 4.8 or clang++ 3.4".
  fails_with :clang if MacOS.version <= :snow_leopard
  fails_with :gcc_4_0
  fails_with :gcc
  ("4.3".."4.7").each do |n|
    fails_with :gcc => n
  end
  # ICU sources staged into deps/icu when building --with-full-icu
  resource "icu4c" do
    url "https://ssl.icu-project.org/files/icu4c/58.1/icu4c-58_1-src.tgz"
    mirror "https://nuxi.nl/distfiles/third_party/icu4c-58_1-src.tgz"
    version "58.1"
    sha256 "0eb46ba3746a9c2092c8ad347a29b1a1b4941144772d13a88667a7b11ea30309"
  end
  def install
    # Never install the bundled "npm", always prefer our
    # installation from tarball for better packaging control.
    args = %W[--prefix=#{prefix} --without-npm]
    args << "--debug" if build.with? "debug"
    args << "--shared-openssl" if build.with? "openssl"
    args << "--tag=head" if build.head?
    if build.with? "full-icu"
      resource("icu4c").stage buildpath/"deps/icu"
      args << "--with-intl=full-icu"
    end
    system "./configure", *args
    system "make", "install"
  end
  def post_install
    # Remove npm-related leftovers; npm is not installed by this formula
    # (presumably written out by `make install` regardless — TODO confirm)
    rm_rf "#{prefix}/etc"
    rm_rf "#{etc}/bash_completion.d/npm"
    rm_rf "#{prefix}/libexec"
  end
  def caveats
    s = ""
    s += <<-EOS.undent
      Homebrew did NOT install npm. Please install and configure npm manually.
    EOS
    if build.without? "full-icu"
      s += <<-EOS.undent
        Please note by default only English locale support is provided. If you need
        full locale support you should either rebuild with full icu:
          `brew reinstall node --with-full-icu`
        or add full icu data at runtime following:
          https://github.com/nodejs/node/wiki/Intl#using-and-customizing-the-small-icu-build
      EOS
    end
    s
  end
  # Runs a trivial script and checks Intl number formatting (small-icu
  # covers en; de is only checked when built --with-full-icu).
  test do
    path = testpath/"test.js"
    path.write "console.log('hello');"
    output = shell_output("#{bin}/node #{path}").strip
    assert_equal "hello", output
    output = shell_output("#{bin}/node -e 'console.log(new Intl.NumberFormat(\"en-EN\").format(1234.56))'").strip
    assert_equal "1,234.56", output
    if build.with? "full-icu"
      output = shell_output("#{bin}/node -e 'console.log(new Intl.NumberFormat(\"de-DE\").format(1234.56))'").strip
      assert_equal "1.234,56", output
    end
  end
end
|
# Homebrew formula for CBMC, the C Bounded Model Checker. Built from a
# pinned git tag/revision because the build initializes submodules.
class Cbmc < Formula
desc "C Bounded Model Checker"
homepage "https://www.cprover.org/cbmc/"
url "https://github.com/diffblue/cbmc.git",
tag: "cbmc-5.14.1",
revision: "c21ede2ef8191facee70260f97ffe4ae5d954143"
license "BSD-4-Clause"
bottle do
cellar :any_skip_relocation
sha256 "703783debd704d69dc6ec9f083decd040115d8e0055194546957f5b96561f638" => :catalina
sha256 "ac256c50422ac141f1f997da92a9a6be9859e579c5300b0ff4304bae3dfcab90" => :mojave
sha256 "74ecb73f2cf558299e3577379514ba9bad66e4139a601213d6051764acd3f5df" => :high_sierra
end
depends_on "cmake" => :build
depends_on "maven" => :build
depends_on "openjdk" => :build
def install
# Fetch the submodules required by the CMake build
system "git", "submodule", "update", "--init"
# Build CBMC
system "cmake", "-S.", "-Bbuild", *std_cmake_args
system "cmake", "--build", "build"
cd "build" do
system "make", "install"
end
end
test do
# Find a pointer out of bounds error
(testpath/"main.c").write <<~EOS
#include <stdlib.h>
int main() {
char *ptr = malloc(10);
char c = ptr[10];
}
EOS
# The trailing 10 is the exit status expected from cbmc on failed verification
assert_match "VERIFICATION FAILED",
shell_output("#{bin}/cbmc --pointer-check main.c", 10)
end
end
cbmc: update 5.14.1 bottle.
# Homebrew formula for CBMC, the C Bounded Model Checker. Built from a
# pinned git tag/revision because the build initializes submodules.
class Cbmc < Formula
desc "C Bounded Model Checker"
homepage "https://www.cprover.org/cbmc/"
url "https://github.com/diffblue/cbmc.git",
tag: "cbmc-5.14.1",
revision: "c21ede2ef8191facee70260f97ffe4ae5d954143"
license "BSD-4-Clause"
bottle do
cellar :any_skip_relocation
sha256 "684bb4c6e68f50479ce5654232828e82067542ef2f969644a8716d2973e8a210" => :catalina
sha256 "9b500d47fed9c9179c5fac7468d173322aaf2ed572c1840bc02d362d4f8d9f0e" => :mojave
sha256 "55a3b9044eb6f980356bbb7a0b472c23d2c477810dfc98c3fde4f761feedfdba" => :high_sierra
end
depends_on "cmake" => :build
depends_on "maven" => :build
depends_on "openjdk" => :build
def install
# Fetch the submodules required by the CMake build
system "git", "submodule", "update", "--init"
# Build CBMC
system "cmake", "-S.", "-Bbuild", *std_cmake_args
system "cmake", "--build", "build"
cd "build" do
system "make", "install"
end
end
test do
# Find a pointer out of bounds error
(testpath/"main.c").write <<~EOS
#include <stdlib.h>
int main() {
char *ptr = malloc(10);
char c = ptr[10];
}
EOS
# The trailing 10 is the exit status expected from cbmc on failed verification
assert_match "VERIFICATION FAILED",
shell_output("#{bin}/cbmc --pointer-check main.c", 10)
end
end
|
# Homebrew formula for CBMC 5.52.0, built from a pinned git tag/revision.
class Cbmc < Formula
desc "C Bounded Model Checker"
homepage "https://www.cprover.org/cbmc/"
url "https://github.com/diffblue/cbmc.git",
tag: "cbmc-5.52.0",
revision: "bd992a25e7643f7990389f6893764e177c41aa3d"
license "BSD-4-Clause"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "bec58188390134b8c27e5c6d024a9dab0fbd19258b64ee70fc36c49b697e55ed"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "e9d18e99e474e3eb5eb167be6700026692b6e8e76cb691153dd6aa84fda16c29"
sha256 cellar: :any_skip_relocation, monterey: "f66c8009a0d7a58ec0bfe3deec916c2a007025eab79e3949b4128ba1a93bdb75"
sha256 cellar: :any_skip_relocation, big_sur: "4295a787932e5401e1c43617f729e0e8bf87773f774774d6cbe7991f0d4060b1"
sha256 cellar: :any_skip_relocation, catalina: "776ee5af39afe443ce8d559f4908a49190d1d8bb780bbcb32170094f095471c5"
sha256 cellar: :any_skip_relocation, x86_64_linux: "db03fffbd36a1445160bf840d2f19703f99145ecc7467e0f1d91790adb0e0866"
end
depends_on "cmake" => :build
depends_on "maven" => :build
depends_on "openjdk" => :build
uses_from_macos "bison" => :build
uses_from_macos "flex" => :build
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
system "cmake", "-S", ".", "-B", "build", *std_cmake_args
system "cmake", "--build", "build"
system "cmake", "--install", "build"
# lib contains only `jar` files
libexec.install lib
end
test do
# Find a pointer out of bounds error
(testpath/"main.c").write <<~EOS
#include <stdlib.h>
int main() {
char *ptr = malloc(10);
char c = ptr[10];
}
EOS
# The trailing 10 is the exit status expected from cbmc on failed verification
assert_match "VERIFICATION FAILED",
shell_output("#{bin}/cbmc --pointer-check main.c", 10)
end
end
cbmc: update 5.52.0 bottle.
# Homebrew formula for CBMC 5.52.0, built from a pinned git tag/revision.
class Cbmc < Formula
desc "C Bounded Model Checker"
homepage "https://www.cprover.org/cbmc/"
url "https://github.com/diffblue/cbmc.git",
tag: "cbmc-5.52.0",
revision: "bd992a25e7643f7990389f6893764e177c41aa3d"
license "BSD-4-Clause"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "4099bdd85ef6be84f78b7f9707bf453723d59050cfe12736e07245d0f3cea3c1"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "14e9255cfc18ab9b9a0b8bdb8bc2c49b92ddde9b096e1508e01f40716e83c4ec"
sha256 cellar: :any_skip_relocation, monterey: "35ed89ffb0cb397d5db4dcd4cf829ffd9aca4b2f8bea62a699a1a5915cf82ad6"
sha256 cellar: :any_skip_relocation, big_sur: "e4a4e1d4acd2c78f1be17aac6cc93295726dccf338b6877952a5ce83e33d0eae"
sha256 cellar: :any_skip_relocation, catalina: "f391ef12bc3ecc961099d9b02d724d52072b77191a901d97bbd9718834dbe4a9"
sha256 cellar: :any_skip_relocation, x86_64_linux: "9175cfdd8f0f26918a3ed73b3bf64e62546ff1dd3e8bf6b1bcff20e464a7a289"
end
depends_on "cmake" => :build
depends_on "maven" => :build
depends_on "openjdk" => :build
uses_from_macos "bison" => :build
uses_from_macos "flex" => :build
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
system "cmake", "-S", ".", "-B", "build", *std_cmake_args
system "cmake", "--build", "build"
system "cmake", "--install", "build"
# lib contains only `jar` files
libexec.install lib
end
test do
# Find a pointer out of bounds error
(testpath/"main.c").write <<~EOS
#include <stdlib.h>
int main() {
char *ptr = malloc(10);
char c = ptr[10];
}
EOS
# The trailing 10 is the exit status expected from cbmc on failed verification
assert_match "VERIFICATION FAILED",
shell_output("#{bin}/cbmc --pointer-check main.c", 10)
end
end
|
# Homebrew formula for CGNS, a standard for storing and retrieving CFD
# analysis data, built with Fortran and HDF5 support enabled.
class Cgns < Formula
desc "CFD General Notation System"
homepage "http://cgns.org/"
url "https://github.com/CGNS/CGNS/archive/v4.1.1.tar.gz"
sha256 "055d345c3569df3ae832fb2611cd7e0bc61d56da41b2be1533407e949581e226"
revision 2
head "https://github.com/CGNS/CGNS.git"
bottle do
sha256 "ac5ed0dcdfd12f7e07c483d355b26d46f8380cfadb069b60d7bee21c12f6d31a" => :catalina
sha256 "e9bcc9d1af96b8ebc5ecb0d5ba16165edf750f75e53b6b81997bb5078b718c68" => :mojave
sha256 "cd64974956ac61d5574db91ceba7bcf99e17981be478864ef7b368340e993025" => :high_sierra
end
depends_on "cmake" => :build
depends_on "gcc"
depends_on "hdf5"
depends_on "szip"
uses_from_macos "zlib"
def install
args = std_cmake_args
args << "-DCGNS_ENABLE_64BIT=YES" if Hardware::CPU.is_64_bit?
args << "-DCGNS_ENABLE_FORTRAN=YES"
args << "-DCGNS_ENABLE_HDF5=YES"
mkdir "build" do
system "cmake", "..", *args
system "make"
system "make", "install"
end
# Avoid references to Homebrew shims
os = OS.mac? ? "mac" : "linux"
cc = OS.mac? ? "clang" : "gcc-5"
inreplace include/"cgnsBuild.defs", HOMEBREW_LIBRARY/"Homebrew/shims/#{os}/super/#{cc}", "/usr/bin/#{cc}"
end
# Compiles a minimal program against libcgns via the hdf5 compiler wrapper.
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include "cgnslib.h"
int main(int argc, char *argv[])
{
int filetype = CG_FILE_NONE;
if (cg_is_cgns(argv[0], &filetype) != CG_ERROR)
return 1;
return 0;
}
EOS
system Formula["hdf5"].opt_prefix/"bin/h5cc", testpath/"test.c", "-L#{opt_lib}", "-lcgns"
system "./a.out"
end
end
cgns: update 4.1.1_2 bottle.
# Homebrew formula for CGNS, a standard for storing and retrieving CFD
# analysis data, built with Fortran and HDF5 support enabled.
class Cgns < Formula
desc "CFD General Notation System"
homepage "http://cgns.org/"
url "https://github.com/CGNS/CGNS/archive/v4.1.1.tar.gz"
sha256 "055d345c3569df3ae832fb2611cd7e0bc61d56da41b2be1533407e949581e226"
revision 2
head "https://github.com/CGNS/CGNS.git"
bottle do
sha256 "ac5ed0dcdfd12f7e07c483d355b26d46f8380cfadb069b60d7bee21c12f6d31a" => :catalina
sha256 "e9bcc9d1af96b8ebc5ecb0d5ba16165edf750f75e53b6b81997bb5078b718c68" => :mojave
sha256 "cd64974956ac61d5574db91ceba7bcf99e17981be478864ef7b368340e993025" => :high_sierra
sha256 "b222d357b5611ba61a8b9c16e981c63af7732e2b135f0599579e63401f2ba5a4" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "gcc"
depends_on "hdf5"
depends_on "szip"
uses_from_macos "zlib"
def install
args = std_cmake_args
args << "-DCGNS_ENABLE_64BIT=YES" if Hardware::CPU.is_64_bit?
args << "-DCGNS_ENABLE_FORTRAN=YES"
args << "-DCGNS_ENABLE_HDF5=YES"
mkdir "build" do
system "cmake", "..", *args
system "make"
system "make", "install"
end
# Avoid references to Homebrew shims
os = OS.mac? ? "mac" : "linux"
cc = OS.mac? ? "clang" : "gcc-5"
inreplace include/"cgnsBuild.defs", HOMEBREW_LIBRARY/"Homebrew/shims/#{os}/super/#{cc}", "/usr/bin/#{cc}"
end
# Compiles a minimal program against libcgns via the hdf5 compiler wrapper.
test do
(testpath/"test.c").write <<~EOS
#include <stdio.h>
#include "cgnslib.h"
int main(int argc, char *argv[])
{
int filetype = CG_FILE_NONE;
if (cg_is_cgns(argv[0], &filetype) != CG_ERROR)
return 1;
return 0;
}
EOS
system Formula["hdf5"].opt_prefix/"bin/h5cc", testpath/"test.c", "-L#{opt_lib}", "-lcgns"
system "./a.out"
end
end
|
module Editions
class RepositoryManager
# QUESTION make batch mode a field? read git_name, git_email and repository_access from config param?
# hub                - GitHub API client (must expose access_token, org_teams,
#                      create_team, repo, create_repo, ... as used below)
# git_name/git_email - identity used for commits created by this manager
# repository_access  - :public or :private (string or symbol accepted)
def initialize hub, git_name, git_email, repository_access = :public
@hub = hub
@git_name = git_name
@git_email = git_email
# Normalized to a Symbol so the == :private comparisons below work
@repository_access = repository_access.to_sym
end
# URL prefix for cloning over HTTPS, embedding the hub's OAuth token via
# the token:x-oauth-basic basic-auth scheme.
def clone_repository_root
  "https://#{@hub.access_token}:x-oauth-basic@github.com/"
end
# URL prefix used when linking article repositories as submodules:
# SSH for private repositories, anonymous HTTPS for public ones.
def submodule_repository_root
  if @repository_access == :private
    'git@github.com:'
  else
    'https://github.com/'
  end
end
# Returns the org's "Contributors" team (name matched case-insensitively).
# When absent and options[:auto_create] is set, the team is created;
# otherwise nil is returned.
def contributor_team org, options = {}
  team = (@hub.org_teams org).find {|candidate| candidate.name.downcase == 'contributors' }
  team ||= (create_contributor_team org, options) if options[:auto_create]
  team
end
# Creates a "Contributors" team in the given org. Permission defaults to
# read-only ('pull') unless options[:permission] overrides it.
def create_contributor_team org, options = {}
  permission = options[:permission] || 'pull'
  @hub.create_team org, name: 'Contributors', permission: permission
end
# Creates one repository per author for the edition, seeds each with
# templates, grants the Contributors team access to it, then creates and
# seeds the master repository aggregating the articles as submodules.
# Returns [master_repo, *article_repos] with nil entries removed.
def create_article_repositories org, authors, edition, options = {}
# Repository names are prefixed with the periodical slug when present
prefix = (slug = edition.periodical.slug) ? %(#{slug}-) : nil
options = options.merge prefix: prefix
team = contributor_team org, auto_create: true
article_repos = authors.map do |author|
# FIXME handle case the repository already exists
if (article_repo = create_article_repository org, author, edition, options)
article_repo.last_commit_sha = seed_article_repository article_repo, edition, options
@hub.add_team_member team.id, author
@hub.add_team_repo team.id, article_repo.full_name
end
article_repo
end.compact
if (master_repo = create_master_repository org, edition, options.merge(prefix: prefix))
seed_master_repository master_repo, article_repos, edition, options
@hub.add_team_repo team.id, master_repo.full_name
end
([master_repo] + article_repos).compact
end
# Creates (or reuses) the GitHub repository for one author's article,
# named "<prefix><month>-<author>". Prompts for confirmation unless
# options[:batch]. Returns the repo resource with .author attached, or
# nil when the user declines.
def create_article_repository org, author, edition, options = {}
author_resource = @hub.user author
author_name = author_resource.name
# Initials are derived from the capitalized words of the author's name
author_resource.initials = author_name.gsub(/(?:^|\s)([A-Z])[^\s]*/, '\1')
repo_name = '%s%s-%s' % [options[:prefix], edition.month, author]
repo_qname = [org, repo_name] * '/'
repo_desc = '%s\'s %s article for %s' % [author_name, edition.month_formatted, edition.periodical.name]
# Reuse an existing repository rather than recreating it.
# NOTE(review): the bare rescue assumes @hub.repo raises when the repo is
# missing, but it also swallows unrelated StandardErrors — worth confirming.
begin
repo = @hub.repo repo_qname
say_warning %(The repository #{repo_qname} for #{author_name} already exists)
repo.author = author_resource
return repo
rescue; end
return unless options[:batch] || (agree %(Create the repository #{colorize repo_qname, :bold} for #{colorize author_name, :bold}? [y/n] ))
repo = @hub.create_repo repo_name,
organization: org,
homepage: edition.periodical.url,
description: repo_desc,
has_wiki: false,
has_issues: false,
has_downloads: false,
private: (@repository_access == :private),
auto_init: true
say_ok %(Successfully created the repository #{repo_qname})
repo.author = author_resource
repo
end
# Clones the freshly-created article repository into a temp dir, replaces
# the auto-generated README.md with templated README/article/bio files
# (reusing the author's most recent past bio when one exists), commits
# the seed files as @git_name/@git_email and pushes to origin/master.
# Returns the SHA of the seed commit.
def seed_article_repository repo, edition, options = {}
repo_name = repo.name
repo_qname = repo.full_name
org = repo.organization.login
templates_repo_qname = [org, [options[:prefix], 'templates'].compact.join] * '/'
last_commit_sha = nil
::Dir.mktmpdir 'rugged-' do |clone_dir|
# The repo may not be clonable immediately after creation, so retry
repo_clone = try_try_again limit: 3, wait: 1, message: 'Repository not yet available. Retrying in 1s...' do
# TODO perhaps only use the access token when calling push?
# TODO move this logic to Refined::Repository.clone_at
if ::Rugged.features.include? :https
::Rugged::Repository.clone_at %(#{clone_repository_root}#{repo_qname}.git), clone_dir
else
# Rugged built without HTTPS support: shell out to git instead
::Open3.popen3 %(git clone #{clone_repository_root}#{repo_qname}.git #{clone_dir}) do |i, o, e, t|
t.value
end
::Rugged::Repository.new clone_dir
end
end
# Pick a contact URI for the author: email, then blog, then GitHub profile
if (author_uri = repo.author.email).nil_or_empty?
if (author_uri = repo.author.blog).nil_or_empty?
author_uri = %(https://github.com/#{repo.author.login})
else
author_uri = %(http://#{author_uri}) unless author_uri.start_with? 'http'
end
end
# Values substituted into the {template_*} placeholders of the templates
template_vars = {
author_name: repo.author.name,
author_email: author_uri,
repository_name: repo.name,
repository_desc: repo.description,
repository_url: %(https://github.com/#{repo.full_name}),
edition_month: edition.month,
draft_deadline: edition.pub_date.strftime('%B 15, %Y')
}
# TODO move to a function
# TODO might want to aggregate bios & headshots into a place where they are easier to locate
# Reuse the bio from the author's most recent prior article repo, if any
author_suffix = %(-#{repo.author.login})
past_bio_contents = unless (repos = (@hub.org_repos org, type: @repository_access).select {|candidate|
candidate.name != repo_name && (candidate.name.end_with? author_suffix) && (contents? candidate.full_name, 'bio.adoc')
}).empty?
::Base64.decode64 @hub.contents(repos.map(&:full_name).sort.last, path: 'bio.adoc').content
end
seed_files = {
'README.adoc' => (template_contents templates_repo_qname, 'author-readme.adoc', template_vars),
%(#{repo_name}.adoc) => (template_contents templates_repo_qname, 'article-template.adoc', template_vars),
'bio.adoc' => (past_bio_contents || (template_contents templates_repo_qname, 'bio-template.adoc', template_vars)),
'code/.gitkeep' => '',
'images/.gitkeep' => ''
}
index = repo_clone.index
# Drop the README.md that auto_init created in favor of README.adoc
::File.unlink(::File.join repo_clone.workdir, 'README.md')
index.remove 'README.md'
seed_files.each do |filename, contents|
::FileUtils.mkdir_p ::File.join(repo_clone.workdir, (::File.dirname filename)) if filename.end_with? '/.gitkeep'
::File.open(::File.join(repo_clone.workdir, filename), 'w') {|fd| fd.write contents }
index.add path: filename, oid: (::Rugged::Blob.from_workdir repo_clone, filename), mode: 0100644
end
commit_tree = index.write_tree repo_clone
index.write
commit_author = { name: @git_name, email: @git_email, time: ::Time.now }
::Rugged::Commit.create repo_clone,
author: commit_author,
committer: commit_author,
message: 'Add README, seed article and bio',
parents: [repo_clone.head.target],
tree: commit_tree,
update_ref: 'HEAD'
# TODO move this to logic to Refined::Repository.push
if ::Rugged.features.include? :https
repo_clone.push 'origin', ['refs/heads/master']
else
::Open3.popen3 'git push origin master', chdir: repo_clone.workdir do |i, o, e, t|
t.value
end
end
# NOTE backwards compatibility hack for 0.19.0
unless (last_commit_sha = repo_clone.head.target).is_a? String
last_commit_sha = repo_clone.head.target_id
end
end
last_commit_sha
end
# Deletes every listed author's article repository for the edition by
# delegating to delete_article_repository one author at a time.
def delete_article_repositories org, authors, edition, options = {}
  authors.each {|author| delete_article_repository org, author, edition, options }
end
# Deletes a single author's article repository ("<slug>-<month>-<author>")
# after an interactive confirmation (skipped when options[:batch]).
# Warns and returns early when the repository does not exist.
def delete_article_repository org, author, edition, options = {}
prefix = (slug = edition.periodical.slug) ? %(#{slug}-) : nil
repo_name = '%s%s-%s' % [prefix, edition.month, author]
repo_qname = [org, repo_name] * '/'
unless @hub.repository? repo_qname
say_warning %(The repository #{repo_qname} does not exist.)
return
end
return unless options[:batch] || (agree %(Are you *#{colorize 'absolutely', :underline}* sure you want to delete the repository #{colorize repo_qname, :bold}? [y/n] ))
# NOTE If OAuth is used, 'delete_repo' scope is required
# QUESTION should we remove the author from the contributor team?
if @hub.delete_repo repo_qname
say_ok %(Successfully deleted the repository #{repo_qname})
else
# NOTE this likely happens because the client isn't authenticated or doesn't have the delete_repo scope
say_warning %(The repository #{repo_qname} could not be deleted)
end
end
# Deletes every org repository whose name starts with "<slug>-<month>"
# (master and article repos alike), confirming each one unless
# options[:batch]. Temporarily enables auto-pagination on the hub client
# so all repositories are listed; the previous setting is restored by
# the ensure clause even when an error is raised.
def delete_all_article_repositories org, edition, options = {}
previous_auto_paginate = @hub.auto_paginate
@hub.auto_paginate = true
root_name = [edition.periodical.slug, edition.month].compact * '-'
(@hub.org_repos org, type: @repository_access).select {|repo| repo.name.start_with? root_name }.each do |repo|
repo_qname = repo.full_name
next unless options[:batch] || (agree %(Are you *#{colorize 'absolutely', :underline}* sure you want to delete the repository #{colorize repo_qname, :bold}? [y/n] ))
# NOTE If OAuth is used, 'delete_repo' scope is required
# QUESTION should we remove the author from the contributor team?
if @hub.delete_repo repo_qname
say_ok %(Successfully deleted the repository #{repo_qname})
else
# NOTE this likely happens because the client isn't authenticated or doesn't have the delete_repo scope
say_warning %(The repository #{repo_qname} could not be deleted)
end
end
ensure
@hub.auto_paginate = previous_auto_paginate
end
# Creates (or reuses) the master repository "<prefix><month>" for the
# edition. Prompts for confirmation unless options[:batch]. Returns the
# repo resource, or nil when the user declines.
def create_master_repository org, edition, options = {}
repo_name = [options[:prefix], edition.month].join
repo_qname = [org, repo_name] * '/'
repo_desc = '%s issue of %s' % [edition.month_formatted, edition.periodical.name]
# Reuse an existing repository rather than recreating it.
# NOTE(review): bare rescue assumes @hub.repo raises when missing — it also
# swallows unrelated StandardErrors.
begin
repo = @hub.repo repo_qname
say_warning %(The master repository #{repo_qname} already exists)
return repo
rescue; end
return unless options[:batch] || (agree %(Create the master repository #{colorize repo_qname, :bold}? [y/n] ))
repo = @hub.create_repo repo_name,
organization: org,
homepage: edition.periodical.url,
description: repo_desc,
has_wiki: false,
has_issues: false,
has_downloads: false,
private: (@repository_access == :private),
auto_init: true
say_ok %(Successfully created the repository #{repo_qname})
repo
end
#--
# TODO stub publisher's letter
# NOTE update submodules using
# $ git submodule foreach git pull origin master
# Clones the master repository, builds the master AsciiDoc document that
# includes each article, links each article repository as a submodule
# pinned to its seed commit, removes the auto-generated README.md, then
# commits as @git_name/@git_email and pushes to origin/master.
def seed_master_repository repo, article_repos, edition, options = {}
repo_name = repo.name
repo_qname = repo.full_name
master_doc_filename = %(#{repo_name}.adoc)
::Dir.mktmpdir 'rugged-' do |clone_dir|
# The repo may not be clonable immediately after creation, so retry
repo_clone = try_try_again limit: 3, wait: 1, message: 'Repository not yet available. Retrying in 1s...' do
# TODO perhaps only use the access token when calling push?
# TODO move this logic to Refined::Repository.clone_at
if ::Rugged.features.include? :https
::Rugged::Repository.clone_at %(#{clone_repository_root}#{repo_qname}.git), clone_dir
else
# Rugged built without HTTPS support: shell out to git instead
::Open3.popen3 %(git clone #{clone_repository_root}#{repo_qname}.git #{clone_dir}) do |i, o, e, t|
t.value
end
::Rugged::Repository.new clone_dir
end
end
author_names = article_repos.map {|r| r.author.name }
# Document header: title, author list, revision line and attributes
master_doc_content = <<-EOS.chomp
= #{edition.periodical.name} - #{edition.month_formatted}
#{author_names * '; '}
v#{edition.number}, #{edition.pub_date.xmlschema}
:doctype: book
:producer: #{edition.periodical.producer}
EOS
index = repo_clone.index
# Append per-article attributes and an include directive for each repo
article_repos.each do |article_repo|
article_repo_name = article_repo.name
article_repo_qname = article_repo.full_name
author_initials = article_repo.author.initials
master_doc_content = <<-EOS.chomp
#{master_doc_content}
:codedir: #{article_repo_name}/code
:imagesdir: #{article_repo_name}/images
:listing-caption: Listing #{author_initials} -
:figure-caption: Figure #{author_initials} -
:idprefix: #{author_initials.downcase}_
include::#{article_repo_name}/#{article_repo_name}.adoc[]
EOS
::Refined::Submodule.add repo_clone,
article_repo_name,
%(#{submodule_repository_root}#{article_repo_qname}.git),
article_repo.last_commit_sha,
index: index
end
::File.open(::File.join(repo_clone.workdir, master_doc_filename), 'w') {|fd| fd.write master_doc_content }
index.add path: master_doc_filename, oid: (::Rugged::Blob.from_workdir repo_clone, master_doc_filename), mode: 0100644
# Empty jacket/ directory kept in git via a .gitkeep placeholder
::Dir.mkdir ::File.join(repo_clone.workdir, 'jacket')
::File.open(::File.join(repo_clone.workdir, 'jacket/.gitkeep'), 'w') {|fd| fd.write '' }
index.add path: 'jacket/.gitkeep', oid: (::Rugged::Blob.from_workdir repo_clone, 'jacket/.gitkeep'), mode: 0100644
::File.unlink(::File.join repo_clone.workdir, 'README.md')
index.remove 'README.md'
commit_tree = index.write_tree repo_clone
index.write
commit_author = { name: @git_name, email: @git_email, time: ::Time.now }
::Rugged::Commit.create repo_clone,
author: commit_author,
committer: commit_author,
message: 'Seed master document and link article repositories as submodules',
parents: [repo_clone.head.target],
tree: commit_tree,
update_ref: 'HEAD'
# TODO move this to logic to Refined::Repository.push
if ::Rugged.features.include? :https
repo_clone.push 'origin', ['refs/heads/master']
else
::Open3.popen3 'git push origin master', chdir: repo_clone.workdir do |i, o, e, t|
t.value
end
end
end
end
# QUESTION should we move template_contents to an Editions::TemplateManager class?
# Fetches a template by +path+, preferring the copy stored in the GitHub
# templates repository +repo+ and falling back to the bundled copy under
# DATADIR/templates when the API reports the file missing. Occurrences of
# {template_<key>} are substituted from +vars+ (symbol keys).
def template_contents repo, path, vars = {}
  raw = begin
    ::Base64.decode64 @hub.contents(repo, path: path).content
  rescue ::Octokit::NotFound
    ::File.read ::File.join(DATADIR, 'templates', path)
  end
  return raw if vars.nil_or_empty?
  # TODO move regexp to constant
  raw.gsub(/\{template_(.*?)\}/) { vars[$1.to_sym] }
end
# True when +path+ exists in the GitHub repository +repo+; false when the
# API raises Octokit::NotFound (HTTP 404). Any other API error propagates.
def contents? repo, path
  @hub.contents repo, path: path
  true
rescue ::Octokit::NotFound
  false
end
# TODO move me to a utility mixin
# Runs the given block, retrying on any StandardError up to
# options[:limit] times (default 3), sleeping options[:wait] seconds
# (default 1) between attempts and emitting options[:message] via
# say_warning before each retry. Re-raises once the limit is exhausted;
# returns the block's value on success.
def try_try_again options = {}
  max_retries = options[:limit] || 3
  pause = options[:wait] || 1
  notice = options[:message] || 'Retrying...'
  failures = 0
  begin
    yield
  rescue => e
    raise e if failures >= max_retries
    failures += 1
    say_warning notice
    sleep pause if pause > 0
    retry
  end
end
end
end
Revise contents of master doc
module Editions
  # Manages the GitHub repositories for one edition of a periodical:
  # creates and seeds a per-author article repository for each contributor,
  # creates the master (book) repository, links each article repository into
  # the master as a git submodule, and supports deleting the lot.
  # QUESTION make batch mode a field? read git_name, git_email and repository_access from config param?
  class RepositoryManager
    # hub               - authenticated GitHub API client (Octokit-style)
    # git_name/git_email - identity used for commits created by this manager
    # repository_access - :public or :private; drives repo visibility and submodule URLs
    def initialize hub, git_name, git_email, repository_access = :public
      @hub = hub
      @git_name = git_name
      @git_email = git_email
      @repository_access = repository_access.to_sym
    end

    # HTTPS clone root with the access token embedded for authenticated clone/push.
    def clone_repository_root
      %(https://#{@hub.access_token}:x-oauth-basic@github.com/)
    end

    # Submodule URL root: SSH for private repos, anonymous HTTPS for public ones.
    def submodule_repository_root
      @repository_access == :private ? 'git@github.com:' : 'https://github.com/'
    end

    # Finds the org team named 'Contributors' (case-insensitive); creates it
    # when missing and options[:auto_create] is set. May return nil otherwise.
    def contributor_team org, options = {}
      unless (team = (@hub.org_teams org).find {|team| team.name.downcase == 'contributors' })
        team = create_contributor_team org, options if options[:auto_create]
      end
      team
    end

    def create_contributor_team org, options = {}
      @hub.create_team org, name: 'Contributors', permission: (options[:permission] || 'pull')
    end

    # End-to-end setup for an edition: one seeded article repo per author,
    # each added to the Contributors team, then a seeded master repo linking
    # them all. Returns the created repos (master first), nils removed.
    def create_article_repositories org, authors, edition, options = {}
      prefix = (slug = edition.periodical.slug) ? %(#{slug}-) : nil
      options = options.merge prefix: prefix
      team = contributor_team org, auto_create: true
      article_repos = authors.map do |author|
        # FIXME handle case the repository already exists
        if (article_repo = create_article_repository org, author, edition, options)
          article_repo.last_commit_sha = seed_article_repository article_repo, edition, options
          @hub.add_team_member team.id, author
          @hub.add_team_repo team.id, article_repo.full_name
        end
        article_repo
      end.compact
      if (master_repo = create_master_repository org, edition, options.merge(prefix: prefix))
        seed_master_repository master_repo, article_repos, edition, options
        @hub.add_team_repo team.id, master_repo.full_name
      end
      ([master_repo] + article_repos).compact
    end

    # Creates <prefix><month>-<author> for one author (after confirmation
    # unless options[:batch]); if it already exists, returns the existing
    # repo. The author's API resource is attached to the returned repo, with
    # initials derived from the capitalized words of the display name.
    def create_article_repository org, author, edition, options = {}
      author_resource = @hub.user author
      author_name = author_resource.name
      # e.g. "Ada B. Lovelace" -> "ABL"
      author_resource.initials = author_name.gsub(/(?:^|\s)([A-Z])[^\s]*/, '\1')
      repo_name = '%s%s-%s' % [options[:prefix], edition.month, author]
      repo_qname = [org, repo_name] * '/'
      repo_desc = '%s\'s %s article for %s' % [author_name, edition.month_formatted, edition.periodical.name]
      begin
        repo = @hub.repo repo_qname
        say_warning %(The repository #{repo_qname} for #{author_name} already exists)
        repo.author = author_resource
        return repo
      rescue; end
      return unless options[:batch] || (agree %(Create the repository #{colorize repo_qname, :bold} for #{colorize author_name, :bold}? [y/n] ))
      repo = @hub.create_repo repo_name,
          organization: org,
          homepage: edition.periodical.url,
          description: repo_desc,
          has_wiki: false,
          has_issues: false,
          has_downloads: false,
          private: (@repository_access == :private),
          auto_init: true
      say_ok %(Successfully created the repository #{repo_qname})
      repo.author = author_resource
      repo
    end

    # Clones the freshly-created article repo, replaces the auto-generated
    # README.md with templated README/article/bio files (reusing the author's
    # bio from a previous edition when one exists), commits, and pushes.
    # Returns the SHA of the seed commit.
    def seed_article_repository repo, edition, options = {}
      repo_name = repo.name
      repo_qname = repo.full_name
      org = repo.organization.login
      templates_repo_qname = [org, [options[:prefix], 'templates'].compact.join] * '/'
      last_commit_sha = nil
      ::Dir.mktmpdir 'rugged-' do |clone_dir|
        # GitHub may not serve the repo immediately after creation; retry.
        repo_clone = try_try_again limit: 3, wait: 1, message: 'Repository not yet available. Retrying in 1s...' do
          # TODO perhaps only use the access token when calling push?
          # TODO move this logic to Refined::Repository.clone_at
          if ::Rugged.features.include? :https
            ::Rugged::Repository.clone_at %(#{clone_repository_root}#{repo_qname}.git), clone_dir
          else
            # Rugged built without HTTPS support: shell out to git instead.
            ::Open3.popen3 %(git clone #{clone_repository_root}#{repo_qname}.git #{clone_dir}) do |i, o, e, t|
              t.value
            end
            ::Rugged::Repository.new clone_dir
          end
        end
        # Best contact URI: email, else blog (http-prefixed), else GitHub profile.
        if (author_uri = repo.author.email).nil_or_empty?
          if (author_uri = repo.author.blog).nil_or_empty?
            author_uri = %(https://github.com/#{repo.author.login})
          else
            author_uri = %(http://#{author_uri}) unless author_uri.start_with? 'http'
          end
        end
        template_vars = {
          author_name: repo.author.name,
          # NOTE(review): may hold a URL rather than an email when no email is public
          author_email: author_uri,
          repository_name: repo.name,
          repository_desc: repo.description,
          repository_url: %(https://github.com/#{repo.full_name}),
          edition_month: edition.month,
          draft_deadline: edition.pub_date.strftime('%B 15, %Y')
        }
        # TODO move to a function
        # TODO might want to aggregate bios & headshots into a place where they are easier to locate
        # Reuse the bio committed in the author's most recent previous article repo, if any.
        author_suffix = %(-#{repo.author.login})
        past_bio_contents = unless (repos = (@hub.org_repos org, type: @repository_access).select {|candidate|
          candidate.name != repo_name && (candidate.name.end_with? author_suffix) && (contents? candidate.full_name, 'bio.adoc')
        }).empty?
          ::Base64.decode64 @hub.contents(repos.map(&:full_name).sort.last, path: 'bio.adoc').content
        end
        seed_files = {
          'README.adoc' => (template_contents templates_repo_qname, 'author-readme.adoc', template_vars),
          %(#{repo_name}.adoc) => (template_contents templates_repo_qname, 'article-template.adoc', template_vars),
          'bio.adoc' => (past_bio_contents || (template_contents templates_repo_qname, 'bio-template.adoc', template_vars)),
          'code/.gitkeep' => '',
          'images/.gitkeep' => ''
        }
        index = repo_clone.index
        # Drop the auto_init README.md in favor of the AsciiDoc one.
        ::File.unlink(::File.join repo_clone.workdir, 'README.md')
        index.remove 'README.md'
        seed_files.each do |filename, contents|
          ::FileUtils.mkdir_p ::File.join(repo_clone.workdir, (::File.dirname filename)) if filename.end_with? '/.gitkeep'
          ::File.open(::File.join(repo_clone.workdir, filename), 'w') {|fd| fd.write contents }
          index.add path: filename, oid: (::Rugged::Blob.from_workdir repo_clone, filename), mode: 0100644
        end
        commit_tree = index.write_tree repo_clone
        index.write
        commit_author = { name: @git_name, email: @git_email, time: ::Time.now }
        ::Rugged::Commit.create repo_clone,
            author: commit_author,
            committer: commit_author,
            message: 'Add README, seed article and bio',
            parents: [repo_clone.head.target],
            tree: commit_tree,
            update_ref: 'HEAD'
        # TODO move this to logic to Refined::Repository.push
        if ::Rugged.features.include? :https
          repo_clone.push 'origin', ['refs/heads/master']
        else
          ::Open3.popen3 'git push origin master', chdir: repo_clone.workdir do |i, o, e, t|
            t.value
          end
        end
        # NOTE backwards compatibility hack for 0.19.0
        # (older Rugged returns the SHA string from head.target directly)
        unless (last_commit_sha = repo_clone.head.target).is_a? String
          last_commit_sha = repo_clone.head.target_id
        end
      end
      last_commit_sha
    end

    def delete_article_repositories org, authors, edition, options = {}
      authors.each do |author|
        delete_article_repository org, author, edition, options
      end
    end

    # Deletes one author's article repo after (unless options[:batch]) an
    # explicit confirmation. No-op with a warning when the repo is absent.
    def delete_article_repository org, author, edition, options = {}
      prefix = (slug = edition.periodical.slug) ? %(#{slug}-) : nil
      repo_name = '%s%s-%s' % [prefix, edition.month, author]
      repo_qname = [org, repo_name] * '/'
      unless @hub.repository? repo_qname
        say_warning %(The repository #{repo_qname} does not exist.)
        return
      end
      return unless options[:batch] || (agree %(Are you *#{colorize 'absolutely', :underline}* sure you want to delete the repository #{colorize repo_qname, :bold}? [y/n] ))
      # NOTE If OAuth is used, 'delete_repo' scope is required
      # QUESTION should we remove the author from the contributor team?
      if @hub.delete_repo repo_qname
        say_ok %(Successfully deleted the repository #{repo_qname})
      else
        # NOTE this likely happens because the client isn't authenticated or doesn't have the delete_repo scope
        say_warning %(The repository #{repo_qname} could not be deleted)
      end
    end

    # Deletes every repo of this edition (master and articles), identified by
    # the <slug>-<month> name prefix. Temporarily enables API auto-pagination
    # so the whole org listing is scanned; the original setting is restored.
    def delete_all_article_repositories org, edition, options = {}
      previous_auto_paginate = @hub.auto_paginate
      @hub.auto_paginate = true
      root_name = [edition.periodical.slug, edition.month].compact * '-'
      (@hub.org_repos org, type: @repository_access).select {|repo| repo.name.start_with? root_name }.each do |repo|
        repo_qname = repo.full_name
        next unless options[:batch] || (agree %(Are you *#{colorize 'absolutely', :underline}* sure you want to delete the repository #{colorize repo_qname, :bold}? [y/n] ))
        # NOTE If OAuth is used, 'delete_repo' scope is required
        # QUESTION should we remove the author from the contributor team?
        if @hub.delete_repo repo_qname
          say_ok %(Successfully deleted the repository #{repo_qname})
        else
          # NOTE this likely happens because the client isn't authenticated or doesn't have the delete_repo scope
          say_warning %(The repository #{repo_qname} could not be deleted)
        end
      end
    ensure
      @hub.auto_paginate = previous_auto_paginate
    end

    # Creates the <prefix><month> master repository (after confirmation unless
    # options[:batch]); returns the existing repo when it is already present.
    def create_master_repository org, edition, options = {}
      repo_name = [options[:prefix], edition.month].join
      repo_qname = [org, repo_name] * '/'
      repo_desc = '%s issue of %s' % [edition.month_formatted, edition.periodical.name]
      begin
        repo = @hub.repo repo_qname
        say_warning %(The master repository #{repo_qname} already exists)
        return repo
      rescue; end
      return unless options[:batch] || (agree %(Create the master repository #{colorize repo_qname, :bold}? [y/n] ))
      repo = @hub.create_repo repo_name,
          organization: org,
          homepage: edition.periodical.url,
          description: repo_desc,
          has_wiki: false,
          has_issues: false,
          has_downloads: false,
          private: (@repository_access == :private),
          auto_init: true
      say_ok %(Successfully created the repository #{repo_qname})
      repo
    end

    #--
    # TODO stub publisher's letter
    # NOTE update submodules using
    # $ git submodule foreach git pull origin master
    # Clones the master repo, generates the AsciiDoc master document (book
    # header plus one include:: per article), adds each article repo as a
    # git submodule pinned to its seed commit, commits, and pushes.
    def seed_master_repository repo, article_repos, edition, options = {}
      repo_name = repo.name
      repo_qname = repo.full_name
      master_doc_filename = %(#{repo_name}.adoc)
      ::Dir.mktmpdir 'rugged-' do |clone_dir|
        repo_clone = try_try_again limit: 3, wait: 1, message: 'Repository not yet available. Retrying in 1s...' do
          # TODO perhaps only use the access token when calling push?
          # TODO move this logic to Refined::Repository.clone_at
          if ::Rugged.features.include? :https
            ::Rugged::Repository.clone_at %(#{clone_repository_root}#{repo_qname}.git), clone_dir
          else
            ::Open3.popen3 %(git clone #{clone_repository_root}#{repo_qname}.git #{clone_dir}) do |i, o, e, t|
              t.value
            end
            ::Rugged::Repository.new clone_dir
          end
        end
        author_names = article_repos.map {|r| r.author.name }
        # Book-level AsciiDoc header (heredoc content must stay at column 0).
        master_doc_content = <<-EOS.chomp
= #{edition.periodical.name} - #{edition.month_formatted}
#{author_names * '; '}
v#{edition.number}, #{edition.pub_date.xmlschema}
:doctype: book
:producer: #{edition.periodical.producer}
:app-name: #{edition.periodical.name}
//:subject:
//:keywords:
//:description:
:puburl: #{edition.periodical.url}
:pubdate: #{edition.year_month}
:pubprefix: #{edition.periodical.slug}
:editionprefix: {pubprefix}-{pubdate}
:volume: #{edition.volume}
:issue: #{edition.issue}
:listing-caption: Listing
:imagesdir: images
ifdef::backend-pdf[]
:toc:
:toclevels: 1
:toc-title: Contents
endif::[]
//:front-cover-image: image:jacket/front-cover.jpg[Cover,1600,2056]
EOS
        index = repo_clone.index
        # Append per-article attribute overrides + include, and register each
        # article repository as a submodule pinned to its seed commit.
        article_repos.each do |article_repo|
          article_repo_name = article_repo.name
          article_repo_qname = article_repo.full_name
          author_initials = article_repo.author.initials
          master_doc_content = <<-EOS.chomp
#{master_doc_content}
:codedir: #{article_repo_name}/code
:imagesdir: #{article_repo_name}/images
:idprefix: #{author_initials.downcase}_
include::#{article_repo_name}/#{article_repo_name}.adoc[]
EOS
          ::Refined::Submodule.add repo_clone,
              article_repo_name,
              %(#{submodule_repository_root}#{article_repo_qname}.git),
              article_repo.last_commit_sha,
              index: index
        end
        # FIXME reset images after parse so this assignment isn't required
        master_doc_content = <<-EOS.chomp
#{master_doc_content}
:imagesdir: images
EOS
        ::File.open(::File.join(repo_clone.workdir, master_doc_filename), 'w') {|fd| fd.write master_doc_content }
        index.add path: master_doc_filename, oid: (::Rugged::Blob.from_workdir repo_clone, master_doc_filename), mode: 0100644
        # Empty jacket/ directory kept in git via a .gitkeep placeholder.
        ::Dir.mkdir ::File.join(repo_clone.workdir, 'jacket')
        ::File.open(::File.join(repo_clone.workdir, 'jacket/.gitkeep'), 'w') {|fd| fd.write '' }
        index.add path: 'jacket/.gitkeep', oid: (::Rugged::Blob.from_workdir repo_clone, 'jacket/.gitkeep'), mode: 0100644
        ::File.unlink(::File.join repo_clone.workdir, 'README.md')
        index.remove 'README.md'
        commit_tree = index.write_tree repo_clone
        index.write
        commit_author = { name: @git_name, email: @git_email, time: ::Time.now }
        ::Rugged::Commit.create repo_clone,
            author: commit_author,
            committer: commit_author,
            message: 'Seed master document and link article repositories as submodules',
            parents: [repo_clone.head.target],
            tree: commit_tree,
            update_ref: 'HEAD'
        # TODO move this to logic to Refined::Repository.push
        if ::Rugged.features.include? :https
          repo_clone.push 'origin', ['refs/heads/master']
        else
          ::Open3.popen3 'git push origin master', chdir: repo_clone.workdir do |i, o, e, t|
            t.value
          end
        end
      end
    end

    # QUESTION should we move template_contents to an Editions::TemplateManager class?
    # Fetches a template from the templates repo, falling back to the bundled
    # copy under DATADIR/templates; substitutes {template_<key>} from vars.
    def template_contents repo, path, vars = {}
      content = begin
        ::Base64.decode64 @hub.contents(repo, path: path).content
      rescue ::Octokit::NotFound
        ::File.read ::File.join(DATADIR, 'templates', path)
      end
      unless vars.nil_or_empty?
        # TODO move regexp to constant
        content = content.gsub(/\{template_(.*?)\}/) { vars[$1.to_sym] }
      end
      content
    end

    # True when +path+ exists in +repo+ on GitHub; false on a 404.
    def contents? repo, path
      @hub.contents repo, path: path
      true
    rescue ::Octokit::NotFound
      false
    end

    # TODO move me to a utility mixin
    # Retries the block on any StandardError up to options[:limit] times,
    # sleeping options[:wait] seconds between attempts; re-raises afterwards.
    def try_try_again options = {}
      attempts = 0
      retry_limit = options[:limit] || 3
      retry_wait = options[:wait] || 1
      retry_message = options[:message] || 'Retrying...'
      begin
        yield
      rescue => e
        if attempts < retry_limit
          attempts += 1
          say_warning retry_message
          sleep retry_wait if retry_wait > 0
          retry
        else
          raise e
        end
      end
    end
  end
end
|
# Homebrew formula for CImg, a header-only C++ image-processing library.
class Cimg < Formula
  desc "C++ toolkit for image processing"
  homepage "https://cimg.eu/"
  url "https://cimg.eu/files/CImg_3.1.0.zip"
  sha256 "8c5392ac57764bdaeced142f12c8a79c8dd93277dc61e90dca6b7e1e25cdb01a"
  license "CECILL-2.0"

  livecheck do
    url "https://cimg.eu/files/"
    regex(/href=.*?CImg[._-]v?(\d+(?:\.\d+)+)\.zip/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, all: "b7283b2ee43a8e2c3b54937e4592f26a934c7c7d64336af9d25ed1fc6ec51743"
  end

  on_linux do
    depends_on "gcc" # C++ 17 is required
  end

  fails_with gcc: "5"

  # Header-only library: install the single header plus licenses and extras.
  def install
    include.install "CImg.h"
    prefix.install "Licence_CeCILL-C_V1-en.txt", "Licence_CeCILL_V2-en.txt"
    pkgshare.install "examples", "plugins"
  end

  # Build and run one bundled example against the installed header.
  test do
    cp_r pkgshare/"examples", testpath
    cp_r pkgshare/"plugins", testpath
    cp include/"CImg.h", testpath
    system "make", "-C", "examples", "image2ascii"
    system "examples/image2ascii"
  end
end
cimg: update 3.1.0 bottle.
# Homebrew formula for CImg, a header-only C++ image-processing library.
class Cimg < Formula
  desc "C++ toolkit for image processing"
  homepage "https://cimg.eu/"
  url "https://cimg.eu/files/CImg_3.1.0.zip"
  sha256 "8c5392ac57764bdaeced142f12c8a79c8dd93277dc61e90dca6b7e1e25cdb01a"
  license "CECILL-2.0"

  livecheck do
    url "https://cimg.eu/files/"
    regex(/href=.*?CImg[._-]v?(\d+(?:\.\d+)+)\.zip/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, all: "e00b458f65da683573c2933c06e8cba72f9fda047bb8ab5a46b44fdd6290ae67"
  end

  on_linux do
    depends_on "gcc" # C++ 17 is required
  end

  fails_with gcc: "5"

  # Header-only library: install the single header plus licenses and extras.
  def install
    include.install "CImg.h"
    prefix.install "Licence_CeCILL-C_V1-en.txt", "Licence_CeCILL_V2-en.txt"
    pkgshare.install "examples", "plugins"
  end

  # Build and run one bundled example against the installed header.
  test do
    cp_r pkgshare/"examples", testpath
    cp_r pkgshare/"plugins", testpath
    cp include/"CImg.h", testpath
    system "make", "-C", "examples", "image2ascii"
    system "examples/image2ascii"
  end
end
|
module Elastictastic
  # Runtime configuration for the Elastictastic client: hosts, default index,
  # batching, timeouts, backoff, JSON engine, and extra Faraday middlewares.
  class Configuration
    attr_writer :hosts, :default_index, :auto_refresh, :default_batch_size, :adapter
    attr_accessor :logger, :connect_timeout, :request_timeout, :backoff_threshold, :backoff_start, :backoff_max
    attr_reader :extra_middlewares

    def initialize
      @extra_middlewares = []
    end

    # Convenience setter for a single-host deployment.
    def host=(host)
      @hosts = [host]
    end

    # Host URL strings; defaults to the host derived from ELASTICSEARCH_URL
    # or http://localhost:9200.
    def hosts
      @hosts ||= [default_host]
    end

    def adapter
      @adapter ||= :net_http
    end

    # Index name: an explicit assignment wins; otherwise the first path
    # segment of ELASTICSEARCH_URL; otherwise 'default'.
    def default_index
      return @default_index if defined? @default_index
      if url_from_env && url_from_env.path =~ /^\/([^\/]+)/
        @default_index = $1
      else
        @default_index = 'default'
      end
    end

    def auto_refresh
      !!@auto_refresh
    end

    def default_batch_size
      @default_batch_size ||= 100
    end

    # Validates the engine by round-tripping it through MultiJson, then
    # restores the process-wide engine so configuring has no side effects.
    def json_engine=(json_engine)
      original_engine = MultiJson.engine
      MultiJson.engine = json_engine
      @json_engine = MultiJson.engine
    ensure
      MultiJson.engine = original_engine
    end

    def json_engine
      @json_engine || MultiJson.engine
    end

    def use_middleware(*args)
      @extra_middlewares << args
    end

    def presets
      @presets ||= ActiveSupport::HashWithIndifferentAccess.new
    end

    # NOTE merges into any existing presets rather than replacing them.
    def presets=(new_presets)
      presets.merge!(new_presets)
    end

    private

    def default_host
      if url_from_env
        # FIX: URI's .build returns a URI object; convert to a string so both
        # branches (and therefore #hosts) consistently yield URL strings.
        url_from_env.class.build(
          :host => url_from_env.host,
          :port => url_from_env.port,
          :path => url_from_env.path
        ).to_s
      else
        'http://localhost:9200'
      end
    end

    # Memoized parse of ELASTICSEARCH_URL; nil when the variable is unset.
    def url_from_env
      return @url_from_env if defined? @url_from_env
      @url_from_env =
        if ENV['ELASTICSEARCH_URL']
          URI.parse(ENV['ELASTICSEARCH_URL'])
        end
    end

    ActiveModel::Observing::ClassMethods.public_instance_methods(false).each do |method|
      delegate method, :to => :"::Elastictastic::Observing"
    end
  end
end
Add missing #to_s call
module Elastictastic
  # Runtime configuration for the Elastictastic client: hosts, default index,
  # batching, timeouts, backoff, JSON engine, and extra Faraday middlewares.
  class Configuration
    attr_writer :hosts, :default_index, :auto_refresh, :default_batch_size, :adapter
    attr_accessor :logger, :connect_timeout, :request_timeout, :backoff_threshold, :backoff_start, :backoff_max
    attr_reader :extra_middlewares

    def initialize
      @extra_middlewares = []
    end

    # Convenience setter for a single-host deployment.
    def host=(host)
      @hosts = [host]
    end

    # Host URL strings; defaults to the host derived from ELASTICSEARCH_URL
    # or http://localhost:9200.
    def hosts
      @hosts ||= [default_host]
    end

    def adapter
      @adapter ||= :net_http
    end

    # Index name: an explicit assignment wins; otherwise the first path
    # segment of ELASTICSEARCH_URL; otherwise 'default'.
    def default_index
      return @default_index if defined? @default_index
      if url_from_env && url_from_env.path =~ /^\/([^\/]+)/
        @default_index = $1
      else
        @default_index = 'default'
      end
    end

    def auto_refresh
      !!@auto_refresh
    end

    def default_batch_size
      @default_batch_size ||= 100
    end

    # Validates the engine by round-tripping it through MultiJson, then
    # restores the process-wide engine so configuring has no side effects.
    def json_engine=(json_engine)
      original_engine = MultiJson.engine
      MultiJson.engine = json_engine
      @json_engine = MultiJson.engine
    ensure
      MultiJson.engine = original_engine
    end

    def json_engine
      @json_engine || MultiJson.engine
    end

    def use_middleware(*args)
      @extra_middlewares << args
    end

    def presets
      @presets ||= ActiveSupport::HashWithIndifferentAccess.new
    end

    # NOTE merges into any existing presets rather than replacing them.
    def presets=(new_presets)
      presets.merge!(new_presets)
    end

    private

    # to_s keeps this branch consistent with the literal-string fallback.
    def default_host
      if url_from_env
        url_from_env.class.build(
          :host => url_from_env.host,
          :port => url_from_env.port,
          :path => url_from_env.path
        ).to_s
      else
        'http://localhost:9200'
      end
    end

    # Memoized parse of ELASTICSEARCH_URL; nil when the variable is unset.
    def url_from_env
      return @url_from_env if defined? @url_from_env
      @url_from_env =
        if ENV['ELASTICSEARCH_URL']
          URI.parse(ENV['ELASTICSEARCH_URL'])
        end
    end

    ActiveModel::Observing::ClassMethods.public_instance_methods(false).each do |method|
      delegate method, :to => :"::Elastictastic::Observing"
    end
  end
end
|
# Homebrew formula for GNU cpio.
class Cpio < Formula
  desc "Copies files into or out of a cpio or tar archive"
  homepage "https://www.gnu.org/software/cpio/"
  url "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.bz2"
  mirror "https://ftpmirror.gnu.org/cpio/cpio-2.13.tar.bz2"
  sha256 "eab5bdc5ae1df285c59f2a4f140a98fc33678a0bf61bdba67d9436ae26b46f6d"
  license "GPL-3.0-or-later"
  revision 3

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "f22c470e6b85be477298907f64e9d6c0c8261d81f244ee9f7977b37f64bc2d53"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "5f0f5625adf815f5dcbce9016b20df0ea0f41475343954e057422f9bd006ab87"
    sha256 cellar: :any_skip_relocation, monterey:       "1b04d03bdfdb091451d3c6602e7dfbefe18c3a719c5382c6636245b9d5403c91"
    sha256 cellar: :any_skip_relocation, big_sur:        "dc6d56c513f95660d835533ad45717ac448767692216f22f39971e28da045bb0"
    sha256 cellar: :any_skip_relocation, catalina:       "63775ad863bde22691bf678e67982fbb21488c2e86843133bd34461aa5a61586"
    sha256 x86_64_linux: "42071ea523978d2b2ea5a5129cb29084fef728f270577709fc1595d5a8cfbef2"
  end

  keg_only :shadowed_by_macos, "macOS provides cpio"

  def install
    system "./configure", *std_configure_args, "--disable-silent-rules"
    system "make", "install"
    return if OS.mac?

    # Delete rmt, which causes conflict with `gnu-tar`
    (libexec/"rmt").unlink
    (man8/"rmt.8").unlink
  end

  # Archives the directory listing and checks the archive was produced.
  # NOTE(review): test.cc is written but never compiled — confirm whether it
  # is intentional (it only pads the directory that gets archived).
  test do
    (testpath/"test.cc").write <<~EOS
      #include <iostream>
      #include <string>
    EOS
    system "ls #{testpath} | #{bin}/cpio -ov > #{testpath}/directory.cpio"
    assert_path_exists "#{testpath}/directory.cpio"
  end
end
cpio: update 2.13_3 bottle.
# Homebrew formula for GNU cpio.
class Cpio < Formula
  desc "Copies files into or out of a cpio or tar archive"
  homepage "https://www.gnu.org/software/cpio/"
  url "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.bz2"
  mirror "https://ftpmirror.gnu.org/cpio/cpio-2.13.tar.bz2"
  sha256 "eab5bdc5ae1df285c59f2a4f140a98fc33678a0bf61bdba67d9436ae26b46f6d"
  license "GPL-3.0-or-later"
  revision 3

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_ventura:  "a52647dfa0fc91821508e340b67e09ddb7827b66644ef4006df040502dc5f249"
    sha256 cellar: :any_skip_relocation, arm64_monterey: "f22c470e6b85be477298907f64e9d6c0c8261d81f244ee9f7977b37f64bc2d53"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "5f0f5625adf815f5dcbce9016b20df0ea0f41475343954e057422f9bd006ab87"
    sha256 cellar: :any_skip_relocation, monterey:       "1b04d03bdfdb091451d3c6602e7dfbefe18c3a719c5382c6636245b9d5403c91"
    sha256 cellar: :any_skip_relocation, big_sur:        "dc6d56c513f95660d835533ad45717ac448767692216f22f39971e28da045bb0"
    sha256 cellar: :any_skip_relocation, catalina:       "63775ad863bde22691bf678e67982fbb21488c2e86843133bd34461aa5a61586"
    sha256 x86_64_linux: "42071ea523978d2b2ea5a5129cb29084fef728f270577709fc1595d5a8cfbef2"
  end

  keg_only :shadowed_by_macos, "macOS provides cpio"

  def install
    system "./configure", *std_configure_args, "--disable-silent-rules"
    system "make", "install"
    return if OS.mac?

    # Delete rmt, which causes conflict with `gnu-tar`
    (libexec/"rmt").unlink
    (man8/"rmt.8").unlink
  end

  # Archives the directory listing and checks the archive was produced.
  # NOTE(review): test.cc is written but never compiled — confirm whether it
  # is intentional (it only pads the directory that gets archived).
  test do
    (testpath/"test.cc").write <<~EOS
      #include <iostream>
      #include <string>
    EOS
    system "ls #{testpath} | #{bin}/cpio -ov > #{testpath}/directory.cpio"
    assert_path_exists "#{testpath}/directory.cpio"
  end
end
|
# Homebrew formula for croc, a tool for secure peer-to-peer text/file transfer.
class Croc < Formula
  desc "Securely send things from one computer to another"
  homepage "https://github.com/schollz/croc"
  url "https://github.com/schollz/croc/archive/v9.1.4.tar.gz"
  sha256 "6a7d304f06c6824c9648a588a5c6b9f7366d87a442dc536e810a078a4441c9dd"
  license "MIT"
  head "https://github.com/schollz/croc.git"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "039652baeb41b1ea5816bb37bc4b89dd0c3cd98718b62493e522ddb559ee29a3"
    sha256 cellar: :any_skip_relocation, big_sur:       "9182d66978027a376a0e32422b26e261e9283114d4f6afc78b4ca1c76d2c2a6c"
    sha256 cellar: :any_skip_relocation, catalina:      "f708b578664fa36e2d6f7e81f01ae6e882a0420a616be4fba056cf896ba7527f"
    sha256 cellar: :any_skip_relocation, mojave:        "eee107708c5a0bf821ed8608ea41a467ed892b0cd933fd7581c4389d0c8974e1"
  end

  depends_on "go" => :build

  def install
    system "go", "build", *std_go_args
  end

  # Send text from a forked child, then receive it with the same code phrase.
  test do
    fork do
      exec bin/"croc", "send", "--code=homebrew-test", "--text=mytext"
    end
    sleep 1
    # FIX: assert_match takes (expected_matcher, actual); the arguments were
    # reversed, asserting the expectation against the output instead.
    assert_match "mytext", shell_output("#{bin}/croc --yes homebrew-test").chomp
  end
end
croc: update 9.1.4 bottle.
# Homebrew formula for croc, a tool for secure peer-to-peer text/file transfer.
class Croc < Formula
  desc "Securely send things from one computer to another"
  homepage "https://github.com/schollz/croc"
  url "https://github.com/schollz/croc/archive/v9.1.4.tar.gz"
  sha256 "6a7d304f06c6824c9648a588a5c6b9f7366d87a442dc536e810a078a4441c9dd"
  license "MIT"
  head "https://github.com/schollz/croc.git"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "3cb115adbe75c287533247f54f18b5b76f8e32f77ae977085a28d5fdb657dfcb"
    sha256 cellar: :any_skip_relocation, big_sur:       "f4ede664b391c78c5addb6b10c423db872a879f0b7d9324aed23628b8d28aa5b"
    sha256 cellar: :any_skip_relocation, catalina:      "00f2a6b8007c5fc6d0e3d49662933a9a26dc63e7933742a99694ebb5192f96fb"
    sha256 cellar: :any_skip_relocation, mojave:        "c1c3cea350756f3398805c61adf045884465fa4367f7477a2522170099683839"
  end

  depends_on "go" => :build

  def install
    system "go", "build", *std_go_args
  end

  # Send text from a forked child, then receive it with the same code phrase.
  test do
    fork do
      exec bin/"croc", "send", "--code=homebrew-test", "--text=mytext"
    end
    sleep 1
    # FIX: assert_match takes (expected_matcher, actual); the arguments were
    # reversed, asserting the expectation against the output instead.
    assert_match "mytext", shell_output("#{bin}/croc --yes homebrew-test").chomp
  end
end
|
module Evertils
  module Common
    module Entity
      # Wraps an Evernote note: creation (optionally from a YAML file, with
      # attached media resources), lookup by title, move/tag/share/delete.
      class Note < Entity::Base
        # Builds a note from a YAML file with required 'title' and 'body'
        # keys and an optional 'parent' notebook. Prints (does not raise)
        # an error message for a missing file or missing fields.
        # @since 0.2.0
        def create_from_yml(full_path)
          begin
            if File.exist? full_path
              conf = YAML::load(File.open(full_path))
              required = %w(title body)
              if has_required_fields(conf, required)
                create(conf['title'], conf['body'], conf['parent'])
              else
                raise ArgumentError, 'Configuration file is missing some required fields'
              end
            else
              raise ArgumentError, "File not found: #{full_path}"
            end
          rescue ArgumentError => e
            puts e.message
          end
        end

        # Creates a note titled +title+ with ENML-wrapped +body+ in
        # +parent_notebook+ (name, entity, or nil for the default notebook),
        # optionally attaching +file+ (path or array of paths) and sharing.
        # Returns self when the note was created, nil otherwise.
        # @since 0.2.0
        def create(title, body, parent_notebook = nil, file = nil, share_note = false, created_on = nil)
          @entity = nil
          # Create note object
          our_note = ::Evernote::EDAM::Type::Note.new
          our_note.resources = []
          our_note.tagNames = []
          # a file was requested, lets prepare it for storage
          if file
            if file.is_a? Array
              file.each do |f|
                media_resource = ENML.new(f)
                body.concat(media_resource.embeddable_element)
                our_note.resources << media_resource.element
              end
            else
              media_resource = ENML.new(file)
              body.concat(media_resource.embeddable_element)
              our_note.resources << media_resource.element
            end
          end
          # only join when required
          body = body.join if body.is_a? Array
          n_body = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
          n_body += "<!DOCTYPE en-note SYSTEM \"http://xml.evernote.com/pub/enml2.dtd\">"
          n_body += "<en-note>#{body}</en-note>"
          # setup note properties
          our_note.title = title
          our_note.content = n_body
          # NOTE(review): guard looks inverted — it assigns created whenever
          # created_on is NOT a DateTime (including nil); confirm intent.
          our_note.created = created_on if !created_on.is_a?(DateTime)
          if !parent_notebook.is_a? Evertils::Common::Entity::Notebook
            nb = Entity::Notebook.new
            parent_notebook = nb.find(parent_notebook)
            parent_notebook = nb.default if parent_notebook.nil?
          end
          # parent_notebook is optional; if omitted, default notebook is used
          our_note.notebookGuid = parent_notebook.prop(:guid)
          # Attempt to create note in Evernote account
          begin
            @entity = @evernote.call(:createNote, our_note)
            share if share_note
          rescue ::Evernote::EDAM::Error::EDAMUserException => edue
            ## Something was wrong with the note data
            ## See EDAMErrorCode enumeration for error code explanation
            ## http://dev.evernote.com/documentation/reference/Errors.html#Enum_EDAMErrorCode
            Notify.error "EDAMUserException: #{edue}\nCode #{edue.errorCode}: #{edue.parameter}"
          rescue ::Evernote::EDAM::Error::EDAMNotFoundException
            ## Parent Notebook GUID doesn't correspond to an actual notebook
            Notify.error "EDAMNotFoundException: Invalid parent notebook GUID"
          rescue ArgumentError => e
            Notify.error e.backtrace
          end
          Notify.success("#{parent_notebook.prop(:stack)}/#{parent_notebook.prop(:name)}/#{our_note.title} created")
          self if @entity
        end

        # True when a note titled +name+ exists.
        # @since 0.2.0
        def exists?(name)
          !find(name).nil?
        end

        # Moves the current note to the trash.
        # @since 0.2.0
        def destroy
          @evernote.call(:deleteNote, @entity.guid)
        end

        # Permanently deletes the current note.
        # @since 0.2.9
        def expunge!
          @evernote.call(:expungeNote, @entity.guid)
        end

        # @deprecated use #expunge!
        # @since 0.2.0
        def expunge
          deprecation_notice('0.2.9', 'Replaced with expunge! to better follow Ruby standards for method names. Will be removed in 0.3.5')
          @evernote.call(:expungeNote, @entity.guid)
        end

        # Moves the current note into the named notebook; raises when the
        # notebook cannot be found.
        # @since 0.2.9
        def move_to(notebook)
          nb = Evertils::Common::Manager::Notebook.new
          target = nb.find(notebook)
          raise "Notebook not found: #{notebook}" if target.entity.nil?
          @entity.notebookGuid = target.prop(:guid)
          @evernote.call(:updateNote, @entity)
        end

        # @since 0.2.8
        def share
          @evernote.call(:shareNote, @entity.guid)
        end

        # @since 0.2.8
        def unshare
          @evernote.call(:stopSharingNote, @entity.guid)
        end

        # Finds the first note whose title equals +name+ (server search is
        # intitle-scoped, then matched exactly). Returns self on a hit.
        # @since 0.2.0
        def find(name)
          @entity = nil
          filter = ::Evernote::EDAM::NoteStore::NoteFilter.new
          filter.words = "intitle:#{name}"
          spec = ::Evernote::EDAM::NoteStore::NotesMetadataResultSpec.new
          spec.includeTitle = true
          result = @evernote.call(:findNotesMetadata, filter, 0, 1, spec)
          @entity = result.notes.detect { |note| note.title == name }
          self if @entity
        end
        alias_method :find_by_name, :find

        # Replaces the note's tags with the single tag +name+.
        # @since 0.3.0
        def tag(name)
          @entity.tagNames = [name]
          @evernote.call(:updateNote, @entity)
        end
      end
    end
  end
end
exists? doesn't need to perform an extra find
module Evertils
module Common
module Entity
class Note < Entity::Base
#
# @since 0.2.0
def create_from_yml(full_path)
begin
if File.exist? full_path
conf = YAML::load(File.open(full_path))
required = %w(title body)
if has_required_fields(conf, required)
create(conf['title'], conf['body'], conf['parent'])
else
raise ArgumentError, 'Configuration file is missing some required fields'
end
else
raise ArgumentError, "File not found: #{full_path}"
end
rescue ArgumentError => e
puts e.message
end
end
#
# @since 0.2.0
def create(title, body, parent_notebook = nil, file = nil, share_note = false, created_on = nil)
@entity = nil
# final output
output = {}
# Create note object
our_note = ::Evernote::EDAM::Type::Note.new
our_note.resources = []
our_note.tagNames = []
# a file was requested, lets prepare it for storage
if file
if file.is_a? Array
file.each do |f|
media_resource = ENML.new(f)
body.concat(media_resource.embeddable_element)
our_note.resources << media_resource.element
end
else
media_resource = ENML.new(file)
body.concat(media_resource.embeddable_element)
our_note.resources << media_resource.element
end
end
# only join when required
body = body.join if body.is_a? Array
n_body = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
n_body += "<!DOCTYPE en-note SYSTEM \"http://xml.evernote.com/pub/enml2.dtd\">"
n_body += "<en-note>#{body}</en-note>"
# setup note properties
our_note.title = title
our_note.content = n_body
our_note.created = created_on if !created_on.is_a?(DateTime)
if !parent_notebook.is_a? Evertils::Common::Entity::Notebook
nb = Entity::Notebook.new
parent_notebook = nb.find(parent_notebook)
parent_notebook = nb.default if parent_notebook.nil?
end
# parent_notebook is optional; if omitted, default notebook is used
our_note.notebookGuid = parent_notebook.prop(:guid)
# Attempt to create note in Evernote account
begin
@entity = @evernote.call(:createNote, our_note)
share if share_note
rescue ::Evernote::EDAM::Error::EDAMUserException => edue
## Something was wrong with the note data
## See EDAMErrorCode enumeration for error code explanation
## http://dev.evernote.com/documentation/reference/Errors.html#Enum_EDAMErrorCode
Notify.error "EDAMUserException: #{edue}\nCode #{edue.errorCode}: #{edue.parameter}"
rescue ::Evernote::EDAM::Error::EDAMNotFoundException
## Parent Notebook GUID doesn't correspond to an actual notebook
Notify.error "EDAMNotFoundException: Invalid parent notebook GUID"
rescue ArgumentError => e
Notify.error e.backtrace
end
Notify.success("#{parent_notebook.prop(:stack)}/#{parent_notebook.prop(:name)}/#{our_note.title} created")
self if @entity
end
#
# @since 0.2.0
def exists?
!@entity.nil?
end
#
# @since 0.2.0
def destroy
@evernote.call(:deleteNote, @entity.guid)
end
#
# @since 0.2.9
def expunge!
@evernote.call(:expungeNote, @entity.guid)
end
#
# @since 0.2.0
def expunge
deprecation_notice('0.2.9', 'Replaced with expunge! to better follow Ruby standards for method names. Will be removed in 0.3.5')
@evernote.call(:expungeNote, @entity.guid)
end
#
# @since 0.2.9
def move_to(notebook)
nb = Evertils::Common::Manager::Notebook.new
target = nb.find(notebook)
raise "Notebook not found: #{notebook}" if target.entity.nil?
@entity.notebookGuid = target.prop(:guid)
@evernote.call(:updateNote, @entity)
end
#
# @since 0.2.8
def share
@evernote.call(:shareNote, @entity.guid)
end
#
# @since 0.2.8
def unshare
@evernote.call(:stopSharingNote, @entity.guid)
end
#
# @since 0.2.0
# Looks up a note by exact title (via an intitle: search limited to one
# result's worth of metadata) and caches it in @entity.
# Returns self when a match was found, nil otherwise.
def find(name)
  @entity = nil
  note_filter = ::Evernote::EDAM::NoteStore::NoteFilter.new
  note_filter.words = "intitle:#{name}"
  result_spec = ::Evernote::EDAM::NoteStore::NotesMetadataResultSpec.new
  result_spec.includeTitle = true
  metadata = @evernote.call(:findNotesMetadata, note_filter, 0, 1, result_spec)
  @entity = metadata.notes.find { |note| note.title == name }
  self if @entity
end
alias_method :find_by_name, :find
#
# @since 0.3.0
# Replaces the note's tag list with the single tag `name` and pushes the
# change via updateNote. Note: this overwrites any existing tags.
def tag(name)
@entity.tagNames = [name]
@evernote.call(:updateNote, @entity)
end
end
end
end
end |
# Homebrew formula for croc, a file/text transfer tool.
class Croc < Formula
  desc "Securely send things from one computer to another"
  homepage "https://github.com/schollz/croc"
  url "https://github.com/schollz/croc/archive/v8.3.1.tar.gz"
  sha256 "336b6c3ce3930e50326341c9131fd1f2ea207a33b331b0b6d3ce00bc324445f3"
  license "MIT"
  head "https://github.com/schollz/croc.git"

  depends_on "go" => :build

  def install
    system "go", "build", *std_go_args
  end

  test do
    # Start a sender in the background, then receive the text it offers.
    fork do
      exec bin/"croc", "send", "--code=homebrew-test", "--text=mytext"
    end
    sleep 1
    # assert_match takes the expected pattern first and the tested string
    # second; the original call had the arguments reversed.
    assert_match "mytext", shell_output("#{bin}/croc --yes homebrew-test").chomp
  end
end
croc: add 8.3.1 bottle.
# Homebrew formula for croc, a file/text transfer tool (bottled revision).
class Croc < Formula
  desc "Securely send things from one computer to another"
  homepage "https://github.com/schollz/croc"
  url "https://github.com/schollz/croc/archive/v8.3.1.tar.gz"
  sha256 "336b6c3ce3930e50326341c9131fd1f2ea207a33b331b0b6d3ce00bc324445f3"
  license "MIT"
  head "https://github.com/schollz/croc.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "3934c4ebae77c48c8e81382b97d744680432fee5b4eaaffb2f57f2a45df5ef39" => :catalina
    sha256 "e746dc794607003225810bf4ea74d59e285311eb17169a5da876710a5cb0c5ea" => :mojave
    sha256 "3a9f7d2c3618437c616a35b16a20b232c690a7129d4f0ce43f6fa729d5ec6df4" => :high_sierra
  end

  depends_on "go" => :build

  def install
    system "go", "build", *std_go_args
  end

  test do
    # Start a sender in the background, then receive the text it offers.
    fork do
      exec bin/"croc", "send", "--code=homebrew-test", "--text=mytext"
    end
    sleep 1
    # assert_match takes the expected pattern first and the tested string
    # second; the original call had the arguments reversed.
    assert_match "mytext", shell_output("#{bin}/croc --yes homebrew-test").chomp
  end
end
|
module Expansions
  # Renders a template file next to its source, stripping the template
  # extension; a ".dotfile" marker in the name turns the generated file
  # into a dotfile (leading "." added, marker removed).
  class TemplateVisitor
    attr_reader :processors
    attr_reader :file

    # args[:processor_registry] - registry used to look up a processor
    #                             (defaults to TemplateProcessors.instance)
    # args[:file]               - file-system collaborator (defaults to File)
    def initialize(args = {})
      # Block form of fetch evaluates defaults lazily; the eager two-argument
      # form touched TemplateProcessors.instance even when a registry was
      # injected (e.g. in tests).
      @processors = args.fetch(:processor_registry) { TemplateProcessors.instance }
      @file = args.fetch(:file) { File }
    end

    # Processes file_name into its derived output path, deleting any stale
    # output first.
    def run_using(file_name)
      processor = processors.get_processor_for(file_name)
      generated_name = File.basename(file_name, File.extname(file_name))
      generated_name = generated_name.gsub(/\.dotfile/, "")
      generated_name = ".#{generated_name}" if /\.dotfile/ =~ file_name
      output = File.join(File.dirname(file_name), generated_name)
      # File#exists? was deprecated and removed in Ruby 3.2; exist? is the
      # supported spelling.
      file.delete(output) if file.exist?(output)
      processor.process(:input => file_name, :output => output)
    end
  end
end
Added support for loading settings
module Expansions
  # Renders a template file next to its source, stripping the template
  # extension; a ".dotfile" marker in the name turns the generated file
  # into a dotfile. Also supports loading an optional settings file.
  class TemplateVisitor
    attr_reader :processors
    attr_reader :file

    # args[:processor_registry] - registry used to look up a processor
    #                             (defaults to TemplateProcessors.instance)
    # args[:file]               - file-system collaborator (defaults to File)
    def initialize(args = {})
      # Block form of fetch evaluates defaults lazily; the eager two-argument
      # form touched TemplateProcessors.instance even when a registry was
      # injected (e.g. in tests).
      @processors = args.fetch(:processor_registry) { TemplateProcessors.instance }
      @file = args.fetch(:file) { File }
    end

    # Processes file_name into its derived output path, deleting any stale
    # output first.
    def run_using(file_name)
      processor = processors.get_processor_for(file_name)
      generated_name = File.basename(file_name, File.extname(file_name))
      generated_name = generated_name.gsub(/\.dotfile/, "")
      generated_name = ".#{generated_name}" if /\.dotfile/ =~ file_name
      output = File.join(File.dirname(file_name), generated_name)
      # File#exists? was deprecated and removed in Ruby 3.2; exist? is the
      # supported spelling.
      file.delete(output) if file.exist?(output)
      processor.process(:input => file_name, :output => output)
    end

    # Evaluates "<file_name>.settings" with Kernel#load if it exists.
    # Goes through the injected file collaborator for the existence check,
    # consistent with run_using (the original called File.exist? directly,
    # bypassing injection).
    def load_settings_file(file_name)
      settings_file = "#{file_name}.settings"
      load settings_file if file.exist?(settings_file)
    end
  end
end
|
# Homebrew formula for the Cuba numerical-integration library.
class Cuba < Formula
  desc "Library for multidimensional numerical integration"
  homepage "http://www.feynarts.de/cuba/"
  url "http://www.feynarts.de/cuba/Cuba-4.2.2.tar.gz"
  sha256 "8d9f532fd2b9561da2272c156ef7be5f3960953e4519c638759f1b52fe03ed52"
  license "LGPL-3.0"

  livecheck do
    url :homepage
    regex(/href=.*?Cuba[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "01567f5b76f7baad0d2fd5a08083d545d0d0543795e03bb8759953f317090cf4"
    sha256 cellar: :any_skip_relocation, big_sur: "6fe604c61d01768a99cb42321606f6b5feb5ed709f6d7fb419c3efb3e7e83f3a"
    sha256 cellar: :any_skip_relocation, catalina: "758999a8bef3aeaf37f38402affd375ff55b4293cbdb52ee76846a25ba7f5209"
    sha256 cellar: :any_skip_relocation, mojave: "abd47d8d13cfefdaf542675e465b717cb95e8b1a8ba0ca2c3745bbcf0c6bd1d0"
  end

  def install
    ENV.deparallelize # Makefile does not support parallel build
    system "./configure", "--prefix=#{prefix}"
    system "make", "install"
    pkgshare.install "demo"
  end

  test do
    # Compile the bundled demo. The source file must precede -L/-l so that
    # traditional single-pass linkers (e.g. GNU ld) resolve its symbols
    # against libcuba; listing -lcuba before the object can fail to link.
    system ENV.cc, "-o", "demo", "#{pkgshare}/demo/demo-c.c",
           "-L#{lib}", "-lcuba"
    system "./demo"
  end
end
cuba: update 4.2.2 bottle.
# Homebrew formula for the Cuba numerical-integration library
# (monterey-bottled revision).
class Cuba < Formula
  desc "Library for multidimensional numerical integration"
  homepage "http://www.feynarts.de/cuba/"
  url "http://www.feynarts.de/cuba/Cuba-4.2.2.tar.gz"
  sha256 "8d9f532fd2b9561da2272c156ef7be5f3960953e4519c638759f1b52fe03ed52"
  license "LGPL-3.0"

  livecheck do
    url :homepage
    regex(/href=.*?Cuba[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "8ce1fa8a1cf71b27ebeb02406214231dcafcab672b7c38bc5a664c9e2c69d424"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "d8f141d10928d1ce281f6bb744886de1ba9f2274476d3b6b257bcc9d587231e3"
    sha256 cellar: :any_skip_relocation, monterey: "238e6efde7346d58330b4ebbe562a5f52375d66bd21555867883c3fe2c0405e5"
    sha256 cellar: :any_skip_relocation, big_sur: "897095ff3030916d5470e15f85ca3a0d0460416484232cc7c821dc6e98c4406d"
    sha256 cellar: :any_skip_relocation, catalina: "566d4ddc7e4e3a278dceb6b83abc5ce1298b9ca715ac152695bf1e5fbb8cacc4"
  end

  def install
    ENV.deparallelize # Makefile does not support parallel build
    system "./configure", "--prefix=#{prefix}"
    system "make", "install"
    pkgshare.install "demo"
  end

  test do
    # Compile the bundled demo. The source file must precede -L/-l so that
    # traditional single-pass linkers (e.g. GNU ld) resolve its symbols
    # against libcuba; listing -lcuba before the object can fail to link.
    system ENV.cc, "-o", "demo", "#{pkgshare}/demo/demo-c.c",
           "-L#{lib}", "-lcuba"
    system "./demo"
  end
end
|
module Fastlane
  module Actions
    module SharedValues
    end

    # Action that resolves the name of the current git branch.
    class GitBranchAction < Action
      def self.run(params)
        # Prefer the CI-provided branch, falling back to asking git.
        # Use || rather than `or`: `or` has very low precedence and is meant
        # for control flow, which makes it a well-known Ruby footgun in value
        # expressions.
        ENV['GIT_BRANCH'] || `git symbolic-ref HEAD --short 2>/dev/null`.strip
      end

      #####################################################
      # @!group Documentation
      #####################################################

      def self.description
        "Returns the name of the current git branch"
      end

      def self.details
        "If no branch could be found, this action will return nil"
      end

      def self.available_options
        []
      end

      def self.output
        []
      end

      def self.authors
        ["KrauseFx"]
      end

      def self.is_supported?(platform)
        true
      end
    end
  end
end
Added support for detecting the current git branch on Travis CI (via the TRAVIS_BRANCH environment variable)
Fixes https://github.com/fastlane/fastlane/issues/870
module Fastlane
  module Actions
    module SharedValues
    end

    # Action that resolves the name of the current git branch, preferring
    # values provided by CI environments (Jenkins GIT_BRANCH, Travis
    # TRAVIS_BRANCH) before falling back to asking git itself.
    class GitBranchAction < Action
      def self.run(params)
        ci_branch = ENV['GIT_BRANCH'] || ENV["TRAVIS_BRANCH"]
        return ci_branch if ci_branch

        `git symbolic-ref HEAD --short 2>/dev/null`.strip
      end

      #####################################################
      # @!group Documentation
      #####################################################

      def self.description
        "Returns the name of the current git branch"
      end

      def self.details
        "If no branch could be found, this action will return nil"
      end

      def self.available_options
        []
      end

      def self.output
        []
      end

      def self.authors
        ["KrauseFx"]
      end

      def self.is_supported?(platform)
        true
      end
    end
  end
end
|
# Homebrew formula for cURL with a configurable set of optional backends
# (SSL provider, IDN, RTMP, SSH, async DNS, metalink, HTTP/2).
class Curl < Formula
desc "Get a file from an HTTP, HTTPS or FTP server"
homepage "https://curl.haxx.se/"
url "https://curl.haxx.se/download/curl-7.47.1.tar.bz2"
sha256 "ddc643ab9382e24bbe4747d43df189a0a6ce38fcb33df041b9cb0b3cd47ae98f"
bottle do
cellar :any
sha256 "738b8d4e2650711a6c9d59b416ef45a4a7e04d48f65344c5623b87224d92f11b" => :el_capitan
sha256 "8a945137576544f723c8b76579009c1b1b53a0683208d6123b65727678533340" => :yosemite
sha256 "4ce9db9e6332c8cd322f39d48cda624251316fa51942330536d5fddd103779fc" => :mavericks
end
# macOS ships its own curl; keg-only avoids shadowing the system binary.
keg_only :provided_by_osx
option "with-libidn", "Build with support for Internationalized Domain Names"
option "with-rtmpdump", "Build with RTMP support"
option "with-libssh2", "Build with scp and sftp support"
option "with-c-ares", "Build with C-Ares async DNS support"
option "with-gssapi", "Build with GSSAPI/Kerberos authentication support."
option "with-libmetalink", "Build with libmetalink support."
option "with-libressl", "Build with LibreSSL instead of Secure Transport or OpenSSL"
option "with-nghttp2", "Build with HTTP/2 support (requires OpenSSL or LibreSSL)"
option "without-ldap", "Build without LDAP support"
deprecated_option "with-idn" => "with-libidn"
deprecated_option "with-rtmp" => "with-rtmpdump"
deprecated_option "with-ssh" => "with-libssh2"
deprecated_option "with-ares" => "with-c-ares"
# HTTP/2 support requires OpenSSL 1.0.2+ or LibreSSL 2.1.3+ for ALPN Support
# which is currently not supported by Secure Transport (DarwinSSL).
if MacOS.version < :mountain_lion || (build.with?("nghttp2") && build.without?("libressl"))
depends_on "openssl"
else
option "with-openssl", "Build with OpenSSL instead of Secure Transport"
depends_on "openssl" => :optional
end
depends_on "pkg-config" => :build
depends_on "libidn" => :optional
depends_on "rtmpdump" => :optional
depends_on "libssh2" => :optional
depends_on "c-ares" => :optional
depends_on "libmetalink" => :optional
depends_on "libressl" => :optional
depends_on "nghttp2" => :optional
# Configures and builds curl, selecting exactly one SSL backend
# (libressl > openssl > Secure Transport, in that precedence order).
def install
# Throw an error if someone actually tries to rock both SSL choices.
# Long-term, make this singular-ssl-option-only a requirement.
if build.with?("libressl") && build.with?("openssl")
ohai <<-EOS.undent
--with-openssl and --with-libressl are both specified and
curl can only use one at a time; proceeding with libressl.
EOS
end
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
]
# cURL has a new firm desire to find ssl with PKG_CONFIG_PATH instead of using
# "--with-ssl" any more. "when possible, set the PKG_CONFIG_PATH environment
# variable instead of using this option". Multi-SSL choice breaks w/o using it.
if build.with? "libressl"
ENV.prepend_path "PKG_CONFIG_PATH", "#{Formula["libressl"].opt_prefix}/lib/pkgconfig"
args << "--with-ssl=#{Formula["libressl"].opt_prefix}"
args << "--with-ca-bundle=#{etc}/libressl/cert.pem"
elsif MacOS.version < :mountain_lion || build.with?("openssl") || build.with?("nghttp2")
ENV.prepend_path "PKG_CONFIG_PATH", "#{Formula["openssl"].opt_prefix}/lib/pkgconfig"
args << "--with-ssl=#{Formula["openssl"].opt_prefix}"
args << "--with-ca-bundle=#{etc}/openssl/cert.pem"
else
# No explicit SSL choice: fall back to the OS-provided Secure Transport.
args << "--with-darwinssl"
end
# Translate each optional dependency into its explicit --with/--without flag.
args << (build.with?("libssh2") ? "--with-libssh2" : "--without-libssh2")
args << (build.with?("libidn") ? "--with-libidn" : "--without-libidn")
args << (build.with?("libmetalink") ? "--with-libmetalink" : "--without-libmetalink")
args << (build.with?("gssapi") ? "--with-gssapi" : "--without-gssapi")
args << (build.with?("rtmpdump") ? "--with-librtmp" : "--without-librtmp")
if build.with? "c-ares"
args << "--enable-ares=#{Formula["c-ares"].opt_prefix}"
else
args << "--disable-ares"
end
args << "--disable-ldap" if build.without? "ldap"
system "./configure", *args
system "make", "install"
end
test do
# Fetch the curl tarball and see that the checksum matches.
# This requires a network connection, but so does Homebrew in general.
filename = (testpath/"test.tar.gz")
system "#{bin}/curl", "-L", stable.url, "-o", filename
filename.verify_checksum stable.checksum
end
end
curl: update 7.47.1 bottle for Linuxbrew.
# Homebrew formula for cURL with a configurable set of optional backends
# (SSL provider, IDN, RTMP, SSH, async DNS, metalink, HTTP/2); this revision
# adds an x86_64_linux bottle for Linuxbrew.
class Curl < Formula
desc "Get a file from an HTTP, HTTPS or FTP server"
homepage "https://curl.haxx.se/"
url "https://curl.haxx.se/download/curl-7.47.1.tar.bz2"
sha256 "ddc643ab9382e24bbe4747d43df189a0a6ce38fcb33df041b9cb0b3cd47ae98f"
bottle do
cellar :any
sha256 "738b8d4e2650711a6c9d59b416ef45a4a7e04d48f65344c5623b87224d92f11b" => :el_capitan
sha256 "8a945137576544f723c8b76579009c1b1b53a0683208d6123b65727678533340" => :yosemite
sha256 "4ce9db9e6332c8cd322f39d48cda624251316fa51942330536d5fddd103779fc" => :mavericks
sha256 "39eb36a0a69afd6e90e4ac179cadec8130046c84f290c4d2fb110336138135f1" => :x86_64_linux
end
# macOS ships its own curl; keg-only avoids shadowing the system binary.
keg_only :provided_by_osx
option "with-libidn", "Build with support for Internationalized Domain Names"
option "with-rtmpdump", "Build with RTMP support"
option "with-libssh2", "Build with scp and sftp support"
option "with-c-ares", "Build with C-Ares async DNS support"
option "with-gssapi", "Build with GSSAPI/Kerberos authentication support."
option "with-libmetalink", "Build with libmetalink support."
option "with-libressl", "Build with LibreSSL instead of Secure Transport or OpenSSL"
option "with-nghttp2", "Build with HTTP/2 support (requires OpenSSL or LibreSSL)"
option "without-ldap", "Build without LDAP support"
deprecated_option "with-idn" => "with-libidn"
deprecated_option "with-rtmp" => "with-rtmpdump"
deprecated_option "with-ssh" => "with-libssh2"
deprecated_option "with-ares" => "with-c-ares"
# HTTP/2 support requires OpenSSL 1.0.2+ or LibreSSL 2.1.3+ for ALPN Support
# which is currently not supported by Secure Transport (DarwinSSL).
if MacOS.version < :mountain_lion || (build.with?("nghttp2") && build.without?("libressl"))
depends_on "openssl"
else
option "with-openssl", "Build with OpenSSL instead of Secure Transport"
depends_on "openssl" => :optional
end
depends_on "pkg-config" => :build
depends_on "libidn" => :optional
depends_on "rtmpdump" => :optional
depends_on "libssh2" => :optional
depends_on "c-ares" => :optional
depends_on "libmetalink" => :optional
depends_on "libressl" => :optional
depends_on "nghttp2" => :optional
# Configures and builds curl, selecting exactly one SSL backend
# (libressl > openssl > Secure Transport, in that precedence order).
def install
# Throw an error if someone actually tries to rock both SSL choices.
# Long-term, make this singular-ssl-option-only a requirement.
if build.with?("libressl") && build.with?("openssl")
ohai <<-EOS.undent
--with-openssl and --with-libressl are both specified and
curl can only use one at a time; proceeding with libressl.
EOS
end
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
]
# cURL has a new firm desire to find ssl with PKG_CONFIG_PATH instead of using
# "--with-ssl" any more. "when possible, set the PKG_CONFIG_PATH environment
# variable instead of using this option". Multi-SSL choice breaks w/o using it.
if build.with? "libressl"
ENV.prepend_path "PKG_CONFIG_PATH", "#{Formula["libressl"].opt_prefix}/lib/pkgconfig"
args << "--with-ssl=#{Formula["libressl"].opt_prefix}"
args << "--with-ca-bundle=#{etc}/libressl/cert.pem"
elsif MacOS.version < :mountain_lion || build.with?("openssl") || build.with?("nghttp2")
ENV.prepend_path "PKG_CONFIG_PATH", "#{Formula["openssl"].opt_prefix}/lib/pkgconfig"
args << "--with-ssl=#{Formula["openssl"].opt_prefix}"
args << "--with-ca-bundle=#{etc}/openssl/cert.pem"
else
# No explicit SSL choice: fall back to the OS-provided Secure Transport.
args << "--with-darwinssl"
end
# Translate each optional dependency into its explicit --with/--without flag.
args << (build.with?("libssh2") ? "--with-libssh2" : "--without-libssh2")
args << (build.with?("libidn") ? "--with-libidn" : "--without-libidn")
args << (build.with?("libmetalink") ? "--with-libmetalink" : "--without-libmetalink")
args << (build.with?("gssapi") ? "--with-gssapi" : "--without-gssapi")
args << (build.with?("rtmpdump") ? "--with-librtmp" : "--without-librtmp")
if build.with? "c-ares"
args << "--enable-ares=#{Formula["c-ares"].opt_prefix}"
else
args << "--disable-ares"
end
args << "--disable-ldap" if build.without? "ldap"
system "./configure", *args
system "make", "install"
end
test do
# Fetch the curl tarball and see that the checksum matches.
# This requires a network connection, but so does Homebrew in general.
filename = (testpath/"test.tar.gz")
system "#{bin}/curl", "-L", stable.url, "-o", filename
filename.verify_checksum stable.checksum
end
end
|
require "fluent/plugin/filter"
module Fluent::Plugin
  # Filter plugin that concatenates partial/multiline log events back into
  # single records. Events are grouped per stream identity and buffered until
  # a mode-specific "record complete" condition (line count, regexp match,
  # partial-flag, docker partial metadata, or CRI log tag) is met, or until
  # flush_interval elapses.
  class ConcatFilter < Filter
    Fluent::Plugin.register_filter("concat", self)

    helpers :timer, :event_emitter

    desc "The key for part of multiline log"
    config_param :key, :string
    desc "The separator of lines"
    config_param :separator, :string, default: "\n"
    desc "The number of lines"
    config_param :n_lines, :integer, default: nil
    desc "The regexp to match beginning of multiline"
    config_param :multiline_start_regexp, :string, default: nil
    desc "The regexp to match ending of multiline"
    config_param :multiline_end_regexp, :string, default: nil
    desc "The regexp to match continuous lines"
    config_param :continuous_line_regexp, :string, default: nil
    desc "The key to determine which stream an event belongs to"
    config_param :stream_identity_key, :string, default: nil
    desc "The interval between data flushes, 0 means disable timeout"
    config_param :flush_interval, :time, default: 60
    desc "The label name to handle timeout"
    config_param :timeout_label, :string, default: nil
    desc "Use timestamp of first record when buffer is flushed"
    config_param :use_first_timestamp, :bool, default: false
    desc "The field name that is the reference to concatenate records"
    config_param :partial_key, :string, default: nil
    desc "The value stored in the field specified by partial_key that represent partial log"
    config_param :partial_value, :string, default: nil
    desc "If true, keep partial_key in concatenated records"
    config_param :keep_partial_key, :bool, default: false
    desc "Use partial metadata to concatenate multiple records"
    config_param :use_partial_metadata, :bool, default: false
    desc "Input format of the partial metadata (fluentd or journald docker log driver)"
    config_param :partial_metadata_format, :enum, list: [:"docker-fluentd", :"docker-journald", :"docker-journald-lowercase"], default: :"docker-fluentd"
    desc "If true, keep partial metadata"
    config_param :keep_partial_metadata, :bool, default: false
    desc "Use cri log tag to concatenate multiple records"
    config_param :use_partial_cri_logtag, :bool, default: false
    desc "The key name that is referred to concatenate records on cri log"
    config_param :partial_cri_logtag_key, :string, default: nil
    desc "The key name that is referred to detect stream name on cri log"
    config_param :partial_cri_stream_key, :string, default: "stream"

    # Raised (via the error stream) when a stream's buffer is flushed by
    # timeout instead of by a completed record.
    class TimeoutError < StandardError
    end

    def initialize
      super
      # Per-stream buffers of [tag, time, record] triples awaiting flush.
      @buffer = Hash.new {|h, k| h[k] = [] }
      @timeout_map_mutex = Thread::Mutex.new
      @timeout_map_mutex.synchronize do
        # Last-seen timestamp per stream identity, used for timeout flushes.
        @timeout_map = Hash.new {|h, k| h[k] = Fluent::Engine.now }
      end
    end

    # Returns two parallel arrays: the "is unset" state and the name of each
    # mode-selecting parameter. Exactly one of them must be configured.
    def required_params
      params = [@n_lines.nil?, @multiline_start_regexp.nil?, @multiline_end_regexp.nil?, @partial_key.nil?, !@use_partial_metadata, !@use_partial_cri_logtag]
      names = ["n_lines", "multiline_start_regexp", "multiline_end_regexp", "partial_key", "use_partial_metadata", "use_partial_cri_logtag"]
      return params, names
    end

    def configure(conf)
      super
      params, names = required_params
      # Every entry true means no mode-selecting parameter was supplied.
      # all? states the intent directly and is equivalent to the previous
      # reject {|e| e == true }.empty? (all entries are strict booleans).
      if params.all?
        raise Fluent::ConfigError, "Either #{[names[0..-2].join(", "), names[-1]].join(" or ")} is required"
      end
      # The mode-selecting parameters are mutually exclusive; reject every
      # pairing explicitly so the error message names the conflict.
      if @n_lines && (@multiline_start_regexp || @multiline_end_regexp)
        raise Fluent::ConfigError, "n_lines and multiline_start_regexp/multiline_end_regexp are exclusive"
      end
      if @partial_key && @n_lines
        raise Fluent::ConfigError, "partial_key and n_lines are exclusive"
      end
      if @partial_key && (@multiline_start_regexp || @multiline_end_regexp)
        raise Fluent::ConfigError, "partial_key and multiline_start_regexp/multiline_end_regexp are exclusive"
      end
      if @partial_key && @partial_value.nil?
        raise Fluent::ConfigError, "partial_value is required when partial_key is specified"
      end
      if @use_partial_metadata && @n_lines
        raise Fluent::ConfigError, "use_partial_metadata and n_lines are exclusive"
      end
      if @use_partial_metadata && (@multiline_start_regexp || @multiline_end_regexp)
        raise Fluent::ConfigError, "use_partial_metadata and multiline_start_regexp/multiline_end_regexp are exclusive"
      end
      if @use_partial_metadata && @partial_key
        raise Fluent::ConfigError, "use_partial_metadata and partial_key are exclusive"
      end
      if @use_partial_cri_logtag && @n_lines
        raise Fluent::ConfigError, "use_partial_cri_logtag and n_lines are exclusive"
      end
      if @use_partial_cri_logtag && (@multiline_start_regexp || @multiline_end_regexp)
        raise Fluent::ConfigError, "use_partial_cri_logtag and multiline_start_regexp/multiline_end_regexp are exclusive"
      end
      if @use_partial_cri_logtag && @partial_key
        raise Fluent::ConfigError, "use_partial_cri_logtag and partial_key are exclusive"
      end
      @mode = nil
      case
      when @n_lines
        @mode = :line
      when @partial_key
        @mode = :partial
      when @use_partial_metadata
        @mode = :partial_metadata
        # Field names differ between the docker log drivers.
        case @partial_metadata_format
        when :"docker-fluentd"
          @partial_message_field = "partial_message".freeze
          @partial_id_field = "partial_id".freeze
          @partial_ordinal_field = "partial_ordinal".freeze
          @partial_last_field = "partial_last".freeze
          @partial_message_indicator = @partial_message_field
        when :"docker-journald"
          @partial_message_field = "CONTAINER_PARTIAL_MESSAGE".freeze
          @partial_id_field = "CONTAINER_PARTIAL_ID".freeze
          @partial_ordinal_field = "CONTAINER_PARTIAL_ORDINAL".freeze
          @partial_last_field = "CONTAINER_PARTIAL_LAST".freeze
          # the journald log driver does not add CONTAINER_PARTIAL_MESSAGE to the last message
          # so we help ourself by using another indicator
          @partial_message_indicator = @partial_id_field
        when :"docker-journald-lowercase"
          @partial_message_field = "container_partial_message".freeze
          @partial_id_field = "container_partial_id".freeze
          @partial_ordinal_field = "container_partial_ordinal".freeze
          @partial_last_field = "container_partial_last".freeze
          @partial_message_indicator = @partial_id_field
        end
      when @use_partial_cri_logtag
        @mode = :partial_cri
        @partial_logtag_delimiter = ":".freeze
        @partial_logtag_continue = "P".freeze
        @partial_logtag_full = "F".freeze
      when @multiline_start_regexp || @multiline_end_regexp
        @mode = :regexp
        # Config values arrive as "/pattern/" strings; strip the surrounding
        # slashes before compiling.
        if @multiline_start_regexp
          @multiline_start_regexp = Regexp.compile(@multiline_start_regexp[1..-2])
        end
        if @multiline_end_regexp
          @multiline_end_regexp = Regexp.compile(@multiline_end_regexp[1..-2])
        end
        if @continuous_line_regexp
          @continuous_line_regexp = Regexp.compile(@continuous_line_regexp[1..-2])
        end
      end
    end

    def start
      super
      @finished = false
      # Poll once per second for streams whose flush_interval has elapsed.
      timer_execute(:filter_concat_timer, 1, &method(:on_timer))
    end

    def shutdown
      @finished = true
      flush_remaining_buffer
      super
    end

    # Routes each event through the mode-specific concatenation logic and
    # emits only completed (concatenated) records downstream.
    def filter_stream(tag, es)
      # Never touch fluentd's own log events.
      if /\Afluent\.(?:trace|debug|info|warn|error|fatal)\z/ =~ tag
        return es
      end
      new_es = Fluent::MultiEventStream.new
      es.each do |time, record|
        # Records without the concat key (or the mode's marker field) pass
        # through untouched.
        unless record.key?(@key)
          new_es.add(time, record)
          next
        end
        if @mode == :partial
          unless record.key?(@partial_key)
            new_es.add(time, record)
            next
          end
        end
        if @mode == :partial_metadata
          unless record.key?(@partial_message_indicator)
            new_es.add(time, record)
            next
          end
        end
        begin
          flushed_es = process(tag, time, record)
          unless flushed_es.empty?
            flushed_es.each do |_time, new_record|
              time = _time if @use_first_timestamp
              merged_record = record.merge(new_record)
              # Strip the partial markers unless configured to keep them.
              case @mode
              when :partial
                merged_record.delete(@partial_key) unless @keep_partial_key
              when :partial_metadata
                unless @keep_partial_metadata
                  merged_record.delete(@partial_message_field)
                  merged_record.delete(@partial_id_field)
                  merged_record.delete(@partial_ordinal_field)
                  merged_record.delete(@partial_last_field)
                end
              when :partial_cri
                merged_record.delete(@partial_cri_logtag_key) unless @keep_partial_key
                merged_record.delete(@partial_cri_stream_key)
              end
              new_es.add(time, merged_record)
            end
          end
        rescue => e
          router.emit_error_event(tag, time, record, e)
        end
      end
      new_es
    end

    private

    def on_timer
      return if @flush_interval <= 0
      return if @finished
      flush_timeout_buffer
    rescue => e
      log.error "failed to flush timeout buffer", error: e
    end

    # Computes the stream identity for the record, refreshes its timeout
    # timestamp, and dispatches to the mode-specific processor. Returns an
    # event stream of completed records (possibly empty).
    def process(tag, time, record)
      if @mode == :partial_metadata
        # Include the docker partial id so interleaved messages never merge.
        if @stream_identity_key
          stream_identity = %Q(#{tag}:#{record[@stream_identity_key]}#{record[@partial_id_field]})
        else
          stream_identity = %Q(#{tag}:#{record[@partial_id_field]})
        end
      else
        if @stream_identity_key
          stream_identity = "#{tag}:#{record[@stream_identity_key]}"
        else
          stream_identity = "#{tag}:default"
        end
      end
      @timeout_map_mutex.synchronize do
        @timeout_map[stream_identity] = Fluent::Engine.now
      end
      case @mode
      when :line
        process_line(stream_identity, tag, time, record)
      when :partial
        process_partial(stream_identity, tag, time, record)
      when :partial_metadata
        process_partial_metadata(stream_identity, tag, time, record)
      when :partial_cri
        process_partial_cri(stream_identity, tag, time, record)
      when :regexp
        process_regexp(stream_identity, tag, time, record)
      end
    end

    # :line mode -- flush once n_lines events have accumulated.
    def process_line(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      @buffer[stream_identity] << [tag, time, record]
      if @buffer[stream_identity].size >= @n_lines
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
      end
      new_es
    end

    # :partial mode -- flush when the record is NOT marked partial.
    def process_partial(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      @buffer[stream_identity] << [tag, time, record]
      unless @partial_value == record[@partial_key]
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_record.delete(@partial_key)
        new_es.add(time, new_record)
      end
      new_es
    end

    # :partial_cri mode -- the CRI logtag is "<F|P>:..."; "F" marks the
    # final fragment of a message.
    def process_partial_cri(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      @buffer[stream_identity] << [tag, time, record]
      if record[@partial_cri_logtag_key].split(@partial_logtag_delimiter)[0] == @partial_logtag_full
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_record.delete(@partial_cri_logtag_key)
        new_es.add(time, new_record)
      end
      new_es
    end

    # :partial_metadata mode -- flush when the docker driver marks the
    # fragment as the last one.
    def process_partial_metadata(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      @buffer[stream_identity] << [tag, time, record]
      if record[@partial_last_field] == "true"
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_record.delete(@partial_key)
        new_es.add(time, new_record)
      end
      new_es
    end

    # :regexp mode -- start/end/continuation regexps drive the buffering.
    def process_regexp(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      case
      when firstline?(record[@key])
        if @buffer[stream_identity].empty?
          @buffer[stream_identity] << [tag, time, record]
          if lastline?(record[@key])
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
          end
        else
          # A new start line closes the previous message and opens a new one.
          new_time, new_record = flush_buffer(stream_identity, [tag, time, record])
          time = new_time if @use_first_timestamp
          new_es.add(time, new_record)
          if lastline?(record[@key])
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
          end
          return new_es
        end
      when lastline?(record[@key])
        @buffer[stream_identity] << [tag, time, record]
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
        return new_es
      else
        if @buffer[stream_identity].empty?
          if !@multiline_start_regexp
            # Without a start pattern, any line may begin a message.
            @buffer[stream_identity] << [tag, time, record]
          else
            # Not part of a multiline message: pass through unchanged.
            new_es.add(time, record)
            return new_es
          end
        else
          if continuous_line?(record[@key])
            # Continuation of the previous line
            @buffer[stream_identity] << [tag, time, record]
          else
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
            new_es.add(time, record)
          end
        end
      end
      new_es
    end

    def firstline?(text)
      @multiline_start_regexp && @multiline_start_regexp.match?(text)
    end

    def lastline?(text)
      @multiline_end_regexp && @multiline_end_regexp.match?(text)
    end

    def continuous_line?(text)
      if @continuous_line_regexp
        @continuous_line_regexp.match?(text)
      else
        true
      end
    end

    # Joins the buffered fragments for a stream into one record, resets the
    # buffer (optionally seeding it with new_element), and returns
    # [time_of_first_fragment, merged_record].
    def flush_buffer(stream_identity, new_element = nil)
      lines = if @mode == :partial_metadata
                # Docker may deliver fragments out of order; restore order
                # via the ordinal field.
                @buffer[stream_identity]
                  .sort_by {|_tag, _time, record| record[@partial_ordinal_field].to_i }
                  .map {|_tag, _time, record| record[@key] }
              else
                @buffer[stream_identity].map {|_tag, _time, record| record[@key] }
              end
      _tag, time, first_record = @buffer[stream_identity].first
      new_record = {
        @key => lines.join(@separator)
      }
      @buffer[stream_identity] = []
      @buffer[stream_identity] << new_element if new_element
      [time, first_record.merge(new_record)]
    end

    # Flushes every stream that has been idle longer than flush_interval,
    # routing the flushed record through the timeout handler.
    def flush_timeout_buffer
      now = Fluent::Engine.now
      timeout_stream_identities = []
      @timeout_map_mutex.synchronize do
        @timeout_map.each do |stream_identity, previous_timestamp|
          next if @flush_interval > (now - previous_timestamp)
          next if @buffer[stream_identity].empty?
          time, flushed_record = flush_buffer(stream_identity)
          timeout_stream_identities << stream_identity
          tag = stream_identity.split(":").first
          message = "Timeout flush: #{stream_identity}"
          handle_timeout_error(tag, @use_first_timestamp ? time : now, flushed_record, message)
          log.info(message)
        end
        @timeout_map.reject! do |stream_identity, _|
          timeout_stream_identities.include?(stream_identity)
        end
      end
    end

    # Flushes everything still buffered at shutdown through the timeout
    # handler so no partial data is silently dropped.
    def flush_remaining_buffer
      @buffer.each do |stream_identity, elements|
        next if elements.empty?
        lines = elements.map {|_tag, _time, record| record[@key] }
        new_record = {
          @key => lines.join(@separator)
        }
        tag, time, record = elements.first
        message = "Flush remaining buffer: #{stream_identity}"
        handle_timeout_error(tag, time, record.merge(new_record), message)
        log.info(message)
      end
      @buffer.clear
    end

    # Emits a timed-out record either to the configured timeout_label or to
    # the error stream wrapped in a TimeoutError.
    def handle_timeout_error(tag, time, record, message)
      if @timeout_label
        event_router = event_emitter_router(@timeout_label)
        event_router.emit(tag, time, record)
      else
        router.emit_error_event(tag, time, record, TimeoutError.new(message))
      end
    end
  end
end
Use all? method to check exclusive parameters
Signed-off-by: Hiroshi Hatake <a476a6e5b5893650242fbf0b2961500a9bd9032e@calyptia.com>
require "fluent/plugin/filter"
module Fluent::Plugin
class ConcatFilter < Filter
Fluent::Plugin.register_filter("concat", self)
helpers :timer, :event_emitter
desc "The key for part of multiline log"
config_param :key, :string
desc "The separator of lines"
config_param :separator, :string, default: "\n"
desc "The number of lines"
config_param :n_lines, :integer, default: nil
desc "The regexp to match beginning of multiline"
config_param :multiline_start_regexp, :string, default: nil
desc "The regexp to match ending of multiline"
config_param :multiline_end_regexp, :string, default: nil
desc "The regexp to match continuous lines"
config_param :continuous_line_regexp, :string, default: nil
desc "The key to determine which stream an event belongs to"
config_param :stream_identity_key, :string, default: nil
desc "The interval between data flushes, 0 means disable timeout"
config_param :flush_interval, :time, default: 60
desc "The label name to handle timeout"
config_param :timeout_label, :string, default: nil
desc "Use timestamp of first record when buffer is flushed"
config_param :use_first_timestamp, :bool, default: false
desc "The field name that is the reference to concatenate records"
config_param :partial_key, :string, default: nil
desc "The value stored in the field specified by partial_key that represent partial log"
config_param :partial_value, :string, default: nil
desc "If true, keep partial_key in concatenated records"
config_param :keep_partial_key, :bool, default: false
desc "Use partial metadata to concatenate multiple records"
config_param :use_partial_metadata, :bool, default: false
desc "Input format of the partial metadata (fluentd or journald docker log driver)"
config_param :partial_metadata_format, :enum, list: [:"docker-fluentd", :"docker-journald", :"docker-journald-lowercase"], default: :"docker-fluentd"
desc "If true, keep partial metadata"
config_param :keep_partial_metadata, :bool, default: false
desc "Use cri log tag to concatenate multiple records"
config_param :use_partial_cri_logtag, :bool, default: false
desc "The key name that is referred to concatenate records on cri log"
config_param :partial_cri_logtag_key, :string, default: nil
desc "The key name that is referred to detect stream name on cri log"
config_param :partial_cri_stream_key, :string, default: "stream"
class TimeoutError < StandardError
end
def initialize
super
@buffer = Hash.new {|h, k| h[k] = [] }
@timeout_map_mutex = Thread::Mutex.new
@timeout_map_mutex.synchronize do
@timeout_map = Hash.new {|h, k| h[k] = Fluent::Engine.now }
end
end
def required_params
params = [@n_lines.nil?, @multiline_start_regexp.nil?, @multiline_end_regexp.nil?, @partial_key.nil?, !@use_partial_metadata, !@use_partial_cri_logtag]
names = ["n_lines", "multiline_start_regexp", "multiline_end_regexp", "partial_key", "use_partial_metadata", "use_partial_cri_logtag"]
return params, names
end
# Validates the (mutually exclusive) mode-selecting parameters, chooses the
# operating @mode, and pre-compiles the regexp parameters.
# Raises Fluent::ConfigError on any invalid combination.
def configure(conf)
  super
  params, names = required_params
  # At least one of the mode selectors must be given.
  if params.all?
    raise Fluent::ConfigError, "Either #{[names[0..-2].join(", "), names[-1]].join(" or ")} is required"
  end
  # Every pair of modes is exclusive; reject mixed configurations.
  if @n_lines && (@multiline_start_regexp || @multiline_end_regexp)
    raise Fluent::ConfigError, "n_lines and multiline_start_regexp/multiline_end_regexp are exclusive"
  end
  if @partial_key && @n_lines
    raise Fluent::ConfigError, "partial_key and n_lines are exclusive"
  end
  if @partial_key && (@multiline_start_regexp || @multiline_end_regexp)
    raise Fluent::ConfigError, "partial_key and multiline_start_regexp/multiline_end_regexp are exclusive"
  end
  if @partial_key && @partial_value.nil?
    raise Fluent::ConfigError, "partial_value is required when partial_key is specified"
  end
  if @use_partial_metadata && @n_lines
    raise Fluent::ConfigError, "use_partial_metadata and n_lines are exclusive"
  end
  if @use_partial_metadata && (@multiline_start_regexp || @multiline_end_regexp)
    raise Fluent::ConfigError, "use_partial_metadata and multiline_start_regexp/multiline_end_regexp are exclusive"
  end
  if @use_partial_metadata && @partial_key
    raise Fluent::ConfigError, "use_partial_metadata and partial_key are exclusive"
  end
  if @use_partial_cri_logtag && @n_lines
    raise Fluent::ConfigError, "use_partial_cri_logtag and n_lines are exclusive"
  end
  if @use_partial_cri_logtag && (@multiline_start_regexp || @multiline_end_regexp)
    raise Fluent::ConfigError, "use_partial_cri_logtag and multiline_start_regexp/multiline_end_regexp are exclusive"
  end
  if @use_partial_cri_logtag && @partial_key
    raise Fluent::ConfigError, "use_partial_cri_logtag and partial_key are exclusive"
  end
  @mode = nil
  case
  when @n_lines
    @mode = :line
  when @partial_key
    @mode = :partial
  when @use_partial_metadata
    @mode = :partial_metadata
    # Select the record field names used by the configured docker log driver.
    case @partial_metadata_format
    when :"docker-fluentd"
      @partial_message_field = "partial_message".freeze
      @partial_id_field = "partial_id".freeze
      @partial_ordinal_field = "partial_ordinal".freeze
      @partial_last_field = "partial_last".freeze
      @partial_message_indicator = @partial_message_field
    when :"docker-journald"
      @partial_message_field = "CONTAINER_PARTIAL_MESSAGE".freeze
      @partial_id_field = "CONTAINER_PARTIAL_ID".freeze
      @partial_ordinal_field = "CONTAINER_PARTIAL_ORDINAL".freeze
      @partial_last_field = "CONTAINER_PARTIAL_LAST".freeze
      # the journald log driver does not add CONTAINER_PARTIAL_MESSAGE to the last message
      # so we help ourself by using another indicator
      @partial_message_indicator = @partial_id_field
    when :"docker-journald-lowercase"
      @partial_message_field = "container_partial_message".freeze
      @partial_id_field = "container_partial_id".freeze
      @partial_ordinal_field = "container_partial_ordinal".freeze
      @partial_last_field = "container_partial_last".freeze
      @partial_message_indicator = @partial_id_field
    end
  when @use_partial_cri_logtag
    @mode = :partial_cri
    @partial_logtag_delimiter = ":".freeze
    @partial_logtag_continue = "P".freeze
    @partial_logtag_full = "F".freeze
  when @multiline_start_regexp || @multiline_end_regexp
    @mode = :regexp
    # Parameters arrive as "/pattern/" strings; strip the surrounding
    # slashes before compiling.
    if @multiline_start_regexp
      @multiline_start_regexp = Regexp.compile(@multiline_start_regexp[1..-2])
    end
    if @multiline_end_regexp
      @multiline_end_regexp = Regexp.compile(@multiline_end_regexp[1..-2])
    end
    if @continuous_line_regexp
      @continuous_line_regexp = Regexp.compile(@continuous_line_regexp[1..-2])
    end
  end
end
# Starts the 1-second timer that drives timeout-based flushes (on_timer).
def start
  super
  @finished = false
  timer_execute(:filter_concat_timer, 1, &method(:on_timer))
end
# Stops timeout flushing and emits whatever is still buffered so no partial
# events are lost on plugin shutdown.
def shutdown
  @finished = true
  flush_remaining_buffer
  super
end
# Filter entry point. Passes fluentd's own internal events through untouched,
# routes every other record into process, and re-emits any completed
# (flushed) events with mode-specific bookkeeping fields stripped.
# A failure for a single record goes to the error stream instead of
# aborting the whole batch.
def filter_stream(tag, es)
  # Never try to concatenate fluentd's internal log events.
  if /\Afluent\.(?:trace|debug|info|warn|error|fatal)\z/ =~ tag
    return es
  end
  new_es = Fluent::MultiEventStream.new
  es.each do |time, record|
    # Records without the target key cannot be concatenated; pass through.
    unless record.key?(@key)
      new_es.add(time, record)
      next
    end
    # In :partial / :partial_metadata modes, records without the marker
    # field are ordinary single-line events; pass through.
    if @mode == :partial
      unless record.key?(@partial_key)
        new_es.add(time, record)
        next
      end
    end
    if @mode == :partial_metadata
      unless record.key?(@partial_message_indicator)
        new_es.add(time, record)
        next
      end
    end
    begin
      flushed_es = process(tag, time, record)
      unless flushed_es.empty?
        flushed_es.each do |_time, new_record|
          time = _time if @use_first_timestamp
          merged_record = record.merge(new_record)
          # Strip the mode's marker fields unless configured to keep them.
          case @mode
          when :partial
            merged_record.delete(@partial_key) unless @keep_partial_key
          when :partial_metadata
            unless @keep_partial_metadata
              merged_record.delete(@partial_message_field)
              merged_record.delete(@partial_id_field)
              merged_record.delete(@partial_ordinal_field)
              merged_record.delete(@partial_last_field)
            end
          when :partial_cri
            merged_record.delete(@partial_cri_logtag_key) unless @keep_partial_key
            merged_record.delete(@partial_cri_stream_key)
          end
          new_es.add(time, merged_record)
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end
  new_es
end
private

# Timer callback (every second): flushes streams that exceeded
# @flush_interval. A flush_interval of 0 (or less) disables timeouts.
def on_timer
  return if @flush_interval <= 0
  return if @finished
  flush_timeout_buffer
rescue => e
  log.error "failed to flush timeout buffer", error: e
end
def process(tag, time, record)
if @mode == :partial_metadata
if @stream_identity_key
stream_identity = %Q(#{tag}:#{record[@stream_identity_key]}#{record[@partial_id_field]})
else
stream_identity = %Q(#{tag}:#{record[@partial_id_field]})
end
else
if @stream_identity_key
stream_identity = "#{tag}:#{record[@stream_identity_key]}"
else
stream_identity = "#{tag}:default"
end
end
@timeout_map_mutex.synchronize do
@timeout_map[stream_identity] = Fluent::Engine.now
end
case @mode
when :line
process_line(stream_identity, tag, time, record)
when :partial
process_partial(stream_identity, tag, time, record)
when :partial_metadata
process_partial_metadata(stream_identity, tag, time, record)
when :partial_cri
process_partial_cri(stream_identity, tag, time, record)
when :regexp
process_regexp(stream_identity, tag, time, record)
end
end
# Buffers the record and flushes one concatenated event once @n_lines
# records have accumulated for the stream.
def process_line(stream_identity, tag, time, record)
  events = Fluent::MultiEventStream.new
  queue = @buffer[stream_identity]
  queue << [tag, time, record]
  if queue.size >= @n_lines
    flushed_time, flushed_record = flush_buffer(stream_identity)
    events.add(@use_first_timestamp ? flushed_time : time, flushed_record)
  end
  events
end
# Buffers docker-style partial records and flushes once a record arrives
# whose partial_key field no longer carries the "partial" marker value
# (i.e. the final chunk of the logical line).
def process_partial(stream_identity, tag, time, record)
  events = Fluent::MultiEventStream.new
  @buffer[stream_identity] << [tag, time, record]
  if record[@partial_key] != @partial_value
    flushed_time, flushed_record = flush_buffer(stream_identity)
    flushed_record.delete(@partial_key)
    events.add(@use_first_timestamp ? flushed_time : time, flushed_record)
  end
  events
end
# Buffers CRI-formatted records and flushes when the final chunk arrives.
# The CRI logtag's first ":"-separated token is "P" (partial) or "F"
# (full/final).
# NOTE(review): record[@partial_cri_logtag_key] is assumed present and a
# String; a record missing that key raises NoMethodError here, which
# filter_stream routes to the error stream — confirm that is intended.
def process_partial_cri(stream_identity, tag, time, record)
  new_es = Fluent::MultiEventStream.new
  @buffer[stream_identity] << [tag, time, record]
  if record[@partial_cri_logtag_key].split(@partial_logtag_delimiter)[0] == @partial_logtag_full
    new_time, new_record = flush_buffer(stream_identity)
    time = new_time if @use_first_timestamp
    # The logtag is internal bookkeeping; never emit it on the flushed event.
    new_record.delete(@partial_cri_logtag_key)
    new_es.add(time, new_record)
  end
  new_es
end
# Buffers records carrying docker partial metadata and flushes when the
# record marked as the last chunk (@partial_last_field == "true") arrives.
# Metadata fields are stripped later in filter_stream (unless
# keep_partial_metadata is set), so nothing is deleted from the flushed
# record here.
def process_partial_metadata(stream_identity, tag, time, record)
  new_es = Fluent::MultiEventStream.new
  @buffer[stream_identity] << [tag, time, record]
  if record[@partial_last_field] == "true"
    new_time, new_record = flush_buffer(stream_identity)
    time = new_time if @use_first_timestamp
    # NOTE: a previous `new_record.delete(@partial_key)` here was a leftover
    # copied from process_partial; @partial_key is always nil in this mode
    # (configure makes the modes mutually exclusive), so the call was a
    # no-op and has been removed.
    new_es.add(time, new_record)
  end
  new_es
end
# Groups lines between start/end regexp matches into one event.
# Branches:
#   * start-pattern match -> flush any pending group, begin a new one
#   * end-pattern match   -> complete the current group and flush
#   * otherwise           -> continuation, standalone pass-through, or
#                            forced flush of a broken group
def process_regexp(stream_identity, tag, time, record)
  new_es = Fluent::MultiEventStream.new
  case
  when firstline?(record[@key])
    if @buffer[stream_identity].empty?
      @buffer[stream_identity] << [tag, time, record]
      # A line may match both patterns: a complete one-line event.
      if lastline?(record[@key])
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
      end
    else
      # New start while a group is pending: flush the pending group and
      # keep the current record as the seed of the next one.
      new_time, new_record = flush_buffer(stream_identity, [tag, time, record])
      time = new_time if @use_first_timestamp
      new_es.add(time, new_record)
      if lastline?(record[@key])
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
      end
      return new_es
    end
  when lastline?(record[@key])
    @buffer[stream_identity] << [tag, time, record]
    new_time, new_record = flush_buffer(stream_identity)
    time = new_time if @use_first_timestamp
    new_es.add(time, new_record)
    return new_es
  else
    if @buffer[stream_identity].empty?
      # No group in progress: with only an end pattern configured the line
      # may still start a group; with a start pattern it is standalone.
      if !@multiline_start_regexp
        @buffer[stream_identity] << [tag, time, record]
      else
        new_es.add(time, record)
        return new_es
      end
    else
      if continuous_line?(record[@key])
        # Continuation of the previous line
        @buffer[stream_identity] << [tag, time, record]
      else
        # Not a continuation: flush the pending group, then emit this
        # record as-is.
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
        new_es.add(time, record)
      end
    end
  end
  new_es
end
# True when the line matches the configured start-of-event pattern;
# nil when no start pattern is configured.
def firstline?(text)
  return nil unless @multiline_start_regexp
  @multiline_start_regexp.match?(text)
end

# True when the line matches the configured end-of-event pattern;
# nil when no end pattern is configured.
def lastline?(text)
  return nil unless @multiline_end_regexp
  @multiline_end_regexp.match?(text)
end

# Whether the line continues the current event. Without a continuation
# pattern every non-start/non-end line counts as a continuation.
def continuous_line?(text)
  @continuous_line_regexp ? @continuous_line_regexp.match?(text) : true
end
# Joins every buffered line of the stream into one record and resets the
# buffer (optionally seeding it with new_element, the first record of the
# next group). In :partial_metadata mode the chunks are joined in ordinal
# order, but the timestamp and base record still come from the first
# buffered entry in arrival order.
# Returns [time_of_first_entry, merged_record].
def flush_buffer(stream_identity, new_element = nil)
  buffered = @buffer[stream_identity]
  ordered =
    if @mode == :partial_metadata
      buffered.sort_by {|_tag, _time, record| record[@partial_ordinal_field].to_i }
    else
      buffered
    end
  lines = ordered.map {|_tag, _time, record| record[@key] }
  _first_tag, first_time, first_record = buffered.first
  @buffer[stream_identity] = new_element ? [new_element] : []
  [first_time, first_record.merge(@key => lines.join(@separator))]
end
# Flushes every stream that has been idle for at least @flush_interval,
# emitting the flushed event through the timeout path (timeout_label or the
# error stream). The whole scan runs under the mutex so event threads
# cannot mutate @timeout_map while it is being iterated and pruned.
def flush_timeout_buffer
  now = Fluent::Engine.now
  timeout_stream_identities = []
  @timeout_map_mutex.synchronize do
    @timeout_map.each do |stream_identity, previous_timestamp|
      next if @flush_interval > (now - previous_timestamp)
      next if @buffer[stream_identity].empty?
      time, flushed_record = flush_buffer(stream_identity)
      timeout_stream_identities << stream_identity
      # assumes the tag contains no ":" — split(":").first would truncate
      # a tag like "a:b"; TODO confirm against how stream_identity is built
      tag = stream_identity.split(":").first
      message = "Timeout flush: #{stream_identity}"
      handle_timeout_error(tag, @use_first_timestamp ? time : now, flushed_record, message)
      log.info(message)
    end
    # Drop flushed streams so an idle stream starts a fresh timeout window.
    @timeout_map.reject! do |stream_identity, _|
      timeout_stream_identities.include?(stream_identity)
    end
  end
end
# Called on shutdown: joins whatever each stream still holds and emits it
# through the timeout path so no buffered data is silently dropped, then
# empties the buffer.
def flush_remaining_buffer
  @buffer.each do |stream_identity, elements|
    next if elements.empty?
    combined = elements.map {|_tag, _time, record| record[@key] }.join(@separator)
    tag, time, first_record = elements.first
    note = "Flush remaining buffer: #{stream_identity}"
    handle_timeout_error(tag, time, first_record.merge(@key => combined), note)
    log.info(note)
  end
  @buffer.clear
end
# Routes a timeout/shutdown flush: to the configured @timeout_label's event
# router when one is set, otherwise to the error stream wrapped in a
# TimeoutError carrying the diagnostic message.
def handle_timeout_error(tag, time, record, message)
  return router.emit_error_event(tag, time, record, TimeoutError.new(message)) unless @timeout_label
  event_emitter_router(@timeout_label).emit(tag, time, record)
end
end
end
|
# Homebrew formula for D-Bus 1.10.12 (stable line).
class Dbus < Formula
  # releases: even (1.10.x) = stable, odd (1.11.x) = development
  desc "Message bus system, providing inter-application communication"
  homepage "https://wiki.freedesktop.org/www/Software/dbus"
  url "https://dbus.freedesktop.org/releases/dbus/dbus-1.10.12.tar.gz"
  mirror "https://mirrors.ocf.berkeley.edu/debian/pool/main/d/dbus/dbus_1.10.12.orig.tar.gz"
  sha256 "210a79430b276eafc6406c71705e9140d25b9956d18068df98a70156dc0e475d"
  bottle do
    sha256 "8848b7e368750df3a9526f4c5d47a0649359e9e89cd9d94cb45e706402bdb66c" => :sierra
    sha256 "153735bc649bf2b7c8e995a5fdf44947d9f1c1a0091f1e351283a9621f281298" => :el_capitan
    sha256 "93fba26972dd5930cc211f9d76b4d16c93a120ce7b3c19c2f2be2aedbceac09c" => :yosemite
  end
  # Development (odd-numbered) release, installed with --devel.
  devel do
    url "https://dbus.freedesktop.org/releases/dbus/dbus-1.11.4.tar.gz"
    mirror "https://mirrors.ocf.berkeley.edu/debian/pool/main/d/dbus/dbus_1.11.4.orig.tar.gz"
    sha256 "474de2afde8087adbd26b3fc5cbf6ec45559763c75b21981169a9a1fbac256c9"
  end
  # Building from git requires the autotools toolchain to run autogen.sh.
  head do
    url "https://anongit.freedesktop.org/git/dbus/dbus.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end
  # Patch applies the config templating fixed in https://bugs.freedesktop.org/show_bug.cgi?id=94494
  # Homebrew pr/issue: 50219
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/0a8a55872e/d-bus/org.freedesktop.dbus-session.plist.osx.diff"
    sha256 "a8aa6fe3f2d8f873ad3f683013491f5362d551bf5d4c3b469f1efbc5459a20dc"
  end
  # Linux has no system expat; macOS ships it with the SDK.
  depends_on "expat" unless OS.mac?
  def install
    # Fix the TMPDIR to one D-Bus doesn't reject due to odd symbols
    ENV["TMPDIR"] = "/tmp"
    system "./autogen.sh", "--no-configure" if build.head?
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--localstatedir=#{var}",
                          "--sysconfdir=#{etc}",
                          "--disable-xml-docs",
                          "--disable-doxygen-docs",
                          ("--enable-launchd" if OS.mac?),
                          ("--with-launchd-agent-dir=#{prefix}" if OS.mac?),
                          "--without-x",
                          "--disable-tests"
    system "make"
    ENV.deparallelize
    system "make", "install"
  end
  def post_install
    # Generate D-Bus's UUID for this machine
    system "#{bin}/dbus-uuidgen", "--ensure=#{var}/lib/dbus/machine-id"
  end
  test do
    system "#{bin}/dbus-daemon", "--version"
  end
end
dbus: update 1.10.12 bottle for Linuxbrew.
Closes Linuxbrew/homebrew-core#1129.
Signed-off-by: Shaun Jackman <b580dab3251a9622aba3803114310c23fdb42900@gmail.com>
# Homebrew formula for D-Bus 1.10.12 — same as the previous revision but
# with a Linuxbrew (x86_64_linux) bottle added.
class Dbus < Formula
  # releases: even (1.10.x) = stable, odd (1.11.x) = development
  desc "Message bus system, providing inter-application communication"
  homepage "https://wiki.freedesktop.org/www/Software/dbus"
  url "https://dbus.freedesktop.org/releases/dbus/dbus-1.10.12.tar.gz"
  mirror "https://mirrors.ocf.berkeley.edu/debian/pool/main/d/dbus/dbus_1.10.12.orig.tar.gz"
  sha256 "210a79430b276eafc6406c71705e9140d25b9956d18068df98a70156dc0e475d"
  bottle do
    sha256 "8848b7e368750df3a9526f4c5d47a0649359e9e89cd9d94cb45e706402bdb66c" => :sierra
    sha256 "153735bc649bf2b7c8e995a5fdf44947d9f1c1a0091f1e351283a9621f281298" => :el_capitan
    sha256 "93fba26972dd5930cc211f9d76b4d16c93a120ce7b3c19c2f2be2aedbceac09c" => :yosemite
    sha256 "a85f86c8f580e8c5e9e0808233dcd0549ae5e64685842b1b975f537cef91b5f8" => :x86_64_linux
  end
  # Development (odd-numbered) release, installed with --devel.
  devel do
    url "https://dbus.freedesktop.org/releases/dbus/dbus-1.11.4.tar.gz"
    mirror "https://mirrors.ocf.berkeley.edu/debian/pool/main/d/dbus/dbus_1.11.4.orig.tar.gz"
    sha256 "474de2afde8087adbd26b3fc5cbf6ec45559763c75b21981169a9a1fbac256c9"
  end
  # Building from git requires the autotools toolchain to run autogen.sh.
  head do
    url "https://anongit.freedesktop.org/git/dbus/dbus.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end
  # Patch applies the config templating fixed in https://bugs.freedesktop.org/show_bug.cgi?id=94494
  # Homebrew pr/issue: 50219
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/0a8a55872e/d-bus/org.freedesktop.dbus-session.plist.osx.diff"
    sha256 "a8aa6fe3f2d8f873ad3f683013491f5362d551bf5d4c3b469f1efbc5459a20dc"
  end
  # Linux has no system expat; macOS ships it with the SDK.
  depends_on "expat" unless OS.mac?
  def install
    # Fix the TMPDIR to one D-Bus doesn't reject due to odd symbols
    ENV["TMPDIR"] = "/tmp"
    system "./autogen.sh", "--no-configure" if build.head?
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--localstatedir=#{var}",
                          "--sysconfdir=#{etc}",
                          "--disable-xml-docs",
                          "--disable-doxygen-docs",
                          ("--enable-launchd" if OS.mac?),
                          ("--with-launchd-agent-dir=#{prefix}" if OS.mac?),
                          "--without-x",
                          "--disable-tests"
    system "make"
    ENV.deparallelize
    system "make", "install"
  end
  def post_install
    # Generate D-Bus's UUID for this machine
    system "#{bin}/dbus-uuidgen", "--ensure=#{var}/lib/dbus/machine-id"
  end
  test do
    system "#{bin}/dbus-daemon", "--version"
  end
end
|
require "fluent/plugin/filter"
module Fluent::Plugin
  # Concatenates multiline log records (split across events) back into single
  # events, grouped per stream identity and joined either after a fixed line
  # count (n_lines) or between start/end regexp matches.
  class ConcatFilter < Filter
    Fluent::Plugin.register_filter("concat", self)

    helpers :timer, :event_emitter

    desc "The key for part of multiline log"
    config_param :key, :string
    desc "The separator of lines"
    config_param :separator, :string, default: "\n"
    desc "The number of lines"
    config_param :n_lines, :integer, default: nil
    desc "The regexp to match beginning of multiline"
    config_param :multiline_start_regexp, :string, default: nil
    desc "The regexp to match ending of multiline"
    config_param :multiline_end_regexp, :string, default: nil
    desc "The regexp to match continuous lines"
    config_param :continuous_line_regexp, :string, default: nil
    desc "The key to determine which stream an event belongs to"
    config_param :stream_identity_key, :string, default: nil
    desc "The interval between data flushes, 0 means disable timeout"
    config_param :flush_interval, :time, default: 60
    desc "The label name to handle timeout"
    config_param :timeout_label, :string, default: nil
    desc "Use timestamp of first record when buffer is flushed"
    config_param :use_first_timestamp, :bool, default: false

    # Raised (via the error stream) when a buffered stream is flushed by timeout.
    class TimeoutError < StandardError
    end

    def initialize
      super
      # stream_identity => [[tag, time, record], ...]
      @buffer = Hash.new {|h, k| h[k] = [] }
      # BUGFIX: @timeout_map is written by event threads (process) while the
      # timer thread iterates and prunes it (flush_timeout_buffer), which
      # raised "can't add a new key into hash during iteration". All access
      # is now serialized through @timeout_map_mutex.
      @timeout_map_mutex = Thread::Mutex.new
      @timeout_map_mutex.synchronize do
        @timeout_map = Hash.new {|h, k| h[k] = Fluent::Engine.now }
      end
    end

    # Validates the mode parameters, chooses @mode and compiles the regexps.
    def configure(conf)
      super
      if @n_lines && (@multiline_start_regexp || @multiline_end_regexp || @continuous_line_regexp)
        raise Fluent::ConfigError, "n_lines and multiline_start_regexp/multiline_end_regexp/continuous_line_regexp are exclusive"
      end
      if @n_lines.nil? && @multiline_start_regexp.nil? && @multiline_end_regexp.nil?
        raise Fluent::ConfigError, "Either n_lines or multiline_start_regexp or multiline_end_regexp is required"
      end
      @mode = nil
      case
      when @n_lines
        @mode = :line
      when @multiline_start_regexp || @multiline_end_regexp
        @mode = :regexp
        # Parameters arrive as "/pattern/" strings; strip the surrounding
        # slashes before compiling.
        if @multiline_start_regexp
          @multiline_start_regexp = Regexp.compile(@multiline_start_regexp[1..-2])
        end
        if @multiline_end_regexp
          @multiline_end_regexp = Regexp.compile(@multiline_end_regexp[1..-2])
        end
        if @continuous_line_regexp
          @continuous_line_regexp = Regexp.compile(@continuous_line_regexp[1..-2])
        end
      end
    end

    # Starts the 1-second timer that drives timeout-based flushes.
    def start
      super
      @finished = false
      timer_execute(:filter_concat_timer, 1, &method(:on_timer))
    end

    # Emits whatever is still buffered before the plugin stops.
    def shutdown
      @finished = true
      flush_remaining_buffer
      super
    end

    # Filter entry point: passes fluentd's internal events through, routes
    # other records into process, and re-emits completed events. A failure
    # for one record goes to the error stream instead of aborting the batch.
    def filter_stream(tag, es)
      new_es = Fluent::MultiEventStream.new
      es.each do |time, record|
        # Never try to concatenate fluentd's internal log events.
        if /\Afluent\.(?:trace|debug|info|warn|error|fatal)\z/ =~ tag
          new_es.add(time, record)
          next
        end
        # Records without the target key cannot be concatenated; pass through.
        unless record.key?(@key)
          new_es.add(time, record)
          next
        end
        begin
          flushed_es = process(tag, time, record)
          unless flushed_es.empty?
            flushed_es.each do |_time, new_record|
              time = _time if @use_first_timestamp
              new_es.add(time, record.merge(new_record))
            end
          end
        rescue => e
          router.emit_error_event(tag, time, record, e)
        end
      end
      new_es
    end

    private

    # Timer callback: flushes idle streams. flush_interval <= 0 disables it.
    def on_timer
      return if @flush_interval <= 0
      return if @finished
      flush_timeout_buffer
    rescue => e
      log.error "failed to flush timeout buffer", error: e
    end

    # Refreshes the stream's last-seen timestamp (under the mutex shared
    # with the timer thread) and dispatches to the configured mode handler.
    def process(tag, time, record)
      if @stream_identity_key
        stream_identity = "#{tag}:#{record[@stream_identity_key]}"
      else
        stream_identity = "#{tag}:default"
      end
      @timeout_map_mutex.synchronize do
        @timeout_map[stream_identity] = Fluent::Engine.now
      end
      case @mode
      when :line
        process_line(stream_identity, tag, time, record)
      when :regexp
        process_regexp(stream_identity, tag, time, record)
      end
    end

    # Buffers the record and flushes once @n_lines records accumulated.
    def process_line(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      @buffer[stream_identity] << [tag, time, record]
      if @buffer[stream_identity].size >= @n_lines
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
      end
      new_es
    end

    # Groups lines between start/end regexp matches into one event.
    def process_regexp(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      case
      when firstline?(record[@key])
        if @buffer[stream_identity].empty?
          @buffer[stream_identity] << [tag, time, record]
          # A line may match both patterns: a complete one-line event.
          if lastline?(record[@key])
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
          end
        else
          # New start while a group is pending: flush it and keep the
          # current record as the seed of the next group.
          new_time, new_record = flush_buffer(stream_identity, [tag, time, record])
          time = new_time if @use_first_timestamp
          new_es.add(time, new_record)
          if lastline?(record[@key])
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
          end
          return new_es
        end
      when lastline?(record[@key])
        @buffer[stream_identity] << [tag, time, record]
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
        return new_es
      else
        if @buffer[stream_identity].empty?
          if !@multiline_start_regexp
            @buffer[stream_identity] << [tag, time, record]
          else
            new_es.add(time, record)
            return new_es
          end
        else
          if continuous_line?(record[@key])
            # Continuation of the previous line
            @buffer[stream_identity] << [tag, time, record]
          else
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
            new_es.add(time, record)
          end
        end
      end
      new_es
    end

    def firstline?(text)
      @multiline_start_regexp && !!@multiline_start_regexp.match(text)
    end

    def lastline?(text)
      @multiline_end_regexp && !!@multiline_end_regexp.match(text)
    end

    def continuous_line?(text)
      if @continuous_line_regexp
        !!@continuous_line_regexp.match(text)
      else
        true
      end
    end

    # Joins the buffered lines into one record, resets the stream's buffer
    # (optionally seeding it with new_element) and returns [time, record].
    def flush_buffer(stream_identity, new_element = nil)
      lines = @buffer[stream_identity].map {|_tag, _time, record| record[@key] }
      _tag, time, first_record = @buffer[stream_identity].first
      new_record = {
        @key => lines.join(@separator)
      }
      @buffer[stream_identity] = []
      @buffer[stream_identity] << new_element if new_element
      [time, first_record.merge(new_record)]
    end

    # Flushes streams idle for at least @flush_interval. Runs entirely under
    # the mutex so event threads cannot mutate @timeout_map mid-iteration.
    def flush_timeout_buffer
      now = Fluent::Engine.now
      timeout_stream_identities = []
      @timeout_map_mutex.synchronize do
        @timeout_map.each do |stream_identity, previous_timestamp|
          next if @flush_interval > (now - previous_timestamp)
          next if @buffer[stream_identity].empty?
          time, flushed_record = flush_buffer(stream_identity)
          timeout_stream_identities << stream_identity
          tag = stream_identity.split(":").first
          message = "Timeout flush: #{stream_identity}"
          handle_timeout_error(tag, @use_first_timestamp ? time : now, flushed_record, message)
          log.info(message)
        end
        # Drop flushed streams so they start a fresh timeout window.
        @timeout_map.reject! do |stream_identity, _|
          timeout_stream_identities.include?(stream_identity)
        end
      end
    end

    # Shutdown path: emits whatever each stream still holds, then clears.
    def flush_remaining_buffer
      @buffer.each do |stream_identity, elements|
        next if elements.empty?
        lines = elements.map {|_tag, _time, record| record[@key] }
        new_record = {
          @key => lines.join(@separator)
        }
        tag, time, record = elements.first
        message = "Flush remaining buffer: #{stream_identity}"
        handle_timeout_error(tag, time, record.merge(new_record), message)
        log.info(message)
      end
      @buffer.clear
    end

    # Routes a timeout/shutdown flush to @timeout_label's router when set,
    # otherwise to the error stream wrapped in a TimeoutError.
    def handle_timeout_error(tag, time, record, message)
      if @timeout_label
        event_router = event_emitter_router(@timeout_label)
        event_router.emit(tag, time, record)
      else
        router.emit_error_event(tag, time, record, TimeoutError.new(message))
      end
    end
  end
end
Use Thread::Mutex to resolve `@timeout_map` race condition
Prevent `@timeout_map`'s error "can't add a new key into hash during
iteration".
Fix #49
require "fluent/plugin/filter"
module Fluent::Plugin
  # Concatenates multiline log records (split across events) back into single
  # events, grouped per stream identity and joined either after a fixed line
  # count (n_lines) or between start/end regexp matches.
  class ConcatFilter < Filter
    Fluent::Plugin.register_filter("concat", self)
    helpers :timer, :event_emitter
    desc "The key for part of multiline log"
    config_param :key, :string
    desc "The separator of lines"
    config_param :separator, :string, default: "\n"
    desc "The number of lines"
    config_param :n_lines, :integer, default: nil
    desc "The regexp to match beginning of multiline"
    config_param :multiline_start_regexp, :string, default: nil
    desc "The regexp to match ending of multiline"
    config_param :multiline_end_regexp, :string, default: nil
    desc "The regexp to match continuous lines"
    config_param :continuous_line_regexp, :string, default: nil
    desc "The key to determine which stream an event belongs to"
    config_param :stream_identity_key, :string, default: nil
    desc "The interval between data flushes, 0 means disable timeout"
    config_param :flush_interval, :time, default: 60
    desc "The label name to handle timeout"
    config_param :timeout_label, :string, default: nil
    desc "Use timestamp of first record when buffer is flushed"
    config_param :use_first_timestamp, :bool, default: false
    # Raised (via the error stream) when a stream is flushed by timeout.
    class TimeoutError < StandardError
    end
    def initialize
      super
      # stream_identity => [[tag, time, record], ...]
      @buffer = Hash.new {|h, k| h[k] = [] }
      # @timeout_map is shared between event threads (process) and the timer
      # thread (flush_timeout_buffer); the mutex prevents "can't add a new
      # key into hash during iteration".
      @timeout_map_mutex = Thread::Mutex.new
      @timeout_map_mutex.synchronize do
        @timeout_map = Hash.new {|h, k| h[k] = Fluent::Engine.now }
      end
    end
    # Validates the mode parameters, chooses @mode and compiles the regexps.
    def configure(conf)
      super
      if @n_lines && (@multiline_start_regexp || @multiline_end_regexp || @continuous_line_regexp)
        raise Fluent::ConfigError, "n_lines and multiline_start_regexp/multiline_end_regexp/continuous_line_regexp are exclusive"
      end
      if @n_lines.nil? && @multiline_start_regexp.nil? && @multiline_end_regexp.nil?
        raise Fluent::ConfigError, "Either n_lines or multiline_start_regexp or multiline_end_regexp is required"
      end
      @mode = nil
      case
      when @n_lines
        @mode = :line
      when @multiline_start_regexp || @multiline_end_regexp
        @mode = :regexp
        # Parameters arrive as "/pattern/"; strip the slashes before compiling.
        if @multiline_start_regexp
          @multiline_start_regexp = Regexp.compile(@multiline_start_regexp[1..-2])
        end
        if @multiline_end_regexp
          @multiline_end_regexp = Regexp.compile(@multiline_end_regexp[1..-2])
        end
        if @continuous_line_regexp
          @continuous_line_regexp = Regexp.compile(@continuous_line_regexp[1..-2])
        end
      end
    end
    # Starts the 1-second timer that drives timeout-based flushes.
    def start
      super
      @finished = false
      timer_execute(:filter_concat_timer, 1, &method(:on_timer))
    end
    # Emits whatever is still buffered before the plugin stops.
    def shutdown
      @finished = true
      flush_remaining_buffer
      super
    end
    # Filter entry point: passes fluentd's internal events through, routes
    # other records into process, and re-emits completed events.
    def filter_stream(tag, es)
      new_es = Fluent::MultiEventStream.new
      es.each do |time, record|
        # Never try to concatenate fluentd's internal log events.
        if /\Afluent\.(?:trace|debug|info|warn|error|fatal)\z/ =~ tag
          new_es.add(time, record)
          next
        end
        # Records without the target key cannot be concatenated; pass through.
        unless record.key?(@key)
          new_es.add(time, record)
          next
        end
        begin
          flushed_es = process(tag, time, record)
          unless flushed_es.empty?
            flushed_es.each do |_time, new_record|
              time = _time if @use_first_timestamp
              new_es.add(time, record.merge(new_record))
            end
          end
        rescue => e
          router.emit_error_event(tag, time, record, e)
        end
      end
      new_es
    end
    private
    # Timer callback: flushes idle streams. flush_interval <= 0 disables it.
    def on_timer
      return if @flush_interval <= 0
      return if @finished
      flush_timeout_buffer
    rescue => e
      log.error "failed to flush timeout buffer", error: e
    end
    # Refreshes the stream's last-seen timestamp (under the shared mutex)
    # and dispatches to the configured mode handler.
    def process(tag, time, record)
      if @stream_identity_key
        stream_identity = "#{tag}:#{record[@stream_identity_key]}"
      else
        stream_identity = "#{tag}:default"
      end
      @timeout_map_mutex.synchronize do
        @timeout_map[stream_identity] = Fluent::Engine.now
      end
      case @mode
      when :line
        process_line(stream_identity, tag, time, record)
      when :regexp
        process_regexp(stream_identity, tag, time, record)
      end
    end
    # Buffers the record and flushes once @n_lines records accumulated.
    def process_line(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      @buffer[stream_identity] << [tag, time, record]
      if @buffer[stream_identity].size >= @n_lines
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
      end
      new_es
    end
    # Groups lines between start/end regexp matches into one event.
    def process_regexp(stream_identity, tag, time, record)
      new_es = Fluent::MultiEventStream.new
      case
      when firstline?(record[@key])
        if @buffer[stream_identity].empty?
          @buffer[stream_identity] << [tag, time, record]
          # A line may match both patterns: a complete one-line event.
          if lastline?(record[@key])
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
          end
        else
          # New start while a group is pending: flush it and keep the
          # current record as the seed of the next group.
          new_time, new_record = flush_buffer(stream_identity, [tag, time, record])
          time = new_time if @use_first_timestamp
          new_es.add(time, new_record)
          if lastline?(record[@key])
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
          end
          return new_es
        end
      when lastline?(record[@key])
        @buffer[stream_identity] << [tag, time, record]
        new_time, new_record = flush_buffer(stream_identity)
        time = new_time if @use_first_timestamp
        new_es.add(time, new_record)
        return new_es
      else
        if @buffer[stream_identity].empty?
          if !@multiline_start_regexp
            @buffer[stream_identity] << [tag, time, record]
          else
            new_es.add(time, record)
            return new_es
          end
        else
          if continuous_line?(record[@key])
            # Continuation of the previous line
            @buffer[stream_identity] << [tag, time, record]
          else
            new_time, new_record = flush_buffer(stream_identity)
            time = new_time if @use_first_timestamp
            new_es.add(time, new_record)
            new_es.add(time, record)
          end
        end
      end
      new_es
    end
    def firstline?(text)
      @multiline_start_regexp && !!@multiline_start_regexp.match(text)
    end
    def lastline?(text)
      @multiline_end_regexp && !!@multiline_end_regexp.match(text)
    end
    def continuous_line?(text)
      if @continuous_line_regexp
        !!@continuous_line_regexp.match(text)
      else
        true
      end
    end
    # Joins the buffered lines into one record, resets the stream's buffer
    # (optionally seeding it with new_element) and returns [time, record].
    def flush_buffer(stream_identity, new_element = nil)
      lines = @buffer[stream_identity].map {|_tag, _time, record| record[@key] }
      _tag, time, first_record = @buffer[stream_identity].first
      new_record = {
        @key => lines.join(@separator)
      }
      @buffer[stream_identity] = []
      @buffer[stream_identity] << new_element if new_element
      [time, first_record.merge(new_record)]
    end
    # Flushes streams idle for at least @flush_interval; runs entirely under
    # the mutex so event threads cannot mutate @timeout_map mid-iteration.
    def flush_timeout_buffer
      now = Fluent::Engine.now
      timeout_stream_identities = []
      @timeout_map_mutex.synchronize do
        @timeout_map.each do |stream_identity, previous_timestamp|
          next if @flush_interval > (now - previous_timestamp)
          next if @buffer[stream_identity].empty?
          time, flushed_record = flush_buffer(stream_identity)
          timeout_stream_identities << stream_identity
          tag = stream_identity.split(":").first
          message = "Timeout flush: #{stream_identity}"
          handle_timeout_error(tag, @use_first_timestamp ? time : now, flushed_record, message)
          log.info(message)
        end
        # Drop flushed streams so they start a fresh timeout window.
        @timeout_map.reject! do |stream_identity, _|
          timeout_stream_identities.include?(stream_identity)
        end
      end
    end
    # Shutdown path: emits whatever each stream still holds, then clears.
    def flush_remaining_buffer
      @buffer.each do |stream_identity, elements|
        next if elements.empty?
        lines = elements.map {|_tag, _time, record| record[@key] }
        new_record = {
          @key => lines.join(@separator)
        }
        tag, time, record = elements.first
        message = "Flush remaining buffer: #{stream_identity}"
        handle_timeout_error(tag, time, record.merge(new_record), message)
        log.info(message)
      end
      @buffer.clear
    end
    # Routes a timeout/shutdown flush to @timeout_label's router when set,
    # otherwise to the error stream wrapped in a TimeoutError.
    def handle_timeout_error(tag, time, record, message)
      if @timeout_label
        event_router = event_emitter_router(@timeout_label)
        event_router.emit(tag, time, record)
      else
        router.emit_error_event(tag, time, record, TimeoutError.new(message))
      end
    end
  end
end
|
# dbus: Build a bottle for Linuxbrew
# Homebrew formula for D-Bus 1.12.16 with Linux (non-macOS) build support.
class Dbus < Formula
  # releases: even (1.10.x) = stable, odd (1.11.x) = development
  desc "Message bus system, providing inter-application communication"
  homepage "https://wiki.freedesktop.org/www/Software/dbus"
  url "https://dbus.freedesktop.org/releases/dbus/dbus-1.12.16.tar.gz"
  mirror "https://deb.debian.org/debian/pool/main/d/dbus/dbus_1.12.16.orig.tar.gz"
  sha256 "54a22d2fa42f2eb2a871f32811c6005b531b9613b1b93a0d269b05e7549fec80"
  bottle do
    sha256 "651603d019666efaa74e73d4cfec6430e991c3e241ff05dfd4a393d6f3d0695f" => :mojave
    sha256 "0cb8e6e96ecd0c85c6690ebd0f7a7688f1284024ceac12707cf555a9abdb6866" => :high_sierra
    sha256 "75faeefeaff6028bec5dbf4c04c40d7d4def0ff50797b9f4b3520ec34c4e4111" => :sierra
  end
  # Building from git requires the autotools toolchain to run autogen.sh.
  head do
    url "https://anongit.freedesktop.org/git/dbus/dbus.git"
    depends_on "autoconf" => :build
    depends_on "autoconf-archive" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end
  # xmlto is only needed for the XML docs, which are built on macOS only.
  depends_on "xmlto" => :build if OS.mac?
  # Linux has no system expat or pkg-config; macOS ships expat with the SDK.
  unless OS.mac?
    depends_on "pkg-config" => :build
    depends_on "expat"
  end
  # Patch applies the config templating fixed in https://bugs.freedesktop.org/show_bug.cgi?id=94494
  # Homebrew pr/issue: 50219
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/0a8a55872e/d-bus/org.freedesktop.dbus-session.plist.osx.diff"
    sha256 "a8aa6fe3f2d8f873ad3f683013491f5362d551bf5d4c3b469f1efbc5459a20dc"
  end
  def install
    # Fix the TMPDIR to one D-Bus doesn't reject due to odd symbols
    ENV["TMPDIR"] = "/tmp"
    if OS.mac?
      # macOS doesn't include a pkg-config file for expat
      ENV["EXPAT_CFLAGS"] = "-I#{MacOS.sdk_path}/usr/include"
      ENV["EXPAT_LIBS"] = "-lexpat"
    end
    ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
    system "./autogen.sh", "--no-configure" if build.head?
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--localstatedir=#{var}",
                          "--sysconfdir=#{etc}",
                          ("--enable-xml-docs" if OS.mac?),
                          ("--disable-xml-docs" unless OS.mac?),
                          "--disable-doxygen-docs",
                          ("--enable-launchd" if OS.mac?),
                          ("--with-launchd-agent-dir=#{prefix}" if OS.mac?),
                          "--without-x",
                          "--disable-tests"
    system "make", "install"
  end
  def plist_name
    "org.freedesktop.dbus-session"
  end
  def post_install
    # Generate D-Bus's UUID for this machine
    system "#{bin}/dbus-uuidgen", "--ensure=#{var}/lib/dbus/machine-id"
  end
  test do
    system "#{bin}/dbus-daemon", "--version"
  end
end
dbus: Fix build by not applying the macOS patch on Linux
Closes #14273.
Signed-off-by: Michka Popoff <7b0496f66f66ee22a38826c310c38b415671b832@gmail>
# Homebrew formula: D-Bus message bus daemon and tools (stable 1.12.16).
class Dbus < Formula
  # releases: even (1.10.x) = stable, odd (1.11.x) = development
  desc "Message bus system, providing inter-application communication"
  homepage "https://wiki.freedesktop.org/www/Software/dbus"
  url "https://dbus.freedesktop.org/releases/dbus/dbus-1.12.16.tar.gz"
  mirror "https://deb.debian.org/debian/pool/main/d/dbus/dbus_1.12.16.orig.tar.gz"
  sha256 "54a22d2fa42f2eb2a871f32811c6005b531b9613b1b93a0d269b05e7549fec80"

  bottle do
    sha256 "651603d019666efaa74e73d4cfec6430e991c3e241ff05dfd4a393d6f3d0695f" => :mojave
    sha256 "0cb8e6e96ecd0c85c6690ebd0f7a7688f1284024ceac12707cf555a9abdb6866" => :high_sierra
    sha256 "75faeefeaff6028bec5dbf4c04c40d7d4def0ff50797b9f4b3520ec34c4e4111" => :sierra
  end

  # HEAD builds come from git, so the full autotools chain is required.
  head do
    url "https://anongit.freedesktop.org/git/dbus/dbus.git"
    depends_on "autoconf" => :build
    depends_on "autoconf-archive" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  # xmlto is only needed where XML docs are built (macOS; see configure flags below).
  depends_on "xmlto" => :build if OS.mac?

  # On non-macOS systems expat is not shipped with the SDK, so depend on it here.
  unless OS.mac?
    depends_on "pkg-config" => :build
    depends_on "expat"
  end

  # The launchd session-plist patch only makes sense on macOS; guard it so
  # Linux builds do not try to apply it.
  if OS.mac?
    # Patch applies the config templating fixed in https://bugs.freedesktop.org/show_bug.cgi?id=94494
    # Homebrew pr/issue: 50219
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/0a8a55872e/d-bus/org.freedesktop.dbus-session.plist.osx.diff"
      sha256 "a8aa6fe3f2d8f873ad3f683013491f5362d551bf5d4c3b469f1efbc5459a20dc"
    end
  end

  def install
    # Fix the TMPDIR to one D-Bus doesn't reject due to odd symbols
    ENV["TMPDIR"] = "/tmp"

    if OS.mac?
      # macOS doesn't include a pkg-config file for expat
      ENV["EXPAT_CFLAGS"] = "-I#{MacOS.sdk_path}/usr/include"
      ENV["EXPAT_LIBS"] = "-lexpat"
    end

    ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"

    # HEAD checkouts have no generated configure script yet.
    system "./autogen.sh", "--no-configure" if build.head?

    # NOTE(review): the parenthesized flags evaluate to nil on the "other" OS;
    # presumably Homebrew's system wrapper tolerates nil args — confirm.
    system "./configure", "--disable-dependency-tracking",
           "--prefix=#{prefix}",
           "--localstatedir=#{var}",
           "--sysconfdir=#{etc}",
           ("--enable-xml-docs" if OS.mac?),
           ("--disable-xml-docs" unless OS.mac?),
           "--disable-doxygen-docs",
           ("--enable-launchd" if OS.mac?),
           ("--with-launchd-agent-dir=#{prefix}" if OS.mac?),
           "--without-x",
           "--disable-tests"
    system "make", "install"
  end

  # Name used for the launchd session agent plist installed on macOS.
  def plist_name
    "org.freedesktop.dbus-session"
  end

  def post_install
    # Generate D-Bus's UUID for this machine
    system "#{bin}/dbus-uuidgen", "--ensure=#{var}/lib/dbus/machine-id"
  end

  test do
    system "#{bin}/dbus-daemon", "--version"
  end
end
|
# Represents a request to (re)build Sphinx indices through the Flying Sphinx
# API, optionally tunnelling the database connection over SSH.
class FlyingSphinx::IndexRequest
  attr_reader :index_id, :indices

  # Seconds to wait between polls of the remote index job status.
  INDEX_COMPLETE_CHECKING_INTERVAL = 3

  # Remove all Delta jobs from the queue. If the
  # delayed_jobs table does not exist, this method will do nothing.
  #
  def self.cancel_jobs
    return unless defined?(::Delayed) && ::Delayed::Job.table_exists?

    ::Delayed::Job.delete_all "handler LIKE '--- !ruby/object:FlyingSphinx::%'"
  end

  # Print the status and log of the most recent index job to stdout.
  def self.output_last_index
    index = FlyingSphinx::Configuration.new.api.get('indices/last').body
    puts "Index Job Status: #{index.status}"
    puts "Index Log:\n#{index.log}"
  end

  # @param indices [Array<String>] names of the indices to process
  def initialize(indices = [])
    @indices = indices
  end

  # Shows index name in Delayed::Job#name.
  #
  def display_name
    "#{self.class.name} for #{indices.join(', ')}"
  end

  # Upload the Sphinx configuration and reference files, then run indexing.
  def update_and_index
    update_sphinx_configuration
    update_sphinx_reference_files
    index
  end

  # Human-readable summary of the current request state.
  # @raise [RuntimeError] if the request was never started (no index id)
  def status_message
    raise "Index Request failed to start. Something's not right!" if @index_id.nil?

    status = request_status
    case status
    when 'FINISHED'
      'Index Request has completed.'
    when 'FAILED'
      'Index Request failed.'
    when 'PENDING'
      'Index Request is still pending - something has gone wrong.'
    else
      "Unknown index response: '#{status}'."
    end
  end

  # Runs Sphinx's indexer tool to process the index. Currently assumes Sphinx is
  # running.
  #
  # @return [Boolean] true
  #
  def perform
    index
    true
  end

  private

  def configuration
    @configuration ||= FlyingSphinx::Configuration.new
  end

  def update_sphinx_configuration
    FlyingSphinx::SphinxConfiguration.new.upload_to api
  end

  def update_sphinx_reference_files
    FlyingSphinx::SettingFiles.new.upload_to api
  end

  # Kick off indexing, tunnelled over SSH when the configuration requires it.
  def index
    if FlyingSphinx::Tunnel.required?
      tunnelled_index
    else
      direct_index
    end
  rescue Net::SSH::Exception => err
    # Server closed the connection on us. That's (hopefully) expected, nothing
    # to worry about.
    puts "SSH/Indexing Error: #{err.message}" if log?
  rescue RuntimeError => err
    puts err.message
  end

  def tunnelled_index
    FlyingSphinx::Tunnel.connect(configuration) do
      begin_request unless request_begun?
      true
    end
  end

  def direct_index
    begin_request
    # Fix: poll at the declared interval instead of a hard-coded `sleep 3`,
    # keeping the constant and the behaviour in sync.
    sleep INDEX_COMPLETE_CHECKING_INTERVAL until request_complete?
  end

  def begin_request
    response = api.post 'indices', :indices => indices.join(',')
    @index_id = response.body.id
    @request_begun = true

    raise RuntimeError, 'Your account does not support delta indexing. Upgrading plans is probably the best way around this.' if response.body.status == 'BLOCKED'
  end

  def request_begun?
    @request_begun
  end

  # True once the remote job has finished (successfully or not).
  # @raise [RuntimeError] on an unrecognised status value
  def request_complete?
    status = request_status
    case status
    when 'FINISHED', 'FAILED'
      true
    when 'PENDING'
      false
    else
      # Fix: this previously interpolated `response.body`, but `response` is a
      # local of #begin_request and is not in scope here — it raised NameError
      # instead of the intended message. Report the actual status instead.
      raise "Unknown index response: '#{status}'"
    end
  end

  def request_status
    api.get("indices/#{index_id}").body.status
  end

  def cancel_request
    return if index_id.nil?

    puts "Connecting Flying Sphinx to the Database failed"
    puts "Cancelling Index Request..."
    api.put("indices/#{index_id}", :status => 'CANCELLED')
  end

  def api
    configuration.api
  end

  # Verbose logging is opt-in via the VERBOSE_LOGGING environment variable.
  def log?
    ENV['VERBOSE_LOGGING'] && ENV['VERBOSE_LOGGING'].length > 0
  end
end
Remove more unnecessary methods.
# Represents a request to (re)build Sphinx indices through the Flying Sphinx
# API, optionally tunnelling the database connection over SSH.
class FlyingSphinx::IndexRequest
  attr_reader :index_id, :indices

  # Seconds to wait between polls of the remote index job status.
  INDEX_COMPLETE_CHECKING_INTERVAL = 3

  # Remove all Delta jobs from the queue. If the
  # delayed_jobs table does not exist, this method will do nothing.
  #
  def self.cancel_jobs
    return unless defined?(::Delayed) && ::Delayed::Job.table_exists?

    ::Delayed::Job.delete_all "handler LIKE '--- !ruby/object:FlyingSphinx::%'"
  end

  # Print the status and log of the most recent index job to stdout.
  def self.output_last_index
    index = FlyingSphinx::Configuration.new.api.get('indices/last').body
    puts "Index Job Status: #{index.status}"
    puts "Index Log:\n#{index.log}"
  end

  # @param indices [Array<String>] names of the indices to process
  def initialize(indices = [])
    @indices = indices
  end

  # Shows index name in Delayed::Job#name.
  #
  def display_name
    "#{self.class.name} for #{indices.join(', ')}"
  end

  # Upload the Sphinx configuration and reference files, then run indexing.
  def update_and_index
    FlyingSphinx::SphinxConfiguration.new.upload_to api
    FlyingSphinx::SettingFiles.new.upload_to api
    index
  end

  # Human-readable summary of the current request state.
  # @raise [RuntimeError] if the request was never started (no index id)
  def status_message
    raise "Index Request failed to start. Something's not right!" if @index_id.nil?

    status = request_status
    case status
    when 'FINISHED'
      'Index Request has completed.'
    when 'FAILED'
      'Index Request failed.'
    when 'PENDING'
      'Index Request is still pending - something has gone wrong.'
    else
      "Unknown index response: '#{status}'."
    end
  end

  # Runs Sphinx's indexer tool to process the index. Currently assumes Sphinx is
  # running.
  #
  # @return [Boolean] true
  #
  def perform
    index
    true
  end

  private

  def configuration
    @configuration ||= FlyingSphinx::Configuration.new
  end

  # Kick off indexing, tunnelled over SSH when the configuration requires it.
  def index
    if FlyingSphinx::Tunnel.required?
      tunnelled_index
    else
      direct_index
    end
  rescue Net::SSH::Exception => err
    # Server closed the connection on us. That's (hopefully) expected, nothing
    # to worry about.
    puts "SSH/Indexing Error: #{err.message}" if log?
  rescue RuntimeError => err
    puts err.message
  end

  def tunnelled_index
    FlyingSphinx::Tunnel.connect(configuration) do
      begin_request unless request_begun?
      true
    end
  end

  def direct_index
    begin_request
    # Fix: poll at the declared interval instead of a hard-coded `sleep 3`,
    # keeping the constant and the behaviour in sync.
    sleep INDEX_COMPLETE_CHECKING_INTERVAL until request_complete?
  end

  def begin_request
    response = api.post 'indices', :indices => indices.join(',')
    @index_id = response.body.id
    @request_begun = true

    raise RuntimeError, 'Your account does not support delta indexing. Upgrading plans is probably the best way around this.' if response.body.status == 'BLOCKED'
  end

  def request_begun?
    @request_begun
  end

  # True once the remote job has finished (successfully or not).
  # @raise [RuntimeError] on an unrecognised status value
  def request_complete?
    status = request_status
    case status
    when 'FINISHED', 'FAILED'
      true
    when 'PENDING'
      false
    else
      # Fix: this previously interpolated `response.body`, but `response` is a
      # local of #begin_request and is not in scope here — it raised NameError
      # instead of the intended message. Report the actual status instead.
      raise "Unknown index response: '#{status}'"
    end
  end

  def request_status
    api.get("indices/#{index_id}").body.status
  end

  def cancel_request
    return if index_id.nil?

    puts "Connecting Flying Sphinx to the Database failed"
    puts "Cancelling Index Request..."
    api.put("indices/#{index_id}", :status => 'CANCELLED')
  end

  def api
    configuration.api
  end

  # Verbose logging is opt-in via the VERBOSE_LOGGING environment variable.
  def log?
    ENV['VERBOSE_LOGGING'] && ENV['VERBOSE_LOGGING'].length > 0
  end
end
|
# Homebrew formula: ddgr, a command-line DuckDuckGo client (v1.4).
class Ddgr < Formula
  desc "DuckDuckGo from the terminal"
  homepage "https://github.com/jarun/ddgr"
  url "https://github.com/jarun/ddgr/archive/v1.4.tar.gz"
  sha256 "045063b4d7262992a7ea3cd9fe9715a199318828de82073f54c42631d3ef41b7"

  bottle do
    cellar :any_skip_relocation
    sha256 "82adec1f9d44f702dda20a4099e396b98f4bfcf876a238fa1eeb102f5dd7c86b" => :mojave
    sha256 "1fcc37ddba0118645c78282e191b7c13568e40e72c07c69366223e1656310845" => :high_sierra
    sha256 "1fcc37ddba0118645c78282e191b7c13568e40e72c07c69366223e1656310845" => :sierra
    sha256 "1fcc37ddba0118645c78282e191b7c13568e40e72c07c69366223e1656310845" => :el_capitan
  end

  depends_on "python"

  def install
    system "make", "install", "PREFIX=#{prefix}"

    # Ship the upstream-provided shell completions for all three shells.
    bash_completion.install "auto-completion/bash/ddgr-completion.bash"
    fish_completion.install "auto-completion/fish/ddgr.fish"
    zsh_completion.install "auto-completion/zsh/_ddgr"
  end

  test do
    # NOTE(review): this test performs a live DuckDuckGo query over the network.
    ENV["PYTHONIOENCODING"] = "utf-8"
    assert_match "Homebrew", shell_output("#{bin}/ddgr --noprompt Homebrew")
  end
end
ddgr 1.5
Closes #33415.
Signed-off-by: FX Coudert <c329953660db96eae534be5bbf1a735c2baf69b5@gmail.com>
# Homebrew formula: ddgr, a command-line DuckDuckGo client (v1.5).
class Ddgr < Formula
  desc "DuckDuckGo from the terminal"
  homepage "https://github.com/jarun/ddgr"
  url "https://github.com/jarun/ddgr/archive/v1.5.tar.gz"
  sha256 "b442f707a2c2ead42233d3bf3a9bf919e32ab9860e20d9d39f860840c13c0392"

  # NOTE(review): these bottle checksums are carried over from the previous
  # release; CI normally regenerates them after a version bump — confirm.
  bottle do
    cellar :any_skip_relocation
    sha256 "82adec1f9d44f702dda20a4099e396b98f4bfcf876a238fa1eeb102f5dd7c86b" => :mojave
    sha256 "1fcc37ddba0118645c78282e191b7c13568e40e72c07c69366223e1656310845" => :high_sierra
    sha256 "1fcc37ddba0118645c78282e191b7c13568e40e72c07c69366223e1656310845" => :sierra
    sha256 "1fcc37ddba0118645c78282e191b7c13568e40e72c07c69366223e1656310845" => :el_capitan
  end

  depends_on "python"

  def install
    system "make", "install", "PREFIX=#{prefix}"

    # Ship the upstream-provided shell completions for all three shells.
    bash_completion.install "auto-completion/bash/ddgr-completion.bash"
    fish_completion.install "auto-completion/fish/ddgr.fish"
    zsh_completion.install "auto-completion/zsh/_ddgr"
  end

  test do
    # NOTE(review): this test performs a live DuckDuckGo query over the network.
    ENV["PYTHONIOENCODING"] = "utf-8"
    assert_match "Homebrew", shell_output("#{bin}/ddgr --noprompt Homebrew")
  end
end
|
# Homebrew formula: ddgr, a command-line DuckDuckGo client (v1.9, revision 2).
class Ddgr < Formula
  include Language::Python::Shebang

  desc "DuckDuckGo from the terminal"
  homepage "https://github.com/jarun/ddgr"
  url "https://github.com/jarun/ddgr/archive/v1.9.tar.gz"
  sha256 "3dfe82fab649f1cec904a1de63f78692be329a3b6928c1615f22c76f6e21c36f"
  license "GPL-3.0"
  revision 2

  bottle do
    sha256 cellar: :any_skip_relocation, all: "eec5e7fe4cc40ceaba669038d3c7c7ff09ab6203eae500022555adccdf92e4ef"
  end

  depends_on "python@3.10"

  def install
    # Point the script's shebang at the brewed Python instead of the system one.
    rewrite_shebang detected_python_shebang, "ddgr"

    system "make", "install", "PREFIX=#{prefix}"

    # Ship the upstream-provided shell completions for all three shells.
    bash_completion.install "auto-completion/bash/ddgr-completion.bash"
    fish_completion.install "auto-completion/fish/ddgr.fish"
    zsh_completion.install "auto-completion/zsh/_ddgr"
  end

  test do
    # NOTE(review): this test performs a live DuckDuckGo query over the network.
    ENV["PYTHONIOENCODING"] = "utf-8"
    assert_match "q:Homebrew", shell_output("#{bin}/ddgr --debug --noprompt Homebrew 2>&1")
  end
end
ddgr 2.0
* ddgr 2.0
* ddgr
Closes #96284.
Co-authored-by: Thierry Moisan <8bf87a6c4caed0437859f8c8fafc6782533e4540@gmail.com>
Co-authored-by: rui <907c7afd57be493757f13ccd1dd45dddf02db069@chenrui.dev>
Signed-off-by: Branch Vincent <0e6296586cbd330121a33cee359d4396296e2ead@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula: ddgr, a command-line DuckDuckGo client (v2.0).
class Ddgr < Formula
  include Language::Python::Shebang

  desc "DuckDuckGo from the terminal"
  homepage "https://github.com/jarun/ddgr"
  url "https://github.com/jarun/ddgr/archive/v2.0.tar.gz"
  sha256 "7e46430b0a8c479a5feca832adb73f2f09804bf603dedc50f4cf2e1da4c75f88"
  license "GPL-3.0-or-later"

  # NOTE(review): this bottle checksum is carried over from the previous
  # release; CI normally regenerates it after a version bump — confirm.
  bottle do
    sha256 cellar: :any_skip_relocation, all: "eec5e7fe4cc40ceaba669038d3c7c7ff09ab6203eae500022555adccdf92e4ef"
  end

  depends_on "python@3.10"

  def install
    # Point the script's shebang at the brewed Python instead of the system one.
    rewrite_shebang detected_python_shebang, "ddgr"

    system "make", "install", "PREFIX=#{prefix}"

    # Ship the upstream-provided shell completions for all three shells.
    bash_completion.install "auto-completion/bash/ddgr-completion.bash"
    fish_completion.install "auto-completion/fish/ddgr.fish"
    zsh_completion.install "auto-completion/zsh/_ddgr"
  end

  test do
    # NOTE(review): this test performs a live DuckDuckGo query over the network.
    ENV["PYTHONIOENCODING"] = "utf-8"
    assert_match "q:Homebrew", shell_output("#{bin}/ddgr --debug --noprompt Homebrew 2>&1")
  end
end
|
# Homebrew formula: Deno JavaScript/TypeScript runtime (v1.0.2), built from
# source with a locally built `gn` and V8 compiled from source.
class Deno < Formula
  desc "Secure runtime for JavaScript and TypeScript"
  homepage "https://deno.land/"
  url "https://github.com/denoland/deno/releases/download/v1.0.2/deno_src.tar.gz"
  sha256 "ae31e17fd3c3d92af21a00a4078a5b14f9db5f26b6bea3391d2a13798f9080b9"

  bottle do
    cellar :any_skip_relocation
    sha256 "d28418b0059df404fcd0be59c3c757471a917ce803863e1591b996d7d2439b28" => :catalina
    sha256 "0e73b837fced37fa4643c48264367593fafd884887192c0bcd730b39b46f6543" => :mojave
    sha256 "1aa720fa0e11b7c30d12c8dc219195f9bc07d11a3d719811a2f371796e13b0da" => :high_sierra
  end

  depends_on "llvm" => :build
  depends_on "ninja" => :build
  depends_on "rust" => :build
  depends_on :xcode => ["10.0", :build] # required by v8 7.9+
  depends_on :macos # Due to Python 2 (see https://github.com/denoland/deno/issues/2893)

  uses_from_macos "xz"

  # Pinned gn build tool, built from source below.
  resource "gn" do
    url "https://gn.googlesource.com/gn.git",
        :revision => "5ed3c9cc67b090d5e311e4bd2aba072173e82db9"
  end

  def install
    # Build gn from source (used as a build tool here)
    (buildpath/"gn").install resource("gn")
    cd "gn" do
      system "python", "build/gen.py"
      system "ninja", "-C", "out/", "gn"
    end

    # env args for building a release build with our clang, ninja and gn
    ENV["GN"] = buildpath/"gn/out/gn"
    # build rusty_v8 from source
    ENV["V8_FROM_SOURCE"] = "1"
    # overwrite Chromium minimum sdk version of 10.15
    ENV["FORCE_MAC_SDK_MIN"] = "10.13"
    # build with llvm and link against system libc++ (no runtime dep)
    ENV["CLANG_BASE_PATH"] = Formula["llvm"].prefix
    ENV.remove "HOMEBREW_LIBRARY_PATHS", Formula["llvm"].opt_lib

    cd "cli" do
      system "cargo", "install", "-vv", "--locked", "--root", prefix, "--path", "."
    end

    # Install bash and zsh completion
    output = Utils.popen_read("#{bin}/deno completions bash")
    (bash_completion/"deno").write output
    output = Utils.popen_read("#{bin}/deno completions zsh")
    (zsh_completion/"_deno").write output
  end

  test do
    (testpath/"hello.ts").write <<~EOS
      console.log("hello", "deno");
    EOS
    assert_match "hello deno", shell_output("#{bin}/deno run hello.ts")
    # NOTE(review): the second assertion fetches a remote module over the network.
    assert_match "console.log",
      shell_output("#{bin}/deno run --allow-read=#{testpath} https://deno.land/std@0.50.0/examples/cat.ts " \
                   "#{testpath}/hello.ts")
  end
end
deno: update 1.0.2 bottle.
# Homebrew formula: Deno JavaScript/TypeScript runtime (v1.0.2), rebuilt
# bottles; built from source with a locally built `gn` and V8 from source.
class Deno < Formula
  desc "Secure runtime for JavaScript and TypeScript"
  homepage "https://deno.land/"
  url "https://github.com/denoland/deno/releases/download/v1.0.2/deno_src.tar.gz"
  sha256 "ae31e17fd3c3d92af21a00a4078a5b14f9db5f26b6bea3391d2a13798f9080b9"

  bottle do
    cellar :any_skip_relocation
    sha256 "af2ebe919ba90c3e4bf68f9b33045b31d94ec26e83c157d289b3c8b7cb85eda2" => :catalina
    sha256 "170b16e596fc66feb1529a6b458763ba05c8059f65d1f736dc605debbaf8aae4" => :mojave
    sha256 "854da518700f685f6479aa15fedc6b52ba15f0741c7e343e9233c2f0cf6c6a11" => :high_sierra
  end

  depends_on "llvm" => :build
  depends_on "ninja" => :build
  depends_on "rust" => :build
  depends_on :xcode => ["10.0", :build] # required by v8 7.9+
  depends_on :macos # Due to Python 2 (see https://github.com/denoland/deno/issues/2893)

  uses_from_macos "xz"

  # Pinned gn build tool, built from source below.
  resource "gn" do
    url "https://gn.googlesource.com/gn.git",
        :revision => "5ed3c9cc67b090d5e311e4bd2aba072173e82db9"
  end

  def install
    # Build gn from source (used as a build tool here)
    (buildpath/"gn").install resource("gn")
    cd "gn" do
      system "python", "build/gen.py"
      system "ninja", "-C", "out/", "gn"
    end

    # env args for building a release build with our clang, ninja and gn
    ENV["GN"] = buildpath/"gn/out/gn"
    # build rusty_v8 from source
    ENV["V8_FROM_SOURCE"] = "1"
    # overwrite Chromium minimum sdk version of 10.15
    ENV["FORCE_MAC_SDK_MIN"] = "10.13"
    # build with llvm and link against system libc++ (no runtime dep)
    ENV["CLANG_BASE_PATH"] = Formula["llvm"].prefix
    ENV.remove "HOMEBREW_LIBRARY_PATHS", Formula["llvm"].opt_lib

    cd "cli" do
      system "cargo", "install", "-vv", "--locked", "--root", prefix, "--path", "."
    end

    # Install bash and zsh completion
    output = Utils.popen_read("#{bin}/deno completions bash")
    (bash_completion/"deno").write output
    output = Utils.popen_read("#{bin}/deno completions zsh")
    (zsh_completion/"_deno").write output
  end

  test do
    (testpath/"hello.ts").write <<~EOS
      console.log("hello", "deno");
    EOS
    assert_match "hello deno", shell_output("#{bin}/deno run hello.ts")
    # NOTE(review): the second assertion fetches a remote module over the network.
    assert_match "console.log",
      shell_output("#{bin}/deno run --allow-read=#{testpath} https://deno.land/std@0.50.0/examples/cat.ts " \
                   "#{testpath}/hello.ts")
  end
end
|
require "language/go"
# Homebrew formula: devd, a local webserver for developers (v0.3).
# Uses the legacy gopath-based go_resource DSL to vendor pinned dependencies.
class Devd < Formula
  desc "Local webserver for developers"
  homepage "https://github.com/cortesi/devd"
  url "https://github.com/cortesi/devd/archive/v0.3.tar.gz"
  sha256 "e806421a4de6572eb3196b215a86a3b72bcd0f5a12956224d191e47663f9c4ab"

  depends_on "go" => :build

  go_resource "github.com/GeertJohan/go.rice" do
    url "https://github.com/GeertJohan/go.rice.git", :revision => "ada95a01c963696fb73320ee662195af68be81ae"
  end

  # go.rice dependencies
  go_resource "github.com/daaku/go.zipexe" do
    url "https://github.com/daaku/go.zipexe.git", :revision => "a5fe2436ffcb3236e175e5149162b41cd28bd27d"
  end

  go_resource "github.com/kardianos/osext" do
    url "https://github.com/kardianos/osext.git", :revision => "6e7f843663477789fac7c02def0d0909e969b4e5"
  end

  go_resource "github.com/GeertJohan/go.incremental" do
    url "https://github.com/GeertJohan/go.incremental.git", :revision => "92fd0ce4a694213e8b3dfd2d39b16e51d26d0fbf"
  end

  go_resource "github.com/akavel/rsrc" do
    url "https://github.com/akavel/rsrc.git", :revision => "ba14da1f827188454a4591717fff29999010887f"
  end

  go_resource "github.com/jessevdk/go-flags" do
    url "https://github.com/jessevdk/go-flags.git", :revision => "fc93116606d0a71d7e9de0ad5734fdb4b8eae834"
  end

  # devd dependencies
  go_resource "github.com/bmatcuk/doublestar" do
    # v1.0.1
    url "https://github.com/bmatcuk/doublestar.git", :revision => "4f612bd6c10e2ef68e2ea50aabc50c3681bbac86"
  end

  go_resource "github.com/dustin/go-humanize" do
    url "https://github.com/dustin/go-humanize.git", :revision => "64dbdae0d393b7d71480a6dace78456396b55286"
  end

  go_resource "github.com/fatih/color" do
    url "https://github.com/fatih/color.git", :revision => "9aae6aaa22315390f03959adca2c4d395b02fcef"
  end

  go_resource "github.com/goji/httpauth" do
    url "https://github.com/goji/httpauth.git", :revision => "c1b2bcd8769bd15cc56751223fd4b9f45ca987ca"
  end

  go_resource "github.com/gorilla/websocket" do
    url "https://github.com/gorilla/websocket.git", :revision => "361d4c0ffd78338ebe0a9e6320cdbe115d7dc026"
  end

  go_resource "github.com/juju/ratelimit" do
    url "https://github.com/juju/ratelimit.git", :revision => "772f5c38e468398c4511514f4f6aa9a4185bc0a0"
  end

  go_resource "github.com/mitchellh/go-homedir" do
    url "https://github.com/mitchellh/go-homedir.git", :revision => "d682a8f0cf139663a984ff12528da460ca963de9"
  end

  go_resource "github.com/rjeczalik/notify" do
    url "https://github.com/rjeczalik/notify.git", :revision => "1869adb163fffce8fb5b8755379d1042cdb4c4f8"
  end

  go_resource "github.com/toqueteos/webbrowser" do
    # v1.0
    url "https://github.com/toqueteos/webbrowser.git", :revision => "21fc9f95c83442fd164094666f7cb4f9fdd56cd6"
  end

  go_resource "github.com/alecthomas/template" do
    url "https://github.com/alecthomas/template.git", :revision => "b867cc6ab45cece8143cfcc6fc9c77cf3f2c23c0"
  end

  go_resource "github.com/alecthomas/units" do
    url "https://github.com/alecthomas/units.git", :revision => "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a"
  end

  go_resource "github.com/mattn/go-colorable" do
    url "https://github.com/mattn/go-colorable.git", :revision => "51a7e7a8b1665b25ca173debdc8d52d493348f15"
  end

  go_resource "github.com/mattn/go-isatty" do
    url "https://github.com/mattn/go-isatty.git", :revision => "d6aaa2f596ae91a0a58d8e7f2c79670991468e4f"
  end

  go_resource "golang.org/x/crypto" do
    url "https://go.googlesource.com/crypto.git", :revision => "575fdbe86e5dd89229707ebec0575ce7d088a4a6"
  end

  go_resource "golang.org/x/net" do
    url "https://go.googlesource.com/net.git", :revision => "c764672d0ee39ffd83cfcb375804d3181302b62b"
  end

  go_resource "gopkg.in/alecthomas/kingpin.v2" do
    # v2.4.1
    url "https://github.com/alecthomas/kingpin.git", :revision => "95529ad11b3c862a5b828a2142b9e50db579cf2c"
  end

  def install
    ENV["GOOS"] = "darwin"
    ENV["GOARCH"] = MacOS.prefer_64_bit? ? "amd64" : "386"
    ENV["GOPATH"] = buildpath

    # Lay out a GOPATH workspace: link the source into its import path and
    # stage all pinned dependencies alongside it.
    mkdir_p buildpath/"src/github.com/cortesi/"
    ln_sf buildpath, buildpath/"src/github.com/cortesi/devd"
    Language::Go.stage_deps resources, buildpath/"src"

    # rice embeds static assets into the Go binaries.
    system "go", "install", "github.com/GeertJohan/go.rice/rice"
    ENV.prepend_path "PATH", buildpath/"bin"

    # NOTE: versions after v0.3 have improved build script, thus
    # it would be simplier to call in future
    # system "./build", "single"
    # meanwhile, we do compilation like this:
    system "rice", "embed-go"
    cd "#{buildpath}/livereload" do
      system "rice", "embed-go"
    end
    system "go", "build", "-o", "#{bin}/devd", "./cmd/devd"
    doc.install "README.md"
  end

  test do
    begin
      # Start the server, give it a moment to bind, then shut it down and
      # check the banner it printed.
      io = IO.popen("#{bin}/devd #{testpath}")
      sleep 2
    ensure
      Process.kill("SIGINT", io.pid)
      Process.wait(io.pid)
    end
    assert_match "Listening on http://devd.io", io.read
  end
end
devd: add 0.3 bottle.
require "language/go"
# Homebrew formula: devd, a local webserver for developers (v0.3, with bottles).
# Uses the legacy gopath-based go_resource DSL to vendor pinned dependencies.
class Devd < Formula
  desc "Local webserver for developers"
  homepage "https://github.com/cortesi/devd"
  url "https://github.com/cortesi/devd/archive/v0.3.tar.gz"
  sha256 "e806421a4de6572eb3196b215a86a3b72bcd0f5a12956224d191e47663f9c4ab"

  bottle do
    cellar :any_skip_relocation
    sha256 "24300e18e3291b229c950f2e9712df430e09e15fd57895041d3e29fa26086b39" => :el_capitan
    sha256 "249f1d803e732f1a1a91973d610f56ba129338b52831a11cfe08e1d519ac0116" => :yosemite
    sha256 "613460e6d2368b1c35930eb78d24656a906db92d6a0ed2ad6e8b5764e42d7806" => :mavericks
  end

  depends_on "go" => :build

  go_resource "github.com/GeertJohan/go.rice" do
    url "https://github.com/GeertJohan/go.rice.git", :revision => "ada95a01c963696fb73320ee662195af68be81ae"
  end

  # go.rice dependencies
  go_resource "github.com/daaku/go.zipexe" do
    url "https://github.com/daaku/go.zipexe.git", :revision => "a5fe2436ffcb3236e175e5149162b41cd28bd27d"
  end

  go_resource "github.com/kardianos/osext" do
    url "https://github.com/kardianos/osext.git", :revision => "6e7f843663477789fac7c02def0d0909e969b4e5"
  end

  go_resource "github.com/GeertJohan/go.incremental" do
    url "https://github.com/GeertJohan/go.incremental.git", :revision => "92fd0ce4a694213e8b3dfd2d39b16e51d26d0fbf"
  end

  go_resource "github.com/akavel/rsrc" do
    url "https://github.com/akavel/rsrc.git", :revision => "ba14da1f827188454a4591717fff29999010887f"
  end

  go_resource "github.com/jessevdk/go-flags" do
    url "https://github.com/jessevdk/go-flags.git", :revision => "fc93116606d0a71d7e9de0ad5734fdb4b8eae834"
  end

  # devd dependencies
  go_resource "github.com/bmatcuk/doublestar" do
    # v1.0.1
    url "https://github.com/bmatcuk/doublestar.git", :revision => "4f612bd6c10e2ef68e2ea50aabc50c3681bbac86"
  end

  go_resource "github.com/dustin/go-humanize" do
    url "https://github.com/dustin/go-humanize.git", :revision => "64dbdae0d393b7d71480a6dace78456396b55286"
  end

  go_resource "github.com/fatih/color" do
    url "https://github.com/fatih/color.git", :revision => "9aae6aaa22315390f03959adca2c4d395b02fcef"
  end

  go_resource "github.com/goji/httpauth" do
    url "https://github.com/goji/httpauth.git", :revision => "c1b2bcd8769bd15cc56751223fd4b9f45ca987ca"
  end

  go_resource "github.com/gorilla/websocket" do
    url "https://github.com/gorilla/websocket.git", :revision => "361d4c0ffd78338ebe0a9e6320cdbe115d7dc026"
  end

  go_resource "github.com/juju/ratelimit" do
    url "https://github.com/juju/ratelimit.git", :revision => "772f5c38e468398c4511514f4f6aa9a4185bc0a0"
  end

  go_resource "github.com/mitchellh/go-homedir" do
    url "https://github.com/mitchellh/go-homedir.git", :revision => "d682a8f0cf139663a984ff12528da460ca963de9"
  end

  go_resource "github.com/rjeczalik/notify" do
    url "https://github.com/rjeczalik/notify.git", :revision => "1869adb163fffce8fb5b8755379d1042cdb4c4f8"
  end

  go_resource "github.com/toqueteos/webbrowser" do
    # v1.0
    url "https://github.com/toqueteos/webbrowser.git", :revision => "21fc9f95c83442fd164094666f7cb4f9fdd56cd6"
  end

  go_resource "github.com/alecthomas/template" do
    url "https://github.com/alecthomas/template.git", :revision => "b867cc6ab45cece8143cfcc6fc9c77cf3f2c23c0"
  end

  go_resource "github.com/alecthomas/units" do
    url "https://github.com/alecthomas/units.git", :revision => "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a"
  end

  go_resource "github.com/mattn/go-colorable" do
    url "https://github.com/mattn/go-colorable.git", :revision => "51a7e7a8b1665b25ca173debdc8d52d493348f15"
  end

  go_resource "github.com/mattn/go-isatty" do
    url "https://github.com/mattn/go-isatty.git", :revision => "d6aaa2f596ae91a0a58d8e7f2c79670991468e4f"
  end

  go_resource "golang.org/x/crypto" do
    url "https://go.googlesource.com/crypto.git", :revision => "575fdbe86e5dd89229707ebec0575ce7d088a4a6"
  end

  go_resource "golang.org/x/net" do
    url "https://go.googlesource.com/net.git", :revision => "c764672d0ee39ffd83cfcb375804d3181302b62b"
  end

  go_resource "gopkg.in/alecthomas/kingpin.v2" do
    # v2.4.1
    url "https://github.com/alecthomas/kingpin.git", :revision => "95529ad11b3c862a5b828a2142b9e50db579cf2c"
  end

  def install
    ENV["GOOS"] = "darwin"
    ENV["GOARCH"] = MacOS.prefer_64_bit? ? "amd64" : "386"
    ENV["GOPATH"] = buildpath

    # Lay out a GOPATH workspace: link the source into its import path and
    # stage all pinned dependencies alongside it.
    mkdir_p buildpath/"src/github.com/cortesi/"
    ln_sf buildpath, buildpath/"src/github.com/cortesi/devd"
    Language::Go.stage_deps resources, buildpath/"src"

    # rice embeds static assets into the Go binaries.
    system "go", "install", "github.com/GeertJohan/go.rice/rice"
    ENV.prepend_path "PATH", buildpath/"bin"

    # NOTE: versions after v0.3 have improved build script, thus
    # it would be simplier to call in future
    # system "./build", "single"
    # meanwhile, we do compilation like this:
    system "rice", "embed-go"
    cd "#{buildpath}/livereload" do
      system "rice", "embed-go"
    end
    system "go", "build", "-o", "#{bin}/devd", "./cmd/devd"
    doc.install "README.md"
  end

  test do
    begin
      # Start the server, give it a moment to bind, then shut it down and
      # check the banner it printed.
      io = IO.popen("#{bin}/devd #{testpath}")
      sleep 2
    ensure
      Process.kill("SIGINT", io.pid)
      Process.wait(io.pid)
    end
    assert_match "Listening on http://devd.io", io.read
  end
end
|
# Homebrew formula: dfmt, a formatter for D source code (v0.12.0, built with dmd).
class Dfmt < Formula
  desc "Formatter for D source code"
  homepage "https://github.com/dlang-community/dfmt"
  url "https://github.com/dlang-community/dfmt.git",
      :tag => "v0.12.0",
      :revision => "09caf255e537ea832a87c9aeb7ec4ed38d751300"
  head "https://github.com/dlang-community/dfmt.git", :branch => "v0.x.x", :shallow => false

  bottle do
    cellar :any_skip_relocation
    sha256 "8a3b74f50717d236b6ef445de2608ab31e4c43ae10af01e71d9b95d590c4c8e7" => :catalina
    sha256 "db3e4c50f54d7dd31c9b2768d7b0539d8dea62ae0d7c3ccef7c8ce955721a595" => :mojave
    sha256 "2fa9649c94a8f624d772d76a1cd43ecc3e8b016a29711d4ea0b6e92906646f09" => :high_sierra
  end

  depends_on "dmd" => :build

  def install
    system "make"
    bin.install "bin/dfmt"
  end

  # Format a one-liner in place and compare against dfmt's expected output.
  test do
    (testpath/"test.d").write <<~EOS
      import std.stdio; void main() { writeln("Hello, world without explicit compilations!"); }
    EOS
    expected = <<~EOS
      import std.stdio;

      void main()
      {
          writeln("Hello, world without explicit compilations!");
      }
    EOS
    system "#{bin}/dfmt", "-i", "test.d"
    assert_equal expected, (testpath/"test.d").read
  end
end
dfmt: add license
# Homebrew formula: dfmt, a formatter for D source code (v0.12.0, BSL-1.0 licensed).
class Dfmt < Formula
  desc "Formatter for D source code"
  homepage "https://github.com/dlang-community/dfmt"
  url "https://github.com/dlang-community/dfmt.git",
      :tag => "v0.12.0",
      :revision => "09caf255e537ea832a87c9aeb7ec4ed38d751300"
  license "BSL-1.0"
  head "https://github.com/dlang-community/dfmt.git", :branch => "v0.x.x", :shallow => false

  bottle do
    cellar :any_skip_relocation
    sha256 "8a3b74f50717d236b6ef445de2608ab31e4c43ae10af01e71d9b95d590c4c8e7" => :catalina
    sha256 "db3e4c50f54d7dd31c9b2768d7b0539d8dea62ae0d7c3ccef7c8ce955721a595" => :mojave
    sha256 "2fa9649c94a8f624d772d76a1cd43ecc3e8b016a29711d4ea0b6e92906646f09" => :high_sierra
  end

  depends_on "dmd" => :build

  def install
    system "make"
    bin.install "bin/dfmt"
  end

  # Format a one-liner in place and compare against dfmt's expected output.
  test do
    (testpath/"test.d").write <<~EOS
      import std.stdio; void main() { writeln("Hello, world without explicit compilations!"); }
    EOS
    expected = <<~EOS
      import std.stdio;

      void main()
      {
          writeln("Hello, world without explicit compilations!");
      }
    EOS
    system "#{bin}/dfmt", "-i", "test.d"
    assert_equal expected, (testpath/"test.d").read
  end
end
|
# Homebrew formula: dfmt, a formatter for D source code (v0.14.2, built with dmd).
class Dfmt < Formula
  desc "Formatter for D source code"
  homepage "https://github.com/dlang-community/dfmt"
  url "https://github.com/dlang-community/dfmt.git",
      tag:      "v0.14.2",
      revision: "6a24f0dc7c490f4cb06cdc9d21b841bee84615f4"
  license "BSL-1.0"
  head "https://github.com/dlang-community/dfmt.git", branch: "v0.x.x"

  bottle do
    sha256 cellar: :any_skip_relocation, monterey:     "b6f7197f2f1580bff0f06ed086ca679c310d87d64ed506ce8deb89fd49a7d514"
    sha256 cellar: :any_skip_relocation, big_sur:      "e558a7feb8739e2064851d66769050b26b8bab8bf009356a773cda5a39e924e4"
    sha256 cellar: :any_skip_relocation, catalina:     "3b322f9dbfb2062678b18e6e53719204446315e2fc70a788759505cdc663b839"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "852cdd41ee99f72be5b7e22af09e07a5da3457791016a78ec0038f3172c96120"
  end

  depends_on "dmd" => :build

  def install
    system "make"
    bin.install "bin/dfmt"
  end

  # Format a one-liner in place and compare against dfmt's expected output.
  test do
    (testpath/"test.d").write <<~EOS
      import std.stdio; void main() { writeln("Hello, world without explicit compilations!"); }
    EOS
    expected = <<~EOS
      import std.stdio;

      void main()
      {
          writeln("Hello, world without explicit compilations!");
      }
    EOS
    system "#{bin}/dfmt", "-i", "test.d"
    assert_equal expected, (testpath/"test.d").read
  end
end
dfmt: build on Apple Silicon using `ldc`
Closes #98446.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula: dfmt, a formatter for D source code (v0.14.2).
# Built with ldc on macOS (works on Apple Silicon) and dmd on Linux.
class Dfmt < Formula
  desc "Formatter for D source code"
  homepage "https://github.com/dlang-community/dfmt"
  url "https://github.com/dlang-community/dfmt.git",
      tag:      "v0.14.2",
      revision: "6a24f0dc7c490f4cb06cdc9d21b841bee84615f4"
  license "BSL-1.0"
  head "https://github.com/dlang-community/dfmt.git", branch: "v0.x.x"

  bottle do
    sha256 cellar: :any_skip_relocation, monterey:     "b6f7197f2f1580bff0f06ed086ca679c310d87d64ed506ce8deb89fd49a7d514"
    sha256 cellar: :any_skip_relocation, big_sur:      "e558a7feb8739e2064851d66769050b26b8bab8bf009356a773cda5a39e924e4"
    sha256 cellar: :any_skip_relocation, catalina:     "3b322f9dbfb2062678b18e6e53719204446315e2fc70a788759505cdc663b839"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "852cdd41ee99f72be5b7e22af09e07a5da3457791016a78ec0038f3172c96120"
  end

  on_macos do
    depends_on "ldc" => :build
  end

  on_linux do
    depends_on "dmd" => :build
  end

  def install
    # The Makefile has a target per D compiler; pick the one we depend on.
    target = OS.mac? ? "ldc" : "dmd"
    system "make", target
    bin.install "bin/dfmt"
  end

  # Format a one-liner in place and compare against dfmt's expected output.
  test do
    (testpath/"test.d").write <<~EOS
      import std.stdio; void main() { writeln("Hello, world without explicit compilations!"); }
    EOS
    expected = <<~EOS
      import std.stdio;

      void main()
      {
          writeln("Hello, world without explicit compilations!");
      }
    EOS
    system "#{bin}/dfmt", "-i", "test.d"
    assert_equal expected, (testpath/"test.d").read
  end
end
|
# Homebrew formula: dnsx, a fast DNS toolkit from projectdiscovery (v1.0.6).
class Dnsx < Formula
  desc "DNS query and resolution tool"
  homepage "https://github.com/projectdiscovery/dnsx"
  url "https://github.com/projectdiscovery/dnsx/archive/v1.0.6.tar.gz"
  sha256 "ef0ade0508f0d6d8ed7e80f06e2ed888eaca9837fde513fc801451044b6a567e"
  license "MIT"
  head "https://github.com/projectdiscovery/dnsx.git"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "a7649cd5bd8bc64650deb59d632bbb41dc4019ec0b274429e91c528f867b1657"
    sha256 cellar: :any_skip_relocation, big_sur:       "86d7ea41fa000eec1131088f2bc587b59cafebdfcf5a69bf9de6bc08cedf4428"
    sha256 cellar: :any_skip_relocation, catalina:      "849ac2af9f0c7243ede6c4eff3c48940b5481308eea710a421a2b78b9227e9f2"
    sha256 cellar: :any_skip_relocation, mojave:        "65d790cbb774b8bd2ff2b4ca18125754ebc3d340ad233a2076902e6465e2bc2c"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "60b87827d4c15c1f99f9792af793ef87c63aaa47756ea8362d1fc57b7ea028ad"
  end

  depends_on "go" => :build

  def install
    # -s -w strips symbol and DWARF tables for a smaller binary.
    system "go", "build", *std_go_args(ldflags: "-s -w"), "./cmd/dnsx"
  end

  test do
    # NOTE(review): resolves a live CNAME over the network.
    (testpath/"domains.txt").write "docs.brew.sh"
    expected_output = "docs.brew.sh [homebrew.github.io]"
    assert_equal expected_output,
      shell_output("#{bin}/dnsx -silent -l #{testpath}/domains.txt -cname -resp").strip
  end
end
dnsx: update 1.0.6 bottle.
# Homebrew formula: builds the projectdiscovery `dnsx` DNS toolkit (v1.0.6)
# from source with the Go toolchain. This revision adds Monterey bottles.
class Dnsx < Formula
desc "DNS query and resolution tool"
homepage "https://github.com/projectdiscovery/dnsx"
url "https://github.com/projectdiscovery/dnsx/archive/v1.0.6.tar.gz"
sha256 "ef0ade0508f0d6d8ed7e80f06e2ed888eaca9837fde513fc801451044b6a567e"
license "MIT"
head "https://github.com/projectdiscovery/dnsx.git"
# Checksums of the pre-built bottles, one per OS/arch target.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "98aa3d8b0c4f290f1c49ddd6a2494402e131a5d80a7a5b6121578d45ffb2a97c"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "a7649cd5bd8bc64650deb59d632bbb41dc4019ec0b274429e91c528f867b1657"
sha256 cellar: :any_skip_relocation, monterey: "02f2786ef09886d28d569d47c962257efe7f54b68b4217166cf9b2aa96018ac4"
sha256 cellar: :any_skip_relocation, big_sur: "86d7ea41fa000eec1131088f2bc587b59cafebdfcf5a69bf9de6bc08cedf4428"
sha256 cellar: :any_skip_relocation, catalina: "849ac2af9f0c7243ede6c4eff3c48940b5481308eea710a421a2b78b9227e9f2"
sha256 cellar: :any_skip_relocation, mojave: "65d790cbb774b8bd2ff2b4ca18125754ebc3d340ad233a2076902e6465e2bc2c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "60b87827d4c15c1f99f9792af793ef87c63aaa47756ea8362d1fc57b7ea028ad"
end
depends_on "go" => :build
def install
# "-s -w" strips symbol and DWARF debug info to shrink the binary.
system "go", "build", *std_go_args(ldflags: "-s -w"), "./cmd/dnsx"
end
test do
# Resolve a known CNAME and check the expected answer appears.
# NOTE(review): this test needs network access at brew-test time.
(testpath/"domains.txt").write "docs.brew.sh"
expected_output = "docs.brew.sh [homebrew.github.io]"
assert_equal expected_output,
shell_output("#{bin}/dnsx -silent -l #{testpath}/domains.txt -cname -resp").strip
end
end
|
# Homebrew formula: Debian's dpkg 1.19.5, built into libexec with bin/man
# symlinked out, deliberately NOT configured to install packages (see caveats).
class Dpkg < Formula
desc "Debian package management system"
homepage "https://wiki.debian.org/Teams/Dpkg"
# Please always keep the Homebrew mirror as the primary URL as the
# dpkg site removes tarballs regularly which means we get issues
# unnecessarily and older versions of the formula are broken.
url "https://dl.bintray.com/homebrew/mirror/dpkg-1.19.5.tar.xz"
mirror "https://deb.debian.org/debian/pool/main/d/dpkg/dpkg_1.19.5.tar.xz"
sha256 "ed327c9973610a695e1950613861c734974d9476e76ef713724558601ce63544"
bottle do
sha256 "74943c7f943cdebd482064dcb3a1e1015c3c12c0dbc278bb154b0dcaf6a693cf" => :mojave
sha256 "bb7c36470baff8a906a9a73b57b7e2c01502e559ab2ec56a983052b68692ba06" => :high_sierra
sha256 "c2976a3a08cd491521d3eddb4ad9713a4e4ad0bcf7fa37dc2fadd4fed6b90683" => :sierra
end
depends_on "pkg-config" => :build
depends_on "gnu-tar"
depends_on "gpatch"
depends_on "perl"
depends_on "xz" # For LZMA
def install
# We need to specify a recent gnutar, otherwise various dpkg C programs will
# use the system "tar", which will fail because it lacks certain switches.
ENV["TAR"] = Formula["gnu-tar"].opt_bin/"gtar"
# Since 1.18.24 dpkg mandates the use of GNU patch to prevent occurrences
# of the CVE-2017-8283 vulnerability.
# https://www.openwall.com/lists/oss-security/2017/04/20/2
ENV["PATCH"] = Formula["gpatch"].opt_bin/"patch"
# Theoretically, we could reinsert a patch here submitted upstream previously
# but the check for PERL_LIB remains in place and incompatible with Homebrew.
# Using an env and scripting is a solution less likely to break over time.
# Both variables need to be set. One is compile-time, the other run-time.
ENV["PERL_LIBDIR"] = libexec/"lib/perl5"
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{libexec}",
"--sysconfdir=#{etc}",
"--localstatedir=#{var}",
"--disable-dselect",
"--disable-start-stop-daemon"
system "make"
system "make", "install"
# Everything lands in libexec; expose only the binaries, man pages and
# pkg-config files from the regular prefix locations.
bin.install Dir[libexec/"bin/*"]
man.install Dir[libexec/"share/man/*"]
(lib/"pkgconfig").install_symlink Dir[libexec/"lib/pkgconfig/*.pc"]
# Wrap the installed scripts so they see the private Perl lib dir at run time.
bin.env_script_all_files(libexec/"bin", :PERL5LIB => ENV["PERL5LIB"])
# dpkg requires a vendor origin file to exist; provide a dummy default.
(buildpath/"dummy").write "Vendor: dummy\n"
(etc/"dpkg/origins").install "dummy"
(etc/"dpkg/origins").install_symlink "dummy" => "default"
end
# Create the state/log directories dpkg expects under var.
def post_install
(var/"lib/dpkg").mkpath
(var/"log").mkpath
end
def caveats; <<~EOS
This installation of dpkg is not configured to install software, so
commands such as `dpkg -i`, `dpkg --configure` will fail.
EOS
end
test do
# Build a minimal .deb, then extract it again and verify the payload.
# Do not remove the empty line from the end of the control file
# All deb control files MUST end with an empty line
(testpath/"test/data/homebrew.txt").write "brew"
(testpath/"test/DEBIAN/control").write <<~EOS
Package: test
Version: 1.40.99
Architecture: amd64
Description: I am a test
Maintainer: Dpkg Developers <test@test.org>
EOS
system bin/"dpkg", "-b", testpath/"test", "test.deb"
assert_predicate testpath/"test.deb", :exist?
rm_rf "test"
system bin/"dpkg", "-x", "test.deb", testpath
assert_predicate testpath/"data/homebrew.txt", :exist?
end
end
dpkg: update 1.19.5 bottle.
# Homebrew formula: Debian's dpkg 1.19.5 (bottle-refresh revision of the
# formula above), built into libexec and NOT configured to install packages.
class Dpkg < Formula
desc "Debian package management system"
homepage "https://wiki.debian.org/Teams/Dpkg"
# Please always keep the Homebrew mirror as the primary URL as the
# dpkg site removes tarballs regularly which means we get issues
# unnecessarily and older versions of the formula are broken.
url "https://dl.bintray.com/homebrew/mirror/dpkg-1.19.5.tar.xz"
mirror "https://deb.debian.org/debian/pool/main/d/dpkg/dpkg_1.19.5.tar.xz"
sha256 "ed327c9973610a695e1950613861c734974d9476e76ef713724558601ce63544"
bottle do
sha256 "6d8f6d049e693ebb7473080775b390492b3fa36f770a0dd50f096c44d42290b4" => :mojave
sha256 "3d33081da370d0a60c8173b7784f29801b9de5d25c4dfdd75605df8d018bb2fd" => :high_sierra
sha256 "1e9cd78be9898662c43d3acbcf24d23d87d26fd58a66c9bb99985dcd7f896d65" => :sierra
end
depends_on "pkg-config" => :build
depends_on "gnu-tar"
depends_on "gpatch"
depends_on "perl"
depends_on "xz" # For LZMA
def install
# We need to specify a recent gnutar, otherwise various dpkg C programs will
# use the system "tar", which will fail because it lacks certain switches.
ENV["TAR"] = Formula["gnu-tar"].opt_bin/"gtar"
# Since 1.18.24 dpkg mandates the use of GNU patch to prevent occurrences
# of the CVE-2017-8283 vulnerability.
# https://www.openwall.com/lists/oss-security/2017/04/20/2
ENV["PATCH"] = Formula["gpatch"].opt_bin/"patch"
# Theoretically, we could reinsert a patch here submitted upstream previously
# but the check for PERL_LIB remains in place and incompatible with Homebrew.
# Using an env and scripting is a solution less likely to break over time.
# Both variables need to be set. One is compile-time, the other run-time.
ENV["PERL_LIBDIR"] = libexec/"lib/perl5"
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{libexec}",
"--sysconfdir=#{etc}",
"--localstatedir=#{var}",
"--disable-dselect",
"--disable-start-stop-daemon"
system "make"
system "make", "install"
# Everything lands in libexec; expose only the binaries, man pages and
# pkg-config files from the regular prefix locations.
bin.install Dir[libexec/"bin/*"]
man.install Dir[libexec/"share/man/*"]
(lib/"pkgconfig").install_symlink Dir[libexec/"lib/pkgconfig/*.pc"]
# Wrap the installed scripts so they see the private Perl lib dir at run time.
bin.env_script_all_files(libexec/"bin", :PERL5LIB => ENV["PERL5LIB"])
# dpkg requires a vendor origin file to exist; provide a dummy default.
(buildpath/"dummy").write "Vendor: dummy\n"
(etc/"dpkg/origins").install "dummy"
(etc/"dpkg/origins").install_symlink "dummy" => "default"
end
# Create the state/log directories dpkg expects under var.
def post_install
(var/"lib/dpkg").mkpath
(var/"log").mkpath
end
def caveats; <<~EOS
This installation of dpkg is not configured to install software, so
commands such as `dpkg -i`, `dpkg --configure` will fail.
EOS
end
test do
# Build a minimal .deb, then extract it again and verify the payload.
# Do not remove the empty line from the end of the control file
# All deb control files MUST end with an empty line
(testpath/"test/data/homebrew.txt").write "brew"
(testpath/"test/DEBIAN/control").write <<~EOS
Package: test
Version: 1.40.99
Architecture: amd64
Description: I am a test
Maintainer: Dpkg Developers <test@test.org>
EOS
system bin/"dpkg", "-b", testpath/"test", "test.deb"
assert_predicate testpath/"test.deb", :exist?
rm_rf "test"
system bin/"dpkg", "-x", "test.deb", testpath
assert_predicate testpath/"data/homebrew.txt", :exist?
end
end
|
# Homebrew formula: the Dune build system for OCaml (2.7.0), bootstrapped
# with the OCaml compiler itself.
class Dune < Formula
desc "Composable build system for OCaml"
homepage "https://dune.build/"
url "https://github.com/ocaml/dune/releases/download/2.7.0/dune-2.7.0.tbz"
sha256 "b417ca85bdce4171e71255be4a9c5a7572646cb1dcb221bba3757dc6ac8f1c15"
license "MIT"
head "https://github.com/ocaml/dune.git"
bottle do
cellar :any_skip_relocation
sha256 "637ff9902fcd06fc9f048c5bf1a9214d0bd8b3a8a883a8b312650f1972985dcf" => :catalina
sha256 "b1825a28dd391ffa2b3d4eadcf5495523b37e0fa63dfd92eaa067e51fa7f6f27" => :mojave
sha256 "4d70f58146199488cb87c607b691b962cdfd0ccbbf13852af2e835323ce554a7" => :high_sierra
end
depends_on "ocaml" => [:build, :test]
def install
# Dune bootstraps itself: build a minimal dune.exe, then use it to build
# the full dune binary.
system "ocaml", "configure.ml"
system "ocaml", "bootstrap.ml"
system "./dune.exe", "build", "-p", "dune", "--profile", "dune-bootstrap"
bin.install "_build/default/bin/dune.exe"
mv bin/"dune.exe", bin/"dune"
end
test do
# Define a rule that writes a file, run it, and check the output.
contents = "bar"
target_fname = "foo.txt"
(testpath/"dune").write("(rule (with-stdout-to #{target_fname} (echo #{contents})))")
system bin/"dune", "build", "foo.txt", "--root", "."
output = File.read(testpath/"_build/default/#{target_fname}")
assert_match contents, output
end
end
dune: update 2.7.0 bottle.
# Homebrew formula: the Dune build system for OCaml (2.7.0). This revision
# adds a Linux bottle.
class Dune < Formula
desc "Composable build system for OCaml"
homepage "https://dune.build/"
url "https://github.com/ocaml/dune/releases/download/2.7.0/dune-2.7.0.tbz"
sha256 "b417ca85bdce4171e71255be4a9c5a7572646cb1dcb221bba3757dc6ac8f1c15"
license "MIT"
head "https://github.com/ocaml/dune.git"
bottle do
cellar :any_skip_relocation
sha256 "637ff9902fcd06fc9f048c5bf1a9214d0bd8b3a8a883a8b312650f1972985dcf" => :catalina
sha256 "b1825a28dd391ffa2b3d4eadcf5495523b37e0fa63dfd92eaa067e51fa7f6f27" => :mojave
sha256 "4d70f58146199488cb87c607b691b962cdfd0ccbbf13852af2e835323ce554a7" => :high_sierra
sha256 "4e4ab4b71b66ca7ce30fb5354ebc7274fb7fbd6c393f3aa22d32b9c18c883bd0" => :x86_64_linux
end
depends_on "ocaml" => [:build, :test]
def install
# Dune bootstraps itself: build a minimal dune.exe, then use it to build
# the full dune binary.
system "ocaml", "configure.ml"
system "ocaml", "bootstrap.ml"
system "./dune.exe", "build", "-p", "dune", "--profile", "dune-bootstrap"
bin.install "_build/default/bin/dune.exe"
mv bin/"dune.exe", bin/"dune"
end
test do
# Define a rule that writes a file, run it, and check the output.
contents = "bar"
target_fname = "foo.txt"
(testpath/"dune").write("(rule (with-stdout-to #{target_fname} (echo #{contents})))")
system bin/"dune", "build", "foo.txt", "--root", "."
output = File.read(testpath/"_build/default/#{target_fname}")
assert_match contents, output
end
end
|
# Homebrew formula: entr 3.8, a utility that runs a command whenever
# watched files change.
class Entr < Formula
desc "Run arbitrary commands when files change"
homepage "http://entrproject.org/"
url "http://entrproject.org/code/entr-3.8.tar.gz"
mirror "https://bitbucket.org/eradman/entr/get/entr-3.8.tar.gz"
sha256 "ebb1e793d948db76481f081011bf1dad8b4449e067f4e5fe68176191f84b26bd"
bottle do
cellar :any_skip_relocation
sha256 "4d65e64b31f996f59ea61c30cebc547cee38633eed91a9882a0c22dfcb4e6e77" => :high_sierra
sha256 "eefae48abeb986b3d0f4f60b4090bf85c86249efb42ad3a70ad65c6f690ef7af" => :sierra
sha256 "65b4a69116adedd4b3f1677f1d2946f12d31a117527439c2eacbeff746fab7eb" => :el_capitan
sha256 "acfa5e389ca6b0d29f3a3a62abd9f585af12b9f3edb96bb902e76badd3dcfa00" => :yosemite
end
# Development version lives in Mercurial, so HEAD builds need hg.
head do
url "https://bitbucket.org/eradman/entr", :using => :hg
depends_on :hg => :build
end
def install
# entr's configure reads install locations from the environment.
ENV["PREFIX"] = prefix
ENV["MANPREFIX"] = man
system "./configure"
system "make"
system "make", "install"
end
test do
# Touch a second file from a forked child while entr watches the
# directory (-d); entr should fire once and echo the marker string.
touch testpath/"test.1"
fork do
sleep 0.5
touch testpath/"test.2"
end
assert_equal "New File", pipe_output("#{bin}/entr -p -d echo 'New File'", testpath).strip
end
end
entr 3.9
Closes #18255.
Signed-off-by: FX Coudert <c329953660db96eae534be5bbf1a735c2baf69b5@gmail.com>
# Homebrew formula: entr 3.9 (version bump of the formula above).
# NOTE(review): bottle sha256 values were carried over from 3.8 — they are
# rebuilt by CI after a version bump.
class Entr < Formula
desc "Run arbitrary commands when files change"
homepage "http://entrproject.org/"
url "http://entrproject.org/code/entr-3.9.tar.gz"
mirror "https://bitbucket.org/eradman/entr/get/entr-3.9.tar.gz"
sha256 "a49daac8c46290f9886ac4ef3143da5b042a696c3969f6b40383e591c137d1ce"
bottle do
cellar :any_skip_relocation
sha256 "4d65e64b31f996f59ea61c30cebc547cee38633eed91a9882a0c22dfcb4e6e77" => :high_sierra
sha256 "eefae48abeb986b3d0f4f60b4090bf85c86249efb42ad3a70ad65c6f690ef7af" => :sierra
sha256 "65b4a69116adedd4b3f1677f1d2946f12d31a117527439c2eacbeff746fab7eb" => :el_capitan
sha256 "acfa5e389ca6b0d29f3a3a62abd9f585af12b9f3edb96bb902e76badd3dcfa00" => :yosemite
end
# Development version lives in Mercurial, so HEAD builds need hg.
head do
url "https://bitbucket.org/eradman/entr", :using => :hg
depends_on :hg => :build
end
def install
# entr's configure reads install locations from the environment.
ENV["PREFIX"] = prefix
ENV["MANPREFIX"] = man
system "./configure"
system "make"
system "make", "install"
end
test do
# Touch a second file from a forked child while entr watches the
# directory (-d); entr should fire once and echo the marker string.
touch testpath/"test.1"
fork do
sleep 0.5
touch testpath/"test.2"
end
assert_equal "New File", pipe_output("#{bin}/entr -p -d echo 'New File'", testpath).strip
end
end
|
# Homebrew formula: etcd 3.3.12, built with the (pre-modules) GOPATH layout
# and shipped with a launchd plist so it can run as a background service.
class Etcd < Formula
desc "Key value store for shared configuration and service discovery"
homepage "https://github.com/etcd-io/etcd"
url "https://github.com/etcd-io/etcd/archive/v3.3.12.tar.gz"
sha256 "0452a98bd485d757fd85d2182f8eac8c2dad315bcb6cf29a797ced9e2669c413"
head "https://github.com/etcd-io/etcd.git"
bottle do
cellar :any_skip_relocation
sha256 "aec9fd433640c6d28a7c290377a85bddc62108cc9f48d31f67f69cdd0ed48ff3" => :mojave
sha256 "aca19e97bb0aa68e5c7e0ecfa53ccfe732c5064d404829acc77971b80b60aa23" => :high_sierra
sha256 "306287bc18445a088e05c9b3a7d53274c52ee02f4e935eb30ddf9629827e1ddb" => :sierra
end
depends_on "go" => :build
def install
# Pre-module Go builds require the source to sit at its canonical import
# path under GOPATH; symlink the build dir there before running ./build.
ENV["GOPATH"] = buildpath
mkdir_p "src/github.com/etcd-io"
ln_s buildpath, "src/github.com/etcd-io/etcd"
system "./build"
bin.install "bin/etcd"
bin.install "bin/etcdctl"
end
plist_options :manual => "etcd"
# launchd job: keep etcd alive unless it exited successfully, run from var.
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/etcd</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
</dict>
</plist>
EOS
end
test do
# Start a single-node cluster, PUT a key over the v2 HTTP API, read it
# back, and always kill the forked daemon afterwards.
begin
test_string = "Hello from brew test!"
etcd_pid = fork do
exec bin/"etcd", "--force-new-cluster", "--data-dir=#{testpath}"
end
# sleep to let etcd get its wits about it
sleep 10
etcd_uri = "http://127.0.0.1:2379/v2/keys/brew_test"
system "curl", "--silent", "-L", etcd_uri, "-XPUT", "-d", "value=#{test_string}"
curl_output = shell_output("curl --silent -L #{etcd_uri}")
response_hash = JSON.parse(curl_output)
assert_match(test_string, response_hash.fetch("node").fetch("value"))
ensure
# clean up the etcd process before we leave
Process.kill("HUP", etcd_pid)
end
end
end
etcd: update 3.3.12 bottle.
# Homebrew formula: etcd 3.3.12 (bottle-refresh revision), built with the
# pre-modules GOPATH layout and shipped with a launchd plist.
class Etcd < Formula
desc "Key value store for shared configuration and service discovery"
homepage "https://github.com/etcd-io/etcd"
url "https://github.com/etcd-io/etcd/archive/v3.3.12.tar.gz"
sha256 "0452a98bd485d757fd85d2182f8eac8c2dad315bcb6cf29a797ced9e2669c413"
head "https://github.com/etcd-io/etcd.git"
bottle do
cellar :any_skip_relocation
sha256 "2d43653b282635230c278353d20371604daa844bdf23ccd57df5d245efa197fc" => :mojave
sha256 "a74726216e107deff2f0754783335a8f33f90e7ff44e70d1d12f187df2f73a9e" => :high_sierra
sha256 "ee5445e6c1303c285e3c631f9620186dd9f78dc2fdfb0ede12959ee6c8aa0ae5" => :sierra
end
depends_on "go" => :build
def install
# Pre-module Go builds require the source to sit at its canonical import
# path under GOPATH; symlink the build dir there before running ./build.
ENV["GOPATH"] = buildpath
mkdir_p "src/github.com/etcd-io"
ln_s buildpath, "src/github.com/etcd-io/etcd"
system "./build"
bin.install "bin/etcd"
bin.install "bin/etcdctl"
end
plist_options :manual => "etcd"
# launchd job: keep etcd alive unless it exited successfully, run from var.
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/etcd</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
</dict>
</plist>
EOS
end
test do
# Start a single-node cluster, PUT a key over the v2 HTTP API, read it
# back, and always kill the forked daemon afterwards.
begin
test_string = "Hello from brew test!"
etcd_pid = fork do
exec bin/"etcd", "--force-new-cluster", "--data-dir=#{testpath}"
end
# sleep to let etcd get its wits about it
sleep 10
etcd_uri = "http://127.0.0.1:2379/v2/keys/brew_test"
system "curl", "--silent", "-L", etcd_uri, "-XPUT", "-d", "value=#{test_string}"
curl_output = shell_output("curl --silent -L #{etcd_uri}")
response_hash = JSON.parse(curl_output)
assert_match(test_string, response_hash.fetch("node").fetch("value"))
ensure
# clean up the etcd process before we leave
Process.kill("HUP", etcd_pid)
end
end
end
|
require 'formula'
# Homebrew formula: the Exim 4.85 mail transfer agent. Exim has no
# ./configure; it is configured by editing Local/Makefile, which this
# formula does with inreplace before building.
class Exim < Formula
homepage 'http://exim.org'
url 'http://ftp.exim.org/pub/exim/exim4/exim-4.85.tar.bz2'
mirror 'http://www.mirrorservice.org/sites/ftp.exim.org/pub/exim/exim4/exim-4.85.tar.bz2'
sha1 '6b40d5a6ae59f86b4780ad50aaf0d930330d7b67'
bottle do
sha1 "011a332c09baaf4d00c322b56b91e22bcc7a8334" => :yosemite
sha1 "16bc2450378a8061ad290cdbe47797381381d5bb" => :mavericks
sha1 "6d90530045d473748f044d00bc5b14957da17808" => :mountain_lion
end
option 'support-maildir', 'Support delivery in Maildir format'
depends_on 'pcre'
depends_on 'berkeley-db4'
depends_on 'openssl'
def install
# Seed the build configuration from the shipped template, then rewrite
# paths/users/features to match the Homebrew layout.
cp 'src/EDITME', 'Local/Makefile'
inreplace 'Local/Makefile' do |s|
s.remove_make_var! "EXIM_MONITOR"
s.change_make_var! "EXIM_USER", ENV['USER']
s.change_make_var! "SYSTEM_ALIASES_FILE", etc/'aliases'
s.gsub! '/usr/exim/configure', etc/'exim.conf'
s.gsub! '/usr/exim', prefix
s.gsub! '/var/spool/exim', var/'spool/exim'
# http://trac.macports.org/ticket/38654
s.gsub! 'TMPDIR="/tmp"', 'TMPDIR=/tmp'
s << "SUPPORT_MAILDIR=yes\n" if build.include? 'support-maildir'
s << "AUTH_PLAINTEXT=yes\n"
s << "SUPPORT_TLS=yes\n"
s << "TLS_LIBS=-lssl -lcrypto\n"
# For non-/usr/local HOMEBREW_PREFIX
s << "LOOKUP_INCLUDE=-I#{HOMEBREW_PREFIX}/include\n"
s << "LOOKUP_LIBS=-L#{HOMEBREW_PREFIX}/lib\n"
end
bdb4 = Formula["berkeley-db4"]
inreplace 'OS/Makefile-Darwin' do |s|
s.remove_make_var! %w{CC CFLAGS}
# Add include and lib paths for BDB 4
s.gsub! "# Exim: OS-specific make file for Darwin (Mac OS X).", "INCLUDE=-I#{bdb4.include}"
s.gsub! "DBMLIB =", "DBMLIB=#{bdb4.lib}/libdb-4.dylib"
end
# The compile script ignores CPPFLAGS
ENV.append 'CFLAGS', ENV.cppflags
ENV.j1 # See: https://lists.exim.org/lurker/thread/20111109.083524.87c96d9b.en.html
system "make"
# -no_chown: skip the setuid/chown steps that need root at install time.
system "make INSTALL_ARG=-no_chown install"
man8.install 'doc/exim.8'
(bin+'exim_ctl').write startup_script
end
# Inspired by MacPorts startup script. Fixes restart issue due to missing setuid.
# Returns the shell script installed as bin/exim_ctl (start/stop/restart
# wrapper around the exim daemon's pid file).
def startup_script; <<-EOS.undent
#!/bin/sh
PID=#{var}/spool/exim/exim-daemon.pid
case "$1" in
start)
echo "starting exim mail transfer agent"
#{bin}/exim -bd -q30m
;;
restart)
echo "restarting exim mail transfer agent"
/bin/kill -15 `/bin/cat $PID` && sleep 1 && #{bin}/exim -bd -q30m
;;
stop)
echo "stopping exim mail transfer agent"
/bin/kill -15 `/bin/cat $PID`
;;
*)
echo "Usage: #{bin}/exim_ctl {start|stop|restart}"
exit 1
;;
esac
EOS
end
def caveats; <<-EOS.undent
Start with:
exim_ctl start
Don't forget to run it as root to be able to bind port 25.
EOS
end
end
exim: add LMTP support
Compile with LMTP support for easier interaction with Dovecot.
Closes Homebrew/homebrew#39503.
Signed-off-by: Tim D. Smith <46f1a0bd5592a2f9244ca321b129902a06b53e03@tim-smith.us>
require 'formula'
# Homebrew formula: the Exim 4.85 mail transfer agent. This revision adds
# LMTP transport support (TRANSPORT_LMTP) for easier Dovecot interaction.
class Exim < Formula
homepage 'http://exim.org'
url 'http://ftp.exim.org/pub/exim/exim4/exim-4.85.tar.bz2'
mirror 'http://www.mirrorservice.org/sites/ftp.exim.org/pub/exim/exim4/exim-4.85.tar.bz2'
sha1 '6b40d5a6ae59f86b4780ad50aaf0d930330d7b67'
bottle do
sha1 "011a332c09baaf4d00c322b56b91e22bcc7a8334" => :yosemite
sha1 "16bc2450378a8061ad290cdbe47797381381d5bb" => :mavericks
sha1 "6d90530045d473748f044d00bc5b14957da17808" => :mountain_lion
end
option 'support-maildir', 'Support delivery in Maildir format'
depends_on 'pcre'
depends_on 'berkeley-db4'
depends_on 'openssl'
def install
# Seed the build configuration from the shipped template, then rewrite
# paths/users/features to match the Homebrew layout.
cp 'src/EDITME', 'Local/Makefile'
inreplace 'Local/Makefile' do |s|
s.remove_make_var! "EXIM_MONITOR"
s.change_make_var! "EXIM_USER", ENV['USER']
s.change_make_var! "SYSTEM_ALIASES_FILE", etc/'aliases'
s.gsub! '/usr/exim/configure', etc/'exim.conf'
s.gsub! '/usr/exim', prefix
s.gsub! '/var/spool/exim', var/'spool/exim'
# http://trac.macports.org/ticket/38654
s.gsub! 'TMPDIR="/tmp"', 'TMPDIR=/tmp'
s << "SUPPORT_MAILDIR=yes\n" if build.include? 'support-maildir'
s << "AUTH_PLAINTEXT=yes\n"
s << "SUPPORT_TLS=yes\n"
s << "TLS_LIBS=-lssl -lcrypto\n"
s << "TRANSPORT_LMTP=yes\n"
# For non-/usr/local HOMEBREW_PREFIX
s << "LOOKUP_INCLUDE=-I#{HOMEBREW_PREFIX}/include\n"
s << "LOOKUP_LIBS=-L#{HOMEBREW_PREFIX}/lib\n"
end
bdb4 = Formula["berkeley-db4"]
inreplace 'OS/Makefile-Darwin' do |s|
s.remove_make_var! %w{CC CFLAGS}
# Add include and lib paths for BDB 4
s.gsub! "# Exim: OS-specific make file for Darwin (Mac OS X).", "INCLUDE=-I#{bdb4.include}"
s.gsub! "DBMLIB =", "DBMLIB=#{bdb4.lib}/libdb-4.dylib"
end
# The compile script ignores CPPFLAGS
ENV.append 'CFLAGS', ENV.cppflags
ENV.j1 # See: https://lists.exim.org/lurker/thread/20111109.083524.87c96d9b.en.html
system "make"
# -no_chown: skip the setuid/chown steps that need root at install time.
system "make INSTALL_ARG=-no_chown install"
man8.install 'doc/exim.8'
(bin+'exim_ctl').write startup_script
end
# Inspired by MacPorts startup script. Fixes restart issue due to missing setuid.
# Returns the shell script installed as bin/exim_ctl (start/stop/restart
# wrapper around the exim daemon's pid file).
def startup_script; <<-EOS.undent
#!/bin/sh
PID=#{var}/spool/exim/exim-daemon.pid
case "$1" in
start)
echo "starting exim mail transfer agent"
#{bin}/exim -bd -q30m
;;
restart)
echo "restarting exim mail transfer agent"
/bin/kill -15 `/bin/cat $PID` && sleep 1 && #{bin}/exim -bd -q30m
;;
stop)
echo "stopping exim mail transfer agent"
/bin/kill -15 `/bin/cat $PID`
;;
*)
echo "Usage: #{bin}/exim_ctl {start|stop|restart}"
exit 1
;;
esac
EOS
end
def caveats; <<-EOS.undent
Start with:
exim_ctl start
Don't forget to run it as root to be able to bind port 25.
EOS
end
end
|
# Homebrew formula: ffuf 1.3.0, a web fuzzer, built from source with Go.
class Ffuf < Formula
desc "Fast web fuzzer written in Go"
homepage "https://github.com/ffuf/ffuf"
url "https://github.com/ffuf/ffuf/archive/v1.3.0.tar.gz"
sha256 "f717ede958c16846c7f67c14fac2a9296fcb3ad249045d04ec97c2d1d30e83ea"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "32637322c8b42463adf6b42636cd318c9fd29e09fa8be62ee77a229e161b2a61"
sha256 cellar: :any_skip_relocation, big_sur: "2243d50f59109b79cd9ec47ee45b3bdb002c1f546944f2f46bbc09281ae387be"
sha256 cellar: :any_skip_relocation, catalina: "ab6fee3beb856c25b674e87863a29733f6da5434d3a1b93035b91d42a6ba72e3"
sha256 cellar: :any_skip_relocation, mojave: "3b0edfedd94bbbd378465d24a36258289c4a4b9401eddd673b64568f6da008dd"
end
depends_on "go" => :build
def install
# "-s -w" strips symbol and DWARF debug info to shrink the binary.
system "go", "build", *std_go_args, "-ldflags", "-s -w"
end
test do
# Fuzz example.org with a 5-word list; the summary line should report
# 5/5 requests and zero errors. NOTE(review): needs network access.
(testpath/"words.txt").write <<~EOS
dog
cat
horse
snake
ape
EOS
output = shell_output("#{bin}/ffuf -u https://example.org/FUZZ -w words.txt 2>&1")
assert_match %r{:: Progress: \[5/5\].*Errors: 0 ::$}, output
end
end
ffuf: update 1.3.0 bottle.
# Homebrew formula: ffuf 1.3.0 (bottle-refresh revision adding a Linux bottle).
class Ffuf < Formula
desc "Fast web fuzzer written in Go"
homepage "https://github.com/ffuf/ffuf"
url "https://github.com/ffuf/ffuf/archive/v1.3.0.tar.gz"
sha256 "f717ede958c16846c7f67c14fac2a9296fcb3ad249045d04ec97c2d1d30e83ea"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "32637322c8b42463adf6b42636cd318c9fd29e09fa8be62ee77a229e161b2a61"
sha256 cellar: :any_skip_relocation, big_sur: "2243d50f59109b79cd9ec47ee45b3bdb002c1f546944f2f46bbc09281ae387be"
sha256 cellar: :any_skip_relocation, catalina: "ab6fee3beb856c25b674e87863a29733f6da5434d3a1b93035b91d42a6ba72e3"
sha256 cellar: :any_skip_relocation, mojave: "3b0edfedd94bbbd378465d24a36258289c4a4b9401eddd673b64568f6da008dd"
sha256 cellar: :any_skip_relocation, x86_64_linux: "55f52a12708b9563459f99e69cdf332244c7cea454b91b572f6bfdc9ccd1daa8"
end
depends_on "go" => :build
def install
# "-s -w" strips symbol and DWARF debug info to shrink the binary.
system "go", "build", *std_go_args, "-ldflags", "-s -w"
end
test do
# Fuzz example.org with a 5-word list; the summary line should report
# 5/5 requests and zero errors. NOTE(review): needs network access.
(testpath/"words.txt").write <<~EOS
dog
cat
horse
snake
ape
EOS
output = shell_output("#{bin}/ffuf -u https://example.org/FUZZ -w words.txt 2>&1")
assert_match %r{:: Progress: \[5/5\].*Errors: 0 ::$}, output
end
end
|
require 'formula'
# Homebrew formula: FITS (File Information Tool Set) 0.8.4, a Java tool.
# Built from source with ant because the bundled jars may not match the
# installed JDK.
class Fits < Formula
desc "File Information Tool Set"
homepage 'http://fitstool.org/'
url 'http://projects.iq.harvard.edu/files/fits/files/fits-0.8.4_0.zip'
version '0.8.4'
sha1 'f9b6b13cf1e818c6cdcfec71eb3dcd6804dd8819'
bottle do
cellar :any
sha256 "e606253277eb78b26d24ff3dfd582d7bc1fae03d13e393ff0512885fdc278066" => :yosemite
sha256 "0e97437daf0e227b2ec937cf9034db585a92a17e34cd22ed2f8fe2b80be15003" => :mavericks
sha256 "050cb99d9da1f008a4721c5e3cf962a19fc591f075e6126b9d2bea7482495dfd" => :mountain_lion
end
# provided jars may not be compatible with installed java,
# but works when built from source
depends_on "ant" => :build
depends_on :java => "1.8"
def install
system "ant"
# Point FITS_HOME at the Cellar prefix and the lib dir at libexec.
inreplace 'fits-env.sh' do |s|
s.gsub! "FITS_HOME=`echo \"$0\" | sed 's,/[^/]*$,,'`", "FITS_HOME=#{prefix}"
s.gsub! "${FITS_HOME}/lib", libexec
end
prefix.install %w{ COPYING COPYING.LESSER tools xml }
prefix.install Dir['*.txt']
libexec.install Dir['lib/*']
# fits-env.sh is a helper script that sets up environment
# variables, so we want to tuck this away in libexec
libexec.install 'fits-env.sh'
inreplace %w[fits.sh fits-ngserver.sh],
'"$(dirname $BASH_SOURCE)/fits-env.sh"', "'#{libexec}/fits-env.sh'"
bin.install 'fits.sh' => 'fits'
bin.install 'fits-ngserver.sh' => 'fits-ngserver'
end
end
fits 0.8.6-1
Closes Homebrew/homebrew#40434.
Signed-off-by: Baptiste Fontaine <bfee279af59f3e3f71f7ce1fa037ea7b90f93cbf@yahoo.fr>
# Homebrew formula: FITS (File Information Tool Set) 0.8.6-1. Built from
# source with ant; relaxes the Java requirement to 1.7+ and adds a test.
# NOTE(review): bottle sha256 values were carried over from the previous
# version — they are rebuilt by CI after a version bump.
class Fits < Formula
desc "File Information Tool Set"
homepage "http://projects.iq.harvard.edu/fits"
url "https://projects.iq.harvard.edu/files/fits/files/fits-0.8.6_1.zip"
version "0.8.6-1"
sha256 "d45f67a2606aaa0fdcbbade576f70f1590916b043fec28dcfdef1a8242fd4040"
bottle do
cellar :any
sha256 "e606253277eb78b26d24ff3dfd582d7bc1fae03d13e393ff0512885fdc278066" => :yosemite
sha256 "0e97437daf0e227b2ec937cf9034db585a92a17e34cd22ed2f8fe2b80be15003" => :mavericks
sha256 "050cb99d9da1f008a4721c5e3cf962a19fc591f075e6126b9d2bea7482495dfd" => :mountain_lion
end
# provided jars may not be compatible with installed java,
# but works when built from source
depends_on "ant" => :build
depends_on :java => "1.7+"
def install
system "ant"
# Point FITS_HOME at the Cellar prefix and the lib dir at libexec.
inreplace "fits-env.sh" do |s|
s.gsub! "FITS_HOME=`echo \"$0\" | sed 's,/[^/]*$,,'`", "FITS_HOME=#{prefix}"
s.gsub! "${FITS_HOME}/lib", libexec
end
prefix.install %w[COPYING COPYING.LESSER tools xml]
prefix.install Dir["*.txt"]
libexec.install Dir["lib/*"]
# fits-env.sh is a helper script that sets up environment
# variables, so we want to tuck this away in libexec
libexec.install "fits-env.sh"
inreplace %w[fits.sh fits-ngserver.sh],
'"$(dirname $BASH_SOURCE)/fits-env.sh"', "'#{libexec}/fits-env.sh'"
bin.install "fits.sh" => "fits"
bin.install "fits-ngserver.sh" => "fits-ngserver"
end
test do
# Identify a bundled MP3 fixture and check the detected MIME type.
assert_match 'mimetype="audio/mpeg"',
shell_output("#{bin}/fits -i #{test_fixtures "test.mp3"} 2>&1")
end
end
|
require 'formula'
# Homebrew formula: FLAC 1.3.0 codec and tools. Also installs a flac2mp3
# helper script read from the __END__ DATA section of this file.
class Flac < Formula
homepage 'http://xiph.org/flac/'
url 'http://downloads.xiph.org/releases/flac/flac-1.3.0.tar.xz'
sha1 'a136e5748f8fb1e6c524c75000a765fc63bb7b1b'
option :universal
depends_on 'xz' => :build
depends_on 'lame'
depends_on 'libogg' => :optional
fails_with :llvm do
build 2326
cause "Undefined symbols when linking"
end
def install
ENV.universal_binary if build.universal?
# sadly the asm optimisations won't compile since Leopard
system "./configure", "--disable-dependency-tracking",
"--disable-debug",
"--disable-asm-optimizations",
"--enable-sse",
"--enable-static",
"--prefix=#{prefix}",
"--mandir=#{man}"
ENV['OBJ_FORMAT']='macho'
# adds universal flags to the generated libtool script
inreplace "libtool" do |s|
s.gsub! ":$verstring\"", ":$verstring -arch #{Hardware::CPU.arch_32_bit} -arch #{Hardware::CPU.arch_64_bit}\""
end
system "make install"
# Install the flac2mp3 helper embedded after __END__ below.
(bin/'flac2mp3').write DATA.read
end
end
__END__
#!/usr/bin/env ruby
# http://gist.github.com/gists/2998853/
# Forked from http://gist.github.com/gists/124242
#
# flac2mp3: transcode ARGV[0] (a FLAC file) to MP3 with lame, copying the
# Vorbis comment tags across as ID3 tags. Optional ARGV[1] picks quality.
filename, quality = ARGV[0], ARGV[1]
abort "Usage: flac2mp3 FLACFILE [V2|V1|V0|320]\nDefault (and recommended) quality is V0." if filename.nil?
# Map the requested quality onto a lame flag; anything else falls back to V0.
qualarg = case quality
when "V0","V1","V2" then quality
when "320" then "b 320"
else "V0"
end
# Vorbis tag name -> lame ID3 option.
map = {"TITLE" => "--tt", "ARTIST" => "--ta", "ALBUM" => "--tl", "TRACKNUMBER" => "--tn", "GENRE" => "--tg", "DATE" => "--ty"}
args = ""
# FIX: the interpolation here was garbled to the literal "#(unknown)"; it
# must interpolate the input filename so metaflac reads tags from ARGV[0].
`metaflac --export-tags-to=- "#{filename}"`.each_line do |line|
key, value = line.strip.split('=', 2)
key.upcase!
args << %Q{#{map[key]} "#{value.gsub('"', '\"')}" } if map[key]
end
basename = File.basename(filename, File.extname(filename))
puts "Encoding #{basename}.mp3"
# FIX: same garbled interpolation — decode the input file, pipe PCM to lame.
exec %Q[flac -sdc "#{filename}" | lame -#{qualarg} #{args} - "#{basename}.mp3"]
flac: fix building on 10.9
* Based on the work of @fernandotcl.
- https://gist.github.com/fernandotcl/7120507
Closes Homebrew/homebrew#23254.
Closes Homebrew/homebrew#23540.
Signed-off-by: Xiyue Deng <4900478725231961caf32165c870e05cea7389d8@gmail.com>
require 'formula'
# Homebrew formula: FLAC 1.3.0 codec and tools, with a patch and an
# autotools regeneration step to fix building on OS X 10.9 Mavericks.
# Also installs a flac2mp3 helper script read from the __END__ DATA section.
class Flac < Formula
homepage 'http://xiph.org/flac/'
url 'http://downloads.xiph.org/releases/flac/flac-1.3.0.tar.xz'
sha1 'a136e5748f8fb1e6c524c75000a765fc63bb7b1b'
option :universal
depends_on 'xz' => :build
depends_on 'lame'
depends_on 'libogg' => :optional
# Full autotools chain is needed because ./autogen.sh is run below.
depends_on 'pkg-config' => :build
depends_on 'libtool' => :build
depends_on 'automake' => :build
depends_on 'autoconf' => :build
fails_with :llvm do
build 2326
cause "Undefined symbols when linking"
end
def patches
#fixes compilation on mac os 10.9 maverick
[
"https://gist.github.com/leiflm/7139949/raw"
]
end
def install
ENV.universal_binary if build.universal?
# sadly the asm optimisations won't compile since Leopard
system "./autogen.sh"
system "./configure", "--disable-dependency-tracking",
"--disable-debug",
"--disable-asm-optimizations",
"--enable-sse",
"--enable-static",
"--prefix=#{prefix}",
"--mandir=#{man}"
ENV['OBJ_FORMAT']='macho'
# adds universal flags to the generated libtool script
inreplace "libtool" do |s|
s.gsub! ":$verstring\"", ":$verstring -arch #{Hardware::CPU.arch_32_bit} -arch #{Hardware::CPU.arch_64_bit}\""
end
system "make install"
# Install the flac2mp3 helper embedded after __END__ below.
(bin/'flac2mp3').write DATA.read
end
end
__END__
#!/usr/bin/env ruby
# http://gist.github.com/gists/2998853/
# Forked from http://gist.github.com/gists/124242
#
# flac2mp3: transcode ARGV[0] (a FLAC file) to MP3 with lame, copying the
# Vorbis comment tags across as ID3 tags. Optional ARGV[1] picks quality.
filename, quality = ARGV[0], ARGV[1]
abort "Usage: flac2mp3 FLACFILE [V2|V1|V0|320]\nDefault (and recommended) quality is V0." if filename.nil?
# Map the requested quality onto a lame flag; anything else falls back to V0.
qualarg = case quality
when "V0","V1","V2" then quality
when "320" then "b 320"
else "V0"
end
# Vorbis tag name -> lame ID3 option.
map = {"TITLE" => "--tt", "ARTIST" => "--ta", "ALBUM" => "--tl", "TRACKNUMBER" => "--tn", "GENRE" => "--tg", "DATE" => "--ty"}
args = ""
# FIX: the interpolation here was garbled to the literal "#(unknown)"; it
# must interpolate the input filename so metaflac reads tags from ARGV[0].
`metaflac --export-tags-to=- "#{filename}"`.each_line do |line|
key, value = line.strip.split('=', 2)
key.upcase!
args << %Q{#{map[key]} "#{value.gsub('"', '\"')}" } if map[key]
end
basename = File.basename(filename, File.extname(filename))
puts "Encoding #{basename}.mp3"
# FIX: same garbled interpolation — decode the input file, pipe PCM to lame.
exec %Q[flac -sdc "#{filename}" | lame -#{qualarg} #{args} - "#{basename}.mp3"]
|
# Homebrew formula: flex 2.6.4 (revision 2). keg_only because macOS ships
# its own flex; HEAD builds regenerate the autotools files.
class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "https://github.com/westes/flex"
url "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz"
sha256 "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
license "BSD-2-Clause"
revision 2
bottle do
sha256 arm64_big_sur: "5067cf61a1b9e5fba26590a01f0a1464c7fefa5450c3d3c673acb237b45a9c3c"
sha256 big_sur: "0efd85122905dd05c8feb197492a72fcd0435270366c89dbe347fcbbe5d66ac1"
sha256 catalina: "902e2701bb4d8130fe3177211dda84b6ebc6a520467874a52bcd7ff043b949cc"
sha256 mojave: "2051ed8f0de322732b111f2cc82069e82f6dfd4d839e6d098bbebcd7f92220e6"
sha256 high_sierra: "9c224c27a3d40a53b6f778a6b825f8b4f14654080b144e50f1bec9cc608c757d"
sha256 sierra: "a958106ee0895b21c7577478b847ecdbc601ce6a723543c5da455bfe0eee5f8f"
end
head do
url "https://github.com/westes/flex.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
# https://github.com/westes/flex/issues/294
depends_on "gnu-sed" => :build
depends_on "libtool" => :build
end
keg_only :provided_by_macos
depends_on "help2man" => :build
depends_on "gettext"
uses_from_macos "bison" => :build
uses_from_macos "m4"
def install
# HEAD checkouts ship no configure script; regenerate it (with GNU sed
# first on PATH — see the issue linked above).
if build.head?
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
# Provide the traditional `lex` name as a symlink to flex.
bin.install_symlink "flex" => "lex"
end
test do
# Generate a scanner, compile it against libfl, and check it tokenizes
# "Hello World" onto separate lines.
(testpath/"test.flex").write <<~EOS
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<~EOS
Hello
World
EOS
end
end
flex: update 2.6.4_2 bottle.
# Homebrew formula packaging flex 2.6.4 (revision 2) — rebuilt bottles.
class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "https://github.com/westes/flex"
url "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz"
sha256 "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
license "BSD-2-Clause"
revision 2
# Checksums of the pre-built binary bottles, one per macOS release/architecture.
bottle do
sha256 arm64_big_sur: "ba78304da35f69526d386e1d1decca8818b155b4dda4f470d9393d23cf713e11"
sha256 big_sur: "89ec2b04b1aab94297f490c60fe6ca2bcde7de9b7661482728b07931e635d21c"
sha256 catalina: "e563a7a42aceff203cca4f420ebc6a8bbd5075a2b0007d46724f037ebc7b41a5"
sha256 mojave: "687132db0837bdcb6e02b5715f6a07f658bdf109b5353908f260d46d354f7bdb"
end
# Building from git HEAD regenerates the build system, so it needs the autotools chain.
head do
url "https://github.com/westes/flex.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
# https://github.com/westes/flex/issues/294
depends_on "gnu-sed" => :build
depends_on "libtool" => :build
end
# macOS already ships a flex; keep this keg unlinked by default.
keg_only :provided_by_macos
depends_on "help2man" => :build
depends_on "gettext"
uses_from_macos "bison" => :build
uses_from_macos "m4"
def install
# HEAD builds must run autogen.sh first, with GNU sed on PATH (see issue above).
if build.head?
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
# Provide the traditional "lex" name as a symlink to flex.
bin.install_symlink "flex" => "lex"
end
# Smoke test: generate a scanner from a tiny grammar, compile it against
# libfl, and check it splits "Hello World" onto separate lines.
test do
(testpath/"test.flex").write <<~EOS
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<~EOS
Hello
World
EOS
end
end
|
# Homebrew formula packaging flex 2.6.4 (revision 2), with a Linux bottle.
class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "https://github.com/westes/flex"
url "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz"
sha256 "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
license "BSD-2-Clause"
revision 2
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 arm64_big_sur: "ba78304da35f69526d386e1d1decca8818b155b4dda4f470d9393d23cf713e11"
sha256 big_sur: "89ec2b04b1aab94297f490c60fe6ca2bcde7de9b7661482728b07931e635d21c"
sha256 catalina: "e563a7a42aceff203cca4f420ebc6a8bbd5075a2b0007d46724f037ebc7b41a5"
sha256 mojave: "687132db0837bdcb6e02b5715f6a07f658bdf109b5353908f260d46d354f7bdb"
sha256 x86_64_linux: "b2bff056ad86d8a1cb1a08944867b5f60636ad4e7edca623810937330d87d8eb"
end
# Building from git HEAD regenerates the build system, so it needs the autotools chain.
head do
url "https://github.com/westes/flex.git", branch: "master"
depends_on "autoconf" => :build
depends_on "automake" => :build
# https://github.com/westes/flex/issues/294
depends_on "gnu-sed" => :build
depends_on "libtool" => :build
depends_on :macos
end
# macOS already ships a flex; keep this keg unlinked by default.
keg_only :provided_by_macos
depends_on "help2man" => :build
depends_on "gettext"
uses_from_macos "bison" => :build
uses_from_macos "m4"
def install
# HEAD builds must run autogen.sh first, with GNU sed on PATH (see issue above).
if build.head?
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
# Fix segmentation fault during install on Ubuntu 18.04 (caused by glibc 2.26+),
# remove with the next release
on_linux do
ENV.append "CPPFLAGS", "-D_GNU_SOURCE"
end
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
# Provide the traditional "lex" name as a symlink to flex.
bin.install_symlink "flex" => "lex"
end
# Smoke test: generate a scanner from a tiny grammar, compile it against
# libfl, and check it splits "Hello World" onto separate lines.
test do
(testpath/"test.flex").write <<~EOS
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<~EOS
Hello
World
EOS
end
end
flex: fix brew style.
Part of https://github.com/Homebrew/brew/pull/11955.
# Homebrew formula packaging flex 2.6.4 (revision 2); brew-style cleanup revision.
class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "https://github.com/westes/flex"
url "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz"
sha256 "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
license "BSD-2-Clause"
revision 2
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 arm64_big_sur: "ba78304da35f69526d386e1d1decca8818b155b4dda4f470d9393d23cf713e11"
sha256 big_sur: "89ec2b04b1aab94297f490c60fe6ca2bcde7de9b7661482728b07931e635d21c"
sha256 catalina: "e563a7a42aceff203cca4f420ebc6a8bbd5075a2b0007d46724f037ebc7b41a5"
sha256 mojave: "687132db0837bdcb6e02b5715f6a07f658bdf109b5353908f260d46d354f7bdb"
sha256 x86_64_linux: "b2bff056ad86d8a1cb1a08944867b5f60636ad4e7edca623810937330d87d8eb"
end
# Building from git HEAD regenerates the build system, so it needs the autotools chain.
head do
url "https://github.com/westes/flex.git", branch: "master"
depends_on "autoconf" => :build
depends_on "automake" => :build
# https://github.com/westes/flex/issues/294
depends_on "gnu-sed" => :build
depends_on "libtool" => :build
depends_on :macos
end
# macOS already ships a flex; keep this keg unlinked by default.
keg_only :provided_by_macos
depends_on "help2man" => :build
depends_on "gettext"
uses_from_macos "bison" => :build
uses_from_macos "m4"
def install
# HEAD builds must run autogen.sh first, with GNU sed on PATH (see issue above).
if build.head?
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
# Fix segmentation fault during install on Ubuntu 18.04 (caused by glibc 2.26+),
# remove with the next release
ENV.append "CPPFLAGS", "-D_GNU_SOURCE" if OS.linux?
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
# Provide the traditional "lex" name as a symlink to flex.
bin.install_symlink "flex" => "lex"
end
# Smoke test: generate a scanner from a tiny grammar, compile it against
# libfl, and check it splits "Hello World" onto separate lines.
test do
(testpath/"test.flex").write <<~EOS
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<~EOS
Hello
World
EOS
end
end
|
##########################################
# Author: David Sulpy (david@sulpy.com) #
# License: MIT #
##########################################
module CloudDeploy
  # Locates the most recently created "<app_name>-base" AMI owned by the
  # current AWS account. Credentials are installed globally on Aws.config.
  class BaseAmiHelper
    gem 'aws-sdk', '>= 2.0.0'
    require 'aws-sdk'

    # options:
    #   :app_name          - prefix used to name base AMIs ("<app_name>-base*")
    #   :access_key_id     - AWS access key id (required, non-empty)
    #   :secret_access_key - AWS secret access key (required, non-empty)
    #   :region            - AWS region (defaults to 'us-east-1')
    #
    # Raises RuntimeError when either credential is nil or empty.
    def initialize(options = {})
      @app_name = options[:app_name]

      if options[:access_key_id].nil? || options[:access_key_id] == ''
        raise "access_key_id cannot be empty or nil"
      end
      if options[:secret_access_key].nil? || options[:secret_access_key] == ''
        raise "secret_access_key cannot be empty or nil"
      end

      @access_key_id = options[:access_key_id]
      @secret_access_key = options[:secret_access_key]

      # Configure the SDK globally so later Aws::EC2::Client.new calls pick
      # up these credentials and region.
      Aws.config.update({
        credentials: Aws::Credentials.new(@access_key_id, @secret_access_key),
        region: options[:region] || 'us-east-1'
      })
    end

    # Returns the image id (String) of the newest self-owned AMI whose name
    # matches "<app_name>-base*", or nil when no image matches.
    # Fix: previously an empty result crashed with NoMethodError on
    # sorted_base_amis[0].name.
    def get_most_recent_base_ami()
      ec2_client = Aws::EC2::Client.new

      puts "looking for images with the name #{@app_name}-base"

      resp = ec2_client.describe_images({
        owners: ["self"],
        filters: [
          {
            name: "name",
            values: ["#{@app_name}-base*"]
          }
        ]
      })

      # Newest first (descending creation_date).
      sorted_base_amis = resp.images.sort { |a, b| b.creation_date <=> a.creation_date }

      if sorted_base_amis.empty?
        puts "no amis found..."
        return nil
      end

      puts "found latest ami #{sorted_base_amis[0].name} (#{sorted_base_amis[0].image_id})"
      return "#{sorted_base_amis[0].image_id}"
    end
  end
end
added a nil response if get_most_recent_base_ami doesn't have any base AMIs that match the name pattern
##########################################
# Author: David Sulpy (david@sulpy.com) #
# License: MIT #
##########################################
module CloudDeploy
  # Locates the most recently created "<app_name>-base" AMI owned by the
  # current AWS account. Credentials are installed globally on Aws.config.
  class BaseAmiHelper
    gem 'aws-sdk', '>= 2.0.0'
    require 'aws-sdk'

    # options:
    #   :app_name          - prefix used to name base AMIs ("<app_name>-base*")
    #   :access_key_id     - AWS access key id (required, non-empty)
    #   :secret_access_key - AWS secret access key (required, non-empty)
    #   :region            - AWS region (defaults to 'us-east-1')
    #
    # Raises RuntimeError when either credential is nil or empty.
    def initialize(options = {})
      @app_name = options[:app_name]

      if options[:access_key_id].nil? || options[:access_key_id] == ''
        raise "access_key_id cannot be empty or nil"
      end
      if options[:secret_access_key].nil? || options[:secret_access_key] == ''
        raise "secret_access_key cannot be empty or nil"
      end

      @access_key_id = options[:access_key_id]
      @secret_access_key = options[:secret_access_key]

      # Configure the SDK globally so later Aws::EC2::Client.new calls pick
      # up these credentials and region.
      Aws.config.update({
        credentials: Aws::Credentials.new(@access_key_id, @secret_access_key),
        region: options[:region] || 'us-east-1'
      })
    end

    # Returns the image id (String) of the newest self-owned AMI whose name
    # matches "<app_name>-base*", or nil when no image matches.
    # Fix: the guard was "count >= 0", which is always true, so this method
    # unconditionally returned nil; the intent (per the commit message) was
    # to return nil only when the result set is empty.
    def get_most_recent_base_ami()
      ec2_client = Aws::EC2::Client.new

      puts "looking for images with the name #{@app_name}-base"

      resp = ec2_client.describe_images({
        owners: ["self"],
        filters: [
          {
            name: "name",
            values: ["#{@app_name}-base*"]
          }
        ]
      })

      # Newest first (descending creation_date).
      sorted_base_amis = resp.images.sort { |a, b| b.creation_date <=> a.creation_date }

      if sorted_base_amis.empty?
        puts "no amis found..."
        return nil
      end

      puts "found latest ami #{sorted_base_amis[0].name} (#{sorted_base_amis[0].image_id})"
      return "#{sorted_base_amis[0].image_id}"
    end
  end
end
# Homebrew formula packaging flex 2.6.2 (pre-Linuxbrew-bottle revision).
class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "http://flex.sourceforge.net"
url "https://github.com/westes/flex/releases/download/v2.6.2/flex-2.6.2.tar.gz"
sha256 "9a01437a1155c799b7dc2508620564ef806ba66250c36bf5f9034b1c207cb2c9"
# Checksums of the pre-built binary bottles (old hash => symbol syntax).
bottle do
sha256 "c7bcd12da4584e7d59e3801f92711820f2b9223d686326693a42da7733cd408d" => :sierra
sha256 "b9f443e7292fe613dca088f7c4d26bf636086bee799c0dda06d8371b6702b410" => :el_capitan
sha256 "637020dcd2cb5895b9da6c248e6035a3cbb91e3a310b7e71cb5f9c4ae959f149" => :yosemite
end
# macOS ships its own flex; keep this keg unlinked by default.
keg_only :provided_by_osx, "Some formulae require a newer version of flex."
depends_on "help2man" => :build
depends_on "gettext"
# Non-macOS platforms need m4 and bison supplied by Homebrew.
depends_on "homebrew/dupes/m4" unless OS.mac?
depends_on "bison" => :build unless OS.mac?
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
end
# Smoke test: generate a scanner from a tiny grammar, compile it against
# libfl, and check it splits "Hello World" onto separate lines.
test do
(testpath/"test.flex").write <<-EOS.undent
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<-EOS.undent
Hello
World
EOS
end
end
flex: update 2.6.2 bottle for Linuxbrew.
Closes Linuxbrew/homebrew-core#1125.
Signed-off-by: Shaun Jackman <b580dab3251a9622aba3803114310c23fdb42900@gmail.com>
# Homebrew formula packaging flex 2.6.2, with a Linuxbrew (x86_64_linux) bottle.
class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "http://flex.sourceforge.net"
url "https://github.com/westes/flex/releases/download/v2.6.2/flex-2.6.2.tar.gz"
sha256 "9a01437a1155c799b7dc2508620564ef806ba66250c36bf5f9034b1c207cb2c9"
# Checksums of the pre-built binary bottles (old hash => symbol syntax).
bottle do
sha256 "c7bcd12da4584e7d59e3801f92711820f2b9223d686326693a42da7733cd408d" => :sierra
sha256 "b9f443e7292fe613dca088f7c4d26bf636086bee799c0dda06d8371b6702b410" => :el_capitan
sha256 "637020dcd2cb5895b9da6c248e6035a3cbb91e3a310b7e71cb5f9c4ae959f149" => :yosemite
sha256 "1763602d7b6d77709f7393b18ab7cf52456a09228904fd3997e560771c2f21a8" => :x86_64_linux
end
# macOS ships its own flex; keep this keg unlinked by default.
keg_only :provided_by_osx, "Some formulae require a newer version of flex."
depends_on "help2man" => :build
depends_on "gettext"
# Non-macOS platforms need m4 and bison supplied by Homebrew.
depends_on "homebrew/dupes/m4" unless OS.mac?
depends_on "bison" => :build unless OS.mac?
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
end
# Smoke test: generate a scanner from a tiny grammar, compile it against
# libfl, and check it splits "Hello World" onto separate lines.
test do
(testpath/"test.flex").write <<-EOS.undent
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<-EOS.undent
Hello
World
EOS
end
end
|
# Homebrew formula packaging the Flix programming language, version 0.32.0.
class Flix < Formula
desc "Statically typed functional, imperative, and logic programming language"
homepage "https://flix.dev/"
url "https://github.com/flix/flix/archive/refs/tags/v0.32.0.tar.gz"
sha256 "83ff239a686e72c880a22d2f88c0b0f9402e0eca7e60e00e14bc9208cd51419a"
license "Apache-2.0"
head "https://github.com/flix/flix.git", branch: "master"
# Track new upstream releases via git tags (vX.Y.Z or X.Y.Z).
livecheck do
url :stable
regex(/^v?\.?(\d+(?:\.\d+)+)$/i)
end
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "48a3e556903e1797edb749ba722bb771d332e44b44c2f15461d3f7a017fbf920"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "41e7ffacb938c58cf1b492bd4e7f491fd833eb12b2ba70871b55af990a6bc257"
sha256 cellar: :any_skip_relocation, monterey: "2c755b30b3b52398140876d86dc2721bdf895a7a7ea69af41ec4942b98213180"
sha256 cellar: :any_skip_relocation, big_sur: "a74995ce38fc6c21f354e20bc788b586448c0494a932a83902f0abe8df662c2b"
sha256 cellar: :any_skip_relocation, catalina: "605bb223fa9b8e3eae06c39c34eb31bb69c72ae30662a631dea078ea0e7a7198"
sha256 cellar: :any_skip_relocation, x86_64_linux: "76af0c9eb71e3a281e1516d3a194716c472acc9cca59ca9cd89405762dfd6018"
end
depends_on "gradle" => :build
depends_on "scala" => :build
depends_on "openjdk"
def install
# Build the compiler jar with Gradle, then wrap it in a launcher script.
system Formula["gradle"].bin/"gradle", "build", "jar"
prefix.install "build/libs/flix-#{version}.jar"
bin.write_jar_script prefix/"flix-#{version}.jar", "flix"
end
# Smoke test: scaffold a project, then run and test it.
test do
system bin/"flix", "init"
assert_match "Hello World!", shell_output("#{bin/"flix"} run")
assert_match "Running 1 tests...", shell_output("#{bin/"flix"} test")
end
end
flix 0.33.0
Closes #114267.
Signed-off-by: FX Coudert <c329953660db96eae534be5bbf1a735c2baf69b5@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula packaging the Flix programming language, version 0.33.0.
class Flix < Formula
desc "Statically typed functional, imperative, and logic programming language"
homepage "https://flix.dev/"
url "https://github.com/flix/flix/archive/refs/tags/v0.33.0.tar.gz"
sha256 "61aa2124bee727a4a913beb6c6f6337a9475109c1c7550f4f6e0ccf24ab3cbd3"
license "Apache-2.0"
head "https://github.com/flix/flix.git", branch: "master"
# Track new upstream releases via git tags (vX.Y.Z or X.Y.Z).
livecheck do
url :stable
regex(/^v?\.?(\d+(?:\.\d+)+)$/i)
end
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "48a3e556903e1797edb749ba722bb771d332e44b44c2f15461d3f7a017fbf920"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "41e7ffacb938c58cf1b492bd4e7f491fd833eb12b2ba70871b55af990a6bc257"
sha256 cellar: :any_skip_relocation, monterey: "2c755b30b3b52398140876d86dc2721bdf895a7a7ea69af41ec4942b98213180"
sha256 cellar: :any_skip_relocation, big_sur: "a74995ce38fc6c21f354e20bc788b586448c0494a932a83902f0abe8df662c2b"
sha256 cellar: :any_skip_relocation, catalina: "605bb223fa9b8e3eae06c39c34eb31bb69c72ae30662a631dea078ea0e7a7198"
sha256 cellar: :any_skip_relocation, x86_64_linux: "76af0c9eb71e3a281e1516d3a194716c472acc9cca59ca9cd89405762dfd6018"
end
depends_on "gradle" => :build
depends_on "scala" => :build
depends_on "openjdk"
def install
# Build the compiler jar with Gradle, then wrap it in a launcher script.
system Formula["gradle"].bin/"gradle", "build", "jar"
prefix.install "build/libs/flix-#{version}.jar"
bin.write_jar_script prefix/"flix-#{version}.jar", "flix"
end
# Smoke test: scaffold a project, then run and test it.
test do
system bin/"flix", "init"
assert_match "Hello World!", shell_output("#{bin/"flix"} run")
assert_match "Running 1 tests...", shell_output("#{bin/"flix"} test")
end
end
|
# Homebrew formula packaging Facebook's Flow type checker, version 0.172.0.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.172.0.tar.gz"
sha256 "6057ce4ccbfd85d3b41645f931e2e0d8631ec29cc43d154da041f9b0645ba5df"
license "MIT"
head "https://github.com/facebook/flow.git", branch: "main"
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "d3a577b51d47c399178feabfa3a1be0da34cb2e61bc7e44fda37f052ce6201d3"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "c2dc114e21faae4770c7c95fd5c6b1f83c6d5250b6a3a8cb4108f6e0337f62c0"
sha256 cellar: :any_skip_relocation, monterey: "c7bb181f22b3046296e12caa40776f6371a9c70e4294a8906bdf15606d1ed7dc"
sha256 cellar: :any_skip_relocation, big_sur: "44cc92d952db9e2a89a61382da4e286e102ff05581f0b675f892ebf09285220a"
sha256 cellar: :any_skip_relocation, catalina: "90699556527ea2bcba2947aa31238599d3fe7c41bb7cc2a8a853295fd7983f0e"
sha256 cellar: :any_skip_relocation, x86_64_linux: "af1833c1bc6e9664951858b57bdf9c5c7af205d5953bbbc3a13a1c1b75431508"
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
uses_from_macos "m4" => :build
uses_from_macos "rsync" => :build
uses_from_macos "unzip" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
flow 0.173.0
Closes #96198.
Signed-off-by: Thierry Moisan <8bf87a6c4caed0437859f8c8fafc6782533e4540@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula packaging Facebook's Flow type checker, version 0.173.0.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.173.0.tar.gz"
sha256 "e7536be16211e338a792a8818c377f06780cb0524f9208b666b3272ca0044f77"
license "MIT"
head "https://github.com/facebook/flow.git", branch: "main"
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "d3a577b51d47c399178feabfa3a1be0da34cb2e61bc7e44fda37f052ce6201d3"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "c2dc114e21faae4770c7c95fd5c6b1f83c6d5250b6a3a8cb4108f6e0337f62c0"
sha256 cellar: :any_skip_relocation, monterey: "c7bb181f22b3046296e12caa40776f6371a9c70e4294a8906bdf15606d1ed7dc"
sha256 cellar: :any_skip_relocation, big_sur: "44cc92d952db9e2a89a61382da4e286e102ff05581f0b675f892ebf09285220a"
sha256 cellar: :any_skip_relocation, catalina: "90699556527ea2bcba2947aa31238599d3fe7c41bb7cc2a8a853295fd7983f0e"
sha256 cellar: :any_skip_relocation, x86_64_linux: "af1833c1bc6e9664951858b57bdf9c5c7af205d5953bbbc3a13a1c1b75431508"
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
uses_from_macos "m4" => :build
uses_from_macos "rsync" => :build
uses_from_macos "unzip" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
|
# Homebrew formula packaging Facebook's Flow type checker, version 0.149.0.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.149.0.tar.gz"
sha256 "b88fd7594ae924cce82060409ba656013dacbbdb23ef918ade9ce9282cc9a3b2"
license "MIT"
head "https://github.com/facebook/flow.git"
# Checksums of the pre-built binary bottles, one per macOS release.
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "0feaf978d1c67097f593d3942f9794bf8c9ffab4b319715695c16bd0f9c82129"
sha256 cellar: :any_skip_relocation, catalina: "8f1e93701b1433c3aea806a9c6d6f1b24d54b836ea440c7909fa6508573591a2"
sha256 cellar: :any_skip_relocation, mojave: "17d893b8a8571305ec799083f2bde9015fb3abe733a12631dea588bbf25b2687"
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
uses_from_macos "m4" => :build
uses_from_macos "rsync" => :build
uses_from_macos "unzip" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
flow: update 0.149.0 bottle.
# Homebrew formula packaging Flow 0.149.0 — rebuilt bottle checksums.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.149.0.tar.gz"
sha256 "b88fd7594ae924cce82060409ba656013dacbbdb23ef918ade9ce9282cc9a3b2"
license "MIT"
head "https://github.com/facebook/flow.git"
# Checksums of the pre-built binary bottles, one per macOS release.
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "1838b3fa0b7a252776cf38c946a82b06bdb79d39720436b1bd42fdfee29491e5"
sha256 cellar: :any_skip_relocation, catalina: "ec70620cbbb6084fd71b4590c6e833d29d4bbf5af9577cec849ae7b31b40f15b"
sha256 cellar: :any_skip_relocation, mojave: "3f6a8bf1e0ec27445aff3d269ebba7113a93b6e70b296ec2fd19b5874bbfe5a9"
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
uses_from_macos "m4" => :build
uses_from_macos "rsync" => :build
uses_from_macos "unzip" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
|
# Homebrew formula packaging Facebook's Flow type checker, version 0.117.1.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.117.1.tar.gz"
sha256 "fefa18ec3949e5907a45b7147253c06cf813a4e46434cfb1cb0baed562c4a161"
head "https://github.com/facebook/flow.git"
# Checksums of the pre-built binary bottles (old DSL: cellar declared separately).
bottle do
cellar :any_skip_relocation
sha256 "462c169b7c71d1dd2e7b77326e02a5afe179257f768604aec039b65189fdf28b" => :catalina
sha256 "2cd54458468890b8ba7999f12814732bd08d5ab174c12865f18dcac68fdee0c6" => :mojave
sha256 "bcd29c63faf777949fe9479cea6c3ebcaa7b8d1c2d472e887982aff4ce1e9b38" => :high_sierra
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
flow 0.118.0
Closes #49822.
Signed-off-by: Alexander Bayandin <673dbf9b1367181cd47bae83bf10b2ffe51be6ac@gmail.com>
# Homebrew formula packaging Facebook's Flow type checker, version 0.118.0.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.118.0.tar.gz"
sha256 "d9eaadc7c92e40f1eb9c74458c7e138fd32832c5cef9a5f30ebba22eaa8cc381"
head "https://github.com/facebook/flow.git"
# Checksums of the pre-built binary bottles (old DSL: cellar declared separately).
bottle do
cellar :any_skip_relocation
sha256 "462c169b7c71d1dd2e7b77326e02a5afe179257f768604aec039b65189fdf28b" => :catalina
sha256 "2cd54458468890b8ba7999f12814732bd08d5ab174c12865f18dcac68fdee0c6" => :mojave
sha256 "bcd29c63faf777949fe9479cea6c3ebcaa7b8d1c2d472e887982aff4ce1e9b38" => :high_sierra
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
|
# Homebrew formula packaging Facebook's Flow type checker, version 0.176.2.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.176.2.tar.gz"
sha256 "385ab3bf00a22f319570dd1f9fd49cf7398404a9bfa3fc009ed3652b179695b2"
license "MIT"
head "https://github.com/facebook/flow.git", branch: "main"
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "bf63ea66dce7e6b3dc7455fe656b03173d86f21486805b24f51fd920705e86f7"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "d3c5d584812518523f4cbd8eaef620e38043f2e4496630983cecba9de294ff2b"
sha256 cellar: :any_skip_relocation, monterey: "72b66202c4fef793878b041ddbc47b615e8e22cce40e7c17fde8514ea22e244e"
sha256 cellar: :any_skip_relocation, big_sur: "1a158682b57aedcd337c2ec5e049cfec8c9f24ab40968827f7ddaa62a76bfcb8"
sha256 cellar: :any_skip_relocation, catalina: "02bc4f9aaa74ca071e9d071691fee9a144179006d5ee8a4ba76de8b5c0abd6ac"
sha256 cellar: :any_skip_relocation, x86_64_linux: "9a6f066085d3c2400217f42b092a234f32f804fecde9d68a6f77e7657e8af5f7"
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
uses_from_macos "m4" => :build
uses_from_macos "rsync" => :build
uses_from_macos "unzip" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
flow 0.176.3
Closes #99856.
Signed-off-by: Michael Cho <ad37dc0e034c3938811c0096de6272abd124db31@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula packaging Facebook's Flow type checker, version 0.176.3.
class Flow < Formula
desc "Static type checker for JavaScript"
homepage "https://flowtype.org/"
url "https://github.com/facebook/flow/archive/v0.176.3.tar.gz"
sha256 "a44d5d83dd1cf199350e1a95c70ad92a5e78be9d813e8456fb20af9e34d99b58"
license "MIT"
head "https://github.com/facebook/flow.git", branch: "main"
# Checksums of the pre-built binary bottles, one per platform.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "bf63ea66dce7e6b3dc7455fe656b03173d86f21486805b24f51fd920705e86f7"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "d3c5d584812518523f4cbd8eaef620e38043f2e4496630983cecba9de294ff2b"
sha256 cellar: :any_skip_relocation, monterey: "72b66202c4fef793878b041ddbc47b615e8e22cce40e7c17fde8514ea22e244e"
sha256 cellar: :any_skip_relocation, big_sur: "1a158682b57aedcd337c2ec5e049cfec8c9f24ab40968827f7ddaa62a76bfcb8"
sha256 cellar: :any_skip_relocation, catalina: "02bc4f9aaa74ca071e9d071691fee9a144179006d5ee8a4ba76de8b5c0abd6ac"
sha256 cellar: :any_skip_relocation, x86_64_linux: "9a6f066085d3c2400217f42b092a234f32f804fecde9d68a6f77e7657e8af5f7"
end
# Flow is written in OCaml; built via the project's Homebrew make target.
depends_on "ocaml" => :build
depends_on "opam" => :build
uses_from_macos "m4" => :build
uses_from_macos "rsync" => :build
uses_from_macos "unzip" => :build
def install
system "make", "all-homebrew"
bin.install "bin/flow"
# Install shell completions; zsh reuses the bash completion script.
bash_completion.install "resources/shell/bash-completion" => "flow-completion.bash"
zsh_completion.install_symlink bash_completion/"flow-completion.bash" => "_flow"
end
# Smoke test: a deliberate type error must be reported; `flow check`
# exits with status 2 when errors are found.
test do
system "#{bin}/flow", "init", testpath
(testpath/"test.js").write <<~EOS
/* @flow */
var x: string = 123;
EOS
expected = /Found 1 error/
assert_match expected, shell_output("#{bin}/flow check #{testpath}", 2)
end
end
|
# Homebrew formula packaging the frp server (frps), version 0.36.0.
class Frps < Formula
desc "Server app of fast reverse proxy to expose a local server to the internet"
homepage "https://github.com/fatedier/frp"
url "https://github.com/fatedier/frp.git",
tag: "v0.36.0",
revision: "76a1efccd96755675bd15cc747820b154b0baccb"
license "Apache-2.0"
# Checksums of the pre-built binary bottles, one per macOS release/architecture.
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "bf36d3f6f1e1c871f2df1f8abbc5d5ef0f7e8247752fbe6ff377fd5918dd2272"
sha256 cellar: :any_skip_relocation, big_sur: "dae1eded5cefc7233d05a52703bfc5ef9c70a2023980960a1514665bfdfd7e10"
sha256 cellar: :any_skip_relocation, catalina: "59cf1fb891b1126a981bbf01ee2be2a9e960cc588503d6df5c1d1420b3ac3614"
sha256 cellar: :any_skip_relocation, mojave: "22451bb38ca33a41d6a8afa493faa6b9f8a74c50685a9d0c661f662621263401"
end
depends_on "go" => :build
def install
# Build only the server binary and install the sample configs under etc/frp.
(buildpath/"bin").mkpath
(etc/"frp").mkpath
system "make", "frps"
bin.install "bin/frps"
etc.install "conf/frps.ini" => "frp/frps.ini"
etc.install "conf/frps_full.ini" => "frp/frps_full.ini"
end
plist_options manual: "frps -c #{HOMEBREW_PREFIX}/etc/frp/frps.ini"
# launchd property list so frps can run as a background service,
# restarted on exit (KeepAlive) and logging to var/log/frps.log.
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>KeepAlive</key>
<true/>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/frps</string>
<string>-c</string>
<string>#{etc}/frp/frps.ini</string>
</array>
<key>StandardErrorPath</key>
<string>#{var}/log/frps.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/frps.log</string>
</dict>
</plist>
EOS
end
# Smoke test: check version/help output, then start the server in a child
# process and read its first line of output through a pipe.
test do
assert_match version.to_s, shell_output("#{bin}/frps -v")
assert_match "Flags", shell_output("#{bin}/frps --help")
read, write = IO.pipe
fork do
exec bin/"frps", out: write
end
sleep 3
output = read.gets
assert_match "frps uses command line arguments for config", output
end
end
frps: update 0.36.0 bottle.
# Homebrew formula packaging the frp server (frps) 0.36.0 — rebuilt bottles.
class Frps < Formula
desc "Server app of fast reverse proxy to expose a local server to the internet"
homepage "https://github.com/fatedier/frp"
url "https://github.com/fatedier/frp.git",
tag: "v0.36.0",
revision: "76a1efccd96755675bd15cc747820b154b0baccb"
license "Apache-2.0"
# Checksums of the pre-built binary bottles, one per macOS release/architecture.
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "03fdb08a5bacecb3f2243d51c0e709d614d29efa789cb9f8f7a8572f1c01d895"
sha256 cellar: :any_skip_relocation, big_sur: "62fd2df721c2f0707968160afe2720ccf7dbc90a354614c24f9e07020e28e23f"
sha256 cellar: :any_skip_relocation, catalina: "fcc5d611fb59caaf3f98de2f07caf3dba0db09af3bf7d9189a6d8b5fad4760a5"
sha256 cellar: :any_skip_relocation, mojave: "6eac84ff2383fc2af5c46deb708b95876b6ed9f7607c4d90c9a6c13e87932ad5"
end
depends_on "go" => :build
def install
# Build only the server binary and install the sample configs under etc/frp.
(buildpath/"bin").mkpath
(etc/"frp").mkpath
system "make", "frps"
bin.install "bin/frps"
etc.install "conf/frps.ini" => "frp/frps.ini"
etc.install "conf/frps_full.ini" => "frp/frps_full.ini"
end
plist_options manual: "frps -c #{HOMEBREW_PREFIX}/etc/frp/frps.ini"
# launchd property list so frps can run as a background service,
# restarted on exit (KeepAlive) and logging to var/log/frps.log.
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>KeepAlive</key>
<true/>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/frps</string>
<string>-c</string>
<string>#{etc}/frp/frps.ini</string>
</array>
<key>StandardErrorPath</key>
<string>#{var}/log/frps.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/frps.log</string>
</dict>
</plist>
EOS
end
# Smoke test: check version/help output, then start the server in a child
# process and read its first line of output through a pipe.
test do
assert_match version.to_s, shell_output("#{bin}/frps -v")
assert_match "Flags", shell_output("#{bin}/frps --help")
read, write = IO.pipe
fork do
exec bin/"frps", out: write
end
sleep 3
output = read.gets
assert_match "frps uses command line arguments for config", output
end
end
|
# Homebrew formula packaging fwup 1.2.0, a firmware update creator/runner.
class Fwup < Formula
desc "Configurable embedded Linux firmware update creator and runner"
homepage "https://github.com/fhunleth/fwup"
url "https://github.com/fhunleth/fwup/releases/download/v1.2.0/fwup-1.2.0.tar.gz"
sha256 "74cc3636bc3923c676c6e95da65c611d47914452eebfe6110a6850be99855acd"
# Checksums of the pre-built binary bottles, one per macOS release.
bottle do
cellar :any
sha256 "f86e4b6a69b7e5e6608fb2b0951ba00b3c7c6e6c98dc2b4cac3caba4f3f65735" => :high_sierra
sha256 "a8fd7141c7b34959f9c2cfb05ccaaf0b1219bd2af5949cdf14bb7a2ce74e84e5" => :sierra
sha256 "97e2c68121acf424af31355d19b96a6536dd05b4c92cc6d8459a045d165461c1" => :el_capitan
end
depends_on "pkg-config" => :build
depends_on "confuse"
depends_on "libarchive"
depends_on "libsodium"
def install
system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
system "make", "install"
end
# Smoke test: `fwup -g` must generate a signing key pair in the cwd.
test do
system bin/"fwup", "-g"
assert_predicate testpath/"fwup-key.priv", :exist?, "Failed to create fwup-key.priv!"
assert_predicate testpath/"fwup-key.pub", :exist?, "Failed to create fwup-key.pub!"
end
end
fwup: update 1.2.0 bottle for Linuxbrew.
# fwup 1.2.0 (revision adding a Linuxbrew bottle): tool for creating and
# applying firmware update packages for embedded Linux devices.
class Fwup < Formula
desc "Configurable embedded Linux firmware update creator and runner"
homepage "https://github.com/fhunleth/fwup"
url "https://github.com/fhunleth/fwup/releases/download/v1.2.0/fwup-1.2.0.tar.gz"
sha256 "74cc3636bc3923c676c6e95da65c611d47914452eebfe6110a6850be99855acd"
bottle do
cellar :any
sha256 "f86e4b6a69b7e5e6608fb2b0951ba00b3c7c6e6c98dc2b4cac3caba4f3f65735" => :high_sierra
sha256 "a8fd7141c7b34959f9c2cfb05ccaaf0b1219bd2af5949cdf14bb7a2ce74e84e5" => :sierra
sha256 "97e2c68121acf424af31355d19b96a6536dd05b4c92cc6d8459a045d165461c1" => :el_capitan
# Bottle built for Linuxbrew.
sha256 "672c2b9feed50b702a706a5f00e00476c8a0cf31c37db8d4fa92922483bf959c" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "confuse"
depends_on "libarchive"
depends_on "libsodium"
# Standard configure/make install.
def install
system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
system "make", "install"
end
# `fwup -g` generates a firmware-signing key pair in the working directory;
# verify both halves of the pair appear.
test do
system bin/"fwup", "-g"
assert_predicate testpath/"fwup-key.priv", :exist?, "Failed to create fwup-key.priv!"
assert_predicate testpath/"fwup-key.pub", :exist?, "Failed to create fwup-key.pub!"
end
end
|
# GDAL 3.3.0 (revision 2): raster/vector geospatial format library plus the
# gdal*/ogr* command-line tools and the Python "osgeo" bindings.
class Gdal < Formula
desc "Geospatial Data Abstraction Library"
homepage "https://www.gdal.org/"
url "https://download.osgeo.org/gdal/3.3.0/gdal-3.3.0.tar.xz"
sha256 "190c8f4b56afc767f43836b2a5cd53cc52ee7fdc25eb78c6079c5a244e28efa7"
license "MIT"
revision 2
# Scan the upstream download directory listing for new release tarballs.
livecheck do
url "https://download.osgeo.org/gdal/CURRENT/"
regex(/href=.*?gdal[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 arm64_big_sur: "a4ff6604a70966105427573703e555f45bbee90fafee4b31cd4b474378e5acca"
sha256 big_sur: "dcade6589d3ec0a13543ae9d58cdd6f4e28cf9b7fb90371fae21b8127c8d6d2b"
sha256 catalina: "1e52bee36f36cec0379aea8fc7fa27a5b99eadf026e027708cd263a1417575e6"
sha256 mojave: "ced85858cbf314c4e8c0e67620447caf5d188861205863fc26a94b6d83c3fa3f"
end
# HEAD builds come from git and need doxygen to build the man pages.
head do
url "https://github.com/OSGeo/gdal.git"
depends_on "doxygen" => :build
end
depends_on "pkg-config" => :build
depends_on "cfitsio"
depends_on "epsilon"
depends_on "expat"
depends_on "freexl"
depends_on "geos"
depends_on "giflib"
depends_on "hdf5"
depends_on "jpeg"
depends_on "json-c"
depends_on "libdap"
depends_on "libgeotiff"
depends_on "libpng"
depends_on "libpq"
depends_on "libspatialite"
depends_on "libtiff"
depends_on "libxml2"
depends_on "netcdf"
depends_on "numpy"
depends_on "openjpeg"
depends_on "pcre"
depends_on "poppler"
depends_on "proj@7"
depends_on "python@3.9"
depends_on "sqlite" # To ensure compatibility with SpatiaLite
depends_on "unixodbc" # macOS version is not complete enough
depends_on "webp"
depends_on "xerces-c"
depends_on "xz" # get liblzma compression algorithm library from XZutils
depends_on "zstd"
uses_from_macos "curl"
on_linux do
depends_on "bash-completion"
end
conflicts_with "cpl", because: "both install cpl_error.h"
def install
args = [
# Base configuration
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--with-libtool",
"--with-local=#{prefix}",
"--with-opencl",
"--with-threads",
# GDAL native backends
"--with-pam",
"--with-pcidsk=internal",
"--with-pcraster=internal",
# Homebrew backends
"--with-expat=#{Formula["expat"].prefix}",
"--with-freexl=#{Formula["freexl"].opt_prefix}",
"--with-geos=#{Formula["geos"].opt_prefix}/bin/geos-config",
"--with-geotiff=#{Formula["libgeotiff"].opt_prefix}",
"--with-gif=#{Formula["giflib"].opt_prefix}",
"--with-jpeg=#{Formula["jpeg"].opt_prefix}",
"--with-libjson-c=#{Formula["json-c"].opt_prefix}",
"--with-libtiff=#{Formula["libtiff"].opt_prefix}",
"--with-pg=yes",
"--with-png=#{Formula["libpng"].opt_prefix}",
"--with-spatialite=#{Formula["libspatialite"].opt_prefix}",
"--with-sqlite3=#{Formula["sqlite"].opt_prefix}",
"--with-proj=#{Formula["proj@7"].opt_prefix}",
"--with-zstd=#{Formula["zstd"].opt_prefix}",
"--with-liblzma=yes",
"--with-cfitsio=#{Formula["cfitsio"].opt_prefix}",
"--with-hdf5=#{Formula["hdf5"].opt_prefix}",
"--with-netcdf=#{Formula["netcdf"].opt_prefix}",
"--with-openjpeg",
"--with-xerces=#{Formula["xerces-c"].opt_prefix}",
"--with-odbc=#{Formula["unixodbc"].opt_prefix}",
"--with-dods-root=#{Formula["libdap"].opt_prefix}",
"--with-epsilon=#{Formula["epsilon"].opt_prefix}",
"--with-webp=#{Formula["webp"].opt_prefix}",
"--with-poppler=#{Formula["poppler"].opt_prefix}",
# Explicitly disable some features
"--with-armadillo=no",
"--with-qhull=no",
"--without-exr",
"--without-grass",
"--without-jasper",
"--without-jpeg12",
"--without-libgrass",
"--without-mysql",
"--without-perl",
"--without-python",
# Unsupported backends are either proprietary or have no compatible version
# in Homebrew. Podofo is disabled because Poppler provides the same
# functionality and then some.
"--without-ecw",
"--without-fgdb",
"--without-fme",
"--without-gta",
"--without-hdf4",
"--without-idb",
"--without-ingres",
"--without-jp2mrsid",
"--without-kakadu",
"--without-mrsid",
"--without-mrsid_lidar",
"--without-msg",
"--without-oci",
"--without-ogdi",
"--without-podofo",
"--without-rasdaman",
"--without-sde",
"--without-sosi",
]
# macOS ships a usable system curl; Linux uses the Homebrew-provided one.
on_macos do
args << "--with-curl=/usr/bin/curl-config"
end
on_linux do
args << "--with-curl=#{Formula["curl"].opt_bin}/curl-config"
end
system "./configure", *args
system "make"
system "make", "install"
# Build Python bindings
cd "swig/python" do
system Formula["python@3.9"].opt_bin/"python3", *Language::Python.setup_install_args(prefix)
end
bin.install Dir["swig/python/scripts/*.py"]
system "make", "man" if build.head?
# Force man installation dir: https://trac.osgeo.org/gdal/ticket/5092
system "make", "install-man", "INST_MAN=#{man}"
# Clean up any stray doxygen files
Dir.glob("#{bin}/*.dox") { |p| rm p }
end
test do
# basic tests to see if third-party dylibs are loading OK
system "#{bin}/gdalinfo", "--formats"
system "#{bin}/ogrinfo", "--formats"
# Changed Python package name from "gdal" to "osgeo.gdal" in 3.2.0.
system Formula["python@3.9"].opt_bin/"python3", "-c", "import osgeo.gdal"
end
end
gdal: update 3.3.0_2 bottle.
# GDAL 3.3.0 (revision 2, rebuilt bottles): raster/vector geospatial format
# library plus the gdal*/ogr* command-line tools and Python "osgeo" bindings.
class Gdal < Formula
desc "Geospatial Data Abstraction Library"
homepage "https://www.gdal.org/"
url "https://download.osgeo.org/gdal/3.3.0/gdal-3.3.0.tar.xz"
sha256 "190c8f4b56afc767f43836b2a5cd53cc52ee7fdc25eb78c6079c5a244e28efa7"
license "MIT"
revision 2
# Scan the upstream download directory listing for new release tarballs.
livecheck do
url "https://download.osgeo.org/gdal/CURRENT/"
regex(/href=.*?gdal[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 arm64_big_sur: "959af890f4c921228ca48fb1c0632a4cfeb5e96d3e330ca5c00ceca2acf3ab3b"
sha256 big_sur: "bb339b16d188b360ce3eef5a5aacfb3d1ca3b84b802e7d0de6b42b4a95a3c90c"
sha256 catalina: "3f578946fffe342fa5523968e02a14458b2b977f46d5980a801124c032106957"
sha256 mojave: "0458c2646d661b28e956bbb5286d6ecb56da6d33805aff8520094c8c05dd8cab"
end
# HEAD builds come from git and need doxygen to build the man pages.
head do
url "https://github.com/OSGeo/gdal.git"
depends_on "doxygen" => :build
end
depends_on "pkg-config" => :build
depends_on "cfitsio"
depends_on "epsilon"
depends_on "expat"
depends_on "freexl"
depends_on "geos"
depends_on "giflib"
depends_on "hdf5"
depends_on "jpeg"
depends_on "json-c"
depends_on "libdap"
depends_on "libgeotiff"
depends_on "libpng"
depends_on "libpq"
depends_on "libspatialite"
depends_on "libtiff"
depends_on "libxml2"
depends_on "netcdf"
depends_on "numpy"
depends_on "openjpeg"
depends_on "pcre"
depends_on "poppler"
depends_on "proj@7"
depends_on "python@3.9"
depends_on "sqlite" # To ensure compatibility with SpatiaLite
depends_on "unixodbc" # macOS version is not complete enough
depends_on "webp"
depends_on "xerces-c"
depends_on "xz" # get liblzma compression algorithm library from XZutils
depends_on "zstd"
uses_from_macos "curl"
on_linux do
depends_on "bash-completion"
end
conflicts_with "cpl", because: "both install cpl_error.h"
def install
args = [
# Base configuration
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--with-libtool",
"--with-local=#{prefix}",
"--with-opencl",
"--with-threads",
# GDAL native backends
"--with-pam",
"--with-pcidsk=internal",
"--with-pcraster=internal",
# Homebrew backends
"--with-expat=#{Formula["expat"].prefix}",
"--with-freexl=#{Formula["freexl"].opt_prefix}",
"--with-geos=#{Formula["geos"].opt_prefix}/bin/geos-config",
"--with-geotiff=#{Formula["libgeotiff"].opt_prefix}",
"--with-gif=#{Formula["giflib"].opt_prefix}",
"--with-jpeg=#{Formula["jpeg"].opt_prefix}",
"--with-libjson-c=#{Formula["json-c"].opt_prefix}",
"--with-libtiff=#{Formula["libtiff"].opt_prefix}",
"--with-pg=yes",
"--with-png=#{Formula["libpng"].opt_prefix}",
"--with-spatialite=#{Formula["libspatialite"].opt_prefix}",
"--with-sqlite3=#{Formula["sqlite"].opt_prefix}",
"--with-proj=#{Formula["proj@7"].opt_prefix}",
"--with-zstd=#{Formula["zstd"].opt_prefix}",
"--with-liblzma=yes",
"--with-cfitsio=#{Formula["cfitsio"].opt_prefix}",
"--with-hdf5=#{Formula["hdf5"].opt_prefix}",
"--with-netcdf=#{Formula["netcdf"].opt_prefix}",
"--with-openjpeg",
"--with-xerces=#{Formula["xerces-c"].opt_prefix}",
"--with-odbc=#{Formula["unixodbc"].opt_prefix}",
"--with-dods-root=#{Formula["libdap"].opt_prefix}",
"--with-epsilon=#{Formula["epsilon"].opt_prefix}",
"--with-webp=#{Formula["webp"].opt_prefix}",
"--with-poppler=#{Formula["poppler"].opt_prefix}",
# Explicitly disable some features
"--with-armadillo=no",
"--with-qhull=no",
"--without-exr",
"--without-grass",
"--without-jasper",
"--without-jpeg12",
"--without-libgrass",
"--without-mysql",
"--without-perl",
"--without-python",
# Unsupported backends are either proprietary or have no compatible version
# in Homebrew. Podofo is disabled because Poppler provides the same
# functionality and then some.
"--without-ecw",
"--without-fgdb",
"--without-fme",
"--without-gta",
"--without-hdf4",
"--without-idb",
"--without-ingres",
"--without-jp2mrsid",
"--without-kakadu",
"--without-mrsid",
"--without-mrsid_lidar",
"--without-msg",
"--without-oci",
"--without-ogdi",
"--without-podofo",
"--without-rasdaman",
"--without-sde",
"--without-sosi",
]
# macOS ships a usable system curl; Linux uses the Homebrew-provided one.
on_macos do
args << "--with-curl=/usr/bin/curl-config"
end
on_linux do
args << "--with-curl=#{Formula["curl"].opt_bin}/curl-config"
end
system "./configure", *args
system "make"
system "make", "install"
# Build Python bindings
cd "swig/python" do
system Formula["python@3.9"].opt_bin/"python3", *Language::Python.setup_install_args(prefix)
end
bin.install Dir["swig/python/scripts/*.py"]
system "make", "man" if build.head?
# Force man installation dir: https://trac.osgeo.org/gdal/ticket/5092
system "make", "install-man", "INST_MAN=#{man}"
# Clean up any stray doxygen files
Dir.glob("#{bin}/*.dox") { |p| rm p }
end
test do
# basic tests to see if third-party dylibs are loading OK
system "#{bin}/gdalinfo", "--formats"
system "#{bin}/ogrinfo", "--formats"
# Changed Python package name from "gdal" to "osgeo.gdal" in 3.2.0.
system Formula["python@3.9"].opt_bin/"python3", "-c", "import osgeo.gdal"
end
end
|
# GDAL 2.2.4: raster/vector geospatial format library and command-line tools.
# Optional drivers are gated behind --with-complete / --with-unsupported.
class Gdal < Formula
desc "Geospatial Data Abstraction Library"
# Fixed: homepage must use HTTPS (brew audit requirement; the HTTP URL
# simply redirects to the HTTPS one, as the 3.x formula already uses).
homepage "https://www.gdal.org/"
url "https://download.osgeo.org/gdal/2.2.4/gdal-2.2.4.tar.xz"
sha256 "441eb1d1acb35238ca43a1a0a649493fc91fdcbab231d0747e9d462eea192278"
bottle do
sha256 "e12a190d34c9b0e93bdad0b0511b66b4ea30d88a1eb421139a1692c5319a3568" => :high_sierra
sha256 "e5b261299699570aacc75f5d97a85c9e6ff834f46d0561d63557c5efdedd6196" => :sierra
sha256 "1f5ce5618a147582fdb21c786def1b14ad170c561cacf504612b62f30a50a952" => :el_capitan
sha256 "a8001482570fdc2067317fdf30a8534880808f560a599bb37e20c5af8235b521" => :x86_64_linux
end
# HEAD builds come from upstream SVN and need doxygen for the man pages.
head do
url "https://svn.osgeo.org/gdal/trunk/gdal"
depends_on "doxygen" => :build
end
option "with-complete", "Use additional Homebrew libraries to provide more drivers."
option "with-unsupported", "Allow configure to drag in any library it can find. Invoke this at your own risk."
deprecated_option "enable-unsupported" => "with-unsupported"
deprecated_option "complete" => "with-complete"
depends_on "freexl"
depends_on "geos"
depends_on "giflib"
depends_on "jpeg"
depends_on "json-c"
depends_on "libgeotiff"
depends_on "libpng"
depends_on "libpq"
depends_on "libspatialite"
depends_on "libtiff"
depends_on "libxml2"
depends_on "pcre"
depends_on "proj"
depends_on "sqlite" # To ensure compatibility with SpatiaLite
depends_on "mysql" => :optional
# Extra format libraries, pulled in only for --with-complete builds.
if build.with? "complete"
depends_on "cfitsio"
depends_on "epsilon"
depends_on "hdf5"
depends_on "jasper"
depends_on "json-c"
depends_on "libdap"
depends_on "libxml2"
depends_on "netcdf"
depends_on "podofo"
depends_on "poppler"
depends_on "unixodbc" # macOS version is not complete enough
depends_on "webp"
depends_on "xerces-c"
depends_on "xz" # get liblzma compression algorithm library from XZutils
end
def install
# Reduce memory usage below 4 GB for Circle CI.
ENV["MAKEFLAGS"] = "-j1" if ENV["CIRCLECI"]
# Fixes: error: inlining failed in call to always_inline __m128i _mm_shuffle_epi8
ENV.append_to_cflags "-msse4.1" if ENV["CIRCLECI"]
args = [
# Base configuration
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--with-libtool",
"--with-local=#{prefix}",
"--with-threads",
# GDAL native backends
"--with-bsb",
"--with-grib",
"--with-pam",
"--with-pcidsk=internal",
"--with-pcraster=internal",
# Homebrew backends
"--with-freexl=#{Formula["freexl"].opt_prefix}",
"--with-geos=#{Formula["geos"].opt_prefix}/bin/geos-config",
"--with-geotiff=#{Formula["libgeotiff"].opt_prefix}",
"--with-gif=#{Formula["giflib"].opt_prefix}",
"--with-jpeg=#{Formula["jpeg"].opt_prefix}",
"--with-libjson-c=#{Formula["json-c"].opt_prefix}",
"--with-libtiff=#{Formula["libtiff"].opt_prefix}",
"--with-pg=#{Formula["libpq"].opt_prefix}/bin/pg_config",
"--with-png=#{Formula["libpng"].opt_prefix}",
"--with-spatialite=#{Formula["libspatialite"].opt_prefix}",
"--with-sqlite3=#{Formula["sqlite"].opt_prefix}",
"--with-static-proj4=#{Formula["proj"].opt_prefix}",
# Explicitly disable some features
"--without-grass",
"--without-jpeg12",
"--without-libgrass",
"--without-perl",
"--without-php",
"--without-python",
"--without-ruby",
"--with-armadillo=no",
]
# macOS ships a usable system curl; Linux uses Homebrew's curl/zlib/expat.
if OS.mac?
args << "--with-opencl"
args << "--with-curl=/usr/bin/curl-config"
else
args << "--with-curl=#{Formula["curl"].opt_bin}/curl-config"
args << "--with-libz=#{Formula["zlib"].opt_prefix}"
args << "--with-expat=#{Formula["expat"].opt_prefix}"
end
if build.with?("mysql")
args << "--with-mysql=#{Formula["mysql"].opt_prefix}/bin/mysql_config"
else
args << "--without-mysql"
end
# Optional Homebrew packages supporting additional formats
supported_backends = %w[liblzma cfitsio hdf5 netcdf jasper xerces odbc
dods-root epsilon webp podofo]
if build.with? "complete"
# liblzma takes "yes" rather than a prefix, so handle it separately.
supported_backends.delete "liblzma"
args << "--with-liblzma=yes"
# Use interpolation rather than String#+ (which depended on Homebrew's
# Pathname#to_str patch for HOMEBREW_PREFIX).
args.concat supported_backends.map { |b| "--with-#{b}=#{HOMEBREW_PREFIX}" }
elsif build.without? "unsupported"
args.concat supported_backends.map { |b| "--without-#{b}" }
end
# Unsupported backends are either proprietary or have no compatible version
# in Homebrew. Podofo is disabled because Poppler provides the same
# functionality and then some.
unsupported_backends = %w[gta ogdi fme hdf4 openjpeg fgdb ecw kakadu mrsid
jp2mrsid mrsid_lidar msg oci ingres dwgdirect
idb sde podofo rasdaman sosi]
if build.without? "unsupported"
args.concat unsupported_backends.map { |b| "--without-#{b}" }
end
system "./configure", *args
system "make"
system "make", "install"
system "make", "man" if build.head?
# Force man installation dir: https://trac.osgeo.org/gdal/ticket/5092
system "make", "install-man", "INST_MAN=#{man}"
# Clean up any stray doxygen files
Dir.glob("#{bin}/*.dox") { |p| rm p }
end
test do
# basic tests to see if third-party dylibs are loading OK
system "#{bin}/gdalinfo", "--formats"
system "#{bin}/ogrinfo", "--formats"
end
end
gdal: remove trailing whitespace
# GDAL 2.2.4: raster/vector geospatial format library and command-line tools.
# Optional drivers are gated behind --with-complete / --with-unsupported.
class Gdal < Formula
desc "Geospatial Data Abstraction Library"
# Fixed: homepage must use HTTPS (brew audit requirement; the HTTP URL
# simply redirects to the HTTPS one, as the 3.x formula already uses).
homepage "https://www.gdal.org/"
url "https://download.osgeo.org/gdal/2.2.4/gdal-2.2.4.tar.xz"
sha256 "441eb1d1acb35238ca43a1a0a649493fc91fdcbab231d0747e9d462eea192278"
bottle do
sha256 "e12a190d34c9b0e93bdad0b0511b66b4ea30d88a1eb421139a1692c5319a3568" => :high_sierra
sha256 "e5b261299699570aacc75f5d97a85c9e6ff834f46d0561d63557c5efdedd6196" => :sierra
sha256 "1f5ce5618a147582fdb21c786def1b14ad170c561cacf504612b62f30a50a952" => :el_capitan
sha256 "a8001482570fdc2067317fdf30a8534880808f560a599bb37e20c5af8235b521" => :x86_64_linux
end
# HEAD builds come from upstream SVN and need doxygen for the man pages.
head do
url "https://svn.osgeo.org/gdal/trunk/gdal"
depends_on "doxygen" => :build
end
option "with-complete", "Use additional Homebrew libraries to provide more drivers."
option "with-unsupported", "Allow configure to drag in any library it can find. Invoke this at your own risk."
deprecated_option "enable-unsupported" => "with-unsupported"
deprecated_option "complete" => "with-complete"
depends_on "freexl"
depends_on "geos"
depends_on "giflib"
depends_on "jpeg"
depends_on "json-c"
depends_on "libgeotiff"
depends_on "libpng"
depends_on "libpq"
depends_on "libspatialite"
depends_on "libtiff"
depends_on "libxml2"
depends_on "pcre"
depends_on "proj"
depends_on "sqlite" # To ensure compatibility with SpatiaLite
depends_on "mysql" => :optional
# Extra format libraries, pulled in only for --with-complete builds.
if build.with? "complete"
depends_on "cfitsio"
depends_on "epsilon"
depends_on "hdf5"
depends_on "jasper"
depends_on "json-c"
depends_on "libdap"
depends_on "libxml2"
depends_on "netcdf"
depends_on "podofo"
depends_on "poppler"
depends_on "unixodbc" # macOS version is not complete enough
depends_on "webp"
depends_on "xerces-c"
depends_on "xz" # get liblzma compression algorithm library from XZutils
end
def install
# Reduce memory usage below 4 GB for Circle CI.
ENV["MAKEFLAGS"] = "-j1" if ENV["CIRCLECI"]
# Fixes: error: inlining failed in call to always_inline __m128i _mm_shuffle_epi8
ENV.append_to_cflags "-msse4.1" if ENV["CIRCLECI"]
args = [
# Base configuration
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--with-libtool",
"--with-local=#{prefix}",
"--with-threads",
# GDAL native backends
"--with-bsb",
"--with-grib",
"--with-pam",
"--with-pcidsk=internal",
"--with-pcraster=internal",
# Homebrew backends
"--with-freexl=#{Formula["freexl"].opt_prefix}",
"--with-geos=#{Formula["geos"].opt_prefix}/bin/geos-config",
"--with-geotiff=#{Formula["libgeotiff"].opt_prefix}",
"--with-gif=#{Formula["giflib"].opt_prefix}",
"--with-jpeg=#{Formula["jpeg"].opt_prefix}",
"--with-libjson-c=#{Formula["json-c"].opt_prefix}",
"--with-libtiff=#{Formula["libtiff"].opt_prefix}",
"--with-pg=#{Formula["libpq"].opt_prefix}/bin/pg_config",
"--with-png=#{Formula["libpng"].opt_prefix}",
"--with-spatialite=#{Formula["libspatialite"].opt_prefix}",
"--with-sqlite3=#{Formula["sqlite"].opt_prefix}",
"--with-static-proj4=#{Formula["proj"].opt_prefix}",
# Explicitly disable some features
"--without-grass",
"--without-jpeg12",
"--without-libgrass",
"--without-perl",
"--without-php",
"--without-python",
"--without-ruby",
"--with-armadillo=no",
]
# macOS ships a usable system curl; Linux uses Homebrew's curl/zlib/expat.
if OS.mac?
args << "--with-opencl"
args << "--with-curl=/usr/bin/curl-config"
else
args << "--with-curl=#{Formula["curl"].opt_bin}/curl-config"
args << "--with-libz=#{Formula["zlib"].opt_prefix}"
args << "--with-expat=#{Formula["expat"].opt_prefix}"
end
if build.with?("mysql")
args << "--with-mysql=#{Formula["mysql"].opt_prefix}/bin/mysql_config"
else
args << "--without-mysql"
end
# Optional Homebrew packages supporting additional formats
supported_backends = %w[liblzma cfitsio hdf5 netcdf jasper xerces odbc
dods-root epsilon webp podofo]
if build.with? "complete"
# liblzma takes "yes" rather than a prefix, so handle it separately.
supported_backends.delete "liblzma"
args << "--with-liblzma=yes"
# Use interpolation rather than String#+ (which depended on Homebrew's
# Pathname#to_str patch for HOMEBREW_PREFIX).
args.concat supported_backends.map { |b| "--with-#{b}=#{HOMEBREW_PREFIX}" }
elsif build.without? "unsupported"
args.concat supported_backends.map { |b| "--without-#{b}" }
end
# Unsupported backends are either proprietary or have no compatible version
# in Homebrew. Podofo is disabled because Poppler provides the same
# functionality and then some.
unsupported_backends = %w[gta ogdi fme hdf4 openjpeg fgdb ecw kakadu mrsid
jp2mrsid mrsid_lidar msg oci ingres dwgdirect
idb sde podofo rasdaman sosi]
if build.without? "unsupported"
args.concat unsupported_backends.map { |b| "--without-#{b}" }
end
system "./configure", *args
system "make"
system "make", "install"
system "make", "man" if build.head?
# Force man installation dir: https://trac.osgeo.org/gdal/ticket/5092
system "make", "install-man", "INST_MAN=#{man}"
# Clean up any stray doxygen files
Dir.glob("#{bin}/*.dox") { |p| rm p }
end
test do
# basic tests to see if third-party dylibs are loading OK
system "#{bin}/gdalinfo", "--formats"
system "#{bin}/ogrinfo", "--formats"
end
end
|
require 'formula'
# Predicates reflecting the command-line flags accepted by this legacy
# formula. Each reports whether the user passed the corresponding option.

# --complete: build with every optional Homebrew-provided driver.
def complete?
  ARGV.any? { |arg| arg == "--complete" }
end

# --with-postgres: add PostgreSQL as a dependency.
def postgres?
  ARGV.any? { |arg| arg == "--with-postgres" }
end

# --with-mysql: add MySQL as a dependency.
def mysql?
  ARGV.any? { |arg| arg == "--with-mysql" }
end

# --without-python: skip the Python bindings and their extra tools.
def no_python?
  ARGV.any? { |arg| arg == "--without-python" }
end

# --enable-opencl: enable OpenCL support.
def opencl?
  ARGV.any? { |arg| arg == "--enable-opencl" }
end
# Legacy GDAL 1.8.1 formula written against the pre-1.0 Homebrew API
# (md5 checksums, ARGV-based options via the helper predicates above,
# and a DATA-embedded patch after __END__).
class Gdal < Formula
url 'http://download.osgeo.org/gdal/gdal-1.8.1.tar.gz'
head 'https://svn.osgeo.org/gdal/trunk/gdal', :using => :svn
homepage 'http://www.gdal.org/'
md5 'b32269893afc9dc9eced45e74e4c6bb4'
depends_on 'jpeg'
depends_on 'giflib'
depends_on 'proj'
depends_on 'geos'
if complete?
# Raster libraries
depends_on "netcdf" # Also brings in HDF5
depends_on "jasper" # May need a keg-only GeoJasPer library as this one is
# not geo-spatially enabled.
depends_on "cfitsio"
depends_on "epsilon"
# Patch applied only for --complete builds (EPSILON is only used then).
def patches
# EPSILON v0.9.x slightly modified the naming of some struct members. A
# fix is in the GDAL trunk but was kept out of 1.8.1 due to concern for
# users of EPSILON v0.8.x. Homebrew installs 0.9.2+ so this concern is a
# moot point.
{:p1 => DATA}
end
# Vector libraries
depends_on "unixodbc" # OS X version is not complete enough
depends_on "libspatialite"
depends_on "xerces-c"
depends_on "poppler"
# Other libraries
depends_on "lzma" # Compression algorithm library
end
depends_on "postgresql" if postgres?
depends_on "mysql" if mysql?
# Option descriptions shown by `brew options` (legacy array-pair API).
def options
[
['--complete', 'Use additional Homebrew libraries to provide more drivers.'],
['--with-postgres', 'Specify PostgreSQL as a dependency.'],
['--with-mysql', 'Specify MySQL as a dependency.'],
['--without-python', 'Build without Python support (disables a lot of tools).'],
['--enable-opencl', 'Build with support for OpenCL.']
]
end
# Assembles the ./configure argument list according to the selected options.
def get_configure_args
args = [
# Base configuration.
"--disable-debug",
"--with-local=#{prefix}",
"--with-threads",
"--with-libtool",
# GDAL native backends.
"--with-libtiff=internal", # For bigTIFF support
"--with-geotiff=internal",
"--with-pcraster=internal",
"--with-pcidsk=internal",
"--with-bsb",
"--with-grib",
"--with-pam",
# Backends supported by OS X.
"--with-libz=/usr",
"--with-png=/usr/X11",
"--with-expat=/usr",
"--with-sqlite3=/usr",
# Default Homebrew backends.
"--with-jpeg=#{HOMEBREW_PREFIX}",
"--with-jpeg12",
"--with-gif=#{HOMEBREW_PREFIX}",
"--with-curl=/usr/bin/curl-config",
# GRASS backend explicitly disabled. Creates a chicken-and-egg problem.
# Should be installed separately after GRASS installation using the
# official GDAL GRASS plugin.
"--without-grass",
"--without-libgrass",
# OPeNDAP support also explicitly disabled for now---causes the
# configuration of other components such as Curl and Spatialite to fail
# for unknown reasons.
"--with-dods-root=no"
]
# Optional library support for additional formats.
if complete?
args.concat [
"--with-liblzma=yes",
"--with-netcdf=#{HOMEBREW_PREFIX}",
"--with-hdf5=#{HOMEBREW_PREFIX}",
"--with-jasper=#{HOMEBREW_PREFIX}",
"--with-cfitsio=#{HOMEBREW_PREFIX}",
"--with-epsilon=#{HOMEBREW_PREFIX}",
"--with-odbc=#{HOMEBREW_PREFIX}",
"--with-spatialite=#{HOMEBREW_PREFIX}",
"--with-xerces=#{HOMEBREW_PREFIX}",
"--with-poppler=#{HOMEBREW_PREFIX}"
]
else
args.concat [
"--without-cfitsio",
"--without-netcdf",
"--without-ogdi",
"--without-hdf4",
"--without-hdf5",
"--without-openjpeg",
"--without-jasper",
"--without-xerces",
"--without-epsilon",
"--without-spatialite",
"--without-libkml",
"--without-poppler",
# The following libraries are either proprietary or available under
# non-free licenses. Interested users will have to install such
# software manually.
"--without-msg",
"--without-mrsid",
"--without-jp2mrsid",
"--without-kakadu",
"--without-fme",
"--without-ecw",
"--without-dwgdirect"
]
end
# Database support.
args << "--without-pg" unless postgres?
args << "--without-mysql" unless mysql?
args << "--without-sde" # ESRI ArcSDE databases
args << "--without-ingres" # Ingres databases
args << "--without-oci" # Oracle databases
args << "--without-idb" # IBM Informix DataBlades
# Homebrew-provided databases.
args << "--with-pg=#{HOMEBREW_PREFIX}/bin/pg_config" if postgres?
args << "--with-mysql=#{HOMEBREW_PREFIX}/bin/mysql_config" if mysql?
args << "--without-python" # Installed using a separate set of
# steps so that everything winds up
# in the prefix.
# Scripting APIs that have not been re-worked to respect Homebrew prefixes.
#
# Currently disabled as they install willy-nilly into locations outside of
# the Homebrew prefix. Enable if you feel like it, but uninstallation may be
# a manual affair.
#
# TODO: Fix installation of script bindings so they install into the
# Homebrew prefix.
args << "--without-perl"
args << "--without-php"
args << "--without-ruby"
# OpenCL support
args << "--with-opencl" if opencl?
return args
end
def install
system "./configure", "--prefix=#{prefix}", *get_configure_args
system "make"
system "make install"
unless no_python?
# If setuptools happens to be installed, setup.py will cowardly refuse to
# install to anywhere that is not on the PYTHONPATH.
#
# Really setuptools, we're all consenting adults here...
python_lib = lib + "python"
ENV.append 'PYTHONPATH', python_lib
# setuptools is also apparently incapable of making the directory itself
python_lib.mkpath
# `python-config` may try to talk us into building bindings for more
# architectures than we really should.
if MacOS.prefer_64_bit?
ENV.append_to_cflags '-arch x86_64'
else
ENV.append_to_cflags '-arch i386'
end
Dir.chdir 'swig/python' do
system "python", "setup.py", "install_lib", "--install-dir=#{python_lib}"
bin.install Dir['scripts/*']
end
end
end
# Only mention the PYTHONPATH requirement when Python support was built.
unless no_python?
def caveats
<<-EOS
This version of GDAL was built with Python support. In addition to providing
modules that makes GDAL functions available to Python scripts, the Python
binding provides ~18 additional command line tools. However, both the Python
bindings and the additional tools will be unusable unless the following
directory is added to the PYTHONPATH:
#{HOMEBREW_PREFIX}/lib/python
EOS
end
end
end
__END__
This patch updates GDAL to be compatible with EPSILON 0.9.x. Changes sourced from the GDAL trunk:
http://trac.osgeo.org/gdal/changeset/22363
Patch can be removed when GDAL hits 1.9.0.
diff --git a/frmts/epsilon/epsilondataset.cpp b/frmts/epsilon/epsilondataset.cpp
index b12928a..3f967cc 100644
--- a/frmts/epsilon/epsilondataset.cpp
+++ b/frmts/epsilon/epsilondataset.cpp
@@ -48,6 +48,13 @@ typedef struct
vsi_l_offset offset;
} BlockDesc;
+#ifdef I_WANT_COMPATIBILITY_WITH_EPSILON_0_8_1
+#define GET_FIELD(hdr, field) \
+ (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.field : hdr.tc.field
+#else
+#define GET_FIELD(hdr, field) \
+ (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.hdr_data.gs.field : hdr.hdr_data.tc.field
+#endif
/************************************************************************/
/* ==================================================================== */
@@ -237,8 +244,8 @@ CPLErr EpsilonRasterBand::IReadBlock( int nBlockXOff,
return CE_Failure;
}
- int w = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.w : hdr.tc.w;
- int h = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.h : hdr.tc.h;
+ int w = GET_FIELD(hdr, w);
+ int h = GET_FIELD(hdr, h);
int i;
if (poGDS->nBands == 1)
@@ -505,12 +512,12 @@ int EpsilonDataset::ScanBlocks(int* pnBands)
continue;
}
- int W = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.W : hdr.tc.W;
- int H = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.H : hdr.tc.H;
- int x = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.x : hdr.tc.x;
- int y = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.y : hdr.tc.y;
- int w = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.w : hdr.tc.w;
- int h = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.h : hdr.tc.h;
+ int W = GET_FIELD(hdr, W);
+ int H = GET_FIELD(hdr, H);
+ int x = GET_FIELD(hdr, x);
+ int y = GET_FIELD(hdr, y);
+ int w = GET_FIELD(hdr, w);
+ int h = GET_FIELD(hdr, h);
//CPLDebug("EPSILON", "W=%d,H=%d,x=%d,y=%d,w=%d,h=%d,offset=" CPL_FRMT_GUIB,
// W, H, x, y, w, h, nStartBlockFileOff);
gdal: stylistic changes
require 'formula'
# Predicates reflecting the command-line flags accepted by this legacy
# formula. Each reports whether the user passed the corresponding option.

# --complete: build with every optional Homebrew-provided driver.
def complete?
  ARGV.any? { |arg| arg == "--complete" }
end

# --with-postgres: add PostgreSQL as a dependency.
def postgres?
  ARGV.any? { |arg| arg == "--with-postgres" }
end

# --with-mysql: add MySQL as a dependency.
def mysql?
  ARGV.any? { |arg| arg == "--with-mysql" }
end

# --without-python: skip the Python bindings and their extra tools.
def no_python?
  ARGV.any? { |arg| arg == "--without-python" }
end

# --enable-opencl: enable OpenCL support.
def opencl?
  ARGV.any? { |arg| arg == "--enable-opencl" }
end
# Legacy GDAL 1.8.1 formula (reordered metadata, patches hoisted out of the
# dependency block) written against the pre-1.0 Homebrew API: md5 checksums,
# ARGV-based options via the helper predicates above, and a DATA-embedded
# patch after __END__.
class Gdal < Formula
url 'http://download.osgeo.org/gdal/gdal-1.8.1.tar.gz'
homepage 'http://www.gdal.org/'
md5 'b32269893afc9dc9eced45e74e4c6bb4'
head 'https://svn.osgeo.org/gdal/trunk/gdal', :using => :svn
depends_on 'jpeg'
depends_on 'giflib'
depends_on 'proj'
depends_on 'geos'
depends_on "postgresql" if postgres?
depends_on "mysql" if mysql?
if complete?
# Raster libraries
depends_on "netcdf" # Also brings in HDF5
depends_on "jasper" # May need a keg-only GeoJasPer library as this one is
# not geo-spatially enabled.
depends_on "cfitsio"
depends_on "epsilon"
# Vector libraries
depends_on "unixodbc" # OS X version is not complete enough
depends_on "libspatialite"
depends_on "xerces-c"
depends_on "poppler"
# Other libraries
depends_on "lzma" # Compression algorithm library
end
# Patch applied only for --complete builds (EPSILON is only used then);
# returns nil otherwise, which the legacy patch API treats as "no patches".
def patches
if complete?
# EPSILON v0.9.x slightly modified the naming of some struct members. A
# fix is in the GDAL trunk but was kept out of 1.8.1 due to concern for
# users of EPSILON v0.8.x. Homebrew installs 0.9.2+ so this concern is a
# moot point.
{:p1 => DATA}
end
end
# Option descriptions shown by `brew options` (legacy array-pair API).
def options
[
['--complete', 'Use additional Homebrew libraries to provide more drivers.'],
['--with-postgres', 'Specify PostgreSQL as a dependency.'],
['--with-mysql', 'Specify MySQL as a dependency.'],
['--without-python', 'Build without Python support (disables a lot of tools).'],
['--enable-opencl', 'Build with support for OpenCL.']
]
end
# Assembles the ./configure argument list according to the selected options.
def get_configure_args
args = [
# Base configuration.
"--disable-debug",
"--with-local=#{prefix}",
"--with-threads",
"--with-libtool",
# GDAL native backends.
"--with-libtiff=internal", # For bigTIFF support
"--with-geotiff=internal",
"--with-pcraster=internal",
"--with-pcidsk=internal",
"--with-bsb",
"--with-grib",
"--with-pam",
# Backends supported by OS X.
"--with-libz=/usr",
"--with-png=/usr/X11",
"--with-expat=/usr",
"--with-sqlite3=/usr",
# Default Homebrew backends.
"--with-jpeg=#{HOMEBREW_PREFIX}",
"--with-jpeg12",
"--with-gif=#{HOMEBREW_PREFIX}",
"--with-curl=/usr/bin/curl-config",
# GRASS backend explicitly disabled. Creates a chicken-and-egg problem.
# Should be installed separately after GRASS installation using the
# official GDAL GRASS plugin.
"--without-grass",
"--without-libgrass",
# OPeNDAP support also explicitly disabled for now---causes the
# configuration of other components such as Curl and Spatialite to fail
# for unknown reasons.
"--with-dods-root=no"
]
# Optional library support for additional formats.
if complete?
args.concat [
"--with-liblzma=yes",
"--with-netcdf=#{HOMEBREW_PREFIX}",
"--with-hdf5=#{HOMEBREW_PREFIX}",
"--with-jasper=#{HOMEBREW_PREFIX}",
"--with-cfitsio=#{HOMEBREW_PREFIX}",
"--with-epsilon=#{HOMEBREW_PREFIX}",
"--with-odbc=#{HOMEBREW_PREFIX}",
"--with-spatialite=#{HOMEBREW_PREFIX}",
"--with-xerces=#{HOMEBREW_PREFIX}",
"--with-poppler=#{HOMEBREW_PREFIX}"
]
else
args.concat [
"--without-cfitsio",
"--without-netcdf",
"--without-ogdi",
"--without-hdf4",
"--without-hdf5",
"--without-openjpeg",
"--without-jasper",
"--without-xerces",
"--without-epsilon",
"--without-spatialite",
"--without-libkml",
"--without-poppler",
# The following libraries are either proprietary or available under
# non-free licenses. Interested users will have to install such
# software manually.
"--without-msg",
"--without-mrsid",
"--without-jp2mrsid",
"--without-kakadu",
"--without-fme",
"--without-ecw",
"--without-dwgdirect"
]
end
# Database support.
args << "--without-pg" unless postgres?
args << "--without-mysql" unless mysql?
args << "--without-sde" # ESRI ArcSDE databases
args << "--without-ingres" # Ingres databases
args << "--without-oci" # Oracle databases
args << "--without-idb" # IBM Informix DataBlades
# Homebrew-provided databases.
args << "--with-pg=#{HOMEBREW_PREFIX}/bin/pg_config" if postgres?
args << "--with-mysql=#{HOMEBREW_PREFIX}/bin/mysql_config" if mysql?
args << "--without-python" # Installed using a separate set of
# steps so that everything winds up
# in the prefix.
# Scripting APIs that have not been re-worked to respect Homebrew prefixes.
#
# Currently disabled as they install willy-nilly into locations outside of
# the Homebrew prefix. Enable if you feel like it, but uninstallation may be
# a manual affair.
#
# TODO: Fix installation of script bindings so they install into the
# Homebrew prefix.
args << "--without-perl"
args << "--without-php"
args << "--without-ruby"
# OpenCL support
args << "--with-opencl" if opencl?
return args
end
def install
system "./configure", "--prefix=#{prefix}", *get_configure_args
system "make"
system "make install"
unless no_python?
# If setuptools happens to be installed, setup.py will cowardly refuse to
# install to anywhere that is not on the PYTHONPATH.
#
# Really setuptools, we're all consenting adults here...
python_lib = lib + "python"
ENV.append 'PYTHONPATH', python_lib
# setuptools is also apparently incapable of making the directory itself
python_lib.mkpath
# `python-config` may try to talk us into building bindings for more
# architectures than we really should.
if MacOS.prefer_64_bit?
ENV.append_to_cflags '-arch x86_64'
else
ENV.append_to_cflags '-arch i386'
end
Dir.chdir 'swig/python' do
system "python", "setup.py", "install_lib", "--install-dir=#{python_lib}"
bin.install Dir['scripts/*']
end
end
end
# Only mention the PYTHONPATH requirement when Python support was built.
unless no_python?
def caveats
<<-EOS
This version of GDAL was built with Python support. In addition to providing
modules that makes GDAL functions available to Python scripts, the Python
binding provides ~18 additional command line tools. However, both the Python
bindings and the additional tools will be unusable unless the following
directory is added to the PYTHONPATH:
#{HOMEBREW_PREFIX}/lib/python
EOS
end
end
end
__END__
This patch updates GDAL to be compatible with EPSILON 0.9.x. Changes sourced from the GDAL trunk:
http://trac.osgeo.org/gdal/changeset/22363
Patch can be removed when GDAL hits 1.9.0.
diff --git a/frmts/epsilon/epsilondataset.cpp b/frmts/epsilon/epsilondataset.cpp
index b12928a..3f967cc 100644
--- a/frmts/epsilon/epsilondataset.cpp
+++ b/frmts/epsilon/epsilondataset.cpp
@@ -48,6 +48,13 @@ typedef struct
vsi_l_offset offset;
} BlockDesc;
+#ifdef I_WANT_COMPATIBILITY_WITH_EPSILON_0_8_1
+#define GET_FIELD(hdr, field) \
+ (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.field : hdr.tc.field
+#else
+#define GET_FIELD(hdr, field) \
+ (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.hdr_data.gs.field : hdr.hdr_data.tc.field
+#endif
/************************************************************************/
/* ==================================================================== */
@@ -237,8 +244,8 @@ CPLErr EpsilonRasterBand::IReadBlock( int nBlockXOff,
return CE_Failure;
}
- int w = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.w : hdr.tc.w;
- int h = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.h : hdr.tc.h;
+ int w = GET_FIELD(hdr, w);
+ int h = GET_FIELD(hdr, h);
int i;
if (poGDS->nBands == 1)
@@ -505,12 +512,12 @@ int EpsilonDataset::ScanBlocks(int* pnBands)
continue;
}
- int W = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.W : hdr.tc.W;
- int H = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.H : hdr.tc.H;
- int x = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.x : hdr.tc.x;
- int y = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.y : hdr.tc.y;
- int w = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.w : hdr.tc.w;
- int h = (hdr.block_type == EPS_GRAYSCALE_BLOCK) ? hdr.gs.h : hdr.tc.h;
+ int W = GET_FIELD(hdr, W);
+ int H = GET_FIELD(hdr, H);
+ int x = GET_FIELD(hdr, x);
+ int y = GET_FIELD(hdr, y);
+ int w = GET_FIELD(hdr, w);
+ int h = GET_FIELD(hdr, h);
//CPLDebug("EPSILON", "W=%d,H=%d,x=%d,y=%d,w=%d,h=%d,offset=" CPL_FRMT_GUIB,
// W, H, x, y, w, h, nStartBlockFileOff);
|
# Homebrew formula: GNU dbm 1.16, a library of key/value database functions.
class Gdbm < Formula
  desc "GNU database manager"
  homepage "https://www.gnu.org/software/gdbm/"
  url "https://ftp.gnu.org/gnu/gdbm/gdbm-1.16.tar.gz"
  mirror "https://ftpmirror.gnu.org/gdbm/gdbm-1.16.tar.gz"
  sha256 "c8a18bc6259da0c3eefefb018f8aa298fddc6f86c6fc0f0dec73270896ab512f"

  bottle do
    cellar :any
    sha256 "04899aebecf79de7b1a1fd56ea2c57443bb8a3b4741e006c38c233554ccb0672" => :high_sierra
    sha256 "aeb282fe2d4fbee1f056b7da013db3355ee8644979bcb55cbdd97f8bc21fe240" => :sierra
    sha256 "826e5048722eb9ba535b8b3da24b0cb93fe7a3a47a19b1f034c40ffbb85304b8" => :el_capitan
  end

  option "with-libgdbm-compat", "Build libgdbm_compat, a compatibility layer which provides UNIX-like dbm and ndbm interfaces."

  # Use --without-readline because readline detection is broken in 1.13
  # https://github.com/Homebrew/homebrew-core/pull/10903
  def install
    args = %W[
      --disable-dependency-tracking
      --disable-silent-rules
      --without-readline
      --prefix=#{prefix}
    ]
    args << "--enable-libgdbm-compat" if build.with? "libgdbm-compat"

    system "./configure", *args
    system "make", "install"
  end

  test do
    # Store a key/value pair in a fresh database file, then read it back.
    pipe_output("#{bin}/gdbmtool --norc --newdb test", "store 1 2\nquit\n")
    assert_predicate testpath/"test", :exist?
    # Pass a String matcher (minitest escapes it into a Regexp): a bare
    # regexp literal as the first argument triggers Ruby's "ambiguous
    # first argument" warning.
    assert_match "2", pipe_output("#{bin}/gdbmtool --norc test", "fetch 1\nquit\n")
  end
end
gdbm: update 1.16 bottle.
# Homebrew formula: GNU dbm 1.16 (rebuilt-bottle revision of the same release).
class Gdbm < Formula
  desc "GNU database manager"
  homepage "https://www.gnu.org/software/gdbm/"
  url "https://ftp.gnu.org/gnu/gdbm/gdbm-1.16.tar.gz"
  mirror "https://ftpmirror.gnu.org/gdbm/gdbm-1.16.tar.gz"
  sha256 "c8a18bc6259da0c3eefefb018f8aa298fddc6f86c6fc0f0dec73270896ab512f"

  bottle do
    cellar :any
    sha256 "52f2c6347af039f27c0ecd3f1c5559fb215fc1f6ed0ca0ff1641f3267dd966e6" => :high_sierra
    sha256 "79d6094df951b8f008487becbe495bc82468e1af1991ae6fad2d2ded944322b1" => :sierra
    sha256 "b5d7bdd8b4ea746e87837d0f2b4b5af80296279ef284703355d8b9105c7e9400" => :el_capitan
  end

  option "with-libgdbm-compat", "Build libgdbm_compat, a compatibility layer which provides UNIX-like dbm and ndbm interfaces."

  # Use --without-readline because readline detection is broken in 1.13
  # https://github.com/Homebrew/homebrew-core/pull/10903
  def install
    args = %W[
      --disable-dependency-tracking
      --disable-silent-rules
      --without-readline
      --prefix=#{prefix}
    ]
    args << "--enable-libgdbm-compat" if build.with? "libgdbm-compat"

    system "./configure", *args
    system "make", "install"
  end

  test do
    # Store a key/value pair in a fresh database file, then read it back.
    pipe_output("#{bin}/gdbmtool --norc --newdb test", "store 1 2\nquit\n")
    assert_predicate testpath/"test", :exist?
    # Pass a String matcher (minitest escapes it into a Regexp): a bare
    # regexp literal as the first argument triggers Ruby's "ambiguous
    # first argument" warning.
    assert_match "2", pipe_output("#{bin}/gdbmtool --norc test", "fetch 1\nquit\n")
  end
end
|
# Homebrew formula: Grassroots DICOM (GDCM) 3.0.12 with VTK support and
# Python 3.9 bindings, built with CMake/Ninja.
class Gdcm < Formula
desc "Grassroots DICOM library and utilities for medical files"
homepage "https://sourceforge.net/projects/gdcm/"
url "https://github.com/malaterre/GDCM/archive/v3.0.12.tar.gz"
sha256 "4709ee5acce9ee69727bc8664ef7a13f4a3bbb8d48d1c3a0468241d0ac1ac977"
license "BSD-3-Clause"
# Track new upstream versions via GitHub's "latest release".
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 arm64_monterey: "0ecacc3dc372754b142872c877e51e6bf9edc949d2884f613f1684ddf799659e"
sha256 arm64_big_sur: "24c41c93ff1329f76f5620937c28d4eda5d2e2f4920695249016b7c10cae4495"
sha256 monterey: "dd63445740ea75cd0d65a0902a36b40cbdc333f50de35336f5d6e93f8a4c7746"
sha256 big_sur: "5d2cfb3687c878888fd6906b61dd57fb04b3bcea5dab35b290cf12d6c8e0c47e"
sha256 catalina: "f0b959b6bbfac7da6307af849fe136dc5850963ca302f21e8b3d5affec52ea93"
sha256 x86_64_linux: "53dcc4e002081f1c7352ba8577ff61001e1e683524400befd4df88ae4b3b2df4"
end
depends_on "cmake" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "swig" => :build
depends_on "openjpeg"
depends_on "openssl@1.1"
depends_on "python@3.9"
depends_on "vtk@8.2"
uses_from_macos "expat"
uses_from_macos "zlib"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
ENV.cxx11
# Query the brewed python@3.9 for its version, include dir, and real
# executable path so CMake wraps the correct interpreter.
python3 = Formula["python@3.9"].opt_bin/"python3"
xy = Language::Python.major_minor_version python3
python_include =
Utils.safe_popen_read(python3, "-c", "from distutils import sysconfig;print(sysconfig.get_python_inc(True))")
.chomp
python_executable = Utils.safe_popen_read(python3, "-c", "import sys;print(sys.executable)").chomp
# Prefer system/brewed copies of bundled third-party libraries and
# install the Python module into this keg's site-packages.
args = std_cmake_args + %W[
-GNinja
-DGDCM_BUILD_APPLICATIONS=ON
-DGDCM_BUILD_SHARED_LIBS=ON
-DGDCM_BUILD_TESTING=OFF
-DGDCM_BUILD_EXAMPLES=OFF
-DGDCM_BUILD_DOCBOOK_MANPAGES=OFF
-DGDCM_USE_VTK=ON
-DGDCM_USE_SYSTEM_EXPAT=ON
-DGDCM_USE_SYSTEM_ZLIB=ON
-DGDCM_USE_SYSTEM_UUID=ON
-DGDCM_USE_SYSTEM_OPENJPEG=ON
-DGDCM_USE_SYSTEM_OPENSSL=ON
-DGDCM_WRAP_PYTHON=ON
-DPYTHON_EXECUTABLE=#{python_executable}
-DPYTHON_INCLUDE_DIR=#{python_include}
-DGDCM_INSTALL_PYTHONMODULE_DIR=#{lib}/python#{xy}/site-packages
-DCMAKE_INSTALL_RPATH=#{lib}
-DGDCM_NO_PYTHON_LIBS_LINKING=ON
]
mkdir "build" do
# Leave Python symbols unresolved at link time; they come from the
# interpreter at import time.
ENV.append "LDFLAGS", "-undefined dynamic_lookup" if OS.mac?
system "cmake", "..", *args
system "ninja"
system "ninja", "install"
end
end
test do
# Compile/link a minimal C++ client of gdcm::Reader, run it, and check
# that the Python module imports.
(testpath/"test.cxx").write <<~EOS
#include "gdcmReader.h"
int main(int, char *[])
{
gdcm::Reader reader;
reader.SetFileName("file.dcm");
}
EOS
system ENV.cxx, "-std=c++11", "-isystem", "#{include}/gdcm-3.0", "-o", "test.cxx.o", "-c", "test.cxx"
system ENV.cxx, "-std=c++11", "test.cxx.o", "-o", "test", "-L#{lib}", "-lgdcmDSED"
system "./test"
system Formula["python@3.9"].opt_bin/"python3", "-c", "import gdcm"
end
end
gdcm 3.0.14
Closes #104590.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula: Grassroots DICOM (GDCM) 3.0.14 — version bump of the
# 3.0.12 formula; build recipe unchanged.
class Gdcm < Formula
desc "Grassroots DICOM library and utilities for medical files"
homepage "https://sourceforge.net/projects/gdcm/"
url "https://github.com/malaterre/GDCM/archive/v3.0.14.tar.gz"
sha256 "12582a87a1f043ce77005590ef1060e92ad36ec07ccf132da49c59f857d413ee"
license "BSD-3-Clause"
# Track new upstream versions via GitHub's "latest release".
livecheck do
url :stable
strategy :github_latest
end
# NOTE(review): these bottle sha256s are identical to the 3.0.12 ones —
# presumably a mid-bump state awaiting a BrewTestBot rebuild; verify.
bottle do
sha256 arm64_monterey: "0ecacc3dc372754b142872c877e51e6bf9edc949d2884f613f1684ddf799659e"
sha256 arm64_big_sur: "24c41c93ff1329f76f5620937c28d4eda5d2e2f4920695249016b7c10cae4495"
sha256 monterey: "dd63445740ea75cd0d65a0902a36b40cbdc333f50de35336f5d6e93f8a4c7746"
sha256 big_sur: "5d2cfb3687c878888fd6906b61dd57fb04b3bcea5dab35b290cf12d6c8e0c47e"
sha256 catalina: "f0b959b6bbfac7da6307af849fe136dc5850963ca302f21e8b3d5affec52ea93"
sha256 x86_64_linux: "53dcc4e002081f1c7352ba8577ff61001e1e683524400befd4df88ae4b3b2df4"
end
depends_on "cmake" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "swig" => :build
depends_on "openjpeg"
depends_on "openssl@1.1"
depends_on "python@3.9"
depends_on "vtk@8.2"
uses_from_macos "expat"
uses_from_macos "zlib"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
ENV.cxx11
# Query the brewed python@3.9 for its version, include dir, and real
# executable path so CMake wraps the correct interpreter.
python3 = Formula["python@3.9"].opt_bin/"python3"
xy = Language::Python.major_minor_version python3
python_include =
Utils.safe_popen_read(python3, "-c", "from distutils import sysconfig;print(sysconfig.get_python_inc(True))")
.chomp
python_executable = Utils.safe_popen_read(python3, "-c", "import sys;print(sys.executable)").chomp
# Prefer system/brewed copies of bundled third-party libraries and
# install the Python module into this keg's site-packages.
args = std_cmake_args + %W[
-GNinja
-DGDCM_BUILD_APPLICATIONS=ON
-DGDCM_BUILD_SHARED_LIBS=ON
-DGDCM_BUILD_TESTING=OFF
-DGDCM_BUILD_EXAMPLES=OFF
-DGDCM_BUILD_DOCBOOK_MANPAGES=OFF
-DGDCM_USE_VTK=ON
-DGDCM_USE_SYSTEM_EXPAT=ON
-DGDCM_USE_SYSTEM_ZLIB=ON
-DGDCM_USE_SYSTEM_UUID=ON
-DGDCM_USE_SYSTEM_OPENJPEG=ON
-DGDCM_USE_SYSTEM_OPENSSL=ON
-DGDCM_WRAP_PYTHON=ON
-DPYTHON_EXECUTABLE=#{python_executable}
-DPYTHON_INCLUDE_DIR=#{python_include}
-DGDCM_INSTALL_PYTHONMODULE_DIR=#{lib}/python#{xy}/site-packages
-DCMAKE_INSTALL_RPATH=#{lib}
-DGDCM_NO_PYTHON_LIBS_LINKING=ON
]
mkdir "build" do
# Leave Python symbols unresolved at link time; they come from the
# interpreter at import time.
ENV.append "LDFLAGS", "-undefined dynamic_lookup" if OS.mac?
system "cmake", "..", *args
system "ninja"
system "ninja", "install"
end
end
test do
# Compile/link a minimal C++ client of gdcm::Reader, run it, and check
# that the Python module imports.
(testpath/"test.cxx").write <<~EOS
#include "gdcmReader.h"
int main(int, char *[])
{
gdcm::Reader reader;
reader.SetFileName("file.dcm");
}
EOS
system ENV.cxx, "-std=c++11", "-isystem", "#{include}/gdcm-3.0", "-o", "test.cxx.o", "-c", "test.cxx"
system ENV.cxx, "-std=c++11", "test.cxx.o", "-o", "test", "-L#{lib}", "-lgdcmDSED"
system "./test"
system Formula["python@3.9"].opt_bin/"python3", "-c", "import gdcm"
end
end
|
# Homebrew formula: gitg 3.18.0, the GNOME GUI client for browsing git
# repositories.
class Gitg < Formula
  desc "GNOME GUI client to view git repositories"
  homepage "https://wiki.gnome.org/Apps/Gitg"
  url "https://download.gnome.org/sources/gitg/3.18/gitg-3.18.0.tar.xz"
  sha256 "fa4b7b9c492f13f5f1d864af1281ea377ac8c7619c856e05f533b18989edf421"

  bottle do
    sha256 "5da83eb431d2bac44657f9be88a3e227d112754bd520215a7d590a62a243f08d" => :el_capitan
    sha256 "ee6f8558b43e01274c102da3437a53b6bc27171722e9daf38768ea9605c0e8dc" => :yosemite
    sha256 "42a8be25a6674a2c09ae5b4d7b52e6cef909c2af2973359db1ed41edf0497fce" => :mavericks
  end

  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  depends_on "intltool" => :build
  depends_on "webkitgtk"
  depends_on "gtksourceview3"
  depends_on "gobject-introspection"
  depends_on "libgit2-glib"
  depends_on "gsettings-desktop-schemas"
  depends_on "libgee"
  depends_on "json-glib"
  depends_on "libsecret"
  depends_on "libpeas"
  depends_on "gtkspell3"
  depends_on "hicolor-icon-theme"
  depends_on "gnome-icon-theme"
  depends_on :python3 => :optional
  # Modifier form of the former three-line conditional; same condition,
  # same dependency.
  depends_on "pygobject3" => "with-python3" if build.with?("python3")

  def install
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}",
                          "--disable-schemas-compile"
    system "make", "install"
  end

  # Compile GSettings schemas and refresh the icon cache after linking,
  # since configure ran with --disable-schemas-compile.
  def post_install
    system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
    system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache", "-f", "-t", "#{HOMEBREW_PREFIX}/share/icons/hicolor"
  end

  test do
    # test executable
    # Match the version as a plain string: interpolating `version` into a
    # regexp literal treats the dots as metacharacters and also trips
    # Ruby's ambiguous-first-argument warning.
    assert_match version.to_s, shell_output("#{bin}/gitg --version")
    # test API
    (testpath/"test.c").write <<-EOS.undent
      #include <libgitg/libgitg.h>
      int main(int argc, char *argv[]) {
      GType gtype = gitg_stage_status_file_get_type();
      return 0;
      }
    EOS
    atk = Formula["atk"]
    cairo = Formula["cairo"]
    fontconfig = Formula["fontconfig"]
    freetype = Formula["freetype"]
    gdk_pixbuf = Formula["gdk-pixbuf"]
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    gobject_introspection = Formula["gobject-introspection"]
    gtkx3 = Formula["gtk+3"]
    harfbuzz = Formula["harfbuzz"]
    libepoxy = Formula["libepoxy"]
    libffi = Formula["libffi"]
    libgee = Formula["libgee"]
    libgit2 = Formula["libgit2"]
    libgit2_glib = Formula["libgit2-glib"]
    libpng = Formula["libpng"]
    libsoup = Formula["libsoup"]
    pango = Formula["pango"]
    pixman = Formula["pixman"]
    webkitgtk = Formula["webkitgtk"]
    # Full GTK stack include/lib/link flags needed to compile against
    # libgitg from the test sandbox.
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{atk.opt_include}/atk-1.0
      -I#{cairo.opt_include}/cairo
      -I#{fontconfig.opt_include}
      -I#{freetype.opt_include}/freetype2
      -I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/gio-unix-2.0/
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{gobject_introspection.opt_include}/gobject-introspection-1.0
      -I#{gtkx3.opt_include}/gtk-3.0
      -I#{harfbuzz.opt_include}/harfbuzz
      -I#{include}/libgitg-1.0
      -I#{libepoxy.opt_include}
      -I#{libgee.opt_include}/gee-0.8
      -I#{libffi.opt_lib}/libffi-3.0.13/include
      -I#{libgit2.opt_include}
      -I#{libgit2_glib.opt_include}/libgit2-glib-1.0
      -I#{libpng.opt_include}/libpng16
      -I#{libsoup.opt_include}/libsoup-2.4
      -I#{pango.opt_include}/pango-1.0
      -I#{pixman.opt_include}/pixman-1
      -I#{webkitgtk.opt_include}/webkitgtk-4.0
      -DGIT_SSH=1
      -D_REENTRANT
      -L#{atk.opt_lib}
      -L#{cairo.opt_lib}
      -L#{gdk_pixbuf.opt_lib}
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{gobject_introspection.opt_lib}
      -L#{gtkx3.opt_lib}
      -L#{libgee.opt_lib}
      -L#{libgit2.opt_lib}
      -L#{libgit2_glib.opt_lib}
      -L#{libsoup.opt_lib}
      -L#{lib}
      -L#{pango.opt_lib}
      -L#{webkitgtk.opt_lib}
      -latk-1.0
      -lcairo
      -lcairo-gobject
      -lgdk-3
      -lgdk_pixbuf-2.0
      -lgio-2.0
      -lgirepository-1.0
      -lgit2
      -lgit2-glib-1.0
      -lgitg-1.0
      -lglib-2.0
      -lgmodule-2.0
      -lgobject-2.0
      -lgthread-2.0
      -lgtk-3
      -lintl
      -lpango-1.0
      -lpangocairo-1.0
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
gitg 3.20.1
# Homebrew formula: gitg 3.20.1 — version bump of the 3.18.0 formula.
class Gitg < Formula
desc "GNOME GUI client to view git repositories"
homepage "https://wiki.gnome.org/Apps/Gitg"
url "https://download.gnome.org/sources/gitg/3.20/gitg-3.20.1.tar.xz"
sha256 "104420bcdd765fa2196a7b146ba1e0fa82a5686ed5ba9af40e31e88e601aa585"
# NOTE(review): bottle sha256s appear carried over from the previous
# version — presumably awaiting a rebuild; verify.
bottle do
sha256 "5da83eb431d2bac44657f9be88a3e227d112754bd520215a7d590a62a243f08d" => :el_capitan
sha256 "ee6f8558b43e01274c102da3437a53b6bc27171722e9daf38768ea9605c0e8dc" => :yosemite
sha256 "42a8be25a6674a2c09ae5b4d7b52e6cef909c2af2973359db1ed41edf0497fce" => :mavericks
end
depends_on "pkg-config" => :build
depends_on "vala" => :build
depends_on "intltool" => :build
depends_on "webkitgtk"
depends_on "gtksourceview3"
depends_on "gobject-introspection"
depends_on "libgit2-glib"
depends_on "gsettings-desktop-schemas"
depends_on "libgee"
depends_on "json-glib"
depends_on "libsecret"
depends_on "libpeas"
depends_on "gtkspell3"
depends_on "hicolor-icon-theme"
depends_on "gnome-icon-theme"
depends_on :python3 => :optional
depends_on "pygobject3" => "with-python3" if build.with?("python3")
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--disable-schemas-compile"
system "make", "install"
end
# Compile GSettings schemas and refresh the icon cache after linking,
# since configure ran with --disable-schemas-compile.
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache", "-f", "-t", "#{HOMEBREW_PREFIX}/share/icons/hicolor"
end
test do
# test executable
assert_match version.to_s, shell_output("#{bin}/gitg --version")
# test API
(testpath/"test.c").write <<-EOS.undent
#include <libgitg/libgitg.h>
int main(int argc, char *argv[]) {
GType gtype = gitg_stage_status_file_get_type();
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
fontconfig = Formula["fontconfig"]
freetype = Formula["freetype"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gettext = Formula["gettext"]
glib = Formula["glib"]
gobject_introspection = Formula["gobject-introspection"]
gtkx3 = Formula["gtk+3"]
harfbuzz = Formula["harfbuzz"]
libepoxy = Formula["libepoxy"]
libffi = Formula["libffi"]
libgee = Formula["libgee"]
libgit2 = Formula["libgit2"]
libgit2_glib = Formula["libgit2-glib"]
libpng = Formula["libpng"]
libsoup = Formula["libsoup"]
pango = Formula["pango"]
pixman = Formula["pixman"]
webkitgtk = Formula["webkitgtk"]
# Full GTK stack include/lib/link flags needed to compile against
# libgitg from the test sandbox.
flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
flags += %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{fontconfig.opt_include}
-I#{freetype.opt_include}/freetype2
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{gettext.opt_include}
-I#{glib.opt_include}/gio-unix-2.0/
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{gobject_introspection.opt_include}/gobject-introspection-1.0
-I#{gtkx3.opt_include}/gtk-3.0
-I#{harfbuzz.opt_include}/harfbuzz
-I#{include}/libgitg-1.0
-I#{libepoxy.opt_include}
-I#{libgee.opt_include}/gee-0.8
-I#{libffi.opt_lib}/libffi-3.0.13/include
-I#{libgit2.opt_include}
-I#{libgit2_glib.opt_include}/libgit2-glib-1.0
-I#{libpng.opt_include}/libpng16
-I#{libsoup.opt_include}/libsoup-2.4
-I#{pango.opt_include}/pango-1.0
-I#{pixman.opt_include}/pixman-1
-I#{webkitgtk.opt_include}/webkitgtk-4.0
-DGIT_SSH=1
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{gobject_introspection.opt_lib}
-L#{gtkx3.opt_lib}
-L#{libgee.opt_lib}
-L#{libgit2.opt_lib}
-L#{libgit2_glib.opt_lib}
-L#{libsoup.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-L#{webkitgtk.opt_lib}
-latk-1.0
-lcairo
-lcairo-gobject
-lgdk-3
-lgdk_pixbuf-2.0
-lgio-2.0
-lgirepository-1.0
-lgit2
-lgit2-glib-1.0
-lgitg-1.0
-lglib-2.0
-lgmodule-2.0
-lgobject-2.0
-lgthread-2.0
-lgtk-3
-lintl
-lpango-1.0
-lpangocairo-1.0
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
|
# Homebrew formula: GLEW 2.1.0 (OpenGL Extension Wrangler Library).
class Glew < Formula
desc "OpenGL Extension Wrangler Library"
homepage "https://glew.sourceforge.io/"
url "https://downloads.sourceforge.net/project/glew/glew/2.1.0/glew-2.1.0.tgz"
sha256 "04de91e7e6763039bc11940095cd9c7f880baba82196a7765f727ac05a993c95"
head "https://github.com/nigels-com/glew.git"
bottle do
cellar :any
sha256 "a81e04f8be35080991e136e0b2229448fd237a31991d34d5a2e1c5f8db795201" => :mojave
sha256 "6923b0c452de864a5be7a4d1c47803f434590e9caca1366c57811aead7e5a34b" => :high_sierra
sha256 "17d6b3bbb956bd1672a26490eb58a82eaa0e3e1adb926f3e87ba060bdf999cf3" => :sierra
sha256 "7d4cc74d42072da62ef61737bf28b638f52b4f56b2b8234f4709427eb44a11fe" => :el_capitan
sha256 "a2f2237afc466ec31735d03c983e962240555e7ad32f2bc7b5cbceb996f48ade" => :yosemite
end
depends_on "cmake" => :build
# Configure from the CMake tree shipped under build/cmake, then install;
# documentation is installed manually from doc/.
def install
cd "build" do
system "cmake", "./cmake", *std_cmake_args
system "make"
system "make", "install"
end
doc.install Dir["doc/*"]
end
# Smoke test: create a GLUT window and check that glewInit succeeds.
test do
(testpath/"test.c").write <<~EOS
#include <GL/glew.h>
#include <GLUT/glut.h>
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutCreateWindow("GLEW Test");
GLenum err = glewInit();
if (GLEW_OK != err) {
return 1;
}
return 0;
}
EOS
system ENV.cc, testpath/"test.c", "-o", "test", "-L#{lib}", "-lGLEW",
"-framework", "GLUT"
system "./test"
end
end
glew: update 2.1.0 bottle.
# Homebrew formula: GLEW 2.1.0 — same release with a catalina bottle added.
class Glew < Formula
desc "OpenGL Extension Wrangler Library"
homepage "https://glew.sourceforge.io/"
url "https://downloads.sourceforge.net/project/glew/glew/2.1.0/glew-2.1.0.tgz"
sha256 "04de91e7e6763039bc11940095cd9c7f880baba82196a7765f727ac05a993c95"
head "https://github.com/nigels-com/glew.git"
bottle do
cellar :any
sha256 "8a848d279644c654db3f5a782811a0db9b405d6b6dd49b0ba303b9b8866b0793" => :catalina
sha256 "a81e04f8be35080991e136e0b2229448fd237a31991d34d5a2e1c5f8db795201" => :mojave
sha256 "6923b0c452de864a5be7a4d1c47803f434590e9caca1366c57811aead7e5a34b" => :high_sierra
sha256 "17d6b3bbb956bd1672a26490eb58a82eaa0e3e1adb926f3e87ba060bdf999cf3" => :sierra
sha256 "7d4cc74d42072da62ef61737bf28b638f52b4f56b2b8234f4709427eb44a11fe" => :el_capitan
sha256 "a2f2237afc466ec31735d03c983e962240555e7ad32f2bc7b5cbceb996f48ade" => :yosemite
end
depends_on "cmake" => :build
# Configure from the CMake tree shipped under build/cmake, then install;
# documentation is installed manually from doc/.
def install
cd "build" do
system "cmake", "./cmake", *std_cmake_args
system "make"
system "make", "install"
end
doc.install Dir["doc/*"]
end
# Smoke test: create a GLUT window and check that glewInit succeeds.
test do
(testpath/"test.c").write <<~EOS
#include <GL/glew.h>
#include <GLUT/glut.h>
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutCreateWindow("GLEW Test");
GLenum err = glewInit();
if (GLEW_OK != err) {
return 1;
}
return 0;
}
EOS
system ENV.cc, testpath/"test.c", "-o", "test", "-L#{lib}", "-lGLEW",
"-framework", "GLUT"
system "./test"
end
end
|
# Homebrew formula: GLFW 3.2.1, an OpenGL window/context/input library.
class Glfw < Formula
desc "Multi-platform library for OpenGL applications"
homepage "http://www.glfw.org/"
url "https://github.com/glfw/glfw/archive/3.2.1.tar.gz"
sha256 "e10f0de1384d75e6fc210c53e91843f6110d6c4f3afbfb588130713c2f9d8fe8"
head "https://github.com/glfw/glfw.git"
bottle do
cellar :any
sha256 "c19bbe78ab9d7d376b2cd265389348e4ad4572b9881bb1048b05d3eb4bc67762" => :sierra
sha256 "874e364604c386252a1d639f24c8d2333bc4715c67acd77109c291d724509538" => :el_capitan
sha256 "ecfc037c61cedd936d230880dd052691e8c07c4f10c3c95ccde4d8bc4e3f5e35" => :yosemite
end
# NOTE(review): the later revision of this formula drops :universal —
# presumably because universal builds were deprecated in Homebrew; verify.
option :universal
option "without-shared-library", "Build static library only (defaults to building dylib only)"
option "with-examples", "Build examples"
option "with-test", "Build test programs"
depends_on "cmake" => :build
# Old option spellings kept for backwards compatibility.
deprecated_option "build-examples" => "with-examples"
deprecated_option "static" => "without-shared-library"
deprecated_option "build-tests" => "with-test"
deprecated_option "with-tests" => "with-test"
def install
ENV.universal_binary if build.universal?
args = std_cmake_args + %w[
-DGLFW_USE_CHDIR=TRUE
-DGLFW_USE_MENUBAR=TRUE
]
args << "-DGLFW_BUILD_UNIVERSAL=TRUE" if build.universal?
args << "-DBUILD_SHARED_LIBS=TRUE" if build.with? "shared-library"
args << "-DGLFW_BUILD_EXAMPLES=TRUE" if build.with? "examples"
args << "-DGLFW_BUILD_TESTS=TRUE" if build.with? "test"
args << "."
system "cmake", *args
system "make", "install"
# Examples/tests are not installed by make; stash them in libexec.
libexec.install Dir["examples/*"] if build.with? "examples"
libexec.install Dir["tests/*"] if build.with? "test"
end
# Smoke test: initialize and terminate GLFW. The static library needs the
# macOS frameworks linked explicitly; the dylib carries them itself.
test do
(testpath/"test.c").write <<-EOS.undent
#define GLFW_INCLUDE_GLU
#include <GLFW/glfw3.h>
#include <stdlib.h>
int main()
{
if (!glfwInit())
exit(EXIT_FAILURE);
glfwTerminate();
return 0;
}
EOS
if build.with? "shared-library"
system ENV.cc, "test.c", "-o", "test",
"-I#{include}", "-L#{lib}", "-lglfw"
else
system ENV.cc, "test.c", "-o", "test",
"-I#{include}", "-L#{lib}", "-lglfw3",
"-framework", "IOKit",
"-framework", "CoreVideo",
"-framework", "AppKit"
end
system "./test"
end
end
glfw: drop universal
Closes #10348.
Signed-off-by: FX Coudert <c329953660db96eae534be5bbf1a735c2baf69b5@gmail.com>
# Homebrew formula: GLFW 3.2.1 — revision with the :universal option removed.
class Glfw < Formula
desc "Multi-platform library for OpenGL applications"
homepage "http://www.glfw.org/"
url "https://github.com/glfw/glfw/archive/3.2.1.tar.gz"
sha256 "e10f0de1384d75e6fc210c53e91843f6110d6c4f3afbfb588130713c2f9d8fe8"
head "https://github.com/glfw/glfw.git"
bottle do
cellar :any
sha256 "c19bbe78ab9d7d376b2cd265389348e4ad4572b9881bb1048b05d3eb4bc67762" => :sierra
sha256 "874e364604c386252a1d639f24c8d2333bc4715c67acd77109c291d724509538" => :el_capitan
sha256 "ecfc037c61cedd936d230880dd052691e8c07c4f10c3c95ccde4d8bc4e3f5e35" => :yosemite
end
option "without-shared-library", "Build static library only (defaults to building dylib only)"
option "with-examples", "Build examples"
option "with-test", "Build test programs"
depends_on "cmake" => :build
# Old option spellings kept for backwards compatibility.
deprecated_option "build-examples" => "with-examples"
deprecated_option "static" => "without-shared-library"
deprecated_option "build-tests" => "with-test"
deprecated_option "with-tests" => "with-test"
def install
args = std_cmake_args + %w[
-DGLFW_USE_CHDIR=TRUE
-DGLFW_USE_MENUBAR=TRUE
]
args << "-DBUILD_SHARED_LIBS=TRUE" if build.with? "shared-library"
args << "-DGLFW_BUILD_EXAMPLES=TRUE" if build.with? "examples"
args << "-DGLFW_BUILD_TESTS=TRUE" if build.with? "test"
args << "."
system "cmake", *args
system "make", "install"
# Examples/tests are not installed by make; stash them in libexec.
libexec.install Dir["examples/*"] if build.with? "examples"
libexec.install Dir["tests/*"] if build.with? "test"
end
# Smoke test: initialize and terminate GLFW. The static library needs the
# macOS frameworks linked explicitly; the dylib carries them itself.
test do
(testpath/"test.c").write <<-EOS.undent
#define GLFW_INCLUDE_GLU
#include <GLFW/glfw3.h>
#include <stdlib.h>
int main()
{
if (!glfwInit())
exit(EXIT_FAILURE);
glfwTerminate();
return 0;
}
EOS
if build.with? "shared-library"
system ENV.cc, "test.c", "-o", "test",
"-I#{include}", "-L#{lib}", "-lglfw"
else
system ENV.cc, "test.c", "-o", "test",
"-I#{include}", "-L#{lib}", "-lglfw3",
"-framework", "IOKit",
"-framework", "CoreVideo",
"-framework", "AppKit"
end
system "./test"
end
end
|
# Homebrew formula: GLib 2.54.1, the core GNOME C application library,
# carrying three Homebrew patches for macOS-specific issues.
class Glib < Formula
desc "Core application library for C"
homepage "https://developer.gnome.org/glib/"
url "https://download.gnome.org/sources/glib/2.54/glib-2.54.1.tar.xz"
sha256 "50c01b1419324f10fbf9b9709ec2164b18586968bdce7540583bf32302cf47a3"
bottle do
sha256 "b2e868b829d820d8f713aa15f87fabe17137209267af7c9b80a6cdfd9478e20b" => :high_sierra
sha256 "7e595082aacd2060267024407734dca23431ba90f619d57dea3c605673db70d4" => :sierra
sha256 "f627657c66e12140468ab708210b6c134f8cd3c618dfb87b954d9a876b9d9ba4" => :el_capitan
end
option "with-test", "Build a debug build and run tests. NOTE: Not all tests succeed yet"
deprecated_option "test" => "with-test"
depends_on "pkg-config" => :build
# next three lines can be removed when bug 780271 is fixed and gio.patch is modified accordingly
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "gettext"
depends_on "libffi"
depends_on "pcre"
# https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
# but needed to fix an assumption about the location of the d-bus machine
# id file.
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/hardcoded-paths.diff"
sha256 "a4cb96b5861672ec0750cb30ecebe1d417d38052cac12fbb8a77dbf04a886fcb"
end
# Fixes compilation with FSF GCC. Doesn't fix it on every platform, due
# to unrelated issues in GCC, but improves the situation.
# Patch submitted upstream: https://bugzilla.gnome.org/show_bug.cgi?id=672777
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/13efbb2/glib/gio.patch"
sha256 "628f8ea171a29c67fb06461ce4cfe549846b8fe64d83466e18e225726615b997"
end
# Revert some bad macOS specific commits
# https://bugzilla.gnome.org/show_bug.cgi?id=780271
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/73738ca/glib/revert-appinfo-contenttype.patch"
sha256 "675369c6d956b5533865178a2a78a6b2dcb921fbcfd81d35e92fc1592323e5e4"
end
def install
# Substitute the real prefix into sources patched by hardcoded-paths.diff.
inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
"@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX
# renaming is necessary for patches to work
mv "gio/gcocoanotificationbackend.c", "gio/gcocoanotificationbackend.m"
mv "gio/gnextstepsettingsbackend.c", "gio/gnextstepsettingsbackend.m"
rm "gio/gosxappinfo.h"
# Disable dtrace; see https://trac.macports.org/ticket/30413
args = %W[
--disable-maintainer-mode
--disable-dependency-tracking
--disable-silent-rules
--disable-dtrace
--disable-libelf
--enable-static
--prefix=#{prefix}
--localstatedir=#{var}
--with-gio-module-dir=#{HOMEBREW_PREFIX}/lib/gio/modules
]
# next line can be removed when bug 780271 is fixed and gio.patch is modified accordingly
system "autoreconf", "-i", "-f"
system "./configure", *args
# disable creating directory for GIO_MODULE_DIR, we will do this manually in post_install
inreplace "gio/Makefile", "$(mkinstalldirs) $(DESTDIR)$(GIO_MODULE_DIR)", ""
system "make"
# the spawn-multithreaded tests require more open files
system "ulimit -n 1024; make check" if build.with? "test"
system "make", "install"
# `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
# have a pkgconfig file, so we add gettext lib and include paths here.
gettext = Formula["gettext"].opt_prefix
inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
"Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
"Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
end
(share+"gtk-doc").rmtree
end
# Create the shared GIO module directory that the build was told about
# but prevented from creating itself (see the Makefile inreplace above).
def post_install
(HOMEBREW_PREFIX/"lib/gio/modules").mkpath
end
# Round-trip a string through g_convert (UTF-8 -> ASCII -> UTF-8) and
# check it comes back unchanged.
test do
(testpath/"test.c").write <<~EOS
#include <string.h>
#include <glib.h>
int main(void)
{
gchar *result_1, *result_2;
char *str = "string";
result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);
return (strcmp(str, result_2) == 0) ? 0 : 1;
}
EOS
system ENV.cc, "-o", "test", "test.c", "-I#{include}/glib-2.0",
"-I#{lib}/glib-2.0/include", "-L#{lib}", "-lglib-2.0"
system "./test"
end
end
glib 2.54.2
Closes #19986.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula: GLib 2.54.2, built with autotools.
class Glib < Formula
  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.54/glib-2.54.2.tar.xz"
  sha256 "bb89e5c5aad33169a8c7f28b45671c7899c12f74caf707737f784d7102758e6c"

  # Pre-built binary bottles, keyed by macOS release.
  bottle do
    sha256 "b2e868b829d820d8f713aa15f87fabe17137209267af7c9b80a6cdfd9478e20b" => :high_sierra
    sha256 "7e595082aacd2060267024407734dca23431ba90f619d57dea3c605673db70d4" => :sierra
    sha256 "f627657c66e12140468ab708210b6c134f8cd3c618dfb87b954d9a876b9d9ba4" => :el_capitan
  end

  option "with-test", "Build a debug build and run tests. NOTE: Not all tests succeed yet"
  deprecated_option "test" => "with-test"

  depends_on "pkg-config" => :build
  # next three lines can be removed when bug 780271 is fixed and gio.patch is modified accordingly
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/hardcoded-paths.diff"
    sha256 "a4cb96b5861672ec0750cb30ecebe1d417d38052cac12fbb8a77dbf04a886fcb"
  end

  # Fixes compilation with FSF GCC. Doesn't fix it on every platform, due
  # to unrelated issues in GCC, but improves the situation.
  # Patch submitted upstream: https://bugzilla.gnome.org/show_bug.cgi?id=672777
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/13efbb2/glib/gio.patch"
    sha256 "628f8ea171a29c67fb06461ce4cfe549846b8fe64d83466e18e225726615b997"
  end

  # Revert some bad macOS specific commits
  # https://bugzilla.gnome.org/show_bug.cgi?id=780271
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/73738ca/glib/revert-appinfo-contenttype.patch"
    sha256 "675369c6d956b5533865178a2a78a6b2dcb921fbcfd81d35e92fc1592323e5e4"
  end

  # Configure, build, and install GLib into the keg.
  def install
    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # renaming is necessary for patches to work
    mv "gio/gcocoanotificationbackend.c", "gio/gcocoanotificationbackend.m"
    mv "gio/gnextstepsettingsbackend.c", "gio/gnextstepsettingsbackend.m"
    rm "gio/gosxappinfo.h"

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = %W[
      --disable-maintainer-mode
      --disable-dependency-tracking
      --disable-silent-rules
      --disable-dtrace
      --disable-libelf
      --enable-static
      --prefix=#{prefix}
      --localstatedir=#{var}
      --with-gio-module-dir=#{HOMEBREW_PREFIX}/lib/gio/modules
    ]

    # next line can be removed when bug 780271 is fixed and gio.patch is modified accordingly
    system "autoreconf", "-i", "-f"
    system "./configure", *args

    # disable creating directory for GIO_MODULE_DIR, we will do this manually in post_install
    inreplace "gio/Makefile", "$(mkinstalldirs) $(DESTDIR)$(GIO_MODULE_DIR)", ""
    system "make"
    # the spawn-multithreaded tests require more open files
    system "ulimit -n 1024; make check" if build.with? "test"
    system "make", "install"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end

    # Drop the installed gtk-doc tree.
    (share+"gtk-doc").rmtree
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<~EOS
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    system ENV.cc, "-o", "test", "test.c", "-I#{include}/glib-2.0",
                   "-I#{lib}/glib-2.0/include", "-L#{lib}", "-lglib-2.0"
    system "./test"
  end
end
|
# Homebrew formula: GLib 2.48.2, built with autotools; supports
# universal (multi-arch) builds and non-mac (Linuxbrew) installs.
class Glib < Formula
  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.48/glib-2.48.2.tar.xz"
  sha256 "f25e751589cb1a58826eac24fbd4186cda4518af772806b666a3f91f66e6d3f4"

  # Pre-built binary bottles, keyed by macOS release.
  bottle do
    sha256 "468aabe13d99c4c375a7146c6e850f792ed84300103aaf2b54d0f9605045ca22" => :sierra
    sha256 "a6a94d49727bf7892f9a48c8d1a9e35a46492bcf03b54e92e8fafddd4b8104bb" => :el_capitan
    sha256 "62f9057b7af5023c7c644a51e733dc7a2b01095a067b85507d1dc0cba60ebd5c" => :yosemite
    sha256 "08fbaf1f03390ff36506341b5a9567eaef8eb003195d52cfd7a2ba3c35e5bc69" => :mavericks
  end

  option :universal
  option "with-test", "Build a debug build and run tests. NOTE: Not all tests succeed yet"
  deprecated_option "test" => "with-test"

  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"

  fails_with :llvm do
    build 2334
    cause "Undefined symbol errors while linking"
  end

  # Pre-edited config.h ed script, applied only for universal builds.
  resource "config.h.ed" do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/eb51d82/glib/config.h.ed"
    version "111532"
    sha256 "9f1e23a084bc879880e589893c17f01a2f561e20835d6a6f08fcc1dad62388f1"
  end

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/hardcoded-paths.diff"
    sha256 "a4cb96b5861672ec0750cb30ecebe1d417d38052cac12fbb8a77dbf04a886fcb"
  end

  # Fixes compilation with FSF GCC. Doesn't fix it on every platform, due
  # to unrelated issues in GCC, but improves the situation.
  # Patch submitted upstream: https://bugzilla.gnome.org/show_bug.cgi?id=672777
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/gio.patch"
    sha256 "cc3f0f6d561d663dfcdd6154b075150f68a36f5a92f94e5163c1c20529bfdf32"
  end

  if build.universal?
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/fe50d25d/glib/universal.diff"
      sha256 "e21f902907cca543023c930101afe1d0c1a7ad351daa0678ba855341f3fd1b57"
    end
  end

  # Reverts GNotification support on OS X.
  # This only supports OS X 10.9, and the reverted commits removed the
  # ability to build glib on older versions of OS X.
  # https://bugzilla.gnome.org/show_bug.cgi?id=747146
  # Reverts upstream commits 36e093a31a9eb12021e7780b9e322c29763ffa58
  # and 89058e8a9b769ab223bc75739f5455dab18f7a3d, with equivalent changes
  # also applied to configure and gio/Makefile.in
  if MacOS.version < :mavericks
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/gnotification-mountain.patch"
      sha256 "723def732304552ca55ae9f5b568ff3e8a59a14d512af72b6c1f0421f8228a68"
    end
  end

  # Configure, build, and install GLib into the keg.
  def install
    ENV.universal_binary if build.universal?

    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # renaming is necessary for patches to work
    # (the Cocoa backend is skipped pre-Mavericks, where it is patched out)
    mv "gio/gcocoanotificationbackend.c", "gio/gcocoanotificationbackend.m" unless MacOS.version < :mavericks
    mv "gio/gnextstepsettingsbackend.c", "gio/gnextstepsettingsbackend.m"

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = %W[
      --disable-maintainer-mode
      --disable-dependency-tracking
      --disable-silent-rules
      --disable-dtrace
      --disable-libelf
      --disable-selinux
      --enable-static
      --prefix=#{prefix}
      --localstatedir=#{var}
      --with-gio-module-dir=#{HOMEBREW_PREFIX}/lib/gio/modules
    ]

    system "./configure", *args

    # Apply the multi-arch config.h edits for universal builds.
    if build.universal?
      buildpath.install resource("config.h.ed")
      system "ed -s - config.h <config.h.ed"
    end

    # disable creating directory for GIO_MODULE_DIR, we will do this manually in post_install
    inreplace "gio/Makefile", "$(mkinstalldirs) $(DESTDIR)$(GIO_MODULE_DIR)", ""
    system "make"
    # the spawn-multithreaded tests require more open files
    system "ulimit -n 1024; make check" if build.with? "test"
    system "make", "install"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    # (Only on macOS; on Linux gettext lives in libc.)
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end if OS.mac?

    # Drop the installed gtk-doc tree.
    (share+"gtk-doc").rmtree
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<-EOS.undent
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    flags = ["-I#{include}/glib-2.0", "-I#{lib}/glib-2.0/include", "-lglib-2.0"]
    system ENV.cc, "-o", "test", "test.c", *(flags + ENV.cflags.to_s.split)
    system "./test"
  end
end
glib: update 2.48.2 bottle for Linuxbrew.
Closes Linuxbrew/homebrew-core#910.
Signed-off-by: Bob W. Hogg <c772a964fd55352a3510e5d535dd9ccc9ac30168@linux.com>
# Homebrew formula: GLib 2.48.2 (Linuxbrew bottle revision); identical to
# the previous 2.48.2 recipe apart from the added x86_64_linux bottle.
class Glib < Formula
  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.48/glib-2.48.2.tar.xz"
  sha256 "f25e751589cb1a58826eac24fbd4186cda4518af772806b666a3f91f66e6d3f4"

  # Pre-built binary bottles, keyed by OS/release.
  bottle do
    sha256 "468aabe13d99c4c375a7146c6e850f792ed84300103aaf2b54d0f9605045ca22" => :sierra
    sha256 "a6a94d49727bf7892f9a48c8d1a9e35a46492bcf03b54e92e8fafddd4b8104bb" => :el_capitan
    sha256 "62f9057b7af5023c7c644a51e733dc7a2b01095a067b85507d1dc0cba60ebd5c" => :yosemite
    sha256 "08fbaf1f03390ff36506341b5a9567eaef8eb003195d52cfd7a2ba3c35e5bc69" => :mavericks
    sha256 "3ab4af336a973f6fe4e86eea239fe22e7699747155fb35470821c238b77c587e" => :x86_64_linux
  end

  option :universal
  option "with-test", "Build a debug build and run tests. NOTE: Not all tests succeed yet"
  deprecated_option "test" => "with-test"

  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"

  fails_with :llvm do
    build 2334
    cause "Undefined symbol errors while linking"
  end

  # Pre-edited config.h ed script, applied only for universal builds.
  resource "config.h.ed" do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/eb51d82/glib/config.h.ed"
    version "111532"
    sha256 "9f1e23a084bc879880e589893c17f01a2f561e20835d6a6f08fcc1dad62388f1"
  end

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/hardcoded-paths.diff"
    sha256 "a4cb96b5861672ec0750cb30ecebe1d417d38052cac12fbb8a77dbf04a886fcb"
  end

  # Fixes compilation with FSF GCC. Doesn't fix it on every platform, due
  # to unrelated issues in GCC, but improves the situation.
  # Patch submitted upstream: https://bugzilla.gnome.org/show_bug.cgi?id=672777
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/gio.patch"
    sha256 "cc3f0f6d561d663dfcdd6154b075150f68a36f5a92f94e5163c1c20529bfdf32"
  end

  if build.universal?
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/fe50d25d/glib/universal.diff"
      sha256 "e21f902907cca543023c930101afe1d0c1a7ad351daa0678ba855341f3fd1b57"
    end
  end

  # Reverts GNotification support on OS X.
  # This only supports OS X 10.9, and the reverted commits removed the
  # ability to build glib on older versions of OS X.
  # https://bugzilla.gnome.org/show_bug.cgi?id=747146
  # Reverts upstream commits 36e093a31a9eb12021e7780b9e322c29763ffa58
  # and 89058e8a9b769ab223bc75739f5455dab18f7a3d, with equivalent changes
  # also applied to configure and gio/Makefile.in
  if MacOS.version < :mavericks
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/59e4d32/glib/gnotification-mountain.patch"
      sha256 "723def732304552ca55ae9f5b568ff3e8a59a14d512af72b6c1f0421f8228a68"
    end
  end

  # Configure, build, and install GLib into the keg.
  def install
    ENV.universal_binary if build.universal?

    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # renaming is necessary for patches to work
    # (the Cocoa backend is skipped pre-Mavericks, where it is patched out)
    mv "gio/gcocoanotificationbackend.c", "gio/gcocoanotificationbackend.m" unless MacOS.version < :mavericks
    mv "gio/gnextstepsettingsbackend.c", "gio/gnextstepsettingsbackend.m"

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = %W[
      --disable-maintainer-mode
      --disable-dependency-tracking
      --disable-silent-rules
      --disable-dtrace
      --disable-libelf
      --disable-selinux
      --enable-static
      --prefix=#{prefix}
      --localstatedir=#{var}
      --with-gio-module-dir=#{HOMEBREW_PREFIX}/lib/gio/modules
    ]

    system "./configure", *args

    # Apply the multi-arch config.h edits for universal builds.
    if build.universal?
      buildpath.install resource("config.h.ed")
      system "ed -s - config.h <config.h.ed"
    end

    # disable creating directory for GIO_MODULE_DIR, we will do this manually in post_install
    inreplace "gio/Makefile", "$(mkinstalldirs) $(DESTDIR)$(GIO_MODULE_DIR)", ""
    system "make"
    # the spawn-multithreaded tests require more open files
    system "ulimit -n 1024; make check" if build.with? "test"
    system "make", "install"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    # (Only on macOS; on Linux gettext lives in libc.)
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end if OS.mac?

    # Drop the installed gtk-doc tree.
    (share+"gtk-doc").rmtree
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<-EOS.undent
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    flags = ["-I#{include}/glib-2.0", "-I#{lib}/glib-2.0/include", "-lglib-2.0"]
    system ENV.cc, "-o", "test", "test.c", *(flags + ENV.cflags.to_s.split)
    system "./test"
  end
end
|
# Homebrew formula: GLib 2.64.4 (revision 2), built with meson/ninja.
class Glib < Formula
  include Language::Python::Shebang

  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.64/glib-2.64.4.tar.xz"
  sha256 "f7e0b325b272281f0462e0f7fff25a833820cac19911ff677251daf6d87bce50"
  license "LGPL-2.1"
  revision 2

  # Pre-built binary bottles, keyed by macOS release.
  bottle do
    sha256 "5b4079bc14d3e16b745686b6cc7a3bca8877ac914f4ea11b82cda7e5af21c51c" => :catalina
    sha256 "64fd37a69bcafc9cc7995e00d7851c91283ba9e6dcf2064be99e66a8694fc460" => :mojave
    sha256 "66301047c7acc3002533fc2682433906013b1eb24d9f1accb6d0bcbe2233ae67" => :high_sierra
  end

  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"
  depends_on "python@3.8"

  on_linux do
    depends_on "util-linux"
  end

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/6164294a75541c278f3863b111791376caa3ad26/glib/hardcoded-paths.diff"
    sha256 "a57fec9e85758896ff5ec1ad483050651b59b7b77e0217459ea650704b7d422b"
  end

  # Fixes a runtime error on ARM and PowerPC Macs.
  # Can be removed in the next release.
  # https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1566
  patch do
    url "https://gitlab.gnome.org/GNOME/glib/-/commit/c60d6599c9182ce44fdfaa8dde2955f55fc0d628.patch"
    sha256 "9e3de41571edaa4bce03959abf885aad4edd069a622a5b642bf40294d748792e"
  end

  # Enables G_GNUC_FALLTHROUGH on clang.
  # Necessary for pango to build on recent versions of clang.
  # Will be in the next release.
  patch do
    url "https://gitlab.gnome.org/GNOME/glib/-/commit/5f38ae5ffca3213addc5b279a46d537792d031db.patch"
    sha256 "12128966a693dd45d2e20286437aea13b1fe554aed0907cbc33131d3b76be890"
  end

  # Configure with meson, build with ninja, then patch up installed
  # pkg-config files so dependents resolve gettext and libffi.
  def install
    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = std_meson_args + %W[
      -Diconv=auto
      -Dgio_module_dir=#{HOMEBREW_PREFIX}/lib/gio/modules
      -Dbsymbolic_functions=false
      -Ddtrace=false
    ]

    mkdir "build" do
      system "meson", *args, ".."
      system "ninja", "-v"
      system "ninja", "install", "-v"
      # Point installed scripts at Homebrew's python3.
      bin.find { |f| rewrite_shebang detected_python_shebang, f }
    end

    # ensure giomoduledir contains prefix, as this pkgconfig variable will be
    # used by glib-networking and glib-openssl to determine where to install
    # their modules
    inreplace lib/"pkgconfig/gio-2.0.pc",
              "giomoduledir=#{HOMEBREW_PREFIX}/lib/gio/modules",
              "giomoduledir=${libdir}/gio/modules"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end

    # `pkg-config --print-requires-private gobject-2.0` includes libffi,
    # but that package is keg-only so it needs to look for the pkgconfig file
    # in libffi's opt path.
    libffi = Formula["libffi"].opt_prefix
    inreplace lib+"pkgconfig/gobject-2.0.pc" do |s|
      s.gsub! "Requires.private: libffi",
              "Requires.private: #{libffi}/lib/pkgconfig/libffi.pc"
    end

    bash_completion.install Dir["gio/completion/*"]
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<~EOS
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    system ENV.cc, "-o", "test", "test.c", "-I#{include}/glib-2.0",
                   "-I#{lib}/glib-2.0/include", "-L#{lib}", "-lglib-2.0"
    system "./test"
  end
end
glib: update 2.64.4_2 bottle.
# Homebrew formula: GLib 2.64.4 (revision 2, bottle update); identical to
# the previous 2.64.4 recipe apart from refreshed bottle checksums.
class Glib < Formula
  include Language::Python::Shebang

  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.64/glib-2.64.4.tar.xz"
  sha256 "f7e0b325b272281f0462e0f7fff25a833820cac19911ff677251daf6d87bce50"
  license "LGPL-2.1"
  revision 2

  # Pre-built binary bottles, keyed by macOS release.
  bottle do
    sha256 "288fc814fdcc2b48b4296d700ce59468ae3a79bc11fa7978ca4de715afe88619" => :catalina
    sha256 "0f0caafe83c71689fadef8bfd71339bbec3647101dfa623e98e566b0bda33b00" => :mojave
    sha256 "60d204b976de73876d740a691f00e4c3d6af0255d5e8ee4787a93bf523ff84b4" => :high_sierra
  end

  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"
  depends_on "python@3.8"

  on_linux do
    depends_on "util-linux"
  end

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/6164294a75541c278f3863b111791376caa3ad26/glib/hardcoded-paths.diff"
    sha256 "a57fec9e85758896ff5ec1ad483050651b59b7b77e0217459ea650704b7d422b"
  end

  # Fixes a runtime error on ARM and PowerPC Macs.
  # Can be removed in the next release.
  # https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1566
  patch do
    url "https://gitlab.gnome.org/GNOME/glib/-/commit/c60d6599c9182ce44fdfaa8dde2955f55fc0d628.patch"
    sha256 "9e3de41571edaa4bce03959abf885aad4edd069a622a5b642bf40294d748792e"
  end

  # Enables G_GNUC_FALLTHROUGH on clang.
  # Necessary for pango to build on recent versions of clang.
  # Will be in the next release.
  patch do
    url "https://gitlab.gnome.org/GNOME/glib/-/commit/5f38ae5ffca3213addc5b279a46d537792d031db.patch"
    sha256 "12128966a693dd45d2e20286437aea13b1fe554aed0907cbc33131d3b76be890"
  end

  # Configure with meson, build with ninja, then patch up installed
  # pkg-config files so dependents resolve gettext and libffi.
  def install
    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = std_meson_args + %W[
      -Diconv=auto
      -Dgio_module_dir=#{HOMEBREW_PREFIX}/lib/gio/modules
      -Dbsymbolic_functions=false
      -Ddtrace=false
    ]

    mkdir "build" do
      system "meson", *args, ".."
      system "ninja", "-v"
      system "ninja", "install", "-v"
      # Point installed scripts at Homebrew's python3.
      bin.find { |f| rewrite_shebang detected_python_shebang, f }
    end

    # ensure giomoduledir contains prefix, as this pkgconfig variable will be
    # used by glib-networking and glib-openssl to determine where to install
    # their modules
    inreplace lib/"pkgconfig/gio-2.0.pc",
              "giomoduledir=#{HOMEBREW_PREFIX}/lib/gio/modules",
              "giomoduledir=${libdir}/gio/modules"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end

    # `pkg-config --print-requires-private gobject-2.0` includes libffi,
    # but that package is keg-only so it needs to look for the pkgconfig file
    # in libffi's opt path.
    libffi = Formula["libffi"].opt_prefix
    inreplace lib+"pkgconfig/gobject-2.0.pc" do |s|
      s.gsub! "Requires.private: libffi",
              "Requires.private: #{libffi}/lib/pkgconfig/libffi.pc"
    end

    bash_completion.install Dir["gio/completion/*"]
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<~EOS
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    system ENV.cc, "-o", "test", "test.c", "-I#{include}/glib-2.0",
                   "-I#{lib}/glib-2.0/include", "-L#{lib}", "-lglib-2.0"
    system "./test"
  end
end
|
# lib/frecon/models/participation.rb
#
# Copyright (C) 2014 Christopher Cooper, Sam Craig, Tiger Huang, Vincent Mai, Sam Mercier, and Kristofer Rye
#
# This file is part of FReCon, an API for scouting at FRC Competitions, which is
# licensed under the MIT license. You should have received a copy of the MIT
# license with this program. If not, please see
# <http://opensource.org/licenses/MIT>.
require "frecon/model"
module FReCon
  # A Participation joins a robot to a competition it took part in.
  # Persistence and association macros come from the Model base class.
  class Participation < Model
    belongs_to :robot
    belongs_to :competition

    # Destroying a participation also destroys its dependent records.
    has_many :records, dependent: :destroy

    # Both foreign keys must be present for a participation to be valid.
    validates :robot_id, :competition_id, presence: true
  end
end
Participation: Add the #matches selector.
# lib/frecon/models/participation.rb
#
# Copyright (C) 2014 Christopher Cooper, Sam Craig, Tiger Huang, Vincent Mai, Sam Mercier, and Kristofer Rye
#
# This file is part of FReCon, an API for scouting at FRC Competitions, which is
# licensed under the MIT license. You should have received a copy of the MIT
# license with this program. If not, please see
# <http://opensource.org/licenses/MIT>.
require "frecon/model"
module FReCon
  # A Participation joins a robot to a competition it took part in.
  # Persistence and association macros come from the Model base class.
  class Participation < Model
    belongs_to :robot
    belongs_to :competition

    # Destroying a participation also destroys its dependent records.
    has_many :records, dependent: :destroy

    # Both foreign keys must be present for a participation to be valid.
    validates :robot_id, :competition_id, presence: true

    # Matches this participation has at least one record for, looked up
    # via the match ids referenced by the associated records.
    def matches
      match_ids = records.map { |record| record.match_id }
      Match.in(id: match_ids)
    end
  end
end
|
# Homebrew formula: GLib 2.64.2, built with meson/ninja.
class Glib < Formula
  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.64/glib-2.64.2.tar.xz"
  sha256 "9a2f21ed8f13b9303399de13a0252b7cbcede593d26971378ec6cb90e87f2277"

  # Pre-built binary bottles, keyed by macOS release.
  bottle do
    sha256 "08a55a8645d8fcea984bacb09c991115fe346a598f33d4fc4a9d583a164921c9" => :catalina
    sha256 "42138d5d30d5eab37f17c75e8b191034c175b615a6777021460365be90b6f49a" => :mojave
    sha256 "3abd649cb9c2c8f0bfb5a21bab86353078dbf2faeb2323f8051236b27c9bc1aa" => :high_sierra
  end

  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"
  depends_on "python@3.8"

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/6164294a75541c278f3863b111791376caa3ad26/glib/hardcoded-paths.diff"
    sha256 "a57fec9e85758896ff5ec1ad483050651b59b7b77e0217459ea650704b7d422b"
  end

  # Configure with meson, build with ninja, then patch up installed
  # pkg-config files so dependents resolve gettext and libffi.
  def install
    Language::Python.rewrite_python_shebang(Formula["python@3.8"].opt_bin/"python3")

    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = %W[
      -Diconv=auto
      -Dgio_module_dir=#{HOMEBREW_PREFIX}/lib/gio/modules
      -Dbsymbolic_functions=false
      -Ddtrace=false
    ]

    mkdir "build" do
      system "meson", "--prefix=#{prefix}", *args, ".."
      system "ninja", "-v"
      # Some files have been generated with a Python shebang, rewrite these too
      Language::Python.rewrite_python_shebang(Formula["python@3.8"].opt_bin/"python3")
      system "ninja", "install", "-v"
    end

    # ensure giomoduledir contains prefix, as this pkgconfig variable will be
    # used by glib-networking and glib-openssl to determine where to install
    # their modules
    inreplace lib/"pkgconfig/gio-2.0.pc",
              "giomoduledir=#{HOMEBREW_PREFIX}/lib/gio/modules",
              "giomoduledir=${libdir}/gio/modules"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    # FIX: the Cflags pattern previously read "Cflags:-I..." (no space after
    # the colon) and so never matched the "Cflags: -I..." line in the
    # generated glib-2.0.pc, silently leaving the gettext include path out.
    # Every other revision of this formula uses the spaced form.
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end

    # `pkg-config --print-requires-private gobject-2.0` includes libffi,
    # but that package is keg-only so it needs to look for the pkgconfig file
    # in libffi's opt path.
    libffi = Formula["libffi"].opt_prefix
    inreplace lib+"pkgconfig/gobject-2.0.pc" do |s|
      s.gsub! "Requires.private: libffi",
              "Requires.private: #{libffi}/lib/pkgconfig/libffi.pc"
    end

    bash_completion.install Dir["gio/completion/*"]
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<~EOS
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    system ENV.cc, "-o", "test", "test.c", "-I#{include}/glib-2.0",
                   "-I#{lib}/glib-2.0/include", "-L#{lib}", "-lglib-2.0"
    system "./test"
  end
end
glib: update 2.64.2 bottle.
# Homebrew formula: GLib 2.64.2 (bottle update); identical recipe to the
# previous 2.64.2 revision apart from refreshed bottle checksums.
class Glib < Formula
  desc "Core application library for C"
  homepage "https://developer.gnome.org/glib/"
  url "https://download.gnome.org/sources/glib/2.64/glib-2.64.2.tar.xz"
  sha256 "9a2f21ed8f13b9303399de13a0252b7cbcede593d26971378ec6cb90e87f2277"

  # Pre-built binary bottles, keyed by macOS release.
  bottle do
    sha256 "bd5f7582b25b04e593f633c89823881466785b557074b5e791ef408104037a50" => :catalina
    sha256 "b4b5969bb271182d18652a3c2062eb5970b2558258d44b1a8998a31baff4ce75" => :mojave
    sha256 "27aa18379e7d253099322c01d2f488dcac2344ac27b2c736e3f82c76148f8394" => :high_sierra
  end

  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "gettext"
  depends_on "libffi"
  depends_on "pcre"
  depends_on "python@3.8"

  # https://bugzilla.gnome.org/show_bug.cgi?id=673135 Resolved as wontfix,
  # but needed to fix an assumption about the location of the d-bus machine
  # id file.
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/6164294a75541c278f3863b111791376caa3ad26/glib/hardcoded-paths.diff"
    sha256 "a57fec9e85758896ff5ec1ad483050651b59b7b77e0217459ea650704b7d422b"
  end

  # Configure with meson, build with ninja, then patch up installed
  # pkg-config files so dependents resolve gettext and libffi.
  def install
    Language::Python.rewrite_python_shebang(Formula["python@3.8"].opt_bin/"python3")

    # Substitute the @@HOMEBREW_PREFIX@@ placeholder (introduced by
    # hardcoded-paths.diff, presumably) with the real prefix.
    inreplace %w[gio/gdbusprivate.c gio/xdgmime/xdgmime.c glib/gutils.c],
              "@@HOMEBREW_PREFIX@@", HOMEBREW_PREFIX

    # Disable dtrace; see https://trac.macports.org/ticket/30413
    args = %W[
      -Diconv=auto
      -Dgio_module_dir=#{HOMEBREW_PREFIX}/lib/gio/modules
      -Dbsymbolic_functions=false
      -Ddtrace=false
    ]

    mkdir "build" do
      system "meson", "--prefix=#{prefix}", *args, ".."
      system "ninja", "-v"
      # Some files have been generated with a Python shebang, rewrite these too
      Language::Python.rewrite_python_shebang(Formula["python@3.8"].opt_bin/"python3")
      system "ninja", "install", "-v"
    end

    # ensure giomoduledir contains prefix, as this pkgconfig variable will be
    # used by glib-networking and glib-openssl to determine where to install
    # their modules
    inreplace lib/"pkgconfig/gio-2.0.pc",
              "giomoduledir=#{HOMEBREW_PREFIX}/lib/gio/modules",
              "giomoduledir=${libdir}/gio/modules"

    # `pkg-config --libs glib-2.0` includes -lintl, and gettext itself does not
    # have a pkgconfig file, so we add gettext lib and include paths here.
    # FIX: the Cflags pattern previously read "Cflags:-I..." (no space after
    # the colon) and so never matched the "Cflags: -I..." line in the
    # generated glib-2.0.pc, silently leaving the gettext include path out.
    # Every other revision of this formula uses the spaced form.
    gettext = Formula["gettext"].opt_prefix
    inreplace lib+"pkgconfig/glib-2.0.pc" do |s|
      s.gsub! "Libs: -L${libdir} -lglib-2.0 -lintl",
              "Libs: -L${libdir} -lglib-2.0 -L#{gettext}/lib -lintl"
      s.gsub! "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include",
              "Cflags: -I${includedir}/glib-2.0 -I${libdir}/glib-2.0/include -I#{gettext}/include"
    end

    # `pkg-config --print-requires-private gobject-2.0` includes libffi,
    # but that package is keg-only so it needs to look for the pkgconfig file
    # in libffi's opt path.
    libffi = Formula["libffi"].opt_prefix
    inreplace lib+"pkgconfig/gobject-2.0.pc" do |s|
      s.gsub! "Requires.private: libffi",
              "Requires.private: #{libffi}/lib/pkgconfig/libffi.pc"
    end

    bash_completion.install Dir["gio/completion/*"]
  end

  # The GIO module dir lives under the shared HOMEBREW_PREFIX, so it is
  # created after install rather than shipped in the keg.
  def post_install
    (HOMEBREW_PREFIX/"lib/gio/modules").mkpath
  end

  # Round-trips a string through g_convert (UTF-8 -> ASCII -> UTF-8) to
  # verify the installed library compiles against and links.
  test do
    (testpath/"test.c").write <<~EOS
      #include <string.h>
      #include <glib.h>

      int main(void)
      {
          gchar *result_1, *result_2;
          char *str = "string";

          result_1 = g_convert(str, strlen(str), "ASCII", "UTF-8", NULL, NULL, NULL);
          result_2 = g_convert(result_1, strlen(result_1), "UTF-8", "ASCII", NULL, NULL, NULL);

          return (strcmp(str, result_2) == 0) ? 0 : 1;
      }
    EOS
    system ENV.cc, "-o", "test", "test.c", "-I#{include}/glib-2.0",
                   "-I#{lib}/glib-2.0/include", "-L#{lib}", "-lglib-2.0"
    system "./test"
  end
end
|
require 'formula'
# Homebrew formula: google-glog 0.3.3 logging library.
class Glog < Formula
  homepage 'http://code.google.com/p/google-glog/'
  url 'https://google-glog.googlecode.com/files/glog-0.3.3.tar.gz'
  sha1 'ed40c26ecffc5ad47c618684415799ebaaa30d65'

  depends_on 'gflags'

  if MacOS.version >= :mavericks
    # Since 0.3.4 has not yet been released, manually apply
    # r134 that refactors the way headers are included.
    patch do
      url "https://gist.githubusercontent.com/danslo/7128754/raw/9b19991da4753f5efb87ae9a6939e6c3e9bc1fdf/glog_logging_r134.diff"
      sha1 "a4a1a3d1467115f927935c441715b0f8c362abba"
    end
    # Don't use tr1 prefix when we're using libc++:
    # https://code.google.com/p/google-glog/issues/detail?id=121 (patch mirrored on gist.github.com)
    patch do
      url "https://gist.githubusercontent.com/noahm/7364571/raw/436283200fe5a3ac5d00d769bb2203260bebfcf9/libc%2B%2B.diff"
      sha1 "14fe8c422a92ebd6908861ee22cfe1a689191c18"
    end
  end

  # Standard autotools configure + install.
  def install
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    # FIX: pass "make" and "install" as separate arguments so no shell is
    # spawned (Homebrew convention; matches the other formulae in this file
    # which use `system "make", "install"`).
    system "make", "install"
  end
end
glog: gflags compatibility fix
Closes Homebrew/homebrew#34416. The upstream issue tracker for Glog is pretty dead, and
gflags development seems to be outpacing it considerably, so vendoring
the old version of gflags is a workaround.
Closes Homebrew/homebrew#34451.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
require "formula"
# Homebrew formula: google-glog 0.3.3 with a privately vendored gflags 2.0
# (staged into libexec) to avoid incompatibilities with newer gflags.
class Glog < Formula
  homepage "https://code.google.com/p/google-glog/"
  url "https://google-glog.googlecode.com/files/glog-0.3.3.tar.gz"
  sha1 "ed40c26ecffc5ad47c618684415799ebaaa30d65"

  # Vendor an older version of gflags, as the newer version breaks compilation.
  resource "gflags" do
    url "https://gflags.googlecode.com/files/gflags-2.0.tar.gz"
    sha1 "dfb0add1b59433308749875ac42796c41e824908"
  end

  if MacOS.version >= :mavericks
    # Since 0.3.4 has not yet been released, manually apply
    # r134 that refactors the way headers are included.
    patch do
      url "https://gist.githubusercontent.com/danslo/7128754/raw/9b19991da4753f5efb87ae9a6939e6c3e9bc1fdf/glog_logging_r134.diff"
      sha1 "a4a1a3d1467115f927935c441715b0f8c362abba"
    end
    # Don't use tr1 prefix when we're using libc++:
    # https://code.google.com/p/google-glog/issues/detail?id=121 (patch mirrored on gist.github.com)
    patch do
      url "https://gist.githubusercontent.com/noahm/7364571/raw/436283200fe5a3ac5d00d769bb2203260bebfcf9/libc%2B%2B.diff"
      sha1 "14fe8c422a92ebd6908861ee22cfe1a689191c18"
    end
  end

  # Build the vendored gflags into libexec first, then build glog against it.
  def install
    resource("gflags").stage do
      system "./configure", "--disable-dependency-tracking", "--prefix=#{libexec}/gflags"
      system "make", "install"
    end

    # Find our sandboxed gflags.
    ENV.append_to_cflags "-I#{libexec}/gflags/include"
    ENV.append "LDFLAGS", "-L#{libexec}/gflags/lib"

    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  # Confirms libglog links against the sandboxed gflags dylib in libexec,
  # not some other gflags on the system.
  test do
    assert_equal "#{libexec}/gflags/lib/libgflags.2.dylib (compatibility version 4.0.0, current version 4.0.0)",
                 shell_output("otool -L #{lib}/libglog.0.dylib | grep libgflags").strip
  end
end
|
# Homebrew formula for Gmsh 4.2.3, a 3D finite element mesh generator
# with a built-in CAD engine, built via CMake with FLTK GUI and
# OpenCASCADE support enabled unconditionally.
class Gmsh < Formula
  desc "3D finite element grid generator with CAD engine"
  homepage "https://gmsh.info/"
  url "https://gmsh.info/src/gmsh-4.2.3-source.tgz"
  sha256 "58a8c8828b66e77680b52feb8a91fb4790d0a93906e411f3bfa1928864e52858"
  head "https://gitlab.onelab.info/gmsh/gmsh.git"

  bottle do
    cellar :any
    sha256 "a07f8f04a788c6a444a700800b7d353a41e3b20b5e67ab086fafd23cbe62649f" => :mojave
    sha256 "0c4a368ac3bb8c72e4f2a2e44bac79daadb2828447165e0ed2e49db0133a7b1f" => :high_sierra
    sha256 "b7d6e26fb366e4aee94027f8983235610ff2063b3a72102284fae17b2bda47d2" => :sierra
  end

  depends_on "cmake" => :build
  depends_on "cairo"
  depends_on "fltk"
  depends_on "gcc" # for gfortran
  depends_on "open-mpi"
  depends_on "opencascade"

  def install
    # Disable OS-specific install paths so files land in the standard
    # Homebrew prefix layout; build both static and shared libraries.
    args = std_cmake_args + %W[
      -DENABLE_OS_SPECIFIC_INSTALL=0
      -DGMSH_BIN=#{bin}
      -DGMSH_LIB=#{lib}
      -DGMSH_DOC=#{pkgshare}/gmsh
      -DGMSH_MAN=#{man}
      -DENABLE_BUILD_LIB=ON
      -DENABLE_BUILD_SHARED=ON
      -DENABLE_NATIVE_FILE_CHOOSER=ON
      -DENABLE_PETSC=OFF
      -DENABLE_SLEPC=OFF
      -DENABLE_OCC=ON
    ]

    # Point OpenCASCADE detection at the Homebrew-installed copy.
    ENV["CASROOT"] = Formula["opencascade"].opt_prefix

    mkdir "build" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"

      # Move onelab.py into libexec instead of bin
      mkdir_p libexec
      mv bin/"onelab.py", libexec
    end
  end

  test do
    # Parse a bundled tutorial geometry file and exit; fails on broken builds.
    system "#{bin}/gmsh", "#{share}/doc/gmsh/tutorial/t1.geo", "-parse_and_exit"
  end
end
gmsh 4.3.0
Closes #39139.
Signed-off-by: Jan Viljanen <15d570f5591572cb420df95d36f61f7b9e9e0533@users.noreply.github.com>
# Homebrew formula for Gmsh 4.3.0 (version bump of the 4.2.3 formula;
# bottle hashes still reference the previous build and are regenerated by CI).
class Gmsh < Formula
  desc "3D finite element grid generator with CAD engine"
  homepage "https://gmsh.info/"
  url "https://gmsh.info/src/gmsh-4.3.0-source.tgz"
  sha256 "54a236f5708bc105d5b60ddb2b95ea7062537ccd2720860377994c1a9bb86429"
  head "https://gitlab.onelab.info/gmsh/gmsh.git"

  bottle do
    cellar :any
    sha256 "a07f8f04a788c6a444a700800b7d353a41e3b20b5e67ab086fafd23cbe62649f" => :mojave
    sha256 "0c4a368ac3bb8c72e4f2a2e44bac79daadb2828447165e0ed2e49db0133a7b1f" => :high_sierra
    sha256 "b7d6e26fb366e4aee94027f8983235610ff2063b3a72102284fae17b2bda47d2" => :sierra
  end

  depends_on "cmake" => :build
  depends_on "cairo"
  depends_on "fltk"
  depends_on "gcc" # for gfortran
  depends_on "open-mpi"
  depends_on "opencascade"

  def install
    # Disable OS-specific install paths so files land in the standard
    # Homebrew prefix layout; build both static and shared libraries.
    args = std_cmake_args + %W[
      -DENABLE_OS_SPECIFIC_INSTALL=0
      -DGMSH_BIN=#{bin}
      -DGMSH_LIB=#{lib}
      -DGMSH_DOC=#{pkgshare}/gmsh
      -DGMSH_MAN=#{man}
      -DENABLE_BUILD_LIB=ON
      -DENABLE_BUILD_SHARED=ON
      -DENABLE_NATIVE_FILE_CHOOSER=ON
      -DENABLE_PETSC=OFF
      -DENABLE_SLEPC=OFF
      -DENABLE_OCC=ON
    ]

    # Point OpenCASCADE detection at the Homebrew-installed copy.
    ENV["CASROOT"] = Formula["opencascade"].opt_prefix

    mkdir "build" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"

      # Move onelab.py into libexec instead of bin
      mkdir_p libexec
      mv bin/"onelab.py", libexec
    end
  end

  test do
    # Parse a bundled tutorial geometry file and exit; fails on broken builds.
    system "#{bin}/gmsh", "#{share}/doc/gmsh/tutorial/t1.geo", "-parse_and_exit"
  end
end
|
# Homebrew formula for Gmsh 4.0.4 with opt-in FLTK GUI and OpenCASCADE
# support (the older option-based variant of this formula).
class Gmsh < Formula
  desc "3D finite element grid generator with CAD engine"
  homepage "https://gmsh.info/"
  url "https://gmsh.info/src/gmsh-4.0.4-source.tgz"
  sha256 "0a4269a133b6c23a3fca5d3b381d73117ea073b3fbb2c867327677df87a679c3"
  head "https://gitlab.onelab.info/gmsh/gmsh.git"

  bottle do
    cellar :any
    sha256 "f32487ea4bb0bb69c5a919114a8a47fb4553c067b0c13b8c7c3fea02349740e8" => :mojave
    sha256 "ee0290632bf696cb34e1a554f7c2f749a1b89b18bde9a219e95e80b944fe24f9" => :high_sierra
    sha256 "404d8578f91b65a4fc305c0eacb24a5673e6b576ebb071b1186f2c1e86c3db21" => :sierra
    sha256 "901558025b6a05e1982f7d93330089a3432445e5ae3d97d9815d7917f677ddf8" => :el_capitan
  end

  option "with-opencascade", "Build with opencascade support"

  depends_on "cmake" => :build
  depends_on "gcc" # for gfortran
  depends_on "open-mpi"
  depends_on "fltk" => :optional
  depends_on "cairo" if build.with? "fltk"
  depends_on "opencascade" => :optional

  def install
    # Disable OS-specific install paths so files land in the standard
    # Homebrew prefix layout; build both static and shared libraries.
    args = std_cmake_args + %W[
      -DENABLE_OS_SPECIFIC_INSTALL=0
      -DGMSH_BIN=#{bin}
      -DGMSH_LIB=#{lib}
      -DGMSH_DOC=#{pkgshare}/gmsh
      -DGMSH_MAN=#{man}
      -DENABLE_BUILD_LIB=ON
      -DENABLE_BUILD_SHARED=ON
      -DENABLE_NATIVE_FILE_CHOOSER=ON
      -DENABLE_PETSC=OFF
      -DENABLE_SLEPC=OFF
    ]

    # OpenCASCADE (CAD kernel) support is opt-in via --with-opencascade.
    if build.with? "opencascade"
      ENV["CASROOT"] = Formula["opencascade"].opt_prefix
      args << "-DENABLE_OCC=ON"
    else
      args << "-DENABLE_OCC=OFF"
    end

    # The FLTK GUI is only built when requested.
    args << "-DENABLE_FLTK=OFF" if build.without? "fltk"

    mkdir "build" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"

      # Move onelab.py into libexec instead of bin
      mkdir_p libexec
      mv bin/"onelab.py", libexec
    end
  end

  def caveats
    "To use onelab.py set your PYTHONDIR to #{libexec}"
  end

  test do
    # Parse a bundled tutorial geometry file and exit; fails on broken builds.
    system "#{bin}/gmsh", "#{share}/doc/gmsh/tutorial/t1.geo", "-parse_and_exit"
  end
end
gmsh: update 4.0.4 bottle.
# Homebrew formula for Gmsh 4.0.4 — bottle-rebuild revision of the
# option-based variant (new bottle hashes, no source change).
class Gmsh < Formula
  desc "3D finite element grid generator with CAD engine"
  homepage "https://gmsh.info/"
  url "https://gmsh.info/src/gmsh-4.0.4-source.tgz"
  sha256 "0a4269a133b6c23a3fca5d3b381d73117ea073b3fbb2c867327677df87a679c3"
  head "https://gitlab.onelab.info/gmsh/gmsh.git"

  bottle do
    cellar :any
    sha256 "35fbb6b2e313c3772219c3d7414878759523a289d4e71f10449480b156271dfc" => :mojave
    sha256 "b218029b152574295683196a2046398f8ae0288039eb47c375c7f03e7f7d4a35" => :high_sierra
    sha256 "6b6493cea3f0ae4ab694f7071700f3407f2d17e285198b715aa238701107f4d4" => :sierra
  end

  option "with-opencascade", "Build with opencascade support"

  depends_on "cmake" => :build
  depends_on "gcc" # for gfortran
  depends_on "open-mpi"
  depends_on "fltk" => :optional
  depends_on "cairo" if build.with? "fltk"
  depends_on "opencascade" => :optional

  def install
    # Disable OS-specific install paths so files land in the standard
    # Homebrew prefix layout; build both static and shared libraries.
    args = std_cmake_args + %W[
      -DENABLE_OS_SPECIFIC_INSTALL=0
      -DGMSH_BIN=#{bin}
      -DGMSH_LIB=#{lib}
      -DGMSH_DOC=#{pkgshare}/gmsh
      -DGMSH_MAN=#{man}
      -DENABLE_BUILD_LIB=ON
      -DENABLE_BUILD_SHARED=ON
      -DENABLE_NATIVE_FILE_CHOOSER=ON
      -DENABLE_PETSC=OFF
      -DENABLE_SLEPC=OFF
    ]

    # OpenCASCADE (CAD kernel) support is opt-in via --with-opencascade.
    if build.with? "opencascade"
      ENV["CASROOT"] = Formula["opencascade"].opt_prefix
      args << "-DENABLE_OCC=ON"
    else
      args << "-DENABLE_OCC=OFF"
    end

    # The FLTK GUI is only built when requested.
    args << "-DENABLE_FLTK=OFF" if build.without? "fltk"

    mkdir "build" do
      system "cmake", "..", *args
      system "make"
      system "make", "install"

      # Move onelab.py into libexec instead of bin
      mkdir_p libexec
      mv bin/"onelab.py", libexec
    end
  end

  def caveats
    "To use onelab.py set your PYTHONDIR to #{libexec}"
  end

  test do
    # Parse a bundled tutorial geometry file and exit; fails on broken builds.
    system "#{bin}/gmsh", "#{share}/doc/gmsh/tutorial/t1.geo", "-parse_and_exit"
  end
end
|
require "language/go"
# Homebrew formula for Goad, built from a pinned git tag inside a GOPATH
# workspace, with go-bindata staged as a vendored build-time Go dependency.
class Goad < Formula
  desc "AWS Lambda powered, highly distributed, load testing tool built in Go"
  homepage "https://goad.io/"
  url "https://github.com/goadapp/goad.git",
      :tag => "2.0.4",
      :revision => "e015a55faa940cde2bc7b38af65709d52235eaca"

  bottle do
    cellar :any_skip_relocation
    sha256 "24ee4f845d29ac4ff17835ce605a73f9eee51c96493dbe79b53603505cdbacda" => :high_sierra
    sha256 "1e303913ddbcc7d2284f380ffb6351c7eab64f2fdc8f95864343b97486d88f8b" => :sierra
    sha256 "1ac09ed9e1ec55c4309b4e09171abc4907702ab44e713a068ef8549f2dcb0ed3" => :el_capitan
    sha256 "1740f751efcb6a906a0807ec5d0c308bf9c9b77f1942c9d25bfeec75552c0e80" => :yosemite
  end

  depends_on "go" => :build

  go_resource "github.com/jteeuwen/go-bindata" do
    url "https://github.com/jteeuwen/go-bindata.git",
        :revision => "a0ff2567cfb70903282db057e799fd826784d41d"
  end

  def install
    # Lay out a GOPATH workspace and relocate the unpacked source into it.
    ENV["GOPATH"] = buildpath
    dir = buildpath/"src/github.com/goadapp/goad"
    dir.install buildpath.children
    ENV.prepend_create_path "PATH", buildpath/"bin"
    Language::Go.stage_deps resources, buildpath/"src"

    # Build the go-bindata tool required by the project's Makefile.
    cd "src/github.com/jteeuwen/go-bindata/go-bindata" do
      system "go", "install"
    end

    cd dir do
      system "make", "build"
      bin.install "build/goad"
      prefix.install_metafiles
    end
  end

  test do
    system "#{bin}/goad", "--version"
  end
end
goad: update 2.0.4 bottle.
require "language/go"
# Homebrew formula for Goad — bottle-rebuild revision (new bottle hashes,
# yosemite bottle dropped; source unchanged).
class Goad < Formula
  desc "AWS Lambda powered, highly distributed, load testing tool built in Go"
  homepage "https://goad.io/"
  url "https://github.com/goadapp/goad.git",
      :tag => "2.0.4",
      :revision => "e015a55faa940cde2bc7b38af65709d52235eaca"

  bottle do
    cellar :any_skip_relocation
    sha256 "c6992780a4e3c773e30ab9d57a8000618d6da51973224e8f325fe6f1c25cbceb" => :high_sierra
    sha256 "49f467700edf1b3bfc0564562bb55c2dd7ee758449bdd17903242cae6e11e6df" => :sierra
    sha256 "884d65d177cc21ff4ba6dc1e9bbd6f11c2ebaa6c77ffeb6a2bd148f3e3b8a926" => :el_capitan
  end

  depends_on "go" => :build

  go_resource "github.com/jteeuwen/go-bindata" do
    url "https://github.com/jteeuwen/go-bindata.git",
        :revision => "a0ff2567cfb70903282db057e799fd826784d41d"
  end

  def install
    # Lay out a GOPATH workspace and relocate the unpacked source into it.
    ENV["GOPATH"] = buildpath
    dir = buildpath/"src/github.com/goadapp/goad"
    dir.install buildpath.children
    ENV.prepend_create_path "PATH", buildpath/"bin"
    Language::Go.stage_deps resources, buildpath/"src"

    # Build the go-bindata tool required by the project's Makefile.
    cd "src/github.com/jteeuwen/go-bindata/go-bindata" do
      system "go", "install"
    end

    cd dir do
      system "make", "build"
      bin.install "build/goad"
      prefix.install_metafiles
    end
  end

  test do
    system "#{bin}/goad", "--version"
  end
end
|
#
# Cookbook Name:: runit
# Provider:: service
#
# Author:: Joshua Timberman <jtimberman@chef.io>
# Author:: Sean OMeara <sean@chef.io>
# Copyright 2011-2015, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
  class Provider
    # LWRP provider backing the runit_service resource: materializes the
    # sv directory tree (run/log/env/check/finish/control scripts), wires
    # LSB-init compatibility, manages the runit "down" file, and exposes
    # runit's sv signals as actions. Resource declaration order inside
    # :create matters (directories before the files inside them).
    class RunitService < Chef::Provider::LWRPBase
      use_inline_resources if defined?(use_inline_resources)

      def whyrun_supported?
        true
      end

      # Mix in helpers from libraries/helpers.rb
      include RunitCookbook::Helpers

      # actions
      action :create do
        # sv_templates
        if new_resource.sv_templates
          directory sv_dir_name do
            owner new_resource.owner
            group new_resource.group
            mode '0755'
            recursive true
            action :create
          end

          template "#{sv_dir_name}/run" do
            owner new_resource.owner
            group new_resource.group
            source "sv-#{new_resource.run_template_name}-run.erb"
            cookbook template_cookbook
            mode '0755'
            variables(options: new_resource.options)
            action :create
          end

          # log stuff
          if new_resource.log
            directory "#{sv_dir_name}/log" do
              owner new_resource.owner
              group new_resource.group
              recursive true
              action :create
            end

            directory "#{sv_dir_name}/log/main" do
              owner new_resource.owner
              group new_resource.group
              mode '0755'
              recursive true
              action :create
            end

            # default_logger writes a generated svlogd run script; otherwise
            # the log/run script comes from a cookbook template.
            if new_resource.default_logger
              directory "/var/log/#{new_resource.service_name}" do
                owner new_resource.owner
                group new_resource.group
                mode '00755'
                recursive true
                action :create
              end

              link "/var/log/#{new_resource.service_name}/config" do
                to "#{sv_dir_name}/log/config"
              end

              file "#{sv_dir_name}/log/run" do
                content default_logger_content
                owner new_resource.owner
                group new_resource.group
                mode '00755'
                action :create
              end
            else
              template "#{sv_dir_name}/log/run" do
                owner new_resource.owner
                group new_resource.group
                mode '00755'
                source "sv-#{new_resource.log_template_name}-log-run.erb"
                cookbook template_cookbook
                variables(options: new_resource.options)
                action :create
              end
            end

            template "#{sv_dir_name}/log/config" do
              owner new_resource.owner
              group new_resource.group
              mode '00644'
              cookbook 'runit'
              source 'log-config.erb'
              variables(config: new_resource)
              action :create
            end
          end

          # environment stuff
          directory "#{sv_dir_name}/env" do
            owner new_resource.owner
            group new_resource.group
            mode '00755'
            action :create
          end

          # One env/<VAR> file per entry in the resource's env hash (chpst -e).
          new_resource.env.map do |var, value|
            file "#{sv_dir_name}/env/#{var}" do
              owner new_resource.owner
              group new_resource.group
              content value
              mode 00640
              action :create
            end
          end

          # Remove env files no longer present in the resource's env hash.
          ruby_block 'zap extra env files' do
            block { zap_extra_env_files }
            only_if { extra_env_files? }
            action :run
          end

          if new_resource.check
            template "#{sv_dir_name}/check" do
              owner new_resource.owner
              group new_resource.group
              mode '00755'
              cookbook template_cookbook
              source "sv-#{new_resource.check_script_template_name}-check.erb"
              variables(options: new_resource.options)
              action :create
            end
          end

          if new_resource.finish
            template "#{sv_dir_name}/finish" do
              owner new_resource.owner
              group new_resource.group
              mode '00755'
              source "sv-#{new_resource.finish_script_template_name}-finish.erb"
              cookbook template_cookbook
              variables(options: new_resource.options) if new_resource.options.respond_to?(:has_key?)
              action :create
            end
          end

          directory "#{sv_dir_name}/control" do
            owner new_resource.owner
            group new_resource.group
            mode '00755'
            action :create
          end

          # One control/<signal> script per custom-control signal.
          new_resource.control.map do |signal|
            template "#{sv_dir_name}/control/#{signal}" do
              owner new_resource.owner
              group new_resource.group
              mode '0755'
              source "sv-#{new_resource.control_template_names[signal]}-#{signal}.erb"
              cookbook template_cookbook
              variables(options: new_resource.options)
              action :create
            end
          end

          # lsb_init: Debian gets a real init script; everywhere else a
          # symlink to the sv binary suffices.
          if node['platform'] == 'debian'
            ruby_block "unlink #{parsed_lsb_init_dir}/#{new_resource.service_name}" do
              block { ::File.unlink("#{parsed_lsb_init_dir}/#{new_resource.service_name}") }
              only_if { ::File.symlink?("#{parsed_lsb_init_dir}/#{new_resource.service_name}") }
            end

            template "#{parsed_lsb_init_dir}/#{new_resource.service_name}" do
              owner 'root'
              group 'root'
              mode '00755'
              cookbook 'runit'
              source 'init.d.erb'
              variables(
                name: new_resource.service_name,
                sv_bin: new_resource.sv_bin,
                init_dir: ::File.join(parsed_lsb_init_dir, '')
              )
              action :create
            end
          else
            link "#{parsed_lsb_init_dir}/#{new_resource.service_name}" do
              to sv_bin
              action :create
            end
          end

          # Create/Delete service down file
          # To prevent unexpected behavior, require users to explicitly set
          # delete_downfile to remove any down file that may already exist
          df_action = :nothing
          if new_resource.start_down
            df_action = :create
          elsif new_resource.delete_downfile
            df_action = :delete
          end

          file down_file do
            mode 00644
            backup false
            content '# File created and managed by chef!'
            action df_action
          end
        end
      end

      action :disable do
        ruby_block "disable #{new_resource.service_name}" do
          block { disable_service }
          only_if { enabled? }
        end
      end

      action :enable do
        # FIXME: remove action_create in next major version
        action_create

        # Linking the sv dir into the service dir is what activates the
        # service under runsvdir.
        link "#{service_dir_name}" do
          to sv_dir_name
          action :create
        end

        # FIXME: replace me
        # ruby_block 'wait_for_service' do
        #   block wait_for_service
        # end
      end

      # signals
      [:down, :hup, :int, :term, :kill, :quit].each do |signal|
        action signal do
          runit_send_signal(signal)
        end
      end

      [:up, :once, :cont].each do |signal|
        action signal do
          runit_send_signal(signal)
        end
      end

      action :usr1 do
        runit_send_signal(1, :usr1)
      end

      action :usr2 do
        runit_send_signal(2, :usr2)
      end

      action :nothing do
      end

      action :restart do
        restart_service
      end

      action :start do
        start_service
      end

      action :stop do
        stop_service
      end

      action :reload do
        reload_service
      end

      action :status do
        running?
      end
    end
  end
end
enable action should wait for service "ok" socket
#
# Cookbook Name:: runit
# Provider:: service
#
# Author:: Joshua Timberman <jtimberman@chef.io>
# Author:: Sean OMeara <sean@chef.io>
# Copyright 2011-2015, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
  class Provider
    # LWRP provider backing the runit_service resource (revision that makes
    # :enable block until runit opens the service's supervise/ok pipe, so
    # subsequent sv commands don't race the supervisor startup).
    class RunitService < Chef::Provider::LWRPBase
      use_inline_resources if defined?(use_inline_resources)

      def whyrun_supported?
        true
      end

      # Mix in helpers from libraries/helpers.rb
      include RunitCookbook::Helpers

      # actions
      action :create do
        # sv_templates
        if new_resource.sv_templates
          directory sv_dir_name do
            owner new_resource.owner
            group new_resource.group
            mode '0755'
            recursive true
            action :create
          end

          template "#{sv_dir_name}/run" do
            owner new_resource.owner
            group new_resource.group
            source "sv-#{new_resource.run_template_name}-run.erb"
            cookbook template_cookbook
            mode '0755'
            variables(options: new_resource.options)
            action :create
          end

          # log stuff
          if new_resource.log
            directory "#{sv_dir_name}/log" do
              owner new_resource.owner
              group new_resource.group
              recursive true
              action :create
            end

            directory "#{sv_dir_name}/log/main" do
              owner new_resource.owner
              group new_resource.group
              mode '0755'
              recursive true
              action :create
            end

            # default_logger writes a generated svlogd run script; otherwise
            # the log/run script comes from a cookbook template.
            if new_resource.default_logger
              directory "/var/log/#{new_resource.service_name}" do
                owner new_resource.owner
                group new_resource.group
                mode '00755'
                recursive true
                action :create
              end

              link "/var/log/#{new_resource.service_name}/config" do
                to "#{sv_dir_name}/log/config"
              end

              file "#{sv_dir_name}/log/run" do
                content default_logger_content
                owner new_resource.owner
                group new_resource.group
                mode '00755'
                action :create
              end
            else
              template "#{sv_dir_name}/log/run" do
                owner new_resource.owner
                group new_resource.group
                mode '00755'
                source "sv-#{new_resource.log_template_name}-log-run.erb"
                cookbook template_cookbook
                variables(options: new_resource.options)
                action :create
              end
            end

            template "#{sv_dir_name}/log/config" do
              owner new_resource.owner
              group new_resource.group
              mode '00644'
              cookbook 'runit'
              source 'log-config.erb'
              variables(config: new_resource)
              action :create
            end
          end

          # environment stuff
          directory "#{sv_dir_name}/env" do
            owner new_resource.owner
            group new_resource.group
            mode '00755'
            action :create
          end

          # One env/<VAR> file per entry in the resource's env hash (chpst -e).
          new_resource.env.map do |var, value|
            file "#{sv_dir_name}/env/#{var}" do
              owner new_resource.owner
              group new_resource.group
              content value
              mode 00640
              action :create
            end
          end

          # Remove env files no longer present in the resource's env hash.
          ruby_block 'zap extra env files' do
            block { zap_extra_env_files }
            only_if { extra_env_files? }
            action :run
          end

          if new_resource.check
            template "#{sv_dir_name}/check" do
              owner new_resource.owner
              group new_resource.group
              mode '00755'
              cookbook template_cookbook
              source "sv-#{new_resource.check_script_template_name}-check.erb"
              variables(options: new_resource.options)
              action :create
            end
          end

          if new_resource.finish
            template "#{sv_dir_name}/finish" do
              owner new_resource.owner
              group new_resource.group
              mode '00755'
              source "sv-#{new_resource.finish_script_template_name}-finish.erb"
              cookbook template_cookbook
              variables(options: new_resource.options) if new_resource.options.respond_to?(:has_key?)
              action :create
            end
          end

          directory "#{sv_dir_name}/control" do
            owner new_resource.owner
            group new_resource.group
            mode '00755'
            action :create
          end

          # One control/<signal> script per custom-control signal.
          new_resource.control.map do |signal|
            template "#{sv_dir_name}/control/#{signal}" do
              owner new_resource.owner
              group new_resource.group
              mode '0755'
              source "sv-#{new_resource.control_template_names[signal]}-#{signal}.erb"
              cookbook template_cookbook
              variables(options: new_resource.options)
              action :create
            end
          end

          # lsb_init: Debian gets a real init script; everywhere else a
          # symlink to the sv binary suffices.
          if node['platform'] == 'debian'
            ruby_block "unlink #{parsed_lsb_init_dir}/#{new_resource.service_name}" do
              block { ::File.unlink("#{parsed_lsb_init_dir}/#{new_resource.service_name}") }
              only_if { ::File.symlink?("#{parsed_lsb_init_dir}/#{new_resource.service_name}") }
            end

            template "#{parsed_lsb_init_dir}/#{new_resource.service_name}" do
              owner 'root'
              group 'root'
              mode '00755'
              cookbook 'runit'
              source 'init.d.erb'
              variables(
                name: new_resource.service_name,
                sv_bin: new_resource.sv_bin,
                init_dir: ::File.join(parsed_lsb_init_dir, '')
              )
              action :create
            end
          else
            link "#{parsed_lsb_init_dir}/#{new_resource.service_name}" do
              to sv_bin
              action :create
            end
          end

          # Create/Delete service down file
          # To prevent unexpected behavior, require users to explicitly set
          # delete_downfile to remove any down file that may already exist
          df_action = :nothing
          if new_resource.start_down
            df_action = :create
          elsif new_resource.delete_downfile
            df_action = :delete
          end

          file down_file do
            mode 00644
            backup false
            content '# File created and managed by chef!'
            action df_action
          end
        end
      end

      action :disable do
        ruby_block "disable #{new_resource.service_name}" do
          block { disable_service }
          only_if { enabled? }
        end
      end

      action :enable do
        # FIXME: remove action_create in next major version
        action_create

        # Linking the sv dir into the service dir is what activates the
        # service under runsvdir.
        link "#{service_dir_name}" do
          to sv_dir_name
          action :create
        end

        # Block until runsv has created the supervise/ok named pipe(s),
        # i.e. the service (and its logger, if any) is actually supervised.
        # NOTE(review): this polls forever if the supervisor never starts.
        ruby_block 'wait_for_service' do
          block do
            sleep 1 until ::FileTest.pipe?("#{service_dir_name}/supervise/ok")

            if new_resource.log
              sleep 1 until ::FileTest.pipe?("#{service_dir_name}/log/supervise/ok")
            end
          end
          action :run
        end
      end

      # signals
      [:down, :hup, :int, :term, :kill, :quit].each do |signal|
        action signal do
          runit_send_signal(signal)
        end
      end

      [:up, :once, :cont].each do |signal|
        action signal do
          runit_send_signal(signal)
        end
      end

      action :usr1 do
        runit_send_signal(1, :usr1)
      end

      action :usr2 do
        runit_send_signal(2, :usr2)
      end

      action :nothing do
      end

      action :restart do
        restart_service
      end

      action :start do
        start_service
      end

      action :stop do
        stop_service
      end

      action :reload do
        reload_service
      end

      action :status do
        running?
      end
    end
  end
end
|
module Gedspec
  module Gedcom
    # Event-driven GEDCOM parser. Callers register start/end callbacks for
    # slash-joined tag paths (e.g. "head/sour") via #tag_start / #tag_end;
    # #parse walks the level-numbered lines, maintaining a context stack and
    # firing the matching callback as each tag scope opens and closes.
    class StackParser
      # gedcom_structure: line-enumerable GEDCOM input (each line is
      # "<level> <tag> [rest]"); may be nil when the parser is only used
      # for callback registration. Extra args are accepted for
      # subclass-compatibility and ignored.
      def initialize(gedcom_structure = nil, *args)
        @start_callbacks = {}
        @end_callbacks = {}
        @gedcom_structure = gedcom_structure
      end

      # Register +callback_method+ (Symbol, Proc or Method) to fire when the
      # tag path +context+ opens; +params+ is passed through to the callback.
      def tag_start(context, callback_method, params = nil)
        @start_callbacks[context.downcase] = [callback_method, params]
      end

      # Register +callback_method+ to fire when the tag path +context+ closes.
      def tag_end(context, callback_method, params = nil)
        @end_callbacks[context.downcase] = [callback_method, params]
      end

      # Dispatch the registered :start or :end callback for the current
      # context stack, if any. Symbols are sent to self; Procs/Methods are
      # called directly. Unregistered paths are silently ignored.
      def tag_handler(type, context, data)
        tag = context.join('/').downcase
        callback, params = instance_variable_get("@#{type}_callbacks")[tag]
        case callback
        when Symbol
          send(callback, data, params)
        when Proc, Method
          callback.call(data, params)
        end
      end

      # Walk the GEDCOM structure, pushing each tag onto the context stack
      # and popping (firing :end handlers) whenever the level number drops.
      def parse
        context_stack = []
        data_stack = []
        current_level = get_level(@gedcom_structure)
        @gedcom_structure.each do |line|
          level, tag, rest = line.strip.split(' ', 3)
          while level.to_i <= current_level
            tag_handler(:end, context_stack, data_stack.pop)
            context_stack.pop
            current_level -= 1
          end
          # Pointer records put the @XREF@ id before the tag; swap them.
          tag, rest = rest, tag if tag =~ /@.*@/
          context_stack << tag
          data_stack << rest
          current_level = level.to_i
          tag_handler(:start, context_stack, data_stack.last)
        end
      end

      # Level number of the first line of +structure+, or nil when the
      # structure is nil / not scannable (the default-constructed case) or
      # does not start with a digit. Previously raised NoMethodError on nil.
      def get_level(structure)
        return nil unless structure.respond_to?(:scan)
        level = structure.scan(/^\d+/)[0]
        level && level.to_i
      end

      # Generic attribute-updating callback: optionally transforms +data+
      # via params[:proc], then stores it in the ivar named by params[:attr].
      # params[:append] == :cont joins with a newline; :conc concatenates.
      def update_attr(data, params)
        data = params[:proc].call(data) if params[:proc]
        var = instance_variable_get(params[:attr])
        case params[:append]
        when :cont
          if var
            data = var + "\n" + data
          end
        when :conc
          data = (var || "") + data
        end
        instance_variable_set(params[:attr], data)
      end
    end
  end
end
Fix indenting
module Gedspec
  module Gedcom
    # Event-driven GEDCOM parser: clients register callbacks for
    # slash-joined, lowercase tag paths, then #parse streams the
    # level-numbered input and fires :start/:end handlers as tag scopes
    # open and close.
    class StackParser
      def initialize(gedcom_structure = nil, *args)
        @start_callbacks = {}
        @end_callbacks = {}
        @gedcom_structure = gedcom_structure
      end

      # Register a handler fired when the given tag path opens.
      def tag_start(context, callback_method, params = nil)
        @start_callbacks[context.downcase] = [callback_method, params]
      end

      # Register a handler fired when the given tag path closes.
      def tag_end(context, callback_method, params = nil)
        @end_callbacks[context.downcase] = [callback_method, params]
      end

      # Look up and invoke the handler registered for the current context
      # stack under the given type (:start or :end). Symbols are invoked on
      # self; Proc/Method objects are called. Unknown paths do nothing.
      def tag_handler(type, context, data)
        key = context.join('/').downcase
        handler, opts = instance_variable_get("@#{type}_callbacks")[key]
        if handler.is_a?(Symbol)
          send(handler, data, opts)
        elsif handler.is_a?(Proc) || handler.is_a?(Method)
          handler.call(data, opts)
        end
      end

      # Stream the structure line by line, closing scopes (firing :end)
      # whenever the level number falls back to or below the current depth.
      def parse
        contexts = []
        payloads = []
        depth = get_level(@gedcom_structure)
        @gedcom_structure.each do |raw_line|
          lvl, tag, rest = raw_line.strip.split(' ', 3)
          until lvl.to_i > depth
            tag_handler(:end, contexts, payloads.pop)
            contexts.pop
            depth -= 1
          end
          # Pointer records place the @XREF@ id before the tag; swap them.
          tag, rest = rest, tag if tag =~ /@.*@/
          contexts.push(tag)
          payloads.push(rest)
          depth = lvl.to_i
          tag_handler(:start, contexts, payloads.last)
        end
      end

      # Level number of the structure's first line (nil when it does not
      # start with a digit).
      def get_level(structure)
        first_digits = structure.scan(/^\d+/).first
        first_digits && first_digits.to_i
      end

      # Callback helper: optionally transform data via params[:proc], then
      # store it in the ivar named by params[:attr], newline-joining for
      # :cont and concatenating for :conc.
      def update_attr(data, params)
        data = params[:proc].call(data) if params[:proc]
        existing = instance_variable_get(params[:attr])
        if params[:append] == :cont
          data = existing + "\n" + data if existing
        elsif params[:append] == :conc
          data = (existing || "") + data
        end
        instance_variable_set(params[:attr], data)
      end
    end
  end
end
# Homebrew formula for Gosu 1.14.12, a JVM language; built with Maven
# against JDK 1.8 and wrapped so the launcher always uses that JDK.
class Gosu < Formula
  desc "Pragmatic language for the JVM"
  homepage "https://gosu-lang.github.io/"
  url "https://github.com/gosu-lang/gosu-lang/archive/v1.14.12.tar.gz"
  sha256 "d6dec97ca98571ba07059b30c04955c9afc583ffc1b333bdb06ed77bb00c6c0f"
  head "https://github.com/gosu-lang/gosu-lang.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "2ffaea191fee11809de6ea6e34af7c66517b580ec3e734025b0bcfcebaaa705b" => :mojave
    sha256 "761367a27bf9f1370a6d6494e31ee526087c6dcea671280872e5afbf047e9801" => :high_sierra
    sha256 "c332bae630d5048078996f3a6bf538237c39b62343310f777cfb05d8b5aef2d9" => :sierra
  end

  depends_on "maven" => :build
  depends_on :java => "1.8"

  # Keep user-installed extension jars across upgrades.
  skip_clean "libexec/ext"

  def install
    # Resolve and export a JDK 1.8 JAVA_HOME for the Maven build.
    cmd = Language::Java.java_home_cmd("1.8")
    ENV["JAVA_HOME"] = Utils.popen_read(cmd).chomp

    system "mvn", "package"
    libexec.install Dir["gosu/target/gosu-#{version}-full/gosu-#{version}/*"]
    (libexec/"ext").mkpath
    # Wrapper script that forces JAVA_HOME to a 1.8 JDK at runtime.
    (bin/"gosu").write_env_script libexec/"bin/gosu", Language::Java.java_home_env("1.8")
  end

  test do
    (testpath/"test.gsp").write 'print ("burp")'
    assert_equal "burp", shell_output("#{bin}/gosu test.gsp").chomp
  end
end
gosu 1.14.13
Closes #38286.
Signed-off-by: FX Coudert <c329953660db96eae534be5bbf1a735c2baf69b5@gmail.com>
# Homebrew formula for Gosu 1.14.13 (version bump; bottle hashes still
# reference the previous build and are regenerated by CI).
class Gosu < Formula
  desc "Pragmatic language for the JVM"
  homepage "https://gosu-lang.github.io/"
  url "https://github.com/gosu-lang/gosu-lang/archive/v1.14.13.tar.gz"
  sha256 "791c6d423f90c3161b568e3b907bc27802f59f60bb63719845c7c9814e9b3bb4"
  head "https://github.com/gosu-lang/gosu-lang.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "2ffaea191fee11809de6ea6e34af7c66517b580ec3e734025b0bcfcebaaa705b" => :mojave
    sha256 "761367a27bf9f1370a6d6494e31ee526087c6dcea671280872e5afbf047e9801" => :high_sierra
    sha256 "c332bae630d5048078996f3a6bf538237c39b62343310f777cfb05d8b5aef2d9" => :sierra
  end

  depends_on "maven" => :build
  depends_on :java => "1.8"

  # Keep user-installed extension jars across upgrades.
  skip_clean "libexec/ext"

  def install
    # Resolve and export a JDK 1.8 JAVA_HOME for the Maven build.
    cmd = Language::Java.java_home_cmd("1.8")
    ENV["JAVA_HOME"] = Utils.popen_read(cmd).chomp

    system "mvn", "package"
    libexec.install Dir["gosu/target/gosu-#{version}-full/gosu-#{version}/*"]
    (libexec/"ext").mkpath
    # Wrapper script that forces JAVA_HOME to a 1.8 JDK at runtime.
    (bin/"gosu").write_env_script libexec/"bin/gosu", Language::Java.java_home_env("1.8")
  end

  test do
    (testpath/"test.gsp").write 'print ("burp")'
    assert_equal "burp", shell_output("#{bin}/gosu test.gsp").chomp
  end
end
|
require 'winrm'
require 'winrm-elevated'
class MiqWinRM
attr_reader :uri, :username, :password, :hostname, :port, :connection, :executor
def initialize
@port = 5985
@elevated_runner = @executor = nil
require 'uri'
end
def build_uri
URI::HTTP.build(:port => @port, :path => "/wsman", :host => @hostname).to_s
end
def connect(options = {})
validate_options(options)
@uri = build_uri
@connection = raw_connect(@username, @password, @uri)
end
def execute
@executor = @connection.create_executor
end
def elevate
@elevated_runner = WinRM::Elevated::Runner.new(@connection)
end
def run_powershell_script(script)
execute if @executor.nil?
$log.debug "Running powershell script on #{hostname} as #{username}:\n#{script}" unless $log.nil?
@executor.run_powershell_script(script)
rescue WinRM::WinRMAuthorizationError
$log.info "Error Logging In to #{hostname} using user \"#{username}\"" unless $log.nil?
raise
rescue WinRM::WinRMWMIError => error
$log.debug "Error Running Powershell on #{hostname} using user \"#{username}\": #{error}" unless $log.nil?
if error.to_s.include? "This user is allowed a maximum number of "
$log.debug "Re-opening connection and retrying" unless $log.nil?
@executor.close
@executor = nil
@connection = raw_connect(@username, @password, @uri) if @elevated_runner
@elevated_runner = nil
retry
else
raise
end
end
def run_elevated_powershell_script(script)
elevate if @elevated_runner.nil?
$log.debug "Running powershell script elevated on #{hostname} as #{username}:\n#{script}" unless $log.nil?
@elevated_runner.powershell_elevated(script, @username, @password)
rescue WinRM::WinRMAuthorizationError
$log.info "Error Logging In to #{hostname} using user \"#{username}\"" unless $log.nil?
raise
rescue WinRM::WinRMWMIError => error
$log.debug "Error Running Powershell on #{hostname} using user \"#{username}\": #{error}" unless $log.nil?
if error.to_s.include? "This user is allowed a maximum number of "
$log.debug "Re-opening connection and retrying" unless $log.nil?
@connection = raw_connect(@username, @password, @uri)
@elevated_runner = nil
@executor.close if @executor
@executor = nil
retry
else
raise
end
end
private
def validate_options(options)
raise "no Username defined" if options[:user].nil?
raise "no Password defined" if options[:pass].nil?
raise "no Hostname defined" if options[:hostname].nil?
@username = options[:user]
@password = options[:pass]
@hostname = options[:hostname]
@port = options[:port] unless options[:port].nil?
end
def raw_connect(user, pass, uri)
# HACK: WinRM depends on the gssapi gem for encryption purposes.
# The gssapi code outputs the following warning:
# WARNING: Could not load IOV methods. Check your GSSAPI C library for an update
# WARNING: Could not load AEAD methods. Check your GSSAPI C library for an update
# After much googling, this warning is considered benign and can be ignored.
# Please note - the webmock gem depends on gssapi too and prints out the
# above warning when rspec tests are run.
# silence_warnings { require 'winrm' }
WinRM::WinRMWebService.new(uri, :ssl, :user => user, :pass => pass, :disable_sspi => true)
end
end
Reset WinRM connection when WMI fails due to Max Operations Limit
The WinRM gem currently assumes that a Windows server allows 1500
operations per connection, and resets the connection just before hitting
the limit.
Unfortunately, this limit is configurable and a WMI error may be returned
by any operation. Issue https://github.com/WinRb/WinRM/issues/197
tracks this problem. The gem owner will eventually fix this issue by watching
for the error and simply resetting the connection when it occurs, but he
won't be doing so until version 2 of the gem - which is a while off.
We are propagating the fix up a level to the MiqWinRM wrapper class here.
require 'winrm'
require 'winrm-elevated'
class MiqWinRM
WMI_RETRIES = 2
attr_reader :uri, :username, :password, :hostname, :port, :connection, :executor
def initialize
@port = 5985
@elevated_runner = @executor = nil
require 'uri'
end
def build_uri
URI::HTTP.build(:port => @port, :path => "/wsman", :host => @hostname).to_s
end
def connect(options = {})
validate_options(options)
@uri = build_uri
@connection = raw_connect(@username, @password, @uri)
end
def execute
@executor = @connection.create_executor
end
def elevate
@elevated_runner = WinRM::Elevated::Runner.new(@connection)
end
def run_powershell_script(script)
wmi_error_retries = 0
begin
execute if @executor.nil?
$log.debug "Running powershell script on #{hostname} as #{username}:\n#{script}" if $log
@executor.run_powershell_script(script)
rescue WinRM::WinRMAuthorizationError
$log.info "Error Logging In to #{hostname} using user \"#{username}\"" if $log
raise
rescue WinRM::WinRMWMIError => error
$log.debug "Error Running Powershell on #{hostname} using user \"#{username}\": #{error}" if $log
raise if wmi_error_retries > WMI_RETRIES
wmi_error_retries += 1
if error.to_s.include? "This user is allowed a maximum number of "
$log.debug "Re-opening connection and retrying" if $log
@executor.close
@executor = nil
@connection = raw_connect(@username, @password, @uri) if @elevated_runner
@elevated_runner = nil
retry
else
raise
end
end
end
# Run a PowerShell script with elevated privileges, creating the
# elevated runner on demand.
#
# Mirrors run_powershell_script's quota handling: a WMI error whose
# text shows the per-user remote-shell quota was hit is retried up to
# WMI_RETRIES times after re-opening the connection and dropping both
# the elevated runner and any plain executor. Authorization failures
# and other WMI errors are logged and re-raised.
def run_elevated_powershell_script(script)
  wmi_error_retries = 0
  begin
    elevate if @elevated_runner.nil?
    $log.debug "Running powershell script elevated on #{hostname} as #{username}:\n#{script}" if $log
    @elevated_runner.powershell_elevated(script, @username, @password)
  rescue WinRM::WinRMAuthorizationError
    $log.info "Error Logging In to #{hostname} using user \"#{username}\"" if $log
    raise
  rescue WinRM::WinRMWMIError => error
    $log.debug "Error Running Powershell on #{hostname} using user \"#{username}\": #{error}" if $log
    raise if wmi_error_retries > WMI_RETRIES
    wmi_error_retries += 1
    if error.to_s.include? "This user is allowed a maximum number of "
      $log.debug "Re-opening connection and retrying" if $log
      # The elevated runner wraps the connection, so a fresh connection
      # is always needed here (unlike the non-elevated path).
      @connection = raw_connect(@username, @password, @uri)
      @elevated_runner = nil
      @executor.close if @executor
      @executor = nil
      retry
    else
      raise
    end
  end
end
private
# Ensure the mandatory connection options are present and store them
# in instance variables. Raises RuntimeError naming the first missing
# option (:user, :pass, :hostname, checked in that order); :port is
# optional and only stored when supplied.
def validate_options(options)
  { :user => "Username", :pass => "Password", :hostname => "Hostname" }.each do |key, label|
    raise "no #{label} defined" if options[key].nil?
  end
  @username = options[:user]
  @password = options[:pass]
  @hostname = options[:hostname]
  @port = options[:port] unless options[:port].nil?
end
# Build the underlying WinRM web-service client for the given
# credentials and WS-Man endpoint (:ssl transport, SSPI disabled).
def raw_connect(user, pass, uri)
  # HACK: WinRM depends on the gssapi gem for encryption purposes.
  # The gssapi code outputs the following warning:
  #   WARNING: Could not load IOV methods. Check your GSSAPI C library for an update
  #   WARNING: Could not load AEAD methods. Check your GSSAPI C library for an update
  # After much googling, this warning is considered benign and can be ignored.
  # Please note - the webmock gem depends on gssapi too and prints out the
  # above warning when rspec tests are run.
  # silence_warnings { require 'winrm' }
  WinRM::WinRMWebService.new(uri, :ssl, :user => user, :pass => pass, :disable_sspi => true)
end
end
|
# Homebrew formula for govc, the vSphere CLI from the govmomi project.
class Govc < Formula
  desc "Command-line tool for VMware vSphere"
  homepage "https://github.com/vmware/govmomi/tree/master/govc"
  url "https://github.com/vmware/govmomi/archive/v0.22.2.tar.gz"
  sha256 "738c294440cb7124035c3eab2f0934671dfbafc75c85f0ebfd48fcc584e7d8ac"

  bottle do
    cellar :any_skip_relocation
    sha256 "f4ad630f17943663e243439aacbcc22f7d4f69a1a7e13216fd5aa83fae84b154" => :catalina
    sha256 "97bea8d63ad8b1c0bc90004275fa503d03e7cb6cbb619bbeba7aba6a98b3cf26" => :mojave
    sha256 "397463704f1aa119b226ab83e877c23b66b9c88e60d126ddc3a8834770d58e9c" => :high_sierra
  end

  depends_on "go" => :build

  # Build only the govc subdirectory of the govmomi source tree.
  def install
    system "go", "build", "-o", "#{bin}/#{name}", "./#{name}"
  end

  test do
    # `govc env -u=foo` echoes the configured endpoint back as GOVC_URL.
    assert_match "GOVC_URL=foo", shell_output("#{bin}/#{name} env -u=foo")
  end
end
govc: update 0.22.2 bottle.
# Homebrew formula for govc, the vSphere CLI from the govmomi project
# (variant with an added Linux bottle).
class Govc < Formula
  desc "Command-line tool for VMware vSphere"
  homepage "https://github.com/vmware/govmomi/tree/master/govc"
  url "https://github.com/vmware/govmomi/archive/v0.22.2.tar.gz"
  sha256 "738c294440cb7124035c3eab2f0934671dfbafc75c85f0ebfd48fcc584e7d8ac"

  bottle do
    cellar :any_skip_relocation
    sha256 "f4ad630f17943663e243439aacbcc22f7d4f69a1a7e13216fd5aa83fae84b154" => :catalina
    sha256 "97bea8d63ad8b1c0bc90004275fa503d03e7cb6cbb619bbeba7aba6a98b3cf26" => :mojave
    sha256 "397463704f1aa119b226ab83e877c23b66b9c88e60d126ddc3a8834770d58e9c" => :high_sierra
    sha256 "4e7ee59352e5fdf648ae6346a37f6b2accf26a303a0abdd86fa0df0d9f274db0" => :x86_64_linux
  end

  depends_on "go" => :build

  # Build only the govc subdirectory of the govmomi source tree.
  def install
    system "go", "build", "-o", "#{bin}/#{name}", "./#{name}"
  end

  test do
    # `govc env -u=foo` echoes the configured endpoint back as GOVC_URL.
    assert_match "GOVC_URL=foo", shell_output("#{bin}/#{name} env -u=foo")
  end
end
|
# Load the introspection data for GtkListStore before reopening it.
Gtk.load_class :ListStore

module Gtk
  # Overrides for GtkListStore
  class ListStore
    # Set up the introspected constructor/setter, then alias them to
    # Ruby-friendly names. NOTE(review): setup_method and the
    # initializev name come from the surrounding introspection
    # framework, which is not visible here.
    setup_method :newv
    setup_method :set_valuesv
    alias_method :initialize, :initializev
    alias_method :set, :set_valuesv
  end
end
Correctly set up instance method
(This is probably not really needed.)
# Load the introspection data for GtkListStore before reopening it.
Gtk.load_class :ListStore

module Gtk
  # Overrides for GtkListStore
  class ListStore
    # Set up the introspected constructor (class method) and setter
    # (instance method), then alias them to Ruby-friendly names.
    # NOTE(review): setup_method / setup_instance_method and the
    # initializev name come from the surrounding introspection
    # framework, which is not visible here.
    setup_method :newv
    setup_instance_method :set_valuesv
    alias_method :initialize, :initializev
    alias_method :set, :set_valuesv
  end
end
|
# Homebrew formula for grip 4.3.2, a local GitHub-flavored Markdown
# previewer. Python dependencies are vendored into libexec.
class Grip < Formula
  desc "GitHub Markdown previewer"
  homepage "https://github.com/joeyespo/grip"
  url "https://github.com/joeyespo/grip/archive/v4.3.2.tar.gz"
  sha256 "c39c91764e1674718e185f7dec12b70347ca5ba845eeb67e996a9e9fd8710bbe"

  bottle do
    cellar :any_skip_relocation
    sha256 "9afb02161a9c4b7762c5ec0af1a569b0510a84bdca3be8815818a65491d59b3c" => :high_sierra
    sha256 "963f45c38b82491a65a25701d9a6fd5ac455dc79e26a2096391d87c4d9906d17" => :sierra
    sha256 "4efc8cb0c63c7e7a6f0d5a48a918085c9500aea5fa722846bfdd5d4ddd25b31b" => :el_capitan
    sha256 "c88f5f49a2d9abf4ec1ce943dd100b74f18652314c2a02f47ab2668b6b2c1c78" => :yosemite
    sha256 "02829b92c3bfc2c5569c703452912f272d17e949a7e1f93978b891613f1f645a" => :mavericks
  end

  depends_on :python if MacOS.version <= :snow_leopard

  # Vendored Python dependencies (pinned by sha256), staged into
  # libexec/vendor at install time.
  resource "click" do
    url "https://files.pythonhosted.org/packages/7a/00/c14926d8232b36b08218067bcd5853caefb4737cda3f0a47437151344792/click-6.6.tar.gz"
    sha256 "cc6a19da8ebff6e7074f731447ef7e112bd23adf3de5c597cf9989f2fd8defe9"
  end

  resource "docopt" do
    url "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz"
    sha256 "49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
  end

  resource "Flask" do
    url "https://files.pythonhosted.org/packages/55/8a/78e165d30f0c8bb5d57c429a30ee5749825ed461ad6c959688872643ffb3/Flask-0.11.1.tar.gz"
    sha256 "b4713f2bfb9ebc2966b8a49903ae0d3984781d5c878591cf2f7b484d28756b0e"
  end

  resource "itsdangerous" do
    url "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz"
    sha256 "cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519"
  end

  resource "Jinja2" do
    url "https://files.pythonhosted.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz"
    sha256 "bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4"
  end

  resource "Markdown" do
    url "https://files.pythonhosted.org/packages/9b/53/4492f2888408a2462fd7f364028b6c708f3ecaa52a028587d7dd729f40b4/Markdown-2.6.6.tar.gz"
    sha256 "9a292bb40d6d29abac8024887bcfc1159d7a32dc1d6f1f6e8d6d8e293666c504"
  end

  resource "MarkupSafe" do
    url "https://files.pythonhosted.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz"
    sha256 "a4ec1aff59b95a14b45eb2e23761a0179e98319da5a7eb76b56ea8cdc7b871c3"
  end

  resource "path-and-address" do
    url "https://files.pythonhosted.org/packages/2b/b5/749fab14d9e84257f3b0583eedb54e013422b6c240491a4ae48d9ea5e44f/path-and-address-2.0.1.zip"
    sha256 "e96363d982b3a2de8531f4cd5f086b51d0248b58527227d43cf5014d045371b7"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz"
    sha256 "88e4c8a91b2af5962bfa5ea2447ec6dd357018e86e94c7d14bd8cacbc5b55d81"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/49/6f/183063f01aae1e025cf0130772b55848750a2f3a89bfa11b385b35d7329d/requests-2.10.0.tar.gz"
    sha256 "63f1815788157130cee16a933b2ee184038e975f0017306d723ac326b5525b54"
  end

  resource "Werkzeug" do
    url "https://files.pythonhosted.org/packages/b7/7f/44d3cfe5a12ba002b253f6985a4477edfa66da53787a2a838a40f6415263/Werkzeug-0.11.10.tar.gz"
    sha256 "cc64dafbacc716cdd42503cf6c44cb5a35576443d82f29f6829e5c49264aeeee"
  end

  # Install every vendored resource into libexec/vendor, then grip
  # itself into libexec, wrapping the scripts with the right PYTHONPATH.
  def install
    ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
    resources.each do |r|
      r.stage do
        system "python", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end
    ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.7/site-packages"
    system "python", *Language::Python.setup_install_args(libexec)
    bin.install Dir[libexec/"bin/*"]
    bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
  end

  test do
    # Render Markdown from stdin to HTML and check the bold text.
    assert_match "<strong>Homebrew</strong> is awesome",
      pipe_output("#{bin}/grip - --export -", "**Homebrew** is awesome.")
  end
end
grip 4.4.0
Closes #18497.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula for grip 4.4.0, a local GitHub-flavored Markdown
# previewer. Python dependencies are vendored into libexec.
class Grip < Formula
  desc "GitHub Markdown previewer"
  homepage "https://github.com/joeyespo/grip"
  url "https://files.pythonhosted.org/packages/f2/5e/439b9af338a4cd4c8e7853c4635f92b7407e723b35a5bcb63f851b2997fb/grip-4.4.0.tar.gz"
  sha256 "c1de49a8257ad3acb5363cf82220c25856d038257d30a94ed362ed6fa6f00e90"

  bottle do
    cellar :any_skip_relocation
    sha256 "9afb02161a9c4b7762c5ec0af1a569b0510a84bdca3be8815818a65491d59b3c" => :high_sierra
    sha256 "963f45c38b82491a65a25701d9a6fd5ac455dc79e26a2096391d87c4d9906d17" => :sierra
    sha256 "4efc8cb0c63c7e7a6f0d5a48a918085c9500aea5fa722846bfdd5d4ddd25b31b" => :el_capitan
    sha256 "c88f5f49a2d9abf4ec1ce943dd100b74f18652314c2a02f47ab2668b6b2c1c78" => :yosemite
    sha256 "02829b92c3bfc2c5569c703452912f272d17e949a7e1f93978b891613f1f645a" => :mavericks
  end

  depends_on :python if MacOS.version <= :snow_leopard

  # Vendored Python dependencies (pinned by sha256), staged into
  # libexec/vendor at install time.
  resource "certifi" do
    url "https://files.pythonhosted.org/packages/20/d0/3f7a84b0c5b89e94abbd073a5f00c7176089f526edb056686751d5064cbd/certifi-2017.7.27.1.tar.gz"
    sha256 "40523d2efb60523e113b44602298f0960e900388cf3bb6043f645cf57ea9e3f5"
  end

  resource "chardet" do
    url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
    sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
  end

  resource "click" do
    url "https://files.pythonhosted.org/packages/95/d9/c3336b6b5711c3ab9d1d3a80f1a3e2afeb9d8c02a7166462f6cc96570897/click-6.7.tar.gz"
    sha256 "f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
  end

  resource "docopt" do
    url "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz"
    sha256 "49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
  end

  resource "Flask" do
    url "https://files.pythonhosted.org/packages/eb/12/1c7bd06fcbd08ba544f25bf2c6612e305a70ea51ca0eda8007344ec3f123/Flask-0.12.2.tar.gz"
    sha256 "49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/f4/bd/0467d62790828c23c47fc1dfa1b1f052b24efdf5290f071c7a91d0d82fd3/idna-2.6.tar.gz"
    sha256 "2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f"
  end

  resource "itsdangerous" do
    url "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz"
    sha256 "cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519"
  end

  resource "Jinja2" do
    url "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz"
    sha256 "ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff"
  end

  resource "Markdown" do
    url "https://files.pythonhosted.org/packages/29/82/dfe242bcfd9eec0e7bf93a80a8f8d8515a95b980c44f5c0b45606397a423/Markdown-2.6.9.tar.gz"
    sha256 "73af797238b95768b3a9b6fe6270e250e5c09d988b8e5b223fd5efa4e06faf81"
  end

  resource "MarkupSafe" do
    url "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz"
    sha256 "a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
  end

  resource "path-and-address" do
    url "https://files.pythonhosted.org/packages/2b/b5/749fab14d9e84257f3b0583eedb54e013422b6c240491a4ae48d9ea5e44f/path-and-address-2.0.1.zip"
    sha256 "e96363d982b3a2de8531f4cd5f086b51d0248b58527227d43cf5014d045371b7"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz"
    sha256 "dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/b0/e1/eab4fc3752e3d240468a8c0b284607899d2fbfb236a56b7377a329aa8d09/requests-2.18.4.tar.gz"
    sha256 "9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
  end

  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/ee/11/7c59620aceedcc1ef65e156cc5ce5a24ef87be4107c2b74458464e437a5d/urllib3-1.22.tar.gz"
    sha256 "cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
  end

  resource "Werkzeug" do
    url "https://files.pythonhosted.org/packages/56/41/c095a77eb2dd69bf278dd664a97d3416af04e9ba1a00b8c138f772741d31/Werkzeug-0.12.2.tar.gz"
    sha256 "903a7b87b74635244548b30d30db4c8947fe64c5198f58899ddcd3a13c23bb26"
  end

  # Install every vendored resource into libexec/vendor, then grip
  # itself into libexec, wrapping the scripts with the right PYTHONPATH.
  def install
    ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
    resources.each do |r|
      r.stage do
        system "python", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end
    ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.7/site-packages"
    system "python", *Language::Python.setup_install_args(libexec)
    bin.install Dir[libexec/"bin/*"]
    bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
  end

  test do
    # Render Markdown from stdin to HTML and check the bold text.
    assert_match "<strong>Homebrew</strong> is awesome",
      pipe_output("#{bin}/grip - --export -", "**Homebrew** is awesome.")
  end
end
|
require 'rinku'

module GitHub::HTML
  # HTML Filter for auto_linking urls in HTML.
  class AutolinkFilter < Filter
    # Autolink bare URLs unless the pipeline context disables it via
    # context[:autolink] == false. The word array names tags whose
    # contents Rinku must leave alone (existing links, code, etc.).
    def call
      return html if context[:autolink] == false
      Rinku.auto_link(html, :urls, nil, %w[a script kbd pre code])
    end
  end
end
Allow short-URL (intranet) autolinks in Enterprise
Fixes #4045
require 'rinku'

module GitHub::HTML
  # HTML Filter for auto_linking urls in HTML.
  class AutolinkFilter < Filter
    # Autolink bare URLs unless the pipeline context disables it via
    # context[:autolink] == false. On Enterprise installs, the
    # AUTOLINK_SHORT_DOMAINS flag also lets Rinku link intranet-style
    # short hostnames. The word array names tags whose contents Rinku
    # must leave alone (existing links, code, etc.).
    def call
      return html if context[:autolink] == false
      flags = 0
      if GitHub.enterprise?
        flags |= Rinku::AUTOLINK_SHORT_DOMAINS
      end
      Rinku.auto_link(html, :urls, nil, %w[a script kbd pre code], flags)
    end
  end
end
|
# Homebrew formula for gRPC 1.33.2, built with CMake in two passes:
# one to install the libraries, one (tests enabled) to build grpc_cli.
class Grpc < Formula
  desc "Next generation open source RPC library and framework"
  homepage "https://grpc.io/"
  url "https://github.com/grpc/grpc.git",
      tag: "v1.33.2",
      revision: "ee5b762f33a42170144834f5ab7efda9d76c480b",
      shallow: false
  license "Apache-2.0"
  head "https://github.com/grpc/grpc.git"

  livecheck do
    url "https://github.com/grpc/grpc/releases/latest"
    regex(%r{href=.*?/tag/v?(\d+(?:\.\d+)+)["' >]}i)
  end

  bottle do
    cellar :any
    sha256 "42e892a55e3fbbd128f65f48cfe8ae59414c15508c49128525841105fbe2ca5a" => :big_sur
    sha256 "b4336b217349d80961690d4ffeaf498be65855eba290847ae31f5c02f8e8ac4c" => :catalina
    sha256 "ac0adba235aabbea547163407944d441bd8ca30589f1d0778f6f508b67db6de9" => :mojave
    sha256 "762bcf5de8619962cad10d9692d3a6f9950af9c1db61f04c4d402449d0dda818" => :high_sierra
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "cmake" => :build
  depends_on "libtool" => :build
  depends_on "abseil"
  depends_on "c-ares"
  depends_on "gflags"
  depends_on "openssl@1.1"
  depends_on "protobuf"
  depends_on "re2"

  def install
    mkdir "cmake/build" do
      # First pass: build and install the shared libraries against the
      # Homebrew-packaged dependencies.
      args = %w[
        ../..
        -DCMAKE_CXX_STANDARD=17
        -DCMAKE_CXX_STANDARD_REQUIRED=TRUE
        -DBUILD_SHARED_LIBS=ON
        -DgRPC_BUILD_TESTS=OFF
        -DgRPC_INSTALL=ON
        -DgRPC_ABSL_PROVIDER=package
        -DgRPC_CARES_PROVIDER=package
        -DgRPC_PROTOBUF_PROVIDER=package
        -DgRPC_SSL_PROVIDER=package
        -DgRPC_ZLIB_PROVIDER=package
        -DgRPC_RE2_PROVIDER=package
      ] + std_cmake_args
      system "cmake", *args
      system "make", "install"

      # Second pass: reconfigure with tests on so the grpc_cli target
      # (and its test-config library) can be built and installed.
      args = %w[
        ../..
        -DCMAKE_EXE_LINKER_FLAGS=-lgflags
        -DCMAKE_SHARED_LINKER_FLAGS=-lgflags
        -DBUILD_SHARED_LIBS=ON
        -DgRPC_BUILD_TESTS=ON
        -DgRPC_GFLAGS_PROVIDER=package
      ] + std_cmake_args
      system "cmake", *args
      system "make", "grpc_cli"
      bin.install "grpc_cli"
      lib.install Dir["libgrpc++_test_config*.dylib"]
    end
  end

  test do
    # Minimal C smoke test: init and shut down the gRPC core.
    (testpath/"test.cpp").write <<~EOS
      #include <grpc/grpc.h>
      int main() {
        grpc_init();
        grpc_shutdown();
        return GRPC_STATUS_OK;
      }
    EOS
    system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lgrpc", "-o", "test"
    system "./test"
  end
end
grpc: revision for protobuf
# Homebrew formula for gRPC 1.33.2, rebuilt (formula revision 1) for a
# protobuf update. CMake is run in two passes: one to install the
# libraries, one (tests enabled) to build grpc_cli.
class Grpc < Formula
  desc "Next generation open source RPC library and framework"
  homepage "https://grpc.io/"
  url "https://github.com/grpc/grpc.git",
      tag: "v1.33.2",
      revision: "ee5b762f33a42170144834f5ab7efda9d76c480b",
      shallow: false
  license "Apache-2.0"
  revision 1
  head "https://github.com/grpc/grpc.git"

  livecheck do
    url "https://github.com/grpc/grpc/releases/latest"
    regex(%r{href=.*?/tag/v?(\d+(?:\.\d+)+)["' >]}i)
  end

  bottle do
    cellar :any
    sha256 "42e892a55e3fbbd128f65f48cfe8ae59414c15508c49128525841105fbe2ca5a" => :big_sur
    sha256 "b4336b217349d80961690d4ffeaf498be65855eba290847ae31f5c02f8e8ac4c" => :catalina
    sha256 "ac0adba235aabbea547163407944d441bd8ca30589f1d0778f6f508b67db6de9" => :mojave
    sha256 "762bcf5de8619962cad10d9692d3a6f9950af9c1db61f04c4d402449d0dda818" => :high_sierra
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "cmake" => :build
  depends_on "libtool" => :build
  depends_on "abseil"
  depends_on "c-ares"
  depends_on "gflags"
  depends_on "openssl@1.1"
  depends_on "protobuf"
  depends_on "re2"

  def install
    mkdir "cmake/build" do
      # First pass: build and install the shared libraries against the
      # Homebrew-packaged dependencies.
      args = %w[
        ../..
        -DCMAKE_CXX_STANDARD=17
        -DCMAKE_CXX_STANDARD_REQUIRED=TRUE
        -DBUILD_SHARED_LIBS=ON
        -DgRPC_BUILD_TESTS=OFF
        -DgRPC_INSTALL=ON
        -DgRPC_ABSL_PROVIDER=package
        -DgRPC_CARES_PROVIDER=package
        -DgRPC_PROTOBUF_PROVIDER=package
        -DgRPC_SSL_PROVIDER=package
        -DgRPC_ZLIB_PROVIDER=package
        -DgRPC_RE2_PROVIDER=package
      ] + std_cmake_args
      system "cmake", *args
      system "make", "install"

      # Second pass: reconfigure with tests on so the grpc_cli target
      # (and its test-config library) can be built and installed.
      args = %w[
        ../..
        -DCMAKE_EXE_LINKER_FLAGS=-lgflags
        -DCMAKE_SHARED_LINKER_FLAGS=-lgflags
        -DBUILD_SHARED_LIBS=ON
        -DgRPC_BUILD_TESTS=ON
        -DgRPC_GFLAGS_PROVIDER=package
      ] + std_cmake_args
      system "cmake", *args
      system "make", "grpc_cli"
      bin.install "grpc_cli"
      lib.install Dir["libgrpc++_test_config*.dylib"]
    end
  end

  test do
    # Minimal C smoke test: init and shut down the gRPC core.
    (testpath/"test.cpp").write <<~EOS
      #include <grpc/grpc.h>
      int main() {
        grpc_init();
        grpc_shutdown();
        return GRPC_STATUS_OK;
      }
    EOS
    system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lgrpc", "-o", "test"
    system "./test"
  end
end
|
module GoogleCalendarApiV2
  # Shared HTTP response helpers mixed into API client classes.
  module Base
    attr_reader :connection

    private

    # True for 200/201 responses, false otherwise — except 401, which
    # raises GoogleCalendarApiV2::AuthenticationError.
    def success?(response)
      status = response.code.to_i
      return true if [200, 201].include?(status)
      raise GoogleCalendarApiV2::AuthenticationError.new response if status == 401
      false
    end

    # Whether the response is an HTTP redirect.
    def redirect?(response)
      response.is_a? Net::HTTPRedirection
    end
  end
end
Getting rid of base class
|
# Homebrew formula for gRPC 1.14.1, built with the project Makefiles;
# grpc_cli is built separately against a bundled googletest.
class Grpc < Formula
  desc "Next generation open source RPC library and framework"
  homepage "https://www.grpc.io/"
  url "https://github.com/grpc/grpc/archive/v1.14.1.tar.gz"
  sha256 "16f22430210abf92e06626a5a116e114591075e5854ac78f1be8564171658b70"
  head "https://github.com/grpc/grpc.git"

  bottle do
    sha256 "0afc6fa16339917ed4458e0b5c10f943bd4267722793a69286d35b7a237dbf44" => :high_sierra
    sha256 "df1214fdcd443fc05749c1528d3985d4c9fadfc2d55277181fd90ea604bb82fc" => :sierra
    sha256 "21f88c0cbeafd4066acbec151e820a7a7de2662f1fce238ed1d68b26d07d32b3" => :el_capitan
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "c-ares"
  depends_on "openssl"
  depends_on "protobuf"
  depends_on "gflags"

  # googletest sources, required by the grpc_cli build target.
  resource "gtest" do
    url "https://github.com/google/googletest/archive/release-1.8.0.tar.gz"
    sha256 "58a6f4277ca2bc8565222b3bbd58a177609e9c488e8a72649359ba51450db7d8"
  end

  def install
    system "make", "install", "prefix=#{prefix}"
    system "make", "install-plugins", "prefix=#{prefix}"
    # grpc_cli needs googletest unpacked into third_party before building.
    (buildpath/"third_party/googletest").install resource("gtest")
    system "make", "grpc_cli", "prefix=#{prefix}"
    bin.install "bins/opt/grpc_cli"
  end

  test do
    # Minimal C smoke test: init and shut down the gRPC core.
    (testpath/"test.cpp").write <<~EOS
      #include <grpc/grpc.h>
      int main() {
        grpc_init();
        grpc_shutdown();
        return GRPC_STATUS_OK;
      }
    EOS
    system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lgrpc", "-o", "test"
    system "./test"
  end
end
grpc: update 1.14.1 bottle.
# Homebrew formula for gRPC 1.14.1 (updated bottle hashes), built with
# the project Makefiles; grpc_cli is built against a bundled googletest.
class Grpc < Formula
  desc "Next generation open source RPC library and framework"
  homepage "https://www.grpc.io/"
  url "https://github.com/grpc/grpc/archive/v1.14.1.tar.gz"
  sha256 "16f22430210abf92e06626a5a116e114591075e5854ac78f1be8564171658b70"
  head "https://github.com/grpc/grpc.git"

  bottle do
    sha256 "e1466a0ffb702f2ba76e03875701ed7ec88624f55497b5de47f5727cfc2dfa1e" => :high_sierra
    sha256 "ca43a0fdc5e5f4265300cef6f9b1e70449a529a2d050d5e49f53e1852af2d311" => :sierra
    sha256 "2520997527906e3e7f74463d6c6a5cd80db5cad9e4dc840b53ae73cad158b629" => :el_capitan
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "c-ares"
  depends_on "openssl"
  depends_on "protobuf"
  depends_on "gflags"

  # googletest sources, required by the grpc_cli build target.
  resource "gtest" do
    url "https://github.com/google/googletest/archive/release-1.8.0.tar.gz"
    sha256 "58a6f4277ca2bc8565222b3bbd58a177609e9c488e8a72649359ba51450db7d8"
  end

  def install
    system "make", "install", "prefix=#{prefix}"
    system "make", "install-plugins", "prefix=#{prefix}"
    # grpc_cli needs googletest unpacked into third_party before building.
    (buildpath/"third_party/googletest").install resource("gtest")
    system "make", "grpc_cli", "prefix=#{prefix}"
    bin.install "bins/opt/grpc_cli"
  end

  test do
    # Minimal C smoke test: init and shut down the gRPC core.
    (testpath/"test.cpp").write <<~EOS
      #include <grpc/grpc.h>
      int main() {
        grpc_init();
        grpc_shutdown();
        return GRPC_STATUS_OK;
      }
    EOS
    system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lgrpc", "-o", "test"
    system "./test"
  end
end
|
# Author: Hiroshi Ichikawa <http://gimite.net/>
# The license of this source is "New BSD Licence"
require "rubygems"
require "oauth2"
module GoogleDrive
  class OAuth2Fetcher #:nodoc:
    # Adapter exposing an OAuth2 (Faraday) response through the
    # Net::HTTP-style interface (#code, #body, #[]) used by callers.
    class Response
      def initialize(raw_res)
        @raw_res = raw_res
      end

      # HTTP status as a String, mirroring Net::HTTPResponse#code.
      def code
        return @raw_res.status.to_s()
      end

      def body
        return @raw_res.body
      end

      # Header lookup by name.
      def [](name)
        return @raw_res.headers[name]
      end
    end

    # oauth2_token - OAuth2 access token used to sign every request.
    def initialize(oauth2_token)
      @oauth2_token = oauth2_token
    end

    # Perform an HTTP request via the OAuth2 token and wrap the result.
    # GET/DELETE requests carry no body.
    #
    # BUG FIX: the OAuth2 library's request method reads extra headers
    # from the :headers option key; the previous :header key was
    # silently ignored, so extra_header was never sent.
    def request_raw(method, url, data, extra_header, auth)
      if method == :delete || method == :get
        raw_res = @oauth2_token.request(method, url, {:headers => extra_header})
      else
        raw_res = @oauth2_token.request(method, url, {:headers => extra_header, :body => data})
      end
      return Response.new(raw_res)
    end
  end
end
Correct the headers hash key for OAuth2
The OAuth2::Client#request method in the OAuth2 library looks for header
values using the hash key :headers, not :header.
# Author: Hiroshi Ichikawa <http://gimite.net/>
# The license of this source is "New BSD Licence"
require "rubygems"
require "oauth2"
module GoogleDrive
  class OAuth2Fetcher #:nodoc:
    # Adapter exposing an OAuth2 (Faraday) response through the
    # Net::HTTP-style interface (#code, #body, #[]) used by callers.
    class Response
      def initialize(raw_res)
        @raw_res = raw_res
      end

      # HTTP status as a String, mirroring Net::HTTPResponse#code.
      def code
        @raw_res.status.to_s
      end

      def body
        @raw_res.body
      end

      # Header lookup by name.
      def [](name)
        @raw_res.headers[name]
      end
    end

    # oauth2_token - OAuth2 access token used to sign every request.
    def initialize(oauth2_token)
      @oauth2_token = oauth2_token
    end

    # Perform an HTTP request via the OAuth2 token and wrap the result.
    # GET/DELETE requests carry no body.
    def request_raw(method, url, data, extra_header, auth)
      opts = {:headers => extra_header}
      opts[:body] = data unless method == :delete || method == :get
      Response.new(@oauth2_token.request(method, url, opts))
    end
  end
end
|
# Homebrew formula for GTK+ 2.24.28 (revision 3), built with the
# Quartz backend and patched for Freeciv and an OS X 10.11 crash.
class Gtkx < Formula
  desc "GUI toolkit"
  homepage "http://gtk.org/"
  url "https://download.gnome.org/sources/gtk+/2.24/gtk+-2.24.28.tar.xz"
  sha256 "b2c6441e98bc5232e5f9bba6965075dcf580a8726398f7374d39f90b88ed4656"
  revision 3

  bottle do
    sha256 "afac113e6701cf013e0235e0fd91fcfc6659bc75220ca03e408a7a0f38671bb9" => :yosemite
    sha256 "e7daa89de1184b1edc71242fd65b9b608885ebe6c92f5f793af8a46ef5912b28" => :mavericks
    sha256 "17dcc4df1082000447b968c831f657b5f1aa863f32b8397900a38feaa7147db0" => :mountain_lion
  end

  option "with-quartz-relocation", "Build with quartz relocation support"

  depends_on "pkg-config" => :build
  depends_on "gdk-pixbuf"
  depends_on "jasper" => :optional
  depends_on "atk"
  depends_on "pango"
  depends_on "gobject-introspection"
  depends_on "hicolor-icon-theme"

  fails_with :llvm do
    build 2326
    cause "Undefined symbols when linking"
  end

  # Patch to allow Freeciv's gtk2 client to run.
  # See:
  # - https://bugzilla.gnome.org/show_bug.cgi?id=557780
  # - Homebrew/homebrew-games#278
  patch do
    url "https://bug557780.bugzilla-attachments.gnome.org/attachment.cgi?id=306776"
    sha256 "4d7a1fe8d55174dc7f0be0016814668098d38bbec233b05a6c46180e96a159fc"
  end

  # Fix crash on OS X 10.11
  # See: https://bugzilla.gnome.org/show_bug.cgi?id=753992
  patch do
    url "https://bug753992.bugzilla-attachments.gnome.org/attachment.cgi?id=312565"
    sha256 "e2e8d5c236d4de7d5b5fd79a2e90861b281746132a3f96aca6ab0cb780926876"
  end

  def install
    args = ["--disable-dependency-tracking",
            "--disable-silent-rules",
            "--prefix=#{prefix}",
            "--disable-glibtest",
            "--enable-introspection=yes",
            "--with-gdktarget=quartz",
            "--disable-visibility"]
    args << "--enable-quartz-relocation" if build.with?("quartz-relocation")
    system "./configure", *args
    system "make", "install"
  end

  test do
    # Compile a minimal program against the Quartz GTK+ libraries,
    # assembling the include/lib flags by hand from each dependency.
    (testpath/"test.c").write <<-EOS.undent
      #include <gtk/gtk.h>
      int main(int argc, char *argv[]) {
        GtkWidget *label = gtk_label_new("Hello World!");
        return 0;
      }
    EOS
    atk = Formula["atk"]
    cairo = Formula["cairo"]
    fontconfig = Formula["fontconfig"]
    freetype = Formula["freetype"]
    gdk_pixbuf = Formula["gdk-pixbuf"]
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    libpng = Formula["libpng"]
    pango = Formula["pango"]
    pixman = Formula["pixman"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{atk.opt_include}/atk-1.0
      -I#{cairo.opt_include}/cairo
      -I#{fontconfig.opt_include}
      -I#{freetype.opt_include}/freetype2
      -I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/gtk-2.0
      -I#{libpng.opt_include}/libpng16
      -I#{lib}/gtk-2.0/include
      -I#{pango.opt_include}/pango-1.0
      -I#{pixman.opt_include}/pixman-1
      -D_REENTRANT
      -L#{atk.opt_lib}
      -L#{cairo.opt_lib}
      -L#{gdk_pixbuf.opt_lib}
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{lib}
      -L#{pango.opt_lib}
      -latk-1.0
      -lcairo
      -lgdk-quartz-2.0
      -lgdk_pixbuf-2.0
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lgtk-quartz-2.0
      -lintl
      -lpango-1.0
      -lpangocairo-1.0
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
gtk+: update 2.24.28_3 bottle.
# Homebrew formula for GTK+ 2.24.28 (revision 3, updated bottles),
# built with the Quartz backend and patched for Freeciv and an
# OS X 10.11 crash.
class Gtkx < Formula
  desc "GUI toolkit"
  homepage "http://gtk.org/"
  url "https://download.gnome.org/sources/gtk+/2.24/gtk+-2.24.28.tar.xz"
  sha256 "b2c6441e98bc5232e5f9bba6965075dcf580a8726398f7374d39f90b88ed4656"
  revision 3

  bottle do
    sha256 "c50b23ea76ad0379e43a44ac2520e2243c5b2f2aded21c7e82c36c20e6a90e1a" => :el_capitan
    sha256 "72a95671b8b9ba6aaf3e8900f4af6bd9b0b0fcdd6621a200838d3e2622bc7a26" => :yosemite
    sha256 "0754d744caed63c14ce80b5c3895679d1b93dad9832ca6105488eefa809bb7c1" => :mavericks
  end

  option "with-quartz-relocation", "Build with quartz relocation support"

  depends_on "pkg-config" => :build
  depends_on "gdk-pixbuf"
  depends_on "jasper" => :optional
  depends_on "atk"
  depends_on "pango"
  depends_on "gobject-introspection"
  depends_on "hicolor-icon-theme"

  fails_with :llvm do
    build 2326
    cause "Undefined symbols when linking"
  end

  # Patch to allow Freeciv's gtk2 client to run.
  # See:
  # - https://bugzilla.gnome.org/show_bug.cgi?id=557780
  # - Homebrew/homebrew-games#278
  patch do
    url "https://bug557780.bugzilla-attachments.gnome.org/attachment.cgi?id=306776"
    sha256 "4d7a1fe8d55174dc7f0be0016814668098d38bbec233b05a6c46180e96a159fc"
  end

  # Fix crash on OS X 10.11
  # See: https://bugzilla.gnome.org/show_bug.cgi?id=753992
  patch do
    url "https://bug753992.bugzilla-attachments.gnome.org/attachment.cgi?id=312565"
    sha256 "e2e8d5c236d4de7d5b5fd79a2e90861b281746132a3f96aca6ab0cb780926876"
  end

  def install
    args = ["--disable-dependency-tracking",
            "--disable-silent-rules",
            "--prefix=#{prefix}",
            "--disable-glibtest",
            "--enable-introspection=yes",
            "--with-gdktarget=quartz",
            "--disable-visibility"]
    args << "--enable-quartz-relocation" if build.with?("quartz-relocation")
    system "./configure", *args
    system "make", "install"
  end

  test do
    # Compile a minimal program against the Quartz GTK+ libraries,
    # assembling the include/lib flags by hand from each dependency.
    (testpath/"test.c").write <<-EOS.undent
      #include <gtk/gtk.h>
      int main(int argc, char *argv[]) {
        GtkWidget *label = gtk_label_new("Hello World!");
        return 0;
      }
    EOS
    atk = Formula["atk"]
    cairo = Formula["cairo"]
    fontconfig = Formula["fontconfig"]
    freetype = Formula["freetype"]
    gdk_pixbuf = Formula["gdk-pixbuf"]
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    libpng = Formula["libpng"]
    pango = Formula["pango"]
    pixman = Formula["pixman"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{atk.opt_include}/atk-1.0
      -I#{cairo.opt_include}/cairo
      -I#{fontconfig.opt_include}
      -I#{freetype.opt_include}/freetype2
      -I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/gtk-2.0
      -I#{libpng.opt_include}/libpng16
      -I#{lib}/gtk-2.0/include
      -I#{pango.opt_include}/pango-1.0
      -I#{pixman.opt_include}/pixman-1
      -D_REENTRANT
      -L#{atk.opt_lib}
      -L#{cairo.opt_lib}
      -L#{gdk_pixbuf.opt_lib}
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{lib}
      -L#{pango.opt_lib}
      -latk-1.0
      -lcairo
      -lgdk-quartz-2.0
      -lgdk_pixbuf-2.0
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lgtk-quartz-2.0
      -lintl
      -lpango-1.0
      -lpangocairo-1.0
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
|
module Gosu
  module TexturePacker
    # Library version string.
    VERSION = "0.1.7"
  end
end
Bump version to 0.1.8
module Gosu
  module TexturePacker
    # Library version string.
    VERSION = "0.1.8"
  end
end
|
# Homebrew formula for GTK+ 2.24.28 (revision 1), built with the
# Quartz backend.
class Gtkx < Formula
  desc "GUI toolkit"
  homepage "http://gtk.org/"
  url "https://download.gnome.org/sources/gtk+/2.24/gtk+-2.24.28.tar.xz"
  sha256 "b2c6441e98bc5232e5f9bba6965075dcf580a8726398f7374d39f90b88ed4656"
  revision 1

  bottle do
    sha256 "bbc0dc6e82ebed36acfecbd5216a7e05709ce54353fae2e88ae6dc89d02b4c49" => :yosemite
    sha256 "e3ac7c303dcf388bf87835a85a1e143c2957eaada72de82527bb0364196a9d35" => :mavericks
    sha256 "67af28491ac9622e5c19b0d37802c9569e95ab21342b83672de0b6aac98c5c72" => :mountain_lion
  end

  depends_on "pkg-config" => :build
  depends_on "gdk-pixbuf"
  depends_on "jasper" => :optional
  depends_on "atk"
  depends_on "pango"
  depends_on "gobject-introspection"

  # NOTE(review): option declared without a description here; the
  # paired revision of this formula adds one.
  option "with-quartz-relocation"

  fails_with :llvm do
    build 2326
    cause "Undefined symbols when linking"
  end

  def install
    args = ["--disable-dependency-tracking",
            "--disable-silent-rules",
            "--prefix=#{prefix}",
            "--disable-glibtest",
            "--enable-introspection=yes",
            "--with-gdktarget=quartz",
            "--disable-visibility"]
    args << "--enable-quartz-relocation" if build.with?("quartz-relocation")
    system "./configure", *args
    system "make", "install"
  end

  test do
    # Compile a minimal program against the Quartz GTK+ libraries,
    # assembling the include/lib flags by hand from each dependency.
    (testpath/"test.c").write <<-EOS.undent
      #include <gtk/gtk.h>
      int main(int argc, char *argv[]) {
        GtkWidget *label = gtk_label_new("Hello World!");
        return 0;
      }
    EOS
    atk = Formula["atk"]
    cairo = Formula["cairo"]
    fontconfig = Formula["fontconfig"]
    freetype = Formula["freetype"]
    gdk_pixbuf = Formula["gdk-pixbuf"]
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    libpng = Formula["libpng"]
    pango = Formula["pango"]
    pixman = Formula["pixman"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{atk.opt_include}/atk-1.0
      -I#{cairo.opt_include}/cairo
      -I#{fontconfig.opt_include}
      -I#{freetype.opt_include}/freetype2
      -I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/gtk-2.0
      -I#{libpng.opt_include}/libpng16
      -I#{lib}/gtk-2.0/include
      -I#{pango.opt_include}/pango-1.0
      -I#{pixman.opt_include}/pixman-1
      -D_REENTRANT
      -L#{atk.opt_lib}
      -L#{cairo.opt_lib}
      -L#{gdk_pixbuf.opt_lib}
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{lib}
      -L#{pango.opt_lib}
      -latk-1.0
      -lcairo
      -lgdk-quartz-2.0
      -lgdk_pixbuf-2.0
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lgtk-quartz-2.0
      -lintl
      -lpango-1.0
      -lpangocairo-1.0
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
gtk+ formula updated
# Homebrew formula for GTK+ 2.24.28 (revision 1, option moved above
# the dependency list and given a description), built with the Quartz
# backend.
class Gtkx < Formula
  desc "GUI toolkit"
  homepage "http://gtk.org/"
  url "https://download.gnome.org/sources/gtk+/2.24/gtk+-2.24.28.tar.xz"
  sha256 "b2c6441e98bc5232e5f9bba6965075dcf580a8726398f7374d39f90b88ed4656"
  revision 1

  bottle do
    sha256 "bbc0dc6e82ebed36acfecbd5216a7e05709ce54353fae2e88ae6dc89d02b4c49" => :yosemite
    sha256 "e3ac7c303dcf388bf87835a85a1e143c2957eaada72de82527bb0364196a9d35" => :mavericks
    sha256 "67af28491ac9622e5c19b0d37802c9569e95ab21342b83672de0b6aac98c5c72" => :mountain_lion
  end

  option "with-quartz-relocation", "Build with quartz relocation support"

  depends_on "pkg-config" => :build
  depends_on "gdk-pixbuf"
  depends_on "jasper" => :optional
  depends_on "atk"
  depends_on "pango"
  depends_on "gobject-introspection"

  fails_with :llvm do
    build 2326
    cause "Undefined symbols when linking"
  end

  def install
    args = ["--disable-dependency-tracking",
            "--disable-silent-rules",
            "--prefix=#{prefix}",
            "--disable-glibtest",
            "--enable-introspection=yes",
            "--with-gdktarget=quartz",
            "--disable-visibility"]
    args << "--enable-quartz-relocation" if build.with?("quartz-relocation")
    system "./configure", *args
    system "make", "install"
  end

  test do
    # Compile a minimal program against the Quartz GTK+ libraries,
    # assembling the include/lib flags by hand from each dependency.
    (testpath/"test.c").write <<-EOS.undent
      #include <gtk/gtk.h>
      int main(int argc, char *argv[]) {
        GtkWidget *label = gtk_label_new("Hello World!");
        return 0;
      }
    EOS
    atk = Formula["atk"]
    cairo = Formula["cairo"]
    fontconfig = Formula["fontconfig"]
    freetype = Formula["freetype"]
    gdk_pixbuf = Formula["gdk-pixbuf"]
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    libpng = Formula["libpng"]
    pango = Formula["pango"]
    pixman = Formula["pixman"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{atk.opt_include}/atk-1.0
      -I#{cairo.opt_include}/cairo
      -I#{fontconfig.opt_include}
      -I#{freetype.opt_include}/freetype2
      -I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/gtk-2.0
      -I#{libpng.opt_include}/libpng16
      -I#{lib}/gtk-2.0/include
      -I#{pango.opt_include}/pango-1.0
      -I#{pixman.opt_include}/pixman-1
      -D_REENTRANT
      -L#{atk.opt_lib}
      -L#{cairo.opt_lib}
      -L#{gdk_pixbuf.opt_lib}
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{lib}
      -L#{pango.opt_lib}
      -latk-1.0
      -lcairo
      -lgdk-quartz-2.0
      -lgdk_pixbuf-2.0
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lgtk-quartz-2.0
      -lintl
      -lpango-1.0
      -lpangocairo-1.0
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
|
require 'govuk_mirrorer/indexer'
module GovukSeedCrawler
  # Collects the seed URLs for a crawl by delegating to GovukMirrorer's
  # site indexer.
  class GetUrls
    # Array of start URLs discovered for the site.
    attr_reader :urls

    # site_root - base URL of the site to index; must be truthy.
    def initialize(site_root)
      raise "No :site_root defined" unless site_root
      @urls = GovukMirrorer::Indexer.new(site_root).all_start_urls
    end
  end
end
Fix typo in error message
require 'govuk_mirrorer/indexer'
module GovukSeedCrawler
  # Collects the seed URLs for a crawl by delegating to GovukMirrorer's
  # site indexer.
  class GetUrls
    # Array of start URLs discovered for the site.
    attr_reader :urls

    # site_root - base URL of the site to index; must be truthy.
    def initialize(site_root)
      raise "No site_root defined" unless site_root
      @urls = GovukMirrorer::Indexer.new(site_root).all_start_urls
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.