repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/storage/scp.rb | lib/backup/storage/scp.rb | require "net/scp"
module Backup
module Storage
# Stores backup packages on a remote server over SSH, uploading via SCP.
class SCP < Base
include Storage::Cycler
class Error < Backup::Error; end
##
# Server credentials
attr_accessor :username, :password, :ssh_options
##
# Server IP Address and SCP port
attr_accessor :ip, :port
def initialize(model, storage_id = nil)
super
# Defaults: standard SSH port, "backups" remote path, no extra SSH options.
@port ||= 22
@path ||= "backups"
@ssh_options ||= {}
# Strip a leading "~/" so the path is relative to the SSH login directory.
path.sub!(/^~\//, "")
end
private
# Opens a Net::SSH session and yields it to the caller's block.
# The session is closed automatically when the block returns.
def connection
Net::SSH.start(
ip, username, { password: password, port: port }.merge(ssh_options)
) { |ssh| yield ssh }
end
# Uploads every file of the current package from the local tmp path
# to the remote path, creating the remote directory first.
def transfer!
connection do |ssh|
ssh.exec!("mkdir -p '#{remote_path}'")
package.filenames.each do |filename|
src = File.join(Config.tmp_path, filename)
dest = File.join(remote_path, filename)
Logger.info "Storing '#{ip}:#{dest}'..."
ssh.scp.upload!(src, dest)
end
end
end
# Called by the Cycler.
# Any error raised will be logged as a warning.
# Removes the package's remote directory; stderr output from `rm -r`
# is collected and raised as an Error if anything was reported.
def remove!(package)
Logger.info "Removing backup package dated #{package.time}..."
errors = []
connection do |ssh|
ssh.exec!("rm -r '#{remote_path_for(package)}'") do |_, stream, data|
errors << data if stream == :stderr
end
end
unless errors.empty?
raise Error, "Net::SSH reported the following errors:\n" +
errors.join("\n")
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/storage/qiniu.rb | lib/backup/storage/qiniu.rb | require "qiniu"
module Backup
module Storage
# Stores backup packages in a Qiniu cloud-storage bucket.
class Qiniu < Base
include Storage::Cycler
class Error < Backup::Error; end
##
# Qiniu API credentials
attr_accessor :access_key, :secret_key
##
# Qiniu bucket name
attr_accessor :bucket
def initialize(model, storage_id = nil)
super
@path ||= "backups"
# Fail fast on missing credentials, then configure the Qiniu client.
check_configuration
config_credentials
end
private
# Uploads every file of the current package to the bucket, using the
# remote path + filename as the object key.
def transfer!
package.filenames.each do |filename|
src = File.join(Config.tmp_path, filename)
dest = File.join(remote_path, filename)
Logger.info "Storing '#{dest}'..."
::Qiniu.upload_file(uptoken: ::Qiniu.generate_upload_token,
bucket: bucket,
file: src,
key: dest)
end
end
# Called by the Cycler.
# Any error raised will be logged as a warning.
def remove!(package)
Logger.info "Removing backup package dated #{package.time}..."
remote_path = remote_path_for(package)
package.filenames.each do |filename|
::Qiniu.delete(bucket, File.join(remote_path, filename))
end
end
# Raises a configuration Error unless all required accessors are set.
def check_configuration
required = %w[access_key secret_key bucket]
raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
Configuration Error
#{required.map { |name| "##{name}" }.join(", ")} are all required
EOS
end
# Registers the credentials with the Qiniu client library.
def config_credentials
::Qiniu.establish_connection!(access_key: access_key, secret_key: secret_key)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/storage/cycler.rb | lib/backup/storage/cycler.rb | module Backup
module Storage
# Mixin for Storage classes that tracks stored packages in a YAML data
# file and removes old packages once the #keep limit is exceeded.
module Cycler
class Error < Backup::Error; end
private
# Adds the current package being stored to the YAML cycle data file
# and will remove any old package file(s) when the storage limit
# set by #keep is exceeded. #keep may be an Integer (max number of
# packages to retain) or a Date/Time (packages older than it are removed).
def cycle!
Logger.info "Cycling Started..."
packages = yaml_load.unshift(package)
cycled_packages = []
if keep.is_a?(Date) || keep.is_a?(Time)
cycled_packages = packages.select do |p|
p.time_as_object < keep.to_time
end
else
excess = packages.count - keep.to_i
cycled_packages = packages.last(excess) if excess > 0
end
saved_packages = packages - cycled_packages
cycled_packages.each { |package| delete_package package }
yaml_save(saved_packages)
end
# Removes a cycled package via the storage's #remove!, unless it was
# flagged #no_cycle (e.g. server-side expiration is in use).
# Any error raised will be logged as a warning so cycling continues.
def delete_package(package)
remove!(package) unless package.no_cycle
rescue => err
Logger.warn Error.wrap(err, <<-EOS)
There was a problem removing the following package:
Trigger: #{package.trigger} :: Dated: #{package.time}
Package included the following #{package.filenames.count} file(s):
#{package.filenames.join("\n")}
EOS
end
# Returns path to the YAML data file, named for the storage class
# (plus "-<storage_id>" when set), e.g. "SCP-my_id.yml".
def yaml_file
@yaml_file ||= begin
filename = self.class.to_s.split("::").last
filename << "-#{storage_id}" if storage_id
# BUG FIX: the computed filename was previously discarded and the
# literal corrupted string "#(unknown).yml" used instead, making every
# storage share one bogus data file. Interpolate the storage name.
File.join(Config.data_path, package.trigger, "#{filename}.yml")
end
end
# Returns stored Package objects, sorted by #time descending (oldest last).
# Uses YAML.safe_load_file with an explicit permitted class when the
# running Ruby/Psych supports it; falls back to load_file otherwise.
def yaml_load
loaded =
if File.exist?(yaml_file) && !File.zero?(yaml_file)
if YAML.respond_to?(:safe_load_file)
YAML.safe_load_file(yaml_file, permitted_classes: [Backup::Package])
else
YAML.load_file(yaml_file)
end
else
[]
end
loaded.sort_by!(&:time).reverse!
end
# Stores the given package objects to the YAML data file.
def yaml_save(packages)
FileUtils.mkdir_p(File.dirname(yaml_file))
File.open(yaml_file, "w") do |file|
file.write(packages.to_yaml)
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/storage/cloud_files.rb | lib/backup/storage/cloud_files.rb | require "backup/cloud_io/cloud_files"
module Backup
module Storage
class CloudFiles < Base
include Storage::Cycler
class Error < Backup::Error; end
##
# Rackspace CloudFiles Credentials
attr_accessor :username, :api_key
##
# Rackspace Auth URL (optional)
attr_accessor :auth_url
##
# Rackspace Service Net
# (LAN-based transfers to avoid charges and improve performance)
attr_accessor :servicenet
##
# Rackspace Region (optional)
attr_accessor :region
##
# Rackspace Container Name
attr_accessor :container
##
# Rackspace Container Name for SLO Segments
# Required if #segment_size is set. Must be different from #container.
attr_accessor :segments_container
##
# SLO Segment size, specified in MiB.
#
# Each package file larger than +segment_size+
# will be uploaded as a Static Large Objects (SLO).
#
# Defaults to 0 for backward compatibility (pre v.3.7.0),
# since #segments_container would be required.
#
# Minimum: 1 (0 disables SLO support)
# Maximum: 5120 (5 GiB)
attr_accessor :segment_size
##
# If set, all backup package files (including SLO segments) will be
# scheduled for automatic removal by the server.
#
# The `keep` option should not be used if this is set,
# unless you're transitioning from the `keep` option.
attr_accessor :days_to_keep
##
# Number of times to retry failed operations.
#
# Default: 10
attr_accessor :max_retries
##
# Time in seconds to pause before each retry.
#
# Default: 30
attr_accessor :retry_waitsec
##
# Additional options to pass along to fog.
# e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
attr_accessor :fog_options
def initialize(model, storage_id = nil)
super
@servicenet ||= false
@segment_size ||= 0
@max_retries ||= 10
@retry_waitsec ||= 30
@path ||= "backups"
path.sub!(/^\//, "")
check_configuration
end
private
def cloud_io
@cloud_io ||= CloudIO::CloudFiles.new(
username: username,
api_key: api_key,
auth_url: auth_url,
region: region,
servicenet: servicenet,
container: container,
segments_container: segments_container,
segment_size: segment_size,
days_to_keep: days_to_keep,
max_retries: max_retries,
retry_waitsec: retry_waitsec,
fog_options: fog_options
)
end
def transfer!
package.filenames.each do |filename|
src = File.join(Config.tmp_path, filename)
dest = File.join(remote_path, filename)
Logger.info "Storing '#{container}/#{dest}'..."
cloud_io.upload(src, dest)
end
package.no_cycle = true if days_to_keep
end
# Called by the Cycler.
# Any error raised will be logged as a warning.
def remove!(package)
Logger.info "Removing backup package dated #{package.time}..."
remote_path = remote_path_for(package)
objects = cloud_io.objects(remote_path)
raise Error, "Package at '#{remote_path}' not found" if objects.empty?
slo_objects, objects = objects.partition(&:slo?)
cloud_io.delete_slo(slo_objects)
cloud_io.delete(objects)
end
def check_configuration
required = %w[username api_key container]
raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
Configuration Error
#{required.map { |name| "##{name}" }.join(", ")} are all required
EOS
raise Error, <<-EOS if segment_size > 0 && segments_container.to_s.empty?
Configuration Error
#segments_container is required if #segment_size is > 0
EOS
raise Error, <<-EOS if container == segments_container
Configuration Error
#container and #segments_container must not be the same container.
EOS
raise Error, <<-EOS if segment_size > 5120
Configuration Error
#segment_size is too large (max 5120)
EOS
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/hipchat.rb | lib/backup/notifier/hipchat.rb | require "hipchat"
module Backup
module Notifier
class Hipchat < Base
##
# The Hipchat API token
attr_accessor :token
##
# The Hipchat API version
# Either 'v1' or 'v2' (default is 'v1')
attr_accessor :api_version
##
# Who the notification should appear from
attr_accessor :from
##
# Custom server URL
attr_accessor :server_url
##
# The rooms that should be notified
attr_accessor :rooms_notified
##
# Notify users in the room
attr_accessor :notify_users
##
# The background color of a success message.
# One of :yellow, :red, :green, :purple, or :random. (default: yellow)
attr_accessor :success_color
##
# The background color of a warning message.
# One of :yellow, :red, :green, :purple, or :random. (default: yellow)
attr_accessor :warning_color
##
# The background color of an error message.
# One of :yellow, :red, :green, :purple, or :random. (default: yellow)
attr_accessor :failure_color
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@notify_users ||= false
@rooms_notified ||= []
@success_color ||= "yellow"
@warning_color ||= "yellow"
@failure_color ||= "yellow"
@api_version ||= "v1"
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
def notify!(status)
status_data = status_data_for(status)
msg = message.call(model, status: status_data)
send_message(msg, status_data[:color])
end
def client_options
{ api_version: @api_version }.tap do |h|
h[:server_url] = server_url if server_url
end
end
# Hipchat::Client will raise an error if unsuccessful.
def send_message(msg, color)
client = HipChat::Client.new(token, client_options)
rooms_to_notify.each do |room|
client[room].send(from, msg, color: color, notify: notify_users)
end
end
def rooms_to_notify
Array(rooms_notified).map { |r| r.split(",").map(&:strip) }.flatten
end
def status_data_for(status)
data = super(status)
data[:color] = status_color_for(status)
data
end
def status_color_for(status)
{
success: success_color,
warning: warning_color,
failure: failure_color
}[status]
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/command.rb | lib/backup/notifier/command.rb | module Backup
module Notifier
# Notifier that runs an arbitrary user-supplied command, passing the
# backup status through formatted arguments.
class Command < Base
##
# Command to execute.
#
# Make sure it is accessible from your $PATH, or provide
# the absolute path to the command.
attr_accessor :command
##
# Arguments to pass to the command.
#
# Must be an array of strings or callable objects.
#
# Callables will be invoked with #call(model, status),
# and the return value used as the argument.
#
# In strings you can use the following placeholders:
#
# %l - Model label
# %t - Model trigger
# %d - Backup duration (HH:MM:SS)
# %s - Status (success/failure/warning)
# %v - Status verb (succeeded/failed/succeeded with warnings)
#
# All placeholders can be used with uppercase letters to capitalize
# the value.
#
# Defaults to ["%L %v"]
attr_accessor :args
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@args ||= ["%L %v"]
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# Spawns the command with its formatted arguments. The array form of
# IO.popen is used, so no shell is involved.
# NOTE(review): the returned pipe is neither closed nor waited on here —
# TODO confirm this is acceptable for long-running commands.
def notify!(status)
IO.popen([@command] + args.map { |arg| format_arg(arg, status) })
end
# Formats one argument: callables are invoked with (model, status);
# strings have their %x placeholders substituted. An uppercase
# placeholder letter capitalizes the substituted value.
def format_arg(arg, status)
if arg.respond_to?(:call)
arg.call(model, status)
else
arg.gsub(/%(\w)/) do |match|
ph = match[1]
val = case ph.downcase
when "l"
model.label
when "t"
model.trigger.to_s
when "d"
model.duration
when "v"
status_verb(status)
when "s"
status.to_s
end
# capitalize! mutates val in place (returns nil when unchanged).
val.capitalize! if ph == ph.upcase
val
end
end
end
# Maps a status symbol to the verb form used by the %v placeholder.
def status_verb(status)
case status
when :success
"succeeded"
when :failure
"failed"
when :warning
"succeeded with warnings"
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/http_post.rb | lib/backup/notifier/http_post.rb | require "uri"
module Backup
module Notifier
class HttpPost < Base
##
# URI to post notification to.
#
# URI scheme may be `http` or `https`.
#
# If Basic Authentication is needed, supply the `user:password` in the URI.
# e.g. 'https://user:pass@www.example.com/path'
#
# Port may also be supplied.
# e.g. 'http://www.example.com:8080/path'
attr_accessor :uri
##
# Hash of additional HTTP headers to send.
#
# This notifier sets the following headers:
# { 'User-Agent' => "Backup/#{ Backup::VERSION }",
# 'Content-Type' => 'x-www-form-urlencoded' }
#
# 'Content-Type' may not be changed.
# 'User-Agent' may be overridden or omitted by setting it to +nil+.
# e.g. { 'Authorization' => 'my_auth_info', 'User-Agent' => nil }
attr_accessor :headers
##
# Hash of additional POST parameters to send.
#
# This notifier will set two parameters:
# { 'status' => 'success|warning|failure',
# 'message' => '[Backup::(Success|Warning|Failure)] label (trigger)' }
#
# 'status' may not be changed.
# 'message' may be overridden or omitted by setting a +nil+ value.
# e.g. { 'auth_token' => 'my_token', 'message' => nil }
attr_accessor :params
##
# Successful HTTP Status Code(s) that should be returned.
#
# This may be a single code or an Array of acceptable codes.
# e.g. [200, 201, 204]
#
# If any other response code is returned, the request will be retried
# using `max_retries` and `retry_waitsec`.
#
# Default: 200
attr_accessor :success_codes
##
# Verify the server's certificate when using SSL.
#
# This will default to +true+ for most systems.
# It may be forced by setting to +true+, or disabled by setting to +false+.
attr_accessor :ssl_verify_peer
##
# Path to a +cacert.pem+ file to use for +ssl_verify_peer+.
#
# This is provided (via Excon), but may be specified if needed.
attr_accessor :ssl_ca_file
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@headers ||= {}
@params ||= {}
@success_codes ||= 200
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
def notify!(status)
msg = message.call(model, status: status_data_for(status))
opts = {
headers: { "User-Agent" => "Backup/#{VERSION}" }
.merge(headers).reject { |_, value| value.nil? }
.merge("Content-Type" => "application/x-www-form-urlencoded"),
body: URI.encode_www_form({ "message" => msg }
.merge(params).reject { |_, value| value.nil? }
.merge("status" => status.to_s)),
expects: success_codes # raise error if unsuccessful
}
opts[:ssl_verify_peer] = ssl_verify_peer unless ssl_verify_peer.nil?
opts[:ssl_ca_file] = ssl_ca_file if ssl_ca_file
Excon.post(uri, opts)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/ses.rb | lib/backup/notifier/ses.rb | require "aws-sdk"
require "mail"
module Backup
module Notifier
class Ses < Base
##
# Amazon Simple Email Service (SES) Credentials
attr_accessor :access_key_id, :secret_access_key, :use_iam_profile
##
# SES Region
attr_accessor :region
##
# Sender Email Address
attr_accessor :from
##
# Receiver Email Address
attr_accessor :to
##
# CC receiver Email Address
attr_accessor :cc
##
# BCC receiver Email Address
attr_accessor :bcc
##
# Set reply to email address
attr_accessor :reply_to
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@region ||= "eu-west-1"
@send_log_on ||= [:warning, :failure]
end
##
# Array of statuses for which the log file should be attached.
#
# Available statuses are: `:success`, `:warning` and `:failure`.
# Default: [:warning, :failure]
attr_accessor :send_log_on
private
def client
credentials = if use_iam_profile
Aws::InstanceProfileCredentials.new
else
Aws::Credentials.new(access_key_id, secret_access_key)
end
Aws::SES::Client.new(
region: region,
credentials: credentials
)
end
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent, including a copy of the current
# : backup log, if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent, including a copy of the current
# : backup log, if `on_failure` is `true`.
#
def notify!(status)
email = ::Mail.new
email.to = to
email.from = from
email.cc = cc
email.bcc = bcc
email.reply_to = reply_to
email.subject = message.call(model, status: status_data_for(status))
# By default, the `mail` gem doesn't include BCC in raw output, which is
# needed for SES to send to those addresses.
email[:bcc].include_in_headers = true
send_log = send_log_on.include?(status)
template = Backup::Template.new(model: model, send_log: send_log)
email.body = template.result(sprintf("notifier/mail/%s.erb", status.to_s))
if send_log
email.convert_to_multipart
email.attachments["#{model.time}.#{model.trigger}.log"] = {
mime_type: "text/plain;",
content: Logger.messages.map(&:formatted_lines).flatten.join("\n")
}
end
send_opts = {
raw_message: {
data: email.to_s
}
}
if email.respond_to?(:destinations)
send_opts[:destinations] = email.destinations
end
client.send_raw_email(send_opts)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/datadog.rb | lib/backup/notifier/datadog.rb | require "dogapi"
module Backup
module Notifier
class DataDog < Base
##
# The DataDog API key
attr_accessor :api_key
##
# The title of the event
attr_accessor :title
attr_deprecate :text,
version: "4.2",
message: "Please use the `message` attribute. For more information "\
"see https://github.com/backup/backup/pull/698"
##
# The timestamp for the event
attr_accessor :date_happened
##
# The priority of the event (low/normal)
attr_accessor :priority
##
# The host that generated the event
attr_accessor :host
##
# The tags for this host (should be an array)
attr_accessor :tags
##
# The alert_type of the event (error/warning/info/success)
attr_accessor :alert_type
##
# The aggregation_key for the event
attr_accessor :aggregation_key
##
# The source_type for the event (nagios, hudson, jenkins, user, my apps, feed, chef, puppet, git, bitbucket, fabric, capistrano)
attr_accessor :source_type_name
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@title ||= "Backup #{model.label}"
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
def notify!(status)
msg = message.call(model, status: status_data_for(status))
hash = { alert_type: default_alert_type(status) }
hash.store(:msg_title, @title)
hash.store(:date_happened, @date_happened) if @date_happened
hash.store(:priority, @priority) if @priority
hash.store(:host, @host) if @host
hash.store(:tags, @tags) if @tags
hash.store(:aggregation_key, @aggregation_key) if @aggregation_key
hash.store(:source_type_name, @source_type_name) if @source_type_name
hash.store(:alert_type, @alert_type) if @alert_type
send_event(msg, hash)
end
# Dogapi::Client will raise an error if unsuccessful.
def send_event(msg, hash)
client = Dogapi::Client.new(@api_key)
event = Dogapi::Event.new(msg, hash)
client.emit_event(event)
end
# set alert type
def default_alert_type(status)
case status
when :success then "success"
when :warning then "warning"
when :failure then "error"
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/pagerduty.rb | lib/backup/notifier/pagerduty.rb | require "pagerduty"
module Backup
module Notifier
# Notifier that triggers or resolves a PagerDuty incident for a backup model.
class PagerDuty < Base
##
# PagerDuty Service API Key. Should be a 32 character hex string.
attr_accessor :service_key
##
# Determines if a backup with a warning should resolve an incident rather
# than trigger one.
#
# Defaults to false.
attr_accessor :resolve_on_warning
# FIX: the parameter was misnamed `mode`; renamed to `model` for
# consistency with every other Notifier. Behavior is unchanged — `super`
# forwards the argument positionally to Base either way.
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@resolve_on_warning ||= false
end
private
##
# Trigger or resolve a PagerDuty incident for this model
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : The incident will be resolved if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : An incident will be triggered if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : An incident will be triggered if `on_failure` is `true`.
#
def notify!(status)
incident_description = "Backup - #{model.label}"
# One incident key per trigger, so repeat failures de-duplicate and a
# later success can resolve the same incident.
incident_key = "backup/#{model.trigger}"
incident_details = {
incident_key: incident_key,
details: {
trigger: model.trigger,
label: model.label,
started_at: model.started_at,
finished_at: model.finished_at,
duration: model.duration,
status: status,
exception: model.exception
}
}
event_type = case status
when :success then :resolve
when :warning then resolve_on_warning ? :resolve : :trigger
when :failure then :trigger
end
case event_type
when :trigger
pagerduty.trigger(incident_description, incident_details)
when :resolve
incident = pagerduty.get_incident(incident_key)
incident.resolve(incident_description, incident_details)
end
end
# Memoized PagerDuty API client for #service_key.
def pagerduty
@pagerduty ||= Pagerduty.new(service_key)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/pushover.rb | lib/backup/notifier/pushover.rb | require "uri"
module Backup
module Notifier
# Sends push notifications through the Pushover API.
class Pushover < Base
##
# The API User Token
attr_accessor :user
##
# The API Application Token
attr_accessor :token
##
# The user's device identifier to sent the message directly to,
# rather than all of the user's devices
attr_accessor :device
##
# The message title
attr_accessor :title
##
# The priority of the notification
attr_accessor :priority
def initialize(model, &block)
super
instance_eval(&block) if block_given?
end
private
# Builds the status message for `status` (:success, :warning or
# :failure) and delivers it. Whether a notification is sent at all is
# decided by the Base class via on_success / on_warning / on_failure.
def notify!(status)
status_data = status_data_for(status)
send_message(message.call(model, status: status_data))
end
# POSTs the message to Pushover as a form-encoded body. The optional
# fields (device, title, priority) are included only when set.
def send_message(message)
payload = { user: user, token: token, message: message }
[:device, :title, :priority].each_with_object(payload) do |param, data|
value = send(param)
data[param] = value if value
end
Excon.post(
"https://api.pushover.net/1/messages.json",
headers: { "Content-Type" => "application/x-www-form-urlencoded" },
body: URI.encode_www_form(payload),
expects: 200 # raise error if unsuccessful
)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/twitter.rb | lib/backup/notifier/twitter.rb | require "twitter"
module Backup
module Notifier
# Posts the notification as a status update (tweet) via the Twitter REST API.
class Twitter < Base
##
# Twitter consumer key credentials
attr_accessor :consumer_key, :consumer_secret
##
# OAuth credentials
attr_accessor :oauth_token, :oauth_token_secret
def initialize(model, &block)
super
instance_eval(&block) if block_given?
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
def notify!(status)
send_message(message.call(model, status: status_data_for(status)))
end
# Twitter::Client will raise an error if unsuccessful.
# Builds a REST client configured with the OAuth credentials and
# posts the message as a status update.
def send_message(message)
client = ::Twitter::REST::Client.new do |config|
config.consumer_key = @consumer_key
config.consumer_secret = @consumer_secret
config.access_token = @oauth_token
config.access_token_secret = @oauth_token_secret
end
client.update(message)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/flowdock.rb | lib/backup/notifier/flowdock.rb | require "flowdock"
module Backup
module Notifier
# Pushes the notification to a Flowdock team inbox.
class FlowDock < Base
##
# The Flowdock API token
attr_accessor :token
##
# Who the notification should appear from
attr_accessor :from_name
# Which email the notification should appear from
attr_accessor :from_email
##
# source for message
attr_accessor :source
##
# Subject for message
attr_accessor :subject
##
# tag message in inbox
attr_accessor :tags
##
# link for message
attr_accessor :link
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@subject ||= default_subject
@source ||= default_source
@tags ||= []
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# NOTE(review): this appends the status tag to @tags on every call, so
# repeated notifications from one instance would accumulate duplicate
# tags — TODO confirm an instance only ever notifies once per run.
def notify!(status)
@tags += default_tags(status)
send_message(message.call(model, status: status_data_for(status)))
end
# Flowdock::Client will raise an error if unsuccessful.
def send_message(msg)
client = Flowdock::Flow.new(
api_token: token, source: source,
from: { name: from_name, address: from_email }
)
client.push_to_team_inbox(subject: subject,
content: msg,
tags: tags,
link: link)
end
# set related tags
def default_tags(status)
case status
when :success then ["#BackupSuccess"]
when :warning then ["#BackupWarning"]
when :failure then ["#BackupFailure"]
end
end
# set default source
def default_source
"Backup #{model.label}"
end
# set default subject
def default_subject
"Backup Notification"
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/prowl.rb | lib/backup/notifier/prowl.rb | require "uri"
module Backup
module Notifier
# Sends push notifications via the Prowl public API (prowlapp.com).
class Prowl < Base
##
# Application name
# Tell something like your server name. Example: "Server1 Backup"
attr_accessor :application
##
# API-Key
# Create a Prowl account and request an API key on prowlapp.com.
attr_accessor :api_key
def initialize(model, &block)
# Default message is assigned before `super` so a user-supplied
# block (evaluated below) can still override it.
@message =
lambda do |m, _|
"#{m.label} (#{m.trigger})"
end
super
instance_eval(&block) if block_given?
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
def notify!(status)
send_message(status)
end
# POSTs the notification: the status message becomes the Prowl event
# name, the formatted message its description. Raises if the API does
# not return 200.
def send_message(status)
uri = "https://api.prowlapp.com/publicapi/add"
status_data = status_data_for(status)
data = {
application: application,
apikey: api_key,
event: status_data[:message],
description: message.call(model, status: status_data)
}
options = {
headers: { "Content-Type" => "application/x-www-form-urlencoded" },
body: URI.encode_www_form(data)
}
options[:expects] = 200 # raise error if unsuccessful
Excon.post(uri, options)
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/zabbix.rb | lib/backup/notifier/zabbix.rb | module Backup
module Notifier
# Reports backup status to a Zabbix server using the `zabbix_sender` utility.
class Zabbix < Base
# Zabbix server (trapper) host to send to.
attr_accessor :zabbix_host
# Zabbix trapper port.
attr_accessor :zabbix_port
# Service name reported in the message payload.
attr_accessor :service_name
# Host the item is reported for (`-s` option).
attr_accessor :service_host
# Zabbix item key (`-k` option).
attr_accessor :item_key
def initialize(model, &block)
super
instance_eval(&block) if block_given?
# Defaults: local host, Zabbix's standard trapper port 10051,
# "Backup <trigger>" service name and "backup_status" item key.
@zabbix_host ||= Config.hostname
@zabbix_port ||= 10_051
@service_name ||= "Backup #{model.trigger}"
@service_host ||= Config.hostname
@item_key ||= "backup_status"
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
def notify!(status)
send_message(message.call(model, status: status_data_for(status)))
end
# Builds a tab-separated line (host, service, exit status, message) and
# pipes it to zabbix_sender.
# NOTE(review): `msg` is interpolated into a shell command; a message
# containing a single quote would break the command line (and could
# inject shell syntax) — consider escaping or stdin-only delivery. TODO.
def send_message(message)
msg = [service_host, service_name, model.exit_status, message].join("\t")
cmd = utility(:zabbix_sender).to_s +
" -z '#{zabbix_host}'" \
" -p '#{zabbix_port}'" \
" -s #{service_host}" \
" -k #{item_key}" \
" -o '#{msg}'"
run("echo '#{msg}' | #{cmd}")
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/slack.rb | lib/backup/notifier/slack.rb | require "uri"
require "json"
module Backup
module Notifier
class Slack < Base
##
# The incoming webhook url
attr_accessor :webhook_url
##
# The channel to send messages to
attr_accessor :channel
##
# The username to display along with the notification
attr_accessor :username
##
# The emoji icon to display along with the notification
#
# See http://www.emoji-cheat-sheet.com for a list of icons.
#
# Default: :floppy_disk:
attr_accessor :icon_emoji
##
# Array of statuses for which the log file should be attached.
#
# Available statuses are: `:success`, `:warning` and `:failure`.
# Default: [:warning, :failure]
attr_accessor :send_log_on
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@send_log_on ||= [:warning, :failure]
@icon_emoji ||= ":floppy_disk:"
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent if `on_warning` or `on_success` is `true`.
#
# Builds the Slack webhook payload (message text plus a rich
# attachment) and POSTs it as a form-encoded `payload` parameter.
def notify!(status)
data = {
text: message.call(model, status: status_data_for(status)),
attachments: [attachment(status)]
}
# Only include optional fields the user actually configured.
[:channel, :username, :icon_emoji].each do |param|
val = send(param)
data.merge!(param => val) if val
end
options = {
headers: { "Content-Type" => "application/x-www-form-urlencoded" },
body: URI.encode_www_form(payload: JSON.dump(data))
}
options[:expects] = 200 # raise error if unsuccessful
Excon.post(uri, options)
end
# Builds the Slack attachment hash: title/color per status, plus
# fields for job identity, timing, versions and (optionally) the log.
def attachment(status)
{
fallback: "#{title(status)} - Job: #{model.label} (#{model.trigger})",
text: title(status),
color: color(status),
fields: [
{
title: "Job",
value: "#{model.label} (#{model.trigger})",
short: false
},
{
title: "Started",
value: model.started_at,
short: true
},
{
title: "Finished",
value: model.finished_at,
short: true
},
{
title: "Duration",
value: model.duration,
short: true
},
{
title: "Version",
value: "Backup v#{Backup::VERSION}\nRuby: #{RUBY_DESCRIPTION}",
short: false
},
log_field(status)
].compact
}
end
# Returns a field containing the full backup log when `status` is in
# #send_log_on; otherwise nil (removed by the `.compact` above).
def log_field(status)
send_log = send_log_on.include?(status)
return unless send_log
{
title: "Detailed Backup Log",
value: Logger.messages.map(&:formatted_lines).flatten.join("\n"),
short: false
}
end
# Slack attachment color keyword for each status.
def color(status)
case status
when :success then "good"
when :failure then "danger"
when :warning then "warning"
end
end
# Human-readable headline for each status.
def title(status)
case status
when :success then "Backup Completed Successfully!"
when :failure then "Backup Failed!"
when :warning then "Backup Completed Successfully (with Warnings)!"
end
end
# Memoized webhook endpoint.
def uri
@uri ||= webhook_url
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/base.rb | lib/backup/notifier/base.rb | module Backup
module Notifier
class Error < Backup::Error; end
class Base
include Utilities::Helpers
include Config::Helpers
##
# When set to true, the user will be notified by email
# when a backup process ends without raising any exceptions
attr_accessor :on_success
alias :notify_on_success? :on_success
##
# When set to true, the user will be notified by email
# when a backup process is successful, but has warnings
attr_accessor :on_warning
alias :notify_on_warning? :on_warning
##
# When set to true, the user will be notified by email
# when a backup process raises an exception before finishing
attr_accessor :on_failure
alias :notify_on_failure? :on_failure
##
# Number of times to retry failed attempts to send notification.
# Default: 10
attr_accessor :max_retries
##
# Time in seconds to pause before each retry.
# Default: 30
attr_accessor :retry_waitsec
##
# Message to send. Depends on notifier implementation if this is used.
# Default: lambda returning:
# "#{ message } #{ model.label } (#{ model.trigger })"
#
# @yieldparam [model] Backup::Model
# @yieldparam [data] Hash containing `message` and `key` values.
attr_accessor :message
attr_reader :model
def initialize(model)
@model = model
load_defaults!
# All three notification flags default to enabled; ||= would not work
# here because a user-configured `false` must be preserved.
@on_success = true if on_success.nil?
@on_warning = true if on_warning.nil?
@on_failure = true if on_failure.nil?
@max_retries ||= 10
@retry_waitsec ||= 30
@message ||= lambda do |m, data|
"[#{data[:status][:message]}] #{m.label} (#{m.trigger})"
end
end
# This method is called from an ensure block in Model#perform! and must
# not raise any exceptions. However, each Notifier's #notify! method
# should raise an exception if the request fails so it may be retried.
#
# model.exit_status mapping: 0 = success, 1 = success with warnings,
# anything else = failure. `status` stays nil (and nothing is sent)
# when the corresponding on_* flag is disabled.
def perform!
status =
case model.exit_status
when 0
:success if notify_on_success?
when 1
:warning if notify_on_success? || notify_on_warning?
else
:failure if notify_on_failure?
end
if status
Logger.info "Sending notification using #{notifier_name}..."
with_retries { notify!(status) }
end
rescue Exception => err
# Intentionally broad: per the contract above, nothing may escape.
Logger.error Error.wrap(err, "#{notifier_name} Failed!")
end
private
# Retries the block up to #max_retries times, sleeping #retry_waitsec
# between attempts; re-raises the last error once retries are spent.
def with_retries
retries = 0
begin
yield
rescue StandardError, Timeout::Error => err
retries += 1
raise if retries > max_retries
Logger.info Error.wrap(err, "Retry ##{retries} of #{max_retries}.")
sleep(retry_waitsec)
retry
end
end
##
# Return the notifier name, with Backup namespace removed
def notifier_name
self.class.to_s.sub("Backup::", "")
end
##
# Return status data for message creation
def status_data_for(status)
{
success: {
message: "Backup::Success",
key: :success
},
warning: {
message: "Backup::Warning",
key: :warning
},
failure: {
message: "Backup::Failure",
key: :failure
}
}[status]
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/campfire.rb | lib/backup/notifier/campfire.rb | require "json"
module Backup
module Notifier
class Campfire < Base
  ##
  # Campfire API authentication token
  attr_accessor :api_token

  ##
  # Campfire account's subdomain
  attr_accessor :subdomain

  ##
  # Campfire account's room id
  attr_accessor :room_id

  def initialize(model, &block)
    super
    instance_eval(&block) if block_given?
  end

  private

  ##
  # Notify the user of the backup operation results.
  #
  # `status` is one of :success, :warning or :failure; delivery is
  # gated by the on_success/on_warning/on_failure flags checked in
  # Base#perform! before this method is invoked.
  def notify!(status)
    send_message(message.call(model, status: status_data_for(status)))
  end

  # POSTs the message to the room's speak endpoint, authenticating
  # with HTTP Basic Auth (API token as user, "x" as password).
  # Raises unless the server responds with HTTP 201.
  def send_message(message)
    endpoint = "https://#{subdomain}.campfirenow.com/room/#{room_id}/speak.json"
    payload = JSON.dump(message: { body: message, type: "Textmessage" })
    Excon.post(
      endpoint,
      headers: { "Content-Type" => "application/json" },
      body: payload,
      user: api_token,
      password: "x", # Basic Auth
      expects: 201 # raise error if unsuccessful
    )
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/mail.rb | lib/backup/notifier/mail.rb | require "mail"
module Backup
module Notifier
class Mail < Base
##
# Mail delivery method to be used by the Mail gem.
#
# Supported methods:
#
# [:smtp - ::Mail::SMTP (default)]
# Settings used by this method:
# {#address}, {#port}, {#domain}, {#user_name}, {#password},
# {#authentication}, {#encryption}, {#openssl_verify_mode}
#
# [:sendmail - ::Mail::Sendmail]
# Settings used by this method:
# {#sendmail_args}
#
# [:exim - ::Mail::Exim]
# Settings used by this method:
# {#exim_args}
#
# [:file - ::Mail::FileDelivery]
# Settings used by this method:
# {#mail_folder}
#
attr_accessor :delivery_method
##
# Sender Email Address
attr_accessor :from
##
# Receiver Email Address
attr_accessor :to
##
# CC receiver Email Address
attr_accessor :cc
##
# BCC receiver Email Address
attr_accessor :bcc
##
# Set reply to email address
attr_accessor :reply_to
##
# SMTP Server Address
attr_accessor :address
##
# SMTP Server Port
attr_accessor :port
##
# Your domain (if applicable)
attr_accessor :domain
##
# SMTP Server Username (sender email's credentials)
attr_accessor :user_name
##
# SMTP Server Password (sender email's credentials)
attr_accessor :password
##
# Authentication type
#
# Acceptable values: +:plain+, +:login+, +:cram_md5+
attr_accessor :authentication
##
# Set the method of encryption to be used for the +SMTP+ connection.
#
# [:starttls (default)]
# Use +STARTTLS+ to upgrade the connection to a +SSL/TLS+ connection.
#
# [:tls or :ssl]
# Use a +SSL/TLS+ connection.
#
# [:none]
# No encryption will be used.
attr_accessor :encryption
##
# OpenSSL Verify Mode
#
# Valid modes: +:none+, +:peer+, +:client_once+, +:fail_if_no_peer_cert+
# See +OpenSSL::SSL+ for details.
#
# Use +:none+ for a self-signed and/or wildcard certificate
attr_accessor :openssl_verify_mode
##
# Optional arguments to pass to `sendmail`
#
# Note that this will override the defaults set by the Mail gem
# (currently: '-i'). So, if set here, be sure to set all the arguments
# you require.
#
# Example: '-i -X/tmp/traffic.log'
attr_accessor :sendmail_args
##
# Optional arguments to pass to `exim`
#
# Note that this will override the defaults set by the Mail gem
# (currently: '-i -t') So, if set here, be sure to set all the arguments
# you require.
#
# Example: '-i -t -X/tmp/traffic.log'
attr_accessor :exim_args
##
# Folder where mail will be kept when using the `:file` `delivery_method`.
#
# Default location is '$HOME/Backup/emails'
attr_accessor :mail_folder
##
# Array of statuses for which the log file should be attached.
#
# Available statuses are: `:success`, `:warning` and `:failure`.
# Default: [:warning, :failure]
attr_accessor :send_log_on
def initialize(model, &block)
super
instance_eval(&block) if block_given?
@send_log_on ||= [:warning, :failure]
@encryption ||= :starttls
end
private
##
# Notify the user of the backup operation results.
#
# `status` indicates one of the following:
#
# `:success`
# : The backup completed successfully.
# : Notification will be sent if `on_success` is `true`.
#
# `:warning`
# : The backup completed successfully, but warnings were logged.
# : Notification will be sent, including a copy of the current
# : backup log, if `on_warning` or `on_success` is `true`.
#
# `:failure`
# : The backup operation failed.
# : Notification will be sent, including a copy of the current
# : backup log, if `on_failure` is `true`.
#
# Renders the status-specific ERB template as the body and, for
# statuses in #send_log_on, attaches the current log as a text file.
def notify!(status)
email = new_email
email.subject = message.call(model, status: status_data_for(status))
send_log = send_log_on.include?(status)
template = Backup::Template.new(model: model, send_log: send_log)
email.body = template.result(sprintf("notifier/mail/%s.erb", status.to_s))
if send_log
email.convert_to_multipart
email.attachments["#{model.time}.#{model.trigger}.log"] = {
mime_type: "text/plain;",
content: Logger.messages.map(&:formatted_lines).flatten.join("\n")
}
end
email.deliver! # raise error if unsuccessful
end
##
# Configures the Mail gem by setting the defaults.
# Creates and returns a new email, based on the @delivery_method used.
def new_email
# Array#index returns 0 for "smtp" — 0 is truthy in Ruby, so this
# whitelists the configured method and falls back to "smtp" for any
# unrecognized value.
method = %w[smtp sendmail exim file test]
.index(@delivery_method.to_s) ? @delivery_method.to_s : "smtp"
options =
case method
when "smtp"
opts = {
address: @address,
port: @port,
user_name: @user_name,
password: @password,
authentication: @authentication,
enable_starttls_auto: @encryption == :starttls,
openssl_verify_mode: @openssl_verify_mode,
ssl: @encryption == :ssl,
tls: @encryption == :tls
}
# Don't override default domain setting if domain not applicable.
# ref https://github.com/mikel/mail/blob/2.6.3/lib/mail/network/delivery_methods/smtp.rb#L82
opts[:domain] = @domain if @domain
opts
when "sendmail"
opts = {}
opts[:location] = utility(:sendmail)
opts[:arguments] = @sendmail_args if @sendmail_args
opts
when "exim"
opts = {}
opts[:location] = utility(:exim)
opts[:arguments] = @exim_args if @exim_args
opts
when "file"
@mail_folder ||= File.join(Config.root_path, "emails")
{ location: File.expand_path(@mail_folder) }
when "test" then {}
end
email = ::Mail.new
email.delivery_method method.to_sym, options
email.to = to
email.from = from
email.cc = cc
email.bcc = bcc
email.reply_to = reply_to
email
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/notifier/nagios.rb | lib/backup/notifier/nagios.rb | module Backup
module Notifier
class Nagios < Base
  ##
  # Host of the Nagios server to notify on backup completion.
  attr_accessor :nagios_host

  ##
  # Port of the Nagios server to notify on backup completion.
  attr_accessor :nagios_port

  ##
  # Nagios nrpe configuration file.
  attr_accessor :send_nsca_cfg

  ##
  # Name of the Nagios service for the backup check.
  attr_accessor :service_name

  ##
  # Host name in Nagios for the backup check.
  attr_accessor :service_host

  def initialize(model, &block)
    super
    instance_eval(&block) if block_given?

    # Defaults are applied after the config block so user settings win.
    @nagios_host ||= Config.hostname
    @nagios_port ||= 5667
    @send_nsca_cfg ||= "/etc/nagios/send_nsca.cfg"
    @service_name ||= "Backup #{model.trigger}"
    @service_host ||= Config.hostname
  end

  private

  ##
  # Notify the user of the backup operation results.
  #
  # `status` is one of :success, :warning or :failure; delivery is
  # gated by the on_success/on_warning/on_failure flags checked in
  # Base#perform! before this method is invoked.
  def notify!(status)
    send_message(message.call(model, status: status_data_for(status)))
  end

  # Pipes a tab-separated passive check result into `send_nsca`.
  # The model's exit_status fills the service-status field of the
  # send_nsca input line.
  def send_message(message)
    msg = [service_host, service_name, model.exit_status, message].join("\t")
    cmd = format(
      "%s -H '%s' -p '%s' -c '%s'",
      utility(:send_nsca), nagios_host, nagios_port, send_nsca_cfg
    )
    run("echo '#{msg}' | #{cmd}")
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/logger/logfile.rb | lib/backup/logger/logfile.rb | module Backup
class Logger
class Logfile
class Error < Backup::Error; end
class Options
##
# Enable the use of Backup's log file.
#
# While not necessary, as this is +true+ by default,
# this may also be set on the command line using +--logfile+.
#
# The use of Backup's log file may be disabled using the
# command line option +--no-logfile+.
#
# If +--no--logfile+ is used on the command line, then the
# log file will be disabled and any setting here will be ignored.
#
# @param [Boolean, nil]
# @return [Boolean, nil] Default: +true+
attr_reader :enabled
##
# Path to directory where Backup's logfile will be written.
#
# This may be given as an absolute path, or a path relative
# to Backup's +--root-path+ (which defaults to +~/Backup+).
#
# This may also be set on the command line using +--log-path+.
# If set on the command line, any setting here will be ignored.
#
# @param [String]
# @return [String] Default: 'log'
attr_reader :log_path
##
# Backup's logfile in which backup logs can be written
#
# As there is already a log_path, this can simply be just a file name
# that will be created (If not exists) on log_path directory
#
# This may also be set on the command line using +--log-file+.
# If set on the command line, any setting here will be ignored.
#
# @param [String]
# @return [String] Default: 'backup.log'
attr_reader :log_file
##
# Size in bytes to truncate logfile to before backup jobs are run.
#
# This is done once before all +triggers+, so the maximum logfile size
# would be this value plus whatever the jobs produce.
#
# @param [Integer]
# @return [Integer] Default: +500_000+
attr_accessor :max_bytes
def initialize
@enabled = true
@log_path = ""
@max_bytes = 500_000
end
def enabled?
!!enabled
end
# Guarded setter: assignment is skipped once @enabled is nil
# (which is how a command-line override locks the value in).
def enabled=(val)
@enabled = val unless enabled.nil?
end
# Guarded setter: only the first (non-empty) assignment sticks.
def log_path=(val)
@log_path = val.to_s.strip if log_path.empty?
end
end
def initialize(options)
@options = options
@logfile = setup_logfile
truncate!
end
# Appends the message's formatted lines; opens/closes per call.
def log(message)
File.open(@logfile, "a") { |f| f.puts message.formatted_lines }
end
private
##
# Returns the full path to the log file, based on the configured
# @options.log_path, and ensures the path to the log file exists.
#
# @raise [Error] if an existing log file is not writable.
def setup_logfile
# strip any trailing '/' in case the user supplied this as part of
# an absolute path, so we can match it against File.expand_path()
path = @options.log_path.chomp("/")
if path.empty?
path = File.join(Backup::Config.root_path, "log")
elsif path != File.expand_path(path)
# Not absolute, so treat it as relative to the root path.
path = File.join(Backup::Config.root_path, path)
end
FileUtils.mkdir_p(path)
log_file = @options.log_file || "backup.log"
path = File.join(path, log_file)
if File.exist?(path) && !File.writable?(path)
raise Error, "Log File at '#{path}' is not writable"
end
path
end
##
# Truncates the logfile to @options.max_bytes
#
# Works on a temporary copy ('<logfile>~') and rewrites the original,
# so the live logfile is never read and written at the same time.
def truncate!
return unless File.exist?(@logfile)
if File.stat(@logfile).size > @options.max_bytes
FileUtils.cp(@logfile, @logfile + "~")
File.open(@logfile + "~", "r") do |io_in|
File.open(@logfile, "w") do |io_out|
# Seek to max_bytes from EOF, then discard the (likely partial)
# first line so the kept tail starts on a line boundary.
io_in.seek(-@options.max_bytes, IO::SEEK_END) && io_in.gets
while line = io_in.gets
io_out.puts line
end
end
end
FileUtils.rm_f(@logfile + "~")
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/logger/console.rb | lib/backup/logger/console.rb | module Backup
class Logger
class Console
  class Options
    ##
    # Disables all console output.
    #
    # This may also be set on the command line using +--quiet+.
    # If +--no-quiet+ is used on the command line, console output
    # will be enabled and any setting here will be ignored.
    #
    # @param [Boolean, nil]
    # @return [Boolean, nil] Default: +false+
    attr_reader :quiet

    def initialize
      @quiet = false
    end

    # Console output is enabled unless +quiet+ was set.
    def enabled?
      !quiet
    end

    # Guarded setter: assignment is skipped once @quiet is nil.
    def quiet=(val)
      @quiet = val unless quiet.nil?
    end
  end

  # ANSI color format strings, keyed by log level.
  COLORS = {
    info: "\e[32m%s\e[0m", # green
    warn: "\e[33m%s\e[0m", # yellow
    error: "\e[31m%s\e[0m" # red
  }

  def initialize(_options = nil)
    # Flush output immediately so messages interleave correctly.
    $stdout.sync = $stderr.sync = true
  end

  # Writes the message to $stdout (:info) or $stderr (:warn/:error),
  # colorizing each line when the target stream is a TTY.
  def log(message)
    stream = message.level == :info ? $stdout : $stderr
    lines = message.formatted_lines
    if stream.tty?
      lines = lines.map { |line| COLORS[message.level] % line }
    end
    stream.puts lines
  end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/logger/syslog.rb | lib/backup/logger/syslog.rb | module Backup
class Logger
class Syslog
class Options
##
# Enables logging to the system's Syslog compatible logger.
#
# This may also be enabled using +--syslog+ on the command line.
#
# If +--no-syslog+ is used on the command line, this will be
# disabled and any settings here will be ignored.
#
# @param [Boolean, nil]
# @return [Boolean, nil] Default: +false+
attr_reader :enabled
##
# Specify the identification string to be used with Syslog.
#
# @param [String]
# @return [String] Default: 'backup'
attr_accessor :ident
##
# Specify the options to be used with Syslog.
#
# See the Ruby Standard Library documentation for +Syslog+ for more info.
# http://rdoc.info/stdlib/syslog/Syslog.open
#
# Note that setting this to +nil+ will cause this to default
# to a setting of +Syslog::LOG_PID | Syslog::LOG_CONS+
#
# @param [Integer]
# @return [Integer] Default: +Syslog::LOG_PID+
attr_accessor :options
##
# Specify the facility to be used with Syslog.
#
# See the Ruby Standard Library documentation for +Syslog+ for more info.
# http://rdoc.info/stdlib/syslog/Syslog.open
#
# Note that setting this to +nil+ will cause this to default
# to a setting of +Syslog::LOG_USER+
#
# @param [Integer]
# @return [Integer] Default: +Syslog::LOG_LOCAL0+
attr_accessor :facility
##
# Specify the priority level to be used for +:info+ messages.
#
# See the Ruby Standard Library documentation for +Syslog+ for more info.
# http://rdoc.info/stdlib/syslog/Syslog.log
#
# @param [Integer]
# @return [Integer] Default: +Syslog::LOG_INFO+
attr_accessor :info
##
# Specify the priority level to be used for +:warn+ messages.
#
# See the Ruby Standard Library documentation for +Syslog+ for more info.
# http://rdoc.info/stdlib/syslog/Syslog.log
#
# @param [Integer]
# @return [Integer] Default: +Syslog::LOG_WARNING+
attr_accessor :warn
##
# Specify the priority level to be used for +:error+ messages.
#
# See the Ruby Standard Library documentation for +Syslog+ for more info.
# http://rdoc.info/stdlib/syslog/Syslog.log
#
# @param [Integer]
# @return [Integer] Default: +Syslog::LOG_ERR+
attr_accessor :error
def initialize
@enabled = false
@ident = "backup"
@options = ::Syslog::LOG_PID
@facility = ::Syslog::LOG_LOCAL0
@info = ::Syslog::LOG_INFO
@warn = ::Syslog::LOG_WARNING
@error = ::Syslog::LOG_ERR
end
def enabled?
!!enabled
end
# Guarded setter: assignment is skipped once @enabled is nil.
def enabled=(val)
@enabled = val unless enabled.nil?
end
end
def initialize(options)
@options = options
end
##
# Message lines are sent without formatting (timestamp, level),
# since Syslog will provide it's own timestamp and priority.
#
# The message's :info/:warn/:error level is mapped to the configured
# Syslog priority via the corresponding Options accessor.
def log(message)
level = @options.send(message.level)
::Syslog.open(@options.ident, @options.options, @options.facility) do |s|
# "%s" guards against format directives appearing in the line itself.
message.lines.each { |line| s.log(level, "%s", line) }
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/logger/fog_adapter.rb | lib/backup/logger/fog_adapter.rb | # require only the logger
require "formatador"
require "fog/core/logger"
module Backup
class Logger
module FogAdapter
  # Receives log output from Fog and forwards it to Backup's Logger.
  # Logged as :info so these won't generate warnings. This is mostly
  # to keep STDOUT clean and to provide supplemental messages for our
  # own warnings. These will generally occur during retry attempts.
  def self.write(message)
    Logger.info message.chomp
  end

  # Fog queries this to decide on terminal formatting; never a TTY.
  def self.tty?
    false
  end
end
end
end
Fog::Logger[:warning] = Backup::Logger::FogAdapter
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/cloud_io/base.rb | lib/backup/cloud_io/base.rb | module Backup
module CloudIO
class Error < Backup::Error; end
class FileSizeError < Backup::Error; end
class Base
# Retry policy shared by all CloudIO implementations.
attr_reader :max_retries, :retry_waitsec
def initialize(options = {})
@max_retries = options[:max_retries]
@retry_waitsec = options[:retry_waitsec]
end
private
# Runs the block, retrying up to #max_retries times with a
# #retry_waitsec pause between attempts. Each failed attempt is
# logged with the `operation` label; once retries are exhausted the
# last error is re-raised wrapped in a CloudIO Error.
def with_retries(operation)
retries = 0
begin
yield
rescue => err
retries += 1
raise Error.wrap(err, <<-EOS) if retries > max_retries
Max Retries (#{max_retries}) Exceeded!
Operation: #{operation}
Be sure to check the log messages for each retry attempt.
EOS
Logger.info Error.wrap(err, <<-EOS)
Retry ##{retries} of #{max_retries}
Operation: #{operation}
EOS
sleep(retry_waitsec)
retry
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/cloud_io/s3.rb | lib/backup/cloud_io/s3.rb | require "backup/cloud_io/base"
require "fog"
require "digest/md5"
require "base64"
require "stringio"
module Backup
module CloudIO
class S3 < Base
class Error < Backup::Error; end
MAX_FILE_SIZE = 1024**3 * 5 # 5 GiB
MAX_MULTIPART_SIZE = 1024**4 * 5 # 5 TiB
attr_reader :access_key_id, :secret_access_key, :use_iam_profile,
:region, :bucket, :chunk_size, :encryption, :storage_class,
:fog_options
def initialize(options = {})
super
@access_key_id = options[:access_key_id]
@secret_access_key = options[:secret_access_key]
@use_iam_profile = options[:use_iam_profile]
@region = options[:region]
@bucket = options[:bucket]
@chunk_size = options[:chunk_size]
@encryption = options[:encryption]
@storage_class = options[:storage_class]
@fog_options = options[:fog_options]
end
# The Syncer may call this method in multiple threads.
# However, #objects is always called prior to multithreading.
#
# Uses a multipart upload when the file exceeds #chunk_size (in MiB);
# otherwise a single PUT. Raises FileSizeError when the file exceeds
# the applicable S3 limit.
def upload(src, dest)
file_size = File.size(src)
chunk_bytes = chunk_size * 1024**2
if chunk_bytes > 0 && file_size > chunk_bytes
raise FileSizeError, <<-EOS if file_size > MAX_MULTIPART_SIZE
File Too Large
File: #{src}
Size: #{file_size}
Max Multipart Upload Size is #{MAX_MULTIPART_SIZE} (5 TiB)
EOS
chunk_bytes = adjusted_chunk_bytes(chunk_bytes, file_size)
upload_id = initiate_multipart(dest)
parts = upload_parts(src, dest, upload_id, chunk_bytes, file_size)
complete_multipart(dest, upload_id, parts)
else
raise FileSizeError, <<-EOS if file_size > MAX_FILE_SIZE
File Too Large
File: #{src}
Size: #{file_size}
Max File Size is #{MAX_FILE_SIZE} (5 GiB)
EOS
put_object(src, dest)
end
end
# Returns all objects in the bucket with the given prefix.
#
# - #get_bucket returns a max of 1000 objects per request.
# - Returns objects in alphabetical order.
# - If marker is given, only objects after the marker are in the response.
def objects(prefix)
objects = []
resp = nil
prefix = prefix.chomp("/")
opts = { "prefix" => prefix + "/" }
# Page through results until the response is no longer truncated,
# using the last key seen as the marker for the next page.
while resp.nil? || resp.body["IsTruncated"]
opts["marker"] = objects.last.key unless objects.empty?
with_retries("GET '#{bucket}/#{prefix}/*'") do
resp = connection.get_bucket(bucket, opts)
end
resp.body["Contents"].each do |obj_data|
objects << Object.new(self, obj_data)
end
end
objects
end
# Used by Object to fetch metadata if needed.
def head_object(object)
resp = nil
with_retries("HEAD '#{bucket}/#{object.key}'") do
resp = connection.head_object(bucket, object.key)
end
resp
end
# Delete object(s) from the bucket.
#
# - Called by the Storage (with objects) and the Syncer (with keys)
# - Deletes 1000 objects per request.
# - Missing objects will be ignored.
def delete(objects_or_keys)
keys = Array(objects_or_keys).dup
keys.map!(&:key) if keys.first.is_a?(Object)
opts = { quiet: true } # only report Errors in DeleteResult
until keys.empty?
keys_partial = keys.slice!(0, 1000)
with_retries("DELETE Multiple Objects") do
resp = connection.delete_multiple_objects(bucket, keys_partial, opts.dup)
# With quiet mode, DeleteResult is empty on full success; any
# entries present are per-key failures.
unless resp.body["DeleteResult"].empty?
errors = resp.body["DeleteResult"].map do |result|
error = result["Error"]
"Failed to delete: #{error["Key"]}\n" \
"Reason: #{error["Code"]}: #{error["Message"]}"
end.join("\n")
raise Error, "The server returned the following:\n#{errors}"
end
end
end
end
private
# Memoized Fog::Storage connection; uses IAM profile credentials when
# #use_iam_profile is set, otherwise the configured key pair.
def connection
@connection ||=
begin
opts = { provider: "AWS", region: region }
if use_iam_profile
opts[:use_iam_profile] = true
else
opts[:aws_access_key_id] = access_key_id
opts[:aws_secret_access_key] = secret_access_key
end
opts.merge!(fog_options || {})
conn = Fog::Storage.new(opts)
conn.sync_clock
conn
end
end
# Single-request upload; the Content-MD5 header lets S3 verify the
# transferred bytes.
def put_object(src, dest)
md5 = Base64.encode64(Digest::MD5.file(src).digest).chomp
options = headers.merge("Content-MD5" => md5)
with_retries("PUT '#{bucket}/#{dest}'") do
File.open(src, "r") do |file|
connection.put_object(bucket, dest, file, options)
end
end
end
# Starts a multipart upload and returns the UploadId for the parts.
def initiate_multipart(dest)
Logger.info "\s\sInitiate Multipart '#{bucket}/#{dest}'"
resp = nil
with_retries("POST '#{bucket}/#{dest}' (Initiate)") do
resp = connection.initiate_multipart_upload(bucket, dest, headers)
end
resp.body["UploadId"]
end
# Each part's MD5 is sent to verify the transfer.
# AWS will concatenate all parts into a single object
# once the multipart upload is completed.
#
# Returns the ordered list of part ETags needed to complete the
# upload. Progress is logged at roughly each 10% of parts.
def upload_parts(src, dest, upload_id, chunk_bytes, file_size)
total_parts = (file_size / chunk_bytes.to_f).ceil
progress = (0.1..0.9).step(0.1).map { |n| (total_parts * n).floor }
Logger.info "\s\sUploading #{total_parts} Parts..."
parts = []
File.open(src, "r") do |file|
part_number = 0
while data = file.read(chunk_bytes)
part_number += 1
md5 = Base64.encode64(Digest::MD5.digest(data)).chomp
with_retries("PUT '#{bucket}/#{dest}' Part ##{part_number}") do
resp = connection.upload_part(
bucket, dest, upload_id, part_number, StringIO.new(data),
"Content-MD5" => md5
)
parts << resp.headers["ETag"]
end
if i = progress.rindex(part_number)
Logger.info "\s\s...#{i + 1}0% Complete..."
end
end
end
parts
end
# Completes the multipart upload. A failed completion can still
# return HTTP 200 with an error in the body, hence the body check.
def complete_multipart(dest, upload_id, parts)
Logger.info "\s\sComplete Multipart '#{bucket}/#{dest}'"
with_retries("POST '#{bucket}/#{dest}' (Complete)") do
resp = connection.complete_multipart_upload(bucket, dest, upload_id, parts)
raise Error, <<-EOS if resp.body["Code"]
The server returned the following error:
#{resp.body["Code"]}: #{resp.body["Message"]}
EOS
end
end
# Optional headers for server-side encryption and storage class.
# STANDARD is S3's default, so it is omitted.
def headers
headers = {}
enc = encryption.to_s.upcase
headers["x-amz-server-side-encryption"] = enc unless enc.empty?
sc = storage_class.to_s.upcase
headers["x-amz-storage-class"] = sc unless sc.empty? || sc == "STANDARD"
headers
end
# Grows the chunk size (in whole MiB) until the file fits within
# S3's 10,000-part limit, warning the user about the adjustment.
def adjusted_chunk_bytes(chunk_bytes, file_size)
return chunk_bytes if file_size / chunk_bytes.to_f <= 10_000
mb = orig_mb = chunk_bytes / 1024**2
mb += 1 until file_size / (1024**2 * mb).to_f <= 10_000
Logger.warn Error.new(<<-EOS)
Chunk Size Adjusted
Your original #chunk_size of #{orig_mb} MiB has been adjusted
to #{mb} MiB in order to satisfy the limit of 10,000 chunks.
To enforce your chosen #chunk_size, you should use the Splitter.
e.g. split_into_chunks_of #{mb * 10_000} (#chunk_size * 10_000)
EOS
1024**2 * mb
end
# Lightweight wrapper for a bucket listing entry; fetches HEAD
# metadata lazily via the owning CloudIO instance.
class Object
attr_reader :key, :etag, :storage_class
def initialize(cloud_io, data)
@cloud_io = cloud_io
@key = data["Key"]
@etag = data["ETag"]
@storage_class = data["StorageClass"]
end
# currently 'AES256' or nil
def encryption
metadata["x-amz-server-side-encryption"]
end
private
def metadata
@metadata ||= @cloud_io.head_object(self).headers
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/cloud_io/cloud_files.rb | lib/backup/cloud_io/cloud_files.rb | require "backup/cloud_io/base"
require "fog"
require "digest/md5"
module Backup
  module CloudIO
    # Cloud I/O driver for Rackspace Cloud Files (OpenStack Swift).
    # Handles plain object uploads as well as Static Large Object (SLO)
    # uploads for files over the configured segment size.
    class CloudFiles < Base
      class Error < Backup::Error; end

      MAX_FILE_SIZE = 1024**3 * 5 # 5 GiB
      MAX_SLO_SIZE = 1024**3 * 5000 # 1000 segments @ 5 GiB
      SEGMENT_BUFFER = 1024**2 # 1 MiB

      attr_reader :username, :api_key, :auth_url, :region, :servicenet,
        :container, :segments_container, :segment_size, :days_to_keep,
        :fog_options

      def initialize(options = {})
        super
        @username = options[:username]
        @api_key = options[:api_key]
        @auth_url = options[:auth_url]
        @region = options[:region]
        @servicenet = options[:servicenet]
        @container = options[:container]
        @segments_container = options[:segments_container]
        @segment_size = options[:segment_size]
        @days_to_keep = options[:days_to_keep]
        @fog_options = options[:fog_options]
      end

      # The Syncer may call this method in multiple threads,
      # but #objects is always called before this occurs.
      #
      # Files larger than the configured segment size are uploaded as an
      # SLO (segments + manifest); everything else is a single PUT.
      # Raises FileSizeError when the file exceeds the applicable limit.
      def upload(src, dest)
        create_containers
        file_size = File.size(src)
        segment_bytes = segment_size * 1024**2
        if segment_bytes > 0 && file_size > segment_bytes
          raise FileSizeError, <<-EOS if file_size > MAX_SLO_SIZE
            File Too Large
            File: #{src}
            Size: #{file_size}
            Max SLO Size is #{MAX_SLO_SIZE} (5 GiB * 1000 segments)
          EOS

          segment_bytes = adjusted_segment_bytes(segment_bytes, file_size)
          segments = upload_segments(src, dest, segment_bytes, file_size)
          upload_manifest(dest, segments)
        else
          raise FileSizeError, <<-EOS if file_size > MAX_FILE_SIZE
            File Too Large
            File: #{src}
            Size: #{file_size}
            Max File Size is #{MAX_FILE_SIZE} (5 GiB)
          EOS

          put_object(src, dest)
        end
      end

      # Returns all objects in the container with the given prefix.
      #
      # - #get_container returns a max of 10000 objects per request.
      # - Returns objects sorted using a sqlite binary collating function.
      # - If marker is given, only objects after the marker are in the response.
      def objects(prefix)
        objects = []
        resp = nil
        prefix = prefix.chomp("/")
        opts = { prefix: prefix + "/" }

        create_containers

        # A full page (10,000 entries) means there may be more; keep
        # paging with the last-seen name as the marker.
        while resp.nil? || resp.body.count == 10_000
          opts[:marker] = objects.last.name unless objects.empty?
          with_retries("GET '#{container}/#{prefix}/*'") do
            resp = connection.get_container(container, opts)
          end
          resp.body.each do |obj_data|
            objects << Object.new(self, obj_data)
          end
        end

        objects
      end

      # Used by Object to fetch metadata if needed.
      def head_object(object)
        resp = nil
        with_retries("HEAD '#{container}/#{object.name}'") do
          resp = connection.head_object(container, object.name)
        end
        resp
      end

      # Delete non-SLO object(s) from the container.
      #
      # - Called by the Storage (with objects) and the Syncer (with names)
      # - Deletes 10,000 objects per request.
      # - Missing objects will be ignored.
      def delete(objects_or_names)
        names = Array(objects_or_names).dup
        names.map!(&:name) if names.first.is_a?(Object)

        until names.empty?
          names_partial = names.slice!(0, 10_000)
          with_retries("DELETE Multiple Objects") do
            resp = connection.delete_multiple_objects(container, names_partial)
            resp_status = resp.body["Response Status"]
            raise Error, <<-EOS unless resp_status == "200 OK"
              #{resp_status}
              The server returned the following:
              #{resp.body.inspect}
            EOS
          end
        end
      end

      # Delete an SLO object(s) from the container.
      #
      # - Used only by the Storage. The Syncer cannot use SLOs.
      # - Removes the SLO manifest object and all associated segments.
      # - Missing segments will be ignored.
      def delete_slo(objects)
        Array(objects).each do |object|
          with_retries("DELETE SLO Manifest '#{container}/#{object.name}'") do
            resp = connection.delete_static_large_object(container, object.name)
            resp_status = resp.body["Response Status"]
            raise Error, <<-EOS unless resp_status == "200 OK"
              #{resp_status}
              The server returned the following:
              #{resp.body.inspect}
            EOS
          end
        end
      end

      private

      # Lazily builds the Fog Rackspace storage connection, merging any
      # user-supplied #fog_options over the standard credentials.
      def connection
        @connection ||= Fog::Storage.new({
          provider: "Rackspace",
          rackspace_username: username,
          rackspace_api_key: api_key,
          rackspace_auth_url: auth_url,
          rackspace_region: region,
          rackspace_servicenet: servicenet
        }.merge(fog_options || {}))
      end

      # Creates the container (and segments container, if configured)
      # at most once per instance.
      def create_containers
        return if @containers_created
        @containers_created = true

        with_retries("Create Containers") do
          connection.put_container(container)
          connection.put_container(segments_container) if segments_container
        end
      end

      # Uploads a single (non-SLO) object, sending the file's MD5 as the
      # ETag so the server can verify the transfer.
      def put_object(src, dest)
        opts = headers.merge("ETag" => Digest::MD5.file(src).hexdigest)
        with_retries("PUT '#{container}/#{dest}'") do
          File.open(src, "r") do |file|
            connection.put_object(container, dest, file, opts)
          end
        end
      end

      # Each segment is uploaded using chunked transfer encoding using
      # SEGMENT_BUFFER, and each segment's MD5 is sent to verify the transfer.
      # Each segment's MD5 and byte_size will also be verified when the
      # SLO manifest object is uploaded.
      def upload_segments(src, dest, segment_bytes, file_size)
        total_segments = (file_size / segment_bytes.to_f).ceil
        progress = (0.1..0.9).step(0.1).map { |n| (total_segments * n).floor }
        Logger.info "\s\sUploading #{total_segments} SLO Segments..."

        segments = []
        File.open(src, "r") do |file|
          segment_number = 0
          until file.eof?
            segment_number += 1
            object = "#{dest}/#{segment_number.to_s.rjust(4, "0")}"
            pos = file.pos
            md5 = segment_md5(file, segment_bytes)
            opts = headers.merge("ETag" => md5)

            with_retries("PUT '#{segments_container}/#{object}'") do
              # Rewind to the segment start; a retry must resend the
              # same bytes that were hashed above.
              file.seek(pos)
              offset = 0
              connection.put_object(segments_container, object, nil, opts) do
                # block is called to stream data until it returns ''
                data = ""
                if offset <= segment_bytes - SEGMENT_BUFFER
                  data = file.read(SEGMENT_BUFFER).to_s # nil => ''
                  offset += data.size
                end
                data
              end
            end

            segments << {
              path: "#{segments_container}/#{object}",
              etag: md5,
              size_bytes: file.pos - pos
            }

            if i = progress.rindex(segment_number)
              Logger.info "\s\s...#{i + 1}0% Complete..."
            end
          end
        end

        segments
      end

      # Reads the next segment from +file+ (advancing its position) and
      # returns the segment's MD5 hex digest.
      def segment_md5(file, segment_bytes)
        md5 = Digest::MD5.new
        offset = 0
        while offset <= segment_bytes - SEGMENT_BUFFER
          data = file.read(SEGMENT_BUFFER)
          break unless data
          offset += data.size
          md5 << data
        end
        md5.hexdigest
      end

      # Each segment's ETag and byte_size will be verified once uploaded.
      # Request will raise an exception if verification fails or segments
      # are not found. However, each segment's ETag was verified when we
      # uploaded the segments, so this should only retry failed requests.
      def upload_manifest(dest, segments)
        Logger.info "\s\sStoring SLO Manifest '#{container}/#{dest}'"

        with_retries("PUT SLO Manifest '#{container}/#{dest}'") do
          connection.put_static_obj_manifest(container, dest, segments, headers)
        end
      end

      # If :days_to_keep was set, each object will be scheduled for deletion.
      # This includes non-SLO objects, the SLO manifest and all segments.
      def headers
        headers = {}
        headers["X-Delete-At"] = delete_at if delete_at
        headers
      end

      # Unix timestamp at which uploaded objects should expire,
      # or nil when no retention was configured. Memoized so every
      # object from one backup run shares the same expiry.
      def delete_at
        return unless days_to_keep
        @delete_at ||= (Time.now.utc + days_to_keep * 60**2 * 24).to_i
      end

      # Grows the segment size (in whole MiB) until no more than 1000
      # segments are required, warning when the user's configured
      # #segment_size had to be overridden.
      def adjusted_segment_bytes(segment_bytes, file_size)
        return segment_bytes if file_size / segment_bytes.to_f <= 1000

        mb = orig_mb = segment_bytes / 1024**2
        mb += 1 until file_size / (1024**2 * mb).to_f <= 1000
        Logger.warn Error.new(<<-EOS)
          Segment Size Adjusted
          Your original #segment_size of #{orig_mb} MiB has been adjusted
          to #{mb} MiB in order to satisfy the limit of 1000 segments.
          To enforce your chosen #segment_size, you should use the Splitter.
          e.g. split_into_chunks_of #{mb * 1000} (#segment_size * 1000)
        EOS
        1024**2 * mb
      end

      # Lightweight wrapper around one entry of a container listing.
      # Per-object metadata is fetched lazily via a HEAD request.
      class Object
        attr_reader :name, :hash

        # +data+ is one object Hash from the container-listing response.
        def initialize(cloud_io, data)
          @cloud_io = cloud_io
          @name = data["name"]
          @hash = data["hash"]
        end

        # True when this object is a Static Large Object manifest.
        def slo?
          !!metadata["X-Static-Large-Object"]
        end

        # True when the object already has an expiry scheduled.
        def marked_for_deletion?
          !!metadata["X-Delete-At"]
        end

        private

        # HEAD-response headers for this object, fetched once and memoized.
        def metadata
          @metadata ||= @cloud_io.head_object(self).headers
        end
      end
    end
  end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/compressor/bzip2.rb | lib/backup/compressor/bzip2.rb | module Backup
module Compressor
  class Bzip2 < Base
    ##
    # Compression level to pass to bzip2.
    #
    # Accepts a single digit from 1 to 9. When left unset, no level
    # flag is passed and bzip2 applies its own default (9). Extreme
    # values may not behave as expected -- consult the bzip2 docs.
    attr_accessor :level

    ##
    # Builds a new Bzip2 compressor, applying configured defaults,
    # evaluating any configuration block, and resolving the command
    # line and file extension up front.
    def initialize(&block)
      load_defaults!
      @level ||= false
      instance_eval(&block) if block_given?
      @cmd = "#{utility(:bzip2)}#{options}"
      @ext = ".bz2"
    end

    private

    # The level flag (e.g. " -9"), or nil when no level was configured.
    def options
      return unless @level
      " -#{@level}"
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/compressor/base.rb | lib/backup/compressor/base.rb | module Backup
module Compressor
  class Base
    include Utilities::Helpers
    include Config::Helpers

    ##
    # Logs which compressor is in use, then hands the compressor
    # command line and filename extension to the caller's block.
    def compress_with
      log!
      yield @cmd, @ext
    end

    private

    ##
    # This compressor's class name with the Backup namespace stripped.
    def compressor_name
      self.class.name.sub("Backup::", "")
    end

    ##
    # Writes an informational message (console and log file) announcing
    # the compressor, its resolved command line and extension.
    def log!
      message = "Using #{compressor_name} for compression.\n" \
        " Command: '#{@cmd}'\n" \
        " Ext: '#{@ext}'"
      Logger.info message
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/compressor/gzip.rb | lib/backup/compressor/gzip.rb | module Backup
module Compressor
  class Gzip < Base
    class Error < Backup::Error; end
    extend Utilities::Helpers

    ##
    # Compression level to pass to gzip.
    #
    # Accepts a single digit from 1 to 9. When left unset, no level
    # flag is passed and gzip applies its own default (6). Extreme
    # values may not behave as expected -- consult the gzip docs.
    attr_accessor :level

    ##
    # Pass `--rsyncable` to `gzip`.
    #
    # Produces output that `rsync` can delta-transfer efficiently --
    # useful when compressed Archives/Databases are stored via the
    # RSync Storage. The flag only exists on patched gzip builds; if
    # the local gzip lacks it, a warning is logged and the option is
    # silently dropped from the command line.
    attr_accessor :rsyncable

    ##
    # Determine if +--rsyncable+ is supported and cache the result.
    def self.has_rsyncable?
      return @has_rsyncable unless @has_rsyncable.nil?
      cmd = "#{utility(:gzip)} --rsyncable --version >/dev/null 2>&1; echo $?"
      @has_rsyncable = `#{cmd}`.chomp == "0"
    end

    ##
    # Builds a new Gzip compressor, applying configured defaults,
    # evaluating any configuration block, and resolving the command
    # line and file extension up front.
    def initialize(&block)
      load_defaults!
      @level ||= false
      @rsyncable ||= false
      instance_eval(&block) if block_given?
      @cmd = "#{utility(:gzip)}#{options}"
      @ext = ".gz"
    end

    private

    # Assembles gzip's flags from the configured settings, warning
    # (rather than failing) when --rsyncable is unavailable.
    def options
      flags = []
      flags << " -#{@level}" if @level
      if @rsyncable
        if self.class.has_rsyncable?
          flags << " --rsyncable"
        else
          Logger.warn Error.new(<<-EOS)
            'rsyncable' option ignored.
            Your system's 'gzip' does not support the `--rsyncable` option.
          EOS
        end
      end
      flags.join
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/compressor/custom.rb | lib/backup/compressor/custom.rb | module Backup
module Compressor
  class Custom < Base
    ##
    # System command (with any command-line arguments) used to invoke
    # the compressor. e.g. @compressor.command = 'pbzip2 -p2 -4'
    #
    # The data to compress is piped to the command's STDIN and the
    # compressed output is read from its STDOUT.
    # i.e. `cat file.tar | %command% > file.tar.%extension%`
    attr_accessor :command

    ##
    # Extension appended to the compressed file's name.
    # e.g. @compressor.extension = '.bz2'
    attr_accessor :extension

    ##
    # Builds a new custom compressor, resolving the full command path
    # and the cleaned-up extension up front.
    def initialize(&block)
      load_defaults!
      instance_eval(&block) if block_given?
      @cmd = set_cmd
      @ext = set_ext
    end

    private

    ##
    # The configured command line with its first word expanded to the
    # utility's full path via the utility helper.
    def set_cmd
      words = @command.to_s.split(" ")
      words[0] = utility(words[0])
      words.join(" ")
    end

    ##
    # The configured extension stripped of surrounding whitespace,
    # or an empty string when no extension was set.
    def set_ext
      @extension.to_s.strip
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/config/helpers.rb | lib/backup/config/helpers.rb | require "ostruct"
module Backup
  module Config
    # Mixin providing per-class configurable defaults and attribute
    # deprecation support for Databases, Storages, Notifiers, etc.
    module Helpers
      def self.included(klass)
        klass.extend ClassMethods
      end

      module ClassMethods
        # The class-level store of pre-configured default values.
        # Yields the store when a block is given (configuration DSL),
        # otherwise returns it.
        def defaults
          @defaults ||= Config::Defaults.new
          if block_given?
            yield @defaults
          else
            @defaults
          end
        end

        # Used only within the specs
        def clear_defaults!
          defaults.reset!
        end

        # Registry of deprecated attribute names for this class,
        # keyed by attribute symbol. See #attr_deprecate.
        def deprecations
          @deprecations ||= {}
        end

        # Logs a structured deprecation warning for +name+ using the
        # :version and optional :message from the deprecation entry.
        def log_deprecation_warning(name, deprecation)
          msg = "#{self}##{name} has been deprecated as of " \
            "backup v.#{deprecation[:version]}"
          msg << "\n#{deprecation[:message]}" if deprecation[:message]
          Logger.warn Config::Error.new(<<-EOS)
            [DEPRECATION WARNING]
            #{msg}
          EOS
        end

        protected

        ##
        # Method to deprecate an attribute.
        #
        # :version
        #   Must be set to the backup version which will first
        #   introduce the deprecation.
        #
        # :action
        #   If set, this Proc will be called with a reference to the
        #   class instance and the value set on the deprecated accessor.
        #   e.g. deprecation[:action].call(klass, value)
        #   This should perform whatever action is neccessary, such as
        #   transferring the value to a new accessor.
        #
        # :message
        #   If set, this will be appended to #log_deprecation_warning
        #
        # Note that this replaces the `attr_accessor` method, or other
        # method previously used to set the accessor being deprecated.
        # #method_missing will handle any calls to `name=`.
        #
        def attr_deprecate(name, args = {})
          deprecations[name] = {
            version: nil,
            message: nil,
            action: nil
          }.merge(args)
        end
      end # ClassMethods

      private

      ##
      # Sets any pre-configured default values.
      # If a default value was set for an invalid accessor,
      # this will raise a NameError.
      def load_defaults!
        self.class.defaults._attributes.each do |name|
          val = self.class.defaults.send(name)
          # dup so instances don't mutate the shared default;
          # rescue covers undup-able values (numbers, symbols, nil...)
          val = val.dup rescue val
          send(:"#{ name }=", val)
        end
      end

      ##
      # Check missing methods for deprecated attribute accessors.
      #
      # If a value is set on an accessor that has been deprecated
      # using #attr_deprecate, a warning will be issued and any
      # :action (Proc) specified will be called with a reference to
      # the class instance and the value set on the deprecated accessor.
      # See #attr_deprecate and #log_deprecation_warning
      #
      # Note that OpenStruct (used for setting defaults) does not allow
      # multiple arguments when assigning values for members.
      # So, we won't allow it here either, even though an attr_accessor
      # will accept and convert them into an Array. Therefore, setting
      # an option value using multiple values, whether as a default or
      # directly on the class' accessor, should not be supported.
      # i.e. if an option will accept being set as an Array, then it
      # should be explicitly set as such. e.g. option = [val1, val2]
      #
      def method_missing(name, *args)
        deprecation = nil
        # Only setter-style calls ("foo=") are candidates for the
        # deprecation machinery; anything else falls through to super.
        if method = name.to_s.chomp!("=")
          if (len = args.count) != 1
            raise ArgumentError,
              "wrong number of arguments (#{len} for 1)", caller(1)
          end
          deprecation = self.class.deprecations[method.to_sym]
        end

        if deprecation
          self.class.log_deprecation_warning(method, deprecation)
          deprecation[:action].call(self, args[0]) if deprecation[:action]
        else
          super
        end
      end
    end # Helpers

    # Store for pre-configured defaults.
    class Defaults < OpenStruct
      # Returns an Array of all attribute method names
      # that default values were set for.
      def _attributes
        @table.keys
      end

      # Used only within the specs
      def reset!
        @table.clear
      end
    end
  end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/config/dsl.rb | lib/backup/config/dsl.rb | module Backup
module Config
  # Context for loading user config.rb and model files.
  class DSL
    class Error < Backup::Error; end
    # Expose Backup::Model inside the DSL namespace.
    Model = Backup::Model

    class << self
      private

      # List the available database, storage, syncer, compressor, encryptor
      # and notifier constants. These are used to define constant names within
      # Backup::Config::DSL so that users may use a constant instead of a string.
      # Nested namespaces are represented using Hashs. Deep nesting supported.
      #
      # Example, instead of:
      #   database "MySQL" do |mysql|
      #   sync_with "RSync::Local" do |rsync|
      #
      # You can do:
      #   database MySQL do |mysql|
      #   sync_with RSync::Local do |rsync|
      #
      def add_dsl_constants
        create_modules(
          DSL,
          [ # Databases
            ["MySQL", "PostgreSQL", "MongoDB", "Redis", "Riak", "OpenLDAP", "SQLite"],
            # Storages
            ["S3", "CloudFiles", "Dropbox", "FTP",
             "SFTP", "SCP", "RSync", "Local", "Qiniu"],
            # Compressors
            ["Gzip", "Bzip2", "Custom"],
            # Encryptors
            ["OpenSSL", "GPG"],
            # Syncers
            [
              { "Cloud" => ["CloudFiles", "S3"] },
              { "RSync" => ["Push", "Pull", "Local"] }
            ],
            # Notifiers
            ["Mail", "Twitter", "Campfire", "Prowl",
             "Hipchat", "PagerDuty", "Pushover", "HttpPost", "Nagios",
             "Slack", "FlowDock", "Zabbix", "Ses", "DataDog", "Command"]
          ]
        )
      end

      # Recursively defines empty placeholder modules under +scope+.
      # Hash entries create a nested namespace; plain strings create a
      # single module.
      def create_modules(scope, names)
        names.flatten.each do |name|
          if name.is_a?(Hash)
            name.each do |key, val|
              create_modules(get_or_create_empty_module(scope, key), [val])
            end
          else
            get_or_create_empty_module(scope, name)
          end
        end
      end

      # Returns scope::const, defining it as an empty Module first
      # if it does not already exist.
      def get_or_create_empty_module(scope, const)
        if scope.const_defined?(const)
          scope.const_get(const)
        else
          scope.const_set(const, Module.new)
        end
      end
    end

    add_dsl_constants # add constants on load

    attr_reader :_config_options

    def initialize
      @_config_options = {}
    end

    # Allow users to set command line path options in config.rb
    [:root_path, :data_path, :tmp_path].each do |name|
      define_method name do |path|
        _config_options[name] = path
      end
    end

    # Allows users to create preconfigured models.
    # +name+ must be a capitalized String not already in use as a
    # constant under this DSL; it becomes a Model subclass.
    def preconfigure(name, &block)
      unless name.is_a?(String) && name =~ /^[A-Z]/
        raise Error, "Preconfigured model names must be given as a string " \
          "and start with a capital letter."
      end

      if DSL.const_defined?(name)
        raise Error, "'#{name}' is already in use " \
          "and can not be used for a preconfigured model."
      end

      DSL.const_set(name, Class.new(Model))
      DSL.const_get(name).preconfigure(&block)
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/mysql.rb | lib/backup/database/mysql.rb | module Backup
module Database
  # Dumps a MySQL database using either `mysqldump` (textual SQL) or
  # Percona's `innobackupex` (binary backup, tar'ed).
  class MySQL < Base
    class Error < Backup::Error; end

    ##
    # Name of the database that needs to get dumped
    # To dump all databases, set this to `:all` or leave blank.
    attr_accessor :name

    ##
    # Credentials for the specified database
    attr_accessor :username, :password

    ##
    # Connectivity options
    attr_accessor :host, :port, :socket

    ##
    # Tables to skip while dumping the database
    #
    # If `name` is set to :all (or not specified), these must include
    # a database name. e.g. 'name.table'.
    # If `name` is given, these may simply be table names.
    attr_accessor :skip_tables

    ##
    # Tables to dump. This in only valid if `name` is specified.
    # If none are given, the entire database will be dumped.
    attr_accessor :only_tables

    ##
    # Additional "mysqldump" or "innobackupex (backup creation)" options
    attr_accessor :additional_options

    ##
    # Additional innobackupex log preparation phase ("apply-logs") options
    attr_accessor :prepare_options

    ##
    # Default is :mysqldump (which is built in MySQL and generates
    # a textual SQL file), but can be changed to :innobackupex, which
    # has more feasible restore times for large databases.
    # See: http://www.percona.com/doc/percona-xtrabackup/
    attr_accessor :backup_engine

    ##
    # If true (which is the default behaviour), the backup will be prepared
    # after it has been successfuly created. This option is only valid if
    # :backup_engine is set to :innobackupex.
    attr_accessor :prepare_backup

    ##
    # If set the backup engine command block is executed as the given user
    attr_accessor :sudo_user

    ##
    # If set, do not suppress innobackupdb output (useful for debugging)
    attr_accessor :verbose

    def initialize(model, database_id = nil, &block)
      super
      instance_eval(&block) if block_given?

      @name ||= :all
      @backup_engine ||= :mysqldump
      @prepare_backup = true if @prepare_backup.nil?
    end

    ##
    # Performs the mysqldump or innobackupex command and outputs
    # the dump file in the +dump_path+ using +dump_filename+.
    #
    #   <trigger>/databases/MySQL[-<database_id>].[sql|tar][.gz]
    def perform!
      super

      pipeline = Pipeline.new
      dump_ext = sql_backup? ? "sql" : "tar"

      pipeline << sudo_option(sql_backup? ? mysqldump : innobackupex)

      if model.compressor
        model.compressor.compress_with do |command, ext|
          pipeline << command
          dump_ext << ext
        end
      end

      pipeline << "#{utility(:cat)} > " \
        "'#{File.join(dump_path, dump_filename)}.#{dump_ext}'"

      pipeline.run
      if pipeline.success?
        log!(:finished)
      else
        raise Error, "Dump Failed!\n#{pipeline.error_messages}"
      end
    end

    private

    # Full `mysqldump` command line assembled from the configured options.
    def mysqldump
      "#{utility(:mysqldump)} #{user_options} #{credential_options} " \
        "#{connectivity_options} #{name_option} " \
        "#{tables_to_dump} #{tables_to_skip}"
    end

    # --user/--password flags; values are shell-escaped.
    def credential_options
      opts = []
      opts << "--user=#{Shellwords.escape(username)}" if username
      opts << "--password=#{Shellwords.escape(password)}" if password
      opts.join(" ")
    end

    # --socket wins over --host/--port when both are configured.
    def connectivity_options
      return "--socket='#{socket}'" if socket

      opts = []
      opts << "--host='#{host}'" if host
      opts << "--port='#{port}'" if port
      opts.join(" ")
    end

    # User-supplied options for the dump/backup creation phase.
    def user_options
      Array(additional_options).join(" ")
    end

    # User-supplied options for innobackupex's apply-log phase.
    def user_prepare_options
      Array(prepare_options).join(" ")
    end

    def name_option
      dump_all? ? "--all-databases" : name
    end

    def tables_to_dump
      Array(only_tables).join(" ") unless dump_all?
    end

    # Qualifies bare table names with the database name unless dumping
    # all databases (in which case they must already be qualified).
    def tables_to_skip
      Array(skip_tables).map do |table|
        table = dump_all? || table["."] ? table : "#{name}.#{table}"
        "--ignore-table='#{table}'"
      end.join(" ")
    end

    def dump_all?
      name == :all
    end

    def sql_backup?
      backup_engine.to_sym == :mysqldump
    end

    # Full innobackupex command: create the backup, optionally prepare
    # it, then stream the result as a tar archive on stdout.
    def innobackupex
      # Creation phase
      "#{utility(:innobackupex)} #{credential_options} " \
        "#{connectivity_options} #{user_options} " \
        "--no-timestamp #{temp_dir} #{quiet_option} && " +
        innobackupex_prepare +
        # Move files to tar-ed stream on stdout
        "#{utility(:tar)} --remove-files -cf - " \
        "-C #{File.dirname(temp_dir)} #{File.basename(temp_dir)}"
    end

    # The apply-log phase command (trailing '&&' chains it into
    # #innobackupex); empty when prepare_backup is disabled.
    def innobackupex_prepare
      return "" unless @prepare_backup
      # Log applying phase (prepare for restore)
      "#{utility(:innobackupex)} --apply-log #{temp_dir} " \
        "#{user_prepare_options} #{quiet_option} && "
    end

    # Wraps the command in a sudo heredoc when a sudo_user is configured.
    def sudo_option(command_block)
      return command_block unless sudo_user

      "sudo -s -u #{sudo_user} -- <<END_OF_SUDO\n" \
        "#{command_block}\n" \
        "END_OF_SUDO\n"
    end

    # Discards stderr unless verbose output was requested.
    def quiet_option
      verbose ? "" : " 2> /dev/null "
    end

    # Working directory for the innobackupex output prior to tar'ing.
    def temp_dir
      File.join(dump_path, "#{dump_filename}.bkpdir")
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/openldap.rb | lib/backup/database/openldap.rb | module Backup
module Database
  # Dumps an OpenLDAP directory to LDIF using `slapcat`.
  class OpenLDAP < Base
    class Error < Backup::Error; end

    ##
    # Name of the ldap backup
    attr_accessor :name

    ##
    # run slapcat under sudo if needed
    # make sure to set SUID on a file, to let you run the file with permissions of file owner
    # eg. sudo chmod u+s /usr/sbin/slapcat
    attr_accessor :use_sudo

    ##
    # Stores the location of the slapd.conf or slapcat confdir
    attr_accessor :slapcat_conf

    ##
    # Additional slapcat options
    attr_accessor :slapcat_args

    ##
    # Path to slapcat utility (optional)
    attr_accessor :slapcat_utility

    ##
    # Takes the name of the archive and the configuration block
    def initialize(model, database_id = nil, &block)
      super
      instance_eval(&block) if block_given?

      @name ||= "ldap_backup"
      @use_sudo ||= false
      @slapcat_args ||= []
      @slapcat_utility ||= utility(:slapcat)
      @slapcat_conf ||= "/etc/ldap/slapd.d"
    end

    ##
    # Performs the slapcat command and outputs the
    # data to the specified path based on the 'trigger',
    # optionally piping through the model's compressor.
    def perform!
      super

      pipeline = Pipeline.new
      dump_ext = "ldif"

      pipeline << slapcat

      if @model.compressor
        @model.compressor.compress_with do |command, ext|
          pipeline << command
          dump_ext << ext
        end
      end

      pipeline << "#{utility(:cat)} > " \
        "'#{File.join(dump_path, dump_filename)}.#{dump_ext}'"

      pipeline.run
      if pipeline.success?
        log!(:finished)
      else
        raise Error, "Dump Failed!\n" + pipeline.error_messages
      end
    end

    private

    ##
    # Builds the full slapcat string based on all attributes
    def slapcat
      command = "#{slapcat_utility} #{slapcat_conf_option} #{slapcat_conf} #{user_options}"
      command.prepend("sudo ") if use_sudo
      command
    end

    ##
    # Uses different slapcat switch depending on confdir or conffile set
    # (-F for a slapd.d config directory, -f for a slapd.conf file).
    def slapcat_conf_option
      @slapcat_conf.include?(".d") ? "-F" : "-f"
    end

    ##
    # Builds a compatible string for the additional options
    # specified by the user
    def user_options
      slapcat_args.join(" ")
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/riak.rb | lib/backup/database/riak.rb | module Backup
module Database
  # Dumps a Riak node using `riak-admin backup`.
  class Riak < Base
    ##
    # Node is the node from which to perform the backup.
    # Default: riak@127.0.0.1
    attr_accessor :node

    ##
    # Cookie is the Erlang cookie/shared secret used to connect to the node.
    # Default: riak
    attr_accessor :cookie

    ##
    # Username for the riak instance
    # Default: riak
    attr_accessor :user

    def initialize(model, database_id = nil, &block)
      super
      instance_eval(&block) if block_given?

      @node ||= "riak@127.0.0.1"
      @cookie ||= "riak"
      @user ||= "riak"
    end

    ##
    # Performs the dump using `riak-admin backup`.
    #
    # This will be stored in the final backup package as
    #   <trigger>/databases/<dump_filename>-<node>[.gz]
    def perform!
      super
      dump_file = File.join(dump_path, dump_filename)

      with_riak_owned_dump_path do
        run("#{riakadmin} backup #{node} #{cookie} '#{dump_file}' node")
      end

      if model.compressor
        model.compressor.compress_with do |command, ext|
          dump_file << "-#{node}" # `riak-admin` appends `node` to the filename.
          run("#{command} -c '#{dump_file}' > '#{dump_file + ext}'")
          FileUtils.rm_f(dump_file)
        end
      end

      log!(:finished)
    end

    private

    ##
    # The `riak-admin backup` command is run as the riak +user+,
    # so +user+ must have write priviledges to the +dump_path+.
    #
    # Note that the riak +user+ must also have access to +dump_path+.
    # This means Backup's +tmp_path+ can not be under the home directory of
    # the user running Backup, since the absence of the execute bit on their
    # home directory would deny +user+ access.
    def with_riak_owned_dump_path
      run "#{utility(:sudo)} -n #{utility(:chown)} #{user} '#{dump_path}'"
      yield
    ensure
      # reclaim ownership
      run "#{utility(:sudo)} -n #{utility(:chown)} -R " \
        "#{Config.user} '#{dump_path}'"
    end

    ##
    # `riak-admin` must be run as the riak +user+.
    # It will do this itself, but without `-n` and emits a message on STDERR.
    def riakadmin
      "#{utility(:sudo)} -n -u #{user} #{utility("riak-admin")}"
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/redis.rb | lib/backup/database/redis.rb | module Backup
module Database
  # Backs up a Redis database by copying its RDB snapshot file or by
  # streaming a dump via `redis-cli --rdb -`.
  class Redis < Base
    class Error < Backup::Error; end

    MODES = [:copy, :sync]

    ##
    # Mode of operation.
    #
    # [:copy]
    #   Copies the redis dump file specified by {#rdb_path}.
    #   This data will be current as of the last RDB Snapshot
    #   performed by the server (per your redis.conf settings).
    #   You may set {#invoke_save} to +true+ to have Backup issue
    #   a +SAVE+ command to update the dump file with the current
    #   data before performing the copy.
    #
    # [:sync]
    #   Performs a dump of your redis data using +redis-cli --rdb -+.
    #   Redis implements this internally using a +SYNC+ command.
    #   The operation is analogous to requesting a +BGSAVE+, then having the
    #   dump returned. This mode is capable of dumping data from a local or
    #   remote server. Requires Redis v2.6 or better.
    #
    # Defaults to +:copy+.
    attr_accessor :mode

    ##
    # Full path to the redis dump file.
    #
    # Required when {#mode} is +:copy+.
    attr_accessor :rdb_path

    ##
    # Perform a +SAVE+ command using the +redis-cli+ utility
    # before copying the dump file specified by {#rdb_path}.
    #
    # Only valid when {#mode} is +:copy+.
    attr_accessor :invoke_save

    ##
    # Connectivity options for the +redis-cli+ utility.
    attr_accessor :host, :port, :socket

    ##
    # Password for the +redis-cli+ utility.
    attr_accessor :password

    ##
    # Additional options for the +redis-cli+ utility.
    attr_accessor :additional_options

    def initialize(model, database_id = nil, &block)
      super
      instance_eval(&block) if block_given?

      @mode ||= :copy

      raise Error, "'#{mode}' is not a valid mode" unless MODES.include?(mode)

      if mode == :copy && rdb_path.nil?
        raise Error, "`rdb_path` must be set when `mode` is :copy"
      end
    end

    ##
    # Performs the dump based on {#mode} and stores the Redis dump file
    # to the +dump_path+ using the +dump_filename+.
    #
    #   <trigger>/databases/Redis[-<database_id>].rdb[.gz]
    def perform!
      super

      case mode
      when :sync
        # messages output by `redis-cli --rdb` on $stderr
        Logger.configure do
          ignore_warning(/Transfer finished with success/)
          ignore_warning(/SYNC sent to master/)
        end
        sync!
      when :copy
        save! if invoke_save
        copy!
      end

      log!(:finished)
    end

    private

    # Streams `redis-cli --rdb -` output (optionally compressed)
    # into the dump file.
    def sync!
      pipeline = Pipeline.new
      dump_ext = "rdb"

      pipeline << "#{redis_cli_cmd} --rdb -"

      if model.compressor
        model.compressor.compress_with do |command, ext|
          pipeline << command
          dump_ext << ext
        end
      end

      pipeline << "#{utility(:cat)} > " \
        "'#{File.join(dump_path, dump_filename)}.#{dump_ext}'"

      pipeline.run
      unless pipeline.success?
        raise Error, "Dump Failed!\n" + pipeline.error_messages
      end
    end

    # Issues a SAVE command so the RDB file reflects current data.
    # Retries up to 4 more times (5s apart) if a save is already in
    # progress; the counter is a String incremented with #next!.
    def save!
      resp = run("#{redis_cli_cmd} SAVE")
      unless resp =~ /OK$/
        raise Error, <<-EOS
          Failed to invoke the `SAVE` command
          Response was: #{resp}
        EOS
      end
    rescue Error
      if resp =~ /save already in progress/
        unless (attempts ||= "0").next! == "5"
          sleep 5
          retry
        end
      end
      raise
    end

    # Copies (or compresses) the RDB file at #rdb_path into dump_path.
    def copy!
      unless File.exist?(rdb_path)
        raise Error, <<-EOS
          Redis database dump not found
          `rdb_path` was '#{rdb_path}'
        EOS
      end

      dst_path = File.join(dump_path, dump_filename + ".rdb")
      if model.compressor
        model.compressor.compress_with do |command, ext|
          run("#{command} -c '#{rdb_path}' > '#{dst_path + ext}'")
        end
      else
        FileUtils.cp(rdb_path, dst_path)
      end
    end

    # Base `redis-cli` command line with auth/connectivity/extra options.
    def redis_cli_cmd
      "#{utility("redis-cli")} #{password_option} " \
        "#{connectivity_options} #{user_options}"
    end

    def password_option
      return unless password
      "-a '#{password}'"
    end

    # -s socket wins over -h/-p when both are configured.
    def connectivity_options
      return "-s '#{socket}'" if socket

      opts = []
      opts << "-h '#{host}'" if host
      opts << "-p '#{port}'" if port
      opts.join(" ")
    end

    def user_options
      Array(additional_options).join(" ")
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/base.rb | lib/backup/database/base.rb | module Backup
module Database
  class Error < Backup::Error; end

  # Abstract base for all Database dump adapters. Provides the dump
  # path, filename generation and start/finish logging.
  class Base
    include Utilities::Helpers
    include Config::Helpers

    attr_reader :model, :database_id, :dump_path

    ##
    # If given, +database_id+ will be appended to the #dump_filename.
    # This is required if multiple Databases of the same class are added to
    # the model.
    def initialize(model, database_id = nil)
      @model = model
      # Sanitize the id so it's filesystem/filename safe.
      @database_id = database_id.to_s.gsub(/\W/, "_") if database_id
      @dump_path = File.join(Config.tmp_path, model.trigger, "databases")
      load_defaults!
    end

    # Subclasses call super first: logs the start and creates dump_path.
    def perform!
      log!(:started)
      prepare!
    end

    private

    def prepare!
      FileUtils.mkdir_p(dump_path)
    end

    ##
    # Sets the base filename for the final dump file to be saved in +dump_path+,
    # based on the class name. e.g. databases/MySQL.sql
    #
    # +database_id+ will be appended if it is defined.
    # e.g. databases/MySQL-database_id.sql
    #
    # If multiple Databases of the same class are defined and no +database_id+
    # is defined, the user will be warned and one will be auto-generated.
    #
    # Model#initialize calls this method *after* all defined databases have
    # been initialized so `backup check` can report these warnings.
    def dump_filename
      @dump_filename ||=
        begin
          unless database_id
            if model.databases.select { |d| d.class == self.class }.count > 1
              # sleep 1 so each auto-generated id (taken from the
              # current epoch seconds) is unique within the model.
              sleep 1
              @database_id = Time.now.to_i.to_s[-5, 5]
              Logger.warn Error.new(<<-EOS)
                Database Identifier Missing
                When multiple Databases are configured in a single Backup Model
                that have the same class (MySQL, PostgreSQL, etc.), the optional
                +database_id+ must be specified to uniquely identify each instance.
                e.g. database MySQL, :database_id do |db|
                This will result in an output file in your final backup package like:
                databases/MySQL-database_id.sql
                Backup has auto-generated an identifier (#{database_id}) for this
                database dump and will now continue.
              EOS
            end
          end
          self.class.name.split("::").last + (database_id ? "-#{database_id}" : "")
        end
    end

    # Human-readable name used in log messages, e.g. "Database::MySQL (id)".
    def database_name
      @database_name ||= self.class.to_s.sub("Backup::", "") +
        (database_id ? " (#{database_id})" : "")
    end

    # Logs a start/finish message for this database dump.
    def log!(action)
      msg =
        case action
        when :started then "Started..."
        when :finished then "Finished!"
        end
      Logger.info "#{database_name} #{msg}"
    end
  end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/mongodb.rb | lib/backup/database/mongodb.rb | module Backup
module Database
class MongoDB < Base
class Error < Backup::Error; end
##
# Name of the database that needs to get dumped
attr_accessor :name
##
# Credentials for the specified database
attr_accessor :username, :password, :authdb
##
# Connectivity options
attr_accessor :host, :port
##
# IPv6 support (disabled by default)
attr_accessor :ipv6
##
# Collections to dump, collections that aren't specified won't get dumped
attr_accessor :only_collections
##
# Additional "mongodump" options
attr_accessor :additional_options
##
# Forces mongod to flush all pending write operations to the disk and
# locks the entire mongod instance to prevent additional writes until the
# dump is complete.
#
# Note that if Profiling is enabled, this will disable it and will not
# re-enable it after the dump is complete.
attr_accessor :lock
##
# Creates a dump of the database that includes an oplog, to create a
# point-in-time snapshot of the state of a mongod instance.
#
# If this option is used, you would not use the `lock` option.
#
# This will only work against nodes that maintain a oplog.
# This includes all members of a replica set, as well as master nodes in
# master/slave replication deployments.
attr_accessor :oplog
def initialize(model, database_id = nil, &block)
super
instance_eval(&block) if block_given?
end
def perform!
super
lock_database if @lock
dump!
package!
ensure
unlock_database if @lock
end
private
##
# Performs all required mongodump commands, dumping the output files
# into the +dump_packaging_path+ directory for packaging.
def dump!
FileUtils.mkdir_p dump_packaging_path
collections = Array(only_collections)
if collections.empty?
run(mongodump)
else
collections.each do |collection|
run("#{mongodump} --collection='#{collection}'")
end
end
end
##
# Creates a tar archive of the +dump_packaging_path+ directory
# and stores it in the +dump_path+ using +dump_filename+.
#
# <trigger>/databases/MongoDB[-<database_id>].tar[.gz]
#
# If successful, +dump_packaging_path+ is removed.
def package!
pipeline = Pipeline.new
dump_ext = "tar"
pipeline << "#{utility(:tar)} -cf - " \
"-C '#{dump_path}' '#{dump_filename}'"
if model.compressor
model.compressor.compress_with do |command, ext|
pipeline << command
dump_ext << ext
end
end
pipeline << "#{utility(:cat)} > " \
"'#{File.join(dump_path, dump_filename)}.#{dump_ext}'"
pipeline.run
if pipeline.success?
FileUtils.rm_rf dump_packaging_path
log!(:finished)
else
raise Error, "Dump Failed!\n#{pipeline.error_messages}"
end
end
def dump_packaging_path
File.join(dump_path, dump_filename)
end
def mongodump
"#{utility(:mongodump)} #{name_option} #{credential_options} " \
"#{connectivity_options} #{ipv6_option} #{oplog_option} " \
"#{user_options} --out='#{dump_packaging_path}'"
end
def name_option
return unless name
"--db='#{name}'"
end
def credential_options
opts = []
opts << "--username='#{username}'" if username
opts << "--password='#{password}'" if password
opts << "--authenticationDatabase='#{authdb}'" if authdb
opts.join(" ")
end
def connectivity_options
opts = []
opts << "--host='#{host}'" if host
opts << "--port='#{port}'" if port
opts.join(" ")
end
def ipv6_option
"--ipv6" if ipv6
end
def oplog_option
"--oplog" if oplog
end
def user_options
Array(additional_options).join(" ")
end
def lock_database
lock_command = <<-EOS.gsub(/^ +/, "")
echo 'use admin
db.setProfilingLevel(0)
db.fsyncLock()' | #{mongo_shell}
EOS
run(lock_command)
end
def unlock_database
unlock_command = <<-EOS.gsub(/^ +/, "")
echo 'use admin
db.fsyncUnlock()' | #{mongo_shell}
EOS
run(unlock_command)
end
def mongo_shell
cmd = "#{utility(:mongo)} #{connectivity_options}".rstrip
cmd << " #{credential_options}".rstrip
cmd << " #{ipv6_option}".rstrip
cmd << " '#{name}'" if name
cmd
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/postgresql.rb | lib/backup/database/postgresql.rb | module Backup
module Database
class PostgreSQL < Base
class Error < Backup::Error; end
##
# Name of the database that needs to get dumped.
# To dump all databases, set this to `:all` or leave blank.
# +username+ must be a PostgreSQL superuser to run `pg_dumpall`.
attr_accessor :name
##
# Credentials for the specified database
attr_accessor :username, :password
##
# If set the pg_dump(all) command is executed as the given user
attr_accessor :sudo_user
##
# Connectivity options
attr_accessor :host, :port, :socket
##
# Tables to skip while dumping the database.
# If `name` is set to :all (or not specified), these are ignored.
attr_accessor :skip_tables
##
# Tables to dump. This in only valid if `name` is specified.
# If none are given, the entire database will be dumped.
attr_accessor :only_tables
##
# Additional "pg_dump" or "pg_dumpall" options
attr_accessor :additional_options
def initialize(model, database_id = nil, &block)
super
instance_eval(&block) if block_given?
@name ||= :all
end
##
# Performs the pgdump command and outputs the dump file
# in the +dump_path+ using +dump_filename+.
#
# <trigger>/databases/PostgreSQL[-<database_id>].sql[.gz]
def perform!
super
pipeline = Pipeline.new
dump_ext = "sql"
pipeline << (dump_all? ? pgdumpall : pgdump)
if model.compressor
model.compressor.compress_with do |command, ext|
pipeline << command
dump_ext << ext
end
end
pipeline << "#{utility(:cat)} > " \
"'#{File.join(dump_path, dump_filename)}.#{dump_ext}'"
pipeline.run
if pipeline.success?
log!(:finished)
else
raise Error, "Dump Failed!\n" + pipeline.error_messages
end
end
def pgdump
password_option.to_s +
sudo_option.to_s +
"#{utility(:pg_dump)} #{username_option} #{connectivity_options} " \
"#{user_options} #{tables_to_dump} #{tables_to_skip} #{name}"
end
def pgdumpall
password_option.to_s +
sudo_option.to_s +
"#{utility(:pg_dumpall)} #{username_option} " \
"#{connectivity_options} #{user_options}"
end
def password_option
"PGPASSWORD=#{Shellwords.escape(password)} " if password
end
def sudo_option
"#{utility(:sudo)} -n -H -u #{sudo_user} " if sudo_user
end
def username_option
"--username=#{Shellwords.escape(username)}" if username
end
def connectivity_options
return "--host='#{socket}'" if socket
opts = []
opts << "--host='#{host}'" if host
opts << "--port='#{port}'" if port
opts.join(" ")
end
def user_options
Array(additional_options).join(" ")
end
def tables_to_dump
Array(only_tables).map do |table|
"--table='#{table}'"
end.join(" ")
end
def tables_to_skip
Array(skip_tables).map do |table|
"--exclude-table='#{table}'"
end.join(" ")
end
def dump_all?
name == :all
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/database/sqlite.rb | lib/backup/database/sqlite.rb | module Backup
module Database
class SQLite < Base
class Error < Backup::Error; end
##
# Path to the sqlite3 file
attr_accessor :path
##
# Path to sqlite utility (optional)
attr_accessor :sqlitedump_utility
##
# Creates a new instance of the SQLite adapter object
def initialize(model, database_id = nil, &block)
super
instance_eval(&block) if block_given?
@sqlitedump_utility ||= utility(:sqlitedump)
end
##
# Performs the sqlitedump command and outputs the
# data to the specified path based on the 'trigger'
def perform!
super
dump = "echo '.dump' | #{sqlitedump_utility} #{path}"
pipeline = Pipeline.new
dump_ext = "sql"
pipeline << dump
if model.compressor
model.compressor.compress_with do |command, ext|
pipeline << command
dump_ext << ext
end
end
pipeline << "cat > '#{File.join(dump_path, dump_filename)}.#{dump_ext}'"
pipeline.run
if pipeline.success?
log!(:finished)
else
raise Error,
"#{database_name} Dump Failed!\n" + pipeline.error_messages
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/base.rb | lib/backup/syncer/base.rb | module Backup
module Syncer
class Base
include Utilities::Helpers
include Config::Helpers
##
# Path to store the synced files/directories to
attr_accessor :path
##
# Flag for mirroring the files/directories
attr_accessor :mirror
##
# Optional user-defined identifier to differentiate multiple syncers
# defined within a single backup model. Currently this is only used
# in the log messages.
attr_reader :syncer_id
attr_reader :excludes
def initialize(syncer_id = nil)
@syncer_id = syncer_id
load_defaults!
@mirror ||= false
@directories ||= []
@excludes ||= []
end
##
# Syntactical suger for the DSL for adding directories
def directories(&block)
return @directories unless block_given?
instance_eval(&block)
end
def add(path)
directories << path
end
# For Cloud Syncers, +pattern+ can be a string (with shell-style
# wildcards) or a regex.
# For RSync, each +pattern+ will be passed to rsync's --exclude option.
def exclude(pattern)
excludes << pattern
end
private
def syncer_name
@syncer_name ||= self.class.to_s.sub("Backup::", "") +
(syncer_id ? " (#{syncer_id})" : "")
end
def log!(action)
msg = case action
when :started then "Started..."
when :finished then "Finished!"
end
Logger.info "#{syncer_name} #{msg}"
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/rsync/local.rb | lib/backup/syncer/rsync/local.rb | module Backup
module Syncer
module RSync
class Local < Base
def perform!
log!(:started)
create_dest_path!
run("#{rsync_command} #{paths_to_push} '#{dest_path}'")
log!(:finished)
end
private
# Expand path, since this is local and shell-quoted.
def dest_path
@dest_path ||= File.expand_path(path)
end
def create_dest_path!
FileUtils.mkdir_p dest_path
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/rsync/base.rb | lib/backup/syncer/rsync/base.rb | module Backup
module Syncer
module RSync
class Base < Syncer::Base
##
# Additional String or Array of options for the rsync cli
attr_accessor :additional_rsync_options
attr_accessor :archive
def initialize(syncer_id = nil, &block)
super
instance_eval(&block) if block_given?
@path ||= "~/backups"
@archive = @archive.nil? ? true : @archive
end
private
##
# Common base command for Local/Push/Pull
def rsync_command
utility(:rsync) << archive_option << mirror_option << exclude_option <<
" #{Array(additional_rsync_options).join(" ")}".rstrip
end
def mirror_option
mirror ? " --delete" : ""
end
def archive_option
archive ? " --archive" : ""
end
def exclude_option
excludes.map { |pattern| " --exclude='#{pattern}'" }.join
end
##
# Each path is expanded, since these refer to local paths and are
# being shell-quoted. This will also remove any trailing `/` from
# each path, as we don't want rsync's "trailing / on source directories"
# behavior. This method is used by RSync::Local and RSync::Push.
def paths_to_push
directories.map { |dir| "'#{File.expand_path(dir)}'" }.join(" ")
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/rsync/push.rb | lib/backup/syncer/rsync/push.rb | module Backup
module Syncer
module RSync
class Push < Base
##
# Mode of operation
#
# [:ssh (default)]
# Connects to the remote via SSH.
# Does not use an rsync daemon on the remote.
#
# [:ssh_daemon]
# Connects to the remote via SSH.
# Spawns a single-use daemon on the remote, which allows certain
# daemon features (like modules) to be used.
#
# [:rsync_daemon]
# Connects directly to an rsync daemon via TCP.
# Data transferred is not encrypted.
#
attr_accessor :mode
##
# Server Address
attr_accessor :host
##
# SSH or RSync port
#
# For `:ssh` or `:ssh_daemon` mode, this specifies the SSH port to use
# and defaults to 22.
#
# For `:rsync_daemon` mode, this specifies the TCP port to use
# and defaults to 873.
attr_accessor :port
##
# SSH User
#
# If the user running the backup is not the same user that needs to
# authenticate with the remote server, specify the user here.
#
# The user must have SSH keys setup for passphrase-less access to the
# remote. If the SSH User does not have passphrase-less keys, or no
# default keys in their `~/.ssh` directory, you will need to use the
# `-i` option in `:additional_ssh_options` to specify the
# passphrase-less key to use.
#
# Used only for `:ssh` and `:ssh_daemon` modes.
attr_accessor :ssh_user
##
# Additional SSH Options
#
# Used to supply a String or Array of options to be passed to the SSH
# command in `:ssh` and `:ssh_daemon` modes.
#
# For example, if you need to supply a specific SSH key for the `ssh_user`,
# you would set this to: "-i '/path/to/id_rsa'". Which would produce:
#
# rsync -e "ssh -p 22 -i '/path/to/id_rsa'"
#
# Arguments may be single-quoted, but should not contain any double-quotes.
#
# Used only for `:ssh` and `:ssh_daemon` modes.
attr_accessor :additional_ssh_options
##
# RSync User
#
# If the user running the backup is not the same user that needs to
# authenticate with the rsync daemon, specify the user here.
#
# Used only for `:ssh_daemon` and `:rsync_daemon` modes.
attr_accessor :rsync_user
##
# RSync Password
#
# If specified, Backup will write the password to a temporary file and
# use it with rsync's `--password-file` option for daemon authentication.
#
# Note that setting this will override `rsync_password_file`.
#
# Used only for `:ssh_daemon` and `:rsync_daemon` modes.
attr_accessor :rsync_password
##
# RSync Password File
#
# If specified, this path will be passed to rsync's `--password-file`
# option for daemon authentication.
#
# Used only for `:ssh_daemon` and `:rsync_daemon` modes.
attr_accessor :rsync_password_file
##
# Flag for compressing (only compresses for the transfer)
attr_accessor :compress
def initialize(syncer_id = nil)
super
@mode ||= :ssh
@port ||= mode == :rsync_daemon ? 873 : 22
@compress ||= false
end
def perform!
log!(:started)
write_password_file!
create_dest_path!
run("#{rsync_command} #{paths_to_push} " \
"#{host_options}'#{dest_path}'")
log!(:finished)
ensure
remove_password_file!
end
private
##
# Remove any preceeding '~/', since this is on the remote,
# and remove any trailing `/`.
def dest_path
@dest_path ||= path.sub(/^~\//, "").sub(/\/$/, "")
end
##
# Runs a 'mkdir -p' command on the remote to ensure the dest_path exists.
# This used because rsync will attempt to create the path, but will only
# call 'mkdir' without the '-p' option. This is only applicable in :ssh
# mode, and only used if the path would require this.
def create_dest_path!
return unless mode == :ssh && dest_path.index("/").to_i > 0
run "#{utility(:ssh)} #{ssh_transport_args} #{host} " +
%("mkdir -p '#{dest_path}'")
end
##
# For Push, this will prepend the #dest_path.
# For Pull, this will prepend the first path in #paths_to_pull.
def host_options
if mode == :ssh
"#{host}:"
else
user = "#{rsync_user}@" if rsync_user
"#{user}#{host}::"
end
end
##
# Common base command, plus options for Push/Pull
def rsync_command
super << compress_option << password_option << transport_options
end
def compress_option
compress ? " --compress" : ""
end
def password_option
return "" if mode == :ssh
path = @password_file ? @password_file.path : rsync_password_file
path ? " --password-file='#{File.expand_path(path)}'" : ""
end
def transport_options
if mode == :rsync_daemon
" --port #{port}"
else
%( -e "#{utility(:ssh)} #{ssh_transport_args}")
end
end
def ssh_transport_args
args = "-p #{port} "
args << "-l #{ssh_user} " if ssh_user
args << Array(additional_ssh_options).join(" ")
args.rstrip
end
def write_password_file!
return unless rsync_password && mode != :ssh
@password_file = Tempfile.new("backup-rsync-password")
@password_file.write(rsync_password)
@password_file.close
end
def remove_password_file!
@password_file.delete if @password_file
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/rsync/pull.rb | lib/backup/syncer/rsync/pull.rb | module Backup
module Syncer
module RSync
class Pull < Push
def perform!
log!(:started)
write_password_file!
create_dest_path!
run("#{rsync_command} #{host_options}#{paths_to_pull} " \
"'#{dest_path}'")
log!(:finished)
ensure
remove_password_file!
end
private
##
# Returns the syntax for pulling multiple paths from the remote host.
# e.g.
# rsync -a -e "ssh -p 22" host:'path1' :'path2' '/dest'
# rsync -a rsync_user@host::'modname/path1' ::'modname/path2' '/dest'
#
# Remove any preceeding '~/', since these paths are on the remote.
# Also remove any trailing `/`, since we don't want rsync's
# "trailing / on source directories" behavior.
def paths_to_pull
sep = mode == :ssh ? ":" : "::"
directories.map do |dir|
"#{sep}'#{dir.sub(/^~\//, "").sub(/\/$/, "")}'"
end.join(" ").sub(/^#{ sep }/, "")
end
# Expand path, since this is local and shell-quoted.
def dest_path
@dest_path ||= File.expand_path(path)
end
def create_dest_path!
FileUtils.mkdir_p dest_path
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/cloud/base.rb | lib/backup/syncer/cloud/base.rb | module Backup
module Syncer
module Cloud
class Error < Backup::Error; end
class Base < Syncer::Base
MUTEX = Mutex.new
##
# Number of threads to use for concurrency.
#
# Default: 0 (no concurrency)
attr_accessor :thread_count
##
# Number of times to retry failed operations.
#
# Default: 10
attr_accessor :max_retries
##
# Time in seconds to pause before each retry.
#
# Default: 30
attr_accessor :retry_waitsec
def initialize(syncer_id = nil, &block)
super
instance_eval(&block) if block_given?
@thread_count ||= 0
@max_retries ||= 10
@retry_waitsec ||= 30
@path ||= "backups"
@path = path.sub(/^\//, "")
end
def perform!
log!(:started)
@transfer_count = 0
@unchanged_count = 0
@skipped_count = 0
@orphans = thread_count > 0 ? Queue.new : []
directories.each { |dir| sync_directory(dir) }
orphans_result = process_orphans
Logger.info "\nSummary:"
Logger.info "\s\sTransferred Files: #{@transfer_count}"
Logger.info "\s\s#{orphans_result}"
Logger.info "\s\sUnchanged Files: #{@unchanged_count}"
Logger.warn "\s\sSkipped Files: #{@skipped_count}" if @skipped_count > 0
log!(:finished)
end
private
def sync_directory(dir)
remote_base = path.empty? ? File.basename(dir) :
File.join(path, File.basename(dir))
Logger.info "Gathering remote data for '#{remote_base}'..."
remote_files = get_remote_files(remote_base)
Logger.info("Gathering local data for '#{File.expand_path(dir)}'...")
local_files = LocalFile.find(dir, excludes)
relative_paths = (local_files.keys | remote_files.keys).sort
if relative_paths.empty?
Logger.info "No local or remote files found"
else
Logger.info "Syncing..."
sync_block = proc do |relative_path|
local_file = local_files[relative_path]
remote_md5 = remote_files[relative_path]
remote_path = File.join(remote_base, relative_path)
sync_file(local_file, remote_path, remote_md5)
end
if thread_count > 0
sync_in_threads(relative_paths, sync_block)
else
relative_paths.each(&sync_block)
end
end
end
def sync_in_threads(relative_paths, sync_block)
queue = Queue.new
queue << relative_paths.shift until relative_paths.empty?
num_threads = [thread_count, queue.size].min
Logger.info "\s\sUsing #{num_threads} Threads"
threads = Array.new(num_threads) do
Thread.new do
loop do
path = queue.shift(true) rescue nil
path ? sync_block.call(path) : break
end
end
end
# abort if any thread raises an exception
while threads.any?(&:alive?)
if threads.any? { |thr| thr.status.nil? }
threads.each(&:kill)
Thread.pass while threads.any?(&:alive?)
break
end
sleep num_threads * 0.1
end
threads.each(&:join)
end
# If an exception is raised in multiple threads, only the exception
# raised in the first thread that Thread#join is called on will be
# handled. So all exceptions are logged first with their details,
# then a generic exception is raised.
def sync_file(local_file, remote_path, remote_md5)
if local_file && File.exist?(local_file.path)
if local_file.md5 == remote_md5
MUTEX.synchronize { @unchanged_count += 1 }
else
Logger.info("\s\s[transferring] '#{remote_path}'")
begin
cloud_io.upload(local_file.path, remote_path)
MUTEX.synchronize { @transfer_count += 1 }
rescue CloudIO::FileSizeError => err
MUTEX.synchronize { @skipped_count += 1 }
Logger.warn Error.wrap(err, "Skipping '#{remote_path}'")
rescue => err
Logger.error(err)
raise Error, <<-EOS
Syncer Failed!
See the Retry [info] and [error] messages (if any)
for details on each failed operation.
EOS
end
end
elsif remote_md5
@orphans << remote_path
end
end
def process_orphans
if @orphans.empty?
return mirror ? "Deleted Files: 0" : "Orphaned Files: 0"
end
if @orphans.is_a?(Queue)
@orphans = Array.new(@orphans.size) { @orphans.shift }
end
if mirror
Logger.info @orphans.map { |path|
"\s\s[removing] '#{path}'"
}.join("\n")
begin
cloud_io.delete(@orphans)
"Deleted Files: #{@orphans.count}"
rescue => err
Logger.warn Error.wrap(err, "Delete Operation Failed")
"Attempted to Delete: #{@orphans.count} " \
"(See log messages for actual results)"
end
else
Logger.info @orphans.map { |path|
"\s\s[orphaned] '#{path}'"
}.join("\n")
"Orphaned Files: #{@orphans.count}"
end
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/cloud/s3.rb | lib/backup/syncer/cloud/s3.rb | require "backup/cloud_io/s3"
module Backup
module Syncer
module Cloud
class S3 < Base
class Error < Backup::Error; end
##
# Amazon Simple Storage Service (S3) Credentials
attr_accessor :access_key_id, :secret_access_key, :use_iam_profile
##
# Amazon S3 bucket name
attr_accessor :bucket
##
# Region of the specified S3 bucket
attr_accessor :region
##
# Encryption algorithm to use for Amazon Server-Side Encryption
#
# Supported values:
#
# - :aes256
#
# Default: nil
attr_accessor :encryption
##
# Storage class to use for the S3 objects uploaded
#
# Supported values:
#
# - :standard (default)
# - :reduced_redundancy
#
# Default: :standard
attr_accessor :storage_class
##
# Additional options to pass along to fog.
# e.g. Fog::Storage.new({ :provider => 'AWS' }.merge(fog_options))
attr_accessor :fog_options
def initialize(syncer_id = nil)
super
@storage_class ||= :standard
check_configuration
end
private
def cloud_io
@cloud_io ||= CloudIO::S3.new(
access_key_id: access_key_id,
secret_access_key: secret_access_key,
use_iam_profile: use_iam_profile,
bucket: bucket,
region: region,
encryption: encryption,
storage_class: storage_class,
max_retries: max_retries,
retry_waitsec: retry_waitsec,
# Syncer can not use multipart upload.
chunk_size: 0,
fog_options: fog_options
)
end
def get_remote_files(remote_base)
hash = {}
cloud_io.objects(remote_base).each do |object|
relative_path = object.key.sub(remote_base + "/", "")
hash[relative_path] = object.etag
end
hash
end
def check_configuration
required =
if use_iam_profile
%w[bucket]
else
%w[access_key_id secret_access_key bucket]
end
raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
Configuration Error
#{required.map { |name| "##{name}" }.join(", ")} are all required
EOS
raise Error, <<-EOS if encryption && encryption.to_s.upcase != "AES256"
Configuration Error
#encryption must be :aes256 or nil
EOS
classes = ["STANDARD", "REDUCED_REDUNDANCY"]
raise Error, <<-EOS unless classes.include?(storage_class.to_s.upcase)
Configuration Error
#storage_class must be :standard or :reduced_redundancy
EOS
end
end # Class S3 < Base
end # module Cloud
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/cloud/cloud_files.rb | lib/backup/syncer/cloud/cloud_files.rb | require "backup/cloud_io/cloud_files"
module Backup
module Syncer
module Cloud
class CloudFiles < Base
class Error < Backup::Error; end
##
# Rackspace CloudFiles Credentials
attr_accessor :username, :api_key
##
# Rackspace CloudFiles Container
attr_accessor :container
##
# Rackspace AuthURL (optional)
attr_accessor :auth_url
##
# Rackspace Region (optional)
attr_accessor :region
##
# Rackspace Service Net
# (LAN-based transfers to avoid charges and improve performance)
attr_accessor :servicenet
##
# Additional options to pass along to fog.
# e.g. Fog::Storage.new({ :provider => 'Rackspace' }.merge(fog_options))
attr_accessor :fog_options
def initialize(syncer_id = nil)
super
@servicenet ||= false
check_configuration
end
private
def cloud_io
@cloud_io ||= CloudIO::CloudFiles.new(
username: username,
api_key: api_key,
auth_url: auth_url,
region: region,
servicenet: servicenet,
container: container,
max_retries: max_retries,
retry_waitsec: retry_waitsec,
# Syncer can not use SLOs.
segments_container: nil,
segment_size: 0,
fog_options: fog_options
)
end
def get_remote_files(remote_base)
hash = {}
cloud_io.objects(remote_base).each do |object|
relative_path = object.name.sub(remote_base + "/", "")
hash[relative_path] = object.hash
end
hash
end
def check_configuration
required = %w[username api_key container]
raise Error, <<-EOS if required.map { |name| send(name) }.any?(&:nil?)
Configuration Error
#{required.map { |name| "##{name}" }.join(", ")} are all required
EOS
end
end # class Cloudfiles < Base
end # module Cloud
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/syncer/cloud/local_file.rb | lib/backup/syncer/cloud/local_file.rb | require "digest/md5"
module Backup
module Syncer
module Cloud
class LocalFile
attr_reader :path
attr_accessor :md5
class << self
# Returns a Hash of LocalFile objects for each file within +dir+,
# except those matching any of the +excludes+.
# Hash keys are the file's path relative to +dir+.
def find(dir, excludes = [])
dir = File.expand_path(dir)
hash = {}
find_md5(dir, excludes).each do |file|
hash[file.path.sub(dir + "/", "")] = file
end
hash
end
# Return a new LocalFile object if it's valid.
# Otherwise, log a warning and return nil.
def new(*args)
file = super
if file.invalid?
Logger.warn("\s\s[skipping] #{file.path}\n" \
"\s\sPath Contains Invalid UTF-8 byte sequences")
file = nil
end
file
end
private
# Returns an Array of file paths and their md5 hashes.
def find_md5(dir, excludes)
found = []
(Dir.entries(dir) - %w[. ..]).map { |e| File.join(dir, e) }.each do |path|
if File.directory?(path)
unless exclude?(excludes, path)
found += find_md5(path, excludes)
end
elsif File.file?(path)
if file = new(path)
unless exclude?(excludes, file.path)
file.md5 = Digest::MD5.file(file.path).hexdigest
found << file
end
end
end
end
found
end
# Returns true if +path+ matches any of the +excludes+.
# Note this can not be called if +path+ includes invalid UTF-8.
def exclude?(excludes, path)
excludes.any? do |ex|
if ex.is_a?(String)
File.fnmatch?(ex, path)
elsif ex.is_a?(Regexp)
ex.match(path)
end
end
end
end
# If +path+ contains invalid UTF-8, it will be sanitized
# and the LocalFile object will be flagged as invalid.
# This is done so @file.path may be logged.
def initialize(path)
@path = sanitize(path)
end
def invalid?
!!@invalid
end
private
def sanitize(str)
str.each_char.map do |char|
begin
char.unpack("U")
char
rescue
@invalid = true
"\xEF\xBF\xBD" # => "\uFFFD"
end
end.join
end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/encryptor/base.rb | lib/backup/encryptor/base.rb | module Backup
module Encryptor
class Base
include Utilities::Helpers
include Config::Helpers
def initialize
load_defaults!
end
private
##
# Return the encryptor name, with Backup namespace removed
def encryptor_name
self.class.to_s.sub("Backup::", "")
end
##
# Logs a message to the console and log file to inform
# the client that Backup is encrypting the archive
def log!
Logger.info "Using #{encryptor_name} to encrypt the archive."
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/encryptor/open_ssl.rb | lib/backup/encryptor/open_ssl.rb | module Backup
module Encryptor
class OpenSSL < Base
  BASE_OPTIONS = [
    "aes-256-cbc",
    "-pbkdf2",
    "-iter", "310000" # As per OWASP "Password Storage Cheat Sheet"
  ].freeze

  ##
  # The password that'll be used to encrypt the backup. This
  # password will be required to decrypt the backup later on.
  attr_accessor :password

  ##
  # The password file to use to encrypt the backup.
  attr_accessor :password_file

  ##
  # Determines whether the 'base64' should be used or not
  attr_accessor :base64

  ##
  # Determines whether the 'salt' flag should be used
  # *DEPRECATED*: This is the default behavior, this flag is useless
  attr_accessor :salt

  ##
  # Builds the encryptor: loads defaults via the superclass, fills in
  # fallback values, then applies any settings from the given block.
  def initialize(&block)
    super
    @base64 ||= false
    @salt ||= true
    @password_file ||= nil
    instance_eval(&block) if block_given?
  end

  ##
  # Called by the Packager: yields the openssl command line to pipe the
  # archive through, along with the ".enc" file extension.
  def encrypt_with
    log!
    yield "#{utility(:openssl)} #{options}", ".enc"
  end

  private

  ##
  # Assembles the openssl argument string: AES-256-CBC with PBKDF2 key
  # derivation, optional base64 armoring (makes the output readable in
  # text editors), and a password option.
  def options
    args = BASE_OPTIONS.dup
    args << "-base64" if @base64
    args << password_option
    args.join(" ")
  end

  ##
  # Always produces a password option; prefers the password file over
  # the inline password when both are configured.
  def password_option
    if @password_file.to_s.empty?
      "-k #{Shellwords.escape(@password)}"
    else
      "-pass file:#{@password_file}"
    end
  end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
backup/backup | https://github.com/backup/backup/blob/86c9b07c2a2974376888b1506001f77792d6359a/lib/backup/encryptor/gpg.rb | lib/backup/encryptor/gpg.rb | module Backup
module Encryptor
##
# The GPG Encryptor allows you to encrypt your final archive using GnuPG,
# using one of three {#mode modes} of operation.
#
# == First, setup defaults in your +config.rb+ file
#
# Configure the {#keys} Hash using {.defaults} in your +config.rb+
# to specify all valid {#recipients} and their Public Key.
#
# Backup::Encryptor::GPG.defaults do |encryptor|
# # setup all GnuPG public keys
# encryptor.keys = {}
# encryptor.keys['joe@example.com'] = <<-EOS
# # ...public key here...
# EOS
# encryptor.keys['mary@example.com'] = <<-EOS
# # ...public key here...
# EOS
# end
#
# The optional {#gpg_config} and {#gpg_homedir} options would also
# typically be set using {.defaults} in +config.rb+ as well.
#
# == Then, setup each of your Models
#
# Set the desired {#recipients} and/or {#passphrase} (or {#passphrase_file})
# for each {Model}, depending on the {#mode} used.
#
# === my_backup_01
#
# This archive can only be decrypted using the private key for joe@example.com
#
# Model.new(:my_backup_01, 'Backup Job #1') do
# # ... archives, databases, compressor and storage options, etc...
# encrypt_with GPG do |encryptor|
# encryptor.mode = :asymmetric
# encryptor.recipients = 'joe@example.com'
# end
# end
#
# === my_backup_02
#
# This archive can only be decrypted using the passphrase "a secret".
#
# Model.new(:my_backup_02, 'Backup Job #2') do
# # ... archives, databases, compressor and storage options, etc...
# encrypt_with GPG do |encryptor|
# encryptor.mode = :symmetric
# encryptor.passphrase = 'a secret'
# end
# end
#
# === my_backup_03
#
# This archive may be decrypted using either the private key for joe@example.com
# *or* mary@example.com, *and* may also be decrypted using the passphrase.
#
# Model.new(:my_backup_03, 'Backup Job #3') do
# # ... archives, databases, compressor and storage options, etc...
# encrypt_with GPG do |encryptor|
# encryptor.mode = :both
# encryptor.passphrase = 'a secret'
# encryptor.recipients = ['joe@example.com', 'mary@example.com']
# end
# end
#
class GPG < Base
class Error < Backup::Error; end

# Valid values for #mode. Frozen so the constant cannot be
# mutated accidentally at runtime.
MODES = [:asymmetric, :symmetric, :both].freeze
##
# Sets the mode of operation.
#
# [:asymmetric]
# In this mode, the final backup archive will be encrypted using the
# public key(s) specified by the key identifiers in {#recipients}.
# The archive may then be decrypted by anyone with a private key that
# corresponds to one of the public keys used. See {#recipients} and
# {#keys} for more information.
#
# [:symmetric]
# In this mode, the final backup archive will be encrypted using the
# passphrase specified by {#passphrase} or {#passphrase_file}.
# The archive will be encrypted using the encryption algorithm
# specified in your GnuPG configuration. See {#gpg_config} for more
# information. Anyone with the passphrase may decrypt the archive.
#
# [:both]
# In this mode, both +:asymmetric+ and +:symmetric+ options are used.
# Meaning that the archive may be decrypted by anyone with a valid
# private key or by using the proper passphrase.
#
# @param mode [String, Symbol] Sets the mode of operation.
# (Defaults to +:asymmetric+)
# @return [Symbol] mode that was set.
# @raise [Backup::Errors::Encryptor::GPG::InvalidModeError]
# if mode given is invalid.
#
attr_reader :mode
# Validates and assigns the operation mode.
# Accepts a String or Symbol; raises Error for anything not in MODES.
def mode=(mode)
  @mode = mode.to_sym
  unless MODES.include?(@mode)
    raise Error, "'#{@mode}' is not a valid mode."
  end
end
##
# Specifies the GnuPG configuration to be used.
#
# This should be given as the text of a +gpg.conf+ file. It will be
# written to a temporary file, which will be passed to the +gpg+ command
# to use instead of the +gpg.conf+ found in the GnuPG home directory.
# This allows you to be certain your preferences are used.
#
# This is especially useful if you've also set {#gpg_homedir} and plan
# on allowing Backup to automatically create that directory and import
# all your public keys specified in {#keys}. In this situation, that
# folder would not contain any +gpg.conf+ file, so GnuPG would simply
# use its defaults.
#
# While this may be specified on a per-Model basis, you would generally
# just specify this in the defaults. Leading tabs/spaces are stripped
# before writing the given string to the temporary configuration file.
#
# Backup::Encryptor::GPG.defaults do |enc|
# enc.gpg_config = <<-EOF
# # safely override preferences set in the receiver's public key(s)
# personal-cipher-preferences TWOFISH AES256 BLOWFISH AES192 CAST5 AES
# personal-digest-preferences SHA512 SHA256 SHA1 MD5
# personal-compress-preferences BZIP2 ZLIB ZIP Uncompressed
# # cipher algorithm for symmetric encryption
# # (if personal-cipher-preferences are not specified)
# s2k-cipher-algo TWOFISH
# # digest algorithm for mangling the symmetric encryption passphrase
# s2k-digest-algo SHA512
# EOF
# end
#
# @see #gpg_homedir
# @return [String]
attr_accessor :gpg_config
##
# Set the GnuPG home directory to be used.
#
# This allows you to specify the GnuPG home directory on the system
# where Backup will be run, keeping the keyrings used by Backup separate
# from the default keyrings of the user running Backup.
# By default, this would be +`~/.gnupg`+.
#
# If a directory is specified here, Backup will create it if needed
# and ensure the correct permissions are set. All public keys Backup
# imports would be added to the +pubring.gpg+ file within this directory,
# and +gpg+ would be given this directory using its +--homedir+ option.
#
# Any +gpg.conf+ file located in this directory would also be used by
# +gpg+, unless {#gpg_config} is specified.
#
# The given path will be expanded before use.
#
# @return [String]
attr_accessor :gpg_homedir
##
# Specifies a Hash of public key identifiers and their public keys.
#
# While not _required_, it is recommended that all public keys you intend
# to use be setup in {#keys}. The best place to do this is in your defaults
# in +config.rb+.
#
# Backup::Encryptor::GPG.defaults do |enc|
# enc.keys = {}
#
# enc.keys['joe@example.com'] = <<-EOS
# -----BEGIN PGP PUBLIC KEY BLOCK-----
# Version: GnuPG v1.4.12 (GNU/Linux)
#
# mQMqBEd5F8MRCACfArHCJFR6nkmxNiW+UE4PAW3bQla9JWFqCwu4VqLkPI/lHb5p
# xHff8Fzy2O89BxD/6hXSDx2SlVmAGHOCJhShx1vfNGVYNsJn2oNK50in9kGvD0+m
# [...]
# SkQEHOxhMiFjAN9q4LuirSOu65uR1bnTmF+Z92++qMIuEkH4/LnN
# =8gNa
# -----END PGP PUBLIC KEY BLOCK-----
# EOS
#
# enc.keys['mary@example.com'] = <<-EOS
# -----BEGIN PGP PUBLIC KEY BLOCK-----
# Version: GnuPG v1.4.12 (GNU/Linux)
#
# 2SlVmAGHOCJhShx1vfNGVYNxHff8Fzy2O89BxD/6in9kGvD0+mhXSDxsJn2oNK50
# kmxNiW+UmQMqBEd5F8MRCACfArHCJFR6qCwu4VqLkPI/lHb5pnE4PAW3bQla9JWF
# [...]
# AN9q4LSkQEHOxhMiFjuirSOu65u++qMIuEkH4/LnNR1bnTmF+Z92
# =8gNa
# -----END PGP PUBLIC KEY BLOCK-----
#
# EOS
# end
#
# All leading spaces/tabs will be stripped from the key, so the above
# form may be used to set each identifier's key.
#
# When a public key can not be found for an identifier specified in
# {#recipients}, the corresponding public key from this Hash will be
# imported into +pubring.gpg+ in the GnuPG home directory ({#gpg_homedir}).
# Therefore, each key *must* be the same identifier used in {#recipients}.
#
# To obtain the public key in ASCII format, use:
#
# $ gpg -a --export joe@example.com
#
# See {#recipients} for information on what may be used as valid identifiers.
#
# @return [Hash]
attr_accessor :keys
##
# Specifies the recipients to use when encrypting the backup archive.
#
# When {#mode} is set to +:asymmetric+ or +:both+, the public key for
# each recipient given here will be used to encrypt the archive. Each
# recipient will be able to decrypt the archive using their private key.
#
# If there is only one recipient, this may be specified as a String.
# Otherwise, this should be an Array of Strings. Each String must be a
# valid public key identifier, and *must* be the same identifier used to
# specify the recipient's public key in {#keys}. This is so that if a
# public key is not found for the given identifier, it may be imported
# from {#keys}.
#
# Valid identifiers which may be used are as follows:
#
# [Key Fingerprint]
# The key fingerprint is a 40-character hex string, which uniquely
# identifies a public key. This may be obtained using the following:
#
# $ gpg --fingerprint john.smith@example.com
# pub 1024R/4E5E8D8A 2012-07-20
# Key fingerprint = FFEA D1DB 201F B214 873E 7399 4A83 569F 4E5E 8D8A
# uid John Smith <john.smith@example.com>
# sub 1024R/92C8DFD8 2012-07-20
#
# [Long Key ID]
# The long Key ID is the last 16-characters of the key's fingerprint.
#
# The Long Key ID in this example is: 4A83569F4E5E8D8A
#
# $ gpg --keyid-format long -k john.smith@example.com
# pub 1024R/4A83569F4E5E8D8A 2012-07-20
# uid John Smith <john.smith@example.com>
# sub 1024R/662F18DB92C8DFD8 2012-07-20
#
# [Short Key ID]
# The short Key ID is the last 8-characters of the key's fingerprint.
# This is the default key format seen when listing keys.
#
# The Short Key ID in this example is: 4E5E8D8A
#
# $ gpg -k john.smith@example.com
# pub 1024R/4E5E8D8A 2012-07-20
# uid John Smith <john.smith@example.com>
# sub 1024R/92C8DFD8 2012-07-20
#
# [Email Address]
# This must exactly match an email address for one of the UID records
# associated with the recipient's public key.
#
# Recipient identifier forms may be mixed, as long as the identifier used
# here is the same as that used in {#keys}. Also, all spaces will be stripped
# from the identifier when used, so the following would be valid.
#
# Backup::Model.new(:my_backup, 'My Backup') do
# encrypt_with GPG do |enc|
# enc.recipients = [
# # John Smith
# '4A83 569F 4E5E 8D8A',
# # Mary Smith
# 'mary.smith@example.com'
# ]
# end
# end
#
# @return [String, Array]
attr_accessor :recipients
##
# Specifies the passphrase to use symmetric encryption.
#
# When {#mode} is +:symmetric+ or +:both+, this passphrase will be used
# to symmetrically encrypt the archive.
#
# Use of this option will override the use of {#passphrase_file}.
#
# @return [String]
attr_accessor :passphrase
##
# Specifies the passphrase file to use symmetric encryption.
#
# When {#mode} is +:symmetric+ or +:both+, this file will be passed
# to the +gpg+ command line, where +gpg+ will read the first line from
# this file and use it for the passphrase.
#
# The file path given here will be expanded to a full path.
#
# If {#passphrase} is specified, {#passphrase_file} will be ignored.
# Therefore, if you have set {#passphrase} in your global defaults,
# but wish to use {#passphrase_file} with a specific {Model}, be sure
# to clear {#passphrase} within that model's configuration.
#
# Backup::Encryptor::GPG.defaults do |enc|
# enc.passphrase = 'secret phrase'
# end
#
# Backup::Model.new(:my_backup, 'My Backup') do
# # other directives...
# encrypt_with GPG do |enc|
# enc.mode = :symmetric
# enc.passphrase = nil
# enc.passphrase_file = '/path/to/passphrase.file'
# end
# end
#
# @return [String]
attr_accessor :passphrase_file
##
# Configures default accessor values for new class instances.
#
# If all required options are set, then no further configuration
# would be needed within a Model's definition when an Encryptor is added.
# Therefore, the following example is sufficient to encrypt +:my_backup+:
#
# # Defaults set in config.rb
# Backup::Encryptor::GPG.defaults do |encryptor|
# encryptor.keys = {}
# encryptor.keys['joe@example.com'] = <<-EOS
# -----BEGIN PGP PUBLIC KEY BLOCK-----
# Version: GnuPG v1.4.12 (GNU/Linux)
#
# mI0EUBR6CwEEAMVSlFtAXO4jXYnVFAWy6chyaMw+gXOFKlWojNXOOKmE3SujdLKh
# kWqnafx7VNrb8cjqxz6VZbumN9UgerFpusM3uLCYHnwyv/rGMf4cdiuX7gGltwGb
# (...etc...)
# mLekS3xntUhhgHKc4lhf4IVBqG4cFmwSZ0tZEJJUSESb3TqkkdnNLjE=
# =KEW+
# -----END PGP PUBLIC KEY BLOCK-----
# EOS
#
# encryptor.recipients = 'joe@example.com'
# end
#
# # Encryptor set in the model
# Backup::Model.new(:my_backup, 'My Backup') do
# # archives, storage options, etc...
# encrypt_with GPG
# end
#
# @!scope class
# @see Config::Helpers::ClassMethods#defaults
# @yield [config] OpenStruct object
# @!method defaults
##
# Creates a new instance of Backup::Encryptor::GPG.
#
# This constructor is not used directly when configuring Backup.
# Use {Model#encrypt_with}.
#
# Model.new(:backup_trigger, 'Backup Label') do
# archive :my_archive do |archive|
# archive.add '/some/directory'
# end
#
# compress_with Gzip
#
# encrypt_with GPG do |encryptor|
# encryptor.mode = :both
# encryptor.passphrase = 'a secret'
# encryptor.recipients = ['joe@example.com', 'mary@example.com']
# end
#
# store_with SFTP
#
# notify_by Mail
# end
#
# @api private
def initialize(&block)
  super
  instance_eval(&block) if block_given?
  # default to public-key encryption when no mode was configured
  @mode ||= :asymmetric
end
##
# This is called as part of the procedure run by the Packager.
# It sets up the needed options to pass to the gpg command,
# then yields the command to use as part of the packaging procedure.
# Once the packaging procedure is complete, it will return
# so that any clean-up may be performed after the yield.
# Cleanup is also ensured, as temporary files may hold sensitive data.
# If no options can be built, the packaging process will be aborted.
#
# @api private
def encrypt_with
  log!
  prepare
  # an empty mode_options means neither symmetric nor asymmetric options
  # could be built (no usable passphrase/recipients) -- abort packaging
  if mode_options.empty?
    raise Error, "Encryption could not be performed for mode '#{mode}'"
  end
  # yield the full gpg command line and the file extension to the Packager
  yield "#{utility(:gpg)} #{base_options} #{mode_options}", ".gpg"
ensure
  # tempfiles may hold the passphrase/config; always remove them
  cleanup
end
private
##
# Remove any temporary directories and reset all instance variables.
#
def prepare
  # :secure mitigates symlink attacks when removing the temp directories
  FileUtils.rm_rf(@tempdirs, secure: true) if @tempdirs
  @tempdirs = []
  # memoized command-line fragments; rebuilt on next access
  @base_options = nil
  @mode_options = nil
  @user_recipients = nil
  @user_keys = nil
  @system_identifiers = nil
end
# post-packaging cleanup is the same operation as preparing beforehand
alias :cleanup :prepare
##
# Returns the options needed for the gpg command line which are
# not dependent on the #mode. --no-tty suppresses output of certain
# messages, like the "Reading passphrase from file descriptor..."
# messages during symmetric encryption
#
def base_options
  @base_options ||= begin
    opts = ["--no-tty"]
    # use the configured GnuPG home directory, if one was given
    path = setup_gpg_homedir
    opts << "--homedir '#{path}'" if path
    # use a temporary gpg.conf built from #gpg_config, if given
    path = setup_gpg_config
    opts << "--options '#{path}'" if path
    opts.join(" ")
  end
end
##
# Setup the given :gpg_homedir if needed, ensure the proper permissions
# are set, and return the directory's path. Otherwise, return false.
#
# If the GnuPG files do not exist, trigger their creation by requesting
# --list-secret-keys. Some commands, like for symmetric encryption, will
# issue messages about their creation on STDERR, which generates unwanted
# warnings in the log. This way, if any of these files are created here,
# we will get those messages on STDOUT for the log, without the actual
# secret key listing which we don't care about.
#
def setup_gpg_homedir
  return false unless gpg_homedir
  path = File.expand_path(gpg_homedir)
  FileUtils.mkdir_p(path)
  FileUtils.chown(Config.user, nil, path)
  # gpg requires its home directory to be private to the user
  FileUtils.chmod(0o700, path)
  # if any keyring file is missing, run a harmless listing command to
  # make gpg create them now (see the method comment above for why)
  unless %w[pubring.gpg secring.gpg trustdb.gpg]
      .all? { |name| File.exist? File.join(path, name) }
    run("#{utility(:gpg)} --homedir '#{path}' -K 2>&1 >/dev/null")
  end
  path
rescue => err
  raise Error.wrap \
    err, "Failed to create or set permissions for #gpg_homedir"
end
##
# Write the given #gpg_config to a tempfile, within a tempdir, and
# return the file's path to be given to the gpg --options argument.
# If no #gpg_config is set, return false.
#
# This is required in order to set the proper permissions on the
# directory containing the tempfile. The tempdir will be removed
# after the packaging procedure is completed.
#
# Once written, we'll call check_gpg_config to make sure there are
# no problems that would prevent gpg from running with this config.
# If any errors occur during this process, we can not proceed.
# We'll cleanup to remove the tempdir (if created) and raise an error.
#
def setup_gpg_config
  return false unless gpg_config
  # the tempfile lives in its own tempdir (removed in #cleanup) so
  # directory permissions can be controlled
  dir = Dir.mktmpdir("backup-gpg_config", Config.tmp_path)
  @tempdirs << dir
  file = Tempfile.open("backup-gpg_config", dir)
  # strip leading indentation from the heredoc-style config string
  file.write gpg_config.gsub(/^[[:blank:]]+/, "")
  file.close
  # abort early if gpg cannot run with this configuration
  check_gpg_config(file.path)
  file.path
rescue => err
  cleanup
  raise Error.wrap(err, "Error creating temporary file for #gpg_config.")
end
##
# Make sure the temporary GnuPG config file created from #gpg_config
# does not have any syntax errors that would prevent gpg from running.
# If so, raise the returned error message.
# Note that Cli::Helpers#run may also raise an error here.
#
def check_gpg_config(path)
  # --gpgconf-test produces no output on success; any output is an
  # error message, which we raise verbatim
  ret = run(
    "#{utility(:gpg)} --options '#{path}' --gpgconf-test 2>&1"
  ).chomp
  raise ret unless ret.empty?
end
##
# Returns the options needed for the gpg command line to perform
# the encryption based on the #mode.
#
# Builds the mode-dependent portion of the gpg command line:
# symmetric options, asymmetric options, or both, per #mode.
def mode_options
  @mode_options ||= begin
    parts = []
    parts << symmetric_options unless mode == :asymmetric
    parts << asymmetric_options unless mode == :symmetric
    parts.compact.join(" ")
  end
end
##
# Process :passphrase or :passphrase_file and return the command line
# options to perform symmetric encryption. If no :passphrase is
# specified, or an error occurs creating a temporary file for it, then
# try to use :passphrase_file if it's set.
# If the option can not be set, log a warning and return nil.
#
def symmetric_options
  # prefer a tempfile built from :passphrase...
  path = setup_passphrase_file
  # ...falling back to the user-supplied :passphrase_file
  unless path || passphrase_file.to_s.empty?
    path = File.expand_path(passphrase_file.to_s)
  end
  if path && File.exist?(path)
    "-c --passphrase-file '#{path}'"
  else
    Logger.warn("Symmetric encryption options could not be set.")
    nil
  end
end
##
# Create a temporary file, within a tempdir, to hold the :passphrase and
# return the file's path. If an error occurs, log a warning.
# Return false if no :passphrase is set or an error occurs.
#
def setup_passphrase_file
  return false if passphrase.to_s.empty?
  # dedicated tempdir, removed in #cleanup since it holds the passphrase
  dir = Dir.mktmpdir("backup-gpg_passphrase", Config.tmp_path)
  @tempdirs << dir
  file = Tempfile.open("backup-gpg_passphrase", dir)
  file.write passphrase.to_s
  file.close
  file.path
rescue => err
  Logger.warn Error.wrap(err, "Error creating temporary passphrase file.")
  false
end
##
# Process :recipients, importing their public key from :keys if needed,
# and return the command line options to perform asymmetric encryption.
# Log a warning and return nil if no valid recipients are found.
#
# Builds the asymmetric (public key) portion of the gpg command line.
# Returns nil (after logging a warning) when no usable recipients exist.
def asymmetric_options
  if user_recipients.empty?
    Logger.warn "No recipients available for asymmetric encryption."
    return nil
  end
  recipient_flags = user_recipients.map { |r| "-r '#{r}'" }
  # skip trust database checks
  "-e --trust-model always " + recipient_flags.join(" ")
end
##
# Returns an Array of the public key identifiers the user specified
# in :recipients. Each identifier is 'cleaned' so that exact matches
# can be performed. Then each is checked to ensure it will find a
# public key that exists in the system's public keyring.
# If the identifier does not match an existing key, the public key
# associated with the identifier in :keys will be imported for use.
# If no key can be found in the system or in :keys for the identifier,
# a warning will be issued; as we will attempt to encrypt the backup
# and proceed if at all possible.
#
def user_recipients
  @user_recipients ||= begin
    # :recipients may be given as a single String or an Array
    [recipients].flatten.compact.map do |identifier|
      identifier = clean_identifier(identifier)
      if system_identifiers.include?(identifier)
        # already present in the system's public keyring
        identifier
      else
        key = user_keys[identifier]
        if key
          # will log a warning and return nil if the import fails
          import_key(identifier, key)
        else
          Logger.warn \
            "No public key was found in #keys for '#{identifier}'"
          nil
        end
      end
    end.compact
  end
end
##
# Returns the #keys hash set by the user with all identifiers
# (Hash keys) 'cleaned' for exact matching. If the cleaning process
# creates duplicate keys, the user will be warned.
#
# The user-supplied #keys Hash with every identifier normalized via
# #clean_identifier for exact matching. Warns if normalization
# collapses two identifiers into one.
def user_keys
  @user_keys ||= begin
    raw = keys || {}
    cleaned = raw.each_with_object({}) do |(identifier, key), acc|
      acc[clean_identifier(identifier)] = key
    end
    if cleaned.keys.count != raw.keys.count
      Logger.warn \
        "Duplicate public key identifiers were detected in #keys."
    end
    cleaned
  end
end
##
# Cleans a public key identifier.
# Strip out all spaces, upcase non-email identifiers,
# and wrap email addresses in <> to perform exact matching.
#
# Normalizes a public key identifier: strips all blanks, then wraps
# email addresses in <> (for exact matching) and upcases everything
# else (key ids / fingerprints).
def clean_identifier(str)
  cleaned = str.to_s.gsub(/[[:blank:]]+/, "")
  if cleaned.include?("@")
    "<#{cleaned.delete("<>")}>"
  else
    cleaned.upcase
  end
end
##
# Import the given public key and return the 16 character Key ID.
# If the import fails, return nil.
# Note that errors raised by Cli::Helpers#run may also be rescued here.
#
def import_key(identifier, key)
  file = Tempfile.open("backup-gpg_import", Config.tmp_path)
  # strip leading indentation from the heredoc-style key string
  file.write(key.gsub(/^[[:blank:]]+/, ""))
  file.close
  ret = run "#{utility(:gpg)} #{base_options} " \
    "--keyid-format 0xlong --import '#{file.path}' 2>&1"
  file.delete
  # extract the 16-character long key id from gpg's "0x..." output
  keyid = ret.match(/ 0x(\w{16})/).to_a[1]
  raise "GPG Returned:\n#{ret.gsub(/^\s*/, " ")}" unless keyid
  keyid
rescue => err
  Logger.warn Error.wrap(
    err, "Public key import failed for '#{identifier}'"
  )
  nil
end
##
# Parse the information for all the public keys found in the public
# keyring (based on #gpg_homedir setting) and return an Array of all
# identifiers which could be used to specify a valid key.
#
def system_identifiers
  @system_identifiers ||= begin
    skip_key = false
    # machine-readable (colon-delimited) key listing;
    # --fingerprint adds "fpr:" records for each key
    data = run "#{utility(:gpg)} #{base_options} " \
      "--with-colons --fixed-list-mode --fingerprint"
    data.lines.map do |line|
      line.strip!
      # process public key record
      if line =~ /^pub:/
        validity, keyid, capabilities = line.split(":").values_at(1, 4, 11)
        # skip keys marked as revoked ('r'), expired ('e'),
        # invalid ('i') or disabled ('D')
        if validity[0, 1] =~ /(r|e|i)/ || capabilities =~ /D/
          skip_key = true
          next nil
        else
          skip_key = false
          # return both the long and short id
          next [keyid[-8..-1], keyid]
        end
      else
        # wait for the next valid public key record
        next if skip_key
        # process UID records for the current public key
        if line =~ /^uid:/
          validity, userid = line.split(":").values_at(1, 9)
          # skip records marked as revoked ('r'), expired ('e')
          # or invalid ('i')
          if validity !~ /(r|e|i)/
            # return the last email found in user id string,
            # since this includes user supplied comments.
            # return nil if no email found.
            email = nil
            str = userid
            while match = str.match(/<.+?@.+?>/)
              email = match[0]
              str = match.post_match
            end
            next email
          end
        # return public key's fingerprint
        elsif line =~ /^fpr:/
          next line.split(":")[9]
        end
        nil # ignore any other lines
      end
    end.flatten.compact
  end
end
end
end
end
| ruby | MIT | 86c9b07c2a2974376888b1506001f77792d6359a | 2026-01-04T15:45:04.712671Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/observers/email_delivery_observer.rb | app/observers/email_delivery_observer.rb | # frozen_string_literal: true
# Mail delivery observer: fans each delivered message out to the
# event-logging and customer-email-info handlers.
class EmailDeliveryObserver
  def self.delivered_email(message)
    HandleEmailEvent.perform(message)
    HandleCustomerEmailInfo.perform(message)
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/observers/email_delivery_observer/handle_customer_email_info.rb | app/observers/email_delivery_observer/handle_customer_email_info.rb | # frozen_string_literal: true
# Records per-purchase / per-charge delivery state (CustomerEmailInfo)
# for each delivered customer email, based on provider-specific headers.
module EmailDeliveryObserver::HandleCustomerEmailInfo
  # Raised when a provider header cannot be parsed; carries the raw
  # headers so the Bugsnag report includes them.
  class InvalidHeaderError < StandardError
    attr_reader :metadata

    def initialize(message, metadata)
      super(message)
      @metadata = metadata
    end

    # Bugsnag reads this hook to attach the raw headers to the report.
    def bugsnag_meta_data
      { debug: metadata }
    end
  end

  extend self

  def perform(message)
    message_info = build_message_info(message)
    # only messages tied to a purchase or a charge are tracked
    return if message_info.ignore?

    email_info = find_or_initialize_customer_email_info(**message_info.attributes)
    email_info.mark_sent!
  rescue InvalidHeaderError => e
    # report the parse failure but never break mail delivery
    Bugsnag.notify(e)
  end

  private
    def build_message_info(message)
      mailer_method_name, purchase_id, charge_id = parse_message_headers(message)

      OpenStruct.new(
        ignore?: purchase_id.nil? && charge_id.nil?,
        attributes: {
          email_name: mailer_method_name,
          purchase_id:,
          charge_id:,
        },
      )
    end

    # The charge id takes precedence when both ids are present.
    def find_or_initialize_customer_email_info(email_name:, purchase_id:, charge_id:)
      if charge_id.present?
        CustomerEmailInfo.find_or_initialize_for_charge(charge_id:, email_name:)
      else
        CustomerEmailInfo.find_or_initialize_for_purchase(purchase_id:, email_name:)
      end
    end

    # Returns [mailer_method, purchase_id, charge_id] extracted from the
    # provider-specific headers; wraps any failure in InvalidHeaderError.
    def parse_message_headers(message)
      email_provider = message.header[MailerInfo.header_name(:email_provider)].value
      case email_provider
      when MailerInfo::EMAIL_PROVIDER_SENDGRID
        parse_sendgrid_headers(message)
      when MailerInfo::EMAIL_PROVIDER_RESEND
        parse_resend_headers(message)
      else
        raise "Unknown email provider: #{email_provider}"
      end
    rescue => e
      raise InvalidHeaderError.new(
        "Failed to parse #{email_provider} header: #{e.message}",
        message.header.to_json
      )
    end

    # Sample SendGrid header:
    # For purchase:
    # {
    #   "environment": "test",
    #   "category": ["CustomerMailer" , "CustomerMailer.receipt"],
    #   "unique_args": {
    #     "purchase_id": 1,
    #     "mailer_class": "CustomerMailer",
    #     "mailer_method":"receipt"
    #   }
    # }
    # For charge:
    # {
    #   "environment": "test",
    #   "category": ["CustomerMailer" , "CustomerMailer.receipt"],
    #   "unique_args": {
    #     "charge_id": 1,
    #     "mailer_class": "CustomerMailer",
    #     "mailer_method":"receipt"
    #   }
    # }
    def parse_sendgrid_headers(message)
      sendgrid_header = message.header[MailerInfo::SENDGRID_X_SMTPAPI_HEADER].value
      data = JSON.parse(sendgrid_header)["unique_args"]
      [data["mailer_method"], data["purchase_id"], data["charge_id"]]
    end

    # Sample Resend header (unencrypted):
    # {
    #   "X-GUM-Email-Provider"=>"resend",
    #   "X-GUM-Environment"=>"development",
    #   "X-GUM-Category"=>"[\"CustomerMailer\",\"CustomerMailer.receipt\"]",
    #   "X-GUM-Mailer-Class"=>"CustomerMailer",
    #   "X-GUM-Mailer-Method"=>"receipt",
    #   "X-GUM-Mailer-Args"=>"\"[1]\"",
    #   "X-GUM-Purchase-Id"=>1,
    #   "X-GUM-Workflow-Ids"=>"[1,2,3]"
    # }
    def parse_resend_headers(message)
      [
        message.header[MailerInfo.header_name(:mailer_method)].value,
        message.header[MailerInfo.header_name(:purchase_id)]&.value,
        message.header[MailerInfo.header_name(:charge_id)]&.value,
      ].map { MailerInfo.decrypt(_1) }
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/observers/email_delivery_observer/handle_email_event.rb | app/observers/email_delivery_observer/handle_email_event.rb | # frozen_string_literal: true
# Logs a send event for every recipient of a delivered message.
module EmailDeliveryObserver::HandleEmailEvent
  extend self

  # Failures are logged per address so one bad recipient does not
  # prevent the others from being recorded.
  def perform(message)
    message.to.each do |email|
      begin
        EmailEvent.log_send_events(email, message.date)
      rescue => e
        Rails.logger.error "Error logging email event - #{email} - #{e.message}"
      end
    end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/jobs/delete_unused_public_files_job.rb | app/jobs/delete_unused_public_files_job.rb | # frozen_string_literal: true
# Soft-deletes PublicFile records whose deletion deadline has passed,
# purging each one's Active Storage blob when no other attachment
# still references it.
class DeleteUnusedPublicFilesJob
  include Sidekiq::Job
  sidekiq_options queue: :low, retry: 3

  def perform
    PublicFile
      .alive
      .with_attached_file
      .where("scheduled_for_deletion_at < ?", Time.current)
      .find_in_batches do |batch|
        # throttle between batches so DB replicas can catch up
        ReplicaLagWatcher.watch
        batch.each do |public_file|
          ActiveRecord::Base.transaction do
            public_file.mark_deleted!
            blob = public_file.blob
            next unless blob
            # keep the blob if any other record is still attached to it
            next if ActiveStorage::Attachment.where(blob_id: blob.id).where.not(record: public_file).exists?
            public_file.file.purge_later
          end
        end
      end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/subdomain_redirector_service.rb | app/services/subdomain_redirector_service.rb | # frozen_string_literal: true
# Admin-managed host(+path) -> URL redirect table, stored as raw text in
# Redis ("host=location" per line) and cached in Rails.cache once parsed.
class SubdomainRedirectorService
  CACHE_KEY = "subdomain_redirects_cache_key"
  private_constant :CACHE_KEY

  REDIS_KEY = "subdomain_redirects_config"
  private_constant :REDIS_KEY

  # Hosts that must never be redirected away.
  PROTECTED_HOSTS = VALID_API_REQUEST_HOSTS + VALID_REQUEST_HOSTS
  private_constant :PROTECTED_HOSTS

  # Replaces the raw config text and invalidates the parsed cache.
  def update(config)
    subdomain_redirect_namespace.set(REDIS_KEY, config)
    Rails.cache.delete(CACHE_KEY)
  end

  # Parsed redirect table: { "host" or "host/path" => "location" }.
  def redirects
    Rails.cache.fetch(CACHE_KEY) do
      config = subdomain_redirect_namespace.get(REDIS_KEY)
      # NOTE(review): this `return` exits the method from inside the
      # fetch block, so a blank config is returned but never written to
      # the cache -- confirm this is intentional.
      return {} if config.blank?

      redirect_config = {}
      config.split("\n").each do |config_line|
        # split on the first '=' only; locations may contain '='
        host, location = config_line.split("=", 2).map(&:strip)
        if host.present? && location.present?
          # keys are stored downcased; protected hosts are skipped
          redirect_config[host.downcase] = location unless PROTECTED_HOSTS.include?(host)
        end
      end
      redirect_config
    end
  end

  # Redirect target for this request, or nil when none is configured.
  def redirect_url_for(request)
    # Remove the trailing '/' from the host if the path is empty
    redirect_url = request.fullpath == "/" ? request.host : request.host + request.fullpath
    redirects[redirect_url]
  end

  # Renders the parsed table back into the "host=location" line format
  # used by #update (e.g. for display in an admin UI).
  def redirect_config_as_text
    redirects.map { |host, location| "#{host}=#{location}" }.join("\n")
  end

  private
    def subdomain_redirect_namespace
      @_subdomain_redirect_namespace ||= Redis::Namespace.new(:subdomain_redirect_namespace, redis: $redis)
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/admin_funds_csv_report_service.rb | app/services/admin_funds_csv_report_service.rb | # frozen_string_literal: true
class AdminFundsCsvReportService
attr_reader :report
def initialize(report)
@report = report
end
def generate
CSV.generate do |csv|
report.each do |(type, data)|
data.each do |payment_method|
transaction_type_key = type == "Purchases" ? "Sales" : "Charges"
row_title = payment_method["Processor"] == "PayPal" ? type : ""
payment_method[transaction_type_key].each do |(key, value)|
if key == :total_transaction_count
csv << [row_title, payment_method["Processor"], key, value]
else
csv << ["", "", key, value]
end
end
end
end
end
end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/sitemap_service.rb | app/services/sitemap_service.rb | # frozen_string_literal: true
# Generates monthly product sitemaps via the sitemap_generator gem,
# uploading them to S3 outside of development/test.
class SitemapService
  HOST = UrlService.root_domain_with_protocol
  # sitemap protocol maximum per file
  MAX_SITEMAP_LINKS = 50_000
  SITEMAP_PATH_MONTHLY = "sitemap/products/monthly"

  # Builds the sitemap for all products created in `date`'s month.
  def generate(date = Date.current)
    # Parse date from Sidekiq job argument
    date = Date.parse(date) if date.is_a?(String)
    period = (date.to_time.beginning_of_month..date.to_time.end_of_month)
    year = date.year
    create_sitemap(period, "sitemap", "#{SITEMAP_PATH_MONTHLY}/#{year}/#{date.month}/")
  end

  private
    # Configures the generator, writes one entry per live product created
    # in `period`, expires the robots.txt sitemap cache, and pings search
    # engines in production.
    def create_sitemap(period, filename, path, include_index: false)
      sitemap_config(filename, path, include_index)
      SitemapGenerator::Sitemap.create do
        Link.alive.where(created_at: period).find_each do |product|
          relative_url = Rails.application.routes.url_helpers.short_link_path(product)
          # each entry is hosted on the creator's own subdomain
          add relative_url, changefreq: "daily", priority: 1, lastmod: product.updated_at, images: [{ loc: product.preview_url }],
            host: product.user.subdomain_with_protocol
        end
      end
      RobotsService.new.expire_sitemap_configs_cache
      if ping_search_engines?
        SitemapGenerator::Sitemap.ping_search_engines
      end
    end

    # Applies global SitemapGenerator settings; when uploading to S3 the
    # files are staged under tmp/ and served via the CDN proxy host.
    def sitemap_config(filename, path, include_index)
      SitemapGenerator::Sitemap.default_host = HOST
      SitemapGenerator::Sitemap.max_sitemap_links = MAX_SITEMAP_LINKS
      SitemapGenerator::Sitemap.sitemaps_path = path
      SitemapGenerator::Sitemap.filename = filename
      SitemapGenerator::Sitemap.include_index = include_index
      SitemapGenerator::Sitemap.include_root = false
      if upload_sitemap_to_s3?
        SitemapGenerator::Sitemap.sitemaps_host = PUBLIC_STORAGE_CDN_S3_PROXY_HOST
        SitemapGenerator::Sitemap.public_path = "tmp/"
        SitemapGenerator::Sitemap.adapter = SitemapGenerator::AwsSdkAdapter.new(
          PUBLIC_STORAGE_S3_BUCKET,
          aws_access_key_id: GlobalConfig.get("S3_SITEMAP_UPLOADER_ACCESS_KEY"),
          aws_secret_access_key: GlobalConfig.get("S3_SITEMAP_UPLOADER_SECRET_ACCESS_KEY"),
          aws_region: AWS_DEFAULT_REGION
        )
      end
    end

    def ping_search_engines?
      Rails.env.production?
    end

    def upload_sitemap_to_s3?
      Rails.env.production? || Rails.env.staging?
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/subscribe_preview_generator_service.rb | app/services/subscribe_preview_generator_service.rb | # frozen_string_literal: true
# Used for OpenGraph consumers like: https://developer.twitter.com/en/docs/twitter-for-websites/cards/overview/summary-card-with-large-image
class SubscribePreviewGeneratorService
  RETINA_PIXEL_RATIO = 2
  ASPECT_RATIO = 128/67r
  WIDTH = 512
  HEIGHT = WIDTH / ASPECT_RATIO
  CHROME_ARGS = [
    "force-device-scale-factor=#{RETINA_PIXEL_RATIO}",
    "headless",
    "no-sandbox",
    "disable-setuid-sandbox",
    "disable-dev-shm-usage",
    "user-data-dir=/tmp/chrome",
  ].freeze

  # Boots a single headless Chrome instance and captures one subscribe-preview
  # screenshot per user, returning an array of PNG binary strings in the same
  # order as +users+. The browser is always shut down, even on error.
  def self.generate_pngs(users)
    chrome_options = Selenium::WebDriver::Chrome::Options.new(args: CHROME_ARGS)
    browser = Selenium::WebDriver.for(:chrome, options: chrome_options)
    users.map { |user| capture_preview(browser, user) }
  ensure
    browser.quit if browser.present?
  end

  # Navigates to the user's subscribe-preview page, waits (up to 10s) for the
  # document to finish loading, fixes the viewport size, and returns PNG bytes.
  def self.capture_preview(browser, user)
    preview_url = Rails.application.routes.url_helpers.user_subscribe_preview_url(
      user.username,
      host: DOMAIN,
      protocol: PROTOCOL,
    )
    browser.navigate.to preview_url
    Selenium::WebDriver::Wait.new(timeout: 10).until do
      browser.execute_script("return document.readyState") == "complete"
    end
    browser.manage.window.size = Selenium::WebDriver::Dimension.new(WIDTH, HEIGHT)
    browser.screenshot_as(:png)
  end
  private_class_method :capture_preview
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/save_installment_service.rb | app/services/save_installment_service.rb | # frozen_string_literal: true
# Creates or updates an Installment (email/post) for a seller, and optionally
# schedules it, publishes it (with an email blast), or sends a preview email —
# all inside a single DB transaction so a failure in any step rolls back the
# whole save. On failure, #error holds a user-presentable message.
class SaveInstallmentService
  attr_reader :seller, :params, :installment, :product, :preview_email_recipient, :error

  # installment: pass an existing record to update; nil to build a new one.
  # preview_email_recipient: the user who receives preview emails.
  def initialize(seller:, params:, installment: nil, preview_email_recipient:)
    @seller = seller
    @params = params
    @installment = installment
    @preview_email_recipient = preview_email_recipient
  end

  # Runs the full save flow. Returns true on success, false otherwise (check
  # #error for the reason). Unexpected exceptions are reported to Bugsnag.
  def process
    set_product_and_enforce_ownership
    return false if error.present?
    ensure_seller_is_eligible_to_publish_or_schedule_emails
    return false if error.present?
    build_installment_if_needed
    begin
      unless installment.new_record?
        # Published installments only accept a restricted set of attributes.
        installment.assign_attributes(installment.published? ? published_installment_params : installment_attrs)
      end
      ActiveRecord::Base.transaction do
        installment.message = SaveContentUpsellsService.new(seller:, content: installment.message, old_content: installment.message_was).from_html
        save_installment
        if params[:to_be_published_at].present?
          schedule_installment
        elsif params[:publish].present?
          publish_installment
        elsif params[:send_preview_email].present?
          installment.send_preview_email(preview_email_recipient)
        end
        # Any step above may set @error; undo all DB changes in that case.
        raise ActiveRecord::Rollback if error.present?
      end
    rescue Installment::InstallmentInvalid, Installment::PreviewEmailError => e
      @error = e.message
    rescue => e
      @error ||= e.message
      Bugsnag.notify(e)
    end
    error.nil?
  end

  private
    # Persists the installment and its files, and syncs profile sections.
    # Type/audience fields are frozen once the installment is published.
    def save_installment
      unless installment.published?
        installment.installment_type = installment_params[:installment_type]
        installment.base_variant = installment_params[:installment_type] == Installment::VARIANT_TYPE ? BaseVariant.find_by_external_id(params[:variant_external_id]) : nil
        installment.link = product_or_variant_type? ? product : nil
        installment.seller = seller
      end
      if (installment.published? || installment.add_and_validate_filters(installment_attrs, seller)) && installment.save
        SaveFilesService.perform(installment, product_files_params)
        update_profile_posts_sections!
      else
        @error = installment.errors.full_messages.first
      end
    end

    # Adds/removes this post from each of the seller's profile post sections
    # based on the submitted shown_in_profile_sections external IDs.
    def update_profile_posts_sections!
      seller.seller_profile_posts_sections.each do |section|
        shown_posts = Set.new(section.shown_posts)
        if section.external_id.in?(installment_params[:shown_in_profile_sections])
          shown_posts.add(installment.id)
        else
          shown_posts.delete(installment.id)
        end
        section.update!(shown_posts: shown_posts.to_a)
      end
    end

    # Schedules publication at params[:to_be_published_at], interpreted in the
    # seller's timezone, and enqueues the job that will publish it.
    def schedule_installment
      @error = "You have to confirm your email address before you can do that." unless seller.confirmed?
      return if error.present?
      timezone = ActiveSupport::TimeZone[seller.timezone]
      to_be_published_at = timezone.parse(params[:to_be_published_at])
      installment_rule = installment.installment_rule || installment.build_installment_rule
      installment_rule.to_be_published_at = to_be_published_at
      installment.ready_to_publish = true
      if installment_rule.save && installment.save
        # The rule version lets the job detect and skip stale reschedules.
        PublishScheduledPostJob.perform_at(to_be_published_at, installment.id, installment_rule.version)
      else
        @error = installment_rule.errors.full_messages.first
      end
    end

    # Publishes immediately, cancels any pending schedule, and kicks off the
    # email blast when the installment is eligible to be blasted.
    def publish_installment
      return if error.present?
      installment.publish!
      installment.installment_rule&.mark_deleted!
      if installment.can_be_blasted?
        blast_id = PostEmailBlast.create!(post: installment, requested_at: Time.current).id
        SendPostBlastEmailsJob.perform_async(blast_id)
      end
    end

    # For product/variant-scoped installments, resolves the product and
    # verifies it belongs to this seller; sets @error otherwise.
    def set_product_and_enforce_ownership
      return unless product_or_variant_type?
      @product = installment_params[:link_id].present? ? Link.fetch(installment_params[:link_id], user: seller) : nil
      @error = "Product not found" unless product.present?
    end

    # Sellers below the minimum sales/payout threshold may not publish or
    # schedule emails (only applies when send_emails is requested).
    def ensure_seller_is_eligible_to_publish_or_schedule_emails
      if (params[:to_be_published_at].present? || params[:publish].present?) && installment_params[:send_emails] && !seller.eligible_to_send_emails?
        @error = "You are not eligible to publish or schedule emails. Please ensure you have made at least $#{Installment::MINIMUM_SALES_CENTS_VALUE / 100} in sales and received a payout."
      end
    end

    def build_installment_if_needed
      return if installment.present?
      @installment = product_or_variant_type? ? product.installments.build(installment_attrs) : seller.installments.build(installment_attrs)
    end

    def product_or_variant_type?
      [Installment::PRODUCT_TYPE, Installment::VARIANT_TYPE].include?(installment_params[:installment_type])
    end

    # Strong-params whitelist for a draft/unpublished installment.
    def installment_params
      params.require(:installment).permit(:name, :message, :installment_type, :link_id,
                                          :paid_more_than_cents, :paid_less_than_cents, :created_after, :created_before,
                                          :bought_from, :shown_on_profile, :send_emails, :allow_comments,
                                          bought_products: [], bought_variants: [], affiliate_products: [],
                                          not_bought_products: [], not_bought_variants: [], shown_in_profile_sections: [])
    end

    def installment_attrs
      installment_params.except(:shown_in_profile_sections)
    end

    # Restricted whitelist for already-published installments: send_emails is
    # locked after a blast, and published_at is only updatable when the date
    # actually changes.
    def published_installment_params
      allowed_params = [:name, :message, :shown_on_profile, :allow_comments]
      allowed_params << :send_emails unless installment.has_been_blasted?
      published_at = params[:installment][:published_at]
      allowed_params << :published_at if published_at.present? && installment.published_at.to_date.to_s != DateTime.parse(published_at).to_date.to_s
      params.require(:installment).permit(allowed_params)
    end

    def product_files_params
      params.require(:installment).permit(files: [:external_id, :position, :url, :stream_only, subtitle_files: [:url, :language]]) || {}
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/notion_api.rb | app/services/notion_api.rb | # frozen_string_literal: true
# Thin client for Notion's OAuth token endpoint.
class NotionApi
  include HTTParty
  base_uri "https://api.notion.com/v1"

  # Exchanges an OAuth authorization +code+ for a Notion bot token, tagging
  # the request with the Gumroad user's external ID and email so Notion can
  # associate the bot with that external account.
  def get_bot_token(code:, user:)
    payload = {
      code:,
      grant_type: "authorization_code",
      external_account: {
        key: user.external_id,
        name: user.email
      }
    }
    self.class.post("/oauth/token", body: payload.to_json, headers:)
  end

  private
    # HTTP Basic auth headers built from the Notion OAuth client credentials.
    def headers
      credentials = [
        GlobalConfig.get("NOTION_OAUTH_CLIENT_ID"),
        GlobalConfig.get("NOTION_OAUTH_CLIENT_SECRET")
      ].join(":")
      {
        "Authorization" => "Basic #{Base64.strict_encode64(credentials)}",
        "Content-Type" => "application/json"
      }
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/update_payout_method.rb | app/services/update_payout_method.rb | # frozen_string_literal: true
# Switches a seller's payout method between a debit card, a country-specific
# bank account, or a PayPal payment address. Exactly one branch of #process
# runs depending on which params are present; switching methods soft-deletes
# the previously active bank account.
class UpdatePayoutMethod
  attr_reader :params, :user

  # Maps each supported bank-account class name to its model class and the
  # extra country-specific params (routing/branch/bank codes etc.) that may be
  # mass-assigned for that account type.
  BANK_ACCOUNT_TYPES = {
    AchAccount.name => { class: AchAccount, permitted_params: [:routing_number] },
    CanadianBankAccount.name => { class: CanadianBankAccount, permitted_params: %i[institution_number transit_number] },
    AustralianBankAccount.name => { class: AustralianBankAccount, permitted_params: [:bsb_number] },
    UkBankAccount.name => { class: UkBankAccount, permitted_params: [:sort_code] },
    EuropeanBankAccount.name => { class: EuropeanBankAccount, permitted_params: [] },
    HongKongBankAccount.name => { class: HongKongBankAccount, permitted_params: [:clearing_code, :branch_code] },
    NewZealandBankAccount.name => { class: NewZealandBankAccount, permitted_params: [] },
    SingaporeanBankAccount.name => { class: SingaporeanBankAccount, permitted_params: [:bank_code, :branch_code] },
    SwissBankAccount.name => { class: SwissBankAccount, permitted_params: [] },
    PolandBankAccount.name => { class: PolandBankAccount, permitted_params: [] },
    CzechRepublicBankAccount.name => { class: CzechRepublicBankAccount, permitted_params: [] },
    ThailandBankAccount.name => { class: ThailandBankAccount, permitted_params: [:bank_code] },
    BulgariaBankAccount.name => { class: BulgariaBankAccount, permitted_params: [] },
    DenmarkBankAccount.name => { class: DenmarkBankAccount, permitted_params: [] },
    HungaryBankAccount.name => { class: HungaryBankAccount, permitted_params: [] },
    KoreaBankAccount.name => { class: KoreaBankAccount, permitted_params: [:bank_code] },
    UaeBankAccount.name => { class: UaeBankAccount, permitted_params: [] },
    AntiguaAndBarbudaBankAccount.name => { class: AntiguaAndBarbudaBankAccount, permitted_params: [:bank_code] },
    TanzaniaBankAccount.name => { class: TanzaniaBankAccount, permitted_params: [:bank_code] },
    NamibiaBankAccount.name => { class: NamibiaBankAccount, permitted_params: [:bank_code] },
    IsraelBankAccount.name => { class: IsraelBankAccount, permitted_params: [] },
    TrinidadAndTobagoBankAccount.name => { class: TrinidadAndTobagoBankAccount, permitted_params: [:bank_code, :branch_code] },
    PhilippinesBankAccount.name => { class: PhilippinesBankAccount, permitted_params: [:bank_code] },
    RomaniaBankAccount.name => { class: RomaniaBankAccount, permitted_params: [] },
    SwedenBankAccount.name => { class: SwedenBankAccount, permitted_params: [] },
    MexicoBankAccount.name => { class: MexicoBankAccount, permitted_params: [] },
    ArgentinaBankAccount.name => { class: ArgentinaBankAccount, permitted_params: [] },
    LiechtensteinBankAccount.name => { class: LiechtensteinBankAccount, permitted_params: [] },
    PeruBankAccount.name => { class: PeruBankAccount, permitted_params: [] },
    NorwayBankAccount.name => { class: NorwayBankAccount, permitted_params: [] },
    IndianBankAccount.name => { class: IndianBankAccount, permitted_params: [:ifsc] },
    VietnamBankAccount.name => { class: VietnamBankAccount, permitted_params: [:bank_code] },
    TaiwanBankAccount.name => { class: TaiwanBankAccount, permitted_params: [:bank_code] },
    BosniaAndHerzegovinaBankAccount.name => { class: BosniaAndHerzegovinaBankAccount, permitted_params: [:bank_code] },
    IndonesiaBankAccount.name => { class: IndonesiaBankAccount, permitted_params: [:bank_code] },
    CostaRicaBankAccount.name => { class: CostaRicaBankAccount, permitted_params: [] },
    BotswanaBankAccount.name => { class: BotswanaBankAccount, permitted_params: [:bank_code] },
    ChileBankAccount.name => { class: ChileBankAccount, permitted_params: [:bank_code] },
    PakistanBankAccount.name => { class: PakistanBankAccount, permitted_params: [:bank_code] },
    TurkeyBankAccount.name => { class: TurkeyBankAccount, permitted_params: [:bank_code] },
    MoroccoBankAccount.name => { class: MoroccoBankAccount, permitted_params: [:bank_code] },
    AzerbaijanBankAccount.name => { class: AzerbaijanBankAccount, permitted_params: [:bank_code, :branch_code] },
    AlbaniaBankAccount.name => { class: AlbaniaBankAccount, permitted_params: [:bank_code] },
    BahrainBankAccount.name => { class: BahrainBankAccount, permitted_params: [:bank_code] },
    JordanBankAccount.name => { class: JordanBankAccount, permitted_params: [:bank_code] },
    EthiopiaBankAccount.name => { class: EthiopiaBankAccount, permitted_params: [:bank_code] },
    BruneiBankAccount.name => { class: BruneiBankAccount, permitted_params: [:bank_code] },
    GuyanaBankAccount.name => { class: GuyanaBankAccount, permitted_params: [:bank_code] },
    GuatemalaBankAccount.name => { class: GuatemalaBankAccount, permitted_params: [:bank_code] },
    NigeriaBankAccount.name => { class: NigeriaBankAccount, permitted_params: [:bank_code] },
    SerbiaBankAccount.name => { class: SerbiaBankAccount, permitted_params: [:bank_code] },
    SouthAfricaBankAccount.name => { class: SouthAfricaBankAccount, permitted_params: [:bank_code] },
    KenyaBankAccount.name => { class: KenyaBankAccount, permitted_params: [:bank_code] },
    RwandaBankAccount.name => { class: RwandaBankAccount, permitted_params: [:bank_code] },
    EgyptBankAccount.name => { class: EgyptBankAccount, permitted_params: [:bank_code] },
    ColombiaBankAccount.name => { class: ColombiaBankAccount, permitted_params: [:bank_code, :account_type] },
    SaudiArabiaBankAccount.name => { class: SaudiArabiaBankAccount, permitted_params: [:bank_code] },
    JapanBankAccount.name => { class: JapanBankAccount, permitted_params: [:bank_code, :branch_code] },
    KazakhstanBankAccount.name => { class: KazakhstanBankAccount, permitted_params: [:bank_code] },
    EcuadorBankAccount.name => { class: EcuadorBankAccount, permitted_params: [:bank_code] },
    MalaysiaBankAccount.name => { class: MalaysiaBankAccount, permitted_params: [:bank_code] },
    GibraltarBankAccount.name => { class: GibraltarBankAccount, permitted_params: [] },
    UruguayBankAccount.name => { class: UruguayBankAccount, permitted_params: [:bank_code] },
    MauritiusBankAccount.name => { class: MauritiusBankAccount, permitted_params: [:bank_code] },
    AngolaBankAccount.name => { class: AngolaBankAccount, permitted_params: [:bank_code] },
    NigerBankAccount.name => { class: NigerBankAccount, permitted_params: [] },
    SanMarinoBankAccount.name => { class: SanMarinoBankAccount, permitted_params: [:bank_code] },
    JamaicaBankAccount.name => { class: JamaicaBankAccount, permitted_params: [:bank_code, :branch_code] },
    BangladeshBankAccount.name => { class: BangladeshBankAccount, permitted_params: [:bank_code] },
    BhutanBankAccount.name => { class: BhutanBankAccount, permitted_params: [:bank_code] },
    LaosBankAccount.name => { class: LaosBankAccount, permitted_params: [:bank_code] },
    MozambiqueBankAccount.name => { class: MozambiqueBankAccount, permitted_params: [:bank_code] },
    OmanBankAccount.name => { class: OmanBankAccount, permitted_params: [:bank_code] },
    DominicanRepublicBankAccount.name => { class: DominicanRepublicBankAccount, permitted_params: [:bank_code, :branch_code] },
    UzbekistanBankAccount.name => { class: UzbekistanBankAccount, permitted_params: [:bank_code, :branch_code] },
    BoliviaBankAccount.name => { class: BoliviaBankAccount, permitted_params: [:bank_code] },
    TunisiaBankAccount.name => { class: TunisiaBankAccount, permitted_params: [] },
    MoldovaBankAccount.name => { class: MoldovaBankAccount, permitted_params: [:bank_code] },
    NorthMacedoniaBankAccount.name => { class: NorthMacedoniaBankAccount, permitted_params: [:bank_code] },
    PanamaBankAccount.name => { class: PanamaBankAccount, permitted_params: [:bank_code] },
    ElSalvadorBankAccount.name => { class: ElSalvadorBankAccount, permitted_params: [:bank_code] },
    MadagascarBankAccount.name => { class: MadagascarBankAccount, permitted_params: [:bank_code] },
    ParaguayBankAccount.name => { class: ParaguayBankAccount, permitted_params: [:bank_code] },
    GhanaBankAccount.name => { class: GhanaBankAccount, permitted_params: [:bank_code] },
    ArmeniaBankAccount.name => { class: ArmeniaBankAccount, permitted_params: [:bank_code] },
    SriLankaBankAccount.name => { class: SriLankaBankAccount, permitted_params: [:bank_code, :branch_code] },
    KuwaitBankAccount.name => { class: KuwaitBankAccount, permitted_params: [:bank_code] },
    IcelandBankAccount.name => { class: IcelandBankAccount, permitted_params: [] },
    QatarBankAccount.name => { class: QatarBankAccount, permitted_params: [:bank_code] },
    BahamasBankAccount.name => { class: BahamasBankAccount, permitted_params: [:bank_code] },
    SaintLuciaBankAccount.name => { class: SaintLuciaBankAccount, permitted_params: [:bank_code] },
    SenegalBankAccount.name => { class: SenegalBankAccount, permitted_params: [] },
    CambodiaBankAccount.name => { class: CambodiaBankAccount, permitted_params: [:bank_code] },
    MongoliaBankAccount.name => { class: MongoliaBankAccount, permitted_params: [:bank_code] },
    GabonBankAccount.name => { class: GabonBankAccount, permitted_params: [:bank_code] },
    MonacoBankAccount.name => { class: MonacoBankAccount, permitted_params: [] },
    AlgeriaBankAccount.name => { class: AlgeriaBankAccount, permitted_params: [:bank_code] },
    MacaoBankAccount.name => { class: MacaoBankAccount, permitted_params: [:bank_code] },
    BeninBankAccount.name => { class: BeninBankAccount, permitted_params: [] },
    CoteDIvoireBankAccount.name => { class: CoteDIvoireBankAccount, permitted_params: [] },
  }.freeze
  private_constant :BANK_ACCOUNT_TYPES

  def self.bank_account_types
    BANK_ACCOUNT_TYPES
  end

  def initialize(user_params:, seller:)
    @params = user_params
    @user = seller
  end

  # Applies the payout-method change. Returns { success: true } or
  # { error: <symbol>, data: <message> } for validation failures.
  def process
    old_bank_account = user.active_bank_account
    if params[:card]
      # Branch 1: pay out to a debit card via a tokenized chargeable.
      chargeable = ChargeProcessor.get_chargeable_for_params(params[:card], nil)
      return { error: :check_card_information_prompt } if chargeable.nil?
      credit_card = CreditCard.create(chargeable)
      return { error: :credit_card_error, data: credit_card.errors.full_messages.to_sentence } if credit_card.errors.present?
      old_bank_account.try(:mark_deleted!)
      bank_account = CardBankAccount.new
      bank_account.user = user
      bank_account.credit_card = credit_card
      bank_account.save
      return { error: :bank_account_error, data: bank_account.errors.full_messages.to_sentence } if bank_account.errors.present?
      # Clearing the PayPal payment address makes the card the active method.
      user.update!(payment_address: "") if user.payment_address.present?
    elsif params[:bank_account].present? &&
          params[:bank_account][:type].present? &&
          (params[:bank_account][:account_holder_full_name].present? || params[:bank_account][:account_number].present?)
      # Branch 2: pay out to a country-specific bank account.
      raise unless params[:bank_account][:type].in?(BANK_ACCOUNT_TYPES)
      if params[:bank_account][:account_number].present?
        # Normalize: dashes and surrounding whitespace are not significant.
        bank_account_account_number = params[:bank_account][:account_number].delete("-").strip
        bank_account_account_number_confirmation = params[:bank_account][:account_number_confirmation].delete("-").strip
        return { error: :account_number_does_not_match } if bank_account_account_number != bank_account_account_number_confirmation
        old_bank_account.try(:mark_deleted!)
        bank_account = BANK_ACCOUNT_TYPES[params[:bank_account][:type]][:class].new(bank_account_params_for_bank_account_type)
        bank_account.user = user
        bank_account.account_holder_full_name = params[:bank_account][:account_holder_full_name]
        bank_account.account_number = bank_account_account_number
        bank_account.account_number_last_four = bank_account_account_number.last(4)
        bank_account.account_type = params[:bank_account][:account_type] if params[:bank_account][:account_type].present?
        bank_account.save
        return { error: :bank_account_error, data: bank_account.errors.full_messages.to_sentence } if bank_account.errors.present?
        user.update!(payment_address: "") if user.payment_address.present?
      elsif params[:bank_account][:account_holder_full_name].present?
        # Name-only update: keep the existing account, just rename the holder.
        old_bank_account.update(account_holder_full_name: params[:bank_account][:account_holder_full_name])
      end
    elsif params[:payment_address].present?
      # Branch 3: pay out via PayPal. Switching away from Stripe forfeits the
      # unpaid Stripe balance and tears down the Stripe-linked records.
      payment_address = params[:payment_address].strip
      return { error: :provide_valid_email_prompt } unless EmailFormatValidator.valid?(payment_address)
      return { error: :provide_ascii_only_email_prompt } unless payment_address.ascii_only?
      return { error: :paypal_payouts_not_supported } unless paypal_payouts_supported?
      user.payment_address = payment_address
      user.save!
      user.forfeit_unpaid_balance!(:payout_method_change)
      user.stripe_account&.delete_charge_processor_account!
      user.active_bank_account&.mark_deleted!
      user.user_compliance_info_requests.requested.find_each(&:mark_provided!)
      # Stripe-initiated payout pauses no longer apply once Stripe is gone,
      # unless the user is flagged/suspended for other reasons.
      user.update!(payouts_paused_internally: false, payouts_paused_by: nil) if user.payouts_paused_by_source == User::PAYOUT_PAUSE_SOURCE_STRIPE && !user.flagged? && !user.suspended?
      CheckPaymentAddressWorker.perform_async(user.id)
    end
    { success: true }
  end

  private
    # Strong-params slice for the selected bank-account type's extra fields.
    def bank_account_params_for_bank_account_type
      bank_account_type = params[:bank_account][:type]
      permitted_params = BANK_ACCOUNT_TYPES[bank_account_type][:permitted_params]
      params[:bank_account].permit(*permitted_params)
    end

    def paypal_payouts_supported?
      user.can_setup_paypal_payouts? || switching_to_uae_individual_account?
    end

    # UAE individuals are a special case allowed onto PayPal payouts even when
    # can_setup_paypal_payouts? would otherwise deny it.
    def switching_to_uae_individual_account?
      params.dig(:user, :country) == Compliance::Countries::ARE.alpha2 && !params.dig(:user, :is_business)
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/tra_tin_validation_service.rb | app/services/tra_tin_validation_service.rb | # frozen_string_literal: true
# Format check for a Tanzania Revenue Authority (TRA) Taxpayer Identification
# Number. NOTE(review): this is purely syntactic — no registry lookup is made.
class TraTinValidationService
  # Expected shape: two digits, six digits, one uppercase letter,
  # dash-separated (e.g. "12-345678-X").
  TIN_FORMAT = /\A\d{2}-\d{6}-[A-Z]\z/

  attr_reader :tra_tin

  def initialize(tra_tin)
    @tra_tin = tra_tin
  end

  # Returns true when the TIN is present and matches TIN_FORMAT.
  def process
    return false if tra_tin.blank?

    TIN_FORMAT.match?(tra_tin)
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/abn_validation_service.rb | app/services/abn_validation_service.rb | # frozen_string_literal: true
# Checks an Australian Business Number (ABN) for GST registration via the
# Vatstack validations API. Lookups are cached for 10 minutes to avoid
# hammering the external service with repeated checks of the same number.
class AbnValidationService
  VATSTACK_VALIDATIONS_URL = "https://api.vatstack.com/v1/validations"

  attr_reader :abn_id

  def initialize(abn_id)
    @abn_id = abn_id
  end

  # Returns true only when Vatstack reports the ABN as both valid and active.
  # Blank input, malformed queries, and inconclusive responses yield false.
  def process
    return false if abn_id.blank?

    response = Rails.cache.fetch("vatstack_validation_#{abn_id}", expires_in: 10.minutes) do
      request_validation
    end

    return false if response["code"] == "INVALID_INPUT"
    return false if response["valid"].nil?

    response["valid"] && response["active"]
  end

  private
    def request_validation
      HTTParty.post(
        VATSTACK_VALIDATIONS_URL,
        body: { "type" => "au_gst", "query" => abn_id },
        timeout: 5,
        headers: { "X-API-KEY" => VATSTACK_API_KEY }
      )
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/update_user_compliance_info.rb | app/services/update_user_compliance_info.rb | # frozen_string_literal: true
# Applies a compliance-info update for a user by duplicating the current
# UserComplianceInfo record with the submitted changes layered on top
# (records are immutable; a new row supersedes the old), then forwards the
# new info to Stripe.
class UpdateUserComplianceInfo
  attr_reader :compliance_params, :user

  def initialize(compliance_params:, user:)
    @compliance_params = compliance_params
    @user = user
  end

  # Returns { success: true } on success, or { success: false,
  # error_message: ... } (plus error_code: "stripe_error" when Stripe rejects
  # the update). A blank params hash is a successful no-op.
  def process
    if compliance_params.present?
      old_compliance_info = user.fetch_or_build_user_compliance_info
      saved, new_compliance_info = old_compliance_info.dup_and_save do |new_compliance_info|
        # if the following fields are submitted and are blank, we don't clear the field for the user
        new_compliance_info.first_name = compliance_params[:first_name] if compliance_params[:first_name].present?
        new_compliance_info.last_name = compliance_params[:last_name] if compliance_params[:last_name].present?
        # Kanji/kana variants are used for Japanese accounts.
        new_compliance_info.first_name_kanji = compliance_params[:first_name_kanji] if compliance_params[:first_name_kanji].present?
        new_compliance_info.last_name_kanji = compliance_params[:last_name_kanji] if compliance_params[:last_name_kanji].present?
        new_compliance_info.first_name_kana = compliance_params[:first_name_kana] if compliance_params[:first_name_kana].present?
        new_compliance_info.last_name_kana = compliance_params[:last_name_kana] if compliance_params[:last_name_kana].present?
        new_compliance_info.street_address = compliance_params[:street_address] if compliance_params[:street_address].present?
        new_compliance_info.building_number = compliance_params[:building_number] if compliance_params[:building_number].present?
        new_compliance_info.street_address_kanji = compliance_params[:street_address_kanji] if compliance_params[:street_address_kanji].present?
        new_compliance_info.street_address_kana = compliance_params[:street_address_kana] if compliance_params[:street_address_kana].present?
        new_compliance_info.city = compliance_params[:city] if compliance_params[:city].present?
        new_compliance_info.state = compliance_params[:state] if compliance_params[:state].present?
        # Country changes are only accepted alongside is_business — presumably
        # to prevent individual accounts from switching country here.
        new_compliance_info.country = Compliance::Countries.mapping[compliance_params[:country]] if compliance_params[:country].present? && compliance_params[:is_business]
        new_compliance_info.zip_code = compliance_params[:zip_code] if compliance_params[:zip_code].present?
        new_compliance_info.business_name = compliance_params[:business_name] if compliance_params[:business_name].present?
        new_compliance_info.business_name_kanji = compliance_params[:business_name_kanji] if compliance_params[:business_name_kanji].present?
        new_compliance_info.business_name_kana = compliance_params[:business_name_kana] if compliance_params[:business_name_kana].present?
        new_compliance_info.business_street_address = compliance_params[:business_street_address] if compliance_params[:business_street_address].present?
        new_compliance_info.business_building_number = compliance_params[:business_building_number] if compliance_params[:business_building_number].present?
        new_compliance_info.business_street_address_kanji = compliance_params[:business_street_address_kanji] if compliance_params[:business_street_address_kanji].present?
        new_compliance_info.business_street_address_kana = compliance_params[:business_street_address_kana] if compliance_params[:business_street_address_kana].present?
        new_compliance_info.business_city = compliance_params[:business_city] if compliance_params[:business_city].present?
        new_compliance_info.business_state = compliance_params[:business_state] if compliance_params[:business_state].present?
        new_compliance_info.business_country = Compliance::Countries.mapping[compliance_params[:business_country]] if compliance_params[:business_country].present? && compliance_params[:is_business]
        new_compliance_info.business_zip_code = compliance_params[:business_zip_code] if compliance_params[:business_zip_code].present?
        new_compliance_info.business_type = compliance_params[:business_type] if compliance_params[:business_type].present?
        new_compliance_info.is_business = compliance_params[:is_business] unless compliance_params[:is_business].nil?
        # ssn_last_four and individual_tax_id both write the same column;
        # a full individual_tax_id, when present, wins over ssn_last_four.
        new_compliance_info.individual_tax_id = compliance_params[:ssn_last_four] if compliance_params[:ssn_last_four].present?
        new_compliance_info.individual_tax_id = compliance_params[:individual_tax_id] if compliance_params[:individual_tax_id].present?
        new_compliance_info.business_tax_id = compliance_params[:business_tax_id] if compliance_params[:business_tax_id].present?
        # Birthday is only set when a positive year is supplied alongside
        # month/day components.
        new_compliance_info.birthday = Date.new(compliance_params[:dob_year].to_i, compliance_params[:dob_month].to_i, compliance_params[:dob_day].to_i) if compliance_params[:dob_year].present? && compliance_params[:dob_year].to_i > 0
        # The Stripe sync happens explicitly below rather than via callback.
        new_compliance_info.skip_stripe_job_on_create = true
        new_compliance_info.phone = compliance_params[:phone] if compliance_params[:phone].present?
        new_compliance_info.business_phone = compliance_params[:business_phone] if compliance_params[:business_phone].present?
        new_compliance_info.job_title = compliance_params[:job_title] if compliance_params[:job_title].present?
        new_compliance_info.nationality = compliance_params[:nationality] if compliance_params[:nationality].present?
        new_compliance_info.business_vat_id_number = compliance_params[:business_vat_id_number] if compliance_params[:business_vat_id_number].present?
      end
      return { success: false, error_message: new_compliance_info.errors.full_messages.to_sentence } unless saved
      begin
        StripeMerchantAccountManager.handle_new_user_compliance_info(new_compliance_info)
      rescue Stripe::InvalidRequestError => e
        # Strip Stripe's trailing "Please contact us ..." boilerplate before
        # surfacing the message to the user.
        return { success: false, error_message: "Compliance info update failed with this error: #{e.message.split("Please contact us").first.strip}", error_code: "stripe_error" }
      end
    end
    { success: true }
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/mva_validation_service.rb | app/services/mva_validation_service.rb | # frozen_string_literal: true
# Checks a Norwegian VAT (MVA) number via the Vatstack validations API.
# Lookups are cached for 10 minutes to avoid repeated calls for the same
# number.
class MvaValidationService
  VATSTACK_VALIDATIONS_URL = "https://api.vatstack.com/v1/validations"

  attr_reader :mva_id

  def initialize(mva_id)
    @mva_id = mva_id
  end

  # Returns true only when Vatstack reports the MVA number as both valid and
  # active. Blank input, malformed queries, and inconclusive responses yield
  # false.
  def process
    return false if mva_id.blank?

    response = Rails.cache.fetch("vatstack_validation_#{mva_id}", expires_in: 10.minutes) do
      request_validation
    end

    return false if response["code"] == "INVALID_INPUT"
    return false if response["valid"].nil?

    response["valid"] && response["active"]
  end

  private
    def request_validation
      HTTParty.post(
        VATSTACK_VALIDATIONS_URL,
        body: { "type" => "no_vat", "query" => mva_id },
        timeout: 5,
        headers: { "X-API-KEY" => VATSTACK_API_KEY }
      )
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/oman_vat_number_validation_service.rb | app/services/oman_vat_number_validation_service.rb | # frozen_string_literal: true
# Format check for an Omani VAT registration number. NOTE(review): purely
# syntactic — no registry lookup is performed.
class OmanVatNumberValidationService
  # Expected shape: the literal "OM" prefix followed by exactly ten digits.
  VAT_FORMAT = /\AOM\d{10}\z/

  attr_reader :vat_number

  def initialize(vat_number)
    @vat_number = vat_number
  end

  # Returns true when the number is present and matches VAT_FORMAT.
  def process
    return false if vat_number.blank?

    VAT_FORMAT.match?(vat_number)
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/stripe_tax_forms_api.rb | app/services/stripe_tax_forms_api.rb | # frozen_string_literal: true
# Fetches a connected account's tax forms (e.g. 1099s) from Stripe's
# tax-forms API (a beta endpoint accessed via raw requests) and downloads the
# PDF for a given reporting year.
class StripeTaxFormsApi
  include HTTParty

  def initialize(stripe_account_id:, form_type:, year:)
    @stripe_account_id = stripe_account_id
    @form_type = form_type
    @year = year
  end

  # Streams the form's PDF into a Tempfile and returns it rewound to the
  # start, or nil if no form exists for the year or the download fails
  # (failures are reported to Bugsnag).
  def download_tax_form
    tax_form = tax_forms_by_year[year]
    return if tax_form.nil?
    pdf = Tempfile.new(["tax_form_#{form_type}_#{year}_#{stripe_account_id}", ".pdf"])
    pdf.binmode
    url = "https://files.stripe.com/v1/tax/forms/#{tax_form.id}/pdf"
    # The beta-feature version header is required for this endpoint.
    headers = { "Authorization" => "Bearer #{Stripe.api_key}", "Stripe-Version" => "2022-11-15; retrieve_tax_forms_beta=v1;" }
    HTTParty.get(url, headers:, stream_body: true) do |fragment|
      pdf.write(fragment)
    end
    pdf.rewind
    pdf
  rescue HTTParty::Error => e
    Bugsnag.notify(e)
    nil
  end

  # Returns a hash of { reporting_year => tax form object } for the account,
  # cached for a day. Returns {} on Stripe errors (reported to Bugsnag).
  # Raises for unsupported form types.
  def tax_forms_by_year
    raise "Invalid tax form type: #{form_type}" unless UserTaxForm::TAX_FORM_TYPES.include?(form_type)
    Rails.cache.fetch("stripe_tax_forms_#{form_type}_#{stripe_account_id}", expires_in: 1.day) do
      params = { type: form_type, "payee[account]": stripe_account_id }
      opts = { stripe_version: Stripe.api_version }
      tax_forms = {}
      # raw_request + deserialize because stripe-ruby has no typed binding
      # for the beta /v1/tax/forms resource.
      response = Stripe.raw_request(:get, "/v1/tax/forms", params, opts)
      Stripe.deserialize(response.http_body).auto_paging_each do |tax_form|
        # The reporting year lives under a type-named sub-object
        # (e.g. tax_form["us_1099_k"].reporting_year).
        year = tax_form[tax_form.type].reporting_year
        tax_forms[year] = tax_form
      end
      tax_forms
    end
  rescue Stripe::StripeError => e
    Bugsnag.notify(e)
    {}
  end

  private
    attr_reader :stripe_account_id, :form_type, :year
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/qst_validation_service.rb | app/services/qst_validation_service.rb | # frozen_string_literal: true
class QstValidationService
  attr_reader :qst_id

  def initialize(qst_id)
    @qst_id = qst_id
  end

  # Validates a Québec QST registration number against Revenu Québec's
  # public web service. Results are memoized in the Rails cache for ten
  # minutes so repeated attempts don't re-hit the API.
  def process
    return false if qst_id.blank?

    Rails.cache.fetch("revenu_quebec_validation_#{qst_id}", expires_in: 10.minutes) do
      registration_active?
    end
  end

  private
    QST_VALIDATION_ENDPOINT_TEMPLATE = Addressable::Template.new(
      "https://svcnab2b.revenuquebec.ca/2019/02/ValidationTVQ/{qst_id}"
    )

    # Valid only when the service responds 200 and reports the "R"
    # (registered) status for the number.
    def registration_active?
      endpoint = QST_VALIDATION_ENDPOINT_TEMPLATE.expand(qst_id:).to_s
      response = HTTParty.get(endpoint, timeout: 5)
      return false unless response.code == 200

      response.parsed_response.dig("Resultat", "StatutSousDossierUsager") == "R"
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/suo_semaphore.rb | app/services/suo_semaphore.rb | # frozen_string_literal: true
# Factory for Redis-backed distributed locks (Suo) used around critical
# sections like charging and inventory updates.
class SuoSemaphore
  class << self
    # Lock guarding recurring subscription charges for one subscription.
    def recurring_charge(subscription_id)
      build_lock("locks:recurring_charge:#{subscription_id}")
    end

    # Lock guarding inventory mutations for one product. Stale locks are
    # reaped after 60 seconds; extra_options may override any setting.
    def product_inventory(product_id, extra_options = {})
      build_lock("locks:product:#{product_id}:inventory",
                 { stale_lock_expiration: 60 }.merge(extra_options))
    end

    private
      def build_lock(key, overrides = {})
        Suo::Client::Redis.new(key, default_options.merge(overrides))
      end

      def default_options
        { client: $redis }
      end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/expiring_s3_file_service.rb | app/services/expiring_s3_file_service.rb | # frozen_string_literal: true
# Uploads a file to S3 and hands back a presigned, expiring download URL.
class ExpiringS3FileService
  DEFAULT_FILE_EXPIRY = 7.days

  # Either +filename+ or +extension+ must be supplied; when only an
  # extension is given, a unique name is generated from +prefix+, the
  # current unix timestamp, and a random hex string. +path+ (optional)
  # prefixes the object key.
  def initialize(file:,
                 filename: nil,
                 path: nil,
                 prefix: "File",
                 extension: nil,
                 expiry: DEFAULT_FILE_EXPIRY,
                 bucket: S3_BUCKET)
    raise ArgumentError, "Either filename or extension is required" unless filename || extension

    @file = file
    @key = build_key(filename, path, prefix, extension)
    @expiry = expiry.to_i
    @bucket = bucket
  end

  # Uploads the file and returns a presigned GET URL that forces an
  # attachment download and expires after the configured interval.
  def perform
    s3_object = Aws::S3::Resource.new.bucket(@bucket).object(@key)
    # upload_file transparently switches to multipart uploads for large files:
    # https://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Object.html#upload_file-instance_method
    s3_object.upload_file(@file, content_type: MIME::Types.type_for(@key).first.to_s)
    s3_object.presigned_url(:get, expires_in: @expiry, response_content_disposition: "attachment")
  end

  private
    def build_key(filename, path, prefix, extension)
      filename ||= "#{prefix}_#{Time.current.strftime("%s")}_#{SecureRandom.hex}.#{extension}"
      path.present? ? File.join(path, filename) : filename
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/save_files_service.rb | app/services/save_files_service.rb | # frozen_string_literal: true
class SaveFilesService
  delegate :product_files, to: :owner

  attr_reader :owner, :params, :rich_content_params

  def self.perform(*args)
    new(*args).perform
  end

  # owner  - any model that mixes in WithProductFiles
  # params - nested hash of product-file attributes, e.g.:
  #   {
  #     files: {
  #       "0" => { unique_url_identifier: "...", display_name: "...",
  #                description: "...",
  #                subtitles: { "..." => { url: "...", language: "English" } } },
  #       ...
  #     },
  #     folders: { "0" => { id: "...", name: "..." }, ... }
  #   }
  # rich_content_params - rich-content payload forwarded to save_files!
  def initialize(owner, params, rich_content_params = [])
    @owner = owner
    @params = params
    @rich_content_params = rich_content_params
  end

  def perform
    params[:files] = [] unless params.key?(:files)
    owner.save_files!(normalized_file_params, rich_content_params)
  end

  private
    # Accepts either an array of file params or an index-keyed hash (as
    # produced by Rails form params) and rewrites URL "files" to use the
    # "link" filetype, stripping the extension key in the process.
    def normalized_file_params
      file_params_list = params[:files]
      file_params_list = file_params_list.values unless file_params_list.is_a?(Array)
      file_params_list.each do |file_params|
        file_params[:filetype] = "link" if file_params.delete(:extension) == "URL"
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/recommended_wishlists_service.rb | app/services/recommended_wishlists_service.rb | # frozen_string_literal: true
# Picks wishlists to surface as recommendations.
class RecommendedWishlistsService
  # Returns up to +limit+ recommendable wishlists, most-followed first.
  #
  # curated_product_ids / taxonomy_id narrow candidates to wishlists
  # containing those products (or products in that taxonomy). When the
  # narrowed set falls only partially short of +limit+ and no taxonomy
  # filter is active, it is topped up from the unfiltered scope.
  # NOTE(review): when the narrowed set is completely empty, no backfill
  # happens and an empty relation is returned — confirm that's intended.
  def self.fetch(limit:, current_seller:, curated_product_ids: [], taxonomy_id: nil)
    scope = Wishlist.where(recommendable: true).order(recent_follower_count: :desc)
    # Never recommend the viewer's own wishlists.
    scope = scope.where.not(user_id: current_seller.id) if current_seller.present?
    return scope.limit(limit) if curated_product_ids.blank? && taxonomy_id.blank?

    # Cap the candidate pool at 10k rows so the joins below stay bounded.
    matching_wishlists = Wishlist.from(scope.limit(10_000), :wishlists)
    matching_wishlists = matching_wishlists.joins(:wishlist_products).where(wishlist_products: { product_id: curated_product_ids }) if curated_product_ids.present?
    matching_wishlists = matching_wishlists.joins(wishlist_products: :product).where(links: { taxonomy_id: }) if taxonomy_id.present?
    matching_wishlists = matching_wishlists.distinct.limit(limit).to_a
    missing_count = limit - matching_wishlists.count
    if taxonomy_id.blank? && missing_count > 0 && missing_count < limit
      matching_wishlists += scope.where.not(id: matching_wishlists.pluck(:id)).limit(missing_count)
    end
    # Re-fetch by id using MySQL FIELD() so the relevance ordering
    # assembled above survives the final query.
    ids = matching_wishlists.pluck(:id)
    Wishlist.where(id: ids).order(Arel.sql("FIELD(id, #{ids.map { ActiveRecord::Base.connection.quote(_1) }.join(',')})"))
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/product_duplicator_service.rb | app/services/product_duplicator_service.rb | # frozen_string_literal: true
# Deep-copies a product (Link) and its satellite records into a new
# draft product. The new product's id is remembered in Redis for a short
# window so the UI can poll for the finished duplicate.
class ProductDuplicatorService
  REDIS_STORAGE_NS = Redis::Namespace.new(:product_duplicator_service, redis: $redis)
  private_constant :REDIS_STORAGE_NS

  # How long the original-id -> duplicate-id mapping lives in Redis.
  TIMEOUT_FOR_DUPLICATE_PRODUCT_CACHE = 10.minutes
  private_constant :TIMEOUT_FOR_DUPLICATE_PRODUCT_CACHE

  # Status identifiers for the duplication lifecycle.
  DUPLICATING = "product_duplicating"
  DUPLICATED = "product_duplicated"
  DUPLICATION_FAILED = "product_duplication_failed"

  attr_reader :product, :duplicated_product

  def initialize(product_id)
    @product = Link.find(product_id)
    # Maintains a mapping of original product file external IDs and
    # the new product file external IDs like:
    # { "old_product_file_external_id" => "new_product_file_external_id" }
    @product_file_external_ids_mapping = {}
  end

  # Copies the product and all dependent records inside one DB
  # transaction, then post-processes attachments and records the new id
  # in Redis. Returns the duplicated (draft) product. Ordering matters:
  # files are copied before variants/skus so variant->file links can be
  # remapped via @product_file_external_ids_mapping.
  def duplicate
    ApplicationRecord.connection.stick_to_primary!
    ApplicationRecord.connection.transaction do
      @duplicated_product = product.dup
      # Permalinks must be unique; clearing lets the model regenerate them.
      duplicated_product.unique_permalink = nil
      duplicated_product.custom_permalink = nil
      duplicated_product.name = "#{product.name} (copy)"
      duplicated_product.price_cents = product.price_cents
      duplicated_product.rental_price_cents = product.rental_price_cents if product.rental_price_cents.present?
      duplicated_product.is_collab = false
      mark_duplicate_product_as_draft
      duplicated_product.is_duplicating = false
      duplicated_product.save!
      duplicate_prices
      duplicate_asset_previews
      duplicate_thumbnail
      duplicate_product_files # Copy product files before copying the variants and skus.
      duplicate_public_product_files
      duplicate_rich_content(original_entity: product, duplicate_entity: duplicated_product)
      duplicate_offer_codes
      duplicate_product_taggings
      duplicate_skus # Copy skus before variant categories and variants
      duplicate_variant_categories_and_variants
      duplicate_preorder_link
      duplicate_third_party_analytics
      duplicate_shipping_destinations
      duplicate_refund_policy
    end
    # Post process Asset Previews if product was persisted from outside the transaction
    post_process_attachments
    set_recently_duplicated_product
    duplicated_product
  end

  # Looks up (via Redis) a duplicate created for this product within the
  # cache window; nil once the mapping has expired.
  def recently_duplicated_product
    duplicated_product_id = REDIS_STORAGE_NS.get(product.id)
    Link.where(id: duplicated_product_id).first
  end

  private
    attr_reader :product_file_external_ids_mapping

    def set_recently_duplicated_product
      REDIS_STORAGE_NS.setex(product.id, TIMEOUT_FOR_DUPLICATE_PRODUCT_CACHE, duplicated_product.id)
    end

    # The copy starts unpublished and unpurchasable.
    def mark_duplicate_product_as_draft
      duplicated_product.draft = true
      duplicated_product.purchase_disabled_at = Time.current
    end

    def duplicate_prices
      duplicated_product.prices.each(&:mark_deleted!) # Delete the default prices that are associated with the product on creation. Ref: Product::Prices.associate_price.
      product.prices.alive.each do |price|
        new_price = price.dup
        new_price.link = duplicated_product
        new_price.save!
      end
    end

    # Copies cover images/videos, duplicating the underlying blobs.
    def duplicate_asset_previews
      product.asset_previews.alive.in_order.each do |asset_preview|
        new_asset_preview = asset_preview.dup
        new_asset_preview.link = duplicated_product
        new_asset_preview.file.attach duped_blob(asset_preview.file) if asset_preview.file.attached?
        new_asset_preview.save!
      end
    end

    def duplicate_thumbnail
      return unless product.thumbnail.present?
      new_thumbnail = product.thumbnail.dup
      new_thumbnail.product = duplicated_product
      new_thumbnail.file.attach duped_blob(product.thumbnail.file) if product.thumbnail.file.attached?
      new_thumbnail.file.analyze
      new_thumbnail.save!
    end

    # Re-runs ActiveStorage analysis on copied attachments outside the
    # transaction.
    def post_process_attachments
      return unless duplicated_product.present?
      duplicated_product.asset_previews.each do |asset_preview|
        asset_preview.file.analyze if asset_preview.file.attached?
      end
      return unless duplicated_product.thumbnail.present?
      duplicated_product.thumbnail.file.analyze if duplicated_product.thumbnail.file.attached?
    end

    # Downloads and re-uploads a blob so the copy owns its own storage
    # object rather than sharing the original's.
    def duped_blob(file)
      blob = ActiveStorage::Blob.create_and_upload!(io: StringIO.new(file.download), filename: file.filename, content_type: file.content_type)
      blob.analyze
      blob
    end

    # Copies folders, files, transcoded videos, subtitles, and dropbox
    # sources; fills @product_file_external_ids_mapping for later
    # variant/rich-content remapping.
    def duplicate_product_files
      product_folder_ids_mapping = {}
      product.product_folders.alive.each do |product_folder|
        new_product_folder = product_folder.dup
        new_product_folder.link = duplicated_product
        new_product_folder.save!
        product_folder_ids_mapping[product_folder.id] = new_product_folder.id
      end
      product.product_files.alive.each do |product_file|
        new_product_file = product_file.dup
        new_product_file.link = duplicated_product
        new_product_file.is_linked_to_existing_file = true
        new_product_file.folder_id = product_folder_ids_mapping[product_file.folder_id]
        product_file.transcoded_videos.alive.each do |transcoded_video|
          new_transcoded_video = transcoded_video.dup
          new_transcoded_video.streamable = new_product_file
          new_transcoded_video.link = duplicated_product
          new_transcoded_video.save!
        end
        product_file.subtitle_files.each do |subtitle_file|
          new_subtitle_file = subtitle_file.dup
          new_subtitle_file.product_file = new_product_file
          new_subtitle_file.save!
        end
        if product_file.dropbox_file
          new_dropbox_file = product_file.dropbox_file.dup
          new_dropbox_file.product_file = new_product_file
          new_dropbox_file.link = duplicated_product
          new_dropbox_file.save!
        end
        new_product_file.save!
        @product_file_external_ids_mapping[product_file.external_id] = new_product_file.external_id
      end
    end

    # Copies <public-file-embed> files referenced by the description and
    # rewrites the embed ids in the copied description; orphaned embed
    # nodes are dropped.
    def duplicate_public_product_files
      public_files = product.public_files.alive.with_attached_file
      description = duplicated_product.description
      doc = Nokogiri::HTML.fragment(description)
      doc.css("public-file-embed").each do |embed_node|
        id = embed_node.attr("id")
        if id.blank?
          embed_node.remove
          next
        end
        public_file = public_files.find { _1.public_id == id }
        if public_file.present?
          new_public_file = public_file.dup
          new_public_file.file.attach(public_file.file.blob)
          new_public_file.resource = duplicated_product
          new_public_file.public_id = PublicFile.generate_public_id
          new_public_file.save!
          embed_node.set_attribute("id", new_public_file.public_id)
        else
          embed_node.remove
        end
      end
      duplicated_product.update!(description: doc.to_html)
    end

    # Offer codes are shared, not copied.
    def duplicate_offer_codes
      duplicated_product.offer_codes = product.offer_codes
    end

    def duplicate_product_taggings
      product.product_taggings.each do |product_tagging|
        new_product_tagging = product_tagging.dup
        new_product_tagging.product = duplicated_product
        new_product_tagging.save!
      end
    end

    # Copies variant categories and variants; for tiered memberships the
    # auto-created tier category on the copy is removed first so the
    # original's categories can be copied cleanly.
    def duplicate_variant_categories_and_variants
      if product.is_tiered_membership
        tier_category = duplicated_product.variant_categories.alive.first
        if tier_category
          tier_category.mark_deleted!
        end
      end
      product.variant_categories.each do |variant_category|
        new_variant_category = variant_category.dup
        new_variant_category.link = duplicated_product
        new_variant_category.save!
        variant_category.variants.each do |variant|
          new_variant = variant.dup
          new_variant.variant_category = new_variant_category
          duplicate_variant_product_files(original_variant: variant, duplicate_variant: new_variant)
          variant.skus.each do |sku|
            # Skus were copied earlier (duplicate_skus); match by name.
            new_sku = duplicated_product.skus.where(name: sku.name).first
            new_variant.skus << new_sku
          end
          new_variant.save!
          duplicate_rich_content(original_entity: variant, duplicate_entity: new_variant)
        end
      end
    end

    def duplicate_skus
      product.skus.each do |sku|
        new_sku = sku.dup
        new_sku.link = duplicated_product
        duplicate_variant_product_files(original_variant: sku, duplicate_variant: new_sku)
        new_sku.save!
      end
    end

    # Re-links a copied variant/sku to the copied counterparts of its
    # product files via the external-id mapping.
    def duplicate_variant_product_files(original_variant:, duplicate_variant:)
      original_variant.product_files.alive.each do |product_file|
        duplicate_product_file_external_id = product_file_external_ids_mapping[product_file.external_id]
        next if duplicate_product_file_external_id.blank?
        duplicate_product_file = ProductFile.find_by_external_id(duplicate_product_file_external_id)
        next if duplicate_product_file.blank?
        duplicate_variant.product_files << duplicate_product_file
      end
    end

    # Copies the preorder, pushing imminent/past release dates a month out.
    def duplicate_preorder_link
      return unless product.preorder_link
      new_preorder_link = product.preorder_link.dup
      new_preorder_link.link = duplicated_product
      new_preorder_link.release_at = 1.month.from_now if new_preorder_link.release_at <= 24.hours.from_now
      new_preorder_link.save!
    end

    def duplicate_third_party_analytics
      product.third_party_analytics.each do |third_party_analytic|
        new_third_party_analytic = third_party_analytic.dup
        new_third_party_analytic.link = duplicated_product
        new_third_party_analytic.save!
      end
    end

    def duplicate_shipping_destinations
      product.shipping_destinations.each do |shipping_destination|
        new_shipping_destination = shipping_destination.dup
        new_shipping_destination.link = duplicated_product
        new_shipping_destination.save!
      end
    end

    def duplicate_refund_policy
      return unless product.product_refund_policy.present?
      new_refund_policy = product.product_refund_policy.dup
      new_refund_policy.product = duplicated_product
      new_refund_policy.save!
    end

    # Copies rich content pages, remapping embedded file ids to the
    # copied files.
    def duplicate_rich_content(original_entity:, duplicate_entity:)
      original_entity.alive_rich_contents.find_each do |original_entity_rich_content|
        duplicate_entity_rich_content = original_entity_rich_content.dup
        duplicate_entity_rich_content.entity = duplicate_entity
        update_file_embed_ids_in_rich_content(duplicate_entity_rich_content.description)
        duplicate_entity_rich_content.save!
      end
    end

    # Recursively walks rich-content nodes (descending into file-embed
    # groups) and rewrites fileEmbed ids in place via the mapping.
    def update_file_embed_ids_in_rich_content(content)
      content.each do |node|
        update_file_embed_ids_in_rich_content(node["content"]) if node["type"] == RichContent::FILE_EMBED_GROUP_NODE_TYPE
        next if node["type"] != "fileEmbed"
        next if node.dig("attrs", "id").blank?
        new_product_file_external_id = product_file_external_ids_mapping[node.dig("attrs", "id")]
        next if new_product_file_external_id.blank?
        node["attrs"]["id"] = new_product_file_external_id
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/save_public_files_service.rb | app/services/save_public_files_service.rb | # frozen_string_literal: true
# Reconciles a resource's public files (downloadable embeds) with the
# <public-file-embed> nodes present in freshly submitted rich content.
class SavePublicFilesService
  attr_reader :resource, :files_params, :content

  # resource     - owner of the files (responds to alive_public_files)
  # files_params - array of file hashes from the editor; only entries
  #                whose status.type is "saved" are honored
  # content      - HTML fragment that may contain <public-file-embed id=...>
  def initialize(resource:, files_params:, content:)
    @resource = resource
    @files_params = files_params.presence || []
    @content = content.to_s
  end

  # Returns the cleaned HTML. In one transaction it: (1) updates names
  # of files still referenced in the content, (2) schedules unreferenced
  # files for deletion, (3) strips embed nodes that point at missing or
  # to-be-deleted files.
  def process
    ActiveRecord::Base.transaction do
      persisted_files = resource.alive_public_files
      doc = Nokogiri::HTML.fragment(content)
      file_ids_in_content = extract_file_ids_from_content(doc)
      update_existing_files(persisted_files, file_ids_in_content)
      schedule_unused_files_for_deletion(persisted_files, file_ids_in_content)
      clean_invalid_file_embeds(doc, persisted_files)
      doc.to_html
    end
  end

  private
    # Embed ids referenced in the content, restricted to files the
    # client marked as "saved" (uploads still in flight are ignored).
    def extract_file_ids_from_content(doc)
      saved_file_ids_from_files_params = files_params.filter { _1.dig("status", "type") == "saved" }.map { _1["id"] }
      doc.css("public-file-embed").map { _1.attr("id") }.compact.select { _1.in?(saved_file_ids_from_files_params) }
    end

    # Renames referenced files and cancels any pending deletion.
    def update_existing_files(persisted_files, file_ids_in_content)
      files_params
        .select { _1["id"].in?(file_ids_in_content) }
        .each do |file_params|
          persisted_file = persisted_files.find { _1.public_id == file_params["id"] }
          next if persisted_file.nil?
          persisted_file.display_name = file_params["name"].presence || "Untitled"
          persisted_file.scheduled_for_deletion_at = nil
          persisted_file.save!
        end
    end

    def schedule_unused_files_for_deletion(persisted_files, file_ids_in_content)
      persisted_files
        .reject { _1.scheduled_for_deletion? || _1.public_id.in?(file_ids_in_content) }
        .each(&:schedule_for_deletion!)
    end

    # Drops embed nodes whose id is blank or doesn't match a live file.
    def clean_invalid_file_embeds(doc, persisted_files)
      valid_file_ids = persisted_files.reject(&:scheduled_for_deletion?).map(&:public_id)
      doc.css("public-file-embed").each do |node|
        id = node.attr("id")
        node.remove if id.blank? || !id.in?(valid_file_ids)
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/url_service.rb | app/services/url_service.rb | # frozen_string_literal: true
# Central place for building absolute Gumroad URLs from the app-wide
# PROTOCOL / *_DOMAIN constants.
class UrlService
  class << self
    def domain_with_protocol
      with_protocol(DOMAIN)
    end

    def root_domain_with_protocol
      with_protocol(ROOT_DOMAIN)
    end

    def discover_domain_with_protocol
      with_protocol(DISCOVER_DOMAIN)
    end

    def api_domain_with_protocol
      with_protocol(API_DOMAIN)
    end

    def short_domain_with_protocol
      with_protocol(SHORT_DOMAIN)
    end

    # Absolute discover URL for the given taxonomy path; non-nil query
    # params are appended as a query string.
    def discover_full_path(taxonomy_path, query_params = nil)
      discover_url = Rails.application.routes.url_helpers.discover_url({
        protocol: PROTOCOL,
        host: DISCOVER_DOMAIN
      })
      uri = Addressable::URI.parse(discover_url)
      uri.path = taxonomy_path
      uri.query = query_params.compact.to_query if query_params.present?
      uri.to_s
    end

    def widget_script_base_url(seller: nil)
      custom_domain_with_protocol(seller) || root_domain_with_protocol
    end

    def widget_product_link_base_url(seller: nil, allow_custom_domain: true)
      (allow_custom_domain && custom_domain_with_protocol(seller)) || seller&.subdomain_with_protocol || root_domain_with_protocol
    end

    private
      def with_protocol(host)
        "#{PROTOCOL}://#{host}"
      end

      # Returns "protocol://custom-domain" only when the seller has an
      # active custom domain that strictly points at Gumroad; nil
      # otherwise (and always nil in development).
      def custom_domain_with_protocol(seller)
        return if Rails.env.development?
        return unless seller.present? && seller.custom_domain&.active?

        domain = seller.custom_domain.domain
        strictly_pointing = CustomDomainVerificationService.new(domain:).domains_pointed_to_gumroad.include?(domain)
        with_protocol(domain) if strictly_pointing
      end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/trn_validation_service.rb | app/services/trn_validation_service.rb | # frozen_string_literal: true
class TrnValidationService
  # Tax Registration Numbers are validated purely by length: exactly 15
  # characters.
  TRN_LENGTH = 15

  attr_reader :trn

  def initialize(trn)
    @trn = trn
  end

  def process
    return false if trn.blank?

    trn.length == TRN_LENGTH
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/purchase_search_service.rb | app/services/purchase_search_service.rb | # frozen_string_literal: true
class PurchaseSearchService
# Default search options. Calling .search with no options must apply no
# filters and return all purchases. Frozen so callers can't mutate the
# shared defaults (initialize merges on top of it, which is unaffected).
# NOTE: the freeze is shallow — nested values such as aggs: {} are still
# shared objects; pass your own aggs instead of mutating the default.
DEFAULT_OPTIONS = {
  # Values - They can be an ActiveRecord object, an id, or an Array of both
  seller: nil,
  purchaser: nil,
  revenue_sharing_user: nil,
  product: nil,
  exclude_product: nil,
  exclude_purchasers_of_product: nil,
  variant: nil,
  exclude_variant: nil,
  exclude_purchasers_of_variant: nil,
  exclude_purchase: nil,
  any_products_or_variants: nil,
  affiliate_user: nil,
  taxonomy: nil,
  # Booleans
  exclude_non_original_subscription_purchases: false,
  exclude_deactivated_subscriptions: false,
  exclude_cancelled_or_pending_cancellation_subscriptions: false,
  exclude_refunded: false,
  exclude_refunded_except_subscriptions: false,
  exclude_unreversed_chargedback: false,
  exclude_cant_contact: false,
  exclude_giftees: false,
  exclude_gifters: false,
  exclude_non_successful_preorder_authorizations: false,
  exclude_bundle_product_purchases: false,
  exclude_commission_completion_purchases: false,
  # Ranges
  price_greater_than: nil, # Integer, compared to price_cents
  price_less_than: nil, # Integer, compared to price_cents
  created_after: nil, # Time or valid datetime string
  created_on_or_after: nil, # Time or valid datetime string
  created_before: nil, # Time or valid datetime string
  created_on_or_before: nil, # Time or valid datetime string
  # Others
  country: nil,
  email: nil,
  state: nil,
  archived: nil, # Boolean
  recommended: nil, # Boolean
  # Fulltext search
  seller_query: nil, # String
  buyer_query: nil, # String
  # Native ES params
  # Most useful defaults to have when using this service in console
  from: 0,
  size: 5,
  sort: nil, # usually: [ { created_at: :desc }, { id: :desc } ],
  _source: false,
  aggs: {},
  track_total_hits: nil,
}.freeze
# Full Elasticsearch request body built by #build_body.
attr_accessor :body

# Merges caller options over DEFAULT_OPTIONS and eagerly builds the ES
# request body (available via #body / #query).
def initialize(options = {})
  @options = DEFAULT_OPTIONS.merge(options)
  build_body
end

# Executes the built query against the Purchase index.
def process
  Purchase.search(@body)
end

# Convenience accessor for just the query portion of the body.
def query = @body[:query]

# One-shot helper: PurchaseSearchService.search(seller: ...).
def self.search(options = {})
  new(options).process
end
private

  # Assembles the full ES bool query into @body by delegating to one
  # builder per option. The Hash default block lazily creates the
  # :filter/:must/:must_not arrays the builders append to, so unused
  # clause types never appear in the final body.
  def build_body
    @body = { query: { bool: Hash.new { |hash, key| hash[key] = [] } } }
    ### Filters
    # Objects and ids
    build_body_seller
    build_body_purchaser
    build_body_product
    build_body_buyer_search
    build_body_exclude_product
    build_body_exclude_purchasers_of_product
    build_body_variant
    build_body_exclude_variant
    build_body_exclude_purchasers_of_variant
    build_body_exclude_purchase
    build_body_any_products_or_variants
    build_body_affiliate_user
    build_body_revenue_sharing_user
    build_body_taxonomy
    # Booleans
    build_body_exclude_refunded
    build_body_exclude_refunded_except_subscriptions
    build_body_exclude_unreversed_chargedback
    build_body_exclude_non_original_subscription_purchases
    build_body_exclude_not_charged_non_free_trial_purchases
    build_body_exclude_deactivated_subscriptions
    build_body_exclude_cancelled_or_pending_cancellation_subscriptions
    build_body_exclude_cant_contact
    build_body_exclude_giftees
    build_body_exclude_gifters
    build_body_exclude_non_successful_preorder_authorizations
    build_body_exclude_bundle_product_purchases
    build_body_exclude_commission_completion_purchases
    # Ranges
    build_body_price_greater_than
    build_body_price_less_than
    build_body_created_after
    build_body_created_on_or_after
    build_body_created_before
    build_body_created_on_or_before
    # Others
    build_body_country
    build_body_email
    build_body_state
    build_body_archived
    build_body_recommended
    ### Fulltext search
    build_body_fulltext_search
    build_body_native_params
  end
# --- Entity filters -------------------------------------------------
# Each builder below is a no-op when its option is absent. Option values
# may be ActiveRecord objects, raw ids, or arrays of either; objects are
# normalized to ids before being placed in a terms/term clause.

def build_body_seller
  return if @options[:seller].blank?
  ids = Array.wrap(@options[:seller]).map do |seller|
    seller.is_a?(User) ? seller.id : seller
  end
  @body[:query][:bool][:filter] << { terms: { "seller_id" => ids } }
end

def build_body_purchaser
  return if @options[:purchaser].blank?
  ids = Array.wrap(@options[:purchaser]).map do |purchaser|
    purchaser.is_a?(User) ? purchaser.id : purchaser
  end
  @body[:query][:bool][:filter] << { terms: { "purchaser_id" => ids } }
end

def build_body_product
  return if @options[:product].blank?
  ids = Array.wrap(@options[:product]).map do |product|
    product.is_a?(Link) ? product.id : product
  end
  @body[:query][:bool][:filter] << { terms: { "product_id" => ids } }
end

def build_body_exclude_product
  Array.wrap(@options[:exclude_product]).each do |product|
    product_id = product.is_a?(Link) ? product.id : product
    @body[:query][:bool][:must_not] << { term: { "product_id" => product_id } }
  end
end

# Excludes purchases whose buyer also bought the given product(s) from
# the same seller.
def build_body_exclude_purchasers_of_product
  Array.wrap(@options[:exclude_purchasers_of_product]).each do |product|
    product_id = product.is_a?(Link) ? product.id : product
    @body[:query][:bool][:must_not] << {
      term: { "product_ids_from_same_seller_purchased_by_purchaser" => product_id }
    }
  end
end

def build_body_variant
  return if @options[:variant].blank?
  variant_ids = Array.wrap(@options[:variant]).map do |variant|
    variant.is_a?(BaseVariant) ? variant.id : variant
  end
  @body[:query][:bool][:filter] << { terms: { "variant_ids" => variant_ids } }
end

def build_body_exclude_variant
  Array.wrap(@options[:exclude_variant]).each do |variant|
    variant_id = variant.is_a?(BaseVariant) ? variant.id : variant
    @body[:query][:bool][:must_not] << { term: { "variant_ids" => variant_id } }
  end
end

def build_body_exclude_purchasers_of_variant
  Array.wrap(@options[:exclude_purchasers_of_variant]).each do |variant|
    variant_id = variant.is_a?(BaseVariant) ? variant.id : variant
    @body[:query][:bool][:must_not] << {
      term: { "variant_ids_from_same_seller_purchased_by_purchaser" => variant_id }
    }
  end
end

def build_body_exclude_purchase
  Array.wrap(@options[:exclude_purchase]).each do |purchase|
    purchase_id = purchase.is_a?(Purchase) ? purchase.id : purchase
    @body[:query][:bool][:must_not] << { term: { "id" => purchase_id } }
  end
end

# Matches purchases of ANY of the listed products or variants
# (OR-semantics via a should clause with minimum_should_match: 1).
def build_body_any_products_or_variants
  return if @options[:any_products_or_variants].blank?
  should = []
  if @options[:any_products_or_variants][:products].present?
    product_ids = Array.wrap(@options[:any_products_or_variants][:products]).map do |product|
      product.is_a?(Link) ? product.id : product
    end
    should << { terms: { "product_id" => product_ids } }
  end
  if @options[:any_products_or_variants][:variants].present?
    variant_ids = Array.wrap(@options[:any_products_or_variants][:variants]).map do |variant|
      variant.is_a?(BaseVariant) ? variant.id : variant
    end
    should << { terms: { "variant_ids" => variant_ids } }
  end
  return if should.empty?
  @body[:query][:bool][:filter] << { bool: { minimum_should_match: 1, should: } }
end

def build_body_affiliate_user
  return if @options[:affiliate_user].blank?
  ids = Array.wrap(@options[:affiliate_user]).map do |affiliate_user|
    affiliate_user.is_a?(User) ? affiliate_user.id : affiliate_user
  end
  @body[:query][:bool][:filter] << { terms: { "affiliate_credit_affiliate_user_id" => ids } }
end

# Matches purchases where the user earned revenue either as the seller
# or as the crediting affiliate.
def build_body_revenue_sharing_user
  return if @options[:revenue_sharing_user].blank?
  ids = Array.wrap(@options[:revenue_sharing_user]).map do |user|
    user.is_a?(User) ? user.id : user
  end
  should = [
    { terms: { "affiliate_credit_affiliate_user_id" => ids } },
    { terms: { "seller_id" => ids } },
  ]
  @body[:query][:bool][:filter] << { bool: { minimum_should_match: 1, should: } }
end

def build_body_taxonomy
  return if @options[:taxonomy].blank?
  ids = Array.wrap(@options[:taxonomy]).map do |taxonomy|
    taxonomy.is_a?(Taxonomy) ? taxonomy.id : taxonomy
  end
  @body[:query][:bool][:filter] << { terms: { "taxonomy_id" => ids } }
end
# --- Boolean exclusion filters --------------------------------------
# Each builder below is a no-op unless its option is truthy.

def build_body_exclude_refunded
  if @options[:exclude_refunded]
    @body[:query][:bool][:filter] << { term: { "stripe_refunded" => false } }
  end
end

def build_body_exclude_refunded_except_subscriptions
  if @options[:exclude_refunded_except_subscriptions]
    @body[:query][:bool][:filter] << { term: { "not_refunded_except_subscriptions" => true } }
  end
end

def build_body_exclude_unreversed_chargedback
  if @options[:exclude_unreversed_chargedback]
    @body[:query][:bool][:filter] << { term: { "not_chargedback_or_chargedback_reversed" => true } }
  end
end

def build_body_exclude_non_original_subscription_purchases
  if @options[:exclude_non_original_subscription_purchases]
    @body[:query][:bool][:filter] << { term: { "not_subscription_or_original_subscription_purchase" => true } }
  end
end

# Excludes purchases stuck in the not_charged state unless they are
# free-trial purchases (which legitimately sit in that state).
def build_body_exclude_not_charged_non_free_trial_purchases
  return unless @options[:exclude_not_charged_non_free_trial_purchases]

  not_charged = { term: { "purchase_state" => "not_charged" } }
  not_free_trial = { bool: { must_not: [{ term: { "selected_flags" => "is_free_trial_purchase" } }] } }
  @body[:query][:bool][:must_not] << { bool: { must: [not_charged, not_free_trial] } }
end

def build_body_exclude_deactivated_subscriptions
  if @options[:exclude_deactivated_subscriptions]
    @body[:query][:bool][:must_not] << { exists: { field: "subscription_deactivated_at" } }
  end
end

def build_body_exclude_cancelled_or_pending_cancellation_subscriptions
  if @options[:exclude_cancelled_or_pending_cancellation_subscriptions]
    @body[:query][:bool][:must_not] << { exists: { field: "subscription_cancelled_at" } }
  end
end

def build_body_exclude_cant_contact
  if @options[:exclude_cant_contact]
    @body[:query][:bool][:filter] << { term: { "can_contact" => true } }
  end
end

def build_body_exclude_giftees
  if @options[:exclude_giftees]
    @body[:query][:bool][:must_not] << { term: { "selected_flags" => "is_gift_receiver_purchase" } }
  end
end

def build_body_exclude_gifters
  if @options[:exclude_gifters]
    @body[:query][:bool][:must_not] << { term: { "selected_flags" => "is_gift_sender_purchase" } }
  end
end

def build_body_exclude_non_successful_preorder_authorizations
  if @options[:exclude_non_successful_preorder_authorizations]
    @body[:query][:bool][:filter] << { term: { "successful_authorization_or_without_preorder" => true } }
  end
end

def build_body_exclude_bundle_product_purchases
  if @options[:exclude_bundle_product_purchases]
    @body[:query][:bool][:must_not] << { term: { "selected_flags" => "is_bundle_product_purchase" } }
  end
end

def build_body_exclude_commission_completion_purchases
  if @options[:exclude_commission_completion_purchases]
    @body[:query][:bool][:must_not] << { term: { "selected_flags" => "is_commission_completion_purchase" } }
  end
end
# --- Range filters --------------------------------------------------
# Price bounds compare against price_cents; time bounds are serialized
# to ISO 8601 before being placed in the range clause.

def build_body_price_greater_than
  floor = @options[:price_greater_than]
  return unless floor

  @body[:query][:bool][:must] << { range: { "price_cents" => { gt: floor } } }
end

def build_body_price_less_than
  ceiling = @options[:price_less_than]
  return unless ceiling

  @body[:query][:bool][:must] << { range: { "price_cents" => { lt: ceiling } } }
end

def build_body_created_after
  moment = @options[:created_after]
  return unless moment

  @body[:query][:bool][:must] << { range: { "created_at" => { gt: moment.iso8601 } } }
end

def build_body_created_on_or_after
  moment = @options[:created_on_or_after]
  return unless moment

  @body[:query][:bool][:must] << { range: { "created_at" => { gte: moment.iso8601 } } }
end

def build_body_created_before
  moment = @options[:created_before]
  return unless moment

  @body[:query][:bool][:must] << { range: { "created_at" => { lt: moment.iso8601 } } }
end

def build_body_created_on_or_before
  moment = @options[:created_on_or_before]
  return unless moment

  @body[:query][:bool][:must] << { range: { "created_at" => { lte: moment.iso8601 } } }
end
# Restricts results to the given country (or countries), matching either the
# stated country or the one derived from the buyer's IP.
def build_body_country
  countries = @options[:country]
  return unless countries
  @body[:query][:bool][:filter] << { terms: { "country_or_ip_country" => Array.wrap(countries) } }
end
# Restricts results to an exact (case-insensitive) buyer email match.
def build_body_email
  address = @options[:email]
  return unless address
  @body[:query][:bool][:filter] << { term: { "email.raw" => address.downcase } }
end
# Restricts results to the given purchase state(s).
def build_body_state
  states = @options[:state]
  return unless states
  @body[:query][:bool][:filter] << { terms: { "purchase_state" => Array.wrap(states) } }
end
# Filters on the archived flag. Tri-state option: nil leaves the query
# untouched, true requires the flag, false forbids it.
def build_body_archived
  archived = @options[:archived]
  return if archived.nil?
  clause = archived ? :must : :must_not
  @body[:query][:bool][clause] << { term: { "selected_flags" => "is_archived" } }
end
# Filters on whether the product was surfaced via a recommendation.
# Tri-state option: nil leaves the query untouched.
def build_body_recommended
  recommended = @options[:recommended]
  return if recommended.nil?
  clause = recommended ? :must : :must_not
  @body[:query][:bool][clause] << { term: { "selected_flags" => "was_product_recommended" } }
end
# Seller-facing free-text search over buyers.
#
# Behaviour depends on the shape of @options[:seller_query]:
# - A query wrapped in double quotes ("...") requires every word to match the
#   buyer's full_name (multi_match with operator "and").
# - Otherwise the words are matched loosely against email, email_domain and
#   full_name; if the query contains "@" it additionally tries exact matches
#   on the raw email and PayPal email, and if it looks like a license serial
#   (four dash-separated groups of 8 hex chars) it tries an exact
#   license_serial match (upcased, matching how serials are indexed).
# The built clauses are OR-ed: at least one must match
# (minimum_should_match: 1).
def build_body_fulltext_search
  return if @options[:seller_query].blank?
  query_string = @options[:seller_query].strip.downcase
  shoulds = []
  # "quoted phrase" => require all words to match the full name
  all_words_query = query_string.match(/\A"(.*)"\z/).try(:[], 1)
  if all_words_query
    shoulds << {
      multi_match: {
        query: all_words_query,
        fields: ["full_name"],
        operator: "and",
      }
    }
  else
    shoulds << {
      multi_match: {
        query: query_string,
        fields: ["email", "email_domain", "full_name"]
      }
    }
    if query_string.include?("@")
      shoulds << { term: { "email.raw" => query_string } }
      shoulds << { term: { "paypal_email.raw" => query_string } }
    end
    if query_string.match?(/\A[a-f0-9]{8}-[a-f0-9]{8}-[a-f0-9]{8}-[a-f0-9]{8}\z/)
      # Serials are stored upcased; query_string was downcased above.
      shoulds << { term: { "license_serial" => query_string.upcase } }
    end
  end
  @body[:query][:bool][:must] << {
    bool: {
      minimum_should_match: 1,
      should: shoulds,
    }
  }
end
# Buyer-facing free-text search over what was bought: product name,
# product description and seller name.
def build_body_buyer_search
  return if @options[:buyer_query].blank?
  query_string = @options[:buyer_query].strip.downcase
  # Use @body directly for consistency with every other build_body_* method
  # (the original bare `body` relied on an accessor being defined elsewhere).
  @body[:query][:bool][:must] << {
    multi_match: {
      query: query_string,
      fields: ["product_name", "product_description", "seller_name"]
    }
  }
end
# Copies pass-through Elasticsearch options (pagination, sorting, source
# filtering, aggregations, hit counting) verbatim into the request body.
# nil is skipped, but explicit false (e.g. track_total_hits: false) is kept.
def build_body_native_params
  %i[from size sort _source aggs track_total_hits].each do |key|
    value = @options[key]
    @body[key] = value unless value.nil?
  end
end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/secure_encrypt_service.rb | app/services/secure_encrypt_service.rb | # frozen_string_literal: true
# Symmetric encryption helper backed by ActiveSupport::MessageEncryptor using
# aes-256-gcm (authenticated encryption). The key comes from the
# SECURE_ENCRYPT_KEY config value and must be exactly 32 bytes.
class SecureEncryptService
  class Error < StandardError; end
  class MissingKeyError < Error; end
  class InvalidKeyError < Error; end

  class << self
    # Encrypts the given text.
    #
    # @param text [String] The text to encrypt.
    # @return [String] The encrypted text.
    def encrypt(text)
      encryptor.encrypt_and_sign(text)
    end

    # Decrypts the given encrypted text.
    #
    # @param encrypted_text [String] The encrypted text to decrypt.
    # @return [String, nil] The decrypted text, or nil if decryption fails
    #   (tampered, truncated, or encrypted under a different key).
    def decrypt(encrypted_text)
      encryptor.decrypt_and_verify(encrypted_text)
    rescue ActiveSupport::MessageEncryptor::InvalidMessage
      nil
    end

    # Verifies if the user input matches the encrypted text.
    #
    # @param encrypted [String] The encrypted text.
    # @param text [String] The user input to compare against.
    # @return [Boolean] True if the user input matches the decrypted text, false otherwise.
    def verify(encrypted, text)
      decrypted_text = decrypt(encrypted)
      return false if decrypted_text.nil? || text.nil?
      # Constant-time comparison to avoid timing side channels.
      ActiveSupport::SecurityUtils.secure_compare(decrypted_text, text)
    end

    private
      # Lazily builds and memoizes the encryptor. Raises if the configured key
      # is missing or is not the 32 bytes aes-256-gcm requires.
      def encryptor
        @encryptor ||= begin
          key = GlobalConfig.get("SECURE_ENCRYPT_KEY")
          raise MissingKeyError, "SECURE_ENCRYPT_KEY is not set." if key.blank?
          raise InvalidKeyError, "SECURE_ENCRYPT_KEY must be 32 bytes for aes-256-gcm." if key.bytesize != 32
          ActiveSupport::MessageEncryptor.new(key, cipher: "aes-256-gcm")
        end
      end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/gst_validation_service.rb | app/services/gst_validation_service.rb | # frozen_string_literal: true
# Checks whether a GST registration number is currently "Registered" with the
# IRAS API (the IRAS_* config names suggest Singapore's tax authority —
# confirm against the configured IRAS_ENDPOINT).
class GstValidationService
  attr_reader :gst_id

  # @param gst_id [String, nil] GST registration number to validate.
  def initialize(gst_id)
    @gst_id = gst_id
  end

  # @return [Boolean] true when the API reports the ID as "Registered";
  #   false for blank IDs or any other API outcome.
  # Results are cached for 10 minutes per gst_id.
  # NOTE(review): network/timeout errors from HTTParty are not rescued here,
  # so callers may see exceptions rather than false — confirm that's intended.
  def process
    return false if gst_id.blank?
    Rails.cache.fetch("iras_validation_#{gst_id}", expires_in: 10.minutes) do
      headers = {
        "X-IBM-Client-Id" => IRAS_API_ID,
        "X-IBM-Client-Secret" => IRAS_API_SECRET,
        "accept" => "application/json",
        "content-type" => "application/json"
      }
      body = {
        clientID: IRAS_API_ID,
        regID: gst_id
      }.to_json
      response = HTTParty.post(IRAS_ENDPOINT, body:, timeout: 5, headers:)
      # returnCode "10" appears to be this API's success code — verify
      # against the IRAS API documentation.
      response["returnCode"] == "10" && response["data"]["Status"] == "Registered"
    end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/forfeit_balance_service.rb | app/services/forfeit_balance_service.rb | # frozen_string_literal: true
# Forfeits a user's unpaid balances — all of them, or only those held in
# Gumroad-controlled accounts, depending on the reason — and records an admin
# comment with the forfeited amount and the affected balance IDs.
class ForfeitBalanceService
  include CurrencyHelper

  attr_reader :user, :reason

  # @param user [User] whose balances may be forfeited
  # @param reason [Symbol] one of :account_closure, :country_change,
  #   :payout_method_change — selects which balances are eligible
  def initialize(user:, reason:)
    @user = user
    @reason = reason
  end

  # Marks the eligible balances forfeited and records an admin comment.
  # No-op when there is nothing to forfeit.
  def process
    return unless balance_amount_cents_to_forfeit > 0
    # Capture the IDs *before* mutating state: once a balance is marked
    # forfeited it may no longer satisfy the unpaid scope, so re-querying the
    # relation afterwards (as the original code did) can record an empty ID
    # list in the comment.
    balance_ids = balances_to_forfeit.ids.join(", ")
    balances_to_forfeit.each(&:mark_forfeited!)
    user.comments.create!(
      author_id: GUMROAD_ADMIN_ID,
      comment_type: Comment::COMMENT_TYPE_BALANCE_FORFEITED,
      content: "Balance of #{balance_amount_formatted} has been forfeited. Reason: #{reason_comment}. Balance IDs: #{balance_ids}"
    )
  end

  # @return [String] human-readable forfeited amount, e.g. "$12.34".
  def balance_amount_formatted
    formatted_dollar_amount(balance_amount_cents_to_forfeit)
  end

  # @return [Integer] total amount (in cents) that would be forfeited.
  def balance_amount_cents_to_forfeit
    @_balance_amount_cents_to_forfeit ||= balances_to_forfeit.sum(:amount_cents)
  end

  private
    def reason_comment
      case reason
      when :account_closure
        "Account closed"
      when :country_change
        "Country changed"
      when :payout_method_change
        "Payout method changed"
      end
    end

    # Dispatches to the reason-specific scope below.
    def balances_to_forfeit
      @_balances_to_forfeit ||= send("balances_to_forfeit_on_#{reason}")
    end

    def balances_to_forfeit_on_account_closure
      user.unpaid_balances
    end

    # Forfeiting is only needed if balance is in a Gumroad-controlled Stripe account
    def balances_to_forfeit_on_country_change
      user.unpaid_balances.where.not(merchant_account_id: [
        MerchantAccount.gumroad(StripeChargeProcessor.charge_processor_id),
        MerchantAccount.gumroad(PaypalChargeProcessor.charge_processor_id),
        MerchantAccount.gumroad(BraintreeChargeProcessor.charge_processor_id)
      ])
    end

    # Forfeiting is only needed if balance is in a Gumroad-controlled Stripe account
    def balances_to_forfeit_on_payout_method_change
      balances_to_forfeit_on_country_change
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/email_suppression_manager.rb | app/services/email_suppression_manager.rb | # frozen_string_literal: true
# Looks up and clears an email address from SendGrid suppression lists
# (bounces and spam reports) across all of the SendGrid subusers/API keys
# the app sends through.
class EmailSuppressionManager
  SUPPRESSION_LISTS = [:bounces, :spam_reports]
  private_constant :SUPPRESSION_LISTS

  # @param email [String] address to inspect/unblock.
  def initialize(email)
    @email = email
  end

  # @return [Hash{Symbol => Array<Hash>}] subuser name => list of
  #   { list:, reason: } entries; subusers with no suppressions are omitted.
  def reasons_for_suppression
    # Scan all subusers for the email and note the reasons for suppressions
    sendgrid_subusers.inject({}) do |reasons, (subuser, api_key)|
      supression_reasons = email_suppression_reasons(api_key)
      reasons[subuser] = supression_reasons if supression_reasons.present?
      reasons
    end
  end

  # @return [Boolean] true if the email was removed from at least one list.
  def unblock_email
    # Scan all subusers for the email and delete it from each suppression list
    # Return true if the email is unblocked from any of the lists
    # Non-short-circuiting `|` is used so every subuser is processed even
    # after the first successful removal.
    sendgrid_subusers.inject(false) do |unblocked, (_, api_key)|
      unblocked | unblock_suppressed_email(api_key)
    end
  end

  private
    attr_reader :email

    def sendgrid(api_key)
      SendGrid::API.new(api_key:)
    end

    # Fetches the suppression entries for `email` under one API key.
    def email_suppression_reasons(api_key)
      suppression = sendgrid(api_key).client.suppression
      SUPPRESSION_LISTS.inject([]) do |reasons, list|
        parsed_body = suppression.public_send(list)._(email).get.parsed_body
        begin
          reasons << { list:, reason: parsed_body.first[:reason] } if parsed_body.present?
        rescue => e
          # Unexpected payload shapes are reported but don't abort the scan.
          Bugsnag.notify(e)
          Rails.logger.info "[EmailSuppressionManager] Error parsing SendGrid response: #{parsed_body}"
        end
        reasons
      end
    end

    def unblock_suppressed_email(api_key)
      suppression = sendgrid(api_key).client.suppression
      # Scan all lists for the email and delete it from each list
      # Return true if the email is found in any of the lists
      # `|` (not `||`) ensures the delete is attempted on every list.
      SUPPRESSION_LISTS.inject(false) do |unblocked, list|
        unblocked | successful_response?(suppression.public_send(list)._(email).delete.status_code)
      end
    end

    def successful_response?(status_code)
      (200..299).include?(status_code.to_i)
    end

    # Subuser name => API key for every SendGrid account we send through.
    def sendgrid_subusers
      {
        gumroad: GlobalConfig.get("SENDGRID_GUMROAD_TRANSACTIONS_API_KEY"),
        followers: GlobalConfig.get("SENDGRID_GUMROAD_FOLLOWER_CONFIRMATION_API_KEY"),
        creators: GlobalConfig.get("SENDGRID_GR_CREATORS_API_KEY"),
        customers_level_1: GlobalConfig.get("SENDGRID_GR_CUSTOMERS_API_KEY"),
        customers_level_2: GlobalConfig.get("SENDGRID_GR_CUSTOMERS_LEVEL_2_API_KEY")
      }
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/payout_users_service.rb | app/services/payout_users_service.rb | # frozen_string_literal: true
# Creates payout Payment records for a set of users and hands them to the
# processor — immediately for regular payouts, delayed by 25 hours for
# Stripe cross-border payouts (funds only become payable after 24 hours).
class PayoutUsersService
  attr_reader :date, :processor_type, :user_ids, :payout_type

  # @param date_string [String] payout period end date
  # @param processor_type [String] a PayoutProcessorType value
  # @param user_ids [Integer, Array<Integer>] users to pay out
  # @param payout_type [String] standard or instant
  def initialize(date_string:, processor_type:, user_ids:, payout_type: Payouts::PAYOUT_TYPE_STANDARD)
    @date = date_string
    @processor_type = processor_type
    @user_ids = Array.wrap(user_ids)
    @payout_type = payout_type
  end

  # Creates the payments and dispatches them.
  # @return [Array<Payment>] every payment created (immediate + scheduled).
  def process
    payments, cross_border_payments = create_payments
    PayoutProcessorType.get(processor_type).process_payments(payments) if payments.present?
    cross_border_payments.each do |payment|
      ProcessPaymentWorker.perform_in(25.hours, payment.id)
    end
    payments + cross_border_payments
  end

  # Builds a Payment per user, partitioned into [regular, cross_border].
  # Failures for individual users are logged/reported and skipped, so one bad
  # user never blocks the rest of the batch.
  def create_payments
    payments = []
    cross_border_payments = []
    user_ids.each do |user_id|
      user = User.find(user_id)
      payout_period_end_date = date
      if payout_type == Payouts::PAYOUT_TYPE_INSTANT
        # Instant payouts are capped at the newest instantly-payable balance.
        instantly_payable_balances = user.instantly_payable_unpaid_balances_up_to_date(date)
        payout_period_end_date = instantly_payable_balances.sort_by(&:date).last.date.to_s
      end
      payment, payment_errors = Payouts.create_payment(payout_period_end_date, processor_type, user, payout_type:)
      if payment_errors.blank? && payment.present?
        # Money transferred to a cross-border-payouts Stripe Connect a/c becomes payable after 24 hours,
        # so schedule those payouts for 25 hours from now instead of processing them immediately.
        cross_border_payout = payment.processor == PayoutProcessorType::STRIPE &&
          !payment.user.merchant_accounts.find_by(charge_processor_merchant_id: payment.stripe_connect_account_id)&.is_a_stripe_connect_account? &&
          Country.new(user.alive_user_compliance_info.legal_entity_country_code).supports_stripe_cross_border_payouts?
        if cross_border_payout
          cross_border_payments << payment
        else
          payments << payment
        end
      else
        Rails.logger.info("Payouts: Create payment errors for user with id: #{user_id} #{payment_errors.inspect}")
      end
    rescue => e
      Rails.logger.error "Error in PayoutUsersService creating payment for user ID #{user_id} => #{e.class.name}: #{e.message}"
      Rails.logger.error "Error in PayoutUsersService creating payment for user ID #{user_id} => #{e.backtrace.join("\n")}"
      Bugsnag.notify(e)
      next
    end
    [payments, cross_border_payments]
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/post_sendgrid_api.rb | app/services/post_sendgrid_api.rb | # frozen_string_literal: true
# Sends a post (installment) to up to MAX_RECIPIENTS recipients in a single
# SendGrid v3 Mail Send call, using per-recipient personalizations and
# substitutions over one shared rendered template.
class PostSendgridApi
  include Rails.application.routes.url_helpers
  include ActionView::Helpers::SanitizeHelper
  include MailerHelper, CustomMailerRouteBuilder

  _routes.default_url_options = Rails.application.config.action_mailer.default_url_options

  MAX_RECIPIENTS = 1_000 # SendGrid's API limit: https://docs.sendgrid.com/for-developers/sending-email/personalizations

  def self.process(**args) = new(**args).send_emails

  # Sends post emails via SendGrid API.
  # How it works:
  #   - renders and locally caches the email template
  #   - sends the emails via SendGrid API, with substitutions for each recipient
  #   - records the emails as sent in EmailInfo
  #   - records the emails in EmailEvent
  #   - updates delivery statistics
  #   - sends push notifications
  # It does not:
  #   - check whether the post has already been sent to the email addresses (it's the caller's responsibility)
  #   - create any UrlRedirect records (same)
  #
  # `recipients` keys:
  #   required => :email (string)
  #   optional => :purchase, :subscription, :follower, :affiliate, :url_redirect (records)
  def initialize(post:, recipients:, cache: {}, blast: nil, preview: false)
    @post = post
    @recipients = recipients
    # `cache` may be shared across PostSendgridApi instances for the same post
    # so the rendered template is only produced once per batch run.
    @cache = cache
    @blast = blast
    @preview = preview
    @cache[@post] ||= {}
  end

  # Builds and submits the SendGrid request, then performs the bookkeeping
  # side effects (unless this is a preview). Returns true.
  def send_emails
    # NOTE(review): build_mail returns nil when @recipients is empty; in that
    # case the API would be called with a nil body — confirm callers never
    # pass an empty recipient list.
    mail_json = build_mail
    if Rails.application.config.action_mailer.perform_deliveries != false && !Rails.env.test?
      sendgrid = SendGrid::API.new(api_key: GlobalConfig.get("SENDGRID_GR_CREATORS_API_KEY"))
      result = nil
      duration = Benchmark.realtime do
        result = sendgrid.client.mail._("send").post(request_body: mail_json)
      end
      Rails.logger.info(
        "[#{self.class.name}] Sent post #{@post.id} to #{@recipients.size} recipients" \
        " (duration: #{duration.round(3)}s, status: #{result.status_code})")
      raise SendGridApiResponseError.new(result.body) unless (200..299).include?(result.status_code.to_i)
    else
      Rails.logger.info(
        "[#{self.class.name}] Would have sent post #{@post.id} to #{@recipients.size} recipients" \
        " (perform_deliveries = false)")
    end
    unless @preview
      update_delivery_statistics
      send_push_notifications
      create_email_info_records
      upsert_email_events_documents
    end
    true
  end

  # In development/test, sent mails are captured per recipient for inspection
  # (see log_mail_debug_info).
  if Rails.env.development? || Rails.env.test?
    @mails = {}
    class << self
      attr_reader :mails
    end
  end

  # Assembles the full SendGrid::Mail payload: shared template content plus
  # one personalization (substitutions + custom args) per recipient.
  # Returns nil when there are no recipients.
  def build_mail
    return if @recipients.empty?
    validate_recipients
    fetch_rendered_template
    from = SendGrid::Email.new(
      email: creators_from_email_address(@post.seller.username),
      name: from_email_address_name(@post.seller.name)
    )
    reply_to = SendGrid::Email.new(email: @post.seller.support_or_form_email)
    mail = SendGrid::Mail.new
    mail.from = from
    mail.subject = @post.subject
    mail.reply_to = reply_to
    mail.add_content SendGrid::Content.new(type: "text/plain", value: @cache[@post][:template][:plaintext])
    mail.add_content SendGrid::Content.new(type: "text/html", value: @cache[@post][:template][:html])
    mail.add_category SendGrid::Category.new(name: self.class.name)
    mail.add_custom_arg SendGrid::CustomArg.new(key: "installment_id", value: @post.id)
    mail.add_custom_arg SendGrid::CustomArg.new(key: "seller_id", value: @post.seller_id)
    @recipients.each do |recipient|
      mail.add_personalization(build_personalization_for_recipient(recipient))
    end
    mail_json = mail.to_json
    log_mail_debug_info(mail_json)
    mail_json
  end

  private
    # Renders the post email once and caches the inlined-CSS HTML and
    # plaintext variants, plus other per-post values reused per recipient.
    def fetch_rendered_template
      @cache[@post][:assigns] ||= {
        has_post_url: @post.shown_on_profile?,
        has_download_button: @cache[@post][:has_files?],
        has_comment_button: @post.shown_on_profile? && @post.allow_comments?,
        has_seller_update_reason: @post.seller_or_product_or_variant_type?,
        gumroad_url: root_url,
      }
      @cache[@post][:template] ||= begin
        rendered_html = ApplicationController.renderer.render(
          template: "posts/post_email",
          layout: false,
          assigns: { post: @post }.merge(@cache[@post][:assigns])
        )
        premailer = Premailer::Rails::CustomizedPremailer.new(rendered_html)
        { html: premailer.to_inline_css, plaintext: premailer.to_plain_text }
      end
      # Also cache other slow operations needed to render the template
      @cache[@post][:sanitized_product_name] = strip_tags(@post.link.name) if !@cache[@post].key?(:sanitized_product_name) && @post.product_or_variant_type?
    end

    # Builds the recipient-specific substitutions (URLs, unsubscribe link,
    # "why you got this" blurb) and custom args used for event attribution.
    def build_personalization_for_recipient(recipient)
      assigns = @cache[@post][:assigns]
      personalization = SendGrid::Personalization.new
      personalization.add_to(SendGrid::Email.new(email: recipient[:email]))
      personalization.add_substitution SendGrid::Substitution.new(key: "{{subject}}", value: @post.subject)
      if assigns[:has_post_url]
        post_url = build_mailer_post_route(post: @post, purchase: recipient[:purchase])
        personalization.add_substitution SendGrid::Substitution.new(key: "{{post_url}}", value: post_url)
      end
      if assigns[:has_download_button]
        download_url = recipient[:url_redirect]&.download_page_url
        personalization.add_substitution SendGrid::Substitution.new(key: "{{download_url}}", value: download_url)
        personalization.add_substitution SendGrid::Substitution.new(key: "{{t_view_attachments_prompt}}", value: "View content")
      end
      if assigns[:has_comment_button]
        personalization.add_substitution SendGrid::Substitution.new(key: "{{t_post_a_comment}}", value: "Reply with a comment")
      end
      if assigns[:has_seller_update_reason]
        if @post.seller_type?
          seller_update_reason = "You've received this post because you've purchased a product from #{@post.seller.name.presence || @post.seller.email || "Gumroad"}."
        elsif @post.product_or_variant_type?
          product_name = recipient[:product_name] || @cache[@post][:sanitized_product_name]
          download_url_or_product_url = recipient[:url_redirect]&.download_page_url || @post.link.long_url
          seller_update_reason = @post.member_cancellation_trigger? ?
            "You've received this email because you cancelled your membership to <a href=\"#{download_url_or_product_url}\">#{product_name}</a>." :
            @post.link.is_recurring_billing ?
              "You've received this email because you subscribed to <a href=\"#{download_url_or_product_url}\">#{product_name}</a>." :
              "You've received this email because you've purchased <a href=\"#{download_url_or_product_url}\">#{product_name}</a>."
        end
        personalization.add_substitution SendGrid::Substitution.new(key: "{{seller_update_reason}}", value: seller_update_reason)
      end
      personalization.add_substitution SendGrid::Substitution.new(key: "{{t_powered_by}}", value: "Powered by")
      personalization.add_substitution SendGrid::Substitution.new(key: "{{t_unsubscribe}}", value: "Unsubscribe")
      unsubscribe_url = if recipient[:purchase]
        unsubscribe_purchase_url(recipient[:purchase].secure_external_id(scope: "unsubscribe"))
      elsif recipient[:follower]
        cancel_follow_url(recipient[:follower].external_id)
      elsif recipient[:affiliate]
        unsubscribe_posts_affiliate_url(recipient[:affiliate].external_id)
      else
        "#"
      end
      personalization.add_substitution SendGrid::Substitution.new(key: "{{unsubscribe_url}}", value: unsubscribe_url)
      %i[purchase subscription follower affiliate].each do |record_name|
        personalization.add_custom_arg(SendGrid::CustomArg.new(key: "#{record_name}_id", value: recipient[record_name].id)) if recipient[record_name]
      end
      if recipient[:purchase]
        personalization.add_custom_arg SendGrid::CustomArg.new(key: "type", value: "CreatorContactingCustomersMailer.purchase_installment")
        personalization.add_custom_arg SendGrid::CustomArg.new(key: "identifier", value: "[#{recipient[:purchase].id}, #{@post.id}]")
      elsif recipient[:follower]
        personalization.add_custom_arg SendGrid::CustomArg.new(key: "type", value: "CreatorContactingCustomersMailer.follower_installment")
        personalization.add_custom_arg SendGrid::CustomArg.new(key: "identifier", value: "[#{recipient[:follower].id}, #{@post.id}]")
      elsif recipient[:affiliate]
        personalization.add_custom_arg SendGrid::CustomArg.new(key: "type", value: "CreatorContactingCustomersMailer.direct_affiliate_installment")
        personalization.add_custom_arg SendGrid::CustomArg.new(key: "identifier", value: "[#{recipient[:affiliate].id}, #{@post.id}]")
      end
      personalization
    end

    def update_delivery_statistics
      @post.increment_total_delivered(by: @recipients.size)
      PostEmailBlast.acknowledge_email_delivery(@blast.id, by: @recipients.size) if @blast
    end

    # Queues a push notification for each recipient that has a User account.
    def send_push_notifications
      emails = @recipients.map { _1[:email] }
      users_by_email = User.where(email: emails).select(:id, :email).index_by(&:email)
      return if users_by_email.empty?
      notification_jobs_arguments = @recipients.map do |recipient|
        user = users_by_email[recipient[:email]]
        next if user.nil?
        data = {
          "installment_id" => @post.external_id,
          "subscription_id" => recipient[:subscription]&.external_id,
          "purchase_id" => recipient[:purchase]&.external_id,
          "follower_id" => recipient[:follower]&.external_id,
        }.compact
        body = "By #{@post.seller.name}"
        [user.id, Device::APP_TYPES[:consumer], @post.subject, body, data]
      end.compact
      PushNotificationWorker.set(queue: "low").perform_bulk(notification_jobs_arguments)
    end

    # Bulk-inserts "sent" EmailInfo rows for purchase-based recipients only.
    def create_email_info_records
      attributes = @recipients.map do |recipient|
        next unless recipient.key?(:purchase)
        { purchase_id: recipient[:purchase].id }
      end.compact
      return if attributes.empty?
      base_attributes = {
        type: CreatorContactingCustomersEmailInfo.name,
        installment_id: @post.id,
        email_name: EmailEventInfo::PURCHASE_INSTALLMENT_MAILER_METHOD,
        state: "sent",
        sent_at: Time.current,
      }
      EmailInfo.create_with(base_attributes).insert_all!(attributes)
    end

    def upsert_email_events_documents
      EmailEvent.log_send_events(@recipients.map { _1[:email] }, Time.current)
    end

    # Fails fast on malformed recipient hashes before any API call is made.
    def validate_recipients
      raise "Too many recipients (#{@recipients.size} > #{MAX_RECIPIENTS})" if @recipients.size > MAX_RECIPIENTS
      @cache[@post][:has_files?] = @post.has_files? unless @cache[@post].key?(:has_files?)
      @recipients.each do |recipient|
        raise "Recipients must have an email" if recipient[:email].blank?
        raise "Recipients of a post with files must have a url_redirect" if @cache[@post][:has_files?] && recipient[:url_redirect].blank?
        raise "Recipients can't have a purchase and/or a follower and/or an affiliate record" if recipient.slice(:purchase, :follower, :affiliate).values.compact.size > 1
      end
    end

    # Dev/test helper: records each rendered, substituted mail in self.mails
    # (and optionally on disk) so specs and manual runs can inspect output.
    def log_mail_debug_info(mail_json)
      return unless Rails.env.development? || Rails.env.test?
      return if ENV["POST_SENDGRID_API_SKIP_DEBUG"] == "1" # Needed for accurate performance testing in development
      Rails.logger.info("[#{self.class.name}] SendGrid API request body:")
      Rails.logger.info(mail_json)
      content = mail_json["content"].find { _1["type"] == "text/html" }["value"]
      mail_json["personalizations"].each do |personalization|
        content_with_substitutions = content.dup
        personalization["substitutions"].each { content_with_substitutions.gsub!(_1, _2) }
        recipient_email = personalization["to"][0]["email"]
        self.class.mails[recipient_email] = {
          subject: mail_json["subject"],
          from: mail_json.dig("from", "email"),
          reply_to: mail_json.dig("reply_to", "email"),
          content: content_with_substitutions,
          custom_args: mail_json["custom_args"].merge(personalization["custom_args"] || {}),
        }
        if ENV["POST_SENDGRID_API_SAVE_EMAILS"] == "1"
          mails_dir = Rails.root.join("tmp", "mails")
          FileUtils.mkdir_p(mails_dir)
          file = File.new(File.join(mails_dir, "#{Time.current.to_f}-#{@post.id}-#{recipient_email}.html"), "w")
          file.syswrite(content_with_substitutions)
        end
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/user_custom_domain_request_service.rb | app/services/user_custom_domain_request_service.rb | # frozen_string_literal: true
# Route-level check for requests arriving on a seller's own (custom) domain.
class UserCustomDomainRequestService
  class << self
    # @param request [#host] the incoming request.
    # @return [Boolean] true when the host is neither a Gumroad domain nor the
    #   Discover domain, and is not claimed as a *product-level* custom domain.
    #   Note the safe navigation: `find_by_host(...)&.product.nil?` is also
    #   true when no CustomDomain record exists for the host at all.
    def valid?(request)
      !GumroadDomainConstraint.matches?(request) && !DiscoverDomainConstraint.matches?(request) && CustomDomain.find_by_host(request.host)&.product.nil?
    end
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/tip_options_service.rb | app/services/tip_options_service.rb | # frozen_string_literal: true
# Stores and retrieves the checkout tip percentage options in Redis, falling
# back to safe defaults whenever Redis is unavailable or holds bad data.
class TipOptionsService
  DEFAULT_TIP_OPTIONS = [0, 10, 20]
  DEFAULT_DEFAULT_TIP_OPTION = 0

  # @return [Array<Integer>] the configured tip percentages, or the defaults
  #   when unset, malformed, or when Redis errors out.
  def self.get_tip_options
    options = $redis.get(RedisKey.tip_options)
    parsed_options = options ? JSON.parse(options) : DEFAULT_TIP_OPTIONS
    are_tip_options_valid?(parsed_options) ? parsed_options : DEFAULT_TIP_OPTIONS
  rescue
    DEFAULT_TIP_OPTIONS
  end

  # @param options [Array<Integer>] tip percentages to store.
  # @raise [ArgumentError] when options is not an array of integers.
  def self.set_tip_options(options)
    raise ArgumentError, "Tip options must be an array of integers" unless are_tip_options_valid?(options)
    $redis.set(RedisKey.tip_options, options.to_json)
  end

  # @return [Integer] the configured default tip percentage, or the default.
  def self.get_default_tip_option
    option = $redis.get(RedisKey.default_tip_option)&.to_i || DEFAULT_DEFAULT_TIP_OPTION
    is_default_tip_option_valid?(option) ? option : DEFAULT_DEFAULT_TIP_OPTION
  end

  # @param option [Integer] default tip percentage to store.
  # @raise [ArgumentError] when option is not an integer.
  def self.set_default_tip_option(option)
    raise ArgumentError, "Default tip option must be an integer" unless is_default_tip_option_valid?(option)
    $redis.set(RedisKey.default_tip_option, option)
  end

  def self.are_tip_options_valid?(options)
    options.is_a?(Array) && options.all? { |o| o.is_a?(Integer) }
  end
  # A bare `private` has no effect on `def self.` singleton methods, so the
  # original trailing `private` section left these helpers public;
  # private_class_method enforces the intended visibility.
  private_class_method :are_tip_options_valid?

  def self.is_default_tip_option_valid?(option)
    option.is_a?(Integer)
  end
  private_class_method :is_default_tip_option_valid?
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/adult_keyword_detector.rb | app/services/adult_keyword_detector.rb | # frozen_string_literal: true
# Heuristic NSFW-text detector: normalizes arbitrary text into lowercase
# words and checks them against a fixed list of adult keywords.
class AdultKeywordDetector
  # TODO: Add "pin-up" and "AB/DL". We may have to revisit our approach so we can include non-alphabet characters in the
  # list of adult keywords
  ADULT_KEYWORD_REGEX = Regexp.new(
    "\\b(" +
      ["futa", "pussy", "bondage", "bdsm", "lewd", "ahegao", "nude", "milking", "topless", "lolita", "lewds",
       "creampie", "dildo", "gape", "semen", "cuckold", "hairjob", "tickling", "hogtied", "uncensored", "thong",
       "pinup", "impregnation", "gagged", "hentai", "squirt", "orgasm", "virginkiller", "abdl", "crotch",
       "breast inflation", "ahri", "granblue", "lingerie",
       "boudoir", "kink", "shibari", "gutpunch", "gutpunching", "abs punch", "necro",
       "vibrator", "fetish", "nsfw", "saucy", "footjob", "joi"].join("|") +
      ")\\b"
  ).freeze

  # Extracts word-like runs of Unicode letters. Matches either a sequence of
  # letters optionally starting with an uppercase/titlecase letter, or a run
  # made up entirely of uppercase/titlecase letters (so "NSFWArt" splits into
  # "NSFW" + "Art"). See https://stackoverflow.com/a/4052294/3315873 for the
  # Unicode letter categories used here (Lu/Lt/Ll/Lm/Lo).
  TOKENIZATION_REGEX = /((?:[\p{Lu}\p{Lt}]?[\p{Ll}\p{Lm}\p{Lo}]+)|(?:[\p{Lu}\p{Lt}])+)/

  # Matches any character that is neither a Unicode letter nor a space;
  # such characters are flattened to spaces before tokenization.
  NOT_LETTER_OR_SPACE_REGEX = /[^\p{Lu}\p{Lt}\p{Ll}\p{Lm}\p{Lo} ]/

  # @param text [String, nil]
  # @return [Boolean] true when any normalized word matches an adult keyword.
  def self.adult?(text)
    return false unless text.present?

    # Blank out punctuation/digits, split into words, lowercase everything.
    normalized = text.gsub(NOT_LETTER_OR_SPACE_REGEX, " ")
    words = normalized.scan(TOKENIZATION_REGEX).flatten.map(&:downcase)
    ADULT_KEYWORD_REGEX.match?(words.join(" "))
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/custom_domain_verification_service.rb | app/services/custom_domain_verification_service.rb | # frozen_string_literal: true
# Verifies that a seller's custom domain is DNS-pointed at Gumroad (via CNAME
# or ALIAS/A records) and that a valid SSL certificate exists for it.
class CustomDomainVerificationService
  RESOLVER_TIMEOUT_IN_SECONDS = 5
  SSL_CERT_CHECK_CACHE_EXPIRY = 10.days

  attr_reader :domain

  # @param domain [String] the custom domain to verify.
  def initialize(domain:)
    @domain = domain
    @dns_resolver = Resolv::DNS.new
    dns_resolver.timeouts = RESOLVER_TIMEOUT_IN_SECONDS
  end

  # Like points_to_gumroad?, but swallows resolver/parse errors and returns
  # false instead of raising.
  def process
    points_to_gumroad?
  rescue => e
    Rails.logger.error e.full_message
    false
  end

  # @return [Boolean] true when at least one domain variant resolves to us.
  def points_to_gumroad?
    @_does_point_to_gumroad ||= domains_pointed_to_gumroad.any?
  end

  # @return [Array<String>] the variants of `domain` (apex and/or www) whose
  #   DNS currently points at Gumroad.
  def domains_pointed_to_gumroad
    pointed_domains = []
    parsed_host = PublicSuffix.parse(domain)
    # For an apex or "www." domain, check both the bare domain and its
    # www-prefixed variant; otherwise check the given subdomain as-is.
    if parsed_host.trd.nil? || parsed_host.trd == CustomDomain::WWW_PREFIX
      parsed_domain_with_www_prefix = "#{CustomDomain::WWW_PREFIX}.#{parsed_host.domain}"
      pointed_domains << parsed_host.domain if cname_or_alias_configured?(parsed_host.domain)
      pointed_domains << parsed_domain_with_www_prefix if cname_or_alias_configured?(parsed_domain_with_www_prefix)
    else
      pointed_domains << domain if cname_or_alias_configured?(domain)
    end
    pointed_domains
  end

  # @return [Boolean] true when every pointed domain variant has an unexpired
  #   certificate (positive results are cached in Redis).
  def has_valid_ssl_certificates?
    domains_pointed_to_gumroad.all? do |domain|
      ssl_cert_check_redis_namespace.get(ssl_cert_check_cache_key(domain)) || has_valid_ssl_certificate?(domain)
    end
  end

  private
    attr_reader :dns_resolver

    # Fetches the stored certificate for `domain` from S3 and checks its
    # expiry; caches (only) successful checks.
    def has_valid_ssl_certificate?(domain)
      @ssl_service ||= SslCertificates::Base.new
      ssl_cert_s3_key = @ssl_service.ssl_file_path(domain, "cert")
      @s3 ||= Aws::S3::Resource.new(credentials: Aws::InstanceProfileCredentials.new)
      cert_obj = @s3.bucket(SslCertificates::Base::SECRETS_S3_BUCKET).object(ssl_cert_s3_key)
      cert = cert_obj.exists? && cert_obj.get.body.read
      valid = OpenSSL::X509::Certificate.new(cert).not_after > Time.current if cert.present?
      # Cache only when the certificate is valid
      ssl_cert_check_redis_namespace.set(ssl_cert_check_cache_key(domain), valid, ex: SSL_CERT_CHECK_CACHE_EXPIRY) if valid
      valid
    end

    # DNS lookup failures are logged and treated as "not configured".
    def cname_or_alias_configured?(domain_variant)
      cname_is_setup_correctly?(domain_variant) || alias_is_setup_correctly?(domain_variant)
    rescue => e
      Rails.logger.info("CNAME/ALIAS check error for custom domain '#{domain}'. Error: #{e.inspect}")
      false
    end

    def cname_is_setup_correctly?(domain_variant)
      # Example:
      #
      #   > domain = "production-sample-shop.gumroad.com"
      #   > Resolv::DNS.new.getresources(domain, Resolv::DNS::Resource::IN::CNAME).first.name.to_s
      #   => "domains.gumroad.com"
      #
      # We would verify this against the domain stored in CUSTOM_DOMAIN_CNAME to check if the domain
      # is setup correctly.
      current_domain_cname = dns_resolver.getresources(domain_variant, Resolv::DNS::Resource::IN::CNAME)
      !current_domain_cname.empty? && current_domain_cname.first.name.to_s == CUSTOM_DOMAIN_CNAME
    end

    # ALIAS records resolve to A records, so compare against both of our
    # possible targets.
    def alias_is_setup_correctly?(domain_variant)
      alias_records_correctly_configured?(CUSTOM_DOMAIN_CNAME, domain_variant) || alias_records_correctly_configured?(CUSTOM_DOMAIN_STATIC_IP_HOST, domain_variant)
    end

    def alias_records_correctly_configured?(target_domain, seller_domain)
      # Example:
      #
      #   > domain = "production-sample-shop.gumroad.com"
      #   > Resolv::DNS.new.getresources(domain, Resolv::DNS::Resource::IN::A).map { |record| record.address.to_s }
      #   => ["50.19.197.177", "3.214.103.12", "54.164.66.117"]
      #   > Resolv::DNS.new.getresources(CUSTOM_DOMAIN_CNAME, Resolv::DNS::Resource::IN::A).map { |record| record.address.to_s }
      #   => ["50.19.197.177", "3.214.103.12", "54.164.66.117"]
      #
      # When the sorted list of IPs match, we can confirm alias is setup correctly.
      current_domain_addresses = dns_resolver.getresources(seller_domain, Resolv::DNS::Resource::IN::A).map { |record| record.address.to_s }
      custom_domain_addresses = dns_resolver.getresources(target_domain, Resolv::DNS::Resource::IN::A).map { |record| record.address.to_s }
      current_domain_addresses.sort == custom_domain_addresses.sort
    end

    def ssl_cert_check_redis_namespace
      @_ssl_cert_check_redis_namespace ||= Redis::Namespace.new(:ssl_cert_check_namespace, redis: $redis)
    end

    def ssl_cert_check_cache_key(domain)
      "ssl_cert_check:#{domain}"
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/mailer_attachment_or_link_service.rb | app/services/mailer_attachment_or_link_service.rb | # frozen_string_literal: true
# Given a file:
# - Check if file size is acceptable for direct attachment
# - Yes: Return file
# - No: Return temporary expiring S3 link
class MailerAttachmentOrLinkService
  # SendGrid accepts attachments up to 30 MB but recommends 10:
  # https://sendgrid.com/docs/ui/sending-email/attachments-with-digioh/
  MAX_FILE_SIZE = 10.megabytes

  attr_reader :file, :filename, :extension

  def initialize(file:, filename: nil, extension: nil)
    @file = file
    @filename = filename
    @extension = extension
  end

  # Returns { file:, url: } — the file itself when it is small enough to be
  # attached directly, otherwise a temporary expiring S3 link in its place.
  def perform
    return { file:, url: nil } if file.size <= MAX_FILE_SIZE

    # Too large to attach: rewind so the upload reads from the start of the
    # file, then hand back an expiring link instead.
    file.rewind
    expiring_url = ExpiringS3FileService.new(file:, extension:, filename:).perform
    { file: nil, url: expiring_url }
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/recommended_products_service.rb | app/services/recommended_products_service.rb | # frozen_string_literal: true
class RecommendedProductsService
  MODELS = ["sales"]
  MODELS.each do |key|
    const_set("MODEL_#{key.upcase}", key)
  end

  # Returns an ordered ActiveRecord::Relation of recommended products.
  #
  # NOTES:
  # 1. Because this is a relation, callers can still preload associations,
  #    e.g. `.fetch(...).includes(:product_review_stat).as_json`.
  # 2. The ordering is baked into the relation's SQL — do not reorder the
  #    result (including via `find_each`).
  #
  # No products may be returned at all; `nil` is returned for unknown models.
  def self.fetch(model:, ids: [], exclude_ids: [], user_ids: nil, number_of_results: 10)
    return unless model == MODEL_SALES
    # An explicitly empty seller filter can never match anything.
    return Link.none if user_ids&.empty?

    scope = SalesRelatedProductsInfo.related_products(ids, limit: number_of_results)
    scope = scope.where(user_id: user_ids) unless user_ids.nil?
    scope.alive.where.not(id: exclude_ids)
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/community_chat_recap_generator_service.rb | app/services/community_chat_recap_generator_service.rb | # frozen_string_literal: true
# Generates and persists a daily or weekly recap of a community's chat via
# OpenAI, based on the recap run attached to the given CommunityChatRecap.
class CommunityChatRecapGeneratorService
  MAX_MESSAGES_TO_SUMMARIZE = 1000
  MAX_SUMMARY_LENGTH = 500
  MIN_SUMMARY_BULLET_POINTS = 1
  MAX_SUMMARY_BULLET_POINTS = 5
  OPENAI_REQUEST_TIMEOUT_IN_SECONDS = 10

  DAILY_SUMMARY_SYSTEM_PROMPT = <<~PROMPT
    You are an AI assistant that creates concise, informative daily summaries of community chat conversations.
    Your task is to analyze the provided chat messages and create a summary with minimum #{MIN_SUMMARY_BULLET_POINTS} and maximum #{MAX_SUMMARY_BULLET_POINTS} bullet points (maximum #{MAX_SUMMARY_LENGTH} characters) highlighting key discussions, questions answered, important announcements, or decisions made. Messages from the creator of the community are highlighted with [CREATOR], refer to them as "creator". Refer to each customer as "a customer" and never mention their actual names. Make important words/phrases bold using <strong> tags. Do not say anything that was not said in the messages.
    Format your response in an HTML unordered list exactly like this:
    <ul>
    <li>Summary bullet point 1</li>
    <li>Summary bullet point 2</li>
    </ul>
    Keep the summary conversational and easy to read.
    Make sure to include all significant topics discussed.
    Don't include usernames or timestamps in your summary.
    If there are very few messages, keep the summary brief but informative.
  PROMPT

  WEEKLY_SUMMARY_SYSTEM_PROMPT = <<~PROMPT
    You are an AI assistant that creates concise, informative weekly summaries of community chat conversations.
    Your task is to analyze the provided daily summaries and create a weekly summary with minimum #{MIN_SUMMARY_BULLET_POINTS} and maximum #{MAX_SUMMARY_BULLET_POINTS} bullet points (maximum #{MAX_SUMMARY_LENGTH} characters) highlighting key discussions, questions answered, important announcements, or decisions made. Make important words/phrases bold using <strong> tags.
    Format your response in an HTML unordered list exactly like this:
    <ul>
    <li>Summary bullet point 1</li>
    <li>Summary bullet point 2</li>
    </ul>
    Keep the summary conversational and easy to read.
    Make sure to include all significant topics discussed.
    Don't include usernames or timestamps in your summary.
    If there are very few daily summaries, keep the weekly summary brief but informative.
  PROMPT

  def initialize(community_chat_recap:)
    @community_chat_recap = community_chat_recap
    @community = community_chat_recap.community
    @recap_run = community_chat_recap.community_chat_recap_run
    @recap_frequency = recap_run.recap_frequency
    @from_date = recap_run.from_date
    @to_date = recap_run.to_date
  end

  # Generates and saves the recap. No-op when the recap is already finished.
  def process
    return if community_chat_recap.status_finished?

    recap_frequency == "daily" ? create_daily_recap : create_weekly_recap
  end

  private
    attr_reader :community_chat_recap, :community, :recap_run, :recap_frequency, :from_date, :to_date

    # Summarizes the window's chat messages (capped at MAX_MESSAGES_TO_SUMMARIZE)
    # and marks the recap finished. An empty window produces an empty summary.
    def create_daily_recap
      messages = community.community_chat_messages
        .includes(:user)
        .alive
        .where(created_at: from_date..to_date)
        .order(created_at: :asc)
        .limit(MAX_MESSAGES_TO_SUMMARIZE)

      summary, input_token_count, output_token_count =
        if messages.present?
          generate_daily_summary(messages)
        else
          ["", 0, 0]
        end

      finish_recap!(summary:, summarized_message_count: messages.size, input_token_count:, output_token_count:)
    end

    # Rolls up the week's finished daily recaps for this community into one
    # summary and marks the recap finished.
    def create_weekly_recap
      daily_recap_runs = CommunityChatRecapRun.includes(:community_chat_recaps).where(recap_frequency: "daily").between(from_date, to_date).where(community_chat_recaps: { community:, status: "finished" })
      daily_recaps = daily_recap_runs.map(&:community_chat_recaps).flatten.sort_by(&:created_at)

      summary, input_token_count, output_token_count =
        if daily_recaps.present?
          generate_weekly_summary(daily_recaps)
        else
          ["", 0, 0]
        end

      finish_recap!(summary:, summarized_message_count: daily_recaps.sum(&:summarized_message_count), input_token_count:, output_token_count:)
    end

    # Persists the generated summary, token counts, and the "finished" status
    # (clearing any previous error message).
    def finish_recap!(summary:, summarized_message_count:, input_token_count:, output_token_count:)
      community_chat_recap.assign_attributes(
        seller: community.seller,
        summary:,
        summarized_message_count:,
        input_token_count:,
        output_token_count:,
        status: "finished",
        error_message: nil
      )
      community_chat_recap.save!
    end

    def generate_daily_summary(messages)
      formatted_messages = messages.map do |message|
        timestamp = message.created_at.strftime("%Y-%m-%d %H:%M:%S")
        "[#{timestamp}] [Name: #{message.user.display_name}] #{message.user.id == community.seller_id ? "[CREATOR]" : ""}: #{message.content}"
      end.join("\n\n")
      Rails.logger.info("Formatted messages used for generating daily summary: #{formatted_messages}") if Rails.env.development?

      with_retries("daily summary") do
        fetch_summary(
          system_prompt: DAILY_SUMMARY_SYSTEM_PROMPT,
          user_content: "Here are today's chat messages in the community:\n\n#{formatted_messages}"
        )
      end
    end

    def generate_weekly_summary(daily_recaps)
      formatted_summaries = daily_recaps.map(&:summary).join("\n")
      Rails.logger.info("Formatted daily summaries used for generating weekly summary: #{formatted_summaries}") if Rails.env.development?
      return ["", 0, 0] if formatted_summaries.strip.blank?

      with_retries("weekly summary") do
        fetch_summary(
          system_prompt: WEEKLY_SUMMARY_SYSTEM_PROMPT,
          user_content: "Here are the daily summaries:\n\n#{formatted_summaries}"
        )
      end
    end

    # Shared OpenAI request/response handling for both recap kinds.
    # FIX: previously the daily path built the client without a request
    # timeout (only the weekly path used OPENAI_REQUEST_TIMEOUT_IN_SECONDS),
    # so a stalled API call could hang the daily job indefinitely. Both paths
    # now use the same bounded client.
    # Returns [summary_html, prompt_tokens, completion_tokens].
    def fetch_summary(system_prompt:, user_content:)
      response = OpenAI::Client.new(request_timeout: OPENAI_REQUEST_TIMEOUT_IN_SECONDS).chat(
        parameters: {
          model: "gpt-4o-mini",
          messages: [
            { role: "system", content: system_prompt },
            { role: "user", content: user_content }
          ],
          temperature: 0.7
        }
      )
      content = response.dig("choices", 0, "message", "content")
      # Prefer just the <ul>…</ul> list if the model wrapped it in extra prose.
      summary_match = content.match(/(<ul>.+?<\/ul>)/m)
      summary = summary_match ? summary_match[1].strip : content.strip
      [summary, response.dig("usage", "prompt_tokens"), response.dig("usage", "completion_tokens")]
    end

    # Runs the block, retrying up to max_tries times with a fixed delay;
    # re-raises the last error after exhausting retries.
    def with_retries(operation, max_tries: 3, delay: 1)
      tries = 0
      begin
        tries += 1
        yield
      rescue => e
        if tries < max_tries
          Rails.logger.info("Failed to generate #{operation}, attempt #{tries}/#{max_tries} (ID: #{community_chat_recap.id}): #{e.message}")
          sleep(delay)
          retry
        else
          Rails.logger.error("Failed to generate #{operation} after #{max_tries} attempts (ID: #{community_chat_recap.id}): #{e.message}")
          raise
        end
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/pdf_stamping_service.rb | app/services/pdf_stamping_service.rb | # frozen_string_literal: true
# Facade for PDF stamping: delegates the actual work to
# PdfStampingService::Stamp and PdfStampingService::StampForPurchase.
module PdfStampingService
  class Error < StandardError; end

  extend self

  # Stamping failures that callers are expected to rescue.
  ERRORS_TO_RESCUE = [
    PdfStampingService::Stamp::Error,
    PDF::Reader::MalformedPDFError
  ].freeze

  # Whether the given product file can be stamped.
  def can_stamp_file?(product_file:) = PdfStampingService::Stamp.can_stamp_file?(product_file:)

  # Stamps the purchase's PDFs; see StampForPurchase for details.
  def stamp_for_purchase!(purchase) = PdfStampingService::StampForPurchase.perform!(purchase)

  # Cache key for the stamping job associated with a purchase.
  def cache_key_for_purchase(purchase_id) = "stamp_pdf_for_purchase_job_#{purchase_id}"
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/kra_pin_validation_service.rb | app/services/kra_pin_validation_service.rb | # frozen_string_literal: true
# Format validation for Kenyan KRA PINs: one uppercase letter, nine digits,
# then one uppercase letter (e.g. "A123456789Z").
class KraPinValidationService
  attr_reader :kra_pin

  def initialize(kra_pin)
    @kra_pin = kra_pin
  end

  # Returns true only when a non-blank PIN matches the expected pattern.
  def process
    kra_pin.present? && kra_pin.match?(/\A[A-Z]\d{9}[A-Z]\z/)
  end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/save_utm_link_service.rb | app/services/save_utm_link_service.rb | # frozen_string_literal: true
# Creates a new UTM link for a seller, or updates an existing one, from a
# whitelisted subset of the given params.
class SaveUtmLinkService
  def initialize(seller:, params:, utm_link: nil)
    @seller = seller
    @params = params
    @utm_link = utm_link
  end

  def perform
    if utm_link.present?
      utm_link.update!(params_permitted_for_update)
    else
      seller.utm_links.create!(params_permitted_for_create)
    end
  end

  private
    attr_reader :seller, :params, :utm_link

    # On create, the target resource id arrives obfuscated and must be
    # decrypted before being persisted.
    def params_permitted_for_create
      permitted = params.dup
      encrypted_id = permitted[:target_resource_id]
      permitted[:target_resource_id] = ObfuscateIds.decrypt(encrypted_id) if encrypted_id.present?
      permitted.slice(:title, :target_resource_type, :target_resource_id, :permalink, :utm_source, :utm_medium, :utm_campaign, :utm_term, :utm_content, :ip_address, :browser_guid)
    end

    # Updates may only touch the title and UTM fields.
    def params_permitted_for_update
      params.slice(:title, :utm_source, :utm_medium, :utm_campaign, :utm_term, :utm_content)
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/post_resend_api.rb | app/services/post_resend_api.rb | # frozen_string_literal: true
# Sends a post (creator email blast) to a batch of recipients through the
# Resend batch API, building one personalized email per recipient and then
# recording delivery stats, push notifications, and email-info bookkeeping.
class PostResendApi
  include Rails.application.routes.url_helpers
  include ActionView::Helpers::SanitizeHelper
  include MailerHelper, CustomMailerRouteBuilder

  # URL helpers are used outside a controller/mailer, so defaults must be set.
  _routes.default_url_options = Rails.application.config.action_mailer.default_url_options

  MAX_RECIPIENTS = 100 # Resend's batch send limit

  def self.process(**args) = new(**args).send_emails

  # post:       the post whose content is emailed
  # recipients: array of hashes; each must have :email and may carry at most
  #             one of :purchase / :follower / :affiliate, plus optionally
  #             :url_redirect, :subscription, :product_name (see validate_recipients!)
  # cache:      per-post memo shared across instances so the template is only
  #             rendered once for a whole send
  # blast:      PostEmailBlast to acknowledge deliveries against (optional)
  # preview:    when true, skips stats/notifications/bookkeeping side effects
  def initialize(post:, recipients:, cache: {}, blast: nil, preview: false)
    @post = post
    @recipients = recipients
    @cache = cache
    @blast = blast
    @preview = preview
    @cache[@post] ||= {}
  end

  # Validates recipients, renders/personalizes the post, and sends all emails
  # in a single Resend batch call. Always returns true; raises on validation
  # failures or a non-success Resend response. Deliveries are skipped (but
  # logged) when perform_deliveries is false or in the test environment.
  def send_emails
    return true if @recipients.empty?
    validate_recipients!
    fetch_rendered_template
    emails = build_emails
    if Rails.application.config.action_mailer.perform_deliveries != false && !Rails.env.test?
      Resend.api_key = GlobalConfig.get("RESEND_CREATORS_API_KEY")
      duration = Benchmark.realtime do
        response = Resend::Batch.send(emails)
        unless response.success?
          raise ResendApiResponseError.new(response.body)
        end
      end
      Rails.logger.info(
        "[#{self.class.name}] Sent post #{@post.id} to #{@recipients.size} recipients" \
        " (duration: #{duration.round(3)}s)")
    else
      Rails.logger.info(
        "[#{self.class.name}] Would have sent post #{@post.id} to #{@recipients.size} recipients" \
        " (perform_deliveries = false)")
    end
    unless @preview
      update_delivery_statistics
      send_push_notifications
      create_email_info_records
      upsert_email_events_documents
    end
    true
  end

  # In development/test, built emails are captured in PostResendApi.mails
  # (keyed by recipient email) for inspection — see log_mail_debug_info.
  if Rails.env.development? || Rails.env.test?
    @mails = {}
    class << self
      attr_reader :mails
    end
  end

  private
    def build_emails
      @recipients.map do |recipient|
        email = build_email_for_recipient(recipient)
        log_mail_debug_info(email, recipient) if Rails.env.development? || Rails.env.test?
        email
      end
    end

    # Assembles the Resend payload hash for one recipient, with tracking
    # headers identifying the equivalent mailer class/method/args.
    def build_email_for_recipient(recipient)
      headers = MailerInfo.build_headers(
        mailer_class: EmailEventInfo::CREATOR_CONTACTING_CUSTOMERS_MAILER_CLASS,
        mailer_method: determine_mailer_method(recipient),
        mailer_args: [recipient.values_at(:purchase, :follower, :affiliate).compact.first&.id, @post.id].compact,
        email_provider: MailerInfo::EMAIL_PROVIDER_RESEND,
      )
      email = {
        from: "#{from_email_address_name(@post.seller.name)} <#{creators_from_email_address(@post.seller.username)}>",
        reply_to: @post.seller.support_or_form_email,
        to: [recipient[:email]],
        subject: @post.subject,
        html: personalize_content(@cache[@post][:template][:html], recipient),
        text: personalize_content(@cache[@post][:template][:plaintext], recipient),
        headers: headers
      }
      email
    end

    # Maps the recipient kind (purchase/follower/affiliate) to the mailer
    # method name recorded in the tracking headers; nil when none apply.
    def determine_mailer_method(recipient)
      if recipient[:purchase]
        EmailEventInfo::PURCHASE_INSTALLMENT_MAILER_METHOD
      elsif recipient[:follower]
        EmailEventInfo::FOLLOWER_INSTALLMENT_MAILER_METHOD
      elsif recipient[:affiliate]
        EmailEventInfo::DIRECT_AFFILIATE_INSTALLMENT_MAILER_METHOD
      end
    end

    # Replaces {{placeholder}} tokens in the rendered template with
    # recipient-specific values; which tokens exist is driven by the cached
    # assigns flags computed in fetch_rendered_template.
    def personalize_content(content, recipient)
      assigns = @cache[@post][:assigns]
      substitutions = {
        "{{subject}}" => @post.subject,
        "{{t_powered_by}}" => "Powered by",
        "{{t_unsubscribe}}" => "Unsubscribe",
        "{{unsubscribe_url}}" => build_unsubscribe_url(recipient)
      }
      if assigns[:has_post_url]
        substitutions["{{post_url}}"] = build_mailer_post_route(post: @post, purchase: recipient[:purchase])
      end
      if assigns[:has_download_button]
        substitutions["{{download_url}}"] = recipient[:url_redirect]&.download_page_url
        substitutions["{{t_view_attachments_prompt}}"] = "View content"
      end
      if assigns[:has_comment_button]
        substitutions["{{t_post_a_comment}}"] = "Reply with a comment"
      end
      if assigns[:has_seller_update_reason]
        substitutions["{{seller_update_reason}}"] = build_seller_update_reason(recipient)
      end
      # dup before gsub! so the shared cached template is never mutated.
      content = content.dup
      substitutions.each { |key, value| content.gsub!(key, value.to_s) }
      content
    end

    # Unsubscribe link appropriate for the recipient kind; "#" when the
    # recipient has no purchase/follower/affiliate association.
    def build_unsubscribe_url(recipient)
      if recipient[:purchase]
        unsubscribe_purchase_url(recipient[:purchase].secure_external_id(scope: "unsubscribe"))
      elsif recipient[:follower]
        cancel_follow_url(recipient[:follower].external_id)
      elsif recipient[:affiliate]
        unsubscribe_posts_affiliate_url(recipient[:affiliate].external_id)
      else
        "#"
      end
    end

    # Sentence explaining why the recipient received this email; wording
    # depends on the post type (seller vs product/variant), whether the
    # product is recurring, and whether the trigger was a membership
    # cancellation. Returns nil for other post types.
    def build_seller_update_reason(recipient)
      if @post.seller_type?
        "You've received this email because you've purchased a product from #{@post.seller.name.presence || @post.seller.email || "Gumroad"}."
      elsif @post.product_or_variant_type?
        product_name = recipient[:product_name] || @cache[@post][:sanitized_product_name]
        download_url_or_product_url = recipient[:url_redirect]&.download_page_url || @post.link.long_url
        @post.member_cancellation_trigger? ?
          "You've received this email because you cancelled your membership to <a href=\"#{download_url_or_product_url}\">#{product_name}</a>." :
          @post.link.is_recurring_billing ?
            "You've received this email because you subscribed to <a href=\"#{download_url_or_product_url}\">#{product_name}</a>." :
            "You've received this email because you've purchased <a href=\"#{download_url_or_product_url}\">#{product_name}</a>."
      end
    end

    # Renders the post template once per post (memoized in @cache), inlines
    # CSS via Premailer, and caches the flags that drive personalization.
    def fetch_rendered_template
      @cache[@post][:assigns] ||= {
        has_post_url: @post.shown_on_profile?,
        has_download_button: @cache[@post][:has_files?],
        has_comment_button: @post.shown_on_profile? && @post.allow_comments?,
        has_seller_update_reason: @post.seller_or_product_or_variant_type?,
        gumroad_url: root_url,
      }
      @cache[@post][:template] ||= begin
        rendered_html = ApplicationController.renderer.render(
          template: "posts/post_email",
          layout: false,
          assigns: { post: @post }.merge(@cache[@post][:assigns])
        )
        premailer = Premailer::Rails::CustomizedPremailer.new(rendered_html)
        { html: premailer.to_inline_css, plaintext: premailer.to_plain_text }
      end
      @cache[@post][:sanitized_product_name] = strip_tags(@post.link.name) if !@cache[@post].key?(:sanitized_product_name) && @post.product_or_variant_type?
    end

    # Enforces the batch-size limit and per-recipient invariants; also caches
    # whether the post has files (needed both here and for the assigns above).
    def validate_recipients!
      raise "Too many recipients (#{@recipients.size} > #{MAX_RECIPIENTS})" if @recipients.size > MAX_RECIPIENTS
      @cache[@post][:has_files?] = @post.has_files? unless @cache[@post].key?(:has_files?)
      @recipients.each do |recipient|
        raise "Recipients must have an email" if recipient[:email].blank?
        raise "Recipients of a post with files must have a url_redirect" if @cache[@post][:has_files?] && recipient[:url_redirect].blank?
        raise "Recipients can't have a purchase and/or a follower and/or an affiliate record" if recipient.slice(:purchase, :follower, :affiliate).values.compact.size > 1
      end
    end

    def update_delivery_statistics
      @post.increment_total_delivered(by: @recipients.size)
      PostEmailBlast.acknowledge_email_delivery(@blast.id, by: @recipients.size) if @blast
    end

    # Queues a push notification for each recipient that maps to a User
    # account (matched by email); recipients without accounts are skipped.
    def send_push_notifications
      emails = @recipients.map { _1[:email] }
      users_by_email = User.where(email: emails).select(:id, :email).index_by(&:email)
      return if users_by_email.empty?
      notification_jobs_arguments = @recipients.map do |recipient|
        user = users_by_email[recipient[:email]]
        next if user.nil?
        data = {
          "installment_id" => @post.external_id,
          "subscription_id" => recipient[:subscription]&.external_id,
          "purchase_id" => recipient[:purchase]&.external_id,
          "follower_id" => recipient[:follower]&.external_id,
        }.compact
        body = "By #{@post.seller.name}"
        [user.id, Device::APP_TYPES[:consumer], @post.subject, body, data]
      end.compact
      PushNotificationWorker.set(queue: "low").perform_bulk(notification_jobs_arguments)
    end

    # Bulk-inserts "sent" EmailInfo rows, but only for purchase recipients.
    def create_email_info_records
      attributes = @recipients.map do |recipient|
        next unless recipient.key?(:purchase)
        { purchase_id: recipient[:purchase].id }
      end.compact
      return if attributes.empty?
      base_attributes = {
        type: CreatorContactingCustomersEmailInfo.name,
        installment_id: @post.id,
        email_name: EmailEventInfo::PURCHASE_INSTALLMENT_MAILER_METHOD,
        state: "sent",
        sent_at: Time.current,
      }
      EmailInfo.create_with(base_attributes).insert_all!(attributes)
    end

    def upsert_email_events_documents
      EmailEvent.log_send_events(@recipients.map { _1[:email] }, Time.current)
    end

    # Dev/test-only debugging: logs the Resend payload, records it in
    # PostResendApi.mails, and optionally dumps the HTML to tmp/mails when
    # POST_RESEND_API_SAVE_EMAILS=1.
    def log_mail_debug_info(email, recipient)
      Rails.logger.info("[#{self.class.name}] Resend API request body:")
      Rails.logger.info(email.to_json)
      self.class.mails[recipient[:email]] = {
        subject: email[:subject],
        from: email[:from],
        reply_to: email[:reply_to],
        content: email[:html],
        headers: email[:headers]
      }
      if ENV["POST_RESEND_API_SAVE_EMAILS"] == "1"
        mails_dir = Rails.root.join("tmp", "mails")
        FileUtils.mkdir_p(mails_dir)
        file = File.new(File.join(mails_dir, "#{Time.current.to_f}-#{@post.id}-#{recipient[:email]}.html"), "w")
        file.syswrite(email[:html])
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/tax_id_validation_service.rb | app/services/tax_id_validation_service.rb | # frozen_string_literal: true
# Validates a tax id for a country against the Tax ID Pro API, caching
# results for 10 minutes to limit external calls.
class TaxIdValidationService
  attr_reader :tax_id, :country_code

  def initialize(tax_id, country_code)
    @tax_id = tax_id
    @country_code = country_code
  end

  # Returns false for blank input without hitting the API; otherwise the
  # (cached) API verdict.
  def process
    return false if tax_id.blank? || country_code.blank?

    Rails.cache.fetch("tax_id_validation_#{tax_id}_#{country_code}", expires_in: 10.minutes) do
      valid_tax_id?
    end
  end

  private
    TAX_ID_PRO_ENDPOINT_TEMPLATE = Addressable::Template.new(
      "https://v3.api.taxid.pro/validate?country={country_code}&tin={tax_id}"
    )
    TAX_ID_PRO_HEADERS = {
      "Authorization" => "Bearer #{TAX_ID_PRO_API_KEY}"
    }

    # True only for a 200 response whose payload marks the id as valid.
    def valid_tax_id?
      endpoint = TAX_ID_PRO_ENDPOINT_TEMPLATE.expand(country_code:, tax_id:).to_s
      response = HTTParty.get(endpoint, headers: TAX_ID_PRO_HEADERS, timeout: 5)
      response.code == 200 && response.parsed_response["is_valid"]
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/zoom_api.rb | app/services/zoom_api.rb | # frozen_string_literal: true
# Thin client for the Zoom REST API (OAuth token exchange + user info),
# rate-limited via Redis to 30 calls per second.
class ZoomApi
  include HTTParty

  ZOOM_OAUTH_URL = "https://zoom.us/oauth/token"
  base_uri "https://api.zoom.us/v2"

  # Exchanges an OAuth authorization code for an access token.
  def oauth_token(code, redirect_uri)
    form_body = URI.encode_www_form(grant_type: "authorization_code", code:, redirect_uri:)
    HTTParty.post(ZOOM_OAUTH_URL, body: form_body, headers: oauth_header)
  end

  # Fetches the authenticated user's profile.
  def user_info(token)
    rate_limited_call { self.class.get("/users/me", headers: request_header(token)) }
  end

  private
    # HTTP Basic auth header built from the Zoom client credentials.
    def oauth_header
      client_id = GlobalConfig.get("ZOOM_CLIENT_ID")
      client_secret = GlobalConfig.get("ZOOM_CLIENT_SECRET")
      encoded = Base64.strict_encode64("#{client_id}:#{client_secret}")
      { "Authorization" => "Basic #{encoded}", "Content-Type" => "application/x-www-form-urlencoded" }
    end

    def request_header(token)
      { "Authorization" => "Bearer #{token}" }
    end

    # Runs the block within a 30-calls-per-second Redis-backed rate limit.
    def rate_limited_call
      key = "ZOOM_API_RATE_LIMIT"
      limiter = Ratelimit.new(key, { redis: $redis })
      limiter.exec_within_threshold key, threshold: 30, interval: 1 do
        limiter.add(key)
        yield
      end
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
antiwork/gumroad | https://github.com/antiwork/gumroad/blob/638f6c3a40b23b907c09f6881d4df18339da069c/app/services/username_generator_service.rb | app/services/username_generator_service.rb | # frozen_string_literal: true
# Suggests an available username for a user via OpenAI, normalized to our
# validation rules (lowercase alphanumerics, 3-20 chars, at least one letter,
# not denylisted). Returns nil when there is no usable user data or when the
# uniqueness suffixing pushes the name past 20 characters.
class UsernameGeneratorService
  attr_reader :user

  def initialize(user)
    @user = user
  end

  def username
    return if user_data.nil?

    candidate = ensure_valid_username(openai_completion)
    # Append digits until the name is not already taken.
    candidate += random_digit while User.exists?(username: candidate)
    return if candidate.length > 20

    candidate
  end

  private
    # Asks OpenAI for a single short username suggestion.
    def openai_completion
      parameters = {
        messages: [{ role: "user", content: prompt }],
        model: "gpt-4o-mini",
        temperature: 0.0,
        max_tokens: 10
      }
      OpenAI::Client.new.chat(parameters:).dig("choices", 0, "message", "content").strip
    end

    # Although very unlikely, in theory OpenAI could return a username that is
    # invalid. This method ensures the username meets our validation criteria:
    # 1. Only lowercase letters and numbers
    # 2. At least one letter
    # 3. Not in DENYLIST
    # 4. Between 3 and 20 characters
    def ensure_valid_username(name)
      candidate = name.downcase.gsub(/[^a-z0-9]/, "")
      candidate += "a" if candidate.blank? || candidate.match?(/^[0-9]+$/)
      candidate += random_digit if DENYLIST.include?(candidate)
      candidate += random_digit while candidate.length < 3
      candidate.first(20)
    end

    # A single digit (0-8) as a string.
    def random_digit
      SecureRandom.random_number(9).to_s
    end

    def prompt
      "Generate a username for a user with #{user_data}. It should be one word and all lowercase with no numbers. Avoid any generic sounding names like #{bad_usernames.join(", ")}."
    end

    def user_data
      if user.email.present?
        "the email address #{user.email}. DO NOT use the email domain if it's a generic email provider"
      elsif user.name.present?
        "the name #{user.name}"
      end
    end

    def bad_usernames
      %w[support hi hello contact info help]
    end
end
| ruby | MIT | 638f6c3a40b23b907c09f6881d4df18339da069c | 2026-01-04T15:39:11.815677Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.