repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/app/controllers/sabisu_rails/explorer_controller.rb | app/controllers/sabisu_rails/explorer_controller.rb | module SabisuRails
# Single screen of the engine: renders the explorer form and proxies the
# composed request to the configured API.
class ExplorerController < SabisuRails::BaseController
  def index
    @resources = SabisuRails.resources_names
    # Explorer captures the submitted form state (resource, URI pattern, verb).
    @explorer = SabisuRails::Explorer.new(params[:explorer] || {})
    # Body attributes are posted under the singular resource name key;
    # NOTE(review): this fires an API request even on the initial page load —
    # confirm that is intended.
    @response = SabisuRails::Request.new(@explorer, params[@explorer.resource_name], params[:explorer]).response
    respond_to do |format|
      format.html
      format.js
    end
  end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/app/controllers/sabisu_rails/application_controller.rb | app/controllers/sabisu_rails/application_controller.rb | module SabisuRails
# Engine-namespaced controller inheriting directly from ActionController::Base.
# NOTE(review): ExplorerController inherits from SabisuRails::BaseController
# instead of this class — confirm which base controller is intended.
class ApplicationController < ActionController::Base
end
end | ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails.rb | lib/sabisu_rails.rb | require 'httparty'
module SabisuRails
  extend ActiveSupport::Autoload

  autoload :Client
  autoload :Request
  autoload :RouteRecognizer
  autoload :Explorer
  autoload :Helpers
  autoload :Builders

  # We ignore some attributes that might cause a collision between models
  @@default_ignored_attributes = %w{ created_at updated_at id }

  # We append the extra attributes you want to ignore to the default ones.
  # NOTE(review): this append runs once at load time, before SabisuRails.setup
  # is called — assigning config.ignored_attributes later REPLACES the
  # defaults instead of appending to them. Confirm that is intended.
  mattr_accessor :ignored_attributes
  @@ignored_attributes = @@ignored_attributes.to_a + @@default_ignored_attributes

  # Base api uri for the endpoints
  mattr_accessor :base_api_uri
  @@base_api_uri = nil

  # HTTP methods for the api (same load-time append caveat as above)
  @@default_http_methods = %w{ GET POST PUT DELETE PATCH }
  mattr_accessor :http_methods
  @@http_methods = @@http_methods.to_a + @@default_http_methods

  # Headers to include on each request
  mattr_accessor :api_headers
  @@api_headers = {}

  # Layout used by the engine's views
  mattr_accessor :layout
  @@layout = "sabisu"

  # Resources exposed in the explorer (symbols, or hashes mapping a
  # resource name to an explicit column list — see Explorer)
  mattr_accessor :resources
  @@resources = []
  # NOTE(review): this reader is shadowed by self.resources_names below.
  mattr_reader :resources_names

  # Authentication
  mattr_accessor :authentication_username
  @@authentication_username = "admin"
  mattr_accessor :authentication_password
  @@authentication_password = "sekret"

  # Application name
  mattr_accessor :app_name
  @@app_name = 'Sabisu'

  # Sets the default format for requests to the api, :json, :xml
  mattr_accessor :api_format
  @@api_format = :json

  # Sets basic_auth headers
  mattr_accessor :basic_auth_username
  @@basic_auth_username = nil
  mattr_accessor :basic_auth_password
  @@basic_auth_password = nil

  # Sets the digest_auth header credentials
  mattr_accessor :digest_auth_username
  @@digest_auth_username = nil
  mattr_accessor :digest_auth_password
  @@digest_auth_password = nil

  # Resource the explorer shows first (assigned by the railtie after boot)
  mattr_accessor :default_resource

  @@configured = false

  # True once SabisuRails.setup has been called.
  def self.configured? #:nodoc:
    @@configured
  end

  # Resource names as strings; hash entries contribute their first key.
  # Memoized — changes to @@resources after the first call are ignored.
  def self.resources_names
    @@resources_names ||= @@resources.map { |resource| resource.is_a?(Hash) ? resource.keys[0].to_s : resource.to_s }
  end

  # True when both basic-auth credentials are configured.
  def self.basic_auth_header?
    @@basic_auth_username.present? && @@basic_auth_password.present?
  end

  # True when both digest-auth credentials are configured.
  def self.digest_auth_header?
    @@digest_auth_username.present? && @@digest_auth_password.present?
  end

  # Method to configure sabisu; yields the module itself so the block can
  # assign any of the mattr_accessor settings above.
  def self.setup
    @@configured = true
    yield self
  end
end
require 'sabisu_rails/railtie'
require 'sabisu_rails/engine'
require 'sabisu_rails/version'
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/generators/sabisu_rails/install_generator.rb | lib/generators/sabisu_rails/install_generator.rb | module SabisuRails
module Generators
# `rails generate sabisu_rails:install` — copies the initializer template
# and mounts the engine. Generator methods run in definition order.
class InstallGenerator < Rails::Generators::Base
  source_root File.expand_path("../templates", __FILE__)

  desc "Creates a Sabisu initializer in your application"

  # Drop the annotated configuration template into the host app.
  def copy_initializer
    template "sabisu_rails.rb", "config/initializers/sabisu_rails.rb"
  end

  # Add the engine mount point to the host app's routes file.
  def mount_sabisu_engine
    route 'mount SabisuRails::Engine => "/sabisu_rails"'
  end
end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/generators/sabisu_rails/templates/sabisu_rails.rb | lib/generators/sabisu_rails/templates/sabisu_rails.rb | # Use this module to configure the sabisu available options
# Use this module to configure the sabisu available options
SabisuRails.setup do |config|
  # Base uri for the api the explorer will send requests to
  # config.base_api_uri = nil

  # Ignored attributes for building the forms
  # config.ignored_attributes = %w{ created_at updated_at id }

  # HTTP methods
  # config.http_methods = %w{ GET POST PUT DELETE PATCH }

  # Headers to include on each request
  #
  # You can configure the api headers fairly easy by just adding the correct headers
  # config.api_headers = { "Accept" => "application/json,application/vnd.application.v1" }
  #
  # config.api_headers = {}

  # Layout configuration
  # config.layout = "sabisu"

  # Resources on the api
  # config.resources = [:products, :users...]

  # Application name (defaults to the Rails application name)
  # config.app_name = Rails.application.class.parent_name

  # Authentication credentials for the explorer UI
  # config.authentication_username = "admin"
  # config.authentication_password = "sekret"
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/version.rb | lib/sabisu_rails/version.rb | module SabisuRails
# Gem release version (pre-release). Frozen so callers cannot mutate it.
VERSION = "0.0.2.beta".freeze
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/builders.rb | lib/sabisu_rails/builders.rb | module SabisuRails
# Lazy-loaded builder classes that turn the explorer's raw form params
# into request headers and query-string hashes (used by Request).
module Builders
  autoload :Base, 'sabisu_rails/builders/base'
  autoload :UrlParamsBuilder, 'sabisu_rails/builders/url_params_builder'
  autoload :HeadersBuilder, 'sabisu_rails/builders/headers_builder'
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/helpers.rb | lib/sabisu_rails/helpers.rb | module SabisuRails
# Lazy-loaded helper mixins included by Explorer (validation/column metadata).
module Helpers
  autoload :Required, 'sabisu_rails/helpers/required'
  autoload :Type, 'sabisu_rails/helpers/type'
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/railtie.rb | lib/sabisu_rails/railtie.rb | require 'rails/railtie'
module SabisuRails
# Hooks Sabisu into the host Rails application's boot process.
class Railtie < ::Rails::Railtie
  config.eager_load_namespaces << SabisuRails

  # Fill in boot-dependent defaults once the application is initialized.
  config.after_initialize do
    SabisuRails.default_resource = SabisuRails.resources_names.first
    SabisuRails.app_name = Rails.application.class.parent_name
  end

  # Nudge users who never ran the install generator / setup block.
  config.after_initialize do
    next if SabisuRails.configured?

    warn '[Sabisu] Sabisu is not configured in the application and will use the default values.' \
         ' We recommend you to check the file just created with the installer and setup it up.'
  end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/client.rb | lib/sabisu_rails/client.rb | module SabisuRails
module Client
  module ClassMethods
    # Applies the SabisuRails configuration to an HTTParty client class.
    # Called from the including class body (see Request), so it runs at
    # load time with whatever config is set at that moment.
    # NOTE(review): Content-Type is set after config.api_headers, so it
    # presumably overrides any Content-Type given there — confirm against
    # HTTParty's header-merging behavior.
    def setup_client
      base_uri SabisuRails.base_api_uri
      headers SabisuRails.api_headers
      headers "Content-Type" => "application/#{SabisuRails.api_format}"
      basic_auth SabisuRails.basic_auth_username, SabisuRails.basic_auth_password if SabisuRails.basic_auth_header?
      digest_auth SabisuRails.digest_auth_username, SabisuRails.digest_auth_password if SabisuRails.digest_auth_header?
      format SabisuRails.api_format
    end
  end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/request.rb | lib/sabisu_rails/request.rb | module SabisuRails
# Executes the explorer-composed HTTP request through HTTParty.
class Request
  include HTTParty
  extend SabisuRails::Client::ClassMethods
  # Configure base_uri/headers/auth/format from SabisuRails at load time.
  setup_client

  # explorer    - SabisuRails::Explorer describing verb/resource/URI pattern.
  # body_params - attribute hash posted under the resource name (may be nil).
  # params      - raw explorer params; :headers and :url_params are read here.
  def initialize(explorer, body_params, params)
    @explorer = explorer
    @body_params = body_params || {}
    @params = params || {}
    @headers = SabisuRails::Builders::HeadersBuilder.new(@params[:headers]).build
    @url_params = SabisuRails::Builders::UrlParamsBuilder.new(@params[:url_params]).build
  end

  # Fires the request, e.g. self.class.get("/users/1", options).
  def response
    self.class.send(@explorer.http_method, "/#{@explorer.resource}/#{@explorer.uri_pattern}", request_options_hash)
  end

  # HTTParty options: headers and query always; a body only for verbs
  # that carry one (everything but GET/DELETE — see Explorer).
  def request_options_hash
    options = { headers: @headers, query: @url_params }
    options[:body] = resource_body_params if @explorer.require_body_params?
    options
  end

  # Wraps the non-blank body attributes under the singular resource name
  # and serializes the result to JSON.
  def resource_body_params
    body_params = {}
    body_params[@explorer.resource_name] = @body_params.reject { |k, v| v.blank? }
    body_params.to_json
  end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/engine.rb | lib/sabisu_rails/engine.rb | module SabisuRails
# Isolated Rails engine: routes, helpers and controllers are namespaced
# under SabisuRails.
class Engine < Rails::Engine
  isolate_namespace SabisuRails
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/route_recognizer.rb | lib/sabisu_rails/route_recognizer.rb | module SabisuRails
# Derives the host application's top-level route segments so the explorer
# can offer them as API resources.
# Based on @bantic's solution - https://gist.github.com/bantic/5688232#file-rails_route_recognizer-rb
# Usage: `RouteRecognizer.new.initial_path_segments`
# => e.g. ['assets','blog','team','faq','users']
class RouteRecognizer
  attr_reader :paths

  # Captures the first path segment, e.g. "users" from "/users/:id(.:format)".
  INITIAL_SEGMENT_REGEX = %r{^\/([^\/\(:]+)}
  # Segments that are never API resources.
  IGNORED_PATHS = ["assets", "rails", "sabisu_rails"]

  def initialize
    @paths = Rails.application.routes.routes.map { |route| route.path.spec.to_s }
  end

  # Unique first segments of every route path (nil non-matches dropped).
  def initial_path_segments
    @initial_path_segments ||= paths.map { |path| match_initial_path_segment(path) }.compact.uniq
  end

  # First path segment of +path+, or nil when the path has none (e.g. "/").
  def match_initial_path_segment(path)
    found = INITIAL_SEGMENT_REGEX.match(path)
    found && found[1]
  end

  # Candidate resource names: all segments minus the ignored ones.
  def resources
    initial_path_segments - IGNORED_PATHS
  end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/explorer.rb | lib/sabisu_rails/explorer.rb | module SabisuRails
# Value object backing the explorer form: the resource, URI pattern and
# HTTP verb the user picked, plus column/validation metadata for the form.
class Explorer
  include SabisuRails::Helpers::Required
  include SabisuRails::Helpers::Type

  attr_reader :resource, :uri_pattern, :http_method

  def initialize(attrs = {})
    @resource = attrs[:resource] || SabisuRails.default_resource
    @uri_pattern = attrs[:uri_pattern]
    # Default to GET; verbs are normalized to lowercase ("get", "post", ...).
    @http_method = attrs[:http_method].nil? ? "get" : attrs[:http_method].downcase
  end

  # Method to retrieve the resource class name, such as User, Product, etc
  def resource_class
    @resource_class ||= @resource.to_s.singularize.camelize.constantize
  end

  # Singular resource name, e.g. "user" for :users.
  def resource_name
    @resource_name ||= @resource.to_s.singularize
  end

  # Columns to render in the form, minus the globally ignored attributes.
  def resource_columns
    resource_custom_columns.reject { |column| SabisuRails.ignored_attributes.include? column }
  end

  # Delegate unknown messages to a fresh instance of the resource class.
  def method_missing(meth, *args, &block)
    resource_class.new.send(meth, *args, &block)
  end

  # Keep respond_to? consistent with the method_missing delegation above.
  def respond_to_missing?(meth, include_private = false)
    resource_class.new.respond_to?(meth, include_private) || super
  end

  def get?
    @http_method == "get"
  end

  def delete?
    @http_method == "delete"
  end

  # GET and DELETE requests carry no body; every other verb does.
  def require_body_params?
    !(get? || delete?)
  end

  private

  # Columns explicitly configured for this resource (when config.resources
  # contains a hash entry for it), falling back to the model's DB columns.
  def resource_custom_columns
    columns = nil
    SabisuRails.resources.each do |resource|
      if resource.is_a?(Hash) && resource[@resource.to_sym].present?
        columns = resource[@resource.to_sym].map(&:to_s)
      end
    end
    columns || resource_class.columns.map(&:name)
  end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/helpers/type.rb | lib/sabisu_rails/helpers/type.rb | module SabisuRails
module Helpers
# Column-type lookup for the form builder.
module Type
  # Returns the type symbol of the named column, or nil when unknown.
  def column_type(col)
    pair = columns.find { |name, _type| name == col }
    pair && pair[1]
  end

  # Memoized [name, type] pairs for the resource's columns.
  def columns
    @columns ||= resource_class.columns.map { |column| [column.name, column.type] }
  end
end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/helpers/required.rb | lib/sabisu_rails/helpers/required.rb | module SabisuRails
module Helpers
# Required-attribute lookup for the form builder (driven by model validators).
module Required
  # True when the resource validates the given attribute.
  def required_attribute?(attr)
    required_attributes.include?(attr.to_sym)
  end

  # Memoized list of attribute symbols mentioned by any validator.
  def required_attributes
    @required_attributes ||= resource_class.validators.flat_map(&:attributes).uniq
  end
end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/builders/url_params_builder.rb | lib/sabisu_rails/builders/url_params_builder.rb | module SabisuRails
module Builders
# Builds the query-string hash from the explorer's url_params fields;
# all behavior lives in Builders::Base.
class UrlParamsBuilder < SabisuRails::Builders::Base
end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/builders/base.rb | lib/sabisu_rails/builders/base.rb | module SabisuRails
module Builders
# Shared builder: collapses the explorer's nested key/value form params
# into a flat hash suitable for HTTParty headers or query options.
class Base
  # params - hash of { index => { "key" => ..., "value" => ... } } entries
  #          posted by the form; nil is treated as no params.
  def initialize(params)
    @params = params || {}
    sanitize_params
  end

  # Flattens each nested entry into { key => value }.
  def build
    @sanitized_params.each_with_object({}) do |(_index, entry), acc|
      key, value = entry.values
      acc[key] = value
    end
  end

  private

  # Drops entries whose value is blank (relies on ActiveSupport's #blank?).
  def sanitize_params
    @sanitized_params = @params.reject { |_key, value| value.blank? }
  end
end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/lib/sabisu_rails/builders/headers_builder.rb | lib/sabisu_rails/builders/headers_builder.rb | module SabisuRails
module Builders
# Builds the request-headers hash from the explorer's headers fields;
# all behavior lives in Builders::Base.
class HeadersBuilder < SabisuRails::Builders::Base
end
end
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
kurenn/sabisu-rails | https://github.com/kurenn/sabisu-rails/blob/e55dfbd679ac3ec2401178d5f809773f09373bcf/config/routes.rb | config/routes.rb | SabisuRails::Engine.routes.draw do
get "explorer", to: "explorer#index" # Only engine route: the explorer UI.
end
| ruby | MIT | e55dfbd679ac3ec2401178d5f809773f09373bcf | 2026-01-04T17:52:02.123775Z | false |
dungbanguyen/adminlte-rails | https://github.com/dungbanguyen/adminlte-rails/blob/586dd13a68cdb557fe811cec95819f6401bd59c3/lib/adminlte-rails.rb | lib/adminlte-rails.rb | require 'adminlte-rails/version'
module AdminLTE
  module Rails
    # Loading the engine is the gem's only behavior; presumably it exposes
    # the AdminLTE assets to the host app via the asset pipeline.
    require 'adminlte-rails/engine'
  end
end
| ruby | MIT | 586dd13a68cdb557fe811cec95819f6401bd59c3 | 2026-01-04T17:51:54.930115Z | false |
dungbanguyen/adminlte-rails | https://github.com/dungbanguyen/adminlte-rails/blob/586dd13a68cdb557fe811cec95819f6401bd59c3/lib/adminlte-rails/version.rb | lib/adminlte-rails/version.rb | module AdminLTE
module Rails
  # Gem version; frozen so callers cannot mutate the constant.
  VERSION = '2.3.2'.freeze
end
end
| ruby | MIT | 586dd13a68cdb557fe811cec95819f6401bd59c3 | 2026-01-04T17:51:54.930115Z | false |
dungbanguyen/adminlte-rails | https://github.com/dungbanguyen/adminlte-rails/blob/586dd13a68cdb557fe811cec95819f6401bd59c3/lib/adminlte-rails/engine.rb | lib/adminlte-rails/engine.rb | module AdminLTE
module Rails
  # Empty engine subclass: its presence on the load path is what hooks the
  # gem's asset directories into the host application.
  class Engine < ::Rails::Engine
  end
end
end
| ruby | MIT | 586dd13a68cdb557fe811cec95819f6401bd59c3 | 2026-01-04T17:51:54.930115Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/test/stress.rb | test/stress.rb | #!/usr/bin/env ruby
require 'bundler/setup'
require 'traffic_jam'
require 'json'
require 'redis'
require 'optparse'
# Defaults for the stress run; each can be overridden from the CLI below.
options = {
  forks: 30,
  actions: 1000,
  keys: 5,
  limit: 100,
  redis_uri: 'redis://127.0.0.1:6379',
  strategy: 'Limit'
}

# CLI flags map one-to-one onto the defaults above.
OptionParser.new do |opts|
  opts.banner = "Usage: #{__FILE__} <OPTIONS>"
  opts.on( "-f", "--forks FORKS", "How many processes to fork" ) { |i| options[:forks] = i.to_i }
  opts.on( "-n", "--actions N", "How many increments each process should perform" ) { |i| options[:actions] = i.to_i }
  opts.on( "-k", "--keys KEYS", "How many keys a process should run through" ) { |i| options[:keys] = i.to_i }
  opts.on( "-l", "--limit LIMIT", "Actions per second limit" ) { |i| options[:limit] = i.to_i }
  opts.on( "-s", "--strategy STRATEGY", "Name of Limit subclass to apply" ) { |klass| options[:strategy] = klass }
  opts.on( "-u", "--redis-uri URI", "Redis URI" ) { |uri| options[:redis_uri] = uri }
  opts.on( "-h", "--help", "Display this usage summary" ) { puts opts; exit }
end.parse!
# Forks child processes that hammer the rate limiter and pipe their
# per-key tallies back to the parent as JSON.
class Runner
  attr_accessor :options

  def initialize(options)
    @options = options
  end

  # Performs options[:actions] increments against randomly chosen keys.
  # Returns { key_index => [successes, failures] }.
  def run
    results = Hash[ (0...options[:keys]).map { |i| [ i, [ 0, 0 ] ] } ]
    # Resolve the limit strategy class by name, e.g. TrafficJam::Limit.
    limit_class = Object.const_get("TrafficJam::#{options[:strategy]}")
    options[:actions].times do
      i = results.keys.sample
      if limit_class.new(:test, "val#{i}", max: options[:limit], period: 1).increment
        results[i][0] += 1
      else
        results[i][1] += 1
      end
    end
    results
  end

  # Forks a worker and returns the read end of a pipe that will carry its
  # JSON-encoded results. The child builds its own Redis connection
  # post-fork so no socket is shared with the parent.
  def launch
    rd, wr = IO.pipe
    Kernel.fork do
      GC.copy_on_write_friendly = true if ( GC.copy_on_write_friendly? rescue false )
      rd.close
      TrafficJam.configure do |config|
        config.redis = Redis.new(url: options[:redis_uri])
      end
      results = run
      wr.write(JSON.generate(results))
      wr.close
    end
    wr.close
    rd
  end
end
# main: reset Redis, fork the workers, merge their tallies, and report.
puts "[#{Process.pid}] Starting with #{options.inspect}"
redis = Redis.new(url: options[:redis_uri])
redis.flushall # clean before run
redis.script(:flush) # clean scripts before run
redis.disconnect! # don't keep when forking
start = Time.now
pipes = options[:forks].times.map do
  Runner.new(options).launch
end
Process.waitall
elapsed = (Time.now - start).to_f
# Merge each child's JSON tally into a single per-key result table.
results = Hash[ (0...options[:keys]).map { |i| [ i, [ 0, 0 ] ] } ]
pipes.each do |pipe|
  proc_results = JSON.parse(pipe.read)
  pipe.close
  proc_results.each do |key, values|
    results[key.to_i][0] += values[0]
    results[key.to_i][1] += values[1]
  end
end
puts "TIME: %f seconds" % elapsed
results.each do |key, values|
  puts "KEY %-2d: Successes %-4d; Failures %-4d" % [key, values[0], values[1]]
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam_configuration_spec.rb | spec/traffic_jam_configuration_spec.rb | require_relative 'spec_helper'
# Specs for TrafficJam::Configuration (registration lookups and defaults).
describe TrafficJam do
  include RedisHelper

  let(:config) { TrafficJam::Configuration.new }

  before { config.register(:test, 3, 60) }

  describe 'constructor' do
    it "should take default options" do
      config = TrafficJam::Configuration.new(key_prefix: 'hello')
      assert_equal 'hello', config.key_prefix
    end
  end

  describe '::max' do
    it "should look up the registered max for the action" do
      assert_equal 3, config.max(:test)
    end
  end

  describe '::period' do
    # Description fixed: this example checks the period, not the max.
    it "should look up the registered period for the action" do
      assert_equal 60, config.period(:test)
    end
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam_spec.rb | spec/traffic_jam_spec.rb | require_relative 'spec_helper'
# Specs for the TrafficJam module-level convenience API (registered limits).
describe TrafficJam do
  include RedisHelper

  TrafficJam.configure do |config|
    config.redis = RedisHelper.redis
    config.register(:test, 3, 60)
    config.register(:test4, 4, 60)
  end

  let(:value) { "user1" }

  describe '::limit' do
    it "should return limit instance with registered limits" do
      limit = TrafficJam.limit(:test, value)
      assert_equal :test, limit.action
      assert_equal value, limit.value
      assert_equal 3, limit.max
      assert_equal 60, limit.period
    end

    it "should raise error if not found" do
      assert_raises(TrafficJam::LimitNotFound) do
        TrafficJam.limit(:test2, value)
      end
    end
  end

  describe '::reset_all' do
    # Unused `limit =` local assignments removed throughout this block.
    it "should reset all rate limits" do
      TrafficJam.increment!(:test, value)
      TrafficJam.increment!(:test4, value)
      assert_equal 1, TrafficJam.used(:test, value)
      assert_equal 1, TrafficJam.used(:test4, value)
      TrafficJam.reset_all
      assert_equal 0, TrafficJam.used(:test, value)
      assert_equal 0, TrafficJam.used(:test4, value)
    end

    it "should reset all rate limits for one action" do
      TrafficJam.increment!(:test, value)
      TrafficJam.increment!(:test4, value)
      assert_equal 1, TrafficJam.used(:test, value)
      assert_equal 1, TrafficJam.used(:test4, value)
      TrafficJam.reset_all(action: :test)
      assert_equal 0, TrafficJam.used(:test, value)
      assert_equal 1, TrafficJam.used(:test4, value)
    end
  end

  describe 'class helpers' do
    before { TrafficJam.config.register(:test, 3, 60) }
    let(:value) { "user1" }

    it "should call methods with registered limits" do
      TrafficJam.increment(:test, value, 1)
      assert_equal 1, TrafficJam.used(:test, value)
    end

    it "should raise error if limit not found" do
      assert_raises(TrafficJam::LimitNotFound) do
        TrafficJam.increment(:test2, value, 1)
      end
    end
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam_limit_group_spec.rb | spec/traffic_jam_limit_group_spec.rb | require_relative 'spec_helper'
# Specs for TrafficJam::LimitGroup: a group succeeds or fails atomically
# across all member limits. Typos in example descriptions fixed
# ("ingnore_nil_values", "should false", "should be increment", "an limit").
describe TrafficJam do
  include RedisHelper

  TrafficJam.configure do |config|
    config.redis = RedisHelper.redis
  end

  let(:limit1) do
    TrafficJam::Limit.new(:test, "user1", max: 3, period: 60 * 60)
  end
  let(:limit2) do
    TrafficJam::Limit.new(:test, "user2", max: 2, period: 60 * 60)
  end
  let(:limit_group) { TrafficJam::LimitGroup.new([limit1, limit2]) }

  describe :constructor do
    it "should accept an array of limits" do
      limit_group = TrafficJam::LimitGroup.new([limit1, limit2])
      assert_equal 2, limit_group.limits.size
    end

    it "should accept a splat of limits" do
      limit_group = TrafficJam::LimitGroup.new(limit1, limit2)
      assert_equal 2, limit_group.limits.size
    end

    it "should accept no arguments" do
      limit_group = TrafficJam::LimitGroup.new
      assert_equal 0, limit_group.limits.size
    end

    describe "when ignore_nil_values flag is set" do
      let(:limit2) do
        TrafficJam::Limit.new(:test, nil, max: 2, period: 60 * 60)
      end
      let(:limit_group) do
        TrafficJam::LimitGroup.new([limit1, limit2], ignore_nil_values: true)
      end

      it "should drop limits where value is nil" do
        assert_equal 1, limit_group.limits.size
      end
    end
  end

  describe :increment do
    it "should be true when no limits are exceeded" do
      assert limit_group.increment(2)
    end

    it "should be false when any limit is exceeded" do
      assert !limit_group.increment(3)
    end

    it "should increment all limits when none are exceeded" do
      limit_group.increment(2)
      assert_equal 2, limit1.used
      assert_equal 2, limit2.used
    end

    it "should be a no-op when limit would be exceeded" do
      limit_group.increment(3)
      assert_equal 0, limit1.used
      assert_equal 0, limit2.used
    end
  end

  describe :increment! do
    it "should increment all limits when none are exceeded" do
      limit_group.increment!(2)
      assert_equal 2, limit1.used
      assert_equal 2, limit2.used
    end

    it "should be a no-op when limit would be exceeded" do
      assert_raises(TrafficJam::LimitExceededError) do
        limit_group.increment!(3)
      end
      assert_equal 0, limit1.used
      assert_equal 0, limit2.used
    end

    describe "when group contains other groups" do
      let(:meta_group) { TrafficJam::LimitGroup.new(limit_group) }

      it "should raise error with limit instance" do
        exception = assert_raises(TrafficJam::LimitExceededError) do
          meta_group.increment!(3)
        end
        assert_equal limit2, exception.limit
      end
    end
  end

  describe :exceeded? do
    it "should be true when a limit would be exceeded" do
      limit_group.increment(2)
      assert limit_group.exceeded?(1)
    end

    it "should be false when amount would not exceed any limit" do
      limit_group.increment(1)
      assert !limit_group.exceeded?(1)
    end
  end

  describe :limit_exceeded do
    it "should be the limit that would exceed limit" do
      limit_group.increment(2)
      assert_equal limit2, limit_group.limit_exceeded(1)
    end

    it "should be nil when amount would not exceed limit" do
      limit_group.increment(1)
      assert_nil limit_group.limit_exceeded(1)
    end
  end

  describe :remaining do
    it "should be the minimum amount remaining of all limits" do
      assert_equal 2, limit_group.remaining
      limit1.increment!(2)
      assert_equal 1, limit_group.remaining
    end
  end

  describe :reset do
    it "should reset all limits to 0" do
      limit1.increment(2)
      limit2.increment(1)
      limit_group.reset
      assert_equal 0, limit1.used
      assert_equal 0, limit2.used
    end
  end

  describe :decrement do
    it "should reduce the amount used" do
      limit_group.increment(2)
      limit_group.decrement(1)
      assert_equal 1, limit1.used
      assert_equal 1, limit2.used
    end
  end

  describe :<< do
    let(:limit_group) { TrafficJam::LimitGroup.new([limit1]) }

    it "should add limit to the group" do
      assert_equal 1, limit_group.limits.size
      limit_group << limit2
      assert_equal 2, limit_group.limits.size
    end

    describe "when ignore_nil_values flag is set" do
      let(:limit2) do
        TrafficJam::Limit.new(:test, nil, max: 2, period: 60 * 60)
      end
      let(:limit_group) do
        TrafficJam::LimitGroup.new([limit1], ignore_nil_values: true)
      end

      it "should drop limits where value is nil" do
        assert_equal 1, limit_group.limits.size
        limit_group << limit2
        assert_equal 1, limit_group.limits.size
      end
    end
  end

  describe :flatten do
    let(:meta_group) { TrafficJam::LimitGroup.new(limit_group) }

    it "should be a flattened list of limits" do
      assert_equal [limit1, limit2], meta_group.flatten
    end
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam_limit_spec.rb | spec/traffic_jam_limit_spec.rb | require_relative 'spec_helper'
# Specs for TrafficJam::Limit, the core sliding-window rate limit.
# Typo fixed in one description ("raise limit" -> "rate limit").
describe TrafficJam do
  include RedisHelper

  TrafficJam.configure do |config|
    config.redis = RedisHelper.redis
  end

  let(:period) { 60 * 60 }
  let(:limit) do
    TrafficJam::Limit.new(:test, "user1", max: 3, period: 60 * 60)
  end

  describe :initialize do
    it 'should raise an argument error on missing max' do
      assert_raises(ArgumentError) do
        TrafficJam::Limit.new(:test, "user1", period: 60 * 60)
      end
    end

    it 'should raise an argument error on missing period' do
      assert_raises(ArgumentError) do
        TrafficJam::Limit.new(:test, "user1", max: 3)
      end
    end
  end

  describe :increment do
    after do
      Spy.teardown
    end

    it "should be true when rate limit is not exceeded" do
      assert limit.increment(1)
    end

    it "should be false when rate limit is exceeded" do
      assert !limit.increment(4)
      assert limit.increment(1)
      assert limit.increment(2)
      assert !limit.increment(1)
    end

    it "should raise an argument error if given a float" do
      assert_raises(ArgumentError) do
        limit.increment(1.5)
      end
    end

    it "should be a no-op when limit would be exceeded" do
      limit.increment(2)
      assert !limit.increment(2)
      assert limit.increment(1)
    end

    it "should be true when sufficient time passes" do
      assert limit.increment(3)
      Timecop.travel(period / 2)
      assert limit.increment(1)
      Timecop.travel(period)
      assert limit.increment(3)
    end

    it "should only call eval once" do
      eval_spy = Spy.on(RedisHelper.redis, :eval).and_call_through
      limit.increment(1)
      limit.increment(1)
      limit.increment(1)
      assert_equal 1, eval_spy.calls.count
    end

    describe "when increment is processed for a past time" do
      it "should discount the past increment by the time drift" do
        time = Time.now
        limit.increment(1, time: time)
        limit.increment(2, time: time - period / 3)
        assert_equal 2, limit.used
      end
    end

    describe "when decrement is processed for a past time" do
      it "should discount the past decrement by the time drift" do
        time = Time.now
        limit.increment(2, time: time - period / 3)
        limit.increment(2, time: time)
        assert_equal 3, limit.used
        limit.decrement(2, time: time - period / 3)
        assert_equal 2, limit.used
      end
    end

    describe "when max is zero" do
      let(:limit) do
        TrafficJam::Limit.new(:test, "user1", max: 0, period: 60 * 60)
      end

      it "should be false for any positive amount" do
        assert !limit.increment
      end
    end

    describe "when max is changed to a lower amount" do
      it "should still expire after period" do
        limit = TrafficJam::Limit.new(:test, "user1", max: 4, period: 60)
        limit.increment!(4)
        limit = TrafficJam::Limit.new(:test, "user1", max: 2, period: 60)
        limit.increment!(0)
        # NOTE(review): travels the outer `period` (3600s), far past the 60s
        # limit period — confirm that is the intended wait.
        Timecop.travel(period)
        assert_equal 0, limit.used
      end
    end
  end

  describe :increment! do
    it "should not raise error when rate limit is not exceeded" do
      limit.increment!(1)
    end

    it "should raise error when rate limit is exceeded" do
      limit.increment!(3)
      assert_raises(TrafficJam::LimitExceededError) do
        limit.increment!(1)
      end
    end
  end

  describe :exceeded? do
    it "should be true when amount would exceed limit" do
      limit.increment(2)
      assert limit.exceeded?(2)
    end

    it "should be false when amount would not exceed limit" do
      limit.increment(2)
      assert !limit.exceeded?(1)
    end
  end

  describe :used do
    it "should be 0 when there has been no incrementing" do
      assert_equal 0, limit.used
    end

    it "should be the amount used" do
      limit.increment(1)
      assert_equal 1, limit.used
    end

    it "should decrease over time" do
      limit.increment(2)
      Timecop.travel(period / 2)
      assert_equal 1, limit.used
    end

    it "should not exceed maximum when limit changes" do
      limit.increment!(3)
      limit2 = TrafficJam::Limit.new(:test, "user1", max: 2, period: 60 * 60)
      assert_equal 2, limit2.used
    end
  end

  describe :reset do
    it "should reset current count to 0" do
      limit.increment(3)
      assert_equal 3, limit.used
      limit.reset
      assert_equal 0, limit.used
    end
  end

  describe :decrement do
    it "should reduce the amount used" do
      limit.increment(3)
      limit.decrement(2)
      assert_equal 1, limit.used
    end

    it "should not lower amount used below 0" do
      limit.decrement(2)
      assert !limit.increment(4)
      assert_equal 0, limit.used
    end
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/spec_helper.rb | spec/spec_helper.rb | require 'redis'
require 'timecop'
require 'simplecov'
require 'minitest/autorun'
require 'spy/integration'
# Coverage must start before the code under test is required below.
SimpleCov.start :test_frameworks
if ENV['CI']
  require 'coveralls'
  Coveralls.wear!
end
require 'traffic_jam'
# Shared Redis connection + per-test cleanup for the suite. Uses a
# class-level instance variable instead of a @@class variable (class
# variables leak across any module/class hierarchy that reopens them).
module RedisHelper
  @redis = Redis.new(url: ENV['REDIS_URI'] || 'redis://localhost:6379')

  class << self
    # The suite-wide Redis connection.
    attr_reader :redis
  end

  # Minitest hook: wipe data and cached Lua scripts before every test.
  def setup
    super
    RedisHelper.redis.flushdb
    RedisHelper.redis.script(:flush)
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam/lifetime_limit_spec.rb | spec/traffic_jam/lifetime_limit_spec.rb | require_relative '../spec_helper'
# Specs for TrafficJam::LifetimeLimit: a limit with no period that never
# resets or decays over time.
describe TrafficJam::LifetimeLimit do
  include RedisHelper

  TrafficJam.configure do |config|
    config.redis = RedisHelper.redis
  end

  let(:max) { 3 }
  let(:limit) do
    TrafficJam::LifetimeLimit.new(:test, 'user1', max: max)
  end

  after do
    Spy.teardown
  end

  describe :increment do
    it 'should be true when rate limit is not exceeded' do
      assert limit.increment(1)
    end

    it 'should be false when rate limit is exceeded' do
      assert limit.increment(1)
      assert limit.increment(2)
      assert !limit.increment(1)
    end

    it 'should be a no-op when limit would be exceeded' do
      limit.increment(2)
      assert !limit.increment(2)
      assert limit.increment(1)
    end

    it 'should be false when any time passes' do
      assert limit.increment(3)
      Timecop.travel(4000)
      assert !limit.increment(1)
    end

    it 'should only call eval once' do
      eval_spy = Spy.on(RedisHelper.redis, :eval).and_call_through
      limit.increment(1)
      limit.increment(1)
      limit.increment(1)
      assert_equal 1, eval_spy.calls.count
    end

    describe 'when max is changed to a lower amount' do
      it 'should never expire' do
        limit = TrafficJam::LifetimeLimit.new(:test, 'user1', max: 4)
        limit.increment!(4)
        limit = TrafficJam::LifetimeLimit.new(:test, 'user1', max: 2)
        assert !limit.increment(0)
        assert_equal 2, limit.used
      end
    end
  end

  describe :used do
    it 'should be 0 when there has been no incrementing' do
      assert_equal 0, limit.used
    end

    it 'should be the amount used' do
      limit.increment(1)
      assert_equal 1, limit.used
    end

    it 'should not decrease over time' do
      limit.increment(2)
      Timecop.travel(60 / 2)
      assert_equal 2, limit.used
    end

    it 'should not exceed maximum when limit changes' do
      limit.increment!(3)
      limit2 = TrafficJam::LifetimeLimit.new(:test, 'user1', max: 2)
      assert_equal 2, limit2.used
    end
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam/gcra_limit_spec.rb | spec/traffic_jam/gcra_limit_spec.rb | require_relative '../spec_helper'
describe TrafficJam do
include RedisHelper
TrafficJam.configure do |config|
config.redis = RedisHelper.redis
end
let(:period) { 0.1 }
let(:limit) do
TrafficJam::GCRALimit.new(:test, "user1", max: 3, period: period)
end
describe :increment do
after do
Spy.teardown
end
it "should be true when rate limit is not exceeded" do
assert limit.increment(1)
end
it "should be false when rate limit is exceeded" do
  assert !limit.increment(4)
  assert limit.increment(1)
  assert limit.increment(2)
  assert !limit.increment(1)
end
it "should raise an argument error if given a float" do
assert_raises(ArgumentError) do
limit.increment(1.5)
end
end
it "should be a no-op when limit would be exceeded" do
limit.increment(2)
assert !limit.increment(2)
assert limit.increment(1)
end
it "should be true when sufficient time passes" do
assert limit.increment(3)
sleep(period / 2)
assert limit.increment(1)
sleep(period * 2)
assert limit.increment(3)
end
describe "when max is zero" do
let(:limit) do
TrafficJam::GCRALimit.new(:test, "user1", max: 0, period: period)
end
it "should be false for any positive amount" do
assert !limit.increment
end
end
end
describe :used do
it "should be 0 when there has been no incrementing" do
assert_equal 0, limit.used
end
it "should be the amount used" do
limit.increment(1)
assert_equal 1, limit.used
end
it "should decrease over time" do
limit.increment(2)
sleep(period / 2)
assert_equal 1, limit.used
end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/spec/traffic_jam/rolling_limit_spec.rb | spec/traffic_jam/rolling_limit_spec.rb | require_relative '../spec_helper'
describe TrafficJam::RollingLimit do
include RedisHelper
TrafficJam.configure do |config|
config.redis = RedisHelper.redis
end
let(:max) { 3 }
let(:limit) do
TrafficJam::RollingLimit.new(:test, 'user1', max: max, period: period)
end
after do
Spy.teardown
end
describe :increment do
let(:period) { 60 * 60 }
it 'should raise an argument error if given a float' do
assert_raises(ArgumentError) do
limit.increment(1.5)
end
end
describe 'when max is zero' do
let(:max) { 0 }
it 'should be false for any amount' do
assert !limit.increment
end
end
end
describe 'one time' do
let(:period) { 0 }
describe :increment do
it 'should be true when rate limit is not exceeded' do
assert limit.increment(1)
end
it 'should be false when rate limit is exceeded' do
  assert limit.increment(3)
  assert limit.increment(3)
  assert !limit.increment(4)
end
it 'should never call eval' do
eval_spy = Spy.on(RedisHelper.redis, :eval).and_call_through
limit.increment(1)
assert_equal 0, eval_spy.calls.count
end
end
describe :used do
it 'should be 0' do
assert_equal 0, limit.used
limit.increment(1)
assert_equal 0, limit.used
end
end
end
describe 'timeframe' do
let(:period) { 60 * 60 }
describe :increment do
it 'should be true when limit is not exceeded' do
assert limit.increment(1)
end
it 'should be false when limit is exceeded' do
assert limit.increment(1)
assert limit.increment(2)
assert !limit.increment(1)
end
it 'should be a no-op when limit would be exceeded' do
assert limit.increment(2)
assert !limit.increment(2)
assert limit.increment(1)
end
it 'should be true when sufficient time passes' do
assert limit.increment(3)
Timecop.travel(period / 2)
assert !limit.increment(1)
Timecop.travel(period)
assert limit.increment(3)
end
describe 'when max is zero' do
let(:max) { 0 }
it 'should be false for any positive amount' do
assert !limit.increment
end
end
describe 'when max is changed to a lower amount' do
it 'should still expire after period' do
limit = TrafficJam::RollingLimit.new(
:test, 'user1', max: 4, period: period
)
limit.increment!(4)
limit = TrafficJam::RollingLimit.new(
:test, 'user1', max: 2, period: period
)
assert !limit.increment
Timecop.travel(period + 1)
assert_equal 0, limit.used
end
end
end
describe :used do
it 'should be 0 when there has been no incrementing' do
assert_equal 0, limit.used
end
it 'should be the amount used' do
limit.increment(1)
assert_equal 1, limit.used
end
it 'should not decrease over time' do
limit.increment(2)
Timecop.travel(period / 2)
assert_equal 2, limit.used
end
it 'should not exceed maximum when limit changes' do
limit.increment!(3)
limit2 = TrafficJam::RollingLimit.new(
:test, 'user1', max: 2, period: period
)
assert_equal 2, limit2.used
end
end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam.rb | lib/traffic_jam.rb | require 'ostruct'
require 'digest/md5'
require_relative 'traffic_jam/configuration'
require_relative 'traffic_jam/errors'
require_relative 'traffic_jam/gcra_limit'
require_relative 'traffic_jam/lifetime_limit'
require_relative 'traffic_jam/limit'
require_relative 'traffic_jam/limit_group'
require_relative 'traffic_jam/rolling_limit'
module TrafficJam
  include Errors

  # Library-wide configuration object, mutated through TrafficJam.configure.
  @config = Configuration.new(
    key_prefix: 'traffic_jam',
    hash_length: 22
  )

  class << self
    attr_reader :config

    # Configure library in a block.
    #
    # @yield [TrafficJam::Configuration]
    def configure
      yield config
    end

    # Create limit with registered max/period.
    #
    # @param action [Symbol] registered action name
    # @param value [String] limit target value
    # @return [TrafficJam::Limit]
    # @raise [TrafficJam::LimitNotFound] if action is not registered
    def limit(action, value)
      limits = config.limits(action.to_sym)
      TrafficJam::Limit.new(action, value, **limits)
    end

    # Reset all limits associated with the given action. If action is omitted
    # or nil, this will reset all limits.
    #
    # @note Not recommended for use in production: KEYS is O(N) and blocks
    #   the Redis server while it scans the keyspace.
    # @param action [Symbol] action to reset limits for
    # @return [nil]
    def reset_all(action: nil)
      prefix =
        if action.nil?
          "#{config.key_prefix}:*"
        else
          "#{config.key_prefix}:#{action}:*"
        end
      config.redis.keys(prefix).each do |key|
        config.redis.del(key)
      end
      nil
    end

    # Module-level convenience delegators:
    #   TrafficJam.increment(action, value, ...) ==
    #   TrafficJam.limit(action, value).increment(...)
    # Keyword arguments (e.g. time:) are forwarded explicitly with **kwargs,
    # which is required on Ruby 3 where keywords are no longer folded into
    # *args automatically.
    %w( exceeded? increment increment! decrement reset used remaining )
      .each do |method|
      define_method(method) do |action, value, *args, **kwargs|
        limit(action, value).send(method, *args, **kwargs)
      end
    end
  end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/gcra_limit.rb | lib/traffic_jam/gcra_limit.rb | require_relative 'limit'
require_relative 'scripts'
module TrafficJam
# GCRA (Generic Cell Rate Algorithm) is a leaky bucket type rate limiting
# algorithm. GCRA works by storing a key in Redis with a ms-precision expiry
# representing the time that the limit will be completely reset. Each
# increment operation converts the increment amount into the number of
# milliseconds to be added to the expiry.
#
# When a request comes in, we take the existing expiry value, subtract a fixed
# amount representing the limit’s total burst capacity from it, and compare
# the result to the current time. This result represents the next time to
# allow a request. If it’s in the past, we allow the incoming request, and if
# it’s in the future, we don’t. After a successful request, a new expiry is
# calculated. (see https://brandur.org/rate-limiting)
#
# This limit type does not support decrements or changing the max value without
# a complete reset. This means that if the period or max value for an
# action/value key changes, the used and remaining values cannot be preserved.
#
# Example: Limit is 5 per 10 seconds.
# An increment by 1 first sets the key to expire in 2s.
# Another immediate increment by 4 sets the expiry to 10s.
# Subsequent increments fail until clock time catches up to expiry
class GCRALimit < Limit
  # Increment the amount used by the given number. Does not perform increment
  # if the operation would exceed the limit. Returns whether the operation
  # was successful.
  #
  # @param amount [Integer] amount to increment by
  # @param time [Time] time is ignored
  # @return [Boolean] true if increment succeeded and false if incrementing
  #   would exceed the limit
  # @raise [ArgumentError] if amount is negative or not an integer
  def increment(amount = 1, time: Time.now)
    return true if amount == 0
    return false if max == 0
    raise ArgumentError.new("Amount must be positive") if amount < 0
    if amount != amount.to_i
      raise ArgumentError.new("Amount must be an integer")
    end
    return false if amount > max

    # Each unit of amount is worth (period / max) seconds of expiry time on
    # the key; the Lua script adds incrby ms to the key's TTL and rejects the
    # operation when the TTL would exceed one full period.
    incrby = (period * 1000 * amount / max).to_i
    argv = [incrby, period * 1000]
    result =
      begin
        redis.evalsha(
          Scripts::INCREMENT_GCRA_HASH, keys: [key], argv: argv)
      rescue Redis::CommandError
        # Script not cached on the server yet; EVAL also primes the cache.
        redis.eval(Scripts::INCREMENT_GCRA, keys: [key], argv: argv)
      end

    case result
    when 0
      true
    when -1
      raise Errors::InvalidKeyError, "Redis key #{key} has no expire time set"
    when -2
      false
    else
      raise Errors::UnknownReturnValue,
            "Received unexpected return value #{result} from " \
            "increment_gcra eval"
    end
  end

  # Decrement is not supported by the GCRA algorithm.
  #
  # @param amount [Integer] amount to decrement by (unused)
  # @param time [Time] time is ignored
  # @raise [NotImplementedError] decrement is not defined for GCRALimit
  def decrement(_amount = 1, time: Time.now)
    raise NotImplementedError, "decrement is not defined for GCRALimit"
  end

  # Return amount of limit used, taking time drift into account.
  #
  # @return [Integer] amount used
  # @raise [TrafficJam::Errors::InvalidKeyError] if the key exists without a TTL
  def used
    return 0 if max.zero?

    expiry = redis.pttl(key)
    case expiry
    when -1 # key exists but has no associated expire
      raise Errors::InvalidKeyError, "Redis key #{key} has no expire time set"
    when -2 # key does not exist
      return 0
    end

    # Convert the remaining TTL (ms) back into limit units, rounding up.
    (max * expiry / (period * 1000.0)).ceil
  end

  # GCRA keys live under their own sub-prefix ("…:s", kept for backward
  # compatibility with the old SimpleLimit name).
  def key_prefix
    "#{config.key_prefix}:s"
  end
end
# alias for backward compatibility
SimpleLimit = GCRALimit
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/limit.rb | lib/traffic_jam/limit.rb | require_relative 'scripts'
module TrafficJam
# This class represents a rate limit on an action, value pair. For example, if
# rate limiting the number of requests per IP address, the action could be
# +:requests+ and the value would be the IP address. The class exposes atomic
# increment operations and allows querying of the current amount used and
# amount remaining.
class Limit
  # @!attribute [r] action
  #   @return [Symbol] the name of the action being rate limited.
  # @!attribute [r] value
  #   @return [String] the target of the limit. The value should be a string
  #     or convertible to a distinct string when +to_s+ is called. If you
  #     would like to use objects that can be converted to a unique string,
  #     like a database-mapped object with an ID, you can implement
  #     +to_rate_limit_value+ on the object, which returns a deterministic
  #     string unique to that object.
  # @!attribute [r] max
  #   @return [Integer] the integral cap of the limit amount.
  # @!attribute [r] period
  #   @return [Integer] the duration of the limit in seconds. Regardless of
  #     the current amount used, after the period passes, the amount used will
  #     be 0.
  attr_reader :action, :max, :period, :value

  # Constructor takes an action name as a symbol, a maximum cap, and the
  # period of limit. +max+ and +period+ are required keyword arguments.
  #
  # @param action [Symbol] action name
  # @param value [String] limit target value
  # @param max [Integer] required limit maximum
  # @param period [Integer] required limit period in seconds
  # @raise [ArgumentError] if max or period is nil
  def initialize(action, value, max: nil, period: nil)
    raise ArgumentError.new('Max is required') if max.nil?
    raise ArgumentError.new('Period is required') if period.nil?
    @action, @value, @max, @period = action, value, max, period
  end

  # Return whether incrementing by the given amount would exceed limit. Does
  # not change amount used.
  #
  # NOTE(review): this is a read-then-act check and is not atomic with any
  # subsequent #increment; #increment itself is the authoritative guard.
  #
  # @param amount [Integer]
  # @return [Boolean]
  def exceeded?(amount = 1)
    used + amount > max
  end

  # Return itself if incrementing by the given amount would exceed limit,
  # otherwise nil. Does not change amount used.
  #
  # @return [TrafficJam::Limit, nil]
  def limit_exceeded(amount = 1)
    self if exceeded?(amount)
  end

  # Increment the amount used by the given number. Does not perform increment
  # if the operation would exceed the limit. Returns whether the operation was
  # successful. Time of increment can be specified optionally with a keyword
  # argument, which is useful for rolling back with a decrement.
  #
  # @param amount [Integer] amount to increment by (negative values decrement)
  # @param time [Time] time when increment occurs
  # @return [Boolean] true if increment succeeded and false if incrementing
  #   would exceed the limit
  # @raise [ArgumentError] if amount is not an integer
  def increment(amount = 1, time: Time.now)
    # With max 0 a non-positive amount trivially "succeeds" as a no-op.
    return amount <= 0 if max.zero?
    if amount != amount.to_i
      raise ArgumentError.new("Amount must be an integer")
    end

    # The script works in millisecond precision.
    timestamp = (time.to_f * 1000).to_i
    argv = [timestamp, amount.to_i, max, period * 1000]
    result =
      begin
        redis.evalsha(
          Scripts::INCREMENT_SCRIPT_HASH, keys: [key], argv: argv)
      rescue Redis::CommandError
        # Script not cached on the server yet; EVAL also primes the cache.
        redis.eval(Scripts::INCREMENT_SCRIPT, keys: [key], argv: argv)
      end

    # Success is signalled by the truthiness of the script's return value
    # (see scripts/increment.lua).
    !!result
  end

  # Increment the amount used by the given number. Does not perform increment
  # if the operation would exceed the limit. Raises an exception if the
  # operation is unsuccessful. Time of increment can be specified optionally
  # with a keyword argument, which is useful for rolling back with a
  # decrement.
  #
  # @param amount [Integer] amount to increment by
  # @param time [Time] time when increment occurs
  # @return [nil]
  # @raise [TrafficJam::LimitExceededError] if incrementing would exceed the
  #   limit
  def increment!(amount = 1, time: Time.now)
    if !increment(amount, time: time)
      raise TrafficJam::LimitExceededError.new(self)
    end
  end

  # Decrement the amount used by the given number. Time of decrement can be
  # specified optionally with a keyword argument, which is useful for rolling
  # back an increment operation at a certain time.
  #
  # @param amount [Integer] amount to decrement by
  # @param time [Time] time when decrement occurs
  # @return [true]
  def decrement(amount = 1, time: Time.now)
    increment(-amount, time: time)
  end

  # Reset amount used to 0 by deleting the underlying Redis key.
  #
  # @return [nil]
  def reset
    redis.del(key)
    nil
  end

  # Return amount of limit used, taking time drift into account.
  #
  # @return [Integer] amount used
  def used
    return 0 if max.zero?
    timestamp, amount = redis.hmget(key, 'timestamp', 'amount')
    if timestamp && amount
      # The stored amount decays linearly over the period (leaky-bucket):
      # max * elapsed / period units have "leaked" since the last write.
      time_passed = Time.now.to_f - timestamp.to_i / 1000.0
      drift = max * time_passed / period
      # Cap at max in case the configured max was lowered after writes.
      last_amount = [amount.to_f, max].min
      [(last_amount - drift).ceil, 0].max
    else
      0
    end
  end

  # Return amount of limit remaining, taking time drift into account.
  #
  # @return [Integer] amount remaining
  def remaining
    max - used
  end

  # Single limits flatten to a one-element list (mirrors LimitGroup#flatten).
  def flatten
    [self]
  end

  private

  def config
    TrafficJam.config
  end

  def redis
    config.redis
  end

  # Memoized Redis key: "<prefix>:<action>:<truncated base64 MD5 of value>".
  # Values may implement #to_rate_limit_value to customize their identity.
  def key
    if !defined?(@key) || @key.nil?
      converted_value =
        begin
          value.to_rate_limit_value
        rescue NoMethodError
          value
        end
      hash = Digest::MD5.base64digest(converted_value.to_s)
      hash = hash[0...config.hash_length]
      @key = "#{key_prefix}:#{action}:#{hash}"
    end
    @key
  end

  # Subclasses override this to namespace their keys separately.
  def key_prefix
    config.key_prefix
  end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/errors.rb | lib/traffic_jam/errors.rb | module TrafficJam
module Errors
  # Raised by Configuration#limits when an action has no registered limit.
  class LimitNotFound < StandardError; end

  # Raised by Limit#increment! when an increment would exceed the limit.
  # Carries the offending limit object so callers can inspect it.
  class LimitExceededError < StandardError
    attr_accessor :limit

    # @param limit [TrafficJam::Limit] the limit that was exceeded
    def initialize(limit)
      super("Rate limit exceeded: #{limit.action}")
      @limit = limit
    end
  end

  # Raised when a Redis key unexpectedly exists without a TTL
  # (see GCRALimit#increment / #used).
  class InvalidKeyError < StandardError; end

  # Raised when a Lua script returns a value outside its documented set
  # (see GCRALimit#increment).
  class UnknownReturnValue < StandardError; end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/scripts.rb | lib/traffic_jam/scripts.rb | require 'digest/sha1'
module TrafficJam
# Loads the gem's bundled Lua scripts (from <gem root>/scripts/*.lua) once at
# require time and precomputes their SHA1 digests, which Redis EVALSHA uses
# to identify server-side cached scripts.
module Scripts
  # Read scripts/<name>.lua, resolved relative to this file.
  def self.load(name)
    scripts_dir = File.join(File.dirname(__FILE__), '..', '..', 'scripts')
    File.read(File.join(scripts_dir, "#{name}.lua"))
  end
  private_class_method :load

  # Script sources paired with their SHA1 digests for EVALSHA.
  INCREMENT_SCRIPT = load('increment')
  INCREMENT_SCRIPT_HASH = Digest::SHA1.hexdigest(INCREMENT_SCRIPT)

  INCREMENT_GCRA = load('increment_gcra')
  INCREMENT_GCRA_HASH = Digest::SHA1.hexdigest(INCREMENT_GCRA)

  INCREMENT_ROLLING = load('increment_rolling')
  INCREMENT_ROLLING_HASH = Digest::SHA1.hexdigest(INCREMENT_ROLLING)

  INCRBY = load('incrby')
  INCRBY_HASH = Digest::SHA1.hexdigest(INCRBY)

  SUM_ROLLING = load('sum_rolling')
  SUM_ROLLING_HASH = Digest::SHA1.hexdigest(SUM_ROLLING)
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/rolling_limit.rb | lib/traffic_jam/rolling_limit.rb | require_relative 'scripts'
module TrafficJam
# This class represents a rolling limit on an action, value pair. For example,
# if limiting the amount of money a user can transfer in a week, the action
# could be +:transfers+ and the value would be the user ID. The class exposes
# atomic increment operations and allows querying of the current amount used
# and amount remaining.
#
# This class also handles 0 for period, where 0 is no period (each
# request is compared to the max).
#
# This class departs from the design of Limit by tracking a sum of the actions
# in a second, in a hash keyed by the timestamp. Therefore, this limit can put
# a lot of data size pressure on the Redis storage, so use it wisely.
class RollingLimit < Limit
  # Constructor takes an action name as a symbol, a maximum cap, and the
  # period of limit. +max+ and +period+ are required keyword arguments.
  #
  # @param action [Symbol] action name
  # @param value [String] limit target value
  # @param max [Integer] required limit maximum
  # @param period [Integer] required limit period in seconds; 0 means no
  #   window — each request is compared against max on its own
  # @raise [ArgumentError] if max or period is nil
  def initialize(action, value, max: nil, period: nil)
    super(action, value, max: max, period: period)
  end

  # Increment the amount used by the given number. Rolls back the increment
  # if the operation exceeds the limit. Returns whether the operation was
  # successful. Time of increment can be specified optionally with a keyword
  # argument.
  #
  # @param amount [Integer] amount to increment by
  # @param time [Time] time when increment occurs
  # @return [Boolean] true if increment succeeded and false if incrementing
  #   would exceed the limit
  # @raise [ArgumentError] if amount is not an integer
  def increment(amount = 1, time: Time.now)
    raise ArgumentError, 'Amount must be an integer' if amount != amount.to_i
    return amount <= 0 if max.zero?
    # No window: every request stands alone against max.
    return amount <= max if period.zero?
    return true if amount.zero?
    return false if amount > max

    # The Lua script returns nil when the increment would exceed the limit
    # (see scripts/increment_rolling.lua).
    !run_incr([time.to_i, amount.to_i, max, period]).nil?
  end

  # Return amount of limit used within the current window, capped at max.
  #
  # @return [Integer] amount used
  def used
    return 0 if max.zero? || period.zero?
    [sum, max].min
  end

  private

  # Sum of all increments recorded within the rolling window ending now.
  def sum
    run_sum([Time.now.to_i, period])
  end

  # Run the sum_rolling script; fall back to EVAL when the script is not
  # cached on the server, but re-raise genuine script runtime errors.
  def run_sum(argv)
    redis.evalsha(Scripts::SUM_ROLLING_HASH, keys: [key], argv: argv)
  rescue Redis::CommandError => error
    raise error if /ERR Error running script/ =~ error.message
    redis.eval(Scripts::SUM_ROLLING, keys: [key], argv: argv)
  end

  # Run the increment_rolling script with the same fallback strategy.
  def run_incr(argv)
    redis.evalsha(
      Scripts::INCREMENT_ROLLING_HASH, keys: [key], argv: argv
    )
  rescue Redis::CommandError => error
    raise error if /ERR Error running script/ =~ error.message
    redis.eval(Scripts::INCREMENT_ROLLING, keys: [key], argv: argv)
  end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/lifetime_limit.rb | lib/traffic_jam/lifetime_limit.rb | module TrafficJam
# This class represents a lifetime limit on an action, value pair. For example, if
# limiting the amount of money a user can transfer, the action could be
# +:transfers+ and the value would be the user ID. The class exposes atomic
# increment operations and allows querying of the current amount used and
# amount remaining.
class LifetimeLimit < Limit
  # Constructor takes an action name as a symbol and a maximum cap. The
  # period is fixed at -1, a sentinel indicating the limit never resets.
  #
  # @param action [Symbol] action name
  # @param value [String] limit target value
  # @param max [Integer] required limit maximum
  # @raise [ArgumentError] if max is nil
  def initialize(action, value, max: nil)
    super(action, value, max: max, period: -1)
  end

  # Increment the amount used by the given number. Does not perform increment
  # if the operation would exceed the limit. Returns whether the operation was
  # successful. Negative amounts also pass through here via the inherited
  # #decrement.
  #
  # @param amount [Integer] amount to increment by
  # @param time [Time] ignored; kept for interface compatibility with Limit
  # @return [Boolean] true if increment succeeded and false if incrementing
  #   would exceed the limit
  # @raise [ArgumentError] if amount is not an integer
  def increment(amount = 1, time: Time.now)
    raise ArgumentError, 'Amount must be an integer' if amount != amount.to_i
    return amount <= 0 if max.zero?
    # The incrby script returns a falsy value when the increment is refused
    # (see scripts/incrby.lua).
    !!run_script([amount.to_i, max])
  end

  # Return amount of limit used, capped at max (the stored counter can exceed
  # max if the configured max was lowered after writes).
  #
  # @return [Integer] amount used
  def used
    return 0 if max.zero?
    amount = redis.get(key) || 0
    [amount.to_i, max].min
  end

  private

  # Run the incrby script; fall back to EVAL when the script is not cached on
  # the server (e.g. NOSCRIPT), but re-raise genuine script runtime errors.
  def run_script(argv)
    redis.evalsha(
      Scripts::INCRBY_HASH, keys: [key], argv: argv
    )
  rescue Redis::CommandError => error
    raise error if /ERR Error running script/ =~ error.message
    redis.eval(Scripts::INCRBY, keys: [key], argv: argv)
  end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/configuration.rb | lib/traffic_jam/configuration.rb | module TrafficJam
# Configuration for TrafficJam library.
#
# @see TrafficJam#configure
class Configuration
  # All settable options; drives both the accessors and #initialize.
  OPTIONS = %i( key_prefix hash_length redis ).freeze

  # @!attribute redis
  #   @return [Redis] the connected Redis client the library uses
  # @!attribute key_prefix
  #   @return [String] the prefix of all limit keys in Redis
  # @!attribute hash_length
  #   @return [Integer] the number of characters to use from the Base64
  #     encoded hashes of the limit values
  attr_accessor(*OPTIONS)

  # @param options [Hash] initial values for any of OPTIONS; missing keys
  #   leave the attribute nil
  def initialize(options = {})
    OPTIONS.each do |option|
      send("#{option}=", options[option])
    end
  end

  # Register a default cap and period with an action name. For use with
  # {TrafficJam.limit}.
  #
  # @param action [Symbol] action name
  # @param max [Integer] limit cap
  # @param period [Integer] limit period in seconds
  def register(action, max, period)
    registry[action.to_sym] = { max: max, period: period }
  end

  # Get the limit cap registered to an action.
  #
  # @see #register
  # @return [Integer] limit cap
  def max(action)
    limits(action)[:max]
  end

  # Get the limit period registered to an action.
  #
  # @see #register
  # @return [Integer] limit period in seconds
  def period(action)
    limits(action)[:period]
  end

  # Get registered limit parameters for an action.
  #
  # @see #register
  # @param action [Symbol] action name
  # @return [Hash] max and period parameters in a hash
  # @raise [TrafficJam::LimitNotFound] if action is not registered
  def limits(action)
    entry = registry[action.to_sym]
    raise TrafficJam::LimitNotFound.new(action) if entry.nil?
    entry
  end

  private

  # Lazily-initialized map of action name => { max:, period: }.
  def registry
    @limits ||= {}
  end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
coinbase/traffic_jam | https://github.com/coinbase/traffic_jam/blob/2b90aa596fdb384086a5a39a203effed14a01756/lib/traffic_jam/limit_group.rb | lib/traffic_jam/limit_group.rb | module TrafficJam
# A limit group is a way of enforcing a cap over a set of limits with the
# guarantee that either all limits will be incremented or none. This is useful
# if you must check multiple limits before allowing an action to be taken.
# Limit groups can contain other limit groups.
class LimitGroup
  attr_reader :limits

  # Creates a limit group from a collection of limits or other limit groups.
  #
  # @param limits [Array<TrafficJam::Limit>] either an array or splat of
  #   limits or other limit groups
  # @param ignore_nil_values [Boolean] silently drop limits with a nil value
  def initialize(*limits, ignore_nil_values: false)
    @limits = limits.flatten
    @ignore_nil_values = ignore_nil_values
    if @ignore_nil_values
      # Nested groups do not respond to #value and are always kept.
      @limits.reject! do |limit|
        limit.respond_to?(:value) && limit.value.nil?
      end
    end
  end

  # Add a limit (or nested limit group) to the group. Applies the same
  # ignore_nil_values filtering as the constructor: objects without a #value
  # (e.g. nested groups) are always kept.
  #
  # @param limit [TrafficJam::Limit, TrafficJam::LimitGroup]
  def <<(limit)
    if !(@ignore_nil_values && limit.respond_to?(:value) && limit.value.nil?)
      limits << limit
    end
  end

  # Attempt to increment the limits by the given amount. Does not increment
  # if incrementing would exceed any limit.
  #
  # @param amount [Integer] amount to increment by
  # @param time [Time] optional time of increment
  # @return [Boolean] whether increment operation was successful
  def increment(amount = 1, time: Time.now)
    exceeded_index = limits.find_index do |limit|
      !limit.increment(amount, time: time)
    end
    # Roll back the limits that did increment before the failing one.
    if exceeded_index
      limits[0...exceeded_index].each do |limit|
        limit.decrement(amount, time: time)
      end
    end
    exceeded_index.nil?
  end

  # Increment the limits by the given amount. Raises an error and does not
  # increment if doing so would exceed any limit.
  #
  # @param amount [Integer] amount to increment by
  # @param time [Time] optional time of increment
  # @return [nil]
  # @raise [TrafficJam::LimitExceededError] if increment would exceed any
  #   limits
  def increment!(amount = 1, time: Time.now)
    exception = nil
    exceeded_index = limits.find_index do |limit|
      begin
        limit.increment!(amount, time: time)
        false
      rescue TrafficJam::LimitExceededError => e
        exception = e
        true
      end
    end
    # Roll back the limits that did increment, then re-raise the original
    # error so the caller sees which limit failed.
    if exceeded_index
      limits[0...exceeded_index].each do |limit|
        limit.decrement(amount, time: time)
      end
      raise exception
    end
  end

  # Decrement the limits by the given amount.
  #
  # @param amount [Integer] amount to decrement by
  # @param time [Time] optional time of decrement
  # @return [true]
  def decrement(amount = 1, time: Time.now)
    limits.all? { |limit| limit.decrement(amount, time: time) }
  end

  # Return whether incrementing by the given amount would exceed any limit.
  # Does not change amount used.
  #
  # @param amount [Integer]
  # @return [Boolean] whether any limit would be exceeded
  def exceeded?(amount = 1)
    limits.any? { |limit| limit.exceeded?(amount) }
  end

  # Return the first limit to be exceeded if incrementing by the given amount,
  # or nil otherwise. Does not change amount used for any limit.
  #
  # @param amount [Integer]
  # @return [TrafficJam::Limit, nil]
  def limit_exceeded(amount = 1)
    limits.each do |limit|
      limit_exceeded = limit.limit_exceeded(amount)
      return limit_exceeded if limit_exceeded
    end
    nil
  end

  # Resets all limits to 0.
  def reset
    limits.each(&:reset)
    nil
  end

  # Return minimum amount remaining of any limit.
  #
  # @return [Integer] amount remaining in limit group
  def remaining
    limits.map(&:remaining).min
  end

  # Return flattened list of limits. Will return list of limits even if this
  # group contains nested limit groups.
  #
  # @return [Array<TrafficJam::Limit>] list of limits
  def flatten
    limits.map(&:flatten).flatten
  end
end
end
| ruby | MIT | 2b90aa596fdb384086a5a39a203effed14a01756 | 2026-01-04T17:52:08.574256Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/spec/gastly_spec.rb | spec/gastly_spec.rb | require 'spec_helper'
# Integration-level specs for the two module entry points,
# Gastly.screenshot and Gastly.capture, exercised against a live URL.
describe Gastly do
let(:url) { 'http://google.com' }
context '#screenshot' do
it 'returns an instance of Gastly::Screenshot' do
expect(Gastly.screenshot(url, timeout: 1000)).to be_instance_of Gastly::Screenshot
end
end
context '#capture' do
it 'creates a screenshot' do
# Capturing should add exactly one file to the tmp dir; the spec
# cleans the directory afterwards so runs stay independent.
tmp = 'spec/support/tmp'
path = "#{tmp}/output.png"
expect { Gastly.capture(url, path) }.to change { Dir.glob("#{tmp}/*").length }.by(1)
FileUtils.rm Dir.glob("#{tmp}/*")
end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/spec/phantomjs_spec.rb | spec/phantomjs_spec.rb | require 'spec_helper'
# Verifies that the monkey patch in lib/gastly/phantomjs_patch.rb adds
# the proxy_host/proxy_port accessors to the Phantomjs module.
describe Phantomjs do
it { expect(Phantomjs).to respond_to(:proxy_host) }
it { expect(Phantomjs).to respond_to(:proxy_port) }
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/spec/spec_helper.rb | spec/spec_helper.rb | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'gastly'
require 'coveralls'
# Start coverage reporting before any specs run.
Coveralls.wear!
RSpec.configure do |config|
config.before(:suite) do
# Remove any cached PhantomJS install, then resolve the binary path —
# presumably so each suite run exercises a fresh install via the
# patched installer; confirm against the phantomjs gem docs.
Phantomjs.implode!
Phantomjs.path
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/spec/gastly/screenshot_spec.rb | spec/gastly/screenshot_spec.rb | require 'spec_helper'
# Unit specs for Gastly::Screenshot: option handling, the PhantomJS
# command line it builds, and error translation from PhantomJS output.
RSpec.describe Gastly::Screenshot do
let(:url) { 'http://google.com' }
let(:params) do
{
selector: '#hplogo',
browser_width: 1280,
browser_height: 780,
timeout: 1000,
cookies: { user_id: 1, auth_token: 'abcd' },
proxy_host: '10.10.10.1',
proxy_port: '8080',
phantomjs_options: '--load-images=false'
}
end
subject { Gastly::Screenshot.new(url) }
# Constants
it { expect(Gastly::Screenshot::SCRIPT_PATH).to eq File.expand_path('../../../lib/gastly/script.js', __FILE__) }
it { expect(Gastly::Screenshot::DEFAULT_TIMEOUT).to eq 0 }
it { expect(Gastly::Screenshot::DEFAULT_BROWSER_WIDTH).to eq 1440 }
it { expect(Gastly::Screenshot::DEFAULT_BROWSER_HEIGHT).to eq 900 }
it { expect(Gastly::Screenshot::DEFAULT_FILE_FORMAT).to eq '.png' }
context '#initialize' do
it 'sets instance variables' do
screenshot = Gastly::Screenshot.new(url, params)
params.each do |key, value|
expect(screenshot.public_send(key)).to eq value
end
end
it 'raises an argument error' do
# :url is not an accepted keyword, so assert_valid_keys should raise.
expect { Gastly::Screenshot.new(url, url: url) }.to raise_error(ArgumentError)
end
end
context '#capture' do
it 'configures proxy' do
expect(Phantomjs).to receive(:run)
expect(Phantomjs).to receive(:proxy_host=).with(params[:proxy_host])
expect(Phantomjs).to receive(:proxy_port=).with(params[:proxy_port])
Gastly::Screenshot.new(url, params).capture
end
it 'runs js script' do
# The full argument vector expected by script.js: binary options
# first, then the script path, then key=value parameters.
screenshot = Gastly::Screenshot.new(url, params)
cookies = params[:cookies].map { |key, value| "#{key}=#{value}" }.join(',')
args = [
"--proxy=#{params[:proxy_host]}:#{params[:proxy_port]} #{params[:phantomjs_options]}",
Gastly::Screenshot::SCRIPT_PATH,
"url=#{url}",
"timeout=#{params[:timeout]}",
"width=#{params[:browser_width]}",
"height=#{params[:browser_height]}",
"output=#{screenshot.image.path}",
"selector=#{params[:selector]}",
"cookies=#{cookies}"
]
expect(Phantomjs).to receive(:run).with(*args)
screenshot.capture
end
it 'raises an exception if fetch error' do
url = 'h11p://google.com'
screenshot = Gastly::Screenshot.new(url)
expect { screenshot.capture }.to raise_error(Gastly::FetchError, "Unable to load #{url}")
end
it 'raises an exception if runtime error' do
expect(Phantomjs).to receive(:run).and_return('RuntimeError:test runtime error')
screenshot = Gastly::Screenshot.new(url)
expect { screenshot.capture }.to raise_error(Gastly::PhantomJSError, 'test runtime error')
end
it 'raises an exception if unknown error' do
expect(Phantomjs).to receive(:run).and_return('unknown error')
screenshot = Gastly::Screenshot.new(url)
expect { screenshot.capture }.to raise_error(Gastly::UnknownError)
end
it 'returns an instance of Gastly::Image' do
screenshot = Gastly::Screenshot.new(url)
expect(screenshot.capture).to be_instance_of Gastly::Image
end
end
# The three reader methods below fall back to class defaults when no
# explicit value was supplied at construction time.
context '#timeout' do
it 'returns default timeout' do
expect(subject.timeout).to eq Gastly::Screenshot::DEFAULT_TIMEOUT
end
it 'returns the set value' do
screenshot = described_class.new(url, timeout: 200)
expect(screenshot.timeout).to eq 200
end
end
context '#browser_width' do
it 'returns default browser width' do
expect(subject.browser_width).to eq Gastly::Screenshot::DEFAULT_BROWSER_WIDTH
end
it 'returns the set value' do
screenshot = described_class.new(url, browser_width: 1280)
expect(screenshot.browser_width).to eq 1280
end
end
context '#browser_height' do
it 'returns default browser height' do
expect(subject.browser_height).to eq Gastly::Screenshot::DEFAULT_BROWSER_HEIGHT
end
it 'returns the set value' do
screenshot = described_class.new(url, browser_height: 720)
expect(screenshot.browser_height).to eq 720
end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/spec/gastly/image_spec.rb | spec/gastly/image_spec.rb | require 'spec_helper'
# Unit specs for Gastly::Image: each wrapper method should delegate to
# the underlying MiniMagick::Image with a correctly formatted geometry
# string, and #save should return the output path.
RSpec.describe Gastly::Image do
let(:image) { MiniMagick::Image.new('test.png') }
subject { Gastly::Image.new(image) }
context '#resize' do
it 'invokes method #resize with arguments' do
width, height = 100, 100
expect_any_instance_of(MiniMagick::Image).to receive(:resize).with("#{width}x#{height}")
subject.resize(width: 100, height: 100)
end
end
context '#crop' do
it 'invokes method #crop with arguments' do
width, height, x, y = 100, 100, 0, 0
expect_any_instance_of(MiniMagick::Image).to receive(:crop).with("#{width}x#{height}+#{x}+#{y}")
subject.crop(width: 100, height: 100, x: 0, y: 0)
end
end
context '#format' do
it 'invokes method #format' do
ext = 'png'
expect_any_instance_of(MiniMagick::Image).to receive(:format).with(ext)
subject.format(ext)
end
end
context '#save' do
let(:output) { 'output.png' }
before do
expect_any_instance_of(MiniMagick::Image).to receive(:write).with(output)
end
it 'invokes method #write' do
subject.save(output)
end
it 'returns a string' do
expect(subject.save(output)).to eq output
end
end
end | ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly.rb | lib/gastly.rb | require 'phantomjs'
require 'mini_magick'
require_relative 'gastly/utils'
require_relative 'gastly/phantomjs_patch'
require_relative 'gastly/image'
require_relative 'gastly/screenshot'
require_relative 'gastly/exceptions'
require_relative 'gastly/version'
module Gastly
  module_function

  # Build a Screenshot for the given URL without capturing it yet.
  #
  # @param url [String] the page to screenshot
  # @param kwargs [Hash] options forwarded to Screenshot#initialize
  # @return [Gastly::Screenshot]
  def screenshot(url, **kwargs)
    Screenshot.new(url, **kwargs)
  end

  # Capture the given URL and write the image straight to +path+.
  #
  # @param url [String] the page to screenshot
  # @param path [String] output file path
  # @return [String] the output path
  def capture(url, path)
    screenshot(url).capture.save(path)
  end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/version.rb | lib/gastly/version.rb | module Gastly
VERSION = '1.1.0'
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/phantomjs_patch.rb | lib/gastly/phantomjs_patch.rb | module Phantomjs
class << self
attr_accessor :proxy_host, :proxy_port
end
class Platform
RETRY_COUNT = 5
class << self
# Download, unpack and install the PhantomJS binary into
# Phantomjs.base_dir. Downloads happen in a scratch directory under
# temp_path, via curl with a wget fallback; .bz2 and .zip packages are
# supported. Raises (via fail) when no downloader is available, the
# archive format is unknown, or the binary is missing afterwards.
def install!
STDERR.puts "Phantomjs does not appear to be installed in #{phantomjs_path}, installing!"
FileUtils.mkdir_p Phantomjs.base_dir
# Purge temporary directory if it is still hanging around from previous installs,
# then re-create it.
temp_dir = File.join(temp_path, 'phantomjs_install')
FileUtils.rm_rf temp_dir
FileUtils.mkdir_p temp_dir
Dir.chdir temp_dir do
unless download_via_curl || download_via_wget
fail "\n\nFailed to load phantomjs! :(\nYou need to have cURL or wget installed on your system.\nIf you have, the source of phantomjs might be unavailable: #{package_url}\n\n"
end
# Dispatch on the archive extension of the downloaded package.
case package_url.split('.').last
when 'bz2'
system "bunzip2 #{File.basename(package_url)}"
system "tar xf #{File.basename(package_url).sub(/\.bz2$/, '')}"
when 'zip'
system "unzip #{File.basename(package_url)}"
else
fail "Unknown compression format for #{File.basename(package_url)}"
end
# Find the phantomjs build we just extracted
extracted_dir = Dir['phantomjs*'].find { |path| File.directory?(path) }
if extracted_dir.nil?
# Move the executable file
# (some packages ship a bare binary instead of a directory)
FileUtils.mkdir_p File.join(Phantomjs.base_dir, platform, 'bin')
if FileUtils.mv 'phantomjs', File.join(Phantomjs.base_dir, platform, 'bin')
STDOUT.puts "\nSuccessfully installed phantomjs. Yay!"
end
else
# Move the extracted phantomjs build to $HOME/.phantomjs/version/platform
if FileUtils.mv extracted_dir, File.join(Phantomjs.base_dir, platform)
STDOUT.puts "\nSuccessfully installed phantomjs. Yay!"
end
end
# Clean up remaining files in tmp
if FileUtils.rm_rf temp_dir
STDOUT.puts 'Removed temporarily downloaded files.'
end
end
fail 'Failed to install phantomjs. Sorry :(' unless File.exist?(phantomjs_path)
end
private
# Download the PhantomJS package with curl, retrying on transient
# failures.
#
# @return [Boolean] whether the system call succeeded
def download_via_curl
  system "curl -L --retry #{RETRY_COUNT} -O #{package_url} #{curl_proxy_options}"
end

# Fallback download via wget for systems without curl.
#
# @return [Boolean] whether the system call succeeded
def download_via_wget
  system "wget -t #{RETRY_COUNT} #{package_url} #{wget_proxy_options}"
end

# Proxy flags for curl, or '' when no proxy is configured.
def curl_proxy_options
  return '' unless proxy_configured?
  "-x #{Phantomjs.proxy_host}:#{Phantomjs.proxy_port}"
end

# Proxy flags for wget, or '' when no proxy is configured.
def wget_proxy_options
  return '' unless proxy_configured?
  "-e use_proxy=yes -e http_proxy=#{Phantomjs.proxy_host}:#{Phantomjs.proxy_port}"
end

# True when a proxy host and/or port has been assigned on Phantomjs.
#
# Replaces the old proxy_options_exist? predicate, whose name read as
# the opposite of what it returned (it was true when NO proxy was
# configured).
def proxy_configured?
  !(Phantomjs.proxy_host.nil? && Phantomjs.proxy_port.nil?)
end

# Backwards-compatible alias for the old (inverted) predicate name.
def proxy_options_exist?
  !proxy_configured?
end
end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/exceptions.rb | lib/gastly/exceptions.rb | module Gastly
# Raised when PhantomJS reports it could not load the requested URL.
# The message embeds the offending URL.
class FetchError < StandardError
def initialize(url)
super("Unable to load #{url}")
end
end
# Raised when a runtime error occurs inside PhantomJS while the page
# script is executing (output matched "RuntimeError:" in Screenshot).
PhantomJSError = Class.new(RuntimeError)
# Raised for any PhantomJS output that is not a recognised error format.
UnknownError = Class.new(RuntimeError)
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/utils.rb | lib/gastly/utils.rb | require_relative 'utils/hash'
require_relative 'utils/string'
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/screenshot.rb | lib/gastly/screenshot.rb | module Gastly
class Screenshot
  SCRIPT_PATH = File.expand_path('../script.js', __FILE__)
  DEFAULT_TIMEOUT = 0
  DEFAULT_BROWSER_WIDTH = 1440
  DEFAULT_BROWSER_HEIGHT = 900
  DEFAULT_FILE_FORMAT = '.png'.freeze

  attr_reader :image
  attr_writer :timeout, :browser_width, :browser_height
  attr_accessor :url, :selector, :cookies, :proxy_host, :proxy_port, :phantomjs_options

  # @param url [String] The full url to the site
  # @param kwargs [Hash] optional settings (validated below)
  # @raise [ArgumentError] if an unsupported option key is given
  def initialize(url, **kwargs)
    hash = Gastly::Utils::Hash.new(kwargs)
    hash.assert_valid_keys(:timeout, :browser_width, :browser_height, :selector, :cookies, :proxy_host, :proxy_port, :phantomjs_options)

    @url = url
    # Cookies are removed from kwargs first so the generic ivar loop
    # below cannot assign @cookies a second time.
    @cookies = kwargs.delete(:cookies)
    @image = MiniMagick::Image.create(DEFAULT_FILE_FORMAT, false) # Disable validation
    kwargs.each { |key, value| instance_variable_set(:"@#{key}", value) }
  end

  # Capture image via PhantomJS and save to output file
  #
  # @return [Gastly::Image] Instance of Gastly::Image
  # @raise [Gastly::FetchError] if the page could not be loaded
  # @raise [Gastly::PhantomJSError] if the page script raised at runtime
  # @raise [Gastly::UnknownError] on any unrecognised PhantomJS output
  def capture
    # This is necessary to install PhantomJS via proxy
    Phantomjs.proxy_host = proxy_host if proxy_host
    Phantomjs.proxy_port = proxy_port if proxy_port

    output = Phantomjs.run(options, SCRIPT_PATH.to_s, *prepared_params)
    handle_output(output)

    Gastly::Image.new(image)
  end

  # Explicit readers that fall back to the class defaults when no value
  # was supplied (writers are generated by attr_writer above). These
  # replace the previous const_get metaprogramming for readability.
  def timeout
    @timeout || DEFAULT_TIMEOUT
  end

  def browser_width
    @browser_width || DEFAULT_BROWSER_WIDTH
  end

  def browser_height
    @browser_height || DEFAULT_BROWSER_HEIGHT
  end

  private

  # Command line options passed to the PhantomJS binary itself.
  def options
    [proxy_options, phantomjs_options].join(' ').strip
  end

  def proxy_options
    return '' if proxy_host.nil? && proxy_port.nil?
    "--proxy=#{proxy_host}:#{proxy_port}"
  end

  # key=value arguments consumed by script.js; selector and cookies are
  # only appended when present.
  def prepared_params
    params = {
      url: url,
      timeout: timeout,
      width: browser_width,
      height: browser_height,
      output: image.path
    }
    params[:selector] = selector if selector
    params[:cookies] = parameterize(cookies).join(',') if cookies
    parameterize(params)
  end

  # @param hash [Hash]
  # @return [Array] Array of parameterized strings
  def parameterize(hash)
    hash.map { |key, value| "#{key}=#{value}" }
  end

  # Translate PhantomJS output into a Gastly error. No output means
  # success.
  def handle_output(out)
    output = Gastly::Utils::String.new(out)
    return unless output.present?

    error = case output.string
            when /^FetchError:(.+)/ then Gastly::FetchError
            when /^RuntimeError:(.+)/m then Gastly::PhantomJSError
            else Gastly::UnknownError # qualified for consistency with the branches above
            end
    # The unknown-error branch has no capture group, so fall back to the
    # raw output instead of raising with a nil message.
    raise error, Regexp.last_match(1) || output.string
  end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/image.rb | lib/gastly/image.rb | module Gastly
class Image
  attr_reader :image

  # @param image [MiniMagick::Image] the underlying image handle
  def initialize(image)
    @image = image
  end

  # Scale the image to the given size.
  #
  # @param width [Fixnum] Image width
  # @param height [Fixnum] Image height
  def resize(width:, height:)
    image.resize("#{width}x#{height}")
  end

  # Cut a region of the given size out of the image at offset (x, y).
  #
  # @param width [Fixnum] Crop width
  # @param height [Fixnum] Crop height
  # @param x [Fixnum] Crop x offset
  # @param y [Fixnum] Crop y offset
  def crop(width:, height:, x:, y:)
    image.crop("#{width}x#{height}+#{x}+#{y}")
  end

  # Convert the image to another format.
  #
  # @param ext [String] Image extension
  # @return [MiniMagick::Image] Instance
  def format(ext)
    image.format(ext)
  end

  # Write the image to disk and hand back the destination path.
  #
  # @param output [String] Full path to image
  # @return [String] Full path to image
  def save(output)
    output.tap { image.write(output) }
  end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/utils/string.rb | lib/gastly/utils/string.rb | module Gastly
module Utils
  # Minimal stand-in for ActiveSupport's String#blank?/#present?,
  # wrapping an arbitrary value coerced with #to_s.
  class String
    BLANK_RE = /\A[[:space:]]*\z/

    attr_reader :string

    def initialize(string = '')
      @string = string.to_s
    end

    # A string is blank if it's empty or contains whitespace only
    # (Unicode whitespace such as "\u00a0" included):
    #
    #   ''.blank?        # => true
    #   "\t\n\r".blank?  # => true
    #   ' blah '.blank?  # => false
    #
    # @return [true, false]
    def blank?
      !!(BLANK_RE =~ string)
    end

    # A string is present if it's not blank.
    #
    # @return [true, false]
    def present?
      !blank?
    end
  end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
mgrachev/gastly | https://github.com/mgrachev/gastly/blob/f4d894d7fcb234506cf59a971d92783a28ca2ff1/lib/gastly/utils/hash.rb | lib/gastly/utils/hash.rb | module Gastly
module Utils
  # Minimal stand-in for ActiveSupport's Hash#assert_valid_keys,
  # wrapping an arbitrary value coerced with #to_h.
  class Hash
    attr_reader :hash

    # @param hash [#to_h] the hash whose keys will be validated
    def initialize(hash = {})
      @hash = hash.to_h
    end

    # Validates all keys in a hash match <tt>*valid_keys</tt>, raising
    # +ArgumentError+ on a mismatch.
    #
    # Note that keys are treated differently than HashWithIndifferentAccess,
    # meaning that string and symbol keys will not match.
    #
    #   { name: 'Rob', years: '28' }.assert_valid_keys(:name, :age) # => raises ArgumentError
    #   { name: 'Rob', age: '28' }.assert_valid_keys('name', 'age') # => raises ArgumentError
    #   { name: 'Rob', age: '28' }.assert_valid_keys(:name, :age)   # => passes, raises nothing
    #
    # @param valid_keys [Array] allowed keys (nested arrays are flattened)
    # @raise [ArgumentError] naming the first unknown key found
    def assert_valid_keys(*valid_keys)
      valid_keys.flatten!
      hash.each_key do |key|
        next if valid_keys.include?(key)

        # raise with class + message is the idiomatic form (was
        # `fail ArgumentError.new(...)`); the message is unchanged.
        raise ArgumentError,
              "Unknown key: #{key.inspect}. Valid keys are: #{valid_keys.map(&:inspect).join(', ')}"
      end
    end
  end
end
end
| ruby | MIT | f4d894d7fcb234506cf59a971d92783a28ca2ff1 | 2026-01-04T17:52:09.186283Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi_spec.rb | spec/nandi_spec.rb | # frozen_string_literal: true
require "nandi/migration"
# Specs for Nandi.compile: compiling a fixture migration through a stub
# renderer, with and without a configured post-processing step.
RSpec.describe Nandi do
let(:renderer) do
Class.new(Object) do
def self.generate(migration); end
end
end
before do
# Test default single-database behavior
allow_any_instance_of(Nandi::Lockfile).to receive(:persist!)
end
describe "::compile" do
let(:args) do
{
files: files,
}
end
let(:base_path) do
File.join(
File.dirname(__FILE__),
"/nandi/fixtures/example_migrations",
)
end
before do
described_class.configure do |config|
config.renderer = renderer
config.migration_directory = base_path
end
end
context "with a valid migration" do
let(:files) { ["20180104120000_my_migration.rb"] }
it "yields output" do
allow(renderer).to receive(:generate).and_return("output")
described_class.compile(**args) do |output|
expect(output.first.file_name).to eq("20180104120000_my_migration.rb")
expect(output.first.body).to eq("output")
end
end
end
context "with a post-processing step" do
let(:files) { ["20180104120000_my_migration.rb"] }
before do
allow(renderer).to receive(:generate).and_return("output")
end
# rubocop:disable RSpec/ExampleLength
it "yields processed output" do
# The post_process hook receives the rendered body and its return
# value becomes the compiled migration body.
described_class.configure do |config|
config.post_process do |arg|
expect(arg).to eq("output")
"processed output"
end
end
described_class.compile(**args) do |output|
expect(output.first.file_name).to eq("20180104120000_my_migration.rb")
expect(output.first.body).to eq("processed output")
end
end
# rubocop:enable RSpec/ExampleLength
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/spec_helper.rb | spec/spec_helper.rb | # frozen_string_literal: true
require "bundler/setup"
require "pathname"
require "nandi"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
# Prefer the ERB engine for Tilt — presumably the engine Nandi's
# migration templates are rendered with; confirm against lib code.
Tilt.prefer Tilt::ERBTemplate
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/generators/nandi/foreign_key_generator_spec.rb | spec/generators/nandi/foreign_key_generator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "generators/nandi/foreign_key/foreign_key_generator"
# Specs for the foreign-key generator: it should emit three migrations
# (add reference, add FK, validate FK) into the configured migration
# directory, honouring per-database directories and the
# no_create_column option. Note the expected timestamps increase by one
# second per file even though Time.now is frozen — presumably the
# generator advances the timestamp itself; confirm in the generator.
RSpec.describe Nandi::ForeignKeyGenerator do
let(:generator) { described_class.new(%w[posts users]) }
before do
# Reset Nandi configuration
Nandi.instance_variable_set(:@config, nil)
# Mock Rails generator methods
allow(generator).to receive(:template)
allow(generator).to receive(:options).and_return({})
# Mock time to ensure consistent timestamps
allow(Time).to receive(:now).and_return(Time.new(2024, 1, 15, 12, 30, 45, "UTC"))
end
describe "#add_reference" do
before do
Nandi.configure do |config|
config.migration_directory = "db/safe_migrations"
end
end
it "creates reference migration file" do
expect(generator).to receive(:template).with(
"add_reference.rb",
"db/safe_migrations/20240115123045_add_reference_on_posts_to_users.rb",
)
generator.add_reference
end
it "sets correct add_reference_name" do
allow(generator).to receive(:template)
generator.add_reference
expect(generator.add_reference_name).to eq("add_reference_on_posts_to_users")
end
context "with no_create_column option" do
before do
allow(generator).to receive(:options).and_return({ "no_create_column" => true })
end
it "does not create reference migration" do
expect(generator).to_not receive(:template)
generator.add_reference
end
end
end
describe "#add_foreign_key" do
before do
Nandi.configure do |config|
config.migration_directory = "db/safe_migrations"
end
end
it "creates foreign key migration file" do
expect(generator).to receive(:template).with(
"add_foreign_key.rb",
"db/safe_migrations/20240115123046_add_foreign_key_on_posts_to_users.rb",
)
generator.add_foreign_key
end
end
describe "#validate_foreign_key" do
before do
Nandi.configure do |config|
config.migration_directory = "db/safe_migrations"
end
end
it "creates validation migration file" do
expect(generator).to receive(:template).with(
"validate_foreign_key.rb",
"db/safe_migrations/20240115123047_validate_foreign_key_on_posts_to_users.rb",
)
generator.validate_foreign_key
end
end
describe "multi-database support" do
before do
Nandi.configure do |config|
config.register_database(:primary, migration_directory: "db/primary_safe_migrations")
config.register_database(:analytics, migration_directory: "db/analytics_safe_migrations")
end
allow(generator).to receive(:options).and_return({ "database" => "analytics" })
end
it "creates reference migration in correct database directory" do
expect(generator).to receive(:template).with(
"add_reference.rb",
"db/analytics_safe_migrations/20240115123045_add_reference_on_posts_to_users.rb",
)
generator.add_reference
end
it "creates foreign key migration in correct database directory" do
expect(generator).to receive(:template).with(
"add_foreign_key.rb",
"db/analytics_safe_migrations/20240115123046_add_foreign_key_on_posts_to_users.rb",
)
generator.add_foreign_key
end
it "creates validation migration in correct database directory" do
expect(generator).to receive(:template).with(
"validate_foreign_key.rb",
"db/analytics_safe_migrations/20240115123047_validate_foreign_key_on_posts_to_users.rb",
)
generator.validate_foreign_key
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/generators/nandi/migration_generator_spec.rb | spec/generators/nandi/migration_generator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "generators/nandi/migration/migration_generator"
# Specs for the plain migration generator: the output file name is the
# frozen timestamp plus the underscored migration name, placed in the
# configured (possibly per-database) migration directory.
RSpec.describe Nandi::MigrationGenerator do
let(:generator) { described_class.new(["add_users_table"]) }
before do
# Reset Nandi configuration
Nandi.instance_variable_set(:@config, nil)
# Mock Rails generator methods
allow(generator).to receive(:template)
allow(generator).to receive(:options).and_return({})
# Mock time to ensure consistent timestamps in tests
allow(Time).to receive(:now).and_return(Time.new(2024, 1, 15, 12, 30, 45, "UTC"))
end
describe "#create_migration_file" do
context "with default single database configuration" do
before do
Nandi.configure do |config|
config.migration_directory = "db/safe_migrations"
end
end
it "creates migration file with timestamp and underscored name" do
expect(generator).to receive(:template).with(
"migration.rb",
"db/safe_migrations/20240115123045_add_users_table.rb",
)
generator.create_migration_file
end
end
context "with multi-database configuration" do
before do
Nandi.configure do |config|
config.register_database(:primary, migration_directory: "db/primary_safe_migrations")
config.register_database(:analytics, migration_directory: "db/analytics_safe_migrations")
end
allow(generator).to receive(:options).and_return({ "database" => "analytics" })
end
it "creates migration file in correct database directory" do
expect(generator).to receive(:template).with(
"migration.rb",
"db/analytics_safe_migrations/20240115123045_add_users_table.rb",
)
generator.create_migration_file
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/generators/nandi/index_generator_spec.rb | spec/generators/nandi/index_generator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "generators/nandi/index/index_generator"
# Specs for the index generator: a comma-separated column list becomes
# an idx_<table>_on_<columns> index and a timestamped migration file in
# the configured (possibly per-database) migration directory.
RSpec.describe Nandi::IndexGenerator do
let(:generator) { described_class.new(["users", "email,status"]) }
before do
# Reset Nandi configuration
Nandi.instance_variable_set(:@config, nil)
# Mock Rails generator methods
allow(generator).to receive(:template)
allow(generator).to receive(:options).and_return({})
# Mock time to ensure consistent timestamps
allow(Time).to receive(:now).and_return(Time.new(2024, 1, 15, 12, 30, 45, "UTC"))
end
describe "#add_index" do
context "with single table and columns" do
before do
Nandi.configure do |config|
config.migration_directory = "db/safe_migrations"
end
end
it "creates index migration file with correct naming" do
expect(generator).to receive(:template).with(
"add_index.rb",
"db/safe_migrations/20240115123045_add_index_on_email_status_to_users.rb",
)
generator.add_index
end
it "sets correct instance variables" do
allow(generator).to receive(:template)
generator.add_index
expect(generator.table).to eq(:users)
expect(generator.columns).to eq(%w[email status])
expect(generator.add_index_name).to eq("add_index_on_email_status_to_users")
expect(generator.index_name).to eq(:idx_users_on_email_status)
end
end
context "with multi-database configuration" do
before do
Nandi.configure do |config|
config.register_database(:primary, migration_directory: "db/primary_safe_migrations")
config.register_database(:analytics, migration_directory: "db/analytics_safe_migrations")
end
allow(generator).to receive(:options).and_return({ "database" => "analytics" })
end
it "creates index migration in correct database directory" do
expect(generator).to receive(:template).with(
"add_index.rb",
"db/analytics_safe_migrations/20240115123045_add_index_on_email_status_to_users.rb",
)
generator.add_index
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/generators/nandi/compile_generator_spec.rb | spec/generators/nandi/compile_generator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "generators/nandi/compile/compile_generator"
# Specs for the compile generator, fully mocked (no filesystem access):
# it gathers safe-migration files per database, runs Nandi.compile,
# records source/compiled digests in the per-database lockfile, and
# writes compiled output only when the migration changed.
RSpec.describe Nandi::CompileGenerator do
let(:generator) { described_class.new }
let(:temp_dir) { "/tmp/nandi_spec" }
before do
# Reset Nandi configuration before each test
Nandi.instance_variable_set(:@config, nil)
# Clear existing lockfiles
Nandi::Lockfile.clear_instances!
# Mock file operations only - no filesystem access
allow(File).to receive(:write)
allow(File).to receive(:read).and_return("migration content")
# Mock Dir.chdir to prevent actual directory changes and return file list
allow(Dir).to receive(:chdir).and_yield
allow(Dir).to receive(:[]).with("*.rb").and_return(["test_migration.rb"])
# Mock FileMatcher to return the files as-is by default
allow(Nandi::FileMatcher).to receive(:call).and_return(["test_migration.rb"])
# Mock generator Rails methods
allow(generator).to receive(:options).and_return({})
allow(generator).to receive(:create_file)
end
describe "single database configuration" do
before do
Nandi.configure do |config|
config.migration_directory = "#{temp_dir}/db/safe_migrations"
config.output_directory = "#{temp_dir}/db/migrate"
config.lockfile_directory = temp_dir
end
# Mock Nandi.compile
allow(Nandi).to receive(:compile).and_yield([
instance_double(
Nandi::CompiledMigration,
file_name: "test_migration.rb",
source_digest: "abc123",
compiled_digest: "def456",
migration_unchanged?: false,
output_path: "#{temp_dir}/db/migrate/test_migration.rb",
body: "compiled content",
),
])
# Mock lockfile operations
allow_any_instance_of(Nandi::Lockfile).to receive(:add)
allow_any_instance_of(Nandi::Lockfile).to receive(:persist!)
end
it "calls Nandi.compile with correct parameters" do
expect(Nandi).to receive(:compile).with(
files: ["test_migration.rb"],
db_name: :primary,
)
generator.compile_migration_files
end
it "adds entries to lockfile with database context" do
expect(Nandi::Lockfile.for(:primary)).to receive(:add).with(
hash_including(file_name: "test_migration.rb"),
)
generator.compile_migration_files
end
it "creates output files when migration has changed" do
expect(generator).to receive(:create_file).with(
"#{temp_dir}/db/migrate/test_migration.rb",
"compiled content",
force: true,
)
generator.compile_migration_files
end
context "when migration is unchanged" do
before do
allow(Nandi).to receive(:compile).and_yield([
instance_double(
Nandi::CompiledMigration,
file_name: "test_migration.rb",
source_digest: "abc123",
compiled_digest: "def456",
migration_unchanged?: true,
),
])
end
it "does not create files" do
expect(generator).to_not receive(:create_file)
generator.compile_migration_files
end
end
it "persists lockfile after processing" do
expect(Nandi::Lockfile.for(:primary)).to receive(:persist!).once
generator.compile_migration_files
end
end
describe "multi-database configuration" do
before do
Nandi.configure do |config|
config.lockfile_directory = temp_dir
config.register_database(
:primary,
migration_directory: "#{temp_dir}/db/safe_migrations",
output_directory: "#{temp_dir}/db/migrate",
)
config.register_database(
:analytics,
migration_directory: "#{temp_dir}/db/analytics_safe_migrations",
output_directory: "#{temp_dir}/db/analytics_migrate",
)
end
# Mock file operations for both directories
allow(Dir).to receive(:chdir).with("#{temp_dir}/db/safe_migrations").and_yield
allow(Dir).to receive(:chdir).with("#{temp_dir}/db/analytics_safe_migrations").and_yield
# Dir["*.rb"] is stubbed statefully: first call returns the primary
# file list, second call the analytics one.
call_count = 0
allow(Dir).to receive(:[]).with("*.rb") do
call_count += 1
case call_count
when 1
["primary_migration.rb"]
when 2
["analytics_migration.rb"]
else
[]
end
end
# Mock FileMatcher for multi-database
allow(Nandi::FileMatcher).to receive(:call).with(
files: ["primary_migration.rb"],
spec: nil,
).and_return(["primary_migration.rb"])
allow(Nandi::FileMatcher).to receive(:call).with(
files: ["analytics_migration.rb"],
spec: nil,
).and_return(["analytics_migration.rb"])
# Mock lockfile operations
allow(Nandi::Lockfile.for(:primary)).to receive(:add)
allow(Nandi::Lockfile.for(:primary)).to receive(:persist!)
allow(Nandi::Lockfile.for(:analytics)).to receive(:add)
allow(Nandi::Lockfile.for(:analytics)).to receive(:persist!)
end
context "when compiling all databases" do
before do
# Mock Nandi.compile for both databases
allow(Nandi).to receive(:compile).with(
files: ["primary_migration.rb"],
db_name: :primary,
).and_yield([
instance_double(
Nandi::CompiledMigration,
file_name: "primary_migration.rb",
source_digest: "primary_abc",
compiled_digest: "primary_def",
migration_unchanged?: false,
output_path: "#{temp_dir}/db/migrate/primary_migration.rb",
body: "primary compiled content",
),
])
allow(Nandi).to receive(:compile).with(
files: ["analytics_migration.rb"],
db_name: :analytics,
).and_yield([
instance_double(
Nandi::CompiledMigration,
file_name: "analytics_migration.rb",
source_digest: "analytics_abc",
compiled_digest: "analytics_def",
migration_unchanged?: false,
output_path: "#{temp_dir}/db/analytics_migrate/analytics_migration.rb",
body: "analytics compiled content",
),
])
end
it "processes all configured databases" do
expect(Nandi).to receive(:compile).twice
generator.compile_migration_files
end
it "adds migrations to lockfile with correct database context" do
expect(Nandi::Lockfile.for(:primary)).to receive(:add).once
expect(Nandi::Lockfile.for(:analytics)).to receive(:add).once
generator.compile_migration_files
end
it "creates files in correct database-specific output directories" do
expect(generator).to receive(:create_file).twice
generator.compile_migration_files
end
end
context "when compiling specific database" do
before do
allow(generator).to receive(:options).and_return({ database: "analytics" })
# Override Dir calls for single database
allow(Dir).to receive(:chdir).with("#{temp_dir}/db/analytics_safe_migrations").and_yield
allow(Dir).to receive(:[]).with("*.rb").and_return(["analytics_migration.rb"])
# Mock FileMatcher for specific database
allow(Nandi::FileMatcher).to receive(:call).with(
files: ["analytics_migration.rb"],
spec: nil,
).and_return(["analytics_migration.rb"])
allow(Nandi).to receive(:compile).with(
files: ["analytics_migration.rb"],
db_name: :analytics,
).and_yield([
instance_double(
Nandi::CompiledMigration,
file_name: "analytics_migration.rb",
source_digest: "analytics_abc",
compiled_digest: "analytics_def",
migration_unchanged?: false,
output_path: "#{temp_dir}/db/analytics_migrate/analytics_migration.rb",
body: "analytics compiled content",
),
])
end
it "processes only the specified database" do
expect(Nandi).to receive(:compile).once
generator.compile_migration_files
end
end
end
describe "file filtering with FileMatcher" do
before do
Nandi.configure do |config|
config.migration_directory = "#{temp_dir}/db/safe_migrations"
config.output_directory = "#{temp_dir}/db/migrate"
config.lockfile_directory = temp_dir
end
# Mock multiple files
allow(Dir).to receive(:[]).with("*.rb").and_return([
"20240101000000_first.rb",
"20240102000000_second.rb",
"20240103000000_third.rb",
])
allow(Nandi).to receive(:compile).and_yield([])
allow(Nandi::Lockfile.for(:primary)).to receive(:add)
allow(Nandi::Lockfile.for(:primary)).to receive(:persist!)
end
it "uses FileMatcher to filter files" do
allow(generator).to receive(:options).and_return({ "files" => "20240102" })
expect(Nandi::FileMatcher).to receive(:call).with(
hash_including(spec: "20240102"),
).and_return(["20240102000000_second.rb"])
generator.compile_migration_files
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/migration_spec.rb | spec/nandi/migration_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/migration"
require "nandi/validator"
RSpec.describe Nandi::Migration do
let(:validator) { Nandi::Validator }
describe "name" do
subject(:migration) { MyAmazingClass.new(validator).name }
before do
stub_const("MyAmazingClass", Class.new(described_class))
end
it { is_expected.to eq("MyAmazingClass") }
end
# A migration must implement #up; implementing only #down is invalid.
describe "#up and #down" do
  subject(:migration) { subject_class.new(validator) }

  context "with up but not down implemented" do
    let(:subject_class) do
      Class.new(described_class) do
        def up; end
      end
    end

    it "is valid" do
      result = migration.validate

      expect(result).to be_valid
    end
  end

  context "with down but not up implemented" do
    let(:subject_class) do
      Class.new(described_class) do
        def down; end
      end
    end

    it "is not valid" do
      result = migration.validate

      expect(result).to_not be_valid
    end
  end
end
describe "#add_index" do
context "with one new index" do
subject(:instructions) { subject_class.new(validator).up_instructions }
context "with one indexed field" do
let(:subject_class) do
Class.new(described_class) do
def up
add_index :payments, :foo
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:add_index)
end
end
context "with more than one indexed field" do
let(:subject_class) do
Class.new(described_class) do
def up
add_index :payments, %i[foo bar]
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:add_index)
end
end
end
context "with extra args" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
add_index :payments, :foo, extra: :arg
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:add_index)
end
end
end
describe "#remove_index" do
subject(:instructions) { subject_class.new(validator).down_instructions }
context "dropping an index by column name" do
let(:subject_class) do
Class.new(described_class) do
def up; end
def down
remove_index :payments, :foo
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:remove_index)
end
end
context "dropping an index by options hash" do
context "with column property" do
let(:subject_class) do
Class.new(described_class) do
def up; end
def down
remove_index :payments, column: :foo
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:remove_index)
end
end
context "with name property" do
let(:subject_class) do
Class.new(described_class) do
def up; end
def down
remove_index :payments, name: :index_payments_on_foo
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:remove_index)
end
end
end
end
describe "#create_table" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
create_table :payments do |t|
t.column :name, :string, default: "no one"
t.column :amount, :float
t.column :paid, :bool, default: false
t.timestamps null: false
end
end
end
end
let(:expected_columns) do
[
[:name, :string, { default: "no one" }],
[:amount, :float, {}],
[:paid, :bool, { default: false }],
]
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:create_table)
end
it "exposes the correct table name" do
expect(instructions.first.table).to eq(:payments)
end
it "exposes the correct columns number" do
expect(instructions.first.columns.length).to eq(3)
end
it "exposes the correct columns values" do
instructions.first.columns.each_with_index do |c, i|
expect(c.name).to eq(expected_columns[i][0])
expect(c.type).to eq(expected_columns[i][1])
expect(c.args).to eq(expected_columns[i][2])
end
end
it "exposes the args for timestamps" do
expect(instructions.first.timestamps_args).to eq(null: false)
end
it "has no extra_args" do
expect(instructions.first.extra_args).to eq(nil)
end
context "with extra args" do
let(:subject_class) do
Class.new(described_class) do
def up
create_table :payments, id: false do |t|
t.column :name, :string, default: "no one"
t.timestamps null: false
end
end
end
end
let(:expected_columns) do
[
[:name, :string, { default: "no one" }],
]
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:create_table)
end
it "exposes the correct table name" do
expect(instructions.first.table).to eq(:payments)
end
it "exposes the correct columns number" do
expect(instructions.first.columns.length).to eq(1)
end
it "exposes the correct columns values" do
instructions.first.columns.each_with_index do |c, i|
expect(c.name).to eq(expected_columns[i][0])
expect(c.type).to eq(expected_columns[i][1])
expect(c.args).to eq(expected_columns[i][2])
end
end
it "exposes the args for timestamps" do
expect(instructions.first.timestamps_args).to eq(null: false)
end
it "has extra_args" do
expect(instructions.first.extra_args).to eq(id: false)
end
end
context "type methods" do
%i[
bigint
binary
boolean
date
datetime
decimal
float
integer
json
string
text
time
timestamp
virtual
bigserial bit bit_varying box
cidr circle citext
daterange
hstore
inet int4range int8range interval
jsonb
line lseg ltree
macaddr money
numrange
oid
path point polygon primary_key
serial
tsrange tstzrange tsvector
uuid
xml
].each do |type|
context type.to_s do
let(:subject_class) do
Class.new(described_class) do
define_method :up do
create_table :payments do |t|
t.send type, :name, null: true
end
end
end
end
it "defines the type #{type}" do
expect(instructions.first.columns.first.type).
to eq(type)
end
end
end
end
end
describe "#drop_table" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
drop_table :payments
end
end
end
it "returns an instruction" do
expect(instructions.first.procedure).to eq(:drop_table)
end
it "exposes the correct attributes" do
expect(instructions.first.table).to eq(:payments)
end
end
describe "#add_column" do
subject(:instructions) { subject_class.new(validator).up_instructions }
context "with no extra options" do
let(:subject_class) do
Class.new(described_class) do
def up
add_column :payments, :my_column, :text
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_column)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct column name" do
expect(instructions.first.name).to eq(:my_column)
end
it "has the correct column type" do
expect(instructions.first.type).to eq(:text)
end
end
context "with extra options" do
let(:subject_class) do
Class.new(described_class) do
def up
add_column :payments, :my_column, :text, collate: :de_DE
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_column)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct column name" do
expect(instructions.first.name).to eq(:my_column)
end
it "has the correct column type" do
expect(instructions.first.type).to eq(:text)
end
it "sets the default constraints" do
expect(instructions.first.extra_args).to eq(
collate: :de_DE,
)
end
end
end
describe "#add_reference" do
subject(:instructions) { subject_class.new(validator).up_instructions }
context "with no extra options" do
let(:subject_class) do
Class.new(described_class) do
def up
add_reference :payments, :mandate
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_reference)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct ref name" do
expect(instructions.first.ref_name).to eq(:mandate)
end
it "defaults index to false" do
expect(instructions.first.extra_args).to eq(
index: false,
)
end
end
context "with extra options" do
let(:subject_class) do
Class.new(described_class) do
def up
add_reference :payments, :mandate, type: :text
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_reference)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct ref name" do
expect(instructions.first.ref_name).to eq(:mandate)
end
it "forwards the extra options" do
expect(instructions.first.extra_args).to eq(
index: false,
type: :text,
)
end
end
end
describe "#remove_reference" do
subject(:instructions) { subject_class.new(validator).up_instructions }
context "with no extra options" do
let(:subject_class) do
Class.new(described_class) do
def up
remove_reference :payments, :mandate
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:remove_reference)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct ref name" do
expect(instructions.first.ref_name).to eq(:mandate)
end
end
context "with extra options" do
let(:subject_class) do
Class.new(described_class) do
def up
remove_reference :payments, :mandate, index: true
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:remove_reference)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct ref name" do
expect(instructions.first.ref_name).to eq(:mandate)
end
it "forwards the extra options" do
expect(instructions.first.extra_args).to eq(
index: true,
)
end
end
end
describe "#remove_column" do
subject(:instructions) { subject_class.new(validator).up_instructions }
context "without extra args" do
let(:subject_class) do
Class.new(described_class) do
def up
remove_column :payments, :my_column
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:remove_column)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct column name" do
expect(instructions.first.name).to eq(:my_column)
end
end
context "with extra args" do
let(:subject_class) do
Class.new(described_class) do
def up
remove_column :payments, :my_column, cascade: true
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:remove_column)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct column name" do
expect(instructions.first.name).to eq(:my_column)
end
it "has the correct extra_args" do
expect(instructions.first.extra_args).to eq(
cascade: true,
)
end
end
end
describe "#add_foreign_key" do
subject(:instructions) { subject_class.new(validator).up_instructions }
context "with just table names" do
let(:subject_class) do
Class.new(described_class) do
def up
add_foreign_key :payments, :mandates
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_foreign_key)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct target" do
expect(instructions.first.target).to eq(:mandates)
end
it "has the correct extra args" do
expect(instructions.first.extra_args).to eq(
validate: false,
name: :payments_mandates_fk,
)
end
end
context "with constraint name" do
let(:subject_class) do
Class.new(described_class) do
def up
add_foreign_key :payments, :mandates, name: :zalgo_comes
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_foreign_key)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct target" do
expect(instructions.first.target).to eq(:mandates)
end
it "has the correct extra args" do
expect(instructions.first.extra_args).to eq(
name: :zalgo_comes,
validate: false,
)
end
end
context "with column name" do
let(:subject_class) do
Class.new(described_class) do
def up
add_foreign_key :payments, :mandates, column: :zalgo_comes
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_foreign_key)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct target" do
expect(instructions.first.target).to eq(:mandates)
end
it "has the correct extra args" do
expect(instructions.first.extra_args).to eq(
column: :zalgo_comes,
name: :payments_mandates_fk,
validate: false,
)
end
end
end
describe "#add_check_constraint" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
add_check_constraint :payments, :check, "mandate_id IS NOT NULL"
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:add_check_constraint)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct check" do
expect(instructions.first.check).to eq("mandate_id IS NOT NULL")
end
it "has the correct name" do
expect(instructions.first.name).to eq(:check)
end
end
describe "#drop_constraint" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
drop_constraint :payments, :payments_mandates_fk
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:drop_constraint)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct constraint name" do
expect(instructions.first.name).to eq(:payments_mandates_fk)
end
end
describe "#validate_constraint" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
validate_constraint :payments, :payments_mandates_fk
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:validate_constraint)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct constraint name" do
expect(instructions.first.name).to eq(:payments_mandates_fk)
end
it "has a low lock weight" do
expect(instructions.first.lock).to eq(
described_class::LockWeights::SHARE,
)
end
end
describe "#remove_not_null_constraint" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
remove_not_null_constraint :payments, :colour
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:remove_not_null_constraint)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct column name" do
expect(instructions.first.column).to eq(:colour)
end
end
describe "#change_column_default" do
subject(:instructions) { subject_class.new(validator).up_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
change_column_default :payments, :colour, "blue"
end
def down; end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:change_column_default)
end
it "has the correct table" do
expect(instructions.first.table).to eq(:payments)
end
it "has the correct column name" do
expect(instructions.first.column).to eq(:colour)
end
it "has the correct default value" do
expect(instructions.first.value).to eq("blue")
end
end
describe "#irreversible_migration" do
subject(:instructions) { subject_class.new(validator).down_instructions }
let(:subject_class) do
Class.new(described_class) do
def up
remove_column :payments, :amount
end
def down
irreversible_migration
end
end
end
it "has the correct procedure" do
expect(instructions.first.procedure).to eq(:irreversible_migration)
end
it "has a low lock weight" do
expect(instructions.first.lock).to eq(
described_class::LockWeights::SHARE,
)
end
end
describe "syntax extensions" do
subject(:instructions) { subject_class.new(validator).up_instructions }
before do
Nandi.configure do |c|
c.register_method :new_method, extension
end
end
after do
Nandi.config.custom_methods.delete(:new_method)
end
let(:extension) do
Class.new do
attr_reader :foo, :bar
def initialize(foo, bar, **_kwargs)
@foo = foo
@bar = bar
end
def procedure
:new_method
end
def lock
Nandi::Migration::LockWeights::SHARE
end
end
end
let(:subject_class) do
Class.new(described_class) do
def up
new_method :arg1, :arg2
end
end
end
it "creates an instance of the custom instruction" do
expect(instructions.first).to be_a(extension)
end
it "passes the arguments to the constructor of the custom instruction" do
instruction = instructions.first
expect(instruction.foo).to eq(:arg1)
expect(instruction.bar).to eq(:arg2)
end
end
# Timeout behaviour is derived from the strictest lock taken by the
# migration's instructions: a SHARE-locked migration can run with both
# the lock and statement timeouts disabled, unless the author pins an
# explicit value via set_statement_timeout / set_lock_timeout.
describe "timeouts" do
  subject(:migration) { subject_class.new(validator) }

  context "when the strictest lock is SHARE" do
    let(:subject_class) do
      Class.new(described_class) do
        def up
          validate_constraint :payments, :payments_mandates_fk
        end

        def down; end
      end
    end

    it "disables timeouts" do
      expect(migration.disable_lock_timeout?).to be(true)
      expect(migration.disable_statement_timeout?).to be(true)
    end

    context "and we have set a statement timeout" do
      let(:subject_class) do
        Class.new(described_class) do
          set_statement_timeout(10_000)

          def up
            validate_constraint :payments, :payments_mandates_fk
          end

          def down; end
        end
      end

      it "disables only the lock timeout" do
        expect(migration.disable_lock_timeout?).to be(true)
        expect(migration.disable_statement_timeout?).to be(false)
      end
    end

    context "and we have set a lock timeout" do
      let(:subject_class) do
        Class.new(described_class) do
          set_lock_timeout(10_000)

          def up
            validate_constraint :payments, :payments_mandates_fk
          end

          def down; end
        end
      end

      # Fixed: this example was previously titled "disables only the lock
      # timeout" (a copy-paste of the sibling example), but its assertions
      # show the opposite — with an explicit lock timeout set, only the
      # statement timeout is disabled.
      it "disables only the statement timeout" do
        expect(migration.disable_lock_timeout?).to be(false)
        expect(migration.disable_statement_timeout?).to be(true)
      end
    end
  end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/safe_migration_enforcer_spec.rb | spec/nandi/safe_migration_enforcer_spec.rb | # frozen_string_literal: true
require "nandi/safe_migration_enforcer"
# Shared linting behaviour, exercised against both the default and custom
# migration directories. The including context is expected to define
# `safe_migrations`, `ar_migrations`, `safe_migration_dir`,
# `ar_migration_dir` and `lockfile`.
RSpec.shared_examples "linting" do
  let(:db_name) { nil } # Test default single-database behavior

  context "when there are no files" do
    let(:safe_migrations) { [] }
    let(:ar_migrations) { [] }

    it "returns true" do
      expect(subject.run).to eq(true)
    end
  end

  context "when all safe migrations and generated ActiveRecord migrations match" do
    it "returns true" do
      expect(subject.run).to eq(true)
    end
  end

  context "when a generated ActiveRecord migration is missing" do
    before do
      # Drop the first AR migration so its safe counterpart appears uncompiled.
      ar_migrations.shift
    end

    it "raises an error with an appropriate message" do
      expect { subject.run }.to raise_error(
        Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
        /pending generation.*20190513163422_add_elephants.rb/m,
      )
    end
  end

  context "when an ActiveRecord migration has been written rather than generated" do
    before do
      # Drop the first safe migration so the AR file appears hand-written.
      safe_migrations.shift
    end

    it "raises an error with an appropriate message" do
      expect { subject.run }.to raise_error(
        Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
        /20190513163422_add_elephants.rb.*Please use Nandi/m,
      )
    end
  end

  context "when a safe migration has had its content altered" do
    let(:altered_migration) { safe_migrations.first }

    before do
      # Generic reads return the content recorded in the lockfile; only the
      # altered migration's path returns newer, mismatching content.
      allow(File).to receive(:read).with(kind_of(String)).
        and_return("generated_content")
      allow(File).to receive(:read).
        with(Regexp.new("#{safe_migration_dir}/#{altered_migration}")).
        and_return("newer_content")
      allow(File).to receive(:read).with(Nandi.config.lockfile_path(db_name)).and_return(lockfile)
      allow(File).to receive(:write).with(Nandi.config.lockfile_path(db_name), kind_of(String)).
        and_return(lockfile)
    end

    # rubocop:disable RSpec/ExampleLength
    it "raises an error with an appropriate message" do
      expect { subject.run }.to raise_error do |err|
        expect(err.class).to eq(Nandi::SafeMigrationEnforcer::MigrationLintingFailed)
        expect(err.message).
          to match(
            /20190513163422_add_elephants.rb.*Please recompile your migrations/m,
          )
        # Unaltered migrations must not be reported.
        expect(err.message).to_not match(/20190513163423_add_beachballs.rb/)
      end
    end
    # rubocop:enable RSpec/ExampleLength
  end

  context "when a generated migration has had its content altered" do
    let(:altered_migration) { ar_migrations.first }

    before do
      # Same stubbing strategy as above, but the altered file lives in the
      # generated (ActiveRecord) directory, i.e. it was hand-edited.
      allow(File).to receive(:read).with(kind_of(String)).
        and_return("generated_content")
      allow(File).to receive(:read).
        with(Regexp.new("#{ar_migration_dir}/#{altered_migration}")).
        and_return("hand_edited_content")
      allow(File).to receive(:read).with(Nandi.config.lockfile_path(db_name)).and_return(lockfile)
      allow(File).to receive(:write).with(Nandi.config.lockfile_path(db_name), kind_of(String)).
        and_return(lockfile)
    end

    it "raises an error with an appropriate message" do
      expect { subject.run }.to raise_error do |err|
        expect(err.class).to eq(Nandi::SafeMigrationEnforcer::MigrationLintingFailed)
        expect(err.message).
          to match(/20190513163422_add_elephants.rb.*Please don't hand-edit/m)
        expect(err.message).to_not match(/20190513163423_add_beachballs.rb/)
      end
    end
  end
end
RSpec.describe Nandi::SafeMigrationEnforcer do
subject { described_class.new }
let(:safe_migration_dir) { Nandi::SafeMigrationEnforcer::DEFAULT_SAFE_MIGRATION_DIR }
let(:ar_migration_dir) { Nandi::SafeMigrationEnforcer::DEFAULT_AR_MIGRATION_DIR }
let(:safe_migrations) do
[
"20190513163422_add_elephants.rb",
"20190513163423_add_beachballs.rb",
"20190513163424_add_zoos.rb",
]
end
let(:ar_migrations) do
[
"20190513163422_add_elephants.rb",
"20190513163423_add_beachballs.rb",
"20190513163424_add_zoos.rb",
]
end
let(:ar_migration_paths) { ar_migrations.map { |f| File.join(ar_migration_dir, f) } }
let(:lockfile) do
lockfile_contents = ar_migration_paths.each_with_object({}) do |ar_file, hash|
file_name = File.basename(ar_file)
hash[file_name] = {
source_digest: Digest::SHA256.hexdigest("generated_content"),
compiled_digest: Digest::SHA256.hexdigest("generated_content"),
}
end
lockfile_contents.with_indifferent_access
end
before do
allow_any_instance_of(described_class).
to receive(:matching_migrations).
with(safe_migration_dir).
and_return(safe_migrations)
allow_any_instance_of(described_class).
to receive(:matching_migrations).
with(ar_migration_dir).
and_return(ar_migrations)
# Test default single-database behavior - mock lockfile instance methods
allow_any_instance_of(Nandi::Lockfile).to receive(:get) do |_instance, file_name|
if lockfile.key?(file_name)
lockfile.fetch(file_name)
else
{ source_digest: nil, compiled_digest: nil }
end
end
allow(File).to receive(:read).with(Regexp.new(safe_migration_dir)).
and_return("generated_content")
allow(File).to receive(:read).with(Regexp.new(ar_migration_dir)).
and_return("generated_content")
end
describe "#run" do
context "with the default migration directories" do
it_behaves_like "linting"
end
context "with custom migration directories" do
subject do
described_class.new(
safe_migration_dir: safe_migration_dir,
ar_migration_dir: ar_migration_dir,
)
end
let(:safe_migration_dir) { "custom/safe/migration/dir" }
let(:ar_migration_dir) { "custom/ar/migration/dir" }
it_behaves_like "linting"
end
end
describe "multi-database support" do
subject(:enforcer) { described_class.new }
let(:temp_dir) { "/tmp/nandi_test" }
let(:primary_migrations) { ["20240101000000_primary_migration.rb"] }
let(:analytics_migrations) { ["20240102000000_analytics_migration.rb"] }
before do
# Reset Nandi configuration
Nandi.instance_variable_set(:@config, nil)
# Configure multi-database setup
Nandi.configure do |config|
config.lockfile_directory = temp_dir
config.register_database(
:primary,
migration_directory: "#{temp_dir}/db/safe_migrations",
output_directory: "#{temp_dir}/db/migrate",
)
config.register_database(
:analytics,
migration_directory: "#{temp_dir}/db/analytics_safe_migrations",
output_directory: "#{temp_dir}/db/analytics_migrate",
)
end
# Mock directory existence
allow(Dir).to receive(:exist?).and_return(true)
# Mock migration file discovery for primary database
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/safe_migrations").and_return(primary_migrations)
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/migrate").and_return(primary_migrations)
# Mock migration file discovery for analytics database
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/analytics_safe_migrations").and_return(analytics_migrations)
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/analytics_migrate").and_return(analytics_migrations)
# Mock lockfile instances for each database
primary_lockfile = instance_double(Nandi::Lockfile)
analytics_lockfile = instance_double(Nandi::Lockfile)
allow(Nandi::Lockfile).to receive(:for).with(:primary).and_return(primary_lockfile)
allow(Nandi::Lockfile).to receive(:for).with(:analytics).and_return(analytics_lockfile)
# Mock lockfile data
allow(primary_lockfile).to receive(:get) do |filename|
if filename == "20240101000000_primary_migration.rb"
{ source_digest: "primary_source", compiled_digest: "primary_compiled" }
else
{ source_digest: nil, compiled_digest: nil }
end
end
allow(analytics_lockfile).to receive(:get) do |filename|
if filename == "20240102000000_analytics_migration.rb"
{ source_digest: "analytics_source", compiled_digest: "analytics_compiled" }
else
{ source_digest: nil, compiled_digest: nil }
end
end
# Mock file reading for FileDiff to return unchanged content by default
allow(File).to receive(:read).and_return("migration_content")
# Mock FileDiff to return false (no changes) by default
allow_any_instance_of(Nandi::FileDiff).to receive(:changed?).and_return(false)
end
after do
# Reset Nandi configuration after each test
Nandi.instance_variable_set(:@config, nil)
end
context "when all databases are properly configured" do
it "validates all databases successfully" do
expect(enforcer.run).to eq(true)
end
end
context "when there are ungenerated migrations in multiple databases" do
before do
# Remove AR migrations to simulate ungenerated state
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/migrate").and_return([])
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/analytics_migrate").and_return([])
end
# rubocop:disable RSpec/ExampleLength
it "reports violations from all databases with full paths" do
expect { enforcer.run }.to raise_error(
Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
) do |error|
expect(error.message).to include("#{temp_dir}/db/safe_migrations/20240101000000_primary_migration.rb")
expect(error.message).to include(
"#{temp_dir}/db/analytics_safe_migrations/20240102000000_analytics_migration.rb",
)
expect(error.message).to include("pending generation")
end
end
# rubocop:enable RSpec/ExampleLength
end
context "when there are handwritten migrations in multiple databases" do
before do
# Remove safe migrations to simulate handwritten state
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/safe_migrations").and_return([])
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/analytics_safe_migrations").and_return([])
end
# rubocop:disable RSpec/ExampleLength
it "reports violations from all databases with full paths" do
expect { enforcer.run }.to raise_error(
Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
) do |error|
expect(error.message).to include("#{temp_dir}/db/migrate/20240101000000_primary_migration.rb")
expect(error.message).to include("#{temp_dir}/db/analytics_migrate/20240102000000_analytics_migration.rb")
expect(error.message).to include("written by hand")
end
end
# rubocop:enable RSpec/ExampleLength
end
context "when there are out of date migrations" do
before do
# Mock FileDiff to return true (changed) only for safe migrations directory
allow_any_instance_of(Nandi::FileDiff).to receive(:changed?) do |instance|
file_path = instance.instance_variable_get(:@file_path)
file_path.include?("safe_migrations") && file_path.include?("20240101000000_primary_migration.rb")
end
end
# rubocop:disable RSpec/ExampleLength
it "reports out of date migrations with full paths" do
expect { enforcer.run }.to raise_error(
Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
) do |error|
expect(error.message).to include("#{temp_dir}/db/safe_migrations/20240101000000_primary_migration.rb")
expect(error.message).to include("changed but not been recompiled")
expect(error.message).to_not include("analytics_migration")
end
end
# rubocop:enable RSpec/ExampleLength
end
context "when there are hand edited migrations" do
before do
# Mock FileDiff to return true (changed) for specific output migrations
allow_any_instance_of(Nandi::FileDiff).to receive(:changed?) do |instance|
instance.instance_variable_get(:@file_path).include?("analytics_migrate")
end
end
# rubocop:disable RSpec/ExampleLength
it "reports hand edited migrations with full paths" do
expect { enforcer.run }.to raise_error(
Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
) do |error|
expect(error.message).to include("#{temp_dir}/db/analytics_migrate/20240102000000_analytics_migration.rb")
expect(error.message).to include("generated content altered")
expect(error.message).to_not include("primary_migration")
end
end
# rubocop:enable RSpec/ExampleLength
end
context "when there are violations in only one database" do
before do
# Only primary database has missing migration
allow_any_instance_of(described_class).to receive(:matching_migrations).
with("#{temp_dir}/db/migrate").and_return([])
end
it "reports violations from the affected database only" do
expect { enforcer.run }.to raise_error(
Nandi::SafeMigrationEnforcer::MigrationLintingFailed,
) do |error|
expect(error.message).to include("20240101000000_primary_migration.rb")
expect(error.message).to_not include("20240102000000_analytics_migration.rb")
end
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/validator_spec.rb | spec/nandi/validator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/validator"
require "nandi/migration"
require "nandi/instructions"
RSpec.describe Nandi::Validator do
subject(:validator) { described_class.call(migration) }
let(:strictest_lock) { Nandi::Migration::LockWeights::SHARE }
let(:statement_timeout) { 1_000 }
let(:lock_timeout) { 750 }
let(:migration) do
instance_double(Nandi::Migration,
up_instructions: instructions,
down_instructions: [],
statement_timeout: statement_timeout,
lock_timeout: lock_timeout,
strictest_lock: strictest_lock)
end
before do
allow(migration).to receive_messages(disable_statement_timeout?: false, disable_lock_timeout?: false)
end
# add_index is only considered safe when it is the sole index created by
# the migration (index creation requires the timeouts to be disabled).
context "creating an index" do
  before do
    allow(migration).to receive_messages(disable_statement_timeout?: true, disable_lock_timeout?: true)
  end

  context "with one new index" do
    let(:instructions) do
      [
        Nandi::Instructions::AddIndex.new(
          table: :payments,
          fields: [:foo],
        ),
      ]
    end

    it { is_expected.to be_valid }
  end

  context "with more than one new index" do
    let(:instructions) do
      [
        Nandi::Instructions::AddIndex.new(
          table: :payments,
          fields: [:foo],
        ),
        Nandi::Instructions::AddIndex.new(
          table: :payments,
          fields: [:foo],
        ),
      ]
    end

    it { is_expected.to_not be_valid }
  end
end
context "dropping an index" do
before do
allow(migration).to receive_messages(disable_statement_timeout?: true, disable_lock_timeout?: true)
end
context "dropping an index by index name" do
let(:instructions) do
[
Nandi::Instructions::RemoveIndex.new(
table: :payments,
field: { name: :index_payments_on_foo },
),
]
end
it { is_expected.to be_valid }
end
context "dropping an index by column name" do
let(:instructions) do
[
Nandi::Instructions::RemoveIndex.new(
table: :payments,
field: { column: %i[foo] },
),
]
end
it { is_expected.to be_valid }
end
context "dropping an index without valid props" do
let(:instructions) do
[
Nandi::Instructions::RemoveIndex.new(
table: :payments,
field: { very: :irrelevant },
),
]
end
it { is_expected.to_not be_valid }
end
end
context "with an irreversible migration" do
let(:instructions) { [Nandi::Instructions::IrreversibleMigration.new] }
it { is_expected.to be_valid }
end
context "with more than one object modified" do
let(:instructions) do
[
Nandi::Instructions::RemoveIndex.new(
table: :payments,
field: { name: :index_payments_on_foo },
),
Nandi::Instructions::RemoveIndex.new(
table: :mandates,
field: { name: :index_payments_on_foo },
),
]
end
it { is_expected.to_not be_valid }
end
context "with one object modified as string and symbol" do
before do
allow(migration).to receive_messages(disable_statement_timeout?: true, disable_lock_timeout?: true)
end
let(:instructions) do
[
Nandi::Instructions::RemoveIndex.new(
table: :payments,
field: { name: :index_payments_on_foo },
),
Nandi::Instructions::RemoveIndex.new(
table: "payments",
field: { name: :index_payments_on_foo },
),
]
end
it { is_expected.to be_valid }
end
context "adding a column" do
context "with null: true" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: true,
),
]
end
it { is_expected.to be_valid }
end
context "with no null value" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
),
]
end
it { is_expected.to be_valid }
end
context "with null: false" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: false,
),
]
end
it { is_expected.to_not be_valid }
context "and a default" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: false,
default: "swilly!",
),
]
end
it { is_expected.to be_valid }
end
end
context "with a default value" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: true,
default: "swilly!",
),
]
end
it { is_expected.to be_valid }
end
context "with a unique constraint" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: true,
unique: true,
),
]
end
it { is_expected.to_not be_valid }
end
end
context "with too great a statement timeout" do
let(:strictest_lock) { Nandi::Migration::LockWeights::ACCESS_EXCLUSIVE }
let(:statement_timeout) { 2_000 }
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: true,
default: "swilly!",
),
]
end
it { is_expected.to_not be_valid }
end
context "with too great a lock timeout" do
let(:strictest_lock) { Nandi::Migration::LockWeights::ACCESS_EXCLUSIVE }
let(:lock_timeout) { 20_000 }
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: true,
default: "swilly!",
),
]
end
it { is_expected.to_not be_valid }
end
context "adding a reference" do
let(:instructions) do
[
Nandi::Instructions::AddReference.new(
table: :payments,
ref_name: :mandate,
**options,
),
]
end
context "with no options" do
let(:options) { {} }
it { is_expected.to be_valid }
end
context "with valid options" do
let(:options) { { type: :text } }
it { is_expected.to be_valid }
end
context "with foreign_key: true" do
let(:options) { { foreign_key: true } }
it { is_expected.to_not be_valid }
end
context "with index: true" do
let(:options) { { index: true } }
it { is_expected.to_not be_valid }
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/multi_database_spec.rb | spec/nandi/multi_database_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/multi_database"
RSpec.describe Nandi::MultiDatabase do
subject(:multi_db) { described_class.new }
context "when no databases are registered" do
it "returns empty names array" do
expect(multi_db.names).to eq([])
end
it "raises error when accessing config" do
expect do
multi_db.config(:nonexistent)
end.to raise_error(ArgumentError, "Missing database configuration for nonexistent")
end
it "returns nil for default database" do
expect(multi_db.default).to be_nil
end
end
context "when databases are registered" do
before do
multi_db.register(:primary, migration_directory: "db/safe_migrations", output_directory: "db/migrate")
multi_db.register(:analytics, migration_directory: "db/analytics", output_directory: "db/migrate/analytics")
end
it "returns correct database names" do
expect(multi_db.names).to contain_exactly(:primary, :analytics)
end
it "returns correct database config for primary" do
primary_config = multi_db.config(:primary)
expect(primary_config.name).to eq(:primary)
expect(primary_config.migration_directory).to eq("db/safe_migrations")
expect(primary_config.output_directory).to eq("db/migrate")
end
it "returns correct database config for analytics" do
analytics_config = multi_db.config(:analytics)
expect(analytics_config.name).to eq(:analytics)
expect(analytics_config.migration_directory).to eq("db/analytics")
expect(analytics_config.output_directory).to eq("db/migrate/analytics")
end
it "returns primary database as default when no database name specified" do
expect(multi_db.config(nil).name).to eq(:primary)
end
it "identifies default database correctly" do
expect(multi_db.default.name).to eq(:primary)
end
it "raises error for duplicate database registration with different config" do
expect do
multi_db.register(:primary, migration_directory: "db/new")
end.to raise_error(ArgumentError, "Database primary already registered")
end
it "allows re-registration with identical config (for Rails reloading)" do
original_config = { migration_directory: "db/safe_migrations", output_directory: "db/migrate" }
# First registration
db1 = multi_db.register(:reloadable, original_config)
# Re-registration with same config should return the same database object
db2 = multi_db.register(:reloadable, original_config)
expect(db2).to eq(db1)
expect(multi_db.names.count(:reloadable)).to eq(1)
end
it "converts string names to symbols" do
multi_db.register("string_name", migration_directory: "db/string")
expect(multi_db.names).to include(:string_name)
end
end
context "default database behavior" do
context "when primary database is registered" do
before do
multi_db.register(:primary, migration_directory: "db/primary")
multi_db.register(:analytics, migration_directory: "db/analytics")
end
it "automatically treats primary as default" do
expect(multi_db.default.name).to eq(:primary)
expect(multi_db.default.default).to be true
end
it "returns primary database when no database name specified" do
expect(multi_db.config(nil).name).to eq(:primary)
expect(multi_db.config.name).to eq(:primary)
end
end
context "when explicit default: true is used" do
before do
multi_db.register(:main, migration_directory: "db/main", default: true)
multi_db.register(:analytics, migration_directory: "db/analytics")
end
it "treats explicitly marked database as default" do
expect(multi_db.default.name).to eq(:main)
expect(multi_db.default.default).to be true
end
it "returns explicit default database when no database name specified" do
expect(multi_db.config(nil).name).to eq(:main)
expect(multi_db.config.name).to eq(:main)
end
it "other databases are not default" do
analytics_db = multi_db.config(:analytics)
expect(analytics_db.default).to be false
end
end
context "when both primary and explicit default: true are used" do
before do
multi_db.register(:primary, migration_directory: "db/primary")
multi_db.register(:main, migration_directory: "db/main", default: true)
end
it "raises error during validation due to multiple defaults" do
expect { multi_db.validate! }.to raise_error(
ArgumentError, "Multiple default databases specified: primary, main"
)
end
end
end
context "validation" do
context "when databases are registered" do
it "raises error when no default database is specified" do
multi_db.register(:db1, migration_directory: "db/db1")
multi_db.register(:db2, migration_directory: "db/db2")
expect { multi_db.validate! }.to raise_error(
ArgumentError, /Missing default database/
)
end
it "automatically treats primary database as default" do
multi_db.register(:primary, migration_directory: "db/primary")
multi_db.register(:analytics, migration_directory: "db/analytics")
expect { multi_db.validate! }.to_not raise_error
end
it "allows explicit default database specification" do
multi_db.register(:main, migration_directory: "db/main", default: true)
multi_db.register(:analytics, migration_directory: "db/analytics")
expect { multi_db.validate! }.to_not raise_error
end
it "raises error for duplicate migration directories" do
multi_db.register(:primary, migration_directory: "db/same", output_directory: "db/migrate1")
multi_db.register(:db2, migration_directory: "db/same", output_directory: "db/migrate2")
expect { multi_db.validate! }.to raise_error(
ArgumentError, "Unique migration directories must be specified for each database"
)
end
it "raises error for duplicate output directories" do
multi_db.register(:primary, migration_directory: "db/db1", output_directory: "db/migrate")
multi_db.register(:db2, migration_directory: "db/db2", output_directory: "db/migrate")
expect { multi_db.validate! }.to raise_error(
ArgumentError, "Unique output directories must be specified for each database"
)
end
end
end
context "with lockfile behavior" do
it "uses prefixed default lockfile when not specified" do
multi_db.register(:test, {})
config = multi_db.config(:test)
expect(config.lockfile_name).to eq(".test_nandilock.yml")
end
it "allows custom lockfile names" do
multi_db.register(:custom, lockfile_name: ".custom_lock.yml")
config = multi_db.config(:custom)
expect(config.lockfile_name).to eq(".custom_lock.yml")
end
end
describe "Database" do
subject(:database) { Nandi::MultiDatabase::Database.new(name: name, config: config) }
context "with valid configuration" do
let(:name) { :test_db }
let(:config) do
{
migration_directory: "db/test/migrations",
output_directory: "db/test/migrate",
lockfile_name: ".test_lock.yml",
}
end
it "sets name correctly" do
expect(database.name).to eq(:test_db)
end
it "sets directories correctly" do
expect(database.migration_directory).to eq("db/test/migrations")
expect(database.output_directory).to eq("db/test/migrate")
end
it "sets lockfile name correctly" do
expect(database.lockfile_name).to eq(".test_lock.yml")
end
it "is not default by default" do
expect(database.default).to be_falsy
end
end
context "with minimal configuration" do
let(:name) { :minimal }
let(:config) { {} }
it "uses default prefixed directories" do
expect(database.migration_directory).to eq("db/minimal_safe_migrations")
expect(database.output_directory).to eq("db/minimal_migrate")
end
it "uses default prefixed lockfile name" do
expect(database.lockfile_name).to eq(".minimal_nandilock.yml")
end
end
context "with primary database" do
let(:name) { :primary }
let(:config) { {} }
it "automatically sets as default" do
expect(database.default).to be true
end
end
context "with explicit default flag" do
let(:name) { :custom }
let(:config) { { default: true } }
it "respects explicit default setting" do
expect(database.default).to be true
end
end
context "partial configuration" do
let(:name) { :partial }
context "with only migration directory specified" do
let(:config) { { migration_directory: "custom/migrations" } }
it "uses custom migration directory and default output directory" do
expect(database.migration_directory).to eq("custom/migrations")
expect(database.output_directory).to eq("db/partial_migrate")
end
end
context "with only output directory specified" do
let(:config) { { output_directory: "custom/output" } }
it "uses default migration directory and custom output directory" do
expect(database.migration_directory).to eq("db/partial_safe_migrations")
expect(database.output_directory).to eq("custom/output")
end
end
context "with only lockfile name specified" do
let(:config) { { lockfile_name: ".custom.yml" } }
it "uses custom lockfile name and default directories" do
expect(database.lockfile_name).to eq(".custom.yml")
expect(database.migration_directory).to eq("db/partial_safe_migrations")
expect(database.output_directory).to eq("db/partial_migrate")
end
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/formatting_spec.rb | spec/nandi/formatting_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/formatting"
RSpec.describe Nandi::Formatting do
describe "#format_value" do
subject(:result) do
subject_class.new.format_value(value, as_argument: with_as_argument)
end
let(:with_as_argument) { nil }
let(:subject_class) do
Class.new(Object) do
include Nandi::Formatting
end
end
shared_examples "outputs valid ruby" do |input|
let(:value) { input }
# rubocop:disable Security/Eval
it "evaluates to the same value" do
expect(eval(result)).to eq(input)
rescue SyntaxError
raise StandardError, "not valid ruby: #{result}"
end
# rubocop:enable Security/Eval
end
context "with nil" do
it_behaves_like "outputs valid ruby", nil
end
context "with a string" do
it_behaves_like "outputs valid ruby", "string"
end
context "with a symbol" do
it_behaves_like "outputs valid ruby", :foo
it_behaves_like "outputs valid ruby", :"what-the-hell"
it_behaves_like "outputs valid ruby", :"6"
end
context "with a number" do
it_behaves_like "outputs valid ruby", 5
it_behaves_like "outputs valid ruby", -5
it_behaves_like "outputs valid ruby", 5.5
end
context "with an array" do
it_behaves_like "outputs valid ruby", [1, 2, "foo", [:bar, "baz"]]
end
context "with a hash" do
context "with symbol keys" do
it_behaves_like "outputs valid ruby", foo: 5
it_behaves_like "outputs valid ruby", foo: { bar: 5 }
it_behaves_like "outputs valid ruby", łódź: 5
it_behaves_like "outputs valid ruby", "lots of words": 5
context "when as_argument: is provided" do
let(:with_as_argument) { true }
it_behaves_like "outputs valid ruby", [:bar, { foo: 5 }]
it_behaves_like "outputs valid ruby", [:bar, { foo: { bar: 5 } }]
it_behaves_like "outputs valid ruby", [:bar, { łódź: 5 }]
it_behaves_like "outputs valid ruby", [:bar, { "lots of words": 5 }]
end
end
context "with non-symbol keys" do
it_behaves_like "outputs valid ruby", "łódź" => 5
it_behaves_like "outputs valid ruby", "foo" => 5
it_behaves_like "outputs valid ruby", "foo" => { "bar" => 5 }
context "when as_argument: is provided" do
let(:with_as_argument) { true }
it_behaves_like "outputs valid ruby", [:bar, { "łódź" => 5 }]
it_behaves_like "outputs valid ruby", [:bar, { "foo" => 5 }]
it_behaves_like "outputs valid ruby", [:bar, { "foo" => { "bar" => 5 } }]
end
end
context "inside an array" do
let(:value) do
[
{
foo: 5,
},
]
end
it "formats the hash correctly" do
expect(result).to eq("[{\n foo: 5\n}]")
end
context "when as_argument: is provided" do
let(:with_as_argument) { true }
it "formats the hash correctly" do
expect(result).to eq("[foo: 5]")
end
end
end
context "neasted hashes with as_argument" do
let(:with_as_argument) { true }
let(:value) do
{
foo: 5,
bar: { works: true },
}
end
it "formats the hashes correctly" do
expect(result).to eq("foo: 5, bar: {\n works: true\n}")
end
end
end
context "with some arbitrary object" do
let(:some_random_object) { Class.new(Object) }
let(:value) { some_random_object.new }
before do
stub_const("SomeRandomObject", some_random_object)
end
it "throws" do
expect { result }.to raise_error(Nandi::Formatting::UnsupportedValueError,
"Cannot format value of type SomeRandomObject")
end
end
end
describe "::formatted_property" do
subject(:result) { subject_class.new(model).my_hash }
let(:subject_class) do
Struct.new(:model) do
# described_class is not in scope here.
include Nandi::Formatting
formatted_property :my_hash
end
end
let(:model) do
Struct.new(:my_hash).new({ foo: { bar: 5 } })
end
it { is_expected.to eq("{\n foo: {\n bar: 5\n}\n}") }
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/file_matcher_spec.rb | spec/nandi/file_matcher_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/file_matcher"
RSpec.describe Nandi::FileMatcher do
describe "::call" do
subject(:match) { described_class.call(files: files, spec: spec) }
let(:files) do
[
"20180402010101_do_thing_1.rb",
"20190101010101_do_thing_2.rb",
"20190102010101_do_thing_3.rb",
"20190402010101_do_thing_4.rb",
]
end
before do
allow(File).to receive(:exist?).with(".nandiignore").and_return(false)
end
context "all files" do
let(:spec) { "all" }
it { is_expected.to eq(Set.new(files)) }
context "and some files are ignored" do
let(:nandiignore) { ignored_files.join("\n") }
let(:ignored_files) { ["db/migrate/20190402010101_do_thing_4.rb"] }
let(:expected) do
Set.new([
"20180402010101_do_thing_1.rb",
"20190101010101_do_thing_2.rb",
"20190102010101_do_thing_3.rb",
])
end
before do
allow(File).to receive(:exist?).with(".nandiignore").and_return(true)
allow(File).to receive(:read).with(".nandiignore").and_return(nandiignore)
end
it { is_expected.to eq(Set.new(expected)) }
end
end
context "git-diff" do
let(:spec) { "git-diff" }
before do
allow_any_instance_of(described_class).to receive(:files_from_git_status).
and_return(["20180402010101_do_thing_1.rb"])
end
it { is_expected.to eq(Set["20180402010101_do_thing_1.rb"]) }
end
context "timestamp" do
context "without operator" do
context "with full timestamp" do
let(:spec) { "20190101010101" }
it { is_expected.to eq(Set["20190101010101_do_thing_2.rb"]) }
end
context "with partial timestamp" do
let(:spec) { "2019" }
it "returns all matches" do
expect(match).to eq(Set[
"20190101010101_do_thing_2.rb",
"20190102010101_do_thing_3.rb",
"20190402010101_do_thing_4.rb",
])
end
end
end
context "with > operator" do
let(:spec) { ">20190101010101" }
it "returns all matches" do
expect(match).to eq(Set[
"20190102010101_do_thing_3.rb",
"20190402010101_do_thing_4.rb",
])
end
end
context "with >= operator" do
let(:spec) { ">=20190101010101" }
it "returns all matches" do
expect(match).to eq(Set[
"20190101010101_do_thing_2.rb",
"20190102010101_do_thing_3.rb",
"20190402010101_do_thing_4.rb",
])
end
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/lockfile_spec.rb | spec/nandi/lockfile_spec.rb | # frozen_string_literal: true
require "tempfile"
RSpec.describe Nandi::Lockfile do
before do
described_class.clear_instances!
allow(Nandi.config).to receive(:lockfile_directory).and_return(temp_dir)
end
let(:database) { :primary }
let(:temp_dir) { Dir.mktmpdir }
let(:lockfile_contents) { "--- {}\n" }
def write_lockfile!
File.write("#{temp_dir}/.nandilock.yml", lockfile_contents)
end
describe ".for" do
it "returns same instance for same database" do
lockfile1 = described_class.for(:primary)
lockfile2 = described_class.for(:primary)
expect(lockfile1).to be(lockfile2)
end
it "returns different instances for different databases" do
primary = described_class.for(:primary)
analytics = described_class.for(:analytics)
expect(primary).to_not be(analytics)
expect(primary.db_name).to eq(:primary)
expect(analytics.db_name).to eq(:analytics)
end
it "requires explicit db_name parameter" do
expect { described_class.for }.to raise_error(ArgumentError)
end
end
describe "#file_present?" do
let(:lockfile) { described_class.for(database) }
context "lockfile exists" do
before { write_lockfile! }
it { expect(lockfile.file_present?).to eq(true) }
end
context "doesn't exist" do
it { expect(lockfile.file_present?).to eq(false) }
end
end
describe "#create!" do
let(:lockfile) { described_class.for(database) }
it "creates a file" do
expect(File).to receive(:write).
with("#{temp_dir}/.nandilock.yml", "--- {}\n").
and_call_original
lockfile.create!
end
end
describe "#add" do
let(:lockfile) { described_class.for(database) }
let(:lockfile_contents) { "--- {}\n" }
before { write_lockfile! }
# rubocop:disable RSpec/ExampleLength
it "adds the digests to the instance" do
lockfile.add(
file_name: "file_name",
source_digest: "source_digest",
compiled_digest: "compiled_digest",
)
result = lockfile.get("file_name")
expect(result[:source_digest]).to eq("source_digest")
expect(result[:compiled_digest]).to eq("compiled_digest")
end
# rubocop:enable RSpec/ExampleLength
end
describe "#get" do
let(:lockfile) { described_class.for(database) }
let(:lockfile_contents) do
<<~YAML
---
migration1:
source_digest: "deadbeef1234"
compiled_digest: "deadbeef5678"
YAML
end
before { write_lockfile! }
it "retrieves the digests" do
expect(lockfile.get("migration1")).to eq(
source_digest: "deadbeef1234",
compiled_digest: "deadbeef5678",
)
end
end
describe "#persist!" do
let(:lockfile) { described_class.for(database) }
let(:expected_yaml) do
<<~YAML
---
foo:
source_digest: bar
compiled_digest: '5'
YAML
end
before do
write_lockfile!
lockfile.add(file_name: "foo", source_digest: "bar", compiled_digest: "5")
end
it "writes the existing file" do
expect(File).to receive(:write).with(
"#{temp_dir}/.nandilock.yml",
expected_yaml,
)
lockfile.persist!
end
context "with multiple keys, not sorted by their SHA-256 hash" do
let(:expected_yaml) do
<<~YAML
---
lower_hash:
source_digest: foo
compiled_digest: '5'
higher_hash:
source_digest: foo
compiled_digest: '5'
YAML
end
let(:test_lockfile) { described_class.for(:isolated_test_db) }
before do
allow(Nandi.config).to receive(:lockfile_path).with(:isolated_test_db).
and_return("#{temp_dir}/.isolated_nandilock.yml")
File.write("#{temp_dir}/.isolated_nandilock.yml", "--- {}\n")
test_lockfile.add(file_name: "higher_hash", source_digest: "foo", compiled_digest: "5")
test_lockfile.add(file_name: "lower_hash", source_digest: "foo", compiled_digest: "5")
end
it "sorts the keys by their SHA-256 hash" do
expect(File).to receive(:write).with(
"#{temp_dir}/.isolated_nandilock.yml",
expected_yaml,
)
test_lockfile.persist!
end
end
end
describe "multi-database support" do
let(:primary_db) { :primary }
let(:analytics_db) { :analytics }
before do
# Mock different lockfile paths for different databases
allow(Nandi.config).to receive(:lockfile_path).with(primary_db).
and_return("#{temp_dir}/.nandilock.yml")
allow(Nandi.config).to receive(:lockfile_path).with(analytics_db).
and_return("#{temp_dir}/.analytics_nandilock.yml")
end
describe "#file_present? with multiple databases" do
it "checks correct file for each database" do
# Create only primary lockfile
File.write("#{temp_dir}/.nandilock.yml", "--- {}\n")
expect(described_class.for(primary_db).file_present?).to be true
expect(described_class.for(analytics_db).file_present?).to be false
end
end
describe "#create! with multiple databases" do
it "creates separate lockfiles for different databases" do
expect(File).to receive(:write).with("#{temp_dir}/.nandilock.yml", "--- {}\n")
expect(File).to receive(:write).with("#{temp_dir}/.analytics_nandilock.yml", "--- {}\n")
described_class.for(primary_db).create!
described_class.for(analytics_db).create!
end
it "does not re-create existing lockfiles" do
File.write("#{temp_dir}/.nandilock.yml", "--- {}\n")
expect(File).to_not receive(:write).with("#{temp_dir}/.nandilock.yml", anything)
described_class.for(primary_db).create!
end
end
describe "#add with multiple databases" do
before do
File.write("#{temp_dir}/.nandilock.yml", "--- {}\n")
File.write("#{temp_dir}/.analytics_nandilock.yml", "--- {}\n")
end
let(:add_migrations_to_databases) do
primary_lockfile = described_class.for(primary_db)
primary_lockfile.add(
file_name: "primary_migration",
source_digest: "primary_source",
compiled_digest: "primary_compiled",
)
primary_lockfile.persist!
analytics_lockfile = described_class.for(analytics_db)
analytics_lockfile.add(
file_name: "analytics_migration",
source_digest: "analytics_source",
compiled_digest: "analytics_compiled",
)
analytics_lockfile.persist!
end
# rubocop: disable RSpec/ExampleLength
it "adds migrations to correct database lockfile" do
add_migrations_to_databases
primary_lockfile = described_class.for(primary_db)
analytics_lockfile = described_class.for(analytics_db)
expect(primary_lockfile.get("primary_migration")[:source_digest]).
to eq("primary_source")
expect(analytics_lockfile.get("analytics_migration")[:source_digest]).
to eq("analytics_source")
expect(primary_lockfile.get("analytics_migration")[:source_digest]).to be_nil
expect(analytics_lockfile.get("primary_migration")[:source_digest]).to be_nil
end
# rubocop: enable RSpec/ExampleLength
end
describe "#get with multiple databases" do
before do
primary_content = <<~YAML
---
shared_name:
source_digest: "primary_digest"
compiled_digest: "primary_compiled"
YAML
analytics_content = <<~YAML
---
shared_name:
source_digest: "analytics_digest"
compiled_digest: "analytics_compiled"
YAML
File.write("#{temp_dir}/.nandilock.yml", primary_content)
File.write("#{temp_dir}/.analytics_nandilock.yml", analytics_content)
end
it "retrieves migration from correct database" do
primary_result = described_class.for(primary_db).get("shared_name")
analytics_result = described_class.for(analytics_db).get("shared_name")
expect(primary_result[:source_digest]).to eq("primary_digest")
expect(analytics_result[:source_digest]).to eq("analytics_digest")
end
end
describe "#persist! with multiple databases" do
it "writes only the specific database lockfile" do
# Setup data in primary lockfile
primary_lockfile = described_class.for(primary_db)
File.write("#{temp_dir}/.nandilock.yml", "--- {}\n")
primary_lockfile.add(file_name: "migration1", source_digest: "foo", compiled_digest: "bar")
expect(File).to receive(:write).with("#{temp_dir}/.nandilock.yml", anything)
expect(File).to_not receive(:write).with("#{temp_dir}/.analytics_nandilock.yml", anything)
primary_lockfile.persist!
end
end
end
describe "database validation and error handling" do
describe "with invalid database configuration" do
before do
allow(Nandi.config).to receive(:lockfile_path).with(:invalid_db).and_call_original
end
it "propagates configuration errors" do
expect { described_class.for(:invalid_db).file_present? }.
to raise_error(ArgumentError, "Missing database configuration for invalid_db")
end
end
describe "with missing lockfile directory" do
let(:nonexistent_dir) { "/nonexistent/directory" }
before do
allow(Nandi.config).to receive(:lockfile_path).with(:test_db).
and_return("#{nonexistent_dir}/.test_nandilock.yml")
end
it "handles missing directory gracefully on file_present?" do
expect(described_class.for(:test_db).file_present?).to be false
end
it "raises error on create! with missing directory" do
expect { described_class.for(:test_db).create! }.
to raise_error(Errno::ENOENT)
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/compiled_migration_spec.rb | spec/nandi/compiled_migration_spec.rb | # frozen_string_literal: true
require "digest"
require "nandi/migration"
RSpec.describe Nandi::CompiledMigration do
let(:renderer) do
Class.new(Object) do
def self.generate(migration); end
end
end
let(:base_path) do
File.join(
File.dirname(__FILE__),
"/fixtures/example_migrations",
)
end
let(:valid_migration) { "20180104120000_my_migration.rb" }
let(:invalid_migration) { "20180104120000_my_invalid_migration.rb" }
let(:invalid_index_migration) do
"20180104120000_my_invalid_index_migration.rb"
end
let(:source_contents) { "source_migration" }
let(:compiled_contents) { "compiled_migration" }
let(:expected_source_digest) { Digest::SHA256.hexdigest(source_contents) }
let(:expected_compiled_digest) { Digest::SHA256.hexdigest(compiled_contents) }
let(:lockfile) do
lockfile_contents = {
"20180104120000_my_migration.rb": {
source_digest: expected_source_digest,
compiled_digest: expected_compiled_digest,
},
}
StringIO.new(lockfile_contents.deep_stringify_keys.to_yaml)
end
let(:file) { valid_migration }
let(:db_name) { nil }
let(:compiled_migration) { described_class.new(file_name: file, db_name: db_name) }
before do
Nandi.instance_variable_set(:@config, nil) # Reset config
allow(File).to receive(:read).with(Nandi.config.lockfile_path(db_name)).and_return(lockfile)
allow(File).to receive(:write).with(Nandi.config.lockfile_path(db_name)).and_return(lockfile)
Nandi.configure do |config|
config.renderer = renderer
config.migration_directory = base_path
end
end
describe "#body" do
subject(:body) { compiled_migration.body }
context "when the migration has changed" do
let(:file) { valid_migration }
let(:source_contents) { "contents_changed" }
it "compiles the migration" do
expect(renderer).to receive(:generate) do |migration|
expect(migration).to be_a(Nandi::Migration)
expect(migration.name).to eq("MyMigration")
end
body
end
end
context "invalid migration" do
let(:file) { invalid_migration }
it "raises an InvalidMigrationError" do
expect { body }.to raise_error(
described_class::InvalidMigrationError,
/creating more than one index per migration/,
)
end
end
context "when both migrations are unchanged" do
let(:file) { valid_migration }
it "doesn't compile the migration" do
expect(renderer).to_not receive(:generate)
end
end
end
describe "#output_path" do
subject(:output_path) { compiled_migration.output_path }
it "has an output path" do
expect(output_path).to eq("db/migrate/#{File.basename(file)}")
end
end
describe "#compiled_digest" do
subject(:compiled_digest) { compiled_migration.compiled_digest }
it "returns the digest of the compiled migration" do
allow_any_instance_of(described_class).to receive(:body).
and_return(compiled_contents)
expect(compiled_digest).to eq(expected_compiled_digest)
end
end
describe "#source_digest" do
subject(:source_digest) { compiled_migration.source_digest }
it "returns the digest of the source migration" do
allow(File).to receive(:read).and_return(source_contents)
expect(source_digest).to eq(expected_source_digest)
end
end
describe "nil db_name handling" do
context "when db_name is nil" do
let(:db_name) { nil }
it "defaults to primary database" do
migration = described_class.new(file_name: valid_migration, db_name: nil)
expect(migration.db_name).to eq(:primary)
end
it "uses primary database configuration" do
migration = described_class.new(file_name: valid_migration, db_name: nil)
expect(migration.output_path).to eq("db/migrate/#{valid_migration}")
end
end
context "when db_name is explicitly provided" do
before do
Nandi.config.register_database(:analytics,
migration_directory: base_path,
output_directory: "db/analytics_migrate")
end
it "uses the specified database" do
migration = described_class.new(file_name: valid_migration, db_name: :analytics)
expect(migration.db_name).to eq(:analytics)
expect(migration.output_path).to eq("db/analytics_migrate/#{valid_migration}")
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/config_spec.rb | spec/nandi/config_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/config"
RSpec.describe Nandi::Config do
subject(:config) { described_class.new }
before do
Nandi.instance_variable_set(:@config, nil) # Reset config
config.lockfile_directory = "db"
end
context "multi-database integration" do
before do
config.register_database(
:primary,
migration_directory: primary_migration_directory,
output_directory: primary_output_directory,
)
config.register_database(
:analytics,
migration_directory: analytics_migration_directory,
output_directory: analytics_output_directory,
)
end
let(:primary_migration_directory) { "db/migration_directory" }
let(:primary_output_directory) { "db/output_directory" }
let(:analytics_migration_directory) { "db/analytics_migrations" }
let(:analytics_output_directory) { "db/migrate/analytics" }
it "returns the correct list of names" do
expect(config.databases.names).to eq(%i[primary analytics])
end
it "throws an error if database does not exist" do
expect { config.migration_directory(:nonexistent) }.to raise_error(ArgumentError)
end
it "returns database-specific migration directories" do
expect(config.migration_directory(:primary)).to eq(primary_migration_directory)
expect(config.migration_directory(:analytics)).to eq(analytics_migration_directory)
end
it "returns database-specific output directories" do
expect(config.output_directory(:primary)).to eq(primary_output_directory)
expect(config.output_directory(:analytics)).to eq(analytics_output_directory)
end
it "delegates the lockfile path" do
config.register_database(:new, lockfile_name: ".my_nandilock.yml")
expect(config.lockfile_path(:new)).to eq("db/.my_nandilock.yml")
end
it "returns primary config if name not specified" do
expect(config.migration_directory).to eq(primary_migration_directory)
expect(config.output_directory).to eq(primary_output_directory)
end
it "returns database-specific lockfile paths in multi-database mode" do
expect(config.lockfile_path(:primary)).to eq("db/.nandilock.yml")
expect(config.lockfile_path(:analytics)).to eq("db/.analytics_nandilock.yml")
end
it "raises error for invalid database in lockfile_path" do
expect do
config.lockfile_path(:nonexistent)
end.to raise_error(ArgumentError, "Missing database configuration for nonexistent")
end
context "with default multi-database configuration" do
let(:primary_migration_directory) { nil }
let(:primary_output_directory) { nil }
it "uses default database config" do
expect(config.migration_directory(:primary)).to eq("db/safe_migrations")
expect(config.output_directory(:primary)).to eq("db/migrate")
expect(config.lockfile_path(:primary)).to eq("db/.nandilock.yml")
end
end
end
context "with single database configuration" do
it "returns :primary for the database name" do
expect(config.databases.names).to eq([:primary])
end
it "returns default directory if name not specified" do
expect(config.migration_directory).to eq("db/safe_migrations")
expect(config.output_directory).to eq("db/migrate")
end
it "raises error for unknown db name" do
expect do
config.migration_directory(:any_db)
end.to raise_error(ArgumentError, "Missing database configuration for any_db")
end
it "respects overriding paths" do
config.migration_directory = "db/safe_migrations/override"
config.output_directory = "db/migrate/override"
expect(config.migration_directory).to eq("db/safe_migrations/override")
expect(config.output_directory).to eq("db/migrate/override")
end
it "returns default lockfile path in single-database mode" do
expect(config.lockfile_path).to eq("db/.nandilock.yml")
end
end
context "delegation to MultiDatabase" do
it "delegates default database behavior to MultiDatabase" do
config.register_database(:main, migration_directory: "db/main", default: true)
config.register_database(:analytics, migration_directory: "db/analytics")
# Config should delegate to MultiDatabase for default behavior
expect(config.default.name).to eq(:main)
expect(config.migration_directory).to eq("db/main") # Uses default when no name specified
end
it "delegates validation to MultiDatabase" do
config.register_database(:db1, migration_directory: "db/db1")
config.register_database(:db2, migration_directory: "db/db2")
# Should raise error from MultiDatabase validation
expect { config.validate! }.to raise_error(ArgumentError, /Missing default database/)
end
end
context "validation" do
it "prevents mixing single and multi-database configuration with migration_directory" do
config.migration_directory = "db/custom"
config.register_database(:test, migration_directory: "db/test")
expect { config.validate! }.to raise_error(
ArgumentError, /Cannot use multi and single database config/
)
end
it "delegates multi-database validation to multi_database" do
expect_any_instance_of(Nandi::MultiDatabase).to receive(:validate!)
config.register_database(:primary, migration_directory: "db/primary")
config.validate!
end
it "validates successfully with single-database configuration" do
config.migration_directory = "db/custom"
expect { config.validate! }.to_not raise_error
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/validation/each_validator_spec.rb | spec/nandi/validation/each_validator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/validation/add_index_validator"
require "nandi/validation/remove_index_validator"
require "nandi/migration"
require "nandi/instructions"
RSpec.describe Nandi::Validation::EachValidator do
subject(:call) { described_class.call(instruction) }
describe "#call" do
context "when the given instruction is to remove an index" do
let(:instruction) { instance_double(Nandi::Instructions::RemoveIndex) }
before do
allow(instruction).to receive(:procedure).and_return(:remove_index)
end
it "calls RemoveIndexValidator" do
expect(Nandi::Validation::RemoveIndexValidator).to receive(:call).
with(instruction)
call
end
end
context "when the given instruction is to add a column" do
let(:instruction) { instance_double(Nandi::Instructions::AddColumn) }
before do
allow(instruction).to receive(:procedure).and_return(:add_column)
end
it "calls AddColumnValidator" do
expect(Nandi::Validation::AddColumnValidator).to receive(:call).with(instruction)
call
end
end
context "when the given instruction is to add a reference" do
let(:instruction) { instance_double(Nandi::Instructions::AddReference) }
before do
allow(instruction).to receive(:procedure).and_return(:add_reference)
end
it "calls AddReferenceValidator" do
expect(Nandi::Validation::AddReferenceValidator).to receive(:call).
with(instruction)
call
end
end
context "when the given instruction is to add an index" do
let(:instruction) { instance_double(Nandi::Instructions::AddIndex) }
before do
allow(instruction).to receive(:procedure).and_return(:add_index)
end
it "calls AddIndexValidator" do
expect(Nandi::Validation::AddIndexValidator).to receive(:call).
with(instruction)
call
end
end
context "when the given instruction isn't explicitly validated" do
let(:instruction) { instance_double(Nandi::Instructions::AddForeignKey) }
before do
allow(instruction).to receive(:procedure).and_return(:add_foreign_key)
end
it "returns successful" do
expect(call).to eq(Dry::Monads::Result::Success.new(nil))
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/validation/add_index_validator_spec.rb | spec/nandi/validation/add_index_validator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/validation/add_index_validator"
require "nandi/migration"
require "nandi/instructions"
RSpec.describe Nandi::Validation::AddIndexValidator do
subject(:validator) { described_class.call(instruction) }
describe "with a hash index in the contained migration" do
let(:instruction) do
Nandi::Instructions::AddIndex.new(
table: :payments,
fields: [:foo],
using: :hash,
)
end
it { is_expected.to be_failure }
end
describe "without a hash index in the contained migration" do
let(:instruction) do
Nandi::Instructions::AddIndex.new(
table: :payments,
fields: [:foo],
)
end
it { is_expected.to be_success }
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/validation/timeout_validator_spec.rb | spec/nandi/validation/timeout_validator_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/validation/timeout_validator"
require "nandi/migration"
require "nandi/instructions"
RSpec.describe Nandi::Validation::TimeoutValidator do
subject(:validator) { described_class.call(migration) }
let(:statement_timeout) { 1_000 }
let(:lock_timeout) { 750 }
let(:migration) do
instance_double(Nandi::Migration,
up_instructions: instructions,
down_instructions: [],
statement_timeout: statement_timeout,
lock_timeout: lock_timeout)
end
before do
allow(migration).to receive_messages(disable_statement_timeout?: false, disable_lock_timeout?: false)
allow(Nandi.config).to receive(:access_exclusive_lock_timeout_limit).
and_return(750)
allow(Nandi.config).to receive(:access_exclusive_statement_timeout_limit).
and_return(1500)
allow(Nandi.config).to receive(:access_exclusive_lock_timeout_limit).
and_return(750)
end
context "with an ACCESS EXCLUSIVE instruction" do
let(:instructions) do
[
Nandi::Instructions::AddColumn.new(
table: :payments,
name: :stuff,
type: :text,
null: true,
default: "swilly!",
),
]
end
it { is_expected.to be_success }
context "with timeouts disabled" do
before do
allow(migration).to receive_messages(disable_statement_timeout?: true, disable_lock_timeout?: true)
end
it { is_expected.to be_failure }
end
context "with too great a statement timeout" do
let(:statement_timeout) { 1501 }
it { is_expected.to be_failure }
end
context "with too great a lock timeout" do
let(:lock_timeout) { 751 }
it { is_expected.to be_failure }
end
end
context "creating an index" do
let(:instructions) do
[
Nandi::Instructions::AddIndex.new(
table: :payments,
fields: [:foo],
),
]
end
context "with huge timeouts set" do
let(:lock_timeout) { Float::INFINITY }
let(:statement_timeout) { Float::INFINITY }
it { is_expected.to be_success }
end
context "with too-low statement timeout" do
let(:lock_timeout) { Float::INFINITY }
let(:statement_timeout) { 3_599_999 }
it { is_expected.to be_failure }
end
end
context "removing an index" do
let(:instructions) do
[
Nandi::Instructions::RemoveIndex.new(
table: :payments,
field: :foo,
),
]
end
context "with timeouts disabled" do
before do
allow(migration).to receive_messages(disable_statement_timeout?: true, disable_lock_timeout?: true)
end
it { is_expected.to be_success }
end
context "with huge timeouts set" do
let(:lock_timeout) { Float::INFINITY }
let(:statement_timeout) { Float::INFINITY }
it { is_expected.to be_success }
end
context "with too-low statement timeout" do
let(:lock_timeout) { Float::INFINITY }
let(:statement_timeout) { 3_599_999 }
it { is_expected.to be_failure }
end
context "with too-low lock timeout" do
let(:statement_timeout) { Float::INFINITY }
let(:lock_timeout) { 3_599_999 }
it { is_expected.to be_failure }
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/timeout_policies/access_exclusive_spec.rb | spec/nandi/timeout_policies/access_exclusive_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/migration"
require "nandi/timeout_policies"
require "nandi/timeout_policies/access_exclusive"
RSpec.describe Nandi::TimeoutPolicies::AccessExclusive do
describe "::validate" do
subject(:validate) { described_class.validate(migration) }
let(:migration) do
instance_double(Nandi::Migration,
statement_timeout: statement_timeout,
lock_timeout: lock_timeout)
end
before do
allow(migration).to receive_messages(disable_statement_timeout?: false, disable_lock_timeout?: false)
allow(Nandi.config).to receive_messages(access_exclusive_statement_timeout_limit: 1500,
access_exclusive_lock_timeout_limit: 750)
end
context "with valid timeouts" do
let(:statement_timeout) { 1499 }
let(:lock_timeout) { 749 }
it { is_expected.to be_success }
end
context "with too-long statement timeout" do
let(:statement_timeout) { 1501 }
let(:lock_timeout) { 749 }
it { is_expected.to be_failure }
it "yields an informative message" do
expect(validate.failure).
to eq([
"statement timeout must be at most 1500ms " \
"as it takes an ACCESS EXCLUSIVE lock",
])
end
end
context "with disabled statement timeout" do
let(:statement_timeout) { 1500 }
let(:lock_timeout) { 749 }
before do
allow(migration).to receive(:disable_statement_timeout?).
and_return(true)
end
it { is_expected.to be_failure }
it "yields an informative message" do
expect(validate.failure).
to eq([
"statement timeout must be at most 1500ms " \
"as it takes an ACCESS EXCLUSIVE lock",
])
end
end
context "with too-long lock timeout" do
let(:statement_timeout) { 1499 }
let(:lock_timeout) { 751 }
it { is_expected.to be_failure }
it "yields an informative message" do
expect(validate.failure).
to eq([
"lock timeout must be at most 750ms as it takes an ACCESS EXCLUSIVE lock",
])
end
end
context "with disabled lock timeout" do
let(:statement_timeout) { 1499 }
let(:lock_timeout) { 749 }
before do
allow(migration).to receive(:disable_lock_timeout?).
and_return(true)
end
it { is_expected.to be_failure }
it "yields an informative message" do
expect(validate.failure).
to eq([
"lock timeout must be at most 750ms as it takes an ACCESS EXCLUSIVE lock",
])
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/instructions/add_index_spec.rb | spec/nandi/instructions/add_index_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/instructions/add_index"
RSpec.describe Nandi::Instructions::AddIndex do
let(:instance) do
described_class.new(
fields: fields,
table: table,
**extra_args,
)
end
let(:fields) { :foo }
let(:extra_args) { {} }
let(:table) { :widgets }
describe "#fields" do
subject(:result) { instance.fields }
context "with one field" do
context "specified without an Array" do
let(:fields) { :foo }
it { is_expected.to eq(:foo) }
end
context "specified as an Array" do
let(:fields) { [:foo] }
it { is_expected.to eq(:foo) }
end
end
context "with an array of fields" do
let(:fields) { %i[foo bar] }
it { is_expected.to eq(%i[foo bar]) }
end
end
describe "#table" do
let(:table) { :thingumyjiggers }
it "exposes the initial value" do
expect(instance.table).to eq(:thingumyjiggers)
end
end
describe "#extra_args" do
subject(:args) { instance.extra_args }
context "with no extra args passed" do
let(:extra_args) { {} }
it "sets appropriate defaults" do
expect(args).to eq(
algorithm: :concurrently,
using: :btree,
name: :idx_widgets_on_foo,
)
end
context "with fields containing operators" do
let(:fields) { "((reports::json->>'source_id'))" }
it "generates a readable index name" do
expect(args[:name]).to eq(:idx_widgets_on_reports_json_source_id)
end
end
end
context "with custom name" do
let(:extra_args) { { name: :my_amazing_index } }
it "allows override" do
expect(args).to eq(
algorithm: :concurrently,
using: :btree,
name: :my_amazing_index,
)
end
end
context "with custom using" do
let(:extra_args) { { using: :hash } }
it "allows override" do
expect(args).to eq(
algorithm: :concurrently,
using: :hash,
name: :idx_widgets_on_foo,
)
end
end
context "with custom algorithm" do
let(:extra_args) { { algorithm: :paxos } }
it "does not allow override" do
expect(args).to eq(
algorithm: :concurrently,
using: :btree,
name: :idx_widgets_on_foo,
)
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/instructions/remove_index_spec.rb | spec/nandi/instructions/remove_index_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/instructions/remove_index"
RSpec.describe Nandi::Instructions::RemoveIndex do
let(:instance) { described_class.new(table: table, field: field) }
let(:table) { :widgets }
let(:field) { :foo }
describe "#table" do
let(:table) { :thingumyjiggers }
it "exposes the initial value" do
expect(instance.table).to eq(:thingumyjiggers)
end
end
describe "#extra_args" do
subject(:args) { instance.extra_args }
context "with a field" do
it { is_expected.to eq(column: :foo, algorithm: :concurrently) }
end
context "with an array of fields" do
let(:field) { %i[foo bar] }
it { is_expected.to eq(column: %i[foo bar], algorithm: :concurrently) }
end
context "with a hash of arguments" do
let(:field) { { name: :my_useless_index } }
it "adds the algorithm: :concurrently setting" do
expect(args).to eq(
name: :my_useless_index,
algorithm: :concurrently,
)
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/example_migrations/20180104120000_my_invalid_index_migration.rb | spec/nandi/fixtures/example_migrations/20180104120000_my_invalid_index_migration.rb | # frozen_string_literal: true
class MyInvalidIndexMigration < Nandi::Migration
def up
add_index :payments, :foo, using: :gin
end
def down
remove_index :payments, :foo
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/example_migrations/20180104120000_my_invalid_migration.rb | spec/nandi/fixtures/example_migrations/20180104120000_my_invalid_migration.rb | # frozen_string_literal: true
class MyInvalidMigration < Nandi::Migration
def up
add_index :payments, :foo
add_index :payments, :bar
end
def down
remove_index :payments, :foo
remove_index :payments, :bar
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/example_migrations/20180104120001_create_my_table.rb | spec/nandi/fixtures/example_migrations/20180104120001_create_my_table.rb | # frozen_string_literal: true
class MyMigration < Nandi::Migration
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
end
end
def down
drop_table :payments
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/example_migrations/20180104120000_my_migration.rb | spec/nandi/fixtures/example_migrations/20180104120000_my_migration.rb | # frozen_string_literal: true
class MyMigration < Nandi::Migration
def up
add_index :payments, :foo
end
def down
remove_index :payments, :foo
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/irreversible_migration.rb | spec/nandi/fixtures/rendered/active_record/irreversible_migration.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
remove_column(
:payments,
:amount,
**{
}
)
end
def down
raise ActiveRecord::IrreversibleMigration
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/validate_constraint.rb | spec/nandi/fixtures/rendered/active_record/validate_constraint.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
disable_lock_timeout!
disable_statement_timeout!
def up
execute <<-SQL
ALTER TABLE payments VALIDATE CONSTRAINT payments_mandates_fk
SQL
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_column.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_column.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
add_column(
:payments,
:foo,
:text,
**{
collate: :de_DE
}
)
end
def down
remove_column(
:payments,
:foo,
**{
cascade: true
}
)
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_table_with_extra_args.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_table_with_extra_args.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
create_table :payments, id: false, force: true do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
end
end
def down
drop_table :payments
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_index.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_index.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
disable_lock_timeout!
disable_statement_timeout!
disable_ddl_transaction!
def up
add_index(
:payments,
[:foo, :bar],
**{
name: :idx_payments_on_foo_bar,
using: :btree,
algorithm: :concurrently
}
)
end
def down
remove_index(
:payments,
**{
column: [:foo, :bar],
algorithm: :concurrently
}
)
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/remove_not_null_constraint.rb | spec/nandi/fixtures/rendered/active_record/remove_not_null_constraint.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
change_column_null :payments, :colours, true
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/custom_instruction_with_mixins.rb | spec/nandi/fixtures/rendered/active_record/custom_instruction_with_mixins.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
include My::Important::Mixin
include My::Other::Mixin
disable_lock_timeout!
disable_statement_timeout!
def up
new_method
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_reference.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_reference.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
add_reference(
:payments,
:mandate,
**{
index: false,
type: :text
}
)
end
def down
remove_reference(
:payments,
:mandate,
**{
}
)
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_index_timeouts.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_index_timeouts.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(5000)
disable_ddl_transaction!
def up
add_index(
:payments,
[:foo, :bar],
**{
name: :idx_payments_on_foo_bar,
using: :btree,
algorithm: :concurrently
}
)
end
def down
remove_index(
:payments,
**{
column: [:foo, :bar],
algorithm: :concurrently
}
)
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_table_with_timestamps.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_table_with_timestamps.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
t.timestamps null: false
end
end
def down
drop_table :payments
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_table.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_table.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
end
end
def down
drop_table :payments
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/custom_instruction.rb | spec/nandi/fixtures/rendered/active_record/custom_instruction.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
disable_lock_timeout!
disable_statement_timeout!
def up
new_method
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/change_column_default.rb | spec/nandi/fixtures/rendered/active_record/change_column_default.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
change_column_default :payments, :colour, "blue"
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/custom_instruction_with_block.rb | spec/nandi/fixtures/rendered/active_record/custom_instruction_with_block.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
disable_lock_timeout!
disable_statement_timeout!
def up
new_method block rockin' beats
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/add_foreign_key.rb | spec/nandi/fixtures/rendered/active_record/add_foreign_key.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
add_foreign_key(
:payments,
:mandates,
**{
column: :zalgo_comes,
name: :payments_mandates_fk,
validate: false
}
)
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_table_with_timestamps_and_not_args.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_table_with_timestamps_and_not_args.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
t.timestamps
end
end
def down
drop_table :payments
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/create_and_drop_index_with_hash.rb | spec/nandi/fixtures/rendered/active_record/create_and_drop_index_with_hash.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
disable_lock_timeout!
disable_statement_timeout!
disable_ddl_transaction!
def up
add_index(
:payments,
[:foo, :bar],
**{
name: :idx_payments_on_foo_bar,
using: :hash,
algorithm: :concurrently
}
)
end
def down
remove_index(
:payments,
**{
column: [:foo, :bar],
algorithm: :concurrently
}
)
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/add_check_constraint.rb | spec/nandi/fixtures/rendered/active_record/add_check_constraint.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
execute <<-SQL
ALTER TABLE payments
ADD CONSTRAINT check
CHECK (foo IS NOT NULL)
NOT VALID
SQL
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/fixtures/rendered/active_record/drop_constraint.rb | spec/nandi/fixtures/rendered/active_record/drop_constraint.rb | class MyAwesomeMigration < ActiveRecord::Migration[8.0]
set_lock_timeout(5000)
set_statement_timeout(1500)
def up
execute <<-SQL
ALTER TABLE payments DROP CONSTRAINT payments_mandates_fk
SQL
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
gocardless/nandi | https://github.com/gocardless/nandi/blob/bcacafc9bd79a08d4cb7f43e21a776f42d916de2/spec/nandi/renderers/active_record_spec.rb | spec/nandi/renderers/active_record_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "nandi/renderers/active_record"
require "nandi/migration"
require "nandi/validator"
RSpec.describe Nandi::Renderers::ActiveRecord do
describe "::generate" do
subject(:migration) do
described_class.generate(safe_migration.new(Nandi::Validator))
end
let(:fixture_root) do
File.join(
File.dirname(__FILE__),
"../fixtures/rendered/active_record",
)
end
let(:current_rails_version) do
ActiveRecord::Migration.current_version
end
def normalize_fixture(content)
content.gsub(/ActiveRecord::Migration\[\d+\.\d+\]/, "ActiveRecord::Migration[#{current_rails_version}]")
end
describe "adding and dropping an index" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_index.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
add_index :payments, %i[foo bar]
end
def down
remove_index :payments, %i[foo bar]
end
end
end
it { is_expected.to eq(fixture) }
context "with custom timeouts" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_index_timeouts.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
set_statement_timeout(5000)
set_lock_timeout(5000)
def self.name
"MyAwesomeMigration"
end
def up
add_index :payments, %i[foo bar]
end
def down
remove_index :payments, %i[foo bar]
end
end
end
it { is_expected.to eq(fixture) }
end
end
describe "creating and dropping a table" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_table.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
end
end
def down
drop_table :payments
end
end
end
it { is_expected.to eq(fixture) }
context "with timestamps with args" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_table_with_timestamps.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
t.timestamps null: false
end
end
def down
drop_table :payments
end
end
end
it { is_expected.to eq(fixture) }
end
context "with extra args" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_table_with_extra_args.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
create_table :payments, id: false, force: true do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
end
end
def down
drop_table :payments
end
end
end
it { is_expected.to eq(fixture) }
end
context "with timestamps without args" do
let(:fixture) do
normalize_fixture(
File.read(
File.join(
fixture_root,
"create_and_drop_table_with_timestamps_and_not_args.rb",
),
),
)
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
create_table :payments do |t|
t.column :payer, :string
t.column :ammount, :float
t.column :payed, :bool, default: false
t.timestamps
end
end
def down
drop_table :payments
end
end
end
it { is_expected.to eq(fixture) }
end
end
describe "adding and dropping an column" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_column.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
add_column :payments, :foo, :text, collate: :de_DE
end
def down
remove_column :payments, :foo, cascade: true
end
end
end
it { is_expected.to eq(fixture) }
end
describe "adding and dropping an reference" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "create_and_drop_reference.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
add_reference :payments, :mandate, type: :text
end
def down
remove_reference :payments, :mandate
end
end
end
it { is_expected.to eq(fixture) }
end
describe "#add_foreign_key" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "add_foreign_key.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
add_foreign_key :payments, :mandates, column: :zalgo_comes
end
def down; end
end
end
it { is_expected.to eq(fixture) }
end
describe "#add_check_constraint" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "add_check_constraint.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
add_check_constraint :payments, :check, "foo IS NOT NULL"
end
def down; end
end
end
it { is_expected.to eq(fixture) }
end
describe "#change_column_default" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "change_column_default.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
change_column_default :payments, :colour, "blue"
end
def down; end
end
end
it { is_expected.to eq(fixture) }
end
describe "#validate_constraint" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "validate_constraint.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
validate_constraint :payments, :payments_mandates_fk
end
def down; end
end
end
it { is_expected.to eq(fixture) }
end
describe "#drop_constraint" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "drop_constraint.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
drop_constraint :payments, :payments_mandates_fk
end
def down; end
end
end
it { is_expected.to eq(fixture) }
end
describe "#remove_not_null_constraint" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "remove_not_null_constraint.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
remove_not_null_constraint :payments, :colours
end
def down; end
end
end
it { is_expected.to eq(fixture) }
end
describe "#irreversible_migration" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "irreversible_migration.rb")))
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
remove_column :payments, :amount
end
def down
irreversible_migration
end
end
end
it { is_expected.to eq(fixture) }
end
describe "custom instructions" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "custom_instruction.rb")))
end
let(:extension) do
Class.new do
attr_reader :foo, :bar
def initialize(foo, bar, **_kwargs)
@foo = foo
@bar = bar
end
def procedure
:new_method
end
def template
Class.new(Cell::ViewModel) do
def show
"new_method"
end
end
end
def lock
Nandi::Migration::LockWeights::SHARE
end
end
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
new_method :arg1, :arg2
end
def down; end
end
end
before do
Nandi.configure do |c|
c.register_method :new_method, extension
end
end
after do
Nandi.config.custom_methods.delete(:new_method)
end
it { is_expected.to eq(fixture) }
context "with a mixin" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "custom_instruction_with_mixins.rb")))
end
let(:extension) do
Class.new do
attr_reader :foo, :bar
def initialize(foo, bar, **_kwargs)
@foo = foo
@bar = bar
end
def procedure
:new_method
end
def mixins
[
Class.new do
def self.name
"My::Important::Mixin"
end
end,
Class.new do
def self.name
"My::Other::Mixin"
end
end,
]
end
def template
Class.new(Cell::ViewModel) do
def show
"new_method"
end
end
end
def lock
Nandi::Migration::LockWeights::SHARE
end
end
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
new_method :arg1, :arg2
end
def down; end
end
end
before do
Nandi.configure do |c|
c.register_method :new_method, extension
end
end
after do
Nandi.config.custom_methods.delete(:new_method)
end
it { is_expected.to eq(fixture) }
end
context "with a block argument" do
let(:fixture) do
normalize_fixture(File.read(File.join(fixture_root, "custom_instruction_with_block.rb")))
end
let(:extension) do
Class.new do
def procedure
:new_method
end
def initialize(**_kwargs)
@block_result = yield
end
attr_reader :block_result
def lock
Nandi::Migration::LockWeights::SHARE
end
def template
Class.new(Cell::ViewModel) do
property :block_result
def show
"new_method #{block_result}"
end
end
end
end
end
let(:safe_migration) do
Class.new(Nandi::Migration) do
def self.name
"MyAwesomeMigration"
end
def up
new_method { "block rockin' beats" }
end
def down; end
end
end
before do
Nandi.configure do |c|
c.register_method :new_method, extension
end
end
after do
Nandi.config.custom_methods.delete(:new_method)
end
it { is_expected.to eq(fixture) }
end
end
end
end
| ruby | MIT | bcacafc9bd79a08d4cb7f43e21a776f42d916de2 | 2026-01-04T17:52:04.110442Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.