repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/change_column_test.rb | test/change_column_test.rb | require_relative "test_helper"
# Tests for strong_migrations' change_column check.
# A blanket type change is unsafe (full table rewrite); a set of
# known-safe type transitions (mostly Postgres-specific) are allowed.
# MySQL/MariaDB differ from Postgres around varchar limit increases.
class ChangeColumnTest < Minitest::Test
def test_unsafe
assert_unsafe ChangeColumn
end
def test_varchar_to_text
skip unless postgresql?
assert_safe ChangeColumnVarcharToText
end
def test_varchar_to_citext
skip unless postgresql?
assert_safe ChangeColumnVarcharToCitext
end
def test_varchar_increase_limit
assert_safe ChangeColumnVarcharIncreaseLimit
end
# crossing the 64-char boundary is only safe on Postgres
def test_varchar_increase_limit_over_64
if postgresql?
assert_safe ChangeColumnVarcharIncreaseLimit64
elsif mysql? || mariadb?
assert_unsafe ChangeColumnVarcharIncreaseLimit64
end
end
def test_varchar_increase_limit_over_256
if postgresql?
assert_safe ChangeColumnVarcharIncreaseLimit256
elsif mysql? || mariadb?
assert_unsafe ChangeColumnVarcharIncreaseLimit256
end
end
def test_varchar_decrease_limit
assert_unsafe ChangeColumnVarcharDecreaseLimit
end
def test_varchar_remove_limit
skip unless postgresql?
assert_safe ChangeColumnVarcharRemoveLimit
end
def test_text_to_varchar_limit
skip unless postgresql?
assert_unsafe ChangeColumnTextToVarcharLimit
end
def test_text_to_varchar_no_limit
skip unless postgresql?
assert_safe ChangeColumnTextToVarcharNoLimit
end
def test_varchar_add_limit
skip unless postgresql?
assert_unsafe ChangeColumnVarcharAddLimit
end
def test_text_to_citext
skip unless postgresql?
assert_safe ChangeColumnTextToCitext
end
# citext changes are unsafe when the column is indexed (index rebuild)
def test_text_to_citext_indexed
skip unless postgresql?
assert_unsafe ChangeColumnTextToCitextIndexed
end
def test_text_to_citext_indexed_expression
skip unless postgresql?
assert_unsafe ChangeColumnTextToCitextIndexedExpression
end
def test_citext_to_text
skip unless postgresql?
assert_safe ChangeColumnCitextToText
end
def test_citext_to_text_indexed
skip unless postgresql?
assert_unsafe ChangeColumnCitextToTextIndexed
end
def test_citext_to_varchar_limit
skip unless postgresql?
assert_unsafe ChangeColumnCitextToVarcharLimit
end
def test_citext_to_varchar_no_limit
skip unless postgresql?
assert_safe ChangeColumnCitextToVarcharNoLimit
end
def test_citext_to_varchar_no_limit_indexed
skip unless postgresql?
assert_unsafe ChangeColumnCitextToVarcharNoLimitIndexed
end
def test_decimal_decrease_precision
skip unless postgresql?
assert_unsafe ChangeColumnDecimalDecreasePrecision
end
def test_decimal_change_scale
skip unless postgresql?
assert_unsafe ChangeColumnDecimalChangeScale
end
def test_decimal_increase_precision
skip unless postgresql?
assert_safe ChangeColumnDecimalIncreasePrecision
end
def test_decimal_unconstrained
skip unless postgresql?
# NOTE(review): asserts the same migration as test_decimal_increase_precision;
# presumably this should use a ChangeColumnDecimalUnconstrained fixture — confirm.
assert_safe ChangeColumnDecimalIncreasePrecision
end
def test_timestamps
skip unless postgresql?
assert_safe ChangeColumnTimestamps
end
# timestamp <-> timestamptz is only safe when the session timezone is UTC
def test_timestamps_non_utc
skip unless postgresql?
with_time_zone do
assert_unsafe ChangeColumnTimestamps
end
end
def test_datetime_increase_precision
skip unless postgresql?
assert_safe ChangeColumnDatetimeIncreasePrecision
end
def test_datetime_decrease_precision
skip unless postgresql?
assert_unsafe ChangeColumnDatetimeDecreasePrecision
end
def test_timestamp_increase_limit
skip unless postgresql?
assert_safe ChangeColumnTimestampIncreaseLimit
end
def test_timestamp_decrease_limit
skip unless postgresql?
assert_unsafe ChangeColumnTimestampDecreaseLimit
end
def test_timestamptz_increase_limit
skip unless postgresql?
assert_safe ChangeColumnTimestamptzIncreaseLimit
end
def test_timestamptz_decrease_limit
skip unless postgresql?
assert_unsafe ChangeColumnTimestamptzDecreaseLimit
end
def test_time_increase_precision
skip unless postgresql?
assert_safe ChangeColumnTimeIncreasePrecision
end
def test_time_decrease_precision
skip unless postgresql?
assert_unsafe ChangeColumnTimeDecreasePrecision
end
def test_interval_increase_precision
skip unless postgresql?
assert_safe ChangeColumnIntervalIncreasePrecision
end
def test_interval_decrease_precision
skip unless postgresql?
assert_unsafe ChangeColumnIntervalDecreasePrecision
end
def test_cidr_to_inet
skip unless postgresql?
assert_safe ChangeColumnCidrToInet
end
# cidr is more restrictive than inet
# https://www.postgresql.org/docs/14/datatype-net-types.html#DATATYPE-INET-VS-CIDR
def test_inet_to_cidr
skip unless postgresql?
assert_unsafe ChangeColumnInetToCidr
end
def test_constraint
skip unless postgresql?
assert_unsafe ChangeColumnConstraint
end
def test_other_constraints
skip unless postgresql?
assert_safe ChangeColumnOtherConstraints
end
def test_with_not_null
assert_unsafe ChangeColumnWithNotNull
end
def test_missing_table
assert_unsafe ChangeColumnMissingTable
end
# Runs the block with a non-UTC session timezone, always restoring UTC.
def with_time_zone
ActiveRecord::Base.connection.execute("SET timezone = 'America/Los_Angeles'")
yield
ensure
ActiveRecord::Base.connection.execute("SET timezone = 'UTC'")
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/change_column_default_test.rb | test/change_column_default_test.rb | require_relative "test_helper"
# Tests for the change_column_default check: it is only unsafe when
# Active Record partial inserts are enabled (pre-Rails-7 default).
class ChangeColumnDefaultTest < Minitest::Test
def test_partial_inserts
with_partial_inserts(true) do
assert_unsafe ChangeColumnDefault
end
end
def test_partial_inserts_hash
with_partial_inserts(true) do
assert_unsafe ChangeColumnDefaultHash
end
end
def test_no_partial_inserts
with_partial_inserts(false) do
assert_safe ChangeColumnDefaultHash
end
end
# changing the default of a column added in the same migration is always safe
def test_new_column
assert_safe ChangeColumnDefaultNewColumn
end
def test_default
# Rails 7 disables partial inserts by default
# but Active Record 7 by itself does not
assert_unsafe ChangeColumnDefault
end
# Stubs ActiveRecord::Base.partial_inserts for the duration of the block.
def with_partial_inserts(value, &block)
ActiveRecord::Base.stub(:partial_inserts, value, &block)
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/add_reference_test.rb | test/add_reference_test.rb | require_relative "test_helper"
# Tests for the add_reference check. On Postgres the implicit (non-concurrent)
# index makes it unsafe; other adapters allow it. Also covers the optional
# auto_analyze behavior (assert_analyzed / refute_analyzed come from test_helper).
class AddReferenceTest < Minitest::Test
def test_basic
if postgresql?
assert_unsafe AddReference
else
assert_safe AddReference
end
end
def test_polymorphic
if postgresql?
assert_unsafe AddReferencePolymorphic
else
assert_safe AddReferencePolymorphic
end
end
# no index means no long lock, safe everywhere
def test_no_index
assert_safe AddReferenceNoIndex
end
def test_default
if postgresql?
assert_unsafe AddReferenceDefault
else
assert_safe AddReferenceDefault
end
end
def test_concurrently
skip unless postgresql?
assert_safe AddReferenceConcurrently
end
def test_foreign_key
if postgresql?
assert_unsafe AddReferenceForeignKey, "Then add the foreign key"
else
assert_safe AddReferenceForeignKey
end
end
def test_foreign_key_validate_false
skip unless postgresql?
assert_safe AddReferenceForeignKeyValidateFalse
end
# add_belongs_to is an alias of add_reference and is checked the same way
def test_add_belongs_to
if postgresql?
assert_unsafe AddBelongsTo
else
assert_safe AddBelongsTo
end
end
def test_auto_analyze
with_auto_analyze do
assert_analyzed postgresql? ? AddReferenceConcurrently : AddReference
end
end
def test_auto_analyze_false
refute_analyzed postgresql? ? AddReferenceConcurrently : AddReference
end
def test_auto_analyze_no_index
with_auto_analyze do
refute_analyzed AddReferenceNoIndex
end
end
def test_auto_analyze_default
with_auto_analyze do
with_safety_assured do
assert_analyzed AddReferenceDefault
end
end
end
def test_auto_analyze_add_belongs_to
with_auto_analyze do
with_safety_assured do
assert_analyzed AddBelongsTo
end
end
end
# extra positional args: strong_migrations flags first on Postgres,
# Active Record itself raises ArgumentError elsewhere
def test_extra_arguments
if postgresql?
assert_unsafe AddReferenceExtraArguments
else
assert_argument_error AddReferenceExtraArguments
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/misc_test.rb | test/misc_test.rb | require_relative "test_helper"
# Miscellaneous checks: raw SQL, renames, forced table creation,
# revert handling, custom checks, and version/adapter edge cases.
class MiscTest < Minitest::Test
def test_execute_arbitrary_sql
assert_unsafe ExecuteArbitrarySQL
end
def test_rename_column
assert_unsafe RenameColumn
end
def test_rename_table
assert_unsafe RenameTable
end
def test_rename_schema
assert_unsafe RenameSchema
end
def test_create_table_force
assert_unsafe CreateTableForce
end
def test_create_join_table
assert_safe CreateJoinTable
end
def test_create_join_table_force
assert_unsafe CreateJoinTableForce
end
# reverting an unsafe-on-revert migration must itself be flagged
def test_revert
migrate AddReferenceNoIndex
assert_unsafe RevertAddReference
migrate RevertAddReferenceSafetyAssured
end
def test_revert_safe
with_safety_assured do
migrate CreateTableForce
end
migrate RevertCreateTableForce
end
def test_revert_down
assert_unsafe RevertCreateTableForce, direction: :down
end
def test_revert_inline
# NOTE(review): identical to test_revert and never uses RevertAddReferenceInline;
# presumably this was meant to exercise the block form of revert — confirm.
migrate AddReferenceNoIndex
assert_unsafe RevertAddReference
migrate RevertAddReferenceSafetyAssured
end
def test_revert_inline_safe
with_safety_assured do
migrate CreateTableForce
end
migrate RevertCreateTableForceInline
end
def test_custom
assert_unsafe Custom, "Cannot add forbidden column"
end
def test_unsupported_version
error = assert_raises(StrongMigrations::UnsupportedVersion) do
with_target_version(1) do
migrate ExecuteArbitrarySQL
end
end
assert_match "version (1) not supported", error.message
end
def test_target_version_outside_developer_env
outside_developer_env do
with_target_version(1) do
# ignores target version
# (does not throw UnsupportedVersion error)
assert_unsafe ExecuteArbitrarySQL
end
end
end
# swaps in an in-memory SQLite connection to trigger the unsupported-adapter
# warning, restoring the previous connection in the ensure block
def test_unsupported_adapter
previous_db_config = ActiveRecord::Base.connection_db_config.configuration_hash
ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")
schema_migration.create_table
assert_output nil, "[strong_migrations] Unsupported adapter: SQLite. Use StrongMigrations.skip_database(:primary) to silence this warning.\n" do
assert_unsafe CreateTableForce
end
ensure
ActiveRecord::Base.establish_connection(previous_db_config) if previous_db_config
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/support/active_record.rb | test/support/active_record.rb | require "active_record"
# needed for target_version
module Rails
def self.env
ActiveSupport::StringInquirer.new("test")
end
end
# adapter under test; selected via the ADAPTER env var (defaults to Postgres)
$adapter = ENV["ADAPTER"] || "postgresql"
connection_options = {
adapter: $adapter,
database: "strong_migrations_test"
}
if $adapter == "mysql2"
connection_options[:encoding] = "utf8mb4"
if ActiveRecord::VERSION::MAJOR < 8
connection_options[:prepared_statements] = true
end
elsif $adapter == "trilogy"
connection_options[:host] = "127.0.0.1"
end
ActiveRecord::Base.establish_connection(**connection_options)
if ENV["VERBOSE"]
ActiveRecord::Base.logger = ActiveSupport::Logger.new($stdout)
else
ActiveRecord::Migration.verbose = false
end
# e.g. "7.1" — used to pin the migration class version below
def migration_version
ActiveRecord.version.to_s.to_f
end
TestMigration = ActiveRecord::Migration[migration_version]
TestSchema = ActiveRecord::Schema
def schema_migration
connection_class.schema_migration
end
# schema_migration moved from the connection to the pool in Active Record 7.2
def connection_class
if ActiveRecord::VERSION::STRING.to_f >= 7.2
ActiveRecord::Base.connection_pool
else
ActiveRecord::Base.connection
end
end
schema_migration.create_table
# (re)creates the fixture tables used by the migration tests
ActiveRecord::Schema.define do
if $adapter == "postgresql"
# for change column
enable_extension "citext"
# for exclusion constraints
enable_extension "btree_gist"
# for gen_random_uuid() in Postgres < 13
enable_extension "pgcrypto"
end
[:users, :new_users, :orders, :devices, :cities_users].each do |table|
drop_table(table) if table_exists?(table)
end
create_table :users do |t|
t.string :name
t.string :city
t.decimal :credit_score, precision: 10, scale: 5
t.timestamp :deleted_at
t.string :country, limit: 20
t.string :interval
t.text :description
t.citext :code if $adapter == "postgresql"
t.references :order
end
create_table :orders do |t|
end
create_table :devices do |t|
end
end
class User < ActiveRecord::Base
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/support/helpers.rb | test/support/helpers.rb | module Helpers
# True when the suite is running against the PostgreSQL adapter.
def postgresql?
"postgresql" == $adapter
end
# True for a MySQL-family adapter connected to genuine MySQL (not MariaDB).
def mysql?
%w[mysql2 trilogy].include?($adapter) && !ActiveRecord::Base.connection.mariadb?
end
# True for a MySQL-family adapter connected to MariaDB.
def mariadb?
%w[mysql2 trilogy].include?($adapter) && ActiveRecord::Base.connection.mariadb?
end
# Major Postgres version (server_version_num is e.g. 140005 for 14.5).
def postgresql_version
row = ActiveRecord::Base.connection.execute("SHOW server_version_num").first
row["server_version_num"].to_i / 10000
end
# transaction_timeout was introduced in PostgreSQL 17.
def transaction_timeout?
return false unless postgresql?
postgresql_version >= 17
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/check_down.rb | test/migrations/check_down.rb | class CheckDown < TestMigration
# up adds a column; the naive down (remove_column) is what check_down flags
def up
add_column :users, :age, :integer
end
def down
remove_column :users, :age
end
end
# Same as CheckDown, but the down direction is wrapped in safety_assured.
class CheckDownSafe < TestMigration
def up
add_column :users, :age, :integer
end
def down
safety_assured do
remove_column :users, :age
end
end
end
# A change migration whose implicit down (re-adding the index) is unsafe.
class CheckDownChange < TestMigration
disable_ddl_transaction!
def change
add_index :users, :name, algorithm: :concurrently
remove_index :users, :name
end
end
# As above, but the remove_index is concurrent, so the reverse is safe.
class CheckDownChangeSafe < TestMigration
disable_ddl_transaction!
def change
add_index :users, :name, algorithm: :concurrently
remove_index :users, column: :name, algorithm: :concurrently
end
end
class CheckDownSafetyAssured < TestMigration
def change
safety_assured do
add_column :users, :age, :integer
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_column.rb | test/migrations/add_column.rb | class AddColumnDefault < TestMigration
# adding a column with a non-null default (unsafe on old Postgres/MySQL)
def change
add_column :users, :nice, :boolean, default: true
end
end
# Fixture migrations covering add_column default/volatile/generated/serial cases.
class AddColumnDefaultNull < TestMigration
def change
add_column :users, :nice, :boolean, default: nil
end
end
class AddColumnDefaultNotNull < TestMigration
def change
add_column :users, :nice, :uuid, default: "gen_random_uuid()", null: false
end
end
# safe pattern: add the column first, then change the default separately
class AddColumnDefaultSafe < TestMigration
def change
add_column :users, :nice, :boolean
change_column_default :users, :nice, from: true, to: false
end
end
# volatile (callable) default — requires a table rewrite
class AddColumnDefaultCallable < TestMigration
def change
add_column :users, :nice, :datetime, default: -> { "clock_timestamp()" }
end
end
class AddColumnDefaultUUID < TestMigration
def change
add_column :users, :nice, :uuid, default: "gen_random_uuid()"
end
end
class AddColumnDefaultUUIDSafe < TestMigration
def change
add_column :users, :nice, :uuid
change_column_default :users, :nice, from: nil, to: "gen_random_uuid()"
end
end
class AddColumnJson < TestMigration
def change
add_column :users, :properties, :json
end
end
class AddColumnGeneratedStored < TestMigration
def change
add_column :users, :nice, :virtual, type: :string, as: "LOWER(city)", stored: true
end
end
class AddColumnGeneratedVirtual < TestMigration
def change
add_column :users, :nice, :virtual, type: :string, as: "LOWER(city)"
end
end
class AddColumnPrimaryKey < TestMigration
def change
add_column :users, :nice, :primary_key
end
end
class AddColumnSerial < TestMigration
def change
add_column :users, :nice, :serial
end
end
class AddColumnBigserial < TestMigration
def change
add_column :users, :nice, :bigserial
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/start_after.rb | test/migrations/start_after.rb | class Version < TestMigration
def change
change_column_null :users, :city, false, "San Francisco"
end
# overrides the migration version so start_after logic can be exercised
def version
20170101000001
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_index.rb | test/migrations/add_index.rb | class AddIndex < TestMigration
# non-concurrent index add (unsafe on Postgres)
def change
add_index :users, :name
end
end
# Fixture migrations covering add_index variants (concurrent, unique,
# multi-column, new-table, schema, and bad-argument cases).
class AddIndexes < TestMigration
def change
add_index :users, :name
add_index :users, :city
end
end
class AddIndexUnique < TestMigration
def change
add_index :users, :name, unique: true
end
end
# uses self.up/self.down class methods instead of change
class AddIndexUp < TestMigration
def self.up
add_index :users, :name
end
def self.down
remove_index :users, :name
end
end
class AddIndexConcurrently < TestMigration
disable_ddl_transaction!
def change
add_index :users, :name, algorithm: :concurrently
end
end
class AddIndexSafetyAssured < TestMigration
def change
safety_assured { add_index :users, :name, name: "boom" }
end
end
# indexing a table created in the same migration is safe
class AddIndexNewTable < TestMigration
def change
create_table :new_users do |t|
t.string :name
end
add_index :new_users, :name
end
end
# schema definitions (TestSchema) are exempt from checks
class AddIndexSchema < TestSchema
def change
add_index :users, :name, name: "boom2"
end
end
class AddIndexColumns < TestMigration
def change
add_index :users, [:name, :city, :country, :deleted_at]
end
end
class AddIndexColumnsUnique < TestMigration
disable_ddl_transaction!
def change
add_index :users, [:name, :city, :country, :deleted_at], unique: true, algorithm: :concurrently
end
end
class AddIndexName < TestMigration
def change
add_index :users, :name, name: "my_index"
end
end
class AddIndexExtraArguments < TestMigration
def change
add_index :users, :name, :extra
end
end
class AddIndexConcurrentlyExtraArguments < TestMigration
def change
add_index :users, :name, :extra, algorithm: :concurrently
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/change_column_null.rb | test/migrations/change_column_null.rb | class ChangeColumnNull < TestMigration
# setting NOT NULL without a prior validated check constraint (unsafe)
def change
change_column_null :users, :name, false
end
end
# Safe pattern: add a NOT VALID check constraint, validate it, then set NOT NULL.
class ChangeColumnNullConstraint < TestMigration
def up
safety_assured do
execute 'ALTER TABLE "users" ADD CONSTRAINT "test" CHECK ("name" IS NOT NULL) NOT VALID'
execute 'ALTER TABLE "users" VALIDATE CONSTRAINT "test"'
end
change_column_null :users, :name, false
end
def down
execute 'ALTER TABLE "users" DROP CONSTRAINT "test"'
change_column_null :users, :name, true
end
end
# Same pattern expressed through the constraint helper methods.
class ChangeColumnNullConstraintMethods < TestMigration
disable_ddl_transaction!
def up
add_check_constraint :users, "name IS NOT NULL", name: "test", validate: false
validate_check_constraint :users, name: "test"
change_column_null :users, :name, false
remove_check_constraint :users, name: "test"
end
def down
change_column_null :users, :name, true
end
end
# Constraint added but never validated — still unsafe.
class ChangeColumnNullConstraintUnvalidated < TestMigration
def up
safety_assured do
execute 'ALTER TABLE "users" ADD CONSTRAINT "test" CHECK ("name" IS NOT NULL) NOT VALID'
end
change_column_null :users, :name, false
end
def down
execute 'ALTER TABLE "users" DROP CONSTRAINT "test"'
change_column_null :users, :name, true
end
end
# Passing a default to change_column_null backfills with an UPDATE (unsafe).
class ChangeColumnNullConstraintDefault < TestMigration
def up
safety_assured do
execute 'ALTER TABLE "users" ADD CONSTRAINT "test" CHECK ("name" IS NOT NULL) NOT VALID'
execute 'ALTER TABLE "users" VALIDATE CONSTRAINT "test"'
end
change_column_null :users, :name, false, "Andy"
end
def down
execute 'ALTER TABLE "users" DROP CONSTRAINT "test"'
change_column_null :users, :name, true
end
end
class ChangeColumnNullDefault < TestMigration
def change
change_column_null :users, :name, false, "Andy"
end
end
class ChangeColumnNullDefaultCallable < TestMigration
def change
change_column_null :users, :deleted_at, false, -> { "clock_timestamp()" }
end
end
class ChangeColumnNullDefaultUUID < TestMigration
def change
add_column :users, :nice, :uuid
change_column_null :users, :nice, false, "gen_random_uuid()"
end
end
# "interval" is a reserved word — exercises identifier quoting in the check
class ChangeColumnNullQuoted < TestMigration
def up
safety_assured do
execute 'ALTER TABLE "users" ADD CONSTRAINT "test" CHECK ("interval" IS NOT NULL) NOT VALID'
execute 'ALTER TABLE "users" VALIDATE CONSTRAINT "test"'
end
change_column_null :users, :interval, false
end
def down
execute 'ALTER TABLE "users" DROP CONSTRAINT "test"'
change_column_null :users, :interval, true
end
end
# 53-char column name exercises Postgres' 63-byte identifier truncation
class ChangeColumnNullLongName < TestMigration
def change
column = "a"*53
add_column :users, column, :string
change_column_null :users, column, false
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/change_column_default.rb | test/migrations/change_column_default.rb | class ChangeColumnDefault < TestMigration
# positional (non-hash) form of change_column_default
def change
change_column_default :users, :name, "Test"
end
end
# from:/to: hash form of change_column_default
class ChangeColumnDefaultHash < TestMigration
def change
change_column_default :users, :name, from: nil, to: "Test"
end
end
# changing the default of a column added in the same migration (safe)
class ChangeColumnDefaultNewColumn < TestMigration
def change
add_column :users, :nice, :boolean
change_column_default :users, :nice, from: nil, to: true
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/remove_column.rb | test/migrations/remove_column.rb | class RemoveColumn < TestMigration
# removing a single column (unsafe until ignored_columns is deployed)
def change
remove_column :users, :name, :string
end
end
# Fixtures for every column-removal helper strong_migrations checks.
class RemoveColumns < TestMigration
def change
remove_columns :users, :name, :other
end
end
class RemoveColumnsType < TestMigration
def change
remove_columns :users, :name, :other, type: :text
end
end
class RemoveTimestamps < TestMigration
def change
remove_timestamps :users
end
end
class RemoveReference < TestMigration
def change
remove_reference :users, :device
end
end
class RemoveReferencePolymorphic < TestMigration
def change
remove_reference :users, :device, polymorphic: true
end
end
class RemoveBelongsTo < TestMigration
def change
remove_belongs_to :users, :device
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_exclusion_constraint.rb | test/migrations/add_exclusion_constraint.rb | class AddExclusionConstraint < TestMigration
# exclusion constraint on an existing table (unsafe; needs btree_gist)
def change
add_exclusion_constraint :users, "credit_score WITH =", using: :gist
end
end
# exclusion constraint on a table created in the same migration (safe)
class AddExclusionConstraintNewTable < TestMigration
def change
create_table :new_users do |t|
t.decimal :credit_score, precision: 10, scale: 5
end
add_exclusion_constraint :new_users, "credit_score WITH =", using: :gist
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_foreign_key.rb | test/migrations/add_foreign_key.rb | class AddForeignKey < TestMigration
# foreign key with immediate validation (unsafe on Postgres)
def change
add_foreign_key :users, :orders
end
end
# validate: false skips the full-table scan, making the add safe
class AddForeignKeySafe < TestMigration
def change
add_foreign_key :users, :orders, validate: false
end
end
# validating in the same DDL transaction defeats the purpose (unsafe)
class AddForeignKeyValidateSameTransaction < TestMigration
def change
add_foreign_key :users, :orders, validate: false
validate_foreign_key :users, :orders
end
end
# validating outside a DDL transaction is the safe pattern
class AddForeignKeyValidateNoTransaction < TestMigration
disable_ddl_transaction!
def change
add_foreign_key :users, :orders, validate: false
validate_foreign_key :users, :orders
end
end
class AddForeignKeyExtraArguments < TestMigration
def change
add_foreign_key :users, :orders, :extra
end
end
class AddForeignKeyName < TestMigration
def change
add_foreign_key :users, :orders, name: "fk1"
add_foreign_key :users, :orders, name: "fk2"
end
end
class AddForeignKeyColumn < TestMigration
def change
add_reference :users, :other_order, index: false
add_foreign_key :users, :orders, column: "order_id"
add_foreign_key :users, :orders, column: "other_order_id"
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/timeouts.rb | test/migrations/timeouts.rb | class CheckTimeouts < TestMigration
include Helpers
# Captures the session's effective statement/transaction/lock timeouts into
# globals so the surrounding test can assert on them (adapter-specific SQL).
def change
safety_assured { execute "SELECT 1" }
$statement_timeout =
if postgresql?
connection.select_all("SHOW statement_timeout").first["statement_timeout"]
elsif mysql?
# MySQL reports max_execution_time in milliseconds
connection.select_all("SHOW VARIABLES LIKE 'max_execution_time'").first["Value"].to_i / 1000.0
else
connection.select_all("SHOW VARIABLES LIKE 'max_statement_time'").first["Value"].to_f
end
$transaction_timeout =
if postgresql? && transaction_timeout?
connection.select_all("SHOW transaction_timeout").first["transaction_timeout"]
end
$lock_timeout =
if postgresql?
connection.select_all("SHOW lock_timeout").first["lock_timeout"]
else
connection.select_all("SHOW VARIABLES LIKE 'lock_wait_timeout'").first["Value"].to_i
end
end
end
# Captures transaction_timeout without issuing any prior statement.
class CheckTransactionTimeoutWithoutStatement < TestMigration
include Helpers
def change
$transaction_timeout =
if postgresql? && transaction_timeout?
connection.select_all("SHOW transaction_timeout").first["transaction_timeout"]
end
end
end
class CheckLockTimeout < TestMigration
def change
safety_assured { execute "SELECT 1" }
end
end
# The $migrate_attempts/$transaction_attempts globals are incremented so the
# surrounding tests can count lock-timeout retries.
class CheckLockTimeoutRetries < TestMigration
def change
$migrate_attempts += 1
add_column :users, :nice, :boolean
end
end
class CheckLockTimeoutRetriesTransaction < TestMigration
disable_ddl_transaction!
def change
$migrate_attempts += 1
transaction do
$transaction_attempts += 1
add_column :users, :nice, :boolean
end
end
end
class CheckLockTimeoutRetriesTransactionDdlTransaction < TestMigration
def change
$migrate_attempts += 1
transaction do
$transaction_attempts += 1
add_column :users, :nice, :boolean
end
end
end
class CheckLockTimeoutRetriesNoDdlTransaction < TestMigration
disable_ddl_transaction!
def change
$migrate_attempts += 1
add_column :users, :nice, :boolean
end
end
class CheckLockTimeoutRetriesCommitDbTransaction < TestMigration
def change
$migrate_attempts += 1
commit_db_transaction
# no longer in DDL transaction
begin_db_transaction
add_column :users, :nice, :boolean
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/misc.rb | test/migrations/misc.rb | class ExecuteArbitrarySQL < TestMigration
# raw execute is always flagged as unsafe
def change
execute "SELECT 1"
end
end
# Fixture migrations for rename/force/revert/custom checks.
class RenameColumn < TestMigration
def change
rename_column :users, :properties, :bad_name
end
end
class RenameTable < TestMigration
def change
rename_table :users, :bad_name
end
end
class RenameSchema < TestMigration
def change
rename_schema :public, :bad_name
end
end
# force: :cascade drops an existing table — flagged as unsafe
class CreateTableForce < TestMigration
def change
create_table :admins, force: :cascade do |t|
t.string :name
end
end
end
class CreateJoinTable < TestMigration
def change
create_join_table :users, :cities
end
end
class CreateJoinTableForce < TestMigration
def change
create_join_table :users, :cities, force: :cascade
end
end
# reverting add_reference implies remove_column, which must be flagged
class RevertAddReference < TestMigration
def change
revert AddReferenceNoIndex
end
end
class RevertAddReferenceSafetyAssured < TestMigration
def change
safety_assured { revert AddReferenceNoIndex }
end
end
class RevertAddReferenceInline < TestMigration
def change
revert { add_reference :users, :country, index: false }
end
end
class RevertCreateTableForce < TestMigration
def change
revert CreateTableForce
end
end
class RevertCreateTableForceInline < TestMigration
def change
revert do
create_table :admins, force: :cascade do |t|
t.string :name
end
end
end
end
# triggers the user-defined custom check configured in test_helper
class Custom < TestMigration
def change
add_column :users, :forbidden, :string
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/remove_index.rb | test/migrations/remove_index.rb | class RemoveIndex < TestMigration
# non-concurrent index removal (unsafe on Postgres)
def change
remove_index :users, :name
end
end
# Fixtures for remove_index argument forms and the concurrent-safe variant.
class RemoveIndexColumn < TestMigration
def change
remove_index :users, column: :name
end
end
class RemoveIndexName < TestMigration
def change
remove_index :users, name: "my_index"
end
end
class RemoveIndexOptions < TestMigration
def change
remove_index :users, :name, name: "my_index", if_exists: true
end
end
class RemoveIndexConcurrently < TestMigration
disable_ddl_transaction!
def change
remove_index :users, column: :name, algorithm: :concurrently
end
end
class RemoveIndexExtraArguments < TestMigration
def change
remove_index :users, :name, :extra
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_check_constraint.rb | test/migrations/add_check_constraint.rb | class AddCheckConstraint < TestMigration
# check constraint with immediate validation (unsafe on Postgres)
def change
add_check_constraint :users, "credit_score > 0"
end
end
# validate: false makes the add safe (no full-table scan)
class AddCheckConstraintSafe < TestMigration
def change
add_check_constraint :users, "credit_score > 0", validate: false
end
end
# validating inside the same DDL transaction is still unsafe
class AddCheckConstraintValidateSameTransaction < TestMigration
def change
add_check_constraint :users, "credit_score > 0", name: "credit_check", validate: false
validate_check_constraint :users, name: "credit_check"
end
end
# validating outside a DDL transaction is the safe pattern
class AddCheckConstraintValidateNoTransaction < TestMigration
disable_ddl_transaction!
def change
add_check_constraint :users, "credit_score > 0", name: "credit_check", validate: false
validate_check_constraint :users, name: "credit_check"
end
end
# constraint on a table created in the same migration (safe)
class AddCheckConstraintNewTable < TestMigration
def change
create_table :new_users do |t|
t.string :name
end
add_check_constraint :new_users, "name IS NOT NULL"
end
end
class AddCheckConstraintName < TestMigration
def change
add_check_constraint :users, "credit_score > 0", name: "credit_check"
end
end
class AddCheckConstraintExtraArguments < TestMigration
def change
add_check_constraint :users, "credit_score > 0", :extra
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_reference.rb | test/migrations/add_reference.rb | class AddReference < TestMigration
# reference with a non-concurrent index (unsafe on Postgres)
def change
add_reference :users, :device, index: true
end
end
# Fixtures covering add_reference/add_belongs_to index and foreign-key options.
class AddReferencePolymorphic < TestMigration
def change
add_reference :users, :device, polymorphic: true, index: true
end
end
class AddReferenceNoIndex < TestMigration
def change
add_reference :users, :country, index: false
end
end
# relies on Rails' default of index: true
class AddReferenceDefault < TestMigration
def change
add_reference :users, :ip
end
end
class AddReferenceForeignKey < TestMigration
def change
add_reference :users, :device, foreign_key: true, index: false
end
end
class AddReferenceForeignKeyValidateFalse < TestMigration
def change
add_reference :users, :device, foreign_key: {validate: false}, index: false
end
end
class AddReferenceForeignKeyValidateFalseIndex < TestMigration
def change
add_reference :users, :device, foreign_key: {validate: false}
end
end
class AddReferenceForeignKeyToTable < TestMigration
def change
add_reference :users, :device, foreign_key: {to_table: :users}, index: false
end
end
class AddReferenceForeignKeyOnDelete < TestMigration
def change
add_reference :users, :device, foreign_key: {on_delete: :nullify}, index: false
end
end
# concurrent index makes the reference safe on Postgres
class AddReferenceConcurrently < TestMigration
disable_ddl_transaction!
def change
add_reference :users, :ip, index: {algorithm: :concurrently}
end
end
class AddBelongsTo < TestMigration
def change
add_belongs_to :users, :device, index: true
end
end
class AddReferenceExtraArguments < TestMigration
def change
add_reference :users, :device, :extra, index: true
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/add_unique_constraint.rb | test/migrations/add_unique_constraint.rb | class AddUniqueConstraint < TestMigration
def change
add_unique_constraint :users, :name
end
end
# Fixture: the safe pattern — build a unique index concurrently, then attach
# the unique constraint to that existing index.
class AddUniqueConstraintUsingIndex < TestMigration
  disable_ddl_transaction!

  def up
    add_index :users, :name, unique: true, algorithm: :concurrently
    add_unique_constraint :users, using_index: "index_users_on_name"
  end

  def down
    remove_unique_constraint :users, :name
  end
end

# Fixture: unique constraint on a table created in the same migration.
class AddUniqueConstraintNewTable < TestMigration
  def change
    create_table :new_users do |t|
      t.string :name
    end
    add_unique_constraint :new_users, :name
  end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/test/migrations/change_column.rb | test/migrations/change_column.rb | class ChangeColumn < TestMigration
def change
change_column :users, :properties, :bad_name
end
end
# --- varchar <-> text/citext type changes ---

# Fixture: widening varchar to text.
class ChangeColumnVarcharToText < TestMigration
  def up
    change_column :users, :name, :text
  end

  def down
    change_column :users, :name, :string
  end
end

# Fixture: varchar to citext.
class ChangeColumnVarcharToCitext < TestMigration
  def up
    change_column :users, :name, :citext
  end

  def down
    change_column :users, :name, :string
  end
end

# --- varchar limit changes (column starts at limit: 20) ---

# Fixture: limit 20 -> 21.
class ChangeColumnVarcharIncreaseLimit < TestMigration
  def up
    change_column :users, :country, :string, limit: 21
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# Fixture: limit 20 -> 64 (crosses MySQL's in-place threshold).
class ChangeColumnVarcharIncreaseLimit64 < TestMigration
  def up
    change_column :users, :country, :string, limit: 64
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# Fixture: limit 20 -> 256.
class ChangeColumnVarcharIncreaseLimit256 < TestMigration
  def up
    change_column :users, :country, :string, limit: 256
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# Fixture: limit 20 -> 19 (shrinking).
class ChangeColumnVarcharDecreaseLimit < TestMigration
  def up
    change_column :users, :country, :string, limit: 19
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# Fixture: dropping the limit entirely.
class ChangeColumnVarcharRemoveLimit < TestMigration
  def up
    change_column :users, :country, :string
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# Fixture: removes the limit, then adds one back in the same migration.
class ChangeColumnVarcharAddLimit < TestMigration
  def up
    change_column :users, :country, :string
    change_column :users, :country, :string, limit: 20
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# --- text <-> varchar/citext conversions ---

# Fixture: text to bounded varchar.
class ChangeColumnTextToVarcharLimit < TestMigration
  def up
    change_column :users, :description, :string, limit: 20
  end

  def down
    change_column :users, :description, :text
  end
end

# Fixture: text to unbounded varchar.
class ChangeColumnTextToVarcharNoLimit < TestMigration
  def up
    change_column :users, :description, :string
  end

  def down
    change_column :users, :description, :text
  end
end

# Fixture: text to citext, no index.
class ChangeColumnTextToCitext < TestMigration
  def up
    change_column :users, :description, :citext
  end

  def down
    change_column :users, :description, :text
  end
end

# Fixture: text to citext while a plain index exists on the column.
class ChangeColumnTextToCitextIndexed < TestMigration
  def up
    safety_assured { add_index :users, :description }
    change_column :users, :description, :citext
  end

  def down
    change_column :users, :description, :text
    remove_index :users, :description
  end
end

# Fixture: text to citext while an expression index references the column.
class ChangeColumnTextToCitextIndexedExpression < TestMigration
  def up
    safety_assured { add_index :users, "lower(description)" }
    change_column :users, :description, :citext
  end

  def down
    change_column :users, :description, :text
    remove_index :users, :description
  end
end

# --- citext -> text/varchar conversions ---

class ChangeColumnCitextToText < TestMigration
  def up
    change_column :users, :code, :text
  end

  def down
    change_column :users, :code, :citext
  end
end

class ChangeColumnCitextToTextIndexed < TestMigration
  def up
    safety_assured { add_index :users, :code }
    change_column :users, :code, :text
  end

  def down
    change_column :users, :code, :citext
    remove_index :users, :code
  end
end

class ChangeColumnCitextToVarcharLimit < TestMigration
  def up
    change_column :users, :code, :string, limit: 20
  end

  def down
    change_column :users, :code, :citext
  end
end

class ChangeColumnCitextToVarcharNoLimit < TestMigration
  def up
    change_column :users, :code, :string
  end

  def down
    change_column :users, :code, :citext
  end
end

class ChangeColumnCitextToVarcharNoLimitIndexed < TestMigration
  def up
    safety_assured { add_index :users, :code }
    change_column :users, :code, :string
  end

  def down
    change_column :users, :code, :citext
    remove_index :users, :code
  end
end

# --- decimal precision/scale changes (column starts at precision: 10, scale: 5) ---

class ChangeColumnDecimalDecreasePrecision < TestMigration
  def up
    change_column :users, :credit_score, :decimal, precision: 9, scale: 5
  end
end

class ChangeColumnDecimalChangeScale < TestMigration
  def up
    change_column :users, :credit_score, :decimal, precision: 10, scale: 6
  end
end

class ChangeColumnDecimalIncreasePrecision < TestMigration
  def up
    change_column :users, :credit_score, :decimal, precision: 11, scale: 5
  end

  def down
    change_column :users, :credit_score, :decimal, precision: 10, scale: 5
  end
end

# Fixture: dropping precision/scale entirely (unconstrained decimal).
class ChangeColumnDecimalUnconstrained < TestMigration
  def up
    change_column :users, :credit_score, :decimal
  end

  def down
    change_column :users, :credit_score, :decimal, precision: 10, scale: 5
  end
end

# --- timestamp/datetime/time/interval precision changes ---

# Fixture: timestamp <-> timestamptz round trip.
class ChangeColumnTimestamps < TestMigration
  def up
    change_column :users, :deleted_at, :timestamptz
    change_column :users, :deleted_at, :timestamp
  end
end

# Fixture: stepping datetime precision upward (0 -> 3 -> 6 -> default -> 6).
class ChangeColumnDatetimeIncreasePrecision < TestMigration
  def up
    add_column :users, :joined_at, :datetime, precision: 0
    change_column :users, :joined_at, :datetime, precision: 3
    change_column :users, :joined_at, :datetime, precision: 6
    change_column :users, :joined_at, :datetime
    change_column :users, :joined_at, :datetime, precision: 6
  end

  def down
    remove_column :users, :joined_at
  end
end

# Fixture: lowering datetime precision from the default.
class ChangeColumnDatetimeDecreasePrecision < TestMigration
  def up
    add_column :users, :joined_at, :datetime
    change_column :users, :joined_at, :datetime, precision: 3
  end

  def down
    remove_column :users, :joined_at
  end
end

class ChangeColumnTimestampIncreaseLimit < TestMigration
  def up
    add_column :users, :joined_at, :timestamp, limit: 0
    change_column :users, :joined_at, :timestamp, limit: 3
    change_column :users, :joined_at, :timestamp, limit: 6
    change_column :users, :joined_at, :timestamp
    change_column :users, :joined_at, :timestamp, limit: 6
  end

  def down
    remove_column :users, :joined_at
  end
end

class ChangeColumnTimestampDecreaseLimit < TestMigration
  def up
    add_column :users, :joined_at, :timestamp
    change_column :users, :joined_at, :timestamp, limit: 3
  end

  def down
    remove_column :users, :joined_at
  end
end

class ChangeColumnTimestamptzIncreaseLimit < TestMigration
  def up
    add_column :users, :joined_at, :timestamptz, limit: 0
    change_column :users, :joined_at, :timestamptz, limit: 3
    change_column :users, :joined_at, :timestamptz, limit: 6
    change_column :users, :joined_at, :timestamptz
    change_column :users, :joined_at, :timestamptz, limit: 6
  end

  def down
    remove_column :users, :joined_at
  end
end

class ChangeColumnTimestamptzDecreaseLimit < TestMigration
  def up
    add_column :users, :joined_at, :timestamptz
    change_column :users, :joined_at, :timestamptz, limit: 3
  end

  def down
    remove_column :users, :joined_at
  end
end

class ChangeColumnTimeIncreasePrecision < TestMigration
  def up
    add_column :users, :opens_at, :time, precision: 0
    change_column :users, :opens_at, :time, precision: 3
    change_column :users, :opens_at, :time, precision: 6
    change_column :users, :opens_at, :time
    change_column :users, :opens_at, :time, precision: 6
  end

  def down
    remove_column :users, :opens_at
  end
end

class ChangeColumnTimeDecreasePrecision < TestMigration
  def up
    add_column :users, :opens_at, :time
    change_column :users, :opens_at, :time, precision: 3
  end

  def down
    remove_column :users, :opens_at
  end
end

class ChangeColumnIntervalIncreasePrecision < TestMigration
  def up
    add_column :users, :duration, :interval, precision: 0
    change_column :users, :duration, :interval, precision: 3
    change_column :users, :duration, :interval, precision: 6
    change_column :users, :duration, :interval
    change_column :users, :duration, :interval, precision: 6
  end

  def down
    remove_column :users, :duration
  end
end

class ChangeColumnIntervalDecreasePrecision < TestMigration
  def up
    add_column :users, :duration, :interval
    change_column :users, :duration, :interval, precision: 3
  end

  def down
    remove_column :users, :duration
  end
end

# --- network types ---

class ChangeColumnCidrToInet < TestMigration
  def up
    add_column :users, :ip, :cidr
    change_column :users, :ip, :inet
  end

  def down
    remove_column :users, :ip
  end
end

class ChangeColumnInetToCidr < TestMigration
  def up
    add_column :users, :ip, :inet
    change_column :users, :ip, :cidr
  end

  def down
    remove_column :users, :ip
  end
end

# --- miscellaneous cases ---

# Fixture: an otherwise-safe type change combined with null: false.
class ChangeColumnWithNotNull < TestMigration
  def up
    change_column :users, :country, :string, limit: 20, null: false
  end

  def down
    change_column :users, :country, :string, limit: 20
  end
end

# Fixture: change_column against a table that does not exist.
class ChangeColumnMissingTable < TestMigration
  def change
    change_column :missing, :properties, :string
  end
end

# Fixture: type change on a column covered by check constraints.
class ChangeColumnConstraint < TestMigration
  def change
    safety_assured do
      add_check_constraint :users, "name IS NOT NULL"
      add_check_constraint :users, "name = lower(name)"
      add_check_constraint :users, "credit_score > 0"
    end
    change_column :users, :name, :text
  end
end

# Fixture: type change where existing constraints cover other columns only.
class ChangeColumnOtherConstraints < TestMigration
  def change
    add_column :users, :new_name, :string
    safety_assured do
      add_check_constraint :users, "new_name IS NOT NULL"
      add_check_constraint :users, "credit_score > 0"
    end
    reversible do |direction|
      direction.up { change_column :users, :name, :text }
      direction.down { change_column :users, :name, :string }
    end
  end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations.rb | lib/strong_migrations.rb | # dependencies
require "active_support"
# adapters
require_relative "strong_migrations/adapters/abstract_adapter"
require_relative "strong_migrations/adapters/mysql_adapter"
require_relative "strong_migrations/adapters/mariadb_adapter"
require_relative "strong_migrations/adapters/postgresql_adapter"
# modules
require_relative "strong_migrations/checks"
require_relative "strong_migrations/safe_methods"
require_relative "strong_migrations/checker"
require_relative "strong_migrations/migration"
require_relative "strong_migrations/migration_context"
require_relative "strong_migrations/migrator"
require_relative "strong_migrations/version"
# integrations
require_relative "strong_migrations/railtie" if defined?(Rails)
module StrongMigrations
  # Base error for everything raised by this gem.
  class Error < StandardError; end
  # Raised when a migration contains an operation flagged as unsafe.
  class UnsafeMigration < Error; end
  # Raised when the database version is below what the gem supports.
  class UnsupportedVersion < Error; end

  class << self
    # Global configuration. See the assignments below for defaults.
    attr_accessor :auto_analyze, :start_after, :checks, :error_messages,
      :target_postgresql_version, :target_mysql_version, :target_mariadb_version,
      :enabled_checks, :lock_timeout, :statement_timeout, :check_down, :target_version,
      :safe_by_default, :target_sql_mode, :lock_timeout_retries, :lock_timeout_retry_delay,
      :alphabetize_schema, :skipped_databases, :remove_invalid_indexes, :transaction_timeout
    # Writer only; the reader below supplies an environment-dependent default.
    attr_writer :lock_timeout_limit
  end

  self.auto_analyze = false
  self.start_after = 0
  self.lock_timeout_retries = 0
  self.lock_timeout_retry_delay = 10 # seconds
  self.checks = []
  self.safe_by_default = false
  self.check_down = false
  self.alphabetize_schema = false
  self.skipped_databases = []
  self.remove_invalid_indexes = false

  # private
  def self.developer_env?
    env == "development" || env == "test"
  end

  # private
  def self.env
    if defined?(Rails.env)
      Rails.env
    else
      # default to production for safety
      ENV["RACK_ENV"] || "production"
    end
  end

  # Lazily defaults to 10 outside development/test, false (no limit) otherwise.
  def self.lock_timeout_limit
    unless defined?(@lock_timeout_limit)
      @lock_timeout_limit = developer_env? ? false : 10
    end
    @lock_timeout_limit
  end

  # Registers a custom check block, run against each migration method.
  def self.add_check(&block)
    checks << block
  end

  # Turns a built-in check on, optionally only for migrations after a version.
  def self.enable_check(check, start_after: nil)
    enabled_checks[check] = {start_after: start_after}
  end

  # Turns a built-in check off.
  def self.disable_check(check)
    enabled_checks.delete(check)
  end

  # True if the check is enabled and the migration version (when given)
  # is newer than the check's start_after (falling back to the global one).
  def self.check_enabled?(check, version: nil)
    if enabled_checks[check]
      start_after = enabled_checks[check][:start_after] || StrongMigrations.start_after
      !version || version > start_after
    else
      false
    end
  end

  # Excludes a database from checking entirely.
  def self.skip_database(database)
    self.skipped_databases << database
  end
end
# load error messages
require_relative "strong_migrations/error_messages"

# Patch Active Record once it is loaded: migrations, the migration
# context/migrator entry points, and the schema dumper.
ActiveSupport.on_load(:active_record) do
  ActiveRecord::Migration.prepend(StrongMigrations::Migration)
  ActiveRecord::MigrationContext.prepend(StrongMigrations::MigrationContext)
  ActiveRecord::Migrator.prepend(StrongMigrations::Migrator)

  require_relative "strong_migrations/schema_dumper"
  ActiveRecord::SchemaDumper.prepend(StrongMigrations::SchemaDumper)
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/version.rb | lib/strong_migrations/version.rb | module StrongMigrations
VERSION = "2.5.2"
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/error_messages.rb | lib/strong_migrations/error_messages.rb | module StrongMigrations
self.error_messages = {
add_column_default:
"Adding a column with a %{default_type} default blocks %{rewrite_blocks} while the entire table is rewritten.
Instead, add the column without a default value, then change the default.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def up
%{add_command}
%{change_command}
end
def down
%{remove_command}
end
end
Then backfill the existing rows in the Rails console or a separate migration with disable_ddl_transaction!.
class Backfill%{migration_name} < ActiveRecord::Migration%{migration_suffix}
disable_ddl_transaction!
def up
%{code}
end
end",
add_column_default_callable:
"Strong Migrations does not support inspecting callable default values.
Please make really sure you're not calling a VOLATILE function,
then wrap it in a safety_assured { ... } block.",
add_column_json:
"There's no equality operator for the json column type, which can cause errors for
existing SELECT DISTINCT queries in your application. Use jsonb instead.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{command}
end
end",
add_column_generated_stored:
"Adding a stored generated column blocks %{rewrite_blocks} while the entire table is rewritten.",
add_column_auto_incrementing:
"Adding an auto-incrementing column blocks %{rewrite_blocks} while the entire table is rewritten.",
change_column:
"Changing the type of an existing column blocks %{rewrite_blocks}
while the entire table is rewritten. A safer approach is to:
1. Create a new column
2. Write to both columns
3. Backfill data from the old column to the new column
4. Move reads from the old column to the new column
5. Stop writing to the old column
6. Drop the old column",
change_column_with_not_null:
"Changing the type is safe, but setting NOT NULL is not.",
change_column_constraint: "Changing the type of a column that has check constraints blocks reads and writes
while every row is checked. Drop the check constraints on the column before
changing the type and add them back afterwards.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{change_column_code}
end
end
class Validate%{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{validate_constraint_code}
end
end",
remove_column: "Active Record caches attributes, which causes problems
when removing columns. Be sure to ignore the column%{column_suffix}:
class %{model} < %{base_model}
%{code}
end
Deploy the code, then wrap this step in a safety_assured { ... } block.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
safety_assured { %{command} }
end
end",
rename_column:
"Renaming a column that's in use will cause errors
in your application. A safer approach is to:
1. Create a new column
2. Write to both columns
3. Backfill data from the old column to the new column
4. Move reads from the old column to the new column
5. Stop writing to the old column
6. Drop the old column",
rename_schema:
"Renaming a schema that's in use will cause errors
in your application. A safer approach is to:
1. Create a new schema
2. Write to both schemas
3. Backfill data from the old schema to the new schema
4. Move reads from the old schema to the new schema
5. Stop writing to the old schema
6. Drop the old schema",
rename_table:
"Renaming a table that's in use will cause errors
in your application. A safer approach is to:
1. Create a new table. Don't forget to recreate indexes from the old table
2. Write to both tables
3. Backfill data from the old table to the new table
4. Move reads from the old table to the new table
5. Stop writing to the old table
6. Drop the old table",
add_reference:
"%{headline} Instead, use:
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
disable_ddl_transaction!
def change
%{command}
end
end",
add_index:
"Adding an index non-concurrently blocks writes. Instead, use:
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
disable_ddl_transaction!
def change
%{command}
end
end",
remove_index:
"Removing an index non-concurrently blocks writes. Instead, use:
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
disable_ddl_transaction!
def change
%{command}
end
end",
add_index_columns:
"Adding a non-unique index with more than three columns rarely improves performance.
Instead, start an index with columns that narrow down the results the most.",
add_index_corruption:
"Adding an index concurrently can cause silent data corruption in Postgres 14.0 to 14.3.
Upgrade Postgres before adding new indexes, or wrap this step in a safety_assured { ... } block
to accept the risk.",
change_table:
"Strong Migrations does not support inspecting what happens inside a
change_table block, so cannot help you here. Please make really sure that what
you're doing is safe before proceeding, then wrap it in a safety_assured { ... } block.",
create_table:
"The force option will destroy existing tables.
If this is intended, drop the existing table first.
In any case, remove the force option.",
execute:
"Strong Migrations does not support inspecting what happens inside an
execute call, so cannot help you here. Please make really sure that what
you're doing is safe before proceeding, then wrap it in a safety_assured { ... } block.",
change_column_default:
"Partial writes are enabled, which can cause incorrect values
to be inserted when changing the default value of a column.
Disable partial writes in config/application.rb:
config.active_record.%{config} = false",
change_column_null:
"Passing a default value to change_column_null runs a single UPDATE query,
which can cause downtime. Instead, backfill the existing rows in the
Rails console or a separate migration with disable_ddl_transaction!.
class Backfill%{migration_name} < ActiveRecord::Migration%{migration_suffix}
disable_ddl_transaction!
def up
%{code}
end
end",
change_column_null_postgresql:
"Setting NOT NULL on an existing column blocks reads and writes while every row is checked.
Instead, add a check constraint and validate it in a separate migration.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{add_constraint_code}
end
end
class Validate%{migration_name} < ActiveRecord::Migration%{migration_suffix}
%{validate_constraint_code}
end",
change_column_null_mysql:
"Setting NOT NULL on an existing column is not safe without strict mode enabled.",
add_foreign_key:
"Adding a foreign key blocks writes on both tables. Instead,
add the foreign key without validating existing rows,
then validate them in a separate migration.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{add_foreign_key_code}
end
end
class Validate%{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{validate_foreign_key_code}
end
end",
validate_foreign_key:
"Validating a foreign key while writes are blocked is dangerous.
Use disable_ddl_transaction! or a separate migration.",
add_check_constraint:
"Adding a check constraint key blocks reads and writes while every row is checked.
Instead, add the check constraint without validating existing rows,
then validate them in a separate migration.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{add_check_constraint_code}
end
end
class Validate%{migration_name} < ActiveRecord::Migration%{migration_suffix}
def change
%{validate_check_constraint_code}
end
end",
add_check_constraint_mysql:
"Adding a check constraint to an existing table is not safe with your database engine.",
validate_check_constraint:
"Validating a check constraint while writes are blocked is dangerous.
Use disable_ddl_transaction! or a separate migration.",
add_exclusion_constraint:
"Adding an exclusion constraint blocks reads and writes while every row is checked.",
add_unique_constraint:
"Adding a unique constraint creates a unique index, which blocks reads and writes.
Instead, create a unique index concurrently, then use it for the constraint.
class %{migration_name} < ActiveRecord::Migration%{migration_suffix}
disable_ddl_transaction!
def up
%{index_command}
%{constraint_command}
end
def down
%{remove_command}
end
end"
}
self.enabled_checks = (error_messages.keys - [:remove_index]).map { |k| [k, {}] }.to_h
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/railtie.rb | lib/strong_migrations/railtie.rb | # ensure activerecord tasks are loaded first
require "active_record/railtie"
module StrongMigrations
  # Rails integration: loads the gem's rake tasks into the host app.
  class Railtie < Rails::Railtie
    rake_tasks do
      load "tasks/strong_migrations.rake"
    end
  end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/safe_methods.rb | lib/strong_migrations/safe_methods.rb | module StrongMigrations
# Safe-by-default rewrites: when StrongMigrations.safe_by_default is on,
# selected unsafe migration methods are replaced with these safer variants
# (concurrent indexes, unvalidated constraints validated separately, etc.).
# Mixed into Checker; @migration is the migration being run.
module SafeMethods
  # True when +method+ should be rewritten to its safe variant:
  # the feature is enabled, the migration is not version-exempt,
  # and a safe implementation exists for the method.
  def safe_by_default_method?(method)
    StrongMigrations.safe_by_default && !version_safe? && [:add_index, :add_belongs_to, :add_reference, :remove_index, :add_foreign_key, :add_check_constraint, :change_column_null].include?(method)
  end

  # Adds the index concurrently (requires being outside a DDL transaction).
  def safe_add_index(*args, **options)
    disable_transaction
    @migration.add_index(*args, **options.merge(algorithm: :concurrently))
  end

  # Removes the index concurrently.
  def safe_remove_index(*args, **options)
    disable_transaction
    @migration.remove_index(*args, **options.merge(algorithm: :concurrently))
  end

  # Adds a reference, splitting out any :foreign_key option so the key can be
  # added in a separate statement (mirroring Active Record's own naming).
  def safe_add_reference(table, reference, *args, **options)
    @migration.reversible do |dir|
      dir.up do
        disable_transaction
        foreign_key = options.delete(:foreign_key)
        @migration.add_reference(table, reference, *args, **options)
        if foreign_key
          # same as Active Record
          name =
            if foreign_key.is_a?(Hash) && foreign_key[:to_table]
              foreign_key[:to_table]
            else
              (ActiveRecord::Base.pluralize_table_names ? reference.to_s.pluralize : reference).to_sym
            end

          foreign_key_opts = foreign_key.is_a?(Hash) ? foreign_key.except(:to_table) : {}
          if reference
            @migration.add_foreign_key(table, name, column: "#{reference}_id", **foreign_key_opts)
          else
            @migration.add_foreign_key(table, name, **foreign_key_opts)
          end
        end
      end
      dir.down do
        @migration.remove_reference(table, reference)
      end
    end
  end

  # Adds the foreign key without validation (skipping the add if an identical
  # unvalidated key already exists), then validates it outside a transaction.
  def safe_add_foreign_key(from_table, to_table, *args, **options)
    @migration.reversible do |dir|
      dir.up do
        if !connection.foreign_key_exists?(from_table, to_table, **options.merge(validate: false))
          @migration.add_foreign_key(from_table, to_table, *args, **options.merge(validate: false))
        end
        disable_transaction
        @migration.validate_foreign_key(from_table, to_table, **options.slice(:column, :name))
      end
      dir.down do
        @migration.remove_foreign_key(from_table, to_table, **options.slice(:column, :name))
      end
    end
  end

  # Adds the check constraint unvalidated, then validates it outside a
  # transaction. Re-running after a failed validate skips the add.
  def safe_add_check_constraint(table, expression, *args, add_options, validate_options)
    @migration.reversible do |dir|
      dir.up do
        # only skip invalid constraints
        unless connection.check_constraints(table).any? { |c| c.options[:name] == validate_options[:name] && !c.options[:validate] }
          @migration.add_check_constraint(table, expression, *args, **add_options)
        end
        disable_transaction
        @migration.validate_check_constraint(table, **validate_options)
      end
      dir.down do
        @migration.remove_check_constraint(table, expression, **add_options.except(:validate))
      end
    end
  end

  # Sets NOT NULL safely: optionally backfills NULLs in batches, adds a
  # NOT NULL check constraint, validates it, flips the column to NOT NULL,
  # then drops the now-redundant constraint — the last three in one
  # explicit transaction.
  def safe_change_column_null(add_args, validate_args, change_args, remove_args, table, column, default, constraints)
    @migration.reversible do |dir|
      dir.up do
        unless default.nil?
          # TODO search for parent model if needed
          # backfilling through a throwaway model only works on the default pool
          if connection.pool != ActiveRecord::Base.connection_pool
            raise_error :change_column_null,
              code: backfill_code(table, column, default)
          end

          # anonymous model bound to the target table, used only for batching
          model =
            Class.new(ActiveRecord::Base) do
              self.table_name = table

              def self.to_s
                "Backfill"
              end
            end

          update_sql =
            model.connection_pool.with_connection do |c|
              quoted_column = c.quote_column_name(column)
              quoted_default = c.quote_default_expression(default, c.send(:column_for, table, column))
              "#{quoted_column} = #{quoted_default}"
            end

          @migration.say("Backfilling default")
          disable_transaction
          model.unscoped.in_batches(of: 10000) do |relation|
            relation.where(column => nil).update_all(update_sql)
            sleep(0.01)
          end
        end

        add_options = add_args.extract_options!
        validate_options = validate_args.extract_options!
        remove_options = remove_args.extract_options!

        # only skip invalid constraints
        unless constraints.any? { |c| c.options[:name] == validate_options[:name] && !c.options[:validate] }
          @migration.add_check_constraint(*add_args, **add_options)
        end

        disable_transaction

        connection.begin_db_transaction
        @migration.validate_check_constraint(*validate_args, **validate_options)
        @migration.change_column_null(*change_args)
        @migration.remove_check_constraint(*remove_args, **remove_options)
        connection.commit_db_transaction
      end
      dir.down do
        down_args = change_args.dup
        down_args[2] = true
        @migration.change_column_null(*down_args)
      end
    end
  end

  # hard to commit at right time when reverting
  # so just commit at start
  def disable_transaction
    if in_transaction? && !transaction_disabled
      connection.commit_db_transaction
      self.transaction_disabled = true
    end
  end

  def in_transaction?
    connection.open_transactions > 0
  end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/schema_dumper.rb | lib/strong_migrations/schema_dumper.rb | module StrongMigrations
# Prepended into ActiveRecord::SchemaDumper. When alphabetize_schema is
# enabled, wraps the connection so dumped columns come out sorted by name;
# otherwise the dumper is constructed unchanged.
module SchemaDumper
  def initialize(connection, ...)
    if StrongMigrations.alphabetize_schema
      super(WrappedConnection.new(connection), ...)
    else
      super
    end
  end
end
# Proxy around a database connection that sorts columns by name so the
# dumped schema is alphabetized; all other calls are forwarded unchanged.
class WrappedConnection
  delegate_missing_to :@connection

  def initialize(connection)
    @connection = connection
  end

  # Same columns as the real connection, sorted alphabetically by name.
  def columns(...)
    @connection.columns(...).sort_by(&:name)
  end

  # forward private methods with send
  # method_missing cannot tell how method was called
  # this is not ideal, but other solutions have drawbacks
  def send(name, ...)
    if respond_to?(name, true)
      super
    else
      @connection.send(name, ...)
    end
  end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/migration.rb | lib/strong_migrations/migration.rb | module StrongMigrations
# Prepended into ActiveRecord::Migration; routes migration DSL calls
# through the safety checker.
module Migration
  # Records the direction before running; if the checker committed the DDL
  # transaction mid-run, reopens one so Active Record can commit normally.
  def migrate(direction)
    strong_migrations_checker.direction = direction
    super
    connection.begin_db_transaction if strong_migrations_checker.transaction_disabled
  end

  # Intercepts DSL calls (add_column, add_index, ...) and hands them to the
  # checker; catch(:safe) lets the checker abort the original implementation.
  # Schema loading is exempt from checking.
  def method_missing(method, *args)
    return super if is_a?(ActiveRecord::Schema) || is_a?(ActiveRecord::Schema::Definition)

    catch(:safe) do
      strong_migrations_checker.perform(method, *args) do
        super
      end
    end
  end
  # same as ActiveRecord::Migration
  ruby2_keywords(:method_missing)

  # Reverting a version-exempt migration is treated as safe wholesale.
  def revert(*)
    if strong_migrations_checker.version_safe?
      safety_assured { super }
    else
      super
    end
  end

  # Runs the block with all checks suppressed.
  def safety_assured
    strong_migrations_checker.class.safety_assured do
      yield
    end
  end

  # Fails the migration from a custom check with a formatted message.
  def stop!(message, header: "Custom check")
    raise StrongMigrations::UnsafeMigration, "\n=== #{header} #strong_migrations ===\n\n#{message}\n"
  end

  private

  def strong_migrations_checker
    @strong_migrations_checker ||= StrongMigrations::Checker.new(self)
  end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/migration_context.rb | lib/strong_migrations/migration_context.rb | module StrongMigrations
# Prepended into ActiveRecord::MigrationContext; wraps the entry points so
# StrongMigrations errors surface cleanly (no cause chain, trimmed message,
# gem frames removed from the backtrace).
module MigrationContext
  def up(...)
    super
  rescue => e
    strong_migrations_process_exception(e)
  end

  def down(...)
    super
  rescue => e
    strong_migrations_process_exception(e)
  end

  def run(...)
    super
  rescue => e
    strong_migrations_process_exception(e)
  end

  private

  # Mutates the caught exception (singleton methods on e) when it was caused
  # by a StrongMigrations::Error, then always re-raises it.
  def strong_migrations_process_exception(e)
    if e.cause.is_a?(StrongMigrations::Error)
      # strip cause and clean backtrace
      def e.cause
        nil
      end

      def e.message
        super.sub("\n\n\n", "\n\n") + "\n"
      end

      # keep the full backtrace when rake is run with --trace
      unless Rake.application.options.trace
        def e.backtrace
          bc = ActiveSupport::BacktraceCleaner.new
          bc.add_silencer { |line| line =~ /strong_migrations/ }
          bc.clean(super)
        end
      end
    end
    raise e
  end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/checker.rb | lib/strong_migrations/checker.rb | module StrongMigrations
class Checker
include Checks
include SafeMethods
attr_accessor :direction, :transaction_disabled, :timeouts_set
class << self
attr_accessor :safe
end
def initialize(migration)
@migration = migration
reset
end
def reset
@new_tables = []
@new_columns = []
@timeouts_set = false
@committed = false
@transaction_disabled = false
@skip_retries = false
end
def self.safety_assured
previous_value = safe
begin
self.safe = true
yield
ensure
self.safe = previous_value
end
end
def perform(method, *args, &block)
return yield if skip?
check_adapter
check_version_supported
set_timeouts
check_lock_timeout
if !safe? || safe_by_default_method?(method)
# TODO better pattern
# see checks.rb for methods
case method
when :add_check_constraint
check_add_check_constraint(*args)
when :add_column
check_add_column(*args)
when :add_exclusion_constraint
check_add_exclusion_constraint(*args)
when :add_foreign_key
check_add_foreign_key(*args)
when :add_index
check_add_index(*args)
when :add_reference, :add_belongs_to
check_add_reference(method, *args)
when :add_unique_constraint
check_add_unique_constraint(*args)
when :change_column
check_change_column(*args)
when :change_column_default
check_change_column_default(*args)
when :change_column_null
check_change_column_null(*args)
when :change_table
check_change_table
when :create_join_table
check_create_join_table(*args)
when :create_table
check_create_table(*args)
when :execute
check_execute
when :remove_column, :remove_columns, :remove_timestamps, :remove_reference, :remove_belongs_to
check_remove_column(method, *args)
when :remove_index
check_remove_index(*args)
when :rename_column
check_rename_column
when :rename_schema
check_rename_schema
when :rename_table
check_rename_table
when :validate_check_constraint
check_validate_check_constraint
when :validate_foreign_key
check_validate_foreign_key
when :commit_db_transaction
# if committed, likely no longer in DDL transaction
# and no longer eligible to be retried at migration level
# okay to have false positives
@committed = true
end
if !safe?
# custom checks
StrongMigrations.checks.each do |check|
@migration.instance_exec(method, args, &check)
end
end
end
result =
if retry_lock_timeouts?(method)
# TODO figure out how to handle methods that generate multiple statements
# like add_reference(table, ref, index: {algorithm: :concurrently})
# lock timeout after first statement will cause retry to fail
retry_lock_timeouts { perform_method(method, *args, &block) }
else
perform_method(method, *args, &block)
end
# outdated statistics + a new index can hurt performance of existing queries
if StrongMigrations.auto_analyze && direction == :up && adds_index?(method, *args)
adapter.analyze_table(args[0])
end
result
end
def perform_method(method, *args)
if StrongMigrations.remove_invalid_indexes && direction == :up && method == :add_index && postgresql?
remove_invalid_index_if_needed(*args)
end
yield
end
def retry_lock_timeouts(check_committed: false)
retries = 0
begin
yield
rescue ActiveRecord::LockWaitTimeout => e
if retries < StrongMigrations.lock_timeout_retries && !(check_committed && @committed)
retries += 1
delay = StrongMigrations.lock_timeout_retry_delay
@migration.say("Lock timeout. Retrying in #{delay} seconds...")
sleep(delay)
retry
end
raise e
end
end
def version_safe?
version && version <= StrongMigrations.start_after
end
def skip?
StrongMigrations.skipped_databases.map(&:to_s).include?(db_config_name)
end
def set_transaction_timeout
return if defined?(@transaction_timeout_set)
if StrongMigrations.transaction_timeout
adapter.set_transaction_timeout(StrongMigrations.transaction_timeout)
end
@transaction_timeout_set = true
end
private
def check_adapter
return if defined?(@adapter_checked)
if adapter.instance_of?(Adapters::AbstractAdapter)
warn "[strong_migrations] Unsupported adapter: #{connection.adapter_name}. Use StrongMigrations.skip_database(#{db_config_name.to_sym.inspect}) to silence this warning."
end
@adapter_checked = true
end
def check_version_supported
return if defined?(@version_checked)
min_version = adapter.min_version
if min_version
version = adapter.server_version
if version < Gem::Version.new(min_version)
raise UnsupportedVersion, "#{adapter.name} version (#{version}) not supported in this version of Strong Migrations (#{StrongMigrations::VERSION})"
end
end
@version_checked = true
end
def set_timeouts
return if @timeouts_set
if StrongMigrations.statement_timeout
adapter.set_statement_timeout(StrongMigrations.statement_timeout)
end
if StrongMigrations.lock_timeout
adapter.set_lock_timeout(StrongMigrations.lock_timeout)
end
@timeouts_set = true
end
def check_lock_timeout
return if defined?(@lock_timeout_checked)
if StrongMigrations.lock_timeout_limit
adapter.check_lock_timeout(StrongMigrations.lock_timeout_limit)
end
@lock_timeout_checked = true
end
def safe?
self.class.safe || ENV["SAFETY_ASSURED"] || (direction == :down && !StrongMigrations.check_down) || version_safe? || @migration.reverting?
end
def version
@migration.version
end
def adapter
@adapter ||= begin
cls =
case connection.adapter_name
when /postg/i # PostgreSQL, PostGIS
Adapters::PostgreSQLAdapter
when /mysql|trilogy/i
if connection.try(:mariadb?)
Adapters::MariaDBAdapter
else
Adapters::MySQLAdapter
end
else
Adapters::AbstractAdapter
end
cls.new(self)
end
end
def connection
@migration.connection
end
def db_config_name
connection.pool.db_config.name
end
def retry_lock_timeouts?(method)
(
StrongMigrations.lock_timeout_retries > 0 &&
!in_transaction? &&
method != :transaction &&
!@skip_retries
)
end
def without_retries
previous_value = @skip_retries
begin
@skip_retries = true
yield
ensure
@skip_retries = previous_value
end
end
def adds_index?(method, *args)
case method
when :add_index
true
when :add_reference, :add_belongs_to
options = args.extract_options!
!!options.fetch(:index, true)
else
false
end
end
# REINDEX INDEX CONCURRENTLY leaves a new invalid index if it fails, so use remove_index instead
def remove_invalid_index_if_needed(*args)
options = args.extract_options!
# ensures has same options as existing index
# check args to avoid errors with index_exists?
return unless args.size == 2 && connection.index_exists?(*args, **options.merge(valid: false))
table, columns = args
index_name = options.fetch(:name, connection.index_name(table, columns))
@migration.say("Attempting to remove invalid index")
without_retries do
# TODO pass index schema for extra safety?
@migration.remove_index(table, **{name: index_name}.merge(options.slice(:algorithm)))
end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/checks.rb | lib/strong_migrations/checks.rb | # TODO better pattern
module StrongMigrations
module Checks
private
def check_add_check_constraint(*args)
options = args.extract_options!
table, expression = args
if !new_table?(table)
if postgresql? && options[:validate] != false
add_options = options.merge(validate: false)
name = options[:name] || connection.check_constraint_options(table, expression, options)[:name]
validate_options = {name: name}
if StrongMigrations.safe_by_default
safe_add_check_constraint(*args, add_options, validate_options)
throw :safe
end
raise_error :add_check_constraint,
add_check_constraint_code: command_str("add_check_constraint", [table, expression, add_options]),
validate_check_constraint_code: command_str("validate_check_constraint", [table, validate_options])
elsif mysql? || mariadb?
raise_error :add_check_constraint_mysql
end
end
end
def check_add_column(*args)
options = args.extract_options!
table, column, type = args
default = options[:default]
# keep track of new columns of change_column_default check
@new_columns << [table.to_s, column.to_s]
# Check key since DEFAULT NULL behaves differently from no default
#
# Also, Active Record has special case for uuid columns that allows function default values
# https://github.com/rails/rails/blob/v7.0.3.1/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb#L92-L93
if !default.nil? && (!adapter.add_column_default_safe? || (volatile = (postgresql? && type.to_s == "uuid" && default.to_s.include?("()") && adapter.default_volatile?(default))))
if options[:null] == false
options = options.except(:null)
append = "\n\nThen add the NOT NULL constraint in separate migrations."
end
raise_error :add_column_default,
add_command: command_str("add_column", [table, column, type, options.except(:default)]),
change_command: command_str("change_column_default", [table, column, default]),
remove_command: command_str("remove_column", [table, column]),
code: backfill_code(table, column, default, volatile),
append: append,
rewrite_blocks: adapter.rewrite_blocks,
default_type: (volatile ? "volatile" : "non-null")
elsif default.is_a?(Proc) && postgresql?
# adding a column with a VOLATILE default is not safe
# https://www.postgresql.org/docs/current/sql-altertable.html#SQL-ALTERTABLE-NOTES
# functions like random() and clock_timestamp() are VOLATILE
# check for Proc to match Active Record
raise_error :add_column_default_callable
end
if type.to_s == "json" && postgresql?
raise_error :add_column_json,
command: command_str("add_column", [table, column, :jsonb, options])
end
if type.to_s == "virtual" && options[:stored]
raise_error :add_column_generated_stored, rewrite_blocks: adapter.rewrite_blocks
end
if adapter.auto_incrementing_types.include?(type.to_s)
append = (mysql? || mariadb?) ? "\n\nIf using statement-based replication, this can also generate different values on replicas." : ""
raise_error :add_column_auto_incrementing,
rewrite_blocks: adapter.rewrite_blocks,
append: append
end
end
def check_add_exclusion_constraint(*args)
table = args[0]
unless new_table?(table)
raise_error :add_exclusion_constraint
end
end
# unlike add_index, we don't make an exception here for new tables
#
# with add_index, it's fine to lock a new table even after inserting data
# since the table won't be in use by the application
#
# with add_foreign_key, this would cause issues since it locks the referenced table
#
# it's okay to allow if the table is empty, but not a fan of data-dependent checks,
# since the data in production could be different from development
#
# note: adding foreign_keys with create_table is fine
# since the table is always guaranteed to be empty
def check_add_foreign_key(*args)
options = args.extract_options!
from_table, to_table = args
validate = options.fetch(:validate, true)
if postgresql? && validate
if StrongMigrations.safe_by_default
safe_add_foreign_key(*args, **options)
throw :safe
end
raise_error :add_foreign_key,
add_foreign_key_code: command_str("add_foreign_key", [from_table, to_table, options.merge(validate: false)]),
validate_foreign_key_code: command_str("validate_foreign_key", [from_table, to_table])
end
end
def check_add_index(*args)
options = args.extract_options!
table, columns = args
if columns.is_a?(Array) && columns.size > 3 && !options[:unique]
raise_error :add_index_columns, header: "Best practice"
end
# safe_by_default goes through this path as well
if postgresql? && options[:algorithm] == :concurrently && adapter.index_corruption?
raise_error :add_index_corruption
end
# safe to add non-concurrently to new tables (even after inserting data)
# since the table won't be in use by the application
if postgresql? && options[:algorithm] != :concurrently && !new_table?(table)
if StrongMigrations.safe_by_default
safe_add_index(*args, **options)
throw :safe
end
raise_error :add_index, command: command_str("add_index", [table, columns, options.merge(algorithm: :concurrently)])
end
end
def check_add_reference(method, *args)
options = args.extract_options!
table, reference = args
if postgresql?
index_value = options.fetch(:index, true)
concurrently_set = index_value.is_a?(Hash) && index_value[:algorithm] == :concurrently
index_unsafe = index_value && !concurrently_set
foreign_key_value = options[:foreign_key]
validate_false = foreign_key_value.is_a?(Hash) && foreign_key_value[:validate] == false
foreign_key_unsafe = foreign_key_value && !validate_false
if index_unsafe || foreign_key_unsafe
if index_value.is_a?(Hash)
options[:index] = options[:index].merge(algorithm: :concurrently)
elsif index_value
options = options.merge(index: {algorithm: :concurrently})
end
if StrongMigrations.safe_by_default
safe_add_reference(*args, **options)
throw :safe
end
if foreign_key_unsafe
options.delete(:foreign_key)
headline = "Adding a foreign key blocks writes on both tables."
append = "\n\nThen add the foreign key in separate migrations."
else
headline = "Adding an index non-concurrently locks the table."
end
raise_error :add_reference,
headline: headline,
command: command_str(method, [table, reference, options]),
append: append
end
end
end
def check_add_unique_constraint(*args)
args.extract_options!
table, column = args
# column and using_index cannot be used together
# check for column to ensure error message can be generated
if column && !new_table?(table)
index_name = connection.index_name(table, {column: column})
raise_error :add_unique_constraint,
index_command: command_str(:add_index, [table, column, {unique: true, algorithm: :concurrently}]),
constraint_command: command_str(:add_unique_constraint, [table, {using_index: index_name}]),
remove_command: command_str(:remove_unique_constraint, [table, column])
end
end
def check_change_column(*args)
options = args.extract_options!
table, column, type = args
safe = false
table_columns = connection.columns(table) rescue []
existing_column = table_columns.find { |c| c.name.to_s == column.to_s }
if existing_column
existing_type = existing_column.sql_type.sub(/\(\d+(,\d+)?\)/, "")
safe = adapter.change_type_safe?(table, column, type, options, existing_column, existing_type)
end
# unsafe to set NOT NULL for safe types with Postgres
# TODO check if safe for MySQL and MariaDB
if safe && existing_column.null && options[:null] == false
raise_error :change_column_with_not_null
end
raise_error :change_column, rewrite_blocks: adapter.rewrite_blocks unless safe
# constraints must be rechecked
# Postgres recommends dropping constraints before and adding them back
# https://www.postgresql.org/docs/current/ddl-alter.html#DDL-ALTER-COLUMN-TYPE
if postgresql?
constraints = adapter.constraints(table, column)
if constraints.any?
change_commands = []
constraints.each do |c|
change_commands << command_str(:remove_check_constraint, [table, c.expression, {name: c.name}])
end
change_commands << command_str(:change_column, args + [options])
constraints.each do |c|
change_commands << command_str(:add_check_constraint, [table, c.expression, {name: c.name, validate: false}])
end
validate_commands = []
constraints.each do |c|
validate_commands << command_str(:validate_check_constraint, [table, {name: c.name}])
end
raise_error :change_column_constraint,
change_column_code: change_commands.join("\n "),
validate_constraint_code: validate_commands.join("\n ")
end
end
end
def check_change_column_default(*args)
table, column, _default_or_changes = args
# just check ActiveRecord::Base, even though can override on model
partial_inserts = ActiveRecord::Base.partial_inserts
if partial_inserts && !new_column?(table, column)
raise_error :change_column_default,
config: "partial_inserts"
end
end
def check_change_column_null(*args)
table, column, null, default = args
if !null
if postgresql?
constraints = connection.check_constraints(table)
safe = constraints.any? { |c| c.options[:validate] && (c.expression == "#{column} IS NOT NULL" || c.expression == "#{connection.quote_column_name(column)} IS NOT NULL") }
unless safe
expression = "#{quote_column_if_needed(column)} IS NOT NULL"
# match https://github.com/nullobject/rein
constraint_name = "#{table}_#{column}_null"
if adapter.max_constraint_name_length && constraint_name.bytesize > adapter.max_constraint_name_length
constraint_name = connection.check_constraint_options(table, expression, {})[:name]
# avoid collision with Active Record naming for safe_by_default
if StrongMigrations.safe_by_default
constraint_name = constraint_name.sub("rails", "strong_migrations")
end
end
add_args = [table, expression, {name: constraint_name, validate: false}]
validate_args = [table, {name: constraint_name}]
change_args = [table, column, null]
remove_args = [table, {name: constraint_name}]
if StrongMigrations.safe_by_default
safe_change_column_null(add_args, validate_args, change_args, remove_args, table, column, default, constraints)
throw :safe
end
add_constraint_code = command_str(:add_check_constraint, add_args)
up_code = String.new(command_str(:validate_check_constraint, validate_args))
up_code << "\n #{command_str(:change_column_null, change_args)}"
up_code << "\n #{command_str(:remove_check_constraint, remove_args)}"
down_code = "#{add_constraint_code}\n #{command_str(:change_column_null, [table, column, true])}"
validate_constraint_code = "def up\n #{up_code}\n end\n\n def down\n #{down_code}\n end"
raise_error :change_column_null_postgresql,
add_constraint_code: add_constraint_code,
validate_constraint_code: validate_constraint_code
end
elsif mysql? || mariadb?
unless adapter.strict_mode?
raise_error :change_column_null_mysql
end
end
if !default.nil?
raise_error :change_column_null,
code: backfill_code(table, column, default)
end
end
end
def check_change_table
raise_error :change_table, header: "Possibly dangerous operation"
end
def check_create_join_table(*args)
options = args.extract_options!
raise_error :create_table if options[:force]
# TODO keep track of new table of add_index check
end
def check_create_table(*args)
options = args.extract_options!
table, _ = args
raise_error :create_table if options[:force]
# keep track of new table of add_index check
@new_tables << table.to_s
end
def check_execute
raise_error :execute, header: "Possibly dangerous operation"
end
def check_remove_column(method, *args)
columns =
case method
when :remove_timestamps
[:created_at, :updated_at]
when :remove_column
[args[1]]
when :remove_columns
if args.last.is_a?(Hash)
args[1..-2]
else
args[1..-1]
end
else
options = args[2] || {}
reference = args[1]
cols = []
cols << "#{reference}_type".to_sym if options[:polymorphic]
cols << "#{reference}_id".to_sym
cols
end
code = "self.ignored_columns += #{columns.map(&:to_s).inspect}"
raise_error :remove_column,
model: model_name(args[0]),
code: code,
command: command_str(method, args),
column_suffix: columns.size > 1 ? "s" : ""
end
def check_remove_index(*args)
options = args.extract_options!
table, _ = args
if postgresql? && options[:algorithm] != :concurrently && !new_table?(table)
# avoid suggesting extra (invalid) args
args = args[0..1] unless StrongMigrations.safe_by_default
if StrongMigrations.safe_by_default
safe_remove_index(*args, **options)
throw :safe
end
raise_error :remove_index, command: command_str("remove_index", args + [options.merge(algorithm: :concurrently)])
end
end
def check_rename_column
raise_error :rename_column
end
def check_rename_schema
raise_error :rename_schema
end
def check_rename_table
raise_error :rename_table
end
def check_validate_check_constraint
if postgresql? && adapter.writes_blocked?
raise_error :validate_check_constraint
end
end
def check_validate_foreign_key
if postgresql? && adapter.writes_blocked?
raise_error :validate_foreign_key
end
end
# helpers
def postgresql?
adapter.instance_of?(Adapters::PostgreSQLAdapter)
end
def mysql?
adapter.instance_of?(Adapters::MySQLAdapter)
end
def mariadb?
adapter.instance_of?(Adapters::MariaDBAdapter)
end
def ar_version
ActiveRecord::VERSION::STRING.to_f
end
def raise_error(message_key, header: nil, append: nil, **vars)
return unless StrongMigrations.check_enabled?(message_key, version: version)
message = StrongMigrations.error_messages[message_key] || "Missing message"
message = message + append if append
vars[:migration_name] = @migration.class.name
vars[:migration_suffix] = migration_suffix
vars[:base_model] = "ApplicationRecord"
# escape % not followed by {
message = message.gsub(/%(?!{)/, "%%") % vars if message.include?("%")
@migration.stop!(message, header: header || "Dangerous operation detected")
end
def constraint_str(statement, identifiers)
# not all identifiers are tables, but this method of quoting should be fine
statement % identifiers.map { |v| connection.quote_table_name(v) }
end
def safety_assured_str(code)
"safety_assured do\n execute '#{code}' \n end"
end
def command_str(command, args)
str_args = args[0..-2].map { |a| a.inspect }
# prettier last arg
last_arg = args[-1]
if last_arg.is_a?(Hash)
if last_arg.any?
str_args << last_arg.map do |k, v|
if v.is_a?(Hash)
# pretty index: {algorithm: :concurrently}
"#{k}: {#{v.map { |k2, v2| "#{k2}: #{v2.inspect}" }.join(", ")}}"
else
"#{k}: #{v.inspect}"
end
end.join(", ")
end
else
str_args << last_arg.inspect
end
"#{command} #{str_args.join(", ")}"
end
def backfill_code(table, column, default, function = false)
model = model_name(table)
if function
# update_all(column: Arel.sql(default)) also works in newer versions of Active Record
update_expr = "#{quote_column_if_needed(column)} = #{default}"
"#{model}.unscoped.in_batches(of: 10000) do |relation| \n relation.where(#{column}: nil).update_all(#{update_expr.inspect})\n sleep(0.01)\n end"
else
"#{model}.unscoped.in_batches(of: 10000) do |relation| \n relation.where(#{column}: nil).update_all #{column}: #{default.inspect}\n sleep(0.01)\n end"
end
end
# only quote when needed
# important! only use for display purposes
def quote_column_if_needed(column)
/\A[a-z0-9_]+\z/.match?(column.to_s) ? column : connection.quote_column_name(column)
end
def new_table?(table)
@new_tables.include?(table.to_s)
end
def new_column?(table, column)
new_table?(table) || @new_columns.include?([table.to_s, column.to_s])
end
def migration_suffix
"[#{ActiveRecord::VERSION::MAJOR}.#{ActiveRecord::VERSION::MINOR}]"
end
def model_name(table)
table.to_s.classify
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/migrator.rb | lib/strong_migrations/migrator.rb | module StrongMigrations
module Migrator
def ddl_transaction(migration, ...)
retries = StrongMigrations.lock_timeout_retries > 0 && use_transaction?(migration)
return super unless retries || StrongMigrations.transaction_timeout
# handle MigrationProxy class
migration = migration.send(:migration) if !migration.is_a?(ActiveRecord::Migration) && migration.respond_to?(:migration, true)
checker = migration.send(:strong_migrations_checker)
return super if checker.skip?
checker.set_transaction_timeout
return super unless retries
# retry migration since the entire transaction needs to be rerun
checker.retry_lock_timeouts(check_committed: true) do
# failed transaction reverts timeout, so need to re-apply
checker.reset
super(migration, ...)
end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/adapters/mysql_adapter.rb | lib/strong_migrations/adapters/mysql_adapter.rb | # note: MariaDB inherits from this adapter
# when making changes, be sure to see how it affects it
module StrongMigrations
module Adapters
class MySQLAdapter < AbstractAdapter
def name
"MySQL"
end
def min_version
"8.0"
end
def server_version
@server_version ||= begin
target_version(StrongMigrations.target_mysql_version) do
select_all("SELECT VERSION()").first["VERSION()"].split("-").first
end
end
end
def set_statement_timeout(timeout)
# use ceil to prevent no timeout for values under 1 ms
select_all("SET max_execution_time = #{connection.quote((timeout.to_f * 1000).ceil)}")
end
def set_lock_timeout(timeout)
# fix deprecation warning with Active Record 7.1
timeout = timeout.value if timeout.is_a?(ActiveSupport::Duration)
select_all("SET lock_wait_timeout = #{connection.quote(timeout)}")
end
def check_lock_timeout(limit)
lock_timeout = connection.select_all("SHOW VARIABLES LIKE 'lock_wait_timeout'").first["Value"]
# lock timeout is an integer
if lock_timeout.to_i > limit
warn "[strong_migrations] DANGER: Lock timeout is longer than #{limit} seconds: #{lock_timeout}"
end
end
def analyze_table(table)
connection.execute "ANALYZE TABLE #{connection.quote_table_name(table.to_s)}"
end
def add_column_default_safe?
true
end
def change_type_safe?(table, column, type, options, existing_column, existing_type)
safe = false
case type.to_s
when "string"
limit = options[:limit] || 255
if ["varchar"].include?(existing_type) && limit >= existing_column.limit
# https://dev.mysql.com/doc/refman/5.7/en/innodb-online-ddl-operations.html
# https://mariadb.com/kb/en/innodb-online-ddl-operations-with-the-instant-alter-algorithm/#changing-the-data-type-of-a-column
# increased limit, but doesn't change number of length bytes
# 1-255 = 1 byte, 256-65532 = 2 bytes, 65533+ = too big for varchar
# account for charset
# https://dev.mysql.com/doc/refman/8.0/en/charset-mysql.html
# https://mariadb.com/kb/en/supported-character-sets-and-collations/
sql = <<~SQL
SELECT cs.MAXLEN
FROM INFORMATION_SCHEMA.CHARACTER_SETS cs
INNER JOIN INFORMATION_SCHEMA.COLUMNS c ON c.CHARACTER_SET_NAME = cs.CHARACTER_SET_NAME
WHERE c.TABLE_SCHEMA = database() AND
c.TABLE_NAME = #{connection.quote(table)} AND
c.COLUMN_NAME = #{connection.quote(column)}
SQL
row = connection.select_all(sql).first
if row
threshold = 255 / row["MAXLEN"]
safe = limit <= threshold || existing_column.limit > threshold
else
warn "[strong_migrations] Could not determine charset"
end
end
end
safe
end
def strict_mode?
sql_modes = sql_modes()
sql_modes.include?("STRICT_ALL_TABLES") || sql_modes.include?("STRICT_TRANS_TABLES")
end
def rewrite_blocks
"writes"
end
def max_constraint_name_length
64
end
private
# do not memoize
# want latest value
def sql_modes
if StrongMigrations.target_sql_mode && StrongMigrations.developer_env?
StrongMigrations.target_sql_mode.split(",")
else
select_all("SELECT @@SESSION.sql_mode").first["@@SESSION.sql_mode"].split(",")
end
end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/adapters/postgresql_adapter.rb | lib/strong_migrations/adapters/postgresql_adapter.rb | module StrongMigrations
module Adapters
class PostgreSQLAdapter < AbstractAdapter
def name
"PostgreSQL"
end
def min_version
"12"
end
def server_version
@version ||= begin
target_version(StrongMigrations.target_postgresql_version) do
version = select_all("SHOW server_version_num").first["server_version_num"].to_i
# major and minor version
"#{version / 10000}.#{(version % 10000)}"
end
end
end
def set_statement_timeout(timeout)
set_timeout("statement_timeout", timeout)
end
def set_transaction_timeout(timeout)
# TODO make sure true version supports it as well?
set_timeout("transaction_timeout", timeout) if server_version >= Gem::Version.new("17")
end
def set_lock_timeout(timeout)
set_timeout("lock_timeout", timeout)
end
def check_lock_timeout(limit)
lock_timeout = connection.select_all("SHOW lock_timeout").first["lock_timeout"]
lock_timeout_sec = timeout_to_sec(lock_timeout)
if lock_timeout_sec == 0
warn "[strong_migrations] DANGER: No lock timeout set"
elsif lock_timeout_sec > limit
warn "[strong_migrations] DANGER: Lock timeout is longer than #{limit} seconds: #{lock_timeout}"
end
end
def analyze_table(table)
connection.execute "ANALYZE #{connection.quote_table_name(table.to_s)}"
end
def add_column_default_safe?
true
end
def change_type_safe?(table, column, type, options, existing_column, existing_type)
safe = false
case type.to_s
when "string"
# safe to increase limit or remove it
# not safe to decrease limit or add a limit
case existing_type
when "character varying"
safe = !options[:limit] || (existing_column.limit && options[:limit] >= existing_column.limit)
when "text"
safe = !options[:limit]
when "citext"
safe = !options[:limit] && !indexed?(table, column)
end
when "text"
# safe to change varchar to text (and text to text)
safe =
["character varying", "text"].include?(existing_type) ||
(existing_type == "citext" && !indexed?(table, column))
when "citext"
safe = ["character varying", "text"].include?(existing_type) && !indexed?(table, column)
when "varbit"
# increasing length limit or removing the limit is safe
# but there doesn't seem to be a way to set/modify it
# https://wiki.postgresql.org/wiki/What%27s_new_in_PostgreSQL_9.2#Reduce_ALTER_TABLE_rewrites
when "numeric", "decimal"
# numeric and decimal are equivalent and can be used interchangeably
safe = ["numeric", "decimal"].include?(existing_type) &&
(
(
# unconstrained
!options[:precision] && !options[:scale]
) || (
# increased precision, same scale
options[:precision] && existing_column.precision &&
options[:precision] >= existing_column.precision &&
options[:scale] == existing_column.scale
)
)
when "datetime", "timestamp", "timestamptz"
# precision for datetime
# limit for timestamp, timestamptz
precision = (type.to_s == "datetime" ? options[:precision] : options[:limit]) || 6
existing_precision = existing_column.limit || existing_column.precision || 6
type_map = {
"timestamp" => "timestamp without time zone",
"timestamptz" => "timestamp with time zone"
}
maybe_safe = type_map.value?(existing_type) && precision >= existing_precision
if maybe_safe
new_type = type.to_s == "datetime" ? datetime_type : type.to_s
# resolve with fallback
new_type = type_map[new_type] || new_type
safe = new_type == existing_type || time_zone == "UTC"
end
when "time"
precision = options[:precision] || options[:limit] || 6
existing_precision = existing_column.precision || existing_column.limit || 6
safe = existing_type == "time without time zone" && precision >= existing_precision
when "timetz"
# increasing precision is safe
# but there doesn't seem to be a way to set/modify it
when "interval"
# https://wiki.postgresql.org/wiki/What%27s_new_in_PostgreSQL_9.2#Reduce_ALTER_TABLE_rewrites
# Active Record uses precision before limit
precision = options[:precision] || options[:limit] || 6
existing_precision = existing_column.precision || existing_column.limit || 6
safe = existing_type == "interval" && precision >= existing_precision
when "inet"
safe = existing_type == "cidr"
end
safe
end
def writes_blocked?
query = <<~SQL
SELECT
relation::regclass::text
FROM
pg_locks
WHERE
mode IN ('ShareRowExclusiveLock', 'AccessExclusiveLock') AND
pid = pg_backend_pid()
SQL
select_all(query.squish).any?
end
# only check in non-developer environments (where actual server version is used)
def index_corruption?
server_version >= Gem::Version.new("14.0") &&
server_version < Gem::Version.new("14.4") &&
!StrongMigrations.developer_env?
end
# default to true if unsure
def default_volatile?(default)
name = default.to_s.delete_suffix("()")
rows = select_all("SELECT provolatile FROM pg_proc WHERE proname = #{connection.quote(name)}").to_a
rows.empty? || rows.any? { |r| r["provolatile"] == "v" }
end
def auto_incrementing_types
["primary_key", "serial", "bigserial"]
end
def max_constraint_name_length
63
end
def constraints(table, column)
# TODO improve column check
connection.check_constraints(table).select { |c| /\b#{Regexp.escape(column.to_s)}\b/.match?(c.expression) }
end
private
def set_timeout(setting, timeout)
# use ceil to prevent no timeout for values under 1 ms
timeout = (timeout.to_f * 1000).ceil unless timeout.is_a?(String)
select_all("SET #{setting} TO #{connection.quote(timeout)}")
end
# units: https://www.postgresql.org/docs/current/config-setting.html
def timeout_to_sec(timeout)
units = {
"us" => 0.001,
"ms" => 1,
"s" => 1000,
"min" => 1000 * 60,
"h" => 1000 * 60 * 60,
"d" => 1000 * 60 * 60 * 24
}
timeout_ms = timeout.to_i
units.each do |k, v|
if timeout.end_with?(k)
timeout_ms *= v
break
end
end
timeout_ms / 1000.0
end
# columns is array for column index and string for expression index
# the current approach can yield false positives for expression indexes
# but prefer to keep it simple for now
def indexed?(table, column)
connection.indexes(table).any? { |i| i.columns.include?(column.to_s) }
end
# Returns the native column type name the adapter uses for Rails
# :datetime columns (looked up in NATIVE_DATABASE_TYPES).
def datetime_type
  # https://github.com/rails/rails/pull/41084
  # no need to support custom datetime_types
  key = connection.class.datetime_type

  # could be timestamp, timestamp without time zone, timestamp with time zone, etc
  connection.class.const_get(:NATIVE_DATABASE_TYPES).fetch(key).fetch(:name)
end
# do not memoize
# want latest value
# Returns the session time zone as reported by `SHOW timezone`.
def time_zone
  select_all("SHOW timezone").first["TimeZone"]
end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/adapters/mariadb_adapter.rb | lib/strong_migrations/adapters/mariadb_adapter.rb | module StrongMigrations
module Adapters
# Adapter for MariaDB. Inherits the MySQL behavior and overrides only
# the pieces where MariaDB differs.
class MariaDBAdapter < MySQLAdapter
  def name
    "MariaDB"
  end

  def min_version
    "10.5"
  end

  # Memoized. Uses the configured target version in developer
  # environments; otherwise parses `SELECT VERSION()`, taking the part
  # before the first dash (MariaDB appends a suffix to the version).
  def server_version
    @server_version ||= target_version(StrongMigrations.target_mariadb_version) do
      full_version = select_all("SELECT VERSION()").first["VERSION()"]
      full_version.split("-").first
    end
  end

  # MariaDB's equivalent setting is max_statement_time.
  def set_statement_timeout(timeout)
    # fix deprecation warning with Active Record 7.1
    timeout = timeout.value if timeout.is_a?(ActiveSupport::Duration)
    select_all("SET max_statement_time = #{connection.quote(timeout)}")
  end

  # Adding a column with a default does not rewrite the table on
  # supported MariaDB versions.
  def add_column_default_safe?
    true
  end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/strong_migrations/adapters/abstract_adapter.rb | lib/strong_migrations/adapters/abstract_adapter.rb | module StrongMigrations
module Adapters
# Base class for database adapters. Subclasses override the hooks they
# support; the defaults here are conservative no-ops.
class AbstractAdapter
  def initialize(checker)
    @checker = checker
  end

  # Human-readable adapter name.
  def name
    "Unknown"
  end

  # Minimum supported server version (nil when not enforced).
  def min_version
  end

  # Timeout hooks - no-ops by default; implemented by adapters whose
  # database supports the corresponding setting.
  def set_statement_timeout(timeout)
  end

  def set_transaction_timeout(timeout)
  end

  def set_lock_timeout(timeout)
  end

  def check_lock_timeout(limit)
  end

  # Whether adding a column with a default avoids a table rewrite.
  def add_column_default_safe?
    false
  end

  # Whether changing a column type is safe; conservative default.
  def change_type_safe?(table, column, type, options, existing_column, existing_type)
    false
  end

  # What a table rewrite blocks while it runs.
  def rewrite_blocks
    "reads and writes"
  end

  def auto_incrementing_types
    ["primary_key"]
  end

  # Maximum constraint name length (nil when not enforced).
  def max_constraint_name_length
  end

  private

  def connection
    @checker.send(:connection)
  end

  def select_all(statement)
    connection.select_all(statement)
  end

  # Resolves the version to compare against. In developer environments
  # a configured target version wins (per-database hash or single
  # value); otherwise the block supplies the actual server version.
  def target_version(target_version)
    target_version ||= StrongMigrations.target_version
    version =
      if target_version && StrongMigrations.developer_env?
        if target_version.is_a?(Hash)
          db_config_name = connection.pool.db_config.name
          target_version.stringify_keys.fetch(db_config_name) do
            # error class is not shown in db:migrate output so ensure message is descriptive
            raise StrongMigrations::Error, "StrongMigrations.target_version is not configured for :#{db_config_name} database"
          end.to_s
        else
          target_version.to_s
        end
      else
        yield
      end
    Gem::Version.new(version)
  end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
ankane/strong_migrations | https://github.com/ankane/strong_migrations/blob/de0104c819ddd0a3d973db8e03e8971ac72a7b15/lib/generators/strong_migrations/install_generator.rb | lib/generators/strong_migrations/install_generator.rb | require "rails/generators"
module StrongMigrations
module Generators
# Rails generator for `rails generate strong_migrations:install`.
# Copies the initializer template; the other public methods are
# helpers referenced from that template.
class InstallGenerator < Rails::Generators::Base
  source_root File.join(__dir__, "templates")

  def create_initializer
    template "initializer.rb", "config/initializers/strong_migrations.rb"
  end

  # Timestamp (YYYYMMDDHHMMSS, UTC) written into the initializer so
  # pre-existing migrations are not checked.
  def start_after
    Time.now.utc.strftime("%Y%m%d%H%M%S")
  end

  # Extra note emitted into the initializer for Postgres users.
  def pgbouncer_message
    if postgresql?
      "\n# If you use PgBouncer in transaction mode, delete these lines and set timeouts on the database user"
    end
  end

  # Suggested target_version for the initializer based on the adapter.
  def target_version
    case adapter
    when /mysql|trilogy/
      # could try to connect to database and check for MariaDB
      # but this should be fine
      "8.0"
    else
      "10"
    end
  end

  def adapter
    ActiveRecord::Base.connection_db_config.adapter.to_s
  end

  # Truthy when the configured adapter looks like Postgres.
  def postgresql?
    adapter =~ /postg/
  end
end
end
end
| ruby | MIT | de0104c819ddd0a3d973db8e03e8971ac72a7b15 | 2026-01-04T15:46:43.728076Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls_spec.rb | spec/color_ls_spec.rb | # frozen_string_literal: true
require 'spec_helper'
# Smoke test: the gem defines a version constant.
RSpec.describe ColorLS do
  it 'has a version number' do
    expect(ColorLS::VERSION).not_to be_nil
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/spec_helper.rb | spec/spec_helper.rb | # frozen_string_literal: true
require 'simplecov'
# Coverage must start before the code under test is required.
SimpleCov.start do
  add_filter '/spec/'
end

if ENV['CI'] == 'never' # FIXME: migrate to new Codecov uploader / action
  require 'codecov'
  SimpleCov.formatter = SimpleCov::Formatter::Codecov
end

require 'bundler/setup'
require 'colorls'

# Load spec support helpers (sorted for a deterministic load order).
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].sort.each { |file| require file }

# disable rainbow globally to ease checking expected output
Rainbow.enabled = false

RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = '.rspec_status'

  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/support/yaml_sort_checker.rb | spec/support/yaml_sort_checker.rb | # frozen_string_literal: true
require 'yaml'
require 'open3' # workaround https://github.com/samg/diffy#119
require 'diffy'
# Verifies that a YAML mapping file is alphabetically sorted, either by
# key or by value. Raises NotSortedError (message contains a diff) when
# the file is out of order.
class YamlSortChecker
  class NotSortedError < StandardError; end

  def initialize(filename)
    @yaml = YAML.load_file(filename)
  end

  # type - :key (sort by mapping key) or :value (by value, then key).
  # Returns true when sorted; raises NotSortedError otherwise.
  def sorted?(type=:key)
    case type.to_sym
    when :key then key_sorted?
    when :value then value_sorted?
    end
    true
  end

  private

  attr_reader :yaml

  def key_sorted?
    expected = yaml.to_a.sort_by { |entry| entry[0].downcase }
    different_from_yaml? expected
  end

  def value_sorted?
    expected = yaml.to_a.sort_by { |key, value| [value.downcase, key.downcase] }
    different_from_yaml? expected
  end

  # Raises when the actual order differs from +sorted_yaml+, embedding
  # the diff in the error message. Returns nil when identical.
  def different_from_yaml?(sorted_yaml)
    actual_str = enum_to_str(yaml)
    expected_str = enum_to_str(sorted_yaml)

    difference = Diffy::Diff.new(actual_str, expected_str).to_s
    raise NotSortedError, "\n#{difference}" unless difference.empty?
  end

  def enum_to_str(enum)
    enum.to_a.map { |pair| pair.join(' ') }.join("\n")
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls/flags_spec.rb | spec/color_ls/flags_spec.rb | # frozen_string_literal: true
require 'spec_helper'
FIXTURES = 'spec/fixtures'
RSpec.describe ColorLS::Flags do
subject do
described_class.new(*args).process
rescue SystemExit => e
raise "colorls exited with #{e.status}" unless e.success?
end
let(:a_txt_file_info) do
instance_double(
ColorLS::FileInfo,
group: 'sys',
mtime: Time.now,
directory?: false,
owner: 'user',
name: 'a.txt',
show: 'a.txt',
nlink: 1,
size: 128,
blockdev?: false,
chardev?: false,
socket?: false,
symlink?: false,
hidden?: false,
stats: instance_double(File::Stat,
mode: 0o444, # read for user, owner, other
setuid?: true,
setgid?: true,
sticky?: true),
executable?: false
)
end
before(:each, :use_file_info_stub) do
allow(ColorLS::FileInfo).to receive(:new).with(
path: File.join(FIXTURES, 'a.txt'),
parent: FIXTURES,
name: 'a.txt',
link_info: true,
show_filepath: true
) { a_txt_file_info }
end
context 'with no flags' do
let(:args) { [FIXTURES] }
it('does not list file info') {
expect do
subject
end.not_to output(/((r|-).*(w|-).*(x|-).*){3}/).to_stdout
}
it('does not display hidden files') { expect { subject }.not_to output(/\.hidden-file/).to_stdout }
it('displays dirs & files alphabetically') { expect { subject }.to output(/a-file.+symlinks.+z-file/m).to_stdout }
it 'does not show a report' do
expect { subject }.not_to output(/(Found \d+ items in total\.)|(Folders: \d+, Files: \d+\.)/).to_stdout
end
it 'displays multiple files per line' do
allow($stdout).to receive(:tty?).and_return(true)
expect { subject }.not_to output(/(.*\n){7}/).to_stdout
end
it('does not display ./ or ../') { expect { subject }.not_to output(%r(\.{1,2}/)).to_stdout }
it('does not display file hierarchy') { expect { subject }.not_to output(/├──/).to_stdout }
end
context 'with --reverse flag' do
let(:args) { ['--reverse', '-x', FIXTURES] }
it('displays dirs & files in reverse alphabetical order') {
expect do
subject
end.to output(/z-file.+symlinks.+a-file/m).to_stdout
}
end
context 'with --format flag' do
let(:args) { ['--format=single-column', FIXTURES] }
it {
expect { subject }.to output(/.*a-file.*\n # on the first line
(?m:.*) # more lines...
.*z-file.*\n # on the last line
/x).to_stdout
}
end
context 'with --long flag & file path' do
let(:args) { ['--long', "#{FIXTURES}/.hidden-file"] }
it('lists info of a hidden file') { expect { subject }.not_to output(/No Info/).to_stdout }
end
context 'with --long flag' do
let(:args) { ['--long', FIXTURES] }
it('lists file info') { expect { subject }.to output(/((r|-).*(w|-).*(x|-).*){3}/).to_stdout }
it 'rounds up file size to nearest MiB' do
expect { subject }.to output(/2 MiB[^\n]* 20kb-less-than-2mb\.txt/).to_stdout
end
it 'rounds down file size to nearest MiB' do
expect { subject }.to output(/1 MiB[^\n]* 20kb-more-than-1mb\.txt/).to_stdout
end
end
context 'with --long flag for `a.txt`' do
let(:args) { ['--long', "#{FIXTURES}/a.txt"] }
it 'shows special permission bits', :use_file_info_stub do
expect { subject }.to output(/r-Sr-Sr-T .* a.txt/mx).to_stdout
end
it 'shows number of hardlinks', :use_file_info_stub do
allow(a_txt_file_info).to receive(:nlink).and_return 5
expect { subject }.to output(/\S+\s+ 5 .* a.txt/mx).to_stdout
end
end
context 'with --long and --non-human-readable flag for `2MB file`' do
let(:args) { ['--long', '--non-human-readable', "#{FIXTURES}/two_megabyte_file.txt"] }
it 'shows the file size in bytes' do
expect { subject }.to output(/#{2 * 1024 * 1024}\sB/).to_stdout
end
end
context 'with --long flag on windows' do
let(:args) { ['--long', "#{FIXTURES}/a.txt"] }
before do
ColorLS::FileInfo.class_variable_set :@@users, {} # rubocop:disable Style/ClassVars
ColorLS::FileInfo.class_variable_set :@@groups, {} # rubocop:disable Style/ClassVars
end
it 'returns no user / group info' do
allow(Etc).to receive(:getpwuid).and_return(nil)
allow(Etc).to receive(:getgrgid).and_return(nil)
expect { subject }.to output(/\s+ \d+ \s+ \d+ .* a.txt/mx).to_stdout
end
end
context 'with --all flag' do
let(:args) { ['--all', FIXTURES] }
it('lists hidden files') { expect { subject }.to output(/\.hidden-file/).to_stdout }
end
context 'with --sort-dirs flag' do
let(:args) { ['--sort-dirs', '-1', FIXTURES] }
it('sorts results alphabetically, directories first') {
expect do
subject
end.to output(/symlinks.+a-file.+z-file/m).to_stdout
}
end
context 'with --sort-files flag' do
let(:args) { ['--sort-files', '-1', FIXTURES] }
it('sorts results alphabetically, files first') {
expect do
subject
end.to output(/a-file.+z-file.+symlinks/m).to_stdout
}
end
context 'with --sort=time' do
  entries = Dir.entries(FIXTURES, encoding: Encoding::UTF_8).grep(/^[^.]/).shuffle.freeze
  mtime = Time.new(2017, 11, 7, 2, 2, 2).freeze

  # Stamp each fixture with an increasing mtime so the expected order is
  # deterministic, collecting the regex-escaped names as we go.
  # (Previously `entries.each_with_index do ... end` returned the
  # receiver, silently discarding Regexp.quote - map.with_index makes
  # the escaping take effect.)
  files = entries.map.with_index do |e, i|
    t = mtime + i
    File.utime(t, t, File.join(FIXTURES, e))
    Regexp.quote(e)
  end

  # newest first
  expected = Regexp.new files.reverse.join('.+'), Regexp::MULTILINE

  let(:args) { ['--sort=time', '-x', FIXTURES] }

  it { expect { subject }.to output(expected).to_stdout }
end
context 'with --sort=size flag' do
let(:args) { ['--sort=size', '--group-directories-first', '-1', FIXTURES] }
it 'sorts results by size' do
allow($stdout).to receive(:tty?).and_return(true)
expect { subject }.to output(/symlinks.+a-file.+z-file/m).to_stdout
end
end
context 'with --help flag' do
let(:args) { ['--help', FIXTURES] }
it { expect { subject }.to output(/prints this help/).to_stdout }
end
context 'with -h flag only' do
let(:args) { ['-h'] }
it { expect { subject }.to output(/prints this help/).to_stdout }
end
context 'with -h and additional argument' do
let(:args) { ['-h', FIXTURES] }
it { expect { subject }.to output(/a-file/).to_stdout }
end
context 'with -h and additional options' do
let(:args) { ['-ht'] }
it { expect { subject }.not_to output(/show this help/).to_stdout }
end
context 'with --human-readable flag' do
let(:args) { ['--human-readable', FIXTURES] }
it { expect { subject }.to output(/a-file/).to_stdout }
end
context 'with --sort=extension flag' do
let(:args) { ['--sort=extension', '-1', FIXTURES] }
it('sorts results by extension') {
expect do
subject
end.to output(/a-file.+symlinks.+z-file.+a.md.+a.txt.+z.txt/m).to_stdout
}
end
context 'with --dirs flag' do
let(:args) { ['--dirs', FIXTURES] }
it('displays dirs only') { expect { subject }.not_to output(/a-file/).to_stdout }
end
context 'with --files flag' do
let(:args) { ['--files', FIXTURES] }
it('displays files only') { expect { subject }.not_to output(/symlinks/).to_stdout }
end
context 'with -1 flag' do
let(:args) { ['-1', FIXTURES] }
it('displays one file per line') { expect { subject }.to output(/(.*\n){3}/).to_stdout }
end
context 'with --almost-all flag' do
let(:args) { ['--almost-all', FIXTURES] }
it('displays hidden files') { expect { subject }.to output(/\.hidden-file/).to_stdout }
end
context 'with --tree flag' do
let(:args) { ['--tree', FIXTURES] }
it('displays file hierarchy') { expect { subject }.to output(/├──/).to_stdout }
it { expect { subject }.to output(/third-level-file.txt/).to_stdout }
end
context 'with --tree=1 flag' do
let(:args) { ['--tree=1', FIXTURES] }
it('displays file hierarchy') { expect { subject }.to output(/├──/).to_stdout }
it {
expect do
subject
end.not_to output(/ReadmeLink.md|Supportlink|doesnotexisttest.txt|third-level-file.txt/).to_stdout
}
end
context 'with --tree=3 flag' do
let(:args) { ['--tree=3', FIXTURES] }
it('displays file hierarchy') { expect { subject }.to output(/├──/).to_stdout }
it { expect { subject }.to output(/third-level-file.txt/).to_stdout }
end
context 'with --hyperlink flag' do
  let(:args) { ['--hyperlink', FIXTURES] }

  # Windows (File::ALT_SEPARATOR set) needs an extra slash after file://
  href = if File::ALT_SEPARATOR.nil?
           "file://#{File.absolute_path(FIXTURES)}/a.txt"
         else
           "file:///#{File.absolute_path(FIXTURES)}/a.txt"
         end
  # match case-insensitively on case-insensitive filesystems
  pattern = File.fnmatch('cat', 'CAT', File::FNM_SYSCASE) ? /#{href}/i : /#{href}/

  it { expect { subject }.to output(match(pattern)).to_stdout }
end
context 'symlinked directory' do
let(:args) { ['-x', File.join(FIXTURES, 'symlinks', 'Supportlink')] }
it { expect { subject }.to output(/Supportlink/).to_stdout }
end
context 'symlinked directory with trailing separator' do
link_to_dir = File.join(FIXTURES, 'symlinks', 'Supportlink', File::SEPARATOR)
let(:args) { ['-x', link_to_dir] }
it 'shows the file in the linked directory' do
stat = File.lstat link_to_dir
if stat.directory?
expect { subject }.to output(/yaml_sort_checker.rb/).to_stdout
else
skip 'symlinks not supported'
end
end
end
context 'when passing invalid flags' do
let(:args) { ['--snafu'] }
it 'issues a warning, hint about `--help` and exit' do # rubocop:todo RSpec/MultipleExpectations
allow(Kernel).to receive(:warn) do |message|
expect(message).to output '--snafu'
end
expect { subject }.to raise_error('colorls exited with 2').and output(/--help/).to_stderr
end
end
context 'with invalid locale' do
let(:args) { [FIXTURES] }
it 'warns but not raise an error' do
allow(CLocale).to receive(:setlocale).with(CLocale::LC_COLLATE, '').and_raise(RuntimeError.new('setlocale error'))
expect { subject }.to output(/setlocale error/).to_stderr.and output.to_stdout
end
end
context 'with --report flag' do
let(:args) { ['--report', '--report=long', FIXTURES] }
it 'shows a report with recognized and unrecognized files' do
expect { subject }.to output(/Recognized files\s+: 6\n.+Unrecognized files\s+: 3/).to_stdout
end
end
context 'with --report=short flag' do
let(:args) { ['--report=short', FIXTURES] }
it 'shows a brief report' do
expect { subject }.to output(/Folders: \d+, Files: \d+\./).to_stdout
end
end
context 'with --inode flag' do
let(:args) { ['--inode', FIXTURES] }
it 'shows inode number before logo' do
expect { subject }.to output(/\d+ +[^ ]+ +a.md/).to_stdout
end
end
context 'with non-existent path' do
let(:args) { ['not_exist_file'] }
it 'exits with status code 2' do # rubocop:todo RSpec/MultipleExpectations
expect { subject }.to output(/colorls: Specified path 'not_exist_file' doesn't exist./).to_stderr
expect(subject).to eq 2
end
end
context 'with -o flag', :use_file_info_stub do
let(:args) { ['-o', "#{FIXTURES}/a.txt"] }
it 'lists without group info' do
expect { subject }.not_to output(/sys/).to_stdout
end
it 'lists with user info' do
expect { subject }.to output(/user/).to_stdout
end
end
context 'with -g flag', :use_file_info_stub do
let(:args) { ['-g', "#{FIXTURES}/a.txt"] }
it 'lists with group info' do
expect { subject }.to output(/sys/).to_stdout
end
it 'lists without user info' do
expect { subject }.not_to output(/user/).to_stdout
end
end
context 'with -o and -g flag', :use_file_info_stub do
let(:args) { ['-og', "#{FIXTURES}/a.txt"] }
it 'lists without group info' do
expect { subject }.not_to output(/sys/).to_stdout
end
it 'lists without user info' do
expect { subject }.not_to output(/user/).to_stdout
end
end
context 'with -G flag in a listing format', :use_file_info_stub do
let(:args) { ['-l', '-G', "#{FIXTURES}/a.txt"] }
it 'lists without group info' do
expect { subject }.not_to output(/sys/).to_stdout
end
it 'lists with user info' do
expect { subject }.to output(/user/).to_stdout
end
end
context 'with --indicator-style=none' do
let(:args) { ['-dl', '--indicator-style=none', FIXTURES] }
it { expect { subject }.to output(/.+second-level \n.+symlinks \n/).to_stdout }
end
context 'with --time-style option' do
let(:args) { ['-l', '--time-style=+%y-%m-%d %k:%M', "#{FIXTURES}/a.txt"] }
mtime = File.mtime("#{FIXTURES}/a.txt")
it { expect { subject }.to output(/#{mtime.strftime("%y-%m-%d %k:%M")}/).to_stdout }
end
context 'with --no-hardlinks flag in a listing format', :use_file_info_stub do
let(:args) { ['-l', '--no-hardlink', "#{FIXTURES}/a.txt"] }
before do
allow(a_txt_file_info).to receive(:nlink).and_return 987
end
it 'lists without hard links count' do
expect { subject }.not_to output(/987/).to_stdout
end
end
context 'with -L flag in a listing format' do
let(:args) { ['-l', '-L', "#{FIXTURES}/a.txt"] }
before do
file_info = instance_double(
ColorLS::FileInfo,
group: 'sys',
mtime: Time.now,
directory?: false,
owner: 'user',
name: 'a.txt',
show: 'a.txt',
nlink: 1,
size: 128,
blockdev?: false,
chardev?: false,
socket?: false,
symlink?: true,
hidden?: false,
link_target: "#{FIXTURES}/z.txt",
dead?: false,
executable?: false
)
allow(ColorLS::FileInfo).to receive(:new).and_call_original
allow(ColorLS::FileInfo).to receive(:new).with(
path: File.join(FIXTURES, 'a.txt'),
parent: FIXTURES,
name: 'a.txt',
link_info: true,
show_filepath: true
) { file_info }
end
it 'show information on the destination of symbolic links' do
expect { subject }.not_to output(/128/).to_stdout
end
end
context 'when argument is a file with relative path' do
let(:args) { ["#{FIXTURES}/a.txt"] }
it 'replicates the filepath provided in the argument' do
expect { subject }.to output(/#{args.first}/).to_stdout
end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls/git_spec.rb | spec/color_ls/git_spec.rb | # frozen_string_literal: false
require 'spec_helper'
RSpec.describe ColorLS::Git do
  # $CHILD_STATUS is only populated after a child process has run once.
  before(:all) do # rubocop:todo RSpec/BeforeAfterAll
    `echo` # initialize $CHILD_STATUS
    expect($CHILD_STATUS).to be_success # rubocop:todo RSpec/ExpectInHook
  end

  context 'with file in repository root' do
    it 'returns `M`' do
      allow(subject).to receive(:git_prefix).with('/repo/').and_return(['', true])
      allow(subject).to receive(:git_subdir_status).and_yield('M', 'foo.txt')

      expect(subject.status('/repo/')).to include('foo.txt' => Set['M'])
    end

    it 'returns `??`' do
      allow(subject).to receive(:git_prefix).with('/repo/').and_return(['', true])
      allow(subject).to receive(:git_subdir_status).and_yield('??', 'foo.txt')

      expect(subject.status('/repo/')).to include('foo.txt' => Set['??'])
    end
  end

  context 'with file in subdir' do
    it 'returns `M` for subdir' do
      allow(subject).to receive(:git_prefix).with('/repo/').and_return(['', true])
      allow(subject).to receive(:git_subdir_status).and_yield('M', 'subdir/foo.txt')

      expect(subject.status('/repo/')).to include('subdir' => Set['M'])
    end

    # statuses of multiple files in the same subdir accumulate into one Set
    it 'returns `M` and `D` for subdir' do
      allow(subject).to receive(:git_prefix).with('/repo/').and_return(['', true])
      allow(subject).to receive(:git_subdir_status).and_yield('M', 'subdir/foo.txt').and_yield('D', 'subdir/other.c')

      expect(subject.status('/repo/')).to include('subdir' => Set['M', 'D'])
    end
  end

  context 'determining the git status' do
    it 'does not output to stderr' do
      expect { subject.status('.') }.not_to output.to_stderr
    end
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls/monkey_spec.rb | spec/color_ls/monkey_spec.rb | # frozen_string_literal: true
require 'colorls/monkeys'
# Specs for the core-class extensions defined in colorls/monkeys.
RSpec.describe String do # rubocop:disable RSpec/FilePath
  describe '#uniq' do
    it 'removes all duplicate characters' do
      expect('abca'.uniq).to be == 'abc'
    end
  end

  describe String, '#colorize' do
    it 'colors a string with red' do
      expect('hello'.colorize(:red)).to be == Rainbow('hello').red
    end
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls/core_spec.rb | spec/color_ls/core_spec.rb | # frozen_string_literal: false
require 'spec_helper'
RSpec.describe ColorLS::Core do
subject { described_class.new(colors: Hash.new('black')) }
context 'ls' do
it 'works with Unicode characters' do
camera = 'Cámara'.force_encoding(ColorLS.file_encoding)
imagenes = 'Imágenes'.force_encoding(ColorLS.file_encoding)
dir_info = instance_double(
ColorLS::FileInfo,
group: 'sys',
mtime: Time.now,
directory?: true,
owner: 'user',
name: imagenes,
path: '.',
show: imagenes,
nlink: 1,
size: 128,
blockdev?: false,
chardev?: false,
socket?: false,
symlink?: false,
hidden?: false,
stats: instance_double(File::Stat,
mode: 0o444, # read for user, owner, other
setuid?: false,
setgid?: false,
sticky?: false),
executable?: true
)
file_info = instance_double(
ColorLS::FileInfo,
group: 'sys',
mtime: Time.now,
directory?: false,
owner: 'user',
name: camera,
show: camera,
nlink: 1,
size: 128,
blockdev?: false,
chardev?: false,
socket?: false,
symlink?: false,
hidden?: false,
stats: instance_double(File::Stat,
mode: 0o444, # read for user, owner, other
setuid?: false,
setgid?: false,
sticky?: false),
executable?: false
)
allow(Dir).to receive(:entries).and_return([camera])
allow(ColorLS::FileInfo).to receive(:new).and_return(file_info)
expect { subject.ls_dir(dir_info) }.to output(/mara/).to_stdout
end
it 'works for `...`' do
file_info = instance_double(
ColorLS::FileInfo,
group: 'sys',
mtime: Time.now,
directory?: false,
owner: 'user',
name: '...',
show: '...',
nlink: 1,
size: 128,
blockdev?: false,
chardev?: false,
socket?: false,
symlink?: false,
hidden?: true,
executable?: false
)
expect { subject.ls_files([file_info]) }.to output(/[.]{3}/).to_stdout
end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls/layout_spec.rb | spec/color_ls/layout_spec.rb | # frozen_string_literal: true
require 'spec_helper'
# rubocop:todo RSpec/MultipleDescribes
RSpec.describe(ColorLS::HorizontalLayout, '#each_line') do
subject { described_class.new(array, array.map(&:length), width) }
context 'when empty' do
let(:array) { [] }
let(:width) { 10 }
it 'does nothing' do
expect { |b| subject.each_line(&b) }.not_to yield_control
end
end
context 'with one item' do
first = '1234567890'
let(:array) { [first] }
let(:width) { 11 }
it 'is on a single line' do
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first], [first.size]])
end
end
context 'with an item not fitting' do
first = '1234567890'
let(:array) { [first] }
let(:width) { 1 }
it 'is on a single column' do
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first], [first.size]])
end
end
context 'with two items fitting' do
first = '1234567890'
let(:array) { [first, 'a'] }
let(:width) { 100 }
it 'is on a single line' do
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first, 'a'], [first.size, 1]])
end
end
context 'with three items but place for two' do
first = '1234567890'
let(:array) { [first, 'a', first] }
let(:width) { first.size + 1 }
it 'is on two lines' do
max_widths = [first.size, 1]
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first, 'a'], max_widths], [[first], max_widths])
end
end
end
RSpec.describe(ColorLS::VerticalLayout, '#each_line') do
subject { described_class.new(array, array.map(&:length), width) }
context 'when empty' do
let(:array) { [] }
let(:width) { 10 }
it 'does nothing' do
expect { |b| subject.each_line(&b) }.not_to yield_control
end
end
context 'with one item' do
first = '1234567890'
let(:array) { [first] }
let(:width) { 11 }
it 'is on a single line' do
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first], [first.size]])
end
end
context 'with an item not fitting' do
first = '1234567890'
let(:array) { [first] }
let(:width) { 1 }
it 'is on a single column' do
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first], [first.size]])
end
end
context 'with two items fitting' do
first = '1234567890'
let(:array) { [first, 'a'] }
let(:width) { 100 }
it 'is on a single line' do
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first, 'a'], [first.size, 1]])
end
end
context 'with three items but place for two' do
first = '1234567890'
let(:array) { [first, 'a', first] }
let(:width) { first.size * 2 }
it 'is on two lines' do
max_widths = [first.size, first.size]
expect { |b| subject.each_line(&b) }.to yield_successive_args([[first, first], max_widths], [['a'], max_widths])
end
end
end
# rubocop:enable RSpec/MultipleDescribes
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/spec/color_ls/yaml_spec.rb | spec/color_ls/yaml_spec.rb | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ColorLS::Yaml do
  # Maps each bundled YAML config file to how it must be sorted:
  #   :key   - sorted by mapping key
  #   :value - sorted by value, then key
  filenames = {
    file_aliases: :value,
    folder_aliases: :value,
    folders: :key,
    files: :key
  }.freeze

  let(:base_directory) { 'lib/yaml' }

  filenames.each do |filename, sort_type|
    describe filename do
      # interpolate the file under test, e.g. lib/yaml/files.yaml
      # (path was garbled to "#(unknown)" - restore the loop variable)
      let(:checker) { YamlSortChecker.new("#{base_directory}/#{filename}.yaml") }

      it 'is sorted correctly' do
        expect(checker.sorted?(sort_type)).to be true
      end
    end
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls.rb | lib/colorls.rb | # frozen_string_literal: true
require 'yaml'
require 'etc'
require 'English'
require 'filesize'
require 'io/console'
require 'io/console/size'
require 'rainbow/ext/string'
require 'clocale'
require 'unicode/display_width'
require 'addressable/uri'
require_relative 'colorls/core'
require_relative 'colorls/fileinfo'
require_relative 'colorls/flags'
require_relative 'colorls/layout'
require_relative 'colorls/yaml'
require_relative 'colorls/monkeys'
require_relative 'colorls/git'
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/core.rb | lib/colorls/core.rb | # frozen_string_literal: true
module ColorLS
# on Windows (were the special 'nul' device exists) we need to use UTF-8
@file_encoding = File.exist?('nul') ? Encoding::UTF_8 : Encoding::ASCII_8BIT
# Encoding used when reading directory entries from the filesystem
# (set once at load time above).
def self.file_encoding
  @file_encoding
end
# Returns the terminal width in columns. Falls back to IO.console_size
# when there is no console or the console reports a zero width (e.g.
# output redirected to a pipe).
def self.terminal_width
  console = IO.console
  width = IO.console_size[1]
  return width if console.nil? || console.winsize[1].zero?

  console.winsize[1]
end
@screen_width = terminal_width
# Terminal width captured once at load time (see @screen_width above).
def self.screen_width
  @screen_width
end
class Core # rubocop:disable Metrics/ClassLength
MIN_SIZE_CHARS = 4
# Core drives a listing: it gathers entries, applies filtering/
# sorting/grouping, and prints them through a layout class.
#
# mode - :one_per_line, :long, :tree, :horizontal, or nil (vertical)
# long_style_options - settings only relevant to long format
def initialize(all: false, sort: false, show: false,
  mode: nil, show_git: false, almost_all: false, colors: [], group: nil,
  reverse: false, hyperlink: false, tree_depth: nil, show_inode: false,
  indicator_style: 'slash', long_style_options: {}, icons: true)
  @count = {folders: 0, recognized_files: 0, unrecognized_files: 0}
  @all = all
  @almost_all = almost_all
  @hyperlink = hyperlink
  @sort = sort
  @reverse = reverse
  @group = group
  @show = show
  @one_per_line = mode == :one_per_line
  @show_inode = show_inode
  init_long_format(mode,long_style_options)
  # tree mode carries an optional max depth
  @tree = {mode: mode == :tree, depth: tree_depth}
  @horizontal = mode == :horizontal
  @git_status = init_git_status(show_git)
  @time_style = long_style_options.key?(:time_style) ? long_style_options[:time_style] : ''
  @indicator_style = indicator_style
  @hard_links_count = long_style_options.key?(:hard_links_count) ? long_style_options[:hard_links_count] : true
  @icons = icons
  init_colors colors
  init_icons
end
# Width (in characters) added around each entry name: icon and spacing
# (12), plus the git-status (4) and inode (10) columns when enabled.
def additional_chars_per_item
  extra = 12
  extra += 4 if @show_git
  extra += 10 if @show_inode
  extra
end
# Lists the contents of one directory (a FileInfo). Tree mode hands
# off to the recursive traversal; otherwise entries are read, filtered,
# sorted and grouped per the configured flags, then printed.
def ls_dir(info)
  if @tree[:mode]
    print "\n"
    return tree_traverse(info.path, 0, 1, 2)
  end

  @contents = Dir.entries(info.path, encoding: ColorLS.file_encoding)

  filter_hidden_contents

  @contents.map! { |e| FileInfo.dir_entry(info.path, e, link_info: @long) }

  filter_contents if @show
  sort_contents if @sort
  group_contents if @group

  return print "\n Nothing to show here\n".colorize(@colors[:empty]) if @contents.empty?

  ls
end
# Lists an explicit set of FileInfo entries (paths given on the
# command line) without directory filtering or sorting.
def ls_files(files)
  @contents = files

  ls
end
# Prints the summary requested by --report. :short gives a one-line
# folder/file count; any other mode gives the detailed breakdown.
def display_report(report_mode)
  if report_mode == :short
    puts <<~REPORT
      \s\s\s\sFolders: #{@count[:folders]}, Files: #{@count[:recognized_files] + @count[:unrecognized_files]}.
    REPORT
      .colorize(@colors[:report])
  else
    puts <<~REPORT
      Found #{@count.values.sum} items in total.
      \tFolders\t\t\t: #{@count[:folders]}
      \tRecognized files\t: #{@count[:recognized_files]}
      \tUnrecognized files\t: #{@count[:unrecognized_files]}
    REPORT
      .colorize(@colors[:report])
  end
end
private
# Renders @contents with the layout implied by the configured mode:
# horizontal rows, one entry per line (also used for long format), or
# vertical columns (the default).
def ls
  init_column_lengths

  layout = case
           when @horizontal
             HorizontalLayout.new(@contents, item_widths, ColorLS.screen_width)
           when @one_per_line || @long
             SingleColumnLayout.new(@contents)
           else
             VerticalLayout.new(@contents, item_widths, ColorLS.screen_width)
           end

  layout.each_line do |line, widths|
    ls_line(line, widths)
  end

  clear_chars_for_size
end
# Stores the color map and builds a memoizing cache of colorized
# permission characters: each mode char maps to its color category and
# is colorized once, on first use (Hash default block).
def init_colors(colors)
  @colors = colors
  @modes = Hash.new do |hash, key|
    color = case key
            when 'r' then :read
            when 'w' then :write
            when '-' then :no_access
            when 'x', 's', 'S', 't', 'T' then :exec
            end

    hash[key] = key.colorize(@colors[color]).freeze
  end
end
def init_long_format(mode, long_style_options)
@long = mode == :long
@show_group = long_style_options.key?(:show_group) ? long_style_options[:show_group] : true
@show_user = long_style_options.key?(:show_user) ? long_style_options[:show_user] : true
@show_symbol_dest = long_style_options.key?(:show_symbol_dest) ? long_style_options[:show_symbol_dest] : false
@show_human_readable_size =
long_style_options.key?(:human_readable_size) ? long_style_options[:human_readable_size] : true
end
# Build the lazy git-status lookup. Returns a frozen empty hash when
# git integration is off; otherwise a Hash whose default block maps a
# FileInfo to the Git.status of its parent directory, caching one
# result per absolute directory path.
def init_git_status(show_git)
  @show_git = show_git
  return {}.freeze unless show_git

  # stores git status information per directory
  Hash.new do |hash, key|
    # The hash is keyed by FileInfo at the call site, but entries are
    # cached under the parent directory's absolute path.
    path = File.absolute_path key.parent
    if hash.key? path
      hash[path]
    else
      hash[path] = Git.status(path)
    end
  end
end
# Terminal display width required by each entry: the Unicode-aware
# width of its name plus the fixed per-entry decoration budget.
def item_widths
  @contents.map { |item| Unicode::DisplayWidth.of(item.show) + additional_chars_per_item }
end
# Strip hidden entries from @contents according to the -a/-A flags:
# --all keeps everything, --almost-all keeps dotfiles but drops the
# '.' and '..' pseudo-entries, default hides all dotfiles.
def filter_hidden_contents
  return if @all

  @contents = @contents - %w[. ..]
  @contents.reject! { |entry| entry.start_with?('.') } unless @almost_all
end
def init_column_lengths
return unless @long
maxlink = maxuser = maxgroup = 0
@contents.each do |c|
maxlink = c.nlink if c.nlink > maxlink
maxuser = c.owner.length if c.owner.length > maxuser
maxgroup = c.group.length if c.group.length > maxgroup
end
@linklength = maxlink.digits.length
@userlength = maxuser
@grouplength = maxgroup
end
# Restrict @contents to only directories (-d) or only files (-f),
# depending on the @show selector.
def filter_contents
  want_dirs = @show == :dirs
  @contents.select! { |entry| entry.directory? == want_dirs }
end
# Sort @contents in place by the @sort key (:extension, :time, :size,
# or locale-aware name for any other truthy value), then apply
# --reverse. Locale comparison uses strxfrm keys.
def sort_contents
  case @sort
  when :extension
    @contents.sort_by! do |f|
      name = f.name
      ext = File.extname(name)
      name = name.chomp(ext) unless ext.empty?
      # Compare extension first, then basename, both in locale order.
      [ext, name].map { |s| CLocale.strxfrm(s) }
    end
  when :time
    # Negate so newest-first sorts ascending.
    @contents.sort_by! { |a| -a.mtime.to_f }
  when :size
    @contents.sort_by! { |a| -a.size }
  else
    @contents.sort_by! { |a| CLocale.strxfrm(a.name) }
  end
  @contents.reverse! if @reverse
end
# Reorder @contents so one kind comes first (:dirs or :files), keeping
# relative order within each kind. No-op when grouping is disabled.
def group_contents
  return unless @group

  dirs, files = @contents.partition(&:directory?)

  @contents = case @group
              when :dirs then dirs.concat(files)
              when :files then files.concat(dirs)
              end
end
# Load the icon maps and their alias tables from the gem's bundled
# YAML files (plus any user overrides merged in by Yaml#load).
def init_icons
  @files = ColorLS::Yaml.new('files.yaml').load
  @file_aliases = ColorLS::Yaml.new('file_aliases.yaml').load(aliase: true)
  @folders = ColorLS::Yaml.new('folders.yaml').load
  @folder_aliases = ColorLS::Yaml.new('folder_aliases.yaml').load(aliase: true)
end
# Render one rwx permission triplet from the low three bits of +rwx+.
# When +special+ is set (setuid/setgid/sticky), the execute slot shows
# +char+ (lowercase if executable, uppercase otherwise) instead of x/-.
# Each character is coloured via the memoizing @modes map.
def format_mode(rwx, special, char)
  read_char = rwx.anybits?(4) ? 'r' : '-'
  write_char = rwx.anybits?(2) ? 'w' : '-'
  exec_char = if special
                rwx.anybits?(1) ? char : char.upcase
              else
                rwx.anybits?(1) ? 'x' : '-'
              end
  @modes[read_char] + @modes[write_char] + @modes[exec_char]
end
# Render the 9-character permission string for a File::Stat, folding
# setuid/setgid/sticky into the corresponding execute slots. The
# shifted values are not masked; format_mode only tests bits 0-2.
def mode_info(stat)
  m = stat.mode
  format_mode(m >> 6, stat.setuid?, 's') +
    format_mode(m >> 3, stat.setgid?, 's') +
    format_mode(m, stat.sticky?, 't')
end
# Owner name padded to the widest owner in this listing (@userlength).
def user_info(content)
  content.owner.ljust(@userlength, ' ').colorize(@colors[:user])
end
# Group name padded to the widest group in this listing (@grouplength).
def group_info(group)
  group.to_s.ljust(@grouplength, ' ').colorize(@colors[:normal])
end
# File size cell, human-readable (Filesize#pretty) or raw bytes, and
# colour-coded by magnitude: >= 512 MiB large, >= 128 MiB medium,
# otherwise small.
def size_info(filesize)
  filesize = Filesize.new(filesize)
  size = @show_human_readable_size ? filesize.pretty(precision: 0) : filesize.to_s('B', precision: 0)
  # pretty/to_s yield "NUMBER UNIT"; split so the parts can be justified
  # independently.
  size = size.split
  size = justify_size_info(size)
  return size.colorize(@colors[:file_large]) if filesize >= 512 * (1024 ** 2)
  return size.colorize(@colors[:file_medium]) if filesize >= 128 * (1024 ** 2)

  size.colorize(@colors[:file_small])
end
# Width of the numeric size column, memoized per listing. Raw-byte
# output must fit the largest size in @contents; never narrower than
# MIN_SIZE_CHARS.
def chars_for_size
  @chars_for_size ||=
    if @show_human_readable_size
      MIN_SIZE_CHARS
    else
      widest = @contents.max_by(&:size).size.to_s.length
      [widest, MIN_SIZE_CHARS].max
    end
end
# Align a split ["number", "unit"] size pair: right-justify the number
# to the shared column width, and pad the unit to 3 chars in
# human-readable mode so differing units ("B", "MiB") line up.
def justify_size_info(size)
  number = size[0].rjust(chars_for_size, ' ')
  unit = size[1]
  unit = unit.ljust(3, ' ') if @show_human_readable_size
  "#{number} #{unit}"
end
# Drop the memoized size-column width so the next listing recomputes
# it for its own contents.
def clear_chars_for_size
  @chars_for_size = nil
end
# Modification time cell, coloured by age (< 1 hour, < 1 day, older).
# A @time_style beginning with '+' is used as a strftime format;
# otherwise the asctime representation is shown.
def mtime_info(file_mtime)
  mtime = @time_style.start_with?('+') ? file_mtime.strftime(@time_style.delete_prefix('+')) : file_mtime.asctime
  now = Time.now
  return mtime.colorize(@colors[:hour_old]) if now - file_mtime < 60 * 60
  return mtime.colorize(@colors[:day_old]) if now - file_mtime < 24 * 60 * 60

  mtime.colorize(@colors[:no_modifier])
end
# Git status cell for an entry; empty string when git integration is
# off or the entry is outside a repository (@git_status lookup nil).
def git_info(content)
  return '' unless (status = @git_status[content])

  if content.directory?
    git_dir_info(content, status)
  else
    git_file_info(status[content.name])
  end
end
# Status symbols for a tracked file, or a checkmark when unchanged.
# The checkmark is re-encoded so terminals whose encoding lacks it
# show '=' instead.
def git_file_info(status)
  return Git.colored_status_symbols(status, @colors) if status

  '  ✓ '
    .encode(Encoding.default_external, undef: :replace, replace: '=')
    .colorize(@colors[:unchanged])
end
# Aggregate status for a directory entry: for '.' itself, the union of
# all statuses in the repo; otherwise the mode set recorded for that
# subdirectory. An untouched, empty directory shows a blank cell.
def git_dir_info(content, status)
  modes = if content.path == '.'
            Set.new(status.values).flatten
          else
            status[content.name]
          end

  if modes.empty? && Dir.empty?(content.path)
    '    '
  else
    Git.colored_status_symbols(modes, @colors)
  end
end
# Inode number column (--inode), right-justified to 10 characters;
# empty string when the feature is off.
def inode(content)
  return '' unless @show_inode

  content.stats.ino.to_s.rjust(10).colorize(@colors[:inode])
end
# Long-format prefix for an entry: permissions, then optional
# hard-link count, owner and group columns, followed by size and
# mtime, space-separated. Empty string outside long mode.
def long_info(content)
  return '' unless @long

  links = content.nlink.to_s.rjust(@linklength)
  line_array = [mode_info(content.stats)]
  line_array.push links if @hard_links_count
  line_array.push user_info(content) if @show_user
  line_array.push group_info(content.group) if @show_group
  line_array.push(size_info(content.size), mtime_info(content.mtime))
  line_array.join(' ')
end
# " ⇒ target" suffix for symlinks in long format. '…' stands in for a
# target that could not be read; dead links get a marker and their own
# colour. Empty string for non-links or outside long mode.
def symlink_info(content)
  return '' unless @long && content.symlink?

  target = content.link_target.nil? ? '…' : content.link_target
  link_info = " ⇒ #{target}"
  if content.dead?
    "#{link_info} [Dead link]".colorize(@colors[:dead_link])
  else
    link_info.colorize(@colors[:link])
  end
end
# With -L, swap a live, resolvable symlink for a FileInfo of its
# destination; in every other case return the entry unchanged.
def update_content_if_show_symbol_dest(content, show_symbol_dest_flag)
  resolvable = show_symbol_dest_flag &&
               content.symlink? &&
               !content.link_target.nil? &&
               !content.dead?
  return content unless resolvable

  FileInfo.info(content.link_target)
end
# Re-encode +str+ for the terminal's external encoding, silently
# dropping characters that have no representation there.
def out_encode(str) = str.encode(Encoding.default_external, undef: :replace, replace: '')
# Build the fully decorated cell for one entry — inode, long info, git
# status, icon + name, and symlink suffix — and bump @count[increment].
def fetch_string(content, key, color, increment)
  @count[increment] += 1
  value = increment == :folders ? @folders[key] : @files[key]
  # Icon glyphs are stored as "\uXXXX" escape text in the YAML; decode
  # the 4 hex digits into the actual character.
  logo = value.gsub(/\\u[\da-f]{4}/i) { |m| [m[-4..].to_i(16)].pack('U') }
  name = @hyperlink ? make_link(content) : content.show
  name += content.directory? && @indicator_style != 'none' ? '/' : ' '
  entry = @icons ? "#{out_encode(logo)} #{out_encode(name)}" : out_encode(name).to_s
  entry = entry.bright if !content.directory? && content.executable?

  # Compute the symlink suffix from the link itself BEFORE possibly
  # swapping `content` for the link destination (-L).
  symlink_info_string = symlink_info(content)
  content = update_content_if_show_symbol_dest(content,@show_symbol_dest)

  "#{inode(content)} #{long_info(content)} #{git_info(content)} #{entry.colorize(color)}#{symlink_info_string}"
end
# Print one output row. `widths` holds the column width for each slot;
# the padding for a column is emitted before the NEXT entry, so no
# trailing whitespace reaches the terminal.
def ls_line(chunk, widths)
  padding = 0
  line = +''
  chunk.each_with_index do |content, i|
    entry = fetch_string(content, *options(content))
    line << (' ' * padding)
    line << ' ' << entry.encode(Encoding.default_external, undef: :replace)
    padding = widths[i] - Unicode::DisplayWidth.of(content.show) - additional_chars_per_item
  end
  print line << "\n"
end
# Colour for a file entry, by precedence: special file types first,
# then executables, hidden files, recognized extensions, and finally
# the unrecognized-file fallback.
def file_color(file, key)
  color_key = case
              when file.chardev? then :chardev
              when file.blockdev? then :blockdev
              when file.socket? then :socket
              when file.executable? then :executable_file
              when file.hidden? then :hidden
              when @files.key?(key) then :recognized_file
              else :unrecognized_file
              end
  @colors[color_key]
end
# [icon key, colour, count bucket] triple for an entry, dispatched on
# whether it is a directory. Consumed by fetch_string via splat.
def options(content)
  if content.directory?
    options_directory(content).values_at(:key, :color, :group)
  else
    options_file(content).values_at(:key, :color, :group)
  end
end
# Icon/colour lookup for a directory: exact folder name, then the
# alias table, then the generic :folder icon.
def options_directory(content)
  key = content.name.downcase.to_sym
  key = @folder_aliases[key] unless @folders.key?(key)
  key = :folder if key.nil?

  color = content.hidden? ? @colors[:hidden_dir] : @colors[:dir]
  {key: key, color: color, group: :folders}
end
# Icon/colour lookup for a file by extension, consulting the alias
# table before falling back to the generic :file icon.
def options_file(content)
  key = File.extname(content.name).delete_prefix('.').downcase.to_sym
  key = @file_aliases[key] unless @files.key?(key)

  color = file_color(content, key)
  group = @files.key?(key) ? :recognized_files : :unrecognized_files

  # NOTE(review): the :file fallback is applied only after colour and
  # group were derived from a possibly-nil key — confirm this ordering
  # (unlike options_directory) is intentional.
  key = :file if key.nil?
  {key: key, color: color, group: group}
end
# Read, filter, sort and group one directory level for tree mode;
# stores the FileInfo list in @contents and returns it.
def tree_contents(path)
  @contents = Dir.entries(path, encoding: ColorLS.file_encoding)

  filter_hidden_contents

  @contents.map! { |e| FileInfo.dir_entry(path, e, link_info: @long) }

  filter_contents if @show
  sort_contents if @sort
  group_contents if @group

  @contents
end
# Recursively print the tree view: branch glyphs, the decorated entry,
# then descend into subdirectories while the depth limit allows.
def tree_traverse(path, prespace, depth, indent)
  contents = tree_contents(path)
  contents.each do |content|
    # `||` binds tighter than `?:`, so the last entry OR any directory
    # gets the corner glyph.
    icon = content == contents.last || content.directory? ? ' └──' : ' ├──'
    print tree_branch_preprint(prespace, indent, icon).colorize(@colors[:tree])
    print " #{fetch_string(content, *options(content))} \n"
    next unless content.directory?

    tree_traverse("#{path}/#{content}", prespace + indent, depth + 1, indent) if keep_going(depth)
  end
end
# True while the tree recursion may descend further: either no depth
# limit was configured, or the current depth is still below it.
def keep_going(depth)
  limit = @tree[:depth]
  limit.nil? || depth < limit
end
# Branch prefix for a tree line: at the root just the icon; deeper
# levels draw one vertical-bar segment per ancestor level, then the
# icon, then a horizontal run of `indent` dashes.
def tree_branch_preprint(prespace, indent, prespace_icon)
  return prespace_icon if prespace.zero?

  vertical_bars = ' │ ' * (prespace / indent)
  "#{vertical_bars}#{prespace_icon}#{'─' * indent}"
end
# Wrap the display name in an OSC 8 terminal hyperlink (--hyperlink)
# pointing at the entry's absolute path as a file URI.
def make_link(content)
  uri = Addressable::URI.convert_path(File.absolute_path(content.path))
  "\033]8;;#{uri}\007#{content.show}\033]8;;\007"
end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/monkeys.rb | lib/colorls/monkeys.rb | # frozen_string_literal: true
# Convenience monkey-patches on String used throughout colorls.
class String
  # Alias-style wrapper so call sites can use the familiar #colorize
  # name for Rainbow's String#color.
  def colorize(color_value)
    self.color(color_value)
  end

  # Collapse repeated characters, keeping the first occurrence of each
  # in order (e.g. "MMA!" -> "MA!"); used for git status cells.
  def uniq
    each_char.to_a.uniq.join
  end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/version.rb | lib/colorls/version.rb | # frozen_string_literal: true
module ColorLS
  # Published gem version string.
  VERSION = '1.5.0'
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/flags.rb | lib/colorls/flags.rb | # frozen_string_literal: true
require 'optparse'
require 'colorls/version'
module ColorLS
class Flags
# Parse the raw CLI tokens into @opts/@report_mode and remember the
# exit status; tree mode downgrades --all to --almost-all.
def initialize(*args)
  @args = args
  @light_colors = false

  @opts = default_opts
  @report_mode = false
  @exit_status_code = 0

  parse_options

  return unless @opts[:mode] == :tree

  # FIXME: `--all` and `--tree` do not work together, use `--almost-all` instead
  @opts[:almost_all] = true if @opts[:all]
  @opts[:all] = false
end
# Entry point after construction: initialize the locale, default to
# the current directory when no paths were given, then run the
# listing. Returns the process exit status code.
def process
  init_locale

  @args = ['.'] if @args.empty?

  process_args
end
# Lightweight record pairing an option's flag strings with its
# description text.
Option = Struct.new(:flags, :desc)

# All parser switches as Option records, skipping separators (which
# have no description) and switches without flags.
def options
  list = @parser.top.list + @parser.base.list
  result = list.collect do |o|
    next unless o.respond_to? :desc

    flags = o.short + o.long
    next if flags.empty?

    Option.new(flags, o.desc)
  end
  result.compact
end
private
# Set LC_COLLATE from the environment so name sorting respects the
# user's locale; warn but continue on an invalid locale.
def init_locale
  # initialize locale from environment
  CLocale.setlocale(CLocale::LC_COLLATE, '')
rescue RuntimeError => e
  warn "WARN: #{e}, check your locale settings"
end
# Resolve the CLI path arguments into FileInfo objects, partitioned
# into [directories, files] (either element may be nil). Nonexistent
# or unreadable paths are reported on stderr and flip the exit status
# to 2 instead of aborting the whole run.
def group_files_and_directories
  infos = @args.flat_map do |arg|
    FileInfo.info(arg, show_filepath: true)
  rescue Errno::ENOENT
    $stderr.puts "colorls: Specified path '#{arg}' doesn't exist.".colorize(:red)
    @exit_status_code = 2
    []
  rescue SystemCallError => e
    # Bug fix: this message previously interpolated an undefined local
    # `path`, raising NameError whenever the rescue fired; use `arg`.
    $stderr.puts "#{arg}: #{e}".colorize(:red)
    @exit_status_code = 2
    []
  end

  infos.group_by(&:directory?).values_at(true, false)
end
# Drive the listing: files first, then each directory sorted in locale
# order, with a "name:" header when several arguments were given.
# Directory-level system errors are reported without stopping the
# loop. Returns the accumulated exit status code.
def process_args
  core = Core.new(**@opts)
  directories, files = group_files_and_directories

  core.ls_files(files) unless files.nil?

  directories&.sort_by! do |a|
    CLocale.strxfrm(a.name)
  end&.each do |dir|
    puts "\n#{dir.show}:" if @args.size > 1

    core.ls_dir(dir)
  rescue SystemCallError => e
    $stderr.puts "#{dir}: #{e}".colorize(:red)
  end

  core.display_report(@report_mode) if @report_mode

  @exit_status_code
end
# Baseline option hash before any CLI flags are applied. Output mode
# defaults to vertical columns on a TTY and one-per-line when piped.
def default_opts
  {
    show: false,
    sort: true,
    reverse: false,
    group: nil,
    mode: STDOUT.tty? ? :vertical : :one_per_line, # rubocop:disable Style/GlobalStdStream
    all: false,
    almost_all: false,
    show_git: false,
    colors: [],
    tree_depth: 3,
    show_inode: false,
    indicator_style: 'slash',
    long_style_options: {}
  }
end
# Register the sorting-related switches on the parser.
def add_sort_options(options)
  options.separator ''
  options.separator 'sorting options:'
  options.separator ''
  options.on('--sd', '--sort-dirs', '--group-directories-first', 'sort directories first') { @opts[:group] = :dirs }
  options.on('--sf', '--sort-files', 'sort files first') { @opts[:group] = :files }
  options.on('-t', 'sort by modification time, newest first') { @opts[:sort] = :time }
  options.on('-U', 'do not sort; list entries in directory order') { @opts[:sort] = false }
  options.on('-S', 'sort by file size, largest first') { @opts[:sort] = :size }
  options.on('-X', 'sort by file extension') { @opts[:sort] = :extension }
  options.on(
    '--sort=WORD',
    %w[none time size extension],
    'sort by WORD instead of name: none, size (-S), time (-t), extension (-X)'
  ) do |word|
    # 'none' maps to the same value as -U; other words name sort keys.
    @opts[:sort] = case word
                   when 'none' then false
                   else word.to_sym
                   end
  end
  options.on('-r', '--reverse', 'reverse order while sorting') { @opts[:reverse] = true }
end
# Register filtering/display switches shared by every output mode.
def add_common_options(options)
  options.on('-a', '--all', 'do not ignore entries starting with .') { @opts[:all] = true }
  options.on('-A', '--almost-all', 'do not list . and ..') { @opts[:almost_all] = true }
  options.on('-d', '--dirs', 'show only directories') { @opts[:show] = :dirs }
  options.on('-f', '--files', 'show only files') { @opts[:show] = :files }
  options.on('--gs', '--git-status', 'show git status for each file') { @opts[:show_git] = true }
  options.on('-p', 'append / indicator to directories') { @opts[:indicator_style] = 'slash' }
  options.on('-i', '--inode', 'show inode number') { @opts[:show_inode] = true }
  options.on('--report=[WORD]', %w[short long], 'show report: short, long (default if omitted)') do |word|
    # Bare --report (no argument) behaves like --report=long.
    word ||= :long
    @report_mode = word.to_sym
  end
  options.on(
    '--indicator-style=[STYLE]',
    %w[none slash], 'append indicator with style STYLE to entry names: none, slash (-p) (default)'
  ) do |style|
    @opts[:indicator_style] = style
  end
end
# Register switches that choose the overall output layout.
def add_format_options(options)
  options.on(
    '--format=WORD', %w[across horizontal long single-column],
    'use format: across (-x), horizontal (-x), long (-l), single-column (-1), vertical (-C)'
  ) do |word|
    case word
    when 'across', 'horizontal' then @opts[:mode] = :horizontal
    when 'vertical' then @opts[:mode] = :vertical
    when 'long' then @opts[:mode] = :long
    when 'single-column' then @opts[:mode] = :one_per_line
    end
  end
  options.on('-1', 'list one file per line') { @opts[:mode] = :one_per_line }
  options.on('--tree=[DEPTH]', Integer, 'shows tree view of the directory') do |depth|
    @opts[:tree_depth] = depth
    @opts[:mode] = :tree
  end
  options.on('-x', 'list entries by lines instead of by columns') { @opts[:mode] = :horizontal }
  options.on('-C', 'list entries by columns instead of by lines') { @opts[:mode] = :vertical }
  options.on('--without-icons', 'list entries without icons') { @opts[:icons] = false }
end
# Defaults for the long-format sub-options before CLI overrides.
def default_long_style_options
  {
    show_group: true,
    show_user: true,
    time_style: '',
    hard_links_count: true,
    show_symbol_dest: false,
    human_readable_size: true
  }
end
# Register the switches that tweak the long listing, accumulating
# their effects into @opts[:long_style_options].
def add_long_style_options(options)
  long_style_options = default_long_style_options
  options.on('-l', '--long', 'use a long listing format') { @opts[:mode] = :long }
  long_style_options = set_long_style_user_and_group_options(options, long_style_options)
  options.on('--time-style=FORMAT', String, 'use time display format') do |time_style|
    long_style_options[:time_style] = time_style
  end
  options.on('--no-hardlinks', 'show no hard links count in a long listing') do
    long_style_options[:hard_links_count] = false
  end
  long_style_options = get_long_style_symlink_options(options, long_style_options)
  options.on('--non-human-readable', 'show file sizes in bytes only') do
    long_style_options[:human_readable_size] = false
  end
  @opts[:long_style_options] = long_style_options
end
# Register -o / -g / -G, which toggle the owner and group columns
# (-o and -g also force long format). Returns the options hash.
def set_long_style_user_and_group_options(options, long_style_options)
  options.on('-o', 'use a long listing format without group information') do
    @opts[:mode] = :long
    long_style_options[:show_group] = false
  end
  options.on('-g', 'use a long listing format without owner information') do
    @opts[:mode] = :long
    long_style_options[:show_user] = false
  end
  options.on('-G', '--no-group', 'show no group information in a long listing') do
    long_style_options[:show_group] = false
  end
  long_style_options
end
# Register -L (show symlink destinations). Returns the options hash.
def get_long_style_symlink_options(options, long_style_options)
  options.on('-L', 'show information on the destination of symbolic links') do
    long_style_options[:show_symbol_dest] = true
  end
  long_style_options
end
# Register colour-scheme and miscellaneous global switches.
def add_general_options(options)
  options.separator ''
  options.separator 'general options:'
  options.separator ''

  options.on(
    '--color=[WHEN]', %w[always auto never],
    'colorize the output: auto, always (default if omitted), never'
  ) do |word|
    # let Rainbow decide in "auto" mode
    Rainbow.enabled = (word != 'never') unless word == 'auto'
  end
  options.on('--light', 'use light color scheme') { @light_colors = true }
  options.on('--dark', 'use dark color scheme') { @light_colors = false }
  options.on('--hyperlink') { @opts[:hyperlink] = true }
end
# Register ls switches that are accepted but ignored. (The method and
# separator keep the historical "compatiblity" misspelling; renaming
# the method would break existing callers.)
def add_compatiblity_options(options)
  options.separator ''
  options.separator 'options for compatiblity with ls (ignored):'
  options.separator ''

  options.on('-h', '--human-readable') {} # always active
end
# Print the usage banner plus examples, then terminate the process.
def show_help
  puts @parser
  show_examples
  exit
end
# Register --help at the tail of the switch list.
def add_help_option(opts)
  opts.separator ''
  opts.on_tail('--help', 'prints this help') { show_help }
end
# Print usage examples. Plain (non-squiggly) heredoc: the gsub strips
# one level of leading-space indentation per line.
def show_examples
  puts <<EXAMPLES.gsub(/^ /, '')
examples:
* show the given file:
#{'colorls README.md'.colorize(:green)}
* show matching files and list matching directories:
#{'colorls *'.colorize(:green)}
* filter output by a regular expression:
#{'colorls | grep PATTERN'.colorize(:green)}
* several short options can be combined:
#{'colorls -d -l -a'.colorize(:green)}
#{'colorls -dla'.colorize(:green)}
EXAMPLES
end
# Wire every option group onto the parser, in display order.
def assign_each_options(opts)
  add_common_options(opts)
  add_format_options(opts)
  add_long_style_options(opts)
  add_sort_options(opts)
  add_compatiblity_options(opts)
  add_general_options(opts)
  add_help_option(opts)
end
# Build the OptionParser, parse @args in place, then resolve the
# colour scheme. Any parse error prints a hint and exits with status 2.
def parse_options
  @parser = OptionParser.new do |opts|
    opts.banner = 'Usage: colorls [OPTION]... [FILE]...'
    opts.separator ''

    assign_each_options(opts)

    opts.on_tail('--version', 'show version') do
      puts ColorLS::VERSION
      exit
    end
  end

  # show help and exit if the only argument is -h
  show_help if !@args.empty? && @args.all?('-h')

  @parser.parse!(@args)

  set_color_opts
rescue OptionParser::ParseError => e
  warn "colorls: #{e}\nSee 'colorls --help'."
  exit 2
end
# Load the colour scheme YAML (dark by default, light with --light)
# into @opts[:colors].
def set_color_opts
  color_scheme_file = @light_colors ? 'light_colors.yaml' : 'dark_colors.yaml'
  @opts[:colors] = ColorLS::Yaml.new(color_scheme_file).load(aliase: true)
end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/yaml.rb | lib/colorls/yaml.rb | # frozen_string_literal: true
module ColorLS
# Loads icon/colour definitions from the gem's bundled YAML files and
# merges any user overrides from ~/.config/colorls/.
class Yaml
  def initialize(filename)
    # Interpolate the requested file name into both search paths.
    # (Restores the `#{filename}` interpolation that had been corrupted
    # to a literal "#(unknown)", which ignored the constructor argument
    # and made every lookup point at a nonexistent file.)
    @filepath = File.join(File.dirname(__FILE__), "../yaml/#{filename}")
    @user_config_filepath = File.join(Dir.home, ".config/colorls/#{filename}")
  end

  # Parse the bundled file (symbol keys), overlaying the user's config
  # file when present. With aliase: true, values are symbolized unless
  # they contain '#' (hex colour codes stay strings).
  def load(aliase: false)
    yaml = read_file(@filepath)
    if File.exist?(@user_config_filepath)
      user_config_yaml = read_file(@user_config_filepath)
      yaml = yaml.merge(user_config_yaml)
    end
    return yaml unless aliase

    yaml.to_a.map! { |k, v| v.include?('#') ? [k, v] : [k, v.to_sym] }.to_h
  end

  # Read one YAML file as UTF-8 and symbolize its top-level keys.
  def read_file(filepath)
    ::YAML.safe_load(File.read(filepath, encoding: Encoding::UTF_8)).transform_keys!(&:to_sym)
  end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/fileinfo.rb | lib/colorls/fileinfo.rb | # frozen_string_literal: true
require 'forwardable'
module ColorLS
# Wraps File.lstat plus display metadata for a single directory entry.
# uid/gid-to-name lookups are cached across all instances.
class FileInfo
  extend Forwardable

  @@users = {} # rubocop:disable Style/ClassVars
  @@groups = {} # rubocop:disable Style/ClassVars

  attr_reader :stats, :name, :path, :parent

  # name/parent identify the entry; path overrides the joined default.
  # link_info: resolve symlink targets; show_filepath: display the
  # full path instead of the basename.
  def initialize(name:, parent:, path: nil, link_info: true, show_filepath: false)
    @name = name
    @parent = parent
    # +path unfreezes so force_encoding below can mutate it.
    @path = path.nil? ? File.join(parent, name) : +path
    @stats = File.lstat(@path)
    @path.force_encoding(ColorLS.file_encoding)

    handle_symlink(@path) if link_info && @stats.symlink?
    set_show_name(use_path: show_filepath)
  end

  # Build a FileInfo from a single path argument.
  def self.info(path, link_info: true, show_filepath: false)
    FileInfo.new(name: File.basename(path), parent: File.dirname(path), path: path, link_info: link_info,
                 show_filepath: show_filepath)
  end

  # Build a FileInfo for entry `child` inside directory `dir`.
  def self.dir_entry(dir, child, link_info: true)
    FileInfo.new(name: child, parent: dir, link_info: link_info)
  end

  # Name (or path) as it should be rendered.
  def show
    @show_name
  end

  # True when this is a symlink whose target does not exist.
  def dead?
    @dead
  end

  def hidden?
    @name.start_with?('.')
  end

  # Owner name (numeric uid string as fallback), cached per uid.
  def owner
    return @@users[@stats.uid] if @@users.key? @stats.uid

    user = Etc.getpwuid(@stats.uid)
    @@users[@stats.uid] = user.nil? ? @stats.uid.to_s : user.name
  rescue ArgumentError
    @stats.uid.to_s
  end

  # Group name (numeric gid string as fallback), cached per gid.
  def group
    return @@groups[@stats.gid] if @@groups.key? @stats.gid

    group = Etc.getgrgid(@stats.gid)
    @@groups[@stats.gid] = group.nil? ? @stats.gid.to_s : group.name
  rescue ArgumentError
    @stats.gid.to_s
  end

  # target of a symlink (only available for symlinks)
  def link_target
    @target
  end

  def to_s
    name
  end

  def_delegators :@stats, :directory?, :socket?, :chardev?, :symlink?, :blockdev?, :mtime, :nlink, :size, :owned?,
                 :executable?

  private

  # Record the symlink target and whether it still resolves; failures
  # are reported on stderr and leave both attributes nil.
  def handle_symlink(path)
    @target = File.readlink(path)
    @dead = !File.exist?(path)
  rescue SystemCallError => e
    $stderr.puts "cannot read symbolic link: #{e}"
  end

  def show_basename
    @name.encode(Encoding.find('filesystem'), Encoding.default_external,
                 invalid: :replace, undef: :replace)
  end

  def show_relative_path
    @path.encode(Encoding.find('filesystem'), Encoding.default_external,
                 invalid: :replace, undef: :replace)
  end

  # Choose the display name. NOTE(review): when use_path is true the
  # relative path wins for directories too — confirm the middle
  # assignment is intentionally redundant in that case.
  def set_show_name(use_path: false)
    @show_name = show_basename unless use_path
    @show_name = show_basename if directory?
    @show_name = show_relative_path if use_path
  end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/git.rb | lib/colorls/git.rb | # frozen_string_literal: true
require 'pathname'
require 'set'
module ColorLS
# Shells out to `git` to collect status information for a directory.
module Git
  EMPTY_SET = Set.new.freeze
  private_constant :EMPTY_SET

  # Status for the repository containing `repo_path`: a frozen Hash
  # mapping each top-level entry name to its Set of porcelain mode
  # codes; the hash default covers entries with no recorded status.
  # Returns nil when `repo_path` is not inside a git work tree.
  def self.status(repo_path)
    prefix, success = git_prefix(repo_path)

    return unless success

    prefix_path = Pathname.new(prefix)

    git_status = Hash.new { |hash, key| hash[key] = Set.new }
    git_status_default = EMPTY_SET

    git_subdir_status(repo_path) do |mode, file|
      if file == prefix
        # Status of the queried directory itself becomes the default
        # for entries without their own record.
        git_status_default = Set[mode].freeze
      else
        # Attribute the file's status to its first path component
        # relative to the queried directory.
        path = Pathname.new(file).relative_path_from(prefix_path)
        git_status[path.descend.first.cleanpath.to_s].add(mode)
      end
    end

    warn "git status failed in #{repo_path}" unless $CHILD_STATUS.success?

    git_status.default = git_status_default

    git_status.freeze
  end

  # Render a Set of mode codes as a fixed-width (3+1) coloured cell; a
  # checkmark — or '=' on non-Unicode terminals — when unchanged.
  def self.colored_status_symbols(modes, colors)
    if modes.empty?
      return '  ✓ '
        .encode(Encoding.default_external, undef: :replace, replace: '=')
        .colorize(colors[:unchanged])
    end

    modes.to_a.join.uniq.delete('!').rjust(3).ljust(4)
         .sub('?', '?'.colorize(colors[:untracked]))
         .sub('A', 'A'.colorize(colors[:addition]))
         .sub('M', 'M'.colorize(colors[:modification]))
         .sub('D', 'D'.colorize(colors[:deletion]))
  end

  class << self
    private

    # ["prefix of repo_path within its repo", success?]; [nil, false]
    # when the git binary is missing.
    def git_prefix(repo_path)
      [
        IO.popen(['git', '-C', repo_path, 'rev-parse', '--show-prefix'], err: File::NULL, &:gets)&.chomp,
        $CHILD_STATUS.success?
      ]
    rescue Errno::ENOENT
      [nil, false]
    end

    # Yield [mode, file] pairs parsed from NUL-separated porcelain
    # output restricted to repo_path's subtree.
    def git_subdir_status(repo_path)
      IO.popen(
        ['git', '-C', repo_path, 'status', '--porcelain', '-z', '-unormal', '--ignored', '.'],
        external_encoding: Encoding::ASCII_8BIT
      ) do |output|
        while (status_line = output.gets "\x0")
          mode, file = status_line.chomp("\x0").split(' ', 2)
          yield mode, file
          # skip the next \x0 separated original path for renames, issue #185
          output.gets("\x0") if mode.start_with? 'R'
        end
      end
    end
  end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
athityakumar/colorls | https://github.com/athityakumar/colorls/blob/f066e3216180351df88d2d44ea0e8c788cca0045/lib/colorls/layout.rb | lib/colorls/layout.rb | # frozen_string_literal: true
module ColorLS
# Base class for arranging entries into terminal columns. Subclasses
# supply column_widths (per-column widths for a candidate column
# count) and get_chunks (how entries map onto printed lines).
class Layout
  def initialize(contents, widths, line_size)
    @max_widths = widths
    @contents = contents
    @screen_width = line_size
  end

  # Yield each printable line along with the final column widths.
  def each_line
    return if @contents.empty?

    get_chunks(chunk_size).each { |line| yield(line.compact, @max_widths) }
  end

  private

  # Binary-search the largest column arrangement whose summed widths
  # still fit the screen; fixes @max_widths to the chosen per-column
  # widths and returns the chunk size for get_chunks.
  def chunk_size
    min_size = @max_widths.min
    max_chunks = [1, @screen_width / min_size].max
    max_chunks = [max_chunks, @max_widths.size].min
    min_chunks = 1

    loop do
      mid = ((max_chunks + min_chunks).to_f / 2).ceil

      size, max_widths = column_widths(mid)

      if min_chunks < max_chunks && not_in_line(max_widths)
        max_chunks = mid - 1
      elsif min_chunks < mid
        min_chunks = mid
      else
        @max_widths = max_widths
        return size
      end
    end
  end

  # True when the proposed column widths overflow the screen.
  def not_in_line(max_widths)
    max_widths.sum > @screen_width
  end
end
# Layout printing exactly one entry per line (-1 and long format).
class SingleColumnLayout < Layout
  def initialize(contents)
    super(contents, [1], 1)
  end

  private

  # Always a single column.
  def chunk_size
    1
  end

  def get_chunks(_chunk_size)
    @contents.each_slice(1)
  end
end
# Fills rows left-to-right before wrapping (ls -x / --format=across).
class HorizontalLayout < Layout
  private

  # With `mid` columns, rows are consecutive slices of the widths; pad
  # the final short row with zero widths so transpose works, then take
  # the widest entry of each resulting column.
  def column_widths(mid)
    max_widths = @max_widths.each_slice(mid).to_a
    last_size = max_widths.last.size
    max_widths.last.fill(0, last_size, max_widths.first.size - last_size)
    [mid, max_widths.transpose.map!(&:max)]
  end

  def get_chunks(chunk_size)
    @contents.each_slice(chunk_size)
  end
end
# Fills columns top-to-bottom before moving right (ls default -C).
class VerticalLayout < Layout
  private

  # For `mid` columns, each column holds ceil(n/mid) entries; column
  # width is the widest entry in its slice.
  def column_widths(mid)
    chunk_size = (@max_widths.size.to_f / mid).ceil
    [chunk_size, @max_widths.each_slice(chunk_size).map(&:max).to_a]
  end

  # Pad the last column with a trailing nil so transpose accepts the
  # ragged grid; each_line compacts the nils away before printing.
  def get_chunks(chunk_size)
    columns = @contents.each_slice(chunk_size).to_a
    columns.last[chunk_size - 1] = nil if columns.last.size < chunk_size
    columns.transpose
  end
end
end
| ruby | MIT | f066e3216180351df88d2d44ea0e8c788cca0045 | 2026-01-04T15:44:05.020663Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/spec_helper.rb | spec/spec_helper.rb | require "coveralls"
require "simplecov"
# Coverage setup: exclude the entry-point and version files, and emit
# results both to Coveralls and as a local HTML report.
SimpleCov.start do
  add_filter "lib/terraforming.rb"
  add_filter "lib/terraforming/version.rb"

  formatter SimpleCov::Formatter::MultiFormatter.new([
    Coveralls::SimpleCov::Formatter,
    SimpleCov::Formatter::HTMLFormatter,
  ])
end
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'terraforming'
require 'tempfile'
require 'time'
# Absolute-ish path of a named file under this spec directory's
# fixtures folder.
def fixture_path(fixture_name)
  fixtures_dir = File.join(File.dirname(__FILE__), "fixtures")
  File.join(fixtures_dir, fixture_name)
end
# Path to the shared terraform.tfstate fixture file.
def tfstate_fixture_path
  fixture_path("terraform.tfstate")
end
# Parsed contents of the terraform.tfstate fixture.
#
# Uses File.read rather than Kernel#open: open leaked the File handle
# until GC (no block, no close) and can even spawn a subprocess when
# given a path starting with '|'.
def tfstate_fixture
  JSON.parse(File.read(tfstate_fixture_path))
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming_spec.rb | spec/lib/terraforming_spec.rb | require 'spec_helper'
# Smoke test: the gem must expose a non-nil version constant.
describe Terraforming do
  it 'has a version number' do
    expect(Terraforming::VERSION).not_to be nil
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/cli_spec.rb | spec/lib/terraforming/cli_spec.rb | require "spec_helper"
# Specs for the Thor-based CLI. The exporter classes themselves are
# stubbed, so the "resources" section only verifies subcommand dispatch,
# while "flush to stdout" verifies output and tfstate merging.
module Terraforming
  describe CLI do
    context "resources" do
      # Every including group must define `klass` (the exporter class)
      # and `command` (the Thor subcommand symbol) via `let`.
      shared_examples "CLI examples" do
        context "without --tfstate" do
          it "should export tf" do
            expect(klass).to receive(:tf).with(no_args)
            described_class.new.invoke(command, [], {})
          end
        end

        context "with --tfstate" do
          it "should export tfstate" do
            expect(klass).to receive(:tfstate).with(no_args)
            described_class.new.invoke(command, [], { tfstate: true })
          end
        end

        context "with --tfstate --merge TFSTATE" do
          it "should export merged tfstate" do
            expect(klass).to receive(:tfstate).with(no_args)
            described_class.new.invoke(command, [], { tfstate: true, merge: tfstate_fixture_path })
          end
        end
      end

      # Stub STS so caller-identity lookups never reach AWS.
      # NOTE: runs at example-group definition time and mutates the
      # global Aws.config for the whole suite.
      Aws.config[:sts] = {
        stub_responses: {
          get_caller_identity: {
            account: '123456789012',
            arn: 'arn:aws:iam::123456789012:user/terraforming',
            user_id: 'AAAABBBBCCCCDDDDDEEE'
          }
        }
      }

      before do
        allow(STDOUT).to receive(:puts).and_return(nil)
        allow(klass).to receive(:tf).and_return("")
        allow(klass).to receive(:tfstate).and_return({})
        allow(klass).to receive(:assume).and_return({})
      end

      # FIX: this group was mislabelled "asg" (copy-paste of the group
      # below); it exercises the ALB exporter.
      describe "alb" do
        let(:klass) { Terraforming::Resource::ALB }
        let(:command) { :alb }

        it_behaves_like "CLI examples"
      end

      describe "asg" do
        let(:klass) { Terraforming::Resource::AutoScalingGroup }
        let(:command) { :asg }

        it_behaves_like "CLI examples"
      end

      describe "cwa" do
        let(:klass) { Terraforming::Resource::CloudWatchAlarm }
        let(:command) { :cwa }

        it_behaves_like "CLI examples"
      end

      describe "dbpg" do
        let(:klass) { Terraforming::Resource::DBParameterGroup }
        let(:command) { :dbpg }

        it_behaves_like "CLI examples"
      end

      describe "dbsg" do
        let(:klass) { Terraforming::Resource::DBSecurityGroup }
        let(:command) { :dbsg }

        it_behaves_like "CLI examples"
      end

      describe "dbsn" do
        let(:klass) { Terraforming::Resource::DBSubnetGroup }
        let(:command) { :dbsn }

        it_behaves_like "CLI examples"
      end

      describe "ddb" do
        let(:klass) { Terraforming::Resource::DynamoDB }
        let(:command) { :ddb }

        it_behaves_like "CLI examples"
      end

      describe "ec2" do
        let(:klass) { Terraforming::Resource::EC2 }
        let(:command) { :ec2 }

        it_behaves_like "CLI examples"
      end

      describe "ecc" do
        let(:klass) { Terraforming::Resource::ElastiCacheCluster }
        let(:command) { :ecc }

        it_behaves_like "CLI examples"
      end

      describe "ecsn" do
        let(:klass) { Terraforming::Resource::ElastiCacheSubnetGroup }
        let(:command) { :ecsn }

        it_behaves_like "CLI examples"
      end

      describe "eip" do
        let(:klass) { Terraforming::Resource::EIP }
        let(:command) { :eip }

        it_behaves_like "CLI examples"
      end

      describe "efs" do
        let(:klass) { Terraforming::Resource::EFSFileSystem }
        let(:command) { :efs }

        it_behaves_like "CLI examples"
      end

      describe "elb" do
        let(:klass) { Terraforming::Resource::ELB }
        let(:command) { :elb }

        it_behaves_like "CLI examples"
      end

      describe "iamg" do
        let(:klass) { Terraforming::Resource::IAMGroup }
        let(:command) { :iamg }

        it_behaves_like "CLI examples"
      end

      describe "iamgm" do
        let(:klass) { Terraforming::Resource::IAMGroupMembership }
        let(:command) { :iamgm }

        it_behaves_like "CLI examples"
      end

      describe "iamgp" do
        let(:klass) { Terraforming::Resource::IAMGroupPolicy }
        let(:command) { :iamgp }

        it_behaves_like "CLI examples"
      end

      describe "iamip" do
        let(:klass) { Terraforming::Resource::IAMInstanceProfile }
        let(:command) { :iamip }

        it_behaves_like "CLI examples"
      end

      describe "iamp" do
        let(:klass) { Terraforming::Resource::IAMPolicy }
        let(:command) { :iamp }

        it_behaves_like "CLI examples"
      end

      describe "iampa" do
        let(:klass) { Terraforming::Resource::IAMPolicyAttachment }
        let(:command) { :iampa }

        it_behaves_like "CLI examples"
      end

      describe "iamr" do
        let(:klass) { Terraforming::Resource::IAMRole }
        let(:command) { :iamr }

        it_behaves_like "CLI examples"
      end

      describe "iamrp" do
        let(:klass) { Terraforming::Resource::IAMRolePolicy }
        let(:command) { :iamrp }

        it_behaves_like "CLI examples"
      end

      describe "iamu" do
        let(:klass) { Terraforming::Resource::IAMUser }
        let(:command) { :iamu }

        it_behaves_like "CLI examples"
      end

      describe "iamup" do
        let(:klass) { Terraforming::Resource::IAMUserPolicy }
        let(:command) { :iamup }

        it_behaves_like "CLI examples"
      end

      describe "kmsa" do
        let(:klass) { Terraforming::Resource::KMSAlias }
        let(:command) { :kmsa }

        it_behaves_like "CLI examples"
      end

      describe "kmsk" do
        let(:klass) { Terraforming::Resource::KMSKey }
        let(:command) { :kmsk }

        it_behaves_like "CLI examples"
      end

      describe "lc" do
        let(:klass) { Terraforming::Resource::LaunchConfiguration }
        let(:command) { :lc }

        it_behaves_like "CLI examples"
      end

      describe "igw" do
        let(:klass) { Terraforming::Resource::InternetGateway }
        let(:command) { :igw }

        it_behaves_like "CLI examples"
      end

      describe "nacl" do
        let(:klass) { Terraforming::Resource::NetworkACL }
        let(:command) { :nacl }

        it_behaves_like "CLI examples"
      end

      describe "nat" do
        let(:klass) { Terraforming::Resource::NATGateway }
        let(:command) { :nat }

        it_behaves_like "CLI examples"
      end

      describe "nif" do
        let(:klass) { Terraforming::Resource::NetworkInterface }
        let(:command) { :nif }

        it_behaves_like "CLI examples"
      end

      describe "r53r" do
        let(:klass) { Terraforming::Resource::Route53Record }
        let(:command) { :r53r }

        it_behaves_like "CLI examples"
      end

      describe "r53z" do
        let(:klass) { Terraforming::Resource::Route53Zone }
        let(:command) { :r53z }

        it_behaves_like "CLI examples"
      end

      # Label lowercased to match every other subcommand group.
      describe "rds" do
        let(:klass) { Terraforming::Resource::RDS }
        let(:command) { :rds }

        it_behaves_like "CLI examples"
      end

      describe "rs" do
        let(:klass) { Terraforming::Resource::Redshift }
        let(:command) { :rs }

        it_behaves_like "CLI examples"
      end

      describe "rt" do
        let(:klass) { Terraforming::Resource::RouteTable }
        let(:command) { :rt }

        it_behaves_like "CLI examples"
      end

      describe "rta" do
        let(:klass) { Terraforming::Resource::RouteTableAssociation }
        let(:command) { :rta }

        it_behaves_like "CLI examples"
      end

      describe "s3" do
        let(:klass) { Terraforming::Resource::S3 }
        let(:command) { :s3 }

        it_behaves_like "CLI examples"
      end

      describe "sg" do
        let(:klass) { Terraforming::Resource::SecurityGroup }
        let(:command) { :sg }

        it_behaves_like "CLI examples"
      end

      describe "sqs" do
        let(:klass) { Terraforming::Resource::SQS }
        let(:command) { :sqs }

        it_behaves_like "CLI examples"
      end

      describe "sn" do
        let(:klass) { Terraforming::Resource::Subnet }
        let(:command) { :sn }

        it_behaves_like "CLI examples"
      end

      describe "vpc" do
        let(:klass) { Terraforming::Resource::VPC }
        let(:command) { :vpc }

        it_behaves_like "CLI examples"
      end

      describe "vgw" do
        let(:klass) { Terraforming::Resource::VPNGateway }
        let(:command) { :vgw }

        it_behaves_like "CLI examples"
      end

      describe "snst" do
        let(:klass) { Terraforming::Resource::SNSTopic }
        let(:command) { :snst }

        it_behaves_like "CLI examples"
      end

      describe "snss" do
        let(:klass) { Terraforming::Resource::SNSTopicSubscription }
        let(:command) { :snss }

        it_behaves_like "CLI examples"
      end
    end

    # End-to-end output behaviour, exercised through the S3 subcommand.
    context "flush to stdout" do
      describe "s3" do
        let(:klass) { Terraforming::Resource::S3 }
        let(:command) { :s3 }

        let(:tf) do
          <<-EOS
resource "aws_s3_bucket" "hoge" {
bucket = "hoge"
acl = "private"
}
resource "aws_s3_bucket" "fuga" {
bucket = "fuga"
acl = "private"
}
          EOS
        end

        # Raw exporter output (resources only).
        let(:tfstate) do
          {
            "aws_s3_bucket.hoge" => {
              "type" => "aws_s3_bucket",
              "primary" => {
                "id" => "hoge",
                "attributes" => {
                  "acl" => "private",
                  "bucket" => "hoge",
                  "id" => "hoge"
                }
              }
            },
            "aws_s3_bucket.fuga" => {
              "type" => "aws_s3_bucket",
              "primary" => {
                "id" => "fuga",
                "attributes" => {
                  "acl" => "private",
                  "bucket" => "fuga",
                  "id" => "fuga"
                }
              }
            }
          }
        end

        # Expected tfstate when exporting without merging.
        let(:initial_tfstate) do
          {
            "version" => 1,
            "serial" => 1,
            "modules" => [
              {
                "path" => [
                  "root"
                ],
                "outputs" => {},
                "resources" => {
                  "aws_s3_bucket.hoge" => {
                    "type" => "aws_s3_bucket",
                    "primary" => {
                      "id" => "hoge",
                      "attributes" => {
                        "acl" => "private",
                        "bucket" => "hoge",
                        "id" => "hoge"
                      }
                    }
                  },
                  "aws_s3_bucket.fuga" => {
                    "type" => "aws_s3_bucket",
                    "primary" => {
                      "id" => "fuga",
                      "attributes" => {
                        "acl" => "private",
                        "bucket" => "fuga",
                        "id" => "fuga"
                      }
                    }
                  },
                }
              }
            ]
          }
        end

        # Expected result of merging the export into the fixture tfstate:
        # the fixture's existing aws_elb resource is preserved, the serial
        # is bumped, and the remote config is carried over.
        let(:merged_tfstate) do
          {
            "version" => 1,
            "serial" => 89,
            "remote" => {
              "type" => "s3",
              "config" => { "bucket" => "terraforming-tfstate", "key" => "tf" }
            },
            "modules" => [
              {
                "path" => ["root"],
                "outputs" => {},
                "resources" => {
                  "aws_elb.hogehoge" => {
                    "type" => "aws_elb",
                    "primary" => {
                      "id" => "hogehoge",
                      "attributes" => {
                        "availability_zones.#" => "2",
                        "connection_draining" => "true",
                        "connection_draining_timeout" => "300",
                        "cross_zone_load_balancing" => "true",
                        "dns_name" => "hoge-12345678.ap-northeast-1.elb.amazonaws.com",
                        "health_check.#" => "1",
                        "id" => "hogehoge",
                        "idle_timeout" => "60",
                        "instances.#" => "1",
                        "listener.#" => "1",
                        "name" => "hoge",
                        "security_groups.#" => "2",
                        "source_security_group" => "default",
                        "subnets.#" => "2"
                      }
                    }
                  },
                  "aws_s3_bucket.hoge" => {
                    "type" => "aws_s3_bucket",
                    "primary" => {
                      "id" => "hoge",
                      "attributes" => {
                        "acl" => "private",
                        "bucket" => "hoge",
                        "id" => "hoge"
                      }
                    }
                  },
                  "aws_s3_bucket.fuga" => {
                    "type" => "aws_s3_bucket",
                    "primary" => {
                      "id" => "fuga",
                      "attributes" => {
                        "acl" => "private",
                        "bucket" => "fuga",
                        "id" => "fuga"
                      }
                    }
                  },
                }
              }
            ]
          }
        end

        before do
          allow(klass).to receive(:tf).and_return(tf)
          allow(klass).to receive(:tfstate).and_return(tfstate)
        end

        context "without --tfstate" do
          it "should flush tf to stdout" do
            expect(STDOUT).to receive(:puts).with(tf)
            described_class.new.invoke(command, [], {})
          end
        end

        context "with --tfstate" do
          it "should flush state to stdout" do
            expect(STDOUT).to receive(:puts).with(JSON.pretty_generate(initial_tfstate))
            described_class.new.invoke(command, [], { tfstate: true })
          end
        end

        context "with --tfstate --merge TFSTATE" do
          it "should flush merged tfstate to stdout" do
            expect(STDOUT).to receive(:puts).with(JSON.pretty_generate(merged_tfstate))
            described_class.new.invoke(command, [], { tfstate: true, merge: tfstate_fixture_path })
          end
        end

        context "with --tfstate --merge TFSTATE --overwrite" do
          # Work on a throwaway copy so the shared fixture is not mutated.
          before do
            @tmp_tfstate = Tempfile.new("tfstate")
            @tmp_tfstate.write(open(tfstate_fixture_path).read)
            @tmp_tfstate.flush
          end

          it "should overwrite passed tfstate" do
            described_class.new.invoke(command, [], { tfstate: true, merge: @tmp_tfstate.path, overwrite: true })
            expect(open(@tmp_tfstate.path).read).to eq JSON.pretty_generate(merged_tfstate)
          end

          after do
            @tmp_tfstate.close
            @tmp_tfstate.unlink
          end
        end

        context "with --assumes and without --tfstate" do
          it "should switch roles and export tf" do
            expect(klass).to receive(:tf).with(no_args)
            described_class.new.invoke(command, [], {
              assume: 'arn:aws:iam::123456789123:role/test-role',
              region: 'ap-northeast-1'
            })
          end
        end

        context "with --assumes and --tfstate" do
          it "should switch roles and export tfstate" do
            expect(klass).to receive(:tfstate).with(no_args)
            described_class.new.invoke(command, [], {
              assume: 'arn:aws:iam::123456789123:role/test-role',
              region: 'ap-northeast-1',
              tfstate: true
            })
          end
        end

        context "with --assumes and --tfstate --merge TFSTATE" do
          it "should switch roles and export merged tfstate" do
            expect(klass).to receive(:tfstate).with(no_args)
            described_class.new.invoke(command, [], {
              assume: 'arn:aws:iam::123456789123:role/test-role',
              region: 'ap-northeast-1',
              tfstate: true,
              merge: tfstate_fixture_path
            })
          end
        end
      end
    end
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/s3_spec.rb | spec/lib/terraforming/resource/s3_spec.rb | require "spec_helper"
module Terraforming
  module Resource
    # Specs for the S3 exporter. Per the expectations below, a bucket is
    # only exported when its LocationConstraint matches the client's
    # region ("" is how S3 reports the classic us-east-1 region).
    describe S3 do
      let(:buckets) do
        [
          {
            creation_date: Time.parse("2014-01-01T12:12:12.000Z"),
            name: "hoge"
          },
          {
            creation_date: Time.parse("2015-01-01T00:00:00.000Z"),
            name: "fuga"
          },
          {
            creation_date: Time.parse("2015-01-01T00:00:00.000Z"),
            name: "piyo"
          }
        ]
      end

      let(:owner) do
        {
          display_name: "owner",
          id: "12345678abcdefgh12345678abcdefgh12345678abcdefgh12345678abcdefgh"
        }
      end

      # Bucket policy as the API returns it: a compact JSON string.
      let(:hoge_policy) do
        "{\"Version\":\"2012-10-17\",\"Id\":\"Policy123456789012\",\"Statement\":[{\"Sid\":\"Stmt123456789012\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::123456789012:user/hoge\"},\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::hoge/*\"}]}"
      end

      let(:hoge_location) do
        { location_constraint: "ap-northeast-1" }
      end

      let(:fuga_location) do
        { location_constraint: "ap-northeast-1" }
      end

      let(:piyo_location) do
        { location_constraint: "" }
      end

      context "from ap-northeast-1" do
        let(:client) do
          Aws::S3::Client.new(region: "ap-northeast-1", stub_responses: true)
        end

        before do
          client.stub_responses(:list_buckets, buckets: buckets, owner: owner)
          # Sequential stubs: first call returns hoge's policy, the
          # second raises NoSuchBucketPolicy (fuga has no policy).
          client.stub_responses(:get_bucket_policy, [
            { policy: hoge_policy },
            "NoSuchBucketPolicy",
          ])
          client.stub_responses(:get_bucket_location, [hoge_location, fuga_location, piyo_location])
        end

        describe ".tf" do
          it "should generate tf" do
            expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_s3_bucket" "hoge" {
bucket = "hoge"
acl = "private"
policy = <<POLICY
{
"Version": "2012-10-17",
"Id": "Policy123456789012",
"Statement": [
{
"Sid": "Stmt123456789012",
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::123456789012:user/hoge"
},
"Action": "s3:*",
"Resource": "arn:aws:s3:::hoge/*"
}
]
}
POLICY
}
resource "aws_s3_bucket" "fuga" {
bucket = "fuga"
acl = "private"
}
            EOS
          end
        end

        describe ".tfstate" do
          it "should generate tfstate" do
            expect(described_class.tfstate(client: client)).to eq({
              "aws_s3_bucket.hoge" => {
                "type" => "aws_s3_bucket",
                "primary" => {
                  "id" => "hoge",
                  "attributes" => {
                    "acl" => "private",
                    "bucket" => "hoge",
                    "force_destroy" => "false",
                    "id" => "hoge",
                    "policy" => "{\"Version\":\"2012-10-17\",\"Id\":\"Policy123456789012\",\"Statement\":[{\"Sid\":\"Stmt123456789012\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::123456789012:user/hoge\"},\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::hoge/*\"}]}",
                  }
                }
              },
              "aws_s3_bucket.fuga" => {
                "type" => "aws_s3_bucket",
                "primary" => {
                  "id" => "fuga",
                  "attributes" => {
                    "acl" => "private",
                    "bucket" => "fuga",
                    "force_destroy" => "false",
                    "id" => "fuga",
                    "policy" => "",
                  }
                }
              },
            })
          end
        end
      end

      # Only "piyo" (empty LocationConstraint) belongs to us-east-1.
      context "from us-east-1" do
        let(:client) do
          Aws::S3::Client.new(region: "us-east-1", stub_responses: true)
        end

        before do
          client.stub_responses(:list_buckets, buckets: buckets, owner: owner)
          client.stub_responses(:get_bucket_policy, [
            "NoSuchBucketPolicy",
          ])
          client.stub_responses(:get_bucket_location, [hoge_location, fuga_location, piyo_location])
        end

        describe ".tf" do
          it "should generate tf" do
            expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_s3_bucket" "piyo" {
bucket = "piyo"
acl = "private"
}
            EOS
          end
        end

        describe ".tfstate" do
          it "should generate tfstate" do
            expect(described_class.tfstate(client: client)).to eq({
              "aws_s3_bucket.piyo" => {
                "type" => "aws_s3_bucket",
                "primary" => {
                  "id" => "piyo",
                  "attributes" => {
                    "acl" => "private",
                    "bucket" => "piyo",
                    "force_destroy" => "false",
                    "id" => "piyo",
                    "policy" => "",
                  }
                }
              },
            })
          end
        end
      end
    end
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/elasti_cache_cluster_spec.rb | spec/lib/terraforming/resource/elasti_cache_cluster_spec.rb | require "spec_helper"
module Terraforming
  module Resource
    # Specs for the ElastiCache cluster exporter. Three fixture clusters:
    # "hoge" (memcached, security_group_ids set), "fuga" (redis, classic
    # cache_security_groups set), and "piyo" (redis plus a notification
    # topic, which must surface as notification_topic_arn).
    describe ElastiCacheCluster do
      let(:client) do
        Aws::ElastiCache::Client.new(stub_responses: true)
      end

      let(:cache_clusters) do
        [
          {
            cache_cluster_id: "hoge",
            # memcached exposes the configuration endpoint; its port is
            # the one expected in the generated tf below.
            configuration_endpoint: {
              address: "hoge.abc123.cfg.apne1.cache.amazonaws.com",
              port: 11211
            },
            client_download_landing_page: "https://console.aws.amazon.com/elasticache/home#client-download:",
            cache_node_type: "cache.m1.small",
            engine: "memcached",
            engine_version: "1.4.5",
            cache_cluster_status: "available",
            num_cache_nodes: 1,
            preferred_availability_zone: "ap-northeast-1b",
            cache_cluster_create_time: Time.parse("2014-06-25 00:00:00 UTC"),
            preferred_maintenance_window: "fri:20:00-fri:21:00",
            pending_modified_values: {},
            cache_security_groups: [],
            cache_parameter_group: {
              cache_parameter_group_name: "default.memcached1.4",
              parameter_apply_status: "in-sync",
              cache_node_ids_to_reboot: []
            },
            cache_subnet_group_name: "subnet-hoge",
            cache_nodes: [
              {
                cache_node_id: "0001",
                cache_node_status: "available",
                cache_node_create_time: Time.parse("2014-08-28 12:51:55 UTC"),
                endpoint: {
                  address: "hoge.abc123.0001.apne1.cache.amazonaws.com",
                  port: 11211
                },
                parameter_group_status: "in-sync",
                customer_availability_zone: "ap-northeast-1b"
              }
            ],
            auto_minor_version_upgrade: false,
            security_groups: [
              { security_group_id: "sg-abcd1234", status: "active" }
            ]
          },
          {
            cache_cluster_id: "fuga",
            client_download_landing_page: "https://console.aws.amazon.com/elasticache/home#client-download:",
            cache_node_type: "cache.t2.micro",
            engine: "redis",
            engine_version: "2.8.6",
            cache_cluster_status: "available",
            num_cache_nodes: 1,
            preferred_availability_zone: "ap-northeast-1b",
            cache_cluster_create_time: Time.parse("2014-06-25 12:34:56 UTC"),
            preferred_maintenance_window: "fri:20:00-fri:21:00",
            pending_modified_values: {},
            cache_security_groups: [
              { cache_security_group_name: "sg-hoge", status: "active" },
            ],
            cache_parameter_group: {
              cache_parameter_group_name: "default.redis2.8",
              parameter_apply_status: "in-sync",
              cache_node_ids_to_reboot: []
            },
            cache_subnet_group_name: "subnet-fuga",
            cache_nodes: [
              {
                cache_node_id: "0001",
                cache_node_status: "available",
                cache_node_create_time: Time.parse("2014-08-28 12:51:55 UTC"),
                endpoint: {
                  address: "fuga.def456.0001.apne1.cache.amazonaws.com",
                  port: 6379
                },
                parameter_group_status: "in-sync",
                customer_availability_zone: "ap-northeast-1b"
              }
            ],
            auto_minor_version_upgrade: false,
            security_groups: [],
          },
          {
            cache_cluster_id: "piyo",
            client_download_landing_page: "https://console.aws.amazon.com/elasticache/home#client-download:",
            cache_node_type: "cache.t2.micro",
            engine: "redis",
            engine_version: "2.8.6",
            cache_cluster_status: "available",
            num_cache_nodes: 1,
            preferred_availability_zone: "ap-northeast-1b",
            cache_cluster_create_time: Time.parse("2014-06-25 12:34:56 UTC"),
            preferred_maintenance_window: "fri:20:00-fri:21:00",
            pending_modified_values: {},
            cache_security_groups: [
              { cache_security_group_name: "sg-hoge", status: "active" },
            ],
            cache_parameter_group: {
              cache_parameter_group_name: "default.redis2.8",
              parameter_apply_status: "in-sync",
              cache_node_ids_to_reboot: []
            },
            cache_subnet_group_name: "subnet-fuga",
            cache_nodes: [
              {
                cache_node_id: "0001",
                cache_node_status: "available",
                cache_node_create_time: Time.parse("2014-08-28 12:51:55 UTC"),
                endpoint: {
                  address: "fuga.def456.0001.apne1.cache.amazonaws.com",
                  port: 6379
                },
                parameter_group_status: "in-sync",
                customer_availability_zone: "ap-northeast-1b"
              }
            ],
            auto_minor_version_upgrade: false,
            security_groups: [],
            notification_configuration: {
              topic_arn: "arn:aws:sns:ap-northeast-1:123456789012:test",
              topic_status: "active",
            },
          },
        ]
      end

      before do
        client.stub_responses(:describe_cache_clusters, cache_clusters: cache_clusters)
      end

      describe ".tf" do
        it "should generate tf" do
          expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_elasticache_cluster" "hoge" {
cluster_id = "hoge"
engine = "memcached"
engine_version = "1.4.5"
node_type = "cache.m1.small"
num_cache_nodes = 1
parameter_group_name = "default.memcached1.4"
port = 11211
subnet_group_name = "subnet-hoge"
security_group_ids = ["sg-abcd1234"]
}
resource "aws_elasticache_cluster" "fuga" {
cluster_id = "fuga"
engine = "redis"
engine_version = "2.8.6"
node_type = "cache.t2.micro"
num_cache_nodes = 1
parameter_group_name = "default.redis2.8"
port = 6379
security_group_names = ["sg-hoge"]
}
resource "aws_elasticache_cluster" "piyo" {
cluster_id = "piyo"
engine = "redis"
engine_version = "2.8.6"
node_type = "cache.t2.micro"
num_cache_nodes = 1
parameter_group_name = "default.redis2.8"
port = 6379
security_group_names = ["sg-hoge"]
notification_topic_arn = "arn:aws:sns:ap-northeast-1:123456789012:test"
}
          EOS
        end
      end

      describe ".tfstate" do
        it "should generate tfstate" do
          expect(described_class.tfstate(client: client)).to eq({
            "aws_elasticache_cluster.hoge" => {
              "type" => "aws_elasticache_cluster",
              "primary" => {
                "id" => "hoge",
                "attributes" => {
                  "cache_nodes.#" => "1",
                  "cluster_id" => "hoge",
                  "engine" => "memcached",
                  "engine_version" => "1.4.5",
                  "id" => "hoge",
                  "node_type" => "cache.m1.small",
                  "num_cache_nodes" => "1",
                  "parameter_group_name" => "default.memcached1.4",
                  "security_group_ids.#" => "1",
                  "security_group_names.#" => "0",
                  "subnet_group_name" => "subnet-hoge",
                  "tags.#" => "0",
                  "port" => "11211"
                }
              }
            },
            "aws_elasticache_cluster.fuga" => {
              "type" => "aws_elasticache_cluster",
              "primary" => {
                "id" => "fuga",
                "attributes" => {
                  "cache_nodes.#" => "1",
                  "cluster_id" => "fuga",
                  "engine" => "redis",
                  "engine_version" => "2.8.6",
                  "id" => "fuga",
                  "node_type" => "cache.t2.micro",
                  "num_cache_nodes" => "1",
                  "parameter_group_name" => "default.redis2.8",
                  "security_group_ids.#" => "0",
                  "security_group_names.#" => "1",
                  "subnet_group_name" => "subnet-fuga",
                  "tags.#" => "0",
                  "port" => "6379"
                }
              }
            },
            "aws_elasticache_cluster.piyo" => {
              "type" => "aws_elasticache_cluster",
              "primary" => {
                "id" => "piyo",
                "attributes" => {
                  "cache_nodes.#" => "1",
                  "cluster_id" => "piyo",
                  "engine" => "redis",
                  "engine_version" => "2.8.6",
                  "id" => "piyo",
                  "node_type" => "cache.t2.micro",
                  "num_cache_nodes" => "1",
                  "parameter_group_name" => "default.redis2.8",
                  "security_group_ids.#" => "0",
                  "security_group_names.#" => "1",
                  "subnet_group_name" => "subnet-fuga",
                  "tags.#" => "0",
                  "port" => "6379",
                  "notification_topic_arn" => "arn:aws:sns:ap-northeast-1:123456789012:test"
                }
              }
            }
          })
        end
      end
    end
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_role_spec.rb | spec/lib/terraforming/resource/iam_role_spec.rb | require "spec_helper"
module Terraforming
  module Resource
    # Specs for the IAM role exporter. The assume_role_policy_document
    # fixtures are URL-encoded JSON, exactly as the IAM API returns them;
    # the exporter is expected to decode and pretty-print them.
    describe IAMRole do
      let(:client) do
        Aws::IAM::Client.new(stub_responses: true)
      end

      let(:roles) do
        [
          {
            path: "/",
            role_name: "hoge_role",
            role_id: "ABCDEFGHIJKLMN1234567",
            arn: "arn:aws:iam::123456789012:role/hoge_role",
            create_date: Time.parse("2015-04-01 12:34:56 UTC"),
            assume_role_policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%22%22%2C%22Effect%22%3A%22Allow%22%2C%22Principal%22%3A%7B%22Service%22%3A%22ec2.amazonaws.com%22%7D%2C%22Action%22%3A%22sts%3AAssumeRole%22%7D%5D%7D",
          },
          {
            path: "/system/",
            role_name: "fuga_role",
            role_id: "OPQRSTUVWXYZA8901234",
            arn: "arn:aws:iam::345678901234:role/fuga_role",
            create_date: Time.parse("2015-05-01 12:34:56 UTC"),
            assume_role_policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%221%22%2C%22Effect%22%3A%22Allow%22%2C%22Principal%22%3A%7B%22Service%22%3A%22elastictranscoder.amazonaws.com%22%7D%2C%22Action%22%3A%22sts%3AAssumeRole%22%7D%5D%7D",
          },
        ]
      end

      before do
        client.stub_responses(:list_roles, roles: roles)
      end

      describe ".tf" do
        it "should generate tf" do
          expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_role" "hoge_role" {
name = "hoge_role"
path = "/"
assume_role_policy = <<POLICY
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "",
"Effect": "Allow",
"Principal": {
"Service": "ec2.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
POLICY
}
resource "aws_iam_role" "fuga_role" {
name = "fuga_role"
path = "/system/"
assume_role_policy = <<POLICY
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "1",
"Effect": "Allow",
"Principal": {
"Service": "elastictranscoder.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
POLICY
}
          EOS
        end
      end

      describe ".tfstate" do
        it "should generate tfstate" do
          expect(described_class.tfstate(client: client)).to eq({
            "aws_iam_role.hoge_role" => {
              "type" => "aws_iam_role",
              "primary" => {
                "id" => "hoge_role",
                "attributes" => {
                  "arn" => "arn:aws:iam::123456789012:role/hoge_role",
                  "assume_role_policy" => "{\n  \"Version\": \"2008-10-17\",\n  \"Statement\": [\n    {\n      \"Sid\": \"\",\n      \"Effect\": \"Allow\",\n      \"Principal\": {\n        \"Service\": \"ec2.amazonaws.com\"\n      },\n      \"Action\": \"sts:AssumeRole\"\n    }\n  ]\n}\n",
                  "id" => "hoge_role",
                  "name" => "hoge_role",
                  "path" => "/",
                  "unique_id" => "ABCDEFGHIJKLMN1234567",
                }
              }
            },
            "aws_iam_role.fuga_role" => {
              "type" => "aws_iam_role",
              "primary" => {
                "id" => "fuga_role",
                "attributes" => {
                  "arn" => "arn:aws:iam::345678901234:role/fuga_role",
                  "assume_role_policy" => "{\n  \"Version\": \"2008-10-17\",\n  \"Statement\": [\n    {\n      \"Sid\": \"1\",\n      \"Effect\": \"Allow\",\n      \"Principal\": {\n        \"Service\": \"elastictranscoder.amazonaws.com\"\n      },\n      \"Action\": \"sts:AssumeRole\"\n    }\n  ]\n}\n",
                  "id" => "fuga_role",
                  "name" => "fuga_role",
                  "path" => "/system/",
                  "unique_id" => "OPQRSTUVWXYZA8901234",
                }
              }
            },
          })
        end
      end
    end
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/sns_topic_spec.rb | spec/lib/terraforming/resource/sns_topic_spec.rb | require "spec_helper"
module Terraforming
  module Resource
    # Specs for the SNS topic exporter. Topic attributes are stubbed as
    # the string-keyed map get_topic_attributes returns; the exporter is
    # expected to pretty-print the Policy / DeliveryPolicy JSON in tf
    # output but keep them compact in the tfstate attributes.
    describe SNSTopic do
      let(:client) do
        Aws::SNS::Client.new(stub_responses: true)
      end

      let(:topics) do
        [
          Aws::SNS::Types::Topic.new(topic_arn: "arn:aws:sns:us-west-2:012345678901:topicOfFanciness"),
        ]
      end

      let(:attributes) do
        {
          "SubscriptionsConfirmed" => "1",
          "DisplayName" => "topicOfFancinessDisplayName",
          "SubscriptionsDeleted" => "0",
          "EffectiveDeliveryPolicy" => "{\"http\":{\"defaultHealthyRetryPolicy\":{\"minDelayTarget\":2,\"maxDelayTarget\":20,\"numRetries\":12,\"numMaxDelayRetries\":0,\"numNoDelayRetries\":0,\"numMinDelayRetries\":12,\"backoffFunction\":\"linear\"},\"disableSubscriptionOverrides\":false}}",
          "Owner" => "012345678901",
          "Policy" => "{\"Version\":\"2008-10-17\",\"Id\":\"__default_policy_ID\",\"Statement\":[{\"Sid\":\"__default_statement_ID\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"*\"},\"Action\":[\"SNS:GetTopicAttributes\",\"SNS:SetTopicAttributes\",\"SNS:AddPermission\",\"SNS:RemovePermission\",\"SNS:DeleteTopic\",\"SNS:Subscribe\",\"SNS:ListSubscriptionsByTopic\",\"SNS:Publish\",\"SNS:Receive\"],\"Resource\":\"arn:aws:sns:us-west-2:012345678901:topicOfFanciness\",\"Condition\":{\"StringEquals\":{\"AWS:SourceOwner\":\"012345678901\"}}}]}",
          "DeliveryPolicy" => "{\"http\":{\"defaultHealthyRetryPolicy\":{\"minDelayTarget\":2,\"maxDelayTarget\":20,\"numRetries\":12,\"numMaxDelayRetries\":0,\"numNoDelayRetries\":0,\"numMinDelayRetries\":12,\"backoffFunction\":\"linear\"},\"disableSubscriptionOverrides\":false}}",
          "TopicArn" => "arn:aws:sns:us-west-2:012345678901:topicOfFanciness",
          "SubscriptionsPending" => "0"
        }
      end

      before do
        client.stub_responses(:list_topics, topics: topics)
        client.stub_responses(:get_topic_attributes, attributes: attributes)
      end

      describe ".tf" do
        it "should generate tf" do
          expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_sns_topic" "topicOfFanciness" {
name = "topicOfFanciness"
display_name = "topicOfFancinessDisplayName"
policy = <<POLICY
{
"Version": "2008-10-17",
"Id": "__default_policy_ID",
"Statement": [
{
"Sid": "__default_statement_ID",
"Effect": "Allow",
"Principal": {
"AWS": "*"
},
"Action": [
"SNS:GetTopicAttributes",
"SNS:SetTopicAttributes",
"SNS:AddPermission",
"SNS:RemovePermission",
"SNS:DeleteTopic",
"SNS:Subscribe",
"SNS:ListSubscriptionsByTopic",
"SNS:Publish",
"SNS:Receive"
],
"Resource": "arn:aws:sns:us-west-2:012345678901:topicOfFanciness",
"Condition": {
"StringEquals": {
"AWS:SourceOwner": "012345678901"
}
}
}
]
}
POLICY
delivery_policy = <<POLICY
{
"http": {
"defaultHealthyRetryPolicy": {
"minDelayTarget": 2,
"maxDelayTarget": 20,
"numRetries": 12,
"numMaxDelayRetries": 0,
"numNoDelayRetries": 0,
"numMinDelayRetries": 12,
"backoffFunction": "linear"
},
"disableSubscriptionOverrides": false
}
}
POLICY
}
          EOS
        end
      end

      describe ".tfstate" do
        it "should generate tfstate" do
          expect(described_class.tfstate(client: client)).to eq({
            "aws_sns_topic.topicOfFanciness" => {
              "type" => "aws_sns_topic",
              "primary" => {
                "id" => "arn:aws:sns:us-west-2:012345678901:topicOfFanciness",
                "attributes" => {
                  "name" => "topicOfFanciness",
                  "id" => "arn:aws:sns:us-west-2:012345678901:topicOfFanciness",
                  "arn" => "arn:aws:sns:us-west-2:012345678901:topicOfFanciness",
                  "display_name" => "topicOfFancinessDisplayName",
                  "policy" => "{\"Version\":\"2008-10-17\",\"Id\":\"__default_policy_ID\",\"Statement\":[{\"Sid\":\"__default_statement_ID\",\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"*\"},\"Action\":[\"SNS:GetTopicAttributes\",\"SNS:SetTopicAttributes\",\"SNS:AddPermission\",\"SNS:RemovePermission\",\"SNS:DeleteTopic\",\"SNS:Subscribe\",\"SNS:ListSubscriptionsByTopic\",\"SNS:Publish\",\"SNS:Receive\"],\"Resource\":\"arn:aws:sns:us-west-2:012345678901:topicOfFanciness\",\"Condition\":{\"StringEquals\":{\"AWS:SourceOwner\":\"012345678901\"}}}]}",
                  "delivery_policy" => "{\"http\":{\"defaultHealthyRetryPolicy\":{\"minDelayTarget\":2,\"maxDelayTarget\":20,\"numRetries\":12,\"numMaxDelayRetries\":0,\"numNoDelayRetries\":0,\"numMinDelayRetries\":12,\"backoffFunction\":\"linear\"},\"disableSubscriptionOverrides\":false}}"
                },
              },
            }
          })
        end
      end
    end
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/launch_configuration_spec.rb | spec/lib/terraforming/resource/launch_configuration_spec.rb | require "spec_helper"
module Terraforming
  module Resource
    # Specs for the launch-configuration exporter. Two fixtures: one with
    # a single (root) block device, one with an additional EBS volume so
    # the ebs_block_device section is exercised.
    describe LaunchConfiguration do
      let(:client) do
        Aws::AutoScaling::Client.new(stub_responses: true)
      end

      let(:launch_configurations) do
        [
          {
            launch_configuration_name: "launch-123456789",
            launch_configuration_arn: "arn:aws:autoscaling:us-west-2:123456789:launchConfiguration:12345678a-123b-123c-123d-123456789abc:launchConfigurationName/launch-123456789",
            image_id: "ami-1234abcd",
            key_name: "dummy_key",
            security_groups: ["sg-1234abcd"],
            classic_link_vpc_id: nil,
            classic_link_vpc_security_groups: [],
            user_data: "",
            instance_type: "t2.small",
            kernel_id: "",
            ramdisk_id: "",
            block_device_mappings: [
              {
                virtual_name: nil,
                device_name: "/dev/sda1",
                ebs: {
                  snapshot_id: nil,
                  volume_size: 8,
                  volume_type: "standard",
                  delete_on_termination: true,
                  iops: nil,
                  encrypted: nil
                },
                no_device: nil
              }
            ],
            instance_monitoring: {
              enabled: false
            },
            # FIX: member name was misspelled "spot_prive"; the
            # DescribeLaunchConfigurations shape calls it spot_price.
            spot_price: nil,
            iam_instance_profile: nil,
            created_time: Time.parse("2016-03-05 01:23:45 UTC"),
            ebs_optimized: false,
            associate_public_ip_address: true,
            placement_tenancy: nil
          },
          {
            launch_configuration_name: "launch-234567891",
            launch_configuration_arn: "arn:aws:autoscaling:us-west-2:123456789:launchConfiguration:12345678a-123b-123c-123d-123456789abc:launchConfigurationName/launch-234567891",
            image_id: "ami-1234abcd",
            key_name: "dummy_key",
            security_groups: ["sg-1234abcd"],
            classic_link_vpc_id: nil,
            classic_link_vpc_security_groups: [],
            user_data: "",
            instance_type: "t2.small",
            kernel_id: "",
            ramdisk_id: "",
            block_device_mappings: [
              {
                virtual_name: nil,
                device_name: "/dev/sda1",
                ebs: {
                  snapshot_id: nil,
                  volume_size: 8,
                  volume_type: "standard",
                  delete_on_termination: true,
                  iops: nil,
                  encrypted: nil
                },
                no_device: nil
              },
              {
                virtual_name: nil,
                device_name: "/dev/sdb",
                ebs: {
                  snapshot_id: nil,
                  volume_size: 8,
                  volume_type: "standard",
                  delete_on_termination: true,
                  iops: nil,
                  encrypted: nil
                },
                no_device: nil
              }
            ],
            instance_monitoring: {
              enabled: false
            },
            # FIX: same "spot_prive" -> spot_price typo as above.
            spot_price: nil,
            iam_instance_profile: nil,
            created_time: Time.parse("2016-03-05 01:23:45 UTC"),
            ebs_optimized: false,
            associate_public_ip_address: true,
            placement_tenancy: nil
          }
        ]
      end

      before do
        client.stub_responses(
          :describe_launch_configurations,
          launch_configurations: launch_configurations
        )
      end

      describe ".tf" do
        it "should generate tf" do
          expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_launch_configuration" "launch-123456789" {
name = "launch-123456789"
image_id = "ami-1234abcd"
instance_type = "t2.small"
key_name = "dummy_key"
security_groups = ["sg-1234abcd"]
associate_public_ip_address = true
enable_monitoring = false
ebs_optimized = false
root_block_device {
volume_type = "standard"
volume_size = 8
delete_on_termination = true
}
}
resource "aws_launch_configuration" "launch-234567891" {
name = "launch-234567891"
image_id = "ami-1234abcd"
instance_type = "t2.small"
key_name = "dummy_key"
security_groups = ["sg-1234abcd"]
associate_public_ip_address = true
enable_monitoring = false
ebs_optimized = false
root_block_device {
volume_type = "standard"
volume_size = 8
delete_on_termination = true
}
ebs_block_device {
device_name = "/dev/sdb"
volume_type = "standard"
volume_size = 8
delete_on_termination = true
}
}
          EOS
        end
      end

      describe ".tfstate" do
        it "should generate tfstate" do
          expect(described_class.tfstate(client: client)).to eq({
            "aws_launch_configuration.launch-123456789" => {
              "type" => "aws_launch_configuration",
              "primary" => {
                "id" => "launch-123456789",
                "attributes" => {
                  "name" => "launch-123456789",
                  "image_id" => "ami-1234abcd",
                  "instance_type" => "t2.small",
                  "key_name" => "dummy_key",
                  "security_groups.#" => "1",
                  "associate_public_ip_address" => "true",
                  "user_data" => "",
                  "enable_monitoring" => "false",
                  "ebs_optimized" => "false",
                  "root_block_device.#" => "1",
                  "ebs_block_device.#" => "0",
                  "ephemeral_block_device.#" => "0",
                  # NOTE(review): 550527283 looks like Terraform's set-item
                  # hash for "sg-1234abcd" — keep in sync with the exporter.
                  "security_groups.550527283" => "sg-1234abcd"
                }
              }
            },
            "aws_launch_configuration.launch-234567891" => {
              "type" => "aws_launch_configuration",
              "primary" => {
                "id" => "launch-234567891",
                "attributes" => {
                  "name" => "launch-234567891",
                  "image_id" => "ami-1234abcd",
                  "instance_type" => "t2.small",
                  "key_name" => "dummy_key",
                  "security_groups.#" => "1",
                  "associate_public_ip_address" => "true",
                  "user_data" => "",
                  "enable_monitoring" => "false",
                  "ebs_optimized" => "false",
                  "root_block_device.#" => "1",
                  "ebs_block_device.#" => "1",
                  "ephemeral_block_device.#" => "0",
                  "security_groups.550527283" => "sg-1234abcd"
                }
              }
            }
          })
        end
      end
    end
  end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/sqs_spec.rb | spec/lib/terraforming/resource/sqs_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe SQS do
let(:client) do
Aws::SQS::Client.new(stub_responses: true)
end
let(:queue_urls) do
[
"https://sqs.ap-northeast-1.amazonaws.com/123456789012/test",
]
end
let(:attributes) do
{
"QueueArn" => "arn:aws:sqs:ap-northeast-1:123456789012:test",
"ApproximateNumberOfMessages" => "0",
"ApproximateNumberOfMessagesNotVisible" => "0",
"ApproximateNumberOfMessagesDelayed" => "0",
"CreatedTimestamp" => "1456122200",
"LastModifiedTimestamp" => "1456122200",
"VisibilityTimeout" => "30",
"MaximumMessageSize" => "262144",
"MessageRetentionPeriod" => "345600",
"DelaySeconds" => "10",
"ReceiveMessageWaitTimeSeconds" => "10",
"Policy" => "{\"Version\":\"2012-10-17\",\"Id\":\"arn:aws:sqs:ap-northeast-1:123456789012:test/SQSDefaultPolicy\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::987654321098:root\"},\"Action\":\"SQS:*\",\"Resource\":\"arn:aws:sqs:ap-northeast-1:123456789012:test\"}]}",
"RedrivePolicy" => "{\"deadLetterTargetArn\":\"arn:aws:sqs:ap-northeast-1:123456789012:dead\",\"maxReceiveCount\":3}",
}
end
before do
client.stub_responses(:list_queues, queue_urls: queue_urls)
client.stub_responses(:get_queue_attributes, attributes: attributes)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_sqs_queue" "test" {
name = "test"
visibility_timeout_seconds = 30
message_retention_seconds = 345600
max_message_size = 262144
delay_seconds = 10
receive_wait_time_seconds = 10
policy = <<POLICY
{
"Version": "2012-10-17",
"Id": "arn:aws:sqs:ap-northeast-1:123456789012:test/SQSDefaultPolicy",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::987654321098:root"
},
"Action": "SQS:*",
"Resource": "arn:aws:sqs:ap-northeast-1:123456789012:test"
}
]
}
POLICY
redrive_policy = <<POLICY
{
"deadLetterTargetArn": "arn:aws:sqs:ap-northeast-1:123456789012:dead",
"maxReceiveCount": 3
}
POLICY
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_sqs_queue.test" => {
"type" => "aws_sqs_queue",
"primary" => {
"id" => "https://sqs.ap-northeast-1.amazonaws.com/123456789012/test",
"attributes" => {
"name" => "test",
"id" => "https://sqs.ap-northeast-1.amazonaws.com/123456789012/test",
"arn" => "arn:aws:sqs:ap-northeast-1:123456789012:test",
"visibility_timeout_seconds" => "30",
"message_retention_seconds" => "345600",
"max_message_size" => "262144",
"delay_seconds" => "10",
"receive_wait_time_seconds" => "10",
"policy" => "{\"Version\":\"2012-10-17\",\"Id\":\"arn:aws:sqs:ap-northeast-1:123456789012:test/SQSDefaultPolicy\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::987654321098:root\"},\"Action\":\"SQS:*\",\"Resource\":\"arn:aws:sqs:ap-northeast-1:123456789012:test\"}]}",
"redrive_policy" => "{\"deadLetterTargetArn\":\"arn:aws:sqs:ap-northeast-1:123456789012:dead\",\"maxReceiveCount\":3}",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/rds_spec.rb | spec/lib/terraforming/resource/rds_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe RDS do
let(:client) do
Aws::RDS::Client.new(stub_responses: true)
end
let(:db_instances) do
[
{
publicly_accessible: false,
master_username: "user",
license_model: "postgresql-license",
vpc_security_groups: [
{
status: "active",
vpc_security_group_id: "sg-1234abcd"
}
],
instance_create_time: Time.parse("2014-01-01T00:00:00.000Z"),
option_group_memberships: [
{
status: "in-sync",
option_group_name: "default:postgres-9-4"
}
],
pending_modified_values: {
},
engine: "postgres",
multi_az: false,
latest_restorable_time: Time.parse("2015-01-01T00:00:00Z"),
db_security_groups: [
],
db_parameter_groups: [
{
db_parameter_group_name: "default.postgres9.4",
parameter_apply_status: "in-sync"
}
],
auto_minor_version_upgrade: false,
preferred_backup_window: "23:00-23:30",
db_subnet_group: {
subnets: [
{
subnet_status: "Active",
subnet_identifier: "subnet-1234abcd",
subnet_availability_zone: {
name: "ap-northeast-1b"
}
},
{
subnet_status: "Active",
subnet_identifier: "subnet-5678efgh",
subnet_availability_zone: {
name: "ap-northeast-1c"
}
}
],
db_subnet_group_name: "hogedb-subnet",
vpc_id: "vpc-1234abcd",
db_subnet_group_description: "hogehoge",
subnet_group_status: "Complete"
},
read_replica_db_instance_identifiers: [
],
allocated_storage: 10,
backup_retention_period: 1,
db_name: "hogedb",
preferred_maintenance_window: "mon:00:00-mon:00:30",
endpoint: {
port: 5432,
address: "hogefuga.ap-northeast-1.rds.amazonaws.com"
},
db_instance_status: "available",
engine_version: "9.4.1",
availability_zone: "ap-northeast-1b",
storage_type: "standard",
dbi_resource_id: "db-1234ABCD5678EFGH1234ABCD56",
storage_encrypted: false,
db_instance_class: "db.m3.large",
db_instance_identifier: "hogedb"
}
]
end
before do
client.stub_responses(:describe_db_instances, db_instances: db_instances)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_db_instance" "hogedb" {
identifier = "hogedb"
allocated_storage = 10
storage_type = "standard"
engine = "postgres"
engine_version = "9.4.1"
instance_class = "db.m3.large"
name = "hogedb"
username = "user"
password = "xxxxxxxx"
port = 5432
publicly_accessible = false
availability_zone = "ap-northeast-1b"
security_group_names = []
vpc_security_group_ids = ["sg-1234abcd"]
db_subnet_group_name = "hogedb-subnet"
parameter_group_name = "default.postgres9.4"
multi_az = false
backup_retention_period = 1
backup_window = "23:00-23:30"
maintenance_window = "mon:00:00-mon:00:30"
final_snapshot_identifier = "hogedb-final"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_db_instance.hogedb" => {
"type" => "aws_db_instance",
"primary" => {
"id" => "hogedb",
"attributes" => {
"address" => "hogefuga.ap-northeast-1.rds.amazonaws.com",
"allocated_storage" => "10",
"availability_zone" => "ap-northeast-1b",
"backup_retention_period" => "1",
"backup_window" => "23:00-23:30",
"db_subnet_group_name" => "hogedb-subnet",
"endpoint" => "hogefuga.ap-northeast-1.rds.amazonaws.com",
"engine" => "postgres",
"engine_version" => "9.4.1",
"final_snapshot_identifier" => "hogedb-final",
"id" => "hogedb",
"identifier" => "hogedb",
"instance_class" => "db.m3.large",
"maintenance_window" => "mon:00:00-mon:00:30",
"multi_az" => "false",
"name" => "hogedb",
"parameter_group_name" => "default.postgres9.4",
"password" => "xxxxxxxx",
"port" => "5432",
"publicly_accessible" => "false",
"security_group_names.#" => "0",
"status" => "available",
"storage_type" => "standard",
"username" => "user",
"vpc_security_group_ids.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/network_interface_spec.rb | spec/lib/terraforming/resource/network_interface_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe NetworkInterface do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:network_interfaces) do
[
{
status: "available",
mac_address: "11:11:11:11:11:11",
source_dest_check: true,
vpc_id: "vpc-12345678",
description: "test network_interface",
network_interface_id: "eni-1234abcd",
private_ip_addresses: [
{
private_dns_name: "ip-1-1-1-1.ap-northeast-1.compute.internal",
private_ip_address: "1.1.1.1",
primary: true
}
],
requester_managed: false,
groups: [
],
private_dns_name: "ip-1-1-1-1.ap-northeast-1.compute.internal",
availability_zone: "ap-northeast-1a",
requester_id: "234567890123",
subnet_id: "subnet-1234abcd",
owner_id: "123456789012",
private_ip_address: "1.1.1.1",
},
{
status: "in-use",
mac_address: "22:22:22:22:22:22",
source_dest_check: false,
vpc_id: "vpc-12345678",
description: "test network_interface",
association: {
public_ip: "9.9.9.9",
association_id: "eipassoc-63446006",
public_dns_name: "ec2-9-9-9-9.ap-northeast-1.compute.amazonaws.com",
allocation_id: "eipalloc-7fe93c1a",
ip_owner_id: "123456789012"
},
network_interface_id: "eni-2345efgh",
private_ip_addresses: [
{
private_dns_name: "ip-2-2-2-2.ap-northeast-1.compute.internal",
association: {
public_ip: "9.9.9.9",
association_id: "eipassoc-63446006",
public_dns_name: "ec2-9-9-9-9.ap-northeast-1.compute.amazonaws.com",
allocation_id: "eipalloc-7fe93c1a",
ip_owner_id: "123456789012"
},
private_ip_address: "2.2.2.2",
primary: true
},
{
private_dns_name: "ip-3-3-3-3.ap-northeast-1.compute.internal",
private_ip_address: "3.3.3.3",
primary: false
},
],
requester_managed: false,
groups: [
{
group_name: "test",
group_id: "sg-12345678",
},
{
group_name: "test2",
group_id: "sg-23456789",
}
],
attachment: {
status: "attached",
device_index: 0,
attach_time: Time.parse("2015-04-01 12:34:56 UTC"),
instance_id: "i-12345678",
delete_on_termination: true,
attachment_id: "eni-attach-12345678",
instance_owner_id: "345678901234",
},
private_dns_name: "ip-2-2-2-2.ap-northeast-1.compute.internal",
availability_zone: "ap-northeast-1a",
requester_id: "234567890123",
subnet_id: "subnet-1234abcd",
owner_id: "123456789012",
private_ip_address: "2.2.2.2",
tag_set: [
{ key: "Name", value: "fuga" },
]
}
]
end
before do
client.stub_responses(:describe_network_interfaces, network_interfaces: network_interfaces)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_network_interface" "eni-1234abcd" {
subnet_id = "subnet-1234abcd"
private_ips = ["1.1.1.1"]
security_groups = []
source_dest_check = true
}
resource "aws_network_interface" "eni-2345efgh" {
subnet_id = "subnet-1234abcd"
private_ips = ["2.2.2.2", "3.3.3.3"]
security_groups = ["sg-12345678", "sg-23456789"]
source_dest_check = false
attachment {
instance = "i-12345678"
device_index = 0
}
tags {
"Name" = "fuga"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_network_interface.eni-1234abcd" => {
"type" => "aws_network_interface",
"primary" => {
"id" => "eni-1234abcd",
"attributes" => {
"attachment.#" => "0",
"id" => "eni-1234abcd",
"private_ips.#" => "1",
"security_groups.#" => "0",
"source_dest_check" => "true",
"subnet_id" => "subnet-1234abcd",
"tags.#" => "0",
}
}
},
"aws_network_interface.eni-2345efgh" => {
"type" => "aws_network_interface",
"primary" => {
"id" => "eni-2345efgh",
"attributes" => {
"attachment.#" => "1",
"id" => "eni-2345efgh",
"private_ips.#" => "2",
"security_groups.#" => "2",
"source_dest_check" => "false",
"subnet_id" => "subnet-1234abcd",
"tags.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/network_acl_spec.rb | spec/lib/terraforming/resource/network_acl_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe NetworkACL do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:network_acls) do
[
{
network_acl_id: "acl-1234abcd",
vpc_id: "vpc-1234abcd",
is_default: true,
entries: [
{
rule_number: 100,
protocol: "-1",
rule_action: "allow",
egress: false,
cidr_block: "0.0.0.0/0",
port_range: nil,
},
{
rule_number: 32767,
protocol: "-1",
rule_action: "deny",
egress: true,
cidr_block: "0.0.0.0/0",
port_range: {
from: 80,
to: 80,
},
},
],
associations: [
{
network_acl_association_id: "aclassoc-1234abcd",
network_acl_id: "acl-1234abcd",
subnet_id: "subnet-1234abcd"
},
{
network_acl_association_id: "aclassoc-5678efgh",
network_acl_id: "acl-1234abcd",
subnet_id: "subnet-5678efgh"
},
],
tags: [
{ key: "Name", value: "hoge" },
]
},
{
network_acl_id: "acl-5678efgh",
vpc_id: "vpc-5678efgh",
is_default: true,
entries: [
{
rule_number: 100,
protocol: "-1",
rule_action: "allow",
egress: false,
cidr_block: "0.0.0.0/0",
port_range: nil,
},
{
rule_number: 12345,
protocol: "1",
rule_action: "allow",
egress: false,
cidr_block: "0.0.0.0/0",
port_range: nil,
icmp_type_code: {
code: -1,
type: 10,
},
},
{
rule_number: 15000,
protocol: "1",
rule_action: "allow",
egress: true,
cidr_block: "0.0.0.0/0",
port_range: nil,
icmp_type_code: {
code: -1,
type: 4
},
},
{
rule_number: 32767,
protocol: "-1",
rule_action: "deny",
egress: true,
cidr_block: "0.0.0.0/0",
port_range: {
from: 80,
to: 80
}
},
],
associations: [
{
network_acl_association_id: "aclassoc-9012ijkl",
network_acl_id: "acl-5678efgh",
subnet_id: "subnet-9012ijkl"
},
{
network_acl_association_id: "aclassoc-3456mnop",
network_acl_id: "acl-5678efgh",
subnet_id: "subnet-3456mnop"
},
],
tags: [
{ key: "Name", value: "fuga" },
]
},
]
end
before do
client.stub_responses(:describe_network_acls, network_acls: network_acls)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_network_acl" "hoge" {
vpc_id = "vpc-1234abcd"
subnet_ids = ["subnet-1234abcd", "subnet-5678efgh"]
ingress {
from_port = 0
to_port = 0
rule_no = 100
action = "allow"
protocol = "-1"
cidr_block = "0.0.0.0/0"
}
tags {
"Name" = "hoge"
}
}
resource "aws_network_acl" "fuga" {
vpc_id = "vpc-5678efgh"
subnet_ids = ["subnet-9012ijkl", "subnet-3456mnop"]
ingress {
from_port = 0
to_port = 0
rule_no = 100
action = "allow"
protocol = "-1"
cidr_block = "0.0.0.0/0"
}
ingress {
from_port = 0
to_port = 0
rule_no = 12345
action = "allow"
protocol = "1"
cidr_block = "0.0.0.0/0"
icmp_code = "-1"
icmp_type = "10"
}
egress {
from_port = 0
to_port = 0
rule_no = 15000
action = "allow"
protocol = "1"
cidr_block = "0.0.0.0/0"
icmp_code = "-1"
icmp_type = "4"
}
tags {
"Name" = "fuga"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_network_acl.hoge" => {
"type" => "aws_network_acl",
"primary" => {
"id" => "acl-1234abcd",
"attributes" => {
"egress.#" => "0",
"id" => "acl-1234abcd",
"ingress.#" => "1",
"subnet_ids.#" => "2",
"tags.#" => "1",
"vpc_id" => "vpc-1234abcd",
}
}
},
"aws_network_acl.fuga" => {
"type" => "aws_network_acl",
"primary" => {
"id" => "acl-5678efgh",
"attributes" => {
"egress.#" => "1",
"id" => "acl-5678efgh",
"ingress.#" => "2",
"subnet_ids.#" => "2",
"tags.#" => "1",
"vpc_id" => "vpc-5678efgh",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/efs_file_system_spec.rb | spec/lib/terraforming/resource/efs_file_system_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe EFSFileSystem do
let(:client) do
Aws::EFS::Client.new(stub_responses: true)
end
let(:efs_description_0) do
{
creation_time: Time.parse("2016-11-01 11:30:00 -0700"),
creation_token: "console-1234abcd-1234-abcd-a123-d34db33f0000",
file_system_id: "fs-0000abcd",
life_cycle_state: "available",
name: "efs_name_0",
number_of_mount_targets: 3,
owner_id: "999999999999",
performance_mode: "generalPurpose",
size_in_bytes: { value: 6144 },
tags: [],
}
end
let(:efs_description_1) do
{
creation_time: Time.parse("2016-10-24 11:42:21 -0700"),
creation_token: "console-0000abcd-4321-dcba-a123-d34db33f0000",
file_system_id: "fs-abcd1234",
life_cycle_state: "available",
name: "efs_name_1",
number_of_mount_targets: 3,
owner_id: "999999999999",
performance_mode: "generalPurpose",
size_in_bytes: { value: 23481234 },
tags: [],
}
end
before do
client.stub_responses(:describe_file_systems, file_systems: [efs_description_0, efs_description_1])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_efs_file_system" "fs-0000abcd" {
creation_token = "console-1234abcd-1234-abcd-a123-d34db33f0000"
file_system_id = "fs-0000abcd"
performance_mode = "generalPurpose"
tags {
Name = "efs_name_0"
}
}
resource "aws_efs_file_system" "fs-abcd1234" {
creation_token = "console-0000abcd-4321-dcba-a123-d34db33f0000"
file_system_id = "fs-abcd1234"
performance_mode = "generalPurpose"
tags {
Name = "efs_name_1"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_efs_file_system.fs-0000abcd" => {
"type" => "aws_efs_file_system",
"depends_on" => [],
"primary" => {
"id" => "fs-0000abcd",
"meta" => {},
"tainted" => false,
"attributes" => {
"creation_token" => "console-1234abcd-1234-abcd-a123-d34db33f0000",
"id" => "fs-0000abcd",
"performance_mode" => "generalPurpose",
"tags.%" => "1",
"tags.Name" => "efs_name_0"
},
},
"deposed" => [],
"provider" => "aws",
},
"aws_efs_file_system.fs-abcd1234" => {
"type" => "aws_efs_file_system",
"depends_on" => [],
"primary" => {
"id" => "fs-abcd1234",
"meta" => {},
"tainted" => false,
"attributes" => {
"creation_token" => "console-0000abcd-4321-dcba-a123-d34db33f0000",
"id" => "fs-abcd1234",
"performance_mode" => "generalPurpose",
"tags.%" => "1",
"tags.Name" => "efs_name_1"
},
},
"deposed" => [],
"provider" => "aws",
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/vpc_spec.rb | spec/lib/terraforming/resource/vpc_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe VPC do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:vpcs) do
[
{
vpc_id: "vpc-1234abcd",
state: "available",
cidr_block: "10.0.0.0/16",
dhcp_options_id: "dopt-1234abcd",
tags: [
{
key: "Name",
value: "hoge"
}
],
instance_tenancy: "default",
is_default: false
},
{
vpc_id: "vpc-5678efgh",
state: "available",
cidr_block: "10.0.0.0/16",
dhcp_options_id: "dopt-5678efgh",
tags: [
{
key: "Name",
value: "fuga"
}
],
instance_tenancy: "default",
is_default: false
}
]
end
before do
client.stub_responses(:describe_vpcs, vpcs: vpcs)
attr_stub_responses = []
%w(vpc-1234abcd vpc-5678efgh).each do |_vpc_id|
%i(enable_dns_hostnames enable_dns_support).each do |attr|
attr_stub_responses << { attr => { value: true } }
end
end
client.stub_responses(:describe_vpc_attribute, attr_stub_responses)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_vpc" "hoge" {
cidr_block = "10.0.0.0/16"
enable_dns_hostnames = true
enable_dns_support = true
instance_tenancy = "default"
tags {
"Name" = "hoge"
}
}
resource "aws_vpc" "fuga" {
cidr_block = "10.0.0.0/16"
enable_dns_hostnames = true
enable_dns_support = true
instance_tenancy = "default"
tags {
"Name" = "fuga"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_vpc.hoge" => {
"type" => "aws_vpc",
"primary" => {
"id" => "vpc-1234abcd",
"attributes" => {
"cidr_block" => "10.0.0.0/16",
"enable_dns_hostnames" => "true",
"enable_dns_support" => "true",
"id" => "vpc-1234abcd",
"instance_tenancy" => "default",
"tags.#" => "1",
}
}
},
"aws_vpc.fuga" => {
"type" => "aws_vpc",
"primary" => {
"id" => "vpc-5678efgh",
"attributes" => {
"cidr_block" => "10.0.0.0/16",
"enable_dns_hostnames" => "true",
"enable_dns_support" => "true",
"id" => "vpc-5678efgh",
"instance_tenancy" => "default",
"tags.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/route_table_spec.rb | spec/lib/terraforming/resource/route_table_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe RouteTable do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:route_tables) do
[
{
route_table_id: 'rtb-a12bcd34',
vpc_id: 'vpc-ab123cde',
routes: [
{
destination_cidr_block: '10.0.0.0/16',
destination_prefix_list_id: nil,
gateway_id: 'local',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '0.0.0.0/0',
destination_prefix_list_id: nil,
gateway_id: 'igw-1ab2345c',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '192.168.1.0/24',
destination_prefix_list_id: nil,
gateway_id: nil,
instance_id: 'i-ec12345a',
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '192.168.2.0/24',
destination_prefix_list_id: nil,
gateway_id: nil,
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: 'pcx-c56789de',
state: 'active'
}
],
associations: [
{
route_table_association_id: 'rtbassoc-b123456cd',
route_table_id: 'rtb-a12bcd34',
subnet_id: 'subnet-1234a567',
main: false
},
{
route_table_association_id: 'rtbassoc-e789012fg',
route_table_id: 'rtb-e56egf78',
subnet_id: 'subnet-8901b123',
main: false
}
],
propagating_vgws: [
{ gateway_id: 'vgw-1a4j20b' }
],
tags: [
{
key: 'Name',
value: 'my-route-table'
}
]
},
{
route_table_id: 'rtb-efgh5678',
vpc_id: 'vpc-ab123cde',
routes: [
{
destination_cidr_block: '0.0.0.0/0',
destination_prefix_list_id: nil,
gateway_id: 'vgw-2345cdef',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '172.18.0.0/16',
destination_prefix_list_id: nil,
gateway_id: 'vgw-2345dddf',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active',
origin: 'EnableVgwRoutePropagation'
},
{
destination_cidr_block: '10.18.0.0/16',
destination_prefix_list_id: '1234567',
gateway_id: 'vgw-2115dddf',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
],
associations: [
],
tags: [
{
key: 'Name',
value: 'my-route-table-2'
}
]
}
]
end
before do
client.stub_responses(:describe_route_tables, route_tables: route_tables)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_route_table" "my-route-table" {
vpc_id = "vpc-ab123cde"
route {
cidr_block = "0.0.0.0/0"
gateway_id = "igw-1ab2345c"
}
route {
cidr_block = "192.168.1.0/24"
instance_id = "i-ec12345a"
}
route {
cidr_block = "192.168.2.0/24"
vpc_peering_connection_id = "pcx-c56789de"
}
propagating_vgws = ["vgw-1a4j20b"]
tags {
"Name" = "my-route-table"
}
}
resource "aws_route_table" "my-route-table-2" {
vpc_id = "vpc-ab123cde"
route {
cidr_block = "0.0.0.0/0"
gateway_id = "vgw-2345cdef"
}
tags {
"Name" = "my-route-table-2"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_route_table.my-route-table" => {
"type" => "aws_route_table",
"primary" => {
"id" => "rtb-a12bcd34",
"attributes" => {
"id" => "rtb-a12bcd34",
"vpc_id" => "vpc-ab123cde",
"tags.#" => "1",
"tags.Name" => "my-route-table",
"route.#" => "3",
"route.4066406027.cidr_block" => "0.0.0.0/0",
"route.4066406027.gateway_id" => "igw-1ab2345c",
"route.4066406027.instance_id" => "",
"route.4066406027.network_interface_id" => "",
"route.4066406027.vpc_peering_connection_id" => "",
"route.3686469914.cidr_block" => "192.168.1.0/24",
"route.3686469914.gateway_id" => "",
"route.3686469914.instance_id" => "i-ec12345a",
"route.3686469914.network_interface_id" => "",
"route.3686469914.vpc_peering_connection_id" => "",
"route.2351420441.cidr_block" => "192.168.2.0/24",
"route.2351420441.gateway_id" => "",
"route.2351420441.instance_id" => "",
"route.2351420441.network_interface_id" => "",
"route.2351420441.vpc_peering_connection_id" => "pcx-c56789de",
"propagating_vgws.#" => "1",
"propagating_vgws.772379535" => "vgw-1a4j20b"
}
}
},
"aws_route_table.my-route-table-2" => {
"type" => "aws_route_table",
"primary" => {
"id" => "rtb-efgh5678",
"attributes" => {
"id" => "rtb-efgh5678",
"vpc_id" => "vpc-ab123cde",
"tags.#" => "1",
"tags.Name" => "my-route-table-2",
"route.#" => "1",
"route.4031521715.cidr_block" => "0.0.0.0/0",
"route.4031521715.gateway_id" => "vgw-2345cdef",
"route.4031521715.instance_id" => "",
"route.4031521715.network_interface_id" => "",
"route.4031521715.vpc_peering_connection_id" => "",
"propagating_vgws.#" => "0",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/redshift_spec.rb | spec/lib/terraforming/resource/redshift_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe Redshift do
let(:client) do
Aws::Redshift::Client.new(stub_responses: true)
end
let(:clusters) do
[
{
cluster_identifier: "test",
node_type: "dc1.large",
cluster_status: "available",
modify_status: nil,
master_username: "testuser",
db_name: "testdb",
endpoint: {
address: "test.xxxxxxxxxxxx.ap-northeast-1.redshift.amazonaws.com",
port: 5439
},
cluster_create_time: Time.parse("2016-01-01T00:00:00Z"),
automated_snapshot_retention_period: 1,
cluster_security_groups: [],
vpc_security_groups: [],
cluster_parameter_groups: [
{
parameter_group_name: "default.redshift-1.0",
parameter_apply_status: "in-sync",
cluster_parameter_status_list: []
}
],
cluster_subnet_group_name: "test",
vpc_id: "vpc-xxxxxxxx",
availability_zone: "ap-northeast-1c",
preferred_maintenance_window: "fri:15:00-fri:15:30",
pending_modified_values: {
master_user_password: nil,
node_type: nil,
number_of_nodes: nil,
cluster_type: nil,
cluster_version: nil,
automated_snapshot_retention_period: nil,
cluster_identifier: nil
},
cluster_version: "1.0",
allow_version_upgrade: true,
number_of_nodes: 2,
publicly_accessible: true,
encrypted: true,
restore_status: {
status: "completed",
current_restore_rate_in_mega_bytes_per_second: 20.000,
snapshot_size_in_mega_bytes: 10000,
progress_in_mega_bytes: 10000,
elapsed_time_in_seconds: 500,
estimated_time_to_completion_in_seconds: 0
},
hsm_status: nil,
cluster_snapshot_copy_status: nil,
cluster_public_key: "ssh-rsa AAAAB3NzaC1yc2E... Amazon-Redshift\n",
cluster_nodes: [
{
node_role: "LEADER",
private_ip_address: "10.0.0.1",
public_ip_address: "192.0.2.1"
},
{
node_role: "COMPUTE-0",
private_ip_address: "10.0.0.2",
public_ip_address: "192.0.2.2"
},
{
node_role: "COMPUTE-1",
private_ip_address: "10.0.0.3",
public_ip_address: "192.0.2.3"
}
],
elastic_ip_status: nil,
cluster_revision_number: "1026",
tags: [],
kms_key_id: nil
}
]
end
before do
client.stub_responses(:describe_clusters, clusters: clusters)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_redshift_cluster" "test" {
cluster_identifier = "test"
database_name = "testdb"
cluster_type = "multi-node"
node_type = "dc1.large"
master_password = "xxxxxxxx"
master_username = "testuser"
availability_zone = "ap-northeast-1c"
preferred_maintenance_window = "fri:15:00-fri:15:30"
cluster_parameter_group_name = "default.redshift-1.0"
automated_snapshot_retention_period = "1"
port = "5439"
cluster_version = "1.0"
allow_version_upgrade = "true"
number_of_nodes = "2"
publicly_accessible = "true"
encrypted = "true"
skip_final_snapshot = "true"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_redshift_cluster.test" => {
"type" => "aws_redshift_cluster",
"primary" => {
"id" => "test",
"attributes" => {
"cluster_identifier" => "test",
"database_name" => "testdb",
"cluster_type" => "multi-node",
"node_type" => "dc1.large",
"master_password" => "xxxxxxxx",
"master_username" => "testuser",
"availability_zone" => "ap-northeast-1c",
"preferred_maintenance_window" => "fri:15:00-fri:15:30",
"cluster_parameter_group_name" => "default.redshift-1.0",
"automated_snapshot_retention_period" => "1",
"port" => "5439",
"cluster_version" => "1.0",
"allow_version_upgrade" => "true",
"number_of_nodes" => "2",
"publicly_accessible" => "true",
"encrypted" => "true",
"skip_final_snapshot" => "true",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/kms_key_spec.rb | spec/lib/terraforming/resource/kms_key_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe KMSKey do
let(:client) do
Aws::KMS::Client.new(stub_responses: true)
end
let(:keys) do
[
{
key_id: "1234abcd-12ab-34cd-56ef-1234567890ab",
key_arn: "arn:aws:kms:ap-northeast-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab",
},
{
key_id: "abcd1234-ab12-cd34-ef56-abcdef123456",
key_arn: "arn:aws:kms:ap-northeast-1:123456789012:key/abcd1234-ab12-cd34-ef56-abcdef123456",
},
{
key_id: "12ab34cd-56ef-12ab-34cd-12ab34cd56ef",
key_arn: "arn:aws:kms:ap-northeast-1:123456789012:key/12ab34cd-56ef-12ab-34cd-12ab34cd56ef",
},
{
key_id: "ab12cd34-ef56-ab12-cd34-ab12cd34ef56",
key_arn: "arn:aws:kms:ap-northeast-1:123456789012:key/ab12cd34-ef56-ab12-cd34-ab12cd34ef56",
},
]
end
let(:hoge_key) do
{
key_metadata: {
aws_account_id: "123456789012",
key_id: "1234abcd-12ab-34cd-56ef-1234567890ab",
arn: "arn:aws:kms:ap-northeast-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab",
creation_date: Time.new("2017-01-01 20:12:34 +0900"),
enabled: true,
description: "hoge",
key_usage: "ENCRYPT_DECRYPT",
key_state: "Enabled",
origin: "AWS_KMS",
},
}
end
let(:fuga_key) do
{
key_metadata: {
aws_account_id: "123456789012",
key_id: "abcd1234-ab12-cd34-ef56-abcdef123456",
arn: "arn:aws:kms:ap-northeast-1:123456789012:key/abcd1234-ab12-cd34-ef56-abcdef123456",
creation_date: Time.new("2017-01-09 12:34:56 +0900"),
enabled: true,
description: "fuga",
key_usage: "ENCRYPT_DECRYPT",
key_state: "Enabled",
origin: "AWS_KMS",
},
}
end
let(:foobar_key) do
{
key_metadata: {
aws_account_id: "123456789012",
key_id: "ab12cd34-ef56-ab12-cd34-ab12cd34ef56",
arn: "arn:aws:kms:ap-northeast-1:123456789012:key/ab12cd34-ef56-ab12-cd34-ab12cd34ef56",
creation_date: Time.new("2017-09-09 12:34:56 +0900"),
enabled: true,
description: "Default master key that protects my ACM private keys when no other key is foobar",
key_usage: "ENCRYPT_DECRYPT",
key_state: "PendingImport",
origin: "EXTERNAL",
},
}
end
let(:aliases) do
[
{
alias_name: "alias/aws/acm",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/aws/acm",
target_key_id: "12ab34cd-56ef-12ab-34cd-12ab34cd56ef"
},
{
alias_name: "alias/hoge",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/hoge",
target_key_id: "1234abcd-12ab-34cd-56ef-1234567890ab"
},
{
alias_name: "alias/fuga",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/fuga",
target_key_id: "abcd1234-ab12-cd34-ef56-abcdef123456"
},
{
alias_name: "alias/foobar",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/foobar",
target_key_id: "ab12cd34-ef56-ab12-cd34-ab12cd34ef56"
},
]
end
let(:hoge_policies) do
{
policy_names: ["default"],
}
end
let(:fuga_policies) do
{
policy_names: ["default"],
}
end
let(:hoge_policy) do
{
policy: <<-EOS,
{
"Version" : "2012-10-17",
"Id" : "key-default-1",
"Statement" : [ {
"Sid" : "Enable IAM User Permissions",
"Effect" : "Allow",
"Principal" : {
"AWS" : "arn:aws:iam::123456789012:root"
},
"Action" : "kms:*",
"Resource" : "*"
} ]
}
EOS
}
end
let(:fuga_policy) do
{
policy: <<-EOS,
{
"Version" : "2012-10-17",
"Id" : "key-consolepolicy-2",
"Statement" : [ {
"Sid" : "Enable IAM User Permissions",
"Effect" : "Allow",
"Principal" : {
"AWS" : "arn:aws:iam::123456789012:root"
},
"Action" : "kms:*",
"Resource" : "*"
}, {
"Sid" : "Allow access for Key Administrators",
"Effect" : "Allow",
"Action" : [ "kms:Create*", "kms:Describe*", "kms:Enable*", "kms:List*", "kms:Put*", "kms:Update*", "kms:Revoke*", "kms:Disable*", "kms:Get*", "kms:Delete*", "kms:ScheduleKeyDeletion", "kms:CancelKeyDeletion" ],
"Resource" : "*"
}, {
"Sid" : "Allow use of the key",
"Effect" : "Allow",
"Principal" : {
"AWS" : [ "arn:aws:iam::123456789012:user/user1", "arn:aws:iam::123456789012:user/user2" ]
},
"Action" : [ "kms:Encrypt", "kms:Decrypt", "kms:ReEncrypt*", "kms:GenerateDataKey*", "kms:DescribeKey" ],
"Resource" : "*"
}, {
"Sid" : "Allow attachment of persistent resources",
"Effect" : "Allow",
"Principal" : {
"AWS" : [ "arn:aws:iam::123456789012:user/user1", "arn:aws:iam::123456789012:user/user2" ]
},
"Action" : [ "kms:CreateGrant", "kms:ListGrants", "kms:RevokeGrant" ],
"Resource" : "*",
"Condition" : {
"Bool" : {
"kms:GrantIsForAWSResource" : "true"
}
}
} ]
}
EOS
}
end
let(:hoge_key_rotation_status) do
{
key_rotation_enabled: true,
}
end
let(:fuga_key_rotation_status) do
{
key_rotation_enabled: false,
}
end
before do
client.stub_responses(:list_keys, keys: keys)
client.stub_responses(:list_aliases, aliases: aliases)
client.stub_responses(:describe_key, [hoge_key, fuga_key, foobar_key])
client.stub_responses(:list_key_policies, [hoge_policies, fuga_policies])
client.stub_responses(:get_key_policy, [hoge_policy, fuga_policy])
client.stub_responses(:get_key_rotation_status, [hoge_key_rotation_status, fuga_key_rotation_status])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_kms_key" "1234abcd-12ab-34cd-56ef-1234567890ab" {
description = "hoge"
key_usage = "ENCRYPT_DECRYPT"
is_enabled = true
enable_key_rotation = true
policy = <<POLICY
{
"Version" : "2012-10-17",
"Id" : "key-default-1",
"Statement" : [ {
"Sid" : "Enable IAM User Permissions",
"Effect" : "Allow",
"Principal" : {
"AWS" : "arn:aws:iam::123456789012:root"
},
"Action" : "kms:*",
"Resource" : "*"
} ]
}
POLICY
}
resource "aws_kms_key" "abcd1234-ab12-cd34-ef56-abcdef123456" {
description = "fuga"
key_usage = "ENCRYPT_DECRYPT"
is_enabled = true
enable_key_rotation = false
policy = <<POLICY
{
"Version" : "2012-10-17",
"Id" : "key-consolepolicy-2",
"Statement" : [ {
"Sid" : "Enable IAM User Permissions",
"Effect" : "Allow",
"Principal" : {
"AWS" : "arn:aws:iam::123456789012:root"
},
"Action" : "kms:*",
"Resource" : "*"
}, {
"Sid" : "Allow access for Key Administrators",
"Effect" : "Allow",
"Action" : [ "kms:Create*", "kms:Describe*", "kms:Enable*", "kms:List*", "kms:Put*", "kms:Update*", "kms:Revoke*", "kms:Disable*", "kms:Get*", "kms:Delete*", "kms:ScheduleKeyDeletion", "kms:CancelKeyDeletion" ],
"Resource" : "*"
}, {
"Sid" : "Allow use of the key",
"Effect" : "Allow",
"Principal" : {
"AWS" : [ "arn:aws:iam::123456789012:user/user1", "arn:aws:iam::123456789012:user/user2" ]
},
"Action" : [ "kms:Encrypt", "kms:Decrypt", "kms:ReEncrypt*", "kms:GenerateDataKey*", "kms:DescribeKey" ],
"Resource" : "*"
}, {
"Sid" : "Allow attachment of persistent resources",
"Effect" : "Allow",
"Principal" : {
"AWS" : [ "arn:aws:iam::123456789012:user/user1", "arn:aws:iam::123456789012:user/user2" ]
},
"Action" : [ "kms:CreateGrant", "kms:ListGrants", "kms:RevokeGrant" ],
"Resource" : "*",
"Condition" : {
"Bool" : {
"kms:GrantIsForAWSResource" : "true"
}
}
} ]
}
POLICY
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_kms_key.1234abcd-12ab-34cd-56ef-1234567890ab" => {
"type" => "aws_kms_key",
"primary" => {
"id" => "1234abcd-12ab-34cd-56ef-1234567890ab",
"attributes" => {
"arn" => "arn:aws:kms:ap-northeast-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab",
"description" => "hoge",
"enable_key_rotation" => "true",
"id" => "1234abcd-12ab-34cd-56ef-1234567890ab",
"is_enabled" => "true",
"key_id" => "1234abcd-12ab-34cd-56ef-1234567890ab",
"key_usage" => "ENCRYPT_DECRYPT",
"policy" => "{\n \"Version\" : \"2012-10-17\",\n \"Id\" : \"key-default-1\",\n \"Statement\" : [ {\n \"Sid\" : \"Enable IAM User Permissions\",\n \"Effect\" : \"Allow\",\n \"Principal\" : {\n \"AWS\" : \"arn:aws:iam::123456789012:root\"\n },\n \"Action\" : \"kms:*\",\n \"Resource\" : \"*\"\n } ]\n}\n",
}
}
},
"aws_kms_key.abcd1234-ab12-cd34-ef56-abcdef123456" => {
"type" => "aws_kms_key",
"primary" => {
"id" => "abcd1234-ab12-cd34-ef56-abcdef123456",
"attributes" => {
"arn" => "arn:aws:kms:ap-northeast-1:123456789012:key/abcd1234-ab12-cd34-ef56-abcdef123456",
"description" => "fuga",
"enable_key_rotation" => "false",
"id" => "abcd1234-ab12-cd34-ef56-abcdef123456",
"is_enabled" => "true",
"key_id" => "abcd1234-ab12-cd34-ef56-abcdef123456",
"key_usage" => "ENCRYPT_DECRYPT",
"policy" => "{\n \"Version\" : \"2012-10-17\",\n \"Id\" : \"key-consolepolicy-2\",\n \"Statement\" : [ {\n \"Sid\" : \"Enable IAM User Permissions\",\n \"Effect\" : \"Allow\",\n \"Principal\" : {\n \"AWS\" : \"arn:aws:iam::123456789012:root\"\n },\n \"Action\" : \"kms:*\",\n \"Resource\" : \"*\"\n }, {\n \"Sid\" : \"Allow access for Key Administrators\",\n \"Effect\" : \"Allow\",\n \"Action\" : [ \"kms:Create*\", \"kms:Describe*\", \"kms:Enable*\", \"kms:List*\", \"kms:Put*\", \"kms:Update*\", \"kms:Revoke*\", \"kms:Disable*\", \"kms:Get*\", \"kms:Delete*\", \"kms:ScheduleKeyDeletion\", \"kms:CancelKeyDeletion\" ],\n \"Resource\" : \"*\"\n }, {\n \"Sid\" : \"Allow use of the key\",\n \"Effect\" : \"Allow\",\n \"Principal\" : {\n \"AWS\" : [ \"arn:aws:iam::123456789012:user/user1\", \"arn:aws:iam::123456789012:user/user2\" ]\n },\n \"Action\" : [ \"kms:Encrypt\", \"kms:Decrypt\", \"kms:ReEncrypt*\", \"kms:GenerateDataKey*\", \"kms:DescribeKey\" ],\n \"Resource\" : \"*\"\n }, {\n \"Sid\" : \"Allow attachment of persistent resources\",\n \"Effect\" : \"Allow\",\n \"Principal\" : {\n \"AWS\" : [ \"arn:aws:iam::123456789012:user/user1\", \"arn:aws:iam::123456789012:user/user2\" ]\n },\n \"Action\" : [ \"kms:CreateGrant\", \"kms:ListGrants\", \"kms:RevokeGrant\" ],\n \"Resource\" : \"*\",\n \"Condition\" : {\n \"Bool\" : {\n \"kms:GrantIsForAWSResource\" : \"true\"\n }\n }\n\n } ]\n}\n",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/db_subnet_group_spec.rb | spec/lib/terraforming/resource/db_subnet_group_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe DBSubnetGroup do
let(:client) do
Aws::RDS::Client.new(stub_responses: true)
end
let(:db_subnet_groups) do
[
{
subnets: [
{
subnet_status: "Active",
subnet_identifier: "subnet-1234abcd",
subnet_availability_zone: {
name: "ap-northeast-1c"
}
},
{
subnet_status: "Active",
subnet_identifier: "subnet-5678efgh",
subnet_availability_zone: {
name: "ap-northeast-1b"
}
}
],
db_subnet_group_name: "hoge",
vpc_id: "vpc-1234abcd",
db_subnet_group_description: "DB subnet group hoge",
subnet_group_status: "Complete"
},
{
subnets: [
{
subnet_status: "Active",
subnet_identifier: "subnet-9012ijkl",
subnet_availability_zone: {
name: "ap-northeast-1b"
}
},
{
subnet_status: "Active",
subnet_identifier: "subnet-3456mnop",
subnet_availability_zone: {
name: "ap-northeast-1c"
}
}
],
db_subnet_group_name: "fuga",
vpc_id: "vpc-5678efgh",
db_subnet_group_description: "DB subnet group fuga",
subnet_group_status: "Complete"
}
]
end
before do
client.stub_responses(:describe_db_subnet_groups, db_subnet_groups: db_subnet_groups)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_db_subnet_group" "hoge" {
name = "hoge"
description = "DB subnet group hoge"
subnet_ids = ["subnet-1234abcd", "subnet-5678efgh"]
}
resource "aws_db_subnet_group" "fuga" {
name = "fuga"
description = "DB subnet group fuga"
subnet_ids = ["subnet-9012ijkl", "subnet-3456mnop"]
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_db_subnet_group.hoge" => {
"type" => "aws_db_subnet_group",
"primary" => {
"id" => "hoge",
"attributes" => {
"description" => "DB subnet group hoge",
"name" => "hoge",
"subnet_ids.#" => "2",
}
}
},
"aws_db_subnet_group.fuga" => {
"type" => "aws_db_subnet_group",
"primary" => {
"id" => "fuga",
"attributes" => {
"description" => "DB subnet group fuga",
"name" => "fuga",
"subnet_ids.#" => "2",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/vpn_gateway_spec.rb | spec/lib/terraforming/resource/vpn_gateway_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe VPNGateway do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:vpn_gateways) do
[
{
vpn_gateway_id: "vgw-1234abcd",
vpc_attachments: [
vpc_id: "vpc-1234abcd",
state: "available"
],
availability_zone: "us-east-1c",
tags: [],
},
{
vpn_gateway_id: "vgw-5678efgh",
vpc_attachments: [
vpc_id: "vpc-5678efgh",
state: "available"
],
availability_zone: "us-east-1d",
tags: [
{
key: "Name",
value: "test"
}
]
}
]
end
before do
client.stub_responses(:describe_vpn_gateways, vpn_gateways: vpn_gateways)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_vpn_gateway" "vgw-1234abcd" {
vpc_id = "vpc-1234abcd"
availability_zone = "us-east-1c"
tags {
}
}
resource "aws_vpn_gateway" "test" {
vpc_id = "vpc-5678efgh"
availability_zone = "us-east-1d"
tags {
"Name" = "test"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_vpn_gateway.vgw-1234abcd" => {
"type" => "aws_vpn_gateway",
"primary" => {
"id" => "vgw-1234abcd",
"attributes" => {
"id" => "vgw-1234abcd",
"vpc_id" => "vpc-1234abcd",
"availability_zone" => "us-east-1c",
"tags.#" => "0",
}
}
},
"aws_vpn_gateway.test" => {
"type" => "aws_vpn_gateway",
"primary" => {
"id" => "vgw-5678efgh",
"attributes" => {
"id" => "vgw-5678efgh",
"vpc_id" => "vpc-5678efgh",
"availability_zone" => "us-east-1d",
"tags.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/elb_spec.rb | spec/lib/terraforming/resource/elb_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe ELB do
let(:client) do
Aws::ElasticLoadBalancing::Client.new(stub_responses: true)
end
let(:load_balancer_descriptions) do
[
{
subnets: [
"subnet-1234abcd",
"subnet-5678efgh"
],
canonical_hosted_zone_name_id: "12345678ABCDEF",
canonical_hosted_zone_name: "hoge-12345678.ap-northeast-1.elb.amazonaws.com",
listener_descriptions: [
{
listener: {
instance_port: 80,
ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/foobar",
load_balancer_port: 443,
protocol: "HTTPS",
instance_protocol: "HTTP"
},
policy_names: [
"AWSConsole-SSLNegotiationPolicy-foobar-1234567890123"
]
}
],
health_check: {
healthy_threshold: 10,
interval: 30,
target: "HTTP:8080/status",
timeout: 5,
unhealthy_threshold: 2
},
vpc_id: "vpc-1234abcd",
backend_server_descriptions: [],
instances: [
{
instance_id: "i-1234abcd"
}
],
dns_name: "hoge-12345678.ap-northeast-1.elb.amazonaws.com",
security_groups: [
"sg-1234abcd",
"sg-5678efgh"
],
policies: {
lb_cookie_stickiness_policies: [],
app_cookie_stickiness_policies: [],
other_policies: [
"ELBSecurityPolicy-2014-01",
"AWSConsole-SSLNegotiationPolicy-foobar-1234567890123"
]
},
load_balancer_name: "hoge",
created_time: Time.parse("2014-01-01T12:12:12.000Z"),
availability_zones: [
"ap-northeast-1b",
"ap-northeast-1c"
],
scheme: "internet-facing",
source_security_group: {
owner_alias: "123456789012",
group_name: "default"
}
},
{
subnets: [
"subnet-9012ijkl",
"subnet-3456mnop"
],
canonical_hosted_zone_name_id: "90123456GHIJKLM",
canonical_hosted_zone_name: "fuga-90123456.ap-northeast-1.elb.amazonaws.com",
listener_descriptions: [
{
listener: {
instance_port: 80,
ssl_certificate_id: "arn:aws:iam::345678901234:server-certificate/foobar",
load_balancer_port: 443,
protocol: "HTTPS",
instance_protocol: "HTTP"
},
policy_names: [
"AWSConsole-SSLNegotiationPolicy-foobar-1234567890123"
]
}
],
health_check: {
healthy_threshold: 10,
interval: 30,
target: "HTTP:8080/status",
timeout: 5,
unhealthy_threshold: 2
},
vpc_id: "",
backend_server_descriptions: [],
instances: [
{
instance_id: "i-5678efgh"
}
],
dns_name: "fuga-90123456.ap-northeast-1.elb.amazonaws.com",
security_groups: [
"sg-9012ijkl",
"sg-3456mnop"
],
policies: {
lb_cookie_stickiness_policies: [],
app_cookie_stickiness_policies: [],
other_policies: [
"ELBSecurityPolicy-2014-01",
"AWSConsole-SSLNegotiationPolicy-foobar-1234567890123"
]
},
load_balancer_name: "fuga",
created_time: Time.parse("2015-01-01T12:12:12.000Z"),
availability_zones: [
"ap-northeast-1b",
"ap-northeast-1c"
],
scheme: "internal",
source_security_group: {
owner_alias: "345678901234",
group_name: "elb"
}
}
]
end
let(:hoge_attributes) do
{
cross_zone_load_balancing: { enabled: true },
access_log: { enabled: false },
connection_draining: { enabled: true, timeout: 300 },
connection_settings: { idle_timeout: 60 },
additional_attributes: []
}
end
let(:fuga_attributes) do
{
cross_zone_load_balancing: { enabled: true },
access_log: {
enabled: true,
s3_bucket_name: "hoge-elb-logs",
emit_interval: 60,
s3_bucket_prefix: "fuga",
},
connection_draining: { enabled: true, timeout: 900 },
connection_settings: { idle_timeout: 90 },
additional_attributes: []
}
end
let(:tag_attributes) do
[{
tags: [
{ key: 'name', value: 'elb-1' }
]
}]
end
before do
client.stub_responses(:describe_load_balancers, load_balancer_descriptions: load_balancer_descriptions)
client.stub_responses(:describe_load_balancer_attributes, [
{ load_balancer_attributes: hoge_attributes },
{ load_balancer_attributes: fuga_attributes }
])
client.stub_responses(:describe_tags, tag_descriptions: tag_attributes)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_elb" "hoge" {
name = "hoge"
subnets = ["subnet-1234abcd", "subnet-5678efgh"]
security_groups = ["sg-1234abcd", "sg-5678efgh"]
instances = ["i-1234abcd"]
cross_zone_load_balancing = true
idle_timeout = 60
connection_draining = true
connection_draining_timeout = 300
internal = false
listener {
instance_port = 80
instance_protocol = "http"
lb_port = 443
lb_protocol = "https"
ssl_certificate_id = "arn:aws:iam::123456789012:server-certificate/foobar"
}
health_check {
healthy_threshold = 10
unhealthy_threshold = 2
interval = 30
target = "HTTP:8080/status"
timeout = 5
}
tags {
"name" = "elb-1"
}
}
resource "aws_elb" "fuga" {
name = "fuga"
availability_zones = ["ap-northeast-1b", "ap-northeast-1c"]
security_groups = ["sg-9012ijkl", "sg-3456mnop"]
instances = ["i-5678efgh"]
cross_zone_load_balancing = true
idle_timeout = 90
connection_draining = true
connection_draining_timeout = 900
internal = true
access_logs {
bucket = "hoge-elb-logs"
bucket_prefix = "fuga"
interval = 60
}
listener {
instance_port = 80
instance_protocol = "http"
lb_port = 443
lb_protocol = "https"
ssl_certificate_id = "arn:aws:iam::345678901234:server-certificate/foobar"
}
health_check {
healthy_threshold = 10
unhealthy_threshold = 2
interval = 30
target = "HTTP:8080/status"
timeout = 5
}
tags {
"name" = "elb-1"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_elb.hoge" => {
"type" => "aws_elb",
"primary" => {
"id" => "hoge",
"attributes" => {
"access_logs.#" => "0",
"availability_zones.#" => "2",
"connection_draining" => "true",
"connection_draining_timeout" => "300",
"cross_zone_load_balancing" => "true",
"dns_name" => "hoge-12345678.ap-northeast-1.elb.amazonaws.com",
"id" => "hoge",
"idle_timeout" => "60",
"instances.#" => "1",
"internal" => "false",
"name" => "hoge",
"source_security_group" => "default",
"health_check.#" => "1",
"health_check.362345074.healthy_threshold" => "10",
"health_check.362345074.interval" => "30",
"health_check.362345074.target" => "HTTP:8080/status",
"health_check.362345074.timeout" => "5",
"health_check.362345074.unhealthy_threshold" => "2",
"listener.#" => "1",
"listener.3051874140.instance_port" => "80",
"listener.3051874140.instance_protocol" => "http",
"listener.3051874140.lb_port" => "443",
"listener.3051874140.lb_protocol" => "https",
"listener.3051874140.ssl_certificate_id" => "arn:aws:iam::123456789012:server-certificate/foobar", "security_groups.#" => "2",
"security_groups.550527283" => "sg-1234abcd",
"security_groups.3942994537" => "sg-5678efgh",
"subnets.#" => "2",
"subnets.3229571749" => "subnet-1234abcd",
"subnets.195717631" => "subnet-5678efgh",
"instances.3520380136" => "i-1234abcd",
"tags.#" => "1",
"tags.name" => "elb-1"
}
}
},
"aws_elb.fuga" => {
"type" => "aws_elb",
"primary" => {
"id" => "fuga",
"attributes" => {
"access_logs.#" => "1",
"access_logs.0.bucket" => "hoge-elb-logs",
"access_logs.0.bucket_prefix" => "fuga",
"access_logs.0.interval" => "60",
"availability_zones.#" => "2",
"connection_draining" => "true",
"connection_draining_timeout" => "900",
"cross_zone_load_balancing" => "true",
"dns_name" => "fuga-90123456.ap-northeast-1.elb.amazonaws.com",
"id" => "fuga",
"idle_timeout" => "90",
"instances.#" => "1",
"internal" => "true",
"name" => "fuga",
"source_security_group" => "elb",
"health_check.#" => "1",
"health_check.362345074.healthy_threshold" => "10",
"health_check.362345074.interval" => "30",
"health_check.362345074.target" => "HTTP:8080/status",
"health_check.362345074.timeout" => "5",
"health_check.362345074.unhealthy_threshold" => "2",
"listener.#" => "1",
"listener.1674021574.instance_port" => "80",
"listener.1674021574.instance_protocol" => "http",
"listener.1674021574.lb_port" => "443",
"listener.1674021574.lb_protocol" => "https",
"listener.1674021574.ssl_certificate_id" => "arn:aws:iam::345678901234:server-certificate/foobar",
"security_groups.#" => "2",
"security_groups.2877768809" => "sg-9012ijkl",
"security_groups.1478442660" => "sg-3456mnop",
"subnets.#" => "2",
"subnets.1260945407" => "subnet-9012ijkl",
"subnets.3098543410" => "subnet-3456mnop",
"instances.436309938" => "i-5678efgh",
"tags.#" => "1",
"tags.name" => "elb-1"
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/sns_topic_subscription_spec.rb | spec/lib/terraforming/resource/sns_topic_subscription_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe SNSTopicSubscription do
let(:client) do
Aws::SNS::Client.new(stub_responses: true)
end
let(:subscriptions) do
[
Aws::SNS::Types::Subscription.new(subscription_arn: "arn:aws:sns:us-west-2:012345678901:a-cool-topic:000ff1ce-dead-beef-f00d-ea7food5a1d1"),
Aws::SNS::Types::Subscription.new(subscription_arn: "PendingConfirmation")
]
end
let(:attributes_regular) do
{
"Endpoint" => "arn:aws:sqs:us-west-2:012345678901:a-cool-queue",
"Protocol" => "sqs",
"RawMessageDelivery" => "false",
"ConfirmationWasAuthenticated" => "true",
"Owner" => "012345678901",
"SubscriptionArn" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic:000ff1ce-dead-beef-f00d-ea7food5a1d1",
"TopicArn" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic"
}
end
let(:attributes_email) do
{
"Endpoint" => "arn:aws:sqs:us-west-2:012345678901:a-cool-queue",
"Protocol" => "email-json",
"RawMessageDelivery" => "false",
"ConfirmationWasAuthenticated" => "true",
"Owner" => "012345678901",
"SubscriptionArn" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic:000ff1ce-dead-beef-f00d-ea7food5a1d1",
"TopicArn" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic"
}
end
before do
client.stub_responses(:list_subscriptions, subscriptions: subscriptions)
client.stub_responses(:get_subscription_attributes, attributes: attributes_regular)
end
describe ".tf" do
it "should generate tf for non-email subscriptions" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_sns_topic_subscription" "000ff1ce-dead-beef-f00d-ea7food5a1d1" {
topic_arn = "arn:aws:sns:us-west-2:012345678901:a-cool-topic"
protocol = "sqs"
endpoint = "arn:aws:sqs:us-west-2:012345678901:a-cool-queue"
raw_message_delivery = "false"
}
EOS
end
it "should generate commented tf for email subscriptions" do
client.stub_responses(:get_subscription_attributes, attributes: attributes_email)
expect(described_class.tf(client: client)).to eq <<-EOS
/*
resource "aws_sns_topic_subscription" "000ff1ce-dead-beef-f00d-ea7food5a1d1" {
topic_arn = "arn:aws:sns:us-west-2:012345678901:a-cool-topic"
protocol = "email-json"
endpoint = "arn:aws:sqs:us-west-2:012345678901:a-cool-queue"
raw_message_delivery = "false"
}
*/
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_sns_topic_subscription.000ff1ce-dead-beef-f00d-ea7food5a1d1" => {
"type" => "aws_sns_topic_subscription",
"primary" => {
"id" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic:000ff1ce-dead-beef-f00d-ea7food5a1d1",
"attributes" => {
"id" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic:000ff1ce-dead-beef-f00d-ea7food5a1d1",
"topic_arn" => "arn:aws:sns:us-west-2:012345678901:a-cool-topic",
"protocol" => "sqs",
"endpoint" => "arn:aws:sqs:us-west-2:012345678901:a-cool-queue",
"raw_message_delivery" => "false",
"confirmation_timeout_in_minutes" => "1",
"endpoint_auto_confirms" => "false"
},
},
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_group_membership_spec.rb | spec/lib/terraforming/resource/iam_group_membership_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMGroupMembership do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:groups) do
[
{
path: "/",
group_name: "hoge",
group_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:group/hoge",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
},
{
path: "/system/",
group_name: "fuga",
group_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:group/fuga",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
},
]
end
let(:hoge_group) do
{
path: "/",
group_name: "hoge",
group_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:group/hoge",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
}
end
let(:hoge_users) do
[
{
path: "/",
user_name: "foo",
user_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:user/foo",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
password_last_used: Time.parse("2015-04-01 15:00:00 UTC"),
},
]
end
let(:fuga_group) do
{
path: "/system/",
group_name: "fuga",
group_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:group/fuga",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
}
end
let(:fuga_users) do
[
{
path: "/system/",
user_name: "bar",
user_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:user/bar",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
password_last_used: Time.parse("2015-05-01 15:00:00 UTC"),
},
]
end
before do
client.stub_responses(:list_groups, groups: groups)
client.stub_responses(:get_group, [{ group: hoge_group, users: hoge_users }, { group: fuga_group, users: fuga_users }])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_group_membership" "hoge" {
name = "hoge-group-membership"
users = ["foo"]
group = "hoge"
}
resource "aws_iam_group_membership" "fuga" {
name = "fuga-group-membership"
users = ["bar"]
group = "fuga"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_group_membership.hoge" => {
"type" => "aws_iam_group_membership",
"primary" => {
"id" => "hoge-group-membership",
"attributes" => {
"group" => "hoge",
"id" => "hoge-group-membership",
"name" => "hoge-group-membership",
"users.#" => "1",
}
}
},
"aws_iam_group_membership.fuga" => {
"type" => "aws_iam_group_membership",
"primary" => {
"id" => "fuga-group-membership",
"attributes" => {
"group" => "fuga",
"id" => "fuga-group-membership",
"name" => "fuga-group-membership",
"users.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_user_spec.rb | spec/lib/terraforming/resource/iam_user_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMUser do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:users) do
[
{
path: "/",
user_name: "hoge",
user_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:user/hoge",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
password_last_used: Time.parse("2015-04-01 15:00:00 UTC"),
},
{
path: "/system/",
user_name: "fuga.piyo",
user_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:user/fuga",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
password_last_used: Time.parse("2015-05-01 15:00:00 UTC"),
},
]
end
before do
client.stub_responses(:list_users, users: users)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_user" "hoge" {
name = "hoge"
path = "/"
}
resource "aws_iam_user" "fuga-piyo" {
name = "fuga.piyo"
path = "/system/"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_user.hoge" => {
"type" => "aws_iam_user",
"primary" => {
"id" => "hoge",
"attributes" => {
"arn" => "arn:aws:iam::123456789012:user/hoge",
"id" => "hoge",
"name" => "hoge",
"path" => "/",
"unique_id" => "ABCDEFGHIJKLMN1234567",
"force_destroy" => "false",
}
}
},
"aws_iam_user.fuga-piyo" => {
"type" => "aws_iam_user",
"primary" => {
"id" => "fuga.piyo",
"attributes" => {
"arn" => "arn:aws:iam::345678901234:user/fuga",
"id" => "fuga.piyo",
"name" => "fuga.piyo",
"path" => "/system/",
"unique_id" => "OPQRSTUVWXYZA8901234",
"force_destroy" => "false",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/eip_spec.rb | spec/lib/terraforming/resource/eip_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe EIP do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:eips) do
[
{
domain: "vpc",
instance_id: "i-12345678",
network_interface_id: "eni-12345678",
association_id: "eipassoc-98765432",
network_interface_owner_id: "123456789012",
public_ip: "12.34.56.78",
allocation_id: "eipalloc-87654321",
private_ip_address: "1.1.1.1",
},
{
domain: "vpc",
network_interface_id: "eni-23456789",
association_id: "eipassoc-87654321",
network_interface_owner_id: "234567890123",
public_ip: "2.2.2.2",
allocation_id: "eipalloc-76543210",
private_ip_address: "9.9.9.9",
},
{
public_ip: "3.3.3.3",
domain: "vpc",
allocation_id: "eipalloc-33333333",
},
{
instance_id: "i-91112221",
public_ip: "2.2.2.4",
allocation_id: nil,
association_id: nil,
domain: "standard",
network_interface_id: nil,
network_interface_owner_id: nil,
private_ip_address: nil
}
]
end
before do
client.stub_responses(:describe_addresses, addresses: eips)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_eip" "eipalloc-87654321" {
instance = "i-12345678"
vpc = true
}
resource "aws_eip" "eipalloc-76543210" {
network_interface = "eni-23456789"
vpc = true
}
resource "aws_eip" "eipalloc-33333333" {
vpc = true
}
resource "aws_eip" "2-2-2-4" {
instance = "i-91112221"
vpc = false
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_eip.eipalloc-87654321" => {
"type" => "aws_eip",
"primary" => {
"id" => "eipalloc-87654321",
"attributes" => {
"association_id" => "eipassoc-98765432",
"domain" => "vpc",
"id" => "eipalloc-87654321",
"instance" => "i-12345678",
"network_interface" => "eni-12345678",
"private_ip" => "1.1.1.1",
"public_ip" => "12.34.56.78",
"vpc" => "true"
}
}
},
"aws_eip.eipalloc-76543210" => {
"type" => "aws_eip",
"primary" => {
"id" => "eipalloc-76543210",
"attributes" => {
"association_id" => "eipassoc-87654321",
"domain" => "vpc",
"id" => "eipalloc-76543210",
"network_interface" => "eni-23456789",
"private_ip" => "9.9.9.9",
"public_ip" => "2.2.2.2",
"vpc" => "true"
}
}
},
"aws_eip.eipalloc-33333333" => {
"type" => "aws_eip",
"primary" => {
"id" => "eipalloc-33333333",
"attributes" => {
"domain" => "vpc",
"id" => "eipalloc-33333333",
"public_ip" => "3.3.3.3",
"vpc" => "true"
}
}
},
"aws_eip.2-2-2-4" => {
"type" => "aws_eip",
"primary" => {
"id" => "2.2.2.4",
"attributes" => {
"domain" => "standard",
"id" => "2.2.2.4",
"instance" => "i-91112221",
"public_ip" => "2.2.2.4",
"vpc" => "false"
},
},
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/db_security_group_spec.rb | spec/lib/terraforming/resource/db_security_group_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe DBSecurityGroup do
let(:client) do
Aws::RDS::Client.new(stub_responses: true)
end
let(:db_security_groups) do
[
{
ip_ranges: [],
owner_id: "123456789012",
db_security_group_description: "default",
ec2_security_groups: [
{
status: "authorized",
ec2_security_group_name: "default",
ec2_security_group_owner_id: "123456789012",
ec2_security_group_id: "sg-1234abcd"
}
],
db_security_group_name: "default"
},
{
ip_ranges: [
{
status: "authorized",
cidrip: "0.0.0.0/0"
}
],
owner_id: "3456789012",
db_security_group_description: "foobar group",
ec2_security_groups: [
{
status: "authorized",
ec2_security_group_name: "foobar",
ec2_security_group_owner_id: "3456789012",
ec2_security_group_id: "sg-5678efgh"
}
],
db_security_group_name: "sgfoobar"
},
{
ip_ranges: [],
owner_id: "123456789012",
db_security_group_description: "empty",
ec2_security_groups: [],
db_security_group_name: "empty"
},
]
end
before do
client.stub_responses(:describe_db_security_groups, db_security_groups: db_security_groups)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_db_security_group" "default" {
name = "default"
description = "default"
ingress {
cidr = ""
security_group_name = "default"
security_group_id = "sg-1234abcd"
security_group_owner_id = "123456789012"
}
}
resource "aws_db_security_group" "sgfoobar" {
name = "sgfoobar"
description = "foobar group"
ingress {
cidr = "0.0.0.0/0"
security_group_name = ""
security_group_id = ""
security_group_owner_id = ""
}
ingress {
cidr = ""
security_group_name = "foobar"
security_group_id = "sg-5678efgh"
security_group_owner_id = "3456789012"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_db_security_group.default" => {
"type" => "aws_db_security_group",
"primary" => {
"id" => "default",
"attributes" => {
"db_subnet_group_name" => "default",
"id" => "default",
"ingress.#" => "1",
"name" => "default",
}
}
},
"aws_db_security_group.sgfoobar" => {
"type" => "aws_db_security_group",
"primary" => {
"id" => "sgfoobar",
"attributes" => {
"db_subnet_group_name" => "sgfoobar",
"id" => "sgfoobar",
"ingress.#" => "2",
"name" => "sgfoobar",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_group_spec.rb | spec/lib/terraforming/resource/iam_group_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMGroup do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:groups) do
[
{
path: "/",
group_name: "hoge",
group_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:group/hoge",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
},
{
path: "/system/",
group_name: "fuga",
group_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:group/fuga",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
},
]
end
before do
client.stub_responses(:list_groups, [{
groups: [groups[0]],
is_truncated: true,
marker: 'marker'
}, {
groups: [groups[1]],
is_truncated: false,
marker: nil
}])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_group" "hoge" {
name = "hoge"
path = "/"
}
resource "aws_iam_group" "fuga" {
name = "fuga"
path = "/system/"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_group.hoge" => {
"type" => "aws_iam_group",
"primary" => {
"id" => "hoge",
"attributes" => {
"arn" => "arn:aws:iam::123456789012:group/hoge",
"id" => "hoge",
"name" => "hoge",
"path" => "/",
"unique_id" => "ABCDEFGHIJKLMN1234567",
}
}
},
"aws_iam_group.fuga" => {
"type" => "aws_iam_group",
"primary" => {
"id" => "fuga",
"attributes" => {
"arn" => "arn:aws:iam::345678901234:group/fuga",
"id" => "fuga",
"name" => "fuga",
"path" => "/system/",
"unique_id" => "OPQRSTUVWXYZA8901234",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/db_parameter_group_spec.rb | spec/lib/terraforming/resource/db_parameter_group_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe DBParameterGroup do
let(:client) do
Aws::RDS::Client.new(stub_responses: true)
end
let(:db_parameter_groups) do
[
{
db_parameter_group_name: "default.mysql5.6",
db_parameter_group_family: "mysql5.6",
description: "Default parameter group for mysql5.6"
},
{
db_parameter_group_name: "default.postgres9.4",
db_parameter_group_family: "postgres9.4",
description: "Default parameter group for postgres9.4"
}
]
end
let(:mysql_parameters) do
[
{
parameter_name: "application_name",
parameter_value: nil,
description: "Name of the application",
source: "engine-default",
apply_type: "dynamic",
data_type: "string",
allowed_values: nil,
is_modifiable: true,
minimum_engine_version: nil,
apply_method: nil
},
{
parameter_name: "archive_command",
parameter_value: "/path/to/archive %p",
description: "Command to archive database",
source: "system",
apply_type: "dynamic",
data_type: "string",
allowed_values: nil,
is_modifiable: false,
minimum_engine_version: nil,
apply_method: nil
}
]
end
let(:pg_parameters) do
[
{
parameter_name: "archive_timeout",
parameter_value: "300",
description: "Timeout seconds for archiving",
source: "system",
apply_type: "dynamic",
data_type: "integer",
allowed_values: "0-214748364",
is_modifiable: false,
minimum_engine_version: nil,
apply_method: nil
},
{
parameter_name: "array_nulls",
parameter_value: nil,
description: "Enable input of NULL elements",
source: "engine-default",
apply_type: "dynamic",
data_type: "boolean",
allowed_values: "0,1",
is_modifiable: false,
minimum_engine_version: nil,
apply_method: nil
}
]
end
before do
client.stub_responses(:describe_db_parameter_groups, db_parameter_groups: db_parameter_groups)
client.stub_responses(:describe_db_parameters, [{ parameters: mysql_parameters }, { parameters: pg_parameters }])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_db_parameter_group" "default-mysql5-6" {
name = "default.mysql5.6"
family = "mysql5.6"
description = "Default parameter group for mysql5.6"
parameter {
name = "application_name"
value = ""
apply_method = "immediate"
}
parameter {
name = "archive_command"
value = "/path/to/archive %p"
apply_method = "immediate"
}
}
resource "aws_db_parameter_group" "default-postgres9-4" {
name = "default.postgres9.4"
family = "postgres9.4"
description = "Default parameter group for postgres9.4"
parameter {
name = "archive_timeout"
value = "300"
apply_method = "immediate"
}
parameter {
name = "array_nulls"
value = ""
apply_method = "immediate"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_db_parameter_group.default-mysql5-6" => {
"type" => "aws_db_parameter_group",
"primary" => {
"id" => "default.mysql5.6",
"attributes" => {
"description" => "Default parameter group for mysql5.6",
"family" => "mysql5.6",
"id" => "default.mysql5.6",
"name" => "default.mysql5.6",
"parameter.#" => "2",
}
}
},
"aws_db_parameter_group.default-postgres9-4" => {
"type" => "aws_db_parameter_group",
"primary" => {
"id" => "default.postgres9.4",
"attributes" => {
"description" => "Default parameter group for postgres9.4",
"family" => "postgres9.4",
"id" => "default.postgres9.4",
"name" => "default.postgres9.4",
"parameter.#" => "2",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/route53_zone_spec.rb | spec/lib/terraforming/resource/route53_zone_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe Route53Zone do
let(:client) do
Aws::Route53::Client.new(stub_responses: true)
end
let(:hoge_hosted_zone) do
{
id: "/hostedzone/ABCDEFGHIJKLMN",
name: "hoge.net.",
caller_reference: "ABCDEFGH-1234-IJKL-5678-MNOPQRSTUVWX",
config: {
comment: "",
private_zone: false
},
resource_record_set_count: 4,
}
end
let(:fuga_hosted_zone) do
{
id: "/hostedzone/OPQRSTUVWXYZAB",
name: "fuga.net.",
caller_reference: "ABCDEFGH-5678-IJKL-9012-MNOPQRSTUVWX",
config: {
comment: "fuga.net zone comment",
private_zone: true
},
resource_record_set_count: 4
}
end
let(:hosted_zones) do
[hoge_hosted_zone, fuga_hosted_zone]
end
let(:hoge_resource_tag_set) do
{
resource_type: "hostedzone",
resource_id: "ABCDEFGHIJKLMN",
tags: [
{ key: "Environment", value: "dev" }
]
}
end
let(:fuga_resource_tag_set) do
{
resource_type: "hostedzone",
resource_id: "OPQRSTUVWXYZAB",
tags: [
{ key: "Environment", value: "dev" }
]
}
end
let(:hoge_delegation_set) do
{
name_servers: %w(ns-1234.awsdns-12.co.uk ns-567.awsdns-34.net ns-8.awsdns-56.com ns-9012.awsdns-78.org)
}
end
let(:fuga_vp_cs) do
[
{ vpc_region: "ap-northeast-1", vpc_id: "vpc-1234abcd" }
]
end
before do
client.stub_responses(:list_hosted_zones,
hosted_zones: hosted_zones, marker: "", is_truncated: false, max_items: 1)
client.stub_responses(:list_tags_for_resource, [
{ resource_tag_set: hoge_resource_tag_set },
{ resource_tag_set: fuga_resource_tag_set },
])
client.stub_responses(:get_hosted_zone, [
{ hosted_zone: hoge_hosted_zone, delegation_set: hoge_delegation_set, vp_cs: [] },
{ hosted_zone: fuga_hosted_zone, delegation_set: nil, vp_cs: fuga_vp_cs },
])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_route53_zone" "hoge-net-public" {
name = "hoge.net"
comment = ""
tags {
"Environment" = "dev"
}
}
resource "aws_route53_zone" "fuga-net-private" {
name = "fuga.net"
comment = "fuga.net zone comment"
vpc_id = "vpc-1234abcd"
vpc_region = "ap-northeast-1"
tags {
"Environment" = "dev"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_route53_zone.hoge-net-public" => {
"type" => "aws_route53_zone",
"primary" => {
"id" => "ABCDEFGHIJKLMN",
"attributes" => {
"comment" => "",
"id" => "ABCDEFGHIJKLMN",
"name" => "hoge.net",
"name_servers.#" => "4",
"tags.#" => "1",
"vpc_id" => "",
"vpc_region" => "",
"zone_id" => "ABCDEFGHIJKLMN",
},
}
},
"aws_route53_zone.fuga-net-private" => {
"type" => "aws_route53_zone",
"primary" => {
"id" => "OPQRSTUVWXYZAB",
"attributes" => {
"comment" => "fuga.net zone comment",
"id" => "OPQRSTUVWXYZAB",
"name" => "fuga.net",
"name_servers.#" => "0",
"tags.#" => "1",
"vpc_id" => "vpc-1234abcd",
"vpc_region" => "ap-northeast-1",
"zone_id" => "OPQRSTUVWXYZAB",
},
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_policy_spec.rb | spec/lib/terraforming/resource/iam_policy_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMPolicy do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:policies) do
[
{
policy_name: "hoge_policy",
policy_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:policy/hoge_policy",
path: "/",
default_version_id: "v1",
attachment_count: 0,
is_attachable: true,
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
update_date: Time.parse("2015-05-14 11:25:36 UTC"),
description: "hoge",
},
{
policy_name: "fuga_policy",
policy_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:policy/fuga-policy",
path: "/system/",
default_version_id: "v1",
attachment_count: 1,
is_attachable: true,
create_date: Time.parse("2015-04-01 12:00:00 UTC"),
update_date: Time.parse("2015-04-26 19:54:56 UTC"),
description: "fuga",
}
]
end
let(:hoge_policy_version) do
{
document: "%7B%0A%20%20%22Version%22%3A%20%222012-10-17%22%2C%0A%20%20%22Statement%22%3A%20%5B%0A%20%20%20%20%7B%0A%20%20%20%20%20%20%22Action%22%3A%20%5B%0A%20%20%20%20%20%20%20%20%22ec2%3ADescribe%2A%22%0A%20%20%20%20%20%20%5D%2C%0A%20%20%20%20%20%20%22Effect%22%3A%20%22Allow%22%2C%0A%20%20%20%20%20%20%22Resource%22%3A%20%22%2A%22%0A%20%20%20%20%7D%0A%20%20%5D%0A%7D%0A",
version_id: "v1",
is_default_version: true,
create_date: Time.parse("2015-05-14 11:25:36 UTC"),
}
end
let(:fuga_policy_version) do
{
document: "%7B%0A%20%20%22Version%22%3A%20%222012-10-17%22%2C%0A%20%20%22Statement%22%3A%20%5B%0A%20%20%20%20%7B%0A%20%20%20%20%20%20%22Action%22%3A%20%5B%0A%20%20%20%20%20%20%20%20%22ec2%3ADescribe%2A%22%0A%20%20%20%20%20%20%5D%2C%0A%20%20%20%20%20%20%22Effect%22%3A%20%22Allow%22%2C%0A%20%20%20%20%20%20%22Resource%22%3A%20%22%2A%22%0A%20%20%20%20%7D%0A%20%20%5D%0A%7D%0A",
version_id: "v1",
is_default_version: true,
create_date: Time.parse("2015-04-26 19:54:56 UTC"),
}
end
before do
client.stub_responses(:get_policy, [{ policy: policies[0] }, { policy: policies[1] }])
client.stub_responses(:list_policies, policies: policies)
client.stub_responses(:get_policy_version, [{ policy_version: hoge_policy_version }, { policy_version: fuga_policy_version }])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_policy" "hoge_policy" {
name = "hoge_policy"
path = "/"
description = "hoge"
policy = <<POLICY
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"ec2:Describe*"
],
"Effect": "Allow",
"Resource": "*"
}
]
}
POLICY
}
resource "aws_iam_policy" "fuga_policy" {
name = "fuga_policy"
path = "/system/"
description = "fuga"
policy = <<POLICY
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"ec2:Describe*"
],
"Effect": "Allow",
"Resource": "*"
}
]
}
POLICY
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_policy.hoge_policy" => {
"type" => "aws_iam_policy",
"primary" => {
"id" => "arn:aws:iam::123456789012:policy/hoge_policy",
"attributes" => {
"id" => "arn:aws:iam::123456789012:policy/hoge_policy",
"name" => "hoge_policy",
"path" => "/",
"description" => "hoge",
"policy" => "{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Action\": [\n \"ec2:Describe*\"\n ],\n \"Effect\": \"Allow\",\n \"Resource\": \"*\"\n }\n ]\n}\n",
}
}
},
"aws_iam_policy.fuga_policy" => {
"type" => "aws_iam_policy",
"primary" => {
"id" => "arn:aws:iam::345678901234:policy/fuga-policy",
"attributes" => {
"id" => "arn:aws:iam::345678901234:policy/fuga-policy",
"name" => "fuga_policy",
"path" => "/system/",
"description" => "fuga",
"policy" => "{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Action\": [\n \"ec2:Describe*\"\n ],\n \"Effect\": \"Allow\",\n \"Resource\": \"*\"\n }\n ]\n}\n",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/auto_scaling_group_spec.rb | spec/lib/terraforming/resource/auto_scaling_group_spec.rb | require "spec_helper"
require "spec_helper"
module Terraforming
module Resource
describe AutoScalingGroup do
let(:client) do
Aws::AutoScaling::Client.new(stub_responses: true)
end
let(:auto_scaling_groups) do
[
{
auto_scaling_group_name: "hoge",
auto_scaling_group_arn:
"arn:aws:autoscaling:ap-northeast-1:123456789012:autoScalingGroup:1234abcd-1dd4-4089-b8c9-12345abcdefg:autoScalingGroupName/hoge",
launch_configuration_name: "hoge-lc",
min_size: 1,
max_size: 4,
desired_capacity: 2,
default_cooldown: 300,
availability_zones: ["ap-northeast-1b"],
load_balancer_names: [],
health_check_type: "EC2",
health_check_grace_period: 300,
instances: [
{
instance_id: "i-1234abcd",
availability_zone: "ap-northeast-1b",
lifecycle_state: "InService",
health_status: "Healthy",
launch_configuration_name: "hoge-lc",
protected_from_scale_in: true,
},
{
instance_id: "i-5678efgh",
availability_zone: "ap-northeast-1b",
lifecycle_state: "InService",
health_status: "Healthy",
launch_configuration_name: "hoge-lc",
protected_from_scale_in: true,
},
],
created_time: Time.parse("2015-10-21 04:08:39 UTC"),
suspended_processes: [],
vpc_zone_identifier: "",
enabled_metrics: [],
tags: [
{
resource_id: "hoge",
resource_type: "auto-scaling-group",
key: "foo1",
value: "bar",
propagate_at_launch: true,
}
],
termination_policies: ["Default"],
},
{
auto_scaling_group_name: "fuga",
auto_scaling_group_arn:
"arn:aws:autoscaling:ap-northeast-1:123456789012:autoScalingGroup:1234abcd-1dd4-4089-b8c9-12345abcdefg:autoScalingGroupName/fuga",
launch_configuration_name: "fuga-lc",
min_size: 1,
max_size: 4,
desired_capacity: 2,
default_cooldown: 300,
availability_zones: ["ap-northeast-1b", "ap-northeast-1c"],
load_balancer_names: [],
health_check_type: "EC2",
health_check_grace_period: 300,
instances: [
{
instance_id: "i-9012ijkl",
availability_zone: "ap-northeast-1c",
lifecycle_state: "InService",
health_status: "Healthy",
launch_configuration_name: "fuga-lc",
protected_from_scale_in: true,
},
{
instance_id: "i-3456mnop",
availability_zone: "ap-northeast-1c",
lifecycle_state: "InService",
health_status: "Healthy",
launch_configuration_name: "fuga-lc",
protected_from_scale_in: true,
},
],
created_time: Time.parse("2015-10-20 04:08:39 UTC"),
suspended_processes: [],
vpc_zone_identifier: "subnet-1234abcd,subnet-5678efgh",
enabled_metrics: [],
tags: [],
termination_policies: ["Default"],
},
{
auto_scaling_group_name: "piyo",
auto_scaling_group_arn:
"arn:aws:autoscaling:ap-northeast-1:123456789012:autoScalingGroup:1234abcd-1dd4-4089-b8c9-12345abcdefg:autoScalingGroupName/piyo",
launch_configuration_name: "piyo-lc",
min_size: 1,
max_size: 2,
desired_capacity: 1,
default_cooldown: 300,
availability_zones: ["ap-northeast-1c"],
load_balancer_names: [],
health_check_type: "EC2",
health_check_grace_period: 300,
instances: [
{
instance_id: "i-7890qrst",
availability_zone: "ap-northeast-1c",
lifecycle_state: "InService",
health_status: "Healthy",
launch_configuration_name: "piyo-lc",
protected_from_scale_in: true,
},
],
created_time: Time.parse("2015-10-20 04:08:39 UTC"),
suspended_processes: [],
vpc_zone_identifier: "subnet-1234abcd,subnet-5678efgh",
enabled_metrics: [],
tags: [
{
resource_id: "piyo",
resource_type: "auto-scaling-group",
key: "foo1",
value: "bar",
propagate_at_launch: true,
},
{
resource_id: "piyo",
resource_type: "auto-scaling-group",
key: "app",
value: "sample",
propagate_at_launch: true,
}
],
termination_policies: ["Default"],
},
]
end
before do
client.stub_responses(:describe_auto_scaling_groups, auto_scaling_groups: auto_scaling_groups)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_autoscaling_group" "hoge" {
availability_zones = ["ap-northeast-1b"]
desired_capacity = 2
health_check_grace_period = 300
health_check_type = "EC2"
launch_configuration = "hoge-lc"
max_size = 4
min_size = 1
name = "hoge"
tag {
key = "foo1"
value = "bar"
propagate_at_launch = true
}
}
resource "aws_autoscaling_group" "fuga" {
desired_capacity = 2
health_check_grace_period = 300
health_check_type = "EC2"
launch_configuration = "fuga-lc"
max_size = 4
min_size = 1
name = "fuga"
vpc_zone_identifier = ["subnet-1234abcd", "subnet-5678efgh"]
}
resource "aws_autoscaling_group" "piyo" {
desired_capacity = 1
health_check_grace_period = 300
health_check_type = "EC2"
launch_configuration = "piyo-lc"
max_size = 2
min_size = 1
name = "piyo"
vpc_zone_identifier = ["subnet-1234abcd", "subnet-5678efgh"]
tag {
key = "foo1"
value = "bar"
propagate_at_launch = true
}
tag {
key = "app"
value = "sample"
propagate_at_launch = true
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_autoscaling_group.hoge" => {
"type" => "aws_autoscaling_group",
"primary" => {
"id" => "hoge",
"attributes" => {
"availability_zones.#" => "1",
"default_cooldown" => "300",
"desired_capacity" => "2",
"health_check_grace_period" => "300",
"health_check_type" => "EC2",
"id" => "hoge",
"launch_configuration" => "hoge-lc",
"load_balancers.#" => "0",
"max_size" => "4",
"min_size" => "1",
"name" => "hoge",
"tag.#" => "1",
"tag.3921462319.key" => "foo1",
"tag.3921462319.propagate_at_launch" => "true",
"tag.3921462319.value" => "bar",
"termination_policies.#" => "0",
"vpc_zone_identifier.#" => "0",
},
"meta" => {
"schema_version" => "1"
}
}
},
"aws_autoscaling_group.fuga" => {
"type" => "aws_autoscaling_group",
"primary" => {
"id" => "fuga",
"attributes" => {
"availability_zones.#" => "0",
"default_cooldown" => "300",
"desired_capacity" => "2",
"health_check_grace_period" => "300",
"health_check_type" => "EC2",
"id" => "fuga",
"launch_configuration" => "fuga-lc",
"load_balancers.#" => "0",
"max_size" => "4",
"min_size" => "1",
"name" => "fuga",
"tag.#" => "0",
"termination_policies.#" => "0",
"vpc_zone_identifier.#" => "2",
},
"meta" => {
"schema_version" => "1"
}
}
},
"aws_autoscaling_group.piyo" => {
"type" => "aws_autoscaling_group",
"primary" => {
"id" => "piyo",
"attributes" => {
"availability_zones.#" => "0",
"default_cooldown" => "300",
"desired_capacity" => "1",
"health_check_grace_period" => "300",
"health_check_type" => "EC2",
"id" => "piyo",
"launch_configuration" => "piyo-lc",
"load_balancers.#" => "0",
"max_size" => "2",
"min_size" => "1",
"name" => "piyo",
"tag.#" => "2",
"tag.3921462319.key" => "foo1",
"tag.3921462319.propagate_at_launch" => "true",
"tag.3921462319.value" => "bar",
"tag.1379189922.key" => "app",
"tag.1379189922.propagate_at_launch" => "true",
"tag.1379189922.value" => "sample",
"termination_policies.#" => "0",
"vpc_zone_identifier.#" => "2",
},
"meta" => {
"schema_version" => "1"
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/nat_gateway_spec.rb | spec/lib/terraforming/resource/nat_gateway_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe NATGateway do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:nat_gateways) do
[
{
nat_gateway_id: "nat-0c5b68b2c4d64e037",
subnet_id: "subnet-cd5645f7",
nat_gateway_addresses: [
allocation_id: "eipalloc-b02a3c79",
network_interface_id: "eni-03d4046f",
private_ip: "10.0.3.6",
public_ip: "52.5.3.67",
]
},
{
nat_gateway_id: "nat-0c5b68b2c4d64ea12",
subnet_id: "subnet-cd564c9e",
nat_gateway_addresses: [
allocation_id: "eipalloc-a03a3c79",
network_interface_id: "eni-b6e4046f",
private_ip: "10.0.4.6",
public_ip: "54.4.5.68",
]
}
]
end
before do
client.stub_responses(:describe_nat_gateways, nat_gateways: nat_gateways)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_nat_gateway" "nat-0c5b68b2c4d64e037" {
allocation_id = "eipalloc-b02a3c79"
subnet_id = "subnet-cd5645f7"
}
resource "aws_nat_gateway" "nat-0c5b68b2c4d64ea12" {
allocation_id = "eipalloc-a03a3c79"
subnet_id = "subnet-cd564c9e"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_nat_gateway.nat-0c5b68b2c4d64e037" => {
"type" => "aws_nat_gateway",
"primary" => {
"id" => "nat-0c5b68b2c4d64e037",
"attributes" => {
"id" => "nat-0c5b68b2c4d64e037",
"allocation_id" => "eipalloc-b02a3c79",
"subnet_id" => "subnet-cd5645f7",
"network_inferface_id" => "eni-03d4046f",
"private_ip" => "10.0.3.6",
"public_ip" => "52.5.3.67",
}
}
},
"aws_nat_gateway.nat-0c5b68b2c4d64ea12" => {
"type" => "aws_nat_gateway",
"primary" => {
"id" => "nat-0c5b68b2c4d64ea12",
"attributes" => {
"id" => "nat-0c5b68b2c4d64ea12",
"allocation_id" => "eipalloc-a03a3c79",
"subnet_id" => "subnet-cd564c9e",
"network_inferface_id" => "eni-b6e4046f",
"private_ip" => "10.0.4.6",
"public_ip" => "54.4.5.68",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_policy_attachment_spec.rb | spec/lib/terraforming/resource/iam_policy_attachment_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMPolicyAttachment do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:policies) do
[
{
policy_name: "hoge",
policy_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:policy/hoge",
path: "/",
default_version_id: "v1",
attachment_count: 0,
is_attachable: true,
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
update_date: Time.parse("2015-05-14 11:25:36 UTC"),
description: "hoge",
},
{
policy_name: "fuga",
policy_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:policy/fuga",
path: "/system/",
default_version_id: "v1",
attachment_count: 1,
is_attachable: true,
create_date: Time.parse("2015-04-01 12:00:00 UTC"),
update_date: Time.parse("2015-04-26 19:54:56 UTC"),
description: "fuga",
}
]
end
let(:entities_for_policy_hoge) do
{
policy_groups: [
{ group_name: "hoge", group_id: "GRUPEFGHIJKLMN1234567" },
{ group_name: "fuga", group_id: "GRIPSTUVWXYZA89012345" },
],
policy_users: [
{ user_name: "hoge", user_id: "USEREFGHIJKLMN1234567" }
],
policy_roles: [],
}
end
let(:entities_for_policy_fuga) do
{
policy_groups: [
{ group_name: "fuga", group_id: "GRIPSTUVWXYZA89012345" },
],
policy_users: [
{ user_name: "hoge", user_id: "USEREFGHIJKLMN1234567" },
{ user_name: "fuga", user_id: "USERSTUVWXYZA89012345" },
],
policy_roles: [
{ role_name: "hoge_role", role_id: "ROLEEFGHIJKLMN1234567" },
{ role_name: "fuga_role", role_id: "OPQRSTUVWXYZA89012345" },
],
}
end
before do
client.stub_responses(:list_policies, policies: policies)
client.stub_responses(:list_entities_for_policy, [entities_for_policy_hoge, entities_for_policy_fuga])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_policy_attachment" "hoge-policy-attachment" {
name = "hoge-policy-attachment"
policy_arn = "arn:aws:iam::123456789012:policy/hoge"
groups = ["hoge", "fuga"]
users = ["hoge"]
roles = []
}
resource "aws_iam_policy_attachment" "fuga-policy-attachment" {
name = "fuga-policy-attachment"
policy_arn = "arn:aws:iam::345678901234:policy/fuga"
groups = ["fuga"]
users = ["hoge", "fuga"]
roles = ["hoge_role", "fuga_role"]
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_policy_attachment.hoge-policy-attachment" => {
"type" => "aws_iam_policy_attachment",
"primary" => {
"id" => "hoge-policy-attachment",
"attributes" => {
"id" => "hoge-policy-attachment",
"name" => "hoge-policy-attachment",
"policy_arn" => "arn:aws:iam::123456789012:policy/hoge",
"groups.#" => "2",
"users.#" => "1",
"roles.#" => "0",
}
}
},
"aws_iam_policy_attachment.fuga-policy-attachment" => {
"type" => "aws_iam_policy_attachment",
"primary" => {
"id" => "fuga-policy-attachment",
"attributes" => {
"id" => "fuga-policy-attachment",
"name" => "fuga-policy-attachment",
"policy_arn" => "arn:aws:iam::345678901234:policy/fuga",
"groups.#" => "1",
"users.#" => "2",
"roles.#" => "2",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/ec2_spec.rb | spec/lib/terraforming/resource/ec2_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe EC2 do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:instances) do
[
{
instance_id: "i-1234abcd",
image_id: "ami-1234abcd",
state: { code: 16, name: "running" },
private_dns_name: "ip-10-0-0-100.ap-northeast-1.compute.internal",
public_dns_name: "ec2-54-12-0-0.ap-northeast-1.compute.amazonaws.com",
state_transition_reason: "",
key_name: "hoge-key",
ami_launch_index: 0,
product_codes: [],
instance_type: "t2.micro",
monitoring: { state: "disabled" },
launch_time: Time.parse("2015-03-12 01:23:45 UTC"),
placement: { availability_zone: "ap-northeast-1b", group_name: "", tenancy: "default" },
subnet_id: "subnet-1234abcd",
vpc_id: "vpc-1234abcd",
private_ip_address: "10.0.0.100",
public_ip_address: "54.12.0.0",
architecture: "x86_64",
root_device_type: "ebs",
root_device_name: "/dev/sda1",
block_device_mappings: [
{
device_name: "/dev/sda1",
ebs: {
volume_id: "vol-1234abcd", status: "attached",
attach_time: Time.parse("2015-03-12 01:23:45 UTC"), delete_on_termination: true
}
}
],
virtualization_type: "hvm",
client_token: "abcde1234567890123",
tags: [
{ key: "Name", value: "hoge" }
],
security_groups: [
{ group_name: "default", group_id: "sg-1234abcd" }
],
source_dest_check: true,
hypervisor: "xen",
network_interfaces: [
{
network_interface_id: "eni-1234abcd",
subnet_id: "subnet-1234abcd",
vpc_id: "vpc-1234abcd",
description: "Primary network interface",
owner_id: "012345678901",
status: "in-use",
mac_address: "01:23:45:67:89:0a",
private_ip_address: "10.0.0.100",
private_dns_name: "ip-10-0-0-100.ap-northeast-1.compute.internal",
source_dest_check: true,
groups: [
{ group_name: "default", group_id: "sg-1234abcd" }
],
attachment: {
attachment_id: "eni-attach-5678efgh",
device_index: 0,
status: "attached",
attach_time: Time.parse("2015-03-12 01:23:45 UTC"),
delete_on_termination: true
},
association: {
public_ip: "54.12.0.0",
public_dns_name: "ec2-54-12-0-0.ap-northeast-1.compute.amazonaws.com",
ip_owner_id: "amazon"
},
private_ip_addresses: [
{
private_ip_address: "10.0.0.100",
private_dns_name: "ip-10-0-6-100.ap-northeast-1.compute.internal",
primary: true,
association: {
public_ip: "54.12.0.0",
public_dns_name: "ec2-54-12-0-0.ap-northeast-1.compute.amazonaws.com",
ip_owner_id: "amazon"
}
}
]
}
],
ebs_optimized: false
},
{
instance_id: "i-5678efgh",
image_id: "ami-5678efgh",
state: { code: 16, name: "running" },
private_dns_name: "ip-10-0-0-101.ap-northeast-1.compute.internal",
public_dns_name: "ec2-54-12-0-1.ap-northeast-1.compute.amazonaws.com",
state_transition_reason: "",
key_name: "hoge-key",
ami_launch_index: 0,
product_codes: [],
instance_type: "t2.micro",
monitoring: { state: "enabled" },
launch_time: Time.parse("2015-03-12 01:23:45 UTC"),
placement: { availability_zone: "ap-northeast-1b", group_name: "pg-1", tenancy: "default" },
subnet_id: "",
vpc_id: "vpc-5678efgh",
private_ip_address: "10.0.0.101",
public_ip_address: "54.12.0.1",
architecture: "x86_64",
root_device_type: "ebs",
root_device_name: "/dev/sda1",
block_device_mappings: [
{
device_name: "/dev/sda2",
ebs: {
volume_id: "vol-5678efgh", status: "attached",
attach_time: Time.parse("2015-03-12 01:23:45 UTC"), delete_on_termination: true
}
}
],
virtualization_type: "hvm",
client_token: "abcde1234567890123",
tags: [],
security_groups: [
{ group_name: "default", group_id: "5678efgh" }
],
source_dest_check: true,
hypervisor: "xen",
network_interfaces: [
{
network_interface_id: "eni-5678efgh",
subnet_id: "subnet-5678efgh",
vpc_id: "vpc-5678efgh",
description: "Primary network interface",
owner_id: "012345678901",
status: "in-use",
mac_address: "01:23:45:67:89:0a",
private_ip_address: "10.0.0.101",
private_dns_name: "ip-10-0-0-101.ap-northeast-1.compute.internal",
source_dest_check: true,
groups: [
{ group_name: "default", group_id: "sg-5678efgh" }
],
attachment: {
attachment_id: "eni-attach-5678efgh",
device_index: 0,
status: "attached",
attach_time: Time.parse("2015-03-12 01:23:45 UTC"),
delete_on_termination: true
},
association: {
public_ip: "54.12.0.1",
public_dns_name: "ec2-54-12-0-1.ap-northeast-1.compute.amazonaws.com",
ip_owner_id: "amazon"
},
private_ip_addresses: [
{
private_ip_address: "10.0.0.101",
private_dns_name: "ip-10-0-6-101.ap-northeast-1.compute.internal",
primary: true,
association: {
public_ip: "54.12.0.1",
public_dns_name: "ec2-54-12-0-1.ap-northeast-1.compute.amazonaws.com",
ip_owner_id: "amazon"
}
}
]
}
],
ebs_optimized: false
},
{
instance_id: "i-9012ijkl",
image_id: "ami-9012ijkl",
state: { code: 16, name: "running" },
private_dns_name: "ip-10-0-0-102.ap-northeast-1.compute.internal",
public_dns_name: "",
state_transition_reason: "",
key_name: "hoge-key",
ami_launch_index: 0,
product_codes: [],
instance_type: "t2.micro",
monitoring: { state: "pending" },
launch_time: Time.parse("2015-03-12 01:23:45 UTC"),
placement: { availability_zone: "ap-northeast-1b", group_name: "", tenancy: "default" },
subnet_id: "",
vpc_id: "vpc-9012ijkl",
private_ip_address: "10.0.0.102",
public_ip_address: "",
architecture: "x86_64",
root_device_type: "ebs",
root_device_name: "/dev/sda1",
block_device_mappings: [],
virtualization_type: "hvm",
client_token: "abcde1234567890123",
tags: [],
security_groups: [
{ group_name: "default", group_id: "9012ijkl" }
],
source_dest_check: true,
hypervisor: "xen",
network_interfaces: [
{
network_interface_id: "eni-9012ijkl",
subnet_id: "subnet-9012ijkl",
vpc_id: "vpc-9012ijkl",
description: "Primary network interface",
owner_id: "012345678901",
status: "in-use",
mac_address: "01:23:45:67:89:0a",
private_ip_address: "10.0.0.102",
private_dns_name: "ip-10-0-0-102.ap-northeast-1.compute.internal",
source_dest_check: true,
groups: [
{ group_name: "default", group_id: "sg-9012ijkl" }
],
attachment: {
attachment_id: "eni-attach-9012ijkl",
device_index: 0,
status: "attached",
attach_time: Time.parse("2015-03-12 01:23:45 UTC"),
delete_on_termination: true
},
association: nil,
private_ip_addresses: [
{
private_ip_address: "10.0.0.102",
private_dns_name: "ip-10-0-6-102.ap-northeast-1.compute.internal",
primary: true,
association: nil
}
]
}
],
ebs_optimized: false
}
]
end
let(:reservations) do
[
reservation_id: "r-1234abcd",
owner_id: "012345678901",
requester_id: nil,
groups: [],
instances: instances
]
end
let(:hoge_volumes) do
[
{
volume_id: "vol-1234abcd",
size: 8,
snapshot_id: "snap-1234abcd",
availability_zone: "ap-northeast-1c",
state: "in-use",
create_time: Time.parse("2015-07-29 15:28:02 UTC"),
attachments: [
{
volume_id: "vol-1234abcd",
instance_id: "i-1234abcd",
device: "/dev/sda1",
state: "attached",
attach_time: Time.parse("2015-03-12 12:34:56 UTC"),
delete_on_termination: true
}
],
volume_type: "io1",
iops: 24,
encrypted: false
}
]
end
let(:fuga_volumes) do
[
{
volume_id: "vol-5678efgh",
size: 8,
snapshot_id: "snap-5678efgh",
availability_zone: "ap-northeast-1c",
state: "in-use",
create_time: Time.parse("2015-07-29 15:28:02 UTC"),
attachments: [
{
volume_id: "vol-5678efgh",
instance_id: "i-5678efgh",
device: "/dev/sda2",
state: "attached",
attach_time: Time.parse("2015-03-12 12:34:56 UTC"),
delete_on_termination: true
}
],
volume_type: "gp2",
iops: 24,
encrypted: false
}
]
end
before do
client.stub_responses(:describe_instances, reservations: reservations)
client.stub_responses(:describe_volumes, [{ volumes: hoge_volumes }, { volumes: fuga_volumes }])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_instance" "hoge" {
ami = "ami-1234abcd"
availability_zone = "ap-northeast-1b"
ebs_optimized = false
instance_type = "t2.micro"
monitoring = false
key_name = "hoge-key"
subnet_id = "subnet-1234abcd"
vpc_security_group_ids = ["sg-1234abcd"]
associate_public_ip_address = true
private_ip = "10.0.0.100"
source_dest_check = true
root_block_device {
volume_type = "io1"
volume_size = 8
delete_on_termination = true
iops = 24
}
tags {
"Name" = "hoge"
}
}
resource "aws_instance" "i-5678efgh" {
ami = "ami-5678efgh"
availability_zone = "ap-northeast-1b"
ebs_optimized = false
instance_type = "t2.micro"
placement_group = "pg-1"
monitoring = true
key_name = "hoge-key"
security_groups = ["default"]
associate_public_ip_address = true
private_ip = "10.0.0.101"
source_dest_check = true
ebs_block_device {
device_name = "/dev/sda2"
snapshot_id = "snap-5678efgh"
volume_type = "gp2"
volume_size = 8
delete_on_termination = true
}
tags {
}
}
resource "aws_instance" "i-9012ijkl" {
ami = "ami-9012ijkl"
availability_zone = "ap-northeast-1b"
ebs_optimized = false
instance_type = "t2.micro"
monitoring = true
key_name = "hoge-key"
security_groups = ["default"]
associate_public_ip_address = false
private_ip = "10.0.0.102"
source_dest_check = true
tags {
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_instance.hoge" => {
"type" => "aws_instance",
"primary" => {
"id" => "i-1234abcd",
"attributes" => {
"ami" => "ami-1234abcd",
"associate_public_ip_address" => "true",
"availability_zone" => "ap-northeast-1b",
"ebs_block_device.#" => "0",
"ebs_optimized" => "false",
"ephemeral_block_device.#" => "0",
"id" => "i-1234abcd",
"instance_type" => "t2.micro",
"monitoring" => "false",
"private_dns" => "ip-10-0-0-100.ap-northeast-1.compute.internal",
"private_ip" => "10.0.0.100",
"public_dns" => "ec2-54-12-0-0.ap-northeast-1.compute.amazonaws.com",
"public_ip" => "54.12.0.0",
"root_block_device.#" => "1",
"security_groups.#" => "0",
"source_dest_check" => "true",
"tenancy" => "default",
"vpc_security_group_ids.#" => "1",
"subnet_id" => "subnet-1234abcd",
},
"meta" => {
"schema_version" => "1"
}
}
},
"aws_instance.i-5678efgh" => {
"type" => "aws_instance",
"primary" => {
"id" => "i-5678efgh",
"attributes" => {
"ami" => "ami-5678efgh",
"associate_public_ip_address" => "true",
"availability_zone" => "ap-northeast-1b",
"ebs_block_device.#" => "1",
"ebs_optimized" => "false",
"ephemeral_block_device.#" => "0",
"id" => "i-5678efgh",
"instance_type" => "t2.micro",
"monitoring" => "true",
"placement_group" => "pg-1",
"private_dns" => "ip-10-0-0-101.ap-northeast-1.compute.internal",
"private_ip" => "10.0.0.101",
"public_dns" => "ec2-54-12-0-1.ap-northeast-1.compute.amazonaws.com",
"public_ip" => "54.12.0.1",
"root_block_device.#" => "0",
"security_groups.#" => "1",
"source_dest_check" => "true",
"tenancy" => "default",
"vpc_security_group_ids.#" => "0",
},
"meta" => {
"schema_version" => "1"
}
},
},
"aws_instance.i-9012ijkl" => {
"type" => "aws_instance",
"primary" => {
"id" => "i-9012ijkl",
"attributes" => {
"ami" => "ami-9012ijkl",
"associate_public_ip_address" => "false",
"availability_zone" => "ap-northeast-1b",
"ebs_block_device.#" => "0",
"ebs_optimized" => "false",
"ephemeral_block_device.#" => "0",
"id" => "i-9012ijkl",
"instance_type" => "t2.micro",
"monitoring" => "true",
"private_dns" => "ip-10-0-0-102.ap-northeast-1.compute.internal",
"private_ip" => "10.0.0.102",
"public_dns" => "",
"public_ip" => "",
"root_block_device.#" => "0",
"security_groups.#" => "1",
"source_dest_check" => "true",
"tenancy" => "default",
"vpc_security_group_ids.#" => "0",
},
"meta" => {
"schema_version" => "1"
}
},
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/dynamo_db_spec.rb | spec/lib/terraforming/resource/dynamo_db_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe DynamoDB do
let(:client) do
Aws::DynamoDB::Client.new(stub_responses: true)
end
let(:tables) do
[
"test-ddb","new-ddb"
]
end
let(:test_ddb_table) do
{
attribute_definitions:
[
{ attribute_name: "account_id", attribute_type: "N" },
{ attribute_name: "action_timestamp", attribute_type: "N" },
{ attribute_name: "type_parentid_timestamp", attribute_type: "S" },
{attribute_name: "newky", attribute_type: "S"},
{attribute_name: "newsortkey", attribute_type: "S"},
],
table_name: "test-ddb",
key_schema: [
{attribute_name: "account_id", key_type: "HASH"},
{attribute_name: "type_parentid_timestamp", key_type: "RANGE"}
],
table_status: "ACTIVE",
creation_date_time: Time.parse("2016-08-31 06:23:57 UTC"),
provisioned_throughput: { number_of_decreases_today: 0, read_capacity_units: 1, write_capacity_units: 1 },
table_size_bytes: 0,
item_count: 0,
table_arn: "arn:aws:dynamodb:eu-central-1:123456789:table/test-ddb",
local_secondary_indexes: [
{
index_name: "action_timestamp_index",
key_schema: [
{attribute_name: "account_id", key_type: "HASH"},
{attribute_name: "action_timestamp", key_type: "RANGE"}
],
projection: { projection_type: "INCLUDE", non_key_attributes: ["fghi", "jklm"] },
index_size_bytes: 0,
item_count: 0,
index_arn: "arn:aws:dynamodb:eu-central-1:123456789:table/test-ddb/index/action_timestamp_index"}
],
global_secondary_indexes: [
{
index_name: "newky-newsortkey-index",
key_schema: [
{attribute_name: "newky", key_type: "HASH"},
{attribute_name: "newsortkey", key_type: "RANGE"}
],
projection: { projection_type: "INCLUDE", non_key_attributes: ["abcd", "efgh"] },
index_status: "ACTIVE",
provisioned_throughput: { number_of_decreases_today: 0, read_capacity_units: 1, write_capacity_units: 1 },
index_size_bytes: 0,
item_count: 0,
index_arn: "arn:aws:dynamodb:eu-central-1:123456789:table/test-ddb/index/newky-newsortkey-index"}
],
stream_specification: {stream_enabled: true, stream_view_type: "NEW_AND_OLD_IMAGES"},
latest_stream_label: Time.parse("2016-08-31 06:23:57 UTC").to_s,
latest_stream_arn: "arn:aws:dynamodb:eu-central-1:123456789:table/test-ddb/stream/"+Time.parse("2016-08-31 06:23:57 UTC").to_s,
sse_description: {
status: "ENABLED"
}
}
end
let(:new_ddb_table) do
{
attribute_definitions:[
{:attribute_name=>"id", :attribute_type=>"S"},
{:attribute_name=>"time", :attribute_type=>"N"}
],
table_name: "new-ddb",
key_schema: [
{:attribute_name=>"id", :key_type=>"HASH"},
{:attribute_name=>"time", :key_type=>"RANGE"}
],
table_status: "ACTIVE",
creation_date_time: Time.parse("2016-08-31 06:23:57 UTC"),
provisioned_throughput: {number_of_decreases_today: 0, read_capacity_units: 5, write_capacity_units: 5},
table_size_bytes: 12345,
item_count: 11222,
:table_arn=>"arn:aws:dynamodb:eu-central-1:123456789:table/new-ddb",
:table_id=>"new-ddb"
}
end
let(:test_ddb_continuous_backups_description) do
{
continuous_backups_status: "ENABLED",
point_in_time_recovery_description: {
point_in_time_recovery_status: "ENABLED"
}
}
end
let(:new_ddb_continuous_backups_description) do
{
continuous_backups_status: "ENABLED",
point_in_time_recovery_description: {
point_in_time_recovery_status: "DISABLED"
}
}
end
let(:test_ddb_describe_time_to_live) do
{:time_to_live_status=>"ENABLED", :attribute_name=>"1"}
end
let(:new_ddb_describe_time_to_live) do
{:time_to_live_status=>"DISABLED"}
end
let(:test_ddb_tags) do
[{:key=>"abcd", :value=>"efgh"}]
end
let(:new_ddb_tags) do
[]
end
before do
client.stub_responses(:list_tables, table_names: tables)
client.stub_responses(:describe_table, [
{table: test_ddb_table},
{table: new_ddb_table}
])
client.stub_responses(:describe_continuous_backups,[
{continuous_backups_description: test_ddb_continuous_backups_description},
{continuous_backups_description: new_ddb_continuous_backups_description}
])
client.stub_responses(:describe_time_to_live, [
{time_to_live_description: test_ddb_describe_time_to_live},
{time_to_live_description: new_ddb_describe_time_to_live}
])
client.stub_responses(:list_tags_of_resource, [
{tags: test_ddb_tags},
{tags: new_ddb_tags}
])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_dynamodb_table" "test-ddb" {
name = "test-ddb"
read_capacity = 1
write_capacity = 1
hash_key = "account_id"
range_key = "type_parentid_timestamp"
attribute {
name = "account_id"
type = "N"
}
attribute {
name = "action_timestamp"
type = "N"
}
attribute {
name = "type_parentid_timestamp"
type = "S"
}
attribute {
name = "newky"
type = "S"
}
attribute {
name = "newsortkey"
type = "S"
}
ttl {
attribute_name = "1"
enabled = true
}
global_secondary_index {
name = "newky-newsortkey-index"
hash_key = "newky"
range_key = "newsortkey"
read_capacity = 1
write_capacity = 1
projection_type = "INCLUDE"
non_key_attributes = ["abcd", "efgh"]
}
local_secondary_index {
name = "action_timestamp_index"
range_key = "action_timestamp"
projection_type = "INCLUDE"
non_key_attributes = ["fghi", "jklm"]
}
tags {
abcd = "efgh"
}
stream_enabled = true
stream_view_type = "NEW_AND_OLD_IMAGES"
server_side_encryption {
enabled = true
}
}
resource "aws_dynamodb_table" "new-ddb" {
name = "new-ddb"
read_capacity = 5
write_capacity = 5
hash_key = "id"
range_key = "time"
attribute {
name = "id"
type = "S"
}
attribute {
name = "time"
type = "N"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_dynamodb_table.test-ddb"=>
{
"type"=>"aws_dynamodb_table",
"primary"=>
{
"id"=>"test-ddb",
"attributes"=>
{
"arn"=>"arn:aws:dynamodb:eu-central-1:123456789:table/test-ddb",
"id"=>"test-ddb",
"name"=>"test-ddb",
"read_capacity"=>"1",
"stream_arn"=>"arn:aws:dynamodb:eu-central-1:123456789:table/test-ddb/stream/2016-08-31 06:23:57 UTC",
"stream_label"=>"2016-08-31 06:23:57 UTC",
"write_capacity"=>"1",
"attribute.#"=>"5",
"attribute.3170009653.name"=>"account_id",
"attribute.3170009653.type"=>"N",
"attribute.901452415.name"=>"action_timestamp",
"attribute.901452415.type"=>"N",
"attribute.2131915850.name"=>"type_parentid_timestamp",
"attribute.2131915850.type"=>"S",
"attribute.3685094810.name"=>"newky",
"attribute.3685094810.type"=>"S",
"attribute.3333016131.name"=>"newsortkey",
"attribute.3333016131.type"=>"S",
"global_secondary_index.#"=>"1",
"global_secondary_index.1661317069.hash_key"=>"newky",
"global_secondary_index.1661317069.name"=>"newky-newsortkey-index",
"global_secondary_index.1661317069.projection_type"=>"INCLUDE",
"global_secondary_index.1661317069.range_key"=>"",
"global_secondary_index.1661317069.read_capacity"=>"1",
"global_secondary_index.1661317069.write_capacity"=>"1",
"global_secondary_index.1661317069.non_key_attributes.#"=>"2",
"global_secondary_index.1661317069.non_key_attributes.0"=>"abcd",
"global_secondary_index.1661317069.non_key_attributes.1"=>"efgh",
"local_secondary_index.#"=>"1",
"local_secondary_index.2469045277.name"=>"action_timestamp_index",
"local_secondary_index.2469045277.projection_type"=>"INCLUDE",
"local_secondary_index.2469045277.non_key_attributes.#"=>"2",
"local_secondary_index.2469045277.non_key_attributes.0"=>"fghi",
"local_secondary_index.2469045277.non_key_attributes.1"=>"jklm",
"key_schema.#"=>"2",
"hash_key"=>"account_id",
"point_in_time_recovery.#"=>"1",
"point_in_time_recovery.0.enabled"=>"true",
"server_side_encryption.#"=>"1",
"server_side_encryption.0.enabled"=>"true",
"stream_view_type"=>"NEW_AND_OLD_IMAGES",
"tags.%"=>"1",
"tags.abcd"=>"efgh",
"ttl.#"=>"1",
"ttl.2212294583.attribute_name"=>"1",
"ttl.2212294583.enabled"=>"true"
},
"meta"=>{"schema_version"=>"1"}
}
},
"aws_dynamodb_table.new-ddb"=>
{
"type"=>"aws_dynamodb_table",
"primary"=>
{
"id"=>"new-ddb",
"attributes"=>
{
"arn"=>"arn:aws:dynamodb:eu-central-1:123456789:table/new-ddb",
"id"=>"new-ddb",
"name"=>"new-ddb",
"read_capacity"=>"5",
"stream_arn"=>"",
"stream_label"=>"",
"write_capacity"=>"5",
"attribute.#"=>"2",
"attribute.4228504427.name"=>"id",
"attribute.4228504427.type"=>"S",
"attribute.2432995967.name"=>"time",
"attribute.2432995967.type"=>"N",
"key_schema.#"=>"2", "hash_key"=>"id",
"point_in_time_recovery.#"=>"0",
"server_side_encryption.#"=>"0"},
"meta"=>{"schema_version"=>"1"}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_user_policy_spec.rb | spec/lib/terraforming/resource/iam_user_policy_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMUserPolicy do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:users) do
[
{
path: "/",
user_name: "hoge",
user_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:user/hoge",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
password_last_used: Time.parse("2015-04-01 15:00:00 UTC"),
},
{
path: "/system/",
user_name: "fuga",
user_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:user/fuga",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
password_last_used: Time.parse("2015-05-01 15:00:00 UTC"),
},
]
end
let(:hoge_policy) do
{
user_name: "hoge",
policy_name: "hoge_policy",
policy_document: "%7B%0A%20%20%22Version%22%3A%20%222012-10-17%22%2C%0A%20%20%22Statement%22%3A%20%5B%0A%20%20%20%20%7B%0A%20%20%20%20%20%20%22Action%22%3A%20%5B%0A%20%20%20%20%20%20%20%20%22ec2%3ADescribe%2A%22%0A%20%20%20%20%20%20%5D%2C%0A%20%20%20%20%20%20%22Effect%22%3A%20%22Allow%22%2C%0A%20%20%20%20%20%20%22Resource%22%3A%20%22%2A%22%0A%20%20%20%20%7D%0A%20%20%5D%0A%7D%0A",
}
end
let(:fuga_policy) do
{
user_name: "fuga",
policy_name: "fuga_policy",
policy_document: "%7B%0A%20%20%22Version%22%3A%20%222012-10-17%22%2C%0A%20%20%22Statement%22%3A%20%5B%0A%20%20%20%20%7B%0A%20%20%20%20%20%20%22Action%22%3A%20%5B%0A%20%20%20%20%20%20%20%20%22ec2%3ADescribe%2A%22%0A%20%20%20%20%20%20%5D%2C%0A%20%20%20%20%20%20%22Effect%22%3A%20%22Allow%22%2C%0A%20%20%20%20%20%20%22Resource%22%3A%20%22%2A%22%0A%20%20%20%20%7D%0A%20%20%5D%0A%7D%0A",
}
end
before do
client.stub_responses(:list_users, users: users)
client.stub_responses(:list_user_policies, [{ policy_names: %w(hoge_policy) }, { policy_names: %w(fuga_policy) }])
client.stub_responses(:get_user_policy, [hoge_policy, fuga_policy])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_user_policy" "hoge_hoge_policy" {
name = "hoge_policy"
user = "hoge"
policy = <<POLICY
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"ec2:Describe*"
],
"Effect": "Allow",
"Resource": "*"
}
]
}
POLICY
}
resource "aws_iam_user_policy" "fuga_fuga_policy" {
name = "fuga_policy"
user = "fuga"
policy = <<POLICY
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"ec2:Describe*"
],
"Effect": "Allow",
"Resource": "*"
}
]
}
POLICY
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_user_policy.hoge_hoge_policy" => {
"type" => "aws_iam_user_policy",
"primary" => {
"id" => "hoge:hoge_policy",
"attributes" => {
"id" => "hoge:hoge_policy",
"name" => "hoge_policy",
"policy" => "{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Action\": [\n \"ec2:Describe*\"\n ],\n \"Effect\": \"Allow\",\n \"Resource\": \"*\"\n }\n ]\n}\n",
"user" => "hoge",
}
}
},
"aws_iam_user_policy.fuga_fuga_policy" => {
"type" => "aws_iam_user_policy",
"primary" => {
"id" => "fuga:fuga_policy",
"attributes" => {
"id" => "fuga:fuga_policy",
"name" => "fuga_policy",
"policy" => "{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Action\": [\n \"ec2:Describe*\"\n ],\n \"Effect\": \"Allow\",\n \"Resource\": \"*\"\n }\n ]\n}\n",
"user" => "fuga",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/cloud_watch_alarm_spec.rb | spec/lib/terraforming/resource/cloud_watch_alarm_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe CloudWatchAlarm do
let(:client) do
Aws::CloudWatch::Client.new(stub_responses: true)
end
let(:alarms) do
[
{
actions_enabled: true,
alarm_actions: ["arn:aws:sns:region:account:lambda-alerts"],
alarm_name: "Alarm With Dimensions",
comparison_operator: "GreaterThanOrEqualToThreshold",
dimensions: [{ name: "FunctionName", value: "beep-beep" }],
evaluation_periods: 1,
insufficient_data_actions: [],
metric_name: "Duration",
namespace: "AWS/Lambda",
ok_actions: [],
period: 300,
statistic: "Average",
threshold: 10000.0
},
{
actions_enabled: false,
alarm_actions: [],
alarm_description: "This metric monitors ec2 cpu utilization",
alarm_name: "terraform-test-foobar5",
comparison_operator: "GreaterThanOrEqualToThreshold",
evaluation_periods: 2,
insufficient_data_actions: [],
metric_name: "CPUUtilization",
namespace: "AWS/EC2",
ok_actions: [],
period: 120,
statistic: "Average",
threshold: 80.0
}
]
end
before do
client.stub_responses(:describe_alarms, metric_alarms: alarms)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_cloudwatch_metric_alarm" "Alarm-With-Dimensions" {
alarm_name = "Alarm With Dimensions"
comparison_operator = "GreaterThanOrEqualToThreshold"
evaluation_periods = "1"
metric_name = "Duration"
namespace = "AWS/Lambda"
period = "300"
statistic = "Average"
threshold = "10000.0"
alarm_description = ""
alarm_actions = ["arn:aws:sns:region:account:lambda-alerts"]
dimensions {
FunctionName = "beep-beep"
}
}
resource "aws_cloudwatch_metric_alarm" "terraform-test-foobar5" {
alarm_name = "terraform-test-foobar5"
comparison_operator = "GreaterThanOrEqualToThreshold"
evaluation_periods = "2"
metric_name = "CPUUtilization"
namespace = "AWS/EC2"
period = "120"
statistic = "Average"
threshold = "80.0"
alarm_description = "This metric monitors ec2 cpu utilization"
actions_enabled = false
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_cloudwatch_metric_alarm.Alarm-With-Dimensions" => {
"type" => "aws_cloudwatch_metric_alarm",
"primary" => {
"id" => "Alarm With Dimensions",
"attributes" => {
"actions_enabled" => "true",
"alarm_name" => "Alarm With Dimensions",
"alarm_description" => "",
"comparison_operator" => "GreaterThanOrEqualToThreshold",
"evaluation_periods" => "1",
"id" => "Alarm With Dimensions",
"metric_name" => "Duration",
"namespace" => "AWS/Lambda",
"ok_actions.#" => "0",
"period" => "300",
"statistic" => "Average",
"threshold" => "10000.0",
"unit" => "",
"insufficient_data_actions.#" => "0",
"alarm_actions.#" => "1",
"alarm_actions.1795058781" => "arn:aws:sns:region:account:lambda-alerts",
"dimensions.#" => "1",
"dimensions.FunctionName" => "beep-beep"
}
}
},
"aws_cloudwatch_metric_alarm.terraform-test-foobar5" => {
"type" => "aws_cloudwatch_metric_alarm",
"primary" => {
"id" => "terraform-test-foobar5",
"attributes" => {
"actions_enabled" => "false",
"alarm_description" => "This metric monitors ec2 cpu utilization",
"alarm_name" => "terraform-test-foobar5",
"comparison_operator" => "GreaterThanOrEqualToThreshold",
"evaluation_periods" => "2",
"id" => "terraform-test-foobar5",
"metric_name" => "CPUUtilization",
"namespace" => "AWS/EC2",
"ok_actions.#" => "0",
"period" => "120",
"statistic" => "Average",
"threshold" => "80.0",
"unit" => "",
"insufficient_data_actions.#" => "0",
"alarm_actions.#" => "0",
"dimensions.#" => "0"
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_role_policy_spec.rb | spec/lib/terraforming/resource/iam_role_policy_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMRolePolicy do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:roles) do
[
{
path: "/",
role_name: "hoge_role",
role_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:role/hoge_role",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
assume_role_policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%22%22%2C%22Effect%22%3A%22Allow%22%2C%22Principal%22%3A%7B%22Service%22%3A%22ec2.amazonaws.com%22%7D%2C%22Action%22%3A%22sts%3AAssumeRole%22%7D%5D%7D",
},
{
path: "/system/",
role_name: "fuga_role",
role_id: "OPQRSTUVWXYZA8901234",
arn: "arn:aws:iam::345678901234:role/fuga_role",
create_date: Time.parse("2015-05-01 12:34:56 UTC"),
assume_role_policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%221%22%2C%22Effect%22%3A%22Allow%22%2C%22Principal%22%3A%7B%22Service%22%3A%22elastictranscoder.amazonaws.com%22%7D%2C%22Action%22%3A%22sts%3AAssumeRole%22%7D%5D%7D",
},
]
end
let(:hoge_role_policy) do
{
role_name: "hoge_role",
policy_name: "hoge_role_policy",
policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%221%22%2C%22Effect%22%3A%22Allow%22%2C%22Action%22%3A%5B%22s3%3AListBucket%22%2C%22s3%3APut%2A%22%2C%22s3%3AGet%2A%22%2C%22s3%3A%2AMultipartUpload%2A%22%5D%2C%22Resource%22%3A%22%2A%22%7D%5D%7D",
}
end
let(:fuga_role_policy) do
{
role_name: "fuga_role",
policy_name: "fuga_role_policy",
policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%222%22%2C%22Effect%22%3A%22Allow%22%2C%22Action%22%3A%22sns%3APublish%22%2C%22Resource%22%3A%22%2A%22%7D%5D%7D",
}
end
before do
client.stub_responses(:list_roles, roles: roles)
client.stub_responses(:list_role_policies, [{ policy_names: %w(hoge_role_policy) }, { policy_names: %w(fuga_role_policy) }])
client.stub_responses(:get_role_policy, [hoge_role_policy, fuga_role_policy])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_role_policy" "hoge_role_hoge_role_policy" {
name = "hoge_role_policy"
role = "hoge_role"
policy = <<POLICY
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "1",
"Effect": "Allow",
"Action": [
"s3:ListBucket",
"s3:Put*",
"s3:Get*",
"s3:*MultipartUpload*"
],
"Resource": "*"
}
]
}
POLICY
}
resource "aws_iam_role_policy" "fuga_role_fuga_role_policy" {
name = "fuga_role_policy"
role = "fuga_role"
policy = <<POLICY
{
"Version": "2008-10-17",
"Statement": [
{
"Sid": "2",
"Effect": "Allow",
"Action": "sns:Publish",
"Resource": "*"
}
]
}
POLICY
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_role_policy.hoge_role_hoge_role_policy" => {
"type" => "aws_iam_role_policy",
"primary" => {
"id" => "hoge_role:hoge_role_policy",
"attributes" => {
"id" => "hoge_role:hoge_role_policy",
"name" => "hoge_role_policy",
"policy" => "{\n \"Version\": \"2008-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"1\",\n \"Effect\": \"Allow\",\n \"Action\": [\n \"s3:ListBucket\",\n \"s3:Put*\",\n \"s3:Get*\",\n \"s3:*MultipartUpload*\"\n ],\n \"Resource\": \"*\"\n }\n ]\n}\n",
"role" => "hoge_role",
}
}
},
"aws_iam_role_policy.fuga_role_fuga_role_policy" => {
"type" => "aws_iam_role_policy",
"primary" => {
"id" => "fuga_role:fuga_role_policy",
"attributes" => {
"id" => "fuga_role:fuga_role_policy",
"name" => "fuga_role_policy",
"policy" => "{\n \"Version\": \"2008-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"2\",\n \"Effect\": \"Allow\",\n \"Action\": \"sns:Publish\",\n \"Resource\": \"*\"\n }\n ]\n}\n",
"role" => "fuga_role",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/internet_gateway_spec.rb | spec/lib/terraforming/resource/internet_gateway_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe InternetGateway do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:internet_gateways) do
[
{
internet_gateway_id: "igw-1234abcd",
attachments: [
vpc_id: "vpc-1234abcd",
state: "available"
],
tags: [],
},
{
internet_gateway_id: "igw-5678efgh",
attachments: [
vpc_id: "vpc-5678efgh",
state: "available"
],
tags: [
{
key: "Name",
value: "test"
}
]
}
]
end
before do
client.stub_responses(:describe_internet_gateways, internet_gateways: internet_gateways)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_internet_gateway" "igw-1234abcd" {
vpc_id = "vpc-1234abcd"
tags {
}
}
resource "aws_internet_gateway" "test" {
vpc_id = "vpc-5678efgh"
tags {
"Name" = "test"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_internet_gateway.igw-1234abcd" => {
"type" => "aws_internet_gateway",
"primary" => {
"id" => "igw-1234abcd",
"attributes" => {
"id" => "igw-1234abcd",
"vpc_id" => "vpc-1234abcd",
"tags.#" => "0",
}
}
},
"aws_internet_gateway.test" => {
"type" => "aws_internet_gateway",
"primary" => {
"id" => "igw-5678efgh",
"attributes" => {
"id" => "igw-5678efgh",
"vpc_id" => "vpc-5678efgh",
"tags.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/subnet_spec.rb | spec/lib/terraforming/resource/subnet_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe Subnet do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:subnets) do
[
{
subnet_id: "subnet-1234abcd",
state: "available",
vpc_id: "vpc-1234abcd",
cidr_block: "10.0.8.0/21",
available_ip_address_count: 1000,
availability_zone: "ap-northeast-1c",
default_for_az: false,
map_public_ip_on_launch: false,
tags: [
{ key: "Name", value: "hoge" }
]
},
{
subnet_id: "subnet-5678efgh",
state: "available",
vpc_id: "vpc-5678efgh",
cidr_block: "10.0.8.0/21",
available_ip_address_count: 2000,
availability_zone: "ap-northeast-1c",
default_for_az: false,
map_public_ip_on_launch: false,
tags: [
{ key: "Name", value: "fuga" }
]
}
]
end
before do
client.stub_responses(:describe_subnets, subnets: subnets)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_subnet" "subnet-1234abcd-hoge" {
vpc_id = "vpc-1234abcd"
cidr_block = "10.0.8.0/21"
availability_zone = "ap-northeast-1c"
map_public_ip_on_launch = false
tags {
"Name" = "hoge"
}
}
resource "aws_subnet" "subnet-5678efgh-fuga" {
vpc_id = "vpc-5678efgh"
cidr_block = "10.0.8.0/21"
availability_zone = "ap-northeast-1c"
map_public_ip_on_launch = false
tags {
"Name" = "fuga"
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_subnet.subnet-1234abcd-hoge" => {
"type" => "aws_subnet",
"primary" => {
"id" => "subnet-1234abcd",
"attributes" => {
"availability_zone" => "ap-northeast-1c",
"cidr_block" => "10.0.8.0/21",
"id" => "subnet-1234abcd",
"map_public_ip_on_launch" => "false",
"tags.#" => "1",
"vpc_id" => "vpc-1234abcd"
}
}
},
"aws_subnet.subnet-5678efgh-fuga" => {
"type" => "aws_subnet",
"primary" => {
"id" => "subnet-5678efgh",
"attributes" => {
"availability_zone" => "ap-northeast-1c",
"cidr_block" => "10.0.8.0/21",
"id" => "subnet-5678efgh",
"map_public_ip_on_launch" => "false",
"tags.#" => "1",
"vpc_id" => "vpc-5678efgh"
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/kms_alias_spec.rb | spec/lib/terraforming/resource/kms_alias_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe KMSAlias do
let(:client) do
Aws::KMS::Client.new(stub_responses: true)
end
let(:aliases) do
[
{
alias_name: "alias/aws/acm",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/aws/acm",
target_key_id: "12ab34cd-56ef-12ab-34cd-12ab34cd56ef"
},
{
alias_name: "alias/hoge",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/hoge",
target_key_id: "1234abcd-12ab-34cd-56ef-1234567890ab"
},
{
alias_name: "alias/fuga",
alias_arn: "arn:aws:kms:ap-northeast-1:123456789012:alias/fuga",
target_key_id: "abcd1234-ab12-cd34-ef56-abcdef123456"
},
]
end
before do
client.stub_responses(:list_aliases, aliases: aliases)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_kms_alias" "hoge" {
name = "alias/hoge"
target_key_id = "1234abcd-12ab-34cd-56ef-1234567890ab"
}
resource "aws_kms_alias" "fuga" {
name = "alias/fuga"
target_key_id = "abcd1234-ab12-cd34-ef56-abcdef123456"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_kms_alias.hoge" => {
"type" => "aws_kms_alias",
"primary" => {
"id" => "alias/hoge",
"attributes" => {
"arn" => "arn:aws:kms:ap-northeast-1:123456789012:alias/hoge",
"id" => "alias/hoge",
"name" => "alias/hoge",
"target_key_id" => "1234abcd-12ab-34cd-56ef-1234567890ab",
}
}
},
"aws_kms_alias.fuga" => {
"type" => "aws_kms_alias",
"primary" => {
"id" => "alias/fuga",
"attributes" => {
"arn" => "arn:aws:kms:ap-northeast-1:123456789012:alias/fuga",
"id" => "alias/fuga",
"name" => "alias/fuga",
"target_key_id" => "abcd1234-ab12-cd34-ef56-abcdef123456",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/route_table_association_spec.rb | spec/lib/terraforming/resource/route_table_association_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe RouteTableAssociation do
let(:client) do
Aws::EC2::Client.new(stub_responses: true)
end
let(:route_tables) do
[
{
route_table_id: 'rtb-a12bcd34',
vpc_id: 'vpc-ab123cde',
routes: [
{
destination_cidr_block: '10.0.0.0/16',
destination_prefix_list_id: nil,
gateway_id: 'local',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '0.0.0.0/0',
destination_prefix_list_id: nil,
gateway_id: 'igw-1ab2345c',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '192.168.1.0/24',
destination_prefix_list_id: nil,
gateway_id: nil,
instance_id: 'i-ec12345a',
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
},
{
destination_cidr_block: '192.168.2.0/24',
destination_prefix_list_id: nil,
gateway_id: nil,
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: 'pcx-c56789de',
state: 'active'
}
],
associations: [
{
route_table_association_id: 'rtbassoc-b123456cd',
route_table_id: 'rtb-a12bcd34',
subnet_id: 'subnet-1234a567',
main: false
},
{
route_table_association_id: 'rtbassoc-e789012fg',
route_table_id: 'rtb-a12bcd34',
subnet_id: 'subnet-8901b123',
main: false
},
{
route_table_association_id: 'rtbassoc-e71201aaa',
route_table_id: 'rtb-a12bcd34',
subnet_id: nil,
main: true
}
],
tags: [
{
key: 'Name',
value: 'my-route-table'
}
]
},
{
route_table_id: 'rtb-efgh5678',
vpc_id: 'vpc-ab123cde',
routes: [
{
destination_cidr_block: '0.0.0.0/0',
destination_prefix_list_id: nil,
gateway_id: 'vgw-2345cdef',
instance_id: nil,
instance_owner_id: nil,
network_interface_id: nil,
vpc_peering_connection_id: nil,
state: 'active'
}
],
associations: [
],
tags: [
{
key: 'Name',
value: 'my-route-table-2'
}
]
}
]
end
before do
client.stub_responses(:describe_route_tables, route_tables: route_tables)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_route_table_association" "my-route-table-rtbassoc-b123456cd" {
route_table_id = "rtb-a12bcd34"
subnet_id = "subnet-1234a567"
}
resource "aws_route_table_association" "my-route-table-rtbassoc-e789012fg" {
route_table_id = "rtb-a12bcd34"
subnet_id = "subnet-8901b123"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_route_table_association.my-route-table-rtbassoc-b123456cd" => {
"type" => "aws_route_table_association",
"primary" => {
"id" => "rtbassoc-b123456cd",
"attributes" => {
"id" => "rtbassoc-b123456cd",
"route_table_id" => "rtb-a12bcd34",
"subnet_id" => "subnet-1234a567"
}
}
},
"aws_route_table_association.my-route-table-rtbassoc-e789012fg" => {
"type" => "aws_route_table_association",
"primary" => {
"id" => "rtbassoc-e789012fg",
"attributes" => {
"id" => "rtbassoc-e789012fg",
"route_table_id" => "rtb-a12bcd34",
"subnet_id" => "subnet-8901b123"
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/alb_spec.rb | spec/lib/terraforming/resource/alb_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe ALB do
let(:client) do
Aws::ElasticLoadBalancingV2::Client.new(stub_responses: true)
end
let(:load_balancers) do
[
{
load_balancer_arn: "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/hoge/1234abcd1234abcd",
dns_name: "hoge-123456789.ap-northeast-1.elb.amazonaws.com",
canonical_hosted_zone_id: "12345678ABCDEF",
created_time: Time.parse("2016-08-19 00:39:01 UTC"),
load_balancer_name: "hoge",
scheme: "internet-facing",
vpc_id: "vpc-1234abcd",
state: { code: "active" },
type: "application",
availability_zones: [
{ zone_name: "ap-northeast-1c", subnet_id: "subnet-1234abcd" },
{ zone_name: "ap-northeast-1b", subnet_id: "subnet-5678efgh" }
],
security_groups: ["sg-1234abcd", "sg-5678efgh"]
},
{
load_balancer_arn: "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/fuga/5678efgh5678efgh",
dns_name: "fuga-567891234.ap-northeast-1.elb.amazonaws.com",
canonical_hosted_zone_id: "12345678ABCDEF",
created_time: Time.parse("2016-08-31 06:23:57 UTC"),
load_balancer_name: "fuga",
scheme: "internal",
vpc_id: "vpc-5678efgh",
state: { code: "active" },
type: "application",
availability_zones: [
{ zone_name: "ap-northeast-1c", subnet_id: "subnet-1234abcd" },
{ zone_name: "ap-northeast-1b", subnet_id: "subnet-9012ijkl" }
],
security_groups: ["sg-1234abcd"]
},
]
end
let(:hoge_attributes) do
[
{ key: "access_logs.s3.enabled", value: "true" },
{ key: "idle_timeout.timeout_seconds", value: "600" },
{ key: "access_logs.s3.prefix", value: "hoge" },
{ key: "deletion_protection.enabled", value: "false" },
{ key: "access_logs.s3.bucket", value: "my-elb-logs" },
]
end
let(:fuga_attributes) do
[
{ key: "access_logs.s3.enabled", value: "false" },
{ key: "idle_timeout.timeout_seconds", value: "60" },
{ key: "access_logs.s3.prefix", value: "fuga" },
{ key: "deletion_protection.enabled", value: "true" },
{ key: "access_logs.s3.bucket", value: "my-elb-logs" },
]
end
let(:hoge_tag_descriptions) do
[
{
resource_arn: "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/hoge/1234abcd1234abcd",
tags: [
{ key: "Environment", value: "Production" }
]
}
]
end
let(:fuga_tag_descriptions) do
[
{
resource_arn: "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/fuga/5678efgh5678efgh",
tags: []
}
]
end
before do
client.stub_responses(:describe_load_balancers, load_balancers: load_balancers)
client.stub_responses(:describe_load_balancer_attributes, [
{ attributes: hoge_attributes },
{ attributes: fuga_attributes },
])
client.stub_responses(:describe_tags, [
{ tag_descriptions: hoge_tag_descriptions },
{ tag_descriptions: fuga_tag_descriptions },
])
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_alb" "hoge" {
idle_timeout = 600
internal = false
name = "hoge"
security_groups = ["sg-1234abcd", "sg-5678efgh"]
subnets = ["subnet-1234abcd", "subnet-5678efgh"]
enable_deletion_protection = false
access_logs {
bucket = "my-elb-logs"
enabled = true
prefix = "hoge"
}
tags {
"Environment" = "Production"
}
}
resource "aws_alb" "fuga" {
idle_timeout = 60
internal = true
name = "fuga"
security_groups = ["sg-1234abcd"]
subnets = ["subnet-1234abcd", "subnet-9012ijkl"]
enable_deletion_protection = true
tags {
}
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_alb.hoge" => {
"type" => "aws_alb",
"primary" => {
"id" => "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/hoge/1234abcd1234abcd",
"attributes" => {
"access_logs.#" => "1",
"access_logs.0.bucket" => "my-elb-logs",
"access_logs.0.prefix" => "hoge",
"access_logs.0.enabled" => "true",
"dns_name" => "hoge-123456789.ap-northeast-1.elb.amazonaws.com",
"enable_deletion_protection" => "false",
"id" => "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/hoge/1234abcd1234abcd",
"idle_timeout" => "600",
"internal" => "false",
"name" => "hoge",
"security_groups.#" => "2",
"subnets.#" => "2",
"tags.%" => "1",
"tags.Environment" => "Production",
"zone_id" => "12345678ABCDEF",
}
}
},
"aws_alb.fuga" => {
"type" => "aws_alb",
"primary" => {
"id" => "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/fuga/5678efgh5678efgh",
"attributes" => {
"access_logs.#" => "1",
"access_logs.0.bucket" => "my-elb-logs",
"access_logs.0.prefix" => "fuga",
"access_logs.0.enabled" => "false",
"dns_name" => "fuga-567891234.ap-northeast-1.elb.amazonaws.com",
"enable_deletion_protection" => "true",
"id" => "arn:aws:elasticloadbalancing:ap-northeast-1:012345678901:loadbalancer/app/fuga/5678efgh5678efgh",
"idle_timeout" => "60",
"internal" => "true",
"name" => "fuga",
"security_groups.#" => "1",
"subnets.#" => "2",
"tags.%" => "0",
"zone_id" => "12345678ABCDEF",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/iam_instance_profile_spec.rb | spec/lib/terraforming/resource/iam_instance_profile_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe IAMInstanceProfile do
let(:client) do
Aws::IAM::Client.new(stub_responses: true)
end
let(:instance_profiles) do
[
{
path: "/",
instance_profile_name: "hoge_profile",
instance_profile_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:instance-profile/hoge_profile",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
roles: [
{
path: "/",
role_name: "hoge_role",
role_id: "ABCDEFGHIJKLMN1234567",
arn: "arn:aws:iam::123456789012:role/hoge_role",
create_date: Time.parse("2015-04-01 12:34:56 UTC"),
assume_role_policy_document: "%7B%22Version%22%3A%222008-10-17%22%2C%22Statement%22%3A%5B%7B%22Sid%22%3A%22%22%2C%22Effect%22%3A%22Allow%22%2C%22Principal%22%3A%7B%22Service%22%3A%22ec2.amazonaws.com%22%7D%2C%22Action%22%3A%22sts%3AAssumeRole%22%7D%5D%7D",
},
],
}
]
end
before do
client.stub_responses(:list_instance_profiles, instance_profiles: instance_profiles)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_iam_instance_profile" "hoge_profile" {
name = "hoge_profile"
path = "/"
role = "hoge_role"
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_iam_instance_profile.hoge_profile" => {
"type" => "aws_iam_instance_profile",
"primary" => {
"id" => "hoge_profile",
"attributes" => {
"arn" => "arn:aws:iam::123456789012:instance-profile/hoge_profile",
"id" => "hoge_profile",
"name" => "hoge_profile",
"path" => "/",
"role" => "hoge_role",
"roles.#" => "1",
}
}
}
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
dtan4/terraforming | https://github.com/dtan4/terraforming/blob/ab6486872952b61c4dca24d51a8a11f189f91573/spec/lib/terraforming/resource/elasti_cache_subnet_group_spec.rb | spec/lib/terraforming/resource/elasti_cache_subnet_group_spec.rb | require "spec_helper"
module Terraforming
module Resource
describe ElastiCacheSubnetGroup do
let(:client) do
Aws::ElastiCache::Client.new(stub_responses: true)
end
let(:cache_subnet_groups) do
[
{
cache_subnet_group_name: "hoge",
cache_subnet_group_description: "Group for hoge",
vpc_id: "vpc-1234abcd",
subnets: [
{
subnet_identifier: "subnet-1234abcd",
subnet_availability_zone: { name: "ap-northeast-1b" }
}
]
},
{
cache_subnet_group_name: "fuga",
cache_subnet_group_description: "Group for fuga",
vpc_id: "vpc-5678efgh",
subnets: [
{
subnet_identifier: "subnet-5678efgh",
subnet_availability_zone: { name: "ap-northeast-1b" }
}
]
}
]
end
before do
client.stub_responses(:describe_cache_subnet_groups, cache_subnet_groups: cache_subnet_groups)
end
describe ".tf" do
it "should generate tf" do
expect(described_class.tf(client: client)).to eq <<-EOS
resource "aws_elasticache_subnet_group" "hoge" {
name = "hoge"
description = "Group for hoge"
subnet_ids = ["subnet-1234abcd"]
}
resource "aws_elasticache_subnet_group" "fuga" {
name = "fuga"
description = "Group for fuga"
subnet_ids = ["subnet-5678efgh"]
}
EOS
end
end
describe ".tfstate" do
it "should generate tfstate" do
expect(described_class.tfstate(client: client)).to eq({
"aws_elasticache_subnet_group.hoge" => {
"type" => "aws_elasticache_subnet_group",
"primary" => {
"id" => "hoge",
"attributes" => {
"description" => "Group for hoge",
"name" => "hoge",
"subnet_ids.#" => "1",
}
}
},
"aws_elasticache_subnet_group.fuga" => {
"type" => "aws_elasticache_subnet_group",
"primary" => {
"id" => "fuga",
"attributes" => {
"description" => "Group for fuga",
"name" => "fuga",
"subnet_ids.#" => "1",
}
}
},
})
end
end
end
end
end
| ruby | MIT | ab6486872952b61c4dca24d51a8a11f189f91573 | 2026-01-04T15:46:47.062437Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.