CombinedText stringlengths 4 3.42M |
|---|
require "spec_helper"
describe ::Protobuf::Nats::Server do
class SomeRandom < ::Protobuf::Message; end
class SomeRandomService < ::Protobuf::Rpc::Service
rpc :implemented, SomeRandom, SomeRandom
rpc :not_implemented, SomeRandom, SomeRandom
def implemented; end
end
let(:logger) { ::Logger.new(nil) }
let(:client) { ::FakeNatsClient.new }
let(:options) {
{
:threads => 2,
:client => client,
:server => 'derpaderp'
}
}
subject { described_class.new(options) }
before do
allow(::Protobuf::Logging).to receive(:logger).and_return(logger)
allow(subject).to receive(:service_klasses).and_return([SomeRandomService])
end
describe "#detect_and_handle_a_pause" do
it "unsubscribes when the server is paused" do
allow(subject).to receive(:paused?).and_return(true)
expect(subject).to receive(:unsubscribe)
subject.detect_and_handle_a_pause
end
it "subscribes and restarts slow start when the pause file is removed" do
subject.instance_variable_set(:@processing_requests, false)
expect(subject).to receive(:subscribe)
subject.detect_and_handle_a_pause
end
it "never calls unsubscribe more than once per pause" do
allow(subject).to receive(:paused?).and_return(true)
expect(subject).to receive(:unsubscribe).once
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
end
it "never calls subscribe more than once per pause" do
subject.instance_variable_set(:@processing_requests, false)
expect(subject).to receive(:subscribe).once
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
end
end
describe "#max_queue_size" do
it "can be set via options hash" do
expect(subject.max_queue_size).to eq(2)
end
it "can be set via PB_NATS_SERVER_MAX_QUEUE_SIZE environment variable" do
::ENV["PB_NATS_SERVER_MAX_QUEUE_SIZE"] = "10"
expect(subject.max_queue_size).to eq(10)
::ENV.delete("PB_NATS_SERVER_MAX_QUEUE_SIZE")
end
end
describe "pause_file_path" do
it "is nil by default" do
expect(subject.pause_file_path).to eq(nil)
end
it "can be set via PB_NATS_SERVER_PAUSE_FILE_PATH environment variable" do
::ENV["PB_NATS_SERVER_PAUSE_FILE_PATH"] = "/tmp/rpc-paused-bro"
expect(subject.pause_file_path).to eq("/tmp/rpc-paused-bro")
::ENV.delete("PB_NATS_SERVER_PAUSE_FILE_PATH")
end
end
describe "#paused?" do
let(:test_file) { "#{::SecureRandom.uuid}-testing-123" }
# Ensure the test file is always cleaned up.
after { ::File.delete(test_file) if ::File.exist?(test_file) }
it "pauses when a pause file is set" do
::ENV["PB_NATS_SERVER_PAUSE_FILE_PATH"] = test_file
expect(subject).to_not be_paused
::File.write(test_file, "")
expect(subject).to be_paused
::ENV.delete("PB_NATS_SERVER_PAUSE_FILE_PATH")
end
end
describe "#slow_start_delay" do
it "has a default" do
expect(subject.slow_start_delay).to eq(10)
end
it "can be set via PB_NATS_SERVER_SLOW_START_DELAY environment variable" do
::ENV["PB_NATS_SERVER_SLOW_START_DELAY"] = "20"
expect(subject.slow_start_delay).to eq(20)
::ENV.delete("PB_NATS_SERVER_SLOW_START_DELAY")
end
end
describe "#subscriptions_per_rpc_endpoint" do
it "has a default" do
expect(subject.subscriptions_per_rpc_endpoint).to eq(10)
end
it "can be set via PB_NATS_SERVER_SUBSCRIPTIONS_PER_RPC_ENDPOINT environment variable" do
::ENV["PB_NATS_SERVER_SUBSCRIPTIONS_PER_RPC_ENDPOINT"] = "20"
expect(subject.subscriptions_per_rpc_endpoint).to eq(20)
::ENV.delete("PB_NATS_SERVER_SUBSCRIPTIONS_PER_RPC_ENDPOINT")
end
end
describe "#subscribe_to_services_once" do
it "subscribes to services that inherit from protobuf rpc service" do
subject.subscribe_to_services_once
expect(client.subscriptions.keys).to eq(["rpc.some_random_service.implemented"])
end
end
describe "#enqueue_request" do
it "returns false when the thread pool and thread pool queue is full and publish NACK" do
# Fill the thread pool.
2.times { subject.thread_pool.push { sleep 1 } }
# Fill the thread pool queue.
2.times { subject.thread_pool.push { sleep 1 } }
expect(subject.nats).to receive(:publish).with("inbox_123", ::Protobuf::Nats::Messages::NACK)
expect(subject.enqueue_request("", "inbox_123")).to eq(false)
end
it "sends an ACK if the thread pool enqueued the task" do
# Fill the thread pool.
2.times { subject.thread_pool.push { sleep 1 } }
expect(subject.nats).to receive(:publish).with("inbox_123", ::Protobuf::Nats::Messages::ACK)
# Wait for promise to finish executing.
expect(subject.enqueue_request("", "inbox_123")).to eq(true)
subject.thread_pool.kill
end
it "logs any error that is raised within the request block" do
request_data = "yolo"
expect(subject).to receive(:handle_request).with(request_data, 'server' => 'derpaderp').and_raise(::RuntimeError, "mah error")
expect(logger).to receive(:error).once.ordered.with("mah error")
expect(logger).to receive(:error).once.ordered.with("RuntimeError")
expect(logger).to receive(:error).once.ordered
# Wait for promise to finish executing.
expect(subject.enqueue_request(request_data, "inbox_123")).to eq(true)
sleep 0.1 until subject.thread_pool.size.zero?
end
it "returns an ACK and a response" do
response = "some response data"
inbox = "inbox_123"
expect(subject).to receive(:handle_request).and_return(response)
expect(client).to receive(:publish).once.ordered.with(inbox, ::Protobuf::Nats::Messages::ACK)
expect(client).to receive(:publish).once.ordered.with(inbox, response)
# Wait for promise to finish executing.
expect(subject.enqueue_request("", inbox)).to eq(true)
sleep 0.1 until subject.thread_pool.size.zero?
end
end
describe "instrumentation" do
it "instruments the thread pool execution delay" do
expect(subject).to receive(:handle_request).and_return("response")
execution_delay = nil
subscription = ::ActiveSupport::Notifications.subscribe "server.thread_pool_execution_delay.protobuf-nats" do |_, _, _, _, delay|
execution_delay = delay
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(execution_delay).to be > 0
::ActiveSupport::Notifications.unsubscribe(subscription)
end
it "instrument a request duration" do
expect(subject).to receive(:handle_request) do
sleep 0.05
"response"
end
request_duration = nil
subscription = ::ActiveSupport::Notifications.subscribe "server.request_duration.protobuf-nats" do |_, _, _, _, duration|
request_duration = duration
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(request_duration).to be > 0.05
::ActiveSupport::Notifications.unsubscribe(subscription)
end
it "instruments when a message received" do
allow(subject.thread_pool).to receive(:push)
message_was_received = false
subscription = ::ActiveSupport::Notifications.subscribe "server.message_received.protobuf-nats" do
message_was_received = true
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(message_was_received).to eq(true)
::ActiveSupport::Notifications.unsubscribe(subscription)
end
it "instruments when a message dropped" do
allow(subject.thread_pool).to receive(:push).and_return(false)
message_was_dropped = false
subscription = ::ActiveSupport::Notifications.subscribe "server.message_dropped.protobuf-nats" do
message_was_dropped = true
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(message_was_dropped).to eq(true)
::ActiveSupport::Notifications.unsubscribe(subscription)
end
end
end
Fix up instrumenation tests to not flap as much
require "spec_helper"
describe ::Protobuf::Nats::Server do
class SomeRandom < ::Protobuf::Message; end
class SomeRandomService < ::Protobuf::Rpc::Service
rpc :implemented, SomeRandom, SomeRandom
rpc :not_implemented, SomeRandom, SomeRandom
def implemented; end
end
let(:logger) { ::Logger.new(nil) }
let(:client) { ::FakeNatsClient.new }
let(:options) {
{
:threads => 2,
:client => client,
:server => 'derpaderp'
}
}
subject { described_class.new(options) }
before do
allow(::Protobuf::Logging).to receive(:logger).and_return(logger)
allow(subject).to receive(:service_klasses).and_return([SomeRandomService])
end
describe "#detect_and_handle_a_pause" do
it "unsubscribes when the server is paused" do
allow(subject).to receive(:paused?).and_return(true)
expect(subject).to receive(:unsubscribe)
subject.detect_and_handle_a_pause
end
it "subscribes and restarts slow start when the pause file is removed" do
subject.instance_variable_set(:@processing_requests, false)
expect(subject).to receive(:subscribe)
subject.detect_and_handle_a_pause
end
it "never calls unsubscribe more than once per pause" do
allow(subject).to receive(:paused?).and_return(true)
expect(subject).to receive(:unsubscribe).once
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
end
it "never calls subscribe more than once per pause" do
subject.instance_variable_set(:@processing_requests, false)
expect(subject).to receive(:subscribe).once
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
subject.detect_and_handle_a_pause
end
end
describe "#max_queue_size" do
it "can be set via options hash" do
expect(subject.max_queue_size).to eq(2)
end
it "can be set via PB_NATS_SERVER_MAX_QUEUE_SIZE environment variable" do
::ENV["PB_NATS_SERVER_MAX_QUEUE_SIZE"] = "10"
expect(subject.max_queue_size).to eq(10)
::ENV.delete("PB_NATS_SERVER_MAX_QUEUE_SIZE")
end
end
describe "pause_file_path" do
it "is nil by default" do
expect(subject.pause_file_path).to eq(nil)
end
it "can be set via PB_NATS_SERVER_PAUSE_FILE_PATH environment variable" do
::ENV["PB_NATS_SERVER_PAUSE_FILE_PATH"] = "/tmp/rpc-paused-bro"
expect(subject.pause_file_path).to eq("/tmp/rpc-paused-bro")
::ENV.delete("PB_NATS_SERVER_PAUSE_FILE_PATH")
end
end
describe "#paused?" do
let(:test_file) { "#{::SecureRandom.uuid}-testing-123" }
# Ensure the test file is always cleaned up.
after { ::File.delete(test_file) if ::File.exist?(test_file) }
it "pauses when a pause file is set" do
::ENV["PB_NATS_SERVER_PAUSE_FILE_PATH"] = test_file
expect(subject).to_not be_paused
::File.write(test_file, "")
expect(subject).to be_paused
::ENV.delete("PB_NATS_SERVER_PAUSE_FILE_PATH")
end
end
describe "#slow_start_delay" do
it "has a default" do
expect(subject.slow_start_delay).to eq(10)
end
it "can be set via PB_NATS_SERVER_SLOW_START_DELAY environment variable" do
::ENV["PB_NATS_SERVER_SLOW_START_DELAY"] = "20"
expect(subject.slow_start_delay).to eq(20)
::ENV.delete("PB_NATS_SERVER_SLOW_START_DELAY")
end
end
describe "#subscriptions_per_rpc_endpoint" do
it "has a default" do
expect(subject.subscriptions_per_rpc_endpoint).to eq(10)
end
it "can be set via PB_NATS_SERVER_SUBSCRIPTIONS_PER_RPC_ENDPOINT environment variable" do
::ENV["PB_NATS_SERVER_SUBSCRIPTIONS_PER_RPC_ENDPOINT"] = "20"
expect(subject.subscriptions_per_rpc_endpoint).to eq(20)
::ENV.delete("PB_NATS_SERVER_SUBSCRIPTIONS_PER_RPC_ENDPOINT")
end
end
describe "#subscribe_to_services_once" do
it "subscribes to services that inherit from protobuf rpc service" do
subject.subscribe_to_services_once
expect(client.subscriptions.keys).to eq(["rpc.some_random_service.implemented"])
end
end
describe "#enqueue_request" do
it "returns false when the thread pool and thread pool queue is full and publish NACK" do
# Fill the thread pool.
2.times { subject.thread_pool.push { sleep 1 } }
# Fill the thread pool queue.
2.times { subject.thread_pool.push { sleep 1 } }
expect(subject.nats).to receive(:publish).with("inbox_123", ::Protobuf::Nats::Messages::NACK)
expect(subject.enqueue_request("", "inbox_123")).to eq(false)
end
it "sends an ACK if the thread pool enqueued the task" do
# Fill the thread pool.
2.times { subject.thread_pool.push { sleep 1 } }
expect(subject.nats).to receive(:publish).with("inbox_123", ::Protobuf::Nats::Messages::ACK)
# Wait for promise to finish executing.
expect(subject.enqueue_request("", "inbox_123")).to eq(true)
subject.thread_pool.kill
end
it "logs any error that is raised within the request block" do
request_data = "yolo"
expect(subject).to receive(:handle_request).with(request_data, 'server' => 'derpaderp').and_raise(::RuntimeError, "mah error")
expect(logger).to receive(:error).once.ordered.with("mah error")
expect(logger).to receive(:error).once.ordered.with("RuntimeError")
expect(logger).to receive(:error).once.ordered
# Wait for promise to finish executing.
expect(subject.enqueue_request(request_data, "inbox_123")).to eq(true)
sleep 0.1 until subject.thread_pool.size.zero?
end
it "returns an ACK and a response" do
response = "some response data"
inbox = "inbox_123"
expect(subject).to receive(:handle_request).and_return(response)
expect(client).to receive(:publish).once.ordered.with(inbox, ::Protobuf::Nats::Messages::ACK)
expect(client).to receive(:publish).once.ordered.with(inbox, response)
# Wait for promise to finish executing.
expect(subject.enqueue_request("", inbox)).to eq(true)
sleep 0.1 until subject.thread_pool.size.zero?
end
end
describe "instrumentation" do
it "instruments the thread pool execution delay" do
expect(subject).to receive(:handle_request).and_return("response")
execution_delay = nil
subscription = ::ActiveSupport::Notifications.subscribe "server.thread_pool_execution_delay.protobuf-nats" do |_, _, _, _, delay|
execution_delay = delay
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(execution_delay).to_not eq(nil)
::ActiveSupport::Notifications.unsubscribe(subscription)
end
it "instrument a request duration" do
expect(subject).to receive(:handle_request) do
sleep 0.05
"response"
end
request_duration = nil
subscription = ::ActiveSupport::Notifications.subscribe "server.request_duration.protobuf-nats" do |_, _, _, _, duration|
request_duration = duration
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(request_duration).to be >= 0.05
::ActiveSupport::Notifications.unsubscribe(subscription)
end
it "instruments when a message received" do
allow(subject.thread_pool).to receive(:push)
message_was_received = false
subscription = ::ActiveSupport::Notifications.subscribe "server.message_received.protobuf-nats" do
message_was_received = true
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(message_was_received).to eq(true)
::ActiveSupport::Notifications.unsubscribe(subscription)
end
it "instruments when a message dropped" do
allow(subject.thread_pool).to receive(:push).and_return(false)
message_was_dropped = false
subscription = ::ActiveSupport::Notifications.subscribe "server.message_dropped.protobuf-nats" do
message_was_dropped = true
end
subject.enqueue_request("", "YOLO123")
sleep 0.1 until subject.thread_pool.size.zero?
expect(message_was_dropped).to eq(true)
::ActiveSupport::Notifications.unsubscribe(subscription)
end
end
end
|
require File.expand_path('../../../spec_helper', __FILE__)
describe "NilClass#frozen?" do
it "returns true" do
nil.frozen?.should == true
end
end
remove accidental commited nil#frozen spec
|
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/methods', __FILE__)
describe "Time._load" do
ruby_bug("http://redmine.ruby-lang.org/issues/show/627", "1.8.7") do
it "loads a time object in the new format" do
t = Time.local(2000, 1, 15, 20, 1, 1)
t = t.gmtime
high = 1 << 31 |
(t.gmt? ? 1 : 0) << 30 |
(t.year - 1900) << 14 |
(t.mon - 1) << 10 |
t.mday << 5 |
t.hour
low = t.min << 26 |
t.sec << 20 |
t.usec
Time.send(:_load, [high, low].pack("VV")).should == t
end
end
it "loads a time object in the old UNIX timestamp based format" do
t = Time.local(2000, 1, 15, 20, 1, 1, 203)
timestamp = t.to_i
high = timestamp & ((1 << 31) - 1)
low = t.usec
Time.send(:_load, [high, low].pack("VV")).should == t
end
ruby_version_is ''...'1.9' do
it "loads MRI's marshaled time format" do
t = Marshal.load("\004\bu:\tTime\r\320\246\e\200\320\001\r\347")
t.utc
t.to_s.should == "Fri Oct 22 16:57:48 UTC 2010"
end
end
ruby_version_is '1.9' do
it "loads MRI's marshaled time format" do
t = Marshal.load("\004\bu:\tTime\r\320\246\e\200\320\001\r\347")
t.utc
t.to_s.should == "2010-10-22 16:57:48 UTC"
end
end
with_feature :encoding do
it "treats the data as binary data" do
data = "\x04\bu:\tTime\r\fM\x1C\xC0\x00\x00\xD0\xBE"
data.force_encoding Encoding::UTF_8
t = Marshal.load(data)
t.to_s.should == "2013-04-08 12:47:45 UTC"
end
end
end
Add specs for Time._load
Currently, have_public_method matcher is not in mspec.
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/methods', __FILE__)
describe "Time._load" do
ruby_version_is ""..."2.0" do
it "is a public method" do
Time.public_methods(false).should include(stasy(:_load))
end
end
ruby_version_is "2.0" do
it "is a private method" do
Time.should have_private_method(:_load, false)
end
end
ruby_bug("http://redmine.ruby-lang.org/issues/show/627", "1.8.7") do
it "loads a time object in the new format" do
t = Time.local(2000, 1, 15, 20, 1, 1)
t = t.gmtime
high = 1 << 31 |
(t.gmt? ? 1 : 0) << 30 |
(t.year - 1900) << 14 |
(t.mon - 1) << 10 |
t.mday << 5 |
t.hour
low = t.min << 26 |
t.sec << 20 |
t.usec
Time.send(:_load, [high, low].pack("VV")).should == t
end
end
it "loads a time object in the old UNIX timestamp based format" do
t = Time.local(2000, 1, 15, 20, 1, 1, 203)
timestamp = t.to_i
high = timestamp & ((1 << 31) - 1)
low = t.usec
Time.send(:_load, [high, low].pack("VV")).should == t
end
ruby_version_is ''...'1.9' do
it "loads MRI's marshaled time format" do
t = Marshal.load("\004\bu:\tTime\r\320\246\e\200\320\001\r\347")
t.utc
t.to_s.should == "Fri Oct 22 16:57:48 UTC 2010"
end
end
ruby_version_is '1.9' do
it "loads MRI's marshaled time format" do
t = Marshal.load("\004\bu:\tTime\r\320\246\e\200\320\001\r\347")
t.utc
t.to_s.should == "2010-10-22 16:57:48 UTC"
end
end
with_feature :encoding do
it "treats the data as binary data" do
data = "\x04\bu:\tTime\r\fM\x1C\xC0\x00\x00\xD0\xBE"
data.force_encoding Encoding::UTF_8
t = Marshal.load(data)
t.to_s.should == "2013-04-08 12:47:45 UTC"
end
end
end
|
describe "every accessible has(n, :through) association with a valid reject_if proc", :shared => true do
it "should not allow to create a new project via Person#projects_attributes" do
Person.all.size.should == 0
Project.all.size.should == 0
ProjectMembership.all.size.should == 0
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
@person.projects_attributes = { 'new_1' => { :name => 'dm-accepts_nested_attributes' } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
end
end
describe "every accessible has(n, :through) association with no reject_if proc", :shared => true do
it "should allow to create a new project via Person#projects_attributes" do
Person.all.size.should == 0
Project.all.size.should == 0
ProjectMembership.all.size.should == 0
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
@person.projects_attributes = { 'new_1' => { :name => 'dm-accepts_nested_attributes' } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
Project.first.name.should == 'dm-accepts_nested_attributes'
end
it "should allow to update an existing project via Person#projects_attributes" do
Person.all.size.should == 0
Project.all.size.should == 0
ProjectMembership.all.size.should == 0
@person.save
project = Project.create(:name => 'dm-accepts_nested_attributes')
project_membership = ProjectMembership.create(:person => @person, :project => project)
Person.all.size.should == 1
Project.all.size.should == 1
ProjectMembership.all.size.should == 1
@person.reload
@person.projects_attributes = { project.id.to_s => { :id => project.id, :name => 'still dm-accepts_nested_attributes' } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
Project.first.name.should == 'still dm-accepts_nested_attributes'
end
it "should perform atomic commits" do
@person.projects_attributes = { 'new_1' => { :name => nil } } # should fail because of validations
@person.projects.should be_empty
@person.save
@person.projects.should be_empty
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
@person.name = nil # should fail because of validations
@person.projects_attributes = { 'new_1' => { :name => nil } }
@person.projects.should be_empty
@person.save
@person.projects.should be_empty
Person.all.size.should == 0
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
end
end
describe "every accessible has(n, :through) association with :allow_destroy => false", :shared => true do
it "should not allow to delete an existing project via Person#projects_attributes" do
@person.save
project = Project.create(:name => 'dm-accepts_nested_attributes')
project_membership = ProjectMembership.create(:person => @person, :project => project)
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
@person.reload
@person.projects_attributes = { '1' => { :id => project.id, :_delete => true } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
end
end
describe "every accessible has(n, :through) association with :allow_destroy => true", :shared => true do
it "should allow to delete an existing project via Person#projects_attributes" do
@person.save
project = Project.create(:name => 'dm-accepts_nested_attributes')
project_membership = ProjectMembership.create(:person => @person, :project => project)
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
@person.reload
@person.projects_attributes = { '1' => { :id => project.id, :_delete => true } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
end
end
describe "every accessible has(n, :through) association with a nested attributes reader", :shared => true do
it "should return the attributes written to Person#projects_attributes from the Person#projects_attributes reader" do
@person.projects_attributes.should be_nil
@person.projects_attributes = { 'new_1' => { :name => 'write specs' } }
@person.projects_attributes.should == { 'new_1' => { :name => 'write specs' } }
end
end
removed unneeded assertions
* deleting a related resource still not working
* transactions still not working
* other than that, it seems quite fine
describe "every accessible has(n, :through) association with a valid reject_if proc", :shared => true do
it "should not allow to create a new project via Person#projects_attributes" do
Person.all.size.should == 0
Project.all.size.should == 0
ProjectMembership.all.size.should == 0
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
@person.projects_attributes = { 'new_1' => { :name => 'dm-accepts_nested_attributes' } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
end
end
describe "every accessible has(n, :through) association with no reject_if proc", :shared => true do
it "should allow to create a new project via Person#projects_attributes" do
Person.all.size.should == 0
Project.all.size.should == 0
ProjectMembership.all.size.should == 0
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
@person.projects_attributes = { 'new_1' => { :name => 'dm-accepts_nested_attributes' } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
Project.first.name.should == 'dm-accepts_nested_attributes'
end
it "should allow to update an existing project via Person#projects_attributes" do
Person.all.size.should == 0
Project.all.size.should == 0
ProjectMembership.all.size.should == 0
@person.save
project = Project.create(:name => 'dm-accepts_nested_attributes')
project_membership = ProjectMembership.create(:person => @person, :project => project)
Person.all.size.should == 1
Project.all.size.should == 1
ProjectMembership.all.size.should == 1
@person.projects_attributes = { project.id.to_s => { :id => project.id, :name => 'still dm-accepts_nested_attributes' } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
Project.first.name.should == 'still dm-accepts_nested_attributes'
end
it "should perform atomic commits" do
@person.projects_attributes = { 'new_1' => { :name => nil } } # should fail because of validations
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
@person.name = nil # should fail because of validations
@person.projects_attributes = { 'new_1' => { :name => nil } }
@person.save
Person.all.size.should == 0
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
end
end
describe "every accessible has(n, :through) association with :allow_destroy => false", :shared => true do
it "should not allow to delete an existing project via Person#projects_attributes" do
@person.save
project = Project.create(:name => 'dm-accepts_nested_attributes')
project_membership = ProjectMembership.create(:person => @person, :project => project)
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
@person.projects_attributes = { '1' => { :id => project.id, :_delete => true } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
end
end
describe "every accessible has(n, :through) association with :allow_destroy => true", :shared => true do
it "should allow to delete an existing project via Person#projects_attributes" do
@person.save
project = Project.create(:name => 'dm-accepts_nested_attributes')
project_membership = ProjectMembership.create(:person => @person, :project => project)
Person.all.size.should == 1
ProjectMembership.all.size.should == 1
Project.all.size.should == 1
@person.projects_attributes = { '1' => { :id => project.id, :_delete => true } }
@person.save
Person.all.size.should == 1
ProjectMembership.all.size.should == 0
Project.all.size.should == 0
end
end
describe "every accessible has(n, :through) association with a nested attributes reader", :shared => true do
it "should return the attributes written to Person#projects_attributes from the Person#projects_attributes reader" do
@person.projects_attributes.should be_nil
@person.projects_attributes = { 'new_1' => { :name => 'write specs' } }
@person.projects_attributes.should == { 'new_1' => { :name => 'write specs' } }
end
end |
require 'spec_helper'
describe Socialcast::Provision do
let(:credentials) { YAML.load_file(File.join(File.dirname(__FILE__), '..', 'fixtures', 'credentials.yml')) }
describe ".provision" do
let(:ldap_default_config) { YAML.load_file(File.join(File.dirname(__FILE__), '..', 'fixtures', 'ldap.yml')) }
context "when a user is found" do
let(:result) { '' }
before do
entry = Net::LDAP::Entry.new("dc=example,dc=com")
entry[:mail] = 'user@example.com'
Net::LDAP.any_instance.stub(:search).and_yield(entry)
Zlib::GzipWriter.stub(:open).and_yield(result)
Socialcast.stub(:credentials).and_return(credentials)
Socialcast::CLI.any_instance.should_receive(:ldap_config).and_return(ldap_default_config)
File.stub(:open).with(/users.xml.gz/, anything).and_yield(result)
RestClient::Resource.any_instance.should_receive(:post).with(hash_including(:file => result), { :accept => :json })
Socialcast::CLI.start ['provision']
end
it "includes the user" do
result.gsub(/\s/, '').should == %Q[
<?xml version="1.0" encoding="UTF-8"?>
<export>
<users type="array">
<user>
<first_name/>
<last_name/>
<contact-info>
<email>user@example.com</email>
</contact-info>
<custom-fields type="array">
</custom-fields>
<account-type>member</account-type>
<roles type="array">
</roles>
</user>
</users>
</export>
].gsub(/\s/, '')
end
end
end
end
test method in isolation
require 'spec_helper'
describe Socialcast::Provision do
let!(:credentials) { YAML.load_file(File.join(File.dirname(__FILE__), '..', 'fixtures', 'credentials.yml')) }
describe ".provision" do
let!(:ldap_default_config) { YAML.load_file(File.join(File.dirname(__FILE__), '..', 'fixtures', 'ldap.yml')) }
context "when a user is found" do
let(:result) { '' }
before do
entry = Net::LDAP::Entry.new("dc=example,dc=com")
entry[:mail] = 'user@example.com'
Net::LDAP.any_instance.stub(:search).and_yield(entry)
Zlib::GzipWriter.stub(:open).and_yield(result)
Socialcast.stub(:credentials).and_return(credentials)
File.stub(:open).with(/users.xml.gz/, anything).and_yield(result)
RestClient::Resource.any_instance.should_receive(:post).with(hash_including(:file => result), { :accept => :json })
Socialcast::Provision.provision(ldap_default_config, {})
end
it "includes the user" do
result.gsub(/\s/, '').should == %Q[
<?xml version="1.0" encoding="UTF-8"?>
<export>
<users type="array">
<user>
<first_name/>
<last_name/>
<contact-info>
<email>user@example.com</email>
</contact-info>
<custom-fields type="array">
</custom-fields>
<account-type>member</account-type>
<roles type="array">
</roles>
</user>
</users>
</export>
].gsub(/\s/, '')
end
end
end
end
|
require 'spec_helper'
describe Squire::Configuration do
subject { described_class.new }
let(:hash) {
{
development: {
a: 1,
nested: {
b: 2
}
},
production: {
a: 3
}
}
}
describe '#source' do
it 'should properly set hash as source' do
subject.source hash
subject.source.should eql(hash)
end
it 'should properly set yaml sa a source' do
subject.source 'config.yml'
subject.source.should eql('config.yml')
subject.type.should eql(:yml)
end
end
describe '#namespace' do
it 'should properly set namespace' do
subject.source hash
subject.namespace :development
subject.settings.a.should eql(1)
subject.namespace.should eql(:development)
expect { subject.settings.development }.to raise_exception(Squire::MissingSettingError)
end
it 'should properly handle runtime changing of namespaces' do
subject.source hash
subject.namespace :development
subject.settings.a.should eql(1)
subject.namespace :production
subject.settings.a.should eql(3)
end
end
describe '#setup' do
let(:path) { '/path/to/file.yml'}
let(:factory) { double(:factory) }
let(:parser) { double(:parser) }
it 'should setup basic settings from hash' do
subject.source(hash)
factory.should_receive(:of).with(:hash).and_return(parser)
parser.should_receive(:parse).with(hash).and_return(hash)
stub_const('Squire::Parser', factory)
settings = subject.setup
settings.to_hash.should eql(hash)
end
it 'should setup basic settings yml' do
factory.should_receive(:of).with(:yml).and_return(parser)
parser.should_receive(:parse).with(path).and_return(hash)
stub_const('Squire::Parser', factory)
subject.source(path)
settings = subject.settings
settings.development.a.should eql(1)
settings.development.nested.b.should eql(2)
end
it 'should not setup source with unknown filetype' do
subject.source(path, type: :bogus)
expect { subject.setup }.to raise_error Squire::UndefinedParserError
end
end
end
Add specs for namespace overriding
require 'spec_helper'
describe Squire::Configuration do
subject { described_class.new }
let(:hash) {
{
defaults: {
nested: {
b: 3,
c: 4
}
},
development: {
a: 1,
nested: {
b: 2
}
},
production: {
a: 3
}
}
}
describe '#source' do
it 'should properly set hash as source' do
subject.source hash
subject.source.should eql(hash)
end
it 'should properly set yaml sa a source' do
subject.source 'config.yml'
subject.source.should eql('config.yml')
subject.type.should eql(:yml)
end
end
describe '#namespace' do
it 'should properly set namespace' do
subject.source hash
subject.namespace :development
subject.settings.a.should eql(1)
subject.namespace.should eql(:development)
expect { subject.settings.development }.to raise_exception(Squire::MissingSettingError)
end
it 'should properly handle runtime changing of namespaces' do
subject.source hash
subject.namespace :development
subject.settings.a.should eql(1)
subject.namespace :production
subject.settings.a.should eql(3)
end
it 'should handle base namespace overriding' do
subject.source hash
subject.namespace :development, base: :defaults
subject.settings.a.should eql(1)
subject.settings.nested.b.should eql(3)
subject.settings.nested.c.should eql(4)
end
end
describe '#setup' do
let(:path) { '/path/to/file.yml'}
let(:factory) { double(:factory) }
let(:parser) { double(:parser) }
it 'should setup basic settings from hash' do
subject.source(hash)
factory.should_receive(:of).with(:hash).and_return(parser)
parser.should_receive(:parse).with(hash).and_return(hash)
stub_const('Squire::Parser', factory)
settings = subject.setup
settings.to_hash.should eql(hash)
end
it 'should setup basic settings yml' do
factory.should_receive(:of).with(:yml).and_return(parser)
parser.should_receive(:parse).with(path).and_return(hash)
stub_const('Squire::Parser', factory)
subject.source(path)
settings = subject.settings
settings.development.a.should eql(1)
settings.development.nested.b.should eql(2)
end
it 'should not setup source with unknown filetype' do
subject.source(path, type: :bogus)
expect { subject.setup }.to raise_error Squire::UndefinedParserError
end
end
end
|
#
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/shell_out'
# Helpers used by Omnibus cookbooks to decide whether a private-chef
# service should be notified (e.g. restarted) after a change.
#
# NOTE(review): this file requires 'chef/shell_out' but the code below uses
# Mixlib::ShellOut — confirm 'mixlib/shellout' is loaded by that require.
class OmnibusHelper
  # A service should only be notified when it is enabled (its runit
  # service symlink exists under `#{path}/service/`) AND currently
  # reported as running by the control command.
  def self.should_notify?(service_name,
                          path = "/opt/opscode",
                          command = "private-chef-ctl")
    File.symlink?("#{path}/service/#{service_name}") && check_status(service_name, path, command)
  end

  # Returns true when `<path>/bin/<command> status <service_name>` exits 0.
  def self.check_status(service_name,
                        path = "/opt/opscode",
                        command = "private-chef-ctl")
    status = Mixlib::ShellOut.new("#{path}/bin/#{command} status #{service_name}")
    status.run_command
    # The exit status already yields a boolean comparison; the original
    # `== 0 ? true : false` ternary was redundant.
    status.exitstatus == 0
  end
end
Remove libraries/helper.rb
|
# Minimal fixture: a single known key so the specs below can exercise both
# the present-key and missing-key paths of Settings#get.
data = {
"some_setting" => "some value"
}
# Shared subject; the specify blocks close over this local.
settings = Settings.new data
describe Settings do
  # NOTE: `expect(x == y).to be` only asserts truthiness and produces a
  # useless failure message ("expected false to be truthy"); the eq/be_nil
  # matchers check the same thing with proper diagnostics.
  specify "Getting a setting from the setting data gets the setting's value" do
    key = 'some_setting'
    setting = settings.get key
    expect(setting).to eq('some value')
  end

  specify "Trying to get a setting that isn't in the data gets a nil" do
    key = 'setting_that_isnt_in_the_data'
    setting = settings.get key
    expect(setting).to be_nil
  end
end
Test uses a control module
# Control module that wraps the Settings fixture so the specs below can
# drive Settings#get through a single entry point.
module GetSetting
  extend self

  # Fixture data containing exactly one known setting.
  def data
    { "some_setting" => "some value" }
  end

  # A fresh Settings instance built from the fixture data.
  def settings
    Settings.new data
  end

  # Look up +key+ through a Settings instance; nil when absent.
  def get(key)
    settings.get key
  end
end
describe "Get Settings", :* do
  # NOTE: `expect(x == y).to be` only asserts truthiness; the eq/be_nil
  # matchers make the expectation explicit and report useful diffs.
  specify "Getting a setting from the setting data gets the setting's value" do
    setting = GetSetting.get 'some_setting'
    expect(setting).to eq('some value')
  end

  specify "Trying to get a setting that isn't in the data gets a nil" do
    setting = GetSetting.get 'setting_that_isnt_in_the_data'
    expect(setting).to be_nil
  end
end
|
module UncleSam
  # Standard deduction amounts in USD, keyed by filing status.
  # Source: http://en.wikipedia.org/wiki/Standard_deduction
  # All values are Floats (the original mixed one bare Integer in) and the
  # lookup table is frozen since it is a constant.
  FILING_STATUS_OPTIONS = {
    :single                      => 6200.0,
    :married_filing_jointly      => 12400.0,
    :married_filing_separately   => 6200.0,
    :head_of_household           => 9100.0,
    :qualifying_surviving_spouse => 12400.0
  }.freeze

  # Tracks income remaining after deductions for a given filing status.
  class Calculator
    attr_reader :remaining_income, :filing_status

    # net_income    - Numeric income before deductions.
    # filing_status - Symbol key into FILING_STATUS_OPTIONS.
    def initialize(net_income, filing_status)
      @remaining_income = net_income
      @filing_status = filing_status
    end

    # Subtracts the standard deduction for this filing status and returns
    # the new remaining income.
    # NOTE(review): an unknown filing_status looks up nil here and raises
    # TypeError; input validation was added in a later revision.
    def make_standard_deductions
      @remaining_income -= FILING_STATUS_OPTIONS[filing_status]
    end
  end
end
describe UncleSam::Calculator do
# 2014 US average net income, used as a representative input.
let(:average_net_income) { 51939.00 }
describe '#make_standard_deductions' do
it 'deducts the matching amount from the net income' do
calculator = UncleSam::Calculator.new(average_net_income, :single)
calculator.make_standard_deductions
expect(calculator.remaining_income).to eq(average_net_income - UncleSam::FILING_STATUS_OPTIONS[:single])
end
# Pending example (no block): documents the validation requirement that a
# later revision of UncleSam implements.
it 'only accepts values matching keys contained in UncleSam::FILING_STATUS_OPTIONS'
end
end
Add exception handling for invalid filing_status input
module UncleSam
  # Standard deduction amounts in USD, keyed by filing status.
  # Source: http://en.wikipedia.org/wiki/Standard_deduction
  # All values are Floats (the original mixed one bare Integer in) and the
  # lookup table is frozen since it is a constant.
  FILING_STATUS_OPTIONS = {
    :single                      => 6200.0,
    :married_filing_jointly      => 12400.0,
    :married_filing_separately   => 6200.0,
    :head_of_household           => 9100.0,
    :qualifying_surviving_spouse => 12400.0
  }.freeze

  # Raised when a Calculator is built with a filing status that has no
  # entry in FILING_STATUS_OPTIONS.
  # BUG FIX: inherit from StandardError, not Exception — subclassing
  # Exception escapes bare `rescue` clauses (which only catch
  # StandardError) and is a Ruby anti-pattern.
  UnknownFilingStatusError = Class.new(StandardError)

  # Tracks income remaining after deductions for a given filing status.
  class Calculator
    attr_reader :remaining_income, :filing_status

    # net_income    - Numeric income before deductions.
    # filing_status - Symbol key into FILING_STATUS_OPTIONS.
    #
    # Raises UnknownFilingStatusError for an unrecognized filing status.
    def initialize(net_income, filing_status)
      @remaining_income = net_income
      @filing_status = filing_status
      raise UnknownFilingStatusError, "unknown filing status: #{filing_status.inspect}" if filing_status_is_invalid?
    end

    # Subtracts the standard deduction for this filing status and returns
    # the new remaining income.
    def make_standard_deductions
      @remaining_income -= FILING_STATUS_OPTIONS[filing_status]
    end

    private

    # True when the filing status has no deduction entry.
    def filing_status_is_invalid?
      FILING_STATUS_OPTIONS[filing_status].nil?
    end
  end
end
describe UncleSam::Calculator do
# 2014 US average net income, used as a representative input.
let(:average_net_income) { 51939.00 }
describe '#make_standard_deductions' do
it 'deducts the matching amount from the net income' do
calculator = UncleSam::Calculator.new(average_net_income, :single)
calculator.make_standard_deductions
expect(calculator.remaining_income).to eq(average_net_income - UncleSam::FILING_STATUS_OPTIONS[:single])
end
end
# Validation happens in the constructor, so an invalid status must raise
# before any deduction can be attempted.
it 'rejects unknown filing status values' do
expect do
UncleSam::Calculator.new(average_net_income, :invalid_value)
end.to raise_error(UncleSam::UnknownFilingStatusError)
end
end
|
#
# Author:: Laurent Desarmes <laurent.desarmes@u-picardie.fr>
# Copyright:: Copyright (c) 2012 Laurent Desarmes
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper.rb')
# Shared example: asserts the network plugin runs without throwing and that
# the ipaddress/macaddress/ip6address keys are defined (possibly nil).
def it_does_not_fail
  it "doesn't fail" do
    Ohai::Log.should_receive(:warn).any_number_of_times
    Ohai::Log.should_not_receive(:debug).with(/^Plugin network threw exception/)
    @ohai._require_plugin("network")
    # BUG FIX: %w[] splits on whitespace only, so the original commas were
    # part of the words ("ipaddress," etc.) and the wrong keys were checked.
    %w[ ipaddress macaddress ip6address ].each do |attribute|
      @ohai.should have_key(attribute)
    end
  end
end
describe Ohai::System, "Network Plugin" do
basic_data = {
"linux" => {
"network" => {
# pp Hash[node['network']] from shef to get the network data
# have just removed the neighbour and route entries by hand
"interfaces" => {
"lo" => {
"flags" => ["LOOPBACK", "UP"],
"addresses" => {
"::1" => {
"scope" => "Node",
"prefixlen" => "128",
"family" => "inet6"
},
"127.0.0.1" => {
"scope" => "Node",
"netmask" => "255.0.0.0",
"prefixlen" => "8",
"family" => "inet"
}
},
"mtu" => "16436",
"encapsulation" => "Loopback"
},
"eth0" => {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "0",
"addresses" => {
"fe80::216:3eff:fe2f:3679" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:79" => {"family" => "lladdr"},
"192.168.66.33" => {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.66.255",
"prefixlen" => "24",
"family" => "inet"
},
"3ffe:1111:2222::33" => {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
},
"mtu" => "1500",
"type" => "eth",
"encapsulation" => "Ethernet"
},
"eth1" => {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "1",
"addresses" => {
"fe80::216:3eff:fe2f:3680" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:80" => {"family" => "lladdr"},
"192.168.99.11" => {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.99.255",
"prefixlen" => "24",
"family" => "inet"
},
"3ffe:1111:3333::1" => {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
},
"mtu" => "1500",
"type" => "eth",
"encapsulation" => "Ethernet"
}
},
"default_gateway" => "192.168.66.15",
"default_interface" => "eth0",
"default_inet6_gateway" => "3ffe:1111:2222::",
"default_inet6_interface" => "eth0"
}
},
"windows" => {
"network" => {
"interfaces" => {
"0xb" => {
"addresses" => {
"172.19.0.130" => {
"prefixlen" => "24",
"netmask" => "255.255.255.0",
"broadcast" => "172.19.0.255",
"family" => "inet"
},
"fe80::698d:3e37:7950:b28c" => {
"prefixlen" => "64",
"family" => "inet6",
"scope" => "Link"
},
"52:54:44:66:66:02" => {
"family" => "lladdr"
}
},
"mtu" => nil,
"type" => "Ethernet 802.3",
"encapsulation" => "Ethernet"
}
},
"default_gateway" => "172.19.0.1",
"default_interface" => "0xb"
}
}
}
describe "with linux" do
before(:each) do
@ohai = Ohai::System.new
@ohai.stub!(:require_plugin).twice.and_return(true)
@ohai["network"] = basic_data["linux"]["network"]
end
describe "when the linux::network plugin hasn't set any of {ip,ip6,mac}address attributes" do
describe "simple setup" do
it_does_not_fail
# BUG FIX: description said "2" but three debug messages are asserted —
# one for plugin loading plus one per address family (inet and inet6).
it "logs 3 debug messages" do
  Ohai::Log.should_receive(:debug).with(/^Loading plugin network/).once
  Ohai::Log.should_receive(:debug).with(/^\[inet\] Using default/).once
  Ohai::Log.should_receive(:debug).with(/^\[inet6\] Using default/).once
  @ohai._require_plugin("network")
end
it "detects {ip,ip6,mac}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::33"
end
end
describe "default ipv4 and ipv6 gateway on different interfaces" do
describe "both interfaces have an ARP" do
before do
@ohai["network"]["default_inet6_gateway"] = "3ffe:1111:3333::"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "detects {ip,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
it "set macaddress from the ipv4 setup" do
@ohai._require_plugin("network")
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
end
it "informs about this setup" do
Ohai::Log.should_receive(:info).with(/^ipaddress and ip6address are set from different interfaces/)
@ohai._require_plugin("network")
end
end
describe "ipv4 interface has no ARP" do
before do
@ohai["network"]["interfaces"]["eth0"]["addresses"].delete_if{|k,kv| kv["family"] == "lladdr" }
# not really checked by this pluging
@ohai["network"]["interfaces"]["eth0"]["flags"] << "NOARP"
@ohai["network"]["default_inet6_gateway"] = "3ffe:1111:3333::"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "detects {ip,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
it "doesn't set macaddress, ipv4 setup is valid and has precedence over ipv6" do
Ohai::Log.should_not_receive(:warn).with(/^unable to detect macaddress/)
@ohai._require_plugin("network")
@ohai["macaddress"].should be_nil
end
it "informs about this setup" do
Ohai::Log.should_receive(:info).with(/^ipaddress and ip6address are set from different interfaces/)
@ohai._require_plugin("network")
end
end
end
describe "conflicting results from the linux::network plugin" do
describe "default interface doesn't match the default_gateway" do
before do
@ohai["network"]["default_interface"] = "eth1"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "doesn't detect {ip,ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
@ohai["macaddress"].should be_nil
@ohai["ip6address"].should be_nil
end
it "warns about this conflict" do
Ohai::Log.should_receive(:warn).with(/^\[inet\] no ipaddress\/mask on eth1/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
Ohai::Log.should_receive(:warn).with(/^\[inet6\] no ipaddress\/mask on eth1/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ip6address/).once
@ohai._require_plugin("network")
end
end
describe "no ip address for the given default interface/gateway" do
before do
@ohai["network"]["interfaces"]["eth0"]["addresses"].delete_if{|k,v| %w[inet inet6].include? v["family"]}
end
it_does_not_fail
it "doesn't detect {ip,ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
@ohai["macaddress"].should be_nil
@ohai["ip6address"].should be_nil
end
it "warns about this conflict" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
Ohai::Log.should_receive(:warn).with(/^\[inet\] no ip on eth0/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ip6address/).once
Ohai::Log.should_receive(:warn).with(/^\[inet6\] no ip on eth0/).once
@ohai._require_plugin("network")
end
end
describe "no ip at all" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["default_inet6_gateway"] = nil
@ohai["network"]["default_inet6_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| %w[inet inet6].include? kv["family"]}
end
end
it_does_not_fail
it "doesn't detect {ip,ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
@ohai["macaddress"].should be_nil
@ohai["ip6address"].should be_nil
end
it "should warn about it" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ip6address/).once
@ohai._require_plugin("network")
end
end
end
describe "several ipaddresses matching the default route" do
describe "bigger prefix not set on the default interface" do
before do
@ohai["network"]["interfaces"]["eth2"] = {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "2",
"addresses" => {
"fe80::216:3eff:fe2f:3681" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:81" => {"family" => "lladdr"},
"192.168.66.99" => {
"scope" => "Global",
"netmask" => "255.255.255.128",
"broadcast" => "192.168.99.127",
"prefixlen" => "25",
"family" => "inet"
},
"3ffe:1111:2222:0:4444::1" => {
"prefixlen" => "64",
"family" => "inet6",
"scope" => "Global"
}
}
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::33"
end
end
describe "bigger prefix set on the default interface" do
before do
@ohai["network"]["interfaces"]["eth0"]["addresses"]["192.168.66.99"] = {
"scope" => "Global",
"netmask" => "255.255.255.128",
"broadcast" => "192.168.66.127",
"prefixlen" => "25",
"family" => "inet"
}
@ohai["network"]["interfaces"]["eth0"]["addresses"]["3ffe:1111:2222:0:4444::1"] = {
"prefixlen" => "64",
"family" => "inet6",
"scope" => "Global"
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.99"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222:0:4444::1"
end
end
describe "smallest ip not set on the default_interface" do
before do
@ohai["network"]["interfaces"]["eth2"] = {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "2",
"addresses" => {
"fe80::216:3eff:fe2f:3681" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:81" => {"family" => "lladdr"},
"192.168.66.32" => {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.66.255",
"prefixlen" => "24",
"family" => "inet"
},
"3ffe:1111:2222::32" => {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
}
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::33"
end
end
describe "smallest ip set on the default_interface" do
before do
@ohai["network"]["interfaces"]["eth0"]["addresses"]["192.168.66.32"] = {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.66.255",
"prefixlen" => "24",
"family" => "inet"
}
@ohai["network"]["interfaces"]["eth0"]["addresses"]["3ffe:1111:2222::32"] = {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.32"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::32"
end
end
end
describe "no default route" do
describe "first interface is not the best choice" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["default_inet6_gateway"] = nil
@ohai["network"]["default_inet6_interface"] = nil
# removing inet* addresses from eth0, to complicate things a bit
@ohai["network"]["interfaces"]["eth0"]["addresses"].delete_if{|k,v| %w[inet inet6].include? v["family"]}
end
it_does_not_fail
it "picks {ip,mac,ip6}address from the first interface" do
Ohai::Log.should_receive(:info).with(/^\[inet\] no default interface/).once
Ohai::Log.should_receive(:info).with(/^\[inet6\] no default interface/).once
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
describe "can choose from addresses with different scopes" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["default_inet6_gateway"] = nil
@ohai["network"]["default_inet6_interface"] = nil
# just changing scopes to lInK for eth0 addresses
@ohai["network"]["interfaces"]["eth0"]["addresses"].each{|k,v| v[:scope]="lInK" if %w[inet inet6].include? v["family"]}
end
it_does_not_fail
it "prefers global scope addressses to set {ip,mac,ip6}address" do
Ohai::Log.should_receive(:info).with(/^\[inet\] no default interface/).once
Ohai::Log.should_receive(:info).with(/^\[inet6\] no default interface/).once
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
end
describe "link level default route" do
describe "simple setup" do
before do
@ohai["network"]["default_gateway"] = "0.0.0.0"
@ohai["network"]["default_interface"] = "eth1"
@ohai["network"]["default_inet6_gateway"] = "::"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "displays debug messages" do
Ohai::Log.should_receive(:debug).with(/^Loading plugin network/).once
Ohai::Log.should_receive(:debug).with(/^link level default inet /).once
Ohai::Log.should_receive(:debug).with(/^link level default inet6 /).once
@ohai._require_plugin("network")
end
it "picks {ip,mac,ip6}address from the default interface" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
describe "can choose from addresses with different scopes" do
before do
@ohai["network"]["default_gateway"] = "0.0.0.0"
@ohai["network"]["default_interface"] = "eth1"
@ohai["network"]["default_inet6_gateway"] = "::"
@ohai["network"]["default_inet6_interface"] = "eth1"
@ohai["network"]["interfaces"]["eth1"]["addresses"]["127.0.0.2"] = {
"scope" => "host",
"netmask" => "255.255.255.255",
"prefixlen" => "32",
"family" => "inet"
}
end
it_does_not_fail
it "displays debug messages" do
Ohai::Log.should_receive(:debug).with(/^Loading plugin network/).once
Ohai::Log.should_receive(:debug).with(/^link level default inet /).once
Ohai::Log.should_receive(:debug).with(/^link level default inet6 /).once
@ohai._require_plugin("network")
end
it "picks {ip,mac,ip6}address from the default interface" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
end
describe "point to point address" do
before do
@ohai["network"]["interfaces"]["eth2"] = {
"flags" => ["POINTOPOINT", "BROADCAST", "MULTICAST", "UP"],
"number" => "2",
"addresses" => {
"fe80::216:3eff:fe2f:3681" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:81" => {"family" => "lladdr"},
"192.168.66.99" => {
"scope" => "Global",
"netmask" => "255.255.255.255",
"peer" => "192.168.99.126",
"prefixlen" => "32",
"family" => "inet"
},
"3ffe:1111:2222:0:4444::1" => {
"prefixlen" => "128",
"peer" => "3ffe:1111:2222:0:4444::2",
"family" => "inet6",
"scope" => "Global"
}
}
}
@ohai["network"]["default_gateway"] = "192.168.99.126"
@ohai["network"]["default_interface"] = "eth2"
@ohai["network"]["default_inet6_gateway"] = "3ffe:1111:2222:0:4444::2"
@ohai["network"]["default_inet6_interface"] = "eth2"
end
it_does_not_fail
it "picks {ip,mac,ip6}address from the default interface" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.99"
@ohai["macaddress"].should == "00:16:3E:2F:36:81"
@ohai["ip6address"].should == "3ffe:1111:2222:0:4444::1"
end
end
describe "ipv6 only node" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| kv["family"] == "inet" }
end
end
it_does_not_fail
it "can't detect ipaddress" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
end
it "warns about not being able to set {ip,mac}address (ipv4)" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
@ohai._require_plugin("network")
end
it "sets {ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:1111:2222::33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
end
it "informs about macaddress being set using the ipv6 setup" do
Ohai::Log.should_receive(:warn).any_number_of_times
Ohai::Log.should_receive(:info).with(/^macaddress set to 00:16:3E:2F:36:79 from the ipv6 setup/).once
@ohai._require_plugin("network")
end
end
end
basic_data.keys.sort.each do |os|
describe "the #{os}::network has already set some of the {ip,mac,ip6}address attributes" do
before(:each) do
@ohai = Ohai::System.new
@ohai.stub!(:require_plugin).twice.and_return(true)
@ohai["network"] = basic_data[os]["network"]
end
describe "{ip,mac}address are already set" do
before do
@ohai["ipaddress"] = "10.11.12.13"
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@expected_results = {
"linux" => {
"ip6address" => "3ffe:1111:2222::33"
},
"windows" => {
"ip6address" => "fe80::698d:3e37:7950:b28c"
}
}
end
it_does_not_fail
it "detects ip6address" do
@ohai._require_plugin("network")
@ohai["ip6address"].should == @expected_results[os]["ip6address"]
end
it "doesn't overwrite {ip,mac}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "10.11.12.13"
@ohai["macaddress"].should == "00:AA:BB:CC:DD:EE"
end
end
describe "ip6address is already set" do
describe "node has ipv4 and ipv6" do
before do
@ohai["ip6address"] = "3ffe:8888:9999::1"
@expected_results = {
"linux" => {
"ipaddress" => "192.168.66.33",
"macaddress" => "00:16:3E:2F:36:79"
},
"windows" => {
"ipaddress" => "172.19.0.130",
"macaddress" => "52:54:44:66:66:02"
}
}
end
it_does_not_fail
it "detects {ip,mac}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == @expected_results[os]["ipaddress"]
@ohai["macaddress"].should == @expected_results[os]["macaddress"]
end
it "doesn't overwrite ip6address" do
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
describe "ipv6 only node" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| kv["family"] == "inet" }
end
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "can't detect ipaddress (ipv4)" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
end
it "can't detect macaddress either" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["macaddress"].should be_nil
end
it "warns about not being able to set {ip,mac}address" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
@ohai._require_plugin("network")
end
it "doesn't overwrite ip6address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
end
describe "{mac,ip6}address are already set" do
describe "valid ipv4 setup" do
before do
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@ohai["ip6address"] = "3ffe:8888:9999::1"
@expected_results = {
"linux" => {
"ipaddress" => "192.168.66.33",
"macaddress" => "00:16:3E:2F:36:79"
},
"windows" => {
"ipaddress" => "172.19.0.130",
"macaddress" => "52:54:44:66:66:02"
}
}
end
it_does_not_fail
it "detects ipaddress and overwrite macaddress" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == @expected_results[os]["ipaddress"]
@ohai["macaddress"].should == @expected_results[os]["macaddress"]
end
it "doesn't overwrite ip6address" do
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
describe "ipv6 only node" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| kv["family"] == "inet" }
end
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "can't set ipaddress" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
end
it "doesn't overwrite {ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
@ohai["macaddress"].should == "00:AA:BB:CC:DD:EE"
end
end
end
describe "{ip,mac,ip6}address are already set" do
before do
@ohai["ipaddress"] = "10.11.12.13"
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "doesn't overwrite {ip,mac,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "10.11.12.13"
@ohai["macaddress"].should == "00:AA:BB:CC:DD:EE"
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
describe "{ip,ip6}address are already set" do
before do
@ohai["ipaddress"] = "10.11.12.13"
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "doesn't overwrite {ip,mac,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "10.11.12.13"
@ohai["macaddress"].should == nil
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
end
end
end
end
OHAI-433: Update tests for reduced network logging level
#
# Author:: Laurent Desarmes <laurent.desarmes@u-picardie.fr>
# Copyright:: Copyright (c) 2012 Laurent Desarmes
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper.rb')
# Shared example: asserts the network plugin runs without throwing and that
# the ipaddress/macaddress/ip6address keys are defined (possibly nil).
def it_does_not_fail
  it "doesn't fail" do
    Ohai::Log.should_receive(:warn).any_number_of_times
    Ohai::Log.should_not_receive(:debug).with(/^Plugin network threw exception/)
    @ohai._require_plugin("network")
    # BUG FIX: %w[] splits on whitespace only, so the original commas were
    # part of the words ("ipaddress," etc.) and the wrong keys were checked.
    %w[ ipaddress macaddress ip6address ].each do |attribute|
      @ohai.should have_key(attribute)
    end
  end
end
describe Ohai::System, "Network Plugin" do
basic_data = {
"linux" => {
"network" => {
# pp Hash[node['network']] from shef to get the network data
# have just removed the neighbour and route entries by hand
"interfaces" => {
"lo" => {
"flags" => ["LOOPBACK", "UP"],
"addresses" => {
"::1" => {
"scope" => "Node",
"prefixlen" => "128",
"family" => "inet6"
},
"127.0.0.1" => {
"scope" => "Node",
"netmask" => "255.0.0.0",
"prefixlen" => "8",
"family" => "inet"
}
},
"mtu" => "16436",
"encapsulation" => "Loopback"
},
"eth0" => {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "0",
"addresses" => {
"fe80::216:3eff:fe2f:3679" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:79" => {"family" => "lladdr"},
"192.168.66.33" => {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.66.255",
"prefixlen" => "24",
"family" => "inet"
},
"3ffe:1111:2222::33" => {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
},
"mtu" => "1500",
"type" => "eth",
"encapsulation" => "Ethernet"
},
"eth1" => {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "1",
"addresses" => {
"fe80::216:3eff:fe2f:3680" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:80" => {"family" => "lladdr"},
"192.168.99.11" => {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.99.255",
"prefixlen" => "24",
"family" => "inet"
},
"3ffe:1111:3333::1" => {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
},
"mtu" => "1500",
"type" => "eth",
"encapsulation" => "Ethernet"
}
},
"default_gateway" => "192.168.66.15",
"default_interface" => "eth0",
"default_inet6_gateway" => "3ffe:1111:2222::",
"default_inet6_interface" => "eth0"
}
},
"windows" => {
"network" => {
"interfaces" => {
"0xb" => {
"addresses" => {
"172.19.0.130" => {
"prefixlen" => "24",
"netmask" => "255.255.255.0",
"broadcast" => "172.19.0.255",
"family" => "inet"
},
"fe80::698d:3e37:7950:b28c" => {
"prefixlen" => "64",
"family" => "inet6",
"scope" => "Link"
},
"52:54:44:66:66:02" => {
"family" => "lladdr"
}
},
"mtu" => nil,
"type" => "Ethernet 802.3",
"encapsulation" => "Ethernet"
}
},
"default_gateway" => "172.19.0.1",
"default_interface" => "0xb"
}
}
}
describe "with linux" do
before(:each) do
@ohai = Ohai::System.new
@ohai.stub!(:require_plugin).twice.and_return(true)
@ohai["network"] = basic_data["linux"]["network"]
end
describe "when the linux::network plugin hasn't set any of {ip,ip6,mac}address attributes" do
describe "simple setup" do
it_does_not_fail
# BUG FIX: description said "2" but three debug messages are asserted —
# one for plugin loading plus one per address family (inet and inet6).
it "logs 3 debug messages" do
  Ohai::Log.should_receive(:debug).with(/^Loading plugin network/).once
  Ohai::Log.should_receive(:debug).with(/^\[inet\] Using default/).once
  Ohai::Log.should_receive(:debug).with(/^\[inet6\] Using default/).once
  @ohai._require_plugin("network")
end
it "detects {ip,ip6,mac}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::33"
end
end
describe "default ipv4 and ipv6 gateway on different interfaces" do
describe "both interfaces have an ARP" do
before do
@ohai["network"]["default_inet6_gateway"] = "3ffe:1111:3333::"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "detects {ip,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
it "set macaddress from the ipv4 setup" do
@ohai._require_plugin("network")
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
end
it "informs about this setup" do
Ohai::Log.should_receive(:debug).with(/^ipaddress and ip6address are set from different interfaces/)
Ohai::Log.should_receive(:debug).any_number_of_times
@ohai._require_plugin("network")
end
end
describe "ipv4 interface has no ARP" do
before do
@ohai["network"]["interfaces"]["eth0"]["addresses"].delete_if{|k,kv| kv["family"] == "lladdr" }
# not really checked by this pluging
@ohai["network"]["interfaces"]["eth0"]["flags"] << "NOARP"
@ohai["network"]["default_inet6_gateway"] = "3ffe:1111:3333::"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "detects {ip,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
it "doesn't set macaddress, ipv4 setup is valid and has precedence over ipv6" do
Ohai::Log.should_not_receive(:warn).with(/^unable to detect macaddress/)
@ohai._require_plugin("network")
@ohai["macaddress"].should be_nil
end
it "informs about this setup" do
Ohai::Log.should_receive(:debug).with(/^ipaddress and ip6address are set from different interfaces/)
Ohai::Log.should_receive(:debug).any_number_of_times
@ohai._require_plugin("network")
end
end
end
# Scenarios where the data set by linux::network is internally inconsistent
# (or empty); the plugin should refuse to guess and instead warn.
describe "conflicting results from the linux::network plugin" do
describe "default interface doesn't match the default_gateway" do
before do
# default_gateway still points through eth0's subnet, but the default
# interfaces are forced to eth1, which carries no matching address.
@ohai["network"]["default_interface"] = "eth1"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "doesn't detect {ip,ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
@ohai["macaddress"].should be_nil
@ohai["ip6address"].should be_nil
end
it "warns about this conflict" do
Ohai::Log.should_receive(:warn).with(/^\[inet\] no ipaddress\/mask on eth1/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
Ohai::Log.should_receive(:warn).with(/^\[inet6\] no ipaddress\/mask on eth1/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ip6address/).once
@ohai._require_plugin("network")
end
end
describe "no ip address for the given default interface/gateway" do
before do
# Remove every inet/inet6 address from the default interface.
@ohai["network"]["interfaces"]["eth0"]["addresses"].delete_if{|k,v| %w[inet inet6].include? v["family"]}
end
it_does_not_fail
it "doesn't detect {ip,ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
@ohai["macaddress"].should be_nil
@ohai["ip6address"].should be_nil
end
it "warns about this conflict" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
Ohai::Log.should_receive(:warn).with(/^\[inet\] no ip on eth0/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ip6address/).once
Ohai::Log.should_receive(:warn).with(/^\[inet6\] no ip on eth0/).once
@ohai._require_plugin("network")
end
end
describe "no ip at all" do
before do
# No default routes and no inet/inet6 addresses on any interface.
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["default_inet6_gateway"] = nil
@ohai["network"]["default_inet6_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| %w[inet inet6].include? kv["family"]}
end
end
it_does_not_fail
it "doesn't detect {ip,ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
@ohai["macaddress"].should be_nil
@ohai["ip6address"].should be_nil
end
it "should warn about it" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect ip6address/).once
@ohai._require_plugin("network")
end
end
end
# Tie-breaking when several addresses match the default route: addresses on
# the default interface win, and among those the expectations below pin which
# prefix length / numeric value is preferred.
describe "several ipaddresses matching the default route" do
describe "bigger prefix not set on the default interface" do
before do
# eth2 carries a longer-prefix (/25) match, but it is NOT the default
# interface, so it must be ignored.
@ohai["network"]["interfaces"]["eth2"] = {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "2",
"addresses" => {
"fe80::216:3eff:fe2f:3681" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:81" => {"family" => "lladdr"},
"192.168.66.99" => {
"scope" => "Global",
"netmask" => "255.255.255.128",
"broadcast" => "192.168.99.127",
"prefixlen" => "25",
"family" => "inet"
},
"3ffe:1111:2222:0:4444::1" => {
"prefixlen" => "64",
"family" => "inet6",
"scope" => "Global"
}
}
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::33"
end
end
describe "bigger prefix set on the default interface" do
before do
# Longer-prefix addresses added on eth0 (the default interface) are
# expected to take precedence over the pre-existing shorter-prefix ones.
@ohai["network"]["interfaces"]["eth0"]["addresses"]["192.168.66.99"] = {
"scope" => "Global",
"netmask" => "255.255.255.128",
"broadcast" => "192.168.66.127",
"prefixlen" => "25",
"family" => "inet"
}
@ohai["network"]["interfaces"]["eth0"]["addresses"]["3ffe:1111:2222:0:4444::1"] = {
"prefixlen" => "64",
"family" => "inet6",
"scope" => "Global"
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.99"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222:0:4444::1"
end
end
describe "smallest ip not set on the default_interface" do
before do
# eth2 carries numerically smaller addresses, but again is not the
# default interface and must not win.
@ohai["network"]["interfaces"]["eth2"] = {
"flags" => ["BROADCAST", "MULTICAST", "UP"],
"number" => "2",
"addresses" => {
"fe80::216:3eff:fe2f:3681" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:81" => {"family" => "lladdr"},
"192.168.66.32" => {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.66.255",
"prefixlen" => "24",
"family" => "inet"
},
"3ffe:1111:2222::32" => {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
}
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::33"
end
end
describe "smallest ip set on the default_interface" do
before do
# Same-prefix addresses on the default interface: the numerically
# smallest one is expected to be chosen.
@ohai["network"]["interfaces"]["eth0"]["addresses"]["192.168.66.32"] = {
"scope" => "Global",
"netmask" => "255.255.255.0",
"broadcast" => "192.168.66.255",
"prefixlen" => "24",
"family" => "inet"
}
@ohai["network"]["interfaces"]["eth0"]["addresses"]["3ffe:1111:2222::32"] = {
"prefixlen" => "48",
"family" => "inet6",
"scope" => "Global"
}
end
it_does_not_fail
it "sets {ip,ip6,mac}address correctly" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.32"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
@ohai["ip6address"].should == "3ffe:1111:2222::32"
end
end
end
# Without any default route the plugin falls back to scanning interfaces,
# logging a debug message and preferring globally-scoped addresses.
describe "no default route" do
describe "first interface is not the best choice" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["default_inet6_gateway"] = nil
@ohai["network"]["default_inet6_interface"] = nil
# removing inet* addresses from eth0, to complicate things a bit
@ohai["network"]["interfaces"]["eth0"]["addresses"].delete_if{|k,v| %w[inet inet6].include? v["family"]}
end
it_does_not_fail
it "picks {ip,mac,ip6}address from the first interface" do
Ohai::Log.should_receive(:debug).with(/^\[inet\] no default interface/).once
Ohai::Log.should_receive(:debug).with(/^\[inet6\] no default interface/).once
Ohai::Log.should_receive(:debug).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
describe "can choose from addresses with different scopes" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["default_inet6_gateway"] = nil
@ohai["network"]["default_inet6_interface"] = nil
# Demote eth0's inet/inet6 addresses to link scope (mixed-case on
# purpose: scope comparison should be case-insensitive).
@ohai["network"]["interfaces"]["eth0"]["addresses"].each{|k,v| v[:scope]="lInK" if %w[inet inet6].include? v["family"]}
end
it_does_not_fail
it "prefers global scope addressses to set {ip,mac,ip6}address" do
Ohai::Log.should_receive(:debug).with(/^\[inet\] no default interface/).once
Ohai::Log.should_receive(:debug).with(/^\[inet6\] no default interface/).once
Ohai::Log.should_receive(:debug).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
end
# A link-level default route (gateway 0.0.0.0 / ::) is valid: the plugin
# should debug-log it and still use the default interface's addresses.
describe "link level default route" do
describe "simple setup" do
before do
@ohai["network"]["default_gateway"] = "0.0.0.0"
@ohai["network"]["default_interface"] = "eth1"
@ohai["network"]["default_inet6_gateway"] = "::"
@ohai["network"]["default_inet6_interface"] = "eth1"
end
it_does_not_fail
it "displays debug messages" do
Ohai::Log.should_receive(:debug).with(/^Loading plugin network/).once
Ohai::Log.should_receive(:debug).with(/^link level default inet /).once
Ohai::Log.should_receive(:debug).with(/^link level default inet6 /).once
@ohai._require_plugin("network")
end
it "picks {ip,mac,ip6}address from the default interface" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
describe "can choose from addresses with different scopes" do
before do
@ohai["network"]["default_gateway"] = "0.0.0.0"
@ohai["network"]["default_interface"] = "eth1"
@ohai["network"]["default_inet6_gateway"] = "::"
@ohai["network"]["default_inet6_interface"] = "eth1"
# Host-scoped address on eth1: must lose against the global one.
@ohai["network"]["interfaces"]["eth1"]["addresses"]["127.0.0.2"] = {
"scope" => "host",
"netmask" => "255.255.255.255",
"prefixlen" => "32",
"family" => "inet"
}
end
it_does_not_fail
it "displays debug messages" do
Ohai::Log.should_receive(:debug).with(/^Loading plugin network/).once
Ohai::Log.should_receive(:debug).with(/^link level default inet /).once
Ohai::Log.should_receive(:debug).with(/^link level default inet6 /).once
@ohai._require_plugin("network")
end
it "picks {ip,mac,ip6}address from the default interface" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.99.11"
@ohai["macaddress"].should == "00:16:3E:2F:36:80"
@ohai["ip6address"].should == "3ffe:1111:3333::1"
end
end
end
# Point-to-point interface: the gateway is the peer address, so the match is
# done via the "peer" field rather than the local netmask.
describe "point to point address" do
before do
@ohai["network"]["interfaces"]["eth2"] = {
"flags" => ["POINTOPOINT", "BROADCAST", "MULTICAST", "UP"],
"number" => "2",
"addresses" => {
"fe80::216:3eff:fe2f:3681" => {
"scope" => "Link",
"prefixlen" => "64",
"family" => "inet6"
},
"00:16:3E:2F:36:81" => {"family" => "lladdr"},
"192.168.66.99" => {
"scope" => "Global",
"netmask" => "255.255.255.255",
"peer" => "192.168.99.126",
"prefixlen" => "32",
"family" => "inet"
},
"3ffe:1111:2222:0:4444::1" => {
"prefixlen" => "128",
"peer" => "3ffe:1111:2222:0:4444::2",
"family" => "inet6",
"scope" => "Global"
}
}
}
# Default gateways are set to the peers of eth2's /32 and /128 addresses.
@ohai["network"]["default_gateway"] = "192.168.99.126"
@ohai["network"]["default_interface"] = "eth2"
@ohai["network"]["default_inet6_gateway"] = "3ffe:1111:2222:0:4444::2"
@ohai["network"]["default_inet6_interface"] = "eth2"
end
it_does_not_fail
it "picks {ip,mac,ip6}address from the default interface" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "192.168.66.99"
@ohai["macaddress"].should == "00:16:3E:2F:36:81"
@ohai["ip6address"].should == "3ffe:1111:2222:0:4444::1"
end
end
# IPv6-only node: no ipaddress can be set (with warnings), but ip6address is
# detected and macaddress is derived from the IPv6 interface instead.
describe "ipv6 only node" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
# Strip every IPv4 address from every interface.
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| kv["family"] == "inet" }
end
end
it_does_not_fail
it "can't detect ipaddress" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
end
it "warns about not being able to set {ip,mac}address (ipv4)" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
@ohai._require_plugin("network")
end
it "sets {ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:1111:2222::33"
@ohai["macaddress"].should == "00:16:3E:2F:36:79"
end
it "informs about macaddress being set using the ipv6 setup" do
Ohai::Log.should_receive(:debug).with(/^macaddress set to 00:16:3E:2F:36:79 from the ipv6 setup/).once
Ohai::Log.should_receive(:debug).any_number_of_times
@ohai._require_plugin("network")
end
end
end
# Runs the same scenarios against each OS fixture in basic_data (e.g. linux,
# windows): attributes already set by the OS-specific network plugin must
# never be overwritten, while missing ones are still detected.
basic_data.keys.sort.each do |os|
describe "the #{os}::network has already set some of the {ip,mac,ip6}address attributes" do
before(:each) do
# Fresh Ohai system per example; require_plugin is stubbed so only the
# generic network plugin under test runs.
@ohai = Ohai::System.new
@ohai.stub!(:require_plugin).twice.and_return(true)
@ohai["network"] = basic_data[os]["network"]
end
describe "{ip,mac}address are already set" do
before do
@ohai["ipaddress"] = "10.11.12.13"
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
# Expected ip6address differs per OS fixture.
@expected_results = {
"linux" => {
"ip6address" => "3ffe:1111:2222::33"
},
"windows" => {
"ip6address" => "fe80::698d:3e37:7950:b28c"
}
}
end
it_does_not_fail
it "detects ip6address" do
@ohai._require_plugin("network")
@ohai["ip6address"].should == @expected_results[os]["ip6address"]
end
it "doesn't overwrite {ip,mac}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "10.11.12.13"
@ohai["macaddress"].should == "00:AA:BB:CC:DD:EE"
end
end
describe "ip6address is already set" do
describe "node has ipv4 and ipv6" do
before do
@ohai["ip6address"] = "3ffe:8888:9999::1"
@expected_results = {
"linux" => {
"ipaddress" => "192.168.66.33",
"macaddress" => "00:16:3E:2F:36:79"
},
"windows" => {
"ipaddress" => "172.19.0.130",
"macaddress" => "52:54:44:66:66:02"
}
}
end
it_does_not_fail
it "detects {ip,mac}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == @expected_results[os]["ipaddress"]
@ohai["macaddress"].should == @expected_results[os]["macaddress"]
end
it "doesn't overwrite ip6address" do
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
describe "ipv6 only node" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| kv["family"] == "inet" }
end
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "can't detect ipaddress (ipv4)" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
end
it "can't detect macaddress either" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["macaddress"].should be_nil
end
it "warns about not being able to set {ip,mac}address" do
Ohai::Log.should_receive(:warn).with(/^unable to detect ipaddress/).once
Ohai::Log.should_receive(:warn).with(/^unable to detect macaddress/).once
@ohai._require_plugin("network")
end
it "doesn't overwrite ip6address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
end
describe "{mac,ip6}address are already set" do
describe "valid ipv4 setup" do
before do
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@ohai["ip6address"] = "3ffe:8888:9999::1"
@expected_results = {
"linux" => {
"ipaddress" => "192.168.66.33",
"macaddress" => "00:16:3E:2F:36:79"
},
"windows" => {
"ipaddress" => "172.19.0.130",
"macaddress" => "52:54:44:66:66:02"
}
}
end
it_does_not_fail
# macaddress IS overwritten here: it must stay consistent with the
# freshly detected ipaddress.
it "detects ipaddress and overwrite macaddress" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == @expected_results[os]["ipaddress"]
@ohai["macaddress"].should == @expected_results[os]["macaddress"]
end
it "doesn't overwrite ip6address" do
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
describe "ipv6 only node" do
before do
@ohai["network"]["default_gateway"] = nil
@ohai["network"]["default_interface"] = nil
@ohai["network"]["interfaces"].each do |i,iv|
iv["addresses"].delete_if{|k,kv| kv["family"] == "inet" }
end
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "can't set ipaddress" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ipaddress"].should be_nil
end
it "doesn't overwrite {ip6,mac}address" do
Ohai::Log.should_receive(:warn).any_number_of_times
@ohai._require_plugin("network")
@ohai["ip6address"].should == "3ffe:8888:9999::1"
@ohai["macaddress"].should == "00:AA:BB:CC:DD:EE"
end
end
end
describe "{ip,mac,ip6}address are already set" do
before do
@ohai["ipaddress"] = "10.11.12.13"
@ohai["macaddress"] = "00:AA:BB:CC:DD:EE"
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "doesn't overwrite {ip,mac,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "10.11.12.13"
@ohai["macaddress"].should == "00:AA:BB:CC:DD:EE"
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
describe "{ip,ip6}address are already set" do
before do
@ohai["ipaddress"] = "10.11.12.13"
@ohai["ip6address"] = "3ffe:8888:9999::1"
end
it_does_not_fail
it "doesn't overwrite {ip,mac,ip6}address" do
@ohai._require_plugin("network")
@ohai["ipaddress"].should == "10.11.12.13"
@ohai["macaddress"].should == nil
@ohai["ip6address"].should == "3ffe:8888:9999::1"
end
end
end
end
end
end
|
#
# Cookbook Name:: universe_ubuntu
# Spec:: default
#
# Copyright (c) 2016 The Authors, All Rights Reserved.
require 'spec_helper'
describe 'universe_ubuntu::default' do
  context 'When all attributes are default, on an Ubuntu' do
    before do
      # Guard commands used by the recipe's not_if/only_if checks: pretend
      # conda, the universe env, and tensorboard are already present.
      stub_command('[ -x /home/vagrant/anaconda3/bin/conda ]').and_return(0)
      stub_command('[ -e /home/vagrant/anaconda3/envs/universe ]').and_return(0)
      stub_command('[ -x /home/vagrant/anaconda3/envs/universe/bin/tensorboard ]').and_return(0)
    end

    # Converge on Ubuntu 14.04 with GPU enabled and a fixed kernel version.
    let(:chef_run) do
      runner = ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '14.04') do |node|
        node.override['universe']['user'] = 'vagrant'
        node.override['universe']['home'] = '/home/vagrant'
        node.override['universe']['gpu'] = true
        node.automatic['os_version'] = 'specific_kernel_version'
      end
      runner.converge(described_recipe)
    end

    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end

    # Build/runtime packages the recipe is expected to install.
    system_packages = %w[
      golang libjpeg-turbo8-dev make tmux htop chromium-browser git cmake
      zlib1g-dev libjpeg-dev xvfb libav-tools xorg-dev python-opengl
      libboost-all-dev libsdl2-dev swig
    ]
    system_packages.each do |pkg|
      it "install #{pkg} package" do
        expect(chef_run).to install_package pkg
      end
    end

    it 'creates remote_file anaconda if missing' do
      owner_name = 'vagrant'
      installer = "#{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh"
      expect(chef_run).to create_remote_file_if_missing(installer).with(
        owner: owner_name,
        group: owner_name,
        mode: '0755',
        checksum: '73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78'
      )
    end

    it 'installs anaconda' do
      # Stubbed conda check means the installer execute must be skipped.
      expect(chef_run).to_not run_execute("bash #{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh -b")
        .with(user: 'vagrant')
    end

    it 'creates conda env file' do
      expect(chef_run).to create_cookbook_file('/home/vagrant/environment.yml')
    end

    it 'creates conda environment' do
      # Env already exists (stubbed), so creation must be skipped.
      expect(chef_run).to_not run_execute('conda env create -f environment.yml')
        .with(user: 'vagrant', cwd: '/home/vagrant')
    end

    it 'Installs Tensorflow' do
      env_root = '/home/vagrant/anaconda3/envs/universe'
      pip_install = "#{env_root}/bin/pip install --ignore-installed --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow-0.11.0-cp35-cp35m-linux_x86_64.whl"
      expect(chef_run).to_not run_execute(pip_install).with(
        user: 'vagrant',
        environment: {
          PATH: "#{env_root}/bin:#{ENV['PATH']}",
          CONDA_PREFIX: env_root,
          CONDA_DEFAULT_ENV: 'universe'
        }
      )
    end

    # Docker prerequisites, including the kernel-specific extras package.
    %w[
      linux-image-extra-specific_kernel_version
      linux-image-extra-virtual
      docker-engine
    ].each do |pkg|
      it "Installs #{pkg} package" do
        expect(chef_run).to install_package(pkg)
      end
    end

    it 'Clone gym repo' do
      expect(chef_run).to sync_git("#{Chef::Config[:file_cache_path]}/gym")
    end
  end
end
Add a unit test for the git sync of the universe repo
#
# Cookbook Name:: universe_ubuntu
# Spec:: default
#
# Copyright (c) 2016 The Authors, All Rights Reserved.
require 'spec_helper'
describe 'universe_ubuntu::default' do
  context 'When all attributes are default, on an Ubuntu' do
    before do
      # Guard commands used by the recipe's not_if/only_if checks: pretend
      # conda, the universe env, and tensorboard are already present.
      stub_command('[ -x /home/vagrant/anaconda3/bin/conda ]').and_return(0)
      stub_command('[ -e /home/vagrant/anaconda3/envs/universe ]').and_return(0)
      stub_command('[ -x /home/vagrant/anaconda3/envs/universe/bin/tensorboard ]').and_return(0)
    end

    # Converge on Ubuntu 14.04 with GPU enabled and a fixed kernel version.
    let(:chef_run) do
      runner = ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '14.04') do |node|
        node.override['universe']['user'] = 'vagrant'
        node.override['universe']['home'] = '/home/vagrant'
        node.override['universe']['gpu'] = true
        node.automatic['os_version'] = 'specific_kernel_version'
      end
      runner.converge(described_recipe)
    end

    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end

    # Build/runtime packages the recipe is expected to install.
    system_packages = %w[
      golang libjpeg-turbo8-dev make tmux htop chromium-browser git cmake
      zlib1g-dev libjpeg-dev xvfb libav-tools xorg-dev python-opengl
      libboost-all-dev libsdl2-dev swig
    ]
    system_packages.each do |pkg|
      it "install #{pkg} package" do
        expect(chef_run).to install_package pkg
      end
    end

    it 'creates remote_file anaconda if missing' do
      owner_name = 'vagrant'
      installer = "#{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh"
      expect(chef_run).to create_remote_file_if_missing(installer).with(
        owner: owner_name,
        group: owner_name,
        mode: '0755',
        checksum: '73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78'
      )
    end

    it 'installs anaconda' do
      # Stubbed conda check means the installer execute must be skipped.
      expect(chef_run).to_not run_execute("bash #{Chef::Config[:file_cache_path]}/Anaconda3-4.2.0-Linux-x86_64.sh -b")
        .with(user: 'vagrant')
    end

    it 'creates conda env file' do
      expect(chef_run).to create_cookbook_file('/home/vagrant/environment.yml')
    end

    it 'creates conda environment' do
      # Env already exists (stubbed), so creation must be skipped.
      expect(chef_run).to_not run_execute('conda env create -f environment.yml')
        .with(user: 'vagrant', cwd: '/home/vagrant')
    end

    it 'Installs Tensorflow' do
      env_root = '/home/vagrant/anaconda3/envs/universe'
      pip_install = "#{env_root}/bin/pip install --ignore-installed --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow-0.11.0-cp35-cp35m-linux_x86_64.whl"
      expect(chef_run).to_not run_execute(pip_install).with(
        user: 'vagrant',
        environment: {
          PATH: "#{env_root}/bin:#{ENV['PATH']}",
          CONDA_PREFIX: env_root,
          CONDA_DEFAULT_ENV: 'universe'
        }
      )
    end

    # Docker prerequisites, including the kernel-specific extras package.
    %w[
      linux-image-extra-specific_kernel_version
      linux-image-extra-virtual
      docker-engine
    ].each do |pkg|
      it "Installs #{pkg} package" do
        expect(chef_run).to install_package(pkg)
      end
    end

    it 'Clone gym repo' do
      expect(chef_run).to sync_git("#{Chef::Config[:file_cache_path]}/gym")
    end

    it 'Clone universe repo' do
      expect(chef_run).to sync_git("#{Chef::Config[:file_cache_path]}/universe")
    end
  end
end
|
#
# Cookbook:: asf
# Spec:: default
#
# Copyright:: 2017, Tyler Wong, All Rights Reserved.
require 'spec_helper'
describe 'asf::mono' do
  context 'When all attributes are default' do
    # Converge the mono recipe on a bare Ubuntu 16.04 node.
    let(:chef_run) do
      ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04').converge(described_recipe)
    end

    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end

    # Disabled: recipe-inclusion assertions belong to the default-recipe spec.
    # it 'includes the mono and service recipes' do
    #   expect(chef_run).to include_recipe('asf::mono')
    #   expect(chef_run).to include_recipe('asf::service')
    # end
  end
end
Uncomment the recipe-inclusion test in default_spec and target the default recipe
#
# Cookbook:: asf
# Spec:: default
#
# Copyright:: 2017, Tyler Wong, All Rights Reserved.
require 'spec_helper'
describe 'asf::default' do
  context 'When all attributes are default' do
    # Converge the default recipe on a bare Ubuntu 16.04 node.
    let(:chef_run) do
      ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04').converge(described_recipe)
    end

    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end

    it 'includes the mono and service recipes' do
      expect(chef_run).to include_recipe('asf::mono')
      expect(chef_run).to include_recipe('asf::service')
    end
  end
end
|
RSpec.describe Valanga::MusicSearch do
  # NOTE: Spec is too slow, so the capybara session is cached across all
  # examples via a single before(:all) host object.
  before(:all) do
    host_class = Class.new do
      include Valanga::MusicSearch

      attr_accessor :session
      attr_reader :pages

      def initialize
        @pages = {}
      end

      # Lazily log in once and reuse the session for every example.
      def session
        @session ||= Valanga::Client.new(ENV['KID'], ENV['K_PASSWORD']).session
      end
    end
    @searcher = host_class.new
  end

  describe '#list_musics' do
    context 'with valid parameters' do
      it { expect { @searcher.list_musics }.not_to raise_error }

      it { expect { @searcher.list_musics(page: 1) }.not_to raise_error }

      it { expect { @searcher.list_musics(sorttype: :music_name) }.not_to raise_error }

      it { expect { @searcher.list_musics(sort: :asc) }.not_to raise_error }

      context 'when page is over' do
        it { expect(@searcher.list_musics(page: 10_000)).to eq([]) }
      end
    end

    context 'with invalid parameters' do
      context 'given `page` is not integer' do
        it { expect { @searcher.list_musics(page: "aaa") }.to raise_error(ArgumentError) }
      end

      context 'given `sorttype` is not included in `music_name` or `basic` or ` medium` or `hard` or `special`' do
        it { expect { @searcher.list_musics(sorttype: "aaa") }.to raise_error(ArgumentError) }
      end

      context 'given `sort` is not included in `asc` or `desc`' do
        it { expect { @searcher.list_musics(sort: "aaa") }.to raise_error(ArgumentError) }
      end
    end
  end

  describe '#search' do
    context 'when finds the music' do
      it { expect(@searcher.search('RPG')).to be_instance_of(Valanga::Music) }
    end

    context 'when finds the music (page=2)' do
      it { expect(@searcher.search('鬼天')).to be_instance_of(Valanga::Music) }
    end

    context 'when does not find the music' do
      it { expect(@searcher.search('muuuusic')).to be_nil }
    end
  end

  describe '#music_image_url' do
    context 'when finds the music' do
      it do
        expect(@searcher.music_image_url('愛は不死鳥の様に'))
          .to eq 'http://p.eagate.573.jp/game/reflec/groovin/p/images/binary.html?img=sXEPaas8apvqg8BnZ5drKuUHpNt%2FY1N6%2FlbAdjzBE6w%3D'
      end
    end

    context 'when finds the music (page=2)' do
      it do
        expect(@searcher.music_image_url('朧'))
          .to eq 'http://p.eagate.573.jp/game/reflec/groovin/p/images/binary.html?img=h1Qz2wJDnzV6GI1YiQFB5dXowq2lHWQYLgrIgHJX%2FxM%3D'
      end
    end
  end
end
Refactor spec
RSpec.describe Valanga::MusicSearch do
  # NOTE: Spec is too slow, so the capybara session is cached across all
  # examples via a single before(:all) host object.
  before(:all) do
    host_class = Class.new do
      include Valanga::MusicSearch

      attr_accessor :session
      attr_reader :music_ids

      def initialize
        @music_ids = {}
      end

      # Lazily build the client session once and reuse it everywhere.
      def session
        @session ||= Valanga::Client.new.session
      end
    end
    @searcher = host_class.new
  end

  describe '#list_musics' do
    context 'with valid parameters' do
      it { expect { @searcher.list_musics }.not_to raise_error }

      it { expect { @searcher.list_musics(page: 1) }.not_to raise_error }

      it { expect { @searcher.list_musics(sorttype: :music_name) }.not_to raise_error }

      it { expect { @searcher.list_musics(sort: :asc) }.not_to raise_error }

      context 'when page is over' do
        it { expect(@searcher.list_musics(page: 10_000)).to eq([]) }
      end
    end

    context 'with invalid parameters' do
      context 'given `page` is not integer' do
        it { expect { @searcher.list_musics(page: "aaa") }.to raise_error(ArgumentError) }
      end

      context 'given `sorttype` is not included in `music_name` or `basic` or ` medium` or `hard` or `special`' do
        it { expect { @searcher.list_musics(sorttype: "aaa") }.to raise_error(ArgumentError) }
      end

      context 'given `sort` is not included in `asc` or `desc`' do
        it { expect { @searcher.list_musics(sort: "aaa") }.to raise_error(ArgumentError) }
      end
    end
  end

  describe '#search' do
    context 'when finds the music' do
      it { expect(@searcher.search('RPG')).to be_instance_of(Valanga::Music) }
    end

    context 'when finds the music (page=2)' do
      it { expect(@searcher.search('鬼天')).to be_instance_of(Valanga::Music) }
    end

    context 'when does not find the music' do
      it { expect(@searcher.search('muuuusic')).to be_nil }
    end
  end

  describe '#music_image_url' do
    context 'when finds the music' do
      it do
        expect(@searcher.music_image_url('愛は不死鳥の様に'))
          .to eq 'http://p.eagate.573.jp/game/reflec/groovin/p/images/binary.html?img=sXEPaas8apvqg8BnZ5drKuUHpNt%2FY1N6%2FlbAdjzBE6w%3D'
      end
    end

    context 'when finds the music (page=2)' do
      it do
        expect(@searcher.music_image_url('朧'))
          .to eq 'http://p.eagate.573.jp/game/reflec/groovin/p/images/binary.html?img=h1Qz2wJDnzV6GI1YiQFB5dXowq2lHWQYLgrIgHJX%2FxM%3D'
      end
    end
  end
end
|
require 'spec_helper'
# Smoke tests for the ApiRange JS builder API: each example builds an xlsx
# from a JS asset and asserts on the parsed OOXML result. Examples guarded by
# `skip if builder.semver < ...` exercise API added in that builder version.
describe 'ApiRange section tests' do
  it 'ApiRange | GetCol method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_col.js')
    expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('3')
  end
  it 'ApiRange | Col method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/getter_col.js')
    expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('3')
  end
  it 'ApiRange | GetRow method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_row.js')
    expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('8')
  end
  it 'ApiRange | Row method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/getter_row.js')
    expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('8')
  end
  it 'ApiRange | SetAlignHorizontal method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_align_horizontal.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.alignment.horizontal).to eq(:center)
      end
    end
  end
  it 'ApiRange | AlignHorizontal method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_align_horizontal.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.alignment.horizontal).to eq(:center)
      end
    end
  end
  it 'ApiRange | SetAlignVertical method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_align_vertical.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.alignment.vertical).to eq(:top)
      end
    end
  end
  it 'ApiRange | AlignVertical method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_align_vertical.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.alignment.vertical).to eq(:top)
      end
    end
  end
  it 'ApiRange | SetFontColor method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_font_color.js')
    expect(xlsx.worksheets.first.rows[1]
               .cells.first.style
               .font.color.rgb).to eq(OoxmlParser::Color.new(49, 133, 154))
    expect(xlsx.worksheets.first.rows[3]
               .cells.first.style
               .font.color.theme).to eq(1)
  end
  it 'ApiRange | FontColor method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_font_color.js')
    expect(xlsx.worksheets.first.rows[1]
               .cells.first.style
               .font.color.rgb).to eq(OoxmlParser::Color.new(49, 133, 154))
    expect(xlsx.worksheets.first.rows[3]
               .cells.first.style
               .font.color.theme).to eq(1)
  end
  it 'ApiRange | SetFontName method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_font_name.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.font.name).to eq('Arial')
      end
    end
  end
  it 'ApiRange | SetFontSize method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_font_size.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.font.size).to eq(20)
      end
    end
  end
  it 'ApiRange | FontSize method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_font_size.js')
    xlsx.worksheets.first.rows.each do |current_row|
      current_row.cells.each do |current_cell|
        expect(current_cell.style.font.size).to eq(20)
      end
    end
  end
  it 'ApiRange | SetValue method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_value.js')
    expect(xlsx.worksheets.first.rows.first.cells[1].text).to eq('2')
    expect(xlsx.worksheets.first.rows[1].cells[1].text).to eq('2')
    expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('2x2=')
    expect(xlsx.worksheets.first.rows[2].cells[1].text).to eq('4')
  end
  it 'ApiRange | Value method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_value.js')
    expect(xlsx.worksheets.first.rows.first.cells[1].text).to eq('2')
    expect(xlsx.worksheets.first.rows[1].cells[1].text).to eq('2')
    expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('2x2=')
    expect(xlsx.worksheets.first.rows[2].cells[1].text).to eq('4')
  end
  it 'ApiRange | SetBold method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_bold.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Bold text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.bold).to be_truthy
  end
  it 'ApiRange | Bold method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_bold.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Bold text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.bold).to be_truthy
  end
  it 'ApiRange | SetItalic method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_italic.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Italicized text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.italic).to be_truthy
  end
  it 'ApiRange | Italic method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_italic.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Italicized text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.italic).to be_truthy
  end
  it 'ApiRange | SetUnderline method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_underline.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('The text underlined with a single line')
    expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('Normal text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.underlined).to eq(:single)
    expect(xlsx.worksheets.first.rows[3].cells[0].style.font.font_style.underlined).to eq(:none)
  end
  it 'ApiRange | Underline method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_underline.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('The text underlined with a single line')
    expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('Normal text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.underlined).to eq(:single)
    expect(xlsx.worksheets.first.rows[3].cells[0].style.font.font_style.underlined).to eq(:none)
  end
  it 'ApiRange | SetStrikeout method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_strikeout.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Struckout text')
    expect(xlsx.worksheets.first.rows[2].cells[0].raw_text).to eq('Normal text')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.strike).to eq(:single)
    expect(xlsx.worksheets.first.rows[2].cells[0].style.font.font_style.strike).to eq(:none)
  end
  it 'ApiRange | SetFillColor method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_fill_color.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
    expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.foreground_color).to eq(OoxmlParser::Color.new(255, 224, 204))
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('This is the cell with a color set to its background')
    expect(xlsx.worksheets.first.rows[3].cells[0].style.fill_color).to be_nil
    expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('This is the cell with a default background color')
  end
  it 'ApiRange | FillColor method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_fill_color.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
    expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.foreground_color).to eq(OoxmlParser::Color.new(255, 224, 204))
    expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('This is the cell with a color set to its background')
    expect(xlsx.worksheets.first.rows[3].cells[0].style.fill_color).to be_nil
    expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('This is the cell with a default background color')
  end
  it 'ApiRange | SetNumberFormat method' do
    formats = ['General', '0.00', '$#,##0.00', '_($* #,##0.00_)', 'm/d/yyyy', '[$-F800]dddd, mmmm dd, yyyy', '[$-F400]h:mm:ss AM/PM', '0.00%', '0%', '# ?/?', '0.00E+00']
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_number_format.js')
    formats.each_with_index do |current_format, i|
      expect(xlsx.worksheets.first.rows[i + 1].cells[0].raw_text).to eq('123456')
      expect(xlsx.worksheets.first.rows[i + 1].cells[0].style.numerical_format).to eq(current_format)
    end
    expect(xlsx.worksheets.first.rows.last.cells[0].style.apply_number_format).to be_truthy
  end
  it 'ApiRange | NumberFormat method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    formats = ['General', '0.00', '$#,##0.00', '_($* #,##0.00_)', 'm/d/yyyy', '[$-F800]dddd, mmmm dd, yyyy', '[$-F400]h:mm:ss AM/PM', '0.00%', '0%', '# ?/?', '0.00E+00']
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_number_format.js')
    formats.each_with_index do |current_format, i|
      expect(xlsx.worksheets.first.rows[i + 1].cells[0].raw_text).to eq('123456')
      expect(xlsx.worksheets.first.rows[i + 1].cells[0].style.numerical_format).to eq(current_format)
    end
    expect(xlsx.worksheets.first.rows.last.cells[0].style.apply_number_format).to be_truthy
  end
  it 'ApiRange | Merge method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/merge.js')
    expect(xlsx.worksheets.first.merge).to eq(['A3:E3', 'A4:E4', 'A5:E5', 'A6:E6', 'A7:E7', 'A8:E8', 'A9:E14'])
  end
  it 'ApiRange | Unmerge method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/un_merge.js')
    expect(xlsx.worksheets.first.merge).to eq(['A3:E3', 'A4:E4', 'A6:E6', 'A7:E7', 'A8:E8'])
  end
  it 'ApiRange | SetBorders method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_borders.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.bottom.color).to eq(OoxmlParser::Color.new(49, 133, 154))
    expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.bottom.style).to eq(:thick)
    # be_nil is the idiomatic RSpec matcher for nil (was eq(nil)).
    expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.left.color).to be_nil
    expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.right.color).to be_nil
    expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.top.color).to be_nil
  end
  it 'ApiRange | SetWrap method' do
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_wrap.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
    expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
  end
  it 'ApiRange | SetWrapText method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_wrap_text.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
    expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
  end
  it 'ApiRange | Wrap property' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/wrap_property.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
    expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
  end
  it 'ApiRange | GetValue method' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_value.js')
    expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('Inserted text')
  end
  it 'ApiRange | Value Getter' do
    skip if builder.semver < Semantic::Version.new('5.1.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/getter_value.js')
    expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('Inserted text')
  end
  it 'ApiRange | SetColumnWidth method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_column_width.js')
    expect(xlsx.worksheets.first.columns.first.width.to_i).to eq(15)
  end
  it 'ApiRange | SetRowHeight method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_row_height.js')
    expect(xlsx.worksheets.first.rows.first.height).to eq(OoxmlParser::OoxmlSize.new(15, :point))
  end
  it 'ApiRange | GetRowHeight method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_row_height.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
  end
  it 'ApiRange | GetColumnWidth method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_column_width.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
  end
  it 'ApiRange | ColumnWidth getter' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/column_width_getter.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
  end
  it 'ApiRange | RowHeight getter' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/row_height_getter.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('30')
  end
  it 'ApiRange | ForEach' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/for_each.js')
    expect(xlsx.worksheets.first.rows[0].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
    expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
  end
  it 'ApiRange | Width getter' do
    pending('https://bugzilla.onlyoffice.com/show_bug.cgi?id=37730')
    # skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/width_getter.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
  end
  it 'ApiRange | Height getter' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/height_getter.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('30')
  end
  it 'ApiRange | SetHidden method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_hidden.js')
    expect(xlsx.worksheets.first.columns.first.hidden).to be_truthy
  end
  it 'ApiRange | GetHidden method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_hidden.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('1')
  end
  it 'ApiRange | Hidden getter' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    # NOTE(review): reuses get_hidden.js rather than a getter-specific asset;
    # confirm whether a dedicated hidden_getter.js asset was intended.
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_hidden.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('1')
  end
  it 'ApiRange | GetCount method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_count.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('9')
  end
  it 'ApiRange | Count getter' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/count_getter.js')
    expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('9')
  end
  it 'ApiRange | MergeArea property' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/merge_area_property.js')
    expect(xlsx.worksheets.first.rows[3].cells.first.text).to eq('9')
  end
  it 'ApiRange | WrapText property' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/wrap_text_property.js')
    expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
    expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
  end
  it 'ApiRange | GetWrapText method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_wrap_text.js')
    expect(xlsx.worksheets.first.rows[2].cells.first.text).to eq('1')
    expect(xlsx.worksheets.first.rows[3].cells.first.text).to eq('0')
  end
  it 'ApiRange | SetOffset method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_offset.js')
    expect(xlsx.worksheets.first.rows[3].cells[1].text).to eq('Text')
  end
  it 'ApiRange | GetAddress method' do
    skip if builder.semver < Semantic::Version.new('5.2.0')
    xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_address.js')
    expect(xlsx.worksheets.first.rows[0].cells[0].text).to eq('A1')
  end
end
# Skipped: test not working on develop (#218).
# According to Sergey Konvalov it needs the newest core,
# but getting that to work may be troublesome.
# Since:
# https://github.com/ONLYOFFICE/sdkjs/commit/95b141d0ff5c8b3cc01a488a71791e601c6cc01e
require 'spec_helper'
describe 'ApiRange section tests' do
it 'ApiRange | GetCol method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_col.js')
expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('3')
end
it 'ApiRange | Col method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/getter_col.js')
expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('3')
end
it 'ApiRange | GetRow method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_row.js')
expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('8')
end
it 'ApiRange | Row method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/getter_row.js')
expect(xlsx.worksheets.first.rows[1].cells.first.text).to eq('8')
end
it 'ApiRange | SetAlignHorizontal method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_align_horizontal.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.alignment.horizontal).to eq(:center)
end
end
end
it 'ApiRange | AlignHorizontal method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_align_horizontal.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.alignment.horizontal).to eq(:center)
end
end
end
it 'ApiRange | SetAlignVertical method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_align_vertical.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.alignment.vertical).to eq(:top)
end
end
end
it 'ApiRange | AlignVertical method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_align_vertical.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.alignment.vertical).to eq(:top)
end
end
end
it 'ApiRange | SetFontColor method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_font_color.js')
expect(xlsx.worksheets.first.rows[1]
.cells.first.style
.font.color.rgb).to eq(OoxmlParser::Color.new(49, 133, 154))
expect(xlsx.worksheets.first.rows[3]
.cells.first.style
.font.color.theme).to eq(1)
end
it 'ApiRange | FontColor method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_font_color.js')
expect(xlsx.worksheets.first.rows[1]
.cells.first.style
.font.color.rgb).to eq(OoxmlParser::Color.new(49, 133, 154))
expect(xlsx.worksheets.first.rows[3]
.cells.first.style
.font.color.theme).to eq(1)
end
it 'ApiRange | SetFontName method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_font_name.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.font.name).to eq('Arial')
end
end
end
it 'ApiRange | SetFontSize method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_font_size.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.font.size).to eq(20)
end
end
end
it 'ApiRange | FontSize method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_font_size.js')
xlsx.worksheets.first.rows.each do |current_row|
current_row.cells.each do |current_cell|
expect(current_cell.style.font.size).to eq(20)
end
end
end
it 'ApiRange | SetValue method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_value.js')
expect(xlsx.worksheets.first.rows.first.cells[1].text).to eq('2')
expect(xlsx.worksheets.first.rows[1].cells[1].text).to eq('2')
expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('2x2=')
expect(xlsx.worksheets.first.rows[2].cells[1].text).to eq('4')
end
it 'ApiRange | Value method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_value.js')
expect(xlsx.worksheets.first.rows.first.cells[1].text).to eq('2')
expect(xlsx.worksheets.first.rows[1].cells[1].text).to eq('2')
expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('2x2=')
expect(xlsx.worksheets.first.rows[2].cells[1].text).to eq('4')
end
it 'ApiRange | SetBold method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_bold.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Bold text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.bold).to be_truthy
end
it 'ApiRange | Bold method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_bold.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Bold text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.bold).to be_truthy
end
it 'ApiRange | SetItalic method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_italic.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Italicized text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.italic).to be_truthy
end
it 'ApiRange | Italic method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_italic.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Italicized text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.italic).to be_truthy
end
it 'ApiRange | SetUnderline method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_underline.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('The text underlined with a single line')
expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('Normal text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.underlined).to eq(:single)
expect(xlsx.worksheets.first.rows[3].cells[0].style.font.font_style.underlined).to eq(:none)
end
it 'ApiRange | Underline method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_underline.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('The text underlined with a single line')
expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('Normal text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.underlined).to eq(:single)
expect(xlsx.worksheets.first.rows[3].cells[0].style.font.font_style.underlined).to eq(:none)
end
it 'ApiRange | SetStrikeout method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_strikeout.js')
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('Struckout text')
expect(xlsx.worksheets.first.rows[2].cells[0].raw_text).to eq('Normal text')
expect(xlsx.worksheets.first.rows[1].cells[0].style.font.font_style.strike).to eq(:single)
expect(xlsx.worksheets.first.rows[2].cells[0].style.font.font_style.strike).to eq(:none)
end
it 'ApiRange | SetFillColor method' do
skip('Not working on develop because need newest core')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_fill_color.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.foreground_color).to eq(OoxmlParser::Color.new(255, 224, 204))
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('This is the cell with a color set to its background')
expect(xlsx.worksheets.first.rows[3].cells[0].style.fill_color).to be_nil
expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('This is the cell with a default background color')
end
it 'ApiRange | FillColor method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_fill_color.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.foreground_color).to eq(OoxmlParser::Color.new(255, 224, 204))
expect(xlsx.worksheets.first.rows[1].cells[0].raw_text).to eq('This is the cell with a color set to its background')
expect(xlsx.worksheets.first.rows[3].cells[0].style.fill_color).to be_nil
expect(xlsx.worksheets.first.rows[3].cells[0].raw_text).to eq('This is the cell with a default background color')
end
it 'ApiRange | SetNumberFormat method' do
formats = ['General', '0.00', '$#,##0.00', '_($* #,##0.00_)', 'm/d/yyyy', '[$-F800]dddd, mmmm dd, yyyy', '[$-F400]h:mm:ss AM/PM', '0.00%', '0%', '# ?/?', '0.00E+00']
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_number_format.js')
formats.each_with_index do |current_format, i|
expect(xlsx.worksheets.first.rows[i + 1].cells[0].raw_text).to eq('123456')
expect(xlsx.worksheets.first.rows[i + 1].cells[0].style.numerical_format).to eq(current_format)
end
expect(xlsx.worksheets.first.rows.last.cells[0].style.apply_number_format).to be_truthy
end
it 'ApiRange | NumberFormat method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
formats = ['General', '0.00', '$#,##0.00', '_($* #,##0.00_)', 'm/d/yyyy', '[$-F800]dddd, mmmm dd, yyyy', '[$-F400]h:mm:ss AM/PM', '0.00%', '0%', '# ?/?', '0.00E+00']
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/setter_number_format.js')
formats.each_with_index do |current_format, i|
expect(xlsx.worksheets.first.rows[i + 1].cells[0].raw_text).to eq('123456')
expect(xlsx.worksheets.first.rows[i + 1].cells[0].style.numerical_format).to eq(current_format)
end
expect(xlsx.worksheets.first.rows.last.cells[0].style.apply_number_format).to be_truthy
end
it 'ApiRange | Merge method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/merge.js')
expect(xlsx.worksheets.first.merge).to eq(['A3:E3', 'A4:E4', 'A5:E5', 'A6:E6', 'A7:E7', 'A8:E8', 'A9:E14'])
end
it 'ApiRange | Unmerge method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/un_merge.js')
expect(xlsx.worksheets.first.merge).to eq(['A3:E3', 'A4:E4', 'A6:E6', 'A7:E7', 'A8:E8'])
end
it 'ApiRange | SetBorders method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_borders.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.bottom.color).to eq(OoxmlParser::Color.new(49, 133, 154))
expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.bottom.style).to eq(:thick)
expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.left.color).to eq(nil)
expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.right.color).to eq(nil)
expect(xlsx.worksheets.first.rows[1].cells[0].style.borders.top.color).to eq(nil)
end
it 'ApiRange | SetWrap method' do
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_wrap.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
end
it 'ApiRange | SetWrapText method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_wrap_text.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
end
# --- ApiRange smoke examples (interior of an outer `describe`; its opening is above this chunk) ---
# Each example compiles a JS macro fixture with the document builder, parses the
# resulting XLSX, and asserts on the parsed model. The `skip if builder.semver < ...`
# guards gate each example on the builder release that introduced the API.
it 'ApiRange | Wrap property' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/wrap_property.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
end
it 'ApiRange | GetValue method' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_value.js')
expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('Inserted text')
end
it 'ApiRange | Value Getter' do
skip if builder.semver < Semantic::Version.new('5.1.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/getter_value.js')
expect(xlsx.worksheets.first.rows[2].cells[0].text).to eq('Inserted text')
end
it 'ApiRange | SetColumnWidth method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_column_width.js')
expect(xlsx.worksheets.first.columns.first.width.to_i).to eq(15)
end
it 'ApiRange | SetRowHeight method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_row_height.js')
expect(xlsx.worksheets.first.rows.first.height).to eq(OoxmlParser::OoxmlSize.new(15, :point))
end
# The Get*/getter examples below write the queried value into a cell from JS,
# so the assertions check cell text rather than sheet geometry.
it 'ApiRange | GetRowHeight method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_row_height.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
end
it 'ApiRange | GetColumnWidth method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_column_width.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
end
it 'ApiRange | ColumnWidth getter' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/column_width_getter.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
end
it 'ApiRange | RowHeight getter' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/row_height_getter.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('30')
end
it 'ApiRange | ForEach' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/for_each.js')
expect(xlsx.worksheets.first.rows[0].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
expect(xlsx.worksheets.first.rows[1].cells[0].style.fill_color.pattern_fill.background_color).to eq(OoxmlParser::Color.new(255, 224, 204))
end
it 'ApiRange | Width getter' do
# Version guard left commented out while the example is pending on the bug below.
pending('https://bugzilla.onlyoffice.com/show_bug.cgi?id=37730')
# skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/width_getter.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('15')
end
it 'ApiRange | Height getter' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/height_getter.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('30')
end
it 'ApiRange | SetHidden method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_hidden.js')
expect(xlsx.worksheets.first.columns.first.hidden).to be_truthy
end
it 'ApiRange | GetHidden method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_hidden.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('1')
end
it 'ApiRange | Hidden getter' do
skip if builder.semver < Semantic::Version.new('5.2.0')
# NOTE(review): reuses get_hidden.js — the same fixture as the GetHidden example
# above. Verify a dedicated hidden_getter.js fixture was not intended here.
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_hidden.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('1')
end
it 'ApiRange | GetCount method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_count.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('9')
end
it 'ApiRange | Count getter' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/count_getter.js')
expect(xlsx.worksheets.first.rows.first.cells.first.text).to eq('9')
end
it 'ApiRange | MergeArea property' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/merge_area_property.js')
expect(xlsx.worksheets.first.rows[3].cells.first.text).to eq('9')
end
it 'ApiRange | WrapText property' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/wrap_text_property.js')
expect(xlsx.worksheets.first.rows[1].cells[0].style.alignment.wrap_text).to be_truthy
expect(xlsx.worksheets.first.rows[1].cells[2].style.alignment.wrap_text).to be_falsey
end
it 'ApiRange | GetWrapText method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_wrap_text.js')
expect(xlsx.worksheets.first.rows[2].cells.first.text).to eq('1')
expect(xlsx.worksheets.first.rows[3].cells.first.text).to eq('0')
end
it 'ApiRange | SetOffset method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/set_offset.js')
expect(xlsx.worksheets.first.rows[3].cells[1].text).to eq('Text')
end
it 'ApiRange | GetAddress method' do
skip if builder.semver < Semantic::Version.new('5.2.0')
xlsx = builder.build_and_parse('asserts/js/xlsx/smoke/api_range/get_address.js')
expect(xlsx.worksheets.first.rows[0].cells[0].text).to eq('A1')
end
end
|
require_relative 'test_helper'

# API tests for the environment endpoints (GET /api/v1/environment/...).
# NOTE(review): the class name `SearchTest` does not match the subject under
# test (environments); kept unchanged because renaming the constant could
# break external references.
class SearchTest < ActiveSupport::TestCase
  def setup
    @person = create_user('testing').person
  end

  attr_reader :person

  should 'return the default environment' do
    default = Environment.default
    get "/api/v1/environment/default"
    json = JSON.parse(last_response.body)
    assert_equal default.id, json['id']
  end

  should 'return created environment' do
    other = fast_create(Environment)
    default = Environment.default
    assert_not_equal other.id, default.id
    get "/api/v1/environment/#{other.id}"
    json = JSON.parse(last_response.body)
    assert_equal other.id, json['id']
  end

  should 'return context environment' do
    # Renamed from camelCase `contextEnv` to snake_case per Ruby convention.
    context_env = fast_create(Environment)
    context_env.name = "example.org"
    context_env.save
    default = Environment.default
    assert_not_equal context_env.id, default.id
    get "/api/v1/environment/context"
    json = JSON.parse(last_response.body)
    assert_equal context_env.id, json['id']
  end

  should 'return environment boxes' do
    default = Environment.default
    default.boxes << Box.new
    default.boxes[0].blocks << Block.new
    default.save!
    assert !default.boxes.empty?
    get "/api/v1/environments/#{default.id}/boxes"
    json = JSON.parse(last_response.body)
    assert_equal "boxes", json.first[0]
    assert_not_equal [], json.first[1]
  end
end
Cosmetic changes: rename local variables for clarity
require_relative 'test_helper'

# Exercises the environment API: default lookup, lookup by id, context
# resolution by hostname, and the boxes listing endpoint.
class SearchTest < ActiveSupport::TestCase
  attr_reader :person

  def setup
    @person = create_user('testing').person
  end

  should 'return the default environment' do
    env = Environment.default
    get "/api/v1/environment/default"
    body = JSON.parse(last_response.body)
    assert_equal env.id, body['id']
  end

  should 'return created environment' do
    created = fast_create(Environment)
    assert_not_equal created.id, Environment.default.id
    get "/api/v1/environment/#{created.id}"
    body = JSON.parse(last_response.body)
    assert_equal created.id, body['id']
  end

  should 'return context environment' do
    # The context environment is matched by request hostname.
    env = fast_create(Environment)
    env.name = "example.org"
    env.save
    assert_not_equal env.id, Environment.default.id
    get "/api/v1/environment/context"
    body = JSON.parse(last_response.body)
    assert_equal env.id, body['id']
  end

  should 'return environment boxes' do
    env = Environment.default
    env.boxes << Box.new
    added_box = env.boxes.last
    env.boxes[0].blocks << Block.new
    env.save!
    assert !env.boxes.empty?
    get "/api/v1/environments/#{env.id}/boxes"
    body = JSON.parse(last_response.body)
    assert_equal "boxes", body.first[0]
    assert_equal added_box.id, body['boxes'].first['id']
  end
end
|
require 'test_helper'

# Covers the IRB adapter of WebConsole::REPL: result formatting, session and
# binding semantics, output-stream capture, and prompt selection.
class IRBTest < ActiveSupport::TestCase
  setup do
    @irb1 = @irb = WebConsole::REPL::IRB.new
    @irb2 = WebConsole::REPL::IRB.new
  end

  test 'sending input returns the result as output' do
    assert_equal format(return_prompt, "42"), @irb.send_input('foo = 42')
  end

  test 'preserves the session in the binding' do
    assert_equal format(return_prompt, "42"), @irb.send_input('foo = 42')
    assert_equal format(return_prompt, "50"), @irb.send_input('foo + 8')
  end

  test 'session isolation requires own bindings' do
    isolated_a = WebConsole::REPL::IRB.new(Object.new.instance_eval('binding'))
    isolated_b = WebConsole::REPL::IRB.new(Object.new.instance_eval('binding'))
    assert_equal format(return_prompt, "42"), isolated_a.send_input('foo = 42')
    assert_match undefined_var_or_method('foo'), isolated_b.send_input('foo')
  end

  test 'session preservation requires same bindings' do
    # @irb1 and @irb2 share the default binding, so state leaks between them.
    assert_equal format(return_prompt, "42"), @irb1.send_input('foo = 42')
    assert_equal format(return_prompt, "42"), @irb2.send_input('foo')
  end

  test 'multiline sessions' do
    repl = WebConsole::REPL::IRB.new(Object.new.instance_eval('binding'))
    assert_equal "", repl.send_input('class A')
    assert_equal format(return_prompt, 'nil'), repl.send_input('end')
    assert_no_match uninitialized_constant('A'), repl.send_input('A')
  end

  test 'prompt is the globally selected one' do
    assert_equal input_prompt, @irb.prompt
  end

  test 'prompt is present' do
    assert_not_nil @irb.prompt
  end

  test 'captures stdout output' do
    assert_equal "42\n#{format(return_prompt, 'nil')}", @irb.send_input('puts 42')
  end

  test 'captures stderr output' do
    assert_equal "42\n#{format(return_prompt, '3')}", @irb.send_input('$stderr.write("42\n")')
  end

  test 'rails helpers are available in the session' do
    each_rails_console_method do |meth|
      assert_no_match undefined_var_or_method(meth), @irb.send_input("respond_to? :#{meth}")
    end
  end

  private

  # Prompt set for the globally configured IRB prompt mode.
  def currently_selected_prompt
    ::IRB.conf[:PROMPT][::IRB.conf[:PROMPT_MODE]]
  end

  # Format string IRB uses when echoing a returned value.
  def return_prompt
    currently_selected_prompt[:RETURN]
  end

  # Prompt shown while IRB awaits input.
  def input_prompt
    currently_selected_prompt[:PROMPT_I]
  end

  def undefined_var_or_method(name)
    /undefined local variable or method `#{name}'/
  end

  def uninitialized_constant(name)
    /uninitialized constant #{name}/
  end

  # Yields every method the Rails console mixes into the session.
  def each_rails_console_method(&block)
    require 'rails/console/app'
    require 'rails/console/helpers'
    Rails::ConsoleMethods.public_instance_methods.each(&block)
  end
end
Add subprocess and forks tests
require 'test_helper'

# Covers the IRB adapter of WebConsole::REPL: result formatting, session and
# binding semantics, output capture (including subprocesses and forks), and
# prompt selection.
class IRBTest < ActiveSupport::TestCase
  setup do
    @irb1 = @irb = WebConsole::REPL::IRB.new
    @irb2 = WebConsole::REPL::IRB.new
  end

  test 'sending input returns the result as output' do
    assert_equal format(return_prompt, "42"), @irb.send_input('foo = 42')
  end

  test 'preserves the session in the binding' do
    assert_equal format(return_prompt, "42"), @irb.send_input('foo = 42')
    assert_equal format(return_prompt, "50"), @irb.send_input('foo + 8')
  end

  test 'session isolation requires own bindings' do
    isolated_a = WebConsole::REPL::IRB.new(Object.new.instance_eval('binding'))
    isolated_b = WebConsole::REPL::IRB.new(Object.new.instance_eval('binding'))
    assert_equal format(return_prompt, "42"), isolated_a.send_input('foo = 42')
    assert_match undefined_var_or_method('foo'), isolated_b.send_input('foo')
  end

  test 'session preservation requires same bindings' do
    # @irb1 and @irb2 share the default binding, so state leaks between them.
    assert_equal format(return_prompt, "42"), @irb1.send_input('foo = 42')
    assert_equal format(return_prompt, "42"), @irb2.send_input('foo')
  end

  test 'multiline sessions' do
    repl = WebConsole::REPL::IRB.new(Object.new.instance_eval('binding'))
    assert_equal "", repl.send_input('class A')
    assert_equal format(return_prompt, 'nil'), repl.send_input('end')
    assert_no_match uninitialized_constant('A'), repl.send_input('A')
  end

  test 'captures direct stdout output' do
    assert_equal "42\n#{format(return_prompt, 'nil')}", @irb.send_input('puts 42')
  end

  test 'captures direct stderr output' do
    assert_equal "42\n#{format(return_prompt, '3')}", @irb.send_input('$stderr.write("42\n")')
  end

  test 'captures direct output from subprocesses' do
    assert_equal "42\n#{format(return_prompt, 'true')}", @irb.send_input('system "echo 42"')
  end

  test 'captures direct output from forks' do
    # Known limitation: the streams are redirected only for the duration of
    # the send_input call, so forked children that print after that window
    # closes will miss the capture. Process.wait keeps this one inside it.
    assert_equal "42\n#{format(return_prompt, '2')}", @irb.send_input('Process.wait fork { puts 42 };')
  end

  test 'prompt is the globally selected one' do
    assert_equal input_prompt, @irb.prompt
  end

  test 'prompt is present' do
    assert_not_nil @irb.prompt
  end

  test 'rails helpers are available in the session' do
    each_rails_console_method do |meth|
      assert_no_match undefined_var_or_method(meth), @irb.send_input("respond_to? :#{meth}")
    end
  end

  private

  # Prompt set for the globally configured IRB prompt mode.
  def currently_selected_prompt
    ::IRB.conf[:PROMPT][::IRB.conf[:PROMPT_MODE]]
  end

  # Format string IRB uses when echoing a returned value.
  def return_prompt
    currently_selected_prompt[:RETURN]
  end

  # Prompt shown while IRB awaits input.
  def input_prompt
    currently_selected_prompt[:PROMPT_I]
  end

  def undefined_var_or_method(name)
    /undefined local variable or method `#{name}'/
  end

  def uninitialized_constant(name)
    /uninitialized constant #{name}/
  end

  # Yields every method the Rails console mixes into the session.
  def each_rails_console_method(&block)
    require 'rails/console/app'
    require 'rails/console/helpers'
    Rails::ConsoleMethods.public_instance_methods.each(&block)
  end
end
|
[Add] FirebaseAppCheck (8.12.0-beta)
# Podspec for the FirebaseAppCheck beta SDK (app attestation / anti-abuse).
Pod::Spec.new do |s|
s.name = 'FirebaseAppCheck'
s.version = '8.12.0-beta'
s.summary = 'Firebase App Check SDK.'
s.description = <<-DESC
Firebase SDK for anti-abuse compatibility.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.12.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
# Shared deployment targets, reused by the test specs below.
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebaseAppCheck/"
s.source_files = [
base_dir + 'Sources/**/*.[mh]',
'FirebaseCore/Sources/Private/*.h',
]
s.public_header_files = base_dir + 'Sources/Public/FirebaseAppCheck/*.h'
# DeviceCheck is weak-linked: unavailable on older OS versions and on watchOS.
s.ios.weak_framework = 'DeviceCheck'
s.osx.weak_framework = 'DeviceCheck'
s.tvos.weak_framework = 'DeviceCheck'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'PromisesObjC', '>= 1.2', '< 3.0'
s.dependency 'GoogleUtilities/Environment', '~> 7.7'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"'
}
s.test_spec 'unit' do |unit_tests|
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = [
base_dir + 'Tests/Unit/**/*.[mh]',
base_dir + 'Tests/Utils/**/*.[mh]',
'SharedTestUtilities/AppCheckFake/*',
'SharedTestUtilities/AppCheckBackoffWrapperFake/*',
'SharedTestUtilities/Date/*',
'SharedTestUtilities/URLSession/*',
]
unit_tests.resources = base_dir + 'Tests/Fixture/**/*'
unit_tests.dependency 'OCMock'
unit_tests.requires_app_host = true
end
s.test_spec 'integration' do |integration_tests|
integration_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
# Fix: the Tests/Integration glob was listed twice; one entry suffices.
integration_tests.source_files = [
base_dir + 'Tests/Integration/**/*.[mh]',
]
integration_tests.resources = base_dir + 'Tests/Fixture/**/*'
integration_tests.requires_app_host = true
end
s.test_spec 'swift-unit' do |swift_unit_tests|
swift_unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
swift_unit_tests.source_files = [
base_dir + 'Tests/Unit/Swift/**/*.swift',
base_dir + 'Tests/Unit/Swift/**/*.h',
]
end
end
|
[Add] FirebaseAppCheck (8.15.0-beta)
# Podspec for the FirebaseAppCheck beta SDK (app attestation / anti-abuse).
Pod::Spec.new do |s|
s.name = 'FirebaseAppCheck'
s.version = '8.15.0-beta'
s.summary = 'Firebase App Check SDK.'
s.description = <<-DESC
Firebase SDK for anti-abuse compatibility.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.15.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
# Shared deployment targets, reused by the test specs below.
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebaseAppCheck/"
s.source_files = [
base_dir + 'Sources/**/*.[mh]',
'FirebaseCore/Sources/Private/*.h',
]
s.public_header_files = base_dir + 'Sources/Public/FirebaseAppCheck/*.h'
# DeviceCheck is weak-linked: unavailable on older OS versions and on watchOS.
s.ios.weak_framework = 'DeviceCheck'
s.osx.weak_framework = 'DeviceCheck'
s.tvos.weak_framework = 'DeviceCheck'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'PromisesObjC', '>= 1.2', '< 3.0'
s.dependency 'GoogleUtilities/Environment', '~> 7.7'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"'
}
s.test_spec 'unit' do |unit_tests|
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = [
base_dir + 'Tests/Unit/**/*.[mh]',
base_dir + 'Tests/Utils/**/*.[mh]',
'SharedTestUtilities/AppCheckFake/*',
'SharedTestUtilities/AppCheckBackoffWrapperFake/*',
'SharedTestUtilities/Date/*',
'SharedTestUtilities/URLSession/*',
]
unit_tests.resources = base_dir + 'Tests/Fixture/**/*'
unit_tests.dependency 'OCMock'
unit_tests.requires_app_host = true
end
s.test_spec 'integration' do |integration_tests|
integration_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
# Fix: the Tests/Integration glob was listed twice; one entry suffices.
integration_tests.source_files = [
base_dir + 'Tests/Integration/**/*.[mh]',
]
integration_tests.resources = base_dir + 'Tests/Fixture/**/*'
integration_tests.requires_app_host = true
end
s.test_spec 'swift-unit' do |swift_unit_tests|
swift_unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
swift_unit_tests.source_files = [
base_dir + 'Tests/Unit/Swift/**/*.swift',
base_dir + 'Tests/Unit/Swift/**/*.h',
]
end
end
|
[Add] FirebaseCrashlytics (8.1.0)
# Podspec for FirebaseCrashlytics 8.1.0, pinned to a nightly tag.
Pod::Spec.new do |s|
s.name = 'FirebaseCrashlytics'
s.version = '8.1.0'
s.summary = 'Best and lightest-weight crash reporting for mobile, desktop and tvOS.'
s.description = 'Firebase Crashlytics helps you track, prioritize, and fix stability issues that erode app quality.'
s.homepage = 'https://firebase.google.com/'
s.license = { :type => 'Apache', :file => 'Crashlytics/LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.1.0.nightly'
}
# Shared deployment targets, reused by the test spec below.
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
s.source_files = [
'Crashlytics/Crashlytics/**/*.{c,h,m,mm}',
'Crashlytics/Protogen/**/*.{c,h,m,mm}',
'Crashlytics/Shared/**/*.{c,h,m,mm}',
'Crashlytics/third_party/**/*.{c,h,m,mm}',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'Interop/Analytics/Public/*.h',
]
s.public_header_files = [
'Crashlytics/Crashlytics/Public/FirebaseCrashlytics/*.h'
]
s.preserve_paths = [
'Crashlytics/README.md',
'run',
'upload-symbols',
]
# Ensure the run script and upload-symbols are callable via
# ${PODS_ROOT}/FirebaseCrashlytics/<name>
s.prepare_command = <<-PREPARE_COMMAND_END
cp -f ./Crashlytics/run ./run
cp -f ./Crashlytics/upload-symbols ./upload-symbols
PREPARE_COMMAND_END
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'PromisesObjC', '~> 1.2'
s.dependency 'GoogleDataTransport', '~> 9.0'
s.dependency 'nanopb', '~> 2.30908.0'
s.libraries = 'c++', 'z'
s.ios.frameworks = 'Security', 'SystemConfiguration'
# NOTE(review): `macos` and `osx` target the same platform in CocoaPods;
# both lines set the same frameworks — confirm one can be dropped.
s.macos.frameworks = 'Security', 'SystemConfiguration'
s.osx.frameworks = 'Security', 'SystemConfiguration'
s.watchos.frameworks = 'Security'
# Per-platform build settings; CLS_SDK_NAME embeds the platform into reports.
s.ios.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics iOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.osx.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics Mac SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.tvos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics tvOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.watchos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics watchOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'OTHER_LD_FLAGS' => '$(inherited) -sectcreate __TEXT __info_plist',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
# Unit tests can't run on watchOS.
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'Crashlytics/UnitTests/*.[mh]',
'Crashlytics/UnitTests/*/*.[mh]'
unit_tests.resources = 'Crashlytics/UnitTests/Data/*',
'Crashlytics/UnitTests/*.clsrecord',
'Crashlytics/UnitTests/FIRCLSMachO/data/*'
end
end
|
[Add] FirebaseCrashlytics (8.8.0)
# Podspec for FirebaseCrashlytics 8.8.0, pinned to a nightly tag.
Pod::Spec.new do |s|
s.name = 'FirebaseCrashlytics'
s.version = '8.8.0'
s.summary = 'Best and lightest-weight crash reporting for mobile, desktop and tvOS.'
s.description = 'Firebase Crashlytics helps you track, prioritize, and fix stability issues that erode app quality.'
s.homepage = 'https://firebase.google.com/'
s.license = { :type => 'Apache', :file => 'Crashlytics/LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.8.0.nightly'
}
# Shared deployment targets, reused by the test spec below.
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
s.source_files = [
'Crashlytics/Crashlytics/**/*.{c,h,m,mm}',
'Crashlytics/Protogen/**/*.{c,h,m,mm}',
'Crashlytics/Shared/**/*.{c,h,m,mm}',
'Crashlytics/third_party/**/*.{c,h,m,mm}',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'Interop/Analytics/Public/*.h',
]
s.public_header_files = [
'Crashlytics/Crashlytics/Public/FirebaseCrashlytics/*.h'
]
s.preserve_paths = [
'Crashlytics/README.md',
'run',
'upload-symbols',
]
# Ensure the run script and upload-symbols are callable via
# ${PODS_ROOT}/FirebaseCrashlytics/<name>
s.prepare_command = <<-PREPARE_COMMAND_END
cp -f ./Crashlytics/run ./run
cp -f ./Crashlytics/upload-symbols ./upload-symbols
PREPARE_COMMAND_END
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'PromisesObjC', '>= 1.2', '< 3.0'
s.dependency 'GoogleDataTransport', '~> 9.0'
s.dependency 'GoogleUtilities/Environment', '~> 7.4'
s.dependency 'nanopb', '~> 2.30908.0'
s.libraries = 'c++', 'z'
s.ios.frameworks = 'Security', 'SystemConfiguration'
# NOTE(review): `macos` and `osx` target the same platform in CocoaPods;
# both lines set the same frameworks — confirm one can be dropped.
s.macos.frameworks = 'Security', 'SystemConfiguration'
s.osx.frameworks = 'Security', 'SystemConfiguration'
s.watchos.frameworks = 'Security'
# Per-platform build settings; CLS_SDK_NAME embeds the platform into reports.
s.ios.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics iOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.osx.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics Mac SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.tvos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics tvOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.watchos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics watchOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'OTHER_LD_FLAGS' => '$(inherited) -sectcreate __TEXT __info_plist',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
# Unit tests can't run on watchOS.
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'Crashlytics/UnitTests/*.[mh]',
'Crashlytics/UnitTests/*/*.[mh]'
unit_tests.resources = 'Crashlytics/UnitTests/Data/*',
'Crashlytics/UnitTests/*.clsrecord',
'Crashlytics/UnitTests/FIRCLSMachO/data/*'
end
end
|
[Add] FirebaseCrashlytics (8.9.0)
# Podspec for FirebaseCrashlytics 8.9.0, pinned to a nightly tag.
Pod::Spec.new do |s|
s.name = 'FirebaseCrashlytics'
s.version = '8.9.0'
s.summary = 'Best and lightest-weight crash reporting for mobile, desktop and tvOS.'
s.description = 'Firebase Crashlytics helps you track, prioritize, and fix stability issues that erode app quality.'
s.homepage = 'https://firebase.google.com/'
s.license = { :type => 'Apache', :file => 'Crashlytics/LICENSE' }
s.authors = 'Google, Inc.'
# NOTE(review): version is 8.9.0 but the tag below is CocoaPods-8.10.0.nightly.
# Sibling specs in this series pin a tag matching the version — confirm the
# mismatch is intentional and not a copy/paste error.
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.10.0.nightly'
}
# Shared deployment targets, reused by the test spec below.
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
s.source_files = [
'Crashlytics/Crashlytics/**/*.{c,h,m,mm}',
'Crashlytics/Protogen/**/*.{c,h,m,mm}',
'Crashlytics/Shared/**/*.{c,h,m,mm}',
'Crashlytics/third_party/**/*.{c,h,m,mm}',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'Interop/Analytics/Public/*.h',
]
s.public_header_files = [
'Crashlytics/Crashlytics/Public/FirebaseCrashlytics/*.h'
]
s.preserve_paths = [
'Crashlytics/README.md',
'run',
'upload-symbols',
]
# Ensure the run script and upload-symbols are callable via
# ${PODS_ROOT}/FirebaseCrashlytics/<name>
s.prepare_command = <<-PREPARE_COMMAND_END
cp -f ./Crashlytics/run ./run
cp -f ./Crashlytics/upload-symbols ./upload-symbols
PREPARE_COMMAND_END
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'PromisesObjC', '>= 1.2', '< 3.0'
s.dependency 'GoogleDataTransport', '~> 9.1'
s.dependency 'GoogleUtilities/Environment', '~> 7.6'
s.dependency 'nanopb', '~> 2.30908.0'
s.libraries = 'c++', 'z'
s.ios.frameworks = 'Security', 'SystemConfiguration'
# NOTE(review): `macos` and `osx` target the same platform in CocoaPods;
# both lines set the same frameworks — confirm one can be dropped.
s.macos.frameworks = 'Security', 'SystemConfiguration'
s.osx.frameworks = 'Security', 'SystemConfiguration'
s.watchos.frameworks = 'Security'
# Per-platform build settings; CLS_SDK_NAME embeds the platform into reports.
s.ios.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics iOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.osx.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics Mac SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.tvos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics tvOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.watchos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics watchOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'OTHER_LD_FLAGS' => '$(inherited) -sectcreate __TEXT __info_plist',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
# Unit tests can't run on watchOS.
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'Crashlytics/UnitTests/*.[mh]',
'Crashlytics/UnitTests/*/*.[mh]'
unit_tests.resources = 'Crashlytics/UnitTests/Data/*',
'Crashlytics/UnitTests/*.clsrecord',
'Crashlytics/UnitTests/FIRCLSMachO/data/*'
end
end
|
[Add] FirebaseCrashlytics (9.0.0)
# Podspec for FirebaseCrashlytics 9.0.0, pinned to a nightly tag.
Pod::Spec.new do |s|
s.name = 'FirebaseCrashlytics'
s.version = '9.0.0'
s.summary = 'Best and lightest-weight crash reporting for mobile, desktop and tvOS.'
s.description = 'Firebase Crashlytics helps you track, prioritize, and fix stability issues that erode app quality.'
s.homepage = 'https://firebase.google.com/'
s.license = { :type => 'Apache', :file => 'Crashlytics/LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-9.0.0.nightly'
}
# Shared deployment targets, reused by the test spec below.
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.swift_version = '5.3'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
# Note: private headers now come from FirebaseCore/Extension (9.x layout).
s.source_files = [
'Crashlytics/Crashlytics/**/*.{c,h,m,mm}',
'Crashlytics/Protogen/**/*.{c,h,m,mm}',
'Crashlytics/Shared/**/*.{c,h,m,mm}',
'Crashlytics/third_party/**/*.{c,h,m,mm}',
'FirebaseCore/Extension/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'Interop/Analytics/Public/*.h',
]
s.public_header_files = [
'Crashlytics/Crashlytics/Public/FirebaseCrashlytics/*.h'
]
s.preserve_paths = [
'Crashlytics/README.md',
'run',
'upload-symbols',
]
# Ensure the run script and upload-symbols are callable via
# ${PODS_ROOT}/FirebaseCrashlytics/<name>
s.prepare_command = <<-PREPARE_COMMAND_END
cp -f ./Crashlytics/run ./run
cp -f ./Crashlytics/upload-symbols ./upload-symbols
PREPARE_COMMAND_END
s.dependency 'FirebaseCore', '~> 9.0'
s.dependency 'FirebaseInstallations', '~> 9.0'
s.dependency 'PromisesObjC', '~> 2.0'
s.dependency 'GoogleDataTransport', '~> 9.1'
s.dependency 'GoogleUtilities/Environment', '~> 7.7'
s.dependency 'nanopb', '~> 2.30908.0'
s.libraries = 'c++', 'z'
s.ios.frameworks = 'Security', 'SystemConfiguration'
# NOTE(review): `macos` and `osx` target the same platform in CocoaPods;
# both lines set the same frameworks — confirm one can be dropped.
s.macos.frameworks = 'Security', 'SystemConfiguration'
s.osx.frameworks = 'Security', 'SystemConfiguration'
s.watchos.frameworks = 'Security'
# Per-platform build settings; CLS_SDK_NAME embeds the platform into reports.
s.ios.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics iOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.osx.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics Mac SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.tvos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics tvOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.watchos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics watchOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'OTHER_LD_FLAGS' => '$(inherited) -sectcreate __TEXT __info_plist',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
# Unit tests can't run on watchOS.
unit_tests.platforms = {
:ios => ios_deployment_target,
# macOS raised to 10.15 for unit tests only (overrides the 10.12 target).
:osx => '10.15',
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'Crashlytics/UnitTests/*.[mh]',
'Crashlytics/UnitTests/*/*.[mh]'
unit_tests.resources = 'Crashlytics/UnitTests/Data/*',
'Crashlytics/UnitTests/*.clsrecord',
'Crashlytics/UnitTests/FIRCLSMachO/data/*'
end
end
|
[Add] FirebaseCrashlytics (9.0.0)
Pod::Spec.new do |s|
s.name = 'FirebaseCrashlytics'
s.version = '9.0.0'
s.summary = 'Best and lightest-weight crash reporting for mobile, desktop and tvOS.'
s.description = 'Firebase Crashlytics helps you track, prioritize, and fix stability issues that erode app quality.'
s.homepage = 'https://firebase.google.com/'
s.license = { :type => 'Apache', :file => 'Crashlytics/LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-9.0.0.nightly'
}
ios_deployment_target = '9.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.swift_version = '5.3'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
s.source_files = [
'Crashlytics/Crashlytics/**/*.{c,h,m,mm}',
'Crashlytics/Protogen/**/*.{c,h,m,mm}',
'Crashlytics/Shared/**/*.{c,h,m,mm}',
'Crashlytics/third_party/**/*.{c,h,m,mm}',
'FirebaseCore/Extension/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'Interop/Analytics/Public/*.h',
]
s.public_header_files = [
'Crashlytics/Crashlytics/Public/FirebaseCrashlytics/*.h'
]
s.preserve_paths = [
'Crashlytics/README.md',
'run',
'upload-symbols',
]
# Ensure the run script and upload-symbols are callable via
# ${PODS_ROOT}/FirebaseCrashlytics/<name>
s.prepare_command = <<-PREPARE_COMMAND_END
cp -f ./Crashlytics/run ./run
cp -f ./Crashlytics/upload-symbols ./upload-symbols
PREPARE_COMMAND_END
s.dependency 'FirebaseCore', '~> 9.0'
s.dependency 'FirebaseInstallations', '~> 9.0'
s.dependency 'PromisesObjC', '~> 2.1'
s.dependency 'GoogleDataTransport', '~> 9.1'
s.dependency 'GoogleUtilities/Environment', '~> 7.7'
s.dependency 'nanopb', '~> 2.30908.0'
s.libraries = 'c++', 'z'
s.ios.frameworks = 'Security', 'SystemConfiguration'
s.macos.frameworks = 'Security', 'SystemConfiguration'
s.osx.frameworks = 'Security', 'SystemConfiguration'
s.watchos.frameworks = 'Security'
s.ios.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics iOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.osx.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics Mac SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.tvos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics tvOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.watchos.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'CLS_SDK_NAME="Crashlytics watchOS SDK" ' +
# For nanopb:
'PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1',
'OTHER_LD_FLAGS' => '$(inherited) -sectcreate __TEXT __info_plist',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
# Unit tests can't run on watchOS.
unit_tests.platforms = {
:ios => ios_deployment_target,
:osx => '10.15',
:tvos => tvos_deployment_target
}
unit_tests.source_files = 'Crashlytics/UnitTests/*.[mh]',
'Crashlytics/UnitTests/*/*.[mh]'
unit_tests.resources = 'Crashlytics/UnitTests/Data/*',
'Crashlytics/UnitTests/*.clsrecord',
'Crashlytics/UnitTests/FIRCLSMachO/data/*'
end
end
|
[Add] FirebasePerformance (8.4.0)
Pod::Spec.new do |s|
s.name = 'FirebasePerformance'
s.version = '8.4.0'
s.summary = 'Firebase Performance'
s.description = <<-DESC
Firebase Performance library to measure performance of Mobile and Web Apps.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.4.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
ios_deployment_target = '10.0'
tvos_deployment_target = '10.0'
s.ios.deployment_target = ios_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebasePerformance/"
s.source_files = [
base_dir + 'Sources/**/*.[mh]',
base_dir + 'ProtoSupport/**/*.[mh]',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'FirebaseRemoteConfig/Sources/Private/*.h',
]
s.requires_arc = [
base_dir + 'Sources/**/*.[mh]',
base_dir + 'Public/**/*.h',
]
s.public_header_files = base_dir + 'Sources/Public/*.h'
preprocessor_definitions = 'GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1 ' + 'FIRPerformance_LIB_VERSION=' + String(s.version)
if ENV['FPR_UNSWIZZLE_AVAILABLE'] && ENV['FPR_UNSWIZZLE_AVAILABLE'] == '1' then
preprocessor_definitions += ' UNSWIZZLE_AVAILABLE=1'
end
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' => preprocessor_definitions,
# Unit tests do library imports using repo-root relative paths.
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.ios.framework = 'CoreTelephony'
s.framework = 'QuartzCore'
s.framework = 'SystemConfiguration'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'FirebaseRemoteConfig', '~> 8.0'
s.dependency 'GoogleDataTransport', '~> 9.0'
s.dependency 'GoogleUtilities/Environment', '~> 7.4'
s.dependency 'GoogleUtilities/ISASwizzler', '~> 7.4'
s.dependency 'GoogleUtilities/MethodSwizzler', '~> 7.4'
s.dependency 'Protobuf', '~> 3.15'
s.test_spec 'unit' do |unit_tests|
unit_tests.platforms = {:ios => ios_deployment_target, :tvos => tvos_deployment_target}
unit_tests.scheme = { :code_coverage => true }
unit_tests.source_files = [
'FirebasePerformance/Tests/Unit/**/*.{m,h,plist}',
'GoogleDataTransport/GDTCORTests/Common/**/*.[hm]',
]
unit_tests.resources = ['FirebasePerformance/Tests/Unit/Server/*File']
unit_tests.requires_arc = true
unit_tests.requires_app_host = true
unit_tests.pod_target_xcconfig = {
'CLANG_ENABLE_OBJC_WEAK' => 'YES',
}
unit_tests.info_plist = {
'FPRTestingDummyFeature' => true,
'FPRScreenTracesForContainerVC' => true,
'FPRDelegateSwizzling' => true,
'FPRNSURLConnection' => true,
'FPRScreenTracesSwizzling' => true,
'FPRScreenTraces' => false,
}
unit_tests.dependency 'GoogleUtilities/SwizzlerTestHelpers'
unit_tests.dependency 'OCMock'
unit_tests.dependency 'GCDWebServer'
end
s.app_spec 'TestApp' do |app_spec|
app_spec.platforms = {:ios => ios_deployment_target, :tvos => tvos_deployment_target}
app_spec.source_files = ['FirebasePerformance/Tests/TestApp/Source/**/*.{m,h}']
ios_resources = ['FirebasePerformance/Tests/TestApp/Resources/*.*']
if ENV['FPR_AUTOPUSH_ENV'] && ENV['FPR_AUTOPUSH_ENV'] == '1' then
ios_resources += ['FirebasePerformance/Tests/TestApp/Plists/Autopush/**/*.plist']
app_spec.info_plist = {
'CFBundleIdentifier' => 'com.google.FIRPerfTestAppAutopush'
}
app_spec.scheme = {
:environment_variables => { "FPR_AUTOPUSH_ENV" => "1" }
}
else
ios_resources += ['FirebasePerformance/Tests/TestApp/Plists/Prod/**/*.plist']
app_spec.info_plist = {
'CFBundleIdentifier' => 'com.google.FIRPerfTestApp'
}
end
app_spec.ios.resources = ios_resources
app_spec.requires_arc = true
end
end
|
[Add] FirebasePerformance (8.7.0)
Pod::Spec.new do |s|
s.name = 'FirebasePerformance'
s.version = '8.7.0'
s.summary = 'Firebase Performance'
s.description = <<-DESC
Firebase Performance library to measure performance of Mobile and Web Apps.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-8.7.0.nightly'
}
s.social_media_url = 'https://twitter.com/Firebase'
ios_deployment_target = '10.0'
tvos_deployment_target = '10.0'
s.ios.deployment_target = ios_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebasePerformance/"
s.source_files = [
base_dir + 'Sources/**/*.[cmh]',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
'FirebaseRemoteConfig/Sources/Private/*.h',
]
s.requires_arc = [
base_dir + 'Sources/**/*.[mh]',
base_dir + 'Public/**/*.h',
]
s.public_header_files = base_dir + 'Sources/Public/FirebasePerformance/*.h'
preprocessor_definitions = 'FIRPerformance_LIB_VERSION=' + String(s.version)
preprocessor_definitions += ' PB_FIELD_32BIT=1 PB_NO_PACKED_STRUCTS=1 PB_ENABLE_MALLOC=1'
if ENV['FPR_UNSWIZZLE_AVAILABLE'] && ENV['FPR_UNSWIZZLE_AVAILABLE'] == '1' then
preprocessor_definitions += ' UNSWIZZLE_AVAILABLE=1'
end
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' => preprocessor_definitions,
# Unit tests do library imports using repo-root relative paths.
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
}
s.ios.framework = 'CoreTelephony'
s.framework = 'QuartzCore'
s.framework = 'SystemConfiguration'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'FirebaseRemoteConfig', '~> 8.0'
s.dependency 'GoogleDataTransport', '~> 9.0'
s.dependency 'GoogleUtilities/Environment', '~> 7.4'
s.dependency 'GoogleUtilities/ISASwizzler', '~> 7.4'
s.dependency 'GoogleUtilities/MethodSwizzler', '~> 7.4'
s.dependency 'nanopb', '~> 2.30908.0'
s.test_spec 'unit' do |unit_tests|
unit_tests.platforms = {:ios => ios_deployment_target, :tvos => tvos_deployment_target}
unit_tests.scheme = { :code_coverage => true }
unit_tests.source_files = [
'FirebasePerformance/Tests/Unit/**/*.{m,h,plist}',
'SharedTestUtilities/*.[hm]',
]
unit_tests.resources = ['FirebasePerformance/Tests/Unit/Server/*File']
unit_tests.requires_arc = true
unit_tests.requires_app_host = true
unit_tests.pod_target_xcconfig = {
'CLANG_ENABLE_OBJC_WEAK' => 'YES',
}
unit_tests.info_plist = {
'FPRTestingDummyFeature' => true,
'FPRScreenTracesForContainerVC' => true,
'FPRDelegateSwizzling' => true,
'FPRNSURLConnection' => true,
'FPRScreenTracesSwizzling' => true,
'FPRScreenTraces' => false,
}
unit_tests.dependency 'GoogleUtilities/SwizzlerTestHelpers'
unit_tests.dependency 'OCMock'
unit_tests.dependency 'GCDWebServer'
end
s.app_spec 'TestApp' do |app_spec|
app_spec.platforms = {:ios => ios_deployment_target, :tvos => tvos_deployment_target}
app_spec.source_files = ['FirebasePerformance/Tests/TestApp/Source/**/*.{m,h}']
ios_resources = ['FirebasePerformance/Tests/TestApp/Resources/*.*']
if ENV['FPR_AUTOPUSH_ENV'] && ENV['FPR_AUTOPUSH_ENV'] == '1' then
ios_resources += ['FirebasePerformance/Tests/TestApp/Plists/Autopush/**/*.plist']
app_spec.info_plist = {
'CFBundleIdentifier' => 'com.google.FIRPerfTestAppAutopush'
}
app_spec.scheme = {
:environment_variables => { "FPR_AUTOPUSH_ENV" => "1" }
}
else
ios_resources += ['FirebasePerformance/Tests/TestApp/Plists/Prod/**/*.plist']
app_spec.info_plist = {
'CFBundleIdentifier' => 'com.google.FIRPerfTestApp'
}
end
app_spec.ios.resources = ios_resources
app_spec.requires_arc = true
end
end
|
name "hopsworks"
maintainer "Jim Dowling"
maintainer_email "jdowling@kth.se"
license "Apache v2.0"
description "Installs/Configures HopsWorks, the UI for Hops Hadoop."
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1.0"
source_url "https://github.com/hopshadoop/hopsworks-chef"
%w{ ubuntu debian centos rhel }.each do |os|
supports os
end
depends 'glassfish'
depends 'ndb'
depends 'kagent'
depends 'hops'
depends 'elastic'
depends 'hadoop_spark'
depends 'flink'
depends 'zeppelin'
depends 'compat_resource'
depends 'ulimit2'
depends 'authbind'
depends 'apache_hadoop'
depends 'epipe'
depends 'livy'
depends 'oozie'
depends 'kkafka'
depends 'kzookeeper'
depends 'drelephant'
depends 'dela'
depends 'java'
#link:Click <a target='_blank' href='https://%host%:4848'>here</a> to launch Glassfish in your browser (http)
recipe "hopsworks::install", "Installs Glassfish"
#link:Click <a target='_blank' href='http://%host%:8080/hopsworks'>here</a> to launch hopsworks in your browser (http)
recipe "hopsworks", "Installs HopsWorks war file, starts glassfish+application."
recipe "hopsworks::dev", "Installs development libraries needed for HopsWorks development."
recipe "hopsworks::letsencypt", "Given a glassfish installation and a letscrypt installation, update glassfish's key."
recipe "hopsworks::purge", "Deletes glassfish installation."
recipe "kagent::install", ""
recipe "kagent::default", ""
recipe "kagent::purge", ""
recipe "ndb::install", ""
recipe "ndb::ndbd", ""
recipe "ndb::mgmd", ""
recipe "ndb::mysqld", ""
recipe "ndb::purge", ""
recipe "apache_hadoop::install", ""
recipe "apache_hadoop::nn", ""
recipe "apache_hadoop::dn", ""
recipe "apache_hadoop::rm", ""
recipe "apache_hadoop::nm", ""
recipe "apache_hadoop::jhs", ""
recipe "apache_hadoop::purge", ""
recipe "hadoop_spark::install", ""
recipe "hadoop_spark::yarn", ""
recipe "hadoop_spark::historyserver", ""
recipe "hadoop_spark::purge", ""
recipe "flink::install", ""
recipe "flink::yarn", ""
recipe "flink::purge", ""
recipe "elastic::install", ""
recipe "elastic::default", ""
recipe "elastic::purge", ""
recipe "kzookeeper::install", ""
recipe "kzookeeper::default", ""
recipe "kzookeeper::purge", ""
recipe "kkafka::install", ""
recipe "kkafka::default", ""
recipe "kkafka::purge", ""
recipe "livy::install", ""
recipe "livy::default", ""
recipe "livy::purge", ""
recipe "epipe::install", ""
recipe "epipe::default", ""
recipe "epipe::purge", ""
recipe "zeppelin::install", ""
recipe "zeppelin::default", ""
recipe "zeppelin::purge", ""
recipe "drelephant::install", ""
recipe "drelephant::default", ""
recipe "drelephant::purge", ""
recipe "dela::install", ""
recipe "dela::default", ""
recipe "dela::purge", ""
#######################################################################################
# Required Attributes
#######################################################################################
attribute "hopsworks/twofactor_auth",
:description => "twofactor_auth (default: false)",
:type => 'string',
:required => "required"
attribute "hopsworks/gmail/email",
:description => "Email address for gmail account",
:required => "required",
:type => 'string'
attribute "hopsworks/gmail/password",
:description => "Password for gmail account",
:required => "required",
:type => 'string'
attribute "hopsworks/admin/user",
:description => "Username for the Administration account on the Web Application Server",
:type => 'string',
:required => "required"
attribute "hopsworks/admin/password",
:description => "Password for the Administration account on the Web Application Server",
:type => 'string',
:required => "required"
attribute "mysql/user",
:description => "Username for the MySQL Server Accounts",
:type => 'string',
:required => "required"
attribute "mysql/password",
:description => "Password for the MySQL Server Accounts",
:type => 'string',
:required => "required"
#######################################################################################
# Non-Required Attributes
#######################################################################################
attribute "hopsworks/master/password",
:description => "Web Application Server master password",
:type => 'string'
attribute "download_url",
:description => "URL for downloading binaries",
:type => 'string'
# attribute "hopsworks/cert/password",
# :description => "hopsworks/cert/password",
# :type => 'string',
# :default => "changeit"
attribute "karamel/cert/cn",
:description => "Certificate Name",
:type => 'string'
attribute "karamel/cert/o",
:description => "organization",
:type => 'string'
attribute "karamel/cert/ou",
:description => "Organization unit",
:type => 'string'
attribute "karamel/cert/l",
:description => "Location",
:type => 'string'
attribute "karamel/cert/s",
:description => "City",
:type => 'string'
attribute "karamel/cert/c",
:description => "Country (2 letters)",
:type => 'string'
attribute "glassfish/version",
:description => "glassfish/version",
:type => 'string'
attribute "glassfish/user",
:description => "Install and run the glassfish server as this username",
:type => 'string'
attribute "glassfish/group",
:description => "glassfish/group",
:type => 'string'
# attribute "glassfish/admin/port",
# :description => "glassfish/admin/port",
# :type => 'string'
# attribute "glassfish/port",
# :description => "glassfish/port",
# :type => 'string'
attribute "hopsworks/port",
:description => "Port that webserver will listen on",
:type => 'string'
attribute "hopsworks/max_mem",
:description => "glassfish/max_mem",
:type => 'string'
attribute "hopsworks/min_mem",
:description => "glassfish/min_mem",
:type => 'string'
attribute "hopsworks/max_stack_size",
:description => "glassfish/max_stack_size",
:type => 'string'
attribute "hopsworks/max_perm_size",
:description => "glassfish/max_perm_size",
:type => 'string'
attribute "kagent/enabled",
:description => "Install kagent",
:type => 'string'
attribute "hopsworks/reinstall",
:description => "Enter 'true' if this is a reinstallation",
:type => 'string'
attribute "hopsworks/war_url",
:description => "Url for the hopsworks war file",
:type => 'string'
attribute "hopsworks/yarn_default_quota_mins",
:description => "Default number of CPU mins availble per project",
:type => 'string'
attribute "hopsworks/hdfs_default_quota_gbs",
:description => "Default amount in GB of available storage per project",
:type => 'string'
attribute "hopsworks/max_num_proj_per_user",
:description => "Maximum number of projects that can be created by each user",
:type => 'string'
attribute "glassfish/package_url",
:description => "Url for the Glassfish distribution zip file.",
:type => 'string'
attribute "ndb/dir",
:description => "Ndb Installation directory.",
:type => 'string'
attribute "hops/dir",
:description => "Ndb Installation directory.",
:type => 'string'
attribute "hadoop_spark/dir",
:description => "Installation directory.",
:type => 'string'
attribute "hopsworks.kafka_num_replicas",
:description => "Default number of replicas for Kafka Topics.",
:type => 'string'
attribute "hopsworks.kafka_num_partitions",
:description => "Default number of partitions for Kafka Topics.",
:type => 'string'
attribute "hopsworks/file_preview_image_size",
:description => "Maximum size in bytes of an image that can be previewed in DataSets",
:type => 'string'
attribute "hopsworks/file_preview_txt_size",
:description => "Maximum size in lines of file that can be previewed in DataSets",
:type => 'string'
attribute "java/jdk_version",
:display_name => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:display_name => "Oracle (default) or openjdk",
:type => 'string'
#########################################################################
#########################################################################
### BEGIN GENERATED CONTENT
attribute "kagent/user",
:description => "Username to run kagent as",
:type => 'string'
attribute "kagent/dashboard/ip",
:description => " Ip address for Dashboard REST API",
:type => 'string'
attribute "kagent/dashboard/port",
:description => " Port for Dashboard REST API",
:type => 'string'
attribute "hop/hostid",
:description => " One-time password used when registering the host",
:type => 'string'
attribute "kagent/name",
:description => "Cookbook name",
:type => 'string'
attribute "kagent/rest_api/user",
:description => "kagent REST API username",
:type => "string"
attribute "kagent/rest_api/password",
:description => "kagent REST API password",
:type => "string"
attribute "kagent/dashboard/user",
:description => "kagent username to register with server",
:type => "string"
attribute "kagent/dashboard/password",
:description => "kagent password to register with server",
:type => "string"
attribute "ndb/mysql_port",
:description => "Port for the mysql server",
:type => "string"
attribute "ndb/mysql_socket",
:description => "Socket for the mysql server",
:type => "string"
attribute "systemd",
:description => "Use systemd startup scripts, default 'true'",
:type => "string"
attribute "kagent/network/interface",
:description => "Define the network intefaces (eth0, enp0s3)",
:type => "string"
attribute "ntp/install",
:description => "Install Network Time Protocol (default: false)",
:type => "string"
attribute "ndb/package_url",
:description => "Download URL for MySQL Cluster binaries",
:type => 'string'
attribute "ndb/MaxNoOfExecutionThreads",
:description => "Number of execution threads for MySQL Cluster",
:type => 'string'
attribute "ndb/DataMemory",
:description => "Data memory for each MySQL Cluster Data Node",
:type => 'string',
:required => "required"
attribute "ndb/IndexMemory",
:description => "Index memory for each MySQL Cluster Data Node",
:type => 'string'
attribute "memcached/mem_size",
:description => "Memcached data memory size",
:type => 'string'
attribute "ndb/version",
:description => "MySQL Cluster Version",
:type => 'string'
attribute "ndb/user",
:description => "User that runs ndb database",
:type => 'string'
attribute "ndb/group",
:description => "Group that runs ndb database",
:type => 'string'
attribute "mysql/user",
:description => "User that runs mysql server",
:required => "required",
:type => 'string'
attribute "mysql/password",
:description => "Password for hop mysql user",
:required => "required",
:type => 'string'
#
# Optional Parameters/Attributes
#
attribute "mysql/dir",
:description => "Directory in which to install MySQL Binaries",
:type => 'string'
attribute "mysql/replication_enabled",
:description => "Enable replication for the mysql server",
:type => 'string'
attribute "ndb/wait_startup",
:description => "Max amount of time a MySQL server should wait for the ndb nodes to be up",
:type => 'string'
attribute "ndb/mgm_server/port",
:description => "Port used by Mgm servers in MySQL Cluster",
:type => 'string'
attribute "ndb/NoOfReplicas",
:description => "Num of replicas of the MySQL Cluster Data Nodes",
:type => 'string'
attribute "ndb/FragmentLogFileSize",
:description => "FragmentLogFileSize",
:type => 'string'
attribute "ndb/MaxNoOfAttributes",
:description => "MaxNoOfAttributes",
:type => 'string'
attribute "ndb/MaxNoOfConcurrentIndexOperations",
:description => "Increase for higher throughput at the cost of more memory",
:type => 'string'
attribute "ndb/MaxNoOfConcurrentOperations",
:description => "Increase for higher throughput at the cost of more memory",
:type => 'string'
attribute "ndb/MaxNoOfTables",
:description => "MaxNoOfTables",
:type => 'string'
attribute "ndb/MaxNoOfOrderedIndexes",
:description => "MaxNoOfOrderedIndexes",
:type => 'string'
attribute "ndb/MaxNoOfUniqueHashIndexes",
:description => "MaxNoOfUniqueHashIndexes",
:type => 'string'
attribute "ndb/MaxDMLOperationsPerTransaction",
:description => "MaxDMLOperationsPerTransaction",
:type => 'string'
attribute "ndb/TransactionBufferMemory",
:description => "TransactionBufferMemory",
:type => 'string'
attribute "ndb/MaxParallelScansPerFragment",
:description => "MaxParallelScansPerFragment",
:type => 'string'
attribute "ndb/MaxDiskWriteSpeed",
:description => "MaxDiskWriteSpeed",
:type => 'string'
attribute "ndb/MaxDiskWriteSpeedOtherNodeRestart",
:description => "MaxDiskWriteSpeedOtherNodeRestart",
:type => 'string'
attribute "ndb/MaxDiskWriteSpeedOwnRestart",
:description => "MaxDiskWriteSpeedOwnRestart",
:type => 'string'
attribute "ndb/MinDiskWriteSpeed",
:description => "MinDiskWriteSpeed",
:type => 'string'
attribute "ndb/DiskSyncSize",
:description => "DiskSyncSize",
:type => 'string'
attribute "ndb/RedoBuffer",
:description => "RedoBuffer",
:type => 'string'
attribute "ndb/LongMessageBuffer",
:description => "LongMessageBuffer",
:type => 'string'
attribute "ndb/TransactionInactiveTimeout",
:description => "TransactionInactiveTimeout",
:type => 'string'
attribute "ndb/TransactionDeadlockDetectionTimeout",
:description => "TransactionDeadlockDetectionTimeout",
:type => 'string'
attribute "ndb/LockPagesInMainMemory",
:description => "LockPagesInMainMemory",
:type => 'string'
attribute "ndb/RealTimeScheduler",
:description => "RealTimeScheduler",
:type => 'string'
attribute "ndb/SchedulerSpinTimer",
:description => "SchedulerSpinTimer",
:type => 'string'
attribute "ndb/BuildIndexThreads",
:description => "BuildIndexThreads",
:type => 'string'
attribute "ndb/CompressedLCP",
:description => "CompressedLCP",
:type => 'string'
attribute "ndb/CompressedBackup",
:description => "CompressedBackup",
:type => 'string'
attribute "ndb/BackupMaxWriteSize",
:description => "BackupMaxWriteSize",
:type => 'string'
attribute "ndb/BackupLogBufferSize",
:description => "BackupLogBufferSize",
:type => 'string'
attribute "ndb/BackupDataBufferSize",
:description => "BackupDataBufferSize",
:type => 'string'
attribute "ndb/MaxAllocate",
:description => "MaxAllocate",
:type => 'string'
attribute "ndb/DefaultHashMapSize",
:description => "DefaultHashMapSize",
:type => 'string'
attribute "ndb/ODirect",
:description => "ODirect",
:type => 'string'
attribute "ndb/TotalSendBufferMemory",
:description => "TotalSendBufferMemory in MBs",
:type => 'string'
attribute "ndb/OverloadLimit",
:description => "Overload for Send/Recv TCP Buffers in MBs",
:type => 'string'
attribute "kagent/enabled",
:description => "Install kagent",
:type => 'string',
:required => "optional"
attribute "ndb/NoOfFragmentLogParts",
:description => "One per ldm thread. Valid values: 4, 8, 16. Should match the number of CPUs in ThreadConfig's ldm threads.",
:type => 'string'
attribute "ndb/bind_cpus",
:description => "Isolate interrupts from cpus, turn off balance_irqs",
:type => 'string'
attribute "ndb/TcpBind_INADDR_ANY",
:description => "Set to TRUE so that any IP addr can be used on any node. Default is FALSE.",
:type => 'string'
attribute "ndb/aws_enhanced_networking",
:description => "Set to true if you want the ixgbevf module to be installed that is needed for AWS enhanced networking.",
:type => 'string'
attribute "ndb/interrupts_isolated_to_single_cpu",
:description => "Set to true if you want to setup your linux kernal to handle interrupts on a single CPU.",
:type => 'string'
attribute "ndb/ThreadConfig",
:description => "Decide which threads bind to which cores: Threadconfig=main={cpubind=0},ldm={count=8,cpubind=1,2,3,4,13,14,15,16},io={count=4,cpubind=5,6,17,18},rep={cpubind=7},recv={count=2,cpubind=8,19}k",
:type => 'string'
attribute "ndb/dir",
:description => "Directory in which to install mysql-cluster",
:type => 'string'
attribute "ndb/shared_folder",
:description => "Directory in which to download mysql-cluster",
:type => 'string'
attribute "ndb/systemd",
:description => "Use systemd scripts (instead of system-v). Default is 'true'.",
:type => 'string'
attribute "ndb/MaxNoOfConcurrentTransactions",
:description => "Maximum number of concurrent transactions (higher consumes more memory)",
:type => 'string'
# attribute "btsync/ndb/seeder_secret",
# :display_name => "Ndb seeder's random secret key.",
# :description => "20 chars or more (normally 32 chars)",
# :type => 'string',
# :default => "AY27AAZKTKO3GONE6PBCZZRA6MKGRKBX2"
# attribute "btsync/ndb/leecher_secret",
# :display_name => "Ndb leecher's secret key.",
# :description => "Ndb's random secret (key) generated using the seeder's secret key. 20 chars or more (normally 32 chars)",
# :type => 'string',
# :default => "BTHKJKK4PIPIOJZ7GITF2SJ2IYDLSSJVY"
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "hops/yarn/rm_heartbeat",
:description => "NodeManager heartbeat timeout",
:type => 'string'
attribute "mysql/user",
:description => "Mysql server username",
:type => 'string',
:required => "required"
attribute "mysql/password",
:description => "MySql server Password",
:type => 'string',
:required => "required"
attribute "hops/use_hopsworks",
:description => "'true' or 'false' - true to enable HopsWorks support",
:type => 'string'
attribute "hops/erasure_coding",
:description => "'true' or 'false' - true to enable erasure-coding replication",
:type => 'string'
attribute "hops/nn/direct_memory_size",
:description => "Size of the direct memory size for the NameNode in MBs",
:type => 'string'
attribute "hops/nn/heap_size",
:description => "Size of the NameNode heap in MBs",
:type => 'string'
attribute "hops/nn/cache",
:description => "'true' or 'false' - true to enable the path cache in the NameNode",
:type => 'string'
attribute "hops/nn/partition_key",
:description => "'true' or 'false' - true to enable the partition key when starting transactions. Distribution-aware transactions.",
:type => 'string'
attribute "hops/yarn/resource_tracker",
:description => "Hadoop Resource Tracker enabled on this nodegroup",
:type => 'string'
attribute "hops/install_db",
:description => "Install hops database and tables in MySQL Cluster ('true' (default) or 'false')",
:type => 'string'
attribute "hops/dir",
:description => "Base installation directory for HopsFS",
:type => 'string'
attribute "hops/use_systemd",
:description => "Use systemd startup scripts, default 'false'",
:type => "string"
attribute "apache_hadoop/group",
:description => "Group to run hdfs/yarn/mr as",
:type => 'string'
#
# wrapper parameters
#
attribute "apache_hadoop/yarn/nm/memory_mbs",
:description => "Apache_Hadoop NodeManager Memory in MB",
:type => 'string'
attribute "apache_hadoop/yarn/vcores",
:description => "Apache_Hadoop NodeManager Number of Virtual Cores",
:type => 'string'
attribute "apache_hadoop/yarn/max_vcores",
:description => "Hadoop NodeManager Maximum Virtual Cores per container",
:type => 'string'
attribute "apache_hadoop/version",
:description => "Hadoop version",
:type => 'string'
attribute "apache_hadoop/num_replicas",
:description => "HDFS replication factor",
:type => 'string'
attribute "apache_hadoop/container_cleanup_delay_sec",
:description => "The number of seconds container data is retained after termination",
:type => 'string'
attribute "apache_hadoop/yarn/user",
:description => "Username to run yarn as",
:type => 'string'
attribute "apache_hadoop/mr/user",
:description => "Username to run mapReduce as",
:type => 'string'
attribute "apache_hadoop/hdfs/user",
:description => "Username to run hdfs as",
:type => 'string'
attribute "apache_hadoop/format",
:description => "Format HDFS",
:type => 'string'
attribute "apache_hadoop/tmp_dir",
:description => "The directory in which Hadoop stores temporary data, including container data",
:type => 'string'
attribute "apache_hadoop/data_dir",
:description => "The directory in which Hadoop's DataNodes store their data",
:type => 'string'
attribute "apache_hadoop/yarn/nodemanager_hb_ms",
:description => "Heartbeat Interval for NodeManager->ResourceManager in ms",
:type => 'string'
attribute "apache_hadoop/container_cleanup_delay_sec",
:description => "The number of seconds container data is retained after termination",
:type => 'string'
attribute "apache_hadoop/rm/scheduler_class",
:description => "Java Classname for the Yarn scheduler (fifo, capacity, fair)",
:type => 'string'
attribute "apache_hadoop/rm/scheduler_capacity/calculator_class",
:description => "YARN resource calculator class. Switch to DominantResourseCalculator for multiple resource scheduling",
:type => 'string'
attribute "apache_hadoop/user_envs",
:description => "Update the PATH environment variable for the hdfs and yarn users to include hadoop/bin in the PATH ",
:type => 'string'
attribute "apache_hadoop/logging_level",
:description => "Log levels are: TRACE, DEBUG, INFO, WARN",
:type => 'string'
attribute "apache_hadoop/nn/heap_size",
:description => "Size of the NameNode heap in MBs",
:type => 'string'
attribute "apache_hadoop/nn/direct_memory_size",
:description => "Size of the direct memory size for the NameNode in MBs",
:type => 'string'
attribute "apache_hadoop/ha_enabled",
:description => "'true' to enable HA, else 'false'",
:type => 'string'
attribute "apache_hadoop/yarn/rt",
:description => "Hadoop Resource Tracker enabled on this nodegroup",
:type => 'string'
attribute "apache_hadoop/dir",
:description => "Hadoop installation directory",
:type => 'string'
attribute "hops/yarn/rm_distributed",
:description => "Set to 'true' for distribute yarn",
:type => "string"
attribute "hops/yarn/nodemanager_ha_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_auto_failover_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_recovery_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/rm_heartbeat",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_size",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_duration",
:description => "",
:type => "string"
# NOTE(review): "hops/yarn/rm_distributed" is already declared earlier in this
# file with a different description ("Set to 'true' for distribute yarn"); this
# later declaration wins. Consolidate to a single declaration.
attribute "hops/yarn/rm_distributed",
          :description => "Set to 'true' to enable distributed RMs",
          :type => "string"
attribute "hops/yarn/nodemanager_rm_streaming_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_base_ms",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_max_ms",
:description => "",
:type => "string"
attribute "hops/yarn/quota_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/quota_monitor_interval",
:description => "",
:type => "string"
attribute "hops/yarn/quota_ticks_per_credit",
:description => "",
:type => "string"
attribute "hops/yarn/quota_min_ticks_charge",
:description => "",
:type => "string"
attribute "hops/yarn/quota_checkpoint_nbticks",
:description => "",
:type => "string"
attribute "java/jdk_version",
:display_name => "Jdk version",
:type => 'string'
attribute "hadoop_spark/user",
:display_name => "Username to run spark master/worker as",
:type => 'string'
attribute "hadoop_spark/group",
:display_name => "Groupname to run spark master/worker as",
:type => 'string'
attribute "hadoop_spark/executor_memory",
:display_name => "Executor memory (e.g., 512m)",
:type => 'string'
attribute "hadoop_spark/driver_memory",
:display_name => "Driver memory (e.g., 1g)",
:type => 'string'
attribute "hadoop_spark/eventlog_enabled",
:display_name => "Eventlog enabled (true|false)",
:type => 'string'
attribute "hadoop_spark/worker/cleanup/enabled",
:display_name => "Spark standalone worker cleanup enabled (true|false)",
:type => 'string'
attribute "hadoop_spark/version",
:display_name => "Spark version (e.g., 1.4.1 or 1.5.2 or 1.6.0)",
:type => 'string'
attribute "hadoop_spark/hadoop/distribution",
:display_name => "'hops' or 'apache_hadoop'",
:type => 'string'
attribute "hadoop_spark/history/fs/cleaner/enabled",
:display_name => "'true' to enable cleanup of the historyservers logs",
:type => 'string'
attribute "hadoop_spark/history/fs/cleaner/interval",
:display_name => "How often to run the cleanup of the historyservers logs (e.g., '1d' for once per day)",
:type => 'string'
attribute "hadoop_spark/history/fs/cleaner/maxAge",
:display_name => "Age in days of the historyservers logs before they are removed (e.g., '7d' for 7 days)",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "flink/user",
:description => "Username to run flink jobmgr/task as",
:type => 'string'
attribute "flink/group",
:description => "Groupname to run flink jobmgr/task as",
:type => 'string'
attribute "flink/mode",
:description => "Run Flink JobManager in one of the following modes: BATCH, STREAMING",
:type => 'string'
attribute "flink/jobmanager/heap_mbs",
:description => "Flink JobManager Heap Size in MB",
:type => 'string'
attribute "flink/taskmanager/heap_mbs",
:description => "Flink TaskManager Heap Size in MB",
:type => 'string'
attribute "flink/dir",
:description => "Root directory for flink installation",
:type => 'string'
attribute "flink/taskmanager/num_taskslots",
:description => "Override the default number of task slots (default = NoOfCPUs)",
:type => 'string'
attribute "flink/hadoop/distribution",
:description => "apache_hadoop (default) or hops",
:type => 'string'
attribute "epipe/user",
:description => "User to run Epipe server as",
:type => "string"
attribute "epipe/group",
:description => "Group to run Epipe server as",
:type => "string"
attribute "epipe/version",
:description => "Version of epipe to use",
:type => "string"
attribute "epipe/url",
:description => "Url to epipe binaries",
:type => "string"
attribute "epipe/dir",
:description => "Parent directory to install epipe in (/srv is default)",
:type => "string"
attribute "epipe/pid_file",
:description => "Change the location for the pid_file.",
:type => "string"
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "livy/user",
:description => "User to install/run as",
:type => 'string'
attribute "livy/dir",
:description => "base dir for installation",
:type => 'string'
# Livy server configuration attributes.
# Fixed: the key was misspelled ":dscription" on every entry below, which is
# not a recognized metadata option, so these descriptions were silently ignored.
# Also fixed "ivy.keystore*" -> "livy.keystore*" in the description text.
attribute "livy/version",
          :description => "livy.version",
          :type => "string"
attribute "livy/url",
          :description => "livy.url",
          :type => "string"
attribute "livy/port",
          :description => "livy.port",
          :type => "string"
attribute "livy/home",
          :description => "livy.home",
          :type => "string"
attribute "livy/keystore",
          :description => "livy.keystore",
          :type => "string"
attribute "livy/keystore_password",
          :description => "livy.keystore_password",
          :type => "string"
attribute "dela/group",
:description => "group parameter value",
:type => "string"
attribute "dela/user",
:description => "user parameter value",
:type => "string"
attribute "java/jdk_version",
:description => "Version of Java to use (e.g., '7' or '8')",
:type => "string"
# Fixed typo in description text: "ovverriden" -> "overridden".
attribute "dela/id",
          :description => "id for the dela instance. Randomly generated, but can be overridden here.",
          :type => "string"
attribute "dela/seed",
          :description => "seed for the dela instance. Randomly generated, but can be overridden here.",
          :type => "string"
attribute "dela/log_level",
:description => "Default: WARN. Can be INFO or DEBUG or TRACE or ERROR.",
:type => "string"
attribute "dela/stun_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/port",
:description => "Dela Client application port.",
:type => "string"
attribute "dela/http-port",
:description => "Dela Client http port.",
:type => "string"
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "kzookeeper/version",
:description => "Version of kzookeeper",
:type => 'string'
attribute "kzookeeper/url",
:description => "Url to download binaries for kzookeeper",
:type => 'string'
attribute "kzookeeper/user",
:description => "Run kzookeeper as this user",
:type => 'string'
attribute "kzookeeper/group",
:description => "Run kzookeeper user as this group",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "kafka/ulimit",
:description => "ULimit for the max number of open files allowed",
:type => 'string'
attribute "kkafka/offset_monitor/port",
:description => "Port for Kafka monitor service",
:type => 'string'
attribute "kkafka/memory_mb",
:description => "Kafka server memory in mbs",
:type => 'string'
attribute "kkafka/broker/zookeeper_connection_timeout_ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/hours",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/size",
:description => "",
:type => 'string'
attribute "kkafka/broker/message/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/network/threads",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/io/threads",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/recovery/threads/per/data/dir",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/replica/fetchers",
:description => "",
:type => 'string'
attribute "kkafka/broker/queued/max/requests",
:description => "",
:type => 'string'
attribute "kkafka/broker/socket/send/buffer/bytes",
:description => "",
:type => 'string'
# Kafka broker tuning attributes.
# Fixed: every attribute-name string below had a stray "attribute " token
# embedded in it (e.g. "kkafka/brattribute oker/..."), apparently from a bad
# search-and-replace, so the wrong attribute keys were being registered.
# TODO confirm: these descriptions are empty — fill them in from the Kafka
# broker configuration reference.
attribute "kkafka/broker/socket/receive/buffer/bytes",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/socket/request/max/bytes",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/num/partitions",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/segment/bytes",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/roll/hours",
          :description => "",
          :type => 'string'
# NOTE(review): "kkafka/broker/log/retention/hours" is also declared earlier in
# this file — consolidate to one declaration.
attribute "kkafka/broker/log/retention/hours",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/retention/bytes",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/retention/check/interval/ms",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/index/size/max/bytes",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/index/interval/bytes",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/flush/interval/messages",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/flush/scheduler/interval/ms",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/flush/interval/ms",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/leader/imbalance/check/interval/seconds",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/leader/imbalance/per/broker/percentage",
          :description => "",
          :type => 'string'
attribute "kkafka/broker/log/dir",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/offset/checkpoint/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/port",
:description => "",
:type => 'string'
attribute "kkafka/broker/queued/max/requests",
:description => "",
:type => 'string'
attribute "kkafka/broker/quota/consumer/default",
:description => "",
:type => 'string'
attribute "kkafka/broker/quota/producer/default",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/min/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/wait/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/high/watermark/checkpoint/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/lag/time/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/socket/receive/buffer/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/socket/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/request/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/session/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/set/acl",
:description => "",
:type => 'string'
attribute "kkafka/broker/replication/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/cleaner/enable",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/cleaner/io/buffer/load/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/security/inter/broker/protocol",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/client/auth",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/key/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/keystore/location",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/keystore/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/truststore/location",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/truststore/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/authorizer/class/name",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/endpoint/identification/algorithm",
:description => "",
:type => 'string'
attribute "kkafka/broker/principal/builder/class",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/synctime/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/connectiontimeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/sessiontimeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/synctime/ms",
:description => "",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "elastic/port",
:description => "Port for elasticsearch service (default: 9200)",
:type => 'string'
attribute "elastic/ulimit_files",
:description => "Number of files to set ulimit to.",
:type => 'string'
attribute "elastic/ulimit_memlock",
:description => "Memlock size for ulimit",
:type => 'string'
attribute "elastic/dir",
:description => "Base directory to install elastic search into.",
:type => 'string'
attribute "elastic/memory",
:description => "Amount of memory for Elasticsearch.",
:type => 'string'
attribute "elastic/version",
:description => "Elasticsearch version, .e.g, '2.1.2'",
:type => 'string'
attribute "elastic/checksum",
:description => "Sha-1 checksum for the elasticsearch .tar.gz file",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "drelephant/user",
:description => "Username that runs the Dr Elephant server",
:type => 'string'
attribute "drelephant/port",
:description => "Port for running the Dr Elephant server",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "zeppelin/user",
:description => "User to install/run zeppelin as",
:type => 'string'
attribute "zeppelin/dir",
:description => "zeppelin base dir",
:type => 'string'
attribute "dela/group",
:description => "group parameter value",
:type => "string"
attribute "dela/user",
:description => "user parameter value",
:type => "string"
attribute "java/jdk_version",
:description => "Version of Java to use (e.g., '7' or '8')",
:type => "string"
# Fixed typo in description text: "ovverriden" -> "overridden".
attribute "dela/id",
          :description => "id for the dela instance. Randomly generated, but can be overridden here.",
          :type => "string"
attribute "dela/seed",
          :description => "seed for the dela instance. Randomly generated, but can be overridden here.",
          :type => "string"
attribute "dela/log_level",
:description => "Default: WARN. Can be INFO or DEBUG or TRACE or ERROR.",
:type => "string"
attribute "dela/stun_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/port",
:description => "Dela Client application port.",
:type => "string"
attribute "dela/http-port",
:description => "Dela Client http port.",
:type => "string"
# NOTE(review): the line below was stray bare text (it reads like a pasted
# commit message, "huge metadata parser fix") and is not valid Ruby — it would
# prevent this metadata file from loading. Commented out; delete once confirmed.
# huge metadata parser fix
name "hopsworks"
maintainer "Jim Dowling"
maintainer_email "jdowling@kth.se"
license "Apache v2.0"
description "Installs/Configures HopsWorks, the UI for Hops Hadoop."
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1.0"
source_url "https://github.com/hopshadoop/hopsworks-chef"
%w{ ubuntu debian centos rhel }.each do |os|
supports os
end
depends 'glassfish'
depends 'ndb'
depends 'kagent'
depends 'hops'
depends 'elastic'
depends 'hadoop_spark'
depends 'flink'
depends 'zeppelin'
depends 'compat_resource'
depends 'ulimit2'
depends 'authbind'
depends 'apache_hadoop'
depends 'epipe'
depends 'livy'
depends 'oozie'
depends 'kkafka'
depends 'kzookeeper'
depends 'drelephant'
depends 'dela'
depends 'java'
#link:Click <a target='_blank' href='https://%host%:4848'>here</a> to launch Glassfish in your browser (http)
recipe "hopsworks::install", "Installs Glassfish"
#link:Click <a target='_blank' href='http://%host%:8080/hopsworks'>here</a> to launch hopsworks in your browser (http)
recipe "hopsworks", "Installs HopsWorks war file, starts glassfish+application."
recipe "hopsworks::dev", "Installs development libraries needed for HopsWorks development."
# NOTE(review): the recipe name "letsencypt" is misspelled ("letsencrypt"), but
# it must match the recipe filename on disk — rename recipes/letsencypt.rb
# before correcting the name here. Description typos fixed below.
recipe "hopsworks::letsencypt", "Given a glassfish installation and a letsencrypt installation, update glassfish's key."
recipe "hopsworks::purge", "Deletes glassfish installation."
recipe "kagent::install", " "
recipe "kagent::default", " "
recipe "kagent::purge", " "
recipe "ndb::install", " "
recipe "ndb::ndbd", " "
recipe "ndb::mgmd", " "
recipe "ndb::mysqld", " "
recipe "ndb::purge", " "
recipe "apache_hadoop::install", " "
recipe "apache_hadoop::nn", " "
recipe "apache_hadoop::dn", " "
recipe "apache_hadoop::rm", " "
recipe "apache_hadoop::nm", " "
recipe "apache_hadoop::jhs", " "
recipe "apache_hadoop::purge", " "
recipe "hadoop_spark::install", " "
recipe "hadoop_spark::yarn", " "
recipe "hadoop_spark::historyserver", " "
recipe "hadoop_spark::purge", " "
recipe "flink::install", " "
recipe "flink::yarn", " "
recipe "flink::purge", " "
recipe "elastic::install", " "
recipe "elastic::default", " "
recipe "elastic::purge", " "
recipe "kzookeeper::install", " "
recipe "kzookeeper::default", " "
recipe "kzookeeper::purge", " "
recipe "kkafka::install", " "
recipe "kkafka::default", " "
recipe "kkafka::purge", " "
recipe "livy::install", " "
recipe "livy::default", " "
recipe "livy::purge", " "
recipe "epipe::install", " "
recipe "epipe::default", " "
recipe "epipe::purge", " "
recipe "zeppelin::install", " "
recipe "zeppelin::default", " "
recipe "zeppelin::purge", " "
recipe "drelephant::install", " "
recipe "drelephant::default", " "
recipe "drelephant::purge", " "
recipe "dela::install", " "
recipe "dela::default", " "
recipe "dela::purge", " "
#######################################################################################
# Required Attributes
#######################################################################################
attribute "hopsworks/twofactor_auth",
:description => "twofactor_auth (default: false)",
:type => 'string',
:required => "required"
attribute "hopsworks/gmail/email",
:description => "Email address for gmail account",
:required => "required",
:type => 'string'
attribute "hopsworks/gmail/password",
:description => "Password for gmail account",
:required => "required",
:type => 'string'
attribute "hopsworks/admin/user",
:description => "Username for the Administration account on the Web Application Server",
:type => 'string',
:required => "required"
attribute "hopsworks/admin/password",
:description => "Password for the Administration account on the Web Application Server",
:type => 'string',
:required => "required"
attribute "mysql/user",
:description => "Username for the MySQL Server Accounts",
:type => 'string',
:required => "required"
attribute "mysql/password",
:description => "Password for the MySQL Server Accounts",
:type => 'string',
:required => "required"
#######################################################################################
# Non-Required Attributes
#######################################################################################
attribute "hopsworks/master/password",
:description => "Web Application Server master password",
:type => 'string'
attribute "download_url",
:description => "URL for downloading binaries",
:type => 'string'
# attribute "hopsworks/cert/password",
# :description => "hopsworks/cert/password",
# :type => 'string',
# :default => "changeit"
attribute "karamel/cert/cn",
:description => "Certificate Name",
:type => 'string'
attribute "karamel/cert/o",
:description => "organization",
:type => 'string'
attribute "karamel/cert/ou",
:description => "Organization unit",
:type => 'string'
attribute "karamel/cert/l",
:description => "Location",
:type => 'string'
attribute "karamel/cert/s",
:description => "City",
:type => 'string'
attribute "karamel/cert/c",
:description => "Country (2 letters)",
:type => 'string'
attribute "glassfish/version",
:description => "glassfish/version",
:type => 'string'
attribute "glassfish/user",
:description => "Install and run the glassfish server as this username",
:type => 'string'
attribute "glassfish/group",
:description => "glassfish/group",
:type => 'string'
# attribute "glassfish/admin/port",
# :description => "glassfish/admin/port",
# :type => 'string'
# attribute "glassfish/port",
# :description => "glassfish/port",
# :type => 'string'
attribute "hopsworks/port",
:description => "Port that webserver will listen on",
:type => 'string'
attribute "hopsworks/max_mem",
:description => "glassfish/max_mem",
:type => 'string'
attribute "hopsworks/min_mem",
:description => "glassfish/min_mem",
:type => 'string'
attribute "hopsworks/max_stack_size",
:description => "glassfish/max_stack_size",
:type => 'string'
attribute "hopsworks/max_perm_size",
:description => "glassfish/max_perm_size",
:type => 'string'
attribute "kagent/enabled",
:description => "Install kagent",
:type => 'string'
attribute "hopsworks/reinstall",
:description => "Enter 'true' if this is a reinstallation",
:type => 'string'
attribute "hopsworks/war_url",
:description => "Url for the hopsworks war file",
:type => 'string'
attribute "hopsworks/yarn_default_quota_mins",
:description => "Default number of CPU mins availble per project",
:type => 'string'
attribute "hopsworks/hdfs_default_quota_gbs",
:description => "Default amount in GB of available storage per project",
:type => 'string'
attribute "hopsworks/max_num_proj_per_user",
:description => "Maximum number of projects that can be created by each user",
:type => 'string'
attribute "glassfish/package_url",
:description => "Url for the Glassfish distribution zip file.",
:type => 'string'
attribute "ndb/dir",
:description => "Ndb Installation directory.",
:type => 'string'
# Fixed: description was copy-pasted from "ndb/dir" ("Ndb Installation
# directory.") but this attribute is the Hops installation directory.
attribute "hops/dir",
          :description => "Hops installation directory.",
          :type => 'string'
attribute "hadoop_spark/dir",
:description => "Installation directory.",
:type => 'string'
# Fixed: these two keys used dot separators ("hopsworks.kafka_num_replicas")
# while every other attribute key in this file uses "/" — normalized so the
# attributes resolve under the "hopsworks" namespace like the rest.
# TODO confirm no external config references the dotted spelling.
attribute "hopsworks/kafka_num_replicas",
          :description => "Default number of replicas for Kafka Topics.",
          :type => 'string'
attribute "hopsworks/kafka_num_partitions",
          :description => "Default number of partitions for Kafka Topics.",
          :type => 'string'
attribute "hopsworks/file_preview_image_size",
:description => "Maximum size in bytes of an image that can be previewed in DataSets",
:type => 'string'
attribute "hopsworks/file_preview_txt_size",
:description => "Maximum size in lines of file that can be previewed in DataSets",
:type => 'string'
attribute "java/jdk_version",
:display_name => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:display_name => "Oracle (default) or openjdk",
:type => 'string'
#########################################################################
#########################################################################
### BEGIN GENERATED CONTENT
attribute "kagent/user",
:description => "Username to run kagent as",
:type => 'string'
attribute "kagent/dashboard/ip",
:description => " Ip address for Dashboard REST API",
:type => 'string'
attribute "kagent/dashboard/port",
:description => " Port for Dashboard REST API",
:type => 'string'
attribute "hop/hostid",
:description => " One-time password used when registering the host",
:type => 'string'
attribute "kagent/name",
:description => "Cookbook name",
:type => 'string'
attribute "kagent/rest_api/user",
:description => "kagent REST API username",
:type => "string"
attribute "kagent/rest_api/password",
:description => "kagent REST API password",
:type => "string"
attribute "kagent/dashboard/user",
:description => "kagent username to register with server",
:type => "string"
attribute "kagent/dashboard/password",
:description => "kagent password to register with server",
:type => "string"
attribute "ndb/mysql_port",
:description => "Port for the mysql server",
:type => "string"
attribute "ndb/mysql_socket",
:description => "Socket for the mysql server",
:type => "string"
attribute "systemd",
:description => "Use systemd startup scripts, default 'true'",
:type => "string"
attribute "kagent/network/interface",
:description => "Define the network intefaces (eth0, enp0s3)",
:type => "string"
attribute "ntp/install",
:description => "Install Network Time Protocol (default: false)",
:type => "string"
attribute "ndb/package_url",
:description => "Download URL for MySQL Cluster binaries",
:type => 'string'
attribute "ndb/MaxNoOfExecutionThreads",
:description => "Number of execution threads for MySQL Cluster",
:type => 'string'
attribute "ndb/DataMemory",
:description => "Data memory for each MySQL Cluster Data Node",
:type => 'string',
:required => "required"
attribute "ndb/IndexMemory",
:description => "Index memory for each MySQL Cluster Data Node",
:type => 'string'
attribute "memcached/mem_size",
:description => "Memcached data memory size",
:type => 'string'
attribute "ndb/version",
:description => "MySQL Cluster Version",
:type => 'string'
attribute "ndb/user",
:description => "User that runs ndb database",
:type => 'string'
attribute "ndb/group",
:description => "Group that runs ndb database",
:type => 'string'
attribute "mysql/user",
:description => "User that runs mysql server",
:required => "required",
:type => 'string'
attribute "mysql/password",
:description => "Password for hop mysql user",
:required => "required",
:type => 'string'
#
# Optional Parameters/Attributes
#
attribute "mysql/dir",
:description => "Directory in which to install MySQL Binaries",
:type => 'string'
attribute "mysql/replication_enabled",
:description => "Enable replication for the mysql server",
:type => 'string'
attribute "ndb/wait_startup",
:description => "Max amount of time a MySQL server should wait for the ndb nodes to be up",
:type => 'string'
attribute "ndb/mgm_server/port",
:description => "Port used by Mgm servers in MySQL Cluster",
:type => 'string'
attribute "ndb/NoOfReplicas",
:description => "Num of replicas of the MySQL Cluster Data Nodes",
:type => 'string'
attribute "ndb/FragmentLogFileSize",
:description => "FragmentLogFileSize",
:type => 'string'
attribute "ndb/MaxNoOfAttributes",
:description => "MaxNoOfAttributes",
:type => 'string'
attribute "ndb/MaxNoOfConcurrentIndexOperations",
:description => "Increase for higher throughput at the cost of more memory",
:type => 'string'
attribute "ndb/MaxNoOfConcurrentOperations",
:description => "Increase for higher throughput at the cost of more memory",
:type => 'string'
attribute "ndb/MaxNoOfTables",
:description => "MaxNoOfTables",
:type => 'string'
attribute "ndb/MaxNoOfOrderedIndexes",
:description => "MaxNoOfOrderedIndexes",
:type => 'string'
attribute "ndb/MaxNoOfUniqueHashIndexes",
:description => "MaxNoOfUniqueHashIndexes",
:type => 'string'
attribute "ndb/MaxDMLOperationsPerTransaction",
:description => "MaxDMLOperationsPerTransaction",
:type => 'string'
attribute "ndb/TransactionBufferMemory",
:description => "TransactionBufferMemory",
:type => 'string'
attribute "ndb/MaxParallelScansPerFragment",
:description => "MaxParallelScansPerFragment",
:type => 'string'
attribute "ndb/MaxDiskWriteSpeed",
:description => "MaxDiskWriteSpeed",
:type => 'string'
attribute "ndb/MaxDiskWriteSpeedOtherNodeRestart",
:description => "MaxDiskWriteSpeedOtherNodeRestart",
:type => 'string'
attribute "ndb/MaxDiskWriteSpeedOwnRestart",
:description => "MaxDiskWriteSpeedOwnRestart",
:type => 'string'
attribute "ndb/MinDiskWriteSpeed",
:description => "MinDiskWriteSpeed",
:type => 'string'
attribute "ndb/DiskSyncSize",
:description => "DiskSyncSize",
:type => 'string'
attribute "ndb/RedoBuffer",
:description => "RedoBuffer",
:type => 'string'
attribute "ndb/LongMessageBuffer",
:description => "LongMessageBuffer",
:type => 'string'
attribute "ndb/TransactionInactiveTimeout",
:description => "TransactionInactiveTimeout",
:type => 'string'
attribute "ndb/TransactionDeadlockDetectionTimeout",
:description => "TransactionDeadlockDetectionTimeout",
:type => 'string'
attribute "ndb/LockPagesInMainMemory",
:description => "LockPagesInMainMemory",
:type => 'string'
attribute "ndb/RealTimeScheduler",
:description => "RealTimeScheduler",
:type => 'string'
attribute "ndb/SchedulerSpinTimer",
:description => "SchedulerSpinTimer",
:type => 'string'
attribute "ndb/BuildIndexThreads",
:description => "BuildIndexThreads",
:type => 'string'
attribute "ndb/CompressedLCP",
:description => "CompressedLCP",
:type => 'string'
attribute "ndb/CompressedBackup",
:description => "CompressedBackup",
:type => 'string'
attribute "ndb/BackupMaxWriteSize",
:description => "BackupMaxWriteSize",
:type => 'string'
attribute "ndb/BackupLogBufferSize",
:description => "BackupLogBufferSize",
:type => 'string'
attribute "ndb/BackupDataBufferSize",
:description => "BackupDataBufferSize",
:type => 'string'
attribute "ndb/MaxAllocate",
:description => "MaxAllocate",
:type => 'string'
attribute "ndb/DefaultHashMapSize",
:description => "DefaultHashMapSize",
:type => 'string'
attribute "ndb/ODirect",
:description => "ODirect",
:type => 'string'
attribute "ndb/TotalSendBufferMemory",
:description => "TotalSendBufferMemory in MBs",
:type => 'string'
attribute "ndb/OverloadLimit",
:description => "Overload for Send/Recv TCP Buffers in MBs",
:type => 'string'
attribute "kagent/enabled",
:description => "Install kagent",
:type => 'string',
:required => "optional"
attribute "ndb/NoOfFragmentLogParts",
:description => "One per ldm thread. Valid values: 4, 8, 16. Should match the number of CPUs in ThreadConfig's ldm threads.",
:type => 'string'
attribute "ndb/bind_cpus",
:description => "Isolate interrupts from cpus, turn off balance_irqs",
:type => 'string'
attribute "ndb/TcpBind_INADDR_ANY",
:description => "Set to TRUE so that any IP addr can be used on any node. Default is FALSE.",
:type => 'string'
attribute "ndb/aws_enhanced_networking",
:description => "Set to true if you want the ixgbevf module to be installed that is needed for AWS enhanced networking.",
:type => 'string'
attribute "ndb/interrupts_isolated_to_single_cpu",
:description => "Set to true if you want to setup your linux kernel to handle interrupts on a single CPU.",
:type => 'string'
attribute "ndb/ThreadConfig",
:description => "Decide which threads bind to which cores: Threadconfig=main={cpubind=0},ldm={count=8,cpubind=1,2,3,4,13,14,15,16},io={count=4,cpubind=5,6,17,18},rep={cpubind=7},recv={count=2,cpubind=8,19}k",
:type => 'string'
attribute "ndb/dir",
:description => "Directory in which to install mysql-cluster",
:type => 'string'
attribute "ndb/shared_folder",
:description => "Directory in which to download mysql-cluster",
:type => 'string'
attribute "ndb/systemd",
:description => "Use systemd scripts (instead of system-v). Default is 'true'.",
:type => 'string'
attribute "ndb/MaxNoOfConcurrentTransactions",
:description => "Maximum number of concurrent transactions (higher consumes more memory)",
:type => 'string'
# attribute "btsync/ndb/seeder_secret",
# :display_name => "Ndb seeder's random secret key.",
# :description => "20 chars or more (normally 32 chars)",
# :type => 'string',
# :default => "AY27AAZKTKO3GONE6PBCZZRA6MKGRKBX2"
# attribute "btsync/ndb/leecher_secret",
# :display_name => "Ndb leecher's secret key.",
# :description => "Ndb's random secret (key) generated using the seeder's secret key. 20 chars or more (normally 32 chars)",
# :type => 'string',
# :default => "BTHKJKK4PIPIOJZ7GITF2SJ2IYDLSSJVY"
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "hops/yarn/rm_heartbeat",
:description => "NodeManager heartbeat timeout",
:type => 'string'
attribute "mysql/user",
:description => "Mysql server username",
:type => 'string',
:required => "required"
attribute "mysql/password",
:description => "MySql server Password",
:type => 'string',
:required => "required"
attribute "hops/use_hopsworks",
:description => "'true' or 'false' - true to enable HopsWorks support",
:type => 'string'
attribute "hops/erasure_coding",
:description => "'true' or 'false' - true to enable erasure-coding replication",
:type => 'string'
attribute "hops/nn/direct_memory_size",
:description => "Size of the direct memory size for the NameNode in MBs",
:type => 'string'
attribute "hops/nn/heap_size",
:description => "Size of the NameNode heap in MBs",
:type => 'string'
attribute "hops/nn/cache",
:description => "'true' or 'false' - true to enable the path cache in the NameNode",
:type => 'string'
attribute "hops/nn/partition_key",
:description => "'true' or 'false' - true to enable the partition key when starting transactions. Distribution-aware transactions.",
:type => 'string'
attribute "hops/yarn/resource_tracker",
:description => "Hadoop Resource Tracker enabled on this nodegroup",
:type => 'string'
attribute "hops/install_db",
:description => "Install hops database and tables in MySQL Cluster ('true' (default) or 'false')",
:type => 'string'
attribute "hops/dir",
:description => "Base installation directory for HopsFS",
:type => 'string'
attribute "hops/use_systemd",
:description => "Use systemd startup scripts, default 'false'",
:type => "string"
attribute "apache_hadoop/group",
:description => "Group to run hdfs/yarn/mr as",
:type => 'string'
#
# wrapper parameters
#
attribute "apache_hadoop/yarn/nm/memory_mbs",
:description => "Apache_Hadoop NodeManager Memory in MB",
:type => 'string'
attribute "apache_hadoop/yarn/vcores",
:description => "Apache_Hadoop NodeManager Number of Virtual Cores",
:type => 'string'
attribute "apache_hadoop/yarn/max_vcores",
:description => "Hadoop NodeManager Maximum Virtual Cores per container",
:type => 'string'
attribute "apache_hadoop/version",
:description => "Hadoop version",
:type => 'string'
attribute "apache_hadoop/num_replicas",
:description => "HDFS replication factor",
:type => 'string'
attribute "apache_hadoop/container_cleanup_delay_sec",
:description => "The number of seconds container data is retained after termination",
:type => 'string'
attribute "apache_hadoop/yarn/user",
:description => "Username to run yarn as",
:type => 'string'
attribute "apache_hadoop/mr/user",
:description => "Username to run mapReduce as",
:type => 'string'
attribute "apache_hadoop/hdfs/user",
:description => "Username to run hdfs as",
:type => 'string'
attribute "apache_hadoop/format",
:description => "Format HDFS",
:type => 'string'
attribute "apache_hadoop/tmp_dir",
:description => "The directory in which Hadoop stores temporary data, including container data",
:type => 'string'
attribute "apache_hadoop/data_dir",
:description => "The directory in which Hadoop's DataNodes store their data",
:type => 'string'
attribute "apache_hadoop/yarn/nodemanager_hb_ms",
:description => "Heartbeat Interval for NodeManager->ResourceManager in ms",
:type => 'string'
attribute "apache_hadoop/container_cleanup_delay_sec",
:description => "The number of seconds container data is retained after termination",
:type => 'string'
attribute "apache_hadoop/rm/scheduler_class",
:description => "Java Classname for the Yarn scheduler (fifo, capacity, fair)",
:type => 'string'
attribute "apache_hadoop/rm/scheduler_capacity/calculator_class",
:description => "YARN resource calculator class. Switch to DominantResourseCalculator for multiple resource scheduling",
:type => 'string'
attribute "apache_hadoop/user_envs",
:description => "Update the PATH environment variable for the hdfs and yarn users to include hadoop/bin in the PATH ",
:type => 'string'
attribute "apache_hadoop/logging_level",
:description => "Log levels are: TRACE, DEBUG, INFO, WARN",
:type => 'string'
attribute "apache_hadoop/nn/heap_size",
:description => "Size of the NameNode heap in MBs",
:type => 'string'
attribute "apache_hadoop/nn/direct_memory_size",
:description => "Size of the direct memory size for the NameNode in MBs",
:type => 'string'
attribute "apache_hadoop/ha_enabled",
:description => "'true' to enable HA, else 'false'",
:type => 'string'
attribute "apache_hadoop/yarn/rt",
:description => "Hadoop Resource Tracker enabled on this nodegroup",
:type => 'string'
attribute "apache_hadoop/dir",
:description => "Hadoop installation directory",
:type => 'string'
attribute "hops/yarn/rm_distributed",
:description => "Set to 'true' for distribute yarn",
:type => "string"
attribute "hops/yarn/nodemanager_ha_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_auto_failover_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_recovery_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/rm_heartbeat",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_size",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_duration",
:description => "",
:type => "string"
attribute "hops/yarn/rm_distributed",
:description => "Set to 'true' to enable distributed RMs",
:type => "string"
attribute "hops/yarn/nodemanager_rm_streaming_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_base_ms",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_max_ms",
:description => "",
:type => "string"
attribute "hops/yarn/quota_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/quota_monitor_interval",
:description => "",
:type => "string"
attribute "hops/yarn/quota_ticks_per_credit",
:description => "",
:type => "string"
attribute "hops/yarn/quota_min_ticks_charge",
:description => "",
:type => "string"
attribute "hops/yarn/quota_checkpoint_nbticks",
:description => "",
:type => "string"
attribute "java/jdk_version",
:display_name => "Jdk version",
:type => 'string'
attribute "hadoop_spark/user",
:display_name => "Username to run spark master/worker as",
:type => 'string'
attribute "hadoop_spark/group",
:display_name => "Groupname to run spark master/worker as",
:type => 'string'
attribute "hadoop_spark/executor_memory",
:display_name => "Executor memory (e.g., 512m)",
:type => 'string'
attribute "hadoop_spark/driver_memory",
:display_name => "Driver memory (e.g., 1g)",
:type => 'string'
attribute "hadoop_spark/eventlog_enabled",
:display_name => "Eventlog enabled (true|false)",
:type => 'string'
attribute "hadoop_spark/worker/cleanup/enabled",
:display_name => "Spark standalone worker cleanup enabled (true|false)",
:type => 'string'
attribute "hadoop_spark/version",
:display_name => "Spark version (e.g., 1.4.1 or 1.5.2 or 1.6.0)",
:type => 'string'
attribute "hadoop_spark/hadoop/distribution",
:display_name => "'hops' or 'apache_hadoop'",
:type => 'string'
attribute "hadoop_spark/history/fs/cleaner/enabled",
:display_name => "'true' to enable cleanup of the historyservers logs",
:type => 'string'
attribute "hadoop_spark/history/fs/cleaner/interval",
:display_name => "How often to run the cleanup of the historyservers logs (e.g., '1d' for once per day)",
:type => 'string'
attribute "hadoop_spark/history/fs/cleaner/maxAge",
:display_name => "Age in days of the historyservers logs before they are removed (e.g., '7d' for 7 days)",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "flink/user",
:description => "Username to run flink jobmgr/task as",
:type => 'string'
attribute "flink/group",
:description => "Groupname to run flink jobmgr/task as",
:type => 'string'
attribute "flink/mode",
:description => "Run Flink JobManager in one of the following modes: BATCH, STREAMING",
:type => 'string'
attribute "flink/jobmanager/heap_mbs",
:description => "Flink JobManager Heap Size in MB",
:type => 'string'
attribute "flink/taskmanager/heap_mbs",
:description => "Flink TaskManager Heap Size in MB",
:type => 'string'
attribute "flink/dir",
:description => "Root directory for flink installation",
:type => 'string'
attribute "flink/taskmanager/num_taskslots",
:description => "Override the default number of task slots (default = NoOfCPUs)",
:type => 'string'
attribute "flink/hadoop/distribution",
:description => "apache_hadoop (default) or hops",
:type => 'string'
attribute "epipe/user",
:description => "User to run Epipe server as",
:type => "string"
attribute "epipe/group",
:description => "Group to run Epipe server as",
:type => "string"
attribute "epipe/version",
:description => "Version of epipe to use",
:type => "string"
attribute "epipe/url",
:description => "Url to epipe binaries",
:type => "string"
attribute "epipe/dir",
:description => "Parent directory to install epipe in (/srv is default)",
:type => "string"
attribute "epipe/pid_file",
:description => "Change the location for the pid_file.",
:type => "string"
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "livy/user",
:description => "User to install/run as",
:type => 'string'
attribute "livy/dir",
:description => "base dir for installation",
:type => 'string'
# Fixed: the key was misspelled ":dscription" (Chef would ignore it and the
# attribute would show no description); also "ivy.*" -> "livy.*" in two values.
attribute "livy/version",
:description => "livy.version",
:type => "string"
attribute "livy/url",
:description => "livy.url",
:type => "string"
attribute "livy/port",
:description => "livy.port",
:type => "string"
attribute "livy/home",
:description => "livy.home",
:type => "string"
attribute "livy/keystore",
:description => "livy.keystore",
:type => "string"
attribute "livy/keystore_password",
:description => "livy.keystore_password",
:type => "string"
attribute "dela/group",
:description => "group parameter value",
:type => "string"
attribute "dela/user",
:description => "user parameter value",
:type => "string"
attribute "java/jdk_version",
:description => "Version of Java to use (e.g., '7' or '8')",
:type => "string"
attribute "dela/id",
:description => "id for the dela instance. Randomly generated, but can be overridden here.",
:type => "string"
attribute "dela/seed",
:description => "seed for the dela instance. Randomly generated, but can be overridden here.",
:type => "string"
attribute "dela/log_level",
:description => "Default: WARN. Can be INFO or DEBUG or TRACE or ERROR.",
:type => "string"
attribute "dela/stun_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/port",
:description => "Dela Client application port.",
:type => "string"
attribute "dela/http-port",
:description => "Dela Client http port.",
:type => "string"
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "kzookeeper/version",
:description => "Version of kzookeeper",
:type => 'string'
attribute "kzookeeper/url",
:description => "Url to download binaries for kzookeeper",
:type => 'string'
attribute "kzookeeper/user",
:description => "Run kzookeeper as this user",
:type => 'string'
attribute "kzookeeper/group",
:description => "Run kzookeeper user as this group",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "kafka/ulimit",
:description => "ULimit for the max number of open files allowed",
:type => 'string'
attribute "kkafka/offset_monitor/port",
:description => "Port for Kafka monitor service",
:type => 'string'
attribute "kkafka/memory_mb",
:description => "Kafka server memory in mbs",
:type => 'string'
attribute "kkafka/broker/zookeeper_connection_timeout_ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/hours",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/size",
:description => "",
:type => 'string'
attribute "kkafka/broker/message/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/network/threads",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/io/threads",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/recovery/threads/per/data/dir",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/replica/fetchers",
:description => "",
:type => 'string'
attribute "kkafka/broker/queued/max/requests",
:description => "",
:type => 'string'
attribute "kkafka/broker/socket/send/buffer/bytes",
:description => "",
:type => 'string'
# Fixed: a paste error had embedded the literal token "attribute " inside each
# of these attribute-name strings (e.g. "kkafka/brattribute oker/..."),
# producing invalid keys. Names restored to the intended Kafka broker paths.
attribute "kkafka/broker/socket/receive/buffer/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/socket/request/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/num/partitions",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/segment/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/roll/hours",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/hours",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/retention/check/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/index/size/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/index/interval/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/interval/messages",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/scheduler/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/leader/imbalance/check/interval/seconds",
:description => "",
:type => 'string'
attribute "kkafka/broker/leader/imbalance/per/broker/percentage",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/dir",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/flush/offset/checkpoint/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/port",
:description => "",
:type => 'string'
attribute "kkafka/broker/queued/max/requests",
:description => "",
:type => 'string'
attribute "kkafka/broker/quota/consumer/default",
:description => "",
:type => 'string'
attribute "kkafka/broker/quota/producer/default",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/max/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/min/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/fetch/wait/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/high/watermark/checkpoint/interval/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/lag/time/max/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/socket/receive/buffer/bytes",
:description => "",
:type => 'string'
attribute "kkafka/broker/replica/socket/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/request/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/session/timeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/set/acl",
:description => "",
:type => 'string'
attribute "kkafka/broker/replication/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/cleaner/enable",
:description => "",
:type => 'string'
attribute "kkafka/broker/log/cleaner/io/buffer/load/factor",
:description => "",
:type => 'string'
attribute "kkafka/broker/security/inter/broker/protocol",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/client/auth",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/key/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/keystore/location",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/keystore/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/truststore/location",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/truststore/password",
:description => "",
:type => 'string'
attribute "kkafka/broker/authorizer/class/name",
:description => "",
:type => 'string'
attribute "kkafka/broker/ssl/endpoint/identification/algorithm",
:description => "",
:type => 'string'
attribute "kkafka/broker/principal/builder/class",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/synctime/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/connectiontimeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/sessiontimeout/ms",
:description => "",
:type => 'string'
attribute "kkafka/broker/zookeeper/synctime/ms",
:description => "",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "elastic/port",
:description => "Port for elasticsearch service (default: 9200)",
:type => 'string'
attribute "elastic/ulimit_files",
:description => "Number of files to set ulimit to.",
:type => 'string'
attribute "elastic/ulimit_memlock",
:description => "Memlock size for ulimit",
:type => 'string'
attribute "elastic/dir",
:description => "Base directory to install elastic search into.",
:type => 'string'
attribute "elastic/memory",
:description => "Amount of memory for Elasticsearch.",
:type => 'string'
attribute "elastic/version",
:description => "Elasticsearch version, e.g., '2.1.2'",
:type => 'string'
attribute "elastic/checksum",
:description => "Sha-1 checksum for the elasticsearch .tar.gz file",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "drelephant/user",
:description => "Username that runs the Dr Elephant server",
:type => 'string'
attribute "drelephant/port",
:description => "Port for running the Dr Elephant server",
:type => 'string'
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "zeppelin/user",
:description => "User to install/run zeppelin as",
:type => 'string'
attribute "zeppelin/dir",
:description => "zeppelin base dir",
:type => 'string'
attribute "dela/group",
:description => "group parameter value",
:type => "string"
attribute "dela/user",
:description => "user parameter value",
:type => "string"
attribute "java/jdk_version",
:description => "Version of Java to use (e.g., '7' or '8')",
:type => "string"
attribute "dela/id",
:description => "id for the dela instance. Randomly generated, but can be overridden here.",
:type => "string"
attribute "dela/seed",
:description => "seed for the dela instance. Randomly generated, but can be overridden here.",
:type => "string"
attribute "dela/log_level",
:description => "Default: WARN. Can be INFO or DEBUG or TRACE or ERROR.",
:type => "string"
attribute "dela/stun_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port1",
:description => "1st Client port used by stun client in Dela.",
:type => "string"
attribute "dela/stun_client_port2",
:description => "2nd Client port used by stun client in Dela.",
:type => "string"
attribute "dela/port",
:description => "Dela Client application port.",
:type => "string"
attribute "dela/http-port",
:description => "Dela Client http port.",
:type => "string"
|
name 'yum-repoforge'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures yum-repoforge aka RPMforge'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.0.0'
source_url 'https://github.com/chef-cookbooks/yum-repoforge'
issues_url 'https://github.com/chef-cookbooks/yum-repoforge/issues'
depends 'compat_resource', '>= 12.14.7'
depends 'yum-epel'
%w(amazon centos oracle redhat scientific).each do |os|
supports os
end
chef_version '>= 12.1'
# Depend on the latest compat_resource cookbook
name 'yum-repoforge'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures yum-repoforge aka RPMforge'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.0.0'
source_url 'https://github.com/chef-cookbooks/yum-repoforge'
issues_url 'https://github.com/chef-cookbooks/yum-repoforge/issues'
depends 'compat_resource', '>= 12.16.3'
depends 'yum-epel'
%w(amazon centos oracle redhat scientific).each do |os|
supports os
end
chef_version '>= 12.1'
|
# encoding: UTF-8
name 'system'
maintainer 'Xhost Australia'
maintainer_email 'cookbooks@xhost.com.au'
license 'Apache 2.0'
description 'Installs/Configures system elements such as the hostname and timezone.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.10.0'
recipe 'system::default', "Sets the system's hostname and timezone, updates the system's installed packages."
recipe 'system::timezone', "Sets the system's timezone."
recipe 'system::hostname', "Sets the system's hostname."
recipe 'system::profile', "Manage the system's profile such as search path and scripts (/etc/profile)."
recipe 'system::upgrade_packages', "Upgrades the system's installed packages."
recipe 'system::update_package_list', "Updates the system's list of packages in the package manager's cache."
recipe 'system::install_packages', "Installs packages to the system with its native package manager."
recipe 'system::reboot', 'Attempts to gracefully reboot the operating system.'
recipe 'system::shutdown', 'Attempts to gracefully shutdown the operating system.'
%w(ubuntu debian centos fedora redhat arch mac_os_x).each { |os| supports os }
depends 'apt'
depends 'cron'
depends 'hostsfile'
attribute 'system/timezone',
display_name: 'Timezone',
description: 'The system timezone, which must be a valid zoneinfo/tz database entry.',
required: 'optional',
default: 'UTC',
recipes: ['system::timezone', 'system::default'],
choice: [
'Africa/Casablanca',
'America/Bogota',
'America/Buenos_Aires',
'America/Caracas',
'America/La_Paz',
'America/Lima',
'America/Mexico_City',
'Asia/Almaty',
'Asia/Baghdad',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Calcutta',
'Asia/Colombo',
'Asia/Dhaka',
'Asia/Hong_Kong',
'Asia/Jakarta',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kathmandu',
'Asia/Magadan',
'Asia/Muscat',
'Asia/Riyadh',
'Asia/Seoul',
'Asia/Singapore',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Tokyo',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yekaterinburg',
'Atlantic/Azores',
'Atlantic/Cape_Verde',
'Australia/Adelaide',
'Australia/Darwin',
'Australia/Perth',
'Australia/Sydney',
'Brazil/Acre',
'Brazil/DeNoronha',
'Brazil/East',
'Brazil/West',
'Canada/Atlantic',
'Canada/Newfoundland',
'Europe/Brussels',
'Europe/Copenhagen',
'Europe/Kaliningrad',
'Europe/Lisbon',
'Europe/London',
'Europe/Helsinki',
'Europe/Madrid',
'Europe/Moscow',
'Europe/Paris',
'Pacific/Auckland',
'Pacific/Fiji',
'Pacific/Guam',
'Pacific/Kwajalein',
'Pacific/Midway',
'US/Alaska',
'US/Central',
'US/Eastern',
'US/Hawaii',
'US/Mountain',
'US/Pacific',
'US/Samoa',
'GMT',
'UTC',
'localtime']
attribute 'system/short_hostname',
display_name: 'Short Hostname',
description: 'The short hostname that you would like this node to have, e.g. kryten.',
required: 'recommended',
default: 'localhost',
recipes: ['system::hostname', 'system::default']
attribute 'system/domain_name',
display_name: 'Domain Name',
description: 'The domain name that you would like this node to have, e.g. domain.suf. Note: Only set a valid domain name '\
'to satisfy the resolution of a FQDN; use ignore:ignore for no domain name.',
required: 'recommended',
default: 'localdomain',
recipes: ['system::hostname', 'system::default']
attribute 'system/netbios_name',
display_name: 'NetBIOS Name',
description: 'The NetBIOS name to set on the node, default is the value of node/short_hostname upper-cased (OS X only).',
required: 'optional',
recipes: ['system::hostname', 'system::default']
attribute 'system/workgroup',
display_name: 'NetBIOS Workgroup',
description: "The NetBIOS workgroup name to set on the node, default is 'WORKGROUP' (OS X only).",
required: 'optional',
default: 'WORKGROUP',
recipes: ['system::hostname', 'system::default']
attribute 'system/upgrade_packages',
display_name: 'Upgrade Packages',
description: "Whether or not the system::upgrade_packages recipe will physically update the system's installed packages.",
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::upgrade_packages']
attribute 'system/upgrade_packages_at_compile',
display_name: 'Upgrade Packages at compile time',
description: "Whether or not the system::upgrade_packages recipe will update the system's installed packages at compile time.",
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::upgrade_packages']
attribute 'system/manage_hostsfile',
display_name: 'Manage Hostsfile',
description: 'Whether or not to manage /etc/hostsfile (in any way).',
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::hostname', 'system::default']
attribute 'system/enable_cron',
display_name: 'Enable cron recipe',
description: 'Whether or not the system::timezone recipe will include the cron recipe.',
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::timezone']
attribute 'system/packages/install',
display_name: 'Install Packages',
description: 'An array of system packages to install with the package resource in execute phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
attribute 'system/packages/install_compile_time',
display_name: 'Install Packages Compile Phase',
description: 'An array of system packages to install with the package resource in compile phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
attribute 'system/permanent_ip',
display_name: 'Permanent IP Address',
description: 'Whether the system has a permanent IP address (http://www.debian.org/doc/manuals/debian-reference/ch05.en.html#_the_hostname_resolution).',
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::hostname', 'system::default']
attribute 'system/static_hosts',
display_name: 'Static Hosts',
description: 'A hash of static hosts to add to /etc/hosts.',
required: 'optional',
type: 'hash',
recipes: ['system::hostname', 'system::default']
attribute 'system/primary_interface',
display_name: 'Primary Network Interface',
description: "Specify primary network interface, used by hostname to set the correct address in hostsfile. default is node['network']['default_interface'].",
required: 'optional',
recipes: ['system::hostname', 'system::default']
attribute 'system/profile/path',
display_name: 'System Profile Path',
description: 'Overrides the default path for the system.',
required: 'optional',
type: 'array',
recipes: ['system::profile']
attribute 'system/profile/path_append',
display_name: 'System Profile Path Append',
description: 'Append more paths to the base path.',
required: 'optional',
type: 'array',
recipes: ['system::profile']
attribute 'system/profile/path_prepend',
display_name: 'System Profile Path Prepend',
description: 'Prepends more paths to the base path.',
required: 'optional',
type: 'array',
recipes: ['system::profile']
attribute 'system/profile/append_scripts',
display_name: 'System Profile Path Append',
description: 'An array of shell scripts to be appended to the system profile (include raw scripts without shebangs).',
required: 'optional',
type: 'array',
recipes: ['system::profile']
# Bump version to 0.10.1 in metadata.rb.
# encoding: UTF-8
name 'system'
maintainer 'Xhost Australia'
maintainer_email 'cookbooks@xhost.com.au'
license 'Apache 2.0'
description 'Installs/Configures system elements such as the hostname and timezone.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.10.1'
recipe 'system::default', "Sets the system's hostname and timezone, updates the system's installed packages."
recipe 'system::timezone', "Sets the system's timezone."
recipe 'system::hostname', "Sets the system's hostname."
recipe 'system::profile', "Manage the system's profile such as search path and scripts (/etc/profile)."
recipe 'system::upgrade_packages', "Upgrades the system's installed packages."
recipe 'system::update_package_list', "Updates the system's list of packages in the package manager's cache."
recipe 'system::install_packages', "Installs packages to the system with its native package manager."
recipe 'system::reboot', 'Attempts to gracefully reboot the operating system.'
recipe 'system::shutdown', 'Attempts to gracefully shutdown the operating system.'
%w(ubuntu debian centos fedora redhat arch mac_os_x).each { |os| supports os }
depends 'apt'
depends 'cron'
depends 'hostsfile'
attribute 'system/timezone',
display_name: 'Timezone',
description: 'The system timezone, which must be a valid zoneinfo/tz database entry.',
required: 'optional',
default: 'UTC',
recipes: ['system::timezone', 'system::default'],
choice: [
'Africa/Casablanca',
'America/Bogota',
'America/Buenos_Aires',
'America/Caracas',
'America/La_Paz',
'America/Lima',
'America/Mexico_City',
'Asia/Almaty',
'Asia/Baghdad',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Calcutta',
'Asia/Colombo',
'Asia/Dhaka',
'Asia/Hong_Kong',
'Asia/Jakarta',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kathmandu',
'Asia/Magadan',
'Asia/Muscat',
'Asia/Riyadh',
'Asia/Seoul',
'Asia/Singapore',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Tokyo',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yekaterinburg',
'Atlantic/Azores',
'Atlantic/Cape_Verde',
'Australia/Adelaide',
'Australia/Darwin',
'Australia/Perth',
'Australia/Sydney',
'Brazil/Acre',
'Brazil/DeNoronha',
'Brazil/East',
'Brazil/West',
'Canada/Atlantic',
'Canada/Newfoundland',
'Europe/Brussels',
'Europe/Copenhagen',
'Europe/Kaliningrad',
'Europe/Lisbon',
'Europe/London',
'Europe/Helsinki',
'Europe/Madrid',
'Europe/Moscow',
'Europe/Paris',
'Pacific/Auckland',
'Pacific/Fiji',
'Pacific/Guam',
'Pacific/Kwajalein',
'Pacific/Midway',
'US/Alaska',
'US/Central',
'US/Eastern',
'US/Hawaii',
'US/Mountain',
'US/Pacific',
'US/Samoa',
'GMT',
'UTC',
'localtime']
attribute 'system/short_hostname',
display_name: 'Short Hostname',
description: 'The short hostname that you would like this node to have, e.g. kryten.',
required: 'recommended',
default: 'localhost',
recipes: ['system::hostname', 'system::default']
attribute 'system/domain_name',
display_name: 'Domain Name',
description: 'The domain name that you would like this node to have, e.g. domain.suf. Note: Only set a valid domain name '\
'to satisfy the resolution of a FQDN; use ignore:ignore for no domain name.',
required: 'recommended',
default: 'localdomain',
recipes: ['system::hostname', 'system::default']
attribute 'system/netbios_name',
display_name: 'NetBIOS Name',
description: 'The NetBIOS name to set on the node, default is the value of node/short_hostname upper-cased (OS X only).',
required: 'optional',
recipes: ['system::hostname', 'system::default']
attribute 'system/workgroup',
display_name: 'NetBIOS Workgroup',
description: "The NetBIOS workgroup name to set on the node, default is 'WORKGROUP' (OS X only).",
required: 'optional',
default: 'WORKGROUP',
recipes: ['system::hostname', 'system::default']
attribute 'system/upgrade_packages',
display_name: 'Upgrade Packages',
description: "Whether or not the system::upgrade_packages recipe will physically update the system's installed packages.",
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::upgrade_packages']
attribute 'system/upgrade_packages_at_compile',
display_name: 'Upgrade Packages at compile time',
description: "Whether or not the system::upgrade_packages recipe will update the system's installed packages at compile time.",
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::upgrade_packages']
attribute 'system/manage_hostsfile',
display_name: 'Manage Hostsfile',
description: 'Whether or not to manage /etc/hostsfile (in any way).',
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::hostname', 'system::default']
attribute 'system/enable_cron',
display_name: 'Enable cron recipe',
description: 'Whether or not the system::timezone recipe will include the cron recipe.',
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::timezone']
attribute 'system/packages/install',
display_name: 'Install Packages',
description: 'An array of system packages to install with the package resource in execute phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
attribute 'system/packages/install_compile_time',
display_name: 'Install Packages Compile Phase',
description: 'An array of system packages to install with the package resource in compile phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
attribute 'system/permanent_ip',
display_name: 'Permanent IP Address',
description: 'Whether the system has a permenent IP address (http://www.debian.org/doc/manuals/debian-reference/ch05.en.html#_the_hostname_resolution).',
required: 'optional',
type: 'boolean',
choice: [true, false],
default: true,
recipes: ['system::hostname', 'system::default']
attribute 'system/static_hosts',
display_name: 'Static Hosts',
description: 'A hash of static hosts to add to /etc/hosts.',
required: 'optional',
type: 'hash',
recipes: ['system::hostname', 'system::default']
attribute 'system/primary_interface',
display_name: 'Primary Network Interface',
description: "Specify primary network interface, used by hostname to set the correct address in hostsfile. default is node['network']['default_interface'].",
required: 'optional',
recipes: ['system::hostname', 'system::default']
attribute 'system/profile/path',
display_name: 'System Profile Path',
description: 'Overrides the default path for the system.',
required: 'optional',
type: 'array',
recipes: ['system::profile']
attribute 'system/profile/path_append',
display_name: 'System Profile Path Append',
description: 'Append more paths to the base path.',
required: 'optional',
type: 'array',
recipes: ['system::profile']
attribute 'system/profile/path_prepend',
display_name: 'System Profile Path Prepend',
description: 'Prepends more paths to the base path.',
required: 'optional',
type: 'array',
recipes: ['system::profile']
attribute 'system/profile/append_scripts',
display_name: 'System Profile Path Append',
description: 'An array of shell scripts to be appended to the system profile (include raw scripts without shebangs).',
required: 'optional',
type: 'array',
recipes: ['system::profile']
|
name 'newrelic'
maintainer 'Escape Studios'
maintainer_email 'dev@escapestudios.com'
license 'MIT'
description 'Installs/Configures New Relic'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.7.2'
%w( debian ubuntu redhat centos fedora scientific amazon windows smartos ).each do |os|
supports os
end
depends 'python'
depends 'curl'
recipe 'newrelic', 'Adds the New Relic repository, installs & configures the New Relic server monitor agent.'
recipe 'newrelic::repository', 'Adds the New Relic repository.'
recipe 'newrelic::server_monitor_agent', 'Installs & configures the New Relic server monitor agent.'
recipe 'newrelic::dotnet_agent', 'Installs New Relic .NET agent.'
recipe 'newrelic::java_agent', 'Installs the New Relic Java agent.'
recipe 'newrelic::nodejs_agent', 'Installs New Relic Node.js agent.'
recipe 'newrelic::php_agent', 'Installs the New Relic PHP agent.'
recipe 'newrelic::python_agent', 'Installs the New Relic Python agent.'
recipe 'newrelic::ruby_agent', 'Installs the New Relic Ruby agent.'
Enable the thread profiler based on attributes
name 'newrelic'
maintainer 'Escape Studios'
maintainer_email 'dev@escapestudios.com'
license 'MIT'
description 'Installs/Configures New Relic'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.8.0'
%w( debian ubuntu redhat centos fedora scientific amazon windows smartos ).each do |os|
supports os
end
depends 'python'
depends 'curl'
recipe 'newrelic', 'Adds the New Relic repository, installs & configures the New Relic server monitor agent.'
recipe 'newrelic::repository', 'Adds the New Relic repository.'
recipe 'newrelic::server_monitor_agent', 'Installs & configures the New Relic server monitor agent.'
recipe 'newrelic::dotnet_agent', 'Installs New Relic .NET agent.'
recipe 'newrelic::java_agent', 'Installs the New Relic Java agent.'
recipe 'newrelic::nodejs_agent', 'Installs New Relic Node.js agent.'
recipe 'newrelic::php_agent', 'Installs the New Relic PHP agent.'
recipe 'newrelic::python_agent', 'Installs the New Relic Python agent.'
recipe 'newrelic::ruby_agent', 'Installs the New Relic Ruby agent.'
|
name 'chef_nginx'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures nginx'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '5.1.0'
recipe 'chef_nginx', 'Installs nginx package and sets up configuration with Debian apache style with sites-enabled/sites-available'
recipe 'chef_nginx::source', 'Installs nginx from source and sets up configuration with Debian apache style with sites-enabled/sites-available'
depends 'build-essential'
depends 'ohai', '>= 4.1.0'
depends 'yum-epel'
depends 'runit', '>= 1.6.0'
depends 'compat_resource', '>= 12.14.6'
depends 'zypper'
suggests 'nssm'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'fedora'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'ubuntu'
supports 'suse'
supports 'opensuse'
supports 'opensuseleap'
supports 'windows'
source_url 'https://github.com/chef-cookbooks/chef_nginx'
issues_url 'https://github.com/chef-cookbooks/chef_nginx/issues'
chef_version '>= 12.1'
depends instead of sugggests
name 'chef_nginx'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures nginx'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '5.1.0'
recipe 'chef_nginx', 'Installs nginx package and sets up configuration with Debian apache style with sites-enabled/sites-available'
recipe 'chef_nginx::source', 'Installs nginx from source and sets up configuration with Debian apache style with sites-enabled/sites-available'
depends 'build-essential'
depends 'ohai', '>= 4.1.0'
depends 'yum-epel'
depends 'runit', '>= 1.6.0'
depends 'compat_resource', '>= 12.14.6'
depends 'zypper'
depends 'nssm'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'fedora'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'ubuntu'
supports 'suse'
supports 'opensuse'
supports 'opensuseleap'
supports 'windows'
source_url 'https://github.com/chef-cookbooks/chef_nginx'
issues_url 'https://github.com/chef-cookbooks/chef_nginx/issues'
chef_version '>= 12.1'
|
name "postgresql"
maintainer "Heavy Water Operations, LLC"
maintainer_email "support@hw-ops.com"
license "Apache 2.0"
description "Installs and configures postgresql for clients or servers"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "3.4.12"
recipe "postgresql", "Includes postgresql::client"
recipe "postgresql::ruby", "Installs pg gem for Ruby bindings"
recipe "postgresql::client", "Installs postgresql client package(s)"
recipe "postgresql::server", "Installs postgresql server packages, templates"
recipe "postgresql::server_redhat", "Installs postgresql server packages, redhat family style"
recipe "postgresql::server_debian", "Installs postgresql server packages, debian family style"
supports "ubuntu", "< 14.04"
%w{debian fedora suse amazon}.each do |os|
supports os
end
%w{redhat centos scientific oracle}.each do |el|
supports el, "~> 6.0"
end
depends "apt", ">= 1.9.0"
depends "build-essential"
depends "openssl"
Development version
name "postgresql"
maintainer "Heavy Water Operations, LLC"
maintainer_email "support@hw-ops.com"
license "Apache 2.0"
description "Installs and configures postgresql for clients or servers"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "3.4.13"
recipe "postgresql", "Includes postgresql::client"
recipe "postgresql::ruby", "Installs pg gem for Ruby bindings"
recipe "postgresql::client", "Installs postgresql client package(s)"
recipe "postgresql::server", "Installs postgresql server packages, templates"
recipe "postgresql::server_redhat", "Installs postgresql server packages, redhat family style"
recipe "postgresql::server_debian", "Installs postgresql server packages, debian family style"
supports "ubuntu", "< 14.04"
%w{debian fedora suse amazon}.each do |os|
supports os
end
%w{redhat centos scientific oracle}.each do |el|
supports el, "~> 6.0"
end
depends "apt", ">= 1.9.0"
depends "build-essential"
depends "openssl"
|
name 'zabbix_lwrp'
maintainer 'LLC Express 42'
maintainer_email 'cookbooks@express42.com'
license 'MIT'
description 'Installs and configures Zabbix agent and server with PostgreSQL and Nginx. Provides LWRP for creating and modifying Zabbix objects.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.2.4'
chef_version '>= 12.5' if respond_to?(:chef_version)
source_url 'https://github.com/express42/zabbix_lwrp' if respond_to?(:source_url)
issues_url 'https://github.com/express42/zabbix_lwrp/issues' if respond_to?(:issues_url)
depends 'apt'
depends 'build-essential'
depends 'chef_nginx'
depends 'chocolatey'
depends 'database'
depends 'lvm'
depends 'php-fpm'
depends 'postgresql'
depends 'windows_firewall'
recipe 'zabbix_lwrp::agent', 'Installs and configures Zabbix agent.'
recipe 'zabbix_lwrp::connect', 'Connects to Zabbix API to sync configuration.'
recipe 'zabbix_lwrp::default', 'Installs and configures Zabbix official repository and agent.'
recipe 'zabbix_lwrp::database', 'Installs and configures Zabbix database.'
recipe 'zabbix_lwrp::host', 'Creates host via Zabbix API.'
recipe 'zabbix_lwrp::partition', 'Configures LVM for Zabbix database.'
recipe 'zabbix_lwrp::repository', 'Installs Zabbix official repository.'
recipe 'zabbix_lwrp::server', 'Installs and configures Zabbix server.'
recipe 'zabbix_lwrp::web', 'Installs and configures Zabbix frontend.'
supports 'ubuntu'
supports 'centos'
supports 'windows'
Version bump to 1.2.5
name 'zabbix_lwrp'
maintainer 'LLC Express 42'
maintainer_email 'cookbooks@express42.com'
license 'MIT'
description 'Installs and configures Zabbix agent and server with PostgreSQL and Nginx. Provides LWRP for creating and modifying Zabbix objects.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.2.5'
chef_version '>= 12.5' if respond_to?(:chef_version)
source_url 'https://github.com/express42/zabbix_lwrp' if respond_to?(:source_url)
issues_url 'https://github.com/express42/zabbix_lwrp/issues' if respond_to?(:issues_url)
depends 'apt'
depends 'build-essential'
depends 'chef_nginx'
depends 'chocolatey'
depends 'database'
depends 'lvm'
depends 'php-fpm'
depends 'postgresql'
depends 'windows_firewall'
recipe 'zabbix_lwrp::agent', 'Installs and configures Zabbix agent.'
recipe 'zabbix_lwrp::connect', 'Connects to Zabbix API to sync configuration.'
recipe 'zabbix_lwrp::default', 'Installs and configures Zabbix official repository and agent.'
recipe 'zabbix_lwrp::database', 'Installs and configures Zabbix database.'
recipe 'zabbix_lwrp::host', 'Creates host via Zabbix API.'
recipe 'zabbix_lwrp::partition', 'Configures LVM for Zabbix database.'
recipe 'zabbix_lwrp::repository', 'Installs Zabbix official repository.'
recipe 'zabbix_lwrp::server', 'Installs and configures Zabbix server.'
recipe 'zabbix_lwrp::web', 'Installs and configures Zabbix frontend.'
supports 'ubuntu'
supports 'centos'
supports 'windows'
|
# -*- coding: UTF-8 -*-
name 'valhalla'
maintainer 'valhalla'
maintainer_email 'valhalla@mapzen.com'
license 'MIT'
description 'Installs/Configures valhalla'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.4.5'
recipe 'valhalla', 'Installs valhalla'
%w(
apt
user
runit
).each do |dep|
depends dep
end
supports 'ubuntu', '>= 12.04'
update version
# -*- coding: UTF-8 -*-
name 'valhalla'
maintainer 'valhalla'
maintainer_email 'valhalla@mapzen.com'
license 'MIT'
description 'Installs/Configures valhalla'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.4.6'
recipe 'valhalla', 'Installs valhalla'
%w(
apt
user
runit
).each do |dep|
depends dep
end
supports 'ubuntu', '>= 12.04'
|
# frozen_string_literal: true
require "test_helper"
module Speculation
class SpeculationTest < Minitest::Test
S = Speculation
include S::NamespacedSymbols
def test_conform_with_existing_spec
S.def(ns(:int?), ->(x) { x.is_a?(Integer) })
assert_equal 2, S.conform(ns(:int?), 2)
assert_equal :"Speculation/invalid", S.conform(ns(:int?), "two")
assert S.valid?(ns(:int?), 2)
refute S.valid?(ns(:int?), "two")
end
def test_def_requires_namespaced_symbol
assert_raises(ArgumentError) do
S.def("foo/integer", Integer)
end
assert_raises(ArgumentError) do
S.def(:integer, Integer)
end
end
def test_conform_with_predicate
predicate = ->(x) { x.is_a?(Integer) }
assert_equal 2, S.conform(predicate, 2)
assert_equal :"Speculation/invalid", S.conform(predicate, "two")
assert S.valid?(predicate, 2)
refute S.valid?(predicate, "two")
end
def test_conformer
S.def(ns(:wont_conform_keys), S.hash_of(S.and(Symbol, S.conformer(&:to_s)),
S.and(Float, S.conformer(&:to_i))))
assert_equal({ :foo => 1, :bar => 2 },
S.conform(ns(:wont_conform_keys), :foo => 1.0, :bar => 2.0))
S.def(ns(:will_conform_keys), S.hash_of(S.and(Symbol, S.conformer(&:to_s)),
S.and(Float, S.conformer(&:to_i)),
:conform_keys => true))
assert_equal({ "foo" => 1, "bar" => 2 },
S.conform(ns(:will_conform_keys), :foo => 1.0, :bar => 2.0))
end
def test_explain_data
S.def(ns(:even), ->(x) { x.even? })
ed = S.explain_data(ns(:even), 1)
problems = ed.fetch(ns(S, :problems))
assert_equal 1, problems.count
problem = problems.first
assert_equal [], problem[:path]
assert_equal 1, problem[:val]
assert_equal [ns(:even)], problem[:via]
assert_equal [], problem[:in]
assert_kind_of Proc, problem[:pred].first
assert_equal [1], problem[:pred].last
S.def(ns(:integer), Integer)
S.def(ns(:even), ->(x) { x.even? })
S.def(ns(:even_integer), S.and(ns(:integer), ns(:even)))
ed = S.explain_data(ns(:even_integer), "s")
problems = ed.fetch(ns(S, :problems))
assert_equal 1, problems.count
problem = problems.first
assert_equal [], problem[:path]
assert_equal "s", problem[:val]
assert_equal [ns(:even_integer), ns(:integer)], problem[:via]
assert_equal [], problem[:in]
assert_equal [Integer, ["s"]], problem[:pred]
end
end
end
Tidy up test
# frozen_string_literal: true
require "test_helper"
module Speculation
class SpeculationTest < Minitest::Test
S = Speculation
include S::NamespacedSymbols
def test_def_requires_namespaced_symbol
assert_raises(ArgumentError) do
S.def("foo/integer", Integer)
end
assert_raises(ArgumentError) do
S.def(:integer, Integer)
end
end
def test_conform_with_existing_spec
S.def(ns(:int?), ->(x) { x.is_a?(Integer) })
assert_equal 2, S.conform(ns(:int?), 2)
assert_equal :"Speculation/invalid", S.conform(ns(:int?), "two")
assert S.valid?(ns(:int?), 2)
refute S.valid?(ns(:int?), "two")
end
def test_conform_with_predicate
predicate = ->(x) { x.is_a?(Integer) }
assert_equal 2, S.conform(predicate, 2)
assert_equal :"Speculation/invalid", S.conform(predicate, "two")
assert S.valid?(predicate, 2)
refute S.valid?(predicate, "two")
end
def test_conformer
S.def(ns(:wont_conform_keys), S.hash_of(S.and(Symbol, S.conformer(&:to_s)),
S.and(Float, S.conformer(&:to_i))))
assert_equal Hash[:foo => 1, :bar => 2],
S.conform(ns(:wont_conform_keys), :foo => 1.0, :bar => 2.0)
S.def(ns(:will_conform_keys), S.hash_of(S.and(Symbol, S.conformer(&:to_s)),
S.and(Float, S.conformer(&:to_i)),
:conform_keys => true))
assert_equal Hash["foo" => 1, "bar" => 2],
S.conform(ns(:will_conform_keys), :foo => 1.0, :bar => 2.0)
end
def test_explain_data
S.def(ns(:integer), Integer)
S.def(ns(:even), ->(x) { x.even? })
S.def(ns(:even_integer), S.and(ns(:integer), ns(:even)))
ed = S.explain_data(ns(:even_integer), "s")
problems = ed.fetch(ns(S, :problems))
assert_equal 1, problems.count
assert_equal Hash[:path => [],
:val => "s",
:via => [ns(:even_integer), ns(:integer)],
:in => [],
:pred => [Integer, ["s"]]], problems.first
end
end
end
|
name 'mysql'
maintainer 'Opscode, Inc.'
maintainer_email 'cookbooks@opscode.com'
license 'Apache 2.0'
description 'Installs and configures mysql for client or server'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '4.0.6'
recipe 'mysql', 'Includes the client recipe to configure a client'
recipe 'mysql::client', 'Installs packages required for mysql clients using run_action magic'
recipe 'mysql::server', 'Installs packages required for mysql servers w/o manual intervention'
recipe 'mysql::server_ec2', 'Performs EC2-specific mountpoint manipulation'
# actually tested on
supports 'redhat'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'ubuntu'
# code bits around, untested. remove?
supports 'freebsd'
supports 'mac_os_x'
supports 'scientific'
supports 'suse'
supports 'windows'
depends 'openssl', '~> 1.1'
depends 'build-essential', '~> 1.4'
# wat
suggests 'homebrew'
suggests 'windows'
# remove all these attributes from metadata?
attribute 'mysql/server_root_password',
:display_name => 'MySQL Server Root Password',
:description => 'Randomly generated password for the mysqld root user',
:default => 'randomly generated'
attribute 'mysql/bind_address',
:display_name => 'MySQL Bind Address',
:description => 'Address that mysqld should listen on',
:default => 'ipaddress'
attribute 'mysql/data_dir',
:display_name => 'MySQL Data Directory',
:description => 'Location of mysql databases',
:default => '/var/lib/mysql'
attribute 'mysql/conf_dir',
:display_name => 'MySQL Conf Directory',
:description => 'Location of mysql conf files',
:default => '/etc/mysql'
attribute 'mysql/ec2_path',
:display_name => 'MySQL EC2 Path',
:description => 'Location of mysql directory on EC2 instance EBS volumes',
:default => '/mnt/mysql'
attribute 'mysql/reload_action',
:display_name => 'MySQL conf file reload action',
:description => 'Action to take when mysql conf files are modified',
:default => 'reload'
attribute 'mysql/tunable',
:display_name => 'MySQL Tunables',
:description => 'Hash of MySQL tunable attributes',
:type => 'hash'
attribute 'mysql/tunable/key_buffer',
:display_name => 'MySQL Tuntable Key Buffer',
:default => '250M'
attribute 'mysql/tunable/max_connections',
:display_name => 'MySQL Tunable Max Connections',
:default => '800'
attribute 'mysql/tunable/wait_timeout',
:display_name => 'MySQL Tunable Wait Timeout',
:default => '180'
attribute 'mysql/tunable/net_read_timeout',
:display_name => 'MySQL Tunable Net Read Timeout',
:default => '30'
attribute 'mysql/tunable/net_write_timeout',
:display_name => 'MySQL Tunable Net Write Timeout',
:default => '30'
attribute 'mysql/tunable/back_log',
:display_name => 'MySQL Tunable Back Log',
:default => '128'
attribute 'mysql/tunable/table_cache',
:display_name => 'MySQL Tunable Table Cache for MySQL < 5.1.3',
:default => '128'
attribute 'mysql/tunable/table_open_cache',
:display_name => 'MySQL Tunable Table Cache for MySQL >= 5.1.3',
:default => '128'
attribute 'mysql/tunable/max_heap_table_size',
:display_name => 'MySQL Tunable Max Heap Table Size',
:default => '32M'
attribute 'mysql/tunable/expire_logs_days',
:display_name => 'MySQL Exipre Log Days',
:default => '10'
attribute 'mysql/tunable/max_binlog_size',
:display_name => 'MySQL Max Binlog Size',
:default => '100M'
attribute 'mysql/client',
:display_name => 'MySQL Connector/C Client',
:description => 'Hash of MySQL client attributes',
:type => 'hash'
attribute 'mysql/client/version',
:display_name => 'MySQL Connector/C Version',
:default => '6.0.2'
attribute 'mysql/client/arch',
:display_name => 'MySQL Connector/C Architecture',
:default => 'win32'
attribute 'mysql/client/package_file',
:display_name => 'MySQL Connector/C Package File Name',
:default => 'mysql-connector-c-6.0.2-win32.msi'
attribute 'mysql/client/url',
:display_name => 'MySQL Connector/C Download URL',
:default => 'http://www.mysql.com/get/Downloads/Connector-C/mysql-connector-c-6.0.2-win32.msi/from/http://mysql.mirrors.pair.com/'
attribute 'mysql/client/package_name',
:display_name => 'MySQL Connector/C Registry DisplayName',
:default => 'MySQL Connector C 6.0.2'
attribute 'mysql/client/basedir',
:display_name => 'MySQL Connector/C Base Install Directory',
:default => 'C:\\Program Files (x86)\\MySQL\\Connector C 6.0.2'
attribute 'mysql/client/lib_dir',
:display_name => 'MySQL Connector/C Library Directory (containing libmysql.dll)',
:default => 'C:\\Program Files (x86)\\MySQL\\Connector C 6.0.2\\lib\\opt'
attribute 'mysql/client/bin_dir',
:display_name => 'MySQL Connector/C Executable Directory',
:default => 'C:\\Program Files (x86)\\MySQL\\Connector C 6.0.2\\bin'
attribute 'mysql/client/ruby_dir',
:display_name => 'Ruby Executable Directory which should gain MySQL support',
:default => 'system ruby'
Version bump to v4.0.7
name 'mysql'
maintainer 'Opscode, Inc.'
maintainer_email 'cookbooks@opscode.com'
license 'Apache 2.0'
description 'Installs and configures mysql for client or server'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '4.0.7'
recipe 'mysql', 'Includes the client recipe to configure a client'
recipe 'mysql::client', 'Installs packages required for mysql clients using run_action magic'
recipe 'mysql::server', 'Installs packages required for mysql servers w/o manual intervention'
recipe 'mysql::server_ec2', 'Performs EC2-specific mountpoint manipulation'
# actually tested on
supports 'redhat'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'ubuntu'
# code bits around, untested. remove?
supports 'freebsd'
supports 'mac_os_x'
supports 'scientific'
supports 'suse'
supports 'windows'
depends 'openssl', '~> 1.1'
depends 'build-essential', '~> 1.4'
# wat
suggests 'homebrew'
suggests 'windows'
# remove all these attributes from metadata?
attribute 'mysql/server_root_password',
:display_name => 'MySQL Server Root Password',
:description => 'Randomly generated password for the mysqld root user',
:default => 'randomly generated'
attribute 'mysql/bind_address',
:display_name => 'MySQL Bind Address',
:description => 'Address that mysqld should listen on',
:default => 'ipaddress'
attribute 'mysql/data_dir',
:display_name => 'MySQL Data Directory',
:description => 'Location of mysql databases',
:default => '/var/lib/mysql'
attribute 'mysql/conf_dir',
:display_name => 'MySQL Conf Directory',
:description => 'Location of mysql conf files',
:default => '/etc/mysql'
attribute 'mysql/ec2_path',
:display_name => 'MySQL EC2 Path',
:description => 'Location of mysql directory on EC2 instance EBS volumes',
:default => '/mnt/mysql'
attribute 'mysql/reload_action',
:display_name => 'MySQL conf file reload action',
:description => 'Action to take when mysql conf files are modified',
:default => 'reload'
attribute 'mysql/tunable',
:display_name => 'MySQL Tunables',
:description => 'Hash of MySQL tunable attributes',
:type => 'hash'
attribute 'mysql/tunable/key_buffer',
:display_name => 'MySQL Tuntable Key Buffer',
:default => '250M'
attribute 'mysql/tunable/max_connections',
:display_name => 'MySQL Tunable Max Connections',
:default => '800'
attribute 'mysql/tunable/wait_timeout',
:display_name => 'MySQL Tunable Wait Timeout',
:default => '180'
attribute 'mysql/tunable/net_read_timeout',
:display_name => 'MySQL Tunable Net Read Timeout',
:default => '30'
attribute 'mysql/tunable/net_write_timeout',
:display_name => 'MySQL Tunable Net Write Timeout',
:default => '30'
attribute 'mysql/tunable/back_log',
:display_name => 'MySQL Tunable Back Log',
:default => '128'
attribute 'mysql/tunable/table_cache',
:display_name => 'MySQL Tunable Table Cache for MySQL < 5.1.3',
:default => '128'
attribute 'mysql/tunable/table_open_cache',
:display_name => 'MySQL Tunable Table Cache for MySQL >= 5.1.3',
:default => '128'
attribute 'mysql/tunable/max_heap_table_size',
:display_name => 'MySQL Tunable Max Heap Table Size',
:default => '32M'
attribute 'mysql/tunable/expire_logs_days',
:display_name => 'MySQL Exipre Log Days',
:default => '10'
attribute 'mysql/tunable/max_binlog_size',
:display_name => 'MySQL Max Binlog Size',
:default => '100M'
attribute 'mysql/client',
:display_name => 'MySQL Connector/C Client',
:description => 'Hash of MySQL client attributes',
:type => 'hash'
attribute 'mysql/client/version',
:display_name => 'MySQL Connector/C Version',
:default => '6.0.2'
attribute 'mysql/client/arch',
:display_name => 'MySQL Connector/C Architecture',
:default => 'win32'
attribute 'mysql/client/package_file',
:display_name => 'MySQL Connector/C Package File Name',
:default => 'mysql-connector-c-6.0.2-win32.msi'
attribute 'mysql/client/url',
:display_name => 'MySQL Connector/C Download URL',
:default => 'http://www.mysql.com/get/Downloads/Connector-C/mysql-connector-c-6.0.2-win32.msi/from/http://mysql.mirrors.pair.com/'
attribute 'mysql/client/package_name',
:display_name => 'MySQL Connector/C Registry DisplayName',
:default => 'MySQL Connector C 6.0.2'
attribute 'mysql/client/basedir',
:display_name => 'MySQL Connector/C Base Install Directory',
:default => 'C:\\Program Files (x86)\\MySQL\\Connector C 6.0.2'
attribute 'mysql/client/lib_dir',
:display_name => 'MySQL Connector/C Library Directory (containing libmysql.dll)',
:default => 'C:\\Program Files (x86)\\MySQL\\Connector C 6.0.2\\lib\\opt'
attribute 'mysql/client/bin_dir',
:display_name => 'MySQL Connector/C Executable Directory',
:default => 'C:\\Program Files (x86)\\MySQL\\Connector C 6.0.2\\bin'
attribute 'mysql/client/ruby_dir',
:display_name => 'Ruby Executable Directory which should gain MySQL support',
:default => 'system ruby'
|
name 'smartstack'
maintainer 'Igor Serebryany'
maintainer_email 'igor.serebryany@airbnb.com'
license 'MIT'
version '0.4.1'
description 'The cookbook for configuring Airbnb SmartStack'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
recipe 'smartstack::nerve', 'Installs and configures nerve, the service registry component'
recipe 'smartstack::synapse', 'Installs and confgures a synapse, the service discovery component'
depends 'runit', '>= 1.1.0'
depends 'ruby', '~> 0.9.2'
depends 'java'
supports 'ubuntu', '= 12.04'
bump cookbook version to 0.5.0
name 'smartstack'
maintainer 'Igor Serebryany'
maintainer_email 'igor.serebryany@airbnb.com'
license 'MIT'
version '0.5.0'
description 'The cookbook for configuring Airbnb SmartStack'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
recipe 'smartstack::nerve', 'Installs and configures nerve, the service registry component'
recipe 'smartstack::synapse', 'Installs and confgures a synapse, the service discovery component'
depends 'runit', '>= 1.1.0'
depends 'ruby', '~> 0.9.2'
depends 'java'
supports 'ubuntu', '= 12.04'
|
name 'tomcat'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and Apache Tomcat and manages the service'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.3.0'
depends 'compat_resource', '>= 12.7.3'
%w(ubuntu debian redhat centos suse opensuse scientific oracle amazon).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/tomcat' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/tomcat/issues' if respond_to?(:issues_url)
Get the version number right
name 'tomcat'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and Apache Tomcat and manages the service'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '2.0.0'
depends 'compat_resource', '>= 12.7.3'
%w(ubuntu debian redhat centos suse opensuse scientific oracle amazon).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/tomcat' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/tomcat/issues' if respond_to?(:issues_url)
|
# Chef cookbook metadata header for the Hops Hadoop distribution cookbook.
name "hops"
maintainer "Jim Dowling"
maintainer_email "jdowling@kth.se"
license "Apache v2.0"
description 'Installs/Configures the Hops distribution'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.6.0"
source_url "https://github.com/hopshadoop/hops-hadoop-chef"
#link:<a target='_blank' href='http://%host%:50070/'>Launch the WebUI for the NameNode</a>
recipe "hops::nn", "Installs a HopsFs NameNode"
recipe "hops::ndb", "Installs MySQL Cluster (ndb) dal driver for Hops"
recipe "hops::dn", "Installs a HopsFs DataNode"
#link:<a target='_blank' href='http://%host%:8088/'>Launch the WebUI for the ResourceManager</a>
recipe "hops::rm", "Installs a YARN ResourceManager"
recipe "hops::nm", "Installs a YARN NodeManager"
recipe "hops::jhs", "Installs a MapReduce JobHistory Server for YARN"
recipe "hops::ps", "Installs a WebProxy Server for YARN"
recipe "hops::rt", "Installs a ResourceTracker server for YARN"
# Fixed typos: "libaries" -> "libraries", "progams" -> "programs".
recipe "hops::client", "Installs libraries and configuration files for writing HDFS and YARN programs"
# Fixed typo: "doesnt" -> "doesn't".
recipe "hops::purge", "Removes all hops-hadoop files and dirs and ndb-dal, but doesn't drop hops db from NDB"
recipe "hops::purge-ndb", "Drops hops db from NDB"
depends 'java'
depends 'kagent'
depends 'ndb'
depends 'magic_shell'
depends 'sysctl'
depends 'cmake'
depends 'kzookeeper'
depends 'hopsmonitor'
# Every platform family this cookbook supports.
%w{ ubuntu debian rhel centos }.each do |os|
  supports os
end
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "java/java_home",
:description => "JAVA_HOME",
:type => 'string'
attribute "hops/dir",
:description => "Base installation directory for HopsFS",
:type => 'string'
attribute "mysql/user",
:description => "Mysql server username",
:type => 'string',
:required => "required"
attribute "mysql/password",
:description => "MySql server Password",
:type => 'string',
:required => "required"
attribute "hops/erasure_coding",
:description => "'true' or 'false' - true to enable erasure-coding replication",
:type => 'string'
attribute "hops/nn/cache",
:description => "'true' or 'false' - true to enable the path cache in the NameNode",
:type => 'string'
attribute "hops/nn/partition_key",
:description => "'true' or 'false' - true to enable the partition key when starting transactions. Distribution-aware transactions.",
:type => 'string'
attribute "hops/yarn/resource_tracker",
:description => "Hadoop Resource Tracker enabled on this nodegroup",
:type => 'string'
attribute "hops/install_db",
:description => "Install hops database and tables in MySQL Cluster ('true' (default) or 'false')",
:type => 'string'
attribute "hops/use_systemd",
:description => "Use systemd startup scripts, default 'false'",
:type => "string"
attribute "hops/format",
:description => "'true' to format HDFS, 'false' to skip formatting",
:type => 'string'
attribute "hops/reformat",
:description => "'true' to re-format HDFS, 'false' to skip re-formatting",
:type => 'string'
attribute "hops/yarn/memory_mbs",
:description => "Apache_Hadoop NodeManager Memory in MB",
:type => 'string'
attribute "hops/yarn/nodemanager_log_dir",
:description => "The directory in which yarn node manager store containers logs",
:type => 'string'
attribute "hops/yarn/nodemanager_recovery_dir",
:description => "The directory in which yarn node manager stores recovery state",
:type => 'string'
attribute "hops/yarn/nodemanager_ha_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_auto_failover_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_recovery_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_size",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_duration",
:description => "",
:type => "string"
attribute "hops/yarn/rm_distributed",
:description => "Set to 'true' to enable distributed RMs",
:type => "string"
attribute "hops/yarn/nodemanager_rm_streaming_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/rm_heartbeat",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_base_ms",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_max_ms",
:description => "",
:type => "string"
attribute "hops/yarn/quota_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/quota_monitor_interval",
:description => "",
:type => "string"
attribute "hops/yarn/quota_ticks_per_credit",
:description => "",
:type => "string"
attribute "hops/yarn/quota_min_ticks_charge",
:description => "",
:type => "string"
attribute "hops/yarn/quota_checkpoint_nbticks",
:description => "",
:type => "string"
attribute "hops/yarn/nm_heapsize_mbs",
:description => "Increase this value if using the YARN external shuffle service. (default: 1000)",
:type => 'string'
attribute "hops/yarn/rm_heapsize_mbs",
:description => "Resource manager heapsize. (default: 1000)",
:type => 'string'
attribute "hops/yarn/container_executor",
:description => "Container executor class",
:type => 'string'
attribute "hops/trash/interval",
:description => "How long in minutes trash survives in /user/<glassfish>/.Trash/<interval-bucket>/...",
:type => "string"
attribute "hops/trash/checkpoint/interval",
:description => "How long in minutes until a new directory bucket is created in /user/<glassfish>/.Trash with a timestamp. ",
:type => "string"
attribute "hops/nn/private_ips",
:description => "Set ip addresses",
:type => "array"
attribute "hops/nn/public_ips",
:description => "Set ip addresses",
:type => "array"
attribute "hops/rm/private_ips",
:description => "Set ip addresses",
:type => "array"
attribute "hops/rm/public_ips",
:description => "Set ip addresses",
:type => "array"
# Needed to find the jar file for yarn-spark-shuffle (typo fixed: was
# "yan-spark-shuffle") — the Spark external shuffle service jar is
# versioned, so the cookbook needs the Spark version to locate it.
attribute "hadoop_spark/version",
:description => "Spark version",
:type => 'string'
attribute "hops/url/primary",
:description => "Primary download url of hops distribution",
:type => 'string'
attribute "hops/url/secondary",
:description => "Secondary download url of hops distribution",
:type => 'string'
attribute "hops/tls/enabled",
:description => "'true' will enable RPC TLS and 'false' will disable it",
:type => 'string'
attribute "hops/tls/certs_actor_class",
:description => "Actor class to perform X509 requests to Hopsworks",
:type => 'string'
# Safety margin used when deciding whether an X509 certificate is due for
# renewal. Fixed typos in description: "substract fro" -> "subtract from".
attribute "hops/tls/certs_expiration_safety_period",
:description => "Time to subtract from X509 expiration time for renewal",
:type => 'string'
attribute "hops/tls/certs_revocation_monitor_interval",
:description => "Period to check for stale X509 certificates that should be revoked",
:type => 'string'
attribute "hops/tls/crl_enabled",
:description => "Enable CRL validation when RPC TLS is enabled",
:type => 'string'
attribute "hops/tls/crl_fetcher_class",
:description => "Canonical name of the CRL fetcher class",
:type => 'string'
attribute "hops/tls/crl_input_uri",
:description => "Location where the CRL will be fetched from",
:type => 'string'
attribute "hops/tls/crl_output_file",
:description => "Location where the CRL will be stored",
:type => 'string'
attribute "hops/tls/crl_fetcher_interval",
:description => "Interval for the CRL fetcher service, suffix can be m/h/d",
:type => 'string'
# Fixed typo in description: "Tranfer" -> "Transfer".
attribute "hops/encrypt_data_transfer/enabled",
:description => "Enable encryption for Data Transfer Protocol of DataNodes",
:type => 'string'
attribute "hops/encrypt_data_transfer/algorithm",
:description => "Encryption algorithm, 3des or rc4",
:type => 'string'
attribute "hops/yarn/vcores",
:description => "Hops NodeManager Number of Virtual Cores",
:type => 'string'
attribute "hops/yarn/min_vcores",
:description => "Hadoop NodeManager Minimum Virtual Cores per container",
:type => 'string'
attribute "hops/yarn/max_vcores",
:description => "Hadoop NodeManager Maximum Virtual Cores per container",
:type => 'string'
attribute "hops/yarn/log_retain_secs",
:description => "Default time (in seconds) to retain log files on the NodeManager",
:type => 'string'
attribute "hops/yarn/log_retain_check",
:description =>"Default time (in seconds) between checks for retained log files in HDFS.",
:type => 'string'
attribute "hops/yarn/log_roll_interval",
:description =>"Defines how often NMs wake up to upload log files. The minimum rolling-interval-seconds can be set is 3600.",
:type => 'string'
attribute "hops/version",
:description => "Version of hops",
:type => 'string'
attribute "hops/num_replicas",
:description => "Number of replicates for each file stored in HDFS",
:type => 'string'
attribute "hops/container_cleanup_delay_sec",
:description => "The number of seconds container data is retained after termination",
:type => 'string'
attribute "hops/group",
:description => "Group to run hdfs/yarn/yarnapp/mr as",
:type => 'string'
attribute "hops/yarn/user",
:description => "Username to run yarn as",
:type => 'string'
attribute "hops/yarnapp/user",
:description => "Username to run yarn applications as",
:type => 'string'
attribute "hops/mr/user",
:description => "Username to run mapReduce as",
:type => 'string'
attribute "hops/hdfs/user",
:description => "Username to run hdfs as",
:type => 'string'
attribute "hops/hdfs/superuser_group",
:description => "Group for users with hdfs superuser privileges",
:type => 'string'
attribute "hops/hdfs/blocksize",
:description => "HDFS Blocksize (128k, 512m, 1g, etc). Default 128m.",
:type => 'string'
attribute "hops/hdfs/umask",
:description => "Set the default HDFS umask (default: 0022).",
:type => 'string'
# NOTE(review): "hops/format" is declared twice in this file with different
# descriptions; the later declaration wins — consider removing one.
# Fixed the unbalanced quote around 'hdfs namenode -format'.
attribute "hops/format",
:description => "Format HDFS, Run 'hdfs namenode -format'",
:type => 'string'
attribute "hops/tmp_dir",
:description => "The directory in which Hadoop stores temporary data, including container data",
:type => 'string'
attribute "hops/nn/name_dir",
:description => "Directory for NameNode's state",
:type => 'string'
attribute "hops/dn/data_dir",
:description => "The directory in which Hadoop's DataNodes store their data",
:type => 'string'
attribute "hops/data_dir",
:description => "The directory in which Hadoop's main data files are stored (including hops/dn/data_dir)",
:type => 'string'
attribute "hops/dn/data_dir_permissions",
:description => "The permissions for the directory in which Hadoop's DataNodes store their data (default: 700)",
:type => 'string'
attribute "hops/yarn/nodemanager_hb_ms",
:description => "Heartbeat Interval for NodeManager->ResourceManager in ms",
:type => 'string'
attribute "hops/rm/scheduler_class",
:description => "Java Classname for the Yarn scheduler (fifo, capacity, fair)",
:type => 'string'
attribute "hops/user_envs",
:description => "Update the PATH environment variable for the hdfs and yarn users to include hadoop/bin in the PATH ",
:type => 'string'
attribute "hops/logging_level",
:description => "Log levels are: TRACE, DEBUG, INFO, WARN",
:type => 'string'
attribute "hops/nn/heap_size",
:description => "Size of the NameNode heap in MBs",
:type => 'string'
attribute "hops/nn/direct_memory_size",
:description => "Size of the direct memory size for the NameNode in MBs",
:type => 'string'
attribute "hops/yarn/aux_services",
:description => "mapreduce_shuffle, spark_shuffle",
:type => "string"
attribute "hops/capacity/max_ap",
:description => "Maximum number of applications that can be pending and running.",
:type => "string"
attribute "hops/capacity/max_am_percent",
:description => "Maximum percent of resources in the cluster which can be used to run application masters i.e. controls number of concurrent running applications.",
:type => "string"
attribute "hops/capacity/resource_calculator_class",
:description => "The ResourceCalculator implementation to be used to compare Resources in the scheduler. The default i.e. DefaultResourceCalculator only uses Memory while DominantResourceCalculator uses dominant-resource to compare multi-dimensional resources such as Memory, CPU etc.",
:type => "string"
attribute "hops/capacity/root_queues",
:description => "The queues at the root level (root is the root queue).",
:type => "string"
attribute "hops/capacity/default_capacity",
:description => "Default queue target capacity.",
:type => "string"
attribute "hops/capacity/user_limit_factor",
:description => " Default queue user limit a percentage from 0.0 to 1.0.",
:type => "string"
attribute "hops/capacity/default_max_capacity",
:description => "The maximum capacity of the default queue.",
:type => "string"
attribute "hops/capacity/default_state",
:description => "The state of the default queue. State can be one of RUNNING or STOPPED.",
:type => "string"
attribute "hops/capacity/default_acl_submit_applications",
:description => "The ACL of who can submit jobs to the default queue.",
:type => "string"
attribute "hops/capacity/default_acl_administer_queue",
:description => "The ACL of who can administer jobs on the default queue.",
:type => "string"
attribute "hops/capacity/queue_mapping",
:description => "A list of mappings that will be used to assign jobs to queues The syntax for this list is [u|g]:[name]:[queue_name][,next mapping]* Typically this list will be used to map users to queues, for example, u:%user:%user maps all users to queues with the same name as the user.",
:type => "string"
attribute "hops/capacity/queue_mapping_override.enable",
:description => "If a queue mapping is present, will it override the value specified by the user? This can be used by administrators to place jobs in queues that are different than the one specified by the user. The default is false.",
:type => "string"
attribute "kagent/enabled",
:description => "Set to 'true' to enable, 'false' to disable kagent",
:type => "string"
attribute "mysql/dir",
:description => "MySQL installation directory.",
:type => "string"
attribute "install/dir",
:description => "Set to a base directory under which we will install.",
:type => "string"
attribute "install/user",
:description => "User to install the services as",
:type => "string"
attribute "influxdb/graphite/port",
:description => "Port for influxdb graphite connector",
:type => "string"
#GPU settings
attribute "hops/yarn/min_gpus",
:description => "Min number of GPUs per container",
:type => "string"
attribute "hops/yarn/max_gpus",
:description => "Max number of GPUs per container",
:type => "string"
attribute "hops/gpu",
:description => "Are GPUs enabled for YARN? (on this node) Default: false",
:type => "string"
attribute "hops/yarn/gpus",
:description => "'*' default: use all GPUs on the host. Otherwise, specify the number of GPUs per host (e.g., '4'). Otherwise, specify a comma-separated list of minor device-ids: '0,1,2' or '0-3')",
:type => "string"
attribute "hops/yarn/cluster/gpu",
:description => "Is there a machine in the cluster with gpus?",
:type => "string"
#CGroups settings
attribute "hops/yarn/groups",
:description => "",
:type => "string"
attribute "hops/yarn/linux_container_local_user",
:description => "the user running the yarn containers",
:type => "string"
attribute "hops/yarn/linux_container_limit_users",
:description => "",
:type => "string"
attribute "hops/hopsutil_version",
:description => "Version of the hops-util jar file.",
:type => "string"
attribute "hops/hopsexamples_version",
:description => "Version of the hops-spark jar file.",
:type => "string"
attribute "hops/yarn/cgroups",
:description => "'true' to enable cgroups (default), else 'false'",
:type => "string"
attribute "livy/user",
:description => "Livy user that will be a proxy user",
:type => "string"
attribute "hopsworks/user",
:description => "Hopsworks username",
:type => "string"
attribute "hops/jmx/adminPassword",
:description => "Password for JMX admin role",
:type => "string"
# Fixed copy-paste error: description previously said "Hopsworks username",
# which describes a different attribute.
attribute "hopsmonitor/default/private_ips",
:description => "Hopsmonitor private IPs",
:type => "string"
attribute "hopsworks/default/private_ips",
:description => "Hopsworks private ip",
:type => "string"
# Kernel tuning parameters
attribute "hops/kernel/somaxconn",
:description => "net.core.somaxconn value",
:type => "string"
attribute "hops/kernel/swappiness",
:description => "vm.swappiness value",
:type => "string"
attribute "hops/kernel/overcommit_memory",
:description => "vm.overcommit_memory value",
:type => "string"
attribute "hops/kernel/overcommit_ratio",
:description => "vm.overcommit_ratio value",
:type => "string"
[master] Add conf property in metadata
# Chef cookbook metadata header for the Hops Hadoop distribution cookbook.
name "hops"
maintainer "Jim Dowling"
maintainer_email "jdowling@kth.se"
license "Apache v2.0"
description 'Installs/Configures the Hops distribution'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.6.0"
source_url "https://github.com/hopshadoop/hops-hadoop-chef"
#link:<a target='_blank' href='http://%host%:50070/'>Launch the WebUI for the NameNode</a>
recipe "hops::nn", "Installs a HopsFs NameNode"
recipe "hops::ndb", "Installs MySQL Cluster (ndb) dal driver for Hops"
recipe "hops::dn", "Installs a HopsFs DataNode"
#link:<a target='_blank' href='http://%host%:8088/'>Launch the WebUI for the ResourceManager</a>
recipe "hops::rm", "Installs a YARN ResourceManager"
recipe "hops::nm", "Installs a YARN NodeManager"
recipe "hops::jhs", "Installs a MapReduce JobHistory Server for YARN"
recipe "hops::ps", "Installs a WebProxy Server for YARN"
recipe "hops::rt", "Installs a ResourceTracker server for YARN"
# Fixed typos: "libaries" -> "libraries", "progams" -> "programs".
recipe "hops::client", "Installs libraries and configuration files for writing HDFS and YARN programs"
# Fixed typo: "doesnt" -> "doesn't".
recipe "hops::purge", "Removes all hops-hadoop files and dirs and ndb-dal, but doesn't drop hops db from NDB"
recipe "hops::purge-ndb", "Drops hops db from NDB"
depends 'java'
depends 'kagent'
depends 'ndb'
depends 'magic_shell'
depends 'sysctl'
depends 'cmake'
depends 'kzookeeper'
depends 'hopsmonitor'
# Every platform family this cookbook supports.
%w{ ubuntu debian rhel centos }.each do |os|
  supports os
end
attribute "java/jdk_version",
:description => "Jdk version",
:type => 'string'
attribute "java/install_flavor",
:description => "Oracle (default) or openjdk",
:type => 'string'
attribute "java/java_home",
:description => "JAVA_HOME",
:type => 'string'
attribute "hops/dir",
:description => "Base installation directory for HopsFS",
:type => 'string'
attribute "mysql/user",
:description => "Mysql server username",
:type => 'string',
:required => "required"
attribute "mysql/password",
:description => "MySql server Password",
:type => 'string',
:required => "required"
attribute "hops/erasure_coding",
:description => "'true' or 'false' - true to enable erasure-coding replication",
:type => 'string'
attribute "hops/nn/cache",
:description => "'true' or 'false' - true to enable the path cache in the NameNode",
:type => 'string'
attribute "hops/nn/partition_key",
:description => "'true' or 'false' - true to enable the partition key when starting transactions. Distribution-aware transactions.",
:type => 'string'
attribute "hops/yarn/resource_tracker",
:description => "Hadoop Resource Tracker enabled on this nodegroup",
:type => 'string'
attribute "hops/install_db",
:description => "Install hops database and tables in MySQL Cluster ('true' (default) or 'false')",
:type => 'string'
attribute "hops/use_systemd",
:description => "Use systemd startup scripts, default 'false'",
:type => "string"
attribute "hops/format",
:description => "'true' to format HDFS, 'false' to skip formatting",
:type => 'string'
attribute "hops/reformat",
:description => "'true' to re-format HDFS, 'false' to skip re-formatting",
:type => 'string'
attribute "hops/yarn/memory_mbs",
:description => "Apache_Hadoop NodeManager Memory in MB",
:type => 'string'
attribute "hops/yarn/nodemanager_log_dir",
:description => "The directory in which yarn node manager store containers logs",
:type => 'string'
attribute "hops/yarn/nodemanager_recovery_dir",
:description => "The directory in which yarn node manager stores recovery state",
:type => 'string'
attribute "hops/yarn/nodemanager_ha_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_auto_failover_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_recovery_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_size",
:description => "",
:type => "string"
attribute "hops/yarn/nodemanager_rpc_batch_max_duration",
:description => "",
:type => "string"
attribute "hops/yarn/rm_distributed",
:description => "Set to 'true' to enable distributed RMs",
:type => "string"
attribute "hops/yarn/nodemanager_rm_streaming_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/rm_heartbeat",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_base_ms",
:description => "",
:type => "string"
attribute "hops/yarn/client_failover_sleep_max_ms",
:description => "",
:type => "string"
attribute "hops/yarn/quota_enabled",
:description => "",
:type => "string"
attribute "hops/yarn/quota_monitor_interval",
:description => "",
:type => "string"
attribute "hops/yarn/quota_ticks_per_credit",
:description => "",
:type => "string"
attribute "hops/yarn/quota_min_ticks_charge",
:description => "",
:type => "string"
attribute "hops/yarn/quota_checkpoint_nbticks",
:description => "",
:type => "string"
attribute "hops/yarn/nm_heapsize_mbs",
:description => "Increase this value if using the YARN external shuffle service. (default: 1000)",
:type => 'string'
attribute "hops/yarn/rm_heapsize_mbs",
:description => "Resource manager heapsize. (default: 1000)",
:type => 'string'
attribute "hops/yarn/container_executor",
:description => "Container executor class",
:type => 'string'
attribute "hops/trash/interval",
:description => "How long in minutes trash survives in /user/<glassfish>/.Trash/<interval-bucket>/...",
:type => "string"
attribute "hops/trash/checkpoint/interval",
:description => "How long in minutes until a new directory bucket is created in /user/<glassfish>/.Trash with a timestamp. ",
:type => "string"
attribute "hops/nn/private_ips",
:description => "Set ip addresses",
:type => "array"
attribute "hops/nn/public_ips",
:description => "Set ip addresses",
:type => "array"
attribute "hops/rm/private_ips",
:description => "Set ip addresses",
:type => "array"
attribute "hops/rm/public_ips",
:description => "Set ip addresses",
:type => "array"
# Needed to find the jar file for yarn-spark-shuffle (typo fixed: was
# "yan-spark-shuffle") — the Spark external shuffle service jar is
# versioned, so the cookbook needs the Spark version to locate it.
attribute "hadoop_spark/version",
:description => "Spark version",
:type => 'string'
attribute "hops/url/primary",
:description => "Primary download url of hops distribution",
:type => 'string'
attribute "hops/url/secondary",
:description => "Secondary download url of hops distribution",
:type => 'string'
attribute "hops/server/threadpool",
:description => "Number of threads in RPC server reading from socket",
:type => 'string'
attribute "hops/tls/enabled",
:description => "'true' will enable RPC TLS and 'false' will disable it",
:type => 'string'
attribute "hops/tls/certs_actor_class",
:description => "Actor class to perform X509 requests to Hopsworks",
:type => 'string'
# Safety margin used when deciding whether an X509 certificate is due for
# renewal. Fixed typos in description: "substract fro" -> "subtract from".
attribute "hops/tls/certs_expiration_safety_period",
:description => "Time to subtract from X509 expiration time for renewal",
:type => 'string'
attribute "hops/tls/certs_revocation_monitor_interval",
:description => "Period to check for stale X509 certificates that should be revoked",
:type => 'string'
attribute "hops/tls/crl_enabled",
:description => "Enable CRL validation when RPC TLS is enabled",
:type => 'string'
attribute "hops/tls/crl_fetcher_class",
:description => "Canonical name of the CRL fetcher class",
:type => 'string'
attribute "hops/tls/crl_input_uri",
:description => "Location where the CRL will be fetched from",
:type => 'string'
attribute "hops/tls/crl_output_file",
:description => "Location where the CRL will be stored",
:type => 'string'
attribute "hops/tls/crl_fetcher_interval",
:description => "Interval for the CRL fetcher service, suffix can be m/h/d",
:type => 'string'
# Fixed typo in description: "Tranfer" -> "Transfer".
attribute "hops/encrypt_data_transfer/enabled",
:description => "Enable encryption for Data Transfer Protocol of DataNodes",
:type => 'string'
attribute "hops/encrypt_data_transfer/algorithm",
:description => "Encryption algorithm, 3des or rc4",
:type => 'string'
attribute "hops/yarn/vcores",
:description => "Hops NodeManager Number of Virtual Cores",
:type => 'string'
attribute "hops/yarn/min_vcores",
:description => "Hadoop NodeManager Minimum Virtual Cores per container",
:type => 'string'
attribute "hops/yarn/max_vcores",
:description => "Hadoop NodeManager Maximum Virtual Cores per container",
:type => 'string'
attribute "hops/yarn/log_retain_secs",
:description => "Default time (in seconds) to retain log files on the NodeManager",
:type => 'string'
attribute "hops/yarn/log_retain_check",
:description =>"Default time (in seconds) between checks for retained log files in HDFS.",
:type => 'string'
attribute "hops/yarn/log_roll_interval",
:description =>"Defines how often NMs wake up to upload log files. The minimum rolling-interval-seconds can be set is 3600.",
:type => 'string'
attribute "hops/version",
:description => "Version of hops",
:type => 'string'
attribute "hops/num_replicas",
:description => "Number of replicates for each file stored in HDFS",
:type => 'string'
attribute "hops/container_cleanup_delay_sec",
:description => "The number of seconds container data is retained after termination",
:type => 'string'
attribute "hops/group",
:description => "Group to run hdfs/yarn/yarnapp/mr as",
:type => 'string'
attribute "hops/yarn/user",
:description => "Username to run yarn as",
:type => 'string'
attribute "hops/yarnapp/user",
:description => "Username to run yarn applications as",
:type => 'string'
attribute "hops/mr/user",
:description => "Username to run mapReduce as",
:type => 'string'
attribute "hops/hdfs/user",
:description => "Username to run hdfs as",
:type => 'string'
attribute "hops/hdfs/superuser_group",
:description => "Group for users with hdfs superuser privileges",
:type => 'string'
attribute "hops/hdfs/blocksize",
:description => "HDFS Blocksize (128k, 512m, 1g, etc). Default 128m.",
:type => 'string'
attribute "hops/hdfs/umask",
:description => "Set the default HDFS umask (default: 0022).",
:type => 'string'
# NOTE(review): "hops/format" is declared twice in this file with different
# descriptions; the later declaration wins — consider removing one.
# Fixed the unbalanced quote around 'hdfs namenode -format'.
attribute "hops/format",
:description => "Format HDFS, Run 'hdfs namenode -format'",
:type => 'string'
attribute "hops/tmp_dir",
:description => "The directory in which Hadoop stores temporary data, including container data",
:type => 'string'
attribute "hops/nn/name_dir",
:description => "Directory for NameNode's state",
:type => 'string'
attribute "hops/dn/data_dir",
:description => "The directory in which Hadoop's DataNodes store their data",
:type => 'string'
attribute "hops/data_dir",
:description => "The directory in which Hadoop's main data files are stored (including hops/dn/data_dir)",
:type => 'string'
attribute "hops/dn/data_dir_permissions",
:description => "The permissions for the directory in which Hadoop's DataNodes store their data (default: 700)",
:type => 'string'
attribute "hops/yarn/nodemanager_hb_ms",
:description => "Heartbeat Interval for NodeManager->ResourceManager in ms",
:type => 'string'
attribute "hops/rm/scheduler_class",
:description => "Java Classname for the Yarn scheduler (fifo, capacity, fair)",
:type => 'string'
attribute "hops/user_envs",
:description => "Update the PATH environment variable for the hdfs and yarn users to include hadoop/bin in the PATH ",
:type => 'string'
attribute "hops/logging_level",
:description => "Log levels are: TRACE, DEBUG, INFO, WARN",
:type => 'string'
attribute "hops/nn/heap_size",
:description => "Size of the NameNode heap in MBs",
:type => 'string'
attribute "hops/nn/direct_memory_size",
:description => "Size of the direct memory size for the NameNode in MBs",
:type => 'string'
attribute "hops/yarn/aux_services",
:description => "mapreduce_shuffle, spark_shuffle",
:type => "string"
attribute "hops/capacity/max_ap",
:description => "Maximum number of applications that can be pending and running.",
:type => "string"
attribute "hops/capacity/max_am_percent",
:description => "Maximum percent of resources in the cluster which can be used to run application masters i.e. controls number of concurrent running applications.",
:type => "string"
attribute "hops/capacity/resource_calculator_class",
:description => "The ResourceCalculator implementation to be used to compare Resources in the scheduler. The default i.e. DefaultResourceCalculator only uses Memory while DominantResourceCalculator uses dominant-resource to compare multi-dimensional resources such as Memory, CPU etc.",
:type => "string"
attribute "hops/capacity/root_queues",
:description => "The queues at the root level (root is the root queue).",
:type => "string"
attribute "hops/capacity/default_capacity",
:description => "Default queue target capacity.",
:type => "string"
attribute "hops/capacity/user_limit_factor",
:description => " Default queue user limit a percentage from 0.0 to 1.0.",
:type => "string"
attribute "hops/capacity/default_max_capacity",
:description => "The maximum capacity of the default queue.",
:type => "string"
attribute "hops/capacity/default_state",
:description => "The state of the default queue. State can be one of RUNNING or STOPPED.",
:type => "string"
attribute "hops/capacity/default_acl_submit_applications",
:description => "The ACL of who can submit jobs to the default queue.",
:type => "string"
attribute "hops/capacity/default_acl_administer_queue",
:description => "The ACL of who can administer jobs on the default queue.",
:type => "string"
attribute "hops/capacity/queue_mapping",
:description => "A list of mappings that will be used to assign jobs to queues The syntax for this list is [u|g]:[name]:[queue_name][,next mapping]* Typically this list will be used to map users to queues, for example, u:%user:%user maps all users to queues with the same name as the user.",
:type => "string"
attribute "hops/capacity/queue_mapping_override.enable",
:description => "If a queue mapping is present, will it override the value specified by the user? This can be used by administrators to place jobs in queues that are different than the one specified by the user. The default is false.",
:type => "string"
attribute "kagent/enabled",
:description => "Set to 'true' to enable, 'false' to disable kagent",
:type => "string"
attribute "mysql/dir",
:description => "MySQL installation directory.",
:type => "string"
attribute "install/dir",
:description => "Set to a base directory under which we will install.",
:type => "string"
attribute "install/user",
:description => "User to install the services as",
:type => "string"
attribute "influxdb/graphite/port",
:description => "Port for influxdb graphite connector",
:type => "string"
#GPU settings
attribute "hops/yarn/min_gpus",
:description => "Min number of GPUs per container",
:type => "string"
attribute "hops/yarn/max_gpus",
:description => "Max number of GPUs per container",
:type => "string"
attribute "hops/gpu",
:description => "Are GPUs enabled for YARN? (on this node) Default: false",
:type => "string"
attribute "hops/yarn/gpus",
:description => "'*' default: use all GPUs on the host. Otherwise, specify the number of GPUs per host (e.g., '4'). Otherwise, specify a comma-separated list of minor device-ids: '0,1,2' or '0-3')",
:type => "string"
attribute "hops/yarn/cluster/gpu",
:description => "Is there a machine in the cluster with gpus?",
:type => "string"
#CGroups settings
attribute "hops/yarn/groups",
:description => "",
:type => "string"
attribute "hops/yarn/linux_container_local_user",
:description => "the user running the yarn containers",
:type => "string"
attribute "hops/yarn/linux_container_limit_users",
:description => "",
:type => "string"
attribute "hops/hopsutil_version",
:description => "Version of the hops-util jar file.",
:type => "string"
attribute "hops/hopsexamples_version",
:description => "Version of the hops-spark jar file.",
:type => "string"
attribute "hops/yarn/cgroups",
:description => "'true' to enable cgroups (default), else 'false'",
:type => "string"
attribute "livy/user",
:description => "Livy user that will be a proxy user",
:type => "string"
attribute "hopsworks/user",
:description => "Hopsworks username",
:type => "string"
attribute "hops/jmx/adminPassword",
:description => "Password for JMX admin role",
:type => "string"
# Fixed copy-paste error: description previously said "Hopsworks username",
# which describes a different attribute.
attribute "hopsmonitor/default/private_ips",
:description => "Hopsmonitor private IPs",
:type => "string"
attribute "hopsworks/default/private_ips",
:description => "Hopsworks private ip",
:type => "string"
# Kernel tuning parameters
attribute "hops/kernel/somaxconn",
:description => "net.core.somaxconn value",
:type => "string"
attribute "hops/kernel/swappiness",
:description => "vm.swappiness value",
:type => "string"
attribute "hops/kernel/overcommit_memory",
:description => "vm.overcommit_memory value",
:type => "string"
attribute "hops/kernel/overcommit_ratio",
:description => "vm.overcommit_ratio value",
:type => "string"
|
# Chef cookbook metadata for a standalone Apache Flink deployment.
name 'flink'
maintainer 'Jim Dowling'
maintainer_email 'jdowling@sics.se'
license 'Apache v 2.0'
description 'Installs/Configures Standalone Apache Flink'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.0'

recipe 'install', 'Installs Apache Flink'
#link:<a target='_blank' href='http://%host%:8088/'>Launch the WebUI for the Flink JobManager</a>
recipe 'jobmanager', 'Starts a Flink JobManager in standalone mode'
recipe 'taskmanager', 'Starts a Flink Slave in standalone mode'
recipe 'wordcount', 'Prepares wordcount example using HDFS'

depends 'hadoop'
depends 'kagent'
depends 'ark'

# Supported platform families.
%w[ubuntu debian rhel centos].each { |platform| supports platform }

# Tunables surfaced to the installer; all three are mandatory.
attribute 'flink/mode',
          display_name: 'Run Flink JobManager in one of the following modes: BATCH, STREAMING',
          required: 'required',
          type: 'string'
attribute 'flink/jobmanager/heap_mbs',
          display_name: 'Flink JobManager Heap Size in MB',
          required: 'required',
          type: 'string'
attribute 'flink/taskmanager/heap_mbs',
          display_name: 'Flink TaskManager Heap Size in MB',
          required: 'required',
          type: 'string'
webui port fix
name 'flink'
maintainer "Jim Dowling"
maintainer_email "jdowling@sics.se"
license "Apache v 2.0"
description 'Installs/Configures Standalone Apache Flink'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0"
recipe "install", "Installs Apache Flink"
#link:<a target='_blank' href='http://%host%:9088/'>Launch the WebUI for the Flink JobManager</a>
recipe "jobmanager", "Starts a Flink JobManager in standalone mode"
recipe "taskmanager", "Starts a Flink Slave in standalone mode"
recipe "wordcount", "Prepares wordcount example using HDFS"
depends "hadoop"
depends "kagent"
depends "ark"
%w{ ubuntu debian rhel centos }.each do |os|
supports os
end
attribute "flink/mode",
:display_name => "Run Flink JobManager in one of the following modes: BATCH, STREAMING",
:required => "required",
:type => 'string'
attribute "flink/jobmanager/heap_mbs",
:display_name => "Flink JobManager Heap Size in MB",
:required => "required",
:type => 'string'
attribute "flink/taskmanager/heap_mbs",
:display_name => "Flink TaskManager Heap Size in MB",
:required => "required",
:type => 'string'
|
# frozen_string_literal: true
name 'postgresql'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Installs and configures postgresql for clients or servers'
version '8.0.1'
source_url 'https://github.com/sous-chefs/postgresql'
issues_url 'https://github.com/sous-chefs/postgresql/issues'
chef_version '>= 13.8'
depends 'yum-epel'
depends 'apt'
# Supported platform families — must match Ohai platform names exactly.
# Fixed typo: "scientefic" -> "scientific" (the misspelled name never matched).
%w(
amazon
centos
debian
fedora
oracle
redhat
scientific
ubuntu
).each do |os|
supports os
end
Update metadata for 8.0.2
# frozen_string_literal: true
name 'postgresql'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Installs and configures postgresql for clients or servers'
version '8.0.2'
source_url 'https://github.com/sous-chefs/postgresql'
issues_url 'https://github.com/sous-chefs/postgresql/issues'
chef_version '>= 13.8'
depends 'yum-epel'
depends 'apt'
# Supported platform families — must match Ohai platform names exactly.
# Fixed typo: "scientefic" -> "scientific" (the misspelled name never matched).
%w(
amazon
centos
debian
fedora
oracle
redhat
scientific
ubuntu
).each do |os|
supports os
end
|
name 'flyway-cli'
maintainer 'ClouDesire'
maintainer_email 'dev@cloudesire.com'
license 'Apache 2.0'
description 'Installs and execute flyway cli'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
source_url 'https://github.com/ClouDesire/flyway-cli-cookbook'
version '0.3.1'
supports 'ubuntu'
supports 'windows'
depends 'ubuntu'
depends 'java', '~> 1.17'
depends 'windows', '~> 1.36.1'
Minor fix: added issues_url to metadata.rb
name 'flyway-cli'
maintainer 'ClouDesire'
maintainer_email 'dev@cloudesire.com'
license 'Apache v2.0'
description 'Installs and execute flyway cli'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
source_url 'https://github.com/ClouDesire/flyway-cli-cookbook'
issues_url 'https://github.com/ClouDesire/flyway-cli-cookbook/issues'
version '0.3.1'
supports 'ubuntu'
supports 'windows'
depends 'ubuntu'
depends 'java', '~> 1.17'
depends 'windows', '~> 1.36.1'
|
# frozen_string_literal: true
name 'rabbitmq'
maintainer 'Chef, Inc.'
maintainer_email 'jj@chef.io'
license 'Apache-2.0'
description 'Installs and configures RabbitMQ server'
version '5.6.2'
recipe 'rabbitmq', 'Install and configure RabbitMQ'
recipe 'rabbitmq::mgmt_console', 'Sets up RabbitMQ management UI'
recipe 'rabbitmq::systemd_limits', 'Sets up kernel limits (e.g. nofile) for RabbitMQ via systemd'
recipe 'rabbitmq::cluster', 'Set up RabbitMQ clustering.'
recipe 'rabbitmq::plugin_management', 'Manage plugins with node attributes'
recipe 'rabbitmq::virtualhost_management', 'Manage virtualhost with node attributes'
recipe 'rabbitmq::user_management', 'Manage users with node attributes'
issues_url 'https://github.com/rabbitmq/chef-cookbook/issues'
source_url 'https://github.com/rabbitmq/chef-cookbook'
chef_version '>= 12.0'
depends 'erlang'
depends 'yum-epel'
depends 'yum-erlang_solutions'
depends 'dpkg_autostart'
depends 'logrotate'
supports 'centos', '>= 7.0'
supports 'debian', '>= 8.0'
supports 'opensuse'
supports 'opensuseleap'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'smartos'
supports 'suse'
supports 'ubuntu', '>= 14.04'
Use my email in metadata.rb
# frozen_string_literal: true
name 'rabbitmq'
maintainer 'Chef, Inc. and contributors'
maintainer_email 'mklishin@pivotal.io'
license 'Apache-2.0'
description 'Installs and configures RabbitMQ server'
version '5.6.2'
recipe 'rabbitmq', 'Install and configure RabbitMQ'
recipe 'rabbitmq::mgmt_console', 'Sets up RabbitMQ management UI'
recipe 'rabbitmq::systemd_limits', 'Sets up kernel limits (e.g. nofile) for RabbitMQ via systemd'
recipe 'rabbitmq::cluster', 'Set up RabbitMQ clustering.'
recipe 'rabbitmq::plugin_management', 'Manage plugins with node attributes'
recipe 'rabbitmq::virtualhost_management', 'Manage virtualhost with node attributes'
recipe 'rabbitmq::user_management', 'Manage users with node attributes'
issues_url 'https://github.com/rabbitmq/chef-cookbook/issues'
source_url 'https://github.com/rabbitmq/chef-cookbook'
chef_version '>= 12.0'
depends 'erlang'
depends 'yum-epel'
depends 'yum-erlang_solutions'
depends 'dpkg_autostart'
depends 'logrotate'
supports 'centos', '>= 7.0'
supports 'debian', '>= 8.0'
supports 'opensuse'
supports 'opensuseleap'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'smartos'
supports 'suse'
supports 'ubuntu', '>= 14.04'
|
name 'magic'
version File.read(File.join(File.dirname(__FILE__), 'VERSION')).strip
description 'Cookbook helpers and other magical things'
long_description 'Cookbook helpers and other magical things'
maintainer 'Sean Clemmer'
maintainer_email 'sczizzo@gmail.com'
license 'ISC'
Rename cookbook to et_magic cookbook
name 'et_magic'
version File.read(File.join(File.dirname(__FILE__), 'VERSION')).strip
description 'Cookbook helpers and other magical things'
long_description 'Cookbook helpers and other magical things'
maintainer 'Sean Clemmer'
maintainer_email 'sczizzo@gmail.com'
license 'ISC'
|
name 'time_zone'
maintainer 'Thomas Bishop'
maintainer_email 'thomas_bishop@intuit.com'
license 'All rights reserved'
description 'Installs/Configures time_zone'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
recipe 'time_zone', 'Configures the OS time zone'
%w{redhat centos}.each do |os|
supports os
end
tweak license and description in metadata
name 'time_zone'
maintainer 'Thomas Bishop'
maintainer_email 'thomas_bishop@intuit.com'
license 'Apache 2.0'
description 'Configures time_zone'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
recipe 'time_zone', 'Configures the OS time zone'
%w{redhat centos}.each do |os|
supports os
end
|
name 'aptly'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Installs/Configures aptly'
issues_url 'https://github.com/sous-chefs/aptly/issues'
source_url 'https://github.com/sous-chefs/aptly'
version '2.1.1'
chef_version '>= 13.0'
supports 'ubuntu'
supports 'debian'
Release 2.1.2
name 'aptly'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Installs/Configures aptly'
issues_url 'https://github.com/sous-chefs/aptly/issues'
source_url 'https://github.com/sous-chefs/aptly'
version '2.1.2'
chef_version '>= 13.0'
supports 'ubuntu'
supports 'debian'
|
name "cerner_kafka"
maintainer "Cerner Corp."
maintainer_email "Bryan.Baugher@cerner.com"
license "Apache 2.0"
description "Installs and configures a Kafka"
issues_url 'https://github.com/cerner/cerner_kafka/issues'
source_url 'https://github.com/cerner/cerner_kafka'
depends 'java'
depends 'ulimit'
depends 'logrotate'
version '2.3.0'
Bumped cookbook version to 2.4.0 after release
name "cerner_kafka"
maintainer "Cerner Corp."
maintainer_email "Bryan.Baugher@cerner.com"
license "Apache 2.0"
description "Installs and configures a Kafka"
issues_url 'https://github.com/cerner/cerner_kafka/issues'
source_url 'https://github.com/cerner/cerner_kafka'
depends 'java'
depends 'ulimit'
depends 'logrotate'
version '2.4.0'
|
name 'mysql_chef_gem'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Provides the mysql_chef_gem resource'
version '2.0.2'
supports 'amazon'
supports 'redhat'
supports 'centos'
supports 'scientific'
supports 'fedora'
supports 'debian'
supports 'ubuntu'
depends 'build-essential'
depends 'mysql'
depends 'mariadb'
Add supermarket metadata
name 'mysql_chef_gem'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Provides the mysql_chef_gem resource'
version '2.0.2'
supports 'amazon'
supports 'redhat'
supports 'centos'
supports 'scientific'
supports 'fedora'
supports 'debian'
supports 'ubuntu'
depends 'build-essential'
depends 'mysql'
depends 'mariadb'
source_url 'https://github.com/chef-cookbooks/mysql_chef_gem' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/mysql_chef_gem/issues' if respond_to?(:issues_url)
|
name 'cygwin'
maintainer 'Ryan Larson & Ben Jansen'
maintainer_email 'aogail@w007.org'
license 'Apache 2.0'
description 'Installs/Configures cygwin'
long_description 'Installs/Configures cygwin'
version '0.4.5'
supports 'windows'
depends 'windows'
depends 'windows_firewall', '~> 2.0.0'
Bump version
name 'cygwin'
maintainer 'Ryan Larson & Ben Jansen'
maintainer_email 'aogail@w007.org'
license 'Apache 2.0'
description 'Installs/Configures cygwin'
long_description 'Installs/Configures cygwin'
version '0.5.0'
supports 'windows'
depends 'windows'
depends 'windows_firewall', '~> 2.0.0'
|
name 'dsc2'
maintainer 'Dimension Data Cloud Solutions, Inc.'
maintainer_email 'eugene.narciso@itaas.dimensiondata.com'
license 'Apache 2.0'
description 'Installs dsc modules'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.2'
supports 'windows', '>= 6.3'
source_url 'https://github.com/DimensionDataDevOps/cookbook-dsc2' if respond_to?(:source_url)
issues_url 'https://github.com/DimensionDataDevOps/cookbook-dsc2/issues' if respond_to?(:issues_url)
depends 'windows', '~> 1.39.2'
depends 'powershell', '~> 3.2.3'
depends 'ms_dotnet', '~> 2.6.1'
Bump version to 0.1.3
name 'dsc2'
maintainer 'Dimension Data Cloud Solutions, Inc.'
maintainer_email 'eugene.narciso@itaas.dimensiondata.com'
license 'Apache 2.0'
description 'Installs dsc modules'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.3'
supports 'windows', '>= 6.3'
source_url 'https://github.com/DimensionDataDevOps/cookbook-dsc2' if respond_to?(:source_url)
issues_url 'https://github.com/DimensionDataDevOps/cookbook-dsc2/issues' if respond_to?(:issues_url)
depends 'windows', '~> 1.39.2'
depends 'powershell', '~> 3.2.3'
depends 'ms_dotnet', '~> 2.6.1'
|
name 'users'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Creates users from a databag search'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.8.3'
source_url 'https://github.com/chef-cookbooks/users'
issues_url 'https://github.com/chef-cookbooks/users/issues'
recipe 'users', 'Empty recipe for including LWRPs'
recipe 'users::sysadmins', 'Create and manage sysadmin group'
%w( ubuntu debian redhat centos fedora freebsd mac_os_x ).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/users' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/users/issues' if respond_to?(:issues_url)
Remove duplicate metadata
name 'users'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Creates users from a databag search'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.8.3'
recipe 'users', 'Empty recipe for including LWRPs'
recipe 'users::sysadmins', 'Create and manage sysadmin group'
%w( ubuntu debian redhat centos fedora freebsd mac_os_x ).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/users' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/users/issues' if respond_to?(:issues_url)
|
name 'al_agents'
maintainer 'Justin Early'
maintainer_email 'jearly@alertlogic.com'
source_url 'https://github.com/alertlogic/al_agents'
issues_url 'https://github.com/alertlogic/al_agents/issues'
license 'Apache 2.0 License'
description 'Installs/Configures the Alert Logic Agent'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.3.1'
depends 'selinux_policy'
depends 'rsyslog', '= 2.2.0'
depends 'line'
supports 'debian'
supports 'ubuntu'
supports 'redhat'
supports 'centos'
supports 'fedora'
supports 'windows'
bump version
name 'al_agents'
maintainer 'Justin Early'
maintainer_email 'jearly@alertlogic.com'
source_url 'https://github.com/alertlogic/al_agents'
issues_url 'https://github.com/alertlogic/al_agents/issues'
license 'Apache 2.0 License'
description 'Installs/Configures the Alert Logic Agent'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.3.2'
depends 'selinux_policy'
depends 'rsyslog', '= 2.2.0'
depends 'line'
supports 'debian'
supports 'ubuntu'
supports 'redhat'
supports 'centos'
supports 'fedora'
supports 'windows'
|
name 'nagios'
maintainer 'Tim Smith'
maintainer_email 'tsmith84@gmail.com'
license 'Apache 2.0'
description 'Installs and configures Nagios server'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '7.1.7'
recipe 'default', 'Installs Nagios server.'
recipe 'nagios::pagerduty', 'Integrates contacts w/ PagerDuty API'
depends 'apache2', '>= 2.0'
depends 'zap', '>= 0.6.0'
%w( build-essential php nginx nginx_simplecgi yum-epel nrpe ).each do |cb|
depends cb
end
%w( debian ubuntu redhat centos fedora scientific amazon oracle).each do |os|
supports os
end
deploying new version
name 'nagios'
maintainer 'Tim Smith'
maintainer_email 'tsmith84@gmail.com'
license 'Apache 2.0'
description 'Installs and configures Nagios server'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '7.1.8'
recipe 'default', 'Installs Nagios server.'
recipe 'nagios::pagerduty', 'Integrates contacts w/ PagerDuty API'
depends 'apache2', '>= 2.0'
depends 'zap', '>= 0.6.0'
%w( build-essential php nginx nginx_simplecgi yum-epel nrpe ).each do |cb|
depends cb
end
%w( debian ubuntu redhat centos fedora scientific amazon oracle).each do |os|
supports os
end
|
name "haproxy"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs and configures haproxy"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.6.6"
recipe "haproxy", "Installs and configures haproxy"
recipe "haproxy::app_lb", "Installs and configures haproxy by searching for nodes of a particular role"
%w{ debian ubuntu centos redhat scientific oracle }.each do |os|
supports os
end
depends "cpu", ">= 0.2.0"
depends "build-essential"
source_url 'https://github.com/hw-cookbooks/haproxy' if respond_to?(:source_url)
issues_url 'https://github.com/hw-cookbooks/haproxy/issues' if respond_to?(:issues_url)
chef_version '>= 11.0' if respond_to?(:chef_version)
Chef does not maintain this cookbook anymore
Signed-off-by: Tim Smith <764ef62106582a09ed09dfa0b6bff7c05fd7d1e4@chef.io>
name "haproxy"
maintainer 'Heavy Water Software Inc.'
maintainer_email 'ops@hw-ops.com'
license "Apache 2.0"
description "Installs and configures haproxy"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.6.6"
recipe "haproxy", "Installs and configures haproxy"
recipe "haproxy::app_lb", "Installs and configures haproxy by searching for nodes of a particular role"
%w{ debian ubuntu centos redhat scientific oracle }.each do |os|
supports os
end
depends "cpu", ">= 0.2.0"
depends "build-essential"
source_url 'https://github.com/hw-cookbooks/haproxy' if respond_to?(:source_url)
issues_url 'https://github.com/hw-cookbooks/haproxy/issues' if respond_to?(:issues_url)
chef_version '>= 11.0' if respond_to?(:chef_version)
|
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.1"
recipe "yum", "Empty recipe."
recipe "yum::yum", "Manages yum configuration"
%w{ redhat centos scientific amazon }.each do |os|
supports os, ">= 5.0"
end
attribute "yum/exclude",
:display_name => "yum.conf exclude",
:description => "List of packages to exclude from updates or installs. This should be an array. Shell globs using wildcards (eg. * and ?) are allowed.",
:required => "optional"
attribute "yum/installonlypkgs",
:display_name => "yum.conf installonlypkgs",
:description => "List of packages that should only ever be installed, never updated. Kernels in particular fall into this category. Defaults to kernel, kernel-smp, kernel-bigmem, kernel-enterprise, kernel-debug, kernel-unsupported.",
:required => "optional"
attribute "yum/proxy",
:display_name => "yum.conf proxy",
:description => "Set the http URL for proxy to use in yum.conf",
:required => "optional"
attribute "yum/proxy_username",
:display_name => "yum.conf proxy_username",
:description => "Set the proxy_username to use for yum.conf",
:required => "optional"
attribute "yum/proxy_password",
:display_name => "yum.conf proxy_password",
:description => "Set the proxy_password to use for yum.conf",
:required => "optional"
add name attribute to metadata
name "yum"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.1"
recipe "yum", "Empty recipe."
recipe "yum::yum", "Manages yum configuration"
%w{ redhat centos scientific amazon }.each do |os|
supports os, ">= 5.0"
end
attribute "yum/exclude",
:display_name => "yum.conf exclude",
:description => "List of packages to exclude from updates or installs. This should be an array. Shell globs using wildcards (eg. * and ?) are allowed.",
:required => "optional"
attribute "yum/installonlypkgs",
:display_name => "yum.conf installonlypkgs",
:description => "List of packages that should only ever be installed, never updated. Kernels in particular fall into this category. Defaults to kernel, kernel-smp, kernel-bigmem, kernel-enterprise, kernel-debug, kernel-unsupported.",
:required => "optional"
attribute "yum/proxy",
:display_name => "yum.conf proxy",
:description => "Set the http URL for proxy to use in yum.conf",
:required => "optional"
attribute "yum/proxy_username",
:display_name => "yum.conf proxy_username",
:description => "Set the proxy_username to use for yum.conf",
:required => "optional"
attribute "yum/proxy_password",
:display_name => "yum.conf proxy_password",
:description => "Set the proxy_password to use for yum.conf",
:required => "optional"
|
name "lang_java"
maintainer "RIYIC"
maintainer_email "info@riyic.com"
license "Apache 2.0"
# Fixed grammar and stray padding spaces in the user-facing description.
description "This cookbook installs a Java JDK/JRE. It installs OpenJDK by default but can also install Oracle."
#long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.0.1"
## Imprescindible en chef 11!!!
depends "java"
%w{debian ubuntu}.each do |os|
supports os
end
recipe "install",
description: "Installs Java JDK",
attributes: [/.+/]
attribute "lang/java/install_flavor",
:display_name => 'JDK flavor',
:description => 'Flavor of JDK you would like to install',
:default => 'openjdk',
:choice => %w{openjdk oracle}
attribute "lang/java/jdk_version",
:display_name => 'JDK version',
:description => 'JDK version to install',
:default => '8',
:choice => %w{6 7 8}
attribute "lang/java/java_home",
:display_name => 'JVM install dir',
:description => 'JVM install dir',
:validations => {:predefined => 'unix_path'}
Bump version
name "lang_java"
maintainer "RIYIC"
maintainer_email "info@riyic.com"
license "Apache 2.0"
# Fixed grammar and stray padding spaces in the user-facing description.
description "This cookbook installs a Java JDK/JRE. It installs OpenJDK by default but can also install Oracle."
#long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1.0"
## Imprescindible en chef 11!!!
depends "java"
%w{debian ubuntu}.each do |os|
supports os
end
recipe "install",
description: "Installs Java JDK",
attributes: [/.+/]
attribute "lang/java/install_flavor",
:display_name => 'JDK flavor',
:description => 'Flavor of JDK you would like to install',
:default => 'openjdk',
:choice => %w{openjdk oracle}
attribute "lang/java/jdk_version",
:display_name => 'JDK version',
:description => 'JDK version to install',
:default => '8',
:choice => %w{6 7 8}
attribute "lang/java/java_home",
:display_name => 'JVM install dir',
:description => 'JVM install dir',
:validations => {:predefined => 'unix_path'}
|
# encoding: UTF-8
name 'system'
maintainer 'Xhost Australia'
maintainer_email 'cookbooks@xhost.com.au'
license 'Apache 2.0'
description 'Installs/Configures system elements such as the hostname and timezone.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.7.0'
recipe 'system::default', "Sets the system's hostname and timezone, updates the system's installed packages."
# Recipe descriptions shown on the Supermarket / knife cookbook show.
# Fixed typos: stray apostrophe in "system's'" and "it's" -> "its".
recipe 'system::timezone', "Sets the system's timezone."
recipe 'system::hostname', "Sets the system's hostname."
recipe 'system::upgrade_packages', "Upgrades the system's installed packages."
recipe 'system::update_package_list', "Updates the system's list of packages in the package manager's cache."
recipe 'system::install_packages', "Installs packages to the system with its native package manager."
recipe 'system::reboot', 'Attempts to gracefully reboot the operating system.'
recipe 'system::shutdown', 'Attempts to gracefully shutdown the operating system.'
%w(ubuntu debian centos fedora redhat arch mac_os_x).each { |os| supports os }
depends 'apt'
depends 'cron'
depends 'hostsfile'
attribute 'system/timezone',
display_name: 'Timezone',
description: 'The system timezone, which must be a valid zoneinfo/tz database entry.',
required: 'optional',
default: 'UTC',
recipes: ['system::timezone', 'system::default'],
choice: [
'Africa/Casablanca',
'America/Bogota',
'America/Buenos_Aires',
'America/Caracas',
'America/La_Paz',
'America/Lima',
'America/Mexico_City',
'Asia/Almaty',
'Asia/Baghdad',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Calcutta',
'Asia/Colombo',
'Asia/Dhaka',
'Asia/Hong_Kong',
'Asia/Jakarta',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kathmandu',
'Asia/Magadan',
'Asia/Muscat',
'Asia/Riyadh',
'Asia/Seoul',
'Asia/Singapore',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Tokyo',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yekaterinburg',
'Atlantic/Azores',
'Atlantic/Cape_Verde',
'Australia/Adelaide',
'Australia/Darwin',
'Australia/Perth',
'Australia/Sydney',
'Brazil/Acre',
'Brazil/DeNoronha',
'Brazil/East',
'Brazil/West',
'Canada/Atlantic',
'Canada/Newfoundland',
'Europe/Brussels',
'Europe/Copenhagen',
'Europe/Kaliningrad',
'Europe/Lisbon',
'Europe/London',
'Europe/Helsinki',
'Europe/Madrid',
'Europe/Moscow',
'Europe/Paris',
'Pacific/Auckland',
'Pacific/Fiji',
'Pacific/Guam',
'Pacific/Kwajalein',
'Pacific/Midway',
'US/Alaska',
'US/Central',
'US/Eastern',
'US/Hawaii',
'US/Mountain',
'US/Pacific',
'US/Samoa',
'GMT',
'UTC',
'localtime']
attribute 'system/short_hostname',
display_name: 'Short Hostname',
description: 'The short hostname that you would like this node to have, e.g. kryten.',
required: 'recommended',
default: 'localhost',
recipes: ['system::hostname', 'system::default']
attribute 'system/domain_name',
display_name: 'Domain Name',
description: 'The domain name that you would like this node to have, e.g. domain.suf. Note: Only set a valid domain name '\
'to satisfy the resolution of a FQDN; use ignore:ignore for no domain name.',
required: 'recommended',
default: 'localdomain',
recipes: ['system::hostname', 'system::default']
attribute 'system/netbios_name',
display_name: 'NetBIOS Name',
description: 'The NetBIOS name to set on the node, default is the value of node/short_hostname upper-cased (OS X only).',
required: 'optional',
recipes: ['system::hostname', 'system::default']
attribute 'system/workgroup',
display_name: 'NetBIOS Workgroup',
description: "The NetBIOS workgroup name to set on the node, default is 'WORKGROUP' (OS X only).",
required: 'optional',
default: 'WORKGROUP',
recipes: ['system::hostname', 'system::default']
attribute 'system/upgrade_packages',
display_name: 'Upgrade Packages',
description: "Whether or not the system::upgrade_packages recipe will physically update the system's installed packages (in compile time).",
required: 'optional',
choice: %w(true false),
recipes: ['system::upgrade_packages']
attribute 'system/packages/install',
display_name: 'Install Packages',
description: 'An array of system packages to install with the package resource in execute phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
attribute 'system/packages/install_compile_time',
display_name: 'Install Packages Compile Phase',
description: 'An array of system packages to install with the package resource in compile phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
# Fixed typo in description string: "permenent" -> "permanent".
attribute 'system/permanent_ip',
display_name: 'Permanent IP Address',
description: 'Whether the system has a permanent IP address (http://www.debian.org/doc/manuals/debian-reference/ch05.en.html#_the_hostname_resolution).',
required: 'optional',
choice: %w(true false),
recipes: ['system::hostname', 'system::default']
Add system/static_hosts attribute to metadata.rb.
# encoding: UTF-8
name 'system'
maintainer 'Xhost Australia'
maintainer_email 'cookbooks@xhost.com.au'
license 'Apache 2.0'
description 'Installs/Configures system elements such as the hostname and timezone.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.7.0'
recipe 'system::default', "Sets the system's hostname and timezone, updates the system's installed packages."
# Recipe descriptions shown on the Supermarket / knife cookbook show.
# Fixed typos: stray apostrophe in "system's'" and "it's" -> "its".
recipe 'system::timezone', "Sets the system's timezone."
recipe 'system::hostname', "Sets the system's hostname."
recipe 'system::upgrade_packages', "Upgrades the system's installed packages."
recipe 'system::update_package_list', "Updates the system's list of packages in the package manager's cache."
recipe 'system::install_packages', "Installs packages to the system with its native package manager."
recipe 'system::reboot', 'Attempts to gracefully reboot the operating system.'
recipe 'system::shutdown', 'Attempts to gracefully shutdown the operating system.'
%w(ubuntu debian centos fedora redhat arch mac_os_x).each { |os| supports os }
depends 'apt'
depends 'cron'
depends 'hostsfile'
attribute 'system/timezone',
display_name: 'Timezone',
description: 'The system timezone, which must be a valid zoneinfo/tz database entry.',
required: 'optional',
default: 'UTC',
recipes: ['system::timezone', 'system::default'],
choice: [
'Africa/Casablanca',
'America/Bogota',
'America/Buenos_Aires',
'America/Caracas',
'America/La_Paz',
'America/Lima',
'America/Mexico_City',
'Asia/Almaty',
'Asia/Baghdad',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Calcutta',
'Asia/Colombo',
'Asia/Dhaka',
'Asia/Hong_Kong',
'Asia/Jakarta',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kathmandu',
'Asia/Magadan',
'Asia/Muscat',
'Asia/Riyadh',
'Asia/Seoul',
'Asia/Singapore',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Tokyo',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yekaterinburg',
'Atlantic/Azores',
'Atlantic/Cape_Verde',
'Australia/Adelaide',
'Australia/Darwin',
'Australia/Perth',
'Australia/Sydney',
'Brazil/Acre',
'Brazil/DeNoronha',
'Brazil/East',
'Brazil/West',
'Canada/Atlantic',
'Canada/Newfoundland',
'Europe/Brussels',
'Europe/Copenhagen',
'Europe/Kaliningrad',
'Europe/Lisbon',
'Europe/London',
'Europe/Helsinki',
'Europe/Madrid',
'Europe/Moscow',
'Europe/Paris',
'Pacific/Auckland',
'Pacific/Fiji',
'Pacific/Guam',
'Pacific/Kwajalein',
'Pacific/Midway',
'US/Alaska',
'US/Central',
'US/Eastern',
'US/Hawaii',
'US/Mountain',
'US/Pacific',
'US/Samoa',
'GMT',
'UTC',
'localtime']
attribute 'system/short_hostname',
display_name: 'Short Hostname',
description: 'The short hostname that you would like this node to have, e.g. kryten.',
required: 'recommended',
default: 'localhost',
recipes: ['system::hostname', 'system::default']
attribute 'system/domain_name',
display_name: 'Domain Name',
description: 'The domain name that you would like this node to have, e.g. domain.suf. Note: Only set a valid domain name '\
'to satisfy the resolution of a FQDN; use ignore:ignore for no domain name.',
required: 'recommended',
default: 'localdomain',
recipes: ['system::hostname', 'system::default']
attribute 'system/netbios_name',
display_name: 'NetBIOS Name',
description: 'The NetBIOS name to set on the node, default is the value of node/short_hostname upper-cased (OS X only).',
required: 'optional',
recipes: ['system::hostname', 'system::default']
attribute 'system/workgroup',
display_name: 'NetBIOS Workgroup',
description: "The NetBIOS workgroup name to set on the node, default is 'WORKGROUP' (OS X only).",
required: 'optional',
default: 'WORKGROUP',
recipes: ['system::hostname', 'system::default']
attribute 'system/upgrade_packages',
display_name: 'Upgrade Packages',
description: "Whether or not the system::upgrade_packages recipe will physically update the system's installed packages (in compile time).",
required: 'optional',
choice: %w(true false),
recipes: ['system::upgrade_packages']
attribute 'system/packages/install',
display_name: 'Install Packages',
description: 'An array of system packages to install with the package resource in execute phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
attribute 'system/packages/install_compile_time',
display_name: 'Install Packages Compile Phase',
description: 'An array of system packages to install with the package resource in compile phase.',
required: 'optional',
type: 'array',
recipes: ['system::install_packages']
# Fixed typo in description string: "permenent" -> "permanent".
attribute 'system/permanent_ip',
display_name: 'Permanent IP Address',
description: 'Whether the system has a permanent IP address (http://www.debian.org/doc/manuals/debian-reference/ch05.en.html#_the_hostname_resolution).',
required: 'optional',
choice: %w(true false),
recipes: ['system::hostname', 'system::default']
# Fixed DSL keyword typo: "desription:" -> "description:" — the misspelled key
# meant this attribute was published with no description at all.
attribute 'system/static_hosts',
display_name: 'Static Hosts',
description: 'A hash of static hosts to add to /etc/hosts.',
required: 'optional',
type: 'hash',
recipes: ['system::hostname', 'system::default']
|
name 'drone'
maintainer 'Justin Campbell'
maintainer_email 'justin@justincampbell.me'
license 'Apache 2.0'
description 'Installs/Configures Drone'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.7.9'
depends 'docker', '>= 2.2.2'
depends 'ssl_certificate'
supports 'ubuntu', '>= 12.04'
Pin docker cookbook version
name 'drone'
maintainer 'Justin Campbell'
maintainer_email 'justin@justincampbell.me'
license 'Apache 2.0'
description 'Installs/Configures Drone'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.7.9'
depends 'docker', '~> 2.2.2'
depends 'ssl_certificate'
supports 'ubuntu', '>= 12.04'
|
name 'exabgp'
maintainer 'Aetrion, LLC.'
maintainer_email 'ops@dnsimple.com'
license 'Apache 2.0'
description 'Installs/Configures exabgp'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version IO.read(File.join(File.dirname(__FILE__), 'VERSION')) rescue "0.0.1"
supports 'ubuntu', '>= 12.04'
depends 'python'
depends 'runit'
Bump version
name 'exabgp'
maintainer 'Aetrion, LLC.'
maintainer_email 'ops@dnsimple.com'
license 'Apache 2.0'
description 'Installs/Configures exabgp'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.1'
supports 'ubuntu', '>= 12.04'
depends 'python'
depends 'runit'
|
name 'habitat'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Habitat related cookbooks for chef-client'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.0.2'
%w(ubuntu debian redhat centos suse scientific oracle amazon).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/habitat' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/habitat/issues' if respond_to?(:issues_url)
bump version
name 'habitat'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Habitat related cookbooks for chef-client'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.0.3'
%w(ubuntu debian redhat centos suse scientific oracle amazon).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/habitat' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/habitat/issues' if respond_to?(:issues_url)
|
name 'national_parks'
maintainer 'The Authors'
maintainer_email 'you@example.com'
license 'all_rights'
description 'Installs/Configures national_parks'
long_description 'Installs/Configures national_parks'
version '0.1.0'
# The `issues_url` points to the location where issues for this cookbook are
# tracked. A `View Issues` link will be displayed on this cookbook's page when
# uploaded to a Supermarket.
#
# issues_url 'https://github.com/<insert_org_here>/national_parks/issues' if respond_to?(:issues_url)
# The `source_url` points to the development reposiory for this cookbook. A
# `View Source` link will be displayed on this cookbook's page when uploaded to
# a Supermarket.
#
# source_url 'https://github.com/<insert_org_here>/national_parks' if respond_to?(:source_url)
Switched from the Amazon repo to the Redhat repo
name 'national_parks'
maintainer 'The Authors'
maintainer_email 'you@example.com'
license 'all_rights'
description 'Installs/Configures national_parks'
long_description 'Installs/Configures national_parks'
version '0.1.1'
# The `issues_url` points to the location where issues for this cookbook are
# tracked. A `View Issues` link will be displayed on this cookbook's page when
# uploaded to a Supermarket.
#
# issues_url 'https://github.com/<insert_org_here>/national_parks/issues' if respond_to?(:issues_url)
# The `source_url` points to the development reposiory for this cookbook. A
# `View Source` link will be displayed on this cookbook's page when uploaded to
# a Supermarket.
#
# source_url 'https://github.com/<insert_org_here>/national_parks' if respond_to?(:source_url)
|
name 'attrbagger'
maintainer 'ModCloth, Inc.'
maintainer_email 'github+attrbagger-cookbook@modcloth.com'
license 'MIT'
description 'Installs/Configures attrbagger'
long_description File.read(File.expand_path('../README.md', __FILE__))
version '0.1.0'
Egad, overdue version bump
name 'attrbagger'
maintainer 'ModCloth, Inc.'
maintainer_email 'github+attrbagger-cookbook@modcloth.com'
license 'MIT'
description 'Installs/Configures attrbagger'
long_description File.read(File.expand_path('../README.md', __FILE__))
version '0.2.0'
|
name 'wordpress'
maintainer 'Mircea Preotu'
maintainer_email 'Mircea Preotu <mircea.preotu@gmail.com>'
license 'Apache 2.0'
description 'Installs/Configures Wordpress'
version '0.0.1'
depends 'mysql'
updated metadata
name "wordpress"
maintainer "Mircea Preotu"
maintainer_email "Mircea Preotu <mircea.preotu@gmail.com>"
license "Apache 2.0"
description "Installs/Configures WordPress"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.0"
recipe "wordpress::default", "Installs and configures WordPress with php-fpm + nginx + mysql stack"
depends 'mysql' |
name 'chef_nginx'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures nginx'
version '2.8.0'
recipe 'nginx', 'Installs nginx package and sets up configuration with Debian apache style with sites-enabled/sites-available'
recipe 'nginx::source', 'Installs nginx from source and sets up configuration with Debian apache style with sites-enabled/sites-available'
depends 'apt'
depends 'bluepill'
depends 'build-essential'
depends 'ohai', '< 4.0'
depends 'runit', '>= 1.6.0'
depends 'yum'
depends 'yum-epel'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'fedora'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'ubuntu'
source_url 'https://github.com/miketheman/nginx' if respond_to?(:source_url)
issues_url 'https://github.com/miketheman/nginx/issues' if respond_to?(:issues_url)
chef_version '>= 11' if respond_to?(:chef_version)
Resolve foodcritic warning in the metadata
Signed-off-by: Tim Smith <764ef62106582a09ed09dfa0b6bff7c05fd7d1e4@chef.io>
name 'chef_nginx'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures nginx'
version '2.8.0'
recipe 'chef_nginx', 'Installs nginx package and sets up configuration with Debian apache style with sites-enabled/sites-available'
recipe 'chef_nginx::source', 'Installs nginx from source and sets up configuration with Debian apache style with sites-enabled/sites-available'
depends 'apt'
depends 'bluepill'
depends 'build-essential'
depends 'ohai', '< 4.0'
depends 'runit', '>= 1.6.0'
depends 'yum'
depends 'yum-epel'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'fedora'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'ubuntu'
source_url 'https://github.com/miketheman/nginx' if respond_to?(:source_url)
issues_url 'https://github.com/miketheman/nginx/issues' if respond_to?(:issues_url)
chef_version '>= 11' if respond_to?(:chef_version)
|
name 'racktables'
maintainer 'Oregon State University'
maintainer_email 'bramwelt@osuosl.org'
license 'Apache 2.0'
description 'Manages deploying Racktables: the datacenter asset manager'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.3.2'
recipe 'racktables::default', 'Install from source, and setup web app'
recipe 'racktables::server', 'Setup the racktables web appliction using apache'
recipe 'racktables::source', 'Install racktables for source tarball'
recipe 'racktables::application', 'Configure the racktables application'
recipe 'racktables::database', 'Setup the database for Racktables'
depends 'apache2'
depends 'database'
depends 'mysql'
Bump to version 0.3.3
name 'racktables'
maintainer 'Oregon State University'
maintainer_email 'bramwelt@osuosl.org'
license 'Apache 2.0'
description 'Manages deploying Racktables: the datacenter asset manager'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.3.3'
recipe 'racktables::default', 'Install from source, and setup web app'
recipe 'racktables::server', 'Setup the racktables web appliction using apache'
recipe 'racktables::source', 'Install racktables for source tarball'
recipe 'racktables::application', 'Configure the racktables application'
recipe 'racktables::database', 'Setup the database for Racktables'
depends 'apache2'
depends 'database'
depends 'mysql'
|
name 'dse'
maintainer 'Daniel Parker'
maintainer_email 'daniel.c.parker@target.com'
license 'Apache 2.0'
description 'Installs/Configures Datastax Enterprise.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '3.0.33'
%w(redhat centos).each do |name|
supports name, '>= 6.4'
end
supports 'ubuntu', '= 14.04'
depends 'java', '~> 1.14'
depends 'yum', '~> 3.5'
depends 'yum-epel', '~> 0.6'
depends 'apt', '~> 2.0'
pin build-essential so travis works.
name 'dse'
maintainer 'Daniel Parker'
maintainer_email 'daniel.c.parker@target.com'
license 'Apache 2.0'
description 'Installs/Configures Datastax Enterprise.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '3.0.33'
%w(redhat centos).each do |name|
supports name, '>= 6.4'
end
supports 'ubuntu', '= 14.04'
depends 'java', '~> 1.14'
depends 'yum', '~> 3.5'
depends 'yum-epel', '~> 0.6'
depends 'apt', '~> 2.0'
depends 'build-essential', '~> 3.0'
|
name "motd"
maintainer "Rob Lyon"
maintainer_email "nosignsoflifehere@gmail.com"
license "Apache 2.0"
description "Installs/Configures motd"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.5"
depends "cron"
%w{ redhat centos }.each do |os|
supports os
end
real version bump
name "motd"
maintainer "Rob Lyon"
maintainer_email "nosignsoflifehere@gmail.com"
license "Apache 2.0"
description "Installs/Configures motd"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.6"
depends "cron"
%w{ redhat centos }.each do |os|
supports os
end
|
name 'ark'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Provides a custom resource for installing runtime artifacts in a predictable fashion'
version '6.0.0'
source_url 'https://github.com/sous-chefs/ark'
issues_url 'https://github.com/sous-chefs/ark/issues'
chef_version '>= 15.3'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'freebsd'
supports 'mac_os_x'
supports 'opensuse'
supports 'opensuseleap'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'smartos'
supports 'suse'
supports 'ubuntu'
supports 'windows'
depends 'seven_zip', '>= 3.1' # for windows os
Update metadata for 6.0.1
name 'ark'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Provides a custom resource for installing runtime artifacts in a predictable fashion'
version '6.0.1'
source_url 'https://github.com/sous-chefs/ark'
issues_url 'https://github.com/sous-chefs/ark/issues'
chef_version '>= 15.3'
supports 'amazon'
supports 'centos'
supports 'debian'
supports 'freebsd'
supports 'mac_os_x'
supports 'opensuse'
supports 'opensuseleap'
supports 'oracle'
supports 'redhat'
supports 'scientific'
supports 'smartos'
supports 'suse'
supports 'ubuntu'
supports 'windows'
depends 'seven_zip', '>= 3.1' # for windows os
|
Metadata information.
name "nginxh5bp"
description "Configure nginx with h5bp configs"
maintainer "Joshua Thornton"
license "Apache 2.0"
version "1.0.0"
|
name 'stunnel'
maintainer 'Sous-Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Provides resources to help install and configure stunnel'
source_url 'https://github.com/sous-chefs/chef-stunnel'
issues_url 'https://github.com/sous-chefs/chef-stunnel/issues'
version '4.1.0'
chef_version '>= 13'
supports 'ubuntu'
supports 'centos'
supports 'fedora'
supports 'suse'
Update metadata for 4.1.1
name 'stunnel'
maintainer 'Sous-Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Provides resources to help install and configure stunnel'
source_url 'https://github.com/sous-chefs/chef-stunnel'
issues_url 'https://github.com/sous-chefs/chef-stunnel/issues'
version '4.1.1'
chef_version '>= 13'
supports 'ubuntu'
supports 'centos'
supports 'fedora'
supports 'suse'
|
name "pdns"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs/Configures pdns"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.2.1"
depends "sqlite"
depends "build-essential"
depends "mysql"
depends "git"
depends "resolvconf"
Version bump to 0.3.0
name "pdns"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs/Configures pdns"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.3.0"
depends "sqlite"
depends "build-essential"
depends "mysql"
depends "git"
depends "resolvconf"
|
name 'cloud_linode'
maintainer 'Logan Koester'
maintainer_email 'logan@logankoester.com'
license 'mit'
description 'Installs/Configures linode'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
source_url 'https://github.com/logankoester/chef-cloud_linode/'
issues_url 'https://github.com/logankoester/chef-cloud_linode/issues'
depends 'ohai', '>= 4.2.2'
version '0.1.0'
Bumps 0.2.0
name 'cloud_linode'
maintainer 'Logan Koester'
maintainer_email 'logan@logankoester.com'
license 'mit'
description 'Installs/Configures linode'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
source_url 'https://github.com/logankoester/chef-cloud_linode/'
issues_url 'https://github.com/logankoester/chef-cloud_linode/issues'
depends 'ohai', '>= 4.2.2'
version '0.2.0'
|
name 'rsyslog'
maintainer 'Opscode, Inc.'
maintainer_email 'cookbooks@opscode.com'
license 'Apache 2.0'
description 'Installs and configures rsyslog'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.9.0'
recipe 'rsyslog', 'Installs rsyslog'
recipe 'rsyslog::client', 'Sets up a client to log to a remote rsyslog server'
recipe 'rsyslog::server', 'Sets up an rsyslog server'
supports 'ubuntu'
supports 'debian', '>= 5.0'
supports 'redhat', '>= 6.0'
attribute 'rsyslog',
:display_name => 'Rsyslog',
:description => 'Hash of Rsyslog attributes',
:type => 'hash'
attribute 'rsyslog/log_dir',
:display_name => 'Rsyslog Log Directory',
:description => 'Filesystem location of logs from clients',
:default => '/srv/rsyslog'
attribute 'rsyslog/server',
:display_name => 'Rsyslog Server?',
:description => 'Is this node an rsyslog server?',
:default => 'false'
attribute 'rsyslog/server_ip',
:display_name => 'Rsyslog Server IP Address',
:description => 'Set rsyslog server ip address explicitly'
attribute 'rsyslog/server_search',
:display_name => 'Rsyslog Server Search Criteria',
:description => 'Set the search criteria for rsyslog server resolving',
:default => 'role:loghost'
attribute 'rsyslog/protocol',
:display_name => 'Rsyslog Protocol',
:description => 'Set which network protocol to use for rsyslog',
:default => 'tcp'
attribute 'rsyslog/port',
:display_name => 'Rsyslog Port',
:description => 'Port that Rsyslog listens for incoming connections',
:default => '514'
attribute 'rsyslog/remote_logs',
:display_name => 'Remote Logs',
:description => 'Specifies whether redirect all log from client to server',
:default => 'true'
attribute 'rsyslog/user',
:display_name => 'User',
:description => 'The owner of Rsyslog config files and directories',
:default => 'root'
attribute 'rsyslog/group',
:display_name => 'Group',
:description => 'The group-owner of Rsyslog config files and directories',
:default => 'adm'
attribute 'rsyslog/service_name',
:display_name => 'Service name',
:description => 'The name of the service for the platform',
:default => 'rsyslog'
attribute 'rsyslog/max_message_size',
:display_name => 'Maximum Rsyslog message size',
:description => 'Specifies the maximum size of allowable Rsyslog messages',
:default => '2k'
attribute 'rsyslog/preserve_fqdn',
:display_name => 'Preserve FQDN',
:description => 'Specifies if the short or full host name will be used. The default off setting is more compatible.',
:default => 'off'
attribute 'rsyslog/repeated_msg_reduction',
:display_name => 'Filter duplicated messages',
:description => 'Specifies whether or not repeated messages should be reduced.',
:default => 'on'
attribute 'rsyslog/priv_seperation',
:display_name => 'Privilege separation',
:description => 'Whether or not to make use of Rsyslog privilege separation',
:default => 'false'
Version bump to v1.9.1
name 'rsyslog'
maintainer 'Opscode, Inc.'
maintainer_email 'cookbooks@opscode.com'
license 'Apache 2.0'
description 'Installs and configures rsyslog'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.9.1'
recipe 'rsyslog', 'Installs rsyslog'
recipe 'rsyslog::client', 'Sets up a client to log to a remote rsyslog server'
recipe 'rsyslog::server', 'Sets up an rsyslog server'
supports 'ubuntu'
supports 'debian', '>= 5.0'
supports 'redhat', '>= 6.0'
attribute 'rsyslog',
:display_name => 'Rsyslog',
:description => 'Hash of Rsyslog attributes',
:type => 'hash'
attribute 'rsyslog/log_dir',
:display_name => 'Rsyslog Log Directory',
:description => 'Filesystem location of logs from clients',
:default => '/srv/rsyslog'
attribute 'rsyslog/server',
:display_name => 'Rsyslog Server?',
:description => 'Is this node an rsyslog server?',
:default => 'false'
attribute 'rsyslog/server_ip',
:display_name => 'Rsyslog Server IP Address',
:description => 'Set rsyslog server ip address explicitly'
attribute 'rsyslog/server_search',
:display_name => 'Rsyslog Server Search Criteria',
:description => 'Set the search criteria for rsyslog server resolving',
:default => 'role:loghost'
attribute 'rsyslog/protocol',
:display_name => 'Rsyslog Protocol',
:description => 'Set which network protocol to use for rsyslog',
:default => 'tcp'
attribute 'rsyslog/port',
:display_name => 'Rsyslog Port',
:description => 'Port that Rsyslog listens for incoming connections',
:default => '514'
attribute 'rsyslog/remote_logs',
:display_name => 'Remote Logs',
:description => 'Specifies whether redirect all log from client to server',
:default => 'true'
attribute 'rsyslog/user',
:display_name => 'User',
:description => 'The owner of Rsyslog config files and directories',
:default => 'root'
attribute 'rsyslog/group',
:display_name => 'Group',
:description => 'The group-owner of Rsyslog config files and directories',
:default => 'adm'
attribute 'rsyslog/service_name',
:display_name => 'Service name',
:description => 'The name of the service for the platform',
:default => 'rsyslog'
attribute 'rsyslog/max_message_size',
:display_name => 'Maximum Rsyslog message size',
:description => 'Specifies the maximum size of allowable Rsyslog messages',
:default => '2k'
attribute 'rsyslog/preserve_fqdn',
:display_name => 'Preserve FQDN',
:description => 'Specifies if the short or full host name will be used. The default off setting is more compatible.',
:default => 'off'
attribute 'rsyslog/repeated_msg_reduction',
:display_name => 'Filter duplicated messages',
:description => 'Specifies whether or not repeated messages should be reduced.',
:default => 'on'
attribute 'rsyslog/priv_seperation',
:display_name => 'Privilege separation',
:description => 'Whether or not to make use of Rsyslog privilege separation',
:default => 'false'
|
name "php-fpm"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs/Configures php-fpm"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.4.1"
depends "apt"
depends "yum"
%w{ debian ubuntu centos redhat fedora amazon }.each do |os|
supports os
end
Bump version
name "php-fpm"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs/Configures php-fpm"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.4.2"
depends "apt"
depends "yum"
%w{ debian ubuntu centos redhat fedora amazon }.each do |os|
supports os
end
|
name "site-reviewtypo3org"
maintainer "Steffen Gebert / TYPO3 Association"
maintainer_email "steffen.gebert@typo3.org"
license "Apache 2.0"
description "Installs/configures something"
version "0.1.28"
depends "ssh", "= 0.6.6"
depends "ssl_certificates", "= 1.1.3"
depends "t3-gerrit", "= 0.4.26"
depends "t3-chef-vault", "= 1.0.1"
depends "apt", "= 2.7.0"
bump version 0.1.29
name "site-reviewtypo3org"
maintainer "Steffen Gebert / TYPO3 Association"
maintainer_email "steffen.gebert@typo3.org"
license "Apache 2.0"
description "Installs/configures something"
version "0.1.29"
depends "ssh", "= 0.6.6"
depends "ssl_certificates", "= 1.1.3"
depends "t3-gerrit", "= 0.4.26"
depends "t3-chef-vault", "= 1.0.1"
depends "apt", "= 2.7.0"
|
#!/usr/bin/env ruby
require 'rubygems'
require 'bundler/setup'
require 'pivotal-tracker'
require 'yaml'
require 'rugged'
def what_version
output = `agvtool what-version -terse`
output.length > 0 ? output : nil
end
def what_marketing_version
output = `agvtool what-marketing-version -terse`
output.scan(/\=(.+)$/).flatten.first
end
config = YAML::load_file('bin/release-notes-config.yml')
# read the previous commit
last_commit = config["previous-commit"]
# open the repo
repo = Rugged::Repository.new('.git')
# read the latest commit from head
latest_commit = repo.head.target.oid
# Parse out story IDs from the current commit
commit_range = "#{last_commit}..#{latest_commit}"
story_ids = `git log --format=%B #{commit_range}`.scan(/\[\#(\d+)\]/).map(&:first)
PivotalTracker::Client.token = "fc826e4f5dd2622f519e09c62f32b982"
PivotalTracker::Client.use_ssl = true
project = PivotalTracker::Project.find("1214202")
stories = story_ids.uniq.map { |story_id| project.stories.find(story_id) }.compact
build_number = what_version
marketing_version = what_marketing_version
testflight_message = "Ello #{marketing_version} Build #{build_number}"
# Append story notes
if stories.size > 0
testflight_message << <<-EOF
Tracker stories:
#{stories.map { |s| "[#{s.id}] #{s.name}" } * "\n"}
EOF
end
puts testflight_message
release_note_message = "###Ello #{marketing_version} Build #{build_number}"
# Append story notes
if stories.size > 0
release_note_message << <<-EOF
####Tracker stories:
#{stories.map { |s| "* [#{s.id}](#{s.url}) #{s.name}" } * "\n"}
EOF
end
commit_notes = `git log --format=%s #{commit_range}`
formatted_commit_notes = %(#{commit_notes}).split(/\n/).map { |s| "* #{s}" } * "\n"
# Append commit notes
release_note_message << <<-EOF
####Commit notes:
#{formatted_commit_notes}
EOF
open('release-notes.md', 'a') { |f|
f.puts "\n-----------------"
f.puts "#{release_note_message}"
f.puts "-----------------\n"
}
config["previous-commit"] = "#{latest_commit}"
File.open('bin/release-notes-config.yml', 'w') {|f| f.write config.to_yaml }
update release notes generator
#!/usr/bin/env ruby
require 'rubygems'
require 'bundler/setup'
require 'pivotal-tracker'
require 'yaml'
require 'rugged'
def what_version
output = `agvtool what-version -terse`
output.length > 0 ? output : nil
end
def what_marketing_version
output = `agvtool what-marketing-version -terse`
output.scan(/\=(.+)$/).flatten.first
end
config = YAML::load_file('bin/release-notes-config.yml')
# read the previous commit
last_commit = config["previous-commit"]
# open the repo
repo = Rugged::Repository.new('.git')
# read the latest commit from head
latest_commit = repo.head.target.oid
# Parse out story IDs from the current commit
commit_range = "#{last_commit}..#{latest_commit}"
story_ids = `git log --format=%B #{commit_range}`.scan(/\[\#(\d+)\]/).map(&:first)
PivotalTracker::Client.token = "fc826e4f5dd2622f519e09c62f32b982"
PivotalTracker::Client.use_ssl = true
project = PivotalTracker::Project.find("1214202")
stories = story_ids.uniq.map { |story_id| project.stories.find(story_id) }.compact
build_number = what_version
marketing_version = what_marketing_version
testflight_message = "Ello #{marketing_version} Build #{build_number}"
# Append story notes
if stories.size > 0
testflight_message << <<-EOF
Tracker stories:
#{stories.map { |s| "[#{s.id}] #{s.name}" } * "\n"}
EOF
end
puts testflight_message
release_note_message = "###Ello #{marketing_version} Build #{build_number}"
# Append story notes
if stories.size > 0
release_note_message << <<-EOF
####Tracker stories:
#{stories.map { |s| "* [#{s.id}](#{s.url}) #{s.name}" } * "\n"}
EOF
end
crashlytics_release_notes = release_note_message.dup
commit_notes = `git log --format=%s #{commit_range}`
formatted_commit_notes = %(#{commit_notes}).split(/\n/).map { |s| "* #{s}" } * "\n"
# Append commit notes
release_note_message << <<-EOF
####Commit notes:
#{formatted_commit_notes}
EOF
open('release-notes.md', 'a') { |f|
f.puts "\n-----------------"
f.puts "#{release_note_message}"
f.puts "-----------------\n"
}
if ARGV[0] && ARGV[0] == "testers"
open('crashlytics-release-notes.md', 'w') { |f|
f.puts crashlytics_release_notes
}
config["previous-commit"] = "#{latest_commit}"
File.open('bin/release-notes-config.yml', 'w') {|f| f.write config.to_yaml }
end
|
# coding: UTF-8
maintainer 'Cerner Corp'
maintainer_email 'Bryan.Baugher@Cerner.com'
license 'All rights reserved'
description 'Installs/Configures tomcat'
long_description 'This cookbook is meant to install and configure an instance of tomcat'
name 'cerner_tomcat'
supports 'centos'
supports 'ubuntu'
depends 'java'
depends 'ulimit'
depends 'logrotate'
version '2.1.0'
Released 2.1.0 and bumped version to 2.2.0
# coding: UTF-8
maintainer 'Cerner Corp'
maintainer_email 'Bryan.Baugher@Cerner.com'
license 'All rights reserved'
description 'Installs/Configures tomcat'
long_description 'This cookbook is meant to install and configure an instance of tomcat'
name 'cerner_tomcat'
supports 'centos'
supports 'ubuntu'
depends 'java'
depends 'ulimit'
depends 'logrotate'
version '2.2.0'
|
#!/usr/bin/env ruby
require 'bundler/setup'
require 'dotenv'
require 'git'
require 'octokit'
require 'yaml'
# load .env vars
Dotenv.load
class GenerateReleaseNotes
def initialize(repo_name, previous_sha_file, access_token)
return puts 'You must supply a valid github API token' unless access_token.length > 0
@repo_name = repo_name
@pull_request_notes = ['RELEASE NOTES']
# Grab out previous sha
@previous_sha_file = previous_sha_file
@previous_sha_yaml = YAML::load_file(@previous_sha_file)
set_versions
# create github api client
@client = Octokit::Client.new(access_token: access_token)
# create git
@git = Git.open('./')
commits = @git.log(100)
@newest_sha = commits.first.sha
# start creating the notes
scan_commits commits
# update the notes
update_release_notes
end
# grab out build verion info
def set_versions
@git_release_version = `git describe --tags --always --abbrev=0`.strip()
@number_of_commits = `git rev-list master | wc -l | tr -d ' '`.strip()
end
# add PRs from commits
def scan_commits(commits)
commits.each do |commit|
return true if @previous_sha_yaml['previous-sha'] == commit.sha
match = commit.message.match(/pull request #(\d+) from/)
if match
pr_num = match.captures[0]
pr = @client.pull_request @repo_name, pr_num
if pr[:state] == 'closed'
@pull_request_notes << "#### ##{pr_num} - #{pr[:title]}\n#{pr[:body]}".strip()
end
end
end
end
def update_release_notes
# new release notes
release_notes = "### Ello Build #{@number_of_commits}(#{@git_release_version}) #{Time.now.strftime("%B %-d, %Y")}\n\n"
release_notes << <<-EOF
#{@pull_request_notes.count > 1 ? @pull_request_notes.join("\n\n------\n\n") : 'No completed pull requests since last distribution.'}
#{"\n------------\n"}
EOF
# add release_notes to crashlytics-release-notes
`mkdir Build`
File.open('Build/crashlytics-release-notes.md', 'w') { |f| f.write release_notes.gsub(/(#+ )/, "") }
if ARGV[0] && ARGV[0].split(',').include?("testers")
# prepend new contents into release-notes
old = File.open('release-notes.md', 'a')
new = File.open('release-notes.new.md', 'w')
File.open(new, 'w') { |f|
f.puts release_notes
f.puts File.read(old)
}
File.rename(new, old)
# update the latest commit from here
@previous_sha_yaml["previous-sha"] = @newest_sha
File.open(@previous_sha_file, 'w') {|f| f.write @previous_sha_yaml.to_yaml }
else
puts release_notes.gsub(/(#+ )/, "")
end
end
end
GenerateReleaseNotes.new('ello/ello-ios', 'bin/previous-sha.yml', ENV['GITHUB_API_TOKEN'])
Update build release notes to match crashlytics.
* 1.0.0(1895) instead of 1895(1.0.0)
#!/usr/bin/env ruby
require 'bundler/setup'
require 'dotenv'
require 'git'
require 'octokit'
require 'yaml'
# load .env vars
Dotenv.load
class GenerateReleaseNotes
def initialize(repo_name, previous_sha_file, access_token)
return puts 'You must supply a valid github API token' unless access_token.length > 0
@repo_name = repo_name
@pull_request_notes = ['RELEASE NOTES']
# Grab out previous sha
@previous_sha_file = previous_sha_file
@previous_sha_yaml = YAML::load_file(@previous_sha_file)
set_versions
# create github api client
@client = Octokit::Client.new(access_token: access_token)
# create git
@git = Git.open('./')
commits = @git.log(100)
@newest_sha = commits.first.sha
# start creating the notes
scan_commits commits
# update the notes
update_release_notes
end
# grab out build verion info
def set_versions
@git_release_version = `git describe --tags --always --abbrev=0`.strip()
@number_of_commits = `git rev-list master | wc -l | tr -d ' '`.strip()
end
# add PRs from commits
def scan_commits(commits)
commits.each do |commit|
return true if @previous_sha_yaml['previous-sha'] == commit.sha
match = commit.message.match(/pull request #(\d+) from/)
if match
pr_num = match.captures[0]
pr = @client.pull_request @repo_name, pr_num
if pr[:state] == 'closed'
@pull_request_notes << "#### ##{pr_num} - #{pr[:title]}\n#{pr[:body]}".strip()
end
end
end
end
# Formats the collected PR notes and publishes them:
# - always writes a header-stripped copy to Build/crashlytics-release-notes.md;
# - when ARGV[0] contains "testers", prepends the notes to release-notes.md
#   and records the newest SHA so the next run resumes from here;
# - otherwise just prints the notes.
#
# Fixes over the previous revision: the old file was opened in append mode
# (and a second handle opened twice in write mode) purely to read/copy it,
# and none of those handles were ever closed. Paths are now used directly
# and all handles are closed via the block form of File.open.
def update_release_notes
  # new release notes header: "### Ello Build <tag>(<count>) <date>"
  release_notes = "### Ello Build #{@git_release_version}(#{@number_of_commits}) #{Time.now.strftime("%B %-d, %Y")}\n\n"
  release_notes << <<-EOF
#{@pull_request_notes.count > 1 ? @pull_request_notes.join("\n\n------\n\n") : 'No completed pull requests since last distribution.'}
#{"\n------------\n"}
  EOF
  # add release_notes to crashlytics-release-notes (markdown headers stripped)
  Dir.mkdir('Build') unless Dir.exist?('Build')
  File.open('Build/crashlytics-release-notes.md', 'w') { |f| f.write release_notes.gsub(/(#+ )/, "") }
  if ARGV[0] && ARGV[0].split(',').include?("testers")
    # prepend new contents into release-notes: write the merged file,
    # then atomically swap it into place.
    previous = File.exist?('release-notes.md') ? File.read('release-notes.md') : ''
    File.open('release-notes.new.md', 'w') do |f|
      f.puts release_notes
      f.puts previous
    end
    File.rename('release-notes.new.md', 'release-notes.md')
    # update the latest commit from here so the next run stops at this SHA
    @previous_sha_yaml["previous-sha"] = @newest_sha
    File.open(@previous_sha_file, 'w') { |f| f.write @previous_sha_yaml.to_yaml }
  else
    puts release_notes.gsub(/(#+ )/, "")
  end
end
end
GenerateReleaseNotes.new('ello/ello-ios', 'bin/previous-sha.yml', ENV['GITHUB_API_TOKEN'])
|
name 'ibm-installmgr'
maintainer 'Sainsburys Devops'
maintainer_email 'devops@sainsburys.co.uk'
license 'Apache 2.0'
description 'Installs/Configures IBM Installation Manager'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.2.0'
Updated version and maintainer
name 'ibm-installmgr'
maintainer 'Kidhar Bachan'
maintainer_email 'Kidhar.Bachan@absa.co.za'
license 'Apache 2.0'
description 'Installs/Configures IBM Installation Manager fork of https://github.com/Sainsburys/ibm-installmgr'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.2.1'
|
name 'appsindo'
maintainer 'Appsindo Technology'
maintainer_email 'erwin.saputra@at.co.id'
license 'All rights reserved'
description 'Installs/Configures appsindo'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
recipe "appsindo", "Default"
recipe "appsindo::nginx", "Customize Nginx"
recipe "appsindo::logrotate", "Helper for logrotater"
depends 'ohai'
depends 'build-essential'
depends 'xml'
depends 'apt','~> 2.2'
depends 'php','~> 1.3'
depends 'nginx'
depends 'nodejs'
depends 'mysql'
depends 'redisio'
depends 'npm'
depends 'git'
depends 'cron'
supports 'debian'
supports 'ubuntu'
Fix dependencies
name 'appsindo'
maintainer 'Appsindo Technology'
maintainer_email 'erwin.saputra@at.co.id'
license 'All rights reserved'
description 'Installs/Configures appsindo'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
recipe "appsindo", "Default"
recipe "appsindo::nginx", "Customize Nginx"
recipe "appsindo::logrotate", "Helper for logrotater"
depends 'ohai', '~> 1.0'
depends 'build-essential'
depends 'xml'
depends 'apt','~> 2.2'
depends 'php','~> 1.3'
depends 'nginx'
depends 'nodejs'
depends 'mysql'
depends 'redisio', '~> 1.7.1'
depends 'npm'
depends 'git'
depends 'cron'
supports 'debian'
supports 'ubuntu'
|
name 'etcd'
version '2.2.8'
depends 'ark'
depends 'partial_search', '~> 1.0.6'
depends 'git'
depends 'chef-sugar'
Set some proper metadata.
name 'etcd'
maintainer 'Gametime Operations'
maintainer_email 'operations@gametime.co'
license 'Apache v2.0'
version '2.2.9'
depends 'ark'
depends 'partial_search', '~> 1.0.6'
depends 'git'
depends 'chef-sugar'
|
name 'optoro_redisha'
maintainer 'Optoro'
# Fix: the email was previously passed to `maintainer` a second time
# (silently overwriting the maintainer name) instead of `maintainer_email`.
maintainer_email 'devops@optoro.com'
license 'MIT'
description 'This is a skeleton'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.2.0'
Version bump
name 'optoro_redisha'
maintainer 'Optoro'
# Fix: the email was previously passed to `maintainer` a second time
# (silently overwriting the maintainer name) instead of `maintainer_email`.
maintainer_email 'devops@optoro.com'
license 'MIT'
description 'This is a skeleton'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.2.1'
|
name "rbenv"
maintainer "Riot Games"
maintainer_email "jamie@vialstudios.com"
license "Apache 2.0"
description "Installs and configures rbenv"
version "1.9.0"
recipe "rbenv", "Installs and configures rbenv"
recipe "rbenv::ruby_build", "Installs and configures ruby_build"
recipe "rbenv::ohai_plugin", "Installs an rbenv Ohai plugin to populate automatic_attrs about rbenv and ruby_build"
recipe "rbenv::rbenv_vars", "Installs an rbenv plugin rbenv-vars that lets you set global and project-specific environment variables before spawning Ruby processes"
%w{ centos redhat fedora ubuntu debian amazon oracle}.each do |os|
supports os
end
%w{ git build-essential apt }.each do |cb|
depends cb
end
depends 'ohai', '>= 1.1'
Version 1.10.0 (#12)
* Now supports Chef 14
name "rbenv"
maintainer "Riot Games"
maintainer_email "jamie@vialstudios.com"
license "Apache 2.0"
description "Installs and configures rbenv"
version "1.10.0"
recipe "rbenv", "Installs and configures rbenv"
recipe "rbenv::ruby_build", "Installs and configures ruby_build"
recipe "rbenv::ohai_plugin", "Installs an rbenv Ohai plugin to populate automatic_attrs about rbenv and ruby_build"
recipe "rbenv::rbenv_vars", "Installs an rbenv plugin rbenv-vars that lets you set global and project-specific environment variables before spawning Ruby processes"
%w{ centos redhat fedora ubuntu debian amazon oracle}.each do |os|
supports os
end
%w{ git build-essential apt }.each do |cb|
depends cb
end
depends 'ohai', '>= 1.1'
|
# Copyright:: 2015 Joshua Timterman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name 'vagrant'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Installs Vagrant and provides a vagrant_plugin resource for installing Vagrant plugins.'
source_url 'https://github.com/sous-chefs/vagrant'
issues_url 'https://github.com/sous-chefs/vagrant/issues'
chef_version '>= 15.3'
version '3.0.0'
supports 'debian'
supports 'ubuntu'
supports 'redhat'
supports 'centos'
supports 'windows'
supports 'mac_os_x'
Update metadata for 3.0.1
# Copyright:: 2015 Joshua Timterman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name 'vagrant'
maintainer 'Sous Chefs'
maintainer_email 'help@sous-chefs.org'
license 'Apache-2.0'
description 'Installs Vagrant and provides a vagrant_plugin resource for installing Vagrant plugins.'
source_url 'https://github.com/sous-chefs/vagrant'
issues_url 'https://github.com/sous-chefs/vagrant/issues'
chef_version '>= 15.3'
version '3.0.1'
supports 'debian'
supports 'ubuntu'
supports 'redhat'
supports 'centos'
supports 'windows'
supports 'mac_os_x'
|
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs/Configures nexus"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.10.1"
%w{ ubuntu centos }.each do |os|
supports os
end
depends "ark"
depends "java"
depends "nginx"
depends "bluepill"
updating version and maintainers
maintainer "Riot Games"
maintainer_email "kallan@riotgames.com"
license "Apache 2.0"
description "Installs/Configures nexus"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.11.0"
%w{ ubuntu centos }.each do |os|
supports os
end
depends "ark"
depends "java"
depends "nginx"
depends "bluepill" |
name 'pulledpork'
maintainer 'Tim Smith'
maintainer_email 'tsmith84@gmail.com'
license 'Apache 2.0'
description 'Installs and Configures the Snort IDS ruleset updater Pulled Pork'
version '1.1.1'
supports 'debian', '>= 7'
supports 'ubuntu', '>= 10.04'
depends 'ark'
source_url 'https://github.com/tas50/chef-pulledpork' if respond_to?(:source_url)
issues_url 'https://github.com/tas50/chef-pulledpork/issues' if respond_to?(:issues_url)
Add long_description metadata
name 'pulledpork'
maintainer 'Tim Smith'
maintainer_email 'tsmith84@gmail.com'
license 'Apache 2.0'
description 'Installs and Configures the Snort IDS ruleset updater Pulled Pork'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.1.1'
supports 'debian', '>= 7'
supports 'ubuntu', '>= 10.04'
depends 'ark'
source_url 'https://github.com/tas50/chef-pulledpork' if respond_to?(:source_url)
issues_url 'https://github.com/tas50/chef-pulledpork/issues' if respond_to?(:issues_url)
|
name "postgresql"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs and configures postgresql for clients or servers"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "3.2.1"
recipe "postgresql", "Includes postgresql::client"
recipe "postgresql::ruby", "Installs pg gem for Ruby bindings"
recipe "postgresql::client", "Installs postgresql client package(s)"
recipe "postgresql::server", "Installs postgresql server packages, templates"
recipe "postgresql::server_redhat", "Installs postgresql server packages, redhat family style"
recipe "postgresql::server_debian", "Installs postgresql server packages, debian family style"
%w{ubuntu debian fedora suse amazon}.each do |os|
supports os
end
%w{redhat centos scientific oracle}.each do |el|
supports el, ">= 6.0"
end
depends "apt"
depends "build-essential"
depends "openssl"
Bump the fix version
name "postgresql"
maintainer "Opscode, Inc."
maintainer_email "cookbooks@opscode.com"
license "Apache 2.0"
description "Installs and configures postgresql for clients or servers"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "3.2.2"
recipe "postgresql", "Includes postgresql::client"
recipe "postgresql::ruby", "Installs pg gem for Ruby bindings"
recipe "postgresql::client", "Installs postgresql client package(s)"
recipe "postgresql::server", "Installs postgresql server packages, templates"
recipe "postgresql::server_redhat", "Installs postgresql server packages, redhat family style"
recipe "postgresql::server_debian", "Installs postgresql server packages, debian family style"
%w{ubuntu debian fedora suse amazon}.each do |os|
supports os
end
%w{redhat centos scientific oracle}.each do |el|
supports el, ">= 6.0"
end
depends "apt"
depends "build-essential"
depends "openssl"
|
name 'lamp'
maintainer 'Sergiu Ionescu'
maintainer_email 'sergiu.ionescu@gmail.com'
license 'Apache 2.0'
description 'Installs/Configures lamp'
long_description 'Installs/Configures lamp with Berkshelf and Vagrant support'
version '0.1.3'
depends 'mysql', '= 3.0.4'
depends 'apt', '~> 2.6.0'
depends 'apache2', '~> 2.0.0'
depends 'php', '~> 1.4.6'
depends 'vim', '~> 1.1.2'
depends 'sendmail', '0.1.0'
Updating cookbook version
name 'lamp'
maintainer 'Sergiu Ionescu'
maintainer_email 'sergiu.ionescu@gmail.com'
license 'Apache 2.0'
description 'Installs/Configures lamp'
long_description 'Installs/Configures lamp with Berkshelf and Vagrant support'
version '0.1.4'
depends 'mysql', '= 3.0.4'
depends 'apt', '~> 2.6.0'
depends 'apache2', '~> 2.0.0'
depends 'php', '~> 1.4.6'
depends 'vim', '~> 1.1.2'
depends 'sendmail', '0.1.0'
|
FactoryGirl.define do
  # Basic :page factory with derived SEO metadata.
  factory :page do
    title "Just a page"
    # NOTE(review): nav_title is not defined in this factory -- presumably
    # a model attribute/default evaluated lazily; confirm against the model.
    meta_title { nav_title }
    meta_description { "Nothing too cool here except the title: #{title}." }
    meta_keywords { "just, something, in, a, list, #{title.downcase}" }
  end
end
removed fixtures.rb
|
class HaskellLanguageServer < Formula
desc "Integration point for ghcide and haskell-ide-engine. One IDE to rule them all"
homepage "https://github.com/haskell/haskell-language-server"
url "https://github.com/haskell/haskell-language-server/archive/1.4.0.tar.gz"
sha256 "c5d7dbf7fae9aa3ed2c1184b49e82d8ac623ca786494ef6602cfe11735d28db0"
license "Apache-2.0"
head "https://github.com/haskell/haskell-language-server.git"
# we need :github_latest here because otherwise
# livecheck picks up spurious non-release tags
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "39c4925f9ddd98833fd0566badd7a66f4053e53b3bf14a0ce51b48e348ae431b"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "5fec58d7c3ab8a84979a637f41d0aba636a7a024dfd96582ab2e5ac9374c15e9"
sha256 cellar: :any_skip_relocation, big_sur: "5e3427fde42364771cbfab41b4eeb71889ac3a41737b6f54764fee1ec64c3e60"
sha256 cellar: :any_skip_relocation, catalina: "ba30e3d1544596f125755302d973c5c38975253da11018ad22e51722f32b3316"
sha256 cellar: :any_skip_relocation, mojave: "1eb7824102bc6234c8faf8523e50955f819403f3baf01837c72f79d4393536f1"
end
depends_on "cabal-install" => [:build, :test]
depends_on "ghc" => [:build, :test]
if Hardware::CPU.intel?
depends_on "ghc@8.6" => [:build, :test]
depends_on "ghc@8.8" => [:build, :test]
end
def ghcs
deps.map(&:to_formula)
.select { |f| f.name.match? "ghc" }
.sort_by(&:version)
end
def install
system "cabal", "v2-update"
newest_ghc = ghcs.max_by(&:version)
ghcs.each do |ghc|
system "cabal", "v2-install", "-w", ghc.bin/"ghc", *std_cabal_v2_args
hls = "haskell-language-server"
bin.install bin/hls => "#{hls}-#{ghc.version}"
bin.install_symlink "#{hls}-#{ghc.version}" => "#{hls}-#{ghc.version.major_minor}"
rm bin/"#{hls}-wrapper" unless ghc == newest_ghc
end
end
def caveats
ghc_versions = ghcs.map(&:version).map(&:to_s).join(", ")
<<~EOS
#{name} is built for GHC versions #{ghc_versions}.
You need to provide your own GHC or install one with
brew install ghc
EOS
end
test do
valid_hs = testpath/"valid.hs"
valid_hs.write <<~EOS
f :: Int -> Int
f x = x + 1
EOS
invalid_hs = testpath/"invalid.hs"
invalid_hs.write <<~EOS
f :: Int -> Int
EOS
ghcs.each do |ghc|
with_env(PATH: "#{ghc.bin}:#{ENV["PATH"]}") do
assert_match "Completed (1 file worked, 1 file failed)",
shell_output("#{bin}/haskell-language-server-#{ghc.version.major_minor} #{testpath}/*.hs 2>&1", 1)
end
end
end
end
haskell-language-server: update 1.4.0 bottle.
class HaskellLanguageServer < Formula
desc "Integration point for ghcide and haskell-ide-engine. One IDE to rule them all"
homepage "https://github.com/haskell/haskell-language-server"
url "https://github.com/haskell/haskell-language-server/archive/1.4.0.tar.gz"
sha256 "c5d7dbf7fae9aa3ed2c1184b49e82d8ac623ca786494ef6602cfe11735d28db0"
license "Apache-2.0"
head "https://github.com/haskell/haskell-language-server.git"
# we need :github_latest here because otherwise
# livecheck picks up spurious non-release tags
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "39c4925f9ddd98833fd0566badd7a66f4053e53b3bf14a0ce51b48e348ae431b"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "5fec58d7c3ab8a84979a637f41d0aba636a7a024dfd96582ab2e5ac9374c15e9"
sha256 cellar: :any_skip_relocation, monterey: "d3571c62387650e4b79b2ad558864dace440dfe581d96cfbf08f7fe44c6667bd"
sha256 cellar: :any_skip_relocation, big_sur: "5e3427fde42364771cbfab41b4eeb71889ac3a41737b6f54764fee1ec64c3e60"
sha256 cellar: :any_skip_relocation, catalina: "ba30e3d1544596f125755302d973c5c38975253da11018ad22e51722f32b3316"
sha256 cellar: :any_skip_relocation, mojave: "1eb7824102bc6234c8faf8523e50955f819403f3baf01837c72f79d4393536f1"
end
depends_on "cabal-install" => [:build, :test]
depends_on "ghc" => [:build, :test]
if Hardware::CPU.intel?
depends_on "ghc@8.6" => [:build, :test]
depends_on "ghc@8.8" => [:build, :test]
end
def ghcs
deps.map(&:to_formula)
.select { |f| f.name.match? "ghc" }
.sort_by(&:version)
end
def install
system "cabal", "v2-update"
newest_ghc = ghcs.max_by(&:version)
ghcs.each do |ghc|
system "cabal", "v2-install", "-w", ghc.bin/"ghc", *std_cabal_v2_args
hls = "haskell-language-server"
bin.install bin/hls => "#{hls}-#{ghc.version}"
bin.install_symlink "#{hls}-#{ghc.version}" => "#{hls}-#{ghc.version.major_minor}"
rm bin/"#{hls}-wrapper" unless ghc == newest_ghc
end
end
def caveats
ghc_versions = ghcs.map(&:version).map(&:to_s).join(", ")
<<~EOS
#{name} is built for GHC versions #{ghc_versions}.
You need to provide your own GHC or install one with
brew install ghc
EOS
end
test do
valid_hs = testpath/"valid.hs"
valid_hs.write <<~EOS
f :: Int -> Int
f x = x + 1
EOS
invalid_hs = testpath/"invalid.hs"
invalid_hs.write <<~EOS
f :: Int -> Int
EOS
ghcs.each do |ghc|
with_env(PATH: "#{ghc.bin}:#{ENV["PATH"]}") do
assert_match "Completed (1 file worked, 1 file failed)",
shell_output("#{bin}/haskell-language-server-#{ghc.version.major_minor} #{testpath}/*.hs 2>&1", 1)
end
end
end
end
|
#!/usr/bin/ruby
require 'pp'
require 'rubygems'
require 'EC2'
ACCESS_KEY_ID = 'jfontan'
#SECRET_ACCESS_KEY = 'opennebula'
SECRET_ACCESS_KEY = '4478db59d30855454ece114e8ccfa5563d21c9bd'
SERVER = '127.0.0.1'
PORT = 4567
base=EC2::Base.new(
:access_key_id => ACCESS_KEY_ID,
:secret_access_key => SECRET_ACCESS_KEY,
:server => SERVER,
:port => PORT,
:use_ssl => false
)
#pp base.describe_images
#pp base.register_image(
# :image_location => 'eco.rb'
#)
#pp base.run_instances(
# :image_id => "b8329b60-4227-012c-da6e-0019e333ebc5"
#)
pp base.describe_instances
upload_method for the client
git-svn-id: addd40251ba30a5efebfaf2146a7968786ebe177@720 3034c82b-c49b-4eb3-8279-a7acafdc01c0
#!/usr/bin/ruby
require 'pp'
require 'rubygems'
require 'EC2'
ACCESS_KEY_ID = 'jfontan'
#SECRET_ACCESS_KEY = 'opennebula'
SECRET_ACCESS_KEY = '4478db59d30855454ece114e8ccfa5563d21c9bd'
SERVER = '127.0.0.1'
PORT = 4567
base=EC2::Base.new(
:access_key_id => ACCESS_KEY_ID,
:secret_access_key => SECRET_ACCESS_KEY,
:server => SERVER,
:port => PORT,
:use_ssl => false
)
#pp base.describe_images
#pp base.register_image(
# :image_location => 'eco.rb'
#)
#pp base.run_instances(
# :image_id => "b8329b60-4227-012c-da6e-0019e333ebc5"
#)
pp base.describe_instances
# Builds and POSTs a multipart "UploadImage" request signed with the EC2
# query-auth scheme (HMAC-SHA1 over the canonical query string).
#
# base      - EC2::Base connection (currently unused; signing is done via
#             the EC2 module helpers directly).
# file_name - path of the image file to upload.
#
# NOTE(review): API_VERSION is not defined in this script -- confirm it is
# supplied by the EC2 gem, or define it here before running.
def upload_request(base, file_name = nil)
  params = { "Action" => "UploadImage",
             "SignatureVersion" => "2",
             "SignatureMethod" => 'HmacSHA1',
             "AWSAccessKeyId" => ACCESS_KEY_ID,
             "Version" => API_VERSION,
             "Timestamp" => Time.now.getutc.iso8601 }
  #sig = base.get_aws_auth_param(params, SECRET_ACCESS_KEY, SERVER)
  canonical_string = EC2.canonical_string(params, SERVER)
  sig = EC2.encode(SECRET_ACCESS_KEY, canonical_string, false)
  pp sig
  post_fields = []
  params.each { |k, v| post_fields << Curl::PostField.content(k, v) }
  post_fields << Curl::PostField.content("Signature", sig)
  post_fields << Curl::PostField.file("file", file_name)
  # Fix: use the configured endpoint instead of a hard-coded localhost URL,
  # so changing SERVER/PORT above actually affects the upload target.
  c = Curl::Easy.new("http://#{SERVER}:#{PORT}/")
  c.multipart_form_post = true
  c.http_post(*post_fields)
  #pp c.body_str
end
|
#!/usr/bin/env ruby
require 'shellwords'
filename = ARGV[0]
def special_method?(name)
special_methods = %I{nil? send object_id instance_eval}
name =~ /^__/ || special_methods.include?(name)
end
##
# Forwards messages to the object.
class Proxy
instance_methods.each do |method|
undef_method(method) unless special_method?(method)
end
def initialize(obj)
@__obj = obj
end
private
def method_missing(name, *args, &block)
# Forward messages to the wrapped object.
@__obj.send(name, *args, &block)
end
end
class ScriptType
attr_reader :name
class <<
@instances = {}
end
def initialize(name)
@name = name
end
def template
"#!#{path}\n"
end
def path
fail "forgot to define #path for #{name}"
end
def match_extension?
fail "forgot to define #match_extension?() for #{name}"
end
def correct_shebang?(given_path)
given_path == path
end
def self.add(name, &block)
# Create a new sub-type, evaluate the block
cls = Class.new(ScriptType)
cls.module_eval(&block)
instance = cls.new(name)
@instances[instance.name] = instance
instance
end
def self.match(filename)
noisnetxe, rest = filename.reverse.split('.', 2)
if rest.nil?
self[:shell]
else
extension = noisnetxe.reverse
type_by_extension(extension)
end
end
def self.type_by_extension(extension)
types.each_value do |type|
if type.match_extension?(extension)
return type
end
end
fail "Unknown extension: .#{extension}"
end
def self.[](name)
type = types[name]
fail "Undefined script type: #{name}" if type.nil?
type
end
private
def self.types
@instances
end
end
ScriptType.add :shell do
def match_extension?(extension)
extension == 'sh'
end
def path
'/bin/sh'
end
end
ScriptType.add :python do
def match_extension?(extension)
extension == 'py'
end
def path
'/usr/bin/env python'
end
end
ScriptType.add :perl do
def match_extension?(extension)
extension == 'pl'
end
def path
'/usr/bin/env perl'
end
def template
<<-SCRIPT
#!#{path}
use strict;
SCRIPT
end
end
ScriptType.add :erlang do
def match_extension?(extension)
extension == 'erl'
end
def path
'/usr/bin/env escript'
end
end
ScriptType.add :ruby do
def match_extension?(extension)
extension == 'rb'
end
def path
'/usr/bin/env ruby'
end
end
class Editor
@command = nil
@args = []
@subclasses = {}
def edit(filename)
pid = Process.spawn(make_arg_string(filename))
Process.wait(pid)
end
def self.inherited(cls)
name = cls.name.downcase
@subclasses[name] = cls.new
end
def self.command(*args)
if args.empty?
@command
else
name, *args = args
@command = name
@args = args
end
end
def self.from_environment
name = ENV["VISUAL"] || ENV["EDITOR"]
if name.nil?
fail "Cannot determine your editor: both EDITOR and VISUAL are undefined"
else
Editor.from_command(name)
end
end
def self.from_command(name)
fail "Unknown editor: #{name}" unless @subclasses.include?(name)
@subclasses[name]
end
def self.args
@args
end
private
def make_arg_string(filename)
Shellwords.join(self.class.command_with_args(filename))
end
def self.command_with_args(filename)
[command] + args.map do |arg|
if arg == :filename
filename
else
arg
end
end
end
end
class Vim < Editor
command "vim", "+norm G", :filename
end
class Nano < Editor
command "nano", :filename
end
# Definition for Emacs.
# Fix: the command was "nano" (copy-paste from the Nano editor above), so
# setting EDITOR/VISUAL to "emacs" actually launched nano.
class Emacs < Editor
  command "emacs", :filename
end
class Script
attr_reader :filename, :script_type
def self.new(filename, &block)
obj = super(filename)
Proxy.new(obj).instance_eval(&block) if block_given?
obj
end
def initialize(filename)
fail "no filename given" if filename.nil?
@filename = filename
@script_type = ScriptType.match(filename)
@file = nil
end
def ensure_exists
return if File.exist?(filename)
open_for_appending!
end
def set_executable
open_for_appending
current_mode = File.stat(filename).mode
@file.chmod(current_mode | 0111)
end
def inject_template_from_name
@file.write script_type.template
@file.fsync
end
def edit
editor = Editor.from_environment
editor.edit(filename)
end
def correct_shebang?
open_for_appending
@file.seek(0, :SET)
# File has no contents
return false if @file.eof?
# Read the first line; check for shebang.
first_line = @file.gets(?\n)
return false unless first_line.start_with?('#!')
path = first_line[2..-1].chomp
script_type.correct_shebang?(path)
end
private
def open_for_appending
open_for_appending! unless @file.is_a?(File) && !@file.closed?
end
def open_for_appending!
@file = File.new(filename, 'a+t:UTF-8')
self
end
end
Script.new filename do
ensure_exists
set_executable
inject_template_from_name unless correct_shebang?
edit
end
Rubocop.
#!/usr/bin/env ruby
require 'shellwords'
filename = ARGV[0]
# True for method names that must be kept on a proxy/blank-slate object:
# anything prefixed with a double underscore, plus a small reserved set.
def special_method?(name)
  reserved = %I{nil? send object_id instance_eval}
  return true if name =~ /^__/
  reserved.include?(name)
end
# A script type, such as shell, Python, Perl, etc.
#
# Concrete types are registered with ScriptType.add, which builds an
# anonymous subclass from the given block and stores one instance per name.
class ScriptType
  attr_reader :name
  # NOTE(review): `class <<` with no explicit receiver opens the singleton
  # class of the result of `@instances = {}`. The assignment still executes
  # in the class body, so ScriptType's @instances IS initialized, but this
  # was probably meant to be a plain `@instances = {}` (or `class << self`).
  # Confirm intent before touching.
  class <<
    @instances = {}
  end
  def initialize(name)
    @name = name
  end
  # Default file template: just the shebang line.
  def template
    "#!#{path}\n"
  end
  # Subclass responsibility: interpreter path used in the shebang.
  def path
    fail "forgot to define #path for #{name}"
  end
  # Subclass responsibility: does the given extension belong to this type?
  def match_extension?
    fail "forgot to define #match_extension?() for #{name}"
  end
  # True when an existing file's shebang path matches this type's path.
  def correct_shebang?(given_path)
    given_path == path
  end
  # Registers a new type: subclass ScriptType, evaluate the definition
  # block in it, instantiate, and index the instance by name.
  def self.add(name, &block)
    # Create a new sub-type, evaluate the block
    cls = Class.new(ScriptType)
    cls.module_eval(&block)
    instance = cls.new(name)
    @instances[instance.name] = instance
    instance
  end
  # Maps a filename to a type by extension. The filename is reversed before
  # splitting so only the LAST dot counts ("noisnetxe" is "extension"
  # reversed). Files without an extension default to :shell.
  def self.match(filename)
    noisnetxe, rest = filename.reverse.split('.', 2)
    if rest.nil?
      self[:shell]
    else
      extension = noisnetxe.reverse
      type_by_extension(extension)
    end
  end
  # Linear scan of the registry for a type claiming this extension.
  def self.type_by_extension(extension)
    types.each_value do |type|
      return type if type.match_extension?(extension)
    end
    fail "Unknown extension: .#{extension}"
  end
  # Lookup by registered name; fails loudly on unknown names.
  def self.[](name)
    type = types[name]
    fail "Undefined script type: #{name}" if type.nil?
    type
  end
  def self.types
    @instances
  end
end
ScriptType.add :shell do
def match_extension?(extension)
extension == 'sh'
end
def path
'/bin/sh'
end
end
ScriptType.add :python do
def match_extension?(extension)
extension == 'py'
end
def path
'/usr/bin/env python'
end
end
ScriptType.add :perl do
def match_extension?(extension)
extension == 'pl'
end
def path
'/usr/bin/env perl'
end
def template
<<-SCRIPT
#!#{path}
use strict;
SCRIPT
end
end
ScriptType.add :erlang do
def match_extension?(extension)
extension == 'erl'
end
def path
'/usr/bin/env escript'
end
end
ScriptType.add :ruby do
def match_extension?(extension)
extension == 'rb'
end
def path
'/usr/bin/env ruby'
end
end
# An editor, derived from context.
#
# Subclasses declare how to launch themselves via the `command` macro; the
# `inherited` hook auto-registers each subclass under its lowercased class
# name so `from_command("vim")` can find it.
class Editor
  # Class-instance variables (not @@ class variables): each subclass gets
  # its own @command/@args, set by the `command` macro below.
  @command = nil
  @args = []
  @subclasses = {}
  class << self
    attr_reader :args
  end
  # Launches the editor on filename and blocks until it exits.
  def edit(filename)
    pid = Process.spawn(make_arg_string(filename))
    Process.wait(pid)
  end
  # Registry hook: every subclass is instantiated once and indexed by its
  # lowercased class name.
  def self.inherited(cls)
    name = cls.name.downcase
    @subclasses[name] = cls.new
  end
  # Dual-purpose macro: with no args it reads the stored command name;
  # with args it stores the command name plus its argument template.
  def self.command(*args)
    if args.empty?
      @command
    else
      name, *args = args
      @command = name
      @args = args
    end
  end
  # Picks the editor from VISUAL, falling back to EDITOR.
  def self.from_environment
    name = ENV['VISUAL'] || ENV['EDITOR']
    if name.nil?
      fail 'Cannot determine your editor: both EDITOR and VISUAL are undefined'
    else
      Editor.from_command(name)
    end
  end
  def self.from_command(name)
    fail "Unknown editor: #{name}" unless @subclasses.include?(name)
    @subclasses[name]
  end
  private
  # Builds a shell-safe command line for Process.spawn.
  def make_arg_string(filename)
    Shellwords.join(self.class.command_with_args(filename))
  end
  # Substitutes the real filename for the :filename placeholder in @args.
  # NOTE(review): `private` above does not affect singleton methods, so this
  # remains publicly callable on the class.
  def self.command_with_args(filename)
    [command] + args.map do |arg|
      if arg == :filename
        filename
      else
        arg
      end
    end
  end
end
# Definition for Vim
class Vim < Editor
command 'vim', '+norm G', :filename
end
# Definition for Nano
class Nano < Editor
command 'nano', :filename
end
# Definition for Emacs
class Emacs < Editor
command 'emacs', :filename
end
# The script to create or modify.
class Script
  attr_reader :filename, :script_type
  # Factory: builds the script, then runs the given block in the instance's
  # own context (instance_eval), so the block can call ensure_exists etc.
  # without an explicit receiver.
  def self.new(filename, &block)
    obj = super(filename)
    obj.instance_eval(&block) if block_given?
    obj
  end
  def initialize(filename)
    fail 'No filename given' if filename.nil?
    @filename = filename
    @script_type = ScriptType.match(filename)
    @file = nil # lazily opened by open_for_appending!
  end
  # Creates the file (empty) if it does not already exist.
  def ensure_exists
    return if File.exist?(filename)
    open_for_appending!
  end
  # Adds the execute bits (u+x, g+x, o+x) to the file's current mode.
  def set_executable
    open_for_appending
    current_mode = File.stat(filename).mode
    @file.chmod(current_mode | 0111)
  end
  # Writes the script type's shebang template and flushes it to disk.
  def inject_template_from_name
    @file.write script_type.template
    @file.fsync
  end
  # Opens the file in the editor chosen from VISUAL/EDITOR.
  def edit
    editor = Editor.from_environment
    editor.edit(filename)
  end
  # True when the file already starts with the shebang this type expects.
  def correct_shebang?
    open_for_appending
    @file.seek(0, :SET) # rewind: the handle is opened for appending
    # File has no contents
    return false if @file.eof?
    # Read the first line; check for shebang.
    first_line = @file.gets("\n")
    return false unless first_line.start_with?('#!')
    path = first_line[2..-1].chomp
    script_type.correct_shebang?(path)
  end
  private
  # Reopens only when there is no live handle.
  def open_for_appending
    open_for_appending! unless @file.is_a?(File) && !@file.closed?
  end
  # Opens read/append ("a+"), text mode, UTF-8; creates the file if missing.
  def open_for_appending!
    @file = File.new(filename, 'a+t:UTF-8')
    self
  end
end
Script.new filename do
ensure_exists
set_executable
inject_template_from_name unless correct_shebang?
edit
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'bookworm_best_sellers/version'
Gem::Specification.new do |spec|
spec.name = "bookworm_best_sellers"
spec.version = BookwormBestSellers::VERSION
spec.authors = ["William Saxe"]
spec.email = ["geosaxe@gmail.com"]
spec.summary = %q{The Bookworm gem provides the current week's bestselling books in the USA.}
spec.description = %q{The Bookworm gem provides the current week's bestselling books in the USA according to the New York Times. Best Selling Book list is live updated when the gem is executed.}
spec.homepage = "https://github.com/wsaxe/bookworm-best-sellers"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.10"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.4.0"
spec.add_development_dependency "pry", "~> 0.10.3"
spec.add_dependency "nokogiri", "~> 1.6.8"
end
updated dependencies
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'bookworm_best_sellers/version'
Gem::Specification.new do |spec|
spec.name = "bookworm_best_sellers"
spec.version = BookwormBestSellers::VERSION
spec.authors = ["William Saxe"]
spec.email = ["geosaxe@gmail.com"]
spec.summary = %q{The Bookworm gem provides the current week's bestselling books in the USA.}
spec.description = %q{The Bookworm gem provides the current week's bestselling books in the USA according to the New York Times. Best Selling Book list is live updated when the gem is executed.}
spec.homepage = "https://github.com/wsaxe/bookworm-best-sellers"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.10"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.4"
spec.add_development_dependency "pry", "~> 0.10"
spec.add_dependency "nokogiri", "~> 1.6"
end
|
require 'helper'
class TestSkimCodeBlocks < TestSkim
def test_render_with_output_code_block
source = %q{
p
= @callback "Hello Ruby!", ->
| Hello from within a block!
}
assert_html '<p>Hello Ruby! Hello from within a block! Hello Ruby!</p>', source
end
def test_render_with_output_code_within_block
source = %q{
p
= @callback "Hello Ruby!", =>
= @callback "Hello from within a block!"
}
assert_html '<p>Hello Ruby! Hello from within a block! Hello Ruby!</p>', source
end
def test_render_with_output_code_within_block_2
source = %q{
p
= @callback "Hello Ruby!", =>
= @callback "Hello from within a block!", =>
= @callback "And another one!"
}
assert_html '<p>Hello Ruby! Hello from within a block! And another one! Hello from within a block! Hello Ruby!</p>', source
end
def test_output_block_with_arguments
source = %q{
p
= @define_macro 'person', (first_name, last_name) =>
.first_name = first_name
.last_name = last_name
== @call_macro 'person', 'John', 'Doe'
== @call_macro 'person', 'Max', 'Mustermann'
}
assert_html '<p><div class="first_name">John</div><div class="last_name">Doe</div><div class="first_name">Max</div><div class="last_name">Mustermann</div></p>', source
end
def test_render_with_control_code_forEach_loop
source = %q{
p
- [0..2].forEach =>
| Hey!
}
assert_html '<p>Hey!Hey!Hey!</p>', source
end
def test_render_with_control_code_for_in_loop
source = %q{
p
- for i in [0..2]
| Hey!
}
assert_html '<p>Hey!Hey!Hey!</p>', source
end
def test_render_with_control_code_for_own_of_loop
source = %q{
p
- for own key, value of {user: 'name'}
| #{key} #{value}
}
assert_html '<p>user name</p>', source
end
def test_captured_code_block_with_conditional
source = %q{
= @callback "Hello Ruby!", ->
- if true
| Hello from within a block!
}
assert_html 'Hello Ruby! Hello from within a block! Hello Ruby!', source
end
end
Add test_render_with_control_code_for_in_loop_without_parent
There is a bug introduced by the change from an array buffer to a
string buffer. When a for loop is at the top level, the result is still an
array. It should be a string.
require 'helper'
# Tests for Skim's handling of CoffeeScript code blocks: output code (=),
# escaped output (==), control code (-), and captured blocks.
# The %q{ } templates are whitespace-sensitive Skim source.
class TestSkimCodeBlocks < TestSkim
# Output code taking an unbound (->) block: nested content renders inside it.
def test_render_with_output_code_block
source = %q{
p
= @callback "Hello Ruby!", ->
| Hello from within a block!
}
assert_html '<p>Hello Ruby! Hello from within a block! Hello Ruby!</p>', source
end
# Output code nested inside a bound (=>) block.
def test_render_with_output_code_within_block
source = %q{
p
= @callback "Hello Ruby!", =>
= @callback "Hello from within a block!"
}
assert_html '<p>Hello Ruby! Hello from within a block! Hello Ruby!</p>', source
end
# Two levels of nested callbacks.
def test_render_with_output_code_within_block_2
source = %q{
p
= @callback "Hello Ruby!", =>
= @callback "Hello from within a block!", =>
= @callback "And another one!"
}
assert_html '<p>Hello Ruby! Hello from within a block! And another one! Hello from within a block! Hello Ruby!</p>', source
end
# Block parameters are forwarded into the macro body.
def test_output_block_with_arguments
source = %q{
p
= @define_macro 'person', (first_name, last_name) =>
.first_name = first_name
.last_name = last_name
== @call_macro 'person', 'John', 'Doe'
== @call_macro 'person', 'Max', 'Mustermann'
}
assert_html '<p><div class="first_name">John</div><div class="last_name">Doe</div><div class="first_name">Max</div><div class="last_name">Mustermann</div></p>', source
end
# Control code with a CoffeeScript Array#forEach loop.
def test_render_with_control_code_forEach_loop
source = %q{
p
- [0..2].forEach =>
| Hey!
}
assert_html '<p>Hey!Hey!Hey!</p>', source
end
# Control code with a for...in loop over a range literal.
def test_render_with_control_code_for_in_loop
source = %q{
p
- for i in [0..2]
| Hey!
}
assert_html '<p>Hey!Hey!Hey!</p>', source
end
# Regression: a for loop at the top level (no parent tag) must produce a
# string, not an array — bug from the array-buffer -> string-buffer change.
def test_render_with_control_code_for_in_loop_without_parent
source = %q{
- for i in [0..2]
p Hey!
}
assert_html '<p>Hey!</p><p>Hey!</p><p>Hey!</p>', source
end
# CoffeeScript "for own key, value of" over an object literal.
def test_render_with_control_code_for_own_of_loop
source = %q{
p
- for own key, value of {user: 'name'}
| #{key} #{value}
}
assert_html '<p>user name</p>', source
end
# A conditional inside a captured (->) block.
def test_captured_code_block_with_conditional
source = %q{
= @callback "Hello Ruby!", ->
- if true
| Hello from within a block!
}
assert_html 'Hello Ruby! Hello from within a block! Hello Ruby!', source
end
end
|
require 'test_helper'
require 'tbar/fiber_local'
module Tbar
  # Unit tests for Tbar::FiberLocal, a fiber-scoped key/value store with
  # symbol/string-indifferent keys.
  #
  # Fixed: the original asserted absolute key counts (e.g. exactly 2 keys
  # after one store), which is flaky when other libraries or threads have
  # already populated fiber-local storage. Counts are now relative to the
  # size observed at the start of each test.
  class FiberLocalTest < Test
    # Demonstrates lazy, per-instance creation of a FiberLocal.
    class Container
      def locals
        @locals ||= Tbar::FiberLocal.new
      end
    end

    def setup
      @local = Tbar::FiberLocal.new
    end

    def teardown
      @local.clear
    end

    def test_keys
      # A fresh store exposes only the internal recursion-guard key.
      assert_equal [:__recursive_key__], @local.keys
    end

    def test_has_key
      # Symbol and string forms address the same slot.
      @local['foo'] = "bar"
      assert @local.key?( :foo )
      assert @local.key?( "foo" )
    end

    def test_store
      before = @local.keys.size
      @local[:foo] = "bar"
      assert_equal( (before + 1), @local.keys.size )
    end

    def test_fetch
      @local[:foo] = "bar"
      assert_equal "bar", @local[:foo]
    end

    def test_fetch_indifferent_access
      before = @local.keys.size
      @local[:foo] = "bar"
      assert_equal "bar", @local["foo"]
      @local["foo"] = "baz"
      assert_equal "baz", @local[:foo]
      # Both spellings hit the same slot, so only one key was added.
      assert_equal( (before + 1), @local.keys.size )
    end

    def test_delete
      before = @local.keys.size
      @local[:foo] = "bar"
      assert_equal( (before + 1), @local.keys.size )
      @local.delete( "foo" )
      assert_equal( before, @local.keys.size )
    end

    def test_clear
      before = @local.keys.size
      @local[:foo] = "bar"
      @local[:baz] = "wibble"
      assert_equal( (before + 2), @local.keys.size )
      @local.clear
      assert_equal 1, @local.keys.size
      refute @local.key?( :foo )
      refute @local.key?( :baz )
    end
  end
end
Fix sporadic test failures that are thread-dependent
require 'test_helper'
require 'tbar/fiber_local'
module Tbar
# Unit tests for Tbar::FiberLocal, a fiber-scoped key/value store with
# symbol/string-indifferent keys. Key counts are measured relative to the
# starting size because other code may already have stored fiber-locals.
class FiberLocalTest < Test
def setup
@local = Tbar::FiberLocal.new
end
def teardown
@local.clear
end
# A fresh store exposes only the internal recursion-guard key.
def test_keys
assert_equal [:__recursive_key__], @local.keys
end
# Symbol and string forms address the same slot.
def test_has_key
@local['foo'] = "bar"
assert @local.key?( :foo )
assert @local.key?( "foo" )
end
def test_store
before = @local.keys.size
@local[:foo] = "bar"
assert_equal( (before+1), @local.keys.size )
end
def test_fetch
@local[:foo] = "bar"
assert_equal "bar", @local[:foo]
end
# Storing under :foo and "foo" must update the same slot (one key added).
def test_fetch_indifferent_access
before = @local.keys.size
@local[:foo] = "bar"
assert_equal "bar", @local["foo"]
@local["foo"] = "baz"
assert_equal "baz", @local[:foo]
assert_equal( (before+1), @local.keys.size )
end
def test_delete
before = @local.keys.size
@local[:foo] = "bar"
assert_equal(before+1, @local.keys.size)
@local.delete( "foo" )
assert_equal 1, @local.keys.size
end
# clear removes user keys but keeps the recursion-guard key.
def test_clear
before = @local.keys.size
@local[:foo] = "bar"
@local[:baz] = "wibble"
assert_equal( before+2, @local.keys.size )
@local.clear
assert_equal 1, @local.keys.size
refute @local.key?( :foo )
refute @local.key?( :baz )
end
end
end
|
require 'rubygems'
require 'bacon'
require 'fileutils'
require File.expand_path("../lib/filewatcher.rb",File.dirname(__FILE__))
# Bacon spec for FileWatcher: path/glob resolution and change detection.
# Fixes: File.exists? -> File.exist? (deprecated, removed in Ruby 3.2),
# Kernel#open -> File.open, and ensured cleanup of the scratch subfolder.
describe FileWatcher do
  # Fixture files that every glob/path variant is expected to discover.
  fixtures =
    %w(test/fixtures/file4.rb
       test/fixtures/subdir/file6.rb
       test/fixtures/subdir/file5.rb
       test/fixtures/file2.txt
       test/fixtures/file1.txt
       test/fixtures/file3.rb)
  explicit_relative_fixtures = fixtures.map { |it| "./#{it}" }

  # Returns a predicate that passes when the collection contains every element.
  def includes_all(elements)
    lambda { |it| elements.all? { |element| it.include? element }}
  end

  it "should handle absolute paths with globs" do
    filewatcher = FileWatcher.new(File.absolute_path('test/fixtures/**/*'))
    filewatcher.filenames.should.satisfy &includes_all(fixtures.map { |it| File.absolute_path(it) })
  end

  it "should handle globs" do
    filewatcher = FileWatcher.new('test/fixtures/**/*')
    filewatcher.filenames.should.satisfy &includes_all(fixtures)
  end

  it "should handle explicit relative paths with globs" do
    filewatcher = FileWatcher.new('./test/fixtures/**/*')
    filewatcher.filenames.should.satisfy &includes_all(explicit_relative_fixtures)
  end

  it "should handle explicit relative paths" do
    filewatcher = FileWatcher.new('./test/fixtures')
    filewatcher.filenames.should.satisfy &includes_all(explicit_relative_fixtures)
  end

  it "should detect file deletions" do
    filename = "test/fixtures/file1.txt"
    File.open(filename, "w") { |f| f.puts "content1" }
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    FileUtils.rm(filename)
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect file additions" do
    filename = "test/fixtures/file1.txt"
    # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
    FileUtils.rm(filename) if File.exist?(filename)
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    File.open(filename, "w") { |f| f.puts "content1" }
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect file updates" do
    filename = "test/fixtures/file1.txt"
    File.open(filename, "w") { |f| f.puts "content1" }
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    # mtime resolution can be a full second on some filesystems.
    sleep 1
    File.open(filename, "w") { |f| f.puts "content2" }
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect new files in subfolders" do
    subfolder = 'test/fixtures/new_sub_folder'
    begin
      filewatcher = FileWatcher.new(["test/fixtures"])
      filewatcher.filesystem_updated?.should.be.false
      FileUtils::mkdir_p subfolder
      filewatcher.filesystem_updated?.should.be.false
      File.open(subfolder + "/file.txt", "w") { |f| f.puts "xyz" }
      filewatcher.filesystem_updated?.should.be.true
    ensure
      # Clean up even when an expectation above fails.
      FileUtils.rm_rf subfolder
    end
  end
end
Add a test for watching directories
require 'rubygems'
require 'bacon'
require 'fileutils'
require File.expand_path("../lib/filewatcher.rb",File.dirname(__FILE__))
# Bacon spec for FileWatcher: path/glob resolution and change detection.
# Fixes: File.exists? -> File.exist? (deprecated, removed in Ruby 3.2)
# and Kernel#open -> File.open.
describe FileWatcher do
  # Fixture files that every glob/path variant is expected to discover.
  fixtures =
    %w(test/fixtures/file4.rb
       test/fixtures/subdir/file6.rb
       test/fixtures/subdir/file5.rb
       test/fixtures/file2.txt
       test/fixtures/file1.txt
       test/fixtures/file3.rb)
  explicit_relative_fixtures = fixtures.map { |it| "./#{it}" }
  # Scratch directory used by the subfolder examples.
  subfolder = 'test/fixtures/new_sub_folder'

  # Runs after every example, so a failed expectation cannot leak the folder.
  after do
    FileUtils.rm_rf subfolder
  end

  # Returns a predicate that passes when the collection contains every element.
  def includes_all(elements)
    lambda { |it| elements.all? { |element| it.include? element }}
  end

  it "should handle absolute paths with globs" do
    filewatcher = FileWatcher.new(File.absolute_path('test/fixtures/**/*'))
    filewatcher.filenames.should.satisfy &includes_all(fixtures.map { |it| File.absolute_path(it) })
  end

  it "should handle globs" do
    filewatcher = FileWatcher.new('test/fixtures/**/*')
    filewatcher.filenames.should.satisfy &includes_all(fixtures)
  end

  it "should handle explicit relative paths with globs" do
    filewatcher = FileWatcher.new('./test/fixtures/**/*')
    filewatcher.filenames.should.satisfy &includes_all(explicit_relative_fixtures)
  end

  it "should handle explicit relative paths" do
    filewatcher = FileWatcher.new('./test/fixtures')
    filewatcher.filenames.should.satisfy &includes_all(explicit_relative_fixtures)
  end

  it "should detect file deletions" do
    filename = "test/fixtures/file1.txt"
    File.open(filename, "w") { |f| f.puts "content1" }
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    FileUtils.rm(filename)
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect file additions" do
    filename = "test/fixtures/file1.txt"
    # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
    FileUtils.rm(filename) if File.exist?(filename)
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    File.open(filename, "w") { |f| f.puts "content1" }
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect file updates" do
    filename = "test/fixtures/file1.txt"
    File.open(filename, "w") { |f| f.puts "content1" }
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    # mtime resolution can be a full second on some filesystems.
    sleep 1
    File.open(filename, "w") { |f| f.puts "content2" }
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect new files in subfolders" do
    FileUtils::mkdir_p subfolder
    filewatcher = FileWatcher.new(["./test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    File.open(subfolder + "/file.txt", "w") { |f| f.puts "xyz" }
    filewatcher.filesystem_updated?.should.be.true
  end

  it "should detect new subfolders" do
    filewatcher = FileWatcher.new(["test/fixtures"])
    filewatcher.filesystem_updated?.should.be.false
    FileUtils::mkdir_p subfolder
    filewatcher.filesystem_updated?.should.be.true
  end
end
|
Create test_new_command.rb
require 'helper'
require 'bunto/commands/new'
# Tests for `bunto new`: scaffolding a site from the bundled template.
class TestNewCommand < BuntoUnitTest
# Lists everything under +path+ with the path prefix stripped, so listings
# can be compared against the bundled site template.
def dir_contents(path)
Dir["#{path}/**/*"].each do |file|
file.gsub! path, ''
end
end
# Absolute path to the site template shipped with the gem.
def site_template
File.expand_path("../lib/site_template", File.dirname(__FILE__))
end
context 'when args contains a path' do
setup do
@path = 'new-site'
@args = [@path]
@full_path = File.expand_path(@path, Dir.pwd)
end
teardown do
FileUtils.rm_r @full_path
end
should 'create a new directory' do
refute_exist @full_path
Bunto::Commands::New.process(@args)
assert_exist @full_path
end
should 'display a success message' do
Bunto::Commands::New.process(@args)
output = Bunto.logger.messages.last
success_message = "New bunto site installed in #{@full_path}."
assert_includes output, success_message
end
# ERB templates are excluded here; they are rendered to .markdown files,
# which the new-site listing excludes to compare only static files.
should 'copy the static files in site template to the new directory' do
static_template_files = dir_contents(site_template).reject do |f|
File.extname(f) == '.erb'
end
capture_stdout { Bunto::Commands::New.process(@args) }
new_site_files = dir_contents(@full_path).reject do |f|
File.extname(f) == '.markdown'
end
assert_same_elements static_template_files, new_site_files
end
# Stubs Time#strftime so the dated post filenames are deterministic.
should 'process any ERB files' do
erb_template_files = dir_contents(site_template).select do |f|
File.extname(f) == '.erb'
end
stubbed_date = '2013-01-01'
allow_any_instance_of(Time).to receive(:strftime) { stubbed_date }
erb_template_files.each do |f|
f.chomp! '.erb'
f.gsub! '0000-00-00', stubbed_date
end
capture_stdout { Bunto::Commands::New.process(@args) }
new_site_files = dir_contents(@full_path).select do |f|
erb_template_files.include? f
end
assert_same_elements erb_template_files, new_site_files
end
# --blank scaffolds only the bare directory skeleton.
should 'create blank project' do
blank_contents = %w(/_drafts /_layouts /_posts /index.html)
capture_stdout { Bunto::Commands::New.process(@args, '--blank') }
assert_same_elements blank_contents, dir_contents(@full_path)
end
# --force allows scaffolding over an existing directory.
should 'force created folder' do
capture_stdout { Bunto::Commands::New.process(@args) }
output = capture_stdout { Bunto::Commands::New.process(@args, '--force') }
assert_match /New bunto site installed in/, output
end
end
# Multiple args are joined into a single path containing spaces.
context 'when multiple args are given' do
setup do
@site_name_with_spaces = 'new site name'
@multiple_args = @site_name_with_spaces.split
end
teardown do
FileUtils.rm_r File.expand_path(@site_name_with_spaces, Dir.pwd)
end
should 'create a new directory' do
refute_exist @site_name_with_spaces
capture_stdout { Bunto::Commands::New.process(@multiple_args) }
assert_exist @site_name_with_spaces
end
end
context 'when no args are given' do
setup do
@empty_args = []
end
should 'raise an ArgumentError' do
exception = assert_raises ArgumentError do
Bunto::Commands::New.process(@empty_args)
end
assert_equal 'You must specify a path.', exception.message
end
end
end
|
# Build a simple guessing game
# I worked on this challenge by myself.
# I spent [#] hours on this challenge.
# Pseudocode
# Input: an answer for the game
# Output: high, low, or correct depending on what the guess is
# Steps: initialize new guessing game with an answer
# set up an instance variable from the answer given
# Set up guess method with a guess input
# IF guess is equal to the answer return correct
# ELSIF guess is larger than the answer return that it's too high
# ELSE return that it's too low
# END
# setup a solved method
# IF the last guess was correct, return true
# ELSE return false
# END
# Initial Solution
=begin
class GuessingGame
def initialize(answer)
# Your initialization code goes here
@answer = answer
@solved = {solved?: false}
end
def guess(guess)
if @answer == guess
@solved[:solved?] = true
p :correct
elsif guess >= @answer
@solved[:solved?] = false
p :high
else
@solved[:solved?] = false
p :low
end
end
def solved?
if @solved[:solved?]
p true
else
p false
end
end
end
=end
# Refactored Solution
# A number-guessing game: compares each guess against the stored answer
# and remembers whether the most recent guess was correct.
class GuessingGame
  def initialize(answer)
    @answer = answer
    @solved = false
  end

  # Returns :correct, :high, or :low for the given guess, and records
  # whether this guess solved the game.
  def guess(guess)
    @solved = (guess == @answer)
    return :correct if @solved

    guess > @answer ? :high : :low
  end

  # True only when the most recent guess matched the answer.
  def solved?
    @solved
  end
end
# Reflection
=begin
- How do instance variables and methods represent the characteristics and behaviors (actions) of a real-world object?
Instance variables represent a single characteristic about an object. A variable will stay with that object unless it's specifically changed, regardless of what action (or method) the object is performing. So it could be like a person's hat. Whether that person is making coffee or watching tv, he'd keep wearing the hat until he took it off or put on another one. Methods are like actions that an object in the real world can perform. So a car object may have a method to start or rev the engine.
- When should you use instance variables? What do they do for you?
You should use instance variables when you want to be able to access/change that attribute of an object in that class at any point or within any method. They can be defined anywhere in the class, which was much harder for me to understand than I thought based on this challenge. Originally I kept trying to define all instance variables within the initialization method, but that caused unnecessary steps.
- Explain how to use flow control. Did you have any trouble using it in this challenge? If so, what did you struggle with?
Using flow control, you need to set specific conditional using keywords (e.g. if, elsif, else) to say what branch of the statement to go to. In this example, I needed to find the relationship between the guess and answer and then use that relationship to say what to return for that method. I didn't really have any specific trouble, except using it when I didn't need to in the solved? method.
- Why do you think this code requires you to return symbols? What are the benefits of using symbols?
I assume we used this mostly based on improved performance. Symbols take up much less space and also don't reference anything else. Otherwise I would have just used a string, which takes more space and also creates a new object each time it's returned/created. Since we returned symbols, it just references the same object over and over. This would also be a lot easier if we had more to do with this program since we're working with a single object.
=end
Refactor a little bit more
# Build a simple guessing game
# I worked on this challenge by myself.
# I spent [#] hours on this challenge.
# Pseudocode
# Input: an answer for the game
# Output: high, low, or correct depending on what the guess is
# Steps: initialize new guessing game with an answer
# set up an instance variable from the answer given
# Set up guess method with a guess input
# IF guess is equal to the answer return correct
# ELSIF guess is larger than the answer return that it's too high
# ELSE return that it's too low
# END
# setup a solved method
# IF the last guess was correct, return true
# ELSE return false
# END
# Initial Solution
=begin
class GuessingGame
def initialize(answer)
# Your initialization code goes here
@answer = answer
@solved = {solved?: false}
end
def guess(guess)
if @answer == guess
@solved[:solved?] = true
p :correct
elsif guess >= @answer
@solved[:solved?] = false
p :high
else
@solved[:solved?] = false
p :low
end
end
def solved?
if @solved[:solved?]
p true
else
p false
end
end
end
=end
# Refactored Solution
# A number-guessing game that derives solved-ness from the last guess.
class GuessingGame
  def initialize(answer)
    @answer = answer
  end

  # Records the guess and returns :correct, :high, or :low.
  def guess(guess)
    @guess = guess
    if guess == @answer
      :correct
    elsif guess > @answer # equality handled above, so > is sufficient
      :high
    else
      :low
    end
  end

  # True when the most recent guess equals the answer.
  # Fixed: the original printed the result with `p` — a predicate should
  # only return a value, not write to stdout.
  def solved?
    @answer == @guess
  end
end
# Reflection
=begin
- How do instance variables and methods represent the characteristics and behaviors (actions) of a real-world object?
Instance variables represent a single characteristic about an object. A variable will stay with that object unless it's specifically changed, regardless of what action (or method) the object is performing. So it could be like a person's hat. Whether that person is making coffee or watching tv, he'd keep wearing the hat until he took it off or put on another one. Methods are like actions that an object in the real world can perform. So a car object may have a method to start or rev the engine.
- When should you use instance variables? What do they do for you?
You should use instance variables when you want to be able to access/change that attribute of an object in that class at any point or within any method. They can be defined anywhere in the class, which was much harder for me to understand than I thought based on this challenge. Originally I kept trying to define all instance variables within the initialization method, but that caused unnecessary steps.
- Explain how to use flow control. Did you have any trouble using it in this challenge? If so, what did you struggle with?
Using flow control, you need to set specific conditional using keywords (e.g. if, elsif, else) to say what branch of the statement to go to. In this example, I needed to find the relationship between the guess and answer and then use that relationship to say what to return for that method. I didn't really have any specific trouble, except using it when I didn't need to in the solved? method.
- Why do you think this code requires you to return symbols? What are the benefits of using symbols?
I assume we used this mostly based on improved performance. Symbols take up much less space and also don't reference anything else. Otherwise I would have just used a string, which takes more space and also creates a new object each time it's returned/created. Since we returned symbols, it just references the same object over and over. This would also be a lot easier if we had more to do with this program since we're working with a single object.
=end
|
# Modularizing inventory functions from dup-weap.rb
require 'net/http'
require 'json'
require 'erb'
# Steam (TF2) inventory helpers: fetches a player's items and the item
# schema from the Steam Web API, caches them as JSON files, and resolves
# numeric ids to human-readable names.
module SteamInventory
  TF2_APP_ID = 440
  FILE_PATH = '.'
  ITEMS_FILE = "#{FILE_PATH}/items.json"
  SCHEMA_FILE = "#{FILE_PATH}/schema.json"

  # Wraps one raw inventory item hash and resolves its ids (defindex,
  # quality, origin) via the cached schema.
  class Item
    # Schema lookup tables are shared by all Items and loaded once on first
    # use. NOTE(review): class variables are shared across subclasses too.
    @@schema = nil
    @@defindex = nil

    def initialize(item)
      self.init_schema unless @@schema
      @item = item
    end

    # Debugging helper: returns an array of resolved fields for this item.
    def test
      out = Array.new
      out.push("Test")
      out.push(@@defindex[@item["defindex"]]["name"])
      out.push(@@quality[@item["quality"]])
      out.push(@@origin[@item["origin"]])
      out.push(self.name)
      out.push(self.quality)
      out.push(self.origin)
      out.push(@item["defindex"])
      out
    end

    def name
      @@defindex[@item["defindex"]]["name"]
    end

    def quality
      @@quality[@item["quality"]]
    end

    def origin
      @@origin[@item["origin"]]
    end

    def tradable?
      !@item["flag_cannot_trade"]
    end

    def craftable?
      !@item["flag_cannot_craft"]
    end

    def level
      @item["level"]
    end

    def craft_class
      @@defindex[@item["defindex"]]["craft_class"]
    end

    # Direct access to the underlying raw item hash.
    def raw
      @item
    end

    # Loads schema.json and builds the id -> description lookup tables.
    def init_schema
      schema = JSON.parse(File.read(SCHEMA_FILE))
      # Build lookup hash table for item descriptions because array index is sparse
      @@defindex = Hash.new
      schema["result"]["items"].each do | itemdef |
        @@defindex[itemdef["defindex"]] = itemdef
      end
      @@quality = Hash.new
      schema["result"]["qualities"].each do | key, value |
        @@quality[value] = key
      end
      @@origin = Hash.new
      schema["result"]["originNames"].each do | origin |
        @@origin[origin["origin"]] = origin["name"]
      end
      @@schema = schema
    end
  end

  # Collection of Items backed by the cached items.json / schema.json files.
  class Items
    def initialize
      self.read_files
    end

    # Debugging helper delegating to the first item's test output.
    def test
      @items[0].test
    end

    # While storing data in files, use this to read in data. Called from both initialize and get_items
    # Later need to use ActiveRecord or other data store
    def read_files
      @items = Array.new
      items = JSON.parse(File.read(ITEMS_FILE))
      items["result"]["items"].each do | item |
        @items.push(Item.new(item))
      end
      # TODO: Delete these in favor of Items lookups
      @olditems = items
      @schema = JSON.parse(File.read(SCHEMA_FILE))
    end

    # Downloads the player's inventory and the item schema, caches both to
    # disk, then reloads. Raises when credentials are missing or a request
    # fails.
    def get_items(steam_id = ENV['STEAM_ID'], steam_api_key = ENV['STEAM_API_KEY'], app_id = TF2_APP_ID)
      raise 'Need to provide Steam API key' if steam_api_key.nil?
      raise 'Need to provide Steam ID' if steam_id.nil?
      # Inventory
      uri = URI("http://api.steampowered.com/IEconItems_#{app_id}/GetPlayerItems/v0001/?key=#{steam_api_key}&SteamID=#{steam_id}")
      result = Net::HTTP.get_response(uri)
      raise 'Failure requesting inventory' unless result.is_a?(Net::HTTPSuccess)
      File.write(ITEMS_FILE, result.body)
      # Schema
      uri = URI("http://api.steampowered.com/IEconItems_#{app_id}/GetSchema/v0001/?key=#{steam_api_key}")
      result = Net::HTTP.get_response(uri)
      # Fixed: this message previously said "inventory" for the schema request.
      raise 'Failure requesting schema' unless result.is_a?(Net::HTTPSuccess)
      File.write(SCHEMA_FILE, result.body)
      self.read_files
    end

    # Craftable-class weapons that appear more than once in the inventory,
    # highest quality first within each defindex group.
    def dup_weap
      weapons = Array.new
      @items
        .select { | item | item.craft_class.eql? "weapon"}
        .select { | item | @items.select{ | allitem | allitem.raw["defindex"] == item.raw["defindex"] }.count > 1 }
        .sort_by { | item | item.raw["quality"] }
        .reverse
        .sort_by { | item | item.raw["defindex"] }
        .each do | item |
          weapons.push(item)
        end
      weapons
    end

    def out_html
      # FIXME: ERB needs proper binding
      # renderer = ERB.new(File.read('out.html.erb'))
      # File.write('out.html', renderer.result)
    end

    # Formats items as aligned plain-text lines.
    def out_text(items)
      output = String.new
      items.each do | item |
        output << sprintf("%-12s %-13s %-10s Level %3s %-28s %-15s\n",
          item.tradable? ? "" : "Non-Tradable",
          item.craftable? ? "" : "Non-Craftable",
          item.quality,
          item.level,
          item.name,
          item.origin
        )
      end
      output
    end
  end
end
Cleanup
# Modularizing inventory functions from dup-weap.rb
require 'net/http'
require 'json'
require 'erb'
# Steam (TF2) inventory helpers: fetches a player's items and the item
# schema from the Steam Web API, caches them as JSON files, and resolves
# numeric ids to human-readable names.
module SteamInventory
  TF2_APP_ID = 440
  FILE_PATH = '.'
  ITEMS_FILE = "#{FILE_PATH}/items.json"
  SCHEMA_FILE = "#{FILE_PATH}/schema.json"

  # Wraps one raw inventory item hash and resolves its ids (defindex,
  # quality, origin) via the cached schema.
  class Item
    # Schema lookup tables are shared by all Items and loaded once on first
    # use. NOTE(review): class variables are shared across subclasses too.
    @@schema = nil
    @@defindex = nil

    def initialize(item)
      self.init_schema unless @@schema
      @item = item
    end

    def name
      @@defindex[@item["defindex"]]["name"]
    end

    def quality
      @@quality[@item["quality"]]
    end

    def origin
      @@origin[@item["origin"]]
    end

    def tradable?
      !@item["flag_cannot_trade"]
    end

    def craftable?
      !@item["flag_cannot_craft"]
    end

    def level
      @item["level"]
    end

    def craft_class
      @@defindex[@item["defindex"]]["craft_class"]
    end

    # Allow direct access to the item values
    def raw
      @item
    end

    # Loads schema.json and builds the id -> description lookup tables.
    def init_schema
      schema = JSON.parse(File.read(SCHEMA_FILE))
      # Build lookup hash table for item descriptions because array index is sparse
      @@defindex = Hash.new
      schema["result"]["items"].each do | itemdef |
        @@defindex[itemdef["defindex"]] = itemdef
      end
      @@quality = Hash.new
      schema["result"]["qualities"].each do | key, value |
        @@quality[value] = key
      end
      @@origin = Hash.new
      schema["result"]["originNames"].each do | origin |
        @@origin[origin["origin"]] = origin["name"]
      end
      @@schema = schema
    end
  end

  # Collection of Items backed by the cached items.json / schema.json files.
  class Items
    def initialize
      self.read_files
    end

    # While storing data in files, use this to read in data. Called from both initialize and get_items
    # Later need to use ActiveRecord or other data store
    def read_files
      @items = Array.new
      items = JSON.parse(File.read(ITEMS_FILE))
      items["result"]["items"].each do | item |
        @items.push(Item.new(item))
      end
    end

    # Downloads the player's inventory and the item schema, caches both to
    # disk, then reloads. Raises when credentials are missing or a request
    # fails.
    def get_items(steam_id = ENV['STEAM_ID'], steam_api_key = ENV['STEAM_API_KEY'], app_id = TF2_APP_ID)
      raise 'Need to provide Steam API key' if steam_api_key.nil?
      raise 'Need to provide Steam ID' if steam_id.nil?
      # Inventory
      uri = URI("http://api.steampowered.com/IEconItems_#{app_id}/GetPlayerItems/v0001/?key=#{steam_api_key}&SteamID=#{steam_id}")
      result = Net::HTTP.get_response(uri)
      raise 'Failure requesting inventory' unless result.is_a?(Net::HTTPSuccess)
      File.write(ITEMS_FILE, result.body)
      # Schema
      uri = URI("http://api.steampowered.com/IEconItems_#{app_id}/GetSchema/v0001/?key=#{steam_api_key}")
      result = Net::HTTP.get_response(uri)
      # Fixed: this message previously said "inventory" for the schema request.
      raise 'Failure requesting schema' unless result.is_a?(Net::HTTPSuccess)
      File.write(SCHEMA_FILE, result.body)
      self.read_files
    end

    # Craftable-class weapons that appear more than once in the inventory,
    # highest quality first within each defindex group.
    def dup_weap
      weapons = Array.new
      @items
        .select { | item | item.craft_class.eql? "weapon"}
        .select { | item | @items.select{ | allitem | allitem.raw["defindex"] == item.raw["defindex"] }.count > 1 }
        .sort_by { | item | item.raw["quality"] }
        .reverse
        .sort_by { | item | item.raw["defindex"] }
        .each do | item |
          weapons.push(item)
        end
      weapons
    end

    def out_html
      # FIXME: ERB needs proper binding
      # renderer = ERB.new(File.read('out.html.erb'))
      # File.write('out.html', renderer.result)
    end

    # Formats items as aligned plain-text lines.
    def out_text(items)
      output = String.new
      items.each do | item |
        output << sprintf("%-12s %-13s %-10s Level %3s %-28s %-15s\n",
          item.tradable? ? "" : "Non-Tradable",
          item.craftable? ? "" : "Non-Craftable",
          item.quality,
          item.level,
          item.name,
          item.origin
        )
      end
      output
    end
  end
end
|
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/quill/rails/version', __FILE__)
# Gem packaging for quill-rails: bundles Quill.js editor assets and a view
# helper for Rails 3.1/4.0 applications.
Gem::Specification.new do |s|
s.name = "quill-rails"
s.authors = ["Robert Wiegmann"]
s.email = ["robert.wiegmann@gmail.com"]
s.homepage = "https://github.com/the-robear/quill-rails"
# NOTE(review): these are not SPDX license identifiers, and the second
# entry is a copyright notice rather than a license — confirm intended
# licensing before release.
s.licenses = ["Quill is All rights reserved", "Copyright (c) 2013, salesforce.com"]
s.summary = "Quill.js for Rails 3.1/4.0"
s.description = "This gem provides the Quill.js Rich Text Editor assets and view helper for your Rails 3.1/4.0 application."
s.files = Dir["{app,lib}/**/*"] + ["LICENSE", "Gemfile", "README.md"]
s.version = Quill::Rails::VERSION
# Runtime dependency: compatible with Rails 3.1 through 4.x.
s.add_dependency "railties", ">= 3.1.0", "< 5.0"
s.add_development_dependency "rails", ">= 3.1"
s.add_development_dependency "jquery-rails"
s.add_development_dependency 'coffee-rails'
s.add_development_dependency "sqlite3"
end
Updated license type
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/quill/rails/version', __FILE__)
# Gem packaging for quill-rails: bundles Quill.js editor assets and a view
# helper for Rails 3.1/4.0 applications.
Gem::Specification.new do |s|
s.name = "quill-rails"
s.authors = ["Robert Wiegmann"]
s.email = ["robert.wiegmann@gmail.com"]
s.homepage = "https://github.com/the-robear/quill-rails"
# NOTE(review): RubyGems expects SPDX identifiers here ("BSD-3-Clause");
# the second entry is a copyright notice, not a license.
s.licenses = ["BSD 3 Clause", "Copyright (c) 2013, salesforce.com"]
s.summary = "Quill.js for Rails 3.1/4.0"
s.description = "This gem provides the Quill.js Rich Text Editor assets and view helper for your Rails 3.1/4.0 application."
s.files = Dir["{app,lib}/**/*"] + ["LICENSE", "Gemfile", "README.md"]
s.version = Quill::Rails::VERSION
# Runtime dependency: compatible with Rails 3.1 through 4.x.
s.add_dependency "railties", ">= 3.1.0", "< 5.0"
s.add_development_dependency "rails", ">= 3.1"
s.add_development_dependency "jquery-rails"
s.add_development_dependency 'coffee-rails'
s.add_development_dependency "sqlite3"
end
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rackconnect/version'
# Gem packaging for rackconnect, an API wrapper for Rackspace's RackConnect v3.
Gem::Specification.new do |spec|
spec.name = "rackconnect"
spec.version = Rackconnect::VERSION
spec.authors = ["Matt Darby"]
spec.email = ["matt.darby@rackspace.com"]
spec.summary = "API Wrapper for RackConnect v3 service from Rackspace"
spec.description = "API Wrapper for RackConnect v3 service from Rackspace"
spec.homepage = ""
spec.license = "MIT"
# Package every tracked file; expose bin/ entries as executables.
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "pry"
spec.add_development_dependency "rspec"
spec.add_development_dependency "sinatra"
spec.add_development_dependency "thin"
spec.add_dependency "activesupport"
spec.add_dependency "activemodel"
spec.add_dependency "rest-client"
end
Metadata
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rackconnect/version'
# Gem packaging for rackconnect, an API wrapper for Rackspace's RackConnect v3.
Gem::Specification.new do |spec|
  spec.name        = "rackconnect"
  spec.version     = Rackconnect::VERSION
  spec.authors     = ["Matt Darby"]
  spec.email       = ["matt.darby@rackspace.com"]
  spec.summary     = "API Wrapper for RackConnect v3 service from Rackspace"
  spec.description = "API Wrapper for RackConnect v3 service from Rackspace"
  spec.homepage    = ""
  spec.license     = "MIT"
  # Fixed: Gem::Specification#metadata must be a Hash with String keys and
  # values; assigning a bare String makes the gem invalid at build time.
  spec.metadata    = { "note" => "Some metadata for you" }
  # Package every tracked file; expose bin/ entries as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "sinatra"
  spec.add_development_dependency "thin"
  spec.add_dependency "activesupport"
  spec.add_dependency "activemodel"
  spec.add_dependency "rest-client"
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.