repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
nuthintrue/AuthEngine
|
app/api/auth_engine/base.rb
|
<filename>app/api/auth_engine/base.rb
module AuthEngine
  # Root Grape API class for the engine: mounts the versioned API tree.
  class Base < Grape::API
    mount V1::Base
  end
end
|
nuthintrue/AuthEngine
|
lib/auth_engine.rb
|
require "auth_engine/engine"
module AuthEngine
  # Root namespace for the engine; Rails glue lives in auth_engine/engine.
end
|
nuthintrue/AuthEngine
|
app/api/auth_engine/v1/helpers/authentication.rb
|
module AuthEngine
  module V1
    module Helpers
      # Grape helper mixin providing JWT (Knock) bearer-token
      # authentication. Endpoints call authenticate_token! (e.g. in a
      # before block); on success @current_person holds the user.
      module Authentication
        extend Grape::API::Helpers
        # NOTE(review): exposes a writer nothing here uses; the ivar is
        # assigned directly in authenticate_token! — confirm the writer
        # is needed by endpoints.
        attr_accessor :current_person
        # Halts with 401 unless the Authorization header carries a
        # decodable, unexpired token that resolves to a person.
        def authenticate_token!
          error!('Invalid API token', 401) unless token
          @current_person = person_from_token
          error!('Invalid API token', 401) unless @current_person
        rescue ::JWT::ExpiredSignature
          error!('Expired API token', 401)
        rescue ::JWT::DecodeError
          error!('Invalid API token', 401)
        rescue ActiveRecord::RecordNotFound
          error!('Invalid API token', 401)
        end
        # Raw token: last whitespace-separated chunk of the
        # Authorization header ("Bearer <jwt>"); nil when absent.
        def token
          env['HTTP_AUTHORIZATION'].to_s.split(' ').last
        end
        # Decodes the token into a Credential and returns its user; JWT
        # and lookup errors are rescued by authenticate_token!.
        def person_from_token
          Knock::AuthToken.new(token: token)
            .entity_for(AuthEngine::Credential)
            .user
        end
      end
    end
  end
end
|
nuthintrue/AuthEngine
|
app/coordinators/auth_engine/api/api_coordinator.rb
|
<gh_stars>0
module AuthEngine
  module Api
    # Base class for API coordinators: bundles the request context
    # (params, headers, authenticated person, extra context) and exposes
    # a memoized result object. Subclasses implement #call.
    class ApiCoordinator < BaseCoordinator
      attr_reader :params, :headers, :current_person, :context

      def initialize(params: {}, headers: {}, current_person: nil, context: {})
        @params = params
        @headers = headers
        @current_person = current_person
        @context = context
      end

      # Template method — subclasses must override.
      def call
        # Idiomatic raise: class plus message, not an explicit .new.
        raise NotImplementedError, 'Implement this!'
      end

      # Memoized result container shared across coordinator steps.
      def result
        @result ||= Api::ApiCoordinatorResult.new
      end
    end
  end
end
|
nuthintrue/AuthEngine
|
app/api/auth_engine/v1/user/root.rb
|
module AuthEngine
  module V1
    module User
      # Groups the user-scoped endpoints under the /user namespace.
      class Root < Grape::API
        namespace :user do
          # NOTE(review): mounts top-level ::V1 constants — confirm these
          # resolve to AuthEngine::V1::User::* as intended.
          mount ::V1::User::Token
          mount ::V1::User::Me
        end
      end
    end
  end
end
|
secyritas/dashing-statuscake
|
jobs/statuscakestatus.rb
|
require 'statuscake'
require 'time_diff'
# Explicit stdlib requires: YAML and DateTime are used below.
require 'yaml'
require 'date'

config = YAML::load_file('config/statuscake.yml')

# Human-readable "Xd Yh Zm" difference between two times (time_diff gem).
def calculate_difference date1, date2
  t = Time.diff(date1, date2)
  "#{t[:day]}d #{t[:hour]}h #{t[:minute]}m"
end

client = StatusCake::Client.new(API: config['api_key'], Username: config['api_user'])

# :first_in sets how long it takes before the job is first run. In this case, it is run immediately
SCHEDULER.every '50s', :first_in => 0 do |job|
  config['checks'].each do |id|
    check = client.tests_details(TestID: id)
    performance = client.tests_checks(TestID: id, Fields: "performance")
    status = check["Status"].downcase
    # NOTE(review): uptime is computed but never sent in the event —
    # confirm whether it should be included.
    uptime = check["Uptime"].to_s + ' %'
    paused = check["Paused"]
    latency = performance[performance.first.first]["Performance"].to_s + 'ms'
    # Paused checks get their own pseudo-status for the widget.
    if paused == true
      status = "pau"
    end
    last_check_time = DateTime.parse(check["LastTested"])
    # Fix: Time.at cannot take a DateTime (TypeError) — convert with
    # DateTime#to_time instead.
    last_check = calculate_difference(Time.now, last_check_time.to_time)
    send_event("statuscake-status-#{id}", { current: status,
                                            latency: latency,
                                            last_check: last_check })
  end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch/redmine_client.rb
|
require 'datpot/bbs'
require 'redmine2ch/resources'
require 'time'
require 'faraday'
require 'faraday_middleware'
module Redmine2ch
  # Thin JSON client for the Redmine REST API (Faraday-based).
  class RedmineClient
    attr_reader :api_key, :root_url

    # api_key:  Redmine API token, sent via the X-Redmine-API-Key header.
    # root_url: base URL of the Redmine installation.
    def initialize(api_key:, root_url:)
      @api_key = api_key
      @root_url = root_url
    end

    # Fetches one issue including its journals (change history).
    # NOTE(review): leading-slash paths discard any path prefix in
    # root_url — confirm root_url is always a bare host.
    def issue(id:)
      raw_issue = connection.get("/issues/#{id}.json?include=journals").body[:issue]
      Resources::Issue.new(raw_issue)
    end

    # Fetches a project's issues, most recently updated first
    # (query is pre-encoded: sort=updated_on:desc,id:desc).
    def issues(project_id:)
      raw_issues = connection.get(
        "/projects/#{project_id}/issues.json?sort=updated_on%3Adesc%2Cid%3Adesc"
      ).body[:issues]
      raw_issues.map { |raw_issue| Resources::Issue.new(raw_issue) }
    end

    private

    # Builds a Faraday connection per call; JSON request/response with
    # symbolized keys and the API-key header attached.
    def connection
      Faraday.new(root_url) do |c|
        c.request :json
        c.response :json, content_type: /\bjson$/, parser_options: { symbolize_names: true }
        c.headers = {
          'Content-Type' => 'application/json',
          'X-Redmine-API-Key' => api_key
        }
        c.adapter Faraday.default_adapter
      end
    end
  end
end
|
vzvu3k6k/redmine2ch
|
spec/datpot/bbs_spec.rb
|
<reponame>vzvu3k6k/redmine2ch
# frozen_string_literal: true
require 'datpot/bbs'
require 'datpot/thread'
require 'datpot/response'
require 'rack/test'
RSpec.describe Datpot::Bbs do
include Rack::Test::Methods
class App < Datpot::Bbs
def threads(board_id:)
[
Datpot::Thread.new(
thread_id: 1234,
title: '',
response_count: 1001
)
]
end
def responses(board_id:, thread_id:)
[
Datpot::Response.new(
author: 'ๅ็กใใใ',
email: '',
posted_at: Time.utc(1999, 5, 30, 0, 0),
content: 'ใฆใใจ'
),
Datpot::Response.new(
author: 'ๅ็กใใใ',
email: 'sage',
posted_at: Time.utc(1999, 5, 31, 0, 0),
content: '2get'
)
]
end
end
let(:app) { App.new }
let(:decoded_body) { response.body.encode('utf-8', 'cp932') }
describe 'GET /redmine/subject.txt' do
let(:response) { get '/redmine/subject.txt' }
it { expect(response.status).to eq 200 }
it { expect(decoded_body).to eq "1234.dat<> (1001)\n" }
end
describe 'GET /redmine/dat/1234.dat' do
let(:response) { get '/redmine/dat/1234.dat' }
it { expect(response.status).to eq 200 }
it {
expect(decoded_body).to eq <<~DAT
ๅ็กใใใ<><>1999/05/30(ๆฅ) 00:00:00<>ใฆใใจ<>
ๅ็กใใใ<>sage<>1999/05/31(ๆ) 00:00:00<>2get<>
DAT
}
end
end
|
vzvu3k6k/redmine2ch
|
lib/datpot/response.rb
|
<gh_stars>0
# frozen_string_literal: true
require 'datpot/refinements/string'
module Datpot
  # One response (post) of a 2ch-style thread, serialized to the dat
  # line format: author<>email<>meta<>content<> (fields escaped).
  Response = Struct.new(:author, :email, :posted_at, :id, :content, keyword_init: true) do
    using Datpot::Refinements::String

    # Kanji day-of-week names indexed by Time#wday (Sunday first).
    # Fix: restored from mojibake left by a bad encoding round-trip.
    WDAY_KANJI = '日月火水木金土'

    # Formats a time as "YYYY/MM/DD(曜) HH:MM:SS", the 2ch date style.
    def self.format_time(time)
      day = WDAY_KANJI[time.wday]
      time.strftime("%Y/%m/%d(#{day}) %H:%M:%S")
    end

    # The full dat line for this response (newline-terminated).
    def dat
      "#{author.escape_dat}<>#{email.escape_dat}<>#{meta.escape_dat}<>#{content.escape_dat}<>\n"
    end

    private

    # Date field, with the poster ID appended when present.
    def meta
      str = self.class.format_time(posted_at)
      id ? "#{str} ID: #{id}" : str
    end
  end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch/resources.rb
|
# frozen_string_literal: true
require 'redmine2ch/resources/issue'
require 'redmine2ch/resources/journal'
|
vzvu3k6k/redmine2ch
|
lib/datpot/board.rb
|
# frozen_string_literal: true
require 'datpot/thread'
module Datpot
  # A 2ch board: an ordered collection of threads rendered into the
  # subject.txt index format.
  Board = Struct.new(:threads, keyword_init: true) do
    # Concatenation of every thread's subject.txt line, in order.
    def subject_txt
      threads.each_with_object(+'') { |thread, buffer| buffer << thread.subject_txt }
    end
  end
end
|
vzvu3k6k/redmine2ch
|
config.ru
|
# frozen_string_literal: true
# Make lib/ requirable, then hand the Sinatra app class to Rack.
$LOAD_PATH << './lib'
require 'redmine2ch/app'
run Redmine2ch::App
|
vzvu3k6k/redmine2ch
|
spec/datpot/board_spec.rb
|
<gh_stars>0
# frozen_string_literal: true
require 'ostruct'
require 'datpot/board'
RSpec.describe Datpot::Board do
describe '#subject_txt' do
subject { board.subject_txt }
let(:board) {
Datpot::Board.new(
threads: [
OpenStruct.new(subject_txt: "123.dat<>title (1)\n"),
OpenStruct.new(subject_txt: "234.dat<>title2 (1001)\n")
]
)
}
it {
is_expected.to eq <<~HERE
123.dat<>title (1)
234.dat<>title2 (1001)
HERE
}
end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch/resources/journal.rb
|
<reponame>vzvu3k6k/redmine2ch
# frozen_string_literal: true
require 'datpot/bbs'
require 'redmine2ch/resources/base'
module Redmine2ch
  module Resources
    # A Redmine issue journal entry (one item of the change history).
    class Journal < Base
      # Name of the user who made the change.
      def author
        dig(:user, :name)
      end

      # Change timestamp as a Time.
      def created_on
        Time.parse(dig(:created_on))
      end

      # Combined body: attribute-change summary and free-form notes,
      # joined by a "---" rule when both are present.
      def content
        [detail_content, notes].compact.join("\n---\n")
      end

      private

      def notes
        dig(:notes)
      end

      # One line per changed attribute; nil when there are none.
      def detail_content
        return if details.empty?
        details.map(&:content).join("\n")
      end

      # NOTE(review): assumes :details is always an array — a journal
      # hash without it would raise NoMethodError here; confirm.
      def details
        dig(:details).map { |raw| Detail.new(raw) }
      end

      # A single attribute change within a journal entry.
      class Detail < Base
        # "name: old -> new", rendering nil values as "(NULL)".
        def content
          "#{dig(:name)}: #{dig(:old_value) || "(NULL)"} -> #{dig(:new_value) || "(NULL)"}"
        end
      end
    end
  end
end
|
vzvu3k6k/redmine2ch
|
lib/datpot/refinements/string.rb
|
<reponame>vzvu3k6k/redmine2ch
# frozen_string_literal: true
module Datpot
  module Refinements
    module String
      refine ::String do
        # Escapes a value for the 2ch dat format: the field separator is
        # "<>", so literal angle brackets must become HTML entities, and
        # newlines become <br> (one physical line per response).
        # Fix: the replacement strings had been un-escaped into no-ops
        # ('<' => '<'); restored to '&lt;' / '&gt;'.
        def escape_dat
          gsub(/<|>/, '<' => '&lt;', '>' => '&gt;')
            .gsub("\n", '<br>')
        end
      end
    end
  end
end
|
vzvu3k6k/redmine2ch
|
lib/datpot/bbs.rb
|
<filename>lib/datpot/bbs.rb
# frozen_string_literal: true
require 'sinatra/base'
require 'datpot/board'
module Datpot
  # Minimal 2ch-compatible BBS server skeleton. Subclasses implement
  # #threads and #responses; this class renders them as Shift_JIS
  # (cp932) subject.txt / .dat payloads as 2ch clients expect.
  class Bbs < Sinatra::Application
    # Thread list for a board. Subclass hook.
    def threads(board_id:)
      raise NotImplementedError
    end

    # Response list for a thread. Subclass hook.
    def responses(board_id:, thread_id:)
      raise NotImplementedError
    end

    get '/:board_id/subject.txt' do
      # Fix: MIME type was 'plain/text'; the registered type is 'text/plain'.
      content_type 'text/plain'
      board = Datpot::Board.new(
        threads: threads(board_id: params['board_id'])
      )
      board.subject_txt.encode('cp932')
    end

    get '/:board_id/dat/:thread_id.dat' do
      content_type 'text/plain'
      thread = Datpot::Thread.new(
        responses: responses(
          board_id: params['board_id'],
          thread_id: params['thread_id']
        )
      )
      thread.dat.encode('cp932')
    end
  end
end
|
vzvu3k6k/redmine2ch
|
spec/redmine2ch/resources/journal_spec.rb
|
<reponame>vzvu3k6k/redmine2ch
# frozen_string_literal: true
require 'redmine2ch/resources/journal'
RSpec.describe Redmine2ch::Resources::Journal do
describe '#content' do
subject { journal.content }
context 'With notes' do
let(:journal) { described_class.new(notes: 'Journal notes', details: []) }
it 'returns notes' do
is_expected.to eq 'Journal notes'
end
end
context 'With attr details' do
let(:journal) {
detail = {
property: 'attr',
name: 'due_date',
old_value: nil,
new_value: '2022-01-21'
}
described_class.new(details: [detail])
}
it 'returns notes' do
is_expected.to eq <<~CONTENT.chomp
due_date: (NULL) -> 2022-01-21
CONTENT
end
end
context 'With attr details and notes' do
let(:journal) {
detail = {
property: 'attr',
name: 'due_date',
old_value: nil,
new_value: '2022-01-21'
}
described_class.new(details: [detail], notes: 'Journal notes')
}
it 'returns notes' do
is_expected.to eq <<~CONTENT.chomp
due_date: (NULL) -> 2022-01-21
---
Journal notes
CONTENT
end
end
end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch/resources/issue.rb
|
# frozen_string_literal: true
require 'datpot/bbs'
require 'redmine2ch/resources/base'
module Redmine2ch
  module Resources
    # Wraps a raw Redmine issue hash with typed accessors.
    class Issue < Base
      def id
        dig(:id)
      end

      def subject
        dig(:subject)
      end

      def author
        dig(:author, :name)
      end

      # Creation timestamp as a Time.
      def created_on
        Time.parse(dig(:created_on))
      end

      def description
        dig(:description)
      end

      # Change-history entries; present only when the issue was fetched
      # with include=journals (see RedmineClient#issue).
      def journals
        dig(:journals).map { |raw| Journal.new(raw) }
      end
    end
  end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch.rb
|
<gh_stars>0
# frozen_string_literal: true
require 'redmine2ch/app'
# Top-level namespace; requiring this file boots the app.
module Redmine2ch; end
|
vzvu3k6k/redmine2ch
|
spec/datpot/response_spec.rb
|
<filename>spec/datpot/response_spec.rb
# frozen_string_literal: true
require 'datpot/response'
RSpec.describe Datpot::Response do
describe '#dat' do
subject { response.dat }
context 'Without id' do
let(:response) {
Datpot::Response.new(
author: 'ๅ็กใใใ',
email: 'sage',
posted_at: Time.utc(1999, 5, 30, 0, 0),
content: 'ใฆใใจ'
)
}
it { is_expected.to eq "ๅ็กใใใ<>sage<>1999/05/30(ๆฅ) 00:00:00<>ใฆใใจ<>\n" }
end
context 'With id' do
let(:response) {
Datpot::Response.new(
author: 'ๅ็กใใใ',
email: 'sage',
posted_at: Time.utc(1999, 5, 30, 0, 0),
id: 'mokorikomo',
content: 'ใฆใใจ'
)
}
it { is_expected.to eq "ๅ็กใใใ<>sage<>1999/05/30(ๆฅ) 00:00:00 ID: mokorikomo<>ใฆใใจ<>\n" }
end
context 'With multiline content' do
let(:response) {
Datpot::Response.new(
author: 'ๅ็กใใใ',
email: 'sage',
posted_at: Time.utc(1999, 5, 30, 0, 0),
content: "ใฆใใจ\nใฆใใจ"
)
}
it { is_expected.to eq "ๅ็กใใใ<>sage<>1999/05/30(ๆฅ) 00:00:00<>ใฆใใจ<br>ใฆใใจ<>\n" }
end
context 'With content contains brackets' do
let(:response) {
Datpot::Response.new(
author: '<<ๅ็กใใใ>>',
email: 'sage<>',
posted_at: Time.utc(1999, 5, 30, 0, 0),
content: '(>_<)'
)
}
it { is_expected.to eq "<<ๅ็กใใใ>><>sage<><>1999/05/30(ๆฅ) 00:00:00<>(>_<)<>\n" }
end
end
end
|
vzvu3k6k/redmine2ch
|
spec/datpot/thread_spec.rb
|
<gh_stars>0
# frozen_string_literal: true
require 'datpot/board'
RSpec.describe Datpot::Thread do
describe '#subject_txt' do
subject { thread.subject_txt }
context 'With response_count' do
let(:thread) {
Datpot::Thread.new(
thread_id: 123,
title: 'title',
response_count: 1
)
}
it { is_expected.to eq "123.dat<>title (1)\n" }
end
context 'With responses' do
let(:thread) {
Datpot::Thread.new(
thread_id: 123,
title: 'title',
responses: [Object.new]
)
}
it { is_expected.to eq "123.dat<>title (1)\n" }
end
end
end
|
vzvu3k6k/redmine2ch
|
lib/datpot/thread.rb
|
<reponame>vzvu3k6k/redmine2ch<gh_stars>0
# frozen_string_literal: true
require 'datpot/response'
module Datpot
  # A single 2ch-style thread. Either response_count or a concrete
  # responses array must be set for #subject_txt to work.
  Thread = Struct.new(:thread_id, :title, :response_count, :responses, keyword_init: true) do
    # One subject.txt index line: "<id>.dat<><title> (<count>)\n".
    def subject_txt
      count = response_count || responses.size
      "#{thread_id}.dat<>#{title} (#{count})\n"
    end

    # Full .dat payload. Per the dat format, the first response line
    # carries the thread title as its trailing field.
    def dat
      head, *tail = responses
      lines = ["#{head.dat.chomp}#{title}\n"]
      tail.each { |response| lines << response.dat }
      lines.join
    end
  end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch/app.rb
|
# frozen_string_literal: true
require 'datpot/bbs'
require 'datpot/thread'
require 'datpot/response'
require 'redmine2ch/redmine_client'
module Redmine2ch
  # Adapts a Redmine project to the 2ch BBS protocol: projects are
  # boards, issues are threads, and the issue description plus its
  # journals are the responses.
  class App < Datpot::Bbs
    def threads(board_id:)
      redmine_client.issues(project_id: board_id).map { |issue|
        # NOTE(review): one extra API call per issue (N+1) just to count
        # journals — acceptable for small projects, confirm for large ones.
        detailed_issue = redmine_client.issue(id: issue.id)
        Datpot::Thread.new(
          thread_id: issue.id,
          title: issue.subject,
          # +1: the issue description itself is the first response.
          response_count: detailed_issue.journals.size + 1
        )
      }
    end

    # First response is the issue body; journal entries follow in order.
    def responses(board_id:, thread_id:)
      issue = redmine_client.issue(id: thread_id)
      [
        Datpot::Response.new(
          author: issue.author,
          email: '',
          posted_at: issue.created_on,
          content: issue.description
        )
      ] + issue.journals.map { |journal|
        Datpot::Response.new(
          author: journal.author,
          email: '',
          posted_at: journal.created_on,
          content: journal.content
        )
      }
    end

    private

    # Memoized client configured from REDMINE_API_KEY / REDMINE_URL.
    def redmine_client
      @redmine_client ||= RedmineClient.new(
        api_key: ENV['REDMINE_API_KEY'],
        root_url: ENV['REDMINE_URL']
      )
    end
  end
end
|
vzvu3k6k/redmine2ch
|
lib/redmine2ch/resources/base.rb
|
<gh_stars>0
# frozen_string_literal: true
require 'datpot/bbs'
module Redmine2ch
  module Resources
    # Common base for API resource wrappers: stores the raw response
    # hash and exposes Hash#dig-style access for subclasses.
    class Base
      def initialize(payload)
        @raw = payload
      end

      # Digs into the wrapped hash; nil when any key along the path is
      # missing.
      def dig(*keys)
        @raw.dig(*keys)
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/storage/feature_keys_migration.rb
|
# frozen_string_literal: true
module FeatureFlagger
  module Storage
    # One-shot migration that rewrites legacy feature keys into the
    # current storage layout (see #call for the key mapping).
    class FeatureKeysMigration
      # from_redis: raw Redis client holding the legacy keys.
      # to_control: FeatureFlagger::Control that writes the new format.
      def initialize(from_redis, to_control)
        @from_redis = from_redis
        @to_control = to_control
      end

      # call migrates features key from the old fashioned to the new
      # format.
      #
      # It must replicate feature keys with changes:
      #
      #   from "avenue:traffic_lights" => 42
      #   to   "avenue:42" => traffic_lights
      def call
        @from_redis.scan_each(match: "*", count: FeatureFlagger::Storage::Redis::SCAN_EACH_BATCH_SIZE) do |redis_key|
          # filter out resource_keys (already in the new format)
          next if redis_key.start_with?("#{FeatureFlagger::Storage::Redis::RESOURCE_PREFIX}:")

          migrate_key(redis_key)
        end
      end

      private

      # The released-to-all set is migrated differently from the
      # per-feature release sets.
      def migrate_key(key)
        return migrate_release_to_all(key) if feature_released_to_all?(key)

        migrate_release(key)
      end

      # Legacy released-to-all set: members are feature keys.
      def migrate_release_to_all(key)
        features = @from_redis.smembers(key)
        features.each do |feature_key|
          @to_control.release_to_all(feature_key)
        end
      end

      def feature_released_to_all?(key)
        FeatureFlagger::Control::RELEASED_FEATURES == key
      end

      # Legacy per-feature set: members are resource ids.
      def migrate_release(key)
        resource_ids = @from_redis.smembers(key)
        resource_ids.each do |id|
          @to_control.release(key, id)
        end
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/notifier.rb
|
module FeatureFlagger
  # Builds release/unrelease event payloads and forwards them to a
  # caller-supplied Proc; falls back to a no-op when none is given.
  class Notifier
    attr_reader :notify

    RELEASE = 'release'.freeze
    UNRELEASE = 'unrelease'.freeze
    RELEASE_TO_ALL = 'release_to_all'.freeze
    UNRELEASE_TO_ALL = 'unrelease_to_all'.freeze

    # Shared no-op callback (replaces the camelCase nullNotify helper).
    NULL_NOTIFY = lambda { |_event| }

    def initialize(notify = nil)
      @notify = valid_notify?(notify) ? notify : NULL_NOTIFY
    end

    # NOTE: shadows Kernel#send on purpose; callers invoke
    # notifier.send(operation, feature_key, resource_id).
    def send(operation, feature_key, resource_id = nil)
      @notify.call(build_event(operation, extract_resource_from_key(feature_key), feature_key, resource_id))
    end

    private

    # A notifier is valid when it is a Proc/lambda (nil never is, so the
    # previous explicit nil check was redundant).
    def valid_notify?(notify)
      notify.is_a?(Proc)
    end

    # Resource (model) name encoded in the feature key; keys that cannot
    # be parsed are reported under a placeholder name.
    def extract_resource_from_key(key)
      Storage::Keys.extract_resource_name_from_feature_key(
        key
      )
    rescue FeatureFlagger::Storage::Keys::InvalidResourceNameError
      "legacy key"
    end

    def build_event(operation, resource_name, feature_key, resource_id)
      {
        type: operation,
        model: resource_name,
        feature: feature_key,
        id: resource_id
      }
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/control_spec.rb
|
<reponame>ResultadosDigitais/feature_flagger
require 'spec_helper'
module FeatureFlagger
RSpec.describe Control do
let(:redis) { FakeRedis::Redis.new }
let(:notify) { spy(lambda { |event| }, :is_a? => Proc) }
let(:notifier) { Notifier.new(notify)}
let(:storage) { Storage::Redis.new(redis) }
let(:cache_store) { nil }
let(:control) { Control.new(storage, notifier, cache_store) }
let(:key) { 'account:email_marketing:whitelabel' }
let(:resource_id) { 'resource_id' }
let(:resource_name) { 'account' }
before do
redis.flushdb
end
describe '#released?' do
let(:result) { control.released?(key, resource_id) }
context 'when resource entity id has no access to release_key' do
it { expect(result).to be_falsey }
context 'and a feature is release to all' do
before { control.release_to_all(key) }
it { expect(result).to be_truthy }
end
end
context 'when resource entity id has access to release_key' do
before { control.release(key, resource_id) }
it { expect(result).to be_truthy }
context 'and a feature is release to all' do
before { control.release_to_all(key) }
it { expect(result).to be_truthy }
end
end
context 'when cache is configured' do
let(:cache_store) { ActiveSupport::Cache::MemoryStore.new }
it 'only hits the storage once' do
expect(storage).to receive(:has_value?).twice
10.times { control.released?(key, resource_id) }
end
end
end
describe '#release' do
it 'adds resource_id to storage' do
control.release(key, resource_id)
expect(control).to be_released(key, resource_id)
end
it 'sends to notifer the release event' do
expect(notify).to receive(:call).with({ type: FeatureFlagger::Notifier::RELEASE,
model: 'account',
feature: key,
id: resource_id })
control.release(key, resource_id)
end
end
describe '#releases' do
it 'return all releases to a given resource' do
control.release(key, resource_id)
resource_name = 'account'
expect(control.releases(resource_name, resource_id)).to match_array(['account:email_marketing:whitelabel'])
end
it 'does not return releases from another resource' do
control.release(key, resource_id)
control.release_to_all('user:another_rollout:global_whitelabel')
resource_name = 'account'
expect(control.releases(resource_name, resource_id)).to match_array(['account:email_marketing:whitelabel'])
end
context 'when cache is configured' do
let(:cache_store) { ActiveSupport::Cache::MemoryStore.new }
it 'only hits the storage once' do
control.release(key, resource_id)
expect(storage).to receive(:fetch_releases).once
2.times { control.releases(resource_name, resource_id) }
end
it 'hits the storage n times when skip_cache is provided' do
control.release(key, resource_id)
expect(storage).to receive(:fetch_releases).twice
2.times { control.releases(resource_name, resource_id, skip_cache: true) }
end
end
end
describe '#release_to_all' do
it 'adds feature_key to storage' do
control.release(key, resource_id)
control.release_to_all(key)
expect(control.releases(resource_name, 1)).to include(key)
expect(control.released_features_to_all).to include(key)
end
it 'sends to notifer the release to all event' do
expect(notify).to receive(:call).with({ type: FeatureFlagger::Notifier::RELEASE_TO_ALL,
model: 'account',
feature: key,
id: nil })
control.release_to_all(key)
end
end
describe '#unrelease' do
it 'removes resource_id from storage' do
control.release(key, resource_id)
control.unrelease(key, resource_id)
expect(control.released?(key, resource_id)).to be_falsey
end
it 'sends to notifer the unrelease event' do
expect(notify).to receive(:call).with({ type: FeatureFlagger::Notifier::UNRELEASE,
model: 'account',
feature: key,
id: resource_id })
control.unrelease(key, resource_id)
end
end
describe '#unrelease_to_all' do
it 'removes feature_key to storage' do
control.release_to_all(key)
control.unrelease_to_all(key)
expect(control.released_features_to_all).not_to include(key)
end
it 'removes added resources' do
control.release(key, 1)
control.unrelease_to_all(key)
expect(control.released?(key, 1)).to be_falsey
expect(control.released_features_to_all).not_to include(key)
end
it 'sends to notifer the unrelease to all event' do
expect(notify).to receive(:call).with({ type: FeatureFlagger::Notifier::UNRELEASE_TO_ALL,
model: 'account',
feature: key,
id: nil })
control.unrelease_to_all(key)
end
end
describe '#resource_ids' do
subject { control.resource_ids(key) }
it 'returns all the values to given key' do
control.release(key, 1)
control.release(key, 2)
control.release(key, 15)
expect(subject).to match_array %w[1 2 15]
end
it 'hits the storage n times without cache' do
expect(storage).to receive(:all_values).twice
2.times { control.resource_ids(key) }
end
context 'when caching is configured' do
let(:cache_store) { ActiveSupport::Cache::MemoryStore.new }
it 'only hits the storage once' do
expect(storage).to receive(:all_values).once
2.times { control.resource_ids(key) }
end
it 'hits the storage n times when skip_cache is provided' do
expect(storage).to receive(:all_values).twice
2.times { control.resource_ids(key, skip_cache: true) }
end
end
end
describe '#released_features_to_all' do
subject { control.released_features_to_all }
it 'returns all the values to given features' do
control.release_to_all('account:feature:name1')
control.release_to_all('account:feature:name2')
control.release_to_all('account:feature:name15')
expect(subject).to match_array %w[account:feature:name1 account:feature:name2 account:feature:name15]
end
context 'when caching is configured' do
let(:cache_store) { ActiveSupport::Cache::MemoryStore.new }
it 'only hits the storage once' do
expect(storage).to receive(:all_values).once
5.times { control.released_features_to_all }
end
end
end
describe '#released_to_all?' do
let(:result) { control.released_to_all?(key) }
context 'when feature was not released to all' do
it { expect(result).to be_falsey }
end
context 'when feature was released to all' do
before { control.release_to_all(key) }
it { expect(result).to be_truthy }
end
context 'when caching is configured' do
let(:cache_store) { ActiveSupport::Cache::MemoryStore.new }
it 'only hits the storage once' do
expect(storage).to receive(:all_values).once
5.times { control.released_features_to_all }
end
end
end
describe '#search_keys' do
before do
control.release("model:namespace:1", 1)
control.release("model:namespace:2", 2)
control.release("model:exclusive", 3)
end
context 'without matching result' do
it { expect(control.search_keys('invalid').to_a).to be_empty }
end
context 'with matching results' do
it { expect(control.search_keys("*ame*pac*").to_a).to contain_exactly('model:namespace:1', 'model:namespace:2') }
end
end
describe '#feature_keys' do
it 'returns only feature keys in storage' do
another_key = "account:some_other_feature"
control.release(key, resource_id)
control.release_to_all(another_key)
expect(control.feature_keys).to match_array([key])
end
end
end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/manager.rb
|
<gh_stars>10-100
module FeatureFlagger
  # Maintenance helpers for feature keys persisted in storage.
  class Manager
    # Feature keys present in storage but no longer declared in the
    # rollout YAML mapping.
    def self.detached_feature_keys
      persisted_features = FeatureFlagger.control.feature_keys
      mapped_feature_keys = FeatureFlagger.config.mapped_feature_keys
      persisted_features - mapped_feature_keys
    end

    # Unreleases a key that is no longer mapped in the rollout YAML.
    # Accepts split segments (:account, :email, :score) or a single
    # colon-joined key ("account:email:score") — the latter is what the
    # cleanup rake task passes. Raises if the key is still mapped.
    def self.cleanup_detached(resource_name, *feature_key)
      # Fix: split colon-joined resource names so the "still mapped"
      # guard also works for full keys (previously dig looked up the
      # whole joined string as one segment and never matched).
      complete_feature_key = resource_name.to_s.split(':') + feature_key.map(&:to_s)
      key_value = FeatureFlagger.config.info.dig(*complete_feature_key)
      raise "key is still mapped" if key_value
      FeatureFlagger.control.unrelease_to_all(complete_feature_key.join(':'))
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/tasks/feature_flagger.rake
|
namespace :feature_flagger do
  desc "cleaning up keys from storage that are no longer in the rollout.yml file"
  task :cleanup_removed_rollouts => :environment do
    keys = FeatureFlagger::Manager.detached_feature_keys
    puts "Found keys to remove: #{keys}"
    keys.each do |key|
      # NOTE(review): passes the full colon-joined key as the single
      # resource_name argument of cleanup_detached(resource_name, *rest)
      # — confirm the Manager guard handles that shape.
      FeatureFlagger::Manager.cleanup_detached key
    end
  end

  desc "Synchronizes resource_keys with feature_keys, recommended to apps that installed feature flagger before v.1.2.0"
  task :migrate_to_resource_keys => :environment do
    storage = FeatureFlagger.config.storage
    storage.synchronize_feature_and_resource
  end

  desc "Release feature to given identifiers, Usage: `$ bundle exec rake feature_flagger:release\[Account,email_marketing:whitelabel,1,2,3,4\]`"
  task :release, [:entity_name, :feature_key] => :environment do |_, args|
    entity = args.entity_name.constantize
    # Trailing rake arguments (after the two named ones) are the ids.
    entity_ids = args.extras
    entity.release_id(entity_ids, *args.feature_key.split(':'))
  end

  desc "Unrelease feature to given identifiers, Usage: `$ bundle exec rake feature_flagger:unrelease\[Account,email_marketing:whitelabel,1,2,3,4\]`"
  task :unrelease, [:entity_name, :feature_key] => :environment do |_, args|
    entity, entity_ids = args.entity_name.constantize, args.extras
    entity.unrelease_id(entity_ids, *args.feature_key.split(':'))
  end

  desc "Release one feature to all entity ids, Usage: `$ bundle exec rake feature_flagger:release_to_all\[Account,email_marketing:whitelabel\]`"
  task :release_to_all, [:entity_name, :feature_key] => :environment do |_, args|
    entity = args.entity_name.constantize
    entity.release_to_all(*args.feature_key.split(':'))
  end

  desc "Unrelease one feature to all entity ids, Usage: `$ bundle exec rake feature_flagger:unrelease_to_all\[Account,email_marketing:whitelabel\]`"
  task :unrelease_to_all, [:entity_name, :feature_key] => :environment do |_, args|
    entity = args.entity_name.constantize
    entity.unrelease_to_all(*args.feature_key.split(':'))
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/core_ext.rb
|
<gh_stars>10-100
begin
  require 'active_support/core_ext/string/inflections'
rescue LoadError
  # ActiveSupport is unavailable: install a minimal String#constantize
  # with the same contract — resolve 'A::B' from Object downward,
  # delegating unknown names to const_missing.
  unless ''.respond_to?(:constantize)
    class String
      def constantize
        parts = split('::')
        parts.shift if parts.empty? || parts.first.empty?
        parts.reduce(Object) do |namespace, part|
          if namespace.const_defined?(part)
            namespace.const_get(part)
          else
            namespace.const_missing(part)
          end
        end
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/manager_spec.rb
|
require 'spec_helper'
module FeatureFlagger
RSpec.describe Manager do
describe 'detached_feature_keys' do
let(:redis) { FakeRedis::Redis.new }
let(:storage) { Storage::Redis.new(redis) }
before do
FeatureFlagger.configure do |config|
config.storage = storage
end
filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
FeatureFlagger.config.yaml_filepath = filepath
# All good here
FeatureFlagger.control.release_to_all('feature_flagger_dummy_class:email_marketing:behavior_score')
FeatureFlagger.control.release('other_feature_flagger_dummy_class:feature_a:feature_a_1:feature_a_1_1', 0)
FeatureFlagger.control.release('other_feature_flagger_dummy_class:feature_a:feature_a_1:feature_a_1_2', 0)
FeatureFlagger.control.release('other_feature_flagger_dummy_class:feature_b', 0)
# Detached keys
FeatureFlagger.control.release('other_feature_flagger_dummy_class:feature_a:feature_a_1:feature_a_1_3', 0)
FeatureFlagger.control.release('other_feature_flagger_dummy_class:feature_d', 0)
end
it 'returns all detached feature keys' do
expect(described_class.detached_feature_keys).to match_array([
'other_feature_flagger_dummy_class:feature_a:feature_a_1:feature_a_1_3',
'other_feature_flagger_dummy_class:feature_d'
])
end
end
describe 'cleanup_detached' do
context "detached feature key" do
let(:redis) { FakeRedis::Redis.new }
let(:storage) { Storage::Redis.new(redis) }
let(:feature_key) { 'other_feature_flagger_dummy_class:feature_d' }
before do
FeatureFlagger.configure do |config|
config.storage = storage
end
FeatureFlagger.control.release(feature_key, 0)
filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
FeatureFlagger.config.yaml_filepath = filepath
end
it 'cleanup key' do
described_class.cleanup_detached(
:other_feature_flagger_dummy_class, :feature_d
)
expect(described_class.detached_feature_keys).not_to include feature_key
end
end
context "mapped feature key" do
before do
filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
FeatureFlagger.config.yaml_filepath = filepath
end
it 'do not cleanup key' do
expect {
described_class.cleanup_detached(
:feature_flagger_dummy_class, :email_marketing, :behavior_score
)
}.to raise_error("key is still mapped")
end
end
end
end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/model.rb
|
module FeatureFlagger
  # Model provides convenient feature-flag methods for Rails models:
  #   class Account
  #     include FeatureFlagger::Model
  #   end
  #
  # Example:
  #   Account.first.rollout?([:email_marketing, :new_awesome_feature])
  #   #=> true
  module Model
    def self.included(base)
      base.extend ClassMethods
    end

    # True when the feature is released for this record (individually or
    # to all).
    def released?(*feature_key, **options)
      self.class.released_id?(feature_flagger_identifier, *feature_key, **options)
    end

    # Releases the feature for this record only.
    def release(*feature_key)
      self.class.release_id(feature_flagger_identifier, *feature_key)
    end

    # Revokes the feature for this record.
    # NOTE(review): uses `id` directly rather than
    # feature_flagger_identifier (unlike released?/release) — confirm a
    # custom identifier_field should not apply here.
    def unrelease(*feature_key)
      resource_name = self.class.feature_flagger_model_settings.entity_name
      feature = Feature.new(feature_key, resource_name)
      FeatureFlagger.control.unrelease(feature.key, id)
    end

    # All feature keys currently released for this record.
    def releases(options = {})
      resource_name = self.class.feature_flagger_model_settings.entity_name
      FeatureFlagger.control.releases(resource_name, id, options)
    end

    private

    # Value identifying this record in storage (defaults to #id).
    def feature_flagger_identifier
      public_send(self.class.feature_flagger_model_settings.identifier_field)
    end

    module ClassMethods
      # Configuration entry point:
      #   feature_flagger { |settings| settings.identifier_field = :uuid }
      def feature_flagger
        raise ArgumentError unless block_given?
        yield feature_flagger_model_settings
      end

      def released_id?(resource_id, *feature_key, **options)
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.released?(feature.key, resource_id, options)
      end

      def release_id(resource_id, *feature_key)
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.release(feature.key, resource_id)
      end

      def unrelease_id(resource_id, *feature_key)
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.unrelease(feature.key, resource_id)
      end

      def all_released_ids_for(*feature_key, **options)
        feature_key.flatten!
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.resource_ids(feature.key, options)
      end

      def release_to_all(*feature_key)
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.release_to_all(feature.key)
      end

      def unrelease_to_all(*feature_key)
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.unrelease_to_all(feature.key)
      end

      def released_features_to_all(options = {})
        FeatureFlagger.control.released_features_to_all(options)
      end

      def released_to_all?(*feature_key, **options)
        feature = Feature.new(feature_key, feature_flagger_model_settings.entity_name)
        FeatureFlagger.control.released_to_all?(feature.key, options)
      end

      # Keys in storage for this model no longer mapped in the rollout
      # YAML, returned without the resource-name prefix.
      def detached_feature_keys
        rollout_resource_name = feature_flagger_model_settings.entity_name
        persisted_features = FeatureFlagger.control.search_keys("#{rollout_resource_name}:*").to_a
        mapped_feature_keys = FeatureFlagger.config.mapped_feature_keys(rollout_resource_name)
        (persisted_features - mapped_feature_keys).map { |key| key.sub("#{rollout_resource_name}:",'') }
      end

      # Unreleases a detached key; raises when it is still mapped.
      def cleanup_detached(*feature_key)
        complete_feature_key = feature_key.map(&:to_s).insert(0, feature_flagger_model_settings.entity_name)
        key_value = FeatureFlagger.config.info.dig(*complete_feature_key)
        raise "key is still mapped" if key_value
        FeatureFlagger.control.unrelease_to_all(complete_feature_key.join(':'))
      end

      # Derives the storage resource name from the class name
      # (underscored; namespace separators flattened to underscores).
      # NOTE(review): Module#to_s may return a frozen string on modern
      # Rubies, in which case these in-place gsub!/tr!/downcase! calls
      # would raise FrozenError — confirm supported Ruby versions.
      def rollout_resource_name
        klass_name = self.to_s
        klass_name.gsub!(/::/, '_')
        klass_name.gsub!(/([A-Z\d]+)([A-Z][a-z])/,'\1_\2')
        klass_name.gsub!(/([a-z\d])([A-Z])/,'\1_\2')
        klass_name.tr!("-", "_")
        klass_name.downcase!
        klass_name
      end

      # Memoized per-class settings (identifier field + entity name).
      def feature_flagger_model_settings
        @feature_flagger_model_settings ||= FeatureFlagger::ModelSettings.new(
          identifier_field: :id,
          entity_name: rollout_resource_name
        )
      end

      # NOTE(review): mirrors the instance-level private helper; appears
      # unused at class level — confirm before relying on it.
      def feature_flagger_identifier
        public_send(feature_flagger_model_settings.identifier_field)
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/configuration_spec.rb
|
require 'spec_helper'
module FeatureFlagger
  # Unit tests for Configuration: default storage, cache-store coercion,
  # YAML-derived feature keys and the notifier callback default.
  RSpec.describe Configuration do
    describe '.storage' do
      let(:configuration) { described_class.new }

      context 'no storage set' do
        it 'returns a Redis storage by default' do
          expect(configuration.storage).to be_a(FeatureFlagger::Storage::Redis)
        end
      end

      context 'storage set' do
        let(:storage) { double('storage') }
        before { configuration.storage = storage }

        it 'returns storage' do
          expect(configuration.storage).to eq storage
        end
      end
    end

    describe '.cache_store' do
      let(:configuration) { described_class.new }

      context 'no cache_store set' do
        it 'returns nil by default' do
          expect(configuration.cache_store).to be_nil
        end
      end

      # Assigning nil explicitly is coerced to a never-caching NullStore.
      context 'cache_store set to :null_store when explicit set to nil' do
        it 'returns an ActiveSupport::Cache::NullStore instance' do
          configuration.cache_store = nil
          expect(configuration.cache_store).to be_an(ActiveSupport::Cache::NullStore)
        end
      end

      context 'cache_store set to :memory_store' do
        it 'returns an ActiveSupport::Cache::MemoryStore instance' do
          configuration.cache_store = :memory_store
          expect(configuration.cache_store).to be_an(ActiveSupport::Cache::MemoryStore)
        end

        # An [name, options] pair forwards options to the store constructor.
        it 'allows custom params' do
          configuration.cache_store = :memory_store, { expires_in: 100 }
          expect(configuration.cache_store.options[:expires_in]).to eq(100)
        end
      end
    end

    describe 'mapped_feature_keys' do
      let(:configuration) { described_class.new }

      before do
        filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
        configuration.yaml_filepath = filepath
      end

      context 'without resource name' do
        it 'returns all mapped features keys' do
          expect(configuration.mapped_feature_keys).to contain_exactly(
            'feature_flagger_dummy_class:email_marketing:behavior_score',
            'feature_flagger_dummy_class:email_marketing:whitelabel',
            'other_feature_flagger_dummy_class:feature_a:feature_a_1:feature_a_1_1',
            'other_feature_flagger_dummy_class:feature_a:feature_a_1:feature_a_1_2',
            'other_feature_flagger_dummy_class:feature_b',
            'other_feature_flagger_dummy_class:feature_c:feature_c_1',
            'other_feature_flagger_dummy_class:feature_c:feature_c_2',
            'other_feature_flagger_dummy_class:feature_c:feature_c_3:feature_c_3_1:feature_c_3_1_1',
            'account:email_marketing:behavior_score'
          )
        end
      end

      context 'with resource name' do
        it 'returns mapped features keys for feature_flagger_dummy_class resource' do
          expect(configuration.mapped_feature_keys('feature_flagger_dummy_class')).to contain_exactly(
            'feature_flagger_dummy_class:email_marketing:behavior_score',
            'feature_flagger_dummy_class:email_marketing:whitelabel'
          )
        end
      end
    end

    describe '.notifier_callback' do
      let(:configuration) { described_class.new }

      context 'no notifier_callback set' do
        it 'returns nil if no callback is set' do
          expect(configuration.notifier_callback).to be_nil
        end
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/control.rb
|
module FeatureFlagger
  # Facade over the storage backend: performs release/unrelease operations,
  # notifies the configured callback about every change, and optionally
  # memoizes read queries through an ActiveSupport-compatible cache store.
  class Control
    attr_reader :storage

    # Name of the storage set holding feature keys released to everyone.
    RELEASED_FEATURES = 'released_features'

    def initialize(storage, notifier, cache_store = nil)
      @storage = storage
      @notifier = notifier
      @cache_store = cache_store
    end

    # True when the feature is released globally or to this specific resource.
    def released?(feature_key, resource_id, options = {})
      cache("released/#{feature_key}/#{resource_id}", options) do
        @storage.has_value?(RELEASED_FEATURES, feature_key) ||
          @storage.has_value?(feature_key, resource_id)
      end
    end

    # Releases a feature for a single resource, notifying the callback first.
    def release(feature_key, resource_id)
      entity = Storage::Keys.extract_resource_name_from_feature_key(feature_key)
      @notifier.send(FeatureFlagger::Notifier::RELEASE, feature_key, resource_id)
      @storage.add(feature_key, entity, resource_id)
    end

    # All feature keys currently released to the given resource.
    def releases(resource_name, resource_id, options = {})
      cache("releases/#{resource_name}/#{resource_id}", options) do
        @storage.fetch_releases(resource_name, resource_id, RELEASED_FEATURES)
      end
    end

    # Releases a feature to every resource at once.
    def release_to_all(feature_key)
      @notifier.send(FeatureFlagger::Notifier::RELEASE_TO_ALL, feature_key)
      @storage.add_all(RELEASED_FEATURES, feature_key)
    end

    # Revokes a feature from a single resource, notifying the callback first.
    def unrelease(feature_key, resource_id)
      entity = Storage::Keys.extract_resource_name_from_feature_key(feature_key)
      @notifier.send(FeatureFlagger::Notifier::UNRELEASE, feature_key, resource_id)
      @storage.remove(feature_key, entity, resource_id)
    end

    # Revokes a globally released feature.
    def unrelease_to_all(feature_key)
      @notifier.send(FeatureFlagger::Notifier::UNRELEASE_TO_ALL, feature_key)
      @storage.remove_all(RELEASED_FEATURES, feature_key)
    end

    # All resource ids the feature was individually released to.
    def resource_ids(feature_key, options = {})
      cache("all_values/#{feature_key}", options) do
        @storage.all_values(feature_key)
      end
    end

    # All feature keys released to everyone.
    def released_features_to_all(options = {})
      cache("released_features_to_all/#{RELEASED_FEATURES}", options) do
        @storage.all_values(RELEASED_FEATURES)
      end
    end

    # True when the feature belongs to the globally released set.
    def released_to_all?(feature_key, options = {})
      cache("has_value/#{RELEASED_FEATURES}/#{feature_key}", options) do
        @storage.has_value?(RELEASED_FEATURES, feature_key)
      end
    end

    # DEPRECATED: this method will be removed from public api on v2.0 version.
    # use instead the feature_keys method.
    def search_keys(query)
      @storage.search_keys(query)
    end

    # Every stored feature key, excluding the internal global set key.
    def feature_keys
      @storage.feature_keys - [FeatureFlagger::Control::RELEASED_FEATURES]
    end

    # Routes the block through the cache store when one is configured;
    # options[:skip_cache] forces a recompute.
    def cache(name, options, &block)
      return block.call unless @cache_store

      @cache_store.fetch(name, force: options[:skip_cache], &block)
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/notifier_spec.rb
|
<filename>spec/feature_flagger/notifier_spec.rb
require 'spec_helper'
module FeatureFlagger
  # Tests for Notifier#send: events are forwarded to the configured callback;
  # a missing callback must be a silent no-op.
  RSpec.describe Notifier do
    let(:feature_key) { 'account:email_marketing:whitelabel' }
    # A key with no feature segment ("account") is treated as a legacy key.
    let(:legacy_feature_key) { 'account' }
    let(:resource_id) { 'resource_id' }
    let(:resource_name) { 'account' }
    let(:feature_action) { FeatureFlagger::Notifier::RELEASE }

    describe '#send' do
      context 'With a callback configured' do
        let(:notifier_callback) { spy(lambda { |event| }, :is_a? => Proc)}
        let(:notifier) { Notifier.new(notifier_callback)}
        # Baseline event payload the callback is expected to receive.
        let(:generic_event) {
          {
            type: feature_action,
            model: resource_name,
            feature: feature_key,
            id: resource_id
          }
        }

        context 'When trigger the expected event' do
          let(:event) { generic_event.merge({ type: feature_action})}
          before { notifier.send(feature_action, feature_key, resource_id) }

          it { expect(notifier_callback).to have_received(:call).with(generic_event) }

          # Legacy keys are reported with model: "legacy key".
          context 'When release a legacy key' do
            let(:event) { generic_event.merge({ model: "legacy key", feature: legacy_feature_key})}
            before { notifier.send(feature_action, legacy_feature_key, resource_id) }

            it { expect(notifier_callback).to have_received(:call).with(event) }
          end
        end
      end

      context 'When not have a callback configured' do
        let(:notifier) { Notifier.new(nil)}
        let(:event) {
          {
            type: feature_action,
            model: resource_name,
            feature: feature_key,
            id: resource_id
          }
        }

        it 'Must not raise error' do
          expect { notifier.send(feature_action, feature_key, resource_id) }.not_to raise_error
        end
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger.rb
|
require 'yaml'
require 'feature_flagger/version'
require 'feature_flagger/storage/redis'
require 'feature_flagger/storage/feature_keys_migration'
require 'feature_flagger/control'
require 'feature_flagger/model'
require 'feature_flagger/model_settings'
require 'feature_flagger/feature'
require 'feature_flagger/configuration'
require 'feature_flagger/manager'
require 'feature_flagger/railtie'
require 'feature_flagger/notifier'
module FeatureFlagger
  # Module-level access point holding the singleton configuration, notifier
  # and control instances.
  #
  # NOTE: the previous implementation used @@class variables; declared inside
  # `class << self` methods those attach to the singleton class, which is a
  # well-known Ruby pitfall. Module-level instance variables behave the same
  # for this access pattern and are the idiomatic form.
  class << self
    # Resets the memoized configuration, control and notifier, then yields
    # the fresh configuration when a block is given. Safe to call repeatedly.
    def configure
      @configuration = nil
      @control = nil
      @notifier = nil
      yield config if block_given?
    end

    # Lazily-built global configuration.
    def config
      @configuration ||= Configuration.new
    end

    # Notifier wrapping the configured callback (or nil for a no-op).
    def notifier
      @notifier ||= Notifier.new(config.notifier_callback)
    end

    # Control facade built from the configured storage, notifier and cache.
    def control
      @control ||= Control.new(config.storage, notifier, config.cache_store)
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/feature.rb
|
<filename>lib/feature_flagger/feature.rb
module FeatureFlagger
  # A single feature entry from the rollout YAML. Resolves the composite key
  # and validates on construction that the feature is documented there.
  class Feature
    def initialize(feature_key, resource_name = nil)
      @feature_key = resolve_key(feature_key, resource_name)
      @doc = FeatureFlagger.config.info
      fetch_data
    end

    # Human-readable description taken from the YAML entry.
    def description
      @data['description']
    end

    # Fully-qualified key, e.g. "account:email_marketing:whitelabel".
    def key
      @feature_key.join(':')
    end

    private

    # Flattens the key parts, prepends the resource name when given, and
    # normalizes every part to a String.
    def resolve_key(feature_key, resource_name)
      parts = Array(feature_key).flatten
      parts = [resource_name, *parts] if resource_name
      parts.map(&:to_s)
    end

    # Loads and memoizes the YAML entry; raises when the key is unknown.
    def fetch_data
      @data ||= find_value(@doc, *@feature_key)
      raise FeatureFlagger::KeyNotFoundError.new(@feature_key) if @data.nil?
      @data
    end

    # Walks the nested YAML hash one key segment at a time.
    def find_value(hash, key, *rest)
      node = hash[key]
      return node if node.nil? || rest.empty?

      find_value(node, *rest)
    end
  end
end

# Raised when a feature key is not declared in the rollout YAML.
class FeatureFlagger::KeyNotFoundError < StandardError ; end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger_spec.rb
|
<reponame>ResultadosDigitais/feature_flagger
require 'spec_helper'
# Module-singleton tests: .configure resets and rebuilds config, control
# and notifier; .control wires storage, notifier and cache together.
RSpec.describe FeatureFlagger do
  describe '.configure' do
    let(:storage) { double('storage') }
    let(:other_storage) { double('other_storage') }
    let(:notifier_callback) { lambda {|event| } }

    before do
      FeatureFlagger.configure do |config|
        config.storage = storage
      end
    end

    it { expect(FeatureFlagger.config.storage).to eq storage }

    # Reconfiguring must rebuild the memoized control as well.
    it 'Calling configure with a new storage must change control.storage' do
      FeatureFlagger.configure do |config|
        config.storage = other_storage
      end
      expect(FeatureFlagger.config.storage).to eq other_storage
      expect(FeatureFlagger.control.storage).to eq other_storage
    end

    it 'Calling configure with a valid notifier callback' do
      FeatureFlagger.configure do |config|
        config.notifier_callback = notifier_callback
      end
      expect(FeatureFlagger.notifier.notify).to eq notifier_callback
    end
  end

  describe '.control' do
    let(:control) { FeatureFlagger.control }

    before do
      FeatureFlagger.configure do |config|
      end
    end

    it 'initializes a Control with redis storage' do
      expect(control).to be_a(FeatureFlagger::Control)
      expect(control.storage).to be_a(FeatureFlagger::Storage::Redis)
    end

    # Without a configured callback the notifier still exposes a Proc no-op.
    it 'receives a notifier instance with a null notifier callback' do
      expect(control.instance_variable_get(:@notifier)).to be_a(FeatureFlagger::Notifier)
      expect(control.instance_variable_get(:@notifier).notify).to be_a(Proc)
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/spec_helper.rb
|
<filename>spec/spec_helper.rb<gh_stars>10-100
# frozen_string_literal: true

# Test bootstrap: in-memory Redis fake, optional coverage, and the gem itself.
require 'fakeredis/rspec'

# SimpleCov only runs when explicitly requested via COVERAGE=true.
if ENV['COVERAGE'] == "true"
  require 'simplecov'
  SimpleCov.start do
    load_profile "test_frameworks"
    add_filter "/vendor/"
  end
end

# Make lib/ requirable without installing the gem first.
$LOAD_PATH.unshift File.expand_path('../lib', __dir__)
require 'feature_flagger'
require 'active_support'
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/model_settings.rb
|
module FeatureFlagger
  # Per-model rollout configuration. Built from a hash of settings; every
  # entry is assigned through its public writer.
  class ModelSettings
    # Public: identifier_field — which attribute uniquely identifies a
    # record of the model (e.g. :id, :uuid).
    attr_accessor :identifier_field

    # Public: entity_name — which entity (top-level YAML key) the model
    # targets. Take this yaml file as example:
    #
    #   account:
    #     email_marketing:
    #       whitelabel:
    #         description: a rollout
    #         owner: core
    #   account_in_migration:
    #     email_marketing:
    #       whitelabel:
    #         description: a rollout
    #         owner: core
    #
    #   class Account < ActiveRecord::Base
    #     include FeatureFlagger::Model
    #
    #     feature_flagger do |config|
    #       config.identifier_field = :cdp_tenant_id
    #       config.entity_name = :account_in_migration
    #     end
    attr_accessor :entity_name

    def initialize(arguments)
      arguments.each do |name, value|
        public_send("#{name}=", value)
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/storage/feature_keys_migration_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'feature_flagger/storage/feature_keys_migration'
# Migration of legacy storage keys to the current layout, covering
# old-format-only and mixed-format data sets.
RSpec.describe FeatureFlagger::Storage::FeatureKeysMigration do
  subject(:migrator) { described_class.new(redis, control) }

  let(:redis) { FakeRedis::Redis.new }
  let(:notifier) { FeatureFlagger::Notifier.new }
  let(:control) { FeatureFlagger::Control.new(FeatureFlagger::Storage::Redis.new(redis), notifier) }
  let(:global_key) { FeatureFlagger::Control::RELEASED_FEATURES }

  before do
    filepath = File.expand_path('../fixtures/rollout_example.yml', __dir__)
    FeatureFlagger.config.yaml_filepath = filepath
  end

  describe '.call' do
    # Old format: raw SADDs directly on feature keys and the global set,
    # without the per-resource reverse index.
    context 'when there are keys in the old format' do
      before do
        redis.sadd('feature_flagger_dummy_class:email_marketing:behavior_score', 42)
        redis.sadd('feature_flagger_dummy_class:email_marketing:whitelabel', 42)
        redis.sadd('feature_flagger_dummy_class:email_marketing:whitelabel', 1)
        redis.sadd(global_key, 'other_feature_flagger_dummy_class:feature_c:feature_c_1')
        redis.sadd(global_key, 'other_feature_flagger_dummy_class:feature_c:feature_c_2')
        redis.sadd(global_key, 'account')
        migrator.call
      end

      it 'migrates feature keys to the new format' do
        expect(control.released?('feature_flagger_dummy_class:email_marketing:behavior_score', 42)).to be_truthy
        expect(control.released?('feature_flagger_dummy_class:email_marketing:whitelabel', 42)).to be_truthy
        expect(control.released?('feature_flagger_dummy_class:email_marketing:whitelabel', 1)).to be_truthy
      end

      it 'migrates all released feature keys to the new format' do
        expect(control.released_to_all?('other_feature_flagger_dummy_class:feature_c:feature_c_2')).to be_truthy
        expect(control.released_to_all?('other_feature_flagger_dummy_class:feature_c:feature_c_1')).to be_truthy
      end
    end

    # Mixed data: some entries written via the current API, some legacy.
    context 'when there are keys in both formats' do
      before do
        redis.sadd('feature_flagger_dummy_class:email_marketing:behavior_score', 42)
        redis.sadd('feature_flagger_dummy_class:email_marketing:whitelabel', 42)
        redis.sadd('feature_flagger_dummy_class:email_marketing:whitelabel', 1)
        redis.sadd(global_key, 'feature_flagger_dummy_class:email_marketing:global_whitelabel')
        control.release('other_feature_flagger_dummy_class:feature_b', 42)
        control.release_to_all('other_feature_flagger_dummy_class:feature_c:feature_c_1')
        migrator.call
      end

      it 'migrates feature keys to the new format' do
        expect(control.released?('feature_flagger_dummy_class:email_marketing:behavior_score', 42)).to be_truthy
        expect(control.released?('feature_flagger_dummy_class:email_marketing:whitelabel', 42)).to be_truthy
        expect(control.released?('feature_flagger_dummy_class:email_marketing:whitelabel', 1)).to be_truthy
        expect(control.released?('other_feature_flagger_dummy_class:feature_b', 42)).to be_truthy
        expect(control.releases('feature_flagger_dummy_class', 1)).to match_array(
          [
            'feature_flagger_dummy_class:email_marketing:whitelabel',
            'feature_flagger_dummy_class:email_marketing:global_whitelabel'
          ]
        )
      end

      it 'does not migrate internal keys' do
        expect(redis.keys.count).to eq(7)
      end

      it 'migrates all released feature keys to the new format ' do
        expect(control.released_to_all?('feature_flagger_dummy_class:email_marketing:global_whitelabel')).to be_truthy
        expect(control.released_to_all?('other_feature_flagger_dummy_class:feature_c:feature_c_1')).to be_truthy
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/storage/redis_keys_spec.rb
|
require 'spec_helper'
# Key-string helpers: composing resource keys and extracting the resource
# name from a feature key.
RSpec.describe FeatureFlagger::Storage::Keys do
  describe '.resource_key' do
    it 'generates the resource_key' do
      prefix = "my_prefix"
      resource_name = "account"
      resource_id = "1"

      result = FeatureFlagger::Storage::Keys.resource_key(
        prefix,
        resource_name,
        resource_id,
      )

      expect(result).to eq "my_prefix:account:1"
    end
  end

  describe '.extract_resource_name_from_feature_key' do
    context 'when feature_key is valid' do
      it 'returns resource_name' do
        feature_key = 'account:email_marketing:whitelabel'

        result = FeatureFlagger::Storage::Keys.extract_resource_name_from_feature_key(
          feature_key
        )

        expect(result).to eq 'account'
      end
    end

    # A bare resource name with no feature segment is rejected.
    context 'when feature_key is not valid' do
      it 'returns resource_name' do
        feature_key = 'account'

        expect {
          FeatureFlagger::Storage::Keys.extract_resource_name_from_feature_key(
            feature_key
          )
        }.to raise_error(FeatureFlagger::Storage::Keys::InvalidResourceNameError)
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/storage/keys.rb
|
<reponame>ResultadosDigitais/feature_flagger
module FeatureFlagger
  module Storage
    # Helpers for composing and parsing colon-separated storage keys.
    module Keys
      # A valid feature key needs at least "resource_name:feature" segments.
      # (Integers are frozen by default, so no explicit #freeze is needed.)
      MINIMUM_VALID_FEATURE_PATH = 2

      # Builds "<prefix>:<resource_name>:<resource_id>".
      def self.resource_key(prefix, resource_name, resource_id)
        [prefix, resource_name, resource_id].join(':')
      end

      # Returns the resource name (first segment) of a feature key; raises
      # InvalidResourceNameError when the key has no feature segment.
      def self.extract_resource_name_from_feature_key(feature_key)
        segments = feature_key.split(':')
        raise InvalidResourceNameError if segments.size < MINIMUM_VALID_FEATURE_PATH

        segments.first
      end

      # Raised for keys that lack the "resource:feature" structure.
      class InvalidResourceNameError < StandardError; end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/feature_spec.rb
|
<gh_stars>10-100
require 'spec_helper'
module FeatureFlagger
  # Feature resolution against the rollout YAML fixture: documented keys
  # build, undocumented keys raise, nested arrays are flattened.
  RSpec.describe Feature do
    subject { Feature.new(key, :feature_flagger_dummy_class) }

    before do
      filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
      FeatureFlagger.config.yaml_filepath = filepath
    end

    describe '#initialize' do
      context 'when feature is documented' do
        let(:key) { [:email_marketing, :behavior_score] }
        it { expect(subject).to be_a Feature }
      end

      context 'when feature is not documented' do
        let(:key) { [:email_marketing, :new_email_flow] }
        it { expect { subject }.to raise_error(FeatureFlagger::KeyNotFoundError) }
      end

      context 'with key argument as an array of arrays' do
        let(:key) { [[:email_marketing, :behavior_score]] }
        let(:resolved_key) { 'feature_flagger_dummy_class:email_marketing:behavior_score' }

        it 'flattens the array and acts as an unidimensional array' do
          expect(subject.key).to eq resolved_key
        end
      end
    end

    describe '#description' do
      let(:key) { [:email_marketing, :behavior_score] }
      it { expect(subject.description).to eq 'Enable behavior score experiment' }
    end

    describe '#key' do
      let(:key) { [:email_marketing, :behavior_score] }
      let(:resolved_key) { 'feature_flagger_dummy_class:email_marketing:behavior_score' }

      it 'returns the given key resolved and joined with resource_name' do
        expect(subject.key).to eq resolved_key
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/configuration.rb
|
<reponame>ResultadosDigitais/feature_flagger<gh_stars>10-100
module FeatureFlagger
  # Global gem configuration: storage backend, optional cache store,
  # rollout YAML location and the notifier callback.
  class Configuration
    attr_accessor :storage, :yaml_filepath, :notifier_callback
    # cache_store has a custom writer below; the accessor-generated writer
    # was dead code (immediately shadowed), so only the reader is declared.
    attr_reader :cache_store

    def initialize
      # Plain assignments: nothing is set before initialize runs, so the
      # previous ||= was redundant and misleading.
      @storage = Storage::Redis.default_client
      @yaml_filepath = default_yaml_filepath
      @notifier_callback = nil
      @cache_store = nil
    end

    # Accepts anything ActiveSupport::Cache.lookup_store understands, e.g.
    # :memory_store or [:memory_store, { expires_in: 100 }]. nil is coerced
    # to a NullStore so callers can always call #fetch on the result.
    def cache_store=(cache_store)
      raise ArgumentError, "Cache is only supported when used with ActiveSupport" unless defined?(ActiveSupport)

      cache_store = :null_store if cache_store.nil?
      @cache_store = ActiveSupport::Cache.lookup_store(*cache_store)
    end

    # Parsed rollout YAML (memoized). nil when no filepath is configured.
    def info
      @info ||= YAML.load_file(yaml_filepath) if yaml_filepath
    end

    # Every feature key declared in the YAML, fully qualified as
    # "resource:feature[:subfeature...]", optionally restricted to one
    # resource name.
    def mapped_feature_keys(resource_name = nil)
      info_filtered = resource_name ? info[resource_name] : info
      [].tap do |keys|
        make_keys_recursively(info_filtered).each { |key| keys.push(join_key(resource_name, key)) }
      end
    end

    private

    def default_yaml_filepath
      "#{Rails.root}/config/rollout.yml" if defined?(Rails)
    end

    # Depth-first walk of the YAML hash; a node whose first value is not a
    # Hash is treated as a leaf (the feature definition itself).
    def make_keys_recursively(hash, keys = [], composed_key = [])
      unless hash.values[0].is_a?(Hash)
        keys.push(composed_key)
        return
      end
      hash.each do |key, value|
        composed_key_cloned = composed_key.clone
        composed_key_cloned.push(key.to_sym)
        make_keys_recursively(value, keys, composed_key_cloned)
      end
      keys
    end

    def join_key(resource_name, key)
      key.unshift resource_name if resource_name
      key.join(":")
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/storage/redis_spec.rb
|
<filename>spec/feature_flagger/storage/redis_spec.rb
require 'spec_helper'
# Redis storage behavior: membership checks, the per-resource reverse
# index kept by add/remove, global release handling, and key listing.
RSpec.describe FeatureFlagger::Storage::Redis do
  let(:redis) { FakeRedis::Redis.new }
  let(:storage) { described_class.new(redis) }
  let(:feature_key) { 'account:email_marketing:whitelabel' }
  let(:resource_id) { '1' }
  let(:resource_name) { 'account' }
  let(:global_key) { 'released_features' }
  # Reverse-index key ("_r:account:1") mirroring the resource's releases.
  let(:resource_key) do
    FeatureFlagger::Storage::Keys.resource_key(
      FeatureFlagger::Storage::Redis::RESOURCE_PREFIX,
      resource_name,
      resource_id,
    )
  end

  context do
    before do
      redis.flushdb
    end

    describe '#has_values?' do
      context 'when resource_id is stored for given feature_key' do
        before { storage.add(feature_key, resource_name, resource_id) }
        it { expect(storage).to have_value(feature_key, resource_id) }
      end

      context 'when resource_id is not stored for given feature_key' do
        it { expect(storage).not_to have_value(feature_key, resource_id) }
      end
    end

    describe '#fetch_releases' do
      context 'when there is no features under global structure' do
        before do
          storage.add(feature_key, resource_name, resource_id)
        end

        it 'returns related feature_keys' do
          expect(storage.fetch_releases(resource_name, resource_id, global_key)).to match_array([feature_key])
        end
      end

      context 'when there is features under global structure' do
        before do
          storage.add_all(global_key, feature_key)
        end

        it 'returns related feature_keys' do
          expect(storage.fetch_releases(resource_name, resource_id, global_key)).to match_array(feature_key)
        end
      end
    end

    describe '#add' do
      # Both the feature set and the reverse index must be written.
      it 'adds the resource_id to redis' do
        storage.add(feature_key, resource_name, resource_id)
        expect(storage).to have_value(feature_key, resource_id)
        expect(storage).to have_value(resource_key, feature_key)
      end
    end

    describe '#add_all' do
      context 'when only add_all is called' do
        it 'turns feature a global' do
          storage.add_all(global_key, feature_key)
          expect(storage).to have_value(global_key, feature_key)
        end
      end

      # Going global must clean up the now-redundant per-resource entries.
      context 'when add_all is called right after add' do
        it 'turns feature a global, cleaning both local resource and feature sets' do
          storage.add(feature_key, resource_name, resource_id)
          storage.add_all(global_key, feature_key)
          expect(storage).not_to have_value(feature_key, resource_id)
          expect(storage).not_to have_value(resource_key, feature_key)
        end
      end
    end

    describe '#remove' do
      it 'removes the resource_id from redis' do
        storage.add(feature_key, resource_name, resource_id)
        storage.remove(feature_key, resource_name, resource_id)
        expect(storage).not_to have_value(feature_key, resource_id)
        expect(storage).not_to have_value(resource_key, feature_key)
      end
    end

    describe '#remove_all' do
      it 'removes all resource_ids from redis' do
        storage.add(feature_key, resource_name, resource_id)
        storage.remove_all(global_key, feature_key)
        expect(storage).not_to have_value(feature_key, resource_id)
        expect(storage).not_to have_value(resource_key, feature_key)
      end
    end

    describe '#all_resource_ids' do
      let(:resource_ids) { %w(value1 value2) }

      it 'returns all resource_ids for the given feature_key' do
        storage.add(feature_key, resource_name, resource_ids)
        expect(storage.all_values(feature_key).sort).to match_array(resource_ids)
      end
    end

    describe '#feature_keys' do
      # Reverse-index ("_r:...") keys must be filtered out of the listing.
      it 'returns only feature_keys' do
        storage.add(feature_key, resource_name, resource_id)
        storage.add('user:profile:round_avatar', 'user', resource_id)
        storage.add('account:lp:new_layout', 'account', resource_id)
        storage.add('account:lp:new_layout', 'account', resource_id)
        expect(storage.feature_keys).to match_array([
          'account:email_marketing:whitelabel',
          'user:profile:round_avatar',
          'account:lp:new_layout',
        ])
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
lib/feature_flagger/storage/redis.rb
|
<filename>lib/feature_flagger/storage/redis.rb
require 'redis'
require 'redis-namespace'
require_relative './keys'
module FeatureFlagger
  module Storage
    # Redis-backed storage. Data layout (all sets):
    #   <feature_key>                     -> resource ids the feature is released to
    #   _r:<resource_name>:<resource_id>  -> reverse index: feature keys for that resource
    #   <global_key>                      -> feature keys released to everyone
    class Redis
      DEFAULT_NAMESPACE = :feature_flagger
      # Prefix distinguishing per-resource reverse-index keys from feature keys.
      RESOURCE_PREFIX = "_r".freeze
      # Batch size for SSCAN while cleaning up reverse indexes.
      SCAN_EACH_BATCH_SIZE = 1000.freeze

      def initialize(redis)
        @redis = redis
      end

      # Builds a namespaced client from REDIS_URL (nil falls back to the
      # redis gem's default of localhost).
      def self.default_client
        redis = ::Redis.new(url: ENV['REDIS_URL'])
        ns = ::Redis::Namespace.new(DEFAULT_NAMESPACE, :redis => redis)
        new(ns)
      end

      # Union of the resource's own releases and the global set, filtered to
      # keys belonging to this resource.
      # NOTE(review): the prefix match would also catch resources whose name
      # merely starts with resource_name (e.g. "account" vs "account_v2") —
      # confirm that entity names never prefix one another.
      def fetch_releases(resource_name, resource_id, global_key)
        resource_key = resource_key(resource_name, resource_id)
        releases = @redis.sunion(resource_key, global_key)
        releases.select{ |release| release.start_with?(resource_name) }
      end

      def has_value?(key, value)
        @redis.sismember(key, value)
      end

      # Writes the feature set and the reverse index atomically via MULTI.
      def add(feature_key, resource_name, resource_id)
        resource_key = resource_key(resource_name, resource_id)

        @redis.multi do |redis|
          redis.sadd(feature_key, resource_id)
          redis.sadd(resource_key, feature_key)
        end
      end

      # Removes both sides of the feature/resource relation atomically.
      def remove(feature_key, resource_name, resource_id)
        resource_key = resource_key(resource_name, resource_id)

        @redis.multi do |redis|
          redis.srem(feature_key, resource_id)
          redis.srem(resource_key, feature_key)
        end
      end

      # Drops a global release and any lingering per-resource entries.
      def remove_all(global_key, feature_key)
        @redis.srem(global_key, feature_key)
        remove_feature_key_from_resources(feature_key)
      end

      # Promotes a feature to global and clears the now-redundant
      # per-resource entries.
      def add_all(global_key, key)
        @redis.sadd(global_key, key)
        remove_feature_key_from_resources(key)
      end

      def all_values(key)
        @redis.smembers(key)
      end

      # DEPRECATED: this method will be removed from public api on v2.0 version.
      # use instead the feature_keys method.
      def search_keys(query)
        @redis.scan_each(match: query)
      end

      def feature_keys
        feature_keys = []

        @redis.scan_each(match: "*") do |key|
          # Reject keys related to feature responsible for return
          # released features for a given account.
          next if key.start_with?("#{RESOURCE_PREFIX}:")

          feature_keys << key
        end

        feature_keys
      end

      def synchronize_feature_and_resource
        FeatureFlagger::Storage::FeatureKeysMigration.new(
          @redis,
          FeatureFlagger.control,
        ).call
      end

      private

      def resource_key(resource_name, resource_id)
        FeatureFlagger::Storage::Keys.resource_key(
          RESOURCE_PREFIX,
          resource_name,
          resource_id,
        )
      end

      # Walks the feature set with SSCAN (starting cursor 0, stopping when
      # Redis returns cursor "0") and deletes each member from both the
      # reverse index and the feature set itself.
      # NOTE(review): members are removed from the same set being scanned;
      # Redis documents SCAN as safe under concurrent deletion, but elements
      # added mid-scan may be missed — confirm acceptable here.
      def remove_feature_key_from_resources(feature_key)
        cursor = 0
        resource_name = feature_key.split(":").first

        loop do
          cursor, resource_ids = @redis.sscan(feature_key, cursor, count: SCAN_EACH_BATCH_SIZE)

          @redis.multi do |redis|
            resource_ids.each do |resource_id|
              key = resource_key(resource_name, resource_id)

              redis.srem(key, feature_key)
              redis.srem(feature_key, resource_id)
            end
          end

          break if cursor == "0"
        end
      end
    end
  end
end
|
ResultadosDigitais/feature_flagger
|
feature_flagger.gemspec
|
<reponame>ResultadosDigitais/feature_flagger
# coding: utf-8
# Gem specification for feature_flagger — a partial-rollout toolkit.
# Make lib/ loadable so the version constant can be read without install.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'feature_flagger/version'

Gem::Specification.new do |spec|
  spec.name = "feature_flagger"
  spec.version = FeatureFlagger::VERSION
  spec.authors = ["<NAME>", "<NAME>"]
  spec.email = ["<EMAIL>", "<EMAIL>"]
  spec.licenses = ['MIT']
  spec.summary = %q{Partial release your features.}
  spec.description = %q{Management tool to make it easier rollouting features to customers.}
  spec.homepage = "http://github.com/ResultadosDigitais/feature_flagger"
  spec.required_ruby_version = '>= 2.5'
  spec.required_rubygems_version = '>= 2.0.0'
  spec.files = Dir['README.md', 'MIT-LICENSE', 'lib/**/*']
  spec.require_paths = ["lib"]
  # Runtime dependencies: Redis client and key namespacing.
  spec.add_dependency 'redis', '> 3.2'
  spec.add_dependency 'redis-namespace', '> 1.3'
  # Development/test-only dependencies.
  spec.add_development_dependency 'activesupport', '> 6.0'
  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake', '~> 13.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'simplecov', '0.21.2'
  spec.add_development_dependency 'fakeredis', '0.8.0'
end
|
ResultadosDigitais/feature_flagger
|
spec/feature_flagger/model_spec.rb
|
require 'spec_helper'
module FeatureFlagger
  # Minimal host model used to exercise the mixin; entity name derives to
  # "feature_flagger_dummy_class".
  class DummyClass
    include FeatureFlagger::Model
    def id; 14 end
  end

  # Verifies that every Model instance/class method delegates to Control
  # with the resolved feature key and identifier.
  RSpec.describe Model do
    subject { DummyClass.new }
    let(:key) { [:email_marketing, :whitelabel] }
    let(:resolved_key) { 'feature_flagger_dummy_class:email_marketing:whitelabel' }
    let(:control) { FeatureFlagger.control }

    before do
      filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
      FeatureFlagger.config.yaml_filepath = filepath
    end

    describe '#release' do
      it 'calls Control#release with appropriated methods' do
        expect(control).to receive(:release).with(resolved_key, subject.id)
        subject.release(key)
      end
    end

    describe '#releases' do
      it 'calls Control#release with appropriated methods' do
        expect(control).to receive(:releases).with("feature_flagger_dummy_class", subject.id, {})
        subject.releases
      end
    end

    describe '#unrelease' do
      it 'calls Control#unrelease with appropriated methods' do
        expect(control).to receive(:unrelease).with(resolved_key, subject.id)
        subject.unrelease(key)
      end
    end

    describe '.released_id?' do
      context 'given a specific resource id' do
        let(:resource_id) { 10 }

        it 'calls Control#released? with appropriated methods' do
          expect(control).to receive(:released?).with(resolved_key, resource_id, {})
          DummyClass.released_id?(resource_id, key)
        end

        # Cache options must reach the storage layer untouched.
        it 'passes down cache options to storage' do
          expect(control).to receive(:released?).with(resolved_key, resource_id, { skip_cache: true })
          DummyClass.released_id?(resource_id, key, skip_cache: true)
        end
      end
    end

    describe '.release_id' do
      context 'given a specific resource id' do
        let(:resource_id) { 10 }

        it 'calls Control#release with appropriated methods' do
          expect(control).to receive(:release).with(resolved_key, resource_id)
          DummyClass.release_id(resource_id, key)
        end
      end
    end

    describe '.unrelease_id' do
      context 'given a specific resource id' do
        let(:resource_id) { 20 }

        it 'calls Control#release with appropriated methods' do
          expect(control).to receive(:unrelease).with(resolved_key, resource_id)
          DummyClass.unrelease_id(resource_id, key)
        end
      end
    end

    describe '.all_released_ids_for' do
      it 'calls Control#resource_ids with appropriated methods' do
        expect(control).to receive(:resource_ids).with(resolved_key, {})
        DummyClass.all_released_ids_for(key)
      end

      it 'passes down cache options to storage' do
        expect(control).to receive(:resource_ids).with(resolved_key, { skip_cache: true})
        DummyClass.all_released_ids_for(key, skip_cache: true)
      end
    end

    describe '.release_to_all' do
      it 'calls Control#release_to_all with appropriated methods' do
        expect(control).to receive(:release_to_all).with(resolved_key)
        DummyClass.release_to_all(key)
      end
    end

    describe '.unrelease_to_all' do
      it 'calls Control#unrelease_to_all with appropriated methods' do
        expect(control).to receive(:unrelease_to_all).with(resolved_key)
        DummyClass.unrelease_to_all(key)
      end
    end

    describe '.released_features_to_all' do
      it 'calls Control#released_features_to_all with appropriated methods' do
        expect(control).to receive(:released_features_to_all)
        DummyClass.released_features_to_all
      end
    end

    describe '.released_to_all?' do
      it 'calls Control#released_to_all? with appropriated methods' do
        expect(control).to receive(:released_to_all?).with(resolved_key, {})
        DummyClass.released_to_all?(key)
      end
    end

    describe '.detached_feature_keys' do
      let(:redis) { FakeRedis::Redis.new }
      let(:storage) { Storage::Redis.new(redis) }

      # feature_a/feature_b are persisted but absent from the YAML fixture.
      before do
        FeatureFlagger.configure do |config|
          config.storage = storage
        end
        FeatureFlagger.control.release('feature_flagger_dummy_class:feature_a', 0)
        FeatureFlagger.control.release('feature_flagger_dummy_class:feature_b', 0)
        filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
        FeatureFlagger.config.yaml_filepath = filepath
      end

      it 'returns all detached feature keys' do
        expect(DummyClass.detached_feature_keys).to contain_exactly("feature_a","feature_b")
      end
    end

    describe '.cleanup_detached' do
      context "detached feature key" do
        let(:redis) { FakeRedis::Redis.new }
        let(:storage) { Storage::Redis.new(redis) }
        let(:feature_key) { 'feature_flagger_dummy_class:feature_a' }

        before do
          FeatureFlagger.configure do |config|
            config.storage = storage
          end
          FeatureFlagger.control.release(feature_key, 0)
          filepath = File.expand_path('../../fixtures/rollout_example.yml', __FILE__)
          FeatureFlagger.config.yaml_filepath = filepath
        end

        it 'cleanup key' do
          DummyClass.cleanup_detached(:feature_a)
          expect(DummyClass.detached_feature_keys).not_to include "feature_a"
        end
      end

      # Keys still present in the YAML must be protected from cleanup.
      context "mapped feature key" do
        it 'do not cleanup key' do
          expect {
            DummyClass.cleanup_detached(:email_marketing, :behavior_score)
          }.to raise_error("key is still mapped")
        end
      end
    end

    describe '.feature_flagger' do
      # Host model overriding both the identifier field and the entity name
      # via the configuration DSL.
      class CustomizedDummyClass
        include FeatureFlagger::Model

        feature_flagger do |config|
          config.identifier_field = :uuid
          config.entity_name = :account
        end

        def uuid
          'f11bc560-8ef9-40cf-909e-ebb1c6f41163'
        end
      end

      it 'expect to be using account entity name and uuid as field' do
        CustomizedDummyClass.new.release(:email_marketing, :behavior_score)

        expect(CustomizedDummyClass.all_released_ids_for(:email_marketing, :behavior_score)).to include(
          'f11bc560-8ef9-40cf-909e-ebb1c6f41163'
        )
      end
    end
  end
end
|
supportleap/beacon
|
test/controllers/statuses_controller_test.rb
|
# frozen_string_literal: true
require "test_helper"
# Integration coverage for the status history page.
class StatusesControllerTest < ActionDispatch::IntegrationTest
  setup do
    @green_status = create(:status, level: :green)
    @yellow_status = create(:status, level: :yellow)
    @red_status = create(:status, level: :red)
  end

  test "#index lists current statuses" do
    get "/statuses"

    assert_response :success
    assert_select ".status-history .status-box", 3

    # Every seeded status should appear as a box heading.
    [@red_status, @yellow_status, @green_status].each do |status|
      assert_select ".status-history .status-box h3", /#{status.message}/
    end
  end

  test "#index shows next button if has next page" do
    # One full extra page beyond the three statuses from `setup`.
    StatusesController::PER_PAGE.times { create(:status) }

    get "/statuses"

    assert_response :success
    assert_select ".status-history .paginate-container a", /Next/
  end

  test "#index shows message if no statuses" do
    Status.destroy_all
    assert_empty Status.all

    get "/statuses"

    assert_response :success
    assert_select ".status-history p", "Couldn't find any status events."
  end
end
|
supportleap/beacon
|
app/controllers/dashboard_controller.rb
|
<gh_stars>0
# frozen_string_literal: true
# Landing page: the current status plus a short tail of history.
class DashboardController < ApplicationController
  # How many historical statuses to show below the current one.
  INDEX_STATUS_LIMIT = 5

  def index
    # Fetch one extra record: the newest is the current status, the rest
    # are history.
    latest_status, *statuses = Status.order("id DESC").limit(INDEX_STATUS_LIMIT + 1).to_a

    render "dashboard/index", locals: {
      latest_status: latest_status,
      statuses: statuses,
    }
  end
end
|
supportleap/beacon
|
lib/graph.rb
|
# frozen_string_literal: true
# Thin entry point for running GraphQL operations against the app schema.
module Graph
  # Executes a GraphQL query string against `Graph::Schema`.
  #
  # query     - The GraphQL document (String) to execute.
  # context   - Optional Hash made available to resolvers as `context`.
  # variables - Optional Hash of query variables.
  #
  # Returns whatever `Schema.execute` returns (a result object/hash).
  def self.execute(query, context: {}, variables: {})
    Schema.execute(query, context: context, variables: variables)
  end
end
|
supportleap/beacon
|
app/controllers/application_controller.rb
|
# frozen_string_literal: true
class ApplicationController < ActionController::Base
  # Returns the requested page number as a positive Integer, defaulting
  # to 1 when the param is missing, blank, non-numeric, or zero.
  #
  # page_param - Symbol name of the query param to read (default :page).
  #
  # Fix: the previous implementation returned `to_i.abs` directly, so
  # `?page=0` or `?page=abc` produced page 0, which is not a valid page
  # for will_paginate. Anything that normalizes to 0 now falls back to 1.
  def current_page(page_param = :page)
    value = params[page_param]
    # Guard against non-coercible params (e.g. `?page[]=1` yields an Array).
    return 1 unless value.respond_to?(:to_i)

    page = value.to_i.abs
    page.positive? ? page : 1
  end
end
|
supportleap/beacon
|
lib/graph/mutations/create_status.rb
|
# frozen_string_literal: true
module Graph
  module Mutations
    # GraphQL mutation that records a new status event by delegating to
    # the Statuses::CreateStatus service object.
    class CreateStatus < Graph::Mutations::Base
      description "Create a new status event."

      argument :level, Enums::StatusLevel, "The level of this status event.", required: true
      argument :message, String, "The message for this status event.", required: false

      field :status, Objects::Status, "The status event that was created.", null: true
      field :errors, [String], null: false

      # Maps the service Result onto the GraphQL payload: on success
      # `status` is the created record and `errors` is empty; on failure
      # `status` is nil and `errors` carries the messages.
      def resolve(**inputs)
        result = Statuses::CreateStatus.call(
          level: inputs[:level],
          message: inputs[:message],
        )

        if result.success?
          {
            status: result.status,
            errors: [],
          }
        else
          {
            status: nil,
            errors: result.errors,
          }
        end
      end
    end
  end
end
|
supportleap/beacon
|
app/models/statuses/create_status.rb
|
# frozen_string_literal: true
module Statuses
  # Service object that creates a Status event, falling back to the
  # level's default message when no message is supplied.
  class CreateStatus
    # Builds and runs the service.
    #
    # inputs - A Hash of attributes to create a status.
    #          inputs[:level]   - A String that is a `level` enum value for a Status.
    #          inputs[:message] - (optional) A String message to use for the status.
    #
    # Returns a Result.
    def self.call(inputs)
      # Splat explicitly: since Ruby 3.0 a positional Hash is no longer
      # auto-converted to keyword arguments, so `new(inputs)` raised
      # ArgumentError — which the rescue below silently turned into a
      # bogus failure Result.
      new(**inputs).call
    end

    def initialize(level:, message: nil)
      @level = level
      @message = message
    end

    # Creates the Status. Returns Result.success with the saved record,
    # or Result.failure carrying validation errors. An ArgumentError from
    # an invalid enum `level` value is also converted into a failure.
    def call
      status_message = if message.blank?
        Status.default_message_for(level)
      else
        message
      end

      status = Status.new(
        level: level,
        message: status_message,
      )

      if status.save
        Result.success(status: status)
      else
        Result.failure(errors: status.errors.full_messages)
      end
    rescue ArgumentError => error
      # Raised by ActiveRecord enums when `level` is not a known value.
      Result.failure(errors: [error.message])
    end

    private

    attr_reader :level, :message

    # Immutable value object describing the outcome of the service call.
    class Result
      attr_reader :status, :success, :errors
      alias_method :success?, :success

      # status  - The Status that is being created (nil on failure).
      # success - A Boolean indicating if creating the status was successful.
      # errors  - An Array of any errors that occurred when creating the status.
      def initialize(status:, success:, errors:)
        @status = status
        @success = success
        @errors = errors
      end

      def self.success(status:)
        new(
          status: status,
          success: true,
          errors: [],
        )
      end

      def self.failure(errors:)
        new(
          status: nil,
          success: false,
          errors: errors,
        )
      end
    end
  end
end
|
supportleap/beacon
|
test/models/statuses/create_status_test.rb
|
<gh_stars>0
# frozen_string_literal: true
require 'test_helper'
# Exercises the Statuses::CreateStatus service object against the test
# database.
class Statuses::CreateStatusTest < ActiveSupport::TestCase
  test "creates a status with default message" do
    assert_empty Status.all

    result = Statuses::CreateStatus.call(level: "green")

    assert_predicate result, :success?
    assert_empty result.errors
    # No :message given, so the level's canned default is used.
    assert_equal Status.default_message_for("green"), result.status.message
    assert_equal [result.status], Status.all
  end

  test "creates a status with custom message" do
    assert_empty Status.all

    result = Statuses::CreateStatus.call(
      level: "green",
      message: "Hack the planet",
    )

    assert_predicate result, :success?
    assert_empty result.errors
    assert_equal "Hack the planet", result.status.message
    assert_equal [result.status], Status.all
  end

  test "returns error for invalid level value" do
    assert_empty Status.all

    # An unknown enum value raises ArgumentError inside the service,
    # which is converted into a failure Result.
    result = Statuses::CreateStatus.call(
      level: "orisa",
      message: "Hack the planet",
    )

    refute_predicate result, :success?
    assert_equal ["'orisa' is not a valid level"], result.errors
    assert_empty Status.all
  end

  test "returns error for message over limit" do
    assert_empty Status.all

    # One character over the 140-char validation limit.
    message = "a" * 141
    result = Statuses::CreateStatus.call(
      level: "green",
      message: message,
    )

    refute_predicate result, :success?
    assert_equal ["Message is too long (maximum is 140 characters)"], result.errors
    assert_empty Status.all
  end
end
|
supportleap/beacon
|
app/controllers/chatops_controller.rb
|
<filename>app/controllers/chatops_controller.rb<gh_stars>0
# frozen_string_literal: true
# Exposes Beacon's status commands over GitHub's ChatOps RPC protocol.
class ChatopsController < ApplicationController
  # ChatOps requests are authenticated by the chatops gem, not by
  # Rails' CSRF token.
  skip_before_action :verify_authenticity_token

  include ::Chatops::Controller

  chatops_namespace :beacon

  # NOTE(review): the "โ" below looks like a mojibake em dash (—) —
  # confirm the intended character before changing the help text.
  chatops_help <<-EOS
:rotating_light: Beacon โ Leap's status page.
EOS

  # `.beacon set <level> [message]` — create a new status event via the
  # Statuses::CreateStatus service.
  chatop :create_status,
    /set (?<level>red|yellow|green)(?: (?<message>(.*)))?/i,
    "set <level> <message> - Set the current status. Message is optional." do
    result = Statuses::CreateStatus.call(
      level: jsonrpc_params[:level],
      message: jsonrpc_params[:message],
    )

    if result.success?
      jsonrpc_success("Updated status: #{result.status.level.upcase} - #{result.status.message}")
    else
      jsonrpc_failure("Something went wrong: #{result.errors.join(", ")}")
    end
  end

  # `.beacon sup` — report the most recent status event, if any.
  chatop :current_status,
    /sup/,
    "sup - See the current status." do
    if status = Status.last
      jsonrpc_success("Latest status: #{status.level.upcase} - #{status.message}")
    else
      jsonrpc_success("No status currently set. Set one with `set <level>`.")
    end
  end
end
|
supportleap/beacon
|
config/routes.rb
|
<gh_stars>0
Rails.application.routes.draw do
  # Dashboard with the current status is the landing page.
  root "dashboard#index"

  # Paginated history of status events.
  resources :statuses, only: [:index]

  # ChatOps RPC endpoints (see ChatopsController).
  post "/_chatops/:chatop", controller: "chatops", action: :execute_chatop
  get "/_chatops", to: "chatops#list"

  # GraphQL API entry point.
  post "/api/graphql", to: "graphql#execute"
end
|
supportleap/beacon
|
app/controllers/statuses_controller.rb
|
# frozen_string_literal: true
# Paginated listing of all historical status events.
class StatusesController < ApplicationController
  # Number of status events shown per page of history.
  PER_PAGE = 35

  def index
    page_of_statuses = Status
      .order("id DESC")
      .paginate(page: current_page, per_page: PER_PAGE)

    render "statuses/index", locals: { statuses: page_of_statuses }
  end
end
|
datamapper/dm-is-searchable
|
lib/dm-is-searchable.rb
|
<reponame>datamapper/dm-is-searchable
require 'dm-core'
require 'dm-is-searchable/is/searchable'
# Mixes the Is::Searchable plugin into every DataMapper model so that
# models can declare themselves searchable.
module DataMapper
  module Model
    include DataMapper::Is::Searchable
  end
end
|
p1atdev/Nafuda
|
Nafuda.podspec
|
# CocoaPods spec for the Nafuda library.
Pod::Spec.new do |spec|
  spec.name = "Nafuda"
  spec.version = "1.1.0"
  spec.summary = "This search web site's title"
  # NOTE(review): placeholder description — fill in before publishing.
  spec.description = "TODO: write here"
  spec.homepage = "https://github.com/p1atdev/Nafuda"
  spec.license = { :type => 'MIT', :file => 'LICENSE' }
  spec.author = { "p1atdev" => "<EMAIL>" }
  # Tag must match the published version.
  spec.source = { :git => "https://github.com/p1atdev/Nafuda.git", :tag => "#{spec.version}" }
  spec.platform = :ios, "9.0"
  spec.requires_arc = true
  spec.source_files = 'Nafuda/*.{swift}'
  # spec.resources = 'Kusabi/**/*.{xib,png}'
  spec.swift_version = "5.0"
end
|
cloversites/draftsman
|
lib/draftsman/model.rb
|
<gh_stars>0
require 'draftsman/attributes_serialization'
module Draftsman
module Model
def self.included(base)
base.send :extend, ClassMethods
end
module ClassMethods
# Declare this in your model to enable the Draftsman API for it. A draft
# of the model is available in the `draft` association (if one exists).
#
# Options:
#
# :class_name
# The name of a custom `Draft` class. This class should inherit from
# `Draftsman::Draft`. A global default can be set for this using
# `Draftsman.draft_class_name=` if the default of `Draftsman::Draft` needs
# to be overridden.
#
# :ignore
# An array of attributes for which an update to a `Draft` will not be
# stored if they are the only ones changed.
#
# :only
# Inverse of `ignore` - a new `Draft` will be created only for these
# attributes if supplied. It's recommended that you only specify optional
# attributes for this (that can be empty).
#
# :skip
# Fields to ignore completely. As with `ignore`, updates to these fields
# will not create a new `Draft`. In addition, these fields will not be
# included in the serialized versions of the object whenever a new `Draft`
# is created.
#
# :meta
# A hash of extra data to store. You must add a column to the `drafts`
# table for each key. Values are objects or `procs` (which are called with
# `self`, i.e. the model with the `has_drafts`). See
# `Draftsman::Controller.info_for_draftsman` for an example of how to
# store data from the controller.
#
# :draft
# The name to use for the `draft` association shortcut method. Default is
# `:draft`.
#
# :published_at
# The name to use for the method which returns the published timestamp.
# Default is `published_at`.
#
# :trashed_at
# The name to use for the method which returns the soft delete timestamp.
# Default is `trashed_at`.
def has_drafts(options = {})
  # Lazily include the instance methods so we don't clutter up
  # any more ActiveRecord models than we need to.
  send :include, InstanceMethods
  send :extend, AttributesSerialization

  # Define before/around/after callbacks on each drafted model
  send :extend, ActiveModel::Callbacks
  # TODO: Remove `draft_creation`, `draft_update`, and `draft_destroy` in
  # v1.0.
  define_model_callbacks :save_draft, :draft_creation, :draft_update, :draft_destruction, :draft_destroy

  class_attribute :draftsman_options
  self.draftsman_options = options.dup

  class_attribute :draft_association_name
  self.draft_association_name = options[:draft] || :draft

  class_attribute :draft_class_name
  self.draft_class_name = options[:class_name] || Draftsman.draft_class_name

  # Normalize :ignore/:skip/:only to flat arrays of attribute-name strings.
  [:ignore, :skip, :only].each do |key|
    draftsman_options[key] = ([draftsman_options[key]].flatten.compact || []).map(&:to_s)
  end

  # The draft FK itself never counts as a drafted change.
  draftsman_options[:ignore] << "#{self.draft_association_name}_id"

  draftsman_options[:meta] ||= {}

  attr_accessor :draftsman_event

  class_attribute :published_at_attribute_name
  self.published_at_attribute_name = options[:published_at] || :published_at

  class_attribute :trashed_at_attribute_name
  self.trashed_at_attribute_name = options[:trashed_at] || :trashed_at

  # `belongs_to :draft` association
  # (`optional: true` where required: most records have no pending draft.)
  if ::Draftsman.active_record_belongs_to_required?
    belongs_to(self.draft_association_name, class_name: self.draft_class_name, dependent: :destroy, optional: true)
  else
    belongs_to(self.draft_association_name, class_name: self.draft_class_name, dependent: :destroy)
  end

  # Scopes
  # Records that currently have a pending draft.
  scope :drafted, -> (referenced_table_name = nil) {
    referenced_table_name = referenced_table_name.present? ? referenced_table_name : table_name
    where.not(referenced_table_name => { "#{self.draft_association_name}_id" => nil })
  }
  # Records that have been published at least once.
  scope :published, -> (referenced_table_name = nil) {
    referenced_table_name = referenced_table_name.present? ? referenced_table_name : table_name
    where.not(referenced_table_name => { self.published_at_attribute_name => nil })
  }
  # Records that are soft-deleted.
  scope :trashed, -> (referenced_table_name = nil) {
    referenced_table_name = referenced_table_name.present? ? referenced_table_name : table_name
    where.not(referenced_table_name => { self.trashed_at_attribute_name => nil })
  }
  # Records that are not soft-deleted.
  scope :live, -> (referenced_table_name = nil) {
    referenced_table_name = referenced_table_name.present? ? referenced_table_name : table_name
    where(referenced_table_name => { self.trashed_at_attribute_name => nil })
  }
end
# Returns draft class.
def draft_class
  @draft_class ||= draft_class_name.constantize
end

# Returns whether or not `has_drafts` has been called on this model.
def draftable?
  method_defined?(:draftsman_options)
end

# Returns whether or not a `trashed_at` timestamp is set up on this model.
def trashable?
  draftable? && method_defined?(self.trashed_at_attribute_name)
end
end
module InstanceMethods
# Returns the latest revision -- either the object or its reified draft
# NOTE(review): calls the literal `draft` method rather than
# `self.class.draft_association_name` — confirm behavior with a custom
# `:draft` option.
def latest_revision
  draft? ? draft.reify : self
end

# Returns whether or not this item has a draft.
def draft?
  send(self.class.draft_association_name).present?
end
# DEPRECATED: Use `#save_draft` instead.
def draft_creation
  ActiveSupport::Deprecation.warn('`#draft_creation` is deprecated and will be removed from Draftsman 1.0. Use `#save_draft` instead.')
  _draft_creation
end

# DEPRECATED: Use `#draft_destruction` instead.
def draft_destroy
  ActiveSupport::Deprecation.warn('`#draft_destroy` is deprecated and will be removed from Draftsman 1.0. Use `draft_destruction` instead.')
  # Still runs the legacy :draft_destroy callbacks around the new
  # destruction path for backward compatibility.
  run_callbacks :draft_destroy do
    _draft_destruction
  end
end

# Trashes object and records a draft for a `destroy` event.
def draft_destruction
  run_callbacks :draft_destruction do
    _draft_destruction
  end
end

# DEPRECATED: Use `#save_draft` instead.
def draft_update
  ActiveSupport::Deprecation.warn('`#draft_update` is deprecated and will be removed from Draftsman 1.0. Use `#save_draft` instead.')
  _draft_update
end
# Returns serialized object representing this drafted item.
#
# object - Optional record to serialize; defaults to self.
def object_attrs_for_draft_record(object = nil)
  object ||= self

  # Drop skipped columns and apply any custom attribute serializers.
  attrs = object.attributes.except(*self.class.draftsman_options[:skip]).tap do |attributes|
    self.class.serialize_attributes_for_draftsman(attributes)
  end

  # JSON columns store the hash directly; text columns get serialized.
  if self.class.draft_class.object_col_is_json?
    attrs
  else
    Draftsman.serializer.dump(attrs)
  end
end

# Returns whether or not this item has been published at any point in its lifecycle.
# NOTE(review): reads the literal `published_at` attribute rather than
# `self.class.published_at_attribute_name` — confirm behavior with a
# custom `:published_at` option.
def published?
  self.published_at.present?
end

# Creates or updates draft depending on state of this item and if it has
# any drafts.
#
# - If a completely new record, persists this item to the database and
#   records a `create` draft.
# - If an existing record with an existing `create` draft, updates the
#   record and the existing `create` draft.
# - If an existing record with no existing draft, records changes in an
#   `update` draft.
# - If an existing record with an existing draft (`create` or `update`),
#   updated back to its original undrafted state, removes associated
#   `draft record`.
#
# Returns `true` or `false` depending on if the object passed validation
# and the save was successful.
def save_draft
  run_callbacks :save_draft do
    if self.new_record?
      _draft_creation
    else
      _draft_update
    end
  end
end

# Returns whether or not this item has been trashed
def trashed?
  send(self.class.trashed_at_attribute_name).present?
end
private
# Creates object and records a draft for the object's creation. Returns
# `true` or `false` depending on whether or not the objects passed
# validation and the save was successful.
def _draft_creation
  transaction do
    # TODO: Remove callback wrapper in v1.0.
    run_callbacks :draft_creation do
      # We want to save the draft after create
      return false unless self.save

      # Build data to store in draft record.
      data = {
        item: self,
        event: :create,
      }
      data[:object] = object_attrs_for_draft_record if Draftsman.stash_drafted_changes?
      data[Draftsman.whodunnit_field] = Draftsman.whodunnit
      data[:object_changes] = serialized_draft_changeset(changes_for_draftsman(:create)) if track_object_changes_for_draft?
      data = merge_metadata_for_draft(data)

      send("build_#{self.class.draft_association_name}", data)

      # Link the record to its new draft with update_column to avoid
      # re-running validations/callbacks or touching timestamps.
      if send(self.class.draft_association_name).save
        fk = "#{self.class.draft_association_name}_id"
        id = send(self.class.draft_association_name).id
        self.update_column(fk, id)
      else
        raise ActiveRecord::Rollback and return false
      end
    end
  end

  return true
end
# This is only abstracted away at this moment because of the
# `draft_destroy` deprecation. Move all of this logic back into
# `draft_destruction` after `draft_destroy is removed.`
def _draft_destruction
  transaction do
    data = {
      item: self,
      event: :destroy
    }
    data[:object] = object_attrs_for_draft_record if Draftsman.stash_drafted_changes?
    data[Draftsman.whodunnit_field] = Draftsman.whodunnit

    # Stash previous draft in case it needs to be reverted later
    if self.draft?
      attrs = send(self.class.draft_association_name).attributes

      data[:previous_draft] =
        if self.class.draft_class.previous_draft_col_is_json?
          attrs
        else
          Draftsman.serializer.dump(attrs)
        end
    end

    data = merge_metadata_for_draft(data)

    # Reuse any existing draft record for this item; otherwise build one
    # and link it to this record.
    if send(self.class.draft_association_name).present?
      send(self.class.draft_association_name).update!(data)
    else
      send("build_#{self.class.draft_association_name}", data)
      send(self.class.draft_association_name).save!
      send("#{self.class.draft_association_name}_id=", send(self.class.draft_association_name).id)
      self.update_column("#{self.class.draft_association_name}_id", send(self.class.draft_association_name).id)
    end

    # Soft-delete this record (sets the trashed_at timestamp).
    trash!

    # Mock `dependent: :destroy` behavior for all trashable associations
    dependent_associations = self.class.reflect_on_all_associations(:has_one) + self.class.reflect_on_all_associations(:has_many)

    dependent_associations.each do |association|
      if association.klass.draftable? && association.options.has_key?(:dependent) && association.options[:dependent] == :destroy
        dependents = self.send(association.name)

        # `has_one` yields a single record; wrap it for uniform handling.
        dependents = [dependents] if (dependents && association.macro == :has_one)

        if dependents
          dependents.each do |dependent|
            dependent.draft_destruction unless dependent.draft? && dependent.send(dependent.class.draft_association_name).destroy?
          end
        end
      end
    end
  end
end
# Updates object and records a draft for an `update` event. If the draft
# is being updated to the object's original state, the draft is destroyed.
# Returns `true` or `false` depending on if the object passed validation
# and the save was successful.
def _draft_update
  # TODO: Remove callback wrapper in v1.0.
  transaction do
    run_callbacks :draft_update do
      # Run validations.
      return false unless self.valid?

      # If updating a create draft, also update this item.
      if self.draft? && send(self.class.draft_association_name).create?
        the_changes = changes_for_draftsman(:create)
        data = { item: self }
        data[Draftsman.whodunnit_field] = Draftsman.whodunnit
        data[:object] = object_attrs_for_draft_record if Draftsman.stash_drafted_changes?
        data[:object_changes] = serialized_draft_changeset(the_changes) if track_object_changes_for_draft?
        data = merge_metadata_for_draft(data)

        send(self.class.draft_association_name).update(data)
        save
      else
        the_changes = changes_for_draftsman(:update)
        save_only_columns_for_draft if Draftsman.stash_drafted_changes?

        # Destroy the draft if this record has changed back to the
        # original values.
        if self.draft? && the_changes.empty?
          nilified_draft = send(self.class.draft_association_name)
          touch = changed?
          send("#{self.class.draft_association_name}_id=", nil)
          save(touch: touch)
          nilified_draft.destroy
        # Save an update draft if record is changed notably.
        elsif !the_changes.empty?
          data = { item: self, event: :update }
          data[Draftsman.whodunnit_field] = Draftsman.whodunnit
          data[:object] = object_attrs_for_draft_record if Draftsman.stash_drafted_changes?
          data[:object_changes] = serialized_draft_changeset(the_changes) if track_object_changes_for_draft?
          data = merge_metadata_for_draft(data)

          # If there's already a draft, update it.
          if self.draft?
            send(self.class.draft_association_name).update(data)

            if Draftsman.stash_drafted_changes?
              update_skipped_attributes
            else
              self.save
            end
          # If there's not an existing draft, create an update draft.
          else
            send("build_#{self.class.draft_association_name}", data)

            if send(self.class.draft_association_name).save
              update_column("#{self.class.draft_association_name}_id", send(self.class.draft_association_name).id)

              if Draftsman.stash_drafted_changes?
                update_skipped_attributes
              else
                self.save
              end
            else
              raise ActiveRecord::Rollback and return false
            end
          end
        # Otherwise, just save the record.
        else
          self.save
        end
      end
    end
  end
rescue StandardError
  # Fix: this was `rescue Exception`, which also swallowed signals
  # (SignalException), SystemExit, and NoMemoryError. Rescuing
  # StandardError keeps the "return false on failure" contract without
  # masking process-level errors.
  false
end
# Returns hash of attributes that have changed for the object, similar to
# how ActiveRecord's `changes` works.
#
# event - :create or :update; for :create every draftable attribute is
#         treated as changed from nil.
def changes_for_draftsman(event)
  the_changes = {}
  ignore = self.class.draftsman_options[:ignore]
  skip = self.class.draftsman_options[:skip]
  only = self.class.draftsman_options[:only]
  draftable_attrs = self.attributes.keys - ignore - skip
  draftable_attrs = draftable_attrs & only if only.present?

  # If there's already an update draft, get its changes and reconcile them
  # manually.
  if event == :update
    # Collect all attributes' previous and new values.
    # NOTE(review): uses the literal `self.draft` here rather than
    # `draft_association_name` — confirm behavior with a custom `:draft`
    # option.
    draftable_attrs.each do |attr|
      if self.draft? && self.draft.changeset && self.draft.changeset.key?(attr)
        # Diff against the value persisted before the first draft, not
        # against the draft's own value.
        the_changes[attr] = [self.draft.changeset[attr].first, send(attr)]
      else
        the_changes[attr] = [self.send("#{attr}_was"), send(attr)]
      end
    end
  # If there is no draft or it's for a create, then all draftable
  # attributes are the changes.
  else
    draftable_attrs.each { |attr| the_changes[attr] = [nil, send(attr)] }
  end

  # Purge attributes that haven't changed.
  the_changes.delete_if { |key, value| value.first == value.last }
end
# Merges model-level metadata from `meta` and `controller_info` into draft object.
#
# data - Hash of draft attributes built so far.
#
# Returns a new Hash with metadata merged in.
def merge_metadata_for_draft(data)
  # First, we merge the model-level metadata in `meta`.
  draftsman_options[:meta].each do |attribute, value|
    data[attribute] =
      if value.respond_to?(:call)
        value.call(self)
      elsif value.is_a?(Symbol) && respond_to?(value)
        # if it is an attribute that is changing, be sure to grab the current version
        if has_attribute?(value) && send("#{value}_changed?".to_sym)
          send("#{value}_was".to_sym)
        else
          send(value)
        end
      else
        value
      end
  end

  # Second, we merge any extra data from the controller (if available).
  data.merge(Draftsman.controller_info || {})
end

# Save columns outside of the `only` option directly to master table
def save_only_columns_for_draft
  if self.class.draftsman_options[:only].any?
    only_changes = {}
    only_changed_attributes = self.attributes.keys - self.class.draftsman_options[:only]

    only_changed_attributes.each do |key|
      only_changes[key] = send(key) if changed.include?(key)
    end

    # update_columns bypasses validations/callbacks; these columns are
    # intentionally not drafted.
    self.update_columns(only_changes) if only_changes.any?
  end
end

# Returns changeset data in format appropriate for `object_changes`
# column.
def serialized_draft_changeset(my_changes)
  self.class.draft_class.object_changes_col_is_json? ? my_changes : Draftsman.serializer.dump(my_changes)
end

# Returns whether or not the draft class includes an `object_changes` attribute.
def track_object_changes_for_draft?
  self.class.draft_class.column_names.include?('object_changes')
end
# Sets `trashed_at` attribute to now and saves to the database immediately.
# NOTE(review): uses Time.now (system zone) while Draft#publish! uses
# current_time_from_proper_timezone — confirm whether a zone-aware time
# is wanted here.
def trash!
  self.update_column(self.class.trashed_at_attribute_name, Time.now)
end
# Updates skipped attributes' values on this model.
#
# Reloads the record (discarding in-memory drafted changes that were
# stashed on the draft) and then re-applies only the skipped attributes'
# values, which are persisted directly on the master record.
def update_skipped_attributes
  # Skip over this if nothing's being skipped.
  skipped_changed = changed_attributes.keys & draftsman_options[:skip]
  return true unless skipped_changed.present?

  keys = self.attributes.keys.select { |key| draftsman_options[:skip].include?(key) }
  attrs = {}
  keys.each { |key| attrs[key] = self.send(key) }

  self.reload
  self.update(attrs)
end
end
end
end
|
cloversites/draftsman
|
spec/dummy/db/migrate/20150404203627_add_talkatives_table_to_tests.rb
|
# Adds the `talkatives` table used to exercise draft callbacks in specs.
class AddTalkativesTableToTests < ActiveRecord::Migration
  def self.up
    create_table :talkatives, force: true do |t|
      t.string :before_comment
      t.string :around_early_comment
      t.string :around_late_comment
      t.string :after_comment
      t.references :draft
      t.datetime :trashed_at
      t.datetime :published_at
      t.timestamps
    end
  end

  def self.down
    drop_table :talkatives
  end
end
|
cloversites/draftsman
|
lib/generators/draftsman/templates/create_drafts_json.rb
|
# Generator template for the `drafts` table using native JSON columns
# (PostgreSQL). Compare with the plain-text serializer template.
# NOTE(review): there is no `object_changes` JSON column here even though
# Draftsman can track one — confirm whether the template should include it.
class CreateDrafts < ActiveRecord::Migration
  def change
    create_table :drafts do |t|
      t.string :item_type, :null => false
      t.integer :item_id, :null => false
      t.string :event, :null => false
      t.string :whodunnit # :null => false
      t.json :object
      t.json :previous_draft
      t.timestamps
    end

    change_table :drafts do |t|
      t.index :item_type
      t.index :item_id
      t.index :event
      t.index :whodunnit
      t.index :created_at
      t.index :updated_at
    end
  end
end
|
cloversites/draftsman
|
spec/dummy/db/migrate/20150408234937_add_only_children.rb
|
# Adds the `only_children` table used to test the :only option.
class AddOnlyChildren < ActiveRecord::Migration
  def up
    create_table :only_children, force: true do |t|
      t.string :name
      t.references :parent
      t.references :draft, foreign_key: true
      t.datetime :trashed_at
      t.datetime :published_at
      t.timestamps
    end
  end

  def down
    drop_table :only_children
  end
end
|
cloversites/draftsman
|
spec/dummy/db/migrate/20110208155312_set_up_test_tables.rb
|
<filename>spec/dummy/db/migrate/20110208155312_set_up_test_tables.rb
# Sets up all tables used by the Draftsman test suite.
class SetUpTestTables < ActiveRecord::Migration
  def self.up
    create_table :drafts, :force => true do |t|
      t.string :item_type
      t.integer :item_id
      t.string :event, :null => false
      t.string :whodunnit
      t.text :object
      t.text :object_changes
      t.text :previous_draft
      t.timestamps

      # Metadata column
      t.integer :answer

      # Controller info column
      t.string :ip
      t.string :user_agent
    end

    create_table :vanillas, :force => true do |t|
      t.string :name
      t.references :draft, :foreign_key => true
      t.datetime :published_at
      t.timestamps
    end

    create_table :trashables, :force => true do |t|
      t.string :name
      t.string :title, :null => true
      t.references :draft, :foreign_key => true
      t.datetime :published_at
      t.datetime :trashed_at
      t.timestamps
    end

    create_table :draft_as_sketches, :force => true do |t|
      t.string :name
      t.references :sketch
      t.datetime :published_at
      t.timestamps
    end

    create_table :whitelisters, :force => true do |t|
      t.string :name
      t.string :ignored
      t.references :draft, :foreign_key => true
      t.datetime :published_at
      t.timestamps
    end

    create_table :parents, :force => true do |t|
      t.string :name
      t.references :draft, :foreign_key => true
      t.datetime :trashed_at
      t.datetime :published_at
      t.timestamps
    end

    create_table :children, :force => true do |t|
      t.string :name
      t.references :parent
      t.references :draft, :foreign_key => true
      t.datetime :trashed_at
      t.datetime :published_at
      t.timestamps
    end

    create_table :bastards, :force => true do |t|
      t.string :name
      t.references :parent
      t.timestamps
    end

    create_table :skippers, :force => true do |t|
      t.string :name
      t.string :skip_me
      t.references :draft, :foreign_key => true
      t.datetime :trashed_at
      t.datetime :published_at
      t.timestamps
    end
  end

  def self.down
    drop_table :drafts
    drop_table :vanillas
    drop_table :trashables
    drop_table :draft_as_sketches
    drop_table :whitelisters
    drop_table :parents
    drop_table :children
    drop_table :bastards
    # Fix: `skippers` is created in `up` but was never dropped here,
    # which left the table behind on rollback / re-migration.
    drop_table :skippers
  end
end
|
cloversites/draftsman
|
lib/draftsman/draft.rb
|
class Draftsman::Draft < ActiveRecord::Base
# Associations

# The drafted record this draft belongs to.
belongs_to :item, polymorphic: true

# Validations
validates :event, presence: true

# Scopes

# Returns `where` that filters to only `create` drafts.
scope :creates, -> { where(event: :create) }

# Returns `where` that filters to only `destroy` drafts.
scope :destroys, -> { where(event: :destroy) }

# Returns `where` that filters to only `update` drafts.
scope :updates, -> { where(event: :update) }
# Returns drafts scoped to the given item type and id.
#
# Fix: `scoped(conditions: ...)` was removed in Rails 4; the rest of this
# codebase already relies on Rails 4+ query methods (`where.not`), so the
# old form raised NoMethodError. `where` builds the equivalent relation.
def self.with_item_keys(item_type, item_id)
  where(item_type: item_type, item_id: item_id)
end
# Returns whether the `object` column is using the `json` type supported by
# PostgreSQL.
#
# Fix: memoized with an explicit nil check — the previous `||=` form
# re-ran the column inspection on every call whenever the answer was
# `false`.
def self.object_col_is_json?
  if @object_col_is_json.nil?
    @object_col_is_json = Draftsman.stash_drafted_changes? && columns_hash['object'].type == :json
  end
  @object_col_is_json
end

# Returns whether or not this class has an `object_changes` column.
def self.object_changes_col_present?
  column_names.include?('object_changes')
end

# Returns whether the `object_changes` column is using the `json` type
# supported by PostgreSQL. (Memoized; see object_col_is_json?.)
def self.object_changes_col_is_json?
  if @object_changes_col_is_json.nil?
    @object_changes_col_is_json = columns_hash['object_changes'].type == :json
  end
  @object_changes_col_is_json
end

# Returns whether the `previous_draft` column is using the `json` type
# supported by PostgreSQL. (Memoized; see object_col_is_json?.)
def self.previous_draft_col_is_json?
  if @previous_draft_col_is_json.nil?
    @previous_draft_col_is_json = columns_hash['previous_draft'].type == :json
  end
  @previous_draft_col_is_json
end
# Returns what changed in this draft. Similar to `ActiveModel::Dirty#changes`.
# Returns `nil` if your `drafts` table does not have an `object_changes` text
# column.
def changeset
  return nil unless self.class.object_changes_col_present?
  # Deserialized lazily; `load_changeset` is defined elsewhere in this class.
  @changeset ||= load_changeset
end
# True when this draft records a `create` event.
def create?
  event == 'create'
end

# True when this draft records a `destroy` event.
def destroy?
  event == 'destroy'
end
# Returns related draft dependencies that would be along for the ride for a
# `publish!` action.
def draft_publication_dependencies
  dependencies = []

  # Work from the drafted state when changes are stashed on the draft.
  my_item =
    if Draftsman.stash_drafted_changes? && self.item.draft?
      self.item.draft.reify
    else
      self.item
    end

  case self.event.to_sym
  when :create, :update
    # Publishing this item requires its drafted `belongs_to` parents to
    # be published first.
    associations = my_item.class.reflect_on_all_associations(:belongs_to)

    associations.each do |association|
      association_class =
        if association.options.key?(:polymorphic)
          my_item.send(association.foreign_key.sub('_id', '_type')).constantize
        else
          association.klass
        end

      if association_class.draftable? && association.name != association_class.draft_association_name.to_sym
        dependency = my_item.send(association.name)
        dependencies << dependency.draft if dependency.present? && dependency.draft? && dependency.draft.create?
      end
    end
  when :destroy
    # Destroying this item drags its drafted children along.
    associations = my_item.class.reflect_on_all_associations(:has_one) + my_item.class.reflect_on_all_associations(:has_many)

    associations.each do |association|
      if association.klass.draftable?
        # Reconcile different association types into an array, even if `has_one` produces a single-item
        associated_dependencies =
          case association.macro
          when :has_one
            my_item.send(association.name).present? ? [my_item.send(association.name)] : []
          when :has_many
            my_item.send(association.name)
          end

        associated_dependencies.each do |dependency|
          dependencies << dependency.draft if dependency.draft?
        end
      end
    end
  end

  dependencies
end
# Returns related draft dependencies that would be along for the ride for a
# `revert!` action. (Mirror image of draft_publication_dependencies:
# reverting a `create` walks children, reverting a `destroy` walks parents.)
def draft_reversion_dependencies
  dependencies = []

  case self.event.to_sym
  when :create
    associations = self.item.class.reflect_on_all_associations(:has_one) + self.item.class.reflect_on_all_associations(:has_many)

    associations.each do |association|
      if association.klass.draftable?
        # Reconcile different association types into an array, even if
        # `has_one` produces a single-item
        associated_dependencies =
          case association.macro
          when :has_one
            self.item.send(association.name).present? ? [self.item.send(association.name)] : []
          when :has_many
            self.item.send(association.name)
          end

        associated_dependencies.each do |dependency|
          dependencies << dependency.draft if dependency.draft?
        end
      end
    end
  when :destroy
    associations = self.item.class.reflect_on_all_associations(:belongs_to)

    associations.each do |association|
      association_class =
        if association.options.key?(:polymorphic)
          self.item.send(association.foreign_key.sub('_id', '_type')).constantize
        else
          association.klass
        end

      # Only trashable parents can have a destroy draft to revert.
      if association_class.draftable? && association_class.trashable? && association.name != association_class.draft_association_name.to_sym
        dependency = self.item.send(association.name)
        dependencies << dependency.draft if dependency.present? && dependency.draft? && dependency.draft.destroy?
      end
    end
  end

  dependencies
end
# Publishes this draft's associated `item`, publishes its `item`'s
# dependencies, and destroys itself.
# - For `create` drafts, adds a value for the `published_at` timestamp on the
# item and destroys the draft.
# - For `update` drafts, applies the drafted changes to the item and destroys
# the draft.
# - For `destroy` drafts, destroys the item and the draft.
def publish!
  ActiveRecord::Base.transaction do
    case self.event.to_sym
    when :create, :update
      # Destroy draft
      self.destroy

      # Parents must be published too
      self.draft_publication_dependencies.each { |dependency| dependency.publish! }

      # Update drafts need to copy over data to main record
      self.item.attributes = self.reify.attributes if Draftsman.stash_drafted_changes? && self.update?

      # Write `published_at` attribute
      self.item.send("#{self.item.class.published_at_attribute_name}=", current_time_from_proper_timezone)

      # Clear out draft
      self.item.send("#{self.item.class.draft_association_name}_id=", nil)

      # Persist without validations so publication cannot be blocked, then
      # reload so the in-memory item reflects its published state.
      self.item.save(validate: false)
      self.item.reload
    when :destroy
      self.item.destroy
    end
  end
end
# Returns instance of item converted to its drafted state.
#
# Example usage:
#
# `@category = @category.draft.reify if @category.draft?`
# NOTE(review): `ignore_reload` is accepted but never read in this body —
# presumably kept for API compatibility; confirm against callers.
def reify(ignore_reload = false)
  # This appears to be necessary if for some reason the draft's model
  # hasn't been loaded (such as when done in the console).
  unless defined? self.item_type
    require self.item_type.underscore
  end

  without_identity_map do
    # Create draft doesn't require reification.
    if self.create?
      self.item
    # If a previous draft is stashed, restore that.
    elsif self.previous_draft.present?
      reify_previous_draft.reify
    # Prefer changeset for reification if it's present.
    elsif self.changeset.present? && self.changeset.any?
      self.changeset.each do |key, value|
        # Skip counter_cache columns
        if self.item.respond_to?("#{key}=") && !key.end_with?('_count')
          # `value` is a `[before, after]` pair; apply the drafted value.
          self.item.send("#{key}=", value.last)
        elsif !key.end_with?('_count')
          logger.warn("Attribute #{key} does not exist on #{self.item_type} (Draft ID: #{self.id}).")
        end
      end

      self.item.send("#{self.item.class.draft_association_name}=", self)
      self.item
    # Reify based on object if it's all that's available.
    elsif self.object.present?
      attrs = self.class.object_col_is_json? ? self.object : Draftsman.serializer.load(self.object)
      self.item.class.unserialize_attributes_for_draftsman(attrs)

      attrs.each do |key, value|
        # Skip counter_cache columns
        if self.item.respond_to?("#{key}=") && !key.end_with?('_count')
          self.item.send("#{key}=", value)
        elsif !key.end_with?('_count')
          logger.warn("Attribute #{key} does not exist on #{self.item_type} (Draft ID: #{self.id}).")
        end
      end

      self.item.send("#{self.item.class.draft_association_name}=", self)
      self.item
    end
  end
end
# Reverts this draft.
# - For create drafts, destroys the draft and the item.
# - For update drafts, destroys the draft only.
# - For destroy drafts, destroys the draft and undoes the `trashed_at`
# timestamp on the item. If a previous draft was drafted for destroy,
# restores the draft.
def revert!
  ActiveRecord::Base.transaction do
    case self.event.to_sym
    when :create
      # A never-published create is undone by removing item and draft.
      self.item.destroy
      self.destroy
    when :update
      # If we're not stashing changes, we need to restore original values from
      # the changeset.
      if self.class.object_changes_col_present? && !Draftsman.stash_drafted_changes?
        self.changeset.each do |attr, values|
          # `values` is a `[before, after]` pair; restore the original value.
          self.item.send("#{attr}=", values.first) if self.item.respond_to?(attr)
        end
      end

      # Then clear out the draft ID.
      self.item.send("#{self.item.class.draft_association_name}_id=", nil)
      self.item.save!(validate: false, touch: false)

      # Then destroy draft.
      self.destroy
    when :destroy
      # Parents must be restored too
      self.draft_reversion_dependencies.each { |dependency| dependency.revert! }

      # Restore previous draft if one was stashed away
      # (`update_all` skips callbacks/validations while re-linking the draft
      # and clearing the trashed timestamp).
      if self.previous_draft.present?
        prev_draft = reify_previous_draft
        prev_draft.save!

        self.item.class.where(id: self.item).update_all "#{self.item.class.draft_association_name}_id".to_sym => prev_draft.id,
                                                        self.item.class.trashed_at_attribute_name => nil
      else
        self.item.class.where(id: self.item).update_all "#{self.item.class.draft_association_name}_id".to_sym => nil,
                                                        self.item.class.trashed_at_attribute_name => nil
      end

      self.destroy
    end
  end
end
# Returns whether or not this is an `update` event.
# True when this draft records an `update` event.
def update?
  :update == event.to_sym
end
private
# Restores previous draft and returns it.
def reify_previous_draft
  draft = self.class.new

  without_identity_map do
    # `previous_draft` holds a serialized draft record (raw data when the
    # column is a native JSON type, serialized text otherwise).
    attrs = self.class.object_col_is_json? ? self.previous_draft : Draftsman.serializer.load(self.previous_draft)

    attrs.each do |key, value|
      # Never copy the primary key onto the new draft record.
      if key.to_sym != :id && draft.respond_to?("#{key}=")
        draft.send("#{key}=", value)
      elsif key.to_sym != :id
        logger.warn("Attribute #{key} does not exist on #{item_type} (Draft ID: #{self.id}).")
      end
    end
  end

  draft
end
# Runs the given block with ActiveRecord's legacy IdentityMap disabled when
# that feature is available; otherwise simply runs the block.
def without_identity_map(&block)
  identity_map = defined?(ActiveRecord::IdentityMap) ? ActiveRecord::IdentityMap : nil
  if identity_map.respond_to?(:without)
    identity_map.without(&block)
  else
    yield
  end
end
# Deserializes `object_changes` into a HashWithIndifferentAccess and lets
# the item's class unserialize custom attribute types. Returns an empty
# hash if anything goes wrong (e.g. no changes column or bad data).
def load_changeset
  changes = HashWithIndifferentAccess.new(object_changes_deserialized)
  self.item_type.constantize.unserialize_draft_attribute_changes(changes)
  changes
rescue
  # Bare rescue catches StandardError; any failure means "no changes".
  {}
end
# Returns the raw changeset, deserializing it first unless the
# `object_changes` column is a native JSON type.
def object_changes_deserialized
  return object_changes if self.class.object_changes_col_is_json?
  Draftsman.serializer.load(object_changes)
end
end
|
cloversites/draftsman
|
lib/generators/draftsman/install_generator.rb
|
<filename>lib/generators/draftsman/install_generator.rb
require 'rails/generators'
require 'rails/generators/migration'
require 'rails/generators/active_record'
module Draftsman
  # Installs Draftsman into a Rails app: generates the config initializer
  # and the (not yet run) migrations that create the `drafts` table.
  class InstallGenerator < ::Rails::Generators::Base
    include ::Rails::Generators::Migration

    desc 'Creates config initializer and generates (but does not run) a migration to add a drafts table.'
    source_root File.expand_path('../templates', __FILE__)
    class_option :skip_initializer, :type => :boolean, :default => false, :desc => 'Skip generation of the boilerplate initializer at `config/initializers/draftsman.rb`.'
    class_option :with_changes, :type => :boolean, :default => false, :desc => 'Store changeset (diff) with each draft.'
    class_option :with_pg_json, :type => :boolean, :default => false, :desc => 'Use PostgreSQL JSON data type for serialized data.'

    # Writes the migration(s); the `_json` template variants are used when
    # targeting PostgreSQL's JSON column type.
    def create_migration_file
      suffix = options.with_pg_json? ? '_json' : ''
      migration_template "create_drafts#{suffix}.rb", 'db/migrate/create_drafts.rb'
      return unless options.with_changes?
      migration_template "add_object_changes_column_to_drafts#{suffix}.rb",
                         'db/migrate/add_object_changes_column_to_drafts.rb'
    end

    # Delegates migration numbering to ActiveRecord's generator.
    def self.next_migration_number(dirname)
      ActiveRecord::Generators::Base.next_migration_number(dirname)
    end

    # Copies the boilerplate initializer unless explicitly skipped.
    def copy_config
      template 'config/initializers/draftsman.rb' unless options.skip_initializer?
    end
  end
end
|
cloversites/draftsman
|
lib/generators/draftsman/templates/add_object_changes_column_to_drafts_json.rb
|
<filename>lib/generators/draftsman/templates/add_object_changes_column_to_drafts_json.rb
# Adds a JSON `object_changes` column to `drafts` so each draft can store
# its changeset (diff) alongside the serialized object.
class AddObjectChangesColumnToDrafts < ActiveRecord::Migration
  def self.up
    add_column :drafts, :object_changes, :json
  end

  def self.down
    remove_column :drafts, :object_changes
  end
end
|
jinroq/agyoh
|
agyoh_logger.rb
|
# coding: utf-8
# Appends timestamped log lines to agyoh's log file under ./tmp.
class AgyohLogger
  # Path of the log file written by this logger.
  AGYOH_LOG_FILE = "./tmp/agyoh.log".freeze

  def initialize
    # Touch the log file so it exists before the first write.
    File.open(AGYOH_LOG_FILE, "a+").close
  end

  # Class-level conveniences that delegate to a fresh instance.
  def self.log_info(message = '')
    new.log_info(message)
  end

  def self.log_error(message = '')
    new.log_error(message)
  end

  # Appends "[<timestamp>] <message>" to the log file.
  def log_info(message = '')
    stamp = Time.now
    File.open(AGYOH_LOG_FILE, "a+") { |f| f.puts("[#{stamp}] #{message}") }
  end

  # Logs with an "[ERROR]" prefix.
  def log_error(message = '')
    log_info("[ERROR] #{message}")
  end
end
|
jinroq/agyoh
|
initializers/sqlite3_seeds.rb
|
<filename>initializers/sqlite3_seeds.rb
# coding: utf-8
module Initializers
  # Creates the local SQLite database file and bootstraps the
  # device_tokens table on first run.
  class Sqlite3Seeds
    require "sqlite3"

    # agyoh sqlite file
    AGYOH_SQLITE3_FILE = "./tmp/agyoh.sqlite3".freeze

    def initialize
      # Opens (and creates, if missing) the database file.
      @db = SQLite3::Database.new(AGYOH_SQLITE3_FILE)
      ret = is_existed_table?("device_tokens")
      puts("ret => #{ret}")
      unless ret
        create_device_tokens
      end
    end

    private

    # Returns truthy when `table_name` exists, false when SQLite reports
    # "no such table"; any other SQL error is re-raised.
    def is_existed_table?(table_name)
      begin
        ret = @db.execute("select count(*) from #{table_name}")
        true unless ret.nil?
      rescue SQLite3::SQLException => e
        puts("e => #{e.inspect}")
        puts("e.message => #{e.message}")
        if "no such table: #{table_name}" == e.message
          return false
        else
          raise e
        end
      end
    end

    # Creates the device_tokens table.
    def create_device_tokens
      sql = <<-SQL
create table device_tokens (
id integer primary key autoincrement,
token text not null,
device_name text not null,
created_at text not null,
updated_at text not null
);
      SQL
      @db.execute(sql)
    end
  end
end
|
jinroq/agyoh
|
agyoh.rb
|
<filename>agyoh.rb
# coding: utf-8
# Agyoh: a small daemon that connects to a local TCP peer and pushes JSON
# payloads from worker threads while logging its activity.
class Agyoh
  require 'net/http'
  require "socket"
  require "json"
  require "sqlite3"

  # PID file written after daemonizing.
  AGYOH_PID_FILE = "./tmp/agyoh.pid".freeze
  # Log file appended to by log_info/log_error.
  AGYOH_LOG_FILE = "./tmp/agyoh.log".freeze
  # Port for third-party integrations.
  PORTNUMBER_FOR_3RD_PARTY = 2018
  # Port for clients.
  PORTNUMBER_FOR_CLIENT = 2019

  # Touches the PID and log files so later writes cannot fail on a
  # missing file.
  def initialize
    File.open(AGYOH_PID_FILE, "w").close
    File.open(AGYOH_LOG_FILE, "a+").close
  end

  # Daemonizes the process and runs the main loop.
  def run
    log_info("== Agyoh Begin.")
    daemonize
    execute
    log_info("== Agyoh End.")
  rescue => e
    # BUG FIX: this rescue previously had an empty body and silently
    # swallowed every error; at least leave a trace in the log.
    log_error("#{self.class.name}.run #{e}")
  end

  private

  # Forks into the background and records the daemon's PID.
  def daemonize
    Process.daemon(true, true)
    File.open(AGYOH_PID_FILE, "w") { |f| f << Process.pid }
  rescue => e
    error_message = "#{self.class.name}.daemonize #{e}"
    log_error(error_message)
    STDERR.puts(error_message)
    exit(1)
  end

  # Main work loop: connects to the client port and spawns worker threads.
  def execute
    # TODO: create the database, its tables and seed the initial data here.
    begin
      @tcp_socket = TCPSocket.open("127.0.0.1", 2019)
    rescue => e
      error_message = "TCPSocket.open failed : #$!\n"
      puts(error_message)
      log_error(error_message + "#{e.message}")
    end

    # BUG FIX: `threads` was never initialized, so `threads <<` raised
    # NameError. Start from an empty array.
    threads = []

    # Thread that continuously pushes JSON over the TCP socket.
    threads << Thread.new do |thread|
      while true
        @tcp_socket.write('{ "key" : "value" }')
      end
    end

    # Thread that calls the umgyoh API.
    threads << Thread.fork do |thread|
      message = "Access umgyoh!"
      puts(message)
      log_info(message)
    end

    # Wait for all workers.
    threads.each do |thread|
      thread.join
    end
  end

  # Closes the TCP socket.
  def stop
    @tcp_socket.close
  end

  # Appends a timestamped message to the log file.
  def log_info(message = '')
    now = Time.now
    File.open(AGYOH_LOG_FILE, "a+") do |f|
      f.puts("[#{now}] #{message}")
    end
  end

  def log_error(message = '')
    log_info("[ERROR] #{message}")
  end
end
# Boot the daemon.
Agyoh.new.run
|
jinroq/agyoh
|
agyoh_tcp_server.rb
|
# coding: utf-8
# A minimal TCP server that accepts client connections, logs each peer's
# address, and immediately closes the connection.
class AgyohTcpServer
  require "socket"
  require "./utils/logger"
  include Utils

  # agyoh pid file
  AGYOH_PID_FILE = "./tmp/agyoh.pid".freeze
  # port for client
  PORTNUMBER_FOR_CLIENT = 2019

  def initialize
    # Touch the pid file so it exists.
    File.open(AGYOH_PID_FILE, "w").close
  end

  def run
    execute
  end

  private

  # Accept loop: logs each connecting peer, then closes its socket.
  def execute
    @tcp_server = TCPServer.open(PORTNUMBER_FOR_CLIENT)
    while true
      # Accept a connection request and get a TCPSocket for it.
      socket = @tcp_server.accept
      # Log the connecting peer's address information.
      peeraddr = socket.peeraddr
      puts("socket.peeraddr => #{peeraddr}")
      Utils::Logger.log_info("socket.peeraddr => #{peeraddr}")
      # Close the per-connection socket.
      socket.close
    end
  ensure
    # BUG FIX: the close call after the infinite loop was unreachable, so
    # the listener leaked on interrupt/error; `ensure` guarantees release.
    @tcp_server.close if @tcp_server
  end
end
# Start the server; blocks forever accepting connections.
AgyohTcpServer.new.run
|
jinroq/agyoh
|
agyoh_web.rb
|
# coding: utf-8
# Web server that will host agyoh's HTTP endpoints (constants only so far).
class AgyohWeb
  require 'net/http'
  require "socket"

  # agyoh pid file
  AGYOH_PID_FILE = "./tmp/agyoh.pid".freeze
  # agyoh log file
  AGYOH_LOG_FILE = "./tmp/agyoh.log".freeze
  # agyoh port for third-party integrations
  AGYOH_3RD_PARTY_PORT = 2018
  # agyoh port for clients
  AGYOH_CLIENT_PORT = 2019
end
|
jinroq/agyoh
|
utils/logger.rb
|
# coding: utf-8
module Utils
  # Appends timestamped log lines to a shared file under ./tmp.
  class Logger
    LOG_FILE = "./tmp/agyoh.log".freeze

    def initialize
      # Ensure the log file exists before the first write.
      File.open(LOG_FILE, "a+").close
    end

    # --- class-level conveniences ---

    # level: info
    def self.log_info(message = '')
      new.log_info(message)
    end

    # level: error
    def self.log_error(message = '')
      new.log_error(message)
    end

    # --- instance API ---

    # Appends "[<timestamp>] <message>" to the log file.
    def log_info(message = '')
      stamp = Time.now
      File.open(LOG_FILE, "a+") { |f| f.puts("[#{stamp}] #{message}") }
    end

    # Logs with an "[ERROR]" prefix.
    def log_error(message = '')
      log_info("[ERROR] #{message}")
    end
  end
end
|
wordjelly/mailgun-ruby
|
lib/mailgun/version.rb
|
<filename>lib/mailgun/version.rb
# It's the version. Yeay!
module Mailgun
  # Gem version string. Frozen: a mutable constant can be corrupted by
  # accidental in-place mutation.
  VERSION = '1.1.7'.freeze
end
|
wordjelly/mailgun-ruby
|
lib/mailgun/events/events.rb
|
require 'mailgun/exceptions/exceptions'
module Mailgun

  # A Mailgun::Events object makes it really simple to consume
  # Mailgun's events from the Events endpoint.
  #
  # This is not yet comprehensive.
  #
  # Examples
  #
  #   See the Github documentation for full examples.
  class Events
    include Enumerable

    # Public: event initializer
    #
    # client - an instance of Mailgun::Client
    # domain - the domain to build queries
    def initialize(client, domain)
      @client = client
      @domain = domain
      # Endpoint fragments for paging, captured from the last response.
      @paging_next = nil
      @paging_previous = nil
    end

    # Public: Issues a simple get against the client. Alias of `next`.
    #
    # params - a Hash of query options and/or filters.
    #
    # Returns a Mailgun::Response object.
    def get(params = nil)
      self.next(params)
    end

    # Public: Using built in paging, obtains the next set of data.
    # If an events request hasn't been sent previously, this will send one
    # without parameters
    #
    # params - a Hash of query options and/or filters.
    #
    # Returns a Mailgun::Response object.
    def next(params = nil)
      get_events(params, @paging_next)
    end

    # Public: Using built in paging, obtains the previous set of data.
    # If an events request hasn't been sent previously, this will send one
    # without parameters
    #
    # params - a Hash of query options and/or filters.
    #
    # Returns Mailgun::Response object.
    def previous(params = nil)
      get_events(params, @paging_previous)
    end

    # Public: Allows iterating through all events and performs automatic paging.
    #
    # &block - Block to execute on items.
    #
    # NOTE(review): relies on the API eventually returning an empty `items`
    # page; an endpoint that keeps returning data would loop indefinitely.
    def each(&block)
      items = self.next.to_h['items']
      until items.empty?
        items.each(&block)
        items = self.next.to_h['items']
      end
    end

    private

    # Internal: Makes and processes the event request through the client
    #
    # params - optional Hash of query options
    # paging - the URL key used for previous/next requests
    #
    # Returns a Mailgun::Response object.
    def get_events(params = nil, paging = nil)
      response = @client.get(construct_url(paging), params)
      extract_paging(response)
      response
    end

    # Internal: given an event response, pull and store the paging keys
    #
    # response - a Mailgun::Response object
    #
    # Return is irrelevant.
    def extract_paging(response)
      paging = response.to_h['paging']
      next_page_url = paging && paging['next'] # gives nil when any one of the keys doesn't exist
      previous_page_url = paging && paging['previous'] # can be replaced with Hash#dig for ruby >= 2.3.0
      @paging_next = extract_endpoint_from(next_page_url)
      @paging_previous = extract_endpoint_from(previous_page_url)
    end

    # Internal: given a paging URL, extract the endpoint
    #
    # url - the paging URL for the previous/next page
    #
    # Returns a String of the partial URI if the given url follows the regular API format
    # Returns nil in other cases (e.g. when given nil, or an irrelevant url)
    def extract_endpoint_from(url = nil)
      URI.parse(url).path[/api.mailgun.net\/v[\d]\/#{@domain}\/events\/(.+)/,1]
    rescue URI::InvalidURIError
      nil
    end

    # Internal: construct the event path to be used by the client
    #
    # paging - the URL key for previous/next set of results
    #
    # Returns a String of the partial URI
    def construct_url(paging = nil)
      return "#{@domain}/events/#{paging}" if paging
      "#{@domain}/events"
    end
  end
end
|
ClaytonPassmore/rails_param
|
spec/rails_integration_spec.rb
|
<filename>spec/rails_integration_spec.rb
require 'spec_helper'
# Integration specs exercising rails_param's coercion, nested validation,
# transform/default options and error reporting through a fake controller.
describe FakeController, type: :controller do
  # Needed to run tests against Rails 4 AND 5
  def prepare_params(params)
    return params if Rails.version[0].to_i <= 4
    { params: params }
  end

  describe "type coercion" do
    it "coerces to integer" do
      get :index, **prepare_params(page: "666")
      expect(controller.params[:page]).to eql(666)
    end

    it "raises InvalidParameterError if supplied an array instead of other type (prevent TypeError)" do
      expect { get :index, **prepare_params(page: ["a", "b", "c"]) }.to raise_error(
        RailsParam::InvalidParameterError, %q('["a", "b", "c"]' is not a valid Integer))
    end

    it "raises InvalidParameterError if supplied an hash instead of other type (prevent TypeError)" do
      expect { get :index, **prepare_params(page: {"a" => "b", "c" => "d"}) }.to raise_error(
        RailsParam::InvalidParameterError, %q('{"a"=>"b", "c"=>"d"}' is not a valid Integer))
    end

    it "raises InvalidParameterError if supplied an hash instead of an array (prevent NoMethodError)" do
      expect { get :index, **prepare_params(tags: {"a" => "b", "c" => "d"}) }.to raise_error(
        RailsParam::InvalidParameterError, %q('{"a"=>"b", "c"=>"d"}' is not a valid Array))
    end
  end

  describe "nested_hash" do
    it "validates nested properties" do
      params = {
        'book' => {
          'title' => 'One Hundred Years of Solitude',
          'author' => {
            'first_name' => '<NAME>',
            'last_name' => 'Marquez',
            'age' => '70'
          },
          'price' => '$1,000.00'
        }}
      get :edit, **prepare_params(params)
      expect(controller.params[:book][:author][:age]).to eql 70
      expect(controller.params[:book][:author][:age]).to be_kind_of Integer
      expect(controller.params[:book][:price]).to eql 1000.0
      expect(controller.params[:book][:price]).to be_instance_of BigDecimal
    end

    it "raises error when required nested attribute missing" do
      params = {
        'book' => {
          'title' => 'One Hundred Years of Solitude',
          'author' => {
            'last_name' => 'Marquez',
            'age' => '70'
          },
          'price' => '$1,000.00'
        }}
      expect { get :edit, **prepare_params(params) }.to raise_error { |error|
        expect(error).to be_a(RailsParam::InvalidParameterError)
        expect(error.param).to eql("first_name")
        expect(error.options).to eql({:required => true})
      }
    end

    it "passes when hash that's not required but has required attributes is missing" do
      params = {
        'book' => {
          'title' => 'One Hundred Years of Solitude',
          'price' => '$1,000.00'
        }}
      get :edit, **prepare_params(params)
      expect(controller.params[:book][:price]).to eql 1000.0
      expect(controller.params[:book][:price]).to be_instance_of BigDecimal
    end
  end

  describe "InvalidParameterError" do
    it "raises an exception with params attributes" do
      expect { get :index, **prepare_params(sort: "foo") }.to raise_error { |error|
        expect(error).to be_a(RailsParam::InvalidParameterError)
        expect(error.param).to eql("sort")
        expect(error.options).to eql({:in => ["asc", "desc"], :default => "asc", :transform => :downcase})
      }
    end
  end

  describe ":transform parameter" do
    it "applies transformations" do
      get :index, **prepare_params(sort: "ASC")
      expect(controller.params[:sort]).to eql("asc")
    end
  end

  describe "default values" do
    it "applies default values" do
      get :index
      expect(controller.params[:page]).to eql(1)
      expect(controller.params[:sort]).to eql("asc")
    end
  end

  describe "nested_array" do
    it "responds with a 400 when the nested array is not supplied properly" do
      params = {
        'filter' => 'state'
      }
      expect { get :nested_array, **prepare_params(params) }.to raise_error do |error|
        expect(error).to be_a(RailsParam::InvalidParameterError)
      end
    end
  end

  describe "optional_array" do
    # If we don't specify a content type that can accept `null` as a value,
    # rails may attempt to coerce nil values into empty strings.
    # See here:
    # https://github.com/rails/rails-controller-testing/issues/33
    before { request.headers['Content-Type'] = 'application/json' }

    it "responds with a 200 when the optional array is not provided" do
      post :optional_array, **prepare_params({})
      expect(response.status).to eq(200)
    end

    it "raises an invalid parameter error when nil is explicitly provided" do
      params = { my_array: nil }
      expect { post :optional_array, **prepare_params(params) }.to raise_error do |error|
        expect(error).to be_a(RailsParam::InvalidParameterError)
      end
    end
  end
end
|
ClaytonPassmore/rails_param
|
lib/rails_param/version.rb
|
<filename>lib/rails_param/version.rb<gh_stars>0
module RailsParam #:nodoc:
  # Gem version string, frozen to prevent accidental mutation.
  VERSION = "1.0.1".freeze
end
|
acoulton/mysql
|
test/cookbooks/mysql_test/recipes/yum_repo.rb
|
# Set a version for modern distros.
# centos-7 and fedora ship MariaDB out of the box.
node.default['mysql']['version'] = '5.6' if node['platform_family'] == 'rhel' && node['platform_version'].to_i == 7
node.default['mysql']['version'] = '5.6' if node['platform_family'] == 'fedora'

# Before that, we use "native" versions
unless node['mysql'].nil?
  case node['mysql']['version']
  when '5.5'
    # No community 5.5 repo applies on RHEL 5; nothing to configure.
    return if node['platform_family'] == 'rhel' && node['platform_version'].to_i == 5
    include_recipe 'yum-mysql-community::mysql55'
  when '5.6'
    include_recipe 'yum-mysql-community::mysql56'
  when '5.7'
    include_recipe 'yum-mysql-community::mysql57'
  end
end
|
acoulton/mysql
|
test/cookbooks/mysql_test/metadata.rb
|
# Test fixture cookbook used by the mysql cookbook's integration suites.
name 'mysql_test'
version '0.0.1'
depends 'mysql'
depends 'yum-mysql-community'
|
acoulton/mysql
|
test/integration/config51/run_spec.rb
|
# Verifies ownership and permissions of the generated MySQL config
# directories/files for the config51 suite (SCL prefix on centos).
prefix_dir = os[:family] == 'centos' ? '/opt/rh/mysql51/root' : nil

if %w(debian ubuntu centos suse fedora).include? os[:family]
  describe directory("#{prefix_dir}/etc/mysql-default") do
    its('mode') { should eq 00755 }
    its('owner') { should eq 'root' }
    its('group') { should eq 'root' }
  end

  describe directory("#{prefix_dir}/etc/mysql-default/conf.d") do
    its('mode') { should eq 00750 }
    its('owner') { should eq 'mysql' }
    its('group') { should eq 'mysql' }
  end

  describe file("#{prefix_dir}/etc/mysql-default/conf.d/hello.cnf") do
    its('mode') { should eq 00640 }
    its('owner') { should eq 'mysql' }
    its('group') { should eq 'mysql' }
  end

  describe directory("#{prefix_dir}/etc/mysql-foo") do
    its('mode') { should eq 00755 }
    its('owner') { should eq 'root' }
    its('group') { should eq 'root' }
  end

  describe directory("#{prefix_dir}/etc/mysql-foo/conf.d") do
    its('mode') { should eq 00750 }
    its('owner') { should eq 'mysql' }
    its('group') { should eq 'mysql' }
  end

  describe file("#{prefix_dir}/etc/mysql-foo/conf.d/hello_again.cnf") do
    its('mode') { should eq 00640 }
    its('owner') { should eq 'mysql' }
    its('group') { should eq 'mysql' }
  end
end
|
acoulton/mysql
|
libraries/mysql_base.rb
|
module MysqlCookbook
  # Abstract base for the cookbook's custom resources; holds the type
  # constants and properties shared by every mysql_* resource.
  class MysqlBase < Chef::Resource
    require_relative 'helpers'

    # All resources are composites
    def whyrun_supported?
      true
    end

    ################
    # Type Constants
    ################

    # Guarded so reloading this file does not redefine the constant.
    Boolean = property_type(
      is: [true, false],
      default: false
    ) unless defined?(Boolean)

    ###################
    # Common Properties
    ###################

    property :run_group, String, default: 'mysql', desired_state: false
    property :run_user, String, default: 'mysql', desired_state: false
    property :version, String, default: lazy { default_major_version }, desired_state: false
    property :include_dir, String, default: lazy { default_include_dir }, desired_state: false
    property :major_version, String, default: lazy { major_from_full(version) }, desired_state: false

    declare_action_class
  end
end
|
acoulton/mysql
|
test/integration/service56-multi/run_spec.rb
|
# Integration spec for two MySQL 5.6 instances: verifies root logins on
# both ports and the installed server version.
# NOTE: heredoc command bodies are runtime strings and kept verbatim.

# Platform-specific mysql client path.
def mysql_bin
return '/opt/mysql56/bin/mysql' if os[:family] =~ /solaris/
return '/opt/local/bin/mysql' if os[:family] =~ /smartos/
'/usr/bin/mysql'
end

# Platform-specific mysqld server path.
def mysqld_bin
return '/opt/mysql51/bin/mysqld' if os[:family] =~ /solaris/
return '/opt/local/bin/mysqld' if os[:family] =~ /smartos/
'/usr/sbin/mysqld'
end

# Queries instance 1 (port 3307) for root@127.0.0.1.
def instance_1_cmd_1
<<-EOF
#{mysql_bin} \
-h 127.0.0.1 \
-P 3307 \
-u root \
-pilikerandompasswords \
-e "SELECT Host,User FROM mysql.user WHERE User='root' AND Host='127.0.0.1';" \
--skip-column-names
EOF
end

# Queries instance 1 (port 3307) for root@localhost.
def instance_1_cmd_2
<<-EOF
#{mysql_bin} \
-h 127.0.0.1 \
-P 3307 \
-u root \
-pilikerandompasswords \
-e "SELECT Host,User FROM mysql.user WHERE User='root' AND Host='localhost';" \
--skip-column-names
EOF
end

# Queries instance 2 (port 3308, password containing spaces) for root@127.0.0.1.
def instance_2_cmd_1
<<-EOF
#{mysql_bin} \
-h 127.0.0.1 \
-P 3308 \
-u root \
-pstring\\ with\\ spaces \
-e "SELECT Host,User FROM mysql.user WHERE User='root' AND Host='127.0.0.1';" \
--skip-column-names
EOF
end

# Queries instance 2 (port 3308) for root@localhost.
def instance_2_cmd_2
<<-EOF
#{mysql_bin} \
-h 127.0.0.1 \
-P 3308 \
-u root \
-pstring\\ with\\ spaces \
-e "SELECT Host,User FROM mysql.user WHERE User='root' AND Host='localhost';" \
--skip-column-names
EOF
end

def mysqld_cmd
"#{mysqld_bin} --version"
end

describe command(instance_1_cmd_1) do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(/| 127.0.0.1 | root |/) }
end

describe command(instance_1_cmd_2) do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(/| localhost | root |/) }
end

describe command(instance_2_cmd_1) do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(/| 127.0.0.1 | root |/) }
end

describe command(instance_2_cmd_2) do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(/| localhost | root |/) }
end

# The installed server should report 5.6.x.
describe command(mysqld_cmd) do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(/Ver 5.6/) }
end
|
acoulton/mysql
|
test/cookbooks/mysql_test/recipes/config.rb
|
<filename>test/cookbooks/mysql_test/recipes/config.rb
# Renders hello.conf.erb into the 'default' and 'foo' instances'
# configuration directories.
mysql_config 'hello' do
  instance 'default'
  source 'hello.conf.erb'
  version node['mysql']['version']
  action :create
end

mysql_config 'hello_again' do
  instance 'foo'
  source 'hello.conf.erb'
  version node['mysql']['version']
  action :create
end
|
acoulton/mysql
|
test/cookbooks/mysql_test/recipes/service_single.rb
|
# Installs the MySQL server package, then creates and starts a single
# default service instance.
mysql_server_installation_package 'default' do
  version node['mysql']['version']
  action :install
end

mysql_service_manager 'default' do
  version node['mysql']['version']
  action [:create, :start]
end
|
acoulton/mysql
|
test/integration/installation_server_package-57/run_spec.rb
|
# Platform-specific mysqld server path.
def mysqld_bin
  return '/opt/mysql51/bin/mysqld' if os[:family] =~ /solaris/
  return '/opt/local/bin/mysqld' if os[:family] =~ /smartos/
  '/usr/sbin/mysqld'
end

def mysqld_cmd
  "#{mysqld_bin} --version"
end

# The installed server should report version 5.7.
describe command(mysqld_cmd) do
  its(:exit_status) { should eq 0 }
  its(:stdout) { should match(/Ver 5.7/) }
end
|
icebox827/oop-CR-training
|
example_school_library_decorator/student.rb
|
require './person'
require './classroom'
# A student: a Person who may belong to a Classroom.
class Student < Person
  attr_reader :classroom

  # NOTE(review): callers must pass the classroom as the 2nd positional
  # argument: Student.new(age, classroom, name, parent_permission: true).
  def initialize(age, classroom, name = 'Unknown', parent_permission: true)
    super(age, name)
    @classroom = classroom
    # NOTE(review): @name/@age are presumably already set by
    # Person#initialize via super — confirm; these look redundant.
    @name = name
    @age = age
    @parent_permission = parent_permission
  end

  # Assigns the classroom and registers this student on it (bidirectional
  # association, guarded against double registration).
  def classroom=(classroom)
    @classroom = classroom
    classroom.students.push(self) unless classroom.students.include?(self)
  end

  def play_hooky
    "ยฏ\(ใ)/ยฏ"
  end
end
|
icebox827/oop-CR-training
|
example_school_library_decorator/app.rb
|
<filename>example_school_library_decorator/app.rb
require './student'
require './teacher'
require './book'
require './rental'
# Console application state: registered books, people and rentals, plus
# the interactive commands that operate on them.
class App
  def initialize
    @books = []
    @people = []
    @rentals = []
  end

  # Prints every registered book.
  def list_books
    @books.each do |book|
      puts "Title: \"#{book.title}\", Author: #{book.author}"
    end
  end

  # Prints every registered person with class, id and age.
  def list_people
    @people.each do |person|
      puts "[#{person.class}] Name: #{person.name}, ID: #{person.id}, Age: #{person.age}"
    end
  end

  # rubocop:disable Metrics/MethodLength
  # Interactively creates a Student or Teacher from stdin input.
  def create_person
    print 'Do you want to create a student (1) or a teacher (2)? [Input the number]: '
    person_type = gets.chomp

    if person_type != '1' && person_type != '2'
      puts 'Invalid option'
      return
    end

    print 'Age: '
    age = gets.chomp
    print 'Name: '
    name = gets.chomp

    person =
      case person_type
      when '1'
        print 'Has parent permission? [Y/N]: '
        parent_permission = gets.chomp
        parent_permission = parent_permission.downcase == 'y'
        # BUG FIX: Student#initialize is (age, classroom, name, parent_permission:).
        # The old call Student.new(age, name, parent_permission) put the name
        # into `classroom` and the flag into `name`. No classroom exists at
        # creation time, so pass nil and use the keyword argument.
        Student.new(age, nil, name, parent_permission: parent_permission)
      when '2'
        print 'Specialization: '
        specialization = gets.chomp
        Teacher.new(age, specialization, name)
      end

    @people << person
    puts 'Person created successfully'
  end
  # rubocop:enable Metrics/MethodLength

  # Interactively creates a book from stdin input.
  def create_book
    print 'Title: '
    title = gets.chomp
    print 'Author: '
    author = gets.chomp
    @books << Book.new(title, author)
    puts 'Book created successfully'
  end

  # Interactively creates a rental by picking a book and a person by index.
  def create_rental
    puts 'Select a book from the following list by number'
    @books.each_with_index do |book, index|
      puts "#{index}) Title: \"#{book.title}\", Author: #{book.author}"
    end
    book_index = gets.chomp.to_i

    puts "\nSelect a person from the following list by number (not id)"
    @people.each_with_index do |person, index|
      puts "#{index}) [#{person.class}] Name: #{person.name}, ID: #{person.id}, Age: #{person.age}"
    end
    person_index = gets.chomp.to_i

    print "\nDate: "
    date = gets.chomp
    @rentals << Rental.new(date, @books[book_index], @people[person_index])
    puts 'Rental created successfully'
  end

  # Prints all rentals belonging to the person with the given id.
  def list_rentals_for_person_id
    print 'ID of person: '
    id = gets.chomp.to_i
    rentals = @rentals.filter { |rental| rental.person.id == id }
    puts 'Rentals:'
    rentals.each do |rental|
      puts "Date: #{rental.date}, Book \"#{rental.book.title}\" by #{rental.book.author}"
    end
  end

  # Intentionally a no-op; kept for interface compatibility with main.
  def welcome; end
end
|
icebox827/oop-CR-training
|
example_school_library_decorator/main.rb
|
# rubocop:disable Metrics/CyclomaticComplexity
require './app'
# rubocop:disable Metrics/MethodLength
# Interactive menu loop; dispatches to App commands until the user
# chooses option 7 (exit).
def main
  app = App.new
  response = nil
  puts "Welcome to School Library App!\n\n"

  while response != '7'
    # Typo fix in the user-facing prompt: "enterin" -> "entering".
    puts 'Please choose an option by entering a number:'
    puts '1 - List all books'
    puts '2 - List all people'
    puts '3 - Create a person'
    puts '4 - Create a book'
    puts '5 - Create a rental'
    puts '6 - List all rentals for a given person id'
    puts '7 - Exit'
    response = gets.chomp

    case response
    when '1'
      app.list_books
    when '2'
      app.list_people
    when '3'
      app.create_person
    when '4'
      app.create_book
    when '5'
      app.create_rental
    when '6'
      app.list_rentals_for_person_id
    when '7'
      puts 'Thank you !!!'
    end
    puts "\n"
  end
end
# rubocop:enable Metrics/MethodLength,
# rubocop:disable Style/MethodCallWithoutArgsParentheses
# Program entry point.
main()
# rubocop: enable Style/MethodCallWithoutArgsParentheses
# rubocop:enable Metrics/CyclomaticComplexity
|
vayan/hue-indicator
|
hue.rb
|
require "rubygems"
require "ruby-libappindicator"
require "hue"

# Appends a MenuItem labelled `name` to `parent` whose "activate" signal
# runs the given block.
def add_submenu_activate(name:, parent:)
  sub_menu = Gtk::MenuItem.new name
  sub_menu.signal_connect "activate" do
    yield
  end
  parent.append sub_menu
end

# Tray indicator listing each Hue light with On/Off actions.
ai = AppIndicator::AppIndicator.new("Hue Lights", "gtk-home", AppIndicator::Category::APPLICATION_STATUS);
root_menu = Gtk::Menu.new
client = Hue::Client.new
lights = client.lights

# One submenu per light.
lights.each do |light|
  light_item = Gtk::MenuItem.new light.name
  light_actions = Gtk::Menu.new
  light_item.set_submenu light_actions

  add_submenu_activate name: "On", parent: light_actions do
    light.on!
  end

  add_submenu_activate name: "Off", parent: light_actions do
    light.off!
  end

  root_menu.append(light_item)
end

# Exit entry quits the GTK main loop.
add_submenu_activate name: "exit", parent: root_menu do
  Gtk.main_quit
end

root_menu.show_all
ai.set_menu(root_menu)
ai.set_status(AppIndicator::Status::ACTIVE)
Gtk.main
|
atton-/bind_sdb_with_rails
|
db/migrate/20151009111328_create_reverse_records.rb
|
<filename>db/migrate/20151009111328_create_reverse_records.rb<gh_stars>0
# Creates the reverse_records (PTR) table, each row linked to a Record.
class CreateReverseRecords < ActiveRecord::Migration
  def change
    create_table :reverse_records do |t|
      t.references :record, index: true, foreign_key: true
      t.string :name, default: '', null:false
      t.string :rdata , default: '', null:false
      t.string :rdtype, default: '', null:false
      t.integer :ttl, default: 3600, null: false

      t.timestamps null: false
    end
  end
end
|
atton-/bind_sdb_with_rails
|
config/initializers/constants.rb
|
<filename>config/initializers/constants.rb
# Site-wide DNS constants used by the record models and seeds.
# Frozen: mutable string constants can be corrupted by in-place mutation.
NSServerName = 'name-server'.freeze
IPv4Prefix = '10.100.200.'.freeze
ReverseIPv4 = '200.100.10.in-addr.arpa'.freeze
DomainSuffix = 'hoge.com'.freeze
|
atton-/bind_sdb_with_rails
|
db/schema.rb
|
<filename>db/schema.rb
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# Auto-generated snapshot at migration 20151009111830: records plus their
# forward (A) and reverse (PTR) DNS record tables. Do not hand-edit.
ActiveRecord::Schema.define(version: 20151009111830) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  create_table "forward_records", force: :cascade do |t|
    t.integer "record_id"
    t.string "name", default: "", null: false
    t.string "rdata", default: "", null: false
    t.string "rdtype", default: "", null: false
    t.integer "ttl", default: 3600, null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  add_index "forward_records", ["record_id"], name: "index_forward_records_on_record_id", using: :btree

  create_table "records", force: :cascade do |t|
    t.integer "ip", default: 1, null: false
    t.string "domain", default: "", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "reverse_records", force: :cascade do |t|
    t.integer "record_id"
    t.string "name", default: "", null: false
    t.string "rdata", default: "", null: false
    t.string "rdtype", default: "", null: false
    t.integer "ttl", default: 3600, null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  add_index "reverse_records", ["record_id"], name: "index_reverse_records_on_record_id", using: :btree

  add_foreign_key "forward_records", "records"
  add_foreign_key "reverse_records", "records"
end
|
atton-/bind_sdb_with_rails
|
db/migrate/20151009110357_create_records.rb
|
<gh_stars>0
# Creates the records table: one row per managed host, holding the final
# IPv4 octet (defaults to 1) and the host's domain label.
class CreateRecords < ActiveRecord::Migration
  def change
    create_table :records do |t|
      t.integer :ip, null: false, default: 1
      t.string :domain, null: false, default: ''
      t.timestamps null: false
    end
  end
end
|
atton-/bind_sdb_with_rails
|
db/seeds.rb
|
<filename>db/seeds.rb
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
# Seed the base zone data: NS and SOA rows for both the forward and reverse
# zones, plus one sample host record. NS/SOA rows are zone-level and have no
# owning Record.
soa_rdata = 'hoge.com. hoge.com. 1 28800 7200 2419200 86400'

ForwardRecord.create(rdtype: 'NS', name: DomainSuffix, rdata: NSServerName)
ReverseRecord.create(rdtype: 'NS', name: ReverseIPv4, rdata: NSServerName)
ForwardRecord.create(rdtype: 'SOA', name: DomainSuffix, rdata: soa_rdata)
ReverseRecord.create(rdtype: 'SOA', name: ReverseIPv4, rdata: soa_rdata)
Record.create(ip: 1, domain: 'aaa')
|
atton-/bind_sdb_with_rails
|
app/models/record.rb
|
<gh_stars>0
# A managed host: maps one IPv4 host octet (1..254) under IPv4Prefix to a
# hostname under DomainSuffix. Saving a Record (re)generates its matching
# forward (A) and reverse (PTR) DNS rows; dependent: :destroy removes them
# when the Record goes away or the association is replaced.
class Record < ActiveRecord::Base
  validates_presence_of :ip, :domain
  validates_uniqueness_of :ip, :domain
  validates_inclusion_of :ip, in: 1..254
  has_one :forward_record, dependent: :destroy
  has_one :reverse_record, dependent: :destroy

  # Full IPv4 address of this host as an IPAddr.
  # NOTE(review): IPv4Prefix is defined elsewhere — assumed to be a dotted
  # prefix string ending in '.' (e.g. '192.168.0.'); confirm.
  def ipv4
    IPAddr.new(IPv4Prefix + ip.to_s)
  end

  # Fully qualified domain name, e.g. "aaa.<DomainSuffix>".
  def fqdn
    [domain, DomainSuffix].join('.')
  end

  after_save do
    v4 = ipv4
    # Store an explicit string instead of relying on ActiveRecord coercing
    # the IPAddr, and use the bang variants so a failed dependent create
    # rolls back the whole save rather than silently leaving the host
    # without its DNS rows.
    create_forward_record!(rdtype: 'A', name: fqdn, rdata: v4.to_s)
    create_reverse_record!(rdtype: 'PTR', name: v4.reverse, rdata: fqdn)
  end
end
|
atton-/bind_sdb_with_rails
|
config/routes.rb
|
# Routing: the records CRUD resource is the whole app; "/" shows the index.
Rails.application.routes.draw do
  root 'records#index'
  resources :records
end
|
atton-/bind_sdb_with_rails
|
app/views/records/show.json.jbuilder
|
<filename>app/views/records/show.json.jbuilder
# JSON for a single record: every persisted attribute, timestamps included.
json.extract! @record, :id, :ip, :domain, :created_at, :updated_at
|
atton-/bind_sdb_with_rails
|
app/views/records/index.json.jbuilder
|
<gh_stars>0
# JSON array of all records: core attributes (no timestamps) plus each
# record's canonical JSON URL.
json.array!(@records) do |record|
json.extract! record, :id, :ip, :domain
json.url record_url(record, format: :json)
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.